Compare commits
77 Commits
c980dc352a
...
dev
| Author | SHA1 | Date | |
|---|---|---|---|
|
ae0beac9e0
|
|||
|
072c2a26e6
|
|||
|
b0c165b8b0
|
|||
|
6b8686351a
|
|||
|
dcca9c5167
|
|||
|
041ede22e1
|
|||
|
adfded92d0
|
|||
|
7fae1b154a
|
|||
|
3dbd7fc445
|
|||
|
10a79554d3
|
|||
|
f2b676043c
|
|||
|
24bc6bcc94
|
|||
|
7dd01260b3
|
|||
|
2dafcc8c6b
|
|||
|
cbfbb32b86
|
|||
|
942b6c3cef
|
|||
|
a281fab8db
|
|||
|
033de7e7ca
|
|||
|
405e978796
|
|||
|
68c03c18d5
|
|||
|
d0cd6353d7
|
|||
|
9fd70deb9d
|
|||
|
dd042910a9
|
|||
|
fd825f7e87
|
|||
|
c7f0a67f17
|
|||
|
7705731dd5
|
|||
|
a922eaa542
|
|||
|
8836a0120b
|
|||
|
23a661107e
|
|||
|
cf508eb94c
|
|||
|
5ec43f9166
|
|||
|
cf49ffb8e8
|
|||
|
c34d30a006
|
|||
|
bb05990464
|
|||
|
aaa4a8f12c
|
|||
|
56da79f124
|
|||
|
0e8d568fea
|
|||
|
2ae9c38627
|
|||
|
667c418f09
|
|||
|
f618932584
|
|||
|
f5e455fc79
|
|||
|
78a4d9df36
|
|||
|
e639c12c20
|
|||
|
c10077392e
|
|||
|
7dd7b57e1f
|
|||
|
b10ed69d59
|
|||
|
ec8030fc81
|
|||
|
f050dcc94f
|
|||
|
f40a5c9276
|
|||
|
0e80ba0b0d
|
|||
|
a223f04909
|
|||
|
e42ae71862
|
|||
|
58c7f7d1be
|
|||
|
eefb21faff
|
|||
|
941c467fc2
|
|||
|
4dcab777ca
|
|||
|
a169890351
|
|||
|
b70070ba1a
|
|||
|
bd72d740e6
|
|||
|
e43c16adb3
|
|||
|
10200fceb9
|
|||
|
27924013d9
|
|||
|
138bc6d24a
|
|||
|
48179034a7
|
|||
|
f9001ecb9d
|
|||
|
86a6c8acce
|
|||
|
4f6f48247d
|
|||
|
8caba75060
|
|||
|
b35391f1f9
|
|||
|
c5992ef19e
|
|||
|
837cc1bcf4
|
|||
|
87db8a0498
|
|||
|
11513adf48
|
|||
|
aaa4ef61d5
|
|||
|
fec09b6d0b
|
|||
|
2566458e25
|
|||
|
14eb531e4a
|
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,2 +1,4 @@
|
|||||||
poetry.lock
|
poetry.lock
|
||||||
__pycache__
|
__pycache__
|
||||||
|
docs/build
|
||||||
|
dist/
|
||||||
|
|||||||
26
README.md
26
README.md
@@ -1,4 +1,26 @@
|
|||||||
Unisync is a data synchronisation tool written in python and based on [unison](https://github.com/bcpierce00/unison).
|
Unisync is a data synchronisation tool written in python and based on [unison](https://github.com/bcpierce00/unison).
|
||||||
The goal is to be able to keep data synchronised between multiple computers without needing to have all the data kept locally while at the same time being able to access everything.
|
I couldn't find a tool to fulfill the requirements I had for a synchronisation tool so I am creating my own as a wrapper around unison.
|
||||||
|
|
||||||
The development just started so the documentation will be written later.
|
# Prerequisite
|
||||||
|
|
||||||
|
You need to have the following tools installed.
|
||||||
|
|
||||||
|
Locally :
|
||||||
|
- unison
|
||||||
|
- sshfs
|
||||||
|
- nnn
|
||||||
|
|
||||||
|
Remotely :
|
||||||
|
- unison
|
||||||
|
- cleanlinks and lndir (Should be in `xutils-dev` when using apt)
|
||||||
|
|
||||||
|
# Goal
|
||||||
|
|
||||||
|
Unisync purpose is to keep personal data synchronised between multiple machines without needing to have all the data present an all the machines at the same time. For example you might not need to have your movies on your laptop but still want them on your desktop at home or you might want to keep your old pictures only on a server.
|
||||||
|
Unisync requires you to have a "server" (like a NAS at home) that will store all your data allowing you to only copy what you need when you need it.
|
||||||
|
The issue is that you need to know what data is stored on the server to avoid conflict if creating duplicate files or folders. To address this unisync places a symlink for every file you do not wish to keep locally and allows you to mount the remote filesystem (using sshfs) allowing you to access files that aren't synchronised.
|
||||||
|
|
||||||
|
# Developement
|
||||||
|
|
||||||
|
Unisync was at first a simple bash script but as it grew more complex I started struggling to maintain it which is why I am porting it to python. It will make everything more robust, easier to maintain and to add functionalities.
|
||||||
|
I am in the early stages of the developement process, this should be usable someday (hopefully).
|
||||||
|
|||||||
20
docs/Makefile
Normal file
20
docs/Makefile
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Minimal makefile for Sphinx documentation
|
||||||
|
#
|
||||||
|
|
||||||
|
# You can set these variables from the command line, and also
|
||||||
|
# from the environment for the first two.
|
||||||
|
SPHINXOPTS ?=
|
||||||
|
SPHINXBUILD ?= sphinx-build
|
||||||
|
SOURCEDIR = source
|
||||||
|
BUILDDIR = build
|
||||||
|
|
||||||
|
# Put it first so that "make" without argument is like "make help".
|
||||||
|
help:
|
||||||
|
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
|
|
||||||
|
.PHONY: help Makefile
|
||||||
|
|
||||||
|
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||||
|
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||||
|
%: Makefile
|
||||||
|
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
35
docs/make.bat
Normal file
35
docs/make.bat
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
@ECHO OFF
|
||||||
|
|
||||||
|
pushd %~dp0
|
||||||
|
|
||||||
|
REM Command file for Sphinx documentation
|
||||||
|
|
||||||
|
if "%SPHINXBUILD%" == "" (
|
||||||
|
set SPHINXBUILD=sphinx-build
|
||||||
|
)
|
||||||
|
set SOURCEDIR=source
|
||||||
|
set BUILDDIR=build
|
||||||
|
|
||||||
|
%SPHINXBUILD% >NUL 2>NUL
|
||||||
|
if errorlevel 9009 (
|
||||||
|
echo.
|
||||||
|
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||||
|
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||||
|
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||||
|
echo.may add the Sphinx directory to PATH.
|
||||||
|
echo.
|
||||||
|
echo.If you don't have Sphinx installed, grab it from
|
||||||
|
echo.https://www.sphinx-doc.org/
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "" goto help
|
||||||
|
|
||||||
|
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||||
|
goto end
|
||||||
|
|
||||||
|
:help
|
||||||
|
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||||
|
|
||||||
|
:end
|
||||||
|
popd
|
||||||
36
docs/source/conf.py
Normal file
36
docs/source/conf.py
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
# Configuration file for the Sphinx documentation builder.
|
||||||
|
#
|
||||||
|
# For the full list of built-in configuration values, see the documentation:
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||||
|
|
||||||
|
# -- Project information -----------------------------------------------------
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
||||||
|
|
||||||
|
project = 'unisync'
|
||||||
|
copyright = '2026, Paul Retourné'
|
||||||
|
author = 'Paul Retourné'
|
||||||
|
release = '0.1.0'
|
||||||
|
|
||||||
|
# -- General configuration ---------------------------------------------------
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
|
||||||
|
|
||||||
|
extensions = [
|
||||||
|
'sphinx.ext.autodoc',
|
||||||
|
'sphinx.ext.viewcode',
|
||||||
|
'sphinx.ext.napoleon',
|
||||||
|
'sphinx.ext.todo'
|
||||||
|
]
|
||||||
|
|
||||||
|
templates_path = ['_templates']
|
||||||
|
exclude_patterns = []
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for HTML output -------------------------------------------------
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
|
||||||
|
|
||||||
|
#html_theme = 'alabaster'
|
||||||
|
html_theme = 'sphinx_rtd_theme'
|
||||||
|
html_static_path = ['_static']
|
||||||
|
|
||||||
|
autodoc_docstring_signature = True
|
||||||
24
docs/source/example.rst
Normal file
24
docs/source/example.rst
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
.. _example_how_it_works:
|
||||||
|
|
||||||
|
Example of how unisync works
|
||||||
|
============================
|
||||||
|
|
||||||
|
Let's say you have the following structure::
|
||||||
|
|
||||||
|
$ tree .
|
||||||
|
.
|
||||||
|
├── big_file
|
||||||
|
└── folder
|
||||||
|
├── file
|
||||||
|
└── other_file
|
||||||
|
|
||||||
|
If you only want to synchronise `folder` and its content on your laptop the following will be automatically generated::
|
||||||
|
|
||||||
|
$ tree .
|
||||||
|
.
|
||||||
|
├── big_file -> ../.data/big_file
|
||||||
|
└── folder
|
||||||
|
├── file
|
||||||
|
└── other_file
|
||||||
|
|
||||||
|
`big_file` is now a symbolic link and by mounting the remote directory you can still seemlessly access `big_file` through the network.
|
||||||
31
docs/source/index.rst
Normal file
31
docs/source/index.rst
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
.. unisync documentation master file, created by
|
||||||
|
sphinx-quickstart on Sun Jan 4 15:02:58 2026.
|
||||||
|
You can adapt this file completely to your liking, but it should at least
|
||||||
|
contain the root `toctree` directive.
|
||||||
|
|
||||||
|
Documentation for unisync
|
||||||
|
=========================
|
||||||
|
|
||||||
|
Unisync is a data synchronising tool built around `unison`_ and expending on it.
|
||||||
|
|
||||||
|
Unisync tries to solve two problems that are often solved separately but never together :
|
||||||
|
|
||||||
|
* Keeping your data synchronised between multiple machines (through a central server), examples of this are rsync and of course unison.
|
||||||
|
* Being able to access and edit files stored on your server without having to download them, the gui interface of nextcloud for example.
|
||||||
|
* And of course I want to be able to do all of this without ever having to leave my terminal.
|
||||||
|
|
||||||
|
Unisync solves this problem by placing each file on your local machine but with only the selected files and folders being physically present on your drive,
|
||||||
|
the others are replaced by symbolic links pointing to a directory that is mounted from your server.
|
||||||
|
|
||||||
|
See this
|
||||||
|
:ref:`example_how_it_works`.
|
||||||
|
|
||||||
|
.. _unison: https://github.com/bcpierce00/unison
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
:caption: Contents:
|
||||||
|
|
||||||
|
example
|
||||||
|
modules
|
||||||
|
|
||||||
7
docs/source/modules.rst
Normal file
7
docs/source/modules.rst
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
unisync
|
||||||
|
=======
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 4
|
||||||
|
|
||||||
|
unisync
|
||||||
77
docs/source/unisync.rst
Normal file
77
docs/source/unisync.rst
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
unisync package
|
||||||
|
===============
|
||||||
|
|
||||||
|
Submodules
|
||||||
|
----------
|
||||||
|
|
||||||
|
unisync.argparser module
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
.. automodule:: unisync.argparser
|
||||||
|
:members:
|
||||||
|
:show-inheritance:
|
||||||
|
:undoc-members:
|
||||||
|
|
||||||
|
unisync.config module
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
.. automodule:: unisync.config
|
||||||
|
:members:
|
||||||
|
:show-inheritance:
|
||||||
|
:undoc-members:
|
||||||
|
|
||||||
|
unisync.defaults module
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
.. automodule:: unisync.defaults
|
||||||
|
:members:
|
||||||
|
:show-inheritance:
|
||||||
|
:undoc-members:
|
||||||
|
|
||||||
|
unisync.errors module
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
.. automodule:: unisync.errors
|
||||||
|
:members:
|
||||||
|
:show-inheritance:
|
||||||
|
:undoc-members:
|
||||||
|
|
||||||
|
unisync.main module
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
.. automodule:: unisync.main
|
||||||
|
:members:
|
||||||
|
:show-inheritance:
|
||||||
|
:undoc-members:
|
||||||
|
|
||||||
|
unisync.paths module
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
.. automodule:: unisync.paths
|
||||||
|
:members:
|
||||||
|
:show-inheritance:
|
||||||
|
:undoc-members:
|
||||||
|
|
||||||
|
unisync.runners module
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
.. automodule:: unisync.runners
|
||||||
|
:members:
|
||||||
|
:show-inheritance:
|
||||||
|
:undoc-members:
|
||||||
|
|
||||||
|
unisync.synchroniser module
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
.. automodule:: unisync.synchroniser
|
||||||
|
:members:
|
||||||
|
:show-inheritance:
|
||||||
|
:undoc-members:
|
||||||
|
|
||||||
|
Module contents
|
||||||
|
---------------
|
||||||
|
|
||||||
|
.. automodule:: unisync
|
||||||
|
:members:
|
||||||
|
:show-inheritance:
|
||||||
|
:undoc-members:
|
||||||
@@ -10,6 +10,9 @@ requires-python = ">=3.13"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
unisync = "unisync.main:main"
|
||||||
|
|
||||||
[tool.poetry]
|
[tool.poetry]
|
||||||
packages = [{include = "unisync", from = "src"}]
|
packages = [{include = "unisync", from = "src"}]
|
||||||
|
|
||||||
@@ -17,3 +20,12 @@ packages = [{include = "unisync", from = "src"}]
|
|||||||
[build-system]
|
[build-system]
|
||||||
requires = ["poetry-core>=2.0.0,<3.0.0"]
|
requires = ["poetry-core>=2.0.0,<3.0.0"]
|
||||||
build-backend = "poetry.core.masonry.api"
|
build-backend = "poetry.core.masonry.api"
|
||||||
|
|
||||||
|
[dependency-groups]
|
||||||
|
docs = [
|
||||||
|
"sphinx (>=9.1.0,<10.0.0)",
|
||||||
|
"sphinx-rtd-theme (>=3.0.2,<4.0.0)",
|
||||||
|
]
|
||||||
|
dev = [
|
||||||
|
"pylint (>=4.0.4,<5.0.0)"
|
||||||
|
]
|
||||||
|
|||||||
@@ -1,22 +1,36 @@
|
|||||||
# Copyright (C) 2025 Paul Retourné
|
# Copyright (C) 2025-2026 Paul Retourné
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
|
||||||
def create_argparser() -> argparse.ArgumentParser:
|
def create_argparser(sync_function, add_function, mount_function) -> argparse.ArgumentParser:
|
||||||
|
"""
|
||||||
|
Creates an argument parser to parse the command line arguments.
|
||||||
|
We use subparsers and set a default function for each to perform the correct action.
|
||||||
|
"""
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
prog='unisync',
|
prog='unisync',
|
||||||
description='File synchronisation application',
|
description='File synchronisation application',
|
||||||
epilog="""
|
epilog="Copyright © 2025-2026 Paul Retourné.\n"
|
||||||
Copyright © 2025 Paul Retourné.
|
"License GPLv3+: GNU GPL version 3 or later <https://gnu.org/licenses/gpl.html>.",
|
||||||
License GPLv3+: GNU GPL version 3 or later <https://gnu.org/licenses/gpl.html>."""
|
formatter_class=argparse.RawDescriptionHelpFormatter
|
||||||
)
|
)
|
||||||
parser.add_argument("local", nargs="?")
|
parser.add_argument("local", nargs="?")
|
||||||
parser.add_argument("remote", nargs="?")
|
parser.add_argument("remote", nargs="?")
|
||||||
|
parser.set_defaults(func=sync_function)
|
||||||
|
|
||||||
remote_addr_group = parser.add_mutually_exclusive_group()
|
remote_addr_group = parser.add_mutually_exclusive_group()
|
||||||
remote_addr_group.add_argument("--ip")
|
remote_addr_group.add_argument("--ip")
|
||||||
remote_addr_group.add_argument("--hostname")
|
remote_addr_group.add_argument("--hostname")
|
||||||
|
|
||||||
parser.add_argument("--config", help="Path to the configuration file", metavar="path_to_config")
|
parser.add_argument("--config", help="Path to the configuration file", metavar="path_to_config")
|
||||||
|
|
||||||
|
subparsers = parser.add_subparsers(help='Actions other than synchronisation')
|
||||||
|
|
||||||
|
parser_add = subparsers.add_parser('add', help='Add files to be synchronised.')
|
||||||
|
parser_add.set_defaults(func=add_function)
|
||||||
|
|
||||||
|
parser_mount = subparsers.add_parser('mount', help='Mount the remote.')
|
||||||
|
parser_mount.set_defaults(func=mount_function)
|
||||||
|
|
||||||
return parser
|
return parser
|
||||||
|
|||||||
@@ -1,38 +1,84 @@
|
|||||||
# Copyright (C) 2025 Paul Retourné
|
# Copyright (C) 2025-2026 Paul Retourné
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from dataclasses import dataclass, field
|
from configparser import UNNAMED_SECTION
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
import ipaddress
|
import ipaddress
|
||||||
import configparser
|
import configparser
|
||||||
from configparser import UNNAMED_SECTION
|
|
||||||
|
from unisync.defaults import *
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class ServerConfig:
|
class ServerConfig:
|
||||||
|
"""
|
||||||
|
Dataclass keeping the config for connecting to the server
|
||||||
|
"""
|
||||||
user: str
|
user: str
|
||||||
sshargs: list[str] | None = field(default_factory=list)
|
sshargs: str
|
||||||
hostname: str = ""
|
hostname: str
|
||||||
ip: str = ""
|
ip: str
|
||||||
port: int = 22
|
port: int
|
||||||
|
|
||||||
def __post_init__(self):
|
def __post_init__(self):
|
||||||
|
"""
|
||||||
|
Make sure a remote is provided and the ip address is valid
|
||||||
|
"""
|
||||||
if self.ip == "" and self.hostname == "":
|
if self.ip == "" and self.hostname == "":
|
||||||
raise ValueError("A remote must be provided (ip or hostname)")
|
raise ValueError("A remote must be provided (ip or hostname)")
|
||||||
|
|
||||||
if self.ip != "":
|
if self.ip != "":
|
||||||
try:
|
try:
|
||||||
ipaddress.ip_address(self.ip)
|
ipaddress.ip_address(self.ip)
|
||||||
except ValueError:
|
except ValueError as e:
|
||||||
raise ValueError("The provided ip address is invalid")
|
raise ValueError("The provided ip address is invalid") from e
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class RootsConfig:
|
class RootsConfig:
|
||||||
|
"""
|
||||||
|
Dataclass keeping the paths to the roots to synchronise
|
||||||
|
"""
|
||||||
local: str
|
local: str
|
||||||
remote: str
|
remote: str
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class UnisonConfig:
|
||||||
|
"""
|
||||||
|
Dataclass keeping unison specific configurations
|
||||||
|
"""
|
||||||
|
bools: list
|
||||||
|
values: dict
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class BackupConfig:
|
||||||
|
"""
|
||||||
|
Configuration options relative to backing up the files.
|
||||||
|
"""
|
||||||
|
enabled: bool
|
||||||
|
selection: str
|
||||||
|
location: str
|
||||||
|
max_backups: int
|
||||||
|
backupsuffix: str
|
||||||
|
backupprefix: str
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class OtherConfig:
|
||||||
|
"""
|
||||||
|
Dataclass keeping miscellanous configuration options
|
||||||
|
"""
|
||||||
|
cache_dir_path: Path
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class Config:
|
class Config:
|
||||||
|
"""
|
||||||
|
Main dataclass for the configurations
|
||||||
|
"""
|
||||||
server: ServerConfig
|
server: ServerConfig
|
||||||
roots: RootsConfig
|
roots: RootsConfig
|
||||||
|
unison: UnisonConfig
|
||||||
|
backup: BackupConfig
|
||||||
|
other: OtherConfig
|
||||||
|
|
||||||
|
|
||||||
def load_config(config_path:str) -> Config:
|
def load_config(config_path:str) -> Config:
|
||||||
"""
|
"""
|
||||||
@@ -42,22 +88,48 @@ def load_config(config_path:str) -> Config:
|
|||||||
Returns:
|
Returns:
|
||||||
Config: A populated Config object containing the loaded config.
|
Config: A populated Config object containing the loaded config.
|
||||||
"""
|
"""
|
||||||
config = configparser.ConfigParser(allow_unnamed_section=True)
|
config = configparser.ConfigParser(allow_unnamed_section=True, allow_no_value=True)
|
||||||
config.read(config_path)
|
config.read(config_path)
|
||||||
|
|
||||||
# Check if sections are provided
|
# Check if sections are provided
|
||||||
server_section = "Server" if "Server" in config.sections() else UNNAMED_SECTION
|
server_section = "Server" if "Server" in config.sections() else UNNAMED_SECTION
|
||||||
roots_section = "Roots" if "Roots" in config.sections() else UNNAMED_SECTION
|
roots_section = "Roots" if "Roots" in config.sections() else UNNAMED_SECTION
|
||||||
|
backup_section = "Backup"
|
||||||
|
other_section = "Other" if "Other" in config.sections() else UNNAMED_SECTION
|
||||||
|
|
||||||
server_config = ServerConfig(
|
server_config = ServerConfig(
|
||||||
config.get(server_section, "user"),
|
config.get(server_section, "user"),
|
||||||
config.get(server_section, "sshargs", fallback=None),
|
config.get(server_section, "sshargs", fallback=DEFAULT_SERVER_SSHARGS),
|
||||||
config.get(server_section, "hostname", fallback=None),
|
config.get(server_section, "hostname", fallback=DEFAULT_SERVER_HOSTNAME),
|
||||||
config.get(server_section, "ip", fallback=None),
|
config.get(server_section, "ip", fallback=DEFAULT_SERVER_IP),
|
||||||
config.getint(server_section, "port", fallback=None)
|
config.getint(server_section, "port", fallback=DEFAULT_SERVER_PORT)
|
||||||
)
|
)
|
||||||
roots_config = RootsConfig(
|
roots_config = RootsConfig(
|
||||||
config.get(roots_section, "local"),
|
config.get(roots_section, "local", fallback=DEFAULT_ROOTS_LOCAL),
|
||||||
config.get(roots_section, "remote")
|
config.get(roots_section, "remote")
|
||||||
)
|
)
|
||||||
return Config(server_config, roots_config)
|
backup_config = BackupConfig(
|
||||||
|
config.getboolean(backup_section, "enabled", fallback=DEFAULT_BACKUP_ENABLED),
|
||||||
|
config.get(backup_section, "selection", fallback=DEFAULT_BACKUP_SELECTION),
|
||||||
|
config.get(backup_section, "loction", fallback=DEFAULT_BACKUP_LOC),
|
||||||
|
config.getint(backup_section, "max_backups", fallback=DEFAULT_BACKUP_MAX_BACKUPS),
|
||||||
|
config.get(backup_section, "backupsuffix", fallback=DEFAULT_BACKUP_BACKUPSUFFIX),
|
||||||
|
config.get(backup_section, "backupprefix", fallback=DEFAULT_BACKUP_BACKUPPREFIX)
|
||||||
|
)
|
||||||
|
other_config = OtherConfig(
|
||||||
|
Path(config.get(other_section, "cache_dir_path", fallback=DEFAULT_MISC_CACHE_DIR_PATH)).expanduser()
|
||||||
|
)
|
||||||
|
|
||||||
|
args_bool = []
|
||||||
|
args_val = {}
|
||||||
|
if "Unison" in config.sections():
|
||||||
|
for key, val in config.items("Unison"):
|
||||||
|
if key in config["DEFAULT"].keys():
|
||||||
|
continue
|
||||||
|
if val in ("", None):
|
||||||
|
args_bool.append(key)
|
||||||
|
else:
|
||||||
|
args_val[key] = val
|
||||||
|
unison_config = UnisonConfig(args_bool, args_val)
|
||||||
|
|
||||||
|
return Config(server_config, roots_config, unison_config, backup_config, other_config)
|
||||||
|
|||||||
25
src/unisync/defaults.py
Normal file
25
src/unisync/defaults.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# Copyright (c) 2026 paul retourné
|
||||||
|
# spdx-license-identifier: gpl-3.0-or-later
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Commented out values are part of the config but are required so there is no defaults.
|
||||||
|
# This allows this file to be a list of all the config options.
|
||||||
|
|
||||||
|
# DEFAULT_SERVER_USER: str = ""
|
||||||
|
DEFAULT_SERVER_SSHARGS: str = ""
|
||||||
|
DEFAULT_SERVER_HOSTNAME: str = ""
|
||||||
|
DEFAULT_SERVER_IP: str = ""
|
||||||
|
DEFAULT_SERVER_PORT: int = 22
|
||||||
|
|
||||||
|
DEFAULT_ROOTS_LOCAL: str = str(Path("~/files").expanduser())
|
||||||
|
# DEFAULT_ROOTS_REMOTE: str = ""
|
||||||
|
|
||||||
|
DEFAULT_MISC_CACHE_DIR_PATH: str = "~/.unisync"
|
||||||
|
|
||||||
|
DEFAULT_BACKUP_ENABLED: bool = False
|
||||||
|
DEFAULT_BACKUP_SELECTION: str = ""
|
||||||
|
DEFAULT_BACKUP_LOC: str = "local"
|
||||||
|
DEFAULT_BACKUP_MAX_BACKUPS: int = 2
|
||||||
|
DEFAULT_BACKUP_BACKUPSUFFIX: str = ".$VERSION.bak"
|
||||||
|
DEFAULT_BACKUP_BACKUPPREFIX: str = ".unison_backups/"
|
||||||
21
src/unisync/errors.py
Normal file
21
src/unisync/errors.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# Copyright (C) 2025-2026 Paul Retourné
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from typing import NoReturn
|
||||||
|
import sys
|
||||||
|
|
||||||
|
class RemoteMountedError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class InvalidMountError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class UnknownSSHError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class FatalSyncError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def unisync_exit_fatal(reason:str) -> NoReturn:
|
||||||
|
print(reason)
|
||||||
|
sys.exit(1)
|
||||||
@@ -1,22 +1,49 @@
|
|||||||
# Copyright (C) 2025 Paul Retourné
|
# Copyright (C) 2025-2026 Paul Retourné
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
import os
|
from pathlib import Path
|
||||||
from argparser import create_argparser
|
|
||||||
from config import RootsConfig, ServerConfig, Config, load_config
|
from unisync.argparser import create_argparser
|
||||||
|
from unisync.errors import UnknownSSHError, unisync_exit_fatal
|
||||||
|
from unisync.runners import unisync_sync, unisync_add, unisync_mount
|
||||||
|
from unisync.config import load_config
|
||||||
|
from unisync.synchroniser import Synchroniser
|
||||||
|
from unisync.paths import PathsManager
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
parser = create_argparser()
|
parser = create_argparser(unisync_sync, unisync_add, unisync_mount)
|
||||||
base_namespace = parser.parse_args()
|
cli_args = parser.parse_args()
|
||||||
|
|
||||||
config_path = os.path.expanduser("~/.config/unisync/config.ini")
|
config_path: Path = Path("~/.config/unisync/config.ini").expanduser()
|
||||||
if base_namespace.config != None and os.path.isfile(base_namespace.config):
|
# Check if --config is set
|
||||||
config = load_config(base_namespace.config)
|
if cli_args.config is not None and Path(cli_args.config).is_file():
|
||||||
elif os.path.isfile(config_path):
|
config = load_config(cli_args.config)
|
||||||
config = load_config(config_path)
|
elif config_path.is_file():
|
||||||
|
config = load_config(str(config_path))
|
||||||
else:
|
else:
|
||||||
# TODO make the command line arguments work and override the config options
|
# TODO replace the next line with something to do if no config file is found
|
||||||
pass
|
config = load_config(str(config_path))
|
||||||
|
|
||||||
|
# TODO: make the command line arguments work and override the config options
|
||||||
|
|
||||||
|
synchroniser = Synchroniser(
|
||||||
|
config.roots.remote,
|
||||||
|
config.roots.local,
|
||||||
|
config.server.user,
|
||||||
|
config.server.ip if config.server.ip != "" else config.server.hostname,
|
||||||
|
config.server.port,
|
||||||
|
config.unison.bools,
|
||||||
|
config.unison.values,
|
||||||
|
backup=config.backup
|
||||||
|
)
|
||||||
|
|
||||||
|
paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
cli_args.func(synchroniser, paths_manager, config)
|
||||||
|
except UnknownSSHError:
|
||||||
|
unisync_exit_fatal("Connection failed quitting")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|||||||
113
src/unisync/paths.py
Normal file
113
src/unisync/paths.py
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
# Copyright (C) 2025-2026 Paul Retourné
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
import os.path
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
class PathsManager:
|
||||||
|
|
||||||
|
def __init__(self, local_dir:Path, cache_dir:Path):
|
||||||
|
"""
|
||||||
|
Creates a PathsManager with the necessary data
|
||||||
|
Args:
|
||||||
|
local_dir: Path to the top directory of the synchronisation
|
||||||
|
cache_dir: Path to the cache directory that contains the paths file
|
||||||
|
"""
|
||||||
|
if not local_dir.is_dir():
|
||||||
|
raise ValueError("Invalid local directory")
|
||||||
|
self.local_dir = local_dir
|
||||||
|
|
||||||
|
if not cache_dir.is_dir():
|
||||||
|
raise ValueError("Invalid cache directory")
|
||||||
|
self.cache_dir = cache_dir
|
||||||
|
|
||||||
|
self.paths_file:Path = self.cache_dir / "paths"
|
||||||
|
if not self.paths_file.is_file():
|
||||||
|
raise ValueError("The paths file does not exist")
|
||||||
|
|
||||||
|
|
||||||
|
def user_select_files(self, choice_timeout:int=120) -> list[str]:
|
||||||
|
"""
|
||||||
|
Make the user select files in the top directory.
|
||||||
|
Currently uses nnn for the selection.
|
||||||
|
The goal is to replace it in order to avoid using external programs.
|
||||||
|
Args:
|
||||||
|
choice_timeout: Time given to make choices in nnn
|
||||||
|
Returns:
|
||||||
|
list[str]: The list of paths that was selected relative to the top directory
|
||||||
|
Raise:
|
||||||
|
TimeoutExpired: User took too long to choose
|
||||||
|
CalledProcessError: An unknown error occured during the selection
|
||||||
|
"""
|
||||||
|
command = [
|
||||||
|
"/usr/bin/nnn",
|
||||||
|
"-H",
|
||||||
|
"-p", "-",
|
||||||
|
self.local_dir
|
||||||
|
]
|
||||||
|
nnn_process:subprocess.Popen = subprocess.Popen(command, stdout=subprocess.PIPE)
|
||||||
|
try:
|
||||||
|
ret_code = nnn_process.wait(timeout=choice_timeout)
|
||||||
|
except subprocess.TimeoutExpired as e:
|
||||||
|
print("Choice timeout expired", file=sys.stderr)
|
||||||
|
raise e
|
||||||
|
|
||||||
|
if ret_code != 0:
|
||||||
|
print("File selection failed", file=sys.stderr)
|
||||||
|
raise subprocess.CalledProcessError(1, "File selection failed")
|
||||||
|
|
||||||
|
paths_list:list[str] = []
|
||||||
|
while (next_path := nnn_process.stdout.readline()) != b'':
|
||||||
|
next_path = next_path.decode().strip()
|
||||||
|
# Make the path relative to the top directory
|
||||||
|
next_path = next_path[len(str(self.local_dir)):].lstrip("/")
|
||||||
|
paths_list.append(next_path)
|
||||||
|
return paths_list
|
||||||
|
|
||||||
|
def add_files_to_sync(self):
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
paths = self.user_select_files()
|
||||||
|
break
|
||||||
|
except subprocess.TimeoutExpired:
|
||||||
|
if input("Timeout expired do you want to retry (y/n): ") != "y":
|
||||||
|
raise
|
||||||
|
self.write_new_paths(paths)
|
||||||
|
|
||||||
|
def get_paths_to_sync(self) -> list[str]:
|
||||||
|
"""
|
||||||
|
Return the paths to synchronise as list.
|
||||||
|
"""
|
||||||
|
paths:list[str] = self.paths_file.read_text().split("\n")
|
||||||
|
if paths[-1] == "":
|
||||||
|
paths.pop()
|
||||||
|
return paths
|
||||||
|
|
||||||
|
def write_new_paths(self, paths:list[str]):
|
||||||
|
"""
|
||||||
|
Writes a list of new paths to the file
|
||||||
|
"""
|
||||||
|
current_paths = self.get_paths_to_sync()
|
||||||
|
paths_to_add = []
|
||||||
|
# Check if one of the parent is already being synchronised
|
||||||
|
# If so there is no need to add the child path
|
||||||
|
for new_path in paths:
|
||||||
|
is_contained = False
|
||||||
|
for existing in current_paths:
|
||||||
|
common = os.path.commonpath([new_path, existing])
|
||||||
|
if common == existing:
|
||||||
|
is_contained = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not is_contained and new_path not in paths_to_add:
|
||||||
|
paths_to_add.append(new_path)
|
||||||
|
|
||||||
|
with self.paths_file.open("a") as f:
|
||||||
|
for p in paths_to_add:
|
||||||
|
f.write(p + "\n")
|
||||||
|
|
||||||
|
|
||||||
43
src/unisync/runners.py
Normal file
43
src/unisync/runners.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
# Copyright (C) 2025-2026 Paul Retourné
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from unisync.synchroniser import Synchroniser
|
||||||
|
from unisync.paths import PathsManager
|
||||||
|
from unisync.config import Config
|
||||||
|
|
||||||
|
|
||||||
|
def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
    """Run a full synchronisation: files, links, then remote link update."""
    del config  # The function signature must be the same for all runners

    synchroniser.create_ssh_master_connection()
    print("Connected to the remote.")

    file_paths = paths_manager.get_paths_to_sync()
    synchroniser.sync_files(file_paths)
    # Re-read the paths file: the file sync may run for a long time.
    link_paths = paths_manager.get_paths_to_sync()
    synchroniser.sync_links(link_paths)

    # TODO check the config options and do or don't do the following
    synchroniser.update_links()
    #synchroniser.mount_remote_dir()

    synchroniser.close_ssh_master_connection()
|
||||||
|
|
||||||
|
|
||||||
|
def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
    """Let the user add files to the synchronisation and push them remote."""
    del config  # The function signature must be the same for all runners

    synchroniser.create_ssh_master_connection()
    print("Connected to the remote.")

    # TODO config or cli to skip this first sync
    initial_paths = paths_manager.get_paths_to_sync()
    synchroniser.sync_files(initial_paths)

    paths_manager.add_files_to_sync()
    updated_paths = paths_manager.get_paths_to_sync()
    synchroniser.sync_files(updated_paths, force=True)

    synchroniser.close_ssh_master_connection()
|
||||||
|
|
||||||
|
|
||||||
|
def unisync_mount(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
    """Mount the remote data directory locally via sshfs."""
    # The function signature must be the same for all runners
    del paths_manager, config
    synchroniser.mount_remote_dir()
|
||||||
@@ -1,18 +1,60 @@
|
|||||||
# Copyright (C) 2025 Paul Retourné
|
# Copyright (C) 2025-2026 Paul Retourné
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
"""Exports the Synchroniser class.
|
||||||
|
|
||||||
|
This class is used to perform all the actions that require a connection to
|
||||||
|
the remote.
|
||||||
|
"""
|
||||||
|
|
||||||
import subprocess
|
import subprocess
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import cast
|
||||||
|
|
||||||
|
from unisync.errors import RemoteMountedError, InvalidMountError, UnknownSSHError, FatalSyncError
|
||||||
|
from unisync.config import BackupConfig
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class Synchroniser:
|
class Synchroniser:
|
||||||
|
"""Synchroniser used to synchronise with a server.
|
||||||
|
|
||||||
def __init__(self, remote:str, local:str, user:str, ip:str,
|
It is used to perform every action needing a connection to the remote.
|
||||||
port:int=22, args_bool:list=[], args_value:dict={}, ssh_settings:dict={}):
|
Create an ssh connection.
|
||||||
|
Perform the various synchronisation steps (files, links).
|
||||||
|
Update the links on the remote.
|
||||||
|
Mount the remote directory.
|
||||||
|
Close the ssh connection.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
remote: The directory to synchronise to on the remote.
|
||||||
|
local: The directory to synchronise from locally.
|
||||||
|
user: The user on the remote server.
|
||||||
|
ip: The ip of the remote server.
|
||||||
|
port: The ssh port on the remote.
|
||||||
|
args_bool:
|
||||||
|
A list of boolean arguments for unison.
|
||||||
|
They will be passed directly to unison when calling it.
|
||||||
|
For example : auto will be passed as -auto
|
||||||
|
args_value:
|
||||||
|
Same as args_bool but for key value arguments.
|
||||||
|
Will be passed to unison as "-key value".
|
||||||
|
ssh_settings:
|
||||||
|
Settings to pass to the underlying ssh connection.
|
||||||
|
Currently unused.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, remote:str, local:str, user:str, ip:str, port:int=22,
|
||||||
|
args_bool:list=[], args_value:dict={}, ssh_settings:dict={},
|
||||||
|
backup:BackupConfig | None = None
|
||||||
|
):
|
||||||
|
"""Initialises an instance of Synchroniser.
|
||||||
|
"""
|
||||||
self.remote_dir:str = remote
|
self.remote_dir:str = remote
|
||||||
self.local:str = local
|
self.local:str = local
|
||||||
self.args_bool:list[str] = args_bool
|
self.args_bool:list[str] = args_bool
|
||||||
@@ -21,16 +63,56 @@ class Synchroniser:
|
|||||||
self.remote_user:str = user
|
self.remote_user:str = user
|
||||||
self.remote_ip:str = ip
|
self.remote_ip:str = ip
|
||||||
self.remote_port:int = port
|
self.remote_port:int = port
|
||||||
|
self.files_extra:list = list()
|
||||||
|
self.links_extra:list = list()
|
||||||
|
|
||||||
def create_ssh_master_connection(self, control_path:str="~/.ssh/control_%C", connection_timeout:int=60) -> int:
|
if(backup != None and backup.enabled):
|
||||||
"""
|
backup = cast(BackupConfig, backup)
|
||||||
Creates an ssh master connection so the user only has to authenticate once to the remote server.
|
self.files_extra.append("-backup")
|
||||||
The subsequent connections will be made through this master connection which speeds up connecting.
|
if(backup.selection != ""):
|
||||||
@control_path: Set the location of the ssh control socket
|
self.files_extra.append(backup.selection)
|
||||||
@connection_timeout:
|
else:
|
||||||
|
self.files_extra.append("Name *")
|
||||||
|
|
||||||
|
self.files_extra.extend([
|
||||||
|
"-backuploc",
|
||||||
|
backup.location,
|
||||||
|
"-maxbackups",
|
||||||
|
str(backup.max_backups),
|
||||||
|
"-backupsuffix",
|
||||||
|
backup.backupsuffix,
|
||||||
|
"-backupprefix",
|
||||||
|
backup.backupprefix,
|
||||||
|
"-ignore",
|
||||||
|
f"Name {backup.backupprefix[:-1]}"
|
||||||
|
])
|
||||||
|
|
||||||
|
self.links_extra.extend([
|
||||||
|
"-ignore",
|
||||||
|
f"Name {backup.backupprefix[:-1]}"
|
||||||
|
])
|
||||||
|
|
||||||
|
def create_ssh_master_connection(self, control_path:str="~/.ssh/control_%C", connection_timeout:int=60) -> None:
|
||||||
|
"""Creates an ssh master connection.
|
||||||
|
|
||||||
|
It is used so the user only has to authenticate once to the remote server.
|
||||||
|
The subsequent connections will be made through this master connection
|
||||||
|
which speeds up connnection.
|
||||||
|
The users only have to enter their password once per synchronisation.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
control_path: Set the location of the ssh control socket
|
||||||
|
connection_timeout:
|
||||||
Time given to the user to authenticate to the remote server.
|
Time given to the user to authenticate to the remote server.
|
||||||
On slow connections one might want to increase this.
|
On slow connections one might want to increase this.
|
||||||
Returns 0 on success.
|
|
||||||
|
Raises:
|
||||||
|
subprocess.TimeoutExpired:
|
||||||
|
The user didn't finish loging in in time.
|
||||||
|
KeyboardInterrupt:
|
||||||
|
The user interrupted the process.
|
||||||
|
UnknownSSHError:
|
||||||
|
An error occured during the connection.
|
||||||
"""
|
"""
|
||||||
self.control_path = os.path.expanduser(control_path)
|
self.control_path = os.path.expanduser(control_path)
|
||||||
command = [
|
command = [
|
||||||
@@ -42,23 +124,25 @@ class Synchroniser:
|
|||||||
"-p", str(self.remote_port)
|
"-p", str(self.remote_port)
|
||||||
]
|
]
|
||||||
master_ssh = subprocess.Popen(command)
|
master_ssh = subprocess.Popen(command)
|
||||||
|
# TODO: Raise an exception instead of changing the return value
|
||||||
try:
|
try:
|
||||||
ret_code = master_ssh.wait(timeout=connection_timeout)
|
ret_code = master_ssh.wait(timeout=connection_timeout)
|
||||||
except subprocess.TimeoutExpired:
|
except subprocess.TimeoutExpired as e:
|
||||||
print("Time to login expired", file=sys.stderr)
|
print("Time to login expired", file=sys.stderr)
|
||||||
return 1
|
raise e
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt as e:
|
||||||
return 2
|
raise e
|
||||||
|
|
||||||
if ret_code != 0:
|
if ret_code != 0:
|
||||||
print("Login to remote failed", file=sys.stderr)
|
print("Login to remote failed", file=sys.stderr)
|
||||||
return ret_code
|
raise UnknownSSHError
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
def close_ssh_master_connection(self) -> int:
|
def close_ssh_master_connection(self) -> int:
|
||||||
"""
|
"""Closes the ssh master connection.
|
||||||
Close the ssh master connection.
|
|
||||||
|
Returns:
|
||||||
|
The return code of the ssh call.
|
||||||
"""
|
"""
|
||||||
command = [
|
command = [
|
||||||
"/usr/bin/ssh",
|
"/usr/bin/ssh",
|
||||||
@@ -70,40 +154,68 @@ class Synchroniser:
|
|||||||
close = subprocess.Popen(command)
|
close = subprocess.Popen(command)
|
||||||
return close.wait()
|
return close.wait()
|
||||||
|
|
||||||
def sync_files(self, paths:list, force:bool=False) -> None:
    """Synchronises the files.

    Args:
        paths: List of paths to synchronise.
        force: Force the changes from remote to local.

    Raises:
        FatalSyncError: A fatal error occured during the synchronisation.
    """
    remote_root = f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/.data"
    self.sync(
        remote_root,
        self.local,
        paths=paths,
        force=force,
        other=self.files_extra,
    )
||||||
|
|
||||||
def sync_links(self, ignore:list) -> None:
    """Synchronises the links, they must exist already.

    Args:
        ignore: List of paths to ignore.

    Raises:
        FatalSyncError: A fatal error occured during the synchronisation.
    """
    remote_root = f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/links"
    self.sync(
        remote_root,
        self.local,
        ignore=ignore,
        other=self.links_extra,
    )
||||||
|
|
||||||
def sync(self, remote_root:str, local_root:str,
|
def sync(self, remote_root:str, local_root:str,
|
||||||
paths:list=[], ignore:list=[], force:bool=False) -> int:
|
paths:list=[], ignore:list=[], force:bool=False,
|
||||||
"""
|
other:list=[]
|
||||||
Perform the synchronisation by calling unison.
|
) -> None:
|
||||||
@remote_root: The remote root, must be a full root usable by unison.
|
"""Performs the synchronisation by calling unison.
|
||||||
@local_root: The local root, must be a full root usable by unison.
|
|
||||||
@paths: List of paths to synchronise
|
Args:
|
||||||
@ignore: List of paths to ignore
|
remote_root: The remote root, must be a full root usable by unison.
|
||||||
|
local_root: The local root, must be a full root usable by unison.
|
||||||
|
paths: List of paths to synchronise
|
||||||
|
ignore: List of paths to ignore
|
||||||
The paths and everything under them will be ignored.
|
The paths and everything under them will be ignored.
|
||||||
If you need to ignore some specific files use the arguments.
|
If you need to ignore some specific files use the arguments.
|
||||||
@force: Force all changes from remote to local.
|
force: Force all changes from remote to local.
|
||||||
Used mostly when replacing a link by the file.
|
Used mostly when replacing a link by the file.
|
||||||
Returns: the unison return code see section 6.11 of the documentation
|
other:
|
||||||
|
Other arguments to add to unison.
|
||||||
|
These arguments will only be used for this sync which is not
|
||||||
|
the case for the ones in self.args_bool and self.args_value.
|
||||||
|
They will be added to the command as is no - in front.
|
||||||
|
For exemple backups are implemented using this argument.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
FatalSyncError:
|
||||||
|
If unison returns 3 it means either a fatal error occured or the synchronisation
|
||||||
|
was interrupted.
|
||||||
|
If this happens propagate the error to unisync.
|
||||||
"""
|
"""
|
||||||
command = [ "/usr/bin/unison", "-root", remote_root, "-root", local_root ]
|
command = [ "/usr/bin/unison", "-root", remote_root, "-root", local_root ]
|
||||||
for arg in self.args_bool:
|
for arg in self.args_bool:
|
||||||
@@ -113,6 +225,7 @@ class Synchroniser:
|
|||||||
command.append(value)
|
command.append(value)
|
||||||
|
|
||||||
sshargs = f"-p {self.remote_port} "
|
sshargs = f"-p {self.remote_port} "
|
||||||
|
sshargs += f"-S {self.control_path} "
|
||||||
for arg, value in self.ssh_settings.items():
|
for arg, value in self.ssh_settings.items():
|
||||||
sshargs += arg + " " + value + " "
|
sshargs += arg + " " + value + " "
|
||||||
command.append("-sshargs")
|
command.append("-sshargs")
|
||||||
@@ -127,12 +240,76 @@ class Synchroniser:
|
|||||||
command.append(f"BelowPath {path}")
|
command.append(f"BelowPath {path}")
|
||||||
|
|
||||||
if force:
|
if force:
|
||||||
command.append("-force")
|
command.append("-prefer")
|
||||||
command.append(remote_root)
|
command.append(remote_root)
|
||||||
command.append("-batch")
|
command.append("-batch")
|
||||||
|
|
||||||
print(command)
|
for arg in other:
|
||||||
|
command.append(arg)
|
||||||
|
|
||||||
proc = subprocess.Popen(command)
|
proc = subprocess.Popen(command)
|
||||||
ret_code = proc.wait()
|
ret_code = proc.wait()
|
||||||
return ret_code
|
if ret_code == 3:
|
||||||
|
raise FatalSyncError("Synchronisation could not be completed")
|
||||||
|
|
||||||
|
def update_links(self, background:bool=True):
    """Updates the links on the remote.

    First calls cleanlinks to remove dead links and empty directories,
    then calls lndir to create the new links.

    Args:
        background: When True the update is detached on the remote with
            nohup and this call returns immediately; when False the ssh
            call blocks until the remote script finishes.
    """
    link_update_script = (f"cd {self.remote_dir}/links && "
                          "cleanlinks && "
                          "lndir -withrevinfo -ignorelinks -silent ../.data .;")

    if background:
        # Detach the script on the remote so ssh returns right away.
        remote_command = f"nohup bash -c \"{link_update_script}\" > /dev/null 2>&1 < /dev/null &"
    else:
        remote_command = link_update_script

    command = [
        "/usr/bin/ssh",
        "-S", self.control_path,
        f"{self.remote_user}@{self.remote_ip}",
        "-p", str(self.remote_port),
        remote_command
    ]

    if not background:
        print("Starting links update.")

    # subprocess.run already blocks until the ssh process exits, so no
    # extra wait is needed. BUG FIX: the previous code called .wait() on
    # the CompletedProcess returned by subprocess.run, which has no such
    # method and raised AttributeError in foreground mode (and printed
    # "Starting" only after the work was already done).
    subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

    if not background:
        print("Done")
||||||
|
|
||||||
|
def mount_remote_dir(self):
    """Mounts the remote directory to make the local links work.

    This is achieved using sshfs which may fail.

    Raises:
        RemoteMountedError: The .data directory is already a mount point.
        InvalidMountError: .data is either not a directory or not empty.
        subprocess.CalledProcessError: An error occured with sshfs.
    """
    # Get the absolute path to the correct .data directory resolving symlinks
    path_to_mount:Path = Path(f"{self.local}/../.data").resolve()

    if path_to_mount.is_mount():
        raise RemoteMountedError

    # The mount point must be an existing, empty directory.
    if not path_to_mount.is_dir() or any(path_to_mount.iterdir()):
        raise InvalidMountError

    command = [
        "/usr/bin/sshfs",
        # BUG FIX: this option was a plain string, so sshfs received the
        # literal text "{self.control_path}" instead of the socket path.
        "-o", f"ControlPath={self.control_path}",
        "-o", "ServerAliveInterval=15",
        "-p", str(self.remote_port),
        f"{self.remote_user}@{self.remote_ip}:{self.remote_dir}/.data",
        str(path_to_mount)
    ]

    completed_process = subprocess.run(command)
    completed_process.check_returncode()
|
||||||
|
|||||||
8
tests/runners.py
Normal file
8
tests/runners.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# Copyright (C) 2026 Paul Retourné
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from unisync.synchroniser import Synchroniser
|
||||||
|
from unisync.paths import PathsManager
|
||||||
|
|
||||||
|
def unisync_test(synchroniser:Synchroniser, paths_manager:PathsManager, config=None):
    """Dummy runner used to test the CLI dispatch.

    The real runners all take (synchroniser, paths_manager, config); this
    one now accepts the same shape for consistency. ``config`` defaults to
    None so existing two-argument callers keep working.
    """
    del synchroniser, paths_manager, config  # unused: this runner only prints
    print("Testing")
||||||
39
tests/test.py
Normal file
39
tests/test.py
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# Copyright (C) 2026 Paul Retourné
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from unisync.argparser import create_argparser
|
||||||
|
from unisync.runners import unisync_sync, unisync_add, unisync_mount
|
||||||
|
from unisync.config import load_config
|
||||||
|
from unisync.synchroniser import Synchroniser
|
||||||
|
from unisync.paths import *
|
||||||
|
|
||||||
|
from runners import *
|
||||||
|
|
||||||
|
def main():
    """Entry point of the manual test script.

    Parses the CLI arguments, loads the config from the working
    directory and dispatches to the runner selected on the command line.
    """
    parser = create_argparser(unisync_test, unisync_add, unisync_mount)
    cli_args = parser.parse_args()

    # Config is read from the current working directory, not a user
    # config dir — this script is meant to be run from the test folder.
    config_path = os.path.expanduser("./config.ini")
    config = load_config(config_path)

    print(config)

    synchroniser = Synchroniser(
        config.roots.remote,
        config.roots.local,
        config.server.user,
        # Prefer the explicit ip; fall back to the hostname when unset.
        config.server.ip if config.server.ip != "" else config.server.hostname,
        config.server.port,
        config.unison.bools,
        config.unison.values
    )

    paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)

    # NOTE(review): the imported runners unisync_add/unisync_mount take a
    # third `config` argument, so dispatching to them with two arguments
    # would raise TypeError — confirm which runners this script supports.
    cli_args.func(synchroniser, paths_manager)
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
Reference in New Issue
Block a user