Compare commits


17 Commits

Author SHA1 Message Date
c7f0a67f17 runners : add synchronisation to unisync_add
unisync_add was missing the synchronisation steps needed to synchronise a
new file (present on the server but only as a link locally).
This adds the two necessary synchronisations: the first brings all the
files up to date so they are not overwritten by the second one, which
forces all the changes from the remote to overwrite the local ones. This
has the effect of replacing the link with the actual file.
2026-01-21 10:33:52 +01:00
23a661107e Merge branch 'backup' into dev
Adds the possibility to use the backup function of unison
2026-01-20 10:59:47 +01:00
cf508eb94c main : pass the backup options to the synchroniser 2026-01-20 10:48:44 +01:00
5ec43f9166 synchroniser : move backup options to init
Moves the backup options from sync_files to init.
The options are needed in links (to ignore the backup folders)
so it is way easier to have them as attributes.
To do this we move everything related to backup into __init__.
Also remove the option from the runner.
2026-01-20 10:33:13 +01:00
cf49ffb8e8 synchroniser : fix broken synchronisation
Append was used instead of extend, which made a list inside of a list
instead of appending the content at the end; fix that.
Convert backup.maxbackups to str as needed for subprocess.
2026-01-09 18:31:00 +01:00
c34d30a006 defaults : switch prefix and suffix
I mixed up the prefix and suffix; fix that.
2026-01-08 14:19:03 +01:00
bb05990464 runners : pass config.backup to sync_files
After adding the backup infrastructure to config and synchroniser the
only thing left to do is pass the BackupConfig to sync_files.
2026-01-08 14:13:14 +01:00
aaa4a8f12c runners : delete unused arguments
Use the del keyword for unused function arguments in runners.
All the runners must have the same signature, but some do not use all of
the provided arguments, so we delete them so the development tools do not
generate warnings.
2026-01-08 14:06:36 +01:00
56da79f124 runners, main : pass the config to the runners
Some of the runners need the configuration to perform their task.
So pass it to all of them and edit the call in main to reflect this
change.
2026-01-08 14:04:05 +01:00
0e8d568fea main : Use pathlib instead of os.path
Removes every use of os.path and replaces it with the equivalent pathlib
method.
This avoids importing os.
2026-01-08 13:46:01 +01:00
2ae9c38627 tests : add some simple code to run a few tests 2026-01-07 23:35:26 +01:00
667c418f09 synchroniser : add backup to sync_files
Adds the option to enable backup when synchronising.
This is done in sync_files by passing the appropriate arguments to sync.
For this we need to add an argument to sync_files as the backup
configuration options are needed.
The configuration options are imported from unisync.config.BackupConfig.
Also import typing.cast to be able to narrow down a type.
2026-01-07 23:32:24 +01:00
f618932584 synchroniser : add arbitrary synchronisation arguments
Add the option to give arbitrary arguments to the unison call.
These arguments must be passed as a list to sync and will be given to
unison as is.
This is a prerequisite for using the backup system of unison as the
arguments for backup will only be given when synchronising the files and
not the links.
2026-01-07 23:27:48 +01:00
f5e455fc79 config, defaults: add configuration for backups
Add configuration options for creating backups during the
synchronisation.
2026-01-05 17:17:41 +01:00
78a4d9df36 gitignore : ignore docs/build
The docs will be added later, but to avoid a mess when switching
between branches, ignore the build folder.
2026-01-04 19:22:04 +01:00
7dd7b57e1f synchroniser : Use a consistent docstring format.
Edit the docstrings so they use a consistent format.
Also add a short module docstring.
2026-01-04 14:31:16 +01:00
b10ed69d59 defaults : change type of MISC_CACHE_DIR_PATH to str
DEFAULT_MISC_CACHE_DIR_PATH was a Path, but the fallbacks of config.get
in config.py will be converted to a string, so make it a string instead
and do the conversion later.
2026-01-04 12:22:21 +01:00
8 changed files with 243 additions and 54 deletions

.gitignore

@@ -1,2 +1,3 @@
 poetry.lock
 __pycache__
+docs/build

config.py

@@ -49,6 +49,18 @@ class UnisonConfig:
     bools: list
     values: dict
 
+@dataclass
+class BackupConfig:
+    """
+    Configuration options relative to backing up the files.
+    """
+    enabled: bool
+    selection: str
+    location: str
+    max_backups: int
+    backupsuffix: str
+    backupprefix: str
+
 @dataclass
 class OtherConfig:
     """
@@ -64,6 +76,7 @@ class Config:
     server: ServerConfig
     roots: RootsConfig
     unison: UnisonConfig
+    backup: BackupConfig
     other: OtherConfig
@@ -81,6 +94,7 @@ def load_config(config_path:str) -> Config:
     # Check if sections are provided
     server_section = "Server" if "Server" in config.sections() else UNNAMED_SECTION
     roots_section = "Roots" if "Roots" in config.sections() else UNNAMED_SECTION
+    backup_section = "Backup"
     other_section = "Other" if "Other" in config.sections() else UNNAMED_SECTION
 
     server_config = ServerConfig(
@@ -94,6 +108,14 @@ def load_config(config_path:str) -> Config:
         config.get(roots_section, "local", fallback=DEFAULT_ROOTS_LOCAL),
         config.get(roots_section, "remote")
     )
+    backup_config = BackupConfig(
+        config.getboolean(backup_section, "enabled", fallback=DEFAULT_BACKUP_ENABLED),
+        config.get(backup_section, "selection", fallback=DEFAULT_BACKUP_SELECTION),
+        config.get(backup_section, "loction", fallback=DEFAULT_BACKUP_LOC),
+        config.getint(backup_section, "max_backups", fallback=DEFAULT_BACKUP_MAX_BACKUPS),
+        config.get(backup_section, "backupsuffix", fallback=DEFAULT_BACKUP_BACKUPSUFFIX),
+        config.get(backup_section, "backupprefix", fallback=DEFAULT_BACKUP_BACKUPPREFIX)
+    )
     other_config = OtherConfig(
         Path(config.get(other_section, "cache_dir_path", fallback=DEFAULT_MISC_CACHE_DIR_PATH)).expanduser()
     )
@@ -110,4 +132,4 @@ def load_config(config_path:str) -> Config:
             args_val[key] = val
     unison_config = UnisonConfig(args_bool, args_val)
 
-    return Config(server_config, roots_config, unison_config, other_config)
+    return Config(server_config, roots_config, unison_config, backup_config, other_config)

defaults.py

@@ -15,4 +15,11 @@ DEFAULT_SERVER_PORT: int = 22
 DEFAULT_ROOTS_LOCAL: str = str(Path("~/files").expanduser())
 # DEFAULT_ROOTS_REMOTE: str = ""
-DEFAULT_MISC_CACHE_DIR_PATH: Path = Path("~/.unisync").expanduser()
+DEFAULT_MISC_CACHE_DIR_PATH: str = "~/.unisync"
+
+DEFAULT_BACKUP_ENABLED: bool = False
+DEFAULT_BACKUP_SELECTION: str = ""
+DEFAULT_BACKUP_LOC: str = "local"
+DEFAULT_BACKUP_MAX_BACKUPS: int = 2
+DEFAULT_BACKUP_BACKUPSUFFIX: str = ".$VERSION.bak"
+DEFAULT_BACKUP_BACKUPPREFIX: str = ".unison_backups/"
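For reference, a minimal sketch of the matching [Backup] section of config.ini; the key names mirror the lookups in load_config above (which reads the backup location under the key loction), while the values are only an illustration based on these defaults:

[Backup]
# Illustrative values only; every key is optional thanks to the fallbacks above.
enabled = true
selection = Name *
loction = local
max_backups = 2
backupsuffix = .$VERSION.bak
backupprefix = .unison_backups/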

main.py

@@ -1,7 +1,6 @@
 # Copyright (C) 2025-2026 Paul Retourné
 # SPDX-License-Identifier: GPL-3.0-or-later
 
-import os
 from pathlib import Path
 
 from unisync.argparser import create_argparser
@@ -14,15 +13,15 @@ def main():
     parser = create_argparser(unisync_sync, unisync_add, unisync_mount)
     cli_args = parser.parse_args()
 
-    config_path = os.path.expanduser("~/.config/unisync/config.ini")
+    config_path: Path = Path("~/.config/unisync/config.ini").expanduser()
     # Check if --config is set
-    if cli_args.config != None and os.path.isfile(cli_args.config):
+    if cli_args.config != None and Path(cli_args.config).is_file():
         config = load_config(cli_args.config)
-    elif os.path.isfile(config_path):
-        config = load_config(config_path)
+    elif config_path.is_file():
+        config = load_config(str(config_path))
     else:
         # TODO replace the next line with something to do if no config file is found
-        config = load_config(config_path)
+        config = load_config(str(config_path))
         pass
 
     # TODO make the command line arguments work and override the config options
@@ -34,12 +33,13 @@ def main():
         config.server.ip if config.server.ip != "" else config.server.hostname,
         config.server.port,
         config.unison.bools,
-        config.unison.values
+        config.unison.values,
+        backup=config.backup
     )
 
     paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)
 
-    cli_args.func(synchroniser, paths_manager)
+    cli_args.func(synchroniser, paths_manager, config)
 
 if __name__ == "__main__":

runners.py

@@ -3,9 +3,9 @@
 from unisync.synchroniser import Synchroniser
 from unisync.paths import PathsManager
+from unisync.config import Config
 
-def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager):
+def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
     if synchroniser.create_ssh_master_connection() != 0:
         print("Connection failed quitting")
         return 1
@@ -21,16 +21,23 @@ def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager):
     synchroniser.close_ssh_master_connection()
 
-def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager):
+def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
+    del config # The function signature must be the same for all runners
     if synchroniser.create_ssh_master_connection() != 0:
         print("Connection failed quitting")
         return 1
     print("Connected to the remote.")
 
+    # TODO config or cli to skip this first sync
+    synchroniser.sync_files(paths_manager.get_paths_to_sync())
     paths_manager.add_files_to_sync()
+    synchroniser.sync_files(paths_manager.get_paths_to_sync(), force=True)
 
     synchroniser.close_ssh_master_connection()
 
-def unisync_mount(synchroniser:Synchroniser, paths_manager:PathsManager):
+def unisync_mount(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
+    del paths_manager # The function signature must be the same for all runners
+    del config # The function signature must be the same for all runners
     synchroniser.mount_remote_dir()
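Since every runner must share this three-argument signature, a custom runner follows the same pattern. A minimal sketch, where the runner name and its body are hypothetical and only the signature and the del convention come from the code above:

from unisync.synchroniser import Synchroniser
from unisync.paths import PathsManager
from unisync.config import Config

def unisync_list(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
    del synchroniser # The function signature must be the same for all runners
    del config # The function signature must be the same for all runners
    # Hypothetical runner: only print the paths currently selected for synchronisation.
    for path in paths_manager.get_paths_to_sync():
        print(path)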

synchroniser.py

@@ -1,6 +1,12 @@
 # Copyright (C) 2025-2026 Paul Retourné
 # SPDX-License-Identifier: GPL-3.0-or-later
 
+"""Exports the Synchroniser class.
+
+This class is used to perform all the actions that require a connection to
+the remote.
+"""
+
 import subprocess
 import os
 import sys
@@ -8,15 +14,47 @@ import time
 import logging
 
 from pathlib import Path
+from typing import cast
 
 from unisync.errors import RemoteMountedError, InvalidMountError
+from unisync.config import BackupConfig
 
 logger = logging.getLogger(__name__)
 
 class Synchroniser:
-    def __init__(self, remote:str, local:str, user:str, ip:str,
-                 port:int=22, args_bool:list=[], args_value:dict={}, ssh_settings:dict={}):
+    """Synchroniser used to synchronise with a server.
+
+    It is used to perform every action needing a connection to the remote.
+    Create an ssh connection.
+    Perform the various synchronisation steps (files, links).
+    Update the links on the remote.
+    Mount the remote directory.
+    Close the ssh connection.
+
+    Attributes:
+        remote: The directory to synchronise to on the remote.
+        local: The directory to synchronise from locally.
+        user: The user on the remote server.
+        ip: The ip of the remote server.
+        port: The ssh port on the remote.
+        args_bool:
+            A list of boolean arguments for unison.
+            They will be passed directly to unison when calling it.
+            For example : auto will be passed as -auto
+        args_value:
+            Same as args_bool but for key value arguments.
+            Will be passed to unison as "-key value".
+        ssh_settings:
+            Settings to pass to the underlying ssh connection.
+            Currently unused.
+    """
+
+    def __init__(self, remote:str, local:str, user:str, ip:str, port:int=22,
+                 args_bool:list=[], args_value:dict={}, ssh_settings:dict={},
+                 backup:BackupConfig | None = None
+                 ):
+        """Initialises an instance of Synchroniser.
+        """
         self.remote_dir:str = remote
         self.local:str = local
         self.args_bool:list[str] = args_bool
@@ -25,16 +63,51 @@ class Synchroniser:
         self.remote_user:str = user
         self.remote_ip:str = ip
         self.remote_port:int = port
+        self.files_extra:list = list()
+        self.links_extra:list = list()
+
+        if(backup != None and backup.enabled):
+            backup = cast(BackupConfig, backup)
+            self.files_extra.append("-backup")
+            if(backup.selection != ""):
+                self.files_extra.append(backup.selection)
+            else:
+                self.files_extra.append("Name *")
+            self.files_extra.extend([
+                "-backuploc",
+                backup.location,
+                "-maxbackups",
+                str(backup.max_backups),
+                "-backupsuffix",
+                backup.backupsuffix,
+                "-backupprefix",
+                backup.backupprefix,
+                "-ignore",
+                f"Name {backup.backupprefix[:-1]}"
+            ])
+            self.links_extra.extend([
+                "-ignore",
+                f"Name {backup.backupprefix[:-1]}"
+            ])
 
     def create_ssh_master_connection(self, control_path:str="~/.ssh/control_%C", connection_timeout:int=60) -> int:
-        """
-        Creates an ssh master connection so the user only has to authenticate once to the remote server.
-        The subsequent connections will be made through this master connection which speeds up connecting.
-        @control_path: Set the location of the ssh control socket
-        @connection_timeout:
-            Time given to the user to authenticate to the remote server.
-            On slow connections one might want to increase this.
-        Returns 0 on success.
-        """
+        """Creates an ssh master connection.
+
+        It is used so the user only has to authenticate once to the remote server.
+        The subsequent connections will be made through this master connection
+        which speeds up connnection.
+        The users only have to enter their password once per synchronisation.
+
+        Args:
+            control_path: Set the location of the ssh control socket
+            connection_timeout:
+                Time given to the user to authenticate to the remote server.
+                On slow connections one might want to increase this.
+
+        Returns:
+            An error code (0 success, 1 TimeoutExpired, 2 KeyboardInterrupt).
+            TODO change that to raising the exception.
+        """
         self.control_path = os.path.expanduser(control_path)
         command = [
@@ -61,8 +134,10 @@
     def close_ssh_master_connection(self) -> int:
-        """
-        Close the ssh master connection.
-        """
+        """Closes the ssh master connection.
+
+        Returns:
+            The return code of the ssh call.
+        """
         command = [
             "/usr/bin/ssh",
@@ -75,39 +150,64 @@
         return close.wait()
 
     def sync_files(self, paths:list, force:bool=False) -> int:
-        """
-        Synchronises the files.
-        """
+        """Synchronises the files.
+
+        Args:
+            paths: List of paths to synchronise.
+            force: Force the changes from remote to local.
+
+        Returns:
+            The return code of sync.
+        """
         return self.sync(
             f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/.data",
             self.local,
             paths=paths,
-            force=force
+            force=force,
+            other=self.files_extra
         )
 
     def sync_links(self, ignore:list) -> int:
-        """
-        Synchronises the links, they must exist already.
-        """
+        """Synchronises the links, they must exist already.
+
+        Args:
+            ignore: List of paths to ignore.
+
+        Returns:
+            The return code of sync.
+        """
         return self.sync(
             f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/links",
             self.local,
-            ignore=ignore
+            ignore=ignore,
+            other=self.links_extra
        )
 
     def sync(self, remote_root:str, local_root:str,
-             paths:list=[], ignore:list=[], force:bool=False) -> int:
-        """
-        Perform the synchronisation by calling unison.
-        @remote_root: The remote root, must be a full root usable by unison.
-        @local_root: The local root, must be a full root usable by unison.
-        @paths: List of paths to synchronise
-        @ignore: List of paths to ignore
-            The paths and everything under them will be ignored.
-            If you need to ignore some specific files use the arguments.
-        @force: Force all changes from remote to local.
-            Used mostly when replacing a link by the file.
-        Returns: the unison return code see section 6.11 of the documentation
-        """
+             paths:list=[], ignore:list=[], force:bool=False,
+             other:list=[]
+             ) -> int:
+        """Performs the synchronisation by calling unison.
+
+        Args:
+            remote_root: The remote root, must be a full root usable by unison.
+            local_root: The local root, must be a full root usable by unison.
+            paths: List of paths to synchronise
+            ignore: List of paths to ignore
+                The paths and everything under them will be ignored.
+                If you need to ignore some specific files use the arguments.
+            force: Force all changes from remote to local.
+                Used mostly when replacing a link by the file.
+            other:
+                Other arguments to add to unison.
+                These arguments will only be used for this sync which is not
+                the case for the ones in self.args_bool and self.args_value.
+                They will be added to the command as is no - in front.
+                For exemple backups are implemented using this argument.
+
+        Returns:
+            the unison return code see section 6.11 of the documentation
+        """
         command = [ "/usr/bin/unison", "-root", remote_root, "-root", local_root ]
         for arg in self.args_bool:
@@ -135,17 +235,21 @@
             command.append(remote_root)
 
         command.append("-batch")
 
+        for arg in other:
+            command.append(arg)
+
         proc = subprocess.Popen(command)
         ret_code = proc.wait()
         return ret_code
 
     def update_links(self, background:bool=True):
-        """
-        Update the links on the remote.
+        """Updates the links on the remote.
+
         First calls cleanlinks to remove deadlinks and empty directories.
         Then calls lndir to create the new links.
 
         Args:
-        - background: controls if the update is done in the background or waited for
+            background: controls if the update is done in the background or waited for.
         """
         link_update_script = (f"cd {self.remote_dir}/links && "
@@ -173,13 +277,14 @@
             print("Done")
 
     def mount_remote_dir(self):
-        """
-        Mount the remote directory to make the local links work.
-        This is achieved using sshfs.
-        Raise:
-        - RemoteMountedError: The .data directory is already a mount point
-        - InvalidMountError: .data is either not a directory or not empty
-        - subprocess.CalledProcessError: An error occured with sshfs
+        """Mounts the remote directory to make the local links work.
+
+        This is achieved using sshfs which may fail.
+
+        Raises:
+            RemoteMountedError: The .data directory is already a mount point.
+            InvalidMountError: .data is either not a directory or not empty.
+            subprocess.CalledProcessError: An error occured with sshfs.
         """
         # Get the absolute path to the correct .data directory resolving symlinks
         path_to_mount:Path = Path(f"{self.local}/../.data").resolve()
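To make the effect of the new backup handling concrete, here is a minimal sketch of the extra unison arguments __init__ builds when backups are enabled. It assumes the unisync package is importable; the remote directory, local directory, user and address are placeholders, which is safe because __init__ only stores them:

from unisync.config import BackupConfig
from unisync.synchroniser import Synchroniser

# Backup settings mirroring the defaults from defaults.py, but with backups enabled.
backup = BackupConfig(
    enabled=True,
    selection="",
    location="local",
    max_backups=2,
    backupsuffix=".$VERSION.bak",
    backupprefix=".unison_backups/",
)

# Placeholder connection details; no connection is made at construction time.
synchroniser = Synchroniser("/srv/unisync", "/home/user/files", "user", "192.0.2.1", backup=backup)

print(synchroniser.files_extra)
# ['-backup', 'Name *', '-backuploc', 'local', '-maxbackups', '2',
#  '-backupsuffix', '.$VERSION.bak', '-backupprefix', '.unison_backups/',
#  '-ignore', 'Name .unison_backups']
print(synchroniser.links_extra)
# ['-ignore', 'Name .unison_backups']

These lists are then appended to the unison command by sync through the new other parameter, via sync_files and sync_links respectively.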

tests/runners.py (new file)

@@ -0,0 +1,8 @@
+# Copyright (C) 2026 Paul Retourné
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from unisync.synchroniser import Synchroniser
+from unisync.paths import PathsManager
+
+def unisync_test(synchroniser:Synchroniser, paths_manager:PathsManager):
+    print("Testing")

tests/test.py (new file)

@@ -0,0 +1,39 @@
+# Copyright (C) 2026 Paul Retourné
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+import os
+from pathlib import Path
+
+from unisync.argparser import create_argparser
+from unisync.runners import unisync_sync, unisync_add, unisync_mount
+from unisync.config import load_config
+from unisync.synchroniser import Synchroniser
+from unisync.paths import *
+
+from runners import *
+
+def main():
+    parser = create_argparser(unisync_test, unisync_add, unisync_mount)
+    cli_args = parser.parse_args()
+
+    config_path = os.path.expanduser("./config.ini")
+    config = load_config(config_path)
+    print(config)
+
+    synchroniser = Synchroniser(
+        config.roots.remote,
+        config.roots.local,
+        config.server.user,
+        config.server.ip if config.server.ip != "" else config.server.hostname,
+        config.server.port,
+        config.unison.bools,
+        config.unison.values
+    )
+
+    paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)
+
+    cli_args.func(synchroniser, paths_manager)
+
+if __name__ == "__main__":
+    main()