Merge branch 'backup' into dev

Adds support for unison's backup feature
2026-01-20 10:56:24 +01:00
7 changed files with 138 additions and 12 deletions
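
For reference, here is a minimal sketch (not part of this commit) of how a [Backup] section in config.ini would be read. The section name, option names, and fallback values mirror the load_config() changes and the new DEFAULT_BACKUP_* defaults below; the sample values themselves are made up for illustration.

import configparser

SAMPLE = """
[Backup]
enabled = true
selection = Name *.txt
max_backups = 5
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE)

# Same lookups as load_config() below, with the DEFAULT_BACKUP_* fallbacks
enabled = config.getboolean("Backup", "enabled", fallback=False)        # True
selection = config.get("Backup", "selection", fallback="")              # "Name *.txt"
location = config.get("Backup", "location", fallback="local")           # "local" (fallback)
max_backups = config.getint("Backup", "max_backups", fallback=2)        # 5
backupsuffix = config.get("Backup", "backupsuffix", fallback=".$VERSION.bak")
backupprefix = config.get("Backup", "backupprefix", fallback=".unison_backups/")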


@@ -49,6 +49,18 @@ class UnisonConfig:
bools: list
values: dict
@dataclass
class BackupConfig:
"""
Configuration options related to backing up files.
"""
enabled: bool
selection: str
location: str
max_backups: int
backupsuffix: str
backupprefix: str
@dataclass
class OtherConfig:
"""
@@ -64,6 +76,7 @@ class Config:
server: ServerConfig
roots: RootsConfig
unison: UnisonConfig
backup: BackupConfig
other: OtherConfig
@@ -81,6 +94,7 @@ def load_config(config_path:str) -> Config:
# Check if sections are provided
server_section = "Server" if "Server" in config.sections() else UNNAMED_SECTION
roots_section = "Roots" if "Roots" in config.sections() else UNNAMED_SECTION
backup_section = "Backup"
other_section = "Other" if "Other" in config.sections() else UNNAMED_SECTION
server_config = ServerConfig(
@@ -94,6 +108,14 @@ def load_config(config_path:str) -> Config:
config.get(roots_section, "local", fallback=DEFAULT_ROOTS_LOCAL),
config.get(roots_section, "remote")
)
backup_config = BackupConfig(
config.getboolean(backup_section, "enabled", fallback=DEFAULT_BACKUP_ENABLED),
config.get(backup_section, "selection", fallback=DEFAULT_BACKUP_SELECTION),
config.get(backup_section, "loction", fallback=DEFAULT_BACKUP_LOC),
config.getint(backup_section, "max_backups", fallback=DEFAULT_BACKUP_MAX_BACKUPS),
config.get(backup_section, "backupsuffix", fallback=DEFAULT_BACKUP_BACKUPSUFFIX),
config.get(backup_section, "backupprefix", fallback=DEFAULT_BACKUP_BACKUPPREFIX)
)
other_config = OtherConfig(
Path(config.get(other_section, "cache_dir_path", fallback=DEFAULT_MISC_CACHE_DIR_PATH)).expanduser()
)
@@ -110,4 +132,4 @@ def load_config(config_path:str) -> Config:
args_val[key] = val
unison_config = UnisonConfig(args_bool, args_val)
return Config(server_config, roots_config, unison_config, other_config)
return Config(server_config, roots_config, unison_config, backup_config, other_config)


@@ -16,3 +16,10 @@ DEFAULT_ROOTS_LOCAL: str = str(Path("~/files").expanduser())
# DEFAULT_ROOTS_REMOTE: str = ""
DEFAULT_MISC_CACHE_DIR_PATH: str = "~/.unisync"
DEFAULT_BACKUP_ENABLED: bool = False
DEFAULT_BACKUP_SELECTION: str = ""
DEFAULT_BACKUP_LOC: str = "local"
DEFAULT_BACKUP_MAX_BACKUPS: int = 2
DEFAULT_BACKUP_BACKUPSUFFIX: str = ".$VERSION.bak"
DEFAULT_BACKUP_BACKUPPREFIX: str = ".unison_backups/"


@@ -33,12 +33,13 @@ def main():
config.server.ip if config.server.ip != "" else config.server.hostname,
config.server.port,
config.unison.bools,
config.unison.values
config.unison.values,
backup=config.backup
)
paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)
cli_args.func(synchroniser, paths_manager)
cli_args.func(synchroniser, paths_manager, config)
if __name__ == "__main__":


@@ -3,9 +3,9 @@
from unisync.synchroniser import Synchroniser
from unisync.paths import PathsManager
from unisync.config import Config
def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager):
def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
if synchroniser.create_ssh_master_connection() != 0:
print("Connection failed quitting")
return 1
@@ -21,7 +21,8 @@ def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager):
synchroniser.close_ssh_master_connection()
def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager):
def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
del config # The function signature must be the same for all runners
if synchroniser.create_ssh_master_connection() != 0:
print("Connection failed quitting")
return 1
@@ -32,5 +33,7 @@ def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager):
synchroniser.close_ssh_master_connection()
def unisync_mount(synchroniser:Synchroniser, paths_manager:PathsManager):
def unisync_mount(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
del paths_manager # The function signature must be the same for all runners
del config # The function signature must be the same for all runners
synchroniser.mount_remote_dir()
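
The del statements above exist because every runner is dispatched through the same call in main(). A minimal sketch (assumed structure, not the project's actual create_argparser) of that pattern:

import argparse

def demo_sync(synchroniser, paths_manager, config):
    print("sync with", config)

def demo_mount(synchroniser, paths_manager, config):
    del paths_manager, config  # unused here, but the signature must match
    print("mount")

parser = argparse.ArgumentParser()
sub = parser.add_subparsers(dest="command")
sub.add_parser("sync").set_defaults(func=demo_sync)
sub.add_parser("mount").set_defaults(func=demo_mount)

args = parser.parse_args(["mount"])
args.func("SYNCHRONISER", "PATHS_MANAGER", "CONFIG")  # one call site for every runner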


@@ -14,8 +14,10 @@ import time
import logging
from pathlib import Path
from typing import cast
from unisync.errors import RemoteMountedError, InvalidMountError
from unisync.config import BackupConfig
logger = logging.getLogger(__name__)
@@ -47,8 +49,10 @@ class Synchroniser:
Currently unused.
"""
def __init__(self, remote:str, local:str, user:str, ip:str,
port:int=22, args_bool:list=[], args_value:dict={}, ssh_settings:dict={}):
def __init__(self, remote:str, local:str, user:str, ip:str, port:int=22,
args_bool:list=[], args_value:dict={}, ssh_settings:dict={},
backup:BackupConfig | None = None
):
"""Initialises an instance of Synchroniser.
"""
self.remote_dir:str = remote
@@ -59,6 +63,34 @@ class Synchroniser:
self.remote_user:str = user
self.remote_ip:str = ip
self.remote_port:int = port
self.files_extra:list = list()
self.links_extra:list = list()
if backup is not None and backup.enabled:
backup = cast(BackupConfig, backup)
self.files_extra.append("-backup")
if backup.selection != "":
self.files_extra.append(backup.selection)
else:
self.files_extra.append("Name *")
self.files_extra.extend([
"-backuploc",
backup.location,
"-maxbackups",
str(backup.max_backups),
"-backupsuffix",
backup.backupsuffix,
"-backupprefix",
backup.backupprefix,
"-ignore",
f"Name {backup.backupprefix[:-1]}"
])
self.links_extra.extend([
"-ignore",
f"Name {backup.backupprefix[:-1]}"
])
def create_ssh_master_connection(self, control_path:str="~/.ssh/control_%C", connection_timeout:int=60) -> int:
"""Creates an ssh master connection.
@@ -127,11 +159,13 @@ class Synchroniser:
Returns:
The return code of sync.
"""
return self.sync(
f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/.data",
self.local,
paths=paths,
force=force
force=force,
other=self.files_extra
)
def sync_links(self, ignore:list) -> int:
@@ -146,11 +180,14 @@ class Synchroniser:
return self.sync(
f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/links",
self.local,
ignore=ignore
ignore=ignore,
other=self.links_extra
)
def sync(self, remote_root:str, local_root:str,
paths:list=[], ignore:list=[], force:bool=False) -> int:
paths:list=[], ignore:list=[], force:bool=False,
other:list=[]
) -> int:
"""Performs the synchronisation by calling unison.
Args:
@@ -162,6 +199,12 @@ class Synchroniser:
If you need to ignore some specific files use the arguments.
force: Force all changes from remote to local.
Used mostly when replacing a link by the file.
other:
Other arguments to pass to unison.
Unlike the ones in self.args_bool and self.args_value, these
arguments are only used for this particular sync call.
They are appended to the command as is; no "-" is prepended.
For example, backups are implemented through this argument.
Returns:
the unison return code; see section 6.11 of the unison documentation
@@ -192,6 +235,9 @@ class Synchroniser:
command.append(remote_root)
command.append("-batch")
for arg in other:
command.append(arg)
proc = subprocess.Popen(command)
ret_code = proc.wait()
return ret_code
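
To make the new backup handling concrete, here is a standalone sketch (not from the repository) of the extra arguments that __init__ builds with the default DEFAULT_BACKUP_* values, and that sync() then appends verbatim to the unison command:

# Mirrors the files_extra / links_extra construction above, assuming the
# default backup settings (empty selection, location "local", 2 backups).
backupprefix = ".unison_backups/"

files_extra = [
    "-backup", "Name *",                     # empty selection falls back to "Name *"
    "-backuploc", "local",
    "-maxbackups", "2",
    "-backupsuffix", ".$VERSION.bak",
    "-backupprefix", backupprefix,
    "-ignore", f"Name {backupprefix[:-1]}",  # keep the backup directory out of the sync
]
links_extra = ["-ignore", f"Name {backupprefix[:-1]}"]

print(" ".join(files_extra))
# -backup Name * -backuploc local -maxbackups 2 -backupsuffix .$VERSION.bak
# -backupprefix .unison_backups/ -ignore Name .unison_backups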

tests/runners.py (new file, 8 lines)

@@ -0,0 +1,8 @@
# Copyright (C) 2026 Paul Retourné
# SPDX-License-Identifier: GPL-3.0-or-later
from unisync.synchroniser import Synchroniser
from unisync.paths import PathsManager
def unisync_test(synchroniser:Synchroniser, paths_manager:PathsManager):
print("Testing")

tests/test.py (new file, 39 lines)

@@ -0,0 +1,39 @@
# Copyright (C) 2026 Paul Retourné
# SPDX-License-Identifier: GPL-3.0-or-later
import os
from pathlib import Path
from unisync.argparser import create_argparser
from unisync.runners import unisync_sync, unisync_add, unisync_mount
from unisync.config import load_config
from unisync.synchroniser import Synchroniser
from unisync.paths import *
from runners import *
def main():
parser = create_argparser(unisync_test, unisync_add, unisync_mount)
cli_args = parser.parse_args()
config_path = os.path.expanduser("./config.ini")
config = load_config(config_path)
print(config)
synchroniser = Synchroniser(
config.roots.remote,
config.roots.local,
config.server.user,
config.server.ip if config.server.ip != "" else config.server.hostname,
config.server.port,
config.unison.bools,
config.unison.values
)
paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)
cli_args.func(synchroniser, paths_manager)
if __name__ == "__main__":
main()