Compare commits: 7705731dd5...dev (41 commits)

.gitignore (vendored): 1 line changed
@@ -1,3 +1,4 @@
 poetry.lock
 __pycache__
 docs/build
+dist/

@@ -26,4 +26,6 @@ docs = [
     "sphinx (>=9.1.0,<10.0.0)",
     "sphinx-rtd-theme (>=3.0.2,<4.0.0)",
 ]
+dev = [
+    "pylint (>=4.0.4,<5.0.0)"
+]

@@ -2,7 +2,7 @@
 # SPDX-License-Identifier: GPL-3.0-or-later
 
 from configparser import UNNAMED_SECTION
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from pathlib import Path
 import ipaddress
 import configparser
@@ -30,8 +30,8 @@ class ServerConfig:
         if self.ip != "":
             try:
                 ipaddress.ip_address(self.ip)
-            except ValueError:
-                raise ValueError("The provided ip address is invalid")
+            except ValueError as e:
+                raise ValueError("The provided ip address is invalid") from e
 
 @dataclass
 class RootsConfig:
@@ -49,6 +49,18 @@ class UnisonConfig:
     bools: list
     values: dict
 
+@dataclass
+class BackupConfig:
+    """
+    Configuration options relative to backing up the files.
+    """
+    enabled: bool
+    selection: str
+    location: str
+    max_backups: int
+    backupsuffix: str
+    backupprefix: str
+
 @dataclass
 class OtherConfig:
     """
@@ -64,6 +76,7 @@ class Config:
     server: ServerConfig
     roots: RootsConfig
     unison: UnisonConfig
+    backup: BackupConfig
     other: OtherConfig
 
 
@@ -81,6 +94,7 @@ def load_config(config_path:str) -> Config:
     # Check if sections are provided
     server_section = "Server" if "Server" in config.sections() else UNNAMED_SECTION
     roots_section = "Roots" if "Roots" in config.sections() else UNNAMED_SECTION
+    backup_section = "Backup"
     other_section = "Other" if "Other" in config.sections() else UNNAMED_SECTION
 
     server_config = ServerConfig(
@@ -94,20 +108,28 @@ def load_config(config_path:str) -> Config:
         config.get(roots_section, "local", fallback=DEFAULT_ROOTS_LOCAL),
         config.get(roots_section, "remote")
     )
+    backup_config = BackupConfig(
+        config.getboolean(backup_section, "enabled", fallback=DEFAULT_BACKUP_ENABLED),
+        config.get(backup_section, "selection", fallback=DEFAULT_BACKUP_SELECTION),
+        config.get(backup_section, "loction", fallback=DEFAULT_BACKUP_LOC),
+        config.getint(backup_section, "max_backups", fallback=DEFAULT_BACKUP_MAX_BACKUPS),
+        config.get(backup_section, "backupsuffix", fallback=DEFAULT_BACKUP_BACKUPSUFFIX),
+        config.get(backup_section, "backupprefix", fallback=DEFAULT_BACKUP_BACKUPPREFIX)
+    )
     other_config = OtherConfig(
         Path(config.get(other_section, "cache_dir_path", fallback=DEFAULT_MISC_CACHE_DIR_PATH)).expanduser()
     )
 
-    args_bool = list()
-    args_val = dict()
+    args_bool = []
+    args_val = {}
     if "Unison" in config.sections():
         for key, val in config.items("Unison"):
             if key in config["DEFAULT"].keys():
                 continue
-            elif val == "" or val == None:
+            if val in ("", None):
                 args_bool.append(key)
             else:
                 args_val[key] = val
     unison_config = UnisonConfig(args_bool, args_val)
 
-    return Config(server_config, roots_config, unison_config, other_config)
+    return Config(server_config, roots_config, unison_config, backup_config, other_config)

@@ -1,4 +1,4 @@
-# copyright (c) 2026 paul retourné
+# Copyright (c) 2026 paul retourné
 # spdx-license-identifier: gpl-3.0-or-later
 
 from pathlib import Path
@@ -16,3 +16,10 @@ DEFAULT_ROOTS_LOCAL: str = str(Path("~/files").expanduser())
 # DEFAULT_ROOTS_REMOTE: str = ""
 
 DEFAULT_MISC_CACHE_DIR_PATH: str = "~/.unisync"
+
+DEFAULT_BACKUP_ENABLED: bool = False
+DEFAULT_BACKUP_SELECTION: str = ""
+DEFAULT_BACKUP_LOC: str = "local"
+DEFAULT_BACKUP_MAX_BACKUPS: int = 2
+DEFAULT_BACKUP_BACKUPSUFFIX: str = ".$VERSION.bak"
+DEFAULT_BACKUP_BACKUPPREFIX: str = ".unison_backups/"

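To make the new Backup options concrete, here is a minimal sketch of a config.ini [Backup] section as load_config reads it in this revision; the values are illustrative and any omitted key falls back to the DEFAULT_BACKUP_* constants above. Note that this revision looks the backup location up under the key "loction" rather than "location".

[Backup]
enabled = yes
selection = Name *.txt
loction = local
max_backups = 2
backupsuffix = .$VERSION.bak
backupprefix = .unison_backups/
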
@@ -1,8 +1,21 @@
 # Copyright (C) 2025-2026 Paul Retourné
 # SPDX-License-Identifier: GPL-3.0-or-later
 
-class RemoteMountedError(BaseException):
+from typing import NoReturn
+import sys
+
+
+class RemoteMountedError(Exception):
     pass
 
-class InvalidMountError(BaseException):
+class InvalidMountError(Exception):
     pass
+
+
+class UnknownSSHError(Exception):
+    pass
+
+
+class FatalSyncError(Exception):
+    pass
+
+
+def unisync_exit_fatal(reason:str) -> NoReturn:
+    print(reason)
+    sys.exit(1)

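A short aside on the switch from BaseException to Exception above, illustrated with a sketch of my own rather than code from the repository: broad handlers written as except Exception only catch Exception subclasses, so the custom errors previously slipped past ordinary error handling the way KeyboardInterrupt and SystemExit do.

from unisync.errors import RemoteMountedError

# Illustration only: this generic handler now catches RemoteMountedError because it
# derives from Exception; a BaseException subclass would propagate past it.
try:
    raise RemoteMountedError("remote is already mounted")  # hypothetical message
except Exception as err:
    print(f"handled: {err}")
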
@@ -1,29 +1,28 @@
 # Copyright (C) 2025-2026 Paul Retourné
 # SPDX-License-Identifier: GPL-3.0-or-later
 
-import os
 from pathlib import Path
 
 from unisync.argparser import create_argparser
+from unisync.errors import UnknownSSHError, unisync_exit_fatal
 from unisync.runners import unisync_sync, unisync_add, unisync_mount
 from unisync.config import load_config
 from unisync.synchroniser import Synchroniser
-from unisync.paths import *
+from unisync.paths import PathsManager
 
 def main():
     parser = create_argparser(unisync_sync, unisync_add, unisync_mount)
     cli_args = parser.parse_args()
 
-    config_path = os.path.expanduser("~/.config/unisync/config.ini")
+    config_path: Path = Path("~/.config/unisync/config.ini").expanduser()
     # Check if --config is set
-    if cli_args.config != None and os.path.isfile(cli_args.config):
+    if cli_args.config is not None and Path(cli_args.config).is_file():
         config = load_config(cli_args.config)
-    elif os.path.isfile(config_path):
-        config = load_config(config_path)
+    elif config_path.is_file():
+        config = load_config(str(config_path))
     else:
-        # TODO: replace the next line with something to do if no config file is found
-        config = load_config(config_path)
-        pass
+        # TODO replace the next line with something to do if no config file is found
+        config = load_config(str(config_path))
 
     # TODO: make the command line arguments work and override the config options
 
@@ -34,12 +33,16 @@ def main():
         config.server.ip if config.server.ip != "" else config.server.hostname,
         config.server.port,
         config.unison.bools,
-        config.unison.values
+        config.unison.values,
+        backup=config.backup
     )
 
     paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)
 
-    cli_args.func(synchroniser, paths_manager)
+    try:
+        cli_args.func(synchroniser, paths_manager, config)
+    except UnknownSSHError:
+        unisync_exit_fatal("Connection failed quitting")
 
 
 if __name__ == "__main__":

@@ -92,7 +92,7 @@ class PathsManager:
         Writes a list of new paths to the file
         """
         current_paths = self.get_paths_to_sync()
-        paths_to_add = list()
+        paths_to_add = []
         # Check if one of the parent is already being synchronised
         # If so there is no need to add the child path
         for new_path in paths:

@@ -3,34 +3,41 @@
 
 from unisync.synchroniser import Synchroniser
 from unisync.paths import PathsManager
+from unisync.config import Config
 
 
-def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager):
-    if synchroniser.create_ssh_master_connection() != 0:
-        print("Connection failed quitting")
-        return 1
+def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
+    del config # The function signature must be the same for all runners
+
+    synchroniser.create_ssh_master_connection()
     print("Connected to the remote.")
 
     synchroniser.sync_files(paths_manager.get_paths_to_sync())
     synchroniser.sync_links(paths_manager.get_paths_to_sync())
 
-    # TODO: check the config options and do or don't do the following
+    # TODO check the config options and do or don't do the following
     synchroniser.update_links()
     #synchroniser.mount_remote_dir()
 
     synchroniser.close_ssh_master_connection()
 
 
-def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager):
-    if synchroniser.create_ssh_master_connection() != 0:
-        print("Connection failed quitting")
-        return 1
+def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
+    del config # The function signature must be the same for all runners
+
+    synchroniser.create_ssh_master_connection()
     print("Connected to the remote.")
 
+    # TODO config or cli to skip this first sync
+    synchroniser.sync_files(paths_manager.get_paths_to_sync())
+
     paths_manager.add_files_to_sync()
+    synchroniser.sync_files(paths_manager.get_paths_to_sync(), force=True)
 
     synchroniser.close_ssh_master_connection()
 
 
-def unisync_mount(synchroniser:Synchroniser, paths_manager:PathsManager):
+def unisync_mount(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
+    del paths_manager # The function signature must be the same for all runners
+    del config # The function signature must be the same for all runners
     synchroniser.mount_remote_dir()

@@ -14,8 +14,10 @@ import time
 import logging
 
 from pathlib import Path
+from typing import cast
 
-from unisync.errors import RemoteMountedError, InvalidMountError
+from unisync.errors import RemoteMountedError, InvalidMountError, UnknownSSHError, FatalSyncError
+from unisync.config import BackupConfig
 
 logger = logging.getLogger(__name__)
 
@@ -47,8 +49,10 @@ class Synchroniser:
     Currently unused.
     """
 
-    def __init__(self, remote:str, local:str, user:str, ip:str,
-            port:int=22, args_bool:list=[], args_value:dict={}, ssh_settings:dict={}):
+    def __init__(self, remote:str, local:str, user:str, ip:str, port:int=22,
+            args_bool:list=[], args_value:dict={}, ssh_settings:dict={},
+            backup:BackupConfig | None = None
+            ):
         """Initialises an instance of Synchroniser.
         """
         self.remote_dir:str = remote
@@ -59,8 +63,36 @@ class Synchroniser:
         self.remote_user:str = user
         self.remote_ip:str = ip
         self.remote_port:int = port
+        self.files_extra:list = list()
+        self.links_extra:list = list()
+
+        if(backup != None and backup.enabled):
+            backup = cast(BackupConfig, backup)
+            self.files_extra.append("-backup")
+            if(backup.selection != ""):
+                self.files_extra.append(backup.selection)
+            else:
+                self.files_extra.append("Name *")
+
+            self.files_extra.extend([
+                "-backuploc",
+                backup.location,
+                "-maxbackups",
+                str(backup.max_backups),
+                "-backupsuffix",
+                backup.backupsuffix,
+                "-backupprefix",
+                backup.backupprefix,
+                "-ignore",
+                f"Name {backup.backupprefix[:-1]}"
+            ])
+
+            self.links_extra.extend([
+                "-ignore",
+                f"Name {backup.backupprefix[:-1]}"
+            ])
 
-    def create_ssh_master_connection(self, control_path:str="~/.ssh/control_%C", connection_timeout:int=60) -> int:
+    def create_ssh_master_connection(self, control_path:str="~/.ssh/control_%C", connection_timeout:int=60) -> None:
         """Creates an ssh master connection.
 
         It is used so the user only has to authenticate once to the remote server.
@@ -73,8 +105,14 @@ class Synchroniser:
             connection_timeout:
                 Time given to the user to authenticate to the remote server.
                 On slow connections one might want to increase this.
-        Returns:
-            An error code (0 success, 1 TimeoutExpired, 2 KeyboardInterrupt).
+
+        Raises:
+            subprocess.TimeoutExpired:
+                The user didn't finish loging in in time.
+            KeyboardInterrupt:
+                The user interrupted the process.
+            UnknownSSHError:
+                An error occured during the connection.
         """
         self.control_path = os.path.expanduser(control_path)
         command = [
@@ -89,16 +127,15 @@ class Synchroniser:
         # TODO: Raise an exception instead of changing the return value
         try:
            ret_code = master_ssh.wait(timeout=connection_timeout)
-        except subprocess.TimeoutExpired:
+        except subprocess.TimeoutExpired as e:
             print("Time to login expired", file=sys.stderr)
-            return 1
-        except KeyboardInterrupt:
-            return 2
+            raise e
+        except KeyboardInterrupt as e:
+            raise e
 
         if ret_code != 0:
             print("Login to remote failed", file=sys.stderr)
-            return ret_code
-        return 0
+            raise UnknownSSHError
 
 
     def close_ssh_master_connection(self) -> int:
@@ -117,40 +154,45 @@ class Synchroniser:
         close = subprocess.Popen(command)
         return close.wait()
 
-    def sync_files(self, paths:list, force:bool=False) -> int:
+    def sync_files(self, paths:list, force:bool=False) -> None:
         """Synchronises the files.
 
         Args:
             paths: List of paths to synchronise.
             force: Force the changes from remote to local.
 
-        Returns:
-            The return code of sync.
+        Raises:
+            FatalSyncError: A fatal error occured during the synchronisation.
         """
-        return self.sync(
+
+        self.sync(
             f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/.data",
             self.local,
             paths=paths,
-            force=force
+            force=force,
+            other=self.files_extra
         )
 
-    def sync_links(self, ignore:list) -> int:
+    def sync_links(self, ignore:list) -> None:
         """Synchronises the links, they must exist already.
 
         Args:
             ignore: List of paths to ignore.
 
-        Returns:
-            The return code of sync.
+        Raises:
+            FatalSyncError: A fatal error occured during the synchronisation.
         """
-        return self.sync(
+        self.sync(
             f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/links",
             self.local,
-            ignore=ignore
+            ignore=ignore,
+            other=self.links_extra
         )
 
     def sync(self, remote_root:str, local_root:str,
-            paths:list=[], ignore:list=[], force:bool=False) -> int:
+            paths:list=[], ignore:list=[], force:bool=False,
+            other:list=[]
+            ) -> None:
         """Performs the synchronisation by calling unison.
 
         Args:
@@ -162,9 +204,18 @@ class Synchroniser:
                 If you need to ignore some specific files use the arguments.
             force: Force all changes from remote to local.
                 Used mostly when replacing a link by the file.
+            other:
+                Other arguments to add to unison.
+                These arguments will only be used for this sync which is not
+                the case for the ones in self.args_bool and self.args_value.
+                They will be added to the command as is no - in front.
+                For exemple backups are implemented using this argument.
 
-        Returns:
-            the unison return code see section 6.11 of the documentation
+        Raises:
+            FatalSyncError:
+                If unison returns 3 it means either a fatal error occured or the synchronisation
+                was interrupted.
+                If this happens propagate the error to unisync.
         """
         command = [ "/usr/bin/unison", "-root", remote_root, "-root", local_root ]
         for arg in self.args_bool:
@@ -174,6 +225,7 @@ class Synchroniser:
             command.append(value)
 
         sshargs = f"-p {self.remote_port} "
+        sshargs += f"-S {self.control_path} "
         for arg, value in self.ssh_settings.items():
             sshargs += arg + " " + value + " "
         command.append("-sshargs")
@@ -188,13 +240,17 @@ class Synchroniser:
             command.append(f"BelowPath {path}")
 
         if force:
-            command.append("-force")
+            command.append("-prefer")
             command.append(remote_root)
         command.append("-batch")
 
+        for arg in other:
+            command.append(arg)
+
         proc = subprocess.Popen(command)
         ret_code = proc.wait()
-        return ret_code
+        if ret_code == 3:
+            raise FatalSyncError("Synchronisation could not be completed")
 
     def update_links(self, background:bool=True):
         """Updates the links on the remote.
@@ -223,7 +279,7 @@ class Synchroniser:
             link_background_wrapper
         ]
 
-        link_update_process = subprocess.Popen(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+        link_update_process = subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
 
         if not background:
             print("Starting links update.")

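To make the backup plumbing concrete, the following sketch (mine, assuming the shipped DEFAULT_BACKUP_* values and an empty selection, so it is not taken from the diff itself) lists the extra unison arguments that __init__ collects in self.files_extra and that sync() now appends verbatim to the command after -batch:

# Sketch of self.files_extra with the default backup settings; links_extra only
# carries the trailing "-ignore" pair so link syncs skip the backup directory too.
files_extra = [
    "-backup", "Name *",                 # back everything up when no selection is configured
    "-backuploc", "local",
    "-maxbackups", "2",
    "-backupsuffix", ".$VERSION.bak",
    "-backupprefix", ".unison_backups/",
    "-ignore", "Name .unison_backups",   # backupprefix with the trailing "/" stripped
]
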
tests/runners.py (new file): 8 lines added

@@ -0,0 +1,8 @@
+# Copyright (C) 2026 Paul Retourné
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from unisync.synchroniser import Synchroniser
+from unisync.paths import PathsManager
+
+def unisync_test(synchroniser:Synchroniser, paths_manager:PathsManager):
+    print("Testing")

tests/test.py (new file): 39 lines added

@@ -0,0 +1,39 @@
+# Copyright (C) 2026 Paul Retourné
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+import os
+from pathlib import Path
+
+from unisync.argparser import create_argparser
+from unisync.runners import unisync_sync, unisync_add, unisync_mount
+from unisync.config import load_config
+from unisync.synchroniser import Synchroniser
+from unisync.paths import *
+
+from runners import *
+
+def main():
+    parser = create_argparser(unisync_test, unisync_add, unisync_mount)
+    cli_args = parser.parse_args()
+
+    config_path = os.path.expanduser("./config.ini")
+    config = load_config(config_path)
+
+    print(config)
+
+    synchroniser = Synchroniser(
+        config.roots.remote,
+        config.roots.local,
+        config.server.user,
+        config.server.ip if config.server.ip != "" else config.server.hostname,
+        config.server.port,
+        config.unison.bools,
+        config.unison.values
+    )
+
+    paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)
+
+    cli_args.func(synchroniser, paths_manager)
+
+if __name__ == "__main__":
+    main()