Compare commits: main...23a661107e (16 commits)

Commits included:
23a661107e, cf508eb94c, 5ec43f9166, cf49ffb8e8, c34d30a006, bb05990464,
aaa4a8f12c, 56da79f124, 0e8d568fea, 2ae9c38627, 667c418f09, f618932584,
f5e455fc79, 78a4d9df36, 7dd7b57e1f, b10ed69d59
.gitignore (vendored, 1 line added)

```diff
@@ -1,2 +1,3 @@
 poetry.lock
 __pycache__
+docs/build
```
unisync.config (configuration dataclasses and load_config)

```diff
@@ -49,6 +49,18 @@ class UnisonConfig:
     bools: list
     values: dict
 
+@dataclass
+class BackupConfig:
+    """
+    Configuration options relative to backing up the files.
+    """
+    enabled: bool
+    selection: str
+    location: str
+    max_backups: int
+    backupsuffix: str
+    backupprefix: str
+
 @dataclass
 class OtherConfig:
     """
@@ -64,6 +76,7 @@ class Config:
     server: ServerConfig
     roots: RootsConfig
     unison: UnisonConfig
+    backup: BackupConfig
     other: OtherConfig
 
 
@@ -81,6 +94,7 @@ def load_config(config_path:str) -> Config:
     # Check if sections are provided
     server_section = "Server" if "Server" in config.sections() else UNNAMED_SECTION
     roots_section = "Roots" if "Roots" in config.sections() else UNNAMED_SECTION
+    backup_section = "Backup"
    other_section = "Other" if "Other" in config.sections() else UNNAMED_SECTION
 
     server_config = ServerConfig(
@@ -94,6 +108,14 @@ def load_config(config_path:str) -> Config:
         config.get(roots_section, "local", fallback=DEFAULT_ROOTS_LOCAL),
         config.get(roots_section, "remote")
     )
+    backup_config = BackupConfig(
+        config.getboolean(backup_section, "enabled", fallback=DEFAULT_BACKUP_ENABLED),
+        config.get(backup_section, "selection", fallback=DEFAULT_BACKUP_SELECTION),
+        config.get(backup_section, "location", fallback=DEFAULT_BACKUP_LOC),
+        config.getint(backup_section, "max_backups", fallback=DEFAULT_BACKUP_MAX_BACKUPS),
+        config.get(backup_section, "backupsuffix", fallback=DEFAULT_BACKUP_BACKUPSUFFIX),
+        config.get(backup_section, "backupprefix", fallback=DEFAULT_BACKUP_BACKUPPREFIX)
+    )
     other_config = OtherConfig(
         Path(config.get(other_section, "cache_dir_path", fallback=DEFAULT_MISC_CACHE_DIR_PATH)).expanduser()
     )
@@ -110,4 +132,4 @@ def load_config(config_path:str) -> Config:
         args_val[key] = val
     unison_config = UnisonConfig(args_bool, args_val)
 
-    return Config(server_config, roots_config, unison_config, other_config)
+    return Config(server_config, roots_config, unison_config, backup_config, other_config)
```
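For context, a minimal sketch of how such a `[Backup]` section is read through `configparser`, mirroring the fallback pattern used in `load_config` above. The key names come from this diff; the section contents, the values, and the two inline defaults are illustrative only.

```python
import configparser

# Illustrative stand-ins for the DEFAULT_BACKUP_* constants added below.
DEFAULT_BACKUP_ENABLED = False
DEFAULT_BACKUP_MAX_BACKUPS = 2

config = configparser.ConfigParser()
config.read_string("""
[Backup]
enabled = true
max_backups = 5
""")

# Same call pattern as load_config: typed getters with a fallback default.
enabled = config.getboolean("Backup", "enabled", fallback=DEFAULT_BACKUP_ENABLED)
max_backups = config.getint("Backup", "max_backups", fallback=DEFAULT_BACKUP_MAX_BACKUPS)
print(enabled, max_backups)  # True 5
```

Because every getter passes `fallback=`, a missing key (or a missing `[Backup]` section altogether) resolves to the defaults, so backups stay disabled unless explicitly enabled.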
Default values module (DEFAULT_* constants)

```diff
@@ -15,4 +15,11 @@ DEFAULT_SERVER_PORT: int = 22
 DEFAULT_ROOTS_LOCAL: str = str(Path("~/files").expanduser())
 # DEFAULT_ROOTS_REMOTE: str = ""
 
-DEFAULT_MISC_CACHE_DIR_PATH: Path = Path("~/.unisync").expanduser()
+DEFAULT_MISC_CACHE_DIR_PATH: str = "~/.unisync"
+
+DEFAULT_BACKUP_ENABLED: bool = False
+DEFAULT_BACKUP_SELECTION: str = ""
+DEFAULT_BACKUP_LOC: str = "local"
+DEFAULT_BACKUP_MAX_BACKUPS: int = 2
+DEFAULT_BACKUP_BACKUPSUFFIX: str = ".$VERSION.bak"
+DEFAULT_BACKUP_BACKUPPREFIX: str = ".unison_backups/"
```
Entry point (main())

```diff
@@ -1,7 +1,6 @@
 # Copyright (C) 2025-2026 Paul Retourné
 # SPDX-License-Identifier: GPL-3.0-or-later
 
-import os
 from pathlib import Path
 
 from unisync.argparser import create_argparser
@@ -14,15 +13,15 @@ def main():
     parser = create_argparser(unisync_sync, unisync_add, unisync_mount)
     cli_args = parser.parse_args()
 
-    config_path = os.path.expanduser("~/.config/unisync/config.ini")
+    config_path: Path = Path("~/.config/unisync/config.ini").expanduser()
     # Check if --config is set
-    if cli_args.config != None and os.path.isfile(cli_args.config):
+    if cli_args.config != None and Path(cli_args.config).is_file():
         config = load_config(cli_args.config)
-    elif os.path.isfile(config_path):
-        config = load_config(config_path)
+    elif config_path.is_file():
+        config = load_config(str(config_path))
     else:
         # TODO replace the next line with something to do if no config file is found
-        config = load_config(config_path)
+        config = load_config(str(config_path))
         pass
 
     # TODO make the command line arguments work and override the config options
@@ -34,12 +33,13 @@ def main():
         config.server.ip if config.server.ip != "" else config.server.hostname,
         config.server.port,
         config.unison.bools,
-        config.unison.values
+        config.unison.values,
+        backup=config.backup
     )
 
     paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)
 
-    cli_args.func(synchroniser, paths_manager)
+    cli_args.func(synchroniser, paths_manager, config)
 
 
 if __name__ == "__main__":
```
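As an aside, the os.path to pathlib switch in main() above is behaviour-preserving on POSIX systems; a minimal sketch of the equivalence (the path is the one from the diff, everything else is illustrative):

```python
import os
from pathlib import Path

# Old style, as removed above.
old_path = os.path.expanduser("~/.config/unisync/config.ini")

# New style, as added above; load_config still takes a str, hence str(...).
new_path = Path("~/.config/unisync/config.ini").expanduser()

# On POSIX both expansions produce the same string and the same existence check.
assert old_path == str(new_path)
assert os.path.isfile(old_path) == new_path.is_file()
```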
unisync.runners

```diff
@@ -3,9 +3,9 @@
 
 from unisync.synchroniser import Synchroniser
 from unisync.paths import PathsManager
+from unisync.config import Config
 
-def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager):
+def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
     if synchroniser.create_ssh_master_connection() != 0:
         print("Connection failed quitting")
         return 1
@@ -21,7 +21,8 @@ def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager):
     synchroniser.close_ssh_master_connection()
 
 
-def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager):
+def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
+    del config # The function signature must be the same for all runners
     if synchroniser.create_ssh_master_connection() != 0:
         print("Connection failed quitting")
         return 1
@@ -32,5 +33,7 @@ def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager):
     synchroniser.close_ssh_master_connection()
 
 
-def unisync_mount(synchroniser:Synchroniser, paths_manager:PathsManager):
+def unisync_mount(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
+    del paths_manager # The function signature must be the same for all runners
+    del config # The function signature must be the same for all runners
     synchroniser.mount_remote_dir()
```
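The del lines above document a convention: every runner handed to the argument parser takes the same three parameters and discards the ones it does not use. A minimal sketch of a conforming runner (unisync_status is a hypothetical name, not part of this change):

```python
from unisync.config import Config
from unisync.paths import PathsManager
from unisync.synchroniser import Synchroniser


def unisync_status(synchroniser: Synchroniser, paths_manager: PathsManager, config: Config):
    # Hypothetical runner: keeps the shared signature, drops unused arguments.
    del paths_manager  # The function signature must be the same for all runners
    del config         # The function signature must be the same for all runners
    if synchroniser.create_ssh_master_connection() != 0:
        print("Connection failed quitting")
        return 1
    synchroniser.close_ssh_master_connection()
    return 0
```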
unisync.synchroniser

```diff
@@ -1,6 +1,12 @@
 # Copyright (C) 2025-2026 Paul Retourné
 # SPDX-License-Identifier: GPL-3.0-or-later
 
+"""Exports the Synchroniser class.
+
+This class is used to perform all the actions that require a connection to
+the remote.
+"""
+
 import subprocess
 import os
 import sys
@@ -8,15 +14,47 @@ import time
 import logging
 
 from pathlib import Path
+from typing import cast
 
 from unisync.errors import RemoteMountedError, InvalidMountError
+from unisync.config import BackupConfig
 
 logger = logging.getLogger(__name__)
 
 class Synchroniser:
+    """Synchroniser used to synchronise with a server.
+
+    It is used to perform every action needing a connection to the remote.
+    Create an ssh connection.
+    Perform the various synchronisation steps (files, links).
+    Update the links on the remote.
+    Mount the remote directory.
+    Close the ssh connection.
+
+    Attributes:
+        remote: The directory to synchronise to on the remote.
+        local: The directory to synchronise from locally.
+        user: The user on the remote server.
+        ip: The ip of the remote server.
+        port: The ssh port on the remote.
+        args_bool:
+            A list of boolean arguments for unison.
+            They will be passed directly to unison when calling it.
+            For example: auto will be passed as -auto.
+        args_value:
+            Same as args_bool but for key-value arguments.
+            Will be passed to unison as "-key value".
+        ssh_settings:
+            Settings to pass to the underlying ssh connection.
+            Currently unused.
+    """
 
-    def __init__(self, remote:str, local:str, user:str, ip:str,
-                 port:int=22, args_bool:list=[], args_value:dict={}, ssh_settings:dict={}):
+    def __init__(self, remote:str, local:str, user:str, ip:str, port:int=22,
+                 args_bool:list=[], args_value:dict={}, ssh_settings:dict={},
+                 backup:BackupConfig | None = None
+                 ):
+        """Initialises an instance of Synchroniser.
+        """
         self.remote_dir:str = remote
         self.local:str = local
         self.args_bool:list[str] = args_bool
@@ -25,16 +63,51 @@ class Synchroniser:
         self.remote_user:str = user
         self.remote_ip:str = ip
         self.remote_port:int = port
+        self.files_extra:list = list()
+        self.links_extra:list = list()
+
+        if(backup != None and backup.enabled):
+            backup = cast(BackupConfig, backup)
+            self.files_extra.append("-backup")
+            if(backup.selection != ""):
+                self.files_extra.append(backup.selection)
+            else:
+                self.files_extra.append("Name *")
+
+            self.files_extra.extend([
+                "-backuploc",
+                backup.location,
+                "-maxbackups",
+                str(backup.max_backups),
+                "-backupsuffix",
+                backup.backupsuffix,
+                "-backupprefix",
+                backup.backupprefix,
+                "-ignore",
+                f"Name {backup.backupprefix[:-1]}"
+            ])
+
+            self.links_extra.extend([
+                "-ignore",
+                f"Name {backup.backupprefix[:-1]}"
+            ])
 
     def create_ssh_master_connection(self, control_path:str="~/.ssh/control_%C", connection_timeout:int=60) -> int:
-        """
-        Creates an ssh master connection so the user only has to authenticate once to the remote server.
-        The subsequent connections will be made through this master connection which speeds up connecting.
-        @control_path: Set the location of the ssh control socket
-        @connection_timeout:
-            Time given to the user to authenticate to the remote server.
-            On slow connections one might want to increase this.
-        Returns 0 on success.
+        """Creates an ssh master connection.
+
+        It is used so the user only has to authenticate once to the remote server.
+        The subsequent connections will be made through this master connection,
+        which speeds up connection.
+        The users only have to enter their password once per synchronisation.
+
+        Args:
+            control_path: Set the location of the ssh control socket
+            connection_timeout:
+                Time given to the user to authenticate to the remote server.
+                On slow connections one might want to increase this.
+
+        Returns:
+            An error code (0 success, 1 TimeoutExpired, 2 KeyboardInterrupt).
+            TODO change that to raising the exception.
         """
         self.control_path = os.path.expanduser(control_path)
         command = [
```
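For readers unfamiliar with ssh multiplexing, the docstring above refers to OpenSSH's ControlMaster mechanism. The full command built by create_ssh_master_connection is truncated in this view, so the following is only a generic, hedged sketch of how such a master connection is opened and reused; the host and user are placeholders.

```python
import os
import subprocess

# Generic sketch only, not the project's exact command: -M opens a master
# connection, -N runs no remote command, and ControlPath names the socket
# that later clients reuse. The tilde is expanded here, mirroring the
# expanduser() call in the diff above.
control_path = os.path.expanduser("~/.ssh/control_%C")
master = subprocess.Popen([
    "/usr/bin/ssh", "-M", "-N",
    "-o", f"ControlPath={control_path}",
    "user@203.0.113.10",
])

# Subsequent ssh-based tools (unison over ssh, sshfs) can point at the same
# ControlPath and reuse the authenticated connection instead of prompting
# again; ssh -O exit with that ControlPath shuts the master down.
```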
unisync.synchroniser (continued)

```diff
@@ -61,8 +134,10 @@ class Synchroniser:
 
 
     def close_ssh_master_connection(self) -> int:
-        """
-        Close the ssh master connection.
+        """Closes the ssh master connection.
+
+        Returns:
+            The return code of the ssh call.
         """
         command = [
             "/usr/bin/ssh",
@@ -75,39 +150,64 @@ class Synchroniser:
         return close.wait()
 
     def sync_files(self, paths:list, force:bool=False) -> int:
-        """
-        Synchronises the files.
-        """
+        """Synchronises the files.
+
+        Args:
+            paths: List of paths to synchronise.
+            force: Force the changes from remote to local.
+
+        Returns:
+            The return code of sync.
+        """
         return self.sync(
             f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/.data",
             self.local,
             paths=paths,
-            force=force
+            force=force,
+            other=self.files_extra
         )
 
     def sync_links(self, ignore:list) -> int:
-        """
-        Synchronises the links, they must exist already.
-        """
+        """Synchronises the links, they must exist already.
+
+        Args:
+            ignore: List of paths to ignore.
+
+        Returns:
+            The return code of sync.
+        """
         return self.sync(
             f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/links",
             self.local,
-            ignore=ignore
+            ignore=ignore,
+            other=self.links_extra
         )
 
     def sync(self, remote_root:str, local_root:str,
-             paths:list=[], ignore:list=[], force:bool=False) -> int:
-        """
-        Perform the synchronisation by calling unison.
-        @remote_root: The remote root, must be a full root usable by unison.
-        @local_root: The local root, must be a full root usable by unison.
-        @paths: List of paths to synchronise
-        @ignore: List of paths to ignore
-            The paths and everything under them will be ignored.
-            If you need to ignore some specific files use the arguments.
-        @force: Force all changes from remote to local.
-            Used mostly when replacing a link by the file.
-        Returns: the unison return code see section 6.11 of the documentation
+             paths:list=[], ignore:list=[], force:bool=False,
+             other:list=[]
+             ) -> int:
+        """Performs the synchronisation by calling unison.
+
+        Args:
+            remote_root: The remote root, must be a full root usable by unison.
+            local_root: The local root, must be a full root usable by unison.
+            paths: List of paths to synchronise
+            ignore: List of paths to ignore
+                The paths and everything under them will be ignored.
+                If you need to ignore some specific files use the arguments.
+            force: Force all changes from remote to local.
+                Used mostly when replacing a link by the file.
+            other:
+                Other arguments to add to unison.
+                These arguments will only be used for this sync, which is not
+                the case for the ones in self.args_bool and self.args_value.
+                They will be added to the command as is, no - in front.
+                For example, backups are implemented using this argument.
+
+        Returns:
+            the unison return code, see section 6.11 of the documentation
         """
         command = [ "/usr/bin/unison", "-root", remote_root, "-root", local_root ]
         for arg in self.args_bool:
@@ -135,17 +235,21 @@ class Synchroniser:
         command.append(remote_root)
         command.append("-batch")
 
+        for arg in other:
+            command.append(arg)
+
         proc = subprocess.Popen(command)
         ret_code = proc.wait()
         return ret_code
 
     def update_links(self, background:bool=True):
-        """
-        Update the links on the remote.
+        """Updates the links on the remote.
+
         First calls cleanlinks to remove deadlinks and empty directories.
         Then calls lndir to create the new links.
+
         Args:
-        - background: controls if the update is done in the background or waited for
+            background: controls if the update is done in the background or waited for.
         """
 
         link_update_script = (f"cd {self.remote_dir}/links && "
@@ -173,13 +277,14 @@ class Synchroniser:
             print("Done")
 
     def mount_remote_dir(self):
-        """
-        Mount the remote directory to make the local links work.
-        This is achieved using sshfs.
-        Raise:
-        - RemoteMountedError: The .data directory is already a mount point
-        - InvalidMountError: .data is either not a directory or not empty
-        - subprocess.CalledProcessError: An error occured with sshfs
+        """Mounts the remote directory to make the local links work.
+
+        This is achieved using sshfs which may fail.
+
+        Raises:
+            RemoteMountedError: The .data directory is already a mount point.
+            InvalidMountError: .data is either not a directory or not empty.
+            subprocess.CalledProcessError: An error occurred with sshfs.
         """
         # Get the absolute path to the correct .data directory resolving symlinks
         path_to_mount:Path = Path(f"{self.local}/../.data").resolve()
```
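To make the effect of the new backup handling concrete, here is a small worked example of what files_extra and links_extra contain when backups are enabled with the default values introduced above; the remote, local, user, and ip arguments are placeholders, and __init__ only stores them without opening a connection.

```python
from unisync.config import BackupConfig
from unisync.synchroniser import Synchroniser

# Defaults from this change: selection "", location "local", max_backups 2,
# backupsuffix ".$VERSION.bak", backupprefix ".unison_backups/".
backup = BackupConfig(True, "", "local", 2, ".$VERSION.bak", ".unison_backups/")

# Placeholder roots and server details.
sync = Synchroniser("files", "/home/user/files", "user", "203.0.113.10", backup=backup)

print(sync.files_extra)
# ['-backup', 'Name *', '-backuploc', 'local', '-maxbackups', '2',
#  '-backupsuffix', '.$VERSION.bak', '-backupprefix', '.unison_backups/',
#  '-ignore', 'Name .unison_backups']
print(sync.links_extra)
# ['-ignore', 'Name .unison_backups']
```

Because selection is empty, the -backup pattern falls back to "Name *", and the backup directory itself (.unison_backups) is excluded from both the file and the link synchronisation via -ignore.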
tests/runners.py (new file, 8 lines)

```diff
@@ -0,0 +1,8 @@
+# Copyright (C) 2026 Paul Retourné
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from unisync.synchroniser import Synchroniser
+from unisync.paths import PathsManager
+
+def unisync_test(synchroniser:Synchroniser, paths_manager:PathsManager):
+    print("Testing")
```
tests/test.py (new file, 39 lines)

```diff
@@ -0,0 +1,39 @@
+# Copyright (C) 2026 Paul Retourné
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+import os
+from pathlib import Path
+
+from unisync.argparser import create_argparser
+from unisync.runners import unisync_sync, unisync_add, unisync_mount
+from unisync.config import load_config
+from unisync.synchroniser import Synchroniser
+from unisync.paths import *
+
+from runners import *
+
+def main():
+    parser = create_argparser(unisync_test, unisync_add, unisync_mount)
+    cli_args = parser.parse_args()
+
+    config_path = os.path.expanduser("./config.ini")
+    config = load_config(config_path)
+
+    print(config)
+
+    synchroniser = Synchroniser(
+        config.roots.remote,
+        config.roots.local,
+        config.server.user,
+        config.server.ip if config.server.ip != "" else config.server.hostname,
+        config.server.port,
+        config.unison.bools,
+        config.unison.values
+    )
+
+    paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)
+
+    cli_args.func(synchroniser, paths_manager)
+
+if __name__ == "__main__":
+    main()
```