Compare commits
29 Commits
c7f0a67f17...dev
| SHA1 |
|---|
| ae0beac9e0 |
| 072c2a26e6 |
| b0c165b8b0 |
| 6b8686351a |
| dcca9c5167 |
| 041ede22e1 |
| adfded92d0 |
| 7fae1b154a |
| 3dbd7fc445 |
| 10a79554d3 |
| f2b676043c |
| 24bc6bcc94 |
| 7dd01260b3 |
| 2dafcc8c6b |
| cbfbb32b86 |
| 942b6c3cef |
| a281fab8db |
| 033de7e7ca |
| 405e978796 |
| 68c03c18d5 |
| d0cd6353d7 |
| 9fd70deb9d |
| dd042910a9 |
| fd825f7e87 |
| 7705731dd5 |
| a922eaa542 |
| 8836a0120b |
| e639c12c20 |
| c10077392e |
.gitignore (vendored, +1)
@@ -1,3 +1,4 @@
 poetry.lock
 __pycache__
 docs/build
+dist/
docs/Makefile (new file, +20)
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
+SOURCEDIR     = source
+BUILDDIR      = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
docs/make.bat (new file, +35)
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.https://www.sphinx-doc.org/
+	exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
docs/source/conf.py (new file, +36)
@@ -0,0 +1,36 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = 'unisync'
+copyright = '2026, Paul Retourné'
+author = 'Paul Retourné'
+release = '0.1.0'
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = [
+    'sphinx.ext.autodoc',
+    'sphinx.ext.viewcode',
+    'sphinx.ext.napoleon',
+    'sphinx.ext.todo'
+]
+
+templates_path = ['_templates']
+exclude_patterns = []
+
+
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+#html_theme = 'alabaster'
+html_theme = 'sphinx_rtd_theme'
+html_static_path = ['_static']
+
+autodoc_docstring_signature = True
docs/source/example.rst (new file, +24)
@@ -0,0 +1,24 @@
+.. _example_how_it_works:
+
+Example of how unisync works
+============================
+
+Let's say you have the following structure::
+
+    $ tree .
+    .
+    ├── big_file
+    └── folder
+        ├── file
+        └── other_file
+
+If you only want to synchronise `folder` and its content on your laptop, the following will be automatically generated::
+
+    $ tree .
+    .
+    ├── big_file -> ../.data/big_file
+    └── folder
+        ├── file
+        └── other_file
+
+`big_file` is now a symbolic link, and by mounting the remote directory you can still seamlessly access `big_file` through the network.
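The layout change described above boils down to swapping a real file for a symlink into the mounted `.data` tree. Below is a minimal Python sketch of that idea with made-up paths; it is an illustration only, not the project's actual code.

```python
from pathlib import Path

# Illustration only: turn a local file into a symlink that points at the
# copy living under the mounted remote data directory (paths are made up).
local_file = Path("big_file")
remote_copy = Path("../.data/big_file")

local_file.unlink()                 # drop the physical local copy
local_file.symlink_to(remote_copy)  # big_file now resolves through the mount
print(local_file.is_symlink())      # True
```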
docs/source/index.rst (new file, +31)
@@ -0,0 +1,31 @@
+.. unisync documentation master file, created by
+   sphinx-quickstart on Sun Jan 4 15:02:58 2026.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Documentation for unisync
+=========================
+
+Unisync is a data synchronising tool built around `unison`_ and expanding on it.
+
+Unisync tries to solve two problems that are often solved separately but never together:
+
+* Keeping your data synchronised between multiple machines (through a central server); examples of this are rsync and of course unison.
+* Being able to access and edit files stored on your server without having to download them; the GUI interface of nextcloud, for example.
+* And of course I want to be able to do all of this without ever having to leave my terminal.
+
+Unisync solves this by placing each file on your local machine, but with only the selected files and folders being physically present on your drive;
+the others are replaced by symbolic links pointing to a directory that is mounted from your server.
+
+See this
+:ref:`example_how_it_works`.
+
+.. _unison: https://github.com/bcpierce00/unison
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   example
+   modules
+
docs/source/modules.rst (new file, +7)
@@ -0,0 +1,7 @@
+unisync
+=======
+
+.. toctree::
+   :maxdepth: 4
+
+   unisync
docs/source/unisync.rst (new file, +77)
@@ -0,0 +1,77 @@
+unisync package
+===============
+
+Submodules
+----------
+
+unisync.argparser module
+------------------------
+
+.. automodule:: unisync.argparser
+   :members:
+   :show-inheritance:
+   :undoc-members:
+
+unisync.config module
+---------------------
+
+.. automodule:: unisync.config
+   :members:
+   :show-inheritance:
+   :undoc-members:
+
+unisync.defaults module
+-----------------------
+
+.. automodule:: unisync.defaults
+   :members:
+   :show-inheritance:
+   :undoc-members:
+
+unisync.errors module
+---------------------
+
+.. automodule:: unisync.errors
+   :members:
+   :show-inheritance:
+   :undoc-members:
+
+unisync.main module
+-------------------
+
+.. automodule:: unisync.main
+   :members:
+   :show-inheritance:
+   :undoc-members:
+
+unisync.paths module
+--------------------
+
+.. automodule:: unisync.paths
+   :members:
+   :show-inheritance:
+   :undoc-members:
+
+unisync.runners module
+----------------------
+
+.. automodule:: unisync.runners
+   :members:
+   :show-inheritance:
+   :undoc-members:
+
+unisync.synchroniser module
+---------------------------
+
+.. automodule:: unisync.synchroniser
+   :members:
+   :show-inheritance:
+   :undoc-members:
+
+Module contents
+---------------
+
+.. automodule:: unisync
+   :members:
+   :show-inheritance:
+   :undoc-members:
pyproject.toml
@@ -20,3 +20,12 @@ packages = [{include = "unisync", from = "src"}]
 [build-system]
 requires = ["poetry-core>=2.0.0,<3.0.0"]
 build-backend = "poetry.core.masonry.api"
+
+[dependency-groups]
+docs = [
+    "sphinx (>=9.1.0,<10.0.0)",
+    "sphinx-rtd-theme (>=3.0.2,<4.0.0)",
+]
+dev = [
+    "pylint (>=4.0.4,<5.0.0)"
+]
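The added table uses the standard `[dependency-groups]` format (PEP 735) rather than Poetry-specific group tables. As a quick, hedged illustration (assuming the file sits at the repository root), the groups can be read back with the standard library:

```python
import tomllib  # Python 3.11+

# Illustration: read the dependency groups straight out of pyproject.toml.
with open("pyproject.toml", "rb") as f:
    pyproject = tomllib.load(f)

for group, deps in pyproject.get("dependency-groups", {}).items():
    print(group, "->", deps)
# With the change above this would list the "docs" and "dev" groups.
```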
src/unisync/config.py
@@ -2,7 +2,7 @@
 # SPDX-License-Identifier: GPL-3.0-or-later

 from configparser import UNNAMED_SECTION
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from pathlib import Path
 import ipaddress
 import configparser
@@ -30,8 +30,8 @@ class ServerConfig:
         if self.ip != "":
             try:
                 ipaddress.ip_address(self.ip)
-            except ValueError:
-                raise ValueError("The provided ip address is invalid")
+            except ValueError as e:
+                raise ValueError("The provided ip address is invalid") from e

 @dataclass
 class RootsConfig:
@@ -120,13 +120,13 @@ def load_config(config_path:str) -> Config:
         Path(config.get(other_section, "cache_dir_path", fallback=DEFAULT_MISC_CACHE_DIR_PATH)).expanduser()
     )

-    args_bool = list()
-    args_val = dict()
+    args_bool = []
+    args_val = {}
     if "Unison" in config.sections():
         for key, val in config.items("Unison"):
             if key in config["DEFAULT"].keys():
                 continue
-            elif val == "" or val == None:
+            if val in ("", None):
                 args_bool.append(key)
             else:
                 args_val[key] = val
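The `from e` added above chains the new ValueError to the original one, so the root cause stays visible in the traceback. A self-contained sketch of the same idiom (not the project's code, just the pattern):

```python
import ipaddress

def check_ip(ip: str) -> None:
    # Same idiom as the hunk above: re-raise with the original exception
    # attached, so tracebacks show "The above exception was the direct
    # cause of the following exception".
    try:
        ipaddress.ip_address(ip)
    except ValueError as e:
        raise ValueError("The provided ip address is invalid") from e

check_ip("192.0.2.1")    # valid, returns None
# check_ip("not-an-ip")  # would raise ValueError with __cause__ set
```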
src/unisync/defaults.py
@@ -1,4 +1,4 @@
-# copyright (c) 2026 paul retourné
+# Copyright (c) 2026 paul retourné
 # spdx-license-identifier: gpl-3.0-or-later

 from pathlib import Path
@@ -6,7 +6,7 @@ from pathlib import Path
 # Commented out values are part of the config but are required so there is no defaults.
 # This allows this file to be a list of all the config options.

 # DEFAULT_SERVER_USER: str = ""
 DEFAULT_SERVER_SSHARGS: str = ""
 DEFAULT_SERVER_HOSTNAME: str = ""
 DEFAULT_SERVER_IP: str = ""
src/unisync/errors.py
@@ -1,8 +1,21 @@
 # Copyright (C) 2025-2026 Paul Retourné
 # SPDX-License-Identifier: GPL-3.0-or-later

-class RemoteMountedError(BaseException):
+from typing import NoReturn
+import sys
+
+class RemoteMountedError(Exception):
     pass

-class InvalidMountError(BaseException):
+class InvalidMountError(Exception):
     pass
+
+class UnknownSSHError(Exception):
+    pass
+
+class FatalSyncError(Exception):
+    pass
+
+def unisync_exit_fatal(reason:str) -> NoReturn:
+    print(reason)
+    sys.exit(1)
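The switch from BaseException to Exception matters for callers: a generic `except Exception` handler now catches these errors, whereas BaseException subclasses slip past it (which is why KeyboardInterrupt and SystemExit derive from BaseException). A standalone sketch of the difference, with made-up class names:

```python
class OldStyleError(BaseException):
    pass

class NewStyleError(Exception):
    pass

for exc_type in (OldStyleError, NewStyleError):
    try:
        raise exc_type("boom")
    except Exception:
        print(exc_type.__name__, "is caught by 'except Exception'")
    except BaseException:
        print(exc_type.__name__, "is NOT caught by 'except Exception'")
```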
src/unisync/main.py
@@ -4,10 +4,11 @@
 from pathlib import Path

 from unisync.argparser import create_argparser
+from unisync.errors import UnknownSSHError, unisync_exit_fatal
 from unisync.runners import unisync_sync, unisync_add, unisync_mount
 from unisync.config import load_config
 from unisync.synchroniser import Synchroniser
-from unisync.paths import *
+from unisync.paths import PathsManager

 def main():
     parser = create_argparser(unisync_sync, unisync_add, unisync_mount)
@@ -15,16 +16,15 @@ def main():

     config_path: Path = Path("~/.config/unisync/config.ini").expanduser()
     # Check if --config is set
-    if cli_args.config != None and Path(cli_args.config).is_file():
+    if cli_args.config is not None and Path(cli_args.config).is_file():
         config = load_config(cli_args.config)
     elif config_path.is_file():
         config = load_config(str(config_path))
     else:
         # TODO replace the next line with something to do if no config file is found
         config = load_config(str(config_path))
-        pass

-    # TODO make the command line arguments work and override the config options
+    # TODO: make the command line arguments work and override the config options

     synchroniser = Synchroniser(
         config.roots.remote,
@@ -39,7 +39,10 @@ def main():

     paths_manager = PathsManager(Path(config.roots.local), config.other.cache_dir_path)

-    cli_args.func(synchroniser, paths_manager, config)
+    try:
+        cli_args.func(synchroniser, paths_manager, config)
+    except UnknownSSHError:
+        unisync_exit_fatal("Connection failed quitting")


 if __name__ == "__main__":
src/unisync/paths.py
@@ -92,7 +92,7 @@ class PathsManager:
         Writes a list of new paths to the file
         """
         current_paths = self.get_paths_to_sync()
-        paths_to_add = list()
+        paths_to_add = []
         # Check if one of the parent is already being synchronised
         # If so there is no need to add the child path
         for new_path in paths:
src/unisync/runners.py
@@ -5,15 +5,16 @@ from unisync.synchroniser import Synchroniser
 from unisync.paths import PathsManager
 from unisync.config import Config

+
 def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
-    if synchroniser.create_ssh_master_connection() != 0:
-        print("Connection failed quitting")
-        return 1
+    del config # The function signature must be the same for all runners
+
+    synchroniser.create_ssh_master_connection()
     print("Connected to the remote.")

     synchroniser.sync_files(paths_manager.get_paths_to_sync())
     synchroniser.sync_links(paths_manager.get_paths_to_sync())

     # TODO check the config options and do or don't do the following
     synchroniser.update_links()
     #synchroniser.mount_remote_dir()
@@ -23,9 +24,8 @@ def unisync_sync(synchroniser:Synchroniser, paths_manager:PathsManager, config:

 def unisync_add(synchroniser:Synchroniser, paths_manager:PathsManager, config: Config):
     del config # The function signature must be the same for all runners
-    if synchroniser.create_ssh_master_connection() != 0:
-        print("Connection failed quitting")
-        return 1
+
+    synchroniser.create_ssh_master_connection()
     print("Connected to the remote.")

     # TODO config or cli to skip this first sync
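The `del config` lines exist because every runner is later invoked through `cli_args.func(...)` with one fixed argument list, so each runner must accept the full set even when it ignores part of it. The project's argparser module is not part of this diff; the sketch below only illustrates that dispatch pattern with hypothetical runner names:

```python
import argparse

# Hypothetical runners: the shared signature is what lets main() call
# args.func(...) without caring which sub-command was chosen.
def sync_runner(connection: str, options: dict) -> None:
    del options  # unused here, but the signature must match the other runners
    print("sync over", connection)

def add_runner(connection: str, options: dict) -> None:
    print("add over", connection, "with", options)

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparsers.add_parser("sync").set_defaults(func=sync_runner)
subparsers.add_parser("add").set_defaults(func=add_runner)

args = parser.parse_args(["sync"])
args.func("example-host", {"verbose": True})  # same call shape for every runner
```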
src/unisync/synchroniser.py
@@ -16,7 +16,7 @@ import logging
 from pathlib import Path
 from typing import cast

-from unisync.errors import RemoteMountedError, InvalidMountError
+from unisync.errors import RemoteMountedError, InvalidMountError, UnknownSSHError, FatalSyncError
 from unisync.config import BackupConfig

 logger = logging.getLogger(__name__)
@@ -92,7 +92,7 @@ class Synchroniser:
             f"Name {backup.backupprefix[:-1]}"
         ])

-    def create_ssh_master_connection(self, control_path:str="~/.ssh/control_%C", connection_timeout:int=60) -> int:
+    def create_ssh_master_connection(self, control_path:str="~/.ssh/control_%C", connection_timeout:int=60) -> None:
         """Creates an ssh master connection.

         It is used so the user only has to authenticate once to the remote server.
@@ -105,9 +105,14 @@ class Synchroniser:
             connection_timeout:
                 Time given to the user to authenticate to the remote server.
                 On slow connections one might want to increase this.
-        Returns:
-            An error code (0 success, 1 TimeoutExpired, 2 KeyboardInterrupt).
-            TODO change that to raising the exception.
+
+        Raises:
+            subprocess.TimeoutExpired:
+                The user didn't finish logging in in time.
+            KeyboardInterrupt:
+                The user interrupted the process.
+            UnknownSSHError:
+                An error occurred during the connection.
         """
         self.control_path = os.path.expanduser(control_path)
         command = [
@@ -119,18 +124,18 @@ class Synchroniser:
             "-p", str(self.remote_port)
         ]
         master_ssh = subprocess.Popen(command)
+        # TODO: Raise an exception instead of changing the return value
         try:
             ret_code = master_ssh.wait(timeout=connection_timeout)
-        except subprocess.TimeoutExpired:
+        except subprocess.TimeoutExpired as e:
             print("Time to login expired", file=sys.stderr)
-            return 1
-        except KeyboardInterrupt:
-            return 2
+            raise e
+        except KeyboardInterrupt as e:
+            raise e

         if ret_code != 0:
             print("Login to remote failed", file=sys.stderr)
-            return ret_code
-        return 0
+            raise UnknownSSHError


     def close_ssh_master_connection(self) -> int:
@@ -149,18 +154,18 @@ class Synchroniser:
         close = subprocess.Popen(command)
         return close.wait()

-    def sync_files(self, paths:list, force:bool=False) -> int:
+    def sync_files(self, paths:list, force:bool=False) -> None:
         """Synchronises the files.

         Args:
             paths: List of paths to synchronise.
             force: Force the changes from remote to local.

-        Returns:
-            The return code of sync.
+        Raises:
+            FatalSyncError: A fatal error occurred during the synchronisation.
         """

-        return self.sync(
+        self.sync(
             f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/.data",
             self.local,
             paths=paths,
@@ -168,16 +173,16 @@ class Synchroniser:
             other=self.files_extra
         )

-    def sync_links(self, ignore:list) -> int:
+    def sync_links(self, ignore:list) -> None:
         """Synchronises the links, they must exist already.

         Args:
             ignore: List of paths to ignore.

-        Returns:
-            The return code of sync.
+        Raises:
+            FatalSyncError: A fatal error occurred during the synchronisation.
         """
-        return self.sync(
+        self.sync(
             f"ssh://{self.remote_user}@{self.remote_ip}/{self.remote_dir}/links",
             self.local,
             ignore=ignore,
@@ -187,7 +192,7 @@ class Synchroniser:
     def sync(self, remote_root:str, local_root:str,
              paths:list=[], ignore:list=[], force:bool=False,
              other:list=[]
-             ) -> int:
+             ) -> None:
         """Performs the synchronisation by calling unison.

         Args:
@@ -206,8 +211,11 @@ class Synchroniser:
                 They will be added to the command as is, no - in front.
                 For example backups are implemented using this argument.

-        Returns:
-            the unison return code see section 6.11 of the documentation
+        Raises:
+            FatalSyncError:
+                If unison returns 3 it means either a fatal error occurred or the synchronisation
+                was interrupted.
+                If this happens propagate the error to unisync.
         """
         command = [ "/usr/bin/unison", "-root", remote_root, "-root", local_root ]
         for arg in self.args_bool:
@@ -217,6 +225,7 @@ class Synchroniser:
             command.append(value)

         sshargs = f"-p {self.remote_port} "
+        sshargs += f"-S {self.control_path} "
         for arg, value in self.ssh_settings.items():
             sshargs += arg + " " + value + " "
         command.append("-sshargs")
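Passing `-S <control_path>` through unison's `-sshargs` is what lets the transfer reuse the master connection opened earlier, so the user only authenticates once. A rough sketch of how the assembled command could look, with made-up host, paths and options (not the project's exact command):

```python
# Made-up values, only to show the shape of the final unison invocation.
control_path = "/home/user/.ssh/control_1a2b3c"
remote_port = 22

sshargs = f"-p {remote_port} -S {control_path} "
command = [
    "/usr/bin/unison",
    "-root", "ssh://user@203.0.113.5//srv/unisync/.data",
    "-root", "/home/user/sync",
    "-sshargs", sshargs,
    "-batch",
]
print(" ".join(command))
```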
@@ -231,7 +240,7 @@ class Synchroniser:
                 command.append(f"BelowPath {path}")

         if force:
-            command.append("-force")
+            command.append("-prefer")
             command.append(remote_root)
         command.append("-batch")

@@ -240,7 +249,8 @@ class Synchroniser:

         proc = subprocess.Popen(command)
         ret_code = proc.wait()
-        return ret_code
+        if ret_code == 3:
+            raise FatalSyncError("Synchronisation could not be completed")

     def update_links(self, background:bool=True):
         """Updates the links on the remote.
@@ -269,7 +279,7 @@ class Synchroniser:
             link_background_wrapper
         ]

-        link_update_process = subprocess.Popen(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+        link_update_process = subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

         if not background:
             print("Starting links update.")