From c0e3d0e72d9d636728a5171511e3ce1003203567 Mon Sep 17 00:00:00 2001
From: Gregor Kleen <gkleen@yggdrasil.li>
Date: Mon, 10 Apr 2023 20:56:16 +0200
Subject: borgcopy: switch from mach-nix to poetry2nix
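
Move copy_borg out of the copy/ subdirectory and switch its packaging
from mach-nix to poetry2nix: the ad-hoc setup.py and inline requirements
list give way to a pyproject.toml with a committed poetry.lock, and
default.nix now builds the script via mkPoetryApplication.

Invocation stays the same, e.g. (repository paths are placeholders):

    copy_borg -v /srv/borg/source-repo ssh://borg@backup.example/repo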

---
 modules/borgcopy/copy/copy_borg/__main__.py | 555 ----------------------------
 modules/borgcopy/copy/setup.py              |  10 -
 modules/borgcopy/copy_borg/__main__.py      | 555 ++++++++++++++++++++++++++++
 modules/borgcopy/default.nix                |  40 +-
 modules/borgcopy/poetry.lock                | 180 +++++++++
 modules/borgcopy/pyproject.toml             |  22 ++
 6 files changed, 772 insertions(+), 590 deletions(-)
 delete mode 100755 modules/borgcopy/copy/copy_borg/__main__.py
 delete mode 100644 modules/borgcopy/copy/setup.py
 create mode 100755 modules/borgcopy/copy_borg/__main__.py
 create mode 100644 modules/borgcopy/poetry.lock
 create mode 100644 modules/borgcopy/pyproject.toml

diff --git a/modules/borgcopy/copy/copy_borg/__main__.py b/modules/borgcopy/copy/copy_borg/__main__.py
deleted file mode 100755
index 09f7557a..00000000
--- a/modules/borgcopy/copy/copy_borg/__main__.py
+++ /dev/null
@@ -1,555 +0,0 @@
-#!@python@/bin/python
-
-import json
-import os
-import subprocess
-import re
-import sys
-import io
-from sys import stderr
-from humanize import naturalsize
-
-from tempfile import TemporaryDirectory
-
-from datetime import (datetime, timedelta)
-from dateutil.tz import (tzlocal, tzutc)
-import dateutil.parser
-import argparse
-
-from tqdm import tqdm
-
-from xdg import xdg_runtime_dir
-import pathlib
-
-import unshare
-from pyprctl import CapState, Cap, cap_ambient_raise, cap_ambient_is_set, set_keepcaps
-from pwd import getpwnam
-
-import logging
-
-import signal
-import time
-import math
-
-from halo import Halo
-
-from collections import deque
-
-import select
-import fcntl
-
-from multiprocessing import Process, Manager
-from contextlib import closing
-
-
-halo_args = {
-    'stream': stderr,
-    'enabled': stderr.isatty(),
-    'spinner': 'arc'
-}
-
-borg_pwd = getpwnam('borg')
-
-def as_borg(caps=set()):
-    global logger
-
-    try:
-        if caps:
-            c_state = CapState.get_current()
-            c_state.permitted.update(caps)
-            c_state.set_current()
-
-            # logger.debug("before setgid/setuid: cap_permitted=%s", CapState.get_current().permitted)
-
-            set_keepcaps(True)
-
-        os.setgid(borg_pwd.pw_gid)
-        os.setuid(borg_pwd.pw_uid)
-
-        if caps:
-            # logger.debug("after setgid/setuid: cap_permitted=%s", CapState.get_current().permitted)
-
-            c_state = CapState.get_current()
-            c_state.permitted = caps.copy()
-            c_state.inheritable.update(caps)
-            c_state.set_current()
-
-            # logger.debug("cap_permitted=%s", CapState.get_current().permitted)
-            # logger.debug("cap_inheritable=%s", CapState.get_current().inheritable)
-
-            for cap in caps:
-                cap_ambient_raise(cap)
-                # logger.debug("cap_ambient[%s]=%s", cap, cap_ambient_is_set(cap))
-    except Exception:
-        logger.exception('as_borg() failed')
-        raise
-
-def borg_json(*args, **kwargs):
-    global logger
-
-    with subprocess.Popen(*args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, **kwargs) as proc:
-        stdout_buffer = io.BytesIO()
-
-        proc_logger = logger.getChild('borg')
-        stdout_logger = proc_logger.getChild('stdout')
-        stderr_logger = proc_logger.getChild('stderr')
-
-        fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
-        fcntl.fcntl(proc.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
-
-        poll = select.poll()
-        poll.register(proc.stdout, select.POLLIN | select.POLLHUP)
-        poll.register(proc.stderr, select.POLLIN | select.POLLHUP)
-        pollc = 2
-        events = poll.poll()
-        stderr_linebuf = bytearray()
-
-        while pollc > 0 and len(events) > 0:
-            for rfd, event in events:
-                if event & select.POLLIN:
-                    if rfd == proc.stdout.fileno():
-                        try:
-                            buf = os.read(proc.stdout.fileno(), 8192)
-                            # stdout_logger.debug(buf)
-                            stdout_buffer.write(buf)
-                        except BlockingIOError:
-                            pass
-                    if rfd == proc.stderr.fileno():
-                        try:
-                            stderr_linebuf.extend(os.read(proc.stderr.fileno(), 8192))
-                        except BlockingIOError:
-                            pass
-
-                        while stderr_linebuf:
-                            line, sep, stderr_linebuf = stderr_linebuf.partition(b'\n')
-                            if not sep:
-                                stderr_linebuf = line
-                                break
-
-                            stderr_logger.info(line.decode())
-                if event == select.POLLHUP:
-                    poll.unregister(rfd)
-                    pollc -= 1
-
-            if pollc > 0:
-                events = poll.poll()
-
-        for handler in proc_logger.handlers:
-            handler.flush()
-
-        ret = proc.wait()
-        if ret != 0:
-            raise subprocess.CalledProcessError(ret, args[0])
-
-        stdout_buffer.seek(0)
-        return json.load(stdout_buffer)
-
-def read_repo(path):
-    global logger
-
-    with Halo(text=f'Listing {path}', **halo_args) as sp:
-        if not sp.enabled:
-            logger.debug('Listing %s...', path)
-        res = borg_json(['borg', 'list', '--info', '--lock-wait=600', '--json', path], preexec_fn=lambda: as_borg())['archives']
-        if sp.enabled:
-            sp.succeed(f'{len(res)} archives in {path}')
-        else:
-            logger.info('%d archives in ‘%s’', len(res), path)
-    return res
-
-class ToSync:
-    def __init__(self, source, target):
-        self.source = source
-        self.target = target
-        self.to_sync = deque()
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        global logger
-
-        if self.to_sync:
-            return self.to_sync.popleft()
-
-        while True:
-            try:
-                src = read_repo(self.source)
-                dst = read_repo(self.target)
-            except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err:
-                logger.error(err)
-                continue
-
-            self.to_sync.extend([entry for entry in src if entry['name'] not in {dst_entry['name'] for dst_entry in dst} and not entry['name'].endswith('.checkpoint')])
-
-            if self.to_sync:
-                return self.to_sync.popleft()
-
-            raise StopIteration
-
-def copy_archive(src_repo_path, dst_repo_path, entry):
-    global logger
-
-    def do_copy(tmpdir_q):
-        global logger
-
-        nonlocal src_repo_path, dst_repo_path, entry
-
-        tmpdir = tmpdir_q.get()
-
-        cache_suffix = None
-        with Halo(text=f'Determine archive parameters', **halo_args) as sp:
-            if not sp.enabled:
-                logger.debug('Determining archive parameters...')
-            match = re.compile(r'^(.*)-[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.(checkpoint|recreate)(\.[0-9]+)?)?').fullmatch(entry['name'])
-            if match:
-                repo_id = borg_json(['borg', 'info', '--info', '--lock-wait=600', '--json', src_repo_path], preexec_fn=lambda: as_borg())['repository']['id']
-
-                if repo_id:
-                    cache_suffix = f'{repo_id}_{match.group(1)}'
-            if sp.enabled:
-                sp.succeed(f'Will process {entry["name"]} ({dateutil.parser.isoparse(entry["start"])}, cache_suffix={cache_suffix})')
-            else:
-                logger.info('Will process ‘%s’ (%s, cache_suffix=%s)', entry['name'], dateutil.parser.isoparse(entry['start']), cache_suffix)
-
-        logger.debug('Setting up environment...')
-        unshare.unshare(unshare.CLONE_NEWNS)
-        subprocess.run(['mount', '--make-rprivate', '/'], check=True)
-        chroot = pathlib.Path(tmpdir) / 'chroot'
-        upper = pathlib.Path(tmpdir) / 'upper'
-        work = pathlib.Path(tmpdir) / 'work'
-        for path in [chroot,upper,work]:
-            path.mkdir()
-        subprocess.run(['mount', '-t', 'overlay', 'overlay', '-o', f'lowerdir=/,upperdir={upper},workdir={work}', chroot], check=True)
-        bindMounts = ['nix', 'run', 'run/secrets.d', 'run/wrappers', 'proc', 'dev', 'sys', pathlib.Path(os.path.expanduser('~')).relative_to('/')]
-        if os.environ.get('BORG_BASE_DIR'):
-            bindMounts.append(pathlib.Path(os.environ['BORG_BASE_DIR']).relative_to('/'))
-        if not ":" in src_repo_path:
-            bindMounts.append(pathlib.Path(src_repo_path).relative_to('/'))
-        if 'SSH_AUTH_SOCK' in os.environ:
-            bindMounts.append(pathlib.Path(os.environ['SSH_AUTH_SOCK']).parent.relative_to('/'))
-        for bindMount in bindMounts:
-            (chroot / bindMount).mkdir(parents=True,exist_ok=True)
-            subprocess.run(['mount', '--bind', pathlib.Path('/') / bindMount, chroot / bindMount], check=True)
-        os.chroot(chroot)
-        os.chdir('/')
-        try:
-            os.unlink('/etc/fuse.conf')
-        except FileNotFoundError:
-            pass
-        pathlib.Path('/etc/fuse.conf').parent.mkdir(parents=True,exist_ok=True)
-        with open('/etc/fuse.conf', 'w') as fuse_conf:
-            fuse_conf.write('user_allow_other\nmount_max = 1000\n')
-        dir = pathlib.Path('/borg')
-        dir.mkdir(parents=True,exist_ok=True,mode=0o0750)
-        os.chown(dir, borg_pwd.pw_uid, borg_pwd.pw_gid)
-
-        total_size = None
-        total_files = None
-        if stderr.isatty():
-            with Halo(text=f'Determine size', **halo_args) as sp:
-                stats = borg_json(['borg', 'info', '--info', '--json', '--lock-wait=600', f'{src_repo_path}::{entry["name"]}'], preexec_fn=lambda: as_borg())['archives'][0]['stats']
-                total_size = stats['original_size']
-                total_files = stats['nfiles']
-                if sp.enabled:
-                    sp.succeed(f'{total_files} files, {naturalsize(total_size, binary=True)}')
-                else:
-                    logger.info('%d files, %s', total_files, naturalsize(total_size, binary=True))
-        with subprocess.Popen(['borg', 'mount', '-o', 'allow_other,ignore_permissions', '--foreground', '--progress', '--lock-wait=600', f'{src_repo_path}::{entry["name"]}', dir], preexec_fn=lambda: as_borg()) as mount_proc:
-            with Halo(text='Waiting for mount', **halo_args) as sp:
-                if not sp.enabled:
-                    logger.debug('Waiting for mount...')
-                wait_start = datetime.now()
-                while True:
-                    if os.path.ismount(dir):
-                        break
-                    elif datetime.now() - wait_start > timedelta(minutes=15):
-                        raise TimeoutError('timed out waiting for borg mount')
-                    time.sleep(0.1)
-                if sp.enabled:
-                    sp.succeed('Mounted')
-                else:
-                    logger.info('Mounted %s', f'{src_repo_path}::{entry["name"]}')
-
-            while True:
-                with tqdm(total=total_size, unit_scale=True, unit_divisor=1024, unit='B', smoothing=0.01, disable=None, dynamic_ncols=True, maxinterval=0.5, miniters=1) as progress:
-                    seen = 0
-                    env = os.environ.copy()
-                    create_args = ['borg',
-                                   'create',
-                                   '--lock-wait=600',
-                                   '--one-file-system',
-                                   '--compression=auto,zstd,10',
-                                   '--chunker-params=10,23,16,4095',
-                                   '--files-cache=ctime,size',
-                                   '--show-rc',
-                                   '--upload-buffer=100',
-                                   '--log-json',
-                                   '--progress',
-                                   '--list',
-                                   '--filter=AMEi-x?',
-                                   '--stats'
-                                   ]
-                    archive_time = datetime.strptime(entry["time"], "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=tzlocal()).astimezone(tzutc())
-                    create_args += [f'--timestamp={archive_time.strftime("%Y-%m-%dT%H:%M:%S")}']
-                    if cache_suffix:
-                        env['BORG_FILES_CACHE_SUFFIX'] = cache_suffix
-                    else:
-                        create_args += ['--files-cache=disabled']
-                    create_args += [f'{dst_repo_path}::{entry["name"]}', '.']
-
-                    with subprocess.Popen(create_args, stdin=subprocess.DEVNULL, stderr=subprocess.PIPE, stdout=subprocess.PIPE, env=env, preexec_fn=lambda: as_borg(caps={Cap.DAC_READ_SEARCH}), cwd=dir) as proc:
-                        last_list = None
-                        last_list_time = time.monotonic_ns()
-                        logger.info('Creating...')
-
-                        proc_logger = logger.getChild('borg')
-                        stdout_logger = proc_logger.getChild('stdout')
-                        stderr_logger = proc_logger.getChild('stderr')
-
-                        fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
-                        fcntl.fcntl(proc.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
-
-                        poll = select.poll()
-                        poll.register(proc.stdout, select.POLLIN | select.POLLHUP)
-                        poll.register(proc.stderr, select.POLLIN | select.POLLHUP)
-                        pollc = 2
-                        events = poll.poll()
-                        stdout_linebuf = bytearray()
-                        stderr_linebuf = bytearray()
-
-                        while pollc > 0 and len(events) > 0:
-                            # logger.debug('%d events', len(events))
-                            for rfd, event in events:
-                                # logger.debug('event %s', event)
-                                if event & select.POLLIN:
-                                    if rfd == proc.stdout.fileno():
-                                        try:
-                                            # logger.debug('reading stdout...')
-                                            stdout_linebuf.extend(os.read(proc.stdout.fileno(), 8192))
-                                            # logger.debug('read stdout, len(stdout_linebuf)=%d', len(stdout_linebuf))
-                                        except BlockingIOError:
-                                            pass
-
-                                        while stdout_linebuf:
-                                            # logger.debug('stdout line...')
-                                            line, sep, stdout_linebuf = stdout_linebuf.partition(b'\n')
-                                            if not sep:
-                                                stdout_linebuf = line
-                                                break
-
-                                            stdout_logger.info(line.decode())
-                                        # logger.debug('handled stdout lines, %d leftover', len(stdout_linebuf))
-                                    if rfd == proc.stderr.fileno():
-                                        try:
-                                            # logger.debug('reading stderr...')
-                                            stderr_linebuf.extend(os.read(proc.stderr.fileno(), 8192))
-                                            # logger.debug('read stderr, len(stderr_linebuf)=%d', len(stderr_linebuf))
-                                        except BlockingIOError:
-                                            pass
-
-                                        while stderr_linebuf:
-                                            # logger.debug('stderr line...')
-                                            line, sep, stderr_linebuf = stderr_linebuf.partition(b'\n')
-                                            if not sep:
-                                                stderr_linebuf = line
-                                                break
-
-                                            try:
-                                                json_line = json.loads(line)
-                                            except json.decoder.JSONDecodeError:
-                                                if progress.disable:
-                                                    stderr_logger.error(line.decode())
-                                                else:
-                                                    tqdm.write(line.decode())
-                                                continue
-
-                                            # logger.debug('stderr line decoded: %s', json_line['type'] if 'type' in json_line else None)
-
-                                            t = ''
-                                            if 'time' in json_line and not progress.disable:
-                                                ts = datetime.fromtimestamp(json_line['time']).replace(tzinfo=tzlocal())
-                                                t = f'{ts.isoformat(timespec="minutes")} '
-                                            if json_line['type'] == 'archive_progress' and not progress.disable:
-                                                now = time.monotonic_ns()
-                                                if last_list_time is None or now - last_list_time >= 3e9:
-                                                    last_list_time = now
-                                                    if 'path' in json_line and json_line['path']:
-                                                        progress.set_description(f'… {json_line["path"]}', refresh=False)
-                                                    else:
-                                                        progress.set_description(None, refresh=False)
-                                                elif last_list is not None:
-                                                    progress.set_description(last_list, refresh=False)
-                                                nfiles=json_line["nfiles"]
-                                                if total_files is not None:
-                                                    nfiles=f'{json_line["nfiles"]}/{total_files}'
-                                                progress.set_postfix(compressed=naturalsize(json_line['compressed_size'], binary=True), deduplicated=naturalsize(json_line['deduplicated_size'], binary=True), nfiles=nfiles, refresh=False)
-                                                progress.update(json_line["original_size"] - seen)
-                                                seen = json_line["original_size"]
-                                            elif json_line['type'] == 'archive_progress':
-                                                now = time.monotonic_ns()
-                                                if last_list_time is None or now - last_list_time >= 3e9:
-                                                    last_list_time = now
-                                                    if 'path' in json_line and json_line['path']:
-                                                        stderr_logger.debug('… %s (%s)', json_line["path"], naturalsize(json_line["original_size"]))
-                                                    else:
-                                                        stderr_logger.debug('… (%s)', naturalsize(json_line["original_size"]))
-                                            elif json_line['type'] == 'file_status':
-                                                # tqdm.write(t + f'{json_line["status"]} {json_line["path"]}')
-                                                last_list = f'{json_line["status"]} {json_line["path"]}'
-                                                last_list_time = time.monotonic_ns()
-                                                progress.set_description(last_list, refresh=False)
-                                                if progress.disable:
-                                                    stderr_logger.info(last_list)
-                                            elif (json_line['type'] == 'log_message' or json_line['type'] == 'progress_message' or json_line['type'] == 'progress_percent') and ('message' in json_line or 'msgid' in json_line):
-                                                if 'message' in json_line:
-                                                    if progress.disable:
-                                                        stderr_logger.info(t + json_line['message'])
-                                                    else:
-                                                        tqdm.write(t + json_line['message'])
-                                                elif 'msgid' in json_line:
-                                                    if progress.disable:
-                                                        stderr_logger.info(t + json_line['msgid'])
-                                                    else:
-                                                        tqdm.write(t + json_line['msgid'])
-                                            else:
-                                                if progress.disable:
-                                                    stderr_logger.info(t + line.decode())
-                                                else:
-                                                    tqdm.write(t + line.decode())
-                                        # logger.debug('handled stderr lines, %d leftover', len(stderr_linebuf))
-                                if event == select.POLLHUP:
-                                    poll.unregister(rfd)
-                                    pollc -= 1
-
-                            if pollc > 0:
-                                # logger.debug('polling %d fds...', pollc)
-                                events = poll.poll()
-                                # logger.debug('done polling')
-
-                        # logger.debug('borg create closed stdout/stderr')
-                        if stdout_linebuf:
-                            logger.error('unterminated line leftover in stdout: %s', stdout_linebuf)
-                        if stderr_linebuf:
-                            logger.error('unterminated line leftover in stderr: %s', stderr_linebuf)
-                        progress.set_description(None)
-                        ret = proc.wait()
-                        # logger.debug('borg create terminated; ret=%d', ret)
-                        if ret != 0:
-                            dst = None
-                            try:
-                                dst = read_repo(dst_repo_path)
-                            except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err:
-                                logger.error(err)
-                                continue
-                            else:
-                                if any(map(lambda other: entry['name'] == other['name'], dst)):
-                                    logger.info('destination exists, terminating')
-                                    break
-
-                            logger.warning('destination does not exist, retrying')
-                            continue
-                        else:
-                            # logger.debug('terminating')
-                            break
-            mount_proc.terminate()
-
-    with Manager() as manager:
-        tmpdir_q = manager.Queue(1)
-
-        with closing(Process(target=do_copy, args=(tmpdir_q,), name='do_copy')) as p:
-            p.start()
-
-            with TemporaryDirectory(prefix=f'borg-mount_{entry["name"]}_', dir=os.environ.get('RUNTIME_DIRECTORY')) as tmpdir:
-                tmpdir_q.put(tmpdir)
-                p.join()
-                return p.exitcode
-
-def sigterm(signum, frame):
-    raise SystemExit(128 + signum)
-
-def main():
-    signal.signal(signal.SIGTERM, sigterm)
-
-    global logger
-    logger = logging.getLogger(__name__)
-    console_handler = logging.StreamHandler()
-    console_handler.setFormatter( logging.Formatter('[%(levelname)s](%(name)s): %(message)s') )
-    if sys.stderr.isatty():
-        console_handler.setFormatter( logging.Formatter('%(asctime)s [%(levelname)s](%(name)s): %(message)s') )
-
-    burst_max = 1000
-    burst = burst_max
-    last_use = None
-    inv_rate = 1e7
-    def consume_filter(record):
-        nonlocal burst, burst_max, inv_rate, last_use
-
-        delay = None
-        while True:
-            now = time.monotonic_ns()
-            burst = min(burst_max, burst + math.floor((now - last_use) / inv_rate)) if last_use else burst_max
-            last_use = now
-
-            if burst > 0:
-                burst -= 1
-                if delay:
-                    delay = now - delay
-
-                return True
-
-            if delay is None:
-                delay = now
-            time.sleep(inv_rate / 1e9)
-    console_handler.addFilter(consume_filter)
-
-    logging.getLogger().addHandler(console_handler)
-
-    # log uncaught exceptions
-    def log_exceptions(type, value, tb):
-        global logger
-
-        logger.error(value)
-        sys.__excepthook__(type, value, tb) # calls default excepthook
-
-    sys.excepthook = log_exceptions
-
-    parser = argparse.ArgumentParser(prog='copy')
-    parser.add_argument('--verbosity', dest='log_level', action='append', type=int)
-    parser.add_argument('--verbose', '-v', dest='log_level', action='append_const', const=1)
-    parser.add_argument('--quiet', '-q', dest='log_level', action='append_const', const=-1)
-    parser.add_argument('source', metavar='REPO_OR_ARCHIVE')
-    parser.add_argument('target', metavar='REPO_OR_ARCHIVE')
-    args = parser.parse_args()
-
-
-    LOG_LEVELS = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL]
-    DEFAULT_LOG_LEVEL = logging.ERROR
-    log_level = LOG_LEVELS.index(DEFAULT_LOG_LEVEL)
-
-    for adjustment in args.log_level or ():
-        log_level = min(len(LOG_LEVELS) - 1, max(log_level - adjustment, 0))
-    logger.setLevel(LOG_LEVELS[log_level])
-
-
-    if "::" in args.source:
-        (src_repo_path, _, src_archive) = args.source.partition("::")
-        entry = None
-        for candidate_entry in read_repo(src_repo_path):
-            if candidate_entry['name'] != src_archive:
-                continue
-            entry = candidate_entry
-            break
-
-        if entry is None:
-            logger.critical("Did not find archive ‘%s’", src_archive)
-            sys.exit(1)
-
-        copy_archive(src_repo_path, args.target, entry)
-    else:
-        for entry in ToSync(args.source, args.target):
-            copy_archive(args.source, args.target, entry)
-
-if __name__ == "__main__":
-    sys.exit(main())
diff --git a/modules/borgcopy/copy/setup.py b/modules/borgcopy/copy/setup.py
deleted file mode 100644
index f77d9560..00000000
--- a/modules/borgcopy/copy/setup.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from setuptools import setup
-
-setup(name='copy_borg',
-      packages=['copy_borg'],
-      entry_points={
-          'console_scripts': [
-              'copy_borg=copy_borg.__main__:main',
-          ],
-      }
-)
diff --git a/modules/borgcopy/copy_borg/__main__.py b/modules/borgcopy/copy_borg/__main__.py
new file mode 100755
index 00000000..09f7557a
--- /dev/null
+++ b/modules/borgcopy/copy_borg/__main__.py
@@ -0,0 +1,555 @@
+#!@python@/bin/python
+
+import json
+import os
+import subprocess
+import re
+import sys
+import io
+from sys import stderr
+from humanize import naturalsize
+
+from tempfile import TemporaryDirectory
+
+from datetime import (datetime, timedelta)
+from dateutil.tz import (tzlocal, tzutc)
+import dateutil.parser
+import argparse
+
+from tqdm import tqdm
+
+from xdg import xdg_runtime_dir
+import pathlib
+
+import unshare
+from pyprctl import CapState, Cap, cap_ambient_raise, cap_ambient_is_set, set_keepcaps
+from pwd import getpwnam
+
+import logging
+
+import signal
+import time
+import math
+
+from halo import Halo
+
+from collections import deque
+
+import select
+import fcntl
+
+from multiprocessing import Process, Manager
+from contextlib import closing
+
+
+halo_args = {
+    'stream': stderr,
+    'enabled': stderr.isatty(),
+    'spinner': 'arc'
+}
+
+borg_pwd = getpwnam('borg')
+
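+# Meant to run as a preexec_fn between fork and exec: drops to the borg
+# user and, when capabilities are requested, carries them across setuid
+# via keepcaps, re-installs them as permitted/inheritable, and finally
+# raises them into the ambient set so they survive the exec.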
+def as_borg(caps=set()):
+    global logger
+
+    try:
+        if caps:
+            c_state = CapState.get_current()
+            c_state.permitted.update(caps)
+            c_state.set_current()
+
+            # logger.debug("before setgid/setuid: cap_permitted=%s", CapState.get_current().permitted)
+
+            set_keepcaps(True)
+
+        os.setgid(borg_pwd.pw_gid)
+        os.setuid(borg_pwd.pw_uid)
+
+        if caps:
+            # logger.debug("after setgid/setuid: cap_permitted=%s", CapState.get_current().permitted)
+
+            c_state = CapState.get_current()
+            c_state.permitted = caps.copy()
+            c_state.inheritable.update(caps)
+            c_state.set_current()
+
+            # logger.debug("cap_permitted=%s", CapState.get_current().permitted)
+            # logger.debug("cap_inheritable=%s", CapState.get_current().inheritable)
+
+            for cap in caps:
+                cap_ambient_raise(cap)
+                # logger.debug("cap_ambient[%s]=%s", cap, cap_ambient_is_set(cap))
+    except Exception:
+        logger.exception('as_borg() failed')
+        raise
+
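+# Run a borg command and return its parsed JSON stdout. Both pipes are put
+# into non-blocking mode and multiplexed with poll(): stdout is buffered
+# until EOF, while stderr is split into lines and forwarded to the
+# 'borg.stderr' child logger as it arrives.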
+def borg_json(*args, **kwargs):
+    global logger
+
+    with subprocess.Popen(*args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, **kwargs) as proc:
+        stdout_buffer = io.BytesIO()
+
+        proc_logger = logger.getChild('borg')
+        stdout_logger = proc_logger.getChild('stdout')
+        stderr_logger = proc_logger.getChild('stderr')
+
+        fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
+        fcntl.fcntl(proc.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
+
+        poll = select.poll()
+        poll.register(proc.stdout, select.POLLIN | select.POLLHUP)
+        poll.register(proc.stderr, select.POLLIN | select.POLLHUP)
+        pollc = 2
+        events = poll.poll()
+        stderr_linebuf = bytearray()
+
+        while pollc > 0 and len(events) > 0:
+            for rfd, event in events:
+                if event & select.POLLIN:
+                    if rfd == proc.stdout.fileno():
+                        try:
+                            buf = os.read(proc.stdout.fileno(), 8192)
+                            # stdout_logger.debug(buf)
+                            stdout_buffer.write(buf)
+                        except BlockingIOError:
+                            pass
+                    if rfd == proc.stderr.fileno():
+                        try:
+                            stderr_linebuf.extend(os.read(proc.stderr.fileno(), 8192))
+                        except BlockingIOError:
+                            pass
+
+                        while stderr_linebuf:
+                            line, sep, stderr_linebuf = stderr_linebuf.partition(b'\n')
+                            if not sep:
+                                stderr_linebuf = line
+                                break
+
+                            stderr_logger.info(line.decode())
+                if event == select.POLLHUP:
+                    poll.unregister(rfd)
+                    pollc -= 1
+
+            if pollc > 0:
+                events = poll.poll()
+
+        for handler in proc_logger.handlers:
+            handler.flush()
+
+        ret = proc.wait()
+        if ret != 0:
+            raise subprocess.CalledProcessError(ret, args[0])
+
+        stdout_buffer.seek(0)
+        return json.load(stdout_buffer)
+
+def read_repo(path):
+    global logger
+
+    with Halo(text=f'Listing {path}', **halo_args) as sp:
+        if not sp.enabled:
+            logger.debug('Listing %s...', path)
+        res = borg_json(['borg', 'list', '--info', '--lock-wait=600', '--json', path], preexec_fn=lambda: as_borg())['archives']
+        if sp.enabled:
+            sp.succeed(f'{len(res)} archives in {path}')
+        else:
+            logger.info('%d archives in ‘%s’', len(res), path)
+    return res
+
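+# Iterates over archives present in the source repository but missing from
+# the target, skipping *.checkpoint archives. Whenever its queue runs dry
+# it re-lists both repositories, and stops once no new archives turn up.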
+class ToSync:
+    def __init__(self, source, target):
+        self.source = source
+        self.target = target
+        self.to_sync = deque()
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        global logger
+
+        if self.to_sync:
+            return self.to_sync.popleft()
+
+        while True:
+            try:
+                src = read_repo(self.source)
+                dst = read_repo(self.target)
+            except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err:
+                logger.error(err)
+                continue
+
+            self.to_sync.extend([entry for entry in src if entry['name'] not in {dst_entry['name'] for dst_entry in dst} and not entry['name'].endswith('.checkpoint')])
+
+            if self.to_sync:
+                return self.to_sync.popleft()
+
+            raise StopIteration
+
+def copy_archive(src_repo_path, dst_repo_path, entry):
+    global logger
+
+    def do_copy(tmpdir_q):
+        global logger
+
+        nonlocal src_repo_path, dst_repo_path, entry
+
+        tmpdir = tmpdir_q.get()
+
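+        # Derive a stable files-cache suffix from the repository id plus
+        # the archive series name (the part before the timestamp), so
+        # consecutive copies of the same series can reuse borg's files
+        # cache (see BORG_FILES_CACHE_SUFFIX below).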
+        cache_suffix = None
+        with Halo(text=f'Determine archive parameters', **halo_args) as sp:
+            if not sp.enabled:
+                logger.debug('Determining archive parameters...')
+            match = re.compile(r'^(.*)-[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.(checkpoint|recreate)(\.[0-9]+)?)?').fullmatch(entry['name'])
+            if match:
+                repo_id = borg_json(['borg', 'info', '--info', '--lock-wait=600', '--json', src_repo_path], preexec_fn=lambda: as_borg())['repository']['id']
+
+                if repo_id:
+                    cache_suffix = f'{repo_id}_{match.group(1)}'
+            if sp.enabled:
+                sp.succeed(f'Will process {entry["name"]} ({dateutil.parser.isoparse(entry["start"])}, cache_suffix={cache_suffix})')
+            else:
+                logger.info('Will process ‘%s’ (%s, cache_suffix=%s)', entry['name'], dateutil.parser.isoparse(entry['start']), cache_suffix)
+
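+        # Build a throwaway root: unshare the mount namespace, lay an
+        # overlayfs over /, bind-mount only what borg needs (nix store,
+        # secrets, home, the source repo, the ssh agent socket), then
+        # chroot into it. The FUSE mount below thus stays private to this
+        # process and disappears with it.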
+        logger.debug('Setting up environment...')
+        unshare.unshare(unshare.CLONE_NEWNS)
+        subprocess.run(['mount', '--make-rprivate', '/'], check=True)
+        chroot = pathlib.Path(tmpdir) / 'chroot'
+        upper = pathlib.Path(tmpdir) / 'upper'
+        work = pathlib.Path(tmpdir) / 'work'
+        for path in [chroot,upper,work]:
+            path.mkdir()
+        subprocess.run(['mount', '-t', 'overlay', 'overlay', '-o', f'lowerdir=/,upperdir={upper},workdir={work}', chroot], check=True)
+        bindMounts = ['nix', 'run', 'run/secrets.d', 'run/wrappers', 'proc', 'dev', 'sys', pathlib.Path(os.path.expanduser('~')).relative_to('/')]
+        if os.environ.get('BORG_BASE_DIR'):
+            bindMounts.append(pathlib.Path(os.environ['BORG_BASE_DIR']).relative_to('/'))
+        if not ":" in src_repo_path:
+            bindMounts.append(pathlib.Path(src_repo_path).relative_to('/'))
+        if 'SSH_AUTH_SOCK' in os.environ:
+            bindMounts.append(pathlib.Path(os.environ['SSH_AUTH_SOCK']).parent.relative_to('/'))
+        for bindMount in bindMounts:
+            (chroot / bindMount).mkdir(parents=True,exist_ok=True)
+            subprocess.run(['mount', '--bind', pathlib.Path('/') / bindMount, chroot / bindMount], check=True)
+        os.chroot(chroot)
+        os.chdir('/')
+        try:
+            os.unlink('/etc/fuse.conf')
+        except FileNotFoundError:
+            pass
+        pathlib.Path('/etc/fuse.conf').parent.mkdir(parents=True,exist_ok=True)
+        with open('/etc/fuse.conf', 'w') as fuse_conf:
+            fuse_conf.write('user_allow_other\nmount_max = 1000\n')
+        dir = pathlib.Path('/borg')
+        dir.mkdir(parents=True,exist_ok=True,mode=0o0750)
+        os.chown(dir, borg_pwd.pw_uid, borg_pwd.pw_gid)
+
+        total_size = None
+        total_files = None
+        if stderr.isatty():
+            with Halo(text=f'Determine size', **halo_args) as sp:
+                stats = borg_json(['borg', 'info', '--info', '--json', '--lock-wait=600', f'{src_repo_path}::{entry["name"]}'], preexec_fn=lambda: as_borg())['archives'][0]['stats']
+                total_size = stats['original_size']
+                total_files = stats['nfiles']
+                if sp.enabled:
+                    sp.succeed(f'{total_files} files, {naturalsize(total_size, binary=True)}')
+                else:
+                    logger.info('%d files, %s', total_files, naturalsize(total_size, binary=True))
+        with subprocess.Popen(['borg', 'mount', '-o', 'allow_other,ignore_permissions', '--foreground', '--progress', '--lock-wait=600', f'{src_repo_path}::{entry["name"]}', dir], preexec_fn=lambda: as_borg()) as mount_proc:
+            with Halo(text='Waiting for mount', **halo_args) as sp:
+                if not sp.enabled:
+                    logger.debug('Waiting for mount...')
+                wait_start = datetime.now()
+                while True:
+                    if os.path.ismount(dir):
+                        break
+                    elif datetime.now() - wait_start > timedelta(minutes=15):
+                        raise TimeoutError('timed out waiting for borg mount')
+                    time.sleep(0.1)
+                if sp.enabled:
+                    sp.succeed('Mounted')
+                else:
+                    logger.info('Mounted %s', f'{src_repo_path}::{entry["name"]}')
+
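+            # Retry loop: if ‘borg create’ fails, re-list the destination;
+            # if the archive exists there after all, treat the copy as
+            # done, otherwise start over.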
+            while True:
+                with tqdm(total=total_size, unit_scale=True, unit_divisor=1024, unit='B', smoothing=0.01, disable=None, dynamic_ncols=True, maxinterval=0.5, miniters=1) as progress:
+                    seen = 0
+                    env = os.environ.copy()
+                    create_args = ['borg',
+                                   'create',
+                                   '--lock-wait=600',
+                                   '--one-file-system',
+                                   '--compression=auto,zstd,10',
+                                   '--chunker-params=10,23,16,4095',
+                                   '--files-cache=ctime,size',
+                                   '--show-rc',
+                                   '--upload-buffer=100',
+                                   '--log-json',
+                                   '--progress',
+                                   '--list',
+                                   '--filter=AMEi-x?',
+                                   '--stats'
+                                   ]
+                    archive_time = datetime.strptime(entry["time"], "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=tzlocal()).astimezone(tzutc())
+                    create_args += [f'--timestamp={archive_time.strftime("%Y-%m-%dT%H:%M:%S")}']
+                    if cache_suffix:
+                        env['BORG_FILES_CACHE_SUFFIX'] = cache_suffix
+                    else:
+                        create_args += ['--files-cache=disabled']
+                    create_args += [f'{dst_repo_path}::{entry["name"]}', '.']
+
+                    with subprocess.Popen(create_args, stdin=subprocess.DEVNULL, stderr=subprocess.PIPE, stdout=subprocess.PIPE, env=env, preexec_fn=lambda: as_borg(caps={Cap.DAC_READ_SEARCH}), cwd=dir) as proc:
+                        last_list = None
+                        last_list_time = time.monotonic_ns()
+                        logger.info('Creating...')
+
+                        proc_logger = logger.getChild('borg')
+                        stdout_logger = proc_logger.getChild('stdout')
+                        stderr_logger = proc_logger.getChild('stderr')
+
+                        fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
+                        fcntl.fcntl(proc.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
+
+                        poll = select.poll()
+                        poll.register(proc.stdout, select.POLLIN | select.POLLHUP)
+                        poll.register(proc.stderr, select.POLLIN | select.POLLHUP)
+                        pollc = 2
+                        events = poll.poll()
+                        stdout_linebuf = bytearray()
+                        stderr_linebuf = bytearray()
+
+                        while pollc > 0 and len(events) > 0:
+                            # logger.debug('%d events', len(events))
+                            for rfd, event in events:
+                                # logger.debug('event %s', event)
+                                if event & select.POLLIN:
+                                    if rfd == proc.stdout.fileno():
+                                        try:
+                                            # logger.debug('reading stdout...')
+                                            stdout_linebuf.extend(os.read(proc.stdout.fileno(), 8192))
+                                            # logger.debug('read stdout, len(stdout_linebuf)=%d', len(stdout_linebuf))
+                                        except BlockingIOError:
+                                            pass
+
+                                        while stdout_linebuf:
+                                            # logger.debug('stdout line...')
+                                            line, sep, stdout_linebuf = stdout_linebuf.partition(b'\n')
+                                            if not sep:
+                                                stdout_linebuf = line
+                                                break
+
+                                            stdout_logger.info(line.decode())
+                                        # logger.debug('handled stdout lines, %d leftover', len(stdout_linebuf))
+                                    if rfd == proc.stderr.fileno():
+                                        try:
+                                            # logger.debug('reading stderr...')
+                                            stderr_linebuf.extend(os.read(proc.stderr.fileno(), 8192))
+                                            # logger.debug('read stderr, len(stderr_linebuf)=%d', len(stderr_linebuf))
+                                        except BlockingIOError:
+                                            pass
+
+                                        while stderr_linebuf:
+                                            # logger.debug('stderr line...')
+                                            line, sep, stderr_linebuf = stderr_linebuf.partition(b'\n')
+                                            if not sep:
+                                                stderr_linebuf = line
+                                                break
+
+                                            try:
+                                                json_line = json.loads(line)
+                                            except json.decoder.JSONDecodeError:
+                                                if progress.disable:
+                                                    stderr_logger.error(line.decode())
+                                                else:
+                                                    tqdm.write(line.decode())
+                                                continue
+
+                                            # logger.debug('stderr line decoded: %s', json_line['type'] if 'type' in json_line else None)
+
+                                            t = ''
+                                            if 'time' in json_line and not progress.disable:
+                                                ts = datetime.fromtimestamp(json_line['time']).replace(tzinfo=tzlocal())
+                                                t = f'{ts.isoformat(timespec="minutes")} '
+                                            if json_line['type'] == 'archive_progress' and not progress.disable:
+                                                now = time.monotonic_ns()
+                                                if last_list_time is None or now - last_list_time >= 3e9:
+                                                    last_list_time = now
+                                                    if 'path' in json_line and json_line['path']:
+                                                        progress.set_description(f'… {json_line["path"]}', refresh=False)
+                                                    else:
+                                                        progress.set_description(None, refresh=False)
+                                                elif last_list is not None:
+                                                    progress.set_description(last_list, refresh=False)
+                                                nfiles=json_line["nfiles"]
+                                                if total_files is not None:
+                                                    nfiles=f'{json_line["nfiles"]}/{total_files}'
+                                                progress.set_postfix(compressed=naturalsize(json_line['compressed_size'], binary=True), deduplicated=naturalsize(json_line['deduplicated_size'], binary=True), nfiles=nfiles, refresh=False)
+                                                progress.update(json_line["original_size"] - seen)
+                                                seen = json_line["original_size"]
+                                            elif json_line['type'] == 'archive_progress':
+                                                now = time.monotonic_ns()
+                                                if last_list_time is None or now - last_list_time >= 3e9:
+                                                    last_list_time = now
+                                                    if 'path' in json_line and json_line['path']:
+                                                        stderr_logger.debug('… %s (%s)', json_line["path"], naturalsize(json_line["original_size"]))
+                                                    else:
+                                                        stderr_logger.debug('… (%s)', naturalsize(json_line["original_size"]))
+                                            elif json_line['type'] == 'file_status':
+                                                # tqdm.write(t + f'{json_line["status"]} {json_line["path"]}')
+                                                last_list = f'{json_line["status"]} {json_line["path"]}'
+                                                last_list_time = time.monotonic_ns()
+                                                progress.set_description(last_list, refresh=False)
+                                                if progress.disable:
+                                                    stderr_logger.info(last_list)
+                                            elif (json_line['type'] == 'log_message' or json_line['type'] == 'progress_message' or json_line['type'] == 'progress_percent') and ('message' in json_line or 'msgid' in json_line):
+                                                if 'message' in json_line:
+                                                    if progress.disable:
+                                                        stderr_logger.info(t + json_line['message'])
+                                                    else:
+                                                        tqdm.write(t + json_line['message'])
+                                                elif 'msgid' in json_line:
+                                                    if progress.disable:
+                                                        stderr_logger.info(t + json_line['msgid'])
+                                                    else:
+                                                        tqdm.write(t + json_line['msgid'])
+                                            else:
+                                                if progress.disable:
+                                                    stderr_logger.info(t + line.decode())
+                                                else:
+                                                    tqdm.write(t + line.decode())
+                                        # logger.debug('handled stderr lines, %d leftover', len(stderr_linebuf))
+                                if event == select.POLLHUP:
+                                    poll.unregister(rfd)
+                                    pollc -= 1
+
+                            if pollc > 0:
+                                # logger.debug('polling %d fds...', pollc)
+                                events = poll.poll()
+                                # logger.debug('done polling')
+
+                        # logger.debug('borg create closed stdout/stderr')
+                        if stdout_linebuf:
+                            logger.error('unterminated line leftover in stdout: %s', stdout_linebuf)
+                        if stderr_linebuf:
+                            logger.error('unterminated line leftover in stderr: %s', stderr_linebuf)
+                        progress.set_description(None)
+                        ret = proc.wait()
+                        # logger.debug('borg create terminated; ret=%d', ret)
+                        if ret != 0:
+                            dst = None
+                            try:
+                                dst = read_repo(dst_repo_path)
+                            except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err:
+                                logger.error(err)
+                                continue
+                            else:
+                                if any(map(lambda other: entry['name'] == other['name'], dst)):
+                                    logger.info('destination exists, terminating')
+                                    break
+
+                            logger.warning('destination does not exist, retrying')
+                            continue
+                        else:
+                            # logger.debug('terminating')
+                            break
+            mount_proc.terminate()
+
+    with Manager() as manager:
+        tmpdir_q = manager.Queue(1)
+
+        with closing(Process(target=do_copy, args=(tmpdir_q,), name='do_copy')) as p:
+            p.start()
+
+            with TemporaryDirectory(prefix=f'borg-mount_{entry["name"]}_', dir=os.environ.get('RUNTIME_DIRECTORY')) as tmpdir:
+                tmpdir_q.put(tmpdir)
+                p.join()
+                return p.exitcode
+
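+# Turn SIGTERM into a SystemExit with the conventional 128 + signum status,
+# so context managers and finally blocks still run on service shutdown.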
+def sigterm(signum, frame):
+    raise SystemExit(128 + signum)
+
+def main():
+    signal.signal(signal.SIGTERM, sigterm)
+
+    global logger
+    logger = logging.getLogger(__name__)
+    console_handler = logging.StreamHandler()
+    console_handler.setFormatter( logging.Formatter('[%(levelname)s](%(name)s): %(message)s') )
+    if sys.stderr.isatty():
+        console_handler.setFormatter( logging.Formatter('%(asctime)s [%(levelname)s](%(name)s): %(message)s') )
+
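+    # Token-bucket rate limit for console logging: a bucket of burst_max
+    # tokens refills at one token per inv_rate ns (1e7 ns, i.e. at most
+    # 100 records/s sustained); a record that finds the bucket empty
+    # blocks the logging thread until a token accrues.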
+    burst_max = 1000
+    burst = burst_max
+    last_use = None
+    inv_rate = 1e7
+    def consume_filter(record):
+        nonlocal burst, burst_max, inv_rate, last_use
+
+        delay = None
+        while True:
+            now = time.monotonic_ns()
+            burst = min(burst_max, burst + math.floor((now - last_use) / inv_rate)) if last_use else burst_max
+            last_use = now
+
+            if burst > 0:
+                burst -= 1
+                if delay:
+                    delay = now - delay
+
+                return True
+
+            if delay is None:
+                delay = now
+            time.sleep(inv_rate / 1e9)
+    console_handler.addFilter(consume_filter)
+
+    logging.getLogger().addHandler(console_handler)
+
+    # log uncaught exceptions
+    def log_exceptions(type, value, tb):
+        global logger
+
+        logger.error(value)
+        sys.__excepthook__(type, value, tb) # calls default excepthook
+
+    sys.excepthook = log_exceptions
+
+    parser = argparse.ArgumentParser(prog='copy')
+    parser.add_argument('--verbosity', dest='log_level', action='append', type=int)
+    parser.add_argument('--verbose', '-v', dest='log_level', action='append_const', const=1)
+    parser.add_argument('--quiet', '-q', dest='log_level', action='append_const', const=-1)
+    parser.add_argument('source', metavar='REPO_OR_ARCHIVE')
+    parser.add_argument('target', metavar='REPO_OR_ARCHIVE')
+    args = parser.parse_args()
+
+
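+    # Each -v/-q (or an explicit --verbosity N) shifts the level index
+    # down or up from the ERROR default, clamped to DEBUG..CRITICAL.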
+    LOG_LEVELS = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL]
+    DEFAULT_LOG_LEVEL = logging.ERROR
+    log_level = LOG_LEVELS.index(DEFAULT_LOG_LEVEL)
+
+    for adjustment in args.log_level or ():
+        log_level = min(len(LOG_LEVELS) - 1, max(log_level - adjustment, 0))
+    logger.setLevel(LOG_LEVELS[log_level])
+
+
+    if "::" in args.source:
+        (src_repo_path, _, src_archive) = args.source.partition("::")
+        entry = None
+        for candidate_entry in read_repo(src_repo_path):
+            if candidate_entry['name'] != src_archive:
+                continue
+            entry = candidate_entry
+            break
+
+        if entry is None:
+            logger.critical("Did not find archive ‘%s’", src_archive)
+            sys.exit(1)
+
+        copy_archive(src_repo_path, args.target, entry)
+    else:
+        for entry in ToSync(args.source, args.target):
+            copy_archive(args.source, args.target, entry)
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/modules/borgcopy/default.nix b/modules/borgcopy/default.nix
index eae07dc8..afc6c37b 100644
--- a/modules/borgcopy/default.nix
+++ b/modules/borgcopy/default.nix
@@ -3,33 +3,23 @@
 with lib;
 
 let
-  copyBorg = flakeInputs.mach-nix.lib.${config.nixpkgs.system}.buildPythonPackage rec {
-    pname = "copy-borg";
-    src = ./copy;
-    version = "0.0.0";
-    ignoreDataOutdated = true;
+  copyBorg =
+    with pkgs.poetry2nix;
+    mkPoetryApplication {
+      projectDir = cleanPythonSources { src = ./.; };
 
-    requirements = ''
-      humanize
-      tqdm
-      python-dateutil
-      xdg
-      python-unshare
-      pyprctl
-      halo
-    '';
-    postInstall = ''
-      wrapProgram $out/bin/copy_borg \
-        --prefix PATH : ${makeBinPath (with pkgs; [util-linux borgbackup])}:${config.security.wrapperDir}
-    '';
-
-    providers.python-unshare = "nixpkgs";
-    overridesPre = [
-      (self: super: { python-unshare = super.python-unshare.overrideAttrs (oldAttrs: { name = "python-unshare-0.2.1"; version = "0.2.1"; }); })
-    ];
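+      # poetry2nix cannot build these two from the lock file alone:
+      # pyprctl's sdist needs setuptools at build time, and python-unshare
+      # (already sourced from nixpkgs under mach-nix) is taken prebuilt
+      # from nixpkgs rather than built from the lock file.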
+      overrides = overrides.withDefaults (self: super: {
+        pyprctl = super.pyprctl.overridePythonAttrs (oldAttrs: {
+          buildInputs = (oldAttrs.buildInputs or []) ++ [super.setuptools];
+        });
+        inherit (pkgs.python3Packages) python-unshare;
+      });
 
-    # _.tomli.buildInputs.add = with pkgs."python3Packages"; [ flit-core ];
-  };
+      postInstall = ''
+        wrapProgram $out/bin/copy_borg \
+          --prefix PATH : ${makeBinPath (with pkgs; [util-linux borgbackup])}:${config.security.wrapperDir}
+      '';
+    };
 
   copyService = name: opts: nameValuePair "copy-borg@${utils.escapeSystemdPath name}" {
     serviceConfig = {
diff --git a/modules/borgcopy/poetry.lock b/modules/borgcopy/poetry.lock
new file mode 100644
index 00000000..759ecfe9
--- /dev/null
+++ b/modules/borgcopy/poetry.lock
@@ -0,0 +1,180 @@
+# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+category = "main"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "halo"
+version = "0.0.31"
+description = "Beautiful terminal spinners in Python"
+category = "main"
+optional = false
+python-versions = ">=3.4"
+files = [
+    {file = "halo-0.0.31-py2-none-any.whl", hash = "sha256:5350488fb7d2aa7c31a1344120cee67a872901ce8858f60da7946cef96c208ab"},
+    {file = "halo-0.0.31.tar.gz", hash = "sha256:7b67a3521ee91d53b7152d4ee3452811e1d2a6321975137762eb3d70063cc9d6"},
+]
+
+[package.dependencies]
+colorama = ">=0.3.9"
+log-symbols = ">=0.0.14"
+six = ">=1.12.0"
+spinners = ">=0.0.24"
+termcolor = ">=1.1.0"
+
+[package.extras]
+ipython = ["IPython (==5.7.0)", "ipywidgets (==7.1.0)"]
+
+[[package]]
+name = "humanize"
+version = "4.6.0"
+description = "Python humanize utilities"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "humanize-4.6.0-py3-none-any.whl", hash = "sha256:401201aca462749773f02920139f302450cb548b70489b9b4b92be39fe3c3c50"},
+    {file = "humanize-4.6.0.tar.gz", hash = "sha256:5f1f22bc65911eb1a6ffe7659bd6598e33dcfeeb904eb16ee1e705a09bf75916"},
+]
+
+[package.extras]
+tests = ["freezegun", "pytest", "pytest-cov"]
+
+[[package]]
+name = "log-symbols"
+version = "0.0.14"
+description = "Colored symbols for various log levels for Python"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "log_symbols-0.0.14-py3-none-any.whl", hash = "sha256:4952106ff8b605ab7d5081dd2c7e6ca7374584eff7086f499c06edd1ce56dcca"},
+    {file = "log_symbols-0.0.14.tar.gz", hash = "sha256:cf0bbc6fe1a8e53f0d174a716bc625c4f87043cc21eb55dd8a740cfe22680556"},
+]
+
+[package.dependencies]
+colorama = ">=0.3.9"
+
+[[package]]
+name = "pyprctl"
+version = "0.1.3"
+description = "An interface to Linux's prctl() syscall written in pure Python using ctypes."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "pyprctl-0.1.3-py3-none-any.whl", hash = "sha256:6302e5114f078fb33e5799835d0a69e2fc180bb6b28ad073515fa40c5272f1dd"},
+    {file = "pyprctl-0.1.3.tar.gz", hash = "sha256:1fb54d3ab030ec02e4afc38fb9662d6634c12834e91ae7959de56a9c09f69c26"},
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+category = "main"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "python-unshare"
+version = "0.2"
+description = "Python bindings for the Linux unshare() syscall"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "python-unshare-0.2.tar.gz", hash = "sha256:f79b7de441b6c27930b775085a6a4fd2f378b628737aaaebc2a6c519023fd47a"},
+]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "spinners"
+version = "0.0.24"
+description = "Spinners for terminals"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "spinners-0.0.24-py3-none-any.whl", hash = "sha256:2fa30d0b72c9650ad12bbe031c9943b8d441e41b4f5602b0ec977a19f3290e98"},
+    {file = "spinners-0.0.24.tar.gz", hash = "sha256:1eb6aeb4781d72ab42ed8a01dcf20f3002bf50740d7154d12fb8c9769bf9e27f"},
+]
+
+[[package]]
+name = "termcolor"
+version = "2.2.0"
+description = "ANSI color formatting for output in terminal"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "termcolor-2.2.0-py3-none-any.whl", hash = "sha256:91ddd848e7251200eac969846cbae2dacd7d71c2871e92733289e7e3666f48e7"},
+    {file = "termcolor-2.2.0.tar.gz", hash = "sha256:dfc8ac3f350788f23b2947b3e6cfa5a53b630b612e6cd8965a015a776020b99a"},
+]
+
+[package.extras]
+tests = ["pytest", "pytest-cov"]
+
+[[package]]
+name = "tqdm"
+version = "4.65.0"
+description = "Fast, Extensible Progress Meter"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"},
+    {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["py-make (>=0.1.0)", "twine", "wheel"]
+notebook = ["ipywidgets (>=6)"]
+slack = ["slack-sdk"]
+telegram = ["requests"]
+
+[[package]]
+name = "xdg"
+version = "6.0.0"
+description = "Variables defined by the XDG Base Directory Specification"
+category = "main"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+    {file = "xdg-6.0.0-py3-none-any.whl", hash = "sha256:df3510755b4395157fc04fc3b02467c777f3b3ca383257397f09ab0d4c16f936"},
+    {file = "xdg-6.0.0.tar.gz", hash = "sha256:24278094f2d45e846d1eb28a2ebb92d7b67fc0cab5249ee3ce88c95f649a1c92"},
+]
+
+[metadata]
+lock-version = "2.0"
+python-versions = ">=3.10.0,<3.12"
+content-hash = "3c6b538852447a8f3ae34e1be122716d47e669a2b44f7c5d3d850e5d877353c7"
diff --git a/modules/borgcopy/pyproject.toml b/modules/borgcopy/pyproject.toml
new file mode 100644
index 00000000..f3401ed2
--- /dev/null
+++ b/modules/borgcopy/pyproject.toml
@@ -0,0 +1,22 @@
+[tool.poetry]
+name = "copy_borg"
+version = "0.0.0"
+authors = ["Gregor Kleen <gkleen@yggdrasil.li>"]
+description = ""
+
+[tool.poetry.scripts]
+copy_borg = "copy_borg.__main__:main"
+
+[tool.poetry.dependencies]
+python = ">=3.10.0,<3.12"
+humanize = "^4.6.0"
+tqdm = "^4.65.0"
+python-dateutil = "^2.8.2"
+xdg = "^6.0.0"
+python-unshare = "^0.2"
+pyprctl = "^0.1.3"
+halo = "^0.0.31"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
\ No newline at end of file