From 0e9f1e85cd8c6f9d546ef88e971043b909017170 Mon Sep 17 00:00:00 2001 From: Gregor Kleen Date: Mon, 7 Nov 2022 20:51:39 +0100 Subject: ... --- flake.lock | 13 + flake.nix | 11 +- hosts/surtr/prometheus/tls.crt | 19 +- hosts/vidhar/borg/copy/copy_borg/__main__.py | 556 -------------------- hosts/vidhar/borg/copy/setup.py | 10 - hosts/vidhar/borg/default.nix | 91 ++-- hosts/vidhar/borg/pyprctl-packages.nix | 21 - hosts/vidhar/prometheus/ca/.gitignore | 3 - hosts/vidhar/prometheus/ca/ca.crt | 20 +- hosts/vidhar/prometheus/ca/ca.key | 21 + hosts/vidhar/prometheus/ca/ca.key.sops | 21 - hosts/vidhar/prometheus/ca/certs/01.pem | 39 -- hosts/vidhar/prometheus/ca/certs/02.pem | 38 -- hosts/vidhar/prometheus/ca/index.txt | 2 - hosts/vidhar/prometheus/ca/index.txt.attr | 1 - hosts/vidhar/prometheus/ca/serial | 1 - hosts/vidhar/prometheus/tls.crt | 17 +- modules/borgcopy/copy/copy_borg/__main__.py | 556 ++++++++++++++++++++ modules/borgcopy/copy/setup.py | 10 + modules/borgcopy/default.nix | 120 +++++ modules/borgsnap/default.nix | 16 +- modules/zfssnap/zfssnap/zfssnap/__main__.py | 2 - overlays/matrix-synapse/1.70.1/default.nix | 111 ++++ overlays/matrix-synapse/1.70.1/plugins/default.nix | 8 + overlays/matrix-synapse/1.70.1/plugins/ldap3.nix | 17 + .../1.70.1/plugins/mjolnir-antispam.nix | 32 ++ overlays/matrix-synapse/1.70.1/plugins/pam.nix | 15 + .../1.70.1/plugins/shared-secret-auth.nix | 26 + overlays/matrix-synapse/1.70.1/tools/default.nix | 6 + .../1.70.1/tools/rust-synapse-compress-state.nix | 30 ++ overlays/matrix-synapse/1.70.1/tools/synadm.nix | 47 ++ overlays/matrix-synapse/default.nix | 3 + shell.nix | 35 +- tools/ca/ca/__main__.py | 568 +++++++++++++++++++++ tools/ca/setup.py | 10 + 35 files changed, 1710 insertions(+), 786 deletions(-) delete mode 100755 hosts/vidhar/borg/copy/copy_borg/__main__.py delete mode 100644 hosts/vidhar/borg/copy/setup.py delete mode 100644 hosts/vidhar/borg/pyprctl-packages.nix delete mode 100644 hosts/vidhar/prometheus/ca/.gitignore create mode 100644 hosts/vidhar/prometheus/ca/ca.key delete mode 100644 hosts/vidhar/prometheus/ca/ca.key.sops delete mode 100644 hosts/vidhar/prometheus/ca/certs/01.pem delete mode 100644 hosts/vidhar/prometheus/ca/certs/02.pem delete mode 100644 hosts/vidhar/prometheus/ca/index.txt delete mode 100644 hosts/vidhar/prometheus/ca/index.txt.attr delete mode 100644 hosts/vidhar/prometheus/ca/serial create mode 100755 modules/borgcopy/copy/copy_borg/__main__.py create mode 100644 modules/borgcopy/copy/setup.py create mode 100644 modules/borgcopy/default.nix create mode 100644 overlays/matrix-synapse/1.70.1/default.nix create mode 100644 overlays/matrix-synapse/1.70.1/plugins/default.nix create mode 100644 overlays/matrix-synapse/1.70.1/plugins/ldap3.nix create mode 100644 overlays/matrix-synapse/1.70.1/plugins/mjolnir-antispam.nix create mode 100644 overlays/matrix-synapse/1.70.1/plugins/pam.nix create mode 100644 overlays/matrix-synapse/1.70.1/plugins/shared-secret-auth.nix create mode 100644 overlays/matrix-synapse/1.70.1/tools/default.nix create mode 100644 overlays/matrix-synapse/1.70.1/tools/rust-synapse-compress-state.nix create mode 100644 overlays/matrix-synapse/1.70.1/tools/synadm.nix create mode 100644 overlays/matrix-synapse/default.nix create mode 100644 tools/ca/ca/__main__.py create mode 100644 tools/ca/setup.py diff --git a/flake.lock b/flake.lock index 89e68eab..54334c39 100644 --- a/flake.lock +++ b/flake.lock @@ -94,6 +94,18 @@ "type": "github" } }, + "leapseconds": { + "flake": false, + "locked": { + "narHash": 
"sha256-VCE0xQRXz833Qann3dgKU2wHPRZDdSN4/M+0nd3yYl4=", + "type": "file", + "url": "https://www.ietf.org/timezones/data/leap-seconds.list" + }, + "original": { + "type": "file", + "url": "https://www.ietf.org/timezones/data/leap-seconds.list" + } + }, "mach-nix": { "inputs": { "flake-utils": "flake-utils", @@ -197,6 +209,7 @@ "deploy-rs": "deploy-rs", "flake-compat": "flake-compat", "home-manager": "home-manager", + "leapseconds": "leapseconds", "mach-nix": "mach-nix", "nixpkgs": "nixpkgs", "nvfetcher": "nvfetcher", diff --git a/flake.nix b/flake.nix index 8f52e8d0..2b22d7c5 100644 --- a/flake.nix +++ b/flake.nix @@ -66,6 +66,11 @@ pypi-deps-db.follows = "pypi-deps-db"; }; }; + + leapseconds = { + url = "https://www.ietf.org/timezones/data/leap-seconds.list"; + flake = false; + }; }; outputs = { self, nixpkgs, home-manager, sops-nix, deploy-rs, nvfetcher, ... }@inputs: @@ -210,11 +215,7 @@ apps = foldr recursiveUpdate {} [activateNixosConfigurations activateHomeManagerConfigurations]; - devShells = forAllSystems (system: systemPkgs: { default = import ./shell.nix { - pkgs = self.legacyPackages.${system}; - deploy-rs = deploy-rs.packages.${system}.deploy-rs; - nvfetcher = nvfetcher.defaultPackage.${system}; - };}); + devShells = forAllSystems (system: systemPkgs: { default = import ./shell.nix ({ inherit system; } // inputs); }); templates.default = { path = ./.; diff --git a/hosts/surtr/prometheus/tls.crt b/hosts/surtr/prometheus/tls.crt index ba958f40..d81f429f 100644 --- a/hosts/surtr/prometheus/tls.crt +++ b/hosts/surtr/prometheus/tls.crt @@ -1,10 +1,13 @@ -----BEGIN CERTIFICATE----- -MIIBXzCCARGgAwIBAgIBATAFBgMrZXAwHzEdMBsGA1UEAwwUcHJvbWV0aGV1cy55 -Z2dkcmFzaWwwIBcNMjIwNDA4MjAwMzU1WhgPMjA5MDA0MjYyMDAzNTVaMBoxGDAW -BgNVBAMMD3N1cnRyLnlnZ2RyYXNpbDAqMAUGAytlcAMhAAJd8I32X/z9J0cO2Oz+ -4KAoIJq0igdMdbLBA+8WO+vgo3UwczAMBgNVHRMBAf8EAjAAMEQGA1UdEQQ9MDuC -GnByb21ldGhldXMuc3VydHIueWdnZHJhc2lsgh1wcm9tZXRoZXVzLnN1cnRyLnln -Z2RyYXNpbC5saTAdBgNVHQ4EFgQUN52tPcv5FFppzeJx2AiXk6UgPDgwBQYDK2Vw -A0EAPN9zhaeBB2C1TursdARH0jVBz9g0dRhP7sO5ZG0K+xp24paLXiTF1rYub24p -/yZw71p7M0BAE+hJqYBzYo5YBQ== +MIIB5TCCAWWgAwIBAgIPQAAAAGNpYE436fsCRvVfMAUGAytlcTAfMR0wGwYDVQQD +DBRwcm9tZXRoZXVzLnlnZ2RyYXNpbDAeFw0yMjExMDcxOTM5NDFaFw0zMjExMDcx +OTQ0NDFaMBoxGDAWBgNVBAMMD3N1cnRyLnlnZ2RyYXNpbDAqMAUGAytlcAMhAAJd +8I32X/z9J0cO2Oz+4KAoIJq0igdMdbLBA+8WO+vgo4G8MIG5MB8GA1UdIwQYMBaA +FObrhCUDCZk6/JeeDMNWl8WeLr+MMB0GA1UdDgQWBBQ3na09y/kUWmnN4nHYCJeT +pSA8ODAOBgNVHQ8BAf8EBAMCBeAwDAYDVR0TAQH/BAIwADATBgNVHSUEDDAKBggr +BgEFBQcDAjBEBgNVHREEPTA7ghpwcm9tZXRoZXVzLnN1cnRyLnlnZ2RyYXNpbIId +cHJvbWV0aGV1cy5zdXJ0ci55Z2dkcmFzaWwubGkwBQYDK2VxA3MAYHd3I/Mg/t34 +zdcxrIKOAKJ9ZVVoP0msk/viKrZ4b+Q9rKSNEnkyk0y56Z7FlLDxGLScaemqQ3uA +5hjhdTci/xd4xYX/edLw1AWGRs2kBe3vs2WOmrdcKa849vdMH27G/P/+bgbdofCN +fukxYHzpESYA -----END CERTIFICATE----- diff --git a/hosts/vidhar/borg/copy/copy_borg/__main__.py b/hosts/vidhar/borg/copy/copy_borg/__main__.py deleted file mode 100755 index 5b374d99..00000000 --- a/hosts/vidhar/borg/copy/copy_borg/__main__.py +++ /dev/null @@ -1,556 +0,0 @@ -#!@python@/bin/python - -import json -import os -import subprocess -import re -import sys -import io -from sys import stderr -from humanize import naturalsize - -from tempfile import TemporaryDirectory - -from datetime import (datetime, timedelta) -from dateutil.tz import (tzlocal, tzutc) -import dateutil.parser -import argparse - -from tqdm import tqdm - -from xdg import xdg_runtime_dir -import pathlib - -import unshare -from pyprctl import CapState, Cap, cap_ambient_raise, cap_ambient_is_set, 
set_keepcaps -from pwd import getpwnam - -import logging - -import signal -import time -import math - -from halo import Halo - -from collections import deque - -import select -import fcntl - -from multiprocessing import Process, Manager -from contextlib import closing - - -halo_args = { - 'stream': stderr, - 'enabled': stderr.isatty(), - 'spinner': 'arc' -} - -borg_pwd = getpwnam('borg') - -def as_borg(caps=set()): - global logger - - try: - if caps: - c_state = CapState.get_current() - c_state.permitted.add(*caps) - c_state.set_current() - - # logger.debug("before setgid/setuid: cap_permitted=%s", CapState.get_current().permitted) - - set_keepcaps(True) - - os.setgid(borg_pwd.pw_gid) - os.setuid(borg_pwd.pw_uid) - - if caps: - # logger.debug("after setgid/setuid: cap_permitted=%s", CapState.get_current().permitted) - - c_state = CapState.get_current() - c_state.permitted = caps.copy() - c_state.inheritable.add(*caps) - c_state.set_current() - - # logger.debug("cap_permitted=%s", CapState.get_current().permitted) - # logger.debug("cap_inheritable=%s", CapState.get_current().inheritable) - - for cap in caps: - cap_ambient_raise(cap) - # logger.debug("cap_ambient[%s]=%s", cap, cap_ambient_is_set(cap)) - except Exception: - logger.error(format_exc()) - raise - -def borg_json(*args, **kwargs): - global logger - - with subprocess.Popen(*args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, **kwargs) as proc: - stdout_buffer = io.BytesIO() - - proc_logger = logger.getChild('borg') - stdout_logger = proc_logger.getChild('stdout') - stderr_logger = proc_logger.getChild('stderr') - - fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK) - fcntl.fcntl(proc.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK) - - poll = select.poll() - poll.register(proc.stdout, select.POLLIN | select.POLLHUP) - poll.register(proc.stderr, select.POLLIN | select.POLLHUP) - pollc = 2 - events = poll.poll() - stderr_linebuf = bytearray() - - while pollc > 0 and len(events) > 0: - for rfd, event in events: - if event & select.POLLIN: - if rfd == proc.stdout.fileno(): - try: - buf = os.read(proc.stdout.fileno(), 8192) - # stdout_logger.debug(buf) - stdout_buffer.write(buf) - except BlockingIOError: - pass - if rfd == proc.stderr.fileno(): - try: - stderr_linebuf.extend(os.read(proc.stderr.fileno(), 8192)) - except BlockingIOError: - pass - - while stderr_linebuf: - line, sep, stderr_linebuf = stderr_linebuf.partition(b'\n') - if not sep: - stderr_linebuf = line - break - - stderr_logger.info(line.decode()) - if event == select.POLLHUP: - poll.unregister(rfd) - pollc -= 1 - - if pollc > 0: - events = poll.poll() - - for handler in proc_logger.handlers: - handler.flush() - - ret = proc.wait() - if ret != 0: - raise Exception(f'borg subprocess exited with returncode {ret}') - - stdout_buffer.seek(0) - return json.load(stdout_buffer) - -def read_repo(path): - global logger - - with Halo(text=f'Listing {path}', **halo_args) as sp: - if not sp.enabled: - logger.debug('Listing %s...', path) - res = borg_json(['borg', 'list', '--info', '--lock-wait=600', '--json', path], preexec_fn=lambda: as_borg())['archives'] - if sp.enabled: - sp.succeed(f'{len(res)} archives in {path}') - else: - logger.info('%d archives in ‘%s’', len(res), path) - return res - -class ToSync: - to_sync = deque() - - def __init__(self, source, target): - self.source = source - self.target = target - - def __iter__(self): - return self - - def 
__next__(self): - global logger - - if self.to_sync: - return self.to_sync.popleft() - - while True: - try: - src = read_repo(self.source) - dst = read_repo(self.target) - except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err: - logger.error(err) - continue - - self.to_sync.extend([entry for entry in src if entry['name'] not in {dst_entry['name'] for dst_entry in dst} and not entry['name'].endswith('.checkpoint')]) - - if self.to_sync: - return self.to_sync.popleft() - - raise StopIteration - -def copy_archive(src_repo_path, dst_repo_path, entry): - global logger - - def do_copy(tmpdir_q): - global logger - - nonlocal src_repo_path, dst_repo_path, entry - - tmpdir = tmpdir_q.get() - - cache_suffix = None - with Halo(text=f'Determine archive parameters', **halo_args) as sp: - if not sp.enabled: - logger.debug('Determining archive parameters...') - match = re.compile('^(.*)-[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.(checkpoint|recreate)(\.[0-9]+)?)?').fullmatch(entry['name']) - if match: - repo_id = borg_json(['borg', 'info', '--info', '--lock-wait=600', '--json', src_repo_path], preexec_fn=lambda: as_borg())['repository']['id'] - - if repo_id: - cache_suffix = f'{repo_id}_{match.group(1)}' - if sp.enabled: - sp.succeed(f'Will process {entry["name"]} ({dateutil.parser.isoparse(entry["start"])}, cache_suffix={cache_suffix})') - else: - logger.info('Will process ‘%s’ (%s, cache_suffix=%s)', entry['name'], dateutil.parser.isoparse(entry['start']), cache_suffix) - - logger.debug('Setting up environment...') - unshare.unshare(unshare.CLONE_NEWNS) - subprocess.run(['mount', '--make-rprivate', '/'], check=True) - chroot = pathlib.Path(tmpdir) / 'chroot' - upper = pathlib.Path(tmpdir) / 'upper' - work = pathlib.Path(tmpdir) / 'work' - for path in [chroot,upper,work]: - path.mkdir() - subprocess.run(['mount', '-t', 'overlay', 'overlay', '-o', f'lowerdir=/,upperdir={upper},workdir={work}', chroot], check=True) - bindMounts = ['nix', 'run', 'run/secrets.d', 'run/wrappers', 'proc', 'dev', 'sys', pathlib.Path(os.path.expanduser('~')).relative_to('/')] - if os.environ.get('BORG_BASE_DIR'): - bindMounts.append(pathlib.Path(os.environ['BORG_BASE_DIR']).relative_to('/')) - if not ":" in src_repo_path: - bindMounts.append(pathlib.Path(src_repo_path).relative_to('/')) - if 'SSH_AUTH_SOCK' in os.environ: - bindMounts.append(pathlib.Path(os.environ['SSH_AUTH_SOCK']).parent.relative_to('/')) - for bindMount in bindMounts: - (chroot / bindMount).mkdir(parents=True,exist_ok=True) - subprocess.run(['mount', '--bind', pathlib.Path('/') / bindMount, chroot / bindMount], check=True) - os.chroot(chroot) - os.chdir('/') - try: - os.unlink('/etc/fuse.conf') - except FileNotFoundError: - pass - pathlib.Path('/etc/fuse.conf').parent.mkdir(parents=True,exist_ok=True) - with open('/etc/fuse.conf', 'w') as fuse_conf: - fuse_conf.write('user_allow_other\nmount_max = 1000\n') - dir = pathlib.Path('/borg') - dir.mkdir(parents=True,exist_ok=True,mode=0o0750) - os.chown(dir, borg_pwd.pw_uid, borg_pwd.pw_gid) - - total_size = None - total_files = None - if stderr.isatty(): - with Halo(text=f'Determine size', **halo_args) as sp: - stats = borg_json(['borg', 'info', '--info', '--json', '--lock-wait=600', f'{src_repo_path}::{entry["name"]}'], preexec_fn=lambda: as_borg())['archives'][0]['stats'] - total_size = stats['original_size'] - total_files = stats['nfiles'] - if sp.enabled: - sp.succeed(f'{total_files} files, {naturalsize(total_size, binary=True)}') - else: - logger.info('%d files, %s', 
total_files, naturalsize(total_size, binary=True)) - with subprocess.Popen(['borg', 'mount', '-o', 'allow_other,ignore_permissions', '--foreground', '--progress', '--lock-wait=600', f'{src_repo_path}::{entry["name"]}', dir], preexec_fn=lambda: as_borg()) as mount_proc: - with Halo(text='Waiting for mount', **halo_args) as sp: - if not sp.enabled: - logger.debug('Waiting for mount...') - wait_start = datetime.now() - while True: - if os.path.ismount(dir): - break - elif datetime.now() - wait_start > timedelta(minutes=15): - ret.check_returncode() - time.sleep(0.1) - if sp.enabled: - sp.succeed('Mounted') - else: - logger.info('Mounted %s', f'{src_repo_path}::{entry["name"]}') - - while True: - with tqdm(total=total_size, unit_scale=True, unit_divisor=1024, unit='B', smoothing=0.01, disable=None, dynamic_ncols=True, maxinterval=0.5, miniters=1) as progress: - seen = 0 - env = os.environ.copy() - create_args = ['borg', - 'create', - '--lock-wait=600', - '--one-file-system', - '--compression=auto,zstd,10', - '--chunker-params=10,23,16,4095', - '--files-cache=ctime,size', - '--show-rc', - '--upload-buffer=100', - '--upload-ratelimit=20480', - '--log-json', - '--progress', - '--list', - '--filter=AMEi-x?', - '--stats' - ] - archive_time = datetime.strptime(entry["time"], "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=tzlocal()).astimezone(tzutc()) - create_args += [f'--timestamp={archive_time.strftime("%Y-%m-%dT%H:%M:%S")}'] - if cache_suffix: - env['BORG_FILES_CACHE_SUFFIX'] = cache_suffix - else: - create_args += ['--files-cache=disabled'] - create_args += [f'{dst_repo_path}::{entry["name"]}', '.'] - - with subprocess.Popen(create_args, stdin=subprocess.DEVNULL, stderr=subprocess.PIPE, stdout=subprocess.PIPE, env=env, preexec_fn=lambda: as_borg(caps={Cap.DAC_READ_SEARCH}), cwd=dir) as proc: - last_list = None - last_list_time = time.monotonic_ns() - logger.info('Creating...') - - proc_logger = logger.getChild('borg') - stdout_logger = proc_logger.getChild('stdout') - stderr_logger = proc_logger.getChild('stderr') - - fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK) - fcntl.fcntl(proc.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK) - - poll = select.poll() - poll.register(proc.stdout, select.POLLIN | select.POLLHUP) - poll.register(proc.stderr, select.POLLIN | select.POLLHUP) - pollc = 2 - events = poll.poll() - stdout_linebuf = bytearray() - stderr_linebuf = bytearray() - - while pollc > 0 and len(events) > 0: - # logger.debug('%d events', len(events)) - for rfd, event in events: - # logger.debug('event %s', event) - if event & select.POLLIN: - if rfd == proc.stdout.fileno(): - try: - # logger.debug('reading stdout...') - stdout_linebuf.extend(os.read(proc.stdout.fileno(), 8192)) - # logger.debug('read stdout, len(stdout_linebuf)=%d', len(stdout_linebuf)) - except BlockingIOError: - pass - - while stdout_linebuf: - # logger.debug('stdout line...') - line, sep, stdout_linebuf = stdout_linebuf.partition(b'\n') - if not sep: - stdout_linebuf = line - break - - stdout_logger.info(line.decode()) - # logger.debug('handled stdout lines, %d leftover', len(stdout_linebuf)) - if rfd == proc.stderr.fileno(): - try: - # logger.debug('reading stderr...') - stderr_linebuf.extend(os.read(proc.stderr.fileno(), 8192)) - # logger.debug('read stderr, len(stderr_linebuf)=%d', len(stderr_linebuf)) - except BlockingIOError: - pass - - while stderr_linebuf: - # logger.debug('stderr line...') - line, sep, 
stderr_linebuf = stderr_linebuf.partition(b'\n') - if not sep: - stderr_linebuf = line - break - - try: - json_line = json.loads(line) - except json.decoder.JSONDecodeError: - if progress.disable: - stderr_logger.error(line.decode()) - else: - tqdm.write(line.decode()) - continue - - # logger.debug('stderr line decoded: %s', json_line['type'] if 'type' in json_line else None) - - t = '' - if 'time' in json_line and not progress.disable: - ts = datetime.fromtimestamp(json_line['time']).replace(tzinfo=tzlocal()) - t = f'{ts.isoformat(timespec="minutes")} ' - if json_line['type'] == 'archive_progress' and not progress.disable: - now = time.monotonic_ns() - if last_list_time is None or now - last_list_time >= 3e9: - last_list_time = now - if 'path' in json_line and json_line['path']: - progress.set_description(f'… {json_line["path"]}', refresh=False) - else: - progress.set_description(None, refresh=False) - elif last_list is not None: - progress.set_description(last_list, refresh=False) - nfiles=json_line["nfiles"] - if total_files is not None: - nfiles=f'{json_line["nfiles"]}/{total_files}' - progress.set_postfix(compressed=naturalsize(json_line['compressed_size'], binary=True), deduplicated=naturalsize(json_line['deduplicated_size'], binary=True), nfiles=nfiles, refresh=False) - progress.update(json_line["original_size"] - seen) - seen = json_line["original_size"] - elif json_line['type'] == 'archive_progress': - now = time.monotonic_ns() - if last_list_time is None or now - last_list_time >= 3e9: - last_list_time = now - if 'path' in json_line and json_line['path']: - stderr_logger.debug('… %s (%s)', json_line["path"], naturalsize(json_line["original_size"])) - else: - stderr_logger.debug('… (%s)', naturalsize(json_line["original_size"])) - elif json_line['type'] == 'file_status': - # tqdm.write(t + f'{json_line["status"]} {json_line["path"]}') - last_list = f'{json_line["status"]} {json_line["path"]}' - last_list_time = time.monotonic_ns() - progress.set_description(last_list, refresh=False) - if progress.disable: - stderr_logger.info(last_list) - elif (json_line['type'] == 'log_message' or json_line['type'] == 'progress_message' or json_line['type'] == 'progress_percent') and ('message' in json_line or 'msgid' in json_line): - if 'message' in json_line: - if progress.disable: - stderr_logger.info(t + json_line['message']) - else: - tqdm.write(t + json_line['message']) - elif 'msgid' in json_line: - if progress.disable: - stderr_logger.info(t + json_line['msgid']) - else: - tqdm.write(t + json_line['msgid']) - else: - if progress.disable: - stderr_logger.info(t + line.decode()) - else: - tqdm.write(t + line.decode()) - # logger.debug('handled stderr lines, %d leftover', len(stderr_linebuf)) - if event == select.POLLHUP: - poll.unregister(rfd) - pollc -= 1 - - if pollc > 0: - # logger.debug('polling %d fds...', pollc) - events = poll.poll() - # logger.debug('done polling') - - # logger.debug('borg create closed stdout/stderr') - if stdout_linebuf: - logger.error('unterminated line leftover in stdout: %s', stdout_linebuf) - if stderr_linebuf: - logger.error('unterminated line leftover in stdout: %s', stderr_linebuf) - progress.set_description(None) - ret = proc.wait() - # logger.debug('borg create terminated; ret=%d', ret) - if ret != 0: - dst = None - try: - dst = read_repo(dst_repo_path) - except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err: - logger.error(err) - continue - else: - if any(map(lambda other: entry['name'] == other['name'], dst)): - 
logger.info('destination exists, terminating') - break - - logger.warn('destination does not exist, retrying') - continue - else: - # logger.debug('terminating') - break - mount_proc.terminate() - - with Manager() as manager: - tmpdir_q = manager.Queue(1) - - with closing(Process(target=do_copy, args=(tmpdir_q,), name='do_copy')) as p: - p.start() - - with TemporaryDirectory(prefix=f'borg-mount_{entry["name"]}_', dir=os.environ.get('RUNTIME_DIRECTORY')) as tmpdir: - tmpdir_q.put(tmpdir) - p.join() - return p.exitcode - -def sigterm(signum, frame): - raise SystemExit(128 + signum) - -def main(): - signal.signal(signal.SIGTERM, sigterm) - - global logger - logger = logging.getLogger(__name__) - console_handler = logging.StreamHandler() - console_handler.setFormatter( logging.Formatter('[%(levelname)s](%(name)s): %(message)s') ) - if sys.stderr.isatty(): - console_handler.setFormatter( logging.Formatter('%(asctime)s [%(levelname)s](%(name)s): %(message)s') ) - - burst_max = 1000 - burst = burst_max - last_use = None - inv_rate = 1e7 - def consume_filter(record): - nonlocal burst, burst_max, inv_rate, last_use - - delay = None - while True: - now = time.monotonic_ns() - burst = min(burst_max, burst + math.floor((now - last_use) / inv_rate)) if last_use else burst_max - last_use = now - - if burst > 0: - burst -= 1 - if delay: - delay = now - delay - - return True - - if delay is None: - delay = now - time.sleep(inv_rate / 1e9) - console_handler.addFilter(consume_filter) - - logging.getLogger().addHandler(console_handler) - - # log uncaught exceptions - def log_exceptions(type, value, tb): - global logger - - logger.error(value) - sys.__excepthook__(type, value, tb) # calls default excepthook - - sys.excepthook = log_exceptions - - parser = argparse.ArgumentParser(prog='copy') - parser.add_argument('--verbosity', dest='log_level', action='append', type=int) - parser.add_argument('--verbose', '-v', dest='log_level', action='append_const', const=1) - parser.add_argument('--quiet', '-q', dest='log_level', action='append_const', const=-1) - parser.add_argument('source', metavar='REPO_OR_ARCHIVE') - parser.add_argument('target', metavar='REPO_OR_ARCHIVE') - args = parser.parse_args() - - - LOG_LEVELS = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL] - DEFAULT_LOG_LEVEL = logging.ERROR - log_level = LOG_LEVELS.index(DEFAULT_LOG_LEVEL) - - for adjustment in args.log_level or (): - log_level = min(len(LOG_LEVELS) - 1, max(log_level - adjustment, 0)) - logger.setLevel(LOG_LEVELS[log_level]) - - - if "::" in args.source: - (src_repo_path, _, src_archive) = args.source.partition("::") - entry = None - for candidate_entry in read_repo(src_repo_path): - if entry['name'] != src_archive: - continue - entry = candidate_entry - break - - if entry is None: - logger.critical("Did not find archive ‘%s’", src_archive) - os.exit(1) - - copy_archive(src_repo_path, args.target, entry) - else: - for entry in ToSync(args.source, args.target): - copy_archive(args.source, args.target, entry) - -if __name__ == "__main__": - sys.exit(main()) diff --git a/hosts/vidhar/borg/copy/setup.py b/hosts/vidhar/borg/copy/setup.py deleted file mode 100644 index f77d9560..00000000 --- a/hosts/vidhar/borg/copy/setup.py +++ /dev/null @@ -1,10 +0,0 @@ -from setuptools import setup - -setup(name='copy_borg', - packages=['copy_borg'], - entry_points={ - 'console_scripts': [ - 'copy_borg=copy_borg.__main__:main', - ], - } -) diff --git a/hosts/vidhar/borg/default.nix b/hosts/vidhar/borg/default.nix index 
7672de18..80ce9c7e 100644 --- a/hosts/vidhar/borg/default.nix +++ b/hosts/vidhar/borg/default.nix @@ -1,4 +1,4 @@ -{ config, pkgs, lib, flakeInputs, ... }: +{ config, pkgs, lib, flakeInputs, utils, ... }: with lib; @@ -21,60 +21,26 @@ let ServerAliveCountMax 30 ''; - copyService = { repo, repoEscaped }: let - serviceName = "copy-borg@${repoEscaped}"; - in nameValuePair serviceName { + checkBorgUnit = { serviceConfig = { Type = "oneshot"; - ExecStart = "${copyBorg}/bin/copy_borg --verbosity 3 ${escapeShellArg repo} yggdrasil.borgbase:repo"; - TimeoutStartSec = "8h"; - # User = "borg"; - # Group = "borg"; - # StateDirectory = "borg"; - RuntimeDirectory = "copy-borg"; + ExecStart = "${pkgs.borgbackup}/bin/borg ${utils.escapeSystemdExecArgs [ + "--lock-wait" "3600" + "--progress" + "check" + "--verify-data" + ]} %I"; Environment = [ - "BORG_RSH=\"${pkgs.openssh}/bin/ssh -F ${pkgs.writeText "config" sshConfig}\"" "BORG_BASE_DIR=/var/lib/borg" "BORG_CONFIG_DIR=/var/lib/borg/config" "BORG_CACHE_DIR=/var/lib/borg/cache" "BORG_SECURITY_DIR=/var/lib/borg/security" "BORG_KEYS_DIR=/var/lib/borg/keys" - "BORG_KEY_FILE=${config.sops.secrets."yggdrasil.borgkey".path}" - "BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK=yes" "BORG_HOSTNAME_IS_UNIQUE=yes" + "BORG_RSH=\"${pkgs.openssh}/bin/ssh -F ${pkgs.writeText "config" sshConfig}\"" ]; - - LogRateLimitIntervalSec = 0; }; }; - - copyBorg = flakeInputs.mach-nix.lib.${config.nixpkgs.system}.buildPythonPackage rec { - pname = "copy-borg"; - src = ./copy; - version = "0.0.0"; - ignoreDataOutdated = true; - - requirements = '' - humanize - tqdm - python-dateutil - xdg - python-unshare - pyprctl - halo - ''; - postInstall = '' - wrapProgram $out/bin/copy_borg \ - --prefix PATH : ${makeBinPath (with pkgs; [util-linux borgbackup])}:${config.security.wrapperDir} - ''; - - providers.python-unshare = "nixpkgs"; - overridesPre = [ - (self: super: { python-unshare = super.python-unshare.overrideAttrs (oldAttrs: { name = "python-unshare-0.2.1"; version = "0.2.1"; }); }) - ]; - - # _.tomli.buildInputs.add = with pkgs."python3Packages"; [ flit-core ]; - }; in { config = { services.borgsnap = { @@ -85,7 +51,15 @@ in { keyfile = config.sops.secrets."yggdrasil.borgkey".path; }; - systemd.services = listToAttrs (map copyService [{ repo = "/srv/backup/borg/jotnar"; repoEscaped = "srv-backup-borg-jotnar"; }]); + services.copyborg.jotnar = { + from = "/srv/backup/borg/jotnar"; + to = "yggdrasil.borgbase:repo"; + inherit sshConfig; + keyfile = config.sops.secrets."yggdrasil.borgkey".path; + timerOptions.timerConfig = { + OnCalendar = "*-*-* 00/4:00:00 Europe/Berlin"; + }; + }; services.borgbackup.repos.jotnar = { path = "/srv/backup/borg/jotnar"; @@ -95,6 +69,27 @@ in { in filter (v: v != null) (mapAttrsToList toAuthKey (builtins.readDir dir)); }; + systemd.services."check-borg@${utils.escapeSystemdPath "/srv/backup/borg/jotnar"}" = checkBorgUnit; + systemd.services."check-borg@${utils.escapeSystemdPath "yggdrasil.borgbase:repo"}" = recursiveUpdate checkBorgUnit { + serviceConfig = { + Environment = checkBorgUnit.serviceConfig.Environment ++ [ + "BORG_KEY_FILE=${config.sops.secrets."yggdrasil.borgkey".path}" + ]; + }; + }; + systemd.timers."check-borg@${utils.escapeSystemdPath "/srv/backup/borg/jotnar"}" = { + wantedBy = [ "timers.target" ]; + timerConfig = { + OnCalendar = "Sun *-*-02..08 01:30:00 Europe/Berlin"; + }; + }; + systemd.timers."check-borg@${utils.escapeSystemdPath "yggdrasil.borgbase:repo"}" = { + wantedBy = [ "timers.target" ]; + timerConfig = { + OnCalendar = "Sun 
*-*-02..08 01:30:00 Europe/Berlin"; + }; + }; + boot.postBootCommands = mkBefore '' ${pkgs.findutils}/bin/find /srv/backup/borg -type d -empty -delete ''; @@ -123,13 +118,5 @@ in { group = "borg"; mode = "0400"; }; - - systemd.timers."copy-borg@srv-backup-borg-jotnar" = { - wantedBy = ["multi-user.target"]; - - timerConfig = { - OnCalendar = "*-*-* 00/4:00:00 Europe/Berlin"; - }; - }; }; } diff --git a/hosts/vidhar/borg/pyprctl-packages.nix b/hosts/vidhar/borg/pyprctl-packages.nix deleted file mode 100644 index d3b4256a..00000000 --- a/hosts/vidhar/borg/pyprctl-packages.nix +++ /dev/null @@ -1,21 +0,0 @@ -# Generated by pip2nix 0.8.0.dev1 -# See https://github.com/nix-community/pip2nix - -{ pkgs, fetchurl, fetchgit, fetchhg }: - -self: super: { - "pyprctl" = super.buildPythonPackage rec { - pname = "pyprctl"; - version = "0.1.3"; - src = fetchurl { - url = "https://files.pythonhosted.org/packages/bf/5e/62765de39bbce8111fb1f4453a4a804913bf49179fa265fb713ed66c9d15/pyprctl-0.1.3-py3-none-any.whl"; - sha256 = "1pgif990r92za5rx12mjnq5iiz72d455v0wrawzb73q79w8ya0k3"; - }; - format = "wheel"; - doCheck = false; - buildInputs = []; - checkInputs = []; - nativeBuildInputs = []; - propagatedBuildInputs = []; - }; -} diff --git a/hosts/vidhar/prometheus/ca/.gitignore b/hosts/vidhar/prometheus/ca/.gitignore deleted file mode 100644 index 7c894574..00000000 --- a/hosts/vidhar/prometheus/ca/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -ca.key -ca.cnf -*.old \ No newline at end of file diff --git a/hosts/vidhar/prometheus/ca/ca.crt b/hosts/vidhar/prometheus/ca/ca.crt index 922fed28..8cfea666 100644 --- a/hosts/vidhar/prometheus/ca/ca.crt +++ b/hosts/vidhar/prometheus/ca/ca.crt @@ -1,12 +1,12 @@ -----BEGIN CERTIFICATE----- -MIIBsjCCAWSgAwIBAgIUOzZ8XcFb8XtI2yyWp4S/WMD6QxQwBQYDK2VwMB8xHTAb -BgNVBAMMFHByb21ldGhldXMueWdnZHJhc2lsMCAXDTIyMDQwODE5NDgwMFoYDzIw -OTAwNDI2MTk0ODAwWjAfMR0wGwYDVQQDDBRwcm9tZXRoZXVzLnlnZ2RyYXNpbDAq -MAUGAytlcAMhAOoxPLBH6pnCRtE7V5gejM92gg1vLNLHw3rFIXXchOJmo4GvMIGs -MB0GA1UdDgQWBBRnwBkgZFnueEa7aV8aEAoMRzW4CTBaBgNVHSMEUzBRgBRnwBkg -ZFnueEa7aV8aEAoMRzW4CaEjpCEwHzEdMBsGA1UEAwwUcHJvbWV0aGV1cy55Z2dk -cmFzaWyCFDs2fF3BW/F7SNsslqeEv1jA+kMUMA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMBEGCWCGSAGG+EIBAQQEAwICBDAFBgMrZXADQQD9AC2OHtzW8QSC -HU/4rGdRWRqr3pfclKXimSWaAXMPly2M1qehPI402lhQrIAVF+D1pi/EAGJfbbzF -aurykEMB +MIIBrjCCAS6gAwIBAgIUYV3YPBx91CbgMpOGb5HKMZ2hzRUwBQYDK2VxMB8xHTAb +BgNVBAMMFHByb21ldGhldXMueWdnZHJhc2lsMB4XDTIyMTEwNzE5MjgzNFoXDTMy +MTEwNzE5MzMzNFowHzEdMBsGA1UEAwwUcHJvbWV0aGV1cy55Z2dkcmFzaWwwQzAF +BgMrZXEDOgAVqcV3KGDhcbQt/UR3Yv6OuAGc+Kc8hrDHjAV8K9GTjahc/d49NK2v +FAz0uK8YidIaTVJZjzHhTgCjYzBhMB8GA1UdIwQYMBaAFObrhCUDCZk6/JeeDMNW +l8WeLr+MMB0GA1UdDgQWBBTm64QlAwmZOvyXngzDVpfFni6/jDAOBgNVHQ8BAf8E +BAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAFBgMrZXEDcwAFAqBlI7SpHaSE+0mMzx5x +0M6T3iJtLxP36Qz5MHx3vvcbbx1eJhZWKewuyz+9LXaCkf8Jpd5AFoC+HhoikVSz +46yVzmTBt6TISc4bh+eiWcXEKFbxEbXkwqZd2m/oHI4Em4qnDKp96FcOfq6RQ8pR +AwA= -----END CERTIFICATE----- diff --git a/hosts/vidhar/prometheus/ca/ca.key b/hosts/vidhar/prometheus/ca/ca.key new file mode 100644 index 00000000..32c4330a --- /dev/null +++ b/hosts/vidhar/prometheus/ca/ca.key @@ -0,0 +1,21 @@ +{ + "data": "ENC[AES256_GCM,data:yk8nI2Zz2F3XnBM9dqnA3UoWTTCGJLMZUYjpo+SW+ARmZVgYdcqHZunhoGRQP/r6qrIUvM/2Yl85Uosw43jllILCNESH17Gi6uI0gD9OE8I14oll8wCL+/GvP/IuU//1NEAeLF9cz8MBWPE0WW2wQk5DF4ikl+z3/McG+kaqeU+ka6aMmjIjUstjR2vCf+pfZN3KswylcLaeuvXP,iv:ByEIQCxQwjynCFxGZdYtg+nx9mFmwbqHL3iBhzLbKIQ=,tag:jIc+KcfbSmiZqM6Z2xIa0g==,type:str]", + "sops": { + "kms": null, + "gcp_kms": null, + "azure_kv": 
null, + "hc_vault": null, + "age": null, + "lastmodified": "2022-11-07T19:33:34Z", + "mac": "ENC[AES256_GCM,data:UE1+0M15ZBgsKOfEmz8DMeQsmzkRxcN5cjdpMswzc6vIgo6sRN4ArdtDKqAMcFtFhzokSZin6OIizsk6KLlsts5sgVHQHXKrqssc016OADRg4BoC9zM/MGLUXOHndrRSPGSQgRDCeVwmR9C5iE18VZ/NCcZtoztHt6DPt3xmGpo=,iv:JB6CTWUyyDpjciKfYugf78Xo+jDKCH3+tL8p9G7M5y0=,tag:n73uY6cX5EV6Rjq1/HM8kw==,type:str]", + "pgp": [ + { + "created_at": "2022-11-07T19:33:34Z", + "enc": "-----BEGIN PGP MESSAGE-----\n\nhF4DXxoViZlp6dISAQdAJxBqRR1DzDPs/sQgfZNaKZTWH+mbdQo9mpGRWcWkm1ww\nOjVRJjiBDyeItfbOS9hnEOJKwKUIk1tH7F5m+U5daFLSw/Ct/xzJ7iyphcfRzNFN\n0l4BHF6sMyoPFpSGpE+0d4IRqfDPF3t9d3NL1lAGV75MoEho38ptNCbAn32kWpZ9\n7/Vk3L+oR/3xhLAwm3/7JDed01zNnKRaxFh3zpYfwZWhMtEdoUoEIkojufEJ64s2\n=KZjL\n-----END PGP MESSAGE-----\n", + "fp": "30D3453B8CD02FE2A3E7C78C0FB536FB87AE8F51" + } + ], + "unencrypted_suffix": "_unencrypted", + "version": "3.7.3" + } +} \ No newline at end of file diff --git a/hosts/vidhar/prometheus/ca/ca.key.sops b/hosts/vidhar/prometheus/ca/ca.key.sops deleted file mode 100644 index 5313056e..00000000 --- a/hosts/vidhar/prometheus/ca/ca.key.sops +++ /dev/null @@ -1,21 +0,0 @@ -{ - "data": "ENC[AES256_GCM,data:XW6h0psHOSV0cR03vRg479A5XRM7KfiBfVgvm4QlxCZzhkk5U1ToDJIaCxqKpxlEu8wm79wmz+/CmSLDEBcs7x05a5vBDt81mlWJ49PolOrG9bL9Qkyq5u8sB8HWXRXxCP5kg2su+n9NqdHX9AIhYCXy7VJDuGo=,iv:v661AhF2Q/O+a7JtwHtnSkSI0mL8ltu5rPny8vWCL/Q=,tag:c7b0a6o6y/MI5vG85uFuUg==,type:str]", - "sops": { - "kms": null, - "gcp_kms": null, - "azure_kv": null, - "hc_vault": null, - "age": null, - "lastmodified": "2022-04-08T20:12:22Z", - "mac": "ENC[AES256_GCM,data:W/IF6WgTscbkcMUTR3aeqM/H/UwgFgILDbKBxYJQxcFtt4kq3UqzSd/e0hk5NQ9IkagAC4X0gZDuzco2mc7caUGyzMKRdA2ekgcdDwzruQ4i+UYyr80dFhqHpV+aksdZJVR+dJzkmIRmza3Ia5e/X01XNIbIrU13JKYm9jCskd0=,iv:2g+UFcSTxcTrf+toi4BDVvAaY5ydk7yRnhpQ/rrNvVo=,tag:3X01wEqL/Q8cIiF+DEMnpg==,type:str]", - "pgp": [ - { - "created_at": "2022-04-08T20:12:22Z", - "enc": "-----BEGIN PGP MESSAGE-----\n\nhF4DXxoViZlp6dISAQdADN+s7UQS8hEBc2mMRovD/zKuIoIAS3swLpP6ul9kRGMw\nDCUvOL41sxXmuodi4Pg69YB2YcL47Fod7nQWUYaK8L3CuyjWUq1cxomlYtTd03eH\n0l4BiyWTuZ+1OG4Xng8B4zdcM5jWfeTRWupDIXcnPFjwz47FetmrcCAaROKYL87e\nAjK76Y6gR/gSj0GTTAUIfKFpqsqAdBAf6oBekQcPgeqcrJcZ2ZZFWzmswGBvcGjs\n=gqhG\n-----END PGP MESSAGE-----\n", - "fp": "30D3453B8CD02FE2A3E7C78C0FB536FB87AE8F51" - } - ], - "unencrypted_suffix": "_unencrypted", - "version": "3.7.2" - } -} \ No newline at end of file diff --git a/hosts/vidhar/prometheus/ca/certs/01.pem b/hosts/vidhar/prometheus/ca/certs/01.pem deleted file mode 100644 index 81abe0b7..00000000 --- a/hosts/vidhar/prometheus/ca/certs/01.pem +++ /dev/null @@ -1,39 +0,0 @@ -Certificate: - Data: - Version: 3 (0x2) - Serial Number: 1 (0x1) - Signature Algorithm: ED25519 - Issuer: CN=prometheus.yggdrasil - Validity - Not Before: Apr 8 20:03:55 2022 GMT - Not After : Apr 26 20:03:55 2090 GMT - Subject: CN=surtr.yggdrasil - Subject Public Key Info: - Public Key Algorithm: ED25519 - ED25519 Public-Key: - pub: - 02:5d:f0:8d:f6:5f:fc:fd:27:47:0e:d8:ec:fe:e0: - a0:28:20:9a:b4:8a:07:4c:75:b2:c1:03:ef:16:3b: - eb:e0 - X509v3 extensions: - X509v3 Basic Constraints: critical - CA:FALSE - X509v3 Subject Alternative Name: - DNS:prometheus.surtr.yggdrasil, DNS:prometheus.surtr.yggdrasil.li - X509v3 Subject Key Identifier: - 37:9D:AD:3D:CB:F9:14:5A:69:CD:E2:71:D8:08:97:93:A5:20:3C:38 - Signature Algorithm: ED25519 - 3c:df:73:85:a7:81:07:60:b5:4e:ea:ec:74:04:47:d2:35:41: - cf:d8:34:75:18:4f:ee:c3:b9:64:6d:0a:fb:1a:76:e2:96:8b: - 5e:24:c5:d6:b6:2e:6f:6e:29:ff:26:70:ef:5a:7b:33:40:40: - 
13:e8:49:a9:80:73:62:8e:58:05 ------BEGIN CERTIFICATE----- -MIIBXzCCARGgAwIBAgIBATAFBgMrZXAwHzEdMBsGA1UEAwwUcHJvbWV0aGV1cy55 -Z2dkcmFzaWwwIBcNMjIwNDA4MjAwMzU1WhgPMjA5MDA0MjYyMDAzNTVaMBoxGDAW -BgNVBAMMD3N1cnRyLnlnZ2RyYXNpbDAqMAUGAytlcAMhAAJd8I32X/z9J0cO2Oz+ -4KAoIJq0igdMdbLBA+8WO+vgo3UwczAMBgNVHRMBAf8EAjAAMEQGA1UdEQQ9MDuC -GnByb21ldGhldXMuc3VydHIueWdnZHJhc2lsgh1wcm9tZXRoZXVzLnN1cnRyLnln -Z2RyYXNpbC5saTAdBgNVHQ4EFgQUN52tPcv5FFppzeJx2AiXk6UgPDgwBQYDK2Vw -A0EAPN9zhaeBB2C1TursdARH0jVBz9g0dRhP7sO5ZG0K+xp24paLXiTF1rYub24p -/yZw71p7M0BAE+hJqYBzYo5YBQ== ------END CERTIFICATE----- diff --git a/hosts/vidhar/prometheus/ca/certs/02.pem b/hosts/vidhar/prometheus/ca/certs/02.pem deleted file mode 100644 index d908ca7d..00000000 --- a/hosts/vidhar/prometheus/ca/certs/02.pem +++ /dev/null @@ -1,38 +0,0 @@ -Certificate: - Data: - Version: 3 (0x2) - Serial Number: 2 (0x2) - Signature Algorithm: ED25519 - Issuer: CN=prometheus.yggdrasil - Validity - Not Before: Apr 8 20:07:13 2022 GMT - Not After : Apr 26 20:07:13 2090 GMT - Subject: CN=vidhar.yggdrasil - Subject Public Key Info: - Public Key Algorithm: ED25519 - ED25519 Public-Key: - pub: - 13:84:a6:01:07:7a:5e:8d:2b:8d:83:ee:73:1d:c6: - b8:9a:ad:b9:3d:40:51:ec:2c:f3:52:7d:81:90:e7: - ac:88 - X509v3 extensions: - X509v3 Basic Constraints: critical - CA:FALSE - X509v3 Subject Alternative Name: - DNS:prometheus.vidhar.yggdrasil - X509v3 Subject Key Identifier: - 44:AA:8E:CC:AB:C9:A7:D1:A1:D0:FA:7F:DB:87:1E:08:AA:6E:4D:59 - Signature Algorithm: ED25519 - 47:65:87:17:50:96:77:56:20:ac:9e:f4:e4:6d:19:6d:b7:24: - 11:af:0c:c3:f3:fd:75:19:d9:77:06:41:79:7f:a5:00:0c:18: - ee:82:3e:9e:09:61:34:cf:8f:f5:83:d1:5d:b2:e4:42:b6:3f: - 9c:b6:5a:f3:40:92:e6:8f:24:0f ------BEGIN CERTIFICATE----- -MIIBQTCB9KADAgECAgECMAUGAytlcDAfMR0wGwYDVQQDDBRwcm9tZXRoZXVzLnln -Z2RyYXNpbDAgFw0yMjA0MDgyMDA3MTNaGA8yMDkwMDQyNjIwMDcxM1owGzEZMBcG -A1UEAwwQdmlkaGFyLnlnZ2RyYXNpbDAqMAUGAytlcAMhABOEpgEHel6NK42D7nMd -xriarbk9QFHsLPNSfYGQ56yIo1cwVTAMBgNVHRMBAf8EAjAAMCYGA1UdEQQfMB2C -G3Byb21ldGhldXMudmlkaGFyLnlnZ2RyYXNpbDAdBgNVHQ4EFgQURKqOzKvJp9Gh -0Pp/24ceCKpuTVkwBQYDK2VwA0EAR2WHF1CWd1YgrJ705G0ZbbckEa8Mw/P9dRnZ -dwZBeX+lAAwY7oI+nglhNM+P9YPRXbLkQrY/nLZa80CS5o8kDw== ------END CERTIFICATE----- diff --git a/hosts/vidhar/prometheus/ca/index.txt b/hosts/vidhar/prometheus/ca/index.txt deleted file mode 100644 index 41ebb0f4..00000000 --- a/hosts/vidhar/prometheus/ca/index.txt +++ /dev/null @@ -1,2 +0,0 @@ -V 20900426200355Z 01 unknown /CN=surtr.yggdrasil -V 20900426200713Z 02 unknown /CN=vidhar.yggdrasil diff --git a/hosts/vidhar/prometheus/ca/index.txt.attr b/hosts/vidhar/prometheus/ca/index.txt.attr deleted file mode 100644 index 8f7e63a3..00000000 --- a/hosts/vidhar/prometheus/ca/index.txt.attr +++ /dev/null @@ -1 +0,0 @@ -unique_subject = yes diff --git a/hosts/vidhar/prometheus/ca/serial b/hosts/vidhar/prometheus/ca/serial deleted file mode 100644 index 75016ea3..00000000 --- a/hosts/vidhar/prometheus/ca/serial +++ /dev/null @@ -1 +0,0 @@ -03 diff --git a/hosts/vidhar/prometheus/tls.crt b/hosts/vidhar/prometheus/tls.crt index 792ed542..6516f185 100644 --- a/hosts/vidhar/prometheus/tls.crt +++ b/hosts/vidhar/prometheus/tls.crt @@ -1,9 +1,12 @@ -----BEGIN CERTIFICATE----- -MIIBQTCB9KADAgECAgECMAUGAytlcDAfMR0wGwYDVQQDDBRwcm9tZXRoZXVzLnln -Z2RyYXNpbDAgFw0yMjA0MDgyMDA3MTNaGA8yMDkwMDQyNjIwMDcxM1owGzEZMBcG -A1UEAwwQdmlkaGFyLnlnZ2RyYXNpbDAqMAUGAytlcAMhABOEpgEHel6NK42D7nMd -xriarbk9QFHsLPNSfYGQ56yIo1cwVTAMBgNVHRMBAf8EAjAAMCYGA1UdEQQfMB2C -G3Byb21ldGhldXMudmlkaGFyLnlnZ2RyYXNpbDAdBgNVHQ4EFgQURKqOzKvJp9Gh 
-0Pp/24ceCKpuTVkwBQYDK2VwA0EAR2WHF1CWd1YgrJ705G0ZbbckEa8Mw/P9dRnZ
-dwZBeX+lAAwY7oI+nglhNM+P9YPRXbLkQrY/nLZa80CS5o8kDw==
+MIIByDCCAUigAwIBAgIPQAAAAGNpXrc6y389EXtIMAUGAytlcTAfMR0wGwYDVQQD
+DBRwcm9tZXRoZXVzLnlnZ2RyYXNpbDAeFw0yMjExMDcxOTMyNTRaFw0zMjExMDcx
+OTM3NTRaMBsxGTAXBgNVBAMMEHZpZGhhci55Z2dkcmFzaWwwKjAFBgMrZXADIQAT
+hKYBB3pejSuNg+5zHca4mq25PUBR7CzzUn2BkOesiKOBnjCBmzAfBgNVHSMEGDAW
+gBTm64QlAwmZOvyXngzDVpfFni6/jDAdBgNVHQ4EFgQURKqOzKvJp9Gh0Pp/24ce
+CKpuTVkwDgYDVR0PAQH/BAQDAgXgMAwGA1UdEwEB/wQCMAAwEwYDVR0lBAwwCgYI
+KwYBBQUHAwIwJgYDVR0RBB8wHYIbcHJvbWV0aGV1cy52aWRoYXIueWdnZHJhc2ls
+MAUGAytlcQNzAIPNcNWqVX4Ie971O/S2DL0HMFmPbR331U4snLBqPGWC1/j9NV4O
+cxJvLo8Hzb4I0BXn/nZbyk/ogCCJU69BVeK378qgLo68DIZ4TA3ka5ZPNRSt464Q
+NvbkDhtFVVxM04xUjI4dOeE9jczG9nN3jHESAA==
 -----END CERTIFICATE-----
diff --git a/modules/borgcopy/copy/copy_borg/__main__.py b/modules/borgcopy/copy/copy_borg/__main__.py
new file mode 100755
index 00000000..5b374d99
--- /dev/null
+++ b/modules/borgcopy/copy/copy_borg/__main__.py
@@ -0,0 +1,561 @@
+#!@python@/bin/python
+
+import json
+import os
+import subprocess
+import re
+import sys
+import io
+from sys import stderr
+from humanize import naturalsize
+
+from tempfile import TemporaryDirectory
+
+from datetime import (datetime, timedelta)
+from dateutil.tz import (tzlocal, tzutc)
+import dateutil.parser
+import argparse
+
+from tqdm import tqdm
+
+from xdg import xdg_runtime_dir
+import pathlib
+
+import unshare
+from pyprctl import CapState, Cap, cap_ambient_raise, cap_ambient_is_set, set_keepcaps
+from pwd import getpwnam
+from traceback import format_exc
+
+import logging
+
+import signal
+import time
+import math
+
+from halo import Halo
+
+from collections import deque
+
+import select
+import fcntl
+
+from multiprocessing import Process, Manager
+from contextlib import closing
+
+
+halo_args = {
+    'stream': stderr,
+    'enabled': stderr.isatty(),
+    'spinner': 'arc'
+}
+
+borg_pwd = getpwnam('borg')
+
+def as_borg(caps=set()):
+    global logger
+
+    try:
+        if caps:
+            c_state = CapState.get_current()
+            c_state.permitted.add(*caps)
+            c_state.set_current()
+
+            # logger.debug("before setgid/setuid: cap_permitted=%s", CapState.get_current().permitted)
+
+            set_keepcaps(True)
+
+        os.setgid(borg_pwd.pw_gid)
+        os.setuid(borg_pwd.pw_uid)
+
+        if caps:
+            # logger.debug("after setgid/setuid: cap_permitted=%s", CapState.get_current().permitted)
+
+            c_state = CapState.get_current()
+            c_state.permitted = caps.copy()
+            c_state.inheritable.add(*caps)
+            c_state.set_current()
+
+            # logger.debug("cap_permitted=%s", CapState.get_current().permitted)
+            # logger.debug("cap_inheritable=%s", CapState.get_current().inheritable)
+
+            for cap in caps:
+                cap_ambient_raise(cap)
+                # logger.debug("cap_ambient[%s]=%s", cap, cap_ambient_is_set(cap))
+    except Exception:
+        logger.error(format_exc())
+        raise
+
+def borg_json(*args, **kwargs):
+    global logger
+
+    with subprocess.Popen(*args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, **kwargs) as proc:
+        stdout_buffer = io.BytesIO()
+
+        proc_logger = logger.getChild('borg')
+        stdout_logger = proc_logger.getChild('stdout')
+        stderr_logger = proc_logger.getChild('stderr')
+
+        fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
+        fcntl.fcntl(proc.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
+
+        poll = select.poll()
+        poll.register(proc.stdout, select.POLLIN | select.POLLHUP)
+        poll.register(proc.stderr, select.POLLIN | select.POLLHUP)
+        pollc = 2
+        events = poll.poll()
+        
stderr_linebuf = bytearray() + + while pollc > 0 and len(events) > 0: + for rfd, event in events: + if event & select.POLLIN: + if rfd == proc.stdout.fileno(): + try: + buf = os.read(proc.stdout.fileno(), 8192) + # stdout_logger.debug(buf) + stdout_buffer.write(buf) + except BlockingIOError: + pass + if rfd == proc.stderr.fileno(): + try: + stderr_linebuf.extend(os.read(proc.stderr.fileno(), 8192)) + except BlockingIOError: + pass + + while stderr_linebuf: + line, sep, stderr_linebuf = stderr_linebuf.partition(b'\n') + if not sep: + stderr_linebuf = line + break + + stderr_logger.info(line.decode()) + if event == select.POLLHUP: + poll.unregister(rfd) + pollc -= 1 + + if pollc > 0: + events = poll.poll() + + for handler in proc_logger.handlers: + handler.flush() + + ret = proc.wait() + if ret != 0: + raise Exception(f'borg subprocess exited with returncode {ret}') + + stdout_buffer.seek(0) + return json.load(stdout_buffer) + +def read_repo(path): + global logger + + with Halo(text=f'Listing {path}', **halo_args) as sp: + if not sp.enabled: + logger.debug('Listing %s...', path) + res = borg_json(['borg', 'list', '--info', '--lock-wait=600', '--json', path], preexec_fn=lambda: as_borg())['archives'] + if sp.enabled: + sp.succeed(f'{len(res)} archives in {path}') + else: + logger.info('%d archives in ‘%s’', len(res), path) + return res + +class ToSync: + to_sync = deque() + + def __init__(self, source, target): + self.source = source + self.target = target + + def __iter__(self): + return self + + def __next__(self): + global logger + + if self.to_sync: + return self.to_sync.popleft() + + while True: + try: + src = read_repo(self.source) + dst = read_repo(self.target) + except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err: + logger.error(err) + continue + + self.to_sync.extend([entry for entry in src if entry['name'] not in {dst_entry['name'] for dst_entry in dst} and not entry['name'].endswith('.checkpoint')]) + + if self.to_sync: + return self.to_sync.popleft() + + raise StopIteration + +def copy_archive(src_repo_path, dst_repo_path, entry): + global logger + + def do_copy(tmpdir_q): + global logger + + nonlocal src_repo_path, dst_repo_path, entry + + tmpdir = tmpdir_q.get() + + cache_suffix = None + with Halo(text=f'Determine archive parameters', **halo_args) as sp: + if not sp.enabled: + logger.debug('Determining archive parameters...') + match = re.compile('^(.*)-[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.(checkpoint|recreate)(\.[0-9]+)?)?').fullmatch(entry['name']) + if match: + repo_id = borg_json(['borg', 'info', '--info', '--lock-wait=600', '--json', src_repo_path], preexec_fn=lambda: as_borg())['repository']['id'] + + if repo_id: + cache_suffix = f'{repo_id}_{match.group(1)}' + if sp.enabled: + sp.succeed(f'Will process {entry["name"]} ({dateutil.parser.isoparse(entry["start"])}, cache_suffix={cache_suffix})') + else: + logger.info('Will process ‘%s’ (%s, cache_suffix=%s)', entry['name'], dateutil.parser.isoparse(entry['start']), cache_suffix) + + logger.debug('Setting up environment...') + unshare.unshare(unshare.CLONE_NEWNS) + subprocess.run(['mount', '--make-rprivate', '/'], check=True) + chroot = pathlib.Path(tmpdir) / 'chroot' + upper = pathlib.Path(tmpdir) / 'upper' + work = pathlib.Path(tmpdir) / 'work' + for path in [chroot,upper,work]: + path.mkdir() + subprocess.run(['mount', '-t', 'overlay', 'overlay', '-o', f'lowerdir=/,upperdir={upper},workdir={work}', chroot], check=True) + bindMounts = ['nix', 'run', 'run/secrets.d', 'run/wrappers', 
'proc', 'dev', 'sys', pathlib.Path(os.path.expanduser('~')).relative_to('/')]
+        if os.environ.get('BORG_BASE_DIR'):
+            bindMounts.append(pathlib.Path(os.environ['BORG_BASE_DIR']).relative_to('/'))
+        if not ":" in src_repo_path:
+            bindMounts.append(pathlib.Path(src_repo_path).relative_to('/'))
+        if 'SSH_AUTH_SOCK' in os.environ:
+            bindMounts.append(pathlib.Path(os.environ['SSH_AUTH_SOCK']).parent.relative_to('/'))
+        for bindMount in bindMounts:
+            (chroot / bindMount).mkdir(parents=True,exist_ok=True)
+            subprocess.run(['mount', '--bind', pathlib.Path('/') / bindMount, chroot / bindMount], check=True)
+        os.chroot(chroot)
+        os.chdir('/')
+        try:
+            os.unlink('/etc/fuse.conf')
+        except FileNotFoundError:
+            pass
+        pathlib.Path('/etc/fuse.conf').parent.mkdir(parents=True,exist_ok=True)
+        with open('/etc/fuse.conf', 'w') as fuse_conf:
+            fuse_conf.write('user_allow_other\nmount_max = 1000\n')
+        dir = pathlib.Path('/borg')
+        dir.mkdir(parents=True,exist_ok=True,mode=0o0750)
+        os.chown(dir, borg_pwd.pw_uid, borg_pwd.pw_gid)
+
+        total_size = None
+        total_files = None
+        if stderr.isatty():
+            with Halo(text=f'Determine size', **halo_args) as sp:
+                stats = borg_json(['borg', 'info', '--info', '--json', '--lock-wait=600', f'{src_repo_path}::{entry["name"]}'], preexec_fn=lambda: as_borg())['archives'][0]['stats']
+                total_size = stats['original_size']
+                total_files = stats['nfiles']
+                if sp.enabled:
+                    sp.succeed(f'{total_files} files, {naturalsize(total_size, binary=True)}')
+                else:
+                    logger.info('%d files, %s', total_files, naturalsize(total_size, binary=True))
+        with subprocess.Popen(['borg', 'mount', '-o', 'allow_other,ignore_permissions', '--foreground', '--progress', '--lock-wait=600', f'{src_repo_path}::{entry["name"]}', dir], preexec_fn=lambda: as_borg()) as mount_proc:
+            with Halo(text='Waiting for mount', **halo_args) as sp:
+                if not sp.enabled:
+                    logger.debug('Waiting for mount...')
+                wait_start = datetime.now()
+                while True:
+                    if os.path.ismount(dir):
+                        break
+                    elif datetime.now() - wait_start > timedelta(minutes=15):
+                        mount_proc.terminate()
+                        raise TimeoutError(f'{src_repo_path}::{entry["name"]} was not mounted after 15 minutes')
+                    time.sleep(0.1)
+                if sp.enabled:
+                    sp.succeed('Mounted')
+                else:
+                    logger.info('Mounted %s', f'{src_repo_path}::{entry["name"]}')
+
+            while True:
+                with tqdm(total=total_size, unit_scale=True, unit_divisor=1024, unit='B', smoothing=0.01, disable=None, dynamic_ncols=True, maxinterval=0.5, miniters=1) as progress:
+                    seen = 0
+                    env = os.environ.copy()
+                    create_args = ['borg',
+                                   'create',
+                                   '--lock-wait=600',
+                                   '--one-file-system',
+                                   '--compression=auto,zstd,10',
+                                   '--chunker-params=10,23,16,4095',
+                                   '--files-cache=ctime,size',
+                                   '--show-rc',
+                                   '--upload-buffer=100',
+                                   '--upload-ratelimit=20480',
+                                   '--log-json',
+                                   '--progress',
+                                   '--list',
+                                   '--filter=AMEi-x?',
+                                   '--stats'
+                    ]
+                    archive_time = datetime.strptime(entry["time"], "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=tzlocal()).astimezone(tzutc())
+                    create_args += [f'--timestamp={archive_time.strftime("%Y-%m-%dT%H:%M:%S")}']
+                    if cache_suffix:
+                        env['BORG_FILES_CACHE_SUFFIX'] = cache_suffix
+                    else:
+                        create_args += ['--files-cache=disabled']
+                    create_args += [f'{dst_repo_path}::{entry["name"]}', '.']
+
+                    with subprocess.Popen(create_args, stdin=subprocess.DEVNULL, stderr=subprocess.PIPE, stdout=subprocess.PIPE, env=env, preexec_fn=lambda: as_borg(caps={Cap.DAC_READ_SEARCH}), cwd=dir) as proc:
+                        last_list = None
+                        last_list_time = time.monotonic_ns()
+                        logger.info('Creating...')
+
+                        proc_logger = logger.getChild('borg')
+                        stdout_logger = proc_logger.getChild('stdout')
+                        stderr_logger = proc_logger.getChild('stderr')
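+                        # Both pipes are switched to non-blocking mode so the
+                        # poll() loop below can drain whichever of stdout and
+                        # stderr is ready without blocking on the other; a
+                        # blocking read could deadlock once borg fills one pipe.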
+ + fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK) + fcntl.fcntl(proc.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(proc.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK) + + poll = select.poll() + poll.register(proc.stdout, select.POLLIN | select.POLLHUP) + poll.register(proc.stderr, select.POLLIN | select.POLLHUP) + pollc = 2 + events = poll.poll() + stdout_linebuf = bytearray() + stderr_linebuf = bytearray() + + while pollc > 0 and len(events) > 0: + # logger.debug('%d events', len(events)) + for rfd, event in events: + # logger.debug('event %s', event) + if event & select.POLLIN: + if rfd == proc.stdout.fileno(): + try: + # logger.debug('reading stdout...') + stdout_linebuf.extend(os.read(proc.stdout.fileno(), 8192)) + # logger.debug('read stdout, len(stdout_linebuf)=%d', len(stdout_linebuf)) + except BlockingIOError: + pass + + while stdout_linebuf: + # logger.debug('stdout line...') + line, sep, stdout_linebuf = stdout_linebuf.partition(b'\n') + if not sep: + stdout_linebuf = line + break + + stdout_logger.info(line.decode()) + # logger.debug('handled stdout lines, %d leftover', len(stdout_linebuf)) + if rfd == proc.stderr.fileno(): + try: + # logger.debug('reading stderr...') + stderr_linebuf.extend(os.read(proc.stderr.fileno(), 8192)) + # logger.debug('read stderr, len(stderr_linebuf)=%d', len(stderr_linebuf)) + except BlockingIOError: + pass + + while stderr_linebuf: + # logger.debug('stderr line...') + line, sep, stderr_linebuf = stderr_linebuf.partition(b'\n') + if not sep: + stderr_linebuf = line + break + + try: + json_line = json.loads(line) + except json.decoder.JSONDecodeError: + if progress.disable: + stderr_logger.error(line.decode()) + else: + tqdm.write(line.decode()) + continue + + # logger.debug('stderr line decoded: %s', json_line['type'] if 'type' in json_line else None) + + t = '' + if 'time' in json_line and not progress.disable: + ts = datetime.fromtimestamp(json_line['time']).replace(tzinfo=tzlocal()) + t = f'{ts.isoformat(timespec="minutes")} ' + if json_line['type'] == 'archive_progress' and not progress.disable: + now = time.monotonic_ns() + if last_list_time is None or now - last_list_time >= 3e9: + last_list_time = now + if 'path' in json_line and json_line['path']: + progress.set_description(f'… {json_line["path"]}', refresh=False) + else: + progress.set_description(None, refresh=False) + elif last_list is not None: + progress.set_description(last_list, refresh=False) + nfiles=json_line["nfiles"] + if total_files is not None: + nfiles=f'{json_line["nfiles"]}/{total_files}' + progress.set_postfix(compressed=naturalsize(json_line['compressed_size'], binary=True), deduplicated=naturalsize(json_line['deduplicated_size'], binary=True), nfiles=nfiles, refresh=False) + progress.update(json_line["original_size"] - seen) + seen = json_line["original_size"] + elif json_line['type'] == 'archive_progress': + now = time.monotonic_ns() + if last_list_time is None or now - last_list_time >= 3e9: + last_list_time = now + if 'path' in json_line and json_line['path']: + stderr_logger.debug('… %s (%s)', json_line["path"], naturalsize(json_line["original_size"])) + else: + stderr_logger.debug('… (%s)', naturalsize(json_line["original_size"])) + elif json_line['type'] == 'file_status': + # tqdm.write(t + f'{json_line["status"]} {json_line["path"]}') + last_list = f'{json_line["status"]} {json_line["path"]}' + last_list_time = time.monotonic_ns() + progress.set_description(last_list, refresh=False) + if 
progress.disable:
+                                                    stderr_logger.info(last_list)
+                                            elif (json_line['type'] == 'log_message' or json_line['type'] == 'progress_message' or json_line['type'] == 'progress_percent') and ('message' in json_line or 'msgid' in json_line):
+                                                if 'message' in json_line:
+                                                    if progress.disable:
+                                                        stderr_logger.info(t + json_line['message'])
+                                                    else:
+                                                        tqdm.write(t + json_line['message'])
+                                                elif 'msgid' in json_line:
+                                                    if progress.disable:
+                                                        stderr_logger.info(t + json_line['msgid'])
+                                                    else:
+                                                        tqdm.write(t + json_line['msgid'])
+                                            else:
+                                                if progress.disable:
+                                                    stderr_logger.info(t + line.decode())
+                                                else:
+                                                    tqdm.write(t + line.decode())
+                                            # logger.debug('handled stderr lines, %d leftover', len(stderr_linebuf))
+                                    if event == select.POLLHUP:
+                                        poll.unregister(rfd)
+                                        pollc -= 1
+
+                                if pollc > 0:
+                                    # logger.debug('polling %d fds...', pollc)
+                                    events = poll.poll()
+                                # logger.debug('done polling')
+
+                            # logger.debug('borg create closed stdout/stderr')
+                            if stdout_linebuf:
+                                logger.error('unterminated line leftover in stdout: %s', stdout_linebuf)
+                            if stderr_linebuf:
+                                logger.error('unterminated line leftover in stderr: %s', stderr_linebuf)
+                            progress.set_description(None)
+                            ret = proc.wait()
+                            # logger.debug('borg create terminated; ret=%d', ret)
+                            if ret != 0:
+                                dst = None
+                                try:
+                                    dst = read_repo(dst_repo_path)
+                                except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err:
+                                    logger.error(err)
+                                    continue
+                                else:
+                                    if any(map(lambda other: entry['name'] == other['name'], dst)):
+                                        logger.info('destination exists, terminating')
+                                        break
+
+                                    logger.warning('destination does not exist, retrying')
+                                    continue
+                            else:
+                                # logger.debug('terminating')
+                                break
+            mount_proc.terminate()
+
+    with Manager() as manager:
+        tmpdir_q = manager.Queue(1)
+
+        with closing(Process(target=do_copy, args=(tmpdir_q,), name='do_copy')) as p:
+            p.start()
+
+            with TemporaryDirectory(prefix=f'borg-mount_{entry["name"]}_', dir=os.environ.get('RUNTIME_DIRECTORY')) as tmpdir:
+                tmpdir_q.put(tmpdir)
+            p.join()
+    return p.exitcode
+
+def sigterm(signum, frame):
+    raise SystemExit(128 + signum)
+
+def main():
+    signal.signal(signal.SIGTERM, sigterm)
+
+    global logger
+    logger = logging.getLogger(__name__)
+    console_handler = logging.StreamHandler()
+    console_handler.setFormatter( logging.Formatter('[%(levelname)s](%(name)s): %(message)s') )
+    if sys.stderr.isatty():
+        console_handler.setFormatter( logging.Formatter('%(asctime)s [%(levelname)s](%(name)s): %(message)s') )
+
+    burst_max = 1000
+    burst = burst_max
+    last_use = None
+    inv_rate = 1e7
+    def consume_filter(record):
+        nonlocal burst, burst_max, inv_rate, last_use
+
+        delay = None
+        while True:
+            now = time.monotonic_ns()
+            burst = min(burst_max, burst + math.floor((now - last_use) / inv_rate)) if last_use else burst_max
+            last_use = now
+
+            if burst > 0:
+                burst -= 1
+                if delay:
+                    delay = now - delay
+
+                return True
+
+            if delay is None:
+                delay = now
+            time.sleep(inv_rate / 1e9)
+    console_handler.addFilter(consume_filter)
+
+    logging.getLogger().addHandler(console_handler)
+
+    # log uncaught exceptions
+    def log_exceptions(type, value, tb):
+        global logger
+
+        logger.error(value)
+        sys.__excepthook__(type, value, tb) # calls default excepthook
+
+    sys.excepthook = log_exceptions
+
+    parser = argparse.ArgumentParser(prog='copy')
+    parser.add_argument('--verbosity', dest='log_level', action='append', type=int)
+    parser.add_argument('--verbose', '-v', dest='log_level', action='append_const', const=1)
+    parser.add_argument('--quiet', '-q', dest='log_level', 
action='append_const', const=-1)
+    parser.add_argument('source', metavar='REPO_OR_ARCHIVE')
+    parser.add_argument('target', metavar='REPO_OR_ARCHIVE')
+    args = parser.parse_args()
+
+
+    LOG_LEVELS = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL]
+    DEFAULT_LOG_LEVEL = logging.ERROR
+    log_level = LOG_LEVELS.index(DEFAULT_LOG_LEVEL)
+
+    for adjustment in args.log_level or ():
+        log_level = min(len(LOG_LEVELS) - 1, max(log_level - adjustment, 0))
+    logger.setLevel(LOG_LEVELS[log_level])
+
+
+    if "::" in args.source:
+        (src_repo_path, _, src_archive) = args.source.partition("::")
+        entry = None
+        for candidate_entry in read_repo(src_repo_path):
+            if candidate_entry['name'] != src_archive:
+                continue
+            entry = candidate_entry
+            break
+
+        if entry is None:
+            logger.critical("Did not find archive ‘%s’", src_archive)
+            sys.exit(1)
+
+        copy_archive(src_repo_path, args.target, entry)
+    else:
+        for entry in ToSync(args.source, args.target):
+            copy_archive(args.source, args.target, entry)
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/modules/borgcopy/copy/setup.py b/modules/borgcopy/copy/setup.py
new file mode 100644
index 00000000..f77d9560
--- /dev/null
+++ b/modules/borgcopy/copy/setup.py
@@ -0,0 +1,10 @@
+from setuptools import setup
+
+setup(name='copy_borg',
+      packages=['copy_borg'],
+      entry_points={
+          'console_scripts': [
+              'copy_borg=copy_borg.__main__:main',
+          ],
+      }
+)
diff --git a/modules/borgcopy/default.nix b/modules/borgcopy/default.nix
new file mode 100644
index 00000000..eae07dc8
--- /dev/null
+++ b/modules/borgcopy/default.nix
@@ -0,0 +1,120 @@
+{ config, pkgs, lib, utils, flakeInputs, ... }:
+
+with lib;
+
+let
+  copyBorg = flakeInputs.mach-nix.lib.${config.nixpkgs.system}.buildPythonPackage rec {
+    pname = "copy-borg";
+    src = ./copy;
+    version = "0.0.0";
+    ignoreDataOutdated = true;
+
+    requirements = ''
+      humanize
+      tqdm
+      python-dateutil
+      xdg
+      python-unshare
+      pyprctl
+      halo
+    '';
+    postInstall = ''
+      wrapProgram $out/bin/copy_borg \
+        --prefix PATH : ${makeBinPath (with pkgs; [util-linux borgbackup])}:${config.security.wrapperDir}
+    '';
+
+    providers.python-unshare = "nixpkgs";
+    overridesPre = [
+      (self: super: { python-unshare = super.python-unshare.overrideAttrs (oldAttrs: { name = "python-unshare-0.2.1"; version = "0.2.1"; }); })
+    ];
+
+    # _.tomli.buildInputs.add = with pkgs."python3Packages"; [ flit-core ];
+  };
+
+  copyService = name: opts: nameValuePair "copy-borg@${utils.escapeSystemdPath name}" {
+    serviceConfig = {
+      Type = "oneshot";
+      ExecStart = "${copyBorg}/bin/copy_borg --verbosity ${toString opts.verbosity} ${utils.escapeSystemdExecArgs [opts.from opts.to]}";
+      TimeoutStartSec = "8h";
+      # User = "borg";
+      # Group = "borg";
+      # StateDirectory = "borg";
+      RuntimeDirectory = "copy-borg";
+      Environment = [
+        "BORG_BASE_DIR=/var/lib/borg"
+        "BORG_CONFIG_DIR=/var/lib/borg/config"
+        "BORG_CACHE_DIR=/var/lib/borg/cache"
+        "BORG_SECURITY_DIR=/var/lib/borg/security"
+        "BORG_KEYS_DIR=/var/lib/borg/keys"
+      ]
+      ++ optional opts.unknownUnencryptedRepoAccessOk "BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK=yes"
+      ++ optional opts.hostnameIsUnique "BORG_HOSTNAME_IS_UNIQUE=yes"
+      ++ optional (!(isNull opts.sshConfig)) "BORG_RSH=\"${pkgs.openssh}/bin/ssh -F ${pkgs.writeText "config" opts.sshConfig}\""
+      ++ optional (!(isNull opts.keyfile)) "BORG_KEY_FILE=${opts.keyfile}";
+
+      LogRateLimitIntervalSec = 0;
+    };
+  };
+  copyTimer = name: opts: nameValuePair "copy-borg@${utils.escapeSystemdPath name}" (recursiveUpdate {
+    wantedBy = [ 
"timers.target" ]; + + timerConfig = { + Unit = "copy-borg@${utils.escapeSystemdPath name}.service"; + }; + } opts.timerOptions); + + cfg = config.services.copyborg; +in { + options = { + services.copyborg = mkOption { + type = types.attrsOf (types.submodule { + options = { + from = mkOption { + type = types.str; + }; + to = mkOption { + type = types.str; + }; + + verbosity = mkOption { + type = types.int; + default = 3; + }; + + sshConfig = mkOption { + type = with types; nullOr str; + default = null; + }; + + keyfile = mkOption { + type = with types; nullOr str; + default = null; + }; + + unknownUnencryptedRepoAccessOk = mkOption { + type = types.bool; + default = false; + }; + hostnameIsUnique = mkOption { + type = types.bool; + default = true; + }; + + timerOptions = mkOption { + # type = types.submodule utils.systemdUtils.unitOptions.stage2TimerOptions; + type = types.attrs; + default = { + wantedBy = ["timers.target"]; + }; + }; + }; + }); + default = {}; + }; + }; + + config = { + systemd.services = mapAttrs' copyService cfg; + systemd.timers = mapAttrs' copyTimer cfg; + }; +} diff --git a/modules/borgsnap/default.nix b/modules/borgsnap/default.nix index f4c0eec4..0a674e64 100644 --- a/modules/borgsnap/default.nix +++ b/modules/borgsnap/default.nix @@ -74,6 +74,15 @@ in { type = with types; listOf str; default = []; }; + + unknownUnencryptedRepoAccessOk = mkOption { + type = types.bool; + default = false; + }; + hostnameIsUnique = mkOption { + type = types.bool; + default = true; + }; }; }; @@ -95,9 +104,10 @@ in { "BORG_CACHE_DIR=/var/lib/borg/cache" "BORG_SECURITY_DIR=/var/lib/borg/security" "BORG_KEYS_DIR=/var/lib/borg/keys" - "BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK=yes" - "BORG_HOSTNAME_IS_UNIQUE=yes" - ] ++ optional (!(isNull cfg.sshConfig)) "BORG_RSH=\"${pkgs.openssh}/bin/ssh -F ${pkgs.writeText "config" cfg.sshConfig}\"" + ] + ++ optional cfg.unknownUnencryptedRepoAccessOk "BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK=yes" + ++ optional cfg.hostnameIsUnique "BORG_HOSTNAME_IS_UNIQUE=yes" + ++ optional (!(isNull cfg.sshConfig)) "BORG_RSH=\"${pkgs.openssh}/bin/ssh -F ${pkgs.writeText "config" cfg.sshConfig}\"" ++ optional (!(isNull cfg.keyfile)) "BORG_KEY_FILE=${cfg.keyfile}"; RuntimeDirectory = "zfssnap-prune"; }; diff --git a/modules/zfssnap/zfssnap/zfssnap/__main__.py b/modules/zfssnap/zfssnap/zfssnap/__main__.py index 274317e2..2ff8b309 100644 --- a/modules/zfssnap/zfssnap/zfssnap/__main__.py +++ b/modules/zfssnap/zfssnap/zfssnap/__main__.py @@ -1,5 +1,3 @@ -#!@python@/bin/python - import csv import subprocess import io diff --git a/overlays/matrix-synapse/1.70.1/default.nix b/overlays/matrix-synapse/1.70.1/default.nix new file mode 100644 index 00000000..0c026914 --- /dev/null +++ b/overlays/matrix-synapse/1.70.1/default.nix @@ -0,0 +1,111 @@ +{ lib, stdenv, fetchFromGitHub, python3, openssl, rustPlatform +, enableSystemd ? stdenv.isLinux, nixosTests +, enableRedis ? 
diff --git a/modules/borgsnap/default.nix b/modules/borgsnap/default.nix
index f4c0eec4..0a674e64 100644
--- a/modules/borgsnap/default.nix
+++ b/modules/borgsnap/default.nix
@@ -74,6 +74,15 @@ in {
         type = with types; listOf str;
         default = [];
       };
+
+      unknownUnencryptedRepoAccessOk = mkOption {
+        type = types.bool;
+        default = false;
+      };
+      hostnameIsUnique = mkOption {
+        type = types.bool;
+        default = true;
+      };
     };
   };
 
@@ -95,9 +104,10 @@
           "BORG_CACHE_DIR=/var/lib/borg/cache"
           "BORG_SECURITY_DIR=/var/lib/borg/security"
           "BORG_KEYS_DIR=/var/lib/borg/keys"
-          "BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK=yes"
-          "BORG_HOSTNAME_IS_UNIQUE=yes"
-        ] ++ optional (!(isNull cfg.sshConfig)) "BORG_RSH=\"${pkgs.openssh}/bin/ssh -F ${pkgs.writeText "config" cfg.sshConfig}\""
+        ]
+        ++ optional cfg.unknownUnencryptedRepoAccessOk "BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK=yes"
+        ++ optional cfg.hostnameIsUnique "BORG_HOSTNAME_IS_UNIQUE=yes"
+        ++ optional (!(isNull cfg.sshConfig)) "BORG_RSH=\"${pkgs.openssh}/bin/ssh -F ${pkgs.writeText "config" cfg.sshConfig}\""
           ++ optional (!(isNull cfg.keyfile)) "BORG_KEY_FILE=${cfg.keyfile}";
         RuntimeDirectory = "zfssnap-prune";
       };
diff --git a/modules/zfssnap/zfssnap/zfssnap/__main__.py b/modules/zfssnap/zfssnap/zfssnap/__main__.py
index 274317e2..2ff8b309 100644
--- a/modules/zfssnap/zfssnap/zfssnap/__main__.py
+++ b/modules/zfssnap/zfssnap/zfssnap/__main__.py
@@ -1,5 +1,3 @@
-#!@python@/bin/python
-
 import csv
 import subprocess
 import io
diff --git a/overlays/matrix-synapse/1.70.1/default.nix b/overlays/matrix-synapse/1.70.1/default.nix
new file mode 100644
index 00000000..0c026914
--- /dev/null
+++ b/overlays/matrix-synapse/1.70.1/default.nix
@@ -0,0 +1,111 @@
+{ lib, stdenv, fetchFromGitHub, python3, openssl, rustPlatform
+, enableSystemd ? stdenv.isLinux, nixosTests
+, enableRedis ? true
+, callPackage
+}:
+
+let
+  plugins = python3.pkgs.callPackage ./plugins { };
+  tools = callPackage ./tools { };
+in
+with python3.pkgs;
+buildPythonApplication rec {
+  pname = "matrix-synapse";
+  version = "1.70.1";
+  format = "pyproject";
+
+  src = fetchFromGitHub {
+    owner = "matrix-org";
+    repo = "synapse";
+    rev = "v${version}";
+    hash = "sha256-/clEY3sabaDEOAAowQ896vYOvzf5Teevoa7ZkzWw+fY=";
+  };
+
+  cargoDeps = rustPlatform.fetchCargoTarball {
+    inherit src;
+    name = "${pname}-${version}";
+    hash = "sha256-9wxWxrn+uPcz60710DROhDqNC6FvTtnqzWiWRk8kl6A=";
+  };
+
+  postPatch = ''
+    # Remove setuptools_rust from runtime dependencies
+    # https://github.com/matrix-org/synapse/blob/v1.69.0/pyproject.toml#L177-L185
+    sed -i '/^setuptools_rust =/d' pyproject.toml
+  '';
+
+  nativeBuildInputs = [
+    poetry-core
+    rustPlatform.cargoSetupHook
+    setuptools-rust
+  ] ++ (with rustPlatform.rust; [
+    cargo
+    rustc
+  ]);
+
+  buildInputs = [ openssl ];
+
+  propagatedBuildInputs = [
+    authlib
+    bcrypt
+    bleach
+    canonicaljson
+    daemonize
+    frozendict
+    ijson
+    jinja2
+    jsonschema
+    lxml
+    matrix-common
+    msgpack
+    netaddr
+    phonenumbers
+    pillow
+    prometheus-client
+    psutil
+    psycopg2
+    pyasn1
+    pydantic
+    pyjwt
+    pymacaroons
+    pynacl
+    pyopenssl
+    pysaml2
+    pyyaml
+    requests
+    setuptools
+    signedjson
+    sortedcontainers
+    treq
+    twisted
+    typing-extensions
+    unpaddedbase64
+  ] ++ lib.optional enableSystemd systemd
+    ++ lib.optionals enableRedis [ hiredis txredisapi ];
+
+  checkInputs = [ mock parameterized openssl ];
+
+  doCheck = !stdenv.isDarwin;
+
+  checkPhase = ''
+    runHook preCheck
+
+    # remove src module, so tests use the installed module instead
+    rm -rf ./synapse
+
+    PYTHONPATH=".:$PYTHONPATH" ${python3.interpreter} -m twisted.trial -j $NIX_BUILD_CORES tests
+
+    runHook postCheck
+  '';
+
+  passthru.tests = { inherit (nixosTests) matrix-synapse; };
+  passthru.plugins = plugins;
+  passthru.tools = tools;
+  passthru.python = python3;
+
+  meta = with lib; {
+    homepage = "https://matrix.org";
+    description = "Matrix reference homeserver";
+    license = licenses.asl20;
+    maintainers = teams.matrix.members;
+  };
+}
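Because the plugin set is re-exported via passthru, a host using this overlay can pull plugins straight off the package. Roughly like the following sketch; the exact services.matrix-synapse option shape depends on the NixOS release in use:

    { config, ... }:
    {
      services.matrix-synapse = {
        enable = true;
        plugins = with config.services.matrix-synapse.package.plugins; [
          matrix-synapse-ldap3
        ];
      };
    }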
diff --git a/overlays/matrix-synapse/1.70.1/plugins/default.nix b/overlays/matrix-synapse/1.70.1/plugins/default.nix
new file mode 100644
index 00000000..e67d9075
--- /dev/null
+++ b/overlays/matrix-synapse/1.70.1/plugins/default.nix
@@ -0,0 +1,8 @@
+{ callPackage }:
+
+{
+  matrix-synapse-ldap3 = callPackage ./ldap3.nix { };
+  matrix-synapse-mjolnir-antispam = callPackage ./mjolnir-antispam.nix { };
+  matrix-synapse-pam = callPackage ./pam.nix { };
+  matrix-synapse-shared-secret-auth = callPackage ./shared-secret-auth.nix { };
+}
diff --git a/overlays/matrix-synapse/1.70.1/plugins/ldap3.nix b/overlays/matrix-synapse/1.70.1/plugins/ldap3.nix
new file mode 100644
index 00000000..394c0f5e
--- /dev/null
+++ b/overlays/matrix-synapse/1.70.1/plugins/ldap3.nix
@@ -0,0 +1,17 @@
+{ isPy3k, buildPythonPackage, fetchPypi, service-identity, ldap3, twisted, ldaptor, mock }:
+
+buildPythonPackage rec {
+  pname = "matrix-synapse-ldap3";
+  version = "0.1.5";
+
+  src = fetchPypi {
+    inherit pname version;
+    sha256 = "9fdf8df7c8ec756642aa0fea53b31c0b2f1924f70d7f049a2090b523125456fe";
+  };
+
+  propagatedBuildInputs = [ service-identity ldap3 twisted ];
+
+  # ldaptor is not ready for py3 yet
+  doCheck = !isPy3k;
+  checkInputs = [ ldaptor mock ];
+}
diff --git a/overlays/matrix-synapse/1.70.1/plugins/mjolnir-antispam.nix b/overlays/matrix-synapse/1.70.1/plugins/mjolnir-antispam.nix
new file mode 100644
index 00000000..7372c2f7
--- /dev/null
+++ b/overlays/matrix-synapse/1.70.1/plugins/mjolnir-antispam.nix
@@ -0,0 +1,32 @@
+{ lib, buildPythonPackage, fetchFromGitHub, matrix-synapse }:
+
+buildPythonPackage rec {
+  pname = "matrix-synapse-mjolnir-antispam";
+  version = "1.5.0";
+
+  src = fetchFromGitHub {
+    owner = "matrix-org";
+    repo = "mjolnir";
+    rev = "refs/tags/v${version}";
+    sha256 = "sha256-YmP+r9W5e63Aw66lSQeTTbYwSF/vjPyHkoehJxtcRNw=";
+  };
+
+  sourceRoot = "./source/synapse_antispam";
+
+  propagatedBuildInputs = [ matrix-synapse ];
+
+  doCheck = false; # no tests
+  pythonImportsCheck = [ "mjolnir" ];
+
+  meta = with lib; {
+    description = "AntiSpam / Banlist plugin to be used with mjolnir";
+    longDescription = ''
+      Primarily meant to block invites from undesired homeservers/users,
+      Mjolnir's Synapse module is a way to interpret ban lists and apply
+      them to your entire homeserver.
+    '';
+    homepage = "https://github.com/matrix-org/mjolnir#synapse-module";
+    license = licenses.asl20;
+    maintainers = with maintainers; [ jojosch ];
+  };
+}
diff --git a/overlays/matrix-synapse/1.70.1/plugins/pam.nix b/overlays/matrix-synapse/1.70.1/plugins/pam.nix
new file mode 100644
index 00000000..a14fe6d6
--- /dev/null
+++ b/overlays/matrix-synapse/1.70.1/plugins/pam.nix
@@ -0,0 +1,15 @@
+{ buildPythonPackage, fetchFromGitHub, twisted, python-pam }:
+
+buildPythonPackage rec {
+  pname = "matrix-synapse-pam";
+  version = "0.1.3";
+
+  src = fetchFromGitHub {
+    owner = "14mRh4X0r";
+    repo = "matrix-synapse-pam";
+    rev = "v${version}";
+    sha256 = "0jgz49cwiyih5cg3hr4byva04zjnq8aj7rima9874la9fc5sd2wf";
+  };
+
+  propagatedBuildInputs = [ twisted python-pam ];
+}
diff --git a/overlays/matrix-synapse/1.70.1/plugins/shared-secret-auth.nix b/overlays/matrix-synapse/1.70.1/plugins/shared-secret-auth.nix
new file mode 100644
index 00000000..a6e22db3
--- /dev/null
+++ b/overlays/matrix-synapse/1.70.1/plugins/shared-secret-auth.nix
@@ -0,0 +1,26 @@
+{ lib, buildPythonPackage, fetchFromGitHub, matrix-synapse, twisted }:
+
+buildPythonPackage rec {
+  pname = "matrix-synapse-shared-secret-auth";
+  version = "2.0.2";
+
+  src = fetchFromGitHub {
+    owner = "devture";
+    repo = "matrix-synapse-shared-secret-auth";
+    rev = version;
+    sha256 = "sha256-qzXKwTEOMtdvsxoU3Xh3vQyhK+Q18LfkeSts7EyDIXE=";
+  };
+
+  doCheck = false;
+  pythonImportsCheck = [ "shared_secret_authenticator" ];
+
+  buildInputs = [ matrix-synapse ];
+  propagatedBuildInputs = [ twisted ];
+
+  meta = with lib; {
+    description = "Shared Secret Authenticator password provider module for Matrix Synapse";
+    homepage = "https://github.com/devture/matrix-synapse-shared-secret-auth";
+    license = licenses.agpl3Plus;
+    maintainers = with maintainers; [ sumnerevans ];
+  };
+}
diff --git a/overlays/matrix-synapse/1.70.1/tools/default.nix b/overlays/matrix-synapse/1.70.1/tools/default.nix
new file mode 100644
index 00000000..defc35bc
--- /dev/null
+++ b/overlays/matrix-synapse/1.70.1/tools/default.nix
@@ -0,0 +1,6 @@
+{ callPackage }:
+{
+  rust-synapse-compress-state = callPackage ./rust-synapse-compress-state.nix { };
+
+  synadm = callPackage ./synadm.nix { };
+}
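The tools set rides along on the package in the same way, so the admin utilities can be installed without separate packaging, e.g. (sketch, assuming the overlay defined later in this patch is active):

    { pkgs, ... }:
    {
      environment.systemPackages = [
        pkgs.matrix-synapse.tools.synadm
        pkgs.matrix-synapse.tools.rust-synapse-compress-state
      ];
    }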
diff --git a/overlays/matrix-synapse/1.70.1/tools/rust-synapse-compress-state.nix b/overlays/matrix-synapse/1.70.1/tools/rust-synapse-compress-state.nix
new file mode 100644
index 00000000..fcf123d6
--- /dev/null
+++ b/overlays/matrix-synapse/1.70.1/tools/rust-synapse-compress-state.nix
@@ -0,0 +1,30 @@
+{ lib, rustPlatform, python3, fetchFromGitHub, pkg-config, openssl }:
+
+rustPlatform.buildRustPackage rec {
+  pname = "rust-synapse-compress-state";
+  version = "0.1.3";
+
+  src = fetchFromGitHub {
+    owner = "matrix-org";
+    repo = pname;
+    rev = "v${version}";
+    sha256 = "sha256-SSfVtG8kwHarVbB1O7xC2SSbUpPGYMHTMyoxu8mpEk0=";
+  };
+
+  cargoSha256 = "sha256-PG+UeovhJMsIlm5dOYdtMxbUxZjwG3V59kAcB9aFP5c=";
+
+  cargoBuildFlags = [
+    "--all"
+  ];
+
+  nativeBuildInputs = [ python3 pkg-config ];
+
+  buildInputs = [ openssl ];
+
+  meta = with lib; {
+    description = "A tool to compress some state in a Synapse instance's database";
+    homepage = "https://github.com/matrix-org/rust-synapse-compress-state";
+    license = licenses.asl20;
+    maintainers = with maintainers; [ hexa maralorn ];
+  };
+}
diff --git a/overlays/matrix-synapse/1.70.1/tools/synadm.nix b/overlays/matrix-synapse/1.70.1/tools/synadm.nix
new file mode 100644
index 00000000..5075e42e
--- /dev/null
+++ b/overlays/matrix-synapse/1.70.1/tools/synadm.nix
@@ -0,0 +1,47 @@
+{ lib
+, python3Packages
+}:
+
+with python3Packages; buildPythonApplication rec {
+  pname = "synadm";
+  version = "0.36";
+  format = "setuptools";
+
+  src = fetchPypi {
+    inherit pname version;
+    sha256 = "sha256-OMXUbfAC927qJw0B5sq1lGJQRkFAUdohIOkCYUbZumI=";
+  };
+
+  postPatch = ''
+    substituteInPlace setup.py \
+      --replace "Click>=7.0,<8.0" "Click"
+  '';
+
+  propagatedBuildInputs = [
+    click
+    click-option-group
+    dnspython
+    tabulate
+    pyyaml
+    requests
+  ];
+
+  checkPhase = ''
+    runHook preCheck
+    export HOME=$TMPDIR
+    $out/bin/synadm -h > /dev/null
+    runHook postCheck
+  '';
+
+  meta = with lib; {
+    description = "Command line admin tool for Synapse";
+    longDescription = ''
+      A CLI tool to help admins of Matrix Synapse homeservers
+      conveniently issue commands available via its admin API's
+      (matrix-org/synapse@master/docs/admin_api)
+    '';
+    homepage = "https://github.com/JOJ0/synadm";
+    license = licenses.gpl3Plus;
+    maintainers = with maintainers; [ hexa ];
+  };
+}
diff --git a/overlays/matrix-synapse/default.nix b/overlays/matrix-synapse/default.nix
new file mode 100644
index 00000000..9db73e35
--- /dev/null
+++ b/overlays/matrix-synapse/default.nix
@@ -0,0 +1,3 @@
+{ final, prev, ... }: {
+  matrix-synapse = final.callPackage ./1.70.1/default.nix {};
+}
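The overlay file follows this repository's { final, prev, ... } calling convention; under plain nixpkgs-style overlays the same pinning would be wired up roughly as (sketch):

    {
      nixpkgs.overlays = [
        (final: prev: {
          matrix-synapse = final.callPackage ./overlays/matrix-synapse/1.70.1/default.nix { };
        })
      ];
    }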
diff --git a/shell.nix b/shell.nix
index 6ada761e..14125d02 100644
--- a/shell.nix
+++ b/shell.nix
@@ -1,8 +1,29 @@
-{ pkgs ? import <nixpkgs> {}, deploy-rs, nvfetcher }:
+{ system, self, deploy-rs, nvfetcher, mach-nix, leapseconds, ... }:
 let
-  tai64dec = pkgs.writeShellScriptBin "tai64dec" ''
-    echo $((16#$(${pkgs.daemontools}/bin/tai64n <<<"" | ${pkgs.coreutils}/bin/tail -c +2 | ${pkgs.coreutils}/bin/head -c 16)))
-  '';
+  pkgs = self.legacyPackages.${system};
+
+  ca = mach-nix.lib.${system}.buildPythonPackage {
+    pname = "ca";
+    src = ./tools/ca;
+    version = "0.0.0";
+    ignoreDataOutdated = true;
+
+    requirements = ''
+      cryptography >=38.0.0
+      fqdn
+      atomicwrites
+      leapseconddata
+      xkcdpass
+    '';
+
+    _.cryptography.buildInputs = with pkgs; [ openssl ];
+
+    postInstall = ''
+      wrapProgram $out/bin/ca \
+        --set-default LEAPSECONDS_FILE ${leapseconds} \
+        --prefix PATH : ${pkgs.lib.makeBinPath (with pkgs; [sops])}
+    '';
+  };
 in pkgs.mkShell {
   name = "nixos";
   nativeBuildInputs = with pkgs; [
@@ -10,10 +31,10 @@
     wireguard-tools
     gup
     nftables
-    deploy-rs
-    tai64dec
+    deploy-rs.packages.${system}.deploy-rs
     knot-dns
     yq
-    nvfetcher
+    nvfetcher.defaultPackage.${system}
+    ca
   ];
 }
diff --git a/tools/ca/ca/__main__.py b/tools/ca/ca/__main__.py
new file mode 100644
index 00000000..e3e4bbe6
--- /dev/null
+++ b/tools/ca/ca/__main__.py
@@ -0,0 +1,568 @@
+import sys, os
+
+import logging
+import argparse
+
+from inspect import signature
+
+from enum import Enum, auto
+from contextlib import contextmanager
+
+from cryptography import __version__ as cryptography_version
+from cryptography.hazmat.backends import openssl
+from cryptography import x509
+from cryptography.x509.oid import NameOID, ExtendedKeyUsageOID, ExtensionOID
+from cryptography.hazmat.primitives import serialization, hashes
+from cryptography.hazmat.primitives.serialization import PrivateFormat, pkcs12
+from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
+from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+from cryptography.hazmat.primitives.asymmetric import rsa
+from pathlib import Path
+from atomicwrites import atomic_write
+from fqdn import FQDN
+from datetime import datetime, timedelta, timezone
+from math import ceil, ldexp
+import re
+from getpass import getpass
+from itertools import count
+from tempfile import TemporaryFile
+import subprocess
+import json
+from leapseconddata import LeapSecondData
+
+
+class KeyType(Enum):
+    ED448 = 'ed448'
+    ED25519 = 'ed25519'
+    RSA4096 = 'rsa4096'
+    RSA2048 = 'rsa2048'
+
+    def generate(self):
+        match self:
+            case KeyType.ED448:
+                return Ed448PrivateKey.generate()
+            case KeyType.ED25519:
+                return Ed25519PrivateKey.generate()
+            case KeyType.RSA4096:
+                return rsa.generate_private_key(
+                    public_exponent = 65537,
+                    key_size = 4096,
+                )
+            case KeyType.RSA2048:
+                return rsa.generate_private_key(
+                    public_exponent = 65537,
+                    key_size = 2048,
+                )
+
+    def aligned(self, key):
+        match self:
+            case KeyType.ED448:
+                return isinstance(key, Ed448PrivateKey)
+            case KeyType.ED25519:
+                return isinstance(key, Ed25519PrivateKey)
+            case KeyType.RSA4096:
+                return isinstance(key, RSAPrivateKey) and key.key_size == 4096
+            case KeyType.RSA2048:
+                return isinstance(key, RSAPrivateKey) and key.key_size == 2048
+
+    def __str__(self):
+        return self.value
+
+    @classmethod
+    def from_string(cls, s):
+        try:
+            return cls(s)
+        except ValueError:
+            raise ValueError(f'‘{s}’ is not a valid key type')
+
+class ValidFQDN(FQDN):
+    def __init__(self, *args, **kwds):
+        super().__init__(*args, **kwds)
+
+        if not self.is_valid:
+            raise ValueError(f'‘{self}’ is not valid')
+
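+# duration() parses compact interval strings made of integer/unit pairs, with
+# case-insensitive units d, h, m, s, ms and µs/us. For example
+# duration('10d') == timedelta(days=10) and
+# duration('1h 30m') == timedelta(hours=1, minutes=30); leftover characters
+# raise ValueError.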
+def duration(inp_str):
+    delta = timedelta()
+
+    item_re = re.compile(r'\W*(?P<value>\d+)\W*(?P<unit>(?i:d|h|m(?!s)|s|ms|µs|us))')
+
+    match = item_re.match(inp_str)
+    while match:
+        val = int(match.group('value'))
+        unit = match.group('unit').lower()
+
+        if unit == 'd':
+            delta += timedelta(days=val)
+        elif unit == 'h':
+            delta += timedelta(hours=val)
+        elif unit == 'm':
+            delta += timedelta(minutes=val)
+        elif unit == 's':
+            delta += timedelta(seconds=val)
+        elif unit == 'ms':
+            delta += timedelta(milliseconds=val)
+        elif unit == 'µs' or unit == 'us':
+            delta += timedelta(microseconds=val)
+        else:
+            raise ValueError(f'Unknown time unit ‘{unit:s}’')
+
+        inp_str = inp_str[match.end():]
+        match = item_re.match(inp_str)
+    else:
+        if re.match(r'\w', inp_str):
+            raise ValueError(f'Parsing of duration resulted in leftovers: ‘{inp_str:s}’')
+
+    return delta
+
+@contextmanager
+def umask(desired_umask):
+    """ A little helper to safely set and restore umask(2). """
+    try:
+        prev_umask = os.umask(0)
+        os.umask(prev_umask | desired_umask)
+        yield
+    finally:
+        os.umask(prev_umask)
+
+class BooleanAction(argparse.Action):
+    def __init__(self, option_strings, dest, nargs=None, **kwargs):
+        super(BooleanAction, self).__init__(option_strings, dest, nargs=0, **kwargs)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        setattr(namespace, self.dest, False if option_string.startswith('--no') else True)
+
+
+def load_key(keyfile, prompt='CA private key password: '):
+    key = None
+    with open(keyfile, 'rb') as f:
+        is_sops = False
+        try:
+            sops_json = json.load(f)
+            is_sops = 'sops' in sops_json
+        except json.JSONDecodeError:
+            pass
+
+        f.seek(0)
+
+        if not is_sops:
+            pem_bytes = f.read()
+            try:
+                key = serialization.load_pem_private_key(pem_bytes, password=None)
+            except TypeError:
+                pw = getpass(prompt=prompt)
+                key = serialization.load_pem_private_key(pem_bytes, password=bytes(pw, sys.stdin.encoding))
+        else:
+            cmd = ['sops', '-d', f'/dev/fd/{f.fileno()}']
+            with subprocess.Popen(cmd, stdout=subprocess.PIPE, pass_fds=(f.fileno(),)) as proc:
+                key = serialization.load_pem_private_key(proc.stdout.read(), password=None)
+                ret = proc.wait()
+                if ret != 0:
+                    raise subprocess.CalledProcessError(ret, cmd)
+
+    return key
+
+def mv_bak(path):
+    global logger
+
+    bak_path = path.parent / f'{path.name}.bak'
+    for n in count(2):
+        if not bak_path.exists():
+            break
+        bak_path = path.parent / f'{path.name}.bak{n}'
+
+    logger.warn('Renaming ‘%s’ to ‘%s’...', path, bak_path)
+    path.rename(bak_path)
+
+def tai64nint(dt):
+    global leapsecond_data
+
+    have_data = False
+    try:
+        have_data = bool(leapsecond_data)
+    except NameError:
+        pass
+
+    if not have_data:
+        leapsecond_data = LeapSecondData.from_file(Path(os.getenv('LEAPSECONDS_FILE')))
+
+    tai_dt = leapsecond_data.to_tai(dt)
+    seconds = int(tai_dt.timestamp())
+    nanoseconds = int((tai_dt.timestamp() - seconds) / 1e-9)
+    seconds += int(ldexp(1, 62))
+    return seconds << 32 | nanoseconds
+
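+# tai64nint() packs an aware datetime into a single TAI64N-style integer: TAI
+# seconds since the epoch plus the 2**62 offset the TAI64 label format
+# requires, shifted left by 32 bits, with the nanosecond fraction in the low
+# 32 bits. The leap-second table is loaded lazily from LEAPSECONDS_FILE,
+# which the shell.nix wrapper points at the leapseconds flake input.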
+def write_genkey(key_type, sops, keyfile):
+    if keyfile.exists():
+        raise ValueError(f'Keyfile exists: {keyfile}')
+
+    key = None
+
+    def genkey(fh):
+        nonlocal key, key_type
+
+        logger.debug('Generating new privkey...')
+        key = key_type.generate()
+        priv_bytes = key.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption())
+        fh.write(priv_bytes)
+
+    if not sops:
+        with umask(0o0177), atomic_write(keyfile, overwrite=False, mode='wb') as fh:
+            logger.info('Writing new privkey to ‘%s’...', keyfile)
+            genkey(fh)
+        logger.debug('Adjusting permissions for ‘%s’...', keyfile)
+        os.chmod(keyfile, 0o0400)
+    else:
+        with TemporaryFile(mode='wb') as tf:
+            genkey(tf)
+            tf.seek(0)
+
+            with umask(0o0177), atomic_write(keyfile, overwrite=False, mode='wb') as fh:
+                logger.info('Encrypting new privkey to ‘%s’...', keyfile)
+                subprocess.run(['sops', '-e', f'/dev/fd/{tf.fileno()}'], stdout=fh, pass_fds=(tf.fileno(),), check=True)
+            logger.debug('Adjusting permissions for ‘%s’...', keyfile)
+            os.chmod(keyfile, 0o0400)
+
+    return key
+
+def initca(ca_cert, ca_key, key_type, subject, clock_skew, validity, sops):
+    global logger
+
+    key = None
+    try:
+        key = load_key(ca_key)
+        logger.info('Successfully loaded privkey from ‘%s’', ca_key)
+
+        if not key_type.aligned(key):
+            logger.warn('Private key ‘%s’ does not align with requested type %s', ca_key, key_type)
+
+            try:
+                mv_bak(ca_key)
+            except FileNotFoundError:
+                pass
+            try:
+                mv_bak(ca_cert)
+            except FileNotFoundError:
+                pass
+
+            raise FileNotFoundError(f'Key does not align with requested type: {ca_key}')
+    except FileNotFoundError:
+        key = write_genkey(key_type, sops, ca_key)
+
+    cert = None
+    try:
+        with open(ca_cert, 'rb') as fh:
+            cert = x509.load_pem_x509_certificate(fh.read())
+        logger.info('Successfully loaded certificate from ‘%s’', ca_cert)
+    except FileNotFoundError:
+        logger.debug('Generating new certificate...')
+
+        now = datetime.utcnow()
+        name = x509.Name([
+            x509.NameAttribute(NameOID.COMMON_NAME, subject.relative)
+        ])
+
+        cert = x509.CertificateBuilder().subject_name(
+            name
+        ).public_key(
+            key.public_key()
+        ).serial_number(
+            x509.random_serial_number()
+        ).not_valid_before(
+            now - clock_skew
+        ).not_valid_after(
+            now + validity
+        ).issuer_name(
+            name
+        ).add_extension(
+            x509.AuthorityKeyIdentifier.from_issuer_public_key(key.public_key()),
+            False
+        ).add_extension(
+            x509.SubjectKeyIdentifier.from_public_key(key.public_key()),
+            False
+        ).add_extension(
+            x509.KeyUsage(digital_signature=True, content_commitment=False, key_encipherment=False, data_encipherment=False, key_agreement=False, key_cert_sign=True, crl_sign=True, encipher_only=False, decipher_only=False),
+            True
+        ).add_extension(
+            x509.BasicConstraints(ca=True, path_length=None),
+            True
+        ).sign(key, None if isinstance(key, Ed25519PrivateKey) or isinstance(key, Ed448PrivateKey) else hashes.SHA512())
+
+        with umask(0o0133), atomic_write(ca_cert, overwrite=False, mode='wb') as cf:
+            logger.info('Writing new certificate to ‘%s’...', ca_cert)
+            cf.write(cert.public_bytes(serialization.Encoding.PEM))
+        logger.debug('Adjusting permissions for ‘%s’...', ca_cert)
+        os.chmod(ca_cert, 0o0444)
+
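+# signcsr() accepts the CSR either as a file-like object or as raw PEM bytes
+# (new_client() below passes bytes directly). Unless overridden on the
+# command line, the subject CN and any DNS subject alternative names are
+# taken from the CSR itself. Serial numbers are composed as
+# (tai64nint(now) << 24) | 24 random bits, so each serial encodes its signing
+# time while remaining collision-resistant.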
+def signcsr(ca_cert, ca_key, clock_skew, validity, subject, alternative_name, ignore_alternative_names, csr, output):
+    csr_bytes = None
+    try:
+        csr_bytes = csr.read()
+    except AttributeError:
+        csr_bytes = csr
+
+    csr = x509.load_pem_x509_csr(csr_bytes)
+    if not subject:
+        common_name_attrs = csr.subject.get_attributes_for_oid(NameOID.COMMON_NAME)
+        if len(common_name_attrs) != 1:
+            raise ValueError('Invalid name structure in CSR')
+        subject = common_name_attrs[0].value.lower()
+        logger.warn('Using subject common name from csr: %s', subject)
+    name = x509.Name([
+        x509.NameAttribute(NameOID.COMMON_NAME, subject)
+    ])
+
+    if not ignore_alternative_names:
+        ext = csr.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
+        csr_alt_names = ext.value.get_values_for_type(x509.DNSName)
+        logger.warn('Using alternative names from csr: %s', csr_alt_names)
+        alternative_name = list(set(alternative_name) | set(csr_alt_names))
+
+    ca_key = load_key(ca_key)
+    with open(ca_cert, 'rb') as fh:
+        ca_cert = x509.load_pem_x509_certificate(fh.read())
+
+    now = datetime.now(tz=timezone.utc)
+    cert = x509.CertificateBuilder().subject_name(
+        name
+    ).public_key(
+        csr.public_key()
+    ).serial_number(
+        (tai64nint(now) << 24) | (x509.random_serial_number() & int(ldexp(1, 24) - 1))
+    ).not_valid_before(
+        now - clock_skew
+    ).not_valid_after(
+        now + validity
+    ).issuer_name(
+        ca_cert.subject
+    ).add_extension(
+        x509.AuthorityKeyIdentifier.from_issuer_public_key(ca_cert.public_key()),
+        False
+    ).add_extension(
+        x509.SubjectKeyIdentifier.from_public_key(csr.public_key()),
+        False
+    ).add_extension(
+        x509.KeyUsage(digital_signature=True, content_commitment=True, key_encipherment=True, data_encipherment=False, key_agreement=False, key_cert_sign=False, crl_sign=False, encipher_only=False, decipher_only=False),
+        True
+    ).add_extension(
+        x509.BasicConstraints(ca=False, path_length=None),
+        True
+    ).add_extension(
+        x509.ExtendedKeyUsage([ExtendedKeyUsageOID.CLIENT_AUTH]),
+        False
+    )
+
+    if alternative_name:
+        cert = cert.add_extension(
+            x509.SubjectAlternativeName(
+                list(map(x509.DNSName, alternative_name))
+            ),
+            False
+        )
+
+    cert = cert.sign(ca_key, None if isinstance(ca_key, Ed25519PrivateKey) or isinstance(ca_key, Ed448PrivateKey) else hashes.SHA256())
+
+    output = output.with_suffix('.crt')
+
+    try:
+        mv_bak(output)
+    except FileNotFoundError:
+        pass
+    with umask(0o0133), atomic_write(output, overwrite=False, mode='wb') as cf:
+        logger.info('Writing new certificate to ‘%s’...', output)
+        cf.write(cert.public_bytes(serialization.Encoding.PEM))
+    logger.debug('Adjusting permissions for ‘%s’...', output)
+    os.chmod(output, 0o0444)
+
+def new_client(ca_cert, ca_key, key_type, clock_skew, validity, subject, alternative_name, sops, output):
+    key_file = output.with_suffix('.key')
+    cert_file = output.with_suffix('.crt')
+
+    key = None
+    try:
+        key = load_key(key_file)
+        logger.info('Successfully loaded privkey from ‘%s’', key_file)
+
+        if not key_type.aligned(key):
+            logger.warn('Private key ‘%s’ does not align with requested type %s', key_file, key_type)
+
+            try:
+                mv_bak(key_file)
+            except FileNotFoundError:
+                pass
+            try:
+                mv_bak(cert_file)
+            except FileNotFoundError:
+                pass
+
+            raise FileNotFoundError(f'Key does not align with requested type: {key_file}')
+    except FileNotFoundError:
+        key = write_genkey(key_type, sops, key_file)
+
+    csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([
+        x509.NameAttribute(NameOID.COMMON_NAME, subject)
+    ]))
+    if alternative_name:
+        csr = csr.add_extension(
+            x509.SubjectAlternativeName(
+                list(map(x509.DNSName, alternative_name))
+            ),
+            False
+        )
+
+    return signcsr(
+        ca_cert=ca_cert,
+        ca_key=ca_key,
+        clock_skew=clock_skew,
+        validity=validity,
+        subject=None,
+        alternative_name=[],
+        ignore_alternative_names=False,
+        output=cert_file,
+        csr=csr.sign(
+            key,
+            None if isinstance(key, Ed25519PrivateKey) or isinstance(key, Ed448PrivateKey) else hashes.SHA256(),
+        ).public_bytes(serialization.Encoding.PEM)
+    )
+
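+# to_pkcs12() bundles BASENAME.key and BASENAME.crt into a PKCS#12 file
+# (BASENAME.p12 unless --output is given). The password is either prompted
+# for twice or, with --random-password, generated as a twelve-word xkcdpass
+# phrase and printed to stderr; when a password is set, the bundle is
+# encrypted with PBESv2 (SHA-256, AES-256-CBC) using 500000 KDF rounds.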
+def to_pkcs12(random_password, filename, output):
+    key_file = filename.with_suffix('.key')
+    cert_file = filename.with_suffix('.crt')
+
+    if not output:
+        output = filename.with_suffix('.p12')
+
+    key = load_key(key_file)
+    logger.info('Successfully loaded privkey from ‘%s’', key_file)
+    cert = None
+    with open(cert_file, mode='rb') as fh:
+        cert = x509.load_pem_x509_certificate(fh.read())
+    logger.info('Successfully loaded certificate from ‘%s’', cert_file)
+
+    with umask(0o0177), atomic_write(output, overwrite=False, mode='wb') as fh:
+        logger.info('Writing to ‘%s’...', output)
+        common_name_attrs = cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)
+        if len(common_name_attrs) != 1:
+            raise ValueError('Invalid name structure in cert')
+        subject = common_name_attrs[0].value.lower()
+
+        pw = None
+        if not random_password:
+            pw2 = None
+            while not pw2 or pw2 != pw:
+                pw = getpass(prompt='Password: ')
+                if not pw:
+                    pw = None
+                    break
+                else:
+                    pw2 = getpass(prompt='Repeat password: ')
+        else:
+            from xkcdpass import xkcd_password as xp
+            ws = xp.generate_wordlist(wordfile=xp.locate_wordfile())
+            pw = xp.generate_xkcdpassword(ws, numwords=12)
+            print(f'Password: {pw}', file=sys.stderr)
+
+        encryption = None
+        if pw:
+            encryption = PrivateFormat.PKCS12.encryption_builder().kdf_rounds(
+                500000
+            ).key_cert_algorithm(
+                pkcs12.PBES.PBESv2SHA256AndAES256CBC
+            ).hmac_hash(
+                hashes.SHA256()
+            ).build(bytes(pw, 'utf-8'))
+        fh.write(pkcs12.serialize_key_and_certificates(
+            bytes(subject, 'utf-8'),
+            key,
+            cert,
+            None,
+            encryption,
+        ))
+    logger.debug('Adjusting permissions for ‘%s’...', output)
+    os.chmod(output, 0o0400)
+
+
+def main():
+    global logger
+    logger = logging.getLogger(__name__)
+    console_handler = logging.StreamHandler()
+    console_handler.setFormatter( logging.Formatter('[%(levelname)s](%(name)s): %(message)s') )
+    if sys.stderr.isatty():
+        console_handler.setFormatter( logging.Formatter('%(asctime)s [%(levelname)s](%(name)s): %(message)s') )
+    logger.addHandler(console_handler)
+
+    # log uncaught exceptions
+    def log_exceptions(type, value, tb):
+        global logger
+
+        logger.error(value)
+        sys.__excepthook__(type, value, tb) # calls default excepthook
+
+    sys.excepthook = log_exceptions
+
+
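+    # Subcommands: init creates (or reuses) the CA key and self-signed CA
+    # certificate, sign issues a client certificate from an existing CSR,
+    # new-client generates a key pair and signed certificate in one step,
+    # and pkcs12 bundles an existing key/cert pair for distribution.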
+    parser = argparse.ArgumentParser(prog='ca', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument('--verbosity', dest='log_level', action='append', type=int)
+    parser.add_argument('--verbose', '-v', dest='log_level', action='append_const', const=1)
+    parser.add_argument('--quiet', '-q', dest='log_level', action='append_const', const=-1)
+    subparsers = parser.add_subparsers(help='Subcommands', required=True)
+
+    subparser = subparsers.add_parser('init', aliases=['initca', 'init-ca', 'ca'], formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    subparser.add_argument('--ca-cert', type=Path, default=Path('ca.crt'))
+    subparser.add_argument('--ca-key', type=Path, default=Path('ca.key'))
+    subparser.add_argument('--key-type', type=KeyType.from_string, choices=list(KeyType), default=KeyType.ED448.value)
+    subparser.add_argument('--clock-skew', metavar='DURATION', type=duration, default=timedelta(minutes=5))
+    subparser.add_argument('--validity', metavar='DURATION', type=duration, default=timedelta(days=ceil(365.2425*10)))
+    subparser.add_argument('--sops', '--no-sops', action=BooleanAction, default=True)
+    subparser.add_argument('--subject', metavar='FQDN', type=ValidFQDN, required=True)
+    subparser.set_defaults(cmd=initca)
+
+    subparser = subparsers.add_parser('sign', aliases=['signcsr', 'sign-csr'], formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    subparser.add_argument('--ca-cert', type=Path, default=Path('ca.crt'))
+    subparser.add_argument('--ca-key', type=Path, default=Path('ca.key'))
+    subparser.add_argument('--clock-skew', metavar='DURATION', type=duration, default=timedelta(minutes=5))
+    subparser.add_argument('--validity', metavar='DURATION', type=duration, default=timedelta(days=ceil(365.2425*10)))
+    subparser.add_argument('--subject', metavar='CN', type=str, required=False)
+    subparser.add_argument('--ignore-alternative-names', '--no-ignore-alternative-names', action=BooleanAction, default=True)
+    subparser.add_argument('--alternative-name', metavar='CN', type=str, action='append')
+    subparser.add_argument('--output', type=Path, required=True)
+    subparser.add_argument('csr', metavar='FILE', type=argparse.FileType(mode='rb'))
+    subparser.set_defaults(cmd=signcsr)
+
+    subparser = subparsers.add_parser('new-client', aliases=['new', 'client'], formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    subparser.add_argument('--ca-cert', type=Path, default=Path('ca.crt'))
+    subparser.add_argument('--ca-key', type=Path, default=Path('ca.key'))
+    subparser.add_argument('--key-type', type=KeyType.from_string, choices=list(KeyType), default=KeyType.ED25519.value)
+    subparser.add_argument('--clock-skew', metavar='DURATION', type=duration, default=timedelta(minutes=5))
+    subparser.add_argument('--validity', metavar='DURATION', type=duration, default=timedelta(days=ceil(365.2425*10)))
+    subparser.add_argument('--sops', '--no-sops', action=BooleanAction, default=True)
+    subparser.add_argument('--subject', metavar='CN', type=str, required=True)
+    subparser.add_argument('--alternative-name', metavar='CN', type=str, action='append')
+    subparser.add_argument('--output', type=Path, required=True)
+    subparser.set_defaults(cmd=new_client)
+
+    subparser = subparsers.add_parser('pkcs12', aliases=['p12', 'pfx'], formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    subparser.add_argument('--random-password', '--no-random-password', action=BooleanAction, default=True)
+    subparser.add_argument('--output', type=Path)
+    subparser.add_argument('filename', metavar='BASENAME', type=Path)
+    subparser.set_defaults(cmd=to_pkcs12)
+
+    args = parser.parse_args()
+
+
+    LOG_LEVELS = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL]
+    DEFAULT_LOG_LEVEL = logging.INFO
+    log_level = LOG_LEVELS.index(DEFAULT_LOG_LEVEL)
+
+    for adjustment in args.log_level or ():
+        log_level = min(len(LOG_LEVELS) - 1, max(log_level - adjustment, 0))
+    logger.setLevel(LOG_LEVELS[log_level])
+
+
+    logger.debug('Using cryptography %s (%s)', cryptography_version, openssl.backend.openssl_version_text())
+
+
+    args.cmd(**{ k: v for k, v in vars(args).items() if k in signature(args.cmd).parameters.keys() })
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/tools/ca/setup.py b/tools/ca/setup.py
new file mode 100644
index 00000000..3342a7a6
--- /dev/null
+++ b/tools/ca/setup.py
@@ -0,0 +1,10 @@
+from setuptools import setup
+
+setup(name='ca',
+      packages=['ca'],
+      entry_points={
+        'console_scripts': [
+          'ca=ca.__main__:main'
+        ],
+      },
+)
--
cgit v1.2.3