#!@python@/bin/python
"""zfssnap — create and prune automatic ZFS snapshots.

Snapshots are named ``<dataset>@auto_<ISO8601-UTC>`` (with a trailing ``Z``
instead of ``+00:00``).  Datasets opt in via the ``li.yggdrasil:auto-snapshot``
user property; pruning policy comes from an INI file (``zfssnap.ini``) with a
``[KEEP]`` section.
"""

import csv
import subprocess
import io
# NOTE(review): distutils was removed in Python 3.12 — replace strtobool with
# a local parser when the interpreter is upgraded.
from distutils.util import strtobool
from datetime import datetime, timezone, timedelta
from dateutil.tz import gettz, tzlocal
import pytimeparse
import argparse
import re
import sys
import logging
import shlex
from collections import defaultdict, OrderedDict
import configparser
from xdg import BaseDirectory
from functools import cache
from math import floor

# Module-level logger; main() attaches the handler and sets the level.
logger = logging.getLogger(__name__)


@cache
def _now():
    """Return the current UTC time, frozen at the first call.

    The ``@cache`` means every caller sees the same instant, so all snapshots
    taken in one run share a single timestamp.
    """
    return datetime.now(timezone.utc)


def _snap_name(item, time=None):
    """Build the canonical snapshot name for *item* at *time* (default: now)."""
    if time is None:
        time = _now()
    # ISO 8601 with 'Z' instead of '+00:00' keeps the name shell-friendly.
    suffix = re.sub(r'\+00:00$', r'Z', time.isoformat())
    return f'{item}@auto_{suffix}'


def _log_cmd(*args):
    """Debug-log an external command in copy-pasteable, shell-quoted form."""
    fmt_args = ' '.join(map(shlex.quote, args))
    logger.debug(f'Running command: {fmt_args}')


def _get_items():
    """Return ``{dataset: bool}`` for every filesystem/volume that sets the
    ``li.yggdrasil:auto-snapshot`` property (from any non-'none' source)."""
    items = {}

    args = ['zfs', 'get', '-H', '-p', '-o', 'name,value',
            '-t', 'filesystem,volume',
            '-s', 'local,default,inherited,temporary,received',
            'li.yggdrasil:auto-snapshot']
    _log_cmd(*args)
    with subprocess.Popen(args, stdout=subprocess.PIPE) as proc:
        text_stdout = io.TextIOWrapper(proc.stdout)
        reader = csv.reader(text_stdout, delimiter='\t', quoting=csv.QUOTE_NONE)
        for row in reader:
            name = row[0]
            setting = bool(strtobool(row[1]))
            items[name] = setting

    return items


def prune(config, dry_run):
    """Destroy auto-snapshots not retained by the ``[KEEP]`` rules in *config*.

    Retention is the union of:
      * everything younger than ``KEEP/within``, and
      * the newest snapshot per dataset in each of the most recent N periods,
        for every period rule (hourly, daily, …) with ``KEEP/<rule> = N``.

    With *dry_run*, ``zfs destroy`` is invoked with ``-n``.
    """
    items = defaultdict(list)

    # Collect every snapshot that matches our naming scheme, grouped by dataset.
    args = ['zfs', 'get', '-H', '-p', '-o', 'name,value', '-t', 'snapshot', 'creation']
    _log_cmd(*args)
    with subprocess.Popen(args, stdout=subprocess.PIPE) as proc:
        text_stdout = io.TextIOWrapper(proc.stdout)
        reader = csv.reader(text_stdout, delimiter='\t', quoting=csv.QUOTE_NONE)
        for row in reader:
            name = row[0]
            timestamp = int(row[1])
            creation = datetime.fromtimestamp(timestamp, timezone.utc)
            base_name, _, _ = name.rpartition('@')
            # Only manage snapshots we created ourselves: the name must
            # round-trip exactly from the recorded creation time.
            expected_name = _snap_name(base_name, time=creation)
            if expected_name != name:
                # logger.debug(f'Skipping ‘{name}’ since it does not conform to naming scheme')
                continue
            items[base_name].append({'name': name, 'creation': creation})

    keep = set()
    kept_count = defaultdict(lambda: defaultdict(lambda: 0))

    def keep_because(base, snap, rule, period=None):
        # Mark *snap* as kept, crediting the first rule that claims it.
        if snap not in keep:
            kept_count[rule][base] += 1
            logger.info(f'Keeping ‘{snap}’ because of rule ‘{rule}’ (#{kept_count[rule][base]} for ‘{base}’, period={period})')
            keep.add(snap)

    # Rule 1: keep everything younger than KEEP/within.
    within = config.gettimedelta('KEEP', 'within')
    within_cutoff = _now() - within
    for base, snap in [(base, snap) for base, snaps in items.items() for snap in snaps]:
        if snap['creation'] >= within_cutoff:
            keep_because(base, snap['name'], 'within')

    # Rule 2: period buckets.  Bucketing happens in the configured timezone so
    # that e.g. ‘daily’ follows local midnights.
    # BUGFIX: fallback must be a tzinfo *instance* (tzlocal()), not the class.
    prune_timezone = config.gettimezone('KEEP', 'timezone', fallback=tzlocal())

    PRUNING_PATTERNS = OrderedDict([
        ("secondly", lambda t: t.strftime('%Y-%m-%d %H:%M:%S')),
        ("minutely", lambda t: t.strftime('%Y-%m-%d %H:%M')),
        ("5m", lambda t: (t.strftime('%Y-%m-%d %H'), floor(t.minute / 5) * 5)),
        ("hourly", lambda t: t.strftime('%Y-%m-%d %H')),
        ("daily", lambda t: t.strftime('%Y-%m-%d')),
        ("weekly", lambda t: t.strftime('%G-%V')),
        ("monthly", lambda t: t.strftime('%Y-%m')),
        ("yearly", lambda t: t.strftime('%Y')),
    ])

    for rule, pattern in PRUNING_PATTERNS.items():
        desired_count = config.getint('KEEP', rule, fallback=0)
        for base, snaps in items.items():
            last_period = None
            to_keep = desired_count
            if to_keep == 0:
                continue
            # Walk newest→oldest, keeping the newest snapshot of each period.
            for snap in sorted(snaps, key=lambda snap: snap['creation'], reverse=True):
                if to_keep == 0:
                    break
                # BUGFIX: bucket in the configured timezone — prune_timezone
                # was previously computed but never applied.
                period = pattern(snap['creation'].astimezone(prune_timezone))
                if period != last_period:
                    last_period = period
                    keep_because(base, snap['name'], rule, period=period)
                    to_keep -= 1
            if to_keep > 0:
                logger.debug(f'Missing {to_keep} to fulfill {rule}={desired_count} for ‘{base}’')

    all_snaps = {snap['name'] for _, snaps in items.items() for snap in snaps}
    to_delete = all_snaps - keep
    if to_delete:
        # Lazy %-formatting; the literal needs no f-prefix (was f'…%s').
        logger.info('Will prune: %s', ', '.join(map(lambda snap: f'‘{snap}’', to_delete)))
    else:
        logger.info('Nothing to prune')

    for snap in to_delete:
        args = ['zfs', 'destroy']
        if dry_run:
            args += ['-n']
        args += [snap]
        _log_cmd(*args)
        subprocess.run(args, check=True)


def rename(snapshots, check=False):
    """Rename each snapshot in *snapshots* to the canonical name derived from
    its creation time.  With *check*, a failing ``zfs rename`` raises."""
    args = ['zfs', 'get', '-H', '-p', '-o', 'name,value', 'creation', *snapshots]
    _log_cmd(*args)
    with subprocess.Popen(args, stdout=subprocess.PIPE) as proc:
        text_stdout = io.TextIOWrapper(proc.stdout)
        reader = csv.reader(text_stdout, delimiter='\t', quoting=csv.QUOTE_NONE)
        for row in reader:
            name = row[0]
            timestamp = int(row[1])
            creation = datetime.fromtimestamp(timestamp, timezone.utc)
            base_name, _, _ = name.rpartition('@')
            new_name = _snap_name(base_name, time=creation)
            if new_name == name:
                logger.debug(f'Not renaming ‘{name}’ since name is already correct')
                continue
            logger.info(f'Renaming ‘{name}’ to ‘{new_name}’')
            args = ['zfs', 'rename', name, new_name]
            _log_cmd(*args)
            subprocess.run(args, check=check)


def autosnap():
    """Snapshot every dataset with auto-snapshot enabled, using ``zfs
    snapshot -r`` where an entire subtree is enabled."""
    items = _get_items()

    recursive, single = set(), set()
    for item_name, is_included in items.items():
        if not is_included:
            continue
        children = {sub_name for sub_name in items if sub_name.startswith(f'{item_name}/')}
        # all() is vacuously True for leaves, hence the extra ‘and children’.
        is_recursive = all([items[sub_name] for sub_name in children])
        if is_recursive and children:
            recursive.add(item_name)
        else:
            single.add(item_name)

    # Drop any item already covered by a recursive ancestor.  Iterating the
    # union (a fresh set) makes removal from the originals safe.
    for item_name in recursive | single:
        is_covered = any([item_name.startswith(f'{super_name}/') for super_name in recursive])
        if is_covered:
            try:
                recursive.remove(item_name)
            except KeyError:
                pass
            try:
                single.remove(item_name)
            except KeyError:
                pass

    def do_snapshot(*snap_items, recursive=False):
        # Robustness: bare ‘zfs snapshot’ with no targets would fail check=True.
        if not snap_items:
            return

        snap_names = {_snap_name(item) for item in snap_items}
        if recursive:
            # Expand to the snapshots ‘zfs snapshot -r’ will actually create.
            # BUGFIX: match only the dataset itself and true descendants
            # (‘tank/foo/…’), not name-prefix siblings like ‘tank/foobar’.
            all_snap_names = set()
            for snap_item in snap_items:
                all_snap_names |= {_snap_name(item) for item in items
                                   if item == snap_item or item.startswith(f'{snap_item}/')}
        else:
            all_snap_names = snap_names

        args = ['zfs', 'snapshot']
        if recursive:
            args += ['-r']
        args += snap_names
        _log_cmd(*args)
        subprocess.run(args, check=True)

        # Normalize names in case creation time differs from _now().
        rename(snapshots=all_snap_names, check=True)

    do_snapshot(*single)
    do_snapshot(*recursive, recursive=True)


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter('[%(levelname)s](%(name)s): %(message)s'))
    if sys.stderr.isatty():
        # Interactive use: include a timestamp in log lines.
        console_handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s](%(name)s): %(message)s'))
    logger.addHandler(console_handler)

    # log uncaught exceptions
    def log_exceptions(type, value, tb):
        logger.error(value)
        sys.__excepthook__(type, value, tb)  # calls default excepthook

    sys.excepthook = log_exceptions

    parser = argparse.ArgumentParser(prog='zfssnap')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    subparsers = parser.add_subparsers()
    parser.set_defaults(cmd=autosnap)  # default action when no subcommand given
    rename_parser = subparsers.add_parser('rename')
    rename_parser.add_argument('snapshots', nargs='+')
    rename_parser.set_defaults(cmd=rename)
    prune_parser = subparsers.add_parser('prune')
    prune_parser.add_argument('--config', '-c', dest='config_files', nargs='*', default=list())
    prune_parser.add_argument('--dry-run', '-n', action='store_true', default=False)
    prune_parser.set_defaults(cmd=prune)
    args = parser.parse_args()

    if args.verbose <= 0:
        logger.setLevel(logging.WARNING)
    elif args.verbose <= 1:
        logger.setLevel(logging.INFO)
    else:
        logger.setLevel(logging.DEBUG)

    # Forward only the arguments the chosen subcommand actually declares.
    cmd_args = {}
    for copy in {'snapshots', 'dry_run'}:
        if copy in vars(args):
            cmd_args[copy] = vars(args)[copy]

    if 'config_files' in vars(args):
        def convert_timedelta(secs_str):
            secs = pytimeparse.parse(secs_str)
            if secs is None:
                raise ValueError(f'Could not parse timedelta expression ‘{secs_str}’')
            return timedelta(seconds=secs)

        config = configparser.ConfigParser(converters={
            'timedelta': convert_timedelta,
            'timezone': gettz
        })
        # Explicit --config wins; otherwise search XDG config paths.
        search_files = args.config_files if args.config_files else [*BaseDirectory.load_config_paths('zfssnap.ini')]
        read_files = config.read(search_files)

        def format_config_files(files):
            if not files:
                return 'no files'
            return ', '.join(map(lambda file: f'‘{file}’', files))

        if not read_files:
            raise Exception(f'Found no config files. Tried: {format_config_files(search_files)}')
        logger.debug(f'Read following config files: {format_config_files(read_files)}')
        cmd_args['config'] = config

    args.cmd(**cmd_args)


# Guard the entry point so importing the module has no side effects.
if __name__ == '__main__':
    sys.exit(main())