author:    Gregor Kleen <gkleen@yggdrasil.li>  2022-03-13 20:11:05 +0100
committer: Gregor Kleen <gkleen@yggdrasil.li>  2022-03-13 20:11:05 +0100
commit:    d4240be827e9527f6bb5a950b2563f495ae3849a (patch)
tree:      a543a040bad5d560f9e45ccb238d98e0fb9b245f /hosts/vidhar
parent:    c59727e3eb712b74fa99fd026a7941526a18f778 (diff)
download:  nixos-d4240be827e9527f6bb5a950b2563f495ae3849a.tar
           nixos-d4240be827e9527f6bb5a950b2563f495ae3849a.tar.gz
           nixos-d4240be827e9527f6bb5a950b2563f495ae3849a.tar.bz2
           nixos-d4240be827e9527f6bb5a950b2563f495ae3849a.tar.xz
           nixos-d4240be827e9527f6bb5a950b2563f495ae3849a.zip
...
Diffstat (limited to 'hosts/vidhar')
-rwxr-xr-x  hosts/vidhar/borg/copy.py  165
1 file changed, 79 insertions, 86 deletions
```diff
diff --git a/hosts/vidhar/borg/copy.py b/hosts/vidhar/borg/copy.py
index 324fea0b..4e9599b8 100755
--- a/hosts/vidhar/borg/copy.py
+++ b/hosts/vidhar/borg/copy.py
@@ -171,94 +171,87 @@ def copy_archive(src_repo_path, dst_repo_path, entry):
     else:
         print('Mounted', file=stderr)
         while True:
-            try:
-                with tqdm(total=total_size, unit_scale=True, unit_divisor=1024, unit='B', smoothing=0.01, disable=None, dynamic_ncols=True, maxinterval=0.5, miniters=1) as progress:
-                    seen = 0
-                    env = os.environ.copy()
-                    create_args = ['borg',
-                                   'create',
-                                   '--lock-wait=600',
-                                   '--one-file-system',
-                                   '--compression=auto,zstd,10',
-                                   '--chunker-params=10,23,16,4095',
-                                   '--files-cache=ctime,size',
-                                   '--show-rc',
-                                   # '--remote-ratelimit=20480',
-                                   '--log-json',
-                                   '--progress',
-                                   '--list',
-                                   '--filter=AMEi-x?',
-                                   '--stats'
-                                   ]
-                    archive_time = datetime.strptime(entry["time"], "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=tzlocal()).astimezone(tzutc())
-                    create_args += [f'--timestamp={archive_time.strftime("%Y-%m-%dT%H:%M:%S")}']
-                    if cache_suffix:
-                        env['BORG_FILES_CACHE_SUFFIX'] = cache_suffix
-                    else:
-                        create_args += ['--files-cache=disabled']
-                    create_args += [f'{dst_repo_path}::{entry["name"]}', '.']
-
-                    with subprocess.Popen(create_args, stdin=subprocess.DEVNULL, stderr=subprocess.PIPE, text=True, env=env, preexec_fn=lambda: as_borg(caps={Cap.DAC_READ_SEARCH}, cwd=dir)) as proc:
-                        last_list = None
-                        last_list_time = None
-                        for line in proc.stderr:
-                            try:
-                                json_line = json.loads(line)
-                            except json.decoder.JSONDecodeError:
-                                tqdm.write(line)
-                                continue
-
-                            t = ''
-                            if 'time' in json_line and stderr.isatty():
-                                ts = datetime.fromtimestamp(json_line['time']).replace(tzinfo=tzlocal())
-                                t = f'{ts.isoformat(timespec="minutes")} '
-                            if json_line['type'] == 'archive_progress':
-                                if last_list_time is None or ((datetime.now() - last_list_time) // timedelta(seconds=3)) % 2 == 1:
-                                    if 'path' in json_line and json_line['path']:
-                                        progress.set_description(f'… {json_line["path"]}', refresh=False)
-                                    else:
-                                        progress.set_description(None, refresh=False)
-                                elif last_list is not None:
-                                    progress.set_description(last_list, refresh=False)
-                                nfiles=json_line["nfiles"]
-                                if total_files is not None:
-                                    nfiles=f'{json_line["nfiles"]}/{total_files}'
-                                progress.set_postfix(compressed=naturalsize(json_line['compressed_size'], binary=True), deduplicated=naturalsize(json_line['deduplicated_size'], binary=True), nfiles=nfiles, refresh=False)
-                                progress.update(json_line["original_size"] - seen)
-                                seen = json_line["original_size"]
-                            elif json_line['type'] == 'file_status':
-                                # tqdm.write(t + f'{json_line["status"]} {json_line["path"]}')
-                                last_list = f'{json_line["status"]} {json_line["path"]}'
-                                last_list_time = datetime.now()
-                                progress.set_description(last_list, refresh=False)
-                                if not stderr.isatty():
-                                    print(last_list, file=stderr)
-                            elif (json_line['type'] == 'log_message' or json_line['type'] == 'progress_message' or json_line['type'] == 'progress_percent') and ('message' in json_line or 'msgid' in json_line):
-                                if 'message' in json_line:
-                                    tqdm.write(t + json_line['message'])
-                                elif 'msgid' in json_line:
-                                    tqdm.write(t + json_line['msgid'])
-                            else:
-                                tqdm.write(t + line)
-                        progress.set_description(None)
-                        if proc.wait() != 0:
-                            continue
-            except subprocess.CalledProcessError as err:
-                print(err, file=stderr)
-
-                dst = None
-                try:
-                    dst = read_repo(args.target)
-                except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err:
-                    print(err, file=stderr)
-                    continue
-                else:
-                    if any(map(lambda other: entry['name'] == other['name'], dst)):
-                        break
-
-                continue
-            else:
-                break
+            with tqdm(total=total_size, unit_scale=True, unit_divisor=1024, unit='B', smoothing=0.01, disable=None, dynamic_ncols=True, maxinterval=0.5, miniters=1) as progress:
+                seen = 0
+                env = os.environ.copy()
+                create_args = ['borg',
+                               'create',
+                               '--lock-wait=600',
+                               '--one-file-system',
+                               '--compression=auto,zstd,10',
+                               '--chunker-params=10,23,16,4095',
+                               '--files-cache=ctime,size',
+                               '--show-rc',
+                               # '--remote-ratelimit=20480',
+                               '--log-json',
+                               '--progress',
+                               '--list',
+                               '--filter=AMEi-x?',
+                               '--stats'
+                               ]
+                archive_time = datetime.strptime(entry["time"], "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=tzlocal()).astimezone(tzutc())
+                create_args += [f'--timestamp={archive_time.strftime("%Y-%m-%dT%H:%M:%S")}']
+                if cache_suffix:
+                    env['BORG_FILES_CACHE_SUFFIX'] = cache_suffix
+                else:
+                    create_args += ['--files-cache=disabled']
+                create_args += [f'{dst_repo_path}::{entry["name"]}', '.']
+
+                with subprocess.Popen(create_args, stdin=subprocess.DEVNULL, stderr=subprocess.PIPE, text=True, env=env, preexec_fn=lambda: as_borg(caps={Cap.DAC_READ_SEARCH}, cwd=dir)) as proc:
+                    last_list = None
+                    last_list_time = None
+                    for line in proc.stderr:
+                        try:
+                            json_line = json.loads(line)
+                        except json.decoder.JSONDecodeError:
+                            tqdm.write(line)
+                            continue
+
+                        t = ''
+                        if 'time' in json_line and stderr.isatty():
+                            ts = datetime.fromtimestamp(json_line['time']).replace(tzinfo=tzlocal())
+                            t = f'{ts.isoformat(timespec="minutes")} '
+                        if json_line['type'] == 'archive_progress':
+                            if last_list_time is None or ((datetime.now() - last_list_time) // timedelta(seconds=3)) % 2 == 1:
+                                if 'path' in json_line and json_line['path']:
+                                    progress.set_description(f'… {json_line["path"]}', refresh=False)
+                                else:
+                                    progress.set_description(None, refresh=False)
+                            elif last_list is not None:
+                                progress.set_description(last_list, refresh=False)
+                            nfiles=json_line["nfiles"]
+                            if total_files is not None:
+                                nfiles=f'{json_line["nfiles"]}/{total_files}'
+                            progress.set_postfix(compressed=naturalsize(json_line['compressed_size'], binary=True), deduplicated=naturalsize(json_line['deduplicated_size'], binary=True), nfiles=nfiles, refresh=False)
+                            progress.update(json_line["original_size"] - seen)
+                            seen = json_line["original_size"]
+                        elif json_line['type'] == 'file_status':
+                            # tqdm.write(t + f'{json_line["status"]} {json_line["path"]}')
+                            last_list = f'{json_line["status"]} {json_line["path"]}'
+                            last_list_time = datetime.now()
+                            progress.set_description(last_list, refresh=False)
+                            if not stderr.isatty():
+                                print(last_list, file=stderr)
+                        elif (json_line['type'] == 'log_message' or json_line['type'] == 'progress_message' or json_line['type'] == 'progress_percent') and ('message' in json_line or 'msgid' in json_line):
+                            if 'message' in json_line:
+                                tqdm.write(t + json_line['message'])
+                            elif 'msgid' in json_line:
+                                tqdm.write(t + json_line['msgid'])
+                        else:
+                            tqdm.write(t + line)
+                    progress.set_description(None)
+                    if proc.wait() != 0:
+                        dst = None
+                        try:
+                            dst = read_repo(args.target)
+                        except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err:
+                            print(err, file=stderr)
+                            continue
+                        else:
+                            if any(map(lambda other: entry['name'] == other['name'], dst)):
+                                break
+
+                        continue
         mount_proc.terminate()
         os._exit(0)
     else:
```
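
The hunk keeps the script's pattern of driving `borg create --log-json --progress` through `subprocess.Popen` and rendering the JSON messages borg writes to stderr with `tqdm`. Below is a minimal, self-contained sketch of that pattern; the argument list and names such as `total_size`, `repo_and_archive`, and `src_dir` are illustrative placeholders, not the script's full configuration:

```python
# Sketch: run `borg create --log-json --progress` and render its JSON
# stderr stream as a tqdm progress bar. Placeholder names: total_size,
# repo_and_archive, src_dir.
import json
import subprocess

from tqdm import tqdm

def borg_create_with_progress(repo_and_archive, src_dir, total_size):
    args = ['borg', 'create', '--log-json', '--progress', '--stats',
            repo_and_archive, src_dir]
    with tqdm(total=total_size, unit='B', unit_scale=True,
              unit_divisor=1024) as progress:
        seen = 0
        with subprocess.Popen(args, stdin=subprocess.DEVNULL,
                              stderr=subprocess.PIPE, text=True) as proc:
            for line in proc.stderr:
                try:
                    msg = json.loads(line)
                except json.decoder.JSONDecodeError:
                    tqdm.write(line.rstrip('\n'))  # plain, non-JSON output
                    continue
                # 'archive_progress' messages carry cumulative sizes, so
                # feed tqdm the delta since the previous message.
                if msg.get('type') == 'archive_progress' and 'original_size' in msg:
                    progress.update(msg['original_size'] - seen)
                    seen = msg['original_size']
                elif msg.get('type') == 'file_status':
                    tqdm.write(f"{msg['status']} {msg['path']}")
                elif 'message' in msg:
                    tqdm.write(msg['message'])
        return proc.returncode
```

Because borg reports cumulative `original_size` values rather than increments, the sketch, like `copy.py`, remembers the last value in `seen` and passes only the delta to `progress.update`.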
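
The substantive change in this hunk is where failure handling lives: the old code wrapped the whole copy in `try`/`except subprocess.CalledProcessError`, while the new code checks `proc.wait()` directly and, on a nonzero exit, re-reads the destination repository to see whether the archive was created despite the error before deciding to retry. A hedged sketch of that retry-and-verify idea, not the commit's exact control flow, with `run_copy` and `list_archives` as hypothetical stand-ins for the script's borg invocation and its `read_repo()` helper:

```python
# Sketch of a retry-and-verify loop. `run_copy` returns the borg exit
# code; `list_archives` mimics read_repo() by returning parsed archive
# entries from something like `borg list --json`.
import json
import subprocess

def copy_until_present(run_copy, list_archives, archive_name):
    while True:
        if run_copy() == 0:
            break  # clean exit: nothing left to verify
        try:
            existing = list_archives()
        except (subprocess.CalledProcessError, json.decoder.JSONDecodeError) as err:
            print(err)
            continue  # could not verify, so retry the copy
        if any(other['name'] == archive_name for other in existing):
            break  # archive landed despite the bad exit code
```

The membership test is the generator-expression form of the script's `any(map(lambda other: entry['name'] == other['name'], dst))`.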