From 01a28fd186c9f03aeea1e21a778d1c1a3e07f3f6 Mon Sep 17 00:00:00 2001 From: Gregor Kleen Date: Thu, 5 Oct 2023 21:47:33 +0200 Subject: ... --- overlays/worktime/poetry.lock | 32 +++++---- overlays/worktime/pyproject.toml | 1 + overlays/worktime/worktime/__main__.py | 123 ++++++++++++++++++++++----------- 3 files changed, 102 insertions(+), 54 deletions(-) (limited to 'overlays/worktime') diff --git a/overlays/worktime/poetry.lock b/overlays/worktime/poetry.lock index eab1d070..54182b09 100644 --- a/overlays/worktime/poetry.lock +++ b/overlays/worktime/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -16,7 +15,6 @@ files = [ name = "certifi" version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -28,7 +26,6 @@ files = [ name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -113,7 +110,6 @@ files = [ name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -121,11 +117,26 @@ files = [ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +[[package]] +name = "jsonpickle" +version = "3.0.2" +description = "Python library for serializing any arbitrary object graph into JSON" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpickle-3.0.2-py3-none-any.whl", hash = "sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f"}, + {file = "jsonpickle-3.0.2.tar.gz", hash = "sha256:e37abba4bfb3ca4a4647d28bb9f4706436f7b46c8a8333b4a718abafa8e46b37"}, +] + +[package.extras] +docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] +testing-libs = ["simplejson", "ujson"] + [[package]] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -140,7 +151,6 @@ six = ">=1.5" name = "pyxdg" version = "0.28" description = "PyXDG contains implementations of freedesktop.org standards in python." -category = "main" optional = false python-versions = "*" files = [ @@ -152,7 +162,6 @@ files = [ name = "requests" version = "2.28.2" description = "Python HTTP for Humans." 
-category = "main" optional = false python-versions = ">=3.7, <4" files = [ @@ -174,7 +183,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -186,7 +194,6 @@ files = [ name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -201,7 +208,6 @@ widechars = ["wcwidth"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -213,7 +219,6 @@ files = [ name = "uritools" version = "4.0.1" description = "URI parsing, classification and composition" -category = "main" optional = false python-versions = "~=3.7" files = [ @@ -225,7 +230,6 @@ files = [ name = "urllib3" version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -241,4 +245,4 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "0d556c1b7f4ca6764a006e10ef9949359911925a9dae09d25a3c3d26d8966790" +content-hash = "d9137b4f8e37bba934abf732e4a2aeeb9924c4b6576830d8ae08bdb43b4e147f" diff --git a/overlays/worktime/pyproject.toml b/overlays/worktime/pyproject.toml index 61257422..08002d4d 100644 --- a/overlays/worktime/pyproject.toml +++ b/overlays/worktime/pyproject.toml @@ -13,6 +13,7 @@ requests = "^2.28.2" tabulate = "^0.9.0" backoff = "^2.2.1" toml = "^0.10.2" +jsonpickle = "^3.0.2" [tool.poetry.scripts] worktime = "worktime.__main__:main" diff --git a/overlays/worktime/worktime/__main__.py b/overlays/worktime/worktime/__main__.py index 84c8a8e2..0264b8a8 100755 --- a/overlays/worktime/worktime/__main__.py +++ b/overlays/worktime/worktime/__main__.py @@ -25,7 +25,7 @@ from sys import stderr from tabulate import tabulate -from itertools import groupby +from itertools import groupby, count from functools import cache, partial import backoff @@ -34,6 +34,11 @@ from pathlib import Path from collections import defaultdict +import shelve +import jsonpickle +from hashlib import blake2s + +shelve_d = shelve.open(str(Path(BaseDirectory.save_cache_path('worktime')) / 'entry_durations')) class TogglAPISection(Enum): TOGGL = '/api/v8' @@ -90,46 +95,79 @@ class TogglAPI(object): return response + def entry_durations(self, start_date, *, end_date, rounding=False, client_ids): + if client_ids is not None and not client_ids: + return + + step = timedelta(days = 120) + for req_start in (start_date + step * i for i in count()): + req_start = min(req_start, end_date) + req_end = min(req_start + step, end_date) + if req_end <= req_start: + break + # if end_date > req_start + step: + # req_end = datetime.combine((req_start + 
step).astimezone(timezone.utc).date(), time(tzinfo=timezone.utc)) + # elif req_start > start_date: + # req_start = datetime.combine(req_start.astimezone(timezone.utc).date(), time(tzinfo=timezone.utc)) + timedelta(days = 1) + + cache_key = None + if req_end + timedelta(days=60) < datetime.now().astimezone(timezone.utc): + cache_key = blake2s(jsonpickle.encode({ + 'start': req_start, + 'end': req_end, + 'rounding': rounding, + 'clients': client_ids, + 'workspace': self._workspace_id, + 'workspace_clients': self._client_ids + }).encode('utf-8'), key = self._api_token.encode('utf-8')).hexdigest() + if cache_key in shelve_d: + yield from shelve_d[cache_key] + continue + + entries = list() + params = { 'since': (req_start - timedelta(days=1)).astimezone(timezone.utc).isoformat(), + 'until': (req_end + timedelta(days=1)).astimezone(timezone.utc).isoformat(), + 'rounding': rounding, + 'billable': 'yes' + } + if client_ids is not None: + params |= { 'client_ids': ','.join(map(str, client_ids)) } + for page in count(start = 1): + url = self._make_url(api = TogglAPISection.REPORTS, section = ['details'], params = params | { 'page': page }) + r = self._query(url = url, method='GET') + if not r or not r.json(): + raise TogglAPIError(r) + report = r.json() + for entry in report['data']: + start = isoparse(entry['start']) + end = isoparse(entry['end']) + + if start > req_end or end < req_start: + continue + + x = min(end, req_end) - max(start, req_start) + if cache_key: + entries.append(x) + yield x + if not report['data']: + break + + if cache_key: + shelve_d[cache_key] = entries + # res = timedelta(milliseconds=report['total_billable']) if report['total_billable'] else timedelta(milliseconds=0) + # return res + def get_billable_hours(self, start_date, end_date=datetime.now(timezone.utc), rounding=False): billable_acc = timedelta(milliseconds = 0) - step = timedelta(days = 365) - - for req_start in [start_date + x * step for x in range(0, ceil((end_date - start_date) / step))]: 
- req_end = end_date - if end_date > req_start + step: - req_end = datetime.combine((req_start + step).astimezone(timezone.utc).date(), time(tzinfo=timezone.utc)) - elif req_start > start_date: - req_start = datetime.combine(req_start.astimezone(timezone.utc).date(), time(tzinfo=timezone.utc)) + timedelta(days = 1) - - def get_report(client_ids = self._client_ids): - nonlocal req_start, req_end, rounding, self - - if client_ids is not None and not client_ids: - return timedelta(milliseconds = 0) - - params = { 'since': req_start.astimezone(timezone.utc).isoformat(), - 'until': req_end.astimezone(timezone.utc).isoformat(), - 'rounding': rounding, - 'billable': 'yes' - } - if client_ids is not None: - params |= { 'client_ids': ','.join(map(str, client_ids)) } - url = self._make_url(api = TogglAPISection.REPORTS, section = ['summary'], params = params) - r = self._query(url = url, method='GET') - if not r or not r.json(): - raise TogglAPIError(r) - res = timedelta(milliseconds=r.json()['total_billable']) if r.json()['total_billable'] else timedelta(milliseconds=0) - return res - - if 0 in self._client_ids: - url = self._make_url(api = TogglAPISection.TOGGL, section = ['workspaces', self._workspace_id, 'clients']) - r = self._query(url = url, method = 'GET') - if not r or not r.json(): - raise TogglAPIError(r) - - billable_acc += get_report(None) - get_report(set(map(lambda c: c['id'], r.json()))) - - billable_acc += get_report(self._client_ids - {0}) + if 0 in self._client_ids: + url = self._make_url(api = TogglAPISection.TOGGL, section = ['workspaces', self._workspace_id, 'clients']) + r = self._query(url = url, method = 'GET') + if not r or not r.json(): + raise TogglAPIError(r) + + billable_acc += sum(self.entry_durations(start_date, end_date=end_date, rounding=rounding, client_ids=None), start=timedelta(milliseconds=0)) - sum(self.entry_durations(start_date, end_date=end_date, rounding=rounding, client_ids=frozenset(map(lambda c: c['id'], r.json()))), 
start=timedelta(milliseconds=0)) + + billable_acc += sum(self.entry_durations(start_date, end_date=end_date, rounding=rounding, client_ids=frozenset(self._client_ids - {0})), start=timedelta(milliseconds=0)) return billable_acc @@ -512,6 +550,7 @@ def time_worked(now, **args): then = Worktime(**dict(args, now = then)) now = Worktime(**dict(args, now = now)) + print(now.time_worked) worked = now.time_worked - then.time_worked if args['do_round']: @@ -730,10 +769,14 @@ def classification(classification_name, table, table_format, **args): )) def main(): + def isotime(s): + return datetime.fromisoformat(s).replace(tzinfo=tzlocal()) + config = Worktime.config() parser = argparse.ArgumentParser(prog = "worktime", description = 'Track worktime using toggl API') - parser.add_argument('--time', dest = 'now', metavar = 'TIME', type = lambda s: datetime.fromisoformat(s).replace(tzinfo=tzlocal()), help = 'Time to calculate status for (default: current time)', default = datetime.now(tzlocal())) + parser.add_argument('--time', dest = 'now', metavar = 'TIME', type = isotime, help = 'Time to calculate status for (default: current time)', default = datetime.now(tzlocal())) + parser.add_argument('--start', dest = 'start_datetime', metavar = 'TIME', type = isotime, help = 'Time to calculate status from (default: None)', default = None) parser.add_argument('--no-running', dest = 'include_running', action = 'store_false') parser.add_argument('--no-force-day-to-work', dest = 'force_day_to_work', action = 'store_false') subparsers = parser.add_subparsers(help = 'Subcommands') -- cgit v1.2.3