Diffstat (limited to 'overlays/worktime')
-rw-r--r--  overlays/worktime/.envrc                    4
-rw-r--r--  overlays/worktime/.gitignore                2
-rw-r--r--  overlays/worktime/default.nix              26
-rw-r--r--  overlays/worktime/poetry.lock             248
-rw-r--r--  overlays/worktime/pyproject.toml           41
-rw-r--r--  overlays/worktime/uv.lock                 248
-rwxr-xr-x  overlays/worktime/worktime/__main__.py    669
7 files changed, 747 insertions, 491 deletions
diff --git a/overlays/worktime/.envrc b/overlays/worktime/.envrc
new file mode 100644
index 00000000..2c909235
--- /dev/null
+++ b/overlays/worktime/.envrc
@@ -0,0 +1,4 @@
+use flake
+
+[[ -d ".venv" ]] || ( uv venv && uv sync )
+. .venv/bin/activate
diff --git a/overlays/worktime/.gitignore b/overlays/worktime/.gitignore
new file mode 100644
index 00000000..4ccfae70
--- /dev/null
+++ b/overlays/worktime/.gitignore
@@ -0,0 +1,2 @@
+.venv
+**/__pycache__
diff --git a/overlays/worktime/default.nix b/overlays/worktime/default.nix
index 5ecdf149..579cf7ad 100644
--- a/overlays/worktime/default.nix
+++ b/overlays/worktime/default.nix
@@ -1,13 +1,19 @@
-{ prev, ... }:
+{ prev, final, flake, flakeInputs, ... }:
 
-with prev.poetry2nix;
-
-{
-  worktime = mkPoetryApplication {
-    python = prev.python310;
-
-    projectDir = cleanPythonSources { src = ./.; };
-
-    meta.mainProgram = "worktime";
+let
+  workspace = flakeInputs.uv2nix.lib.workspace.loadWorkspace { workspaceRoot = ./.; };
+  pythonSet = flake.lib.pythonSet {
+    pkgs = final;
+    python = final.python312;
+    overlay = workspace.mkPyprojectOverlay {
+      sourcePreference = "wheel";
+    };
   };
+  virtualEnv = pythonSet.mkVirtualEnv "worktime" workspace.deps.default;
+in {
+  worktime = virtualEnv.overrideAttrs (oldAttrs: {
+    meta = (oldAttrs.meta or {}) // {
+      mainProgram = "worktime";
+    };
+  });
 }
diff --git a/overlays/worktime/poetry.lock b/overlays/worktime/poetry.lock
deleted file mode 100644
index 54182b09..00000000
--- a/overlays/worktime/poetry.lock
+++ /dev/null
@@ -1,248 +0,0 @@
1# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
2
3[[package]]
4name = "backoff"
5version = "2.2.1"
6description = "Function decoration for backoff and retry"
7optional = false
8python-versions = ">=3.7,<4.0"
9files = [
10 {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
11 {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
12]
13
14[[package]]
15name = "certifi"
16version = "2022.12.7"
17description = "Python package for providing Mozilla's CA Bundle."
18optional = false
19python-versions = ">=3.6"
20files = [
21 {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
22 {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
23]
24
25[[package]]
26name = "charset-normalizer"
27version = "3.1.0"
28description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
29optional = false
30python-versions = ">=3.7.0"
31files = [
32 {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
33 {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
34 {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
35 {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
36 {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
37 {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
38 {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
39 {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
40 {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
41 {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
42 {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
43 {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
44 {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
45 {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
46 {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
47 {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
48 {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
49 {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
50 {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
51 {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
52 {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
53 {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
54 {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
55 {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
56 {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
57 {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
58 {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
59 {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
60 {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
61 {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
62 {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
63 {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
64 {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
65 {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
66 {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
67 {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
68 {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
69 {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
70 {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
71 {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
72 {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
73 {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
74 {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
75 {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
76 {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
77 {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
78 {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
79 {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
80 {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
81 {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
82 {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
83 {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
84 {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
85 {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
86 {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
87 {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
88 {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
89 {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
90 {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
91 {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
92 {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
93 {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
94 {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
95 {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
96 {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
97 {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
98 {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
99 {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
100 {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
101 {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
102 {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
103 {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
104 {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
105 {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
106 {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
107]
108
109[[package]]
110name = "idna"
111version = "3.4"
112description = "Internationalized Domain Names in Applications (IDNA)"
113optional = false
114python-versions = ">=3.5"
115files = [
116 {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
117 {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
118]
119
120[[package]]
121name = "jsonpickle"
122version = "3.0.2"
123description = "Python library for serializing any arbitrary object graph into JSON"
124optional = false
125python-versions = ">=3.7"
126files = [
127 {file = "jsonpickle-3.0.2-py3-none-any.whl", hash = "sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f"},
128 {file = "jsonpickle-3.0.2.tar.gz", hash = "sha256:e37abba4bfb3ca4a4647d28bb9f4706436f7b46c8a8333b4a718abafa8e46b37"},
129]
130
131[package.extras]
132docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"]
133testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"]
134testing-libs = ["simplejson", "ujson"]
135
136[[package]]
137name = "python-dateutil"
138version = "2.8.2"
139description = "Extensions to the standard Python datetime module"
140optional = false
141python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
142files = [
143 {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
144 {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
145]
146
147[package.dependencies]
148six = ">=1.5"
149
150[[package]]
151name = "pyxdg"
152version = "0.28"
153description = "PyXDG contains implementations of freedesktop.org standards in python."
154optional = false
155python-versions = "*"
156files = [
157 {file = "pyxdg-0.28-py2.py3-none-any.whl", hash = "sha256:bdaf595999a0178ecea4052b7f4195569c1ff4d344567bccdc12dfdf02d545ab"},
158 {file = "pyxdg-0.28.tar.gz", hash = "sha256:3267bb3074e934df202af2ee0868575484108581e6f3cb006af1da35395e88b4"},
159]
160
161[[package]]
162name = "requests"
163version = "2.28.2"
164description = "Python HTTP for Humans."
165optional = false
166python-versions = ">=3.7, <4"
167files = [
168 {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
169 {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
170]
171
172[package.dependencies]
173certifi = ">=2017.4.17"
174charset-normalizer = ">=2,<4"
175idna = ">=2.5,<4"
176urllib3 = ">=1.21.1,<1.27"
177
178[package.extras]
179socks = ["PySocks (>=1.5.6,!=1.5.7)"]
180use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
181
182[[package]]
183name = "six"
184version = "1.16.0"
185description = "Python 2 and 3 compatibility utilities"
186optional = false
187python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
188files = [
189 {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
190 {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
191]
192
193[[package]]
194name = "tabulate"
195version = "0.9.0"
196description = "Pretty-print tabular data"
197optional = false
198python-versions = ">=3.7"
199files = [
200 {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
201 {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
202]
203
204[package.extras]
205widechars = ["wcwidth"]
206
207[[package]]
208name = "toml"
209version = "0.10.2"
210description = "Python Library for Tom's Obvious, Minimal Language"
211optional = false
212python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
213files = [
214 {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
215 {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
216]
217
218[[package]]
219name = "uritools"
220version = "4.0.1"
221description = "URI parsing, classification and composition"
222optional = false
223python-versions = "~=3.7"
224files = [
225 {file = "uritools-4.0.1-py3-none-any.whl", hash = "sha256:d122d394ed6e6e15ac0fddba6a5b19e9fa204e7797507815cbfb0e1455ac0475"},
226 {file = "uritools-4.0.1.tar.gz", hash = "sha256:efc5c3a6de05404850685a8d3f34da8476b56aa3516fbf8eff5c8704c7a2826f"},
227]
228
229[[package]]
230name = "urllib3"
231version = "1.26.15"
232description = "HTTP library with thread-safe connection pooling, file post, and more."
233optional = false
234python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
235files = [
236 {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"},
237 {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"},
238]
239
240[package.extras]
241brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
242secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
243socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
244
245[metadata]
246lock-version = "2.0"
247python-versions = "^3.10"
248content-hash = "d9137b4f8e37bba934abf732e4a2aeeb9924c4b6576830d8ae08bdb43b4e147f"
diff --git a/overlays/worktime/pyproject.toml b/overlays/worktime/pyproject.toml
index 08002d4d..42da51f5 100644
--- a/overlays/worktime/pyproject.toml
+++ b/overlays/worktime/pyproject.toml
@@ -1,23 +1,28 @@
-[tool.poetry]
+[project]
 name = "worktime"
-version = "0.1.0"
-description = ""
-authors = ["Gregor Kleen <gkleen@yggdrasil.li>"]
+version = "1.0.0"
+requires-python = "~=3.12"
+dependencies = [
+    "pyxdg>=0.28,<0.29",
+    "python-dateutil>=2.9.0.post0,<3",
+    "uritools>=4.0.3,<5",
+    "requests>=2.32.3,<3",
+    "tabulate>=0.9.0,<0.10",
+    "toml>=0.10.2,<0.11",
+    "jsonpickle>=4.0.5,<5",
+    "frozendict>=2.4.6",
+    "atomicwriter>=0.2.5",
+    "desktop-notify>=1.3.3",
+]
 
-[tool.poetry.dependencies]
-python = "^3.10"
-pyxdg = "^0.28"
-python-dateutil = "^2.8.2"
-uritools = "^4.0.1"
-requests = "^2.28.2"
-tabulate = "^0.9.0"
-backoff = "^2.2.1"
-toml = "^0.10.2"
-jsonpickle = "^3.0.2"
-
-[tool.poetry.scripts]
+[project.scripts]
 worktime = "worktime.__main__:main"
+worktime-ui = "worktime.__main__:ui"
+worktime-stop = "worktime.__main__:stop"
 
 [build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
\ No newline at end of file
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[dependency-groups]
+dev = []
diff --git a/overlays/worktime/uv.lock b/overlays/worktime/uv.lock
new file mode 100644
index 00000000..39de4ccf
--- /dev/null
+++ b/overlays/worktime/uv.lock
@@ -0,0 +1,248 @@
1version = 1
2revision = 2
3requires-python = ">=3.12, <4"
4
5[[package]]
6name = "atomicwriter"
7version = "0.2.5"
8source = { registry = "https://pypi.org/simple" }
9sdist = { url = "https://files.pythonhosted.org/packages/50/b4/dd04e186eb244d1ed84b1d0ebfba19ddc7f8886b98e345aaca4208b031d2/atomicwriter-0.2.5.tar.gz", hash = "sha256:5ced6afb0579377a13e191b17a16115e14c30ec00e6c38b60403f58235a867af", size = 64990, upload-time = "2025-05-24T20:35:42.538Z" }
10wheels = [
11 { url = "https://files.pythonhosted.org/packages/99/7c/672a0de09b0b355a2ffa521ef25cf106f1984823379dee37f7305fdc1774/atomicwriter-0.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1fab874e62ebe96f1af0e965dc1e92c4c1ef2e2e9612a444371b8fc751ec43", size = 234141, upload-time = "2025-05-24T20:34:32.74Z" },
12 { url = "https://files.pythonhosted.org/packages/b9/0c/e1c5bad033284c212c0a77121b48dd4147f80e9a7cd82a9d2ce0a2160901/atomicwriter-0.2.5-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:8dbb67cc730be7d6bdfd5e991271bc17052be8fb2e4fa27854b47d8a76d36349", size = 245788, upload-time = "2025-05-24T20:34:33.897Z" },
13 { url = "https://files.pythonhosted.org/packages/f4/d3/7036e203cc5fc4c49bf916b4ba158e0d2779de127afad5963edd7e3b9400/atomicwriter-0.2.5-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:a4e7f81932839c738425dc96ad98e4a7511b740cd3d75f480bfabbcf8e6f7eae", size = 260428, upload-time = "2025-05-24T20:34:35.533Z" },
14 { url = "https://files.pythonhosted.org/packages/e5/b9/9a4d235a8d67fb442302dc0f3ea2394b7bd994bfc99b1dc0f744c7852418/atomicwriter-0.2.5-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:de37a3a5d1b57b719cfb0b81a11cab2114acfdc2c36051bf0af72d05eb644411", size = 263648, upload-time = "2025-05-24T20:34:36.72Z" },
15 { url = "https://files.pythonhosted.org/packages/71/7c/32d4ddad53375de42f3e972bb0633ec76f2c31772f2e508479d4788651d9/atomicwriter-0.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b925e55750092fd482565b6068b8c8366fd79de526681af9e58eb209f0deeca", size = 323775, upload-time = "2025-05-24T20:34:37.968Z" },
16 { url = "https://files.pythonhosted.org/packages/06/fe/6a226368a3f7ea30001fbd165f6a97f28c8f1a884896357b3d694983f5d2/atomicwriter-0.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:538f78f25e01584535782397211c66b8b3c9de90c2d1fc01a668ddce73dd0cb2", size = 340819, upload-time = "2025-05-24T20:34:39.63Z" },
17 { url = "https://files.pythonhosted.org/packages/92/95/b035b2296c483fde5392c629e0b6e3844eba6e54ea965c4b8827379b0893/atomicwriter-0.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:1d2d49a1b94ea7b289be9f7134d756bfb0bbf53eb0e58411334ed1b9958abe5e", size = 152789, upload-time = "2025-05-24T20:34:40.905Z" },
18 { url = "https://files.pythonhosted.org/packages/da/25/caa0959ae8ce24763e24e1f45be6cb897414545d224a155f929d496d6812/atomicwriter-0.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3f5490fd5bec378509521f7c2a19a64031a0de07d368d76733c3f76a0b9f026b", size = 233830, upload-time = "2025-05-24T20:34:42.532Z" },
19 { url = "https://files.pythonhosted.org/packages/d2/76/3c41bfd4fd74bc63bec29f05a806a767258eea7cf151496b4ab015cb5323/atomicwriter-0.2.5-cp313-cp313-macosx_11_0_x86_64.whl", hash = "sha256:a4dada83ff1255c7e640363cc2a4399ab9a822d4dbc9c18f55bbf0c8b12ce056", size = 245461, upload-time = "2025-05-24T20:34:44.454Z" },
20 { url = "https://files.pythonhosted.org/packages/c3/1e/5512dbdfdc3f4ab12f5923c50ae4765cc2fc65a9f112bb9dccbcbe60b395/atomicwriter-0.2.5-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ef2cf15e67513f05ad37d4cec48e403982c6b3c07f491472effd76d2157de7e2", size = 259892, upload-time = "2025-05-24T20:34:45.688Z" },
21 { url = "https://files.pythonhosted.org/packages/e5/1d/2382b6cacb119115828eb519697a555900bcfdb062efeb0f82603295402d/atomicwriter-0.2.5-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:73618f74c3c5f5401d3da0a3cd3043f23de5b6bb4a3d85bc580940a441355d25", size = 263125, upload-time = "2025-05-24T20:34:47.205Z" },
22 { url = "https://files.pythonhosted.org/packages/07/d7/c4d68386161870db4a8d0452f0655a19902fa435b749c12e6ef800e89b19/atomicwriter-0.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbd5eda80710ddac7aefb421c79cef6b905852a827e764f0f12fcbaa88919f7a", size = 323503, upload-time = "2025-05-24T20:34:48.417Z" },
23 { url = "https://files.pythonhosted.org/packages/b7/08/0fc03c0736ab8466e1b47a3ee17a528da18019cff93b7c4c2b33df82c19e/atomicwriter-0.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4776aaca40bc3040c3716c2adad74625c42285083ff31e8bf24a95315225c7b", size = 340156, upload-time = "2025-05-24T20:34:50.389Z" },
24 { url = "https://files.pythonhosted.org/packages/fa/09/7ba888cf4d90bcabd9e82db3bdb9de50e4ef072e0ea0d375cd1931b79349/atomicwriter-0.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:225ed1fbfa1996d9b0b2252f8a5d81263e51cbc797086d830f488c35b1d2ab42", size = 152274, upload-time = "2025-05-24T20:34:51.785Z" },
25 { url = "https://files.pythonhosted.org/packages/2a/70/07d2ba2e0a126cfecfbfed46baf599c9e2155f4c8338fed4d3ae0041b133/atomicwriter-0.2.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:63b55982cfa47232f179689933bf003eefb2bd33464235883ed3ce7322cf38f3", size = 232879, upload-time = "2025-05-24T20:34:53.195Z" },
26 { url = "https://files.pythonhosted.org/packages/f6/4d/397eb5435917135df93b339d849884bb1125896b1e15163c5244aa590336/atomicwriter-0.2.5-cp313-cp313t-macosx_11_0_x86_64.whl", hash = "sha256:e33f40b2a27f8831beeabb485923acb6dd067cc70bba1a63096749b3dc4747ff", size = 244386, upload-time = "2025-05-24T20:34:54.852Z" },
27 { url = "https://files.pythonhosted.org/packages/8b/01/73f0b683fa55e61dd29d30e48e9a75ddb049e6dad0ac4ae1a29dbc05f21e/atomicwriter-0.2.5-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c646e115e88147d71f845a005fc53910f22c4dc65bd634768cb90b7f34259359", size = 258255, upload-time = "2025-05-24T20:34:56.046Z" },
28 { url = "https://files.pythonhosted.org/packages/4b/19/692387c1fb1b8714a9b2fab99a58850fd4136bed988814c8ff74d0c8de02/atomicwriter-0.2.5-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:47f974e986ff6514351c3ea75041009a514be0c34c225c062b0ad8a28ec9c0a3", size = 261768, upload-time = "2025-05-24T20:34:57.795Z" },
29 { url = "https://files.pythonhosted.org/packages/3e/f2/4d466f52ee635cc54011713272f302584c6d1ce612c331d9989fa6fa672f/atomicwriter-0.2.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e1db8b9004cd3f628166e83b25eb814b82345f9d6bc15e99b6d201c355455b45", size = 321975, upload-time = "2025-05-24T20:34:59.45Z" },
30 { url = "https://files.pythonhosted.org/packages/84/ad/0189ad9783ca6609df47e06cc0cd22866a8073d46478f59c6ab3ec13e0fb/atomicwriter-0.2.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a7da4a114121ab865663578b801a0520b2b518d4591af0bd294f6aac0dad243b", size = 338946, upload-time = "2025-05-24T20:35:01.501Z" },
31 { url = "https://files.pythonhosted.org/packages/94/79/2c4d8f75eeb09192cf572957f031271998f3c985fabd79d513fff66ac715/atomicwriter-0.2.5-cp313-cp313t-win_amd64.whl", hash = "sha256:7aab4b3956cc17219e7e4da76e8a1bceb3d3aeaf03234f89b90e234a2adcf27b", size = 151571, upload-time = "2025-05-24T20:35:02.747Z" },
32 { url = "https://files.pythonhosted.org/packages/32/19/d6a686d189c3577e7f08b33df398b959c24bf74b3cec34359104db1a24ff/atomicwriter-0.2.5-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d0fccac2dfe5d884d97edbda28be9c16d55faee9bdf66f53a99384ac387cc43", size = 239320, upload-time = "2025-05-24T20:35:04.028Z" },
33 { url = "https://files.pythonhosted.org/packages/8e/35/35571a4eed57816c3b5fdbefcb15f38563fbe4f3a4a7d1588c8ef899afaf/atomicwriter-0.2.5-cp39-abi3-macosx_11_0_x86_64.whl", hash = "sha256:6583c24333508839db2156d895cbbb5cd3ff20d4f9c698e341435e5b35990eaa", size = 250818, upload-time = "2025-05-24T20:35:05.21Z" },
34 { url = "https://files.pythonhosted.org/packages/81/d9/145093630bc25f115a49d32d9ef66745f5cdef787492d77fd27e74d20389/atomicwriter-0.2.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:136a9902ae3f1c0cb262a07dd3ac85069d71f8b11347cd740030567e67d611aa", size = 265796, upload-time = "2025-05-24T20:35:06.388Z" },
35 { url = "https://files.pythonhosted.org/packages/58/32/d1881adade2ebc70aa9dbb61cadabc2c00cfa99a7a5d6ba48f44e279056f/atomicwriter-0.2.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0b6830434b6a49c19473c3f3975dfa0a87dec95bee81297f7393e378f9a0b82f", size = 269378, upload-time = "2025-05-24T20:35:07.578Z" },
36 { url = "https://files.pythonhosted.org/packages/93/f5/2661ea763784a4991c4c7be5c932a468937bd1d4618b833a63ec638a3b76/atomicwriter-0.2.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53095a01891a2901aa04c10c8de52c0ba41e0d8a4a1893318cf34ccbdbde00b7", size = 328167, upload-time = "2025-05-24T20:35:08.764Z" },
37 { url = "https://files.pythonhosted.org/packages/ec/bc/e3aa521671a589bee9662d3e2108e4835a5d80e6da76e4d05d98d1c78005/atomicwriter-0.2.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ecf4dc3983bb1f28b21cb09c2d96b6936d8864c559dcf151b57813cb1eae998b", size = 347153, upload-time = "2025-05-24T20:35:10.507Z" },
38 { url = "https://files.pythonhosted.org/packages/59/b7/e190383e7240b1f247c6df9bc6667db8df10190cd0bb2dba8ea6bd704ea4/atomicwriter-0.2.5-cp39-abi3-win_amd64.whl", hash = "sha256:92cff264a20364301ab341b332fd0112866870b8cb35caf99a3f3fee0e6c19e8", size = 156374, upload-time = "2025-05-24T20:35:11.716Z" },
39]
40
41[[package]]
42name = "certifi"
43version = "2025.1.31"
44source = { registry = "https://pypi.org/simple" }
45sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577, upload-time = "2025-01-31T02:16:47.166Z" }
46wheels = [
47 { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393, upload-time = "2025-01-31T02:16:45.015Z" },
48]
49
50[[package]]
51name = "charset-normalizer"
52version = "3.4.1"
53source = { registry = "https://pypi.org/simple" }
54sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188, upload-time = "2024-12-24T18:12:35.43Z" }
55wheels = [
56 { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105, upload-time = "2024-12-24T18:10:38.83Z" },
57 { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404, upload-time = "2024-12-24T18:10:44.272Z" },
58 { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423, upload-time = "2024-12-24T18:10:45.492Z" },
59 { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184, upload-time = "2024-12-24T18:10:47.898Z" },
60 { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268, upload-time = "2024-12-24T18:10:50.589Z" },
61 { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601, upload-time = "2024-12-24T18:10:52.541Z" },
62 { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098, upload-time = "2024-12-24T18:10:53.789Z" },
63 { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520, upload-time = "2024-12-24T18:10:55.048Z" },
64 { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852, upload-time = "2024-12-24T18:10:57.647Z" },
65 { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488, upload-time = "2024-12-24T18:10:59.43Z" },
66 { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192, upload-time = "2024-12-24T18:11:00.676Z" },
67 { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550, upload-time = "2024-12-24T18:11:01.952Z" },
68 { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785, upload-time = "2024-12-24T18:11:03.142Z" },
69 { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698, upload-time = "2024-12-24T18:11:05.834Z" },
70 { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162, upload-time = "2024-12-24T18:11:07.064Z" },
71 { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263, upload-time = "2024-12-24T18:11:08.374Z" },
72 { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966, upload-time = "2024-12-24T18:11:09.831Z" },
73 { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992, upload-time = "2024-12-24T18:11:12.03Z" },
74 { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162, upload-time = "2024-12-24T18:11:13.372Z" },
75 { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972, upload-time = "2024-12-24T18:11:14.628Z" },
76 { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095, upload-time = "2024-12-24T18:11:17.672Z" },
77 { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668, upload-time = "2024-12-24T18:11:18.989Z" },
78 { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073, upload-time = "2024-12-24T18:11:21.507Z" },
79 { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732, upload-time = "2024-12-24T18:11:22.774Z" },
80 { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391, upload-time = "2024-12-24T18:11:24.139Z" },
81 { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702, upload-time = "2024-12-24T18:11:26.535Z" },
82 { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload-time = "2024-12-24T18:12:32.852Z" },
83]
84
85[[package]]
86name = "dbus-next"
87version = "0.2.3"
88source = { registry = "https://pypi.org/simple" }
89sdist = { url = "https://files.pythonhosted.org/packages/ce/45/6a40fbe886d60a8c26f480e7d12535502b5ba123814b3b9a0b002ebca198/dbus_next-0.2.3.tar.gz", hash = "sha256:f4eae26909332ada528c0a3549dda8d4f088f9b365153952a408e28023a626a5", size = 71112, upload-time = "2021-07-25T22:11:28.398Z" }
90wheels = [
91 { url = "https://files.pythonhosted.org/packages/d2/fc/c0a3f4c4eaa5a22fbef91713474666e13d0ea2a69c84532579490a9f2cc8/dbus_next-0.2.3-py3-none-any.whl", hash = "sha256:58948f9aff9db08316734c0be2a120f6dc502124d9642f55e90ac82ffb16a18b", size = 57885, upload-time = "2021-07-25T22:11:25.466Z" },
92]
93
94[[package]]
95name = "desktop-notify"
96version = "1.3.3"
97source = { registry = "https://pypi.org/simple" }
98dependencies = [
99 { name = "dbus-next" },
100]
101sdist = { url = "https://files.pythonhosted.org/packages/7a/d8/7ae5779257f5f1aa0a2d50c02d70b29522bd414692f3d3bd18ef119fe82d/desktop-notify-1.3.3.tar.gz", hash = "sha256:62934ad1f72f292f9a3af5ffe45af32814af18c396c00369385540c72bf08077", size = 7828, upload-time = "2021-01-03T16:46:36.483Z" }
102wheels = [
103 { url = "https://files.pythonhosted.org/packages/0a/cd/a7e3bd0262f3e8a9272fd24d0193e24dad7cb4e4edd27da48e74b5523e59/desktop_notify-1.3.3-py3-none-any.whl", hash = "sha256:8ad7ecc3a9a603dd5fa3cdc11cc6265cfbc7f6df9d8ed240f4663f43ef0de37a", size = 9937, upload-time = "2021-01-03T16:46:35.157Z" },
104]
105
106[[package]]
107name = "frozendict"
108version = "2.4.6"
109source = { registry = "https://pypi.org/simple" }
110sdist = { url = "https://files.pythonhosted.org/packages/bb/59/19eb300ba28e7547538bdf603f1c6c34793240a90e1a7b61b65d8517e35e/frozendict-2.4.6.tar.gz", hash = "sha256:df7cd16470fbd26fc4969a208efadc46319334eb97def1ddf48919b351192b8e", size = 316416, upload-time = "2024-10-13T12:15:32.449Z" }
111wheels = [
112 { url = "https://files.pythonhosted.org/packages/04/13/d9839089b900fa7b479cce495d62110cddc4bd5630a04d8469916c0e79c5/frozendict-2.4.6-py311-none-any.whl", hash = "sha256:d065db6a44db2e2375c23eac816f1a022feb2fa98cbb50df44a9e83700accbea", size = 16148, upload-time = "2024-10-13T12:15:26.839Z" },
113 { url = "https://files.pythonhosted.org/packages/ba/d0/d482c39cee2ab2978a892558cf130681d4574ea208e162da8958b31e9250/frozendict-2.4.6-py312-none-any.whl", hash = "sha256:49344abe90fb75f0f9fdefe6d4ef6d4894e640fadab71f11009d52ad97f370b9", size = 16146, upload-time = "2024-10-13T12:15:28.16Z" },
114 { url = "https://files.pythonhosted.org/packages/a5/8e/b6bf6a0de482d7d7d7a2aaac8fdc4a4d0bb24a809f5ddd422aa7060eb3d2/frozendict-2.4.6-py313-none-any.whl", hash = "sha256:7134a2bb95d4a16556bb5f2b9736dceb6ea848fa5b6f3f6c2d6dba93b44b4757", size = 16146, upload-time = "2024-10-13T12:15:29.495Z" },
115]
116
117[[package]]
118name = "idna"
119version = "3.10"
120source = { registry = "https://pypi.org/simple" }
121sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
122wheels = [
123 { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
124]
125
126[[package]]
127name = "jsonpickle"
128version = "4.0.5"
129source = { registry = "https://pypi.org/simple" }
130sdist = { url = "https://files.pythonhosted.org/packages/d6/33/4bda317ab294722fcdfff8f63aab74af9fda3675a4652d984a101aa7587e/jsonpickle-4.0.5.tar.gz", hash = "sha256:f299818b39367c361b3f26bdba827d4249ab5d383cd93144d0f94b5417aacb35", size = 315661, upload-time = "2025-03-29T19:22:56.92Z" }
131wheels = [
132 { url = "https://files.pythonhosted.org/packages/dc/1b/0e79cf115e0f54f1e8f56effb6ffd2ef8f92e9c324d692ede660067f1bfe/jsonpickle-4.0.5-py3-none-any.whl", hash = "sha256:b4ac7d0a75ddcdfd93445737f1d36ff28768690d43e54bf5d0ddb1d915e580df", size = 46382, upload-time = "2025-03-29T19:22:54.252Z" },
133]
134
135[[package]]
136name = "python-dateutil"
137version = "2.9.0.post0"
138source = { registry = "https://pypi.org/simple" }
139dependencies = [
140 { name = "six" },
141]
142sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
143wheels = [
144 { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
145]
146
147[[package]]
148name = "pyxdg"
149version = "0.28"
150source = { registry = "https://pypi.org/simple" }
151sdist = { url = "https://files.pythonhosted.org/packages/b0/25/7998cd2dec731acbd438fbf91bc619603fc5188de0a9a17699a781840452/pyxdg-0.28.tar.gz", hash = "sha256:3267bb3074e934df202af2ee0868575484108581e6f3cb006af1da35395e88b4", size = 77776, upload-time = "2022-06-05T11:35:01Z" }
152wheels = [
153 { url = "https://files.pythonhosted.org/packages/e5/8d/cf41b66a8110670e3ad03dab9b759704eeed07fa96e90fdc0357b2ba70e2/pyxdg-0.28-py2.py3-none-any.whl", hash = "sha256:bdaf595999a0178ecea4052b7f4195569c1ff4d344567bccdc12dfdf02d545ab", size = 49520, upload-time = "2022-06-05T11:34:58.832Z" },
154]
155
156[[package]]
157name = "requests"
158version = "2.32.3"
159source = { registry = "https://pypi.org/simple" }
160dependencies = [
161 { name = "certifi" },
162 { name = "charset-normalizer" },
163 { name = "idna" },
164 { name = "urllib3" },
165]
166sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" }
167wheels = [
168 { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" },
169]
170
171[[package]]
172name = "six"
173version = "1.17.0"
174source = { registry = "https://pypi.org/simple" }
175sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
176wheels = [
177 { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
178]
179
180[[package]]
181name = "tabulate"
182version = "0.9.0"
183source = { registry = "https://pypi.org/simple" }
184sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" }
185wheels = [
186 { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" },
187]
188
189[[package]]
190name = "toml"
191version = "0.10.2"
192source = { registry = "https://pypi.org/simple" }
193sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload-time = "2020-11-01T01:40:22.204Z" }
194wheels = [
195 { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" },
196]
197
198[[package]]
199name = "uritools"
200version = "4.0.3"
201source = { registry = "https://pypi.org/simple" }
202sdist = { url = "https://files.pythonhosted.org/packages/d3/43/4182fb2a03145e6d38698e38b49114ce59bc8c79063452eb585a58f8ce78/uritools-4.0.3.tar.gz", hash = "sha256:ee06a182a9c849464ce9d5fa917539aacc8edd2a4924d1b7aabeeecabcae3bc2", size = 24184, upload-time = "2024-05-28T18:07:45.194Z" }
203wheels = [
204 { url = "https://files.pythonhosted.org/packages/e6/17/5a4510d9ca9cc8be217ce359eb54e693dca81cf4d442308b282d5131b17d/uritools-4.0.3-py3-none-any.whl", hash = "sha256:bae297d090e69a0451130ffba6f2f1c9477244aa0a5543d66aed2d9f77d0dd9c", size = 10304, upload-time = "2024-05-28T18:07:42.731Z" },
205]
206
207[[package]]
208name = "urllib3"
209version = "2.3.0"
210source = { registry = "https://pypi.org/simple" }
211sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" }
212wheels = [
213 { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" },
214]
215
216[[package]]
217name = "worktime"
218version = "1.0.0"
219source = { editable = "." }
220dependencies = [
221 { name = "atomicwriter" },
222 { name = "desktop-notify" },
223 { name = "frozendict" },
224 { name = "jsonpickle" },
225 { name = "python-dateutil" },
226 { name = "pyxdg" },
227 { name = "requests" },
228 { name = "tabulate" },
229 { name = "toml" },
230 { name = "uritools" },
231]
232
233[package.metadata]
234requires-dist = [
235 { name = "atomicwriter", specifier = ">=0.2.5" },
236 { name = "desktop-notify", specifier = ">=1.3.3" },
237 { name = "frozendict", specifier = ">=2.4.6" },
238 { name = "jsonpickle", specifier = ">=4.0.5,<5" },
239 { name = "python-dateutil", specifier = ">=2.9.0.post0,<3" },
240 { name = "pyxdg", specifier = ">=0.28,<0.29" },
241 { name = "requests", specifier = ">=2.32.3,<3" },
242 { name = "tabulate", specifier = ">=0.9.0,<0.10" },
243 { name = "toml", specifier = ">=0.10.2,<0.11" },
244 { name = "uritools", specifier = ">=4.0.3,<5" },
245]
246
247[package.metadata.requires-dev]
248dev = []
diff --git a/overlays/worktime/worktime/__main__.py b/overlays/worktime/worktime/__main__.py
index 362c8da4..016690f0 100755
--- a/overlays/worktime/worktime/__main__.py
+++ b/overlays/worktime/worktime/__main__.py
@@ -1,10 +1,12 @@
 import requests
 from requests.exceptions import HTTPError
 from requests.auth import HTTPBasicAuth
+from requests.adapters import HTTPAdapter, Retry
 from datetime import *
 from xdg import BaseDirectory
 import toml
-from uritools import (uricompose)
+from uritools import uricompose
+from urllib.parse import urljoin
 
 from inspect import signature
 
@@ -23,80 +25,80 @@ import argparse
23from copy import deepcopy 25from copy import deepcopy
24 26
25import sys 27import sys
26from sys import stderr 28from sys import stderr, stdout
27 29
28from tabulate import tabulate 30from tabulate import tabulate
29 31
30from itertools import groupby, count 32from itertools import groupby, count, islice
31from functools import cache, partial 33from functools import cache, partial
32 34
33import backoff
34
35from pathlib import Path 35from pathlib import Path
36 36
37from collections import defaultdict 37from collections import defaultdict
38from collections.abc import Iterable, Generator
39from typing import Any
38 40
39import jsonpickle 41import jsonpickle
40from hashlib import blake2s 42from hashlib import blake2s
43import json
44
45import asyncio
46
47from frozendict import frozendict
48from contextlib import closing
49import os
50from time import clock_gettime_ns, CLOCK_MONOTONIC
51from atomicwriter import AtomicWriter
52import desktop_notify.aio as notify
53
54class BearerAuth(requests.auth.AuthBase):
55 def __init__(self, token):
56 self.token = token
57 def __call__(self, r):
58 r.headers["authorization"] = "Bearer " + self.token
59 return r
60
61class KimaiSession(requests.Session):
62 def __init__(self, base_url: str, api_token: str):
63 super().__init__()
64 self.base_url = base_url
65 self.auth = BearerAuth(api_token)
66 retries = Retry(total=5, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504])
67 super().mount(base_url, HTTPAdapter(max_retries=retries))
68
69 def request(self, method, url, *args, **kwargs):
70 joined_url = urljoin(self.base_url, url)
71 return super().request(method, joined_url, *args, headers = {'Accept': 'application/json'} | (kwargs['headers'] if 'headers' in kwargs else {}), **{k: v for k, v in kwargs.items() if k not in ['headers']})
72
73class KimaiAPI(object):
74 def __init__(self, base_url: str, api_token: str, clients: Iterable[str]):
75 self._session = KimaiSession(base_url, api_token)
76 self._kimai_clients = self._session.get('/api/customers').json()
77 self._client_ids = self.resolve_clients(clients)
78 kimai_user = self._session.get('/api/users/me').json()
79 self._tz = gettz(kimai_user['timezone'])
80
81 def resolve_clients(self, clients: Iterable[str]) -> frozenset[int]:
82 return frozenset({ client['id'] for client in self._kimai_clients if client['name'] in clients })
83
84 def render_datetime(self, datetime: datetime) -> str:
85 return datetime.astimezone(self._tz).strftime('%Y-%m-%dT%H:%M:%S')
86
87 def get_timesheets(self, params: dict[str, Any] = {}) -> Generator[Any]:
88 for page in count(start=1):
89 resp = self._session.get('/api/timesheets', params=params | {'size': 100, 'page': page})
90 if resp.status_code == 404:
91 break
92 yield from resp.json()
41 93
42class TogglAPISection(Enum): 94 def entry_durations(self, start_date: datetime, *, end_date: datetime, clients: Iterable[str] | None = None) -> Generator[timedelta]:
43 TOGGL = '/api/v9' 95 client_ids = None
44 REPORTS = '/reports/api/v2' 96 if clients is not None and not clients:
45
46class TogglAPIError(Exception):
47 def __init__(self, response, *, http_error=None):
48 self.http_error = http_error
49 self.response = response
50
51 def __str__(self):
52 if not self.http_error is None:
53 return str(self.http_error)
54 else:
55 return self.response.text
56
57class TogglAPI(object):
58 def __init__(self, api_token, workspace_id, client_ids):
59 self._api_token = api_token
60 self._workspace_id = workspace_id
61 self._client_ids = set(map(int, client_ids.split(','))) if client_ids else None
62
63 def _make_url(self, api=TogglAPISection.TOGGL, section=['me', 'time_entries', 'current'], params={}):
64 if api is TogglAPISection.REPORTS:
65 params.update({'user_agent': 'worktime', 'workspace_id': self._workspace_id})
66
67 api_path = api.value
68 section_path = '/'.join(section)
69 uri = uricompose(scheme='https', host='api.track.toggl.com', path=f"{api_path}/{section_path}", query=params)
70
71 return uri
72
73 def _query(self, url, method):
74 response = self._raw_query(url, method)
75 response.raise_for_status()
76 return response
77
78 @backoff.on_predicate(
79 backoff.expo,
80 factor=0.1, max_value=2,
81 predicate=lambda r: r.status_code == 429,
82 max_time=10,
83 )
84 def _raw_query(self, url, method):
85 headers = {'content-type': 'application/json', 'accept': 'application/json'}
86 response = None
87
88 if method == 'GET':
89 response = requests.get(url, headers=headers, auth=HTTPBasicAuth(self._api_token, 'api_token'))
90 elif method == 'POST':
91 response = requests.post(url, headers=headers, auth=HTTPBasicAuth(self._api_token, 'api_token'))
92 else:
93 raise ValueError(f"Undefined HTTP method “{method}”")
94
95 return response
96
97 def entry_durations(self, start_date, *, end_date, rounding=False, client_ids):
98 if client_ids is not None and not client_ids:
99 return 97 return
98 elif clients is None:
99 client_ids = self._client_ids
100 else:
101 client_ids = self.resolve_clients(clients)
100 102
101 cache_dir = Path(BaseDirectory.save_cache_path('worktime')) / 'entry_durations' 103 cache_dir = Path(BaseDirectory.save_cache_path('worktime')) / 'entry_durations'
102 step = timedelta(days = 120) 104 step = timedelta(days = 120)
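The KimaiSession introduced above reduces to a small reusable pattern: a requests.Session subclass that resolves relative paths against a fixed base URL, attaches a bearer token, and retries transient 5xx responses. A minimal standalone sketch of that pattern (the host and token below are placeholders, not values from this repository):

import requests
from requests.adapters import HTTPAdapter, Retry
from urllib.parse import urljoin


class BearerSession(requests.Session):
    """requests.Session that resolves relative paths against a base URL,
    sends a Bearer token on every request and retries transient 5xx errors."""

    def __init__(self, base_url, token):
        super().__init__()
        self.base_url = base_url
        self.headers["Authorization"] = "Bearer " + token
        self.headers["Accept"] = "application/json"
        retries = Retry(total=5, backoff_factor=0.1,
                        status_forcelist=[500, 502, 503, 504])
        self.mount(base_url, HTTPAdapter(max_retries=retries))

    def request(self, method, url, *args, **kwargs):
        return super().request(method, urljoin(self.base_url, url), *args, **kwargs)


# Hypothetical usage; the endpoint path mirrors the ones used above.
# session = BearerSession("https://kimai.example.org", "API_TOKEN")
# print(session.get("/api/users/me").json())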
@@ -115,11 +117,8 @@ class TogglAPI(object):
115 cache_key = blake2s(jsonpickle.encode({ 117 cache_key = blake2s(jsonpickle.encode({
116 'start': req_start, 118 'start': req_start,
117 'end': req_end, 119 'end': req_end,
118 'rounding': rounding, 120 'client_ids': client_ids,
119 'clients': client_ids, 121 }).encode('utf-8'), key = self._session.auth.token.encode('utf-8')).hexdigest()
120 'workspace': self._workspace_id,
121 'workspace_clients': self._client_ids
122 }).encode('utf-8'), key = self._api_token.encode('utf-8')).hexdigest()
123 cache_path = cache_dir / cache_key[:2] / cache_key[2:4] / f'{cache_key[4:]}.json' 122 cache_path = cache_dir / cache_key[:2] / cache_key[2:4] / f'{cache_key[4:]}.json'
124 try: 123 try:
125 with cache_path.open('r', encoding='utf-8') as ch: 124 with cache_path.open('r', encoding='utf-8') as ch:
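The disk cache for entry durations is keyed by a keyed BLAKE2s digest of the request parameters, with the hex digest sharded into two directory levels. A minimal sketch of that derivation under the same assumptions (the parameter names and paths below are illustrative; note that blake2s only accepts keys of up to 32 bytes, hence the truncation):

import jsonpickle
from hashlib import blake2s
from pathlib import Path


def cache_path_for(params, secret, cache_dir):
    # Keyed digest of the serialized parameters; the key is truncated here
    # because blake2s rejects keys longer than 32 bytes.
    digest = blake2s(jsonpickle.encode(params).encode("utf-8"),
                     key=secret.encode("utf-8")[:32]).hexdigest()
    return cache_dir / digest[:2] / digest[2:4] / (digest[4:] + ".json")


# Hypothetical values, for illustration only:
print(cache_path_for({"start": "2024-01-01", "end": "2024-04-30"},
                     "API_TOKEN", Path("/tmp/worktime-cache")))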
@@ -129,85 +128,83 @@ class TogglAPI(object):
129 pass 128 pass
130 129
131 entries = list() 130 entries = list()
132 params = { 'since': (req_start - timedelta(days=1)).date().isoformat(), 131 params = {
133 'until': (req_end + timedelta(days=1)).date().isoformat(), 132 'begin': self.render_datetime(req_start),
134 'rounding': 'yes' if rounding else 'no', 133 'end': self.render_datetime(req_end),
135 'billable': 'yes' 134 'customers[]': list(client_ids),
136 } 135 'billable': 1,
137 if client_ids is not None: 136 }
138 params |= { 'client_ids': ','.join(map(str, client_ids)) } 137
139 for page in count(start = 1): 138 for entry in self.get_timesheets(params):
140 url = self._make_url(api = TogglAPISection.REPORTS, section = ['details'], params = params | { 'page': page }) 139 if entry['end'] is None:
141 r = self._query(url = url, method='GET') 140 continue
142 if not r or not r.json(): 141
143 raise TogglAPIError(r) 142 start = isoparse(entry['begin'])
144 report = r.json() 143 end = isoparse(entry['end'])
145 for entry in report['data']:
146 start = isoparse(entry['start'])
147 end = isoparse(entry['end'])
148
149 if start > req_end or end < req_start:
150 continue
151 144
152 x = min(end, req_end) - max(start, req_start) 145 if start > req_end or end < req_start:
153 if cache_key: 146 continue
154 entries.append(x) 147
155 yield x 148 x = min(end, req_end) - max(start, req_start)
156 if not report['data']: 149 if cache_key:
157 break 150 entries.append(x)
151 yield x
158 152
159 if cache_path: 153 if cache_path:
160 cache_path.parent.mkdir(parents=True, exist_ok=True) 154 cache_path.parent.mkdir(parents=True, exist_ok=True)
161 with cache_path.open('w', encoding='utf-8') as ch: 155 with cache_path.open('w', encoding='utf-8') as ch:
162 ch.write(jsonpickle.encode(entries)) 156 ch.write(jsonpickle.encode(entries))
163 # res = timedelta(milliseconds=report['total_billable']) if report['total_billable'] else timedelta(milliseconds=0)
164 # return res
165
166 def get_billable_hours(self, start_date, end_date=datetime.now(timezone.utc), rounding=False):
167 billable_acc = timedelta(milliseconds = 0)
168 if 0 in self._client_ids:
169 url = self._make_url(api = TogglAPISection.TOGGL, section = ['workspaces', self._workspace_id, 'clients'])
170 r = self._query(url = url, method = 'GET')
171 if not r or not r.json():
172 raise TogglAPIError(r)
173
174 billable_acc += sum(self.entry_durations(start_date, end_date=end_date, rounding=rounding, client_ids=None), start=timedelta(milliseconds=0)) - sum(self.entry_durations(start_date, end_date=end_date, rounding=rounding, client_ids=frozenset(map(lambda c: c['id'], r.json()))), start=timedelta(milliseconds=0))
175 157
176 billable_acc += sum(self.entry_durations(start_date, end_date=end_date, rounding=rounding, client_ids=frozenset(*(self._client_ids - {0}))), start=timedelta(milliseconds=0)) 158 def get_billable_hours(self, start_date: datetime, end_date: datetime = datetime.now(timezone.utc)) -> timedelta:
159 return sum(self.entry_durations(start_date, end_date=end_date), start=timedelta(milliseconds=0))
177 160
178 return billable_acc 161 def get_running_entry(self) -> Any | None:
162 kimai_entries = self._session.get('/api/timesheets/active').json()
163 if not kimai_entries:
164 return None
165 entry = kimai_entries[0]
179 166
180 def get_running_clock(self, now=datetime.now(timezone.utc)): 167 if entry['project']['customer']['id'] not in self._client_ids:
181 url = self._make_url(api = TogglAPISection.TOGGL, section = ['me', 'time_entries', 'current']) 168 return None
182 r = self._query(url = url, method='GET')
183 169
184 if not r or (not r.json() and r.json() is not None): 170 return entry
185 raise TogglAPIError(r)
186 171
187 if not r.json() or not r.json()['billable']: 172 def get_running_clock(self, now: datetime = datetime.now(timezone.utc)) -> timedelta | None:
173 entry = self.get_running_entry()
174 if not entry:
188 return None 175 return None
176 start = isoparse(entry['begin'])
177 return now - start if start <= now else None
189 178
190 if self._client_ids is not None: 179 def get_recent_entries(self) -> Generator[Any]:
191 if 'pid' in r.json() and r.json()['pid']: 180 step = timedelta(days = 7)
192 url = self._make_url(api = TogglAPISection.TOGGL, section = ['projects', str(r.json()['pid'])]) 181 now = datetime.now().astimezone(timezone.utc)
193 pr = self._query(url = url, method = 'GET') 182 ids = set()
194 if not pr or not pr.json(): 183 for req_end in (now - step * i for i in count()):
195 raise TogglAPIError(pr) 184 params = {
196 185 'begin': self.render_datetime(req_end - step),
197 if not pr.json(): 186 'end': self.render_datetime(req_end),
198 return None 187 'full': 'true',
188 }
189 for entry in self.get_timesheets(params):
190 if entry['id'] in ids:
191 continue
192 ids.add(entry['id'])
193 yield entry
199 194
200 if 'cid' in pr.json() and pr.json()['cid']: 195 def start_clock(self, project_id: int, activity_id: int, description: str | None = None, tags: Iterable[str] | None = None, billable: bool = True):
201 if pr.json()['cid'] not in self._client_ids: 196 self._session.post('/api/timesheets', json={
202 return None 197 'begin': self.render_datetime(datetime.now()),
203 elif 0 not in self._client_ids: 198 'project': project_id,
204 return None 199 'activity': activity_id,
205 elif 0 not in self._client_ids: 200 'description': description if description else '',
206 return None 201 'tags': (','.join(tags)) if tags else '',
202 'billable': billable,
203 }).raise_for_status()
207 204
208 start = isoparse(r.json()['start']) 205 def stop_clock(self, running_id: int):
206 self._session.patch(f'/api/timesheets/{running_id}/stop').raise_for_status()
209 207
210 return now - start if start <= now else None
211 208
212class Worktime(object): 209class Worktime(object):
213 time_worked = timedelta() 210 time_worked = timedelta()
@@ -223,6 +220,7 @@ class Worktime(object):
223 leave_budget = dict() 220 leave_budget = dict()
224 time_per_day = None 221 time_per_day = None
225 workdays = None 222 workdays = None
223 pull_forward = dict()
226 224
227 @staticmethod 225 @staticmethod
228 @cache 226 @cache
@@ -279,10 +277,10 @@ class Worktime(object):
279 277
280 config = Worktime.config() 278 config = Worktime.config()
281 config_dir = BaseDirectory.load_first_config('worktime') 279 config_dir = BaseDirectory.load_first_config('worktime')
282 api = TogglAPI( 280 api = KimaiAPI(
283 api_token=config.get("TOGGL", {}).get("ApiToken", None), 281 base_url=config.get("KIMAI", {}).get("BaseUrl", None),
284 workspace_id=config.get("TOGGL", {}).get("Workspace", None), 282 api_token=config.get("KIMAI", {}).get("ApiToken", None),
285 client_ids=config.get("TOGGL", {}).get("ClientIds", None) 283 clients=config.get("KIMAI", {}).get("Clients", None)
286 ) 284 )
287 date_format = config.get("WORKTIME", {}).get("DateFormat", '%Y-%m-%d') 285 date_format = config.get("WORKTIME", {}).get("DateFormat", '%Y-%m-%d')
288 286
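The constructor arguments above come from a [KIMAI] section of the worktime configuration (this module imports toml and xdg.BaseDirectory for exactly that). A hedged sketch of such a section; only the key names are taken from the lookups above, the values and the file location are invented:

import toml

example = """
[KIMAI]
BaseUrl = "https://kimai.example.org"
ApiToken = "API_TOKEN"
Clients = ["Example Client"]

[WORKTIME]
DateFormat = "%Y-%m-%d"
"""

config = toml.loads(example)
print(config.get("KIMAI", {}).get("BaseUrl", None))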
@@ -377,10 +375,7 @@ class Worktime(object):
377 parse_datestr(stripped_line) 375 parse_datestr(stripped_line)
378 376
379 for day in [fromDay + timedelta(days = x) for x in range(0, (toDay - fromDay).days + 1)]: 377 for day in [fromDay + timedelta(days = x) for x in range(0, (toDay - fromDay).days + 1)]:
380 if self.end_date.date() < day or day < self.start_date.date(): 378 if self.would_be_workday(day) and self.start_date.date() <= day and day <= self.end_date.date():
381 continue
382
383 if self.would_be_workday(day):
384 if excused_kind == 'leave': 379 if excused_kind == 'leave':
385 self.leave_days.add(day) 380 self.leave_days.add(day)
386 elif time is not None and time >= self.time_per_day(day): 381 elif time is not None and time >= self.time_per_day(day):
@@ -390,14 +385,34 @@ class Worktime(object):
390 if e.errno != 2: 385 if e.errno != 2:
391 raise e 386 raise e
392 387
393 pull_forward = dict() 388 self.time_per_day = lambda day: timedelta(hours = hours_per_week(day)) / len(self.workdays) - (holidays[day] if day in holidays else timedelta())
394 389
395 start_day = self.start_date.date() 390 start_day = self.start_date.date()
396 end_day = self.end_date.date() 391 end_day = self.end_date.date()
397 392
393 self.extra_days_to_work = dict()
394
398 try: 395 try:
399 with open(Path(config_dir) / "pull-forward", 'r') as excused: 396 with open(Path(config_dir) / "days-to-work", 'r') as extra_days_to_work_file:
400 for line in excused: 397 for line in extra_days_to_work_file:
398 stripped_line = line.strip()
399 if stripped_line:
400 splitLine = stripped_line.split(' ')
401 if len(splitLine) == 2:
402 [hours, datestr] = splitLine
403 day = datetime.strptime(datestr, date_format).replace(tzinfo=tzlocal()).date()
404 self.extra_days_to_work[day] = timedelta(hours = float(hours))
405 else:
406 day = datetime.strptime(stripped_line, date_format).replace(tzinfo=tzlocal()).date()
407 self.extra_days_to_work[day] = self.time_per_day(day)
408 except IOError as e:
409 if e.errno != 2:
410 raise e
411
412
413 try:
414 with open(Path(config_dir) / "pull-forward", 'r') as pull_forward:
415 for line in pull_forward:
401 stripped_line = line.strip() 416 stripped_line = line.strip()
402 if stripped_line: 417 if stripped_line:
403 [hours, datestr] = stripped_line.split(' ') 418 [hours, datestr] = stripped_line.split(' ')
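The days-to-work file read above lives in the worktime config directory and uses one entry per line: either "<hours> <date>" or a bare "<date>", in which case the regular daily target applies. A small parsing sketch under those assumptions (the 8-hour default stands in for time_per_day, and the sample lines are invented):

from datetime import datetime, timedelta

DATE_FORMAT = "%Y-%m-%d"  # the default DateFormat used above


def parse_days_to_work_line(line, default=timedelta(hours=8)):
    parts = line.strip().split(" ")
    if len(parts) == 2:
        hours, datestr = parts
        value = timedelta(hours=float(hours))
    else:
        datestr, value = parts[0], default
    return datetime.strptime(datestr, DATE_FORMAT).date(), value


print(parse_days_to_work_line("4 2024-05-11"))
print(parse_days_to_work_line("2024-05-18"))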
@@ -418,44 +433,29 @@ class Worktime(object):
418 if not d == datetime.strptime(c, date_format).replace(tzinfo=tzlocal()).date(): break 433 if not d == datetime.strptime(c, date_format).replace(tzinfo=tzlocal()).date(): break
419 else: 434 else:
420 if d >= self.end_date.date(): 435 if d >= self.end_date.date():
421 pull_forward[d] = min(timedelta(hours = float(hours)), self.time_per_day(d) - (holidays[d] if d in holidays else timedelta())) 436 time_for_day = self.time_per_day(d) if d.isoweekday() in self.workdays else timedelta()
437 if d in self.extra_days_to_work:
438 time_for_day += self.extra_days_to_work[d]
439 self.pull_forward[d] = min(timedelta(hours = float(hours)), time_for_day)
422 except IOError as e: 440 except IOError as e:
423 if e.errno != 2: 441 if e.errno != 2:
424 raise e 442 raise e
425 443
444 if self.pull_forward:
445 for year in range(self.end_date.year + 1, max(self.pull_forward.keys()).year + 1):
446 holidays |= {k: v * timedelta(hours = hours_per_week(k)) / len(self.workdays) for k, v in Worktime.holidays(year).items()}
447
426 self.days_to_work = dict() 448 self.days_to_work = dict()
427 449
428 if pull_forward: 450 # if self.pull_forward:
429 end_day = max(end_day, max(list(pull_forward))) 451 # end_day = max(end_day, max(self.pull_forward.keys()))
430 452
431 for day in [start_day + timedelta(days = x) for x in range(0, (end_day - start_day).days + 1)]: 453 for day in [start_day + timedelta(days = x) for x in range(0, (end_day - start_day).days + 1)]:
432 if day.isoweekday() in self.workdays: 454 if day.isoweekday() in self.workdays:
433 time_to_work = self.time_per_day(day) 455 time_to_work = self.time_per_day(day)
434 if day in holidays.keys():
435 time_to_work -= holidays[day]
436 if time_to_work > timedelta(): 456 if time_to_work > timedelta():
437 self.days_to_work[day] = time_to_work 457 self.days_to_work[day] = time_to_work
438 458
439 self.extra_days_to_work = dict()
440
441 try:
442 with open(Path(config_dir) / "days-to-work", 'r') as extra_days_to_work_file:
443 for line in extra_days_to_work_file:
444 stripped_line = line.strip()
445 if stripped_line:
446 splitLine = stripped_line.split(' ')
447 if len(splitLine) == 2:
448 [hours, datestr] = splitLine
449 day = datetime.strptime(datestr, date_format).replace(tzinfo=tzlocal()).date()
450 self.extra_days_to_work[day] = timedelta(hours = float(hours))
451 else:
452 day = datetime.strptime(stripped_line, date_format).replace(tzinfo=tzlocal()).date()
453 self.extra_days_to_work[day] = self.time_per_day(day)
454 except IOError as e:
455 if e.errno != 2:
456 raise e
457
458
459 self.now_is_workday = self.is_workday(self.now.date()) 459 self.now_is_workday = self.is_workday(self.now.date())
460 460
461 self.time_worked = timedelta() 461 self.time_worked = timedelta()
@@ -470,33 +470,46 @@ class Worktime(object):
470 self.extra_days_to_work[self.now.date()] = timedelta() 470 self.extra_days_to_work[self.now.date()] = timedelta()
471 471
472 self.time_to_work = sum([self.days_to_work[day] for day in self.days_to_work.keys() if day <= self.end_date.date()], timedelta()) 472 self.time_to_work = sum([self.days_to_work[day] for day in self.days_to_work.keys() if day <= self.end_date.date()], timedelta())
473 for day in [d for d in list(pull_forward) if d > self.end_date.date()]: 473 for day in [d for d in list(self.pull_forward) if d > self.end_date.date()]:
474 days_forward = set([d for d in self.days_to_work.keys() if d >= self.end_date.date() and d < day and (not d in pull_forward or d == self.end_date.date())]) 474 days_forward = set([d for d in [start_day + timedelta(days = x) for x in range(0, (day - start_day).days + 1)] if d >= self.end_date.date() and d < day and (d not in self.pull_forward or d == self.end_date.date())])
475 extra_days_forward = set([d for d in self.extra_days_to_work.keys() if d >= self.end_date.date() and d < day and (not d in pull_forward or d == self.end_date.date())]) 475 extra_days_forward = set([d for d in self.extra_days_to_work.keys() if d >= self.end_date.date() and d < day and (d not in self.pull_forward or d == self.end_date.date())])
476 days_forward = days_forward.union(extra_days_forward) 476 days_forward |= extra_days_forward
477 477
478 extra_day_time_left = timedelta() 478 extra_day_time_left = timedelta()
479 for extra_day in extra_days_forward: 479 for extra_day in extra_days_forward:
480 day_time = max(timedelta(), self.time_per_day(extra_day) - self.extra_days_to_work[extra_day]) 480 day_time = max(timedelta(), self.time_per_day(extra_day) - self.extra_days_to_work[extra_day])
481 extra_day_time_left += day_time 481 extra_day_time_left += day_time
482 extra_day_time = min(extra_day_time_left, pull_forward[day]) 482 extra_day_time = min(extra_day_time_left, self.pull_forward[day])
483 time_forward = pull_forward[day] - extra_day_time 483 time_forward = self.pull_forward[day] - extra_day_time
484 if extra_day_time_left > timedelta(): 484 if extra_day_time_left > timedelta():
485 for extra_day in extra_days_forward: 485 for extra_day in extra_days_forward:
486 day_time = max(timedelta(), self.time_per_day(extra_day) - self.extra_days_to_work[extra_day]) 486 day_time = max(timedelta(), self.time_per_day(extra_day) - self.extra_days_to_work[extra_day])
487 self.extra_days_to_work[extra_day] += extra_day_time * (day_time / extra_day_time_left) 487 self.extra_days_to_work[extra_day] += extra_day_time * (day_time / extra_day_time_left)
488 488
489 hours_per_day_forward = time_forward / len(days_forward) if len(days_forward) > 0 else timedelta() 489 def days_count(days_forward):
490 r = 0
491 for day in sorted(days_forward):
492 day_time = timedelta()
493 if day in self.extra_days_to_work:
494 day_time += self.extra_days_to_work[day]
495 if day in holidays and not day in self.extra_days_to_work:
496 day_time -= holidays[day]
497 if day.isoweekday() in self.workdays:
498 day_time += timedelta(hours = hours_per_week(day)) / len(self.workdays)
499 r += max(timedelta(), day_time) / (timedelta(hours = hours_per_week(day)) / len(self.workdays))
500 return r
501
502 hours_per_day_forward = time_forward / days_count(days_forward) if days_count(days_forward) > 0 else timedelta()
490 days_forward.discard(self.end_date.date()) 503 days_forward.discard(self.end_date.date())
491 504
492 self.time_pulled_forward += time_forward - hours_per_day_forward * len(days_forward) 505 self.time_pulled_forward += time_forward - hours_per_day_forward * days_count(days_forward)
493 506
494 if self.end_date.date() in self.extra_days_to_work: 507 if self.end_date.date() in self.extra_days_to_work:
495 self.time_pulled_forward += self.extra_days_to_work[self.end_date.date()] 508 self.time_pulled_forward += self.extra_days_to_work[self.end_date.date()]
496 509
497 self.time_to_work += self.time_pulled_forward 510 # self.time_to_work += self.time_pulled_forward
498 511
499 self.time_worked += api.get_billable_hours(self.start_date, self.now, rounding = config.get("WORKTIME", {}).get("rounding", True)) 512 self.time_worked += api.get_billable_hours(self.start_date, self.now)
500 513
501def format_days(worktime, days, date_format=None): 514def format_days(worktime, days, date_format=None):
502 if not date_format: 515 if not date_format:
@@ -518,7 +531,14 @@ def format_days(worktime, days, date_format=None):
518 return ', '.join(map(lambda group: ','.join(map(format_group, group)), groups)) 531 return ', '.join(map(lambda group: ','.join(map(format_group, group)), groups))
519 532
520 533
521def worktime(**args): 534def tooltip_timedelta(td):
535 if td < timedelta(seconds = 0):
536 return "-" + tooltip_timedelta(-td)
537 mm, ss = divmod(td.total_seconds(), 60)
538 hh, mm = divmod(mm, 60)
539 return "%d:%02d:%02d" % (hh, mm, ss)
540
541def worktime(pull_forward_cutoff, waybar, **args):
522 worktime = Worktime(**args) 542 worktime = Worktime(**args)
523 543
524 def format_worktime(worktime): 544 def format_worktime(worktime):
@@ -557,24 +577,41 @@ def worktime(**args):
557 return f"{indicator}{difference_string}" 577 return f"{indicator}{difference_string}"
558 else: 578 else:
559 difference_string = difference_string(total_minutes_difference * timedelta(minutes = 1)) 579 difference_string = difference_string(total_minutes_difference * timedelta(minutes = 1))
560 if worktime.now_is_workday: 580 return difference_string
561 return difference_string 581
562 else: 582 out_class = "running" if worktime.running_entry else "stopped"
563 return f"({difference_string})" 583 difference = worktime.time_to_work - worktime.time_worked
564 584 if worktime.running_entry and -min(timedelta(milliseconds=0), difference) > sum(worktime.pull_forward.values(), start=timedelta(milliseconds=0)) or not worktime.running_entry and max(timedelta(milliseconds=0), difference) > worktime.time_per_day(worktime.now.date()) and worktime.now_is_workday:
565 if worktime.time_pulled_forward >= timedelta(minutes = 15): 585 out_class = "over"
586 pull_forward_sum = sum(worktime.pull_forward.values(), start=timedelta(milliseconds=0))
587 if pull_forward_sum >= min(pull_forward_cutoff, timedelta(seconds = 1)):
566 worktime_no_pulled_forward = deepcopy(worktime) 588 worktime_no_pulled_forward = deepcopy(worktime)
567 worktime_no_pulled_forward.time_to_work -= worktime_no_pulled_forward.time_pulled_forward 589 # worktime_no_pulled_forward.time_to_work -= worktime_no_pulled_forward.time_pulled_forward
568 worktime_no_pulled_forward.time_pulled_forward = timedelta() 590 worktime_no_pulled_forward.time_pulled_forward = timedelta()
591 worktime_no_pulled_forward.pull_forward = dict()
592 worktime.time_to_work += pull_forward_sum
569 593
570 difference_string = format_worktime(worktime)
571 difference_string_no_pulled_forward = format_worktime(worktime_no_pulled_forward) 594 difference_string_no_pulled_forward = format_worktime(worktime_no_pulled_forward)
572 595
573 print(f"{difference_string_no_pulled_forward}…{difference_string}") 596 tooltip = tooltip_timedelta(worktime_no_pulled_forward.time_to_work - worktime_no_pulled_forward.time_worked) + "…" + tooltip_timedelta(difference + pull_forward_sum)
597 if pull_forward_sum >= pull_forward_cutoff:
598 out_text = f"{difference_string_no_pulled_forward}…{format_worktime(worktime)}"
599 else:
600 out_text = format_worktime(worktime)
601 else:
602 tooltip = tooltip_timedelta(difference)
603 out_text = format_worktime(worktime)
604
605 if waybar:
606 json.dump({"text": out_text, "class": out_class, "tooltip": tooltip}, stdout)
574 else: 607 else:
575 print(format_worktime(worktime)) 608 print(out_text)
576 609
577def time_worked(now, **args): 610def pull_forward(**args):
611 worktime = Worktime(**args)
612 print(tooltip_timedelta(sum(worktime.pull_forward.values(), start=timedelta(milliseconds=0))))
613
614def time_worked(now, waybar, **args):
578 then = now.replace(hour = 0, minute = 0, second = 0, microsecond = 0) 615 then = now.replace(hour = 0, minute = 0, second = 0, microsecond = 0)
579 if now.time() == time(): 616 if now.time() == time():
580 now = now + timedelta(days = 1) 617 now = now + timedelta(days = 1)
@@ -584,33 +621,62 @@ def time_worked(now, **args):
584 621
585 worked = now.time_worked - then.time_worked 622 worked = now.time_worked - then.time_worked
586 623
624 out_text = None
625 out_class = "running" if now.running_entry else "stopped"
626 tooltip = tooltip_timedelta(worked)
627 target_time = max(then.time_per_day(then.now.date()), now.time_per_day(now.now.date())) if then.time_per_day(then.now.date()) and now.time_per_day(now.now.date()) else (then.time_per_day(then.now.date()) if then.time_per_day(then.now.date()) else now.time_per_day(now.now.date()))
628 difference = target_time - worked
629 difference_pull_forward = difference + now.time_pulled_forward
630 if now.running_entry and difference_pull_forward < timedelta(seconds=0):
631 out_class = "over"
587 if args['do_round']: 632 if args['do_round']:
588 total_minutes_difference = 5 * ceil(worked / timedelta(minutes = 5)) 633 total_minutes_difference = 5 * ceil(worked / timedelta(minutes = 5))
589 (hours_difference, minutes_difference) = divmod(abs(total_minutes_difference), 60) 634 (hours_difference, minutes_difference) = divmod(abs(total_minutes_difference), 60)
590 sign = '' if total_minutes_difference >= 0 else '-' 635 sign = '' if total_minutes_difference >= 0 else '-'
591
592 difference_string = f"{sign}"
593 if hours_difference != 0:
594 difference_string += f"{hours_difference}h"
595 if hours_difference == 0 or minutes_difference != 0:
596 difference_string += f"{minutes_difference}m"
597
598 clockout_time = None
599 clockout_difference = None
600 if then.now_is_workday or now.now_is_workday:
601 target_time = max(then.time_per_day(then.now.date()), now.time_per_day(now.now.date())) if then.time_per_day(then.now.date()) and now.time_per_day(now.now.date()) else (then.time_per_day(then.now.date()) if then.time_per_day(then.now.date()) else now.time_per_day(now.now.date()));
602 difference = target_time - worked
603 clockout_difference = 5 * ceil(difference / timedelta(minutes = 5))
604 clockout_time = now.now + difference
605 clockout_time += (5 - clockout_time.minute % 5) * timedelta(minutes = 1)
606 clockout_time = clockout_time.replace(second = 0, microsecond = 0)
607 636
608 if now.running_entry and clockout_time and clockout_difference >= 0: 637 difference_string = f"{sign}"
609 print(f"{difference_string}/{clockout_time:%H:%M}") 638 if hours_difference != 0:
610 else: 639 difference_string += f"{hours_difference}h"
611 print(difference_string) 640 if hours_difference == 0 or minutes_difference != 0:
641 difference_string += f"{minutes_difference}m"
642
643 def round_clockout_time(difference):
644 clockout_time = None
645 clockout_difference = None
646 exact_clockout_time = None
647 if then.now_is_workday or now.now_is_workday:
648 clockout_difference = 5 * ceil(difference / timedelta(minutes = 5))
649 clockout_time = now.now + difference
650 exact_clockout_time = clockout_time
651 clockout_time += (5 - clockout_time.minute % 5) * timedelta(minutes = 1)
652 clockout_time = clockout_time.replace(second = 0, microsecond = 0)
653
654 return clockout_time, exact_clockout_time, clockout_difference
655
656 clockout_time, exact_clockout_time, clockout_difference = round_clockout_time(difference)
657 clockout_time_pull_forward, exact_clockout_time_pull_forward, clockout_difference_pull_forward = round_clockout_time(difference_pull_forward)
658 clockout_pull_forward_sum, exact_clockout_pull_forward_sum, _ = round_clockout_time(now.time_to_work - now.time_worked + sum(now.pull_forward.values(), start=timedelta(milliseconds=0)))
659
660 if now.running_entry and clockout_time and (clockout_difference >= 0 or clockout_difference_pull_forward >= 0):
661 out_text = f"{difference_string}/{clockout_time:%H:%M}"
662 tooltip = f"{tooltip_timedelta(worked)}/{exact_clockout_time:%H:%M:%S}"
663
664 if clockout_pull_forward_sum >= clockout_time_pull_forward and clockout_time_pull_forward != clockout_time:
665 out_text += f"…{clockout_time_pull_forward:%H:%M}"
666 if exact_clockout_pull_forward_sum >= exact_clockout_time_pull_forward and exact_clockout_time_pull_forward != exact_clockout_time:
667 tooltip += f"…{exact_clockout_time_pull_forward:%H:%M:%S}"
668 else:
669 out_text = difference_string
612 else: 670 else:
613 print(worked) 671 out_text = str(worked)
672
673 if not now.now_is_workday:
674 out_text = f'({out_text})'
675
676 if waybar:
677 json.dump({"text": out_text, "class": out_class, "tooltip": tooltip}, stdout)
678 else:
679 print(out_text)
614 680
615def diff(now, **args): 681def diff(now, **args):
616 now = now.replace(hour = 0, minute = 0, second = 0, microsecond = 0) 682 now = now.replace(hour = 0, minute = 0, second = 0, microsecond = 0)
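With --waybar, both the worktime and time_worked commands now print a single JSON object to stdout instead of plain text; a Waybar custom module with "return-type": "json" picks up the text, class and tooltip fields. A sketch of the shape of that payload, with invented values:

import json
from sys import stdout

# Hypothetical payload in the shape produced above while a clock is running.
json.dump({"text": "1h35m/17:25",
           "class": "running",
           "tooltip": "6:25:12/17:23:47"}, stdout)
stdout.write("\n")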
@@ -798,18 +864,54 @@ def classification(classification_name, table, table_format, **args):
798def main(): 864def main():
799 def isotime(s): 865 def isotime(s):
800 return datetime.fromisoformat(s).replace(tzinfo=tzlocal()) 866 return datetime.fromisoformat(s).replace(tzinfo=tzlocal())
867 def duration_minutes(s):
868 return timedelta(minutes = float(s))
869
870 def set_default_subparser(self, name, args=None, positional_args=0):
871 """default subparser selection. Call after setup, just before parse_args()
872 name: is the name of the subparser to call by default
873 args: if set is the argument list handed to parse_args()
874
875 , tested with 2.7, 3.2, 3.3, 3.4
876 it works with 2.6 assuming argparse is installed
877 """
878 subparser_found = False
879 for arg in sys.argv[1:]:
880 if arg in ['-h', '--help']: # global help if no subparser
881 break
882 else:
883 for x in self._subparsers._actions:
884 if not isinstance(x, argparse._SubParsersAction):
885 continue
886 for sp_name in x._name_parser_map.keys():
887 if sp_name in sys.argv[1:]:
888 subparser_found = True
889 if not subparser_found:
890 # Insert the default in the last position before the global positional
891 # arguments; this implies no global options are specified after the
892 # first positional argument.
893 if args is None:
894 sys.argv.insert(len(sys.argv) - positional_args, name)
895 else:
896 args.insert(len(args) - positional_args, name)
897
898 argparse.ArgumentParser.set_default_subparser = set_default_subparser
801 899
802 config = Worktime.config() 900 config = Worktime.config()
803 901
804 parser = argparse.ArgumentParser(prog = "worktime", description = 'Track worktime using toggl API') 902 parser = argparse.ArgumentParser(prog = "worktime", description = 'Track worktime using Kimai API')
805 parser.add_argument('--time', dest = 'now', metavar = 'TIME', type = isotime, help = 'Time to calculate status for (default: current time)', default = datetime.now(tzlocal())) 903 parser.add_argument('--time', dest = 'now', metavar = 'TIME', type = isotime, help = 'Time to calculate status for (default: current time)', default = datetime.now(tzlocal()))
806 parser.add_argument('--start', dest = 'start_datetime', metavar = 'TIME', type = isotime, help = 'Time to calculate status from (default: None)', default = None) 904 parser.add_argument('--start', dest = 'start_datetime', metavar = 'TIME', type = isotime, help = 'Time to calculate status from (default: None)', default = None)
807 parser.add_argument('--no-running', dest = 'include_running', action = 'store_false') 905 parser.add_argument('--no-running', dest = 'include_running', action = 'store_false')
808 parser.add_argument('--no-force-day-to-work', dest = 'force_day_to_work', action = 'store_false') 906 parser.add_argument('--no-force-day-to-work', dest = 'force_day_to_work', action = 'store_false')
809 subparsers = parser.add_subparsers(help = 'Subcommands') 907 subparsers = parser.add_subparsers(help = 'Subcommands')
810 parser.set_defaults(cmd = worktime) 908 worktime_parser = subparsers.add_parser('time_worked', aliases = ['time', 'worked'])
811 time_worked_parser = subparsers.add_parser('time_worked', aliases = ['time', 'worked', 'today']) 909 worktime_parser.add_argument('--pull-forward-cutoff', dest = 'pull_forward_cutoff', metavar = 'MINUTES', type = duration_minutes, default = timedelta(minutes = 15))
910 worktime_parser.add_argument('--waybar', action='store_true')
911 worktime_parser.set_defaults(cmd = worktime)
912 time_worked_parser = subparsers.add_parser('today')
812 time_worked_parser.add_argument('--no-round', dest = 'do_round', action = 'store_false') 913 time_worked_parser.add_argument('--no-round', dest = 'do_round', action = 'store_false')
914 time_worked_parser.add_argument('--waybar', action='store_true')
813 time_worked_parser.set_defaults(cmd = time_worked) 915 time_worked_parser.set_defaults(cmd = time_worked)
814 diff_parser = subparsers.add_parser('diff') 916 diff_parser = subparsers.add_parser('diff')
815 diff_parser.set_defaults(cmd = diff) 917 diff_parser.set_defaults(cmd = diff)
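The set_default_subparser patch registered here lets an invocation without an explicit subcommand fall through to time_worked. The idea in isolation, as a simplified sketch (the check is cruder than the patch above, which walks the registered subparsers):

import argparse

parser = argparse.ArgumentParser(prog="worktime")
parser.add_argument("--time", default=None)
sub = parser.add_subparsers(dest="cmd")
sub.add_parser("time_worked")
sub.add_parser("diff")

# Hypothetical invocation with only a global option and no subcommand:
argv = ["--time", "2024-05-01T09:00"]
if not any(a in {"time_worked", "diff", "-h", "--help"} for a in argv):
    argv = argv + ["time_worked"]  # fall back to the default subcommand
print(parser.parse_args(argv).cmd)  # prints: time_worked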
@@ -827,9 +929,146 @@ def main():
827 classification_parser.add_argument('--table', action = 'store_true') 929 classification_parser.add_argument('--table', action = 'store_true')
828 classification_parser.add_argument('--table-format', dest='table_format', type=str, default='fancy_grid') 930 classification_parser.add_argument('--table-format', dest='table_format', type=str, default='fancy_grid')
829 classification_parser.set_defaults(cmd = partial(classification, classification_name=classification_name)) 931 classification_parser.set_defaults(cmd = partial(classification, classification_name=classification_name))
932 pull_forward_parser = subparsers.add_parser('pull-forward')
933 pull_forward_parser.set_defaults(cmd = pull_forward)
934 parser.set_default_subparser('time_worked')
830 args = parser.parse_args() 935 args = parser.parse_args()
831 936
832 args.cmd(**vars(args)) 937 args.cmd(**vars(args))
833 938
939async def ui_update_options(api, cache_path):
940 options = set()
941 sort_order = dict()
942 entry_iter = enumerate(api.get_recent_entries())
943 loop = asyncio.get_event_loop()
944 start = clock_gettime_ns(CLOCK_MONOTONIC)
945 while item := await loop.run_in_executor(None, next, entry_iter):
946 ix, entry = item
947 if len(options) >= 20 or ix >= 1000:
948 break
949 elif len(options) >= 3:
950 now = clock_gettime_ns(CLOCK_MONOTONIC)
951 if now - start >= 4000000000:
952 break
953
954 option = frozendict({
955 'tags': frozenset(entry['tags']),
956 'activity': frozendict({'id': entry['activity']['id'], 'name': entry['activity']['name']}),
957 'project': frozendict({'id': entry['project']['id'], 'customer': entry['project']['customer']['name'], 'name': entry['project']['name']}),
958 'description': entry['description'] if entry['description'] else None,
959 'billable': entry['billable'],
960 })
961 sort_value = isoparse(entry['begin'])
962 if option in sort_order:
963 sort_value = max(sort_value, sort_order[option])
964 sort_order[option] = sort_value
965 options.add(option)
966
967 options = list(sorted(options, key = lambda o: sort_order[o], reverse = True))
968
969 with AtomicWriter(cache_path, overwrite=True) as ch:
970 ch.write_text(jsonpickle.encode(options))
971
972 return options
973
974def ui_render_option(option):
975 res = ''
976 if option['description']:
977 res += '„{}“, '.format(option['description'])
978 res += option['activity']['name'] + ', '
979 res += option['project']['name']
980 if option['project']['customer'] not in option['project']['name']:
981 res += ' ({})'.format(option['project']['customer'])
982 if option['tags']:
983 res += ', {}'.format(' '.join(map(lambda t: '#{}'.format(t), option['tags'])))
984 if not option['billable']:
985 res += ', not billable'
986 return res
987
988async def ui_main():
989 cache_path = Path(BaseDirectory.save_cache_path('worktime-ui')) / 'options.json'
990 options = None
991 try:
992 with cache_path.open('r', encoding='utf-8') as ch:
993 options = jsonpickle.decode(ch.read())
994 except FileNotFoundError:
995 pass
996
997 config = Worktime.config()
998 api = KimaiAPI(
999 base_url=config.get("KIMAI", {}).get("BaseUrl", None),
1000 api_token=config.get("KIMAI", {}).get("ApiToken", None),
1001 clients=config.get("KIMAI", {}).get("Clients", None)
1002 )
1003 running_entry = api.get_running_entry()
1004
1005 async with asyncio.TaskGroup() as tg:
1006 update_options = tg.create_task(ui_update_options(api, cache_path))
1007 if not options:
1008 options = await update_options
1009
1010 read_fd, write_fd = os.pipe()
1011 w_pipe = open(write_fd, 'wb', 0)
1012 loop = asyncio.get_event_loop()
1013 w_transport, _ = await loop.connect_write_pipe(
1014 asyncio.Protocol,
1015 w_pipe,
1016 )
1017 r_pipe = open(read_fd, 'rb', 0)
1018
1019 proc = await asyncio.create_subprocess_exec(
1020 "fuzzel", "--dmenu", "--index", "--width=60",
1021 stdout = asyncio.subprocess.PIPE,
1022 stdin = r_pipe,
1023 )
1024
1025 with closing(w_transport) as t:
1026 if running_entry:
1027 t.write(b'Stop running timesheet\n')
1028 for option in options:
1029 t.write(ui_render_option(option).encode('utf-8') + b'\n')
1030
1031 stdout, _ = await proc.communicate()
1032 if proc.returncode != 0:
1033 return
1034 fuzzel_out = int(stdout.decode('utf-8'))
1035 if fuzzel_out < 0 or fuzzel_out >= len(options) + (1 if running_entry else 0):
1036 return
1037 elif running_entry and fuzzel_out == 0:
1038 api.stop_clock(running_entry['id'])
1039 await notify.Server('worktime').Notify("Stopped running timesheet").set_timeout(65000).show()
1040 else:
1041 if running_entry:
1042 fuzzel_out -= 1
1043 option = options[fuzzel_out]
1044 api.start_clock(
1045 project_id = option['project']['id'],
1046 activity_id = option['activity']['id'],
1047 description = option['description'],
1048 tags = option['tags'],
1049 billable = option['billable'],
1050 )
1051 await notify.Server('worktime').Notify("Timesheet started…").set_timeout(65000).show()
1052
1053
1054def ui():
1055 asyncio.run(ui_main())
1056
1057async def stop_main():
1058 config = Worktime.config()
1059 api = KimaiAPI(
1060 base_url=config.get("KIMAI", {}).get("BaseUrl", None),
1061 api_token=config.get("KIMAI", {}).get("ApiToken", None),
1062 clients=config.get("KIMAI", {}).get("Clients", None)
1063 )
1064 if running_entry := api.get_running_entry():
1065 api.stop_clock(running_entry['id'])
1066 await notify.Server('worktime').Notify("Stopped running timesheet").set_timeout(65000).show()
1067 else:
1068 await notify.Server('worktime').Notify("No timesheet currently running").set_timeout(65000).show()
1069
1070def stop():
1071 asyncio.run(stop_main())
1072
834if __name__ == "__main__": 1073if __name__ == "__main__":
835 sys.exit(main()) 1074 sys.exit(main())
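The ui entry point drives fuzzel in dmenu mode: candidate lines are written to its stdin and, because of --index, the selection comes back as a line number rather than the selected text. Stripped of the option caching and desktop notifications, the interaction reduces to roughly this sketch (it assumes fuzzel is on PATH; the menu entries are invented):

import asyncio


async def pick(lines):
    # Show the lines in fuzzel and return the chosen index,
    # or None if the menu was dismissed.
    proc = await asyncio.create_subprocess_exec(
        "fuzzel", "--dmenu", "--index",
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
    )
    out, _ = await proc.communicate(("\n".join(lines) + "\n").encode("utf-8"))
    if proc.returncode != 0:
        return None
    return int(out.decode("utf-8"))


# print(asyncio.run(pick(["Stop running timesheet", "Review, Project X (Example Client)"])))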