clear-skies 1.18.31__py3-none-any.whl → 1.19.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of clear-skies might be problematic.
- {clear_skies-1.18.31.dist-info → clear_skies-1.19.19.dist-info}/METADATA +10 -4
- {clear_skies-1.18.31.dist-info → clear_skies-1.19.19.dist-info}/RECORD +34 -24
- clearskies/backends/cursor_backend.py +28 -13
- clearskies/column_types/__init__.py +23 -0
- clearskies/column_types/audit.py +45 -15
- clearskies/column_types/belongs_to.py +9 -1
- clearskies/column_types/column.py +45 -2
- clearskies/column_types/created.py +13 -3
- clearskies/column_types/created_micro.py +26 -0
- clearskies/column_types/datetime.py +19 -3
- clearskies/column_types/datetime_micro.py +12 -0
- clearskies/column_types/email.py +1 -1
- clearskies/column_types/updated.py +13 -3
- clearskies/column_types/updated_micro.py +24 -0
- clearskies/contexts/test.py +3 -0
- clearskies/functional/string.py +7 -0
- clearskies/handlers/__init__.py +2 -0
- clearskies/handlers/get.py +8 -6
- clearskies/handlers/list.py +7 -5
- clearskies/handlers/simple_routing_route.py +2 -2
- clearskies/handlers/update.py +1 -0
- clearskies/handlers/write.py +1 -0
- clearskies/input_outputs/cli.py +1 -1
- clearskies/input_outputs/wsgi.py +1 -1
- clearskies/input_requirements/__init__.py +40 -2
- clearskies/input_requirements/after.py +36 -0
- clearskies/input_requirements/before.py +36 -0
- clearskies/input_requirements/in_the_future_at_least.py +19 -0
- clearskies/input_requirements/in_the_future_at_most.py +19 -0
- clearskies/input_requirements/in_the_past_at_least.py +19 -0
- clearskies/input_requirements/in_the_past_at_most.py +19 -0
- clearskies/input_requirements/time_delta.py +38 -0
- {clear_skies-1.18.31.dist-info → clear_skies-1.19.19.dist-info}/LICENSE +0 -0
- {clear_skies-1.18.31.dist-info → clear_skies-1.19.19.dist-info}/WHEEL +0 -0
{clear_skies-1.18.31.dist-info → clear_skies-1.19.19.dist-info}/METADATA CHANGED

@@ -1,22 +1,28 @@
 Metadata-Version: 2.1
 Name: clear-skies
-Version: 1.18.31
+Version: 1.19.19
 Summary: A framework for building backends in the cloud
 Home-page: https://github.com/cmancone/clearskies
 License: MIT
 Author: Conor Mancone
 Author-email: cmancone@gmail.com
-Requires-Python: >=3.
+Requires-Python: >=3.10,<4.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Provides-Extra: jwt
+Provides-Extra: mysql
+Provides-Extra: secrets
+Requires-Dist: akeyless (>=3.6.2,<4.0.0) ; extra == "secrets"
+Requires-Dist: akeyless-cloud-id (>=0.2.3,<0.3.0) ; extra == "secrets"
 Requires-Dist: dateparser (>=1.1.8,<2.0.0)
+Requires-Dist: jose (>=1.0.0,<2.0.0) ; extra == "jwt"
+Requires-Dist: pymysql (>=1.1.0,<2.0.0) ; extra == "mysql"
+Requires-Dist: requests (>=2.31.0,<3.0.0)
 Project-URL: Repository, https://github.com/cmancone/clearskies
 Description-Content-Type: text/markdown
 
{clear_skies-1.18.31.dist-info → clear_skies-1.19.19.dist-info}/RECORD CHANGED

@@ -46,7 +46,7 @@ clearskies/autodoc/schema/string.py,sha256=oxZPCxYYhWnNHdbtwD3QuniStbj8XbBBpDTFX
 clearskies/backends/__init__.py,sha256=3pvJw5dUnrqt7vyeBFjkHbycUEGkEpoYKxZoJwUVZO0,711
 clearskies/backends/api_backend.py,sha256=vcGYub4G3jLLstTDkMMnZkID9JQz-qHmKNapgde4gSc,8875
 clearskies/backends/backend.py,sha256=fkL-De0MUdzcS2JG_spSUQZIVL9oRFvaL6SP26JPpcI,7399
-clearskies/backends/cursor_backend.py,sha256=
+clearskies/backends/cursor_backend.py,sha256=VntlPS6z6bnZOC3XRJ-WFf5gK3pFUhH_qJpnZn8hl9U,11278
 clearskies/backends/example_backend.py,sha256=jVpv0LZpNUEJGko0XqioLkHmZHbCW6M4YyNvzKlZcDw,1413
 clearskies/backends/file_backend.py,sha256=tByQdOX1pf6r9-6vRDqOnQ8teRYo0bEWk589qrg598w,1752
 clearskies/backends/json_backend.py,sha256=uDBqkekQadBm0BMoCVuzSPRB-5SjMTCDSAbuIqqwkF8,180

@@ -54,20 +54,22 @@ clearskies/backends/memory_backend.py,sha256=6Ts_NtP9S_QisvpNcQKO0CUqhCRAuL3d5LZ
 clearskies/backends/restful_api_advanced_search_backend.py,sha256=uiR4SEKhLNmczYJEAkVMIdPWxQc4YWSp-_WzcSL7DEo,5480
 clearskies/backends/secrets_backend.py,sha256=4lzrgdL_O_pgCT5HknV2gotFgp9GzjQ5_2n0-4H4kvs,2204
 clearskies/binding_config.py,sha256=bF8LBNEgJacwKCqToAtDqN9hv5omzU7zt_4qB9KPtE0,457
-clearskies/column_types/__init__.py,sha256=
-clearskies/column_types/audit.py,sha256=
-clearskies/column_types/belongs_to.py,sha256=
+clearskies/column_types/__init__.py,sha256=QHEFFd3wRXvaOR7uZk-bpVLlLoTiN3D4S8sBU9rFwCE,4330
+clearskies/column_types/audit.py,sha256=smgAnDzNS5n3_1ESeyqn6KDv_1k6gOYRqrFb8S2_TrQ,9143
+clearskies/column_types/belongs_to.py,sha256=tH1tbTOfjifSNuVjO-KbMF7GiUIoLfcDItrrS3TGGM8,11044
 clearskies/column_types/boolean.py,sha256=1yyM1CUfgD84pPE65c1OP1Qjf_J0Z45hjPrDR51AUkQ,1878
 clearskies/column_types/category_tree.py,sha256=PgNmzZPyqYS5NADH_QTCxLvDXZFxzv5ESKTkvPrrLXo,9140
-clearskies/column_types/column.py,sha256=
-clearskies/column_types/created.py,sha256=
+clearskies/column_types/column.py,sha256=ftuDFswjk-KE9Frxo1rhgkjr4sjSjnUc5ZtfNrnGLIc,15530
+clearskies/column_types/created.py,sha256=S78e1VuvvUPYiI79gWGqlPzMSLIbXMpaYQMw8as7l38,615
 clearskies/column_types/created_by_authorization_data.py,sha256=--1w1TOSo2CMwrpn6Y_iorl2RTqLgG8MbR8k27qreew,1108
 clearskies/column_types/created_by_header.py,sha256=5sY6vjtra-BrAgUE4zkuLhBtzCVWHNtJPwMozPw_7_s,520
 clearskies/column_types/created_by_ip.py,sha256=wwCUoEwHEVGN89x4xP7NJ6QR85Aum6v3JmxofoQrqtg,395
 clearskies/column_types/created_by_routing_data.py,sha256=EhVorRaGV2OhEb0YSPwPmrsK2NQycYgGEd4ab8-qI2I,569
 clearskies/column_types/created_by_user_agent.py,sha256=sSYDRrqSjsCwcYlhF_s9NO-iDww3PaH6aO2ATp_SKGQ,419
-clearskies/column_types/
-clearskies/column_types/
+clearskies/column_types/created_micro.py,sha256=EqeHB3ZHbi4nCa1rbrKy1TbooT9U3Kfw5z4jUvLi32U,636
+clearskies/column_types/datetime.py,sha256=OEy5eMfqWp6KOqEHQa3Ga_tl0oawtcMRBafI1SPYDFk,4275
+clearskies/column_types/datetime_micro.py,sha256=3DxtkeJZhWigdni7QEJFV3BJuCmXJMyFQjDZ9jyI144,328
+clearskies/column_types/email.py,sha256=qq0Yo_C3KxUqT68q2HWXocBBR4xwMqjxcIdgZRv218U,584
 clearskies/column_types/float.py,sha256=j8jJeBueSOusPtAFCWgLHYBncfLnqT1U7bh1zcAkYiA,1332
 clearskies/column_types/has_many.py,sha256=Z4oM1g2dQx6H9171c52FLC41nLryCOKmh51I75mYmmY,5895
 clearskies/column_types/integer.py,sha256=dGIluusPmhLRNg7PplOJLbQI2AXojqRBUHt8ekYWNVI,1386

@@ -76,7 +78,8 @@ clearskies/column_types/many_to_many.py,sha256=Yu5wmMkVqRteMrH_8uSZK-bM_IrfDyv6A
 clearskies/column_types/many_to_many_with_data.py,sha256=NOYjZedeLIWVyDV4BTRyNmhkNHZzx6SkHLFbL-VqHQU,7769
 clearskies/column_types/select.py,sha256=1oBslTJ_7QCjlFeEcwJVRL-ED4sXwCESVFRAOonvG2I,297
 clearskies/column_types/string.py,sha256=XbHC31TmlW0k86cvdVJBDyowU8Xis6Te6R0rPLXgLpI,863
-clearskies/column_types/updated.py,sha256=
+clearskies/column_types/updated.py,sha256=FNyRK-PS6lgFQ9QN2VrcIZP2g5EHGhll6pf-MgqMH7s,566
+clearskies/column_types/updated_micro.py,sha256=3geqsV4nsPB9xDKyvQDGniU-KMqM8WvoqHfUYcBqGJY,587
 clearskies/column_types/uuid.py,sha256=pA6Cd-1QSRuUpz0PxWAGRdG01hW7bpIicOhhJvAvDkQ,711
 clearskies/columns.py,sha256=3PgLopL1l20vVWaUqOJkwXMr06a-gFHEsbExfOWVB4I,5925
 clearskies/condition_parser.py,sha256=IXl51-rDxws7-nAtTVu_m6gTegtpkDnoGAFX2MTPDIo,6589

@@ -87,7 +90,7 @@ clearskies/contexts/cli.py,sha256=25GpL2PcuUiqesZx8_chBhyk3dqr4qpitp6xe6N4G7U,11
 clearskies/contexts/context.py,sha256=0ufURXBDPaoYVuhU-IB8VmHABQlgiQYo5TgUZeXv5to,2589
 clearskies/contexts/convert_to_application.py,sha256=5Y3eocNomUQYVX9kcLdf9vc-hItw1R1ns59bz5P3dbU,9602
 clearskies/contexts/extract_handler.py,sha256=lmWACrV5KbE9vPHPYyg1vz1bWcLwt8WHtgHHL-lXB_k,1896
-clearskies/contexts/test.py,sha256=
+clearskies/contexts/test.py,sha256=BEK8ltTGH4sB8oLcBzv-uqzaTEzXzGVVU7qrLGUicwg,3194
 clearskies/contexts/wsgi.py,sha256=6b69s3aHj3ZlL47LEulV-UveZprVXApZgFiN9qkqTxU,834
 clearskies/decorators/__init__.py,sha256=bv515ELZ83cG2MUtOmp7nT9pQxrrBVEkLPKUDYppBbY,936
 clearskies/decorators/auth0_jwks.py,sha256=tIVAJhIfW9h_2yv4BAqW2eVMb1AK4AlKylzBc2aKDBc,681

@@ -119,9 +122,9 @@ clearskies/di/test_module/another_module/__init__.py,sha256=8SRmHPDepLKGWTUSc1uc
 clearskies/di/test_module/module_class.py,sha256=I_-wnMuHfbsvti-7d2Z4bXnr6deo__uvww9nds9qrlE,46
 clearskies/environment.py,sha256=n2IMpNMakHJ7KJJ7sEQlxd4jfzAzqejxcTKxElyo1Us,3591
 clearskies/functional/__init__.py,sha256=em6xhpGOws_egf6jWfeQlxq0Vm5gYzsbMNt_rxTZzqk,95
-clearskies/functional/string.py,sha256=
+clearskies/functional/string.py,sha256=HHXou_lyjntX7SMi0-hfe91Kk3grxQnU-7oi4Lon35Q,3015
 clearskies/functional/validations.py,sha256=f1fTQ4rdFZouxoovAPg-YAgf0Q0QNpKEzxWWL7EFUHI,645
-clearskies/handlers/__init__.py,sha256=
+clearskies/handlers/__init__.py,sha256=9tH0zk4g7Mt22opD1NlynqXwwMX2DHzLmAVihZsJsfU,1011
 clearskies/handlers/advanced_search.py,sha256=UbWDntGAGD5NM61yKEws5MfG0Xy3K_fn-i-X9FG-mKs,13746
 clearskies/handlers/base.py,sha256=veR6tLPS0T9dOB9cilb6b3ausgsu5bcqEqLkMuNjJvg,22428
 clearskies/handlers/callable.py,sha256=2pVMwllttNui4R5vlLpufPehAPL_ksh-5eBrUprNv8U,8113

@@ -135,32 +138,39 @@ clearskies/handlers/exceptions/authorization.py,sha256=14JuU5zLEf2sv82VNxsJt7dGS
 clearskies/handlers/exceptions/client_error.py,sha256=o1OGnjlXT5zZ1Vb2LTRPYIio0YAnXPpXDGuqSLQP9_Y,39
 clearskies/handlers/exceptions/input_error.py,sha256=kmEVv9LNIalLKD9GF-olsD39Z94abHGkB54NskOG6g4,136
 clearskies/handlers/exceptions/not_found.py,sha256=xThOXiMfyYBaI2bhfOhpVoZ_vdw1hU8A_HxHenPqHzo,96
-clearskies/handlers/get.py,sha256=
+clearskies/handlers/get.py,sha256=x9SogDd2ZvIYwv2UkNa_FUmmKMNd9XPadkbnBKbyTGo,6892
 clearskies/handlers/health_check.py,sha256=m6H3psUql-I7hZao3fsxjrZnCjFJZ1ROTF-41g8Syww,2078
 clearskies/handlers/input_processing.py,sha256=edtG6NQJZCte0Nq-j7SUMzp6vxgIIbLxeUGscWOALAs,3814
-clearskies/handlers/list.py,sha256=
+clearskies/handlers/list.py,sha256=RdhvcVA4ZgkdmOHl6iGBC-2jPhjYay0_kFHttmVoq9Q,25466
 clearskies/handlers/mygrations.py,sha256=4iKpJKooqgNtAURwMl_FgsXUt8OYOaG_TY1OV1llQxY,2759
 clearskies/handlers/request_method_routing.py,sha256=DgPEz3tgbaUkXHsOriPbIctfSf4Gm4NxfRdVulH91Kg,1636
 clearskies/handlers/restful_api.py,sha256=1rJ2REX1sTAdbqaRuCclP375agrho4zNNQx6hXGa4nQ,9258
 clearskies/handlers/routing.py,sha256=uWKWcEaiHVqfDebPkQVuG9AS8pOixW31wW0yIQ-25Aw,3079
 clearskies/handlers/schema_helper.py,sha256=62644USvFlZu_6tT7rb-k6t_5J3Q0uZsJwP5KREk_WM,4961
 clearskies/handlers/simple_routing.py,sha256=KpOSQK2_tTwrPblDmxAUpX-Fqts_Or_MLWGKOOkJnoo,9403
-clearskies/handlers/simple_routing_route.py,sha256=
+clearskies/handlers/simple_routing_route.py,sha256=3U3dixEKuf-Xo88RQGH_grm1x79-px6idt1-xaLUSiY,8953
 clearskies/handlers/simple_search.py,sha256=bSfq8rzdqkBj-dTGUBSZ1EkfjzUWHID7mKD2xEf8VzQ,6165
-clearskies/handlers/update.py,sha256=
-clearskies/handlers/write.py,sha256=
+clearskies/handlers/update.py,sha256=rx8HW87Pfh95e_9nEfKKnxfkh2HBlCUdYqVwljtXiJ8,4116
+clearskies/handlers/write.py,sha256=Gu1w1PQ1F7tlqCqALorMRek3UH6IkViPIO195dxPd8k,9372
 clearskies/input_outputs/__init__.py,sha256=mQWL-u41FRTrPGuHe8FhLmcHjAEaUxjFwUf7RgDcbAs,182
-clearskies/input_outputs/cli.py,sha256=
+clearskies/input_outputs/cli.py,sha256=F54wues9cNESE8FhQP3mDWrgrZyFouDIN-ZZ3-KJjks,6258
 clearskies/input_outputs/exceptions/__init__.py,sha256=bc5Tc1XBZnqA1fKbk7pk5hyx102vqx3sDE19E03xGk4,82
 clearskies/input_outputs/exceptions/cli_input_error.py,sha256=kOFU8aLTLmeTL_AKDshxMu8_ufildg6p8ndhE1xHfb0,41
 clearskies/input_outputs/exceptions/cli_not_found.py,sha256=JBBuZA9ZwdkPhd3a0qaGgEPQrxh1fehy4R3ZaV2gWXU,39
 clearskies/input_outputs/input_output.py,sha256=vYKn9SE5erS4LuOhhAsXqaOEsGXwZ1NJ4v85KN1Xg6A,4501
-clearskies/input_outputs/wsgi.py,sha256=
-clearskies/input_requirements/__init__.py,sha256=
+clearskies/input_outputs/wsgi.py,sha256=9p82eJP5FUAI6jbIojvydG3_9gncX7vcUACQMyRN9x4,3142
+clearskies/input_requirements/__init__.py,sha256=vIqm8KDiomQSxT7WjogFi1yOOrsVrNMb5GKlfw5USNk,1836
+clearskies/input_requirements/after.py,sha256=TXy8bIVz_77a8oJuohPwoM5E--AOVWsOSjjh5PpA2Ys,1544
+clearskies/input_requirements/before.py,sha256=iLg-Hub9iW2eP19s5fkPrA9TCT_-DTm5KoxgA8ho0-k,1547
+clearskies/input_requirements/in_the_future_at_least.py,sha256=PLVp_2Yv-1ScKnajlc9hjG7qYZhadKXHNTkRP7WKUdo,739
+clearskies/input_requirements/in_the_future_at_most.py,sha256=L5Oz47KAHv5WL6Nu2vGkql6q8Ha9IKJMj-uxQkyuIdc,737
+clearskies/input_requirements/in_the_past_at_least.py,sha256=ES0SgtADHcu7HZDdvIFyD9vpYTnrd4hBi03OShqUZg4,735
+clearskies/input_requirements/in_the_past_at_most.py,sha256=hvhn_K1X4f7pbpFhjavR6Mu48JhhYcEIUx44YWfa_7E,733
 clearskies/input_requirements/maximum_length.py,sha256=7hdGVq914BtZQwiGOLOU-t5QTDNQtCPT4TOnYTUo3Wo,689
 clearskies/input_requirements/minimum_length.py,sha256=qSpLjNBu6AKRoBZi3jvCMYKsHwsknbkUk86C8CMIOEU,987
 clearskies/input_requirements/required.py,sha256=luYP527YPkQIVNVPhnNztOI0UxO67gNqn3FiLBId1YE,1133
 clearskies/input_requirements/requirement.py,sha256=5wUywAvbEQPh9tpfwWX3gdi4dwI-Xs9ePyC30qvwPaQ,584
+clearskies/input_requirements/time_delta.py,sha256=lqajxGEp2zZB_Rk-dG8eWgpljbuph1yqZMuylYRYJKs,1247
 clearskies/input_requirements/unique.py,sha256=gpbm9uoXcy8WCHsuWqAotwockbjDfJOWitIbK_3ngN0,777
 clearskies/mocks/__init__.py,sha256=T68OUB9gGCX0WoisGzsY3Bt2cCFX7ILHKPqi6XKTJM0,113
 clearskies/mocks/input_output.py,sha256=2wD5GbUyVSkXcBg1GTZ-Oz9VzcYxNHfTlmZAODW-7CI,3898

@@ -189,7 +199,7 @@ clearskies/tests/simple_api/models/__init__.py,sha256=nUA0W6fgXw_Bxa9CudkaDkC80t
 clearskies/tests/simple_api/models/status.py,sha256=PEhPbaQh5qdUNHp8O0gz91LOLENAEBtqSaHxUPXchaM,699
 clearskies/tests/simple_api/models/user.py,sha256=5_P4Tp1tTdX7PkMJ__epPM5MA7JAeVYGas69vcWloLc,819
 clearskies/tests/simple_api/users_api.py,sha256=KYXCgEofDxHeRdQK67txN5oYUPvxxmB8JTku7L-apk4,2344
-clear_skies-1.
-clear_skies-1.
-clear_skies-1.
-clear_skies-1.
+clear_skies-1.19.19.dist-info/LICENSE,sha256=3Ehd0g3YOpCj8sqj0Xjq5qbOtjjgk9qzhhD9YjRQgOA,1053
+clear_skies-1.19.19.dist-info/METADATA,sha256=tHoiNbV-s9t99A7Ta16RB7VYyaFfEKG95uXBWGc1LNE,1622
+clear_skies-1.19.19.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
+clear_skies-1.19.19.dist-info/RECORD,,
clearskies/backends/cursor_backend.py

@@ -138,9 +138,22 @@ class CursorBackend(Backend):
             next_page_data["start"] = int(start) + int(limit)
         return records
 
+    def group_by_clause(self, group_by):
+        if not group_by:
+            return ""
+        escape = self._column_escape_character()
+        if "." not in group_by:
+            return f" GROUP BY {escape}{group_by}{escape}"
+        parts = group_by.split(".", 1)
+        table = parts[0]
+        column = parts[1]
+        return f" GROUP BY {escape}{table}{escape}.{escape}{column}{escape}"
+
     def as_sql(self, configuration):
         escape = self._column_escape_character()
-        [wheres, parameters] = self._conditions_as_wheres_and_parameters(
+        [wheres, parameters] = self._conditions_as_wheres_and_parameters(
+            configuration["wheres"], configuration["table_name"]
+        )
         select_parts = []
         if configuration["select_all"]:
             select_parts.append(self._finalize_table_name(configuration["table_name"]) + ".*")

@@ -162,11 +175,7 @@ class CursorBackend(Backend):
             order_by = " ORDER BY " + ", ".join(sort_parts)
         else:
             order_by = ""
-        group_by = (
-            f" GROUP BY {escape}" + configuration["group_by_column"] + escape
-            if configuration["group_by_column"]
-            else ""
-        )
+        group_by = self.group_by_clause(configuration["group_by_column"])
         limit = ""
         if configuration["limit"]:
             start = 0

@@ -183,7 +192,9 @@ class CursorBackend(Backend):
     def as_count_sql(self, configuration):
         escape = self._column_escape_character()
         # note that this won't work if we start including a HAVING clause
-        [wheres, parameters] = self._conditions_as_wheres_and_parameters(
+        [wheres, parameters] = self._conditions_as_wheres_and_parameters(
+            configuration["wheres"], configuration["table_name"]
+        )
         # we also don't currently support parameters in the join clause - I'll probably need that though
         if configuration["joins"]:
             # We can ignore left joins because they don't change the count

@@ -195,10 +206,13 @@ class CursorBackend(Backend):
         if not configuration["group_by_column"]:
             query = f"SELECT COUNT(*) AS count FROM {table_name}{joins}{wheres}"
         else:
-            [1 line removed - content not shown]
+            group_by = self.group_by_clause(configuration["group_by_column"])
+            query = (
+                f"SELECT COUNT(*) AS count FROM (SELECT 1 FROM {table_name}{joins}{wheres}{group_by}) AS count_inner"
+            )
         return [query, parameters]
 
-    def _conditions_as_wheres_and_parameters(self, conditions):
+    def _conditions_as_wheres_and_parameters(self, conditions, default_table_name):
         if not conditions:
             return ["", []]

@@ -206,16 +220,17 @@ class CursorBackend(Backend):
         where_parts = []
         for condition in conditions:
             parameters.extend(condition["values"])
-            table = condition.get("table",
+            table = condition.get("table", default_table_name)
+            if not table:
+                table = default_table_name
             column = condition["column"]
-            column_with_table = f"{table}.{column}"
+            column_with_table = f"{table}.{column}"
             where_parts.append(
                 self.condition_parser._with_placeholders(
                     column_with_table,
                     condition["operator"],
                     condition["values"],
-                    escape=False
-                    escape_character=self._column_escape_character(),
+                    escape=False,
                 )
             )
         return [" WHERE " + " AND ".join(where_parts), parameters]
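The new group_by_clause helper escapes table-qualified group-by columns piece by piece instead of wrapping the whole string in a single pair of escape characters. A minimal sketch of the resulting SQL, assuming the backend's escape character is a backtick (the actual character comes from _column_escape_character()):

    # Hypothetical illustration of group_by_clause() output, assuming a backtick escape character:
    backend.group_by_clause("")            # ""  (no clause)
    backend.group_by_clause("type")        # " GROUP BY `type`"
    backend.group_by_clause("users.type")  # " GROUP BY `users`.`type`"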
clearskies/column_types/__init__.py

@@ -9,7 +9,9 @@ from .created_by_header import CreatedByHeader
 from .created_by_ip import CreatedByIp
 from .created_by_routing_data import CreatedByRoutingData
 from .created_by_user_agent import CreatedByUserAgent
+from .created_micro import CreatedMicro
 from .datetime import DateTime
+from .datetime_micro import DateTimeMicro
 from .email import Email
 from .float import Float
 from .has_many import HasMany

@@ -20,6 +22,7 @@ from .many_to_many_with_data import ManyToManyWithData
 from .select import Select
 from .string import String
 from .updated import Updated
+from .updated_micro import UpdatedMicro
 from .uuid import UUID
 
 

@@ -67,10 +70,18 @@ def created_by_user_agent(name, **kwargs):
     return build_column_config(name, CreatedByUserAgent, **kwargs)
 
 
+def created_micro(name, **kwargs):
+    return build_column_config(name, CreatedMicro, **kwargs)
+
+
 def datetime(name, **kwargs):
     return build_column_config(name, DateTime, **kwargs)
 
 
+def datetime_micro(name, **kwargs):
+    return build_column_config(name, DateTimeMicro, **kwargs)
+
+
 def email(name, **kwargs):
     return build_column_config(name, Email, **kwargs)
 

@@ -111,6 +122,10 @@ def updated(name, **kwargs):
     return build_column_config(name, Updated, **kwargs)
 
 
+def updated_micro(name, **kwargs):
+    return build_column_config(name, UpdatedMicro, **kwargs)
+
+
 def uuid(name, **kwargs):
     return build_column_config(name, UUID, **kwargs)
 

@@ -127,15 +142,21 @@ __all__ = [
     "CategoryTree",
     "Column",
     "created",
+    "created_micro",
     "Created",
+    "CreatdMicro",
     "created_by_authorization_data",
     "CreatedByAuthorizationData",
     "created_by_ip",
     "CreatedByIp",
     "created_by_user_agent",
     "CreatedByUserAgent",
+    "CreatedMicro",
+    "created_micro",
     "datetime",
+    "datetime_micro",
     "DateTime",
+    "DateTimeMicro",
     "email",
     "Email",
     "float",

@@ -155,7 +176,9 @@ __all__ = [
     "string",
     "String",
     "updated",
+    "updated_micro",
     "Updated",
+    "UpdatedMicro",
     "uuid",
     "UUID",
 ]
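The new created_micro, updated_micro, and datetime_micro builders follow the same pattern as the existing created/updated/datetime helpers. A rough sketch of how they might appear in a model's column configuration; the Task model and the columns_configuration() hook are illustrative assumptions, not part of this diff:

    from collections import OrderedDict
    import clearskies

    class Task(clearskies.Model):  # hypothetical model, for illustration only
        def columns_configuration(self):
            return OrderedDict([
                clearskies.column_types.string("name"),
                clearskies.column_types.datetime_micro("due_date"),   # microsecond-precision datetime
                clearskies.column_types.created_micro("created_at"),  # set once, when the record is created
                clearskies.column_types.updated_micro("updated_at"),  # refreshed on every save
            ])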
clearskies/column_types/audit.py CHANGED

@@ -29,6 +29,9 @@ class Audit(has_many.HasMany):
     With `exclude_columns` you can specify some names of columns to ignore. If an update happens and only columns
     in `exclude_columns` are being set, then a history entry will not be created. Also, these columns will
     not be included in the audit record.
+
+    With `mask_columns` you can specify the names of columns which should be noted as updated in the audit record,
+    but the actual values (before and after) should not be recorded.
     """
 
     _parent_columns = None

@@ -40,6 +43,7 @@ class Audit(has_many.HasMany):
     my_configs = [
         "child_models_class",
         "exclude_columns",
+        "mask_columns",
         "foreign_column_name",
         "is_readable",
         "readable_child_columns",

@@ -65,6 +69,7 @@ class Audit(has_many.HasMany):
         "readable_child_columns": ["resource_id", "action", "data", "created_at"],
         "parent_class_name": model_class.__name__,
         "exclude_columns": configuration.get("exclude_columns", []),
+        "mask_columns": configuration.get("mask_columns", []),
     }
     super().configure(name, has_many_configuration, model_class)

@@ -92,21 +97,21 @@ class Audit(has_many.HasMany):
                 + " but it has something else"
             )
 
-        [7 lines removed - content not shown]
+        for config_name in ["exclude_columns", "mask_columns"]:
+            if config_name not in configuration:
+                continue
+
+            config_columns = configuration[config_name]
+            if not hasattr(config_columns, "__iter__"):
+                raise ValueError(f"{error_prefix} '{config_name}' should be an iterable with the list of column names.")
+            if isinstance(config_columns, str):
                 raise ValueError(
-                    f"{error_prefix} '
-                    + "with the list of child columns to output."
+                    f"{error_prefix} '{config_name}' should be an iterable " + "with a list of column names."
                 )
-            for column_name in
+            for column_name in config_columns:
                 if column_name not in parent_columns:
                     raise ValueError(
-                        f"{error_prefix} '
+                        f"{error_prefix} '{config_name}' references column named '{column_name}' but this"
                         + " column does not exist in the original model class."
                     )

@@ -118,6 +123,7 @@ class Audit(has_many.HasMany):
         old_data = model._previous_data
         new_data = model._data
         exclude_columns = self.config("exclude_columns")
+        mask_columns = self.config("mask_columns")
         parent_columns = self.parent_columns
 
         if not old_data:

@@ -129,6 +135,8 @@ class Audit(has_many.HasMany):
                 **create_data,
                 **parent_columns[key].to_json(model),
             }
+            if key in mask_columns and key in create_data:
+                create_data[key] = "****"
         self.record(model, "create", data=create_data)
         return

@@ -152,6 +160,9 @@ class Audit(has_many.HasMany):
                 **to_data,
                 **parent_columns[column].to_json(model),
             }
+            if column in mask_columns and column in to_data:
+                to_data[column] = "****"
+                from_data[column] = "****"
         if not from_data and not to_data:
             return

@@ -168,15 +179,28 @@ class Audit(has_many.HasMany):
         super().post_delete(model)
         exclude_columns = self.config("exclude_columns")
         parent_columns = self.parent_columns
+        mask_columns = self.config("mask_columns")
+
+        final_data = {}
+        for key in model._data.keys():
+            if key in exclude_columns:
+                continue
+            final_data = {
+                **final_data,
+                **parent_columns[key].to_json(model),
+            }
+
+        for key in mask_columns:
+            if key not in final_data:
+                continue
+            final_data[key] = "****"
 
         self.child_models.create(
             {
                 "class": self.config("parent_class_name"),
                 "resource_id": model.get(self.config("parent_id_column_name")),
                 "action": "delete",
-                "data": 
-                    key: parent_columns[key].to_json(model) for key in model._data.keys() if key not in exclude_columns
-                },
+                "data": final_data,
             }
         )

@@ -186,7 +210,7 @@ class Audit(has_many.HasMany):
         self._parent_columns = self.di.build(self.model_class, cache=True).columns()
         return self._parent_columns
 
-    def record(self, model, action, data=None):
+    def record(self, model, action, data=None, record_data=None):
         audit_data = {
             "class": self.config("parent_class_name"),
             "resource_id": model.get(self.config("parent_id_column_name")),

@@ -194,6 +218,12 @@ class Audit(has_many.HasMany):
         }
         if data is not None:
             audit_data["data"] = data
+        if record_data is not None:
+            audit_data = {
+                **audit_data,
+                **record_data,
+            }
+
         self.child_models.create(audit_data)
 
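A sketch of how the new mask_columns option might sit next to exclude_columns in an audit column configuration; the model and column names here are illustrative:

    clearskies.column_types.audit(
        "audit",
        child_models_class=AuditRecord,        # hypothetical audit-record model
        exclude_columns=["last_login_at"],     # changes to these produce no audit entry at all
        mask_columns=["password", "api_key"],  # recorded as changed, but values are stored as "****"
    )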
clearskies/column_types/belongs_to.py

@@ -98,7 +98,15 @@ class BelongsTo(String):
             return integer_check
         parent_models = self.parent_models
         id_column_name = parent_models.get_id_column_name()
-        [1 line removed - content not shown]
+        matching_parents = parent_models.where(f"{id_column_name}={value}")
+        input_output = self.di.build("input_output", cache=True)
+        matching_parents = matching_parents.where_for_request(
+            matching_parents,
+            input_output.routing_data(),
+            input_output.get_authorization_data(),
+            input_output,
+        )
+        if not len(matching_parents):
             return f"Invalid selection for {self.name}: record does not exist"
         return ""
 
clearskies/column_types/column.py

@@ -19,6 +19,8 @@ class Column(ABC):
         "on_change",
         "default",
         "setable",
+        "created_by_source_type",
+        "created_by_source_key",
     ]
 
     def __init__(self, di):

@@ -91,12 +93,47 @@ class Column(ABC):
         if configuration.get("on_change"):
             self._check_actions(configuration.get("on_change"), "on_change")
 
+        self._check_created_by_source(configuration)
+
     def _finalize_configuration(self, configuration):
         """Make any changes to the configuration/fill in defaults"""
         if not "input_requirements" in configuration:
             configuration["input_requirements"] = []
         return configuration
 
+    def _check_created_by_source(self, configuration):
+        source_type = configuration.get("created_by_source_type")
+        source_key = configuration.get("created_by_source_key")
+        if not source_type and not source_key:
+            return
+
+        error_prefix = f"Misconfiguration for column '{self.name}' in '{self.model_class.__name__}': "
+        if not source_type or not source_key:
+            raise ValueError(
+                f"{error_prefix} must provide both 'created_by_source_type' and 'created_by_source_key' but only one was provided."
+            )
+
+        if not isinstance(source_type, str):
+            raise ValueError(
+                f"{error_prefix} 'created_by_source_type' must be a string but is a '"
+                + source_type.__class__.__name__
+                + "'"
+            )
+        if not isinstance(source_key, str):
+            raise ValueError(
+                f"{error_prefix} 'created_by_source_key' must be a string but is a '"
+                + source_key.__class__.__name__
+                + "'"
+            )
+
+        allowed_types = ["authorization_data"]
+        if source_type not in allowed_types:
+            raise ValueError(
+                f"{error_prefix} 'created_by_source_type' must be one of '" + "', '".join(allowed_types) + "'"
+            )
+        if configuration.get("setable"):
+            raise ValueError(f"{error_prefix} you cannot set both 'setable' and 'created_by_source_type'")
+
     def _check_actions(self, actions, trigger_name):
         """Check that the given actions are actually understandable by the system"""
         if type(actions) != list:

@@ -189,14 +226,20 @@ class Column(ABC):
         The difference between this and post_save is that this happens before the database is updated.
         As a result, if you need the model id to make your changes, it has to happen in post_save, not pre_save
         """
-        if not model.exists
-        [1 line removed - content not shown]
+        if not model.exists:
+            source_type = self.configuration.get("created_by_source_type")
+            if source_type:
+                if source_type == "authorization_data":
+                    authorization_data = self.di.build("input_output", cache=True).get_authorization_data()
+                    data[self.name] = authorization_data.get(self.config("created_by_source_key"), "N/A")
         if "setable" in self.configuration:
             setable = self.configuration["setable"]
             if callable(setable):
                 data[self.name] = self.di.call_function(setable, data=data, model=model)
             else:
                 data[self.name] = setable
+        if not model.exists and "default" in self.configuration and self.name not in data:
+            data[self.name] = self.configuration["default"]
         return data
 
     def post_save(self, data, model, id):
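A sketch of the new created-by-source options on an ordinary column; per the validation above, both keys must be strings, "authorization_data" is currently the only allowed source type, and the options cannot be combined with 'setable'. The column and key names are illustrative:

    clearskies.column_types.string(
        "organization_id",
        created_by_source_type="authorization_data",  # copy the value out of the authorization data on create
        created_by_source_key="organization_id",      # key to read; pre_save falls back to "N/A" if missing
    )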
clearskies/column_types/created.py

@@ -2,9 +2,15 @@ from .datetime import DateTime
 
 
 class Created(DateTime):
-    [1 line removed - content not shown]
+    my_configs = [
+        "date_format",
+        "default_date",
+        "utc",
+    ]
+
+    def __init__(self, di, datetime):
         super().__init__(di)
-        self.
+        self.datetime = datetime
 
     @property
     def is_writeable(self):

@@ -13,4 +19,8 @@ class Created(DateTime):
     def pre_save(self, data, model):
         if model.exists:
             return data
-        [1 line removed - content not shown]
+        if self.config("utc", silent=True):
+            now = self.datetime.datetime.now(self.datetime.timezone.utc)
+        else:
+            now = self.datetime.datetime.now()
+        return {**data, self.name: now}
clearskies/column_types/created_micro.py

@@ -0,0 +1,26 @@
+from .datetime_micro import DateTimeMicro
+
+
+class CreatedMicro(DateTimeMicro):
+    my_configs = [
+        "date_format",
+        "default_date",
+        "utc",
+    ]
+
+    def __init__(self, di, datetime):
+        super().__init__(di)
+        self.datetime = datetime
+
+    @property
+    def is_writeable(self):
+        return False
+
+    def pre_save(self, data, model):
+        if model.exists:
+            return data
+        if self.config("utc", silent=True):
+            now = self.datetime.datetime.now(self.datetime.timezone.utc)
+        else:
+            now = self.datetime.datetime.now()
+        return {**data, self.name: now}
clearskies/column_types/datetime.py

@@ -6,12 +6,28 @@ from ..autodoc.schema import DateTime as AutoDocDateTime
 
 
 class DateTime(Column):
     _auto_doc_class = AutoDocDateTime
+    _date_format = "%Y-%m-%d %H:%M:%S"
+    _default_date = "0000-00-00 00:00:00"
+
+    my_configs = [
+        "date_format",
+        "default_date",
+    ]
 
     def __init__(self, di):
         super().__init__(di)
 
+    def _finalize_configuration(self, configuration):
+        return {
+            **{
+                "date_format": self._date_format,
+                "default_date": self._default_date,
+            },
+            **super()._finalize_configuration(configuration),
+        }
+
     def from_backend(self, value):
-        if not value or value == "
+        if not value or value == self.config("default_date"):
             date = None
         elif type(value) == str:
             date = dateparser.parse(value)

@@ -24,14 +40,14 @@ class DateTime(Column):
             return data
 
         # hopefully this is a Python datetime object in UTC timezone...
-        return {**data, **{self.name: data[self.name].strftime("
+        return {**data, **{self.name: data[self.name].strftime(self.config("date_format"))}}
 
     def to_json(self, model):
         datetime = model.get(self.name, silent=True)
         return {self.name: datetime.isoformat() if datetime else None}
 
     def build_condition(self, value, operator=None, column_prefix=""):
-        date = dateparser.parse(value).astimezone(timezone.utc).strftime("
+        date = dateparser.parse(value).astimezone(timezone.utc).strftime(self.config("date_format"))
         if not operator:
             operator = "="
         return f"{column_prefix}{self.name}{operator}{date}"
clearskies/column_types/datetime_micro.py

@@ -0,0 +1,12 @@
+from .datetime import DateTime
+from datetime import datetime, timezone
+import dateparser
+from ..autodoc.schema import DateTime as AutoDocDateTime
+
+
+class DateTimeMicro(DateTime):
+    _date_format = "%Y-%m-%d %H:%M:%S.%f"
+    _default_date = "0000-00-00 00:00:00.000000"
+
+    def __init__(self, di):
+        super().__init__(di)
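The only differences from DateTime are the class-level format and default-date constants, so values round-trip with microsecond precision. For example, with Python's strftime:

    from datetime import datetime

    moment = datetime(2023, 11, 5, 14, 30, 0, 123456)
    moment.strftime("%Y-%m-%d %H:%M:%S")     # '2023-11-05 14:30:00'         (DateTime default)
    moment.strftime("%Y-%m-%d %H:%M:%S.%f")  # '2023-11-05 14:30:00.123456'  (DateTimeMicro default)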
clearskies/column_types/email.py CHANGED

@@ -13,6 +13,6 @@ class Email(String):
         # don't check for an email if doing a fuzzy search, since we may be searching
         # for a partial email
             return ""
-        if re.search("^[
+        if re.search(r"^[^@\s]+@[^@]+\.[^@]+$", value):
             return ""
         return "Invalid email address"
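The tightened pattern is anchored and rejects whitespace and extra '@' signs. A quick check of the new expression on its own:

    import re

    pattern = r"^[^@\s]+@[^@]+\.[^@]+$"
    bool(re.search(pattern, "cmancone@gmail.com"))  # True
    bool(re.search(pattern, "not an email"))        # False
    bool(re.search(pattern, "two@@signs.example"))  # False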
clearskies/column_types/updated.py

@@ -2,13 +2,23 @@ from .datetime import DateTime
 
 
 class Updated(DateTime):
-    [1 line removed - content not shown]
+    my_configs = [
+        "date_format",
+        "default_date",
+        "utc",
+    ]
+
+    def __init__(self, di, datetime):
         super().__init__(di)
-        self.
+        self.datetime = datetime
 
     @property
     def is_writeable(self):
         return False
 
     def pre_save(self, data, model):
-        [1 line removed - content not shown]
+        if self.config("utc", silent=True):
+            now = self.datetime.datetime.now(self.datetime.timezone.utc)
+        else:
+            now = self.datetime.datetime.now()
+        return {**data, self.name: now}
clearskies/column_types/updated_micro.py

@@ -0,0 +1,24 @@
+from .datetime_micro import DateTimeMicro
+
+
+class UpdatedMicro(DateTimeMicro):
+    my_configs = [
+        "date_format",
+        "default_date",
+        "utc",
+    ]
+
+    def __init__(self, di, datetime):
+        super().__init__(di)
+        self.datetime = datetime
+
+    @property
+    def is_writeable(self):
+        return False
+
+    def pre_save(self, data, model):
+        if self.config("utc", silent=True):
+            now = self.datetime.datetime.now(self.datetime.timezone.utc)
+        else:
+            now = self.datetime.datetime.now()
+        return {**data, self.name: now}
clearskies/contexts/test.py CHANGED

@@ -39,6 +39,7 @@ class Test(Context):
         input_output=None,
         query_parameters=None,
         authorization_data=None,
+        context_specifics=None,
     ):
         if self.application is None:
             raise ValueError("Cannot call the test context without an application")

@@ -59,6 +60,8 @@ class Test(Context):
             input_output.set_query_parameters(query_parameters)
         if authorization_data is not None:
             input_output.set_authorization_data(authorization_data)
+        if context_specifics is not None:
+            input_output.set_context_specifics(context_specifics)
 
         self.handler = self.di.build(self.application.handler_class, cache=False)
         self.handler.configure(
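A rough sketch of exercising the new parameter from a test; the application object and the exact keyword-call style are assumptions for illustration, but context_specifics is simply forwarded to input_output.set_context_specifics():

    import clearskies

    test = clearskies.contexts.test(my_application)  # my_application is assumed to already exist
    result = test(
        query_parameters={"status": "open"},
        authorization_data={"user_id": "abc"},
        context_specifics={"event": {"source": "aws.events"}},
    )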
clearskies/functional/string.py CHANGED

@@ -1,4 +1,5 @@
 import re
+import datetime
 
 
 def camel_case_to_snake_case(string: str) -> str:

@@ -95,3 +96,9 @@ def make_plural(singular: str):
     if singular[-1] == "s":
         return singular + "es"
     return f"{singular}s"
+
+
+def datetime_to_iso(value):
+    if not isinstance(value, datetime.date) and not isinstance(value, datetime.datetime):
+        return value
+    return value.isoformat()
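datetime_to_iso is a pass-through for anything that is not a date or datetime, so it is safe to call on values that may already be serialized (import path assumed from the package layout):

    import datetime
    from clearskies.functional import string

    string.datetime_to_iso(datetime.datetime(2023, 11, 5, 14, 30))  # '2023-11-05T14:30:00'
    string.datetime_to_iso("2023-11-05T14:30:00")                   # returned unchanged
    string.datetime_to_iso(None)                                    # returned unchanged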
clearskies/handlers/__init__.py CHANGED

@@ -16,8 +16,10 @@ from .write import Write
 from .schema_helper import SchemaHelper
 from .simple_routing import SimpleRouting
 from .simple_search import SimpleSearch
+from . import exceptions
 
 __all__ = [
+    "exceptions",
     "AdvancedSearch",
     "Callable",
     "Create",
clearskies/handlers/get.py CHANGED

@@ -25,14 +25,16 @@ class Get(Base):
             return self.error(input_output, model, 404)
         return self.success(input_output, self._model_as_json(model, input_output))
 
-    def 
+    def get_model_id(self, input_output):
         routing_data = input_output.routing_data()
         if self.id_column_name in routing_data:
-        [5 lines removed - content not shown]
+            return routing_data[self.id_column_name]
+        if "id" in routing_data:
+            return routing_data["id"]
+        raise ValueError("I didn't receive the ID in my routing data. I am probably misconfigured.")
+
+    def fetch_model(self, input_output):
+        id = self.get_model_id(input_output)
         models = self._model.where(f"{self.id_column_name}={id}")
         for where in self.configuration("where"):
             if type(where) == str:
clearskies/handlers/list.py CHANGED

@@ -255,11 +255,13 @@ class List(Base):
 
         # common checks for group_by and default_sort_column
         for config_name in ["group_by", "default_sort_column"]:
-            [5 lines removed - content not shown]
+            value = configuration.get(config_name)
+            if not value:
+                continue
+            # we're being lazy for now and not checking complicated values
+            if "." in value:
+                continue
+            if value not in self._columns:
                 raise ValueError(
                     f"{error_prefix} '{config_name}' references column named {column_name} "
                     + f"but this column does not exist for model '{model_class_name}'"
clearskies/handlers/simple_routing_route.py

@@ -98,7 +98,7 @@ class SimpleRoutingRoute:
         to understand if there was no route match at all.
         """
         # if we're routing to a simple router then defer to it
-        incoming = f"Incoming request: [{request_method}] {full_path}. Check against route with url '{self._path}' "
+        incoming = f"Incoming request: [{request_method}] {full_path}. Check against route with url '{self._path}'. Results: "
         if not self._methods:
             incoming += " configured for any method except OPTIONS"
         elif isinstance(self._methods, str):

@@ -136,7 +136,7 @@ class SimpleRoutingRoute:
             return None
         # make sure we don't get confused by partial matches. `user` should match `user/` and `user/5`,
         # but it shouldn't match `users/`
-        if full_path_length > my_path_length and full_path[my_path_length] != "/":
+        if full_path_length > my_path_length and full_path[my_path_length] != "/" and my_path != "":
             logger.debug(f"{incoming} Not a match. I only partially matched the URL but not as a sub-directory.")
             return None
         logger.debug(f"{incoming} Match!")
clearskies/handlers/update.py CHANGED
clearskies/handlers/write.py CHANGED
clearskies/input_outputs/cli.py CHANGED

@@ -21,7 +21,7 @@ class CLI:
         self._request_method = None
         self._parse_args(self._sys.argv)
 
-    def respond(self, response, status_code):
+    def respond(self, response, status_code=200):
         if status_code == 404:
             raise exceptions.CLINotFound()
         if status_code != 200:
clearskies/input_outputs/wsgi.py CHANGED

@@ -21,7 +21,7 @@ class WSGI(InputOutput):
     def _from_environment(self, key):
         return self._environment[key] if key in self._environment else ""
 
-    def respond(self, body, status_code):
+    def respond(self, body, status_code=200):
         if not self.has_header("content-type"):
             self.set_header("content-type", "application/json; charset=UTF-8")
 
clearskies/input_requirements/__init__.py

@@ -1,16 +1,33 @@
+import datetime
+
+from .after import After
+from .before import Before
 from ..binding_config import BindingConfig
 from .minimum_length import MinimumLength
 from .maximum_length import MaximumLength
 from .required import Required
 from .requirement import Requirement
 from .unique import Unique
+from .in_the_future_at_least import InTheFutureAtLeast
+from .in_the_future_at_most import InTheFutureAtMost
+from .in_the_past_at_least import InThePastAtLeast
+from .in_the_past_at_most import InThePastAtMost
+from .time_delta import TimeDelta
+
+
+def after(other_column_name: str, allow_equal: bool = False):
+    return BindingConfig(After, other_column_name=other_column_name, allow_equal=allow_equal)
+
 
+def before(other_column_name: str, allow_equal: bool = False):
+    return BindingConfig(Before, other_column_name=other_column_name, allow_equal=allow_equal)
 
-[1 line removed - content not shown]
+
+def minimum_length(minimum_length: int):
     return BindingConfig(MinimumLength, minimum_length)
 
 
-def maximum_length(maximum_length):
+def maximum_length(maximum_length: int):
     return BindingConfig(MaximumLength, maximum_length)
 
 

@@ -22,9 +39,30 @@ def unique():
     return BindingConfig(Unique)
 
 
+def in_the_future_at_least(time_delta: datetime.timedelta):
+    return BindingConfig(InTheFutureAtLeast, time_delta)
+
+
+def in_the_future_at_most(time_delta: datetime.timedelta):
+    return BindingConfig(InTheFutureAtMost, time_delta)
+
+
+def in_the_past_at_least(time_delta: datetime.timedelta):
+    return BindingConfig(InThePastAtLeast, time_delta)
+
+
+def in_the_past_at_most(time_delta: datetime.timedelta):
+    return BindingConfig(InThePastAtMost, time_delta)
+
+
 __all__ = [
+    "in_the_future_at_least",
+    "in_the_future_at_most",
+    "in_the_past_at_least",
+    "in_the_past_at_most",
     "minimum_length",
     "maximum_length",
     "required",
+    "TimeDelta",
    "unique",
 ]
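A sketch of attaching the new time-based requirements to a datetime column via the existing input_requirements config; the column names are illustrative, and per TimeDelta.configure the arguments must be datetime.timedelta objects:

    import datetime
    from clearskies.column_types import datetime as datetime_column
    from clearskies.input_requirements import in_the_future_at_least, before

    datetime_column(
        "starts_at",
        input_requirements=[
            in_the_future_at_least(datetime.timedelta(hours=1)),  # must be at least one hour from now
            before("ends_at"),                                    # must fall before the ends_at column
        ],
    )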
clearskies/input_requirements/after.py

@@ -0,0 +1,36 @@
+from .requirement import Requirement
+import datetime
+import dateparser
+
+
+class After(Requirement):
+    def configure(self, other_column_name: str, allow_equal: bool = False):
+        self.other_column_name = other_column_name
+        self.allow_equal = allow_equal
+
+    def check(self, model, data):
+        # we won't check anything for missing values (columns should be required if that is an issue)
+        if not data.get(self.column_name):
+            return ""
+        my_value = data[self.column_name]
+        other_value = data.get(self.other_column_name, model.__getitem__(self.other_column_name))
+        # again, no checks for non-values
+        if not other_value:
+            return ""
+
+        my_value_as_date = dateparser.parse(data[self.column_name])
+        if not my_value_as_date:
+            return f"'{self.column_name}' was not a valid date."
+
+        if type(other_value) != str and type(other_value) != datetime.datetime:
+            return f"'{other_column_name}' was not a valid date."
+        other_value_as_date = dateparser.parse(other_value) if type(other_value) == str else other_value
+        if not other_value_as_date:
+            return f"'{self.other_column_name}' was not a valid date."
+
+        if my_value_as_date == other_value_as_date:
+            return "" if self.allow_equal else f"'{self.column_name}' must be after '{self.other_column_name}'"
+
+        if my_value_as_date < other_value_as_date:
+            return f"'{self.column_name}' must be after '{self.other_column_name}'"
+        return ""
clearskies/input_requirements/before.py

@@ -0,0 +1,36 @@
+from .requirement import Requirement
+import datetime
+import dateparser
+
+
+class Before(Requirement):
+    def configure(self, other_column_name: str, allow_equal: bool = False):
+        self.other_column_name = other_column_name
+        self.allow_equal = allow_equal
+
+    def check(self, model, data):
+        # we won't check anything for missing values (columns should be required if that is an issue)
+        if not data.get(self.column_name):
+            return ""
+        my_value = data[self.column_name]
+        other_value = data.get(self.other_column_name, model.__getitem__(self.other_column_name))
+        # again, no checks for non-values
+        if not other_value:
+            return ""
+
+        my_value_as_date = dateparser.parse(data[self.column_name])
+        if not my_value_as_date:
+            return f"'{self.column_name}' was not a valid date."
+
+        if type(other_value) != str and type(other_value) != datetime.datetime:
+            return f"'{other_column_name}' was not a valid date."
+        other_value_as_date = dateparser.parse(other_value) if type(other_value) == str else other_value
+        if not other_value_as_date:
+            return f"'{self.other_column_name}' was not a valid date."
+
+        if my_value_as_date == other_value_as_date:
+            return "" if self.allow_equal else f"'{self.column_name}' must be before '{self.other_column_name}'"
+
+        if my_value_as_date > other_value_as_date:
+            return f"'{self.column_name}' must be before '{self.other_column_name}'"
+        return ""
clearskies/input_requirements/in_the_future_at_least.py

@@ -0,0 +1,19 @@
+from .time_delta import TimeDelta
+import datetime
+import dateparser
+
+
+class InTheFutureAtLeast(TimeDelta):
+    def check(self, model, data):
+        if self.column_name not in data or not data[self.column_name]:
+            return ""
+        as_date = dateparser.parse(data[self.column_name])
+        if not as_date:
+            return f"'{self.column_name}' was not a valid date"
+        now = (
+            self.datetime.datetime.now() if not as_date.tzinfo else self.datetime.datetime.now(tz=datetime.timezone.utc)
+        )
+        if as_date < now + self.time_delta:
+            human_friendly = self.delta_human_friendly()
+            return f"'{self.column_name}' must be at least {human_friendly} in the future."
+        return ""
clearskies/input_requirements/in_the_future_at_most.py

@@ -0,0 +1,19 @@
+from .time_delta import TimeDelta
+import datetime
+import dateparser
+
+
+class InTheFutureAtMost(TimeDelta):
+    def check(self, model, data):
+        if self.column_name not in data or not data[self.column_name]:
+            return ""
+        as_date = dateparser.parse(data[self.column_name])
+        if not as_date:
+            return f"'{self.column_name}' was not a valid date"
+        now = (
+            self.datetime.datetime.now() if not as_date.tzinfo else self.datetime.datetime.now(tz=datetime.timezone.utc)
+        )
+        if as_date > now + self.time_delta:
+            human_friendly = self.delta_human_friendly()
+            return f"'{self.column_name}' must be at most {human_friendly} in the future."
+        return ""
clearskies/input_requirements/in_the_past_at_least.py

@@ -0,0 +1,19 @@
+from .time_delta import TimeDelta
+import datetime
+import dateparser
+
+
+class InThePastAtLeast(TimeDelta):
+    def check(self, model, data):
+        if self.column_name not in data or not data[self.column_name]:
+            return ""
+        as_date = dateparser.parse(data[self.column_name])
+        if not as_date:
+            return f"'{self.column_name}' was not a valid date"
+        now = (
+            self.datetime.datetime.now() if not as_date.tzinfo else self.datetime.datetime.now(tz=datetime.timezone.utc)
+        )
+        if as_date > now - self.time_delta:
+            human_friendly = self.delta_human_friendly()
+            return f"'{self.column_name}' must be at least {human_friendly} in the past."
+        return ""
clearskies/input_requirements/in_the_past_at_most.py

@@ -0,0 +1,19 @@
+from .time_delta import TimeDelta
+import datetime
+import dateparser
+
+
+class InThePastAtMost(TimeDelta):
+    def check(self, model, data):
+        if self.column_name not in data or not data[self.column_name]:
+            return ""
+        as_date = dateparser.parse(data[self.column_name])
+        if not as_date:
+            return f"'{self.column_name}' was not a valid date"
+        now = (
+            self.datetime.datetime.now() if not as_date.tzinfo else self.datetime.datetime.now(tz=datetime.timezone.utc)
+        )
+        if as_date < now - self.time_delta:
+            human_friendly = self.delta_human_friendly()
+            return f"'{self.column_name}' must be at most {human_friendly} in the past."
+        return ""
clearskies/input_requirements/time_delta.py

@@ -0,0 +1,38 @@
+from .requirement import Requirement
+import datetime
+from collections import OrderedDict
+
+
+class TimeDelta(Requirement):
+    time_delta = None
+
+    def __init__(self, datetime):
+        self.datetime = datetime
+
+    def configure(self, time_delta: datetime.timedelta):
+        if type(time_delta) != datetime.timedelta:
+            raise ValueError(
+                "The argument for all time-related input requirement classes is a datetime.timedelta object, but I received something else."
+            )
+        self.time_delta = time_delta
+        self.human_friendly = None
+
+    def delta_human_friendly(self):
+        remainder = int(self.time_delta.total_seconds())
+        parts = []
+        conversion = OrderedDict(
+            [
+                ("year", 31536000),
+                ("day", 86400),
+                ("hour", 3600),
+                ("minute", 60),
+                ("second", 1),
+            ]
+        )
+        for name, num_seconds in conversion.items():
+            if num_seconds > remainder:
+                continue
+            amount = int(remainder / num_seconds)
+            remainder -= amount * num_seconds
+            parts.append(f"{amount} {name}" + ("s" if amount != 1 else ""))
+        return ", ".join(parts)
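delta_human_friendly converts the configured timedelta into a years/days/hours/minutes/seconds phrase for the error messages above. Working the arithmetic for a sample delta:

    import datetime

    delta = datetime.timedelta(days=400, hours=5)
    int(delta.total_seconds())  # 34578000
    # 34578000 = 1 * 31536000 (year) + 35 * 86400 (day) + 5 * 3600 (hour)
    # so delta_human_friendly() would return "1 year, 35 days, 5 hours"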
{clear_skies-1.18.31.dist-info → clear_skies-1.19.19.dist-info}/LICENSE: file without changes
{clear_skies-1.18.31.dist-info → clear_skies-1.19.19.dist-info}/WHEEL: file without changes