arpakitlib 1.7.95__py3-none-any.whl → 1.7.97__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arpakitlib/_arpakit_project_template/src/db/const.py +0 -0
- arpakitlib/ar_base_worker_util.py +1 -1
- arpakitlib/ar_operation_execution_util.py +100 -116
- {arpakitlib-1.7.95.dist-info → arpakitlib-1.7.97.dist-info}/METADATA +1 -1
- {arpakitlib-1.7.95.dist-info → arpakitlib-1.7.97.dist-info}/RECORD +9 -8
- {arpakitlib-1.7.95.dist-info → arpakitlib-1.7.97.dist-info}/LICENSE +0 -0
- {arpakitlib-1.7.95.dist-info → arpakitlib-1.7.97.dist-info}/NOTICE +0 -0
- {arpakitlib-1.7.95.dist-info → arpakitlib-1.7.97.dist-info}/WHEEL +0 -0
- {arpakitlib-1.7.95.dist-info → arpakitlib-1.7.97.dist-info}/entry_points.txt +0 -0
arpakitlib/ar_base_worker_util.py

@@ -34,7 +34,7 @@ class BaseWorker(ABC):
         worker_name = self.__class__.__name__
         self.worker_name = worker_name
         self.worker_creation_dt = now_utc_dt()
-        self.worker_id = f"{str(uuid4()).replace('
+        self.worker_id = f"{str(uuid4()).replace('-', '')}_{randint(1000, 99999)}"
         self.worker_fullname = (
             f"{self.worker_name}_{self.worker_creation_dt.isoformat()}_{self.worker_id}"
         )
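The only code change in ar_base_worker_util.py is the worker_id format: a dash-free uuid4 string followed by a random integer suffix. A minimal sketch that reproduces the same id scheme; the make_worker_id helper name is illustrative and not part of the library:

```python
from random import randint
from uuid import uuid4


def make_worker_id() -> str:
    # Mirrors the new worker_id expression from BaseWorker.__init__:
    # uuid4 with dashes stripped, then "_" and a random integer.
    return f"{str(uuid4()).replace('-', '')}_{randint(1000, 99999)}"


print(make_worker_id())  # e.g. 1f6c0d7e9a4b4f35a3f0e2b7c8d9a1b2_48213
```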
arpakitlib/ar_operation_execution_util.py

@@ -145,7 +145,7 @@ class BaseOperationExecutor:
         self._logger = logging.getLogger(self.__class__.__name__)
         self.sql_alchemy_db = sqlalchemy_db

-    def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+    def sync_execute_operation(self, operation_dbm: OperationDBM, session: Session) -> OperationDBM:
         if operation_dbm.type == BaseOperationTypes.healthcheck_:
             self._logger.info("healthcheck")
         elif operation_dbm.type == BaseOperationTypes.raise_fake_exception_:

@@ -154,7 +154,7 @@ class BaseOperationExecutor:
         return operation_dbm

     def sync_safe_execute_operation(
-            self, operation_dbm: OperationDBM, worker: OperationExecutorWorker
+            self, operation_dbm: OperationDBM, worker: OperationExecutorWorker, session: Session
     ) -> OperationDBM:
         self._logger.info(
             f"start "

@@ -163,26 +163,21 @@ class BaseOperationExecutor:
             f"operation_dbm.status={operation_dbm.status}"
         )

-        [… 9 removed lines not rendered in the source …]
-                worker.worker_fullname: True
-            }
-        )
-        session.commit()
-        session.refresh(operation_dbm)
+        operation_dbm.execution_start_dt = now_utc_dt()
+        operation_dbm.status = OperationDBM.Statuses.executing
+        operation_dbm.output_data = combine_dicts(
+            operation_dbm.output_data,
+            {
+                worker.worker_fullname: True
+            }
+        )
+        session.commit()

         exception: BaseException | None = None
         traceback_str: str | None = None

         try:
-            self.sync_execute_operation(operation_dbm=operation_dbm)
+            self.sync_execute_operation(operation_dbm=operation_dbm, session=session)
         except BaseException as exception_:
             self._logger.error(
                 f"error in sync_execute_operation (id={operation_dbm.id}, type={operation_dbm.type})",

@@ -191,40 +186,34 @@ class BaseOperationExecutor:
             exception = exception_
             traceback_str = traceback.format_exc()

-        [… 2 removed lines not rendered in the source …]
-        operation_dbm
-        [… 1 removed line not rendered in the source …]
+        operation_dbm.execution_finish_dt = now_utc_dt()
+        if exception:
+            operation_dbm.status = OperationDBM.Statuses.executed_with_error
+            operation_dbm.error_data = combine_dicts(
+                {
+                    "exception_str": str(exception),
+                    "traceback_str": traceback_str
+                },
+                operation_dbm.error_data
             )
-        [… 12 removed lines not rendered in the source …]
+        else:
+            operation_dbm.status = OperationDBM.Statuses.executed_without_error
+        session.commit()
+
+        if exception:
+            story_log_dbm = StoryLogDBM(
+                level=StoryLogDBM.Levels.error,
+                title=f"error in sync_execute_operation (id={operation_dbm.id}, type={operation_dbm.type})",
+                data={
+                    "operation_id": operation_dbm.id,
+                    "exception_str": str(exception),
+                    "traceback_str": traceback_str
+                }
+            )
+            session.add(story_log_dbm)
         session.commit()

-
-        story_log_dbm = StoryLogDBM(
-            level=StoryLogDBM.Levels.error,
-            title=f"error in sync_execute_operation (id={operation_dbm.id}, type={operation_dbm.type})",
-            data={
-                "operation_id": operation_dbm.id,
-                "exception_str": str(exception),
-                "traceback_str": traceback_str
-            }
-        )
-        session.add(story_log_dbm)
-        session.commit()
-        session.refresh(story_log_dbm)
-
-        session.refresh(operation_dbm)
+        session.refresh(operation_dbm)

         self._logger.info(
             f"finish sync_safe_execute_operation, "
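The sync hunks above change the session handling: sync_safe_execute_operation no longer opens, commits, and refreshes its own session; the caller passes a SQLAlchemy Session, and the status transition, output_data/error_data updates, and the StoryLogDBM error record are committed on that session. A hedged sketch of calling the new signature, assuming the caller already holds an executor, a worker, and an OperationDBM row loaded on the session (import paths for those classes are not shown in this diff):

```python
from sqlalchemy.orm import Session


def run_one_operation(executor, worker, operation_dbm, session: Session):
    # Sketch only: `executor` is a BaseOperationExecutor, `worker` an
    # OperationExecutorWorker and `operation_dbm` an OperationDBM row loaded
    # on `session`. The executor now commits the executing -> executed_*
    # transition and any StoryLogDBM error row on this same session.
    return executor.sync_safe_execute_operation(
        operation_dbm=operation_dbm,
        worker=worker,
        session=session,
    )
```

The OperationExecutorWorker hunks further down show the library's own version of this pattern, with the session coming from self.sqlalchemy_db.new_session().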
@@ -236,7 +225,7 @@ class BaseOperationExecutor:

         return operation_dbm

-    async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+    async def async_execute_operation(self, operation_dbm: OperationDBM, session: Session) -> OperationDBM:
         if operation_dbm.type == BaseOperationTypes.healthcheck_:
             self._logger.info("healthcheck")
         elif operation_dbm.type == BaseOperationTypes.raise_fake_exception_:

@@ -245,7 +234,7 @@ class BaseOperationExecutor:
         return operation_dbm

     async def async_safe_execute_operation(
-            self, operation_dbm: OperationDBM, worker: OperationExecutorWorker
+            self, operation_dbm: OperationDBM, worker: OperationExecutorWorker, session: Session
     ) -> OperationDBM:
         self._logger.info(
             f"start "

@@ -254,26 +243,21 @@ class BaseOperationExecutor:
             f"operation_dbm.status={operation_dbm.status}"
         )

-        [… 9 removed lines not rendered in the source …]
-                worker.worker_fullname: True
-            }
-        )
-        session.commit()
-        session.refresh(operation_dbm)
+        operation_dbm.execution_start_dt = now_utc_dt()
+        operation_dbm.status = OperationDBM.Statuses.executing
+        operation_dbm.output_data = combine_dicts(
+            operation_dbm.output_data,
+            {
+                worker.worker_fullname: True
+            }
+        )
+        session.commit()

         exception: BaseException | None = None
         traceback_str: str | None = None

         try:
-            await self.async_execute_operation(operation_dbm=operation_dbm)
+            await self.async_execute_operation(operation_dbm=operation_dbm, session=session)
         except BaseException as exception_:
             self._logger.error(
                 f"error in async_execute_operation (id={operation_dbm.id}, type={operation_dbm.type})",

@@ -282,40 +266,34 @@ class BaseOperationExecutor:
             exception = exception_
             traceback_str = traceback.format_exc()

-        [… 2 removed lines not rendered in the source …]
-        operation_dbm
-        [… 1 removed line not rendered in the source …]
+        operation_dbm.execution_finish_dt = now_utc_dt()
+        if exception:
+            operation_dbm.status = OperationDBM.Statuses.executed_with_error
+            operation_dbm.error_data = combine_dicts(
+                {
+                    "exception_str": str(exception),
+                    "traceback_str": traceback_str
+                },
+                operation_dbm.error_data
             )
-        [… 12 removed lines not rendered in the source …]
+        else:
+            operation_dbm.status = OperationDBM.Statuses.executed_without_error
+        session.commit()
+
+        if exception:
+            story_log_dbm = StoryLogDBM(
+                level=StoryLogDBM.Levels.error,
+                title=f"error in async_execute_operation (id={operation_dbm.id}, type={operation_dbm.type})",
+                data={
+                    "operation_id": operation_dbm.id,
+                    "exception_str": str(exception),
+                    "traceback_str": traceback_str
+                }
+            )
+            session.add(story_log_dbm)
         session.commit()

-
-        story_log_dbm = StoryLogDBM(
-            level=StoryLogDBM.Levels.error,
-            title=f"error in async_execute_operation (id={operation_dbm.id}, type={operation_dbm.type})",
-            data={
-                "operation_id": operation_dbm.id,
-                "exception_str": str(exception),
-                "traceback_str": traceback_str
-            }
-        )
-        session.add(story_log_dbm)
-        session.commit()
-        session.refresh(story_log_dbm)
-
-        session.refresh(operation_dbm)
+        session.refresh(operation_dbm)

         self._logger.info(
             f"finish async_safe_execute_operation, "
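The async methods change in lockstep with the sync ones. A matching sketch for the awaitable path, with the same caveats as the sync sketch above:

```python
from sqlalchemy.orm import Session


async def run_one_operation_async(executor, worker, operation_dbm, session: Session):
    # Same contract as the sync sketch: the caller owns the session and the
    # executor commits status, error data and the StoryLogDBM row on it.
    return await executor.async_safe_execute_operation(
        operation_dbm=operation_dbm,
        worker=worker,
        session=session,
    )
```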
@@ -351,18 +329,21 @@ class OperationExecutorWorker(BaseWorker):
         self.sqlalchemy_db.init()
         self.sync_run_startup_funcs()

-    def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
-        return self.operation_executor.sync_safe_execute_operation(
+    def sync_execute_operation(self, operation_dbm: OperationDBM, session: Session) -> OperationDBM:
+        return self.operation_executor.sync_safe_execute_operation(
+            operation_dbm=operation_dbm, worker=self, session=session
+        )

     def sync_run(self):
-        [… 8 removed lines not rendered in the source …]
+        with self.sqlalchemy_db.new_session() as session:
+            operation_dbm: OperationDBM | None = get_operation_for_execution(
+                session=session,
+                filter_operation_types=self.filter_operation_types,
+                lock=True
+            )
+            if not operation_dbm:
+                return
+            self.sync_execute_operation(operation_dbm=operation_dbm, session=session)

     def sync_run_on_error(self, exception: BaseException, **kwargs):
         pass

@@ -371,18 +352,21 @@ class OperationExecutorWorker(BaseWorker):
         self.sqlalchemy_db.init()
         await self.async_run_startup_funcs()

-    async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
-        return await self.operation_executor.async_safe_execute_operation(
+    async def async_execute_operation(self, operation_dbm: OperationDBM, session: Session) -> OperationDBM:
+        return await self.operation_executor.async_safe_execute_operation(
+            operation_dbm=operation_dbm, worker=self, session=session
+        )

     async def async_run(self):
-        [… 8 removed lines not rendered in the source …]
+        with self.sqlalchemy_db.new_session() as session:
+            operation_dbm: OperationDBM | None = get_operation_for_execution(
+                sqlalchemy_db=self.sqlalchemy_db,
+                filter_operation_types=self.filter_operation_types,
+                lock=True
+            )
+            if not operation_dbm:
+                return
+            await self.async_execute_operation(operation_dbm=operation_dbm, session=session)

     async def async_run_on_error(self, exception: BaseException, **kwargs):
         pass
{arpakitlib-1.7.95.dist-info → arpakitlib-1.7.97.dist-info}/RECORD

@@ -96,6 +96,7 @@ arpakitlib/_arpakit_project_template/src/core/const.py,sha256=CZZew674y7LhCAlYhv
 arpakitlib/_arpakit_project_template/src/core/settings.py,sha256=S1WPgnFWiRNjCXLllZHq6xp3AiyzzqnRBubg02iRkvo,2238
 arpakitlib/_arpakit_project_template/src/core/util.py,sha256=5R8gvcZdvuDQes45FBnLC2IDv2Jhajp1VhJJYNKYjMQ,1539
 arpakitlib/_arpakit_project_template/src/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+arpakitlib/_arpakit_project_template/src/db/const.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 arpakitlib/_arpakit_project_template/src/db/sqlalchemy_model.py,sha256=nXtayUkBaVb6tWx5qJgXZLbLOTVAjnSLpSDxBm7yZLc,234
 arpakitlib/_arpakit_project_template/src/db/util.py,sha256=8Jg9TtTwvyxVYIN_W5_lk9y-Pyh8To1aMRFUKCRDuuA,550
 arpakitlib/_arpakit_project_template/src/operation_execution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -120,7 +121,7 @@ arpakitlib/ar_arpakit_project_template_util.py,sha256=AswzQvvb-zfUyrcP4EP0K756YL
 arpakitlib/ar_arpakit_schedule_uust_api_client_util.py,sha256=jGbP6egs2yhgfheyqhM0J-SeM2qp2YrW7dV-u9djv4Q,19223
 arpakitlib/ar_arpakitlib_cli_util.py,sha256=8lhEDxnwMSRX2PGV2xQtQru1AYKSA92SVolol5u7iBk,3154
 arpakitlib/ar_base64_util.py,sha256=aZkg2cZTuAaP2IWeG_LXJ6RO7qhyskVwec-Lks0iM-k,676
-arpakitlib/ar_base_worker_util.py,sha256=
+arpakitlib/ar_base_worker_util.py,sha256=T53HIx1azgal4ZmBR1afXqQ3_zex35NIAYPHNchhxfs,5666
 arpakitlib/ar_cache_file_util.py,sha256=Fo2pH-Zqm966KWFBHG_pbiySGZvhIFCYqy7k1weRfJ0,3476
 arpakitlib/ar_datetime_util.py,sha256=Xe1NiT9oPQzNSG7RVRkhukhbg4i-hhS5ImmV7sPUc8o,971
 arpakitlib/ar_dict_util.py,sha256=cF5LQJ6tLqyGoEXfDljMDZrikeZoWPw7CgINHIFGvXM,419

@@ -164,7 +165,7 @@ arpakitlib/ar_logging_util.py,sha256=mx3H6CzX9dsh29ruFmYnva8lL6mwvdBXmeHH9E2tvu8
 arpakitlib/ar_mongodb_util.py,sha256=2ECkTnGAZ92qxioL-fmN6R4yZOSr3bXdXLWTzT1C3vk,4038
 arpakitlib/ar_need_type_util.py,sha256=GETiREPMEYhch-yU6T--Bdawlbb04Jp1Qy7cOsUlIeA,2228
 arpakitlib/ar_openai_api_client_util.py,sha256=_XmlApvHFMSyjvZydPa_kASIt9LsFrZmSC7YEzIG8Bg,1806
-arpakitlib/ar_operation_execution_util.py,sha256=
+arpakitlib/ar_operation_execution_util.py,sha256=bREu2IpBzbITEioS6SaL9bhvUPlYCuVgclTVEZCGx-Q,17696
 arpakitlib/ar_parse_command.py,sha256=-s61xcATIsfw1eV_iD3xi-grsitbGzSDoAFc5V0OFy4,3447
 arpakitlib/ar_postgresql_util.py,sha256=1AuLjEaa1Lg4pzn-ukCVnDi35Eg1k91APRTqZhIJAdo,945
 arpakitlib/ar_run_cmd_util.py,sha256=D_rPavKMmWkQtwvZFz-Io5Ak8eSODHkcFeLPzNVC68g,1072

@@ -178,9 +179,9 @@ arpakitlib/ar_str_util.py,sha256=tFoGSDYoGpfdVHWor5Li9pEOFmDFlHkX-Z8iOy1LK7Y,353
 arpakitlib/ar_type_util.py,sha256=s0NsTM7mV3HuwyRwyYLdNn7Ep2HbyI4FIr-dd8x0lfI,3734
 arpakitlib/ar_yookassa_api_client_util.py,sha256=sh4fcUkAkdOetFn9JYoTvjcSXP-M1wU04KEY-ECLfLg,5137
 arpakitlib/ar_zabbix_api_client_util.py,sha256=Q-VR4MvoZ9aHwZeYZr9G3LwN-ANx1T5KFmF6pvPM-9M,6402
-arpakitlib-1.7.
-arpakitlib-1.7.
-arpakitlib-1.7.
-arpakitlib-1.7.
-arpakitlib-1.7.
-arpakitlib-1.7.
+arpakitlib-1.7.97.dist-info/LICENSE,sha256=GPEDQMam2r7FSTYqM1mm7aKnxLaWcBotH7UvQtea-ec,11355
+arpakitlib-1.7.97.dist-info/METADATA,sha256=nf_HE_ed1KP5IJxPAZsDNsA_YysGS-qjT6wK5whA2rQ,2824
+arpakitlib-1.7.97.dist-info/NOTICE,sha256=95aUzaPJjVpDsGAsNzVnq7tHTxAl0s5UFznCTkVCau4,763
+arpakitlib-1.7.97.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+arpakitlib-1.7.97.dist-info/entry_points.txt,sha256=36xqR3PJFT2kuwjkM_EqoIy0qFUDPKSm_mJaI7emewE,87
+arpakitlib-1.7.97.dist-info/RECORD,,