arpakitlib 1.7.94__py3-none-any.whl → 1.7.96__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arpakitlib/ar_operation_execution_util.py +60 -59
- {arpakitlib-1.7.94.dist-info → arpakitlib-1.7.96.dist-info}/METADATA +1 -1
- {arpakitlib-1.7.94.dist-info → arpakitlib-1.7.96.dist-info}/RECORD +7 -7
- {arpakitlib-1.7.94.dist-info → arpakitlib-1.7.96.dist-info}/LICENSE +0 -0
- {arpakitlib-1.7.94.dist-info → arpakitlib-1.7.96.dist-info}/NOTICE +0 -0
- {arpakitlib-1.7.94.dist-info → arpakitlib-1.7.96.dist-info}/WHEEL +0 -0
- {arpakitlib-1.7.94.dist-info → arpakitlib-1.7.96.dist-info}/entry_points.txt +0 -0
arpakitlib/ar_operation_execution_util.py

@@ -31,7 +31,8 @@ def get_operation_for_execution(
         *,
         session: Session | None = None,
         sqlalchemy_db: SQLAlchemyDB | None = None,
-        filter_operation_types: list[str] | str | None = None
+        filter_operation_types: list[str] | str | None = None,
+        lock: bool = False
 ) -> OperationDBM | None:
     if isinstance(filter_operation_types, str):
         filter_operation_types = [filter_operation_types]
@@ -44,6 +45,10 @@ def get_operation_for_execution(
     )
     if filter_operation_types:
         query = query.filter(OperationDBM.type.in_(filter_operation_types))
+
+    if lock:
+        query = query.with_for_update()
+
     query = query.order_by(asc(OperationDBM.creation_dt))
     operation_dbm: OperationDBM | None = query.first()
     return operation_dbm
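The new `lock` flag simply chains SQLAlchemy's `Query.with_for_update()` onto the selection query, so the chosen operation row is read with `SELECT ... FOR UPDATE`. A standalone sketch of what that buys; `JobDBM`, the engine URL and the filter value below are hypothetical stand-ins, not arpakitlib code:

```python
from datetime import datetime

from sqlalchemy import asc, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class JobDBM(Base):
    # Hypothetical stand-in for OperationDBM.
    __tablename__ = "job"
    id: Mapped[int] = mapped_column(primary_key=True)
    type: Mapped[str]
    status: Mapped[str]
    creation_dt: Mapped[datetime]


engine = create_engine("postgresql+psycopg2://user:password@localhost/db")  # placeholder DSN

with Session(engine) as session, session.begin():
    job: JobDBM | None = (
        session.query(JobDBM)
        .filter(JobDBM.type.in_(["healthcheck"]))
        .order_by(asc(JobDBM.creation_dt))
        .with_for_update()  # adds FOR UPDATE on backends that support it
        .first()
    )
    # The selected row stays locked until this transaction commits or rolls back,
    # so a concurrent worker issuing the same locked query waits here instead of
    # claiming the same job (with_for_update(skip_locked=True) would skip it).
```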
@@ -149,7 +154,7 @@ class BaseOperationExecutor:
         return operation_dbm

     def sync_safe_execute_operation(
-            self, operation_dbm: OperationDBM, worker: OperationExecutorWorker
+            self, operation_dbm: OperationDBM, worker: OperationExecutorWorker, session: Session
     ) -> OperationDBM:
         self._logger.info(
             f"start "
@@ -158,20 +163,16 @@ class BaseOperationExecutor:
             f"operation_dbm.status={operation_dbm.status}"
         )

-
-
-
-
-
-
-
-
-
-
-                }
-            )
-            session.commit()
-            session.refresh(operation_dbm)
+        operation_dbm.execution_start_dt = now_utc_dt()
+        operation_dbm.status = OperationDBM.Statuses.executing
+        operation_dbm.output_data = combine_dicts(
+            operation_dbm.output_data,
+            {
+                worker.worker_fullname: True
+            }
+        )
+        session.commit()
+        session.refresh(operation_dbm)

         exception: BaseException | None = None
         traceback_str: str | None = None
@@ -186,40 +187,35 @@ class BaseOperationExecutor:
             exception = exception_
             traceback_str = traceback.format_exc()

-
-
-            operation_dbm
-
+        operation_dbm.execution_finish_dt = now_utc_dt()
+        if exception:
+            operation_dbm.status = OperationDBM.Statuses.executed_with_error
+            operation_dbm.error_data = combine_dicts(
+                {
+                    "exception_str": str(exception),
+                    "traceback_str": traceback_str
+                },
+                operation_dbm.error_data
             )
-
-
-
-
-
-
-
-
-
-
-
-
+        else:
+            operation_dbm.status = OperationDBM.Statuses.executed_without_error
+        session.commit()
+
+        if exception:
+            story_log_dbm = StoryLogDBM(
+                level=StoryLogDBM.Levels.error,
+                title=f"error in sync_execute_operation (id={operation_dbm.id}, type={operation_dbm.type})",
+                data={
+                    "operation_id": operation_dbm.id,
+                    "exception_str": str(exception),
+                    "traceback_str": traceback_str
+                }
+            )
+            session.add(story_log_dbm)
             session.commit()
+            session.refresh(story_log_dbm)

-
-            story_log_dbm = StoryLogDBM(
-                level=StoryLogDBM.Levels.error,
-                title=f"error in sync_execute_operation (id={operation_dbm.id}, type={operation_dbm.type})",
-                data={
-                    "operation_id": operation_dbm.id,
-                    "exception_str": str(exception),
-                    "traceback_str": traceback_str
-                }
-            )
-            session.add(story_log_dbm)
-            session.commit()
-            session.refresh(story_log_dbm)
-
-            session.refresh(operation_dbm)
+        session.refresh(operation_dbm)

         self._logger.info(
             f"finish sync_safe_execute_operation, "
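With the session now handed in by the caller, all of the bookkeeping above happens on that one session: flip the status, merge the exception text and traceback into `error_data`, commit once, and add a `StoryLogDBM` row when the operation failed. A minimal sketch of the same capture-and-record pattern; `run_and_record`, `record` and `func` are illustrative names, not arpakitlib APIs:

```python
import traceback
from typing import Callable

from sqlalchemy.orm import Session


def run_and_record(session: Session, record, func: Callable[[], None]) -> None:
    # Run the unit of work, remembering any exception and its traceback.
    exception: BaseException | None = None
    traceback_str: str | None = None
    try:
        func()
    except BaseException as exception_:
        exception = exception_
        traceback_str = traceback.format_exc()

    # Persist the outcome on the caller's session: a single commit covers the
    # status change and the error payload, as in the hunk above.
    record.status = "executed_with_error" if exception else "executed_without_error"
    if exception:
        record.error_data = {
            "exception_str": str(exception),
            "traceback_str": traceback_str,
        }
    session.commit()
    session.refresh(record)
```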
@@ -251,7 +247,7 @@ class BaseOperationExecutor:

         with self.sql_alchemy_db.new_session() as session:
             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True, lock=True
             )
             operation_dbm.execution_start_dt = now_utc_dt()
             operation_dbm.status = OperationDBM.Statuses.executing
@@ -280,7 +276,7 @@ class BaseOperationExecutor:
         with self.sql_alchemy_db.new_session() as session:

             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True, lock=True
             )
             operation_dbm.execution_finish_dt = now_utc_dt()
             if exception:
@@ -346,17 +342,21 @@ class OperationExecutorWorker(BaseWorker):
         self.sqlalchemy_db.init()
         self.sync_run_startup_funcs()

-    def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
-        return self.operation_executor.sync_safe_execute_operation(
+    def sync_execute_operation(self, operation_dbm: OperationDBM, session: Session) -> OperationDBM:
+        return self.operation_executor.sync_safe_execute_operation(
+            operation_dbm=operation_dbm, worker=self, session=session
+        )

     def sync_run(self):
-
-
-
-
-
-
-
+        with self.sqlalchemy_db.new_session() as session:
+            operation_dbm: OperationDBM | None = get_operation_for_execution(
+                session=session,
+                filter_operation_types=self.filter_operation_types,
+                lock=True
+            )
+            if not operation_dbm:
+                return
+            self.sync_execute_operation(operation_dbm=operation_dbm, session=session)

     def sync_run_on_error(self, exception: BaseException, **kwargs):
         pass
@@ -371,7 +371,8 @@ class OperationExecutorWorker(BaseWorker):
     async def async_run(self):
         operation_dbm: OperationDBM | None = get_operation_for_execution(
             sqlalchemy_db=self.sqlalchemy_db,
-            filter_operation_types=self.filter_operation_types
+            filter_operation_types=self.filter_operation_types,
+            lock=True
         )
         if not operation_dbm:
             return
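Taken together, the worker changes make claiming an operation race-free: `sync_run` now opens a single session, selects the next operation with `lock=True`, and executes it on that same session, so the row lock taken by the locked SELECT is only released once the flip to `executing` has been committed. A condensed, hypothetical sketch of that claim pattern, reusing the `JobDBM` model and `engine` from the earlier sketch:

```python
from sqlalchemy import asc
from sqlalchemy.orm import sessionmaker

SessionLocal = sessionmaker(engine)  # engine from the earlier sketch


def run_once() -> None:
    # One transaction spans the locked SELECT and the status flip, so two
    # workers calling run_once() concurrently cannot both claim the same row:
    # the second SELECT ... FOR UPDATE blocks until the first worker commits,
    # and by then the row no longer matches the "waiting" filter.
    with SessionLocal() as session:
        job: JobDBM | None = (
            session.query(JobDBM)
            .filter(JobDBM.status == "waiting")
            .order_by(asc(JobDBM.creation_dt))
            .with_for_update()
            .first()
        )
        if job is None:
            return
        job.status = "executing"
        session.commit()  # releases the row lock; the job is now claimed
```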
{arpakitlib-1.7.94.dist-info → arpakitlib-1.7.96.dist-info}/RECORD

@@ -164,7 +164,7 @@ arpakitlib/ar_logging_util.py,sha256=mx3H6CzX9dsh29ruFmYnva8lL6mwvdBXmeHH9E2tvu8
 arpakitlib/ar_mongodb_util.py,sha256=2ECkTnGAZ92qxioL-fmN6R4yZOSr3bXdXLWTzT1C3vk,4038
 arpakitlib/ar_need_type_util.py,sha256=GETiREPMEYhch-yU6T--Bdawlbb04Jp1Qy7cOsUlIeA,2228
 arpakitlib/ar_openai_api_client_util.py,sha256=_XmlApvHFMSyjvZydPa_kASIt9LsFrZmSC7YEzIG8Bg,1806
-arpakitlib/ar_operation_execution_util.py,sha256=
+arpakitlib/ar_operation_execution_util.py,sha256=tm5o-GyinsBQgKCnKJoOizKsqPLmlV1IhADVqARsT6g,18241
 arpakitlib/ar_parse_command.py,sha256=-s61xcATIsfw1eV_iD3xi-grsitbGzSDoAFc5V0OFy4,3447
 arpakitlib/ar_postgresql_util.py,sha256=1AuLjEaa1Lg4pzn-ukCVnDi35Eg1k91APRTqZhIJAdo,945
 arpakitlib/ar_run_cmd_util.py,sha256=D_rPavKMmWkQtwvZFz-Io5Ak8eSODHkcFeLPzNVC68g,1072

@@ -178,9 +178,9 @@ arpakitlib/ar_str_util.py,sha256=tFoGSDYoGpfdVHWor5Li9pEOFmDFlHkX-Z8iOy1LK7Y,353
 arpakitlib/ar_type_util.py,sha256=s0NsTM7mV3HuwyRwyYLdNn7Ep2HbyI4FIr-dd8x0lfI,3734
 arpakitlib/ar_yookassa_api_client_util.py,sha256=sh4fcUkAkdOetFn9JYoTvjcSXP-M1wU04KEY-ECLfLg,5137
 arpakitlib/ar_zabbix_api_client_util.py,sha256=Q-VR4MvoZ9aHwZeYZr9G3LwN-ANx1T5KFmF6pvPM-9M,6402
-arpakitlib-1.7.
-arpakitlib-1.7.
-arpakitlib-1.7.
-arpakitlib-1.7.
-arpakitlib-1.7.
-arpakitlib-1.7.
+arpakitlib-1.7.96.dist-info/LICENSE,sha256=GPEDQMam2r7FSTYqM1mm7aKnxLaWcBotH7UvQtea-ec,11355
+arpakitlib-1.7.96.dist-info/METADATA,sha256=NUdQncUnBReSw8GBQxkblcPDWJ5UImdIcZJ15hI5PAg,2824
+arpakitlib-1.7.96.dist-info/NOTICE,sha256=95aUzaPJjVpDsGAsNzVnq7tHTxAl0s5UFznCTkVCau4,763
+arpakitlib-1.7.96.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+arpakitlib-1.7.96.dist-info/entry_points.txt,sha256=36xqR3PJFT2kuwjkM_EqoIy0qFUDPKSm_mJaI7emewE,87
+arpakitlib-1.7.96.dist-info/RECORD,,
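For context on the hunk above: each RECORD row has the form `path,sha256=<urlsafe-base64 digest>,<size in bytes>`, and the RECORD file lists itself with the hash and size fields left empty (the trailing `,,`). That is why this file only changes where the dist-info directory was renamed for 1.7.96 and where ar_operation_execution_util.py got a new digest and size.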