arpakitlib 1.7.93__py3-none-any.whl → 1.7.95__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries; it is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
arpakitlib/ar_base_worker_util.py

@@ -6,11 +6,14 @@ import multiprocessing
 import threading
 from abc import ABC
 from datetime import timedelta
+from random import randint
 from typing import Any
+from uuid import uuid4
 
+from arpakitlib.ar_datetime_util import now_utc_dt
 from arpakitlib.ar_enumeration_util import Enumeration
 from arpakitlib.ar_func_util import is_async_function, is_sync_function
-from arpakitlib.ar_sleep_util import sync_safe_sleep, async_safe_sleep
+from arpakitlib.ar_sleep_util import sync_safe_sleep
 
 _ARPAKIT_LIB_MODULE_VERSION = "3.0"
 
@@ -21,15 +24,23 @@ class BaseWorker(ABC):
             *,
             timeout_after_run=timedelta(seconds=0.3),
             timeout_after_err_in_run=timedelta(seconds=1),
-            startup_funcs: list[Any] | None = None
+            startup_funcs: list[Any] | None = None,
+            worker_name: str | None = None
     ):
         if startup_funcs is None:
             startup_funcs = []
         self.startup_funcs = startup_funcs
-        self.worker_name = self.__class__.__name__
-        self._logger = logging.getLogger(self.worker_name)
-        self.timeout_after_run = timeout_after_run.total_seconds()
-        self.timeout_after_err_in_run = timeout_after_err_in_run.total_seconds()
+        if worker_name is None:
+            worker_name = self.__class__.__name__
+        self.worker_name = worker_name
+        self.worker_creation_dt = now_utc_dt()
+        self.worker_id = f"{str(uuid4()).replace('-', '')}_{randint(1000, 9999)}"
+        self.worker_fullname = (
+            f"{self.worker_name}_{self.worker_creation_dt.isoformat()}_{self.worker_id}"
+        )
+        self._logger = logging.getLogger(self.worker_fullname)
+        self.timeout_after_run = timeout_after_run
+        self.timeout_after_err_in_run = timeout_after_err_in_run
 
     def sync_run_startup_funcs(self):
         for startup_func in self.startup_funcs:
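
For illustration, a minimal sketch (not part of the package) of how the reworked BaseWorker constructor behaves: worker_name falls back to the class name, worker_fullname combines the name, the creation datetime and a random id, and the timeouts are now stored as timedelta objects instead of being converted to seconds. MyWorker is a hypothetical subclass.

    from datetime import timedelta

    from arpakitlib.ar_base_worker_util import BaseWorker


    class MyWorker(BaseWorker):  # hypothetical subclass for the example
        def sync_run(self):
            self._logger.info("doing work")


    worker = MyWorker(worker_name="my_worker", timeout_after_run=timedelta(seconds=1))
    print(worker.worker_name)        # "my_worker" (defaults to the class name if omitted)
    print(worker.worker_fullname)    # "my_worker_<ISO creation datetime>_<uuid hex>_<4-digit suffix>"
    print(worker.timeout_after_run)  # timedelta(seconds=1), no longer converted via .total_seconds()
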
@@ -41,10 +52,10 @@ class BaseWorker(ABC):
                 raise TypeError("no sync and not async")
 
     def sync_on_startup(self):
-        pass
+        self.sync_run_startup_funcs()
 
     def sync_run(self):
-        self._logger.info("hello world")
+        self._logger.info(f"hello world, im {self.worker_fullname}")
 
     def sync_run_on_error(self, exception: BaseException, **kwargs):
         pass
@@ -66,8 +77,8 @@ class BaseWorker(ABC):
                 except BaseException as exception_:
                     self._logger.error("error in sync_run_on_error", exc_info=exception_)
                     raise exception_
-            if self.timeout_after_run is not None:
-                sync_safe_sleep(self.timeout_after_run)
+                sync_safe_sleep(self.timeout_after_err_in_run)
+            sync_safe_sleep(self.timeout_after_run)
 
     async def async_run_startup_funcs(self):
         for startup_func in self.startup_funcs:
@@ -79,10 +90,10 @@ class BaseWorker(ABC):
                 raise TypeError("no sync and not async")
 
     async def async_on_startup(self):
-        pass
+        await self.async_run_startup_funcs()
 
     async def async_run(self):
-        self._logger.info("hello world")
+        self._logger.info(f"hello world, im {self.worker_fullname}")
 
     async def async_run_on_error(self, exception: BaseException, **kwargs):
         pass
@@ -104,8 +115,8 @@ class BaseWorker(ABC):
                 except BaseException as exception_:
                     self._logger.error("error in async_run_on_error", exc_info=exception_)
                     raise exception_
-            if self.timeout_after_err_in_run is not None:
-                await async_safe_sleep(self.timeout_after_err_in_run)
+                sync_safe_sleep(self.timeout_after_err_in_run)
+            sync_safe_sleep(self.timeout_after_run)
 
 
 class SafeRunInBackgroundModes(Enumeration):
@@ -136,6 +147,15 @@ def safe_run_worker_in_background(*, worker: BaseWorker, mode: str) -> (
     return res
 
 
+def safe_run_workers_in_background(
+        *, workers: list[BaseWorker], mode: str
+) -> list[asyncio.Task] | list[threading.Thread] | list[multiprocessing.Process]:
+    res = []
+    for worker in workers:
+        res.append(safe_run_worker_in_background(worker=worker, mode=mode))
+    return res
+
+
 def __example():
     pass
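
A short usage sketch, not part of the package, for the new safe_run_workers_in_background helper: it simply fans a list of workers out through safe_run_worker_in_background. SimpleWorker is a hypothetical subclass, and the SafeRunInBackgroundModes member used for mode is an assumption, since the enumeration values are not shown in this diff.

    from arpakitlib.ar_base_worker_util import (
        BaseWorker,
        SafeRunInBackgroundModes,
        safe_run_workers_in_background,
    )


    class SimpleWorker(BaseWorker):  # hypothetical subclass for the example
        def sync_run(self):
            self._logger.info("tick")


    workers = [SimpleWorker(worker_name=f"simple_worker_{i}") for i in range(3)]

    # The mode member name is assumed; each returned object is a started
    # asyncio.Task, threading.Thread or multiprocessing.Process.
    results = safe_run_workers_in_background(workers=workers, mode=SafeRunInBackgroundModes.thread)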
 
arpakitlib/ar_operation_execution_util.py

@@ -31,7 +31,8 @@ def get_operation_for_execution(
         *,
         session: Session | None = None,
         sqlalchemy_db: SQLAlchemyDB | None = None,
-        filter_operation_types: list[str] | str | None = None
+        filter_operation_types: list[str] | str | None = None,
+        lock: bool = False
 ) -> OperationDBM | None:
     if isinstance(filter_operation_types, str):
         filter_operation_types = [filter_operation_types]
@@ -44,6 +45,10 @@ def get_operation_for_execution(
     )
     if filter_operation_types:
         query = query.filter(OperationDBM.type.in_(filter_operation_types))
+
+    if lock:
+        query = query.with_for_update()
+
     query = query.order_by(asc(OperationDBM.creation_dt))
     operation_dbm: OperationDBM | None = query.first()
     return operation_dbm
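
A sketch, not from the package, of how the new lock flag can be used: with lock=True the query adds with_for_update(), i.e. SELECT ... FOR UPDATE, so two workers polling the same table cannot both claim one operation while the locking transaction is open. The SQLAlchemyDB import path is an assumption.

    from arpakitlib.ar_operation_execution_util import get_operation_for_execution
    from arpakitlib.ar_sqlalchemy_util import SQLAlchemyDB  # import path is an assumption

    sqlalchemy_db: SQLAlchemyDB = ...  # assume a configured SQLAlchemyDB instance

    with sqlalchemy_db.new_session() as session:
        operation_dbm = get_operation_for_execution(
            session=session,
            filter_operation_types="example_type",  # a single string is also accepted
            lock=True,  # the selected row stays locked until this transaction ends
        )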
@@ -148,7 +153,9 @@ class BaseOperationExecutor:
             raise Exception("raise_fake_exception")
         return operation_dbm
 
-    def sync_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+    def sync_safe_execute_operation(
+            self, operation_dbm: OperationDBM, worker: OperationExecutorWorker
+    ) -> OperationDBM:
         self._logger.info(
             f"start "
             f"operation_dbm.id={operation_dbm.id}, "
@@ -158,10 +165,16 @@ class BaseOperationExecutor:
 
         with self.sql_alchemy_db.new_session() as session:
             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True, lock=True
             )
             operation_dbm.execution_start_dt = now_utc_dt()
             operation_dbm.status = OperationDBM.Statuses.executing
+            operation_dbm.output_data = combine_dicts(
+                operation_dbm.output_data,
+                {
+                    worker.worker_fullname: True
+                }
+            )
             session.commit()
             session.refresh(operation_dbm)
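
For context, a small sketch of what the combine_dicts call above produces, assuming combine_dicts merges the given dicts (the module path is taken from the RECORD listing below): the executing worker's worker_fullname ends up as a key in the operation's output_data, recording which worker claimed it.

    from arpakitlib.ar_dict_util import combine_dicts  # path from the RECORD listing; merge behavior assumed

    output_data = {"attempt": 1}
    worker_fullname = "OperationExecutorWorker_2025-01-01T00:00:00+00:00_abc123_4242"  # illustrative value

    output_data = combine_dicts(output_data, {worker_fullname: True})
    # -> {"attempt": 1, "OperationExecutorWorker_2025-01-01T00:00:00+00:00_abc123_4242": True}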
 
@@ -181,7 +194,7 @@ class BaseOperationExecutor:
         with self.sql_alchemy_db.new_session() as session:
 
             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True, lock=True
             )
             operation_dbm.execution_finish_dt = now_utc_dt()
             if exception:
@@ -231,7 +244,9 @@ class BaseOperationExecutor:
             raise Exception("raise_fake_exception")
         return operation_dbm
 
-    async def async_safe_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
+    async def async_safe_execute_operation(
+            self, operation_dbm: OperationDBM, worker: OperationExecutorWorker
+    ) -> OperationDBM:
         self._logger.info(
             f"start "
             f"operation_dbm.id={operation_dbm.id}, "
@@ -241,10 +256,16 @@ class BaseOperationExecutor:
 
         with self.sql_alchemy_db.new_session() as session:
             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True, lock=True
             )
             operation_dbm.execution_start_dt = now_utc_dt()
             operation_dbm.status = OperationDBM.Statuses.executing
+            operation_dbm.output_data = combine_dicts(
+                operation_dbm.output_data,
+                {
+                    worker.worker_fullname: True
+                }
+            )
             session.commit()
             session.refresh(operation_dbm)
 
@@ -264,7 +285,7 @@ class BaseOperationExecutor:
         with self.sql_alchemy_db.new_session() as session:
 
             operation_dbm: OperationDBM = get_operation_by_id(
-                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True
+                session=session, filter_operation_id=operation_dbm.id, raise_if_not_found=True, lock=True
             )
             operation_dbm.execution_finish_dt = now_utc_dt()
             if exception:
@@ -315,19 +336,15 @@ class OperationExecutorWorker(BaseWorker):
             sqlalchemy_db: SQLAlchemyDB,
             operation_executor: BaseOperationExecutor | None = None,
             filter_operation_types: str | list[str] | None = None,
-            timeout_after_run=timedelta(seconds=0.3),
-            timeout_after_err_in_run=timedelta(seconds=1),
             startup_funcs: list[Any] | None = None
     ):
-        super().__init__(
-            timeout_after_run=timeout_after_run,
-            timeout_after_err_in_run=timeout_after_err_in_run,
-            startup_funcs=startup_funcs
-        )
+        super().__init__(startup_funcs=startup_funcs)
         self.sqlalchemy_db = sqlalchemy_db
         if operation_executor is None:
             operation_executor = BaseOperationExecutor(sqlalchemy_db=sqlalchemy_db)
         self.operation_executor = operation_executor
+        if isinstance(filter_operation_types, str):
+            filter_operation_types = [filter_operation_types]
         self.filter_operation_types = filter_operation_types
 
     def sync_on_startup(self):
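
A brief sketch, not part of the package, of the updated OperationExecutorWorker constructor: the timeout arguments are gone (the BaseWorker defaults apply), and a single operation type string is normalized to a list. The "example_type" value is a placeholder and the SQLAlchemyDB import path is an assumption.

    from arpakitlib.ar_operation_execution_util import OperationExecutorWorker
    from arpakitlib.ar_sqlalchemy_util import SQLAlchemyDB  # import path is an assumption

    sqlalchemy_db: SQLAlchemyDB = ...  # assume a configured SQLAlchemyDB instance

    worker = OperationExecutorWorker(
        sqlalchemy_db=sqlalchemy_db,
        filter_operation_types="example_type",  # placeholder; stored as ["example_type"]
    )
    print(worker.filter_operation_types)  # ["example_type"]
    print(worker.worker_fullname)         # inherited from BaseWorker (see the hunks above)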
@@ -335,12 +352,13 @@ class OperationExecutorWorker(BaseWorker):
         self.sync_run_startup_funcs()
 
     def sync_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
-        return self.operation_executor.sync_safe_execute_operation(operation_dbm=operation_dbm)
+        return self.operation_executor.sync_safe_execute_operation(operation_dbm=operation_dbm, worker=self)
 
     def sync_run(self):
         operation_dbm: OperationDBM | None = get_operation_for_execution(
             sqlalchemy_db=self.sqlalchemy_db,
-            filter_operation_types=self.filter_operation_types
+            filter_operation_types=self.filter_operation_types,
+            lock=True
         )
         if not operation_dbm:
             return
@@ -354,12 +372,13 @@ class OperationExecutorWorker(BaseWorker):
         await self.async_run_startup_funcs()
 
     async def async_execute_operation(self, operation_dbm: OperationDBM) -> OperationDBM:
-        return await self.operation_executor.async_safe_execute_operation(operation_dbm=operation_dbm)
+        return await self.operation_executor.async_safe_execute_operation(operation_dbm=operation_dbm, worker=self)
 
     async def async_run(self):
         operation_dbm: OperationDBM | None = get_operation_for_execution(
             sqlalchemy_db=self.sqlalchemy_db,
-            filter_operation_types=self.filter_operation_types
+            filter_operation_types=self.filter_operation_types,
+            lock=True
         )
         if not operation_dbm:
             return
@@ -383,19 +402,15 @@ class ScheduledOperationCreatorWorker(BaseWorker):
             self,
             *,
             sqlalchemy_db: SQLAlchemyDB,
-            scheduled_operations: list[ScheduledOperation] | None = None,
-            timeout_after_run=timedelta(seconds=0.3),
-            timeout_after_err_in_run=timedelta(seconds=1),
+            scheduled_operations: ScheduledOperation | list[ScheduledOperation] | None = None,
             startup_funcs: list[Any] | None = None
     ):
-        super().__init__(
-            timeout_after_run=timeout_after_run,
-            timeout_after_err_in_run=timeout_after_err_in_run,
-            startup_funcs=startup_funcs
-        )
+        super().__init__(startup_funcs=startup_funcs)
         self.sqlalchemy_db = sqlalchemy_db
         if scheduled_operations is None:
             scheduled_operations = []
+        if isinstance(scheduled_operations, ScheduledOperation):
+            scheduled_operations = [scheduled_operations]
         self.scheduled_operations = scheduled_operations
 
     def sync_on_startup(self):
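
A final sketch, not from the package: ScheduledOperationCreatorWorker now also accepts a single ScheduledOperation and wraps it in a list. The ScheduledOperation fields are not shown in this diff, so its construction is left elided; the SQLAlchemyDB import path is an assumption.

    from arpakitlib.ar_operation_execution_util import (
        ScheduledOperation,
        ScheduledOperationCreatorWorker,
    )
    from arpakitlib.ar_sqlalchemy_util import SQLAlchemyDB  # import path is an assumption

    sqlalchemy_db: SQLAlchemyDB = ...  # assume a configured SQLAlchemyDB instance
    scheduled_operation: ScheduledOperation = ...  # built according to your operation types

    worker = ScheduledOperationCreatorWorker(
        sqlalchemy_db=sqlalchemy_db,
        scheduled_operations=scheduled_operation,  # a bare instance is now accepted
    )
    print(worker.scheduled_operations)  # [scheduled_operation]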
METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: arpakitlib
-Version: 1.7.93
+Version: 1.7.95
 Summary: arpakitlib
 Home-page: https://github.com/ARPAKIT-Company/arpakitlib
 License: Apache-2.0
RECORD

@@ -120,7 +120,7 @@ arpakitlib/ar_arpakit_project_template_util.py,sha256=AswzQvvb-zfUyrcP4EP0K756YL
 arpakitlib/ar_arpakit_schedule_uust_api_client_util.py,sha256=jGbP6egs2yhgfheyqhM0J-SeM2qp2YrW7dV-u9djv4Q,19223
 arpakitlib/ar_arpakitlib_cli_util.py,sha256=8lhEDxnwMSRX2PGV2xQtQru1AYKSA92SVolol5u7iBk,3154
 arpakitlib/ar_base64_util.py,sha256=aZkg2cZTuAaP2IWeG_LXJ6RO7qhyskVwec-Lks0iM-k,676
-arpakitlib/ar_base_worker_util.py,sha256=3pK03ps_AwtdDJofA8mRIGqlEDR1mfWd_QsSS3IR6sM,4828
+arpakitlib/ar_base_worker_util.py,sha256=hV_bMfkO897s00VGMAQ-z8GFzq-vEcRW5TKcmeBhbYM,5667
 arpakitlib/ar_cache_file_util.py,sha256=Fo2pH-Zqm966KWFBHG_pbiySGZvhIFCYqy7k1weRfJ0,3476
 arpakitlib/ar_datetime_util.py,sha256=Xe1NiT9oPQzNSG7RVRkhukhbg4i-hhS5ImmV7sPUc8o,971
 arpakitlib/ar_dict_util.py,sha256=cF5LQJ6tLqyGoEXfDljMDZrikeZoWPw7CgINHIFGvXM,419
@@ -164,7 +164,7 @@ arpakitlib/ar_logging_util.py,sha256=mx3H6CzX9dsh29ruFmYnva8lL6mwvdBXmeHH9E2tvu8
 arpakitlib/ar_mongodb_util.py,sha256=2ECkTnGAZ92qxioL-fmN6R4yZOSr3bXdXLWTzT1C3vk,4038
 arpakitlib/ar_need_type_util.py,sha256=GETiREPMEYhch-yU6T--Bdawlbb04Jp1Qy7cOsUlIeA,2228
 arpakitlib/ar_openai_api_client_util.py,sha256=_XmlApvHFMSyjvZydPa_kASIt9LsFrZmSC7YEzIG8Bg,1806
-arpakitlib/ar_operation_execution_util.py,sha256=D0pUb0hJ7VRsBrq1tiW5otPJMSqZdPDQLUnlPJxkAL0,18242
+arpakitlib/ar_operation_execution_util.py,sha256=pSOpYDDT7HE0MgoW3eolJ2bI4eIYI8TUy0jlulmcRHQ,18709
 arpakitlib/ar_parse_command.py,sha256=-s61xcATIsfw1eV_iD3xi-grsitbGzSDoAFc5V0OFy4,3447
 arpakitlib/ar_postgresql_util.py,sha256=1AuLjEaa1Lg4pzn-ukCVnDi35Eg1k91APRTqZhIJAdo,945
 arpakitlib/ar_run_cmd_util.py,sha256=D_rPavKMmWkQtwvZFz-Io5Ak8eSODHkcFeLPzNVC68g,1072
@@ -178,9 +178,9 @@ arpakitlib/ar_str_util.py,sha256=tFoGSDYoGpfdVHWor5Li9pEOFmDFlHkX-Z8iOy1LK7Y,353
 arpakitlib/ar_type_util.py,sha256=s0NsTM7mV3HuwyRwyYLdNn7Ep2HbyI4FIr-dd8x0lfI,3734
 arpakitlib/ar_yookassa_api_client_util.py,sha256=sh4fcUkAkdOetFn9JYoTvjcSXP-M1wU04KEY-ECLfLg,5137
 arpakitlib/ar_zabbix_api_client_util.py,sha256=Q-VR4MvoZ9aHwZeYZr9G3LwN-ANx1T5KFmF6pvPM-9M,6402
-arpakitlib-1.7.93.dist-info/LICENSE,sha256=GPEDQMam2r7FSTYqM1mm7aKnxLaWcBotH7UvQtea-ec,11355
-arpakitlib-1.7.93.dist-info/METADATA,sha256=pZPltqRitxj6pZ7I8pHAauB5P2sUINz3NUjA8ZGSnBY,2824
-arpakitlib-1.7.93.dist-info/NOTICE,sha256=95aUzaPJjVpDsGAsNzVnq7tHTxAl0s5UFznCTkVCau4,763
-arpakitlib-1.7.93.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-arpakitlib-1.7.93.dist-info/entry_points.txt,sha256=36xqR3PJFT2kuwjkM_EqoIy0qFUDPKSm_mJaI7emewE,87
-arpakitlib-1.7.93.dist-info/RECORD,,
+arpakitlib-1.7.95.dist-info/LICENSE,sha256=GPEDQMam2r7FSTYqM1mm7aKnxLaWcBotH7UvQtea-ec,11355
+arpakitlib-1.7.95.dist-info/METADATA,sha256=7AdfJVZL5IKD-fAW1diQvDtisHJlpoQ1EaAxaWbHnEU,2824
+arpakitlib-1.7.95.dist-info/NOTICE,sha256=95aUzaPJjVpDsGAsNzVnq7tHTxAl0s5UFznCTkVCau4,763
+arpakitlib-1.7.95.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+arpakitlib-1.7.95.dist-info/entry_points.txt,sha256=36xqR3PJFT2kuwjkM_EqoIy0qFUDPKSm_mJaI7emewE,87
+arpakitlib-1.7.95.dist-info/RECORD,,