abstract-block-dumper 0.0.2__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -30,7 +30,8 @@ class RegistryItem:
30
30
  block_number=block_number,
31
31
  exc_info=True,
32
32
  )
33
- raise ConditionEvaluationError(f"Failed to evaluate condition: {e}") from e
33
+ msg = f"Failed to evaluate condition: {e}"
34
+ raise ConditionEvaluationError(msg) from e
34
35
 
35
36
  def get_execution_args(self) -> list[dict[str, Any]]:
36
37
  """Get list of argument sets for execution."""
@@ -42,7 +43,7 @@ class RegistryItem:
42
43
  if hasattr(self.function, "name") and self.function.name is not None:
43
44
  return self.function.name
44
45
 
45
- return ".".join([self.function.__module__, self.function.__name__])
46
+ return f"{self.function.__module__}.{self.function.__name__}"
46
47
 
47
48
  def requires_backfilling(self) -> bool:
48
49
  """Check if this item requires backfilling."""
@@ -76,7 +77,7 @@ class MemoryRegistry(BaseRegistry):
76
77
  "Registered function",
77
78
  function_name=item.function.__name__,
78
79
  executable_path=item.executable_path,
79
- args=item.args,
80
+ args_counter=len(item.args or []),
80
81
  backfilling_lookback=item.backfilling_lookback,
81
82
  )
82
83
 
@@ -86,12 +87,11 @@ class MemoryRegistry(BaseRegistry):
86
87
  def clear(self) -> None:
87
88
  self._functions = []
88
89
 
89
- def get_by_executable_path(self, executable_path: str) -> RegistryItem:
90
+ def get_by_executable_path(self, executable_path: str) -> RegistryItem | None:
90
91
  for registry_item in self.get_functions():
91
92
  if registry_item.executable_path == executable_path:
92
93
  return registry_item
93
- # TODO: Improve this
94
- raise Exception("Function Not Found")
94
+ return None
95
95
 
96
96
 
97
97
  task_registry = MemoryRegistry()
@@ -100,22 +100,22 @@ class BlockProcessor:
100
100
  This handles tasks that may have been lost due to scheduler restarts.
101
101
  """
102
102
  retry_count = 0
103
- for task_attempt in abd_dal.get_ready_to_retry_attempts():
103
+ for retry_attempt in abd_dal.get_ready_to_retry_attempts():
104
104
  try:
105
105
  # Find the registry item to get celery_kwargs
106
- registry_item = self.registry.get_by_executable_path(task_attempt.executable_path)
106
+ registry_item = self.registry.get_by_executable_path(retry_attempt.executable_path)
107
107
  if not registry_item:
108
108
  logger.warning(
109
109
  "Registry item not found for failed task, skipping retry recovery",
110
- task_id=task_attempt.id,
111
- executable_path=task_attempt.executable_path,
110
+ task_id=retry_attempt.id,
111
+ executable_path=retry_attempt.executable_path,
112
112
  )
113
113
  continue
114
114
 
115
115
  # Use atomic transaction to prevent race conditions
116
116
  with transaction.atomic():
117
117
  # Re-fetch with select_for_update to prevent concurrent modifications
118
- task_attempt = TaskAttempt.objects.select_for_update(nowait=True).get(id=task_attempt.id)
118
+ task_attempt = TaskAttempt.objects.select_for_update(nowait=True).get(id=retry_attempt.id)
119
119
 
120
120
  # Verify task is still in FAILED state and ready for retry
121
121
  if task_attempt.status == TaskAttempt.Status.SUCCESS:
@@ -150,16 +150,16 @@ class BlockProcessor:
150
150
  except Exception:
151
151
  logger.error(
152
152
  "Failed to recover retry",
153
- task_id=task_attempt.id,
153
+ task_id=retry_attempt.id,
154
154
  exc_info=True,
155
155
  )
156
156
  # Reload task to see current state after potential execution failure
157
157
  try:
158
- task_attempt.refresh_from_db()
158
+ retry_attempt.refresh_from_db()
159
159
  # If task is still PENDING after error, revert to FAILED
160
160
  # (execution may have failed before celery task could mark it)
161
- if task_attempt.status == TaskAttempt.Status.PENDING:
162
- abd_dal.revert_to_failed(task_attempt)
161
+ if retry_attempt.status == TaskAttempt.Status.PENDING:
162
+ abd_dal.revert_to_failed(retry_attempt)
163
163
  except TaskAttempt.DoesNotExist:
164
164
  # Task was deleted during recovery, nothing to revert
165
165
  pass
@@ -61,18 +61,20 @@ class TaskScheduler:
61
61
  logger.info("TaskScheduler stopped.")
62
62
 
63
63
  def initialize_last_block(self) -> None:
64
- start_from_block_setting = getattr(settings, "BLOCK_DUMPER_START_FROM_BLOCK")
64
+ # Safe getattr in case setting is not defined
65
+ start_from_block_setting = getattr(settings, "BLOCK_DUMPER_START_FROM_BLOCK", None)
65
66
 
66
67
  if start_from_block_setting is not None:
67
68
  if start_from_block_setting == "current":
68
69
  self.last_processed_block = self.subtensor.get_current_block()
69
- logger.info(f"Starting from current blockchain block {self.last_processed_block}")
70
+ logger.info("Starting from current blockchain block", block_number=self.last_processed_block)
70
71
 
71
72
  elif isinstance(start_from_block_setting, int):
72
73
  self.last_processed_block = start_from_block_setting
73
- logger.info(f"Starting from configured block {self.last_processed_block}")
74
+ logger.info("Starting from configured block", block_number=self.last_processed_block)
74
75
  else:
75
- raise ValueError(f"Invalid BLOCK_DUMPER_START_FROM_BLOCK value: {start_from_block_setting}")
76
+ error_msg = f"Invalid BLOCK_DUMPER_START_FROM_BLOCK value: {start_from_block_setting}"
77
+ raise ValueError(error_msg)
76
78
  else:
77
79
  # Default behavior - resume from database
78
80
  last_block_number = abd_dal.get_the_latest_executed_block_number()
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '0.0.2'
32
- __version_tuple__ = version_tuple = (0, 0, 2)
31
+ __version__ = version = '0.0.4'
32
+ __version_tuple__ = version_tuple = (0, 0, 4)
33
33
 
34
34
  __commit_id__ = commit_id = None
@@ -9,6 +9,9 @@ class Command(BaseCommand):
9
9
  help = "Run the block scheduler daemon."
10
10
 
11
11
  def handle(self, *args, **options) -> None:
12
+ """
13
+ Handle the management command to start the block scheduler.
14
+ """
12
15
  self.stdout.write("Syncing decorated functions...")
13
16
  ensure_modules_loaded()
14
17
  functions_counter = len(task_registry.get_functions())
@@ -0,0 +1,53 @@
1
+ """
2
+ Celery integration helpers for abstract-block-dumper.
3
+
4
+ This module provides utilities to integrate @block_task decorated functions
5
+ with Celery workers.
6
+ """
7
+
8
+ from abstract_block_dumper._internal.discovery import ensure_modules_loaded
9
+
10
+
11
+ def setup_celery_tasks() -> None:
12
+ """
13
+ Discover and register all @block_task decorated functions for Celery.
14
+
15
+ This function MUST be called when Celery workers start to ensure that
16
+ all @block_task decorated functions are registered and available to
17
+ receive tasks from the message broker.
18
+
19
+ Usage in your project's celery.py:
20
+
21
+ from celery import Celery
22
+ from celery.signals import worker_ready
23
+
24
+ app = Celery('your_project')
25
+ app.config_from_object('django.conf:settings', namespace='CELERY')
26
+ app.autodiscover_tasks()
27
+
28
+ @worker_ready.connect
29
+ def on_worker_ready(**kwargs):
30
+ '''Load block tasks when worker is ready.'''
31
+ from abstract_block_dumper.v1.celery import setup_celery_tasks
32
+ setup_celery_tasks()
33
+
34
+ Why is this needed?
35
+ -------------------
36
+ The @block_task decorator uses Celery's @shared_task, which requires
37
+ the decorated functions to be imported before workers can receive
38
+ messages for those tasks. Without calling this function, you'll see
39
+ errors like:
40
+
41
+ "Received unregistered task of type 'your_app.block_tasks.task_name'"
42
+
43
+ What does it do?
44
+ ----------------
45
+ - Automatically imports all 'tasks.py' and 'block_tasks.py' modules
46
+ from your INSTALLED_APPS
47
+ - Triggers @block_task decorator registration
48
+ - Makes tasks available to Celery workers
49
+ """
50
+ ensure_modules_loaded()
51
+
52
+
53
+ __all__ = ["setup_celery_tasks"]
@@ -62,7 +62,9 @@ def schedule_retry(task_attempt: TaskAttempt) -> None:
62
62
  )
63
63
 
64
64
 
65
- def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] | None:
65
+ def _celery_task_wrapper(
66
+ func: Callable[..., Any], block_number: int, **kwargs: dict[str, Any]
67
+ ) -> dict[str, Any] | None:
66
68
  executable_path = abd_utils.get_executable_path(func)
67
69
 
68
70
  with transaction.atomic():
@@ -72,21 +74,15 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] |
72
74
  executable_path=executable_path,
73
75
  args_json=abd_utils.serialize_args(kwargs),
74
76
  )
75
- except TaskAttempt.DoesNotExist:
76
- logger.warning(
77
- "TaskAttempt not found - task may have been canceled directly",
78
- block_number=block_number,
79
- executable_path=executable_path,
80
- )
81
- raise CeleryTaskLockedError("TaskAttempt not found - task may have been canceled directly")
77
+ except TaskAttempt.DoesNotExist as exc:
78
+ msg = "TaskAttempt not found - task may have been canceled directly"
79
+ logger.warning(msg, block_number=block_number, executable_path=executable_path)
80
+ raise CeleryTaskLockedError(msg) from exc
81
+
82
82
  except OperationalError as e:
83
- logger.info(
84
- "Task already being processed by another worker",
85
- block_number=block_number,
86
- executable_path=executable_path,
87
- operational_error=str(e),
88
- )
89
- raise CeleryTaskLockedError("Task already being processed by another worker")
83
+ msg = "Task already being processed by another worker"
84
+ logger.info(msg, block_number=block_number, executable_path=executable_path, operational_error=str(e))
85
+ raise CeleryTaskLockedError(msg) from e
90
86
 
91
87
  if task_attempt.status != TaskAttempt.Status.PENDING:
92
88
  logger.info(
@@ -117,11 +113,11 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] |
117
113
  logger.info("Task completed successfully", task_id=task_attempt.id)
118
114
  return {"result": result}
119
115
  except Exception as e:
120
- logger.error(
116
+ logger.exception(
121
117
  "Task execution failed",
122
118
  task_id=task_attempt.id,
123
119
  error_type=type(e).__name__,
124
- exc_info=True,
120
+ error_message=str(e),
125
121
  )
126
122
  abd_dal.task_mark_as_failed(task_attempt)
127
123
 
@@ -130,10 +126,9 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] |
130
126
  try:
131
127
  schedule_retry(task_attempt)
132
128
  except Exception:
133
- logger.error(
129
+ logger.exception(
134
130
  "Failed to schedule retry",
135
131
  task_id=task_attempt.id,
136
- exc_info=True,
137
132
  )
138
133
  return None
139
134
 
@@ -173,10 +168,11 @@ def block_task(
173
168
 
174
169
  def decorator(func: Callable[..., Any]) -> Any:
175
170
  if not callable(condition):
176
- raise ValueError("condition must be a callable.")
171
+ msg = "condition must be a callable."
172
+ raise TypeError(msg)
177
173
 
178
174
  # Celery task wrapper
179
- def shared_celery_task(block_number: int, **kwargs) -> None | Any:
175
+ def shared_celery_task(block_number: int, **kwargs: dict[str, Any]) -> None | Any:
180
176
  """
181
177
  Wrapper that handles TaskAttempt tracking and executes the original
182
178
  function
@@ -193,13 +189,13 @@ def block_task(
193
189
  )(shared_celery_task)
194
190
 
195
191
  # Store original function reference for introspection
196
- celery_task._original_func = func
192
+ celery_task._original_func = func # noqa: SLF001
197
193
 
198
194
  # Register the Celery task
199
195
  task_registry.register_item(
200
196
  RegistryItem(
201
197
  condition=condition,
202
- function=cast(Task, celery_task),
198
+ function=cast("Task", celery_task),
203
199
  args=args,
204
200
  backfilling_lookback=backfilling_lookback,
205
201
  celery_kwargs=celery_kwargs or {},
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: abstract-block-dumper
3
- Version: 0.0.2
3
+ Version: 0.0.4
4
4
  Project-URL: Source, https://github.com/bactensor/abstract-block-dumper
5
5
  Project-URL: Issue Tracker, https://github.com/bactensor/abstract-block-dumper/issues
6
6
  Author-email: Reef Technologies <opensource@reef.pl>
@@ -98,6 +98,28 @@ INSTALLED_APPS = [
98
98
  python manage.py migrate
99
99
  ```
100
100
 
101
+ 4. **Configure Celery to discover block tasks:**
102
+
103
+ In your project's `celery.py` file, add the following to ensure Celery workers can discover your `@block_task` decorated functions:
104
+
105
+ ```python
106
+ from celery import Celery
107
+ from celery.signals import worker_ready
108
+ from django.conf import settings
109
+
110
+ app = Celery('your_project')
111
+ app.config_from_object('django.conf:settings', namespace='CELERY')
112
+ app.autodiscover_tasks()
113
+
114
+ @worker_ready.connect
115
+ def on_worker_ready(**kwargs):
116
+ """Load block tasks when worker starts."""
117
+ from abstract_block_dumper.v1.celery import setup_celery_tasks
118
+ setup_celery_tasks()
119
+ ```
120
+
121
+ > **Important:** Without this step, Celery workers will not recognize your `@block_task` decorated functions, and you'll see "Received unregistered task" errors.
122
+
101
123
  ## Usage
102
124
 
103
125
  ### 1. Define Block Processing Tasks
@@ -128,7 +150,7 @@ See examples below:
128
150
  Use the `@block_task` decorator with lambda conditions to create block processing tasks:
129
151
 
130
152
  ```python
131
- from abstract_block_dumper.api.v1.decorators import block_task
153
+ from abstract_block_dumper.v1.decorators import block_task
132
154
 
133
155
 
134
156
  # Process every block
@@ -226,55 +248,92 @@ BLOCK_DUMPER_MAX_ATTEMPTS = 3 # maximum retry attempts
226
248
  BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 1440 # maximum retry delay (24 hours)
227
249
  ```
228
250
 
229
- ### Configuration Options Reference
230
-
231
- #### Core Settings
232
-
233
- **BITTENSOR_NETWORK** (str, default: `'finney'`) Specifies which [Bittensor network](https://docs.learnbittensor.org/concepts/bittensor-networks) to connect to
234
-
235
- **BLOCK_DUMPER_START_FROM_BLOCK** (str|int|None, default: `None`)
236
- - **Purpose**: Determines the starting block for processing when the scheduler first runs
237
- - **Valid Values**:
238
- - `None`: Resume from the last processed block stored in database
239
- - `'current'`: Start from the current blockchain block (skips historical blocks)
240
- - `int`: Start from a specific block number (e.g., `1000000`)
241
- - **Example**: `BLOCK_DUMPER_START_FROM_BLOCK = 'current'`
242
- - **Performance Impact**: Starting from historical blocks may require significant processing time
243
-
244
- #### Scheduler Settings
245
-
246
- **BLOCK_DUMPER_POLL_INTERVAL** (int, default: `1`)
247
- - **Purpose**: Seconds to wait between checking for new blocks
248
- - **Valid Range**: `1` to `3600` (1 second to 1 hour)
249
- - **Example**: `BLOCK_DUMPER_POLL_INTERVAL = 5`
250
- - **Performance Impact**:
251
- - Lower values (1-2s): Near real-time processing, higher CPU/network usage
252
- - Higher values (10-60s): Reduced load but delayed processing
253
- - Very low values (<1s) may cause rate limiting
254
-
255
- #### Retry and Error Handling Settings
256
-
257
- **BLOCK_DUMPER_MAX_ATTEMPTS** (int, default: `3`)
258
- - **Purpose**: Maximum number of attempts to retry a failed task before giving up
259
- - **Valid Range**: `1` to `10`
260
- - **Example**: `BLOCK_DUMPER_MAX_ATTEMPTS = 5`
261
- - **Performance Impact**: Higher values increase resilience but may delay failure detection
262
-
263
- **BLOCK_TASK_RETRY_BACKOFF** (int, default: `1`)
264
- - **Purpose**: Base number of minutes for exponential backoff retry delays
265
- - **Valid Range**: `1` to `60`
266
- - **Example**: `BLOCK_TASK_RETRY_BACKOFF = 2`
267
- - **Calculation**: Actual delay = `backoff ** attempt_count` minutes
251
+ ## Configuration Options Reference
252
+
253
+ ### `BITTENSOR_NETWORK`
254
+ - **Type:** `str`
255
+ - **Default:** `'finney'`
256
+ - **Description:** Specifies which [Bittensor network](https://docs.learnbittensor.org/concepts/bittensor-networks) to connect to
257
+
258
+ ---
259
+
260
+ ### `BLOCK_DUMPER_START_FROM_BLOCK`
261
+ - **Type:** `str | int | None`
262
+ - **Default:** `None`
263
+ - **Valid Range:** `None`, `'current'`, or any positive integer
264
+ - **Description:** Determines the starting block for processing when the scheduler first runs
265
+ - `None` → Resume from the last processed block stored in database
266
+ - `'current'` → Start from the current blockchain block (skips historical blocks)
267
+ - Integer → Start from a specific block number (e.g., `1000000`)
268
+
269
+ ```python
270
+ BLOCK_DUMPER_START_FROM_BLOCK = 'current'
271
+ ```
272
+
273
+ > **Performance Impact:** Starting from historical blocks may require significant processing time
274
+
275
+ ---
276
+
277
+ ### `BLOCK_DUMPER_POLL_INTERVAL`
278
+ - **Type:** `int`
279
+ - **Default:** `1`
280
+ - **Valid Range:** `1` to `3600` (seconds)
281
+ - **Description:** Seconds to wait between checking for new blocks
282
+
283
+ ```python
284
+ BLOCK_DUMPER_POLL_INTERVAL = 5
285
+ ```
286
+
287
+ > **Performance Impact:**
288
+ > - Lower values (1-2s): Near real-time processing, higher CPU/network usage
289
+ > - Higher values (10-60s): Reduced load but delayed processing
290
+ > - Very low values (<1s): May cause rate limiting
291
+
292
+ ---
293
+
294
+ ### `BLOCK_DUMPER_MAX_ATTEMPTS`
295
+ - **Type:** `int`
296
+ - **Default:** `3`
297
+ - **Valid Range:** `1` to `10`
298
+ - **Description:** Maximum number of attempts to retry a failed task before giving up
299
+
300
+ ```python
301
+ BLOCK_DUMPER_MAX_ATTEMPTS = 5
302
+ ```
303
+
304
+ > **Performance Impact:** Higher values increase resilience but may delay failure detection
305
+
306
+ ---
307
+
308
+ ### `BLOCK_TASK_RETRY_BACKOFF`
309
+ - **Type:** `int`
310
+ - **Default:** `1`
311
+ - **Valid Range:** `1` to `60` (minutes)
312
+ - **Description:** Base number of minutes for exponential backoff retry delays
313
+ - **Calculation:** Actual delay = `backoff ** attempt_count` minutes
268
314
  - Attempt 1: 2¹ = 2 minutes
269
- - Attempt 2: 2² = 4 minutes
315
+ - Attempt 2: 2² = 4 minutes
270
316
  - Attempt 3: 2³ = 8 minutes
271
- - **Performance Impact**: Lower values retry faster but may overwhelm failing services
272
317
 
273
- **BLOCK_TASK_MAX_RETRY_DELAY_MINUTES** (int, default: `1440`)
274
- - **Purpose**: Maximum delay (in minutes) between retry attempts, caps exponential backoff
275
- - **Valid Range**: `1` to `10080` (1 minute to 1 week)
276
- - **Example**: `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720` # 12 hours max
277
- - **Performance Impact**: Prevents extremely long delays while maintaining backoff benefits
318
+ ```python
319
+ BLOCK_TASK_RETRY_BACKOFF = 2
320
+ ```
321
+
322
+ > **Performance Impact:** Lower values retry faster but may overwhelm failing services
323
+
324
+ ---
325
+
326
+ ### `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES`
327
+ - **Type:** `int`
328
+ - **Default:** `1440` (24 hours)
329
+ - **Valid Range:** `1` to `10080` (1 minute to 1 week)
330
+ - **Description:** Maximum delay (in minutes) between retry attempts, caps exponential backoff
331
+
332
+ ```python
333
+ BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720 # 12 hours max
334
+ ```
335
+
336
+ > **Performance Impact:** Prevents extremely long delays while maintaining backoff benefits
278
337
 
279
338
 
280
339
  ## Example Project
@@ -1,5 +1,5 @@
1
1
  abstract_block_dumper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- abstract_block_dumper/_version.py,sha256=huLsL1iGeXWQKZ8bjwDdIWC7JOkj3wnzBh-HFMZl1PY,704
2
+ abstract_block_dumper/_version.py,sha256=QlXZ5JTjE_pgpDaeHk0GTExkc75xUZFmd0hA7kGYCJ0,704
3
3
  abstract_block_dumper/admin.py,sha256=3J3I_QOKFgfMNpTXW-rTQGO_q5Ls6uNuL0FkPVdIsYg,1654
4
4
  abstract_block_dumper/apps.py,sha256=DXATdrjsL3T2IletTbKeD6unr8ScLaxg7wz0nAHTAns,215
5
5
  abstract_block_dumper/models.py,sha256=MO9824dmHB6xF3PrFE_RERh7whVjQtS4tt6QA0wSbg0,2022
@@ -9,20 +9,21 @@ abstract_block_dumper/_internal/discovery.py,sha256=sISOL8vq6rC0pOndrCfWKDZjyYwz
9
9
  abstract_block_dumper/_internal/exceptions.py,sha256=jVXQ8b3gneno2XYvO0XisJPMlkAWb6H5u10egIpPJ4k,335
10
10
  abstract_block_dumper/_internal/dal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
11
  abstract_block_dumper/_internal/dal/django_dal.py,sha256=pBGEFeo_U0ac2Za-dwzJvf04Ng8lP51aR60c_DUrGIw,5426
12
- abstract_block_dumper/_internal/dal/memory_registry.py,sha256=ogayH2Iqnltl0Lf696WsXiZYp0KDLb9G338Fb3_XiTs,2985
12
+ abstract_block_dumper/_internal/dal/memory_registry.py,sha256=yMNF7jrvWGF-S1pqyR2zOCNLWwrdsImcvV6cGqu1wYE,2972
13
13
  abstract_block_dumper/_internal/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
- abstract_block_dumper/_internal/services/block_processor.py,sha256=0ZRE4JfcdkhswbU1KEwwW_PazsCVJqsWfd_cvvu5r64,8123
14
+ abstract_block_dumper/_internal/services/block_processor.py,sha256=wB-zeft3Ys8zmqCdF_v12rXd6umNWvGfy2Ts6XSGkL8,8132
15
15
  abstract_block_dumper/_internal/services/executor.py,sha256=ZZmQ9TzoNEoAE4amiU8lHRsTfP7YusUkWXasrArfo2g,1806
16
- abstract_block_dumper/_internal/services/scheduler.py,sha256=t8NDqoKXrYzYxbXmLyg-VtkBCCifmq78Ae6jhDyk5EA,3473
16
+ abstract_block_dumper/_internal/services/scheduler.py,sha256=NrT3t0oVR-osf50tWWqcxojkVkxhd2PHsk0PuXD5RMc,3593
17
17
  abstract_block_dumper/_internal/services/utils.py,sha256=Y8b8KdKn53mcuWchw6b5EJq9ipO4p1FFf6g_Fpbg7cQ,1273
18
18
  abstract_block_dumper/management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
19
19
  abstract_block_dumper/management/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
- abstract_block_dumper/management/commands/block_tasks_v1.py,sha256=waHuWMXzQr7TOzftHcBRPLw5Cx_lfL2-quW9BjcihCI,788
20
+ abstract_block_dumper/management/commands/block_tasks_v1.py,sha256=jSi04ahIKYwlm_dNKCUGL_cmALv1iP-ZjfXrmz0pn-4,880
21
21
  abstract_block_dumper/migrations/0001_initial.py,sha256=ImPHC3G6kPkq4Xn_4YVAm4Labh1Xi7PkCRszYRGpTiI,2298
22
22
  abstract_block_dumper/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
23
23
  abstract_block_dumper/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
- abstract_block_dumper/v1/decorators.py,sha256=a_M9foEY2ZllPlZFZ_iPLmrwa34C5bLbiXMihiLbWCw,7099
24
+ abstract_block_dumper/v1/celery.py,sha256=X4IqVs5i6ZpyY7fy1SqMZgsZy4SXP-jK2qG-FYnjU38,1722
25
+ abstract_block_dumper/v1/decorators.py,sha256=i-CVanS-yiBMKCEOLjGmwmopefWePhyMdodlWNIfJFg,7002
25
26
  abstract_block_dumper/v1/tasks.py,sha256=u9iMYdDUqzYT3yPrNwZecHnlweZ3yFipV9BcIWHCbus,2647
26
- abstract_block_dumper-0.0.2.dist-info/METADATA,sha256=4AfqUQJysgj5ys2iexAwPIQst0Ia9fbEa0bxePgOXKw,11990
27
- abstract_block_dumper-0.0.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
28
- abstract_block_dumper-0.0.2.dist-info/RECORD,,
27
+ abstract_block_dumper-0.0.4.dist-info/METADATA,sha256=wBsIWl5439xp-EYInzV-mQaAKIoa_CDP97FJoeaB-ng,12916
28
+ abstract_block_dumper-0.0.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
29
+ abstract_block_dumper-0.0.4.dist-info/RECORD,,