abstract-block-dumper 0.0.2__tar.gz → 0.0.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/.gitignore +2 -1
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/CHANGELOG.md +10 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/PKG-INFO +107 -48
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/README.md +106 -47
- abstract_block_dumper-0.0.4/example_project/example_project/celery.py +26 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/dal/memory_registry.py +6 -6
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/services/block_processor.py +9 -9
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/services/scheduler.py +6 -4
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_version.py +2 -2
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/management/commands/block_tasks_v1.py +3 -0
- abstract_block_dumper-0.0.4/src/abstract_block_dumper/v1/celery.py +53 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/v1/decorators.py +19 -23
- abstract_block_dumper-0.0.4/tests/unit/test_celery_integration.py +17 -0
- abstract_block_dumper-0.0.2/example_project/example_project/celery.py +0 -12
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/.cruft.json +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/.github/dependabot.yml +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/.github/workflows/ci.yml +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/.github/workflows/publish.yml +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/.pre-commit-config.yaml +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/.shellcheckrc +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/SECURITY.md +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/docs/3rd_party/cookiecutter-rt-pkg/CHANGELOG.md +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/.dockerignore +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/.gitignore +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/Dockerfile +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/README.md +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/admin.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/apps.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/management/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/management/commands/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/management/commands/create_admin.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/migrations/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/models.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/tasks.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/tests.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/views.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/docker-compose.yml +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/example_project/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/example_project/asgi.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/example_project/settings.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/example_project/urls.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/example_project/wsgi.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/main.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/manage.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/pyproject.toml +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/pytest.ini +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/uv.lock +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/noxfile.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/pyproject.toml +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/dal/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/dal/django_dal.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/discovery.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/exceptions.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/services/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/services/executor.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/services/utils.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/admin.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/apps.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/management/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/management/commands/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/migrations/0001_initial.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/migrations/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/models.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/py.typed +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/v1/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/v1/tasks.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/conftest.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/django_fixtures.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/fatories.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/integration/__init__.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/integration/test_block_processor.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/integration/test_concurrent_processing.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/integration/test_multi_arguments_tasks.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/integration/test_registered_celery_tasks.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/integration/test_scheduler.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/integration/test_task_registration.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/settings.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/unit/test_decorator.py +0 -0
- {abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/uv.lock +0 -0
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/CHANGELOG.md
RENAMED

```diff
@@ -9,6 +9,16 @@ upcoming release can be found in [changelog.d](changelog.d).
 
 <!-- towncrier release notes start -->
 
+## [0.0.4](https://github.com/bactensor/abstract-block-dumper/releases/tag/v0.0.4) - 2025-11-17
+
+No significant changes.
+
+
+## [0.0.5](https://github.com/bactensor/abstract-block-dumper/releases/tag/v0.0.5) - 2025-11-17
+
+No significant changes.
+
+
 ## [0.0.2](https://github.com/bactensor/abstract-block-dumper/releases/tag/v0.0.2) - 2025-10-24
 
 No significant changes.
```
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/PKG-INFO
RENAMED

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract-block-dumper
-Version: 0.0.2
+Version: 0.0.4
 Project-URL: Source, https://github.com/bactensor/abstract-block-dumper
 Project-URL: Issue Tracker, https://github.com/bactensor/abstract-block-dumper/issues
 Author-email: Reef Technologies <opensource@reef.pl>
@@ -98,6 +98,28 @@ INSTALLED_APPS = [
 python manage.py migrate
 ```
 
+4. **Configure Celery to discover block tasks:**
+
+In your project's `celery.py` file, add the following to ensure Celery workers can discover your `@block_task` decorated functions:
+
+```python
+from celery import Celery
+from celery.signals import worker_ready
+from django.conf import settings
+
+app = Celery('your_project')
+app.config_from_object('django.conf:settings', namespace='CELERY')
+app.autodiscover_tasks()
+
+@worker_ready.connect
+def on_worker_ready(**kwargs):
+    """Load block tasks when worker starts."""
+    from abstract_block_dumper.v1.celery import setup_celery_tasks
+    setup_celery_tasks()
+```
+
+> **Important:** Without this step, Celery workers will not recognize your `@block_task` decorated functions, and you'll see "Received unregistered task" errors.
+
 ## Usage
 
 ### 1. Define Block Processing Tasks
@@ -128,7 +150,7 @@ See examples below:
 Use the `@block_task` decorator with lambda conditions to create block processing tasks:
 
 ```python
-from abstract_block_dumper.
+from abstract_block_dumper.v1.decorators import block_task
 
 
 # Process every block
@@ -226,55 +248,92 @@ BLOCK_DUMPER_MAX_ATTEMPTS = 3  # maximum retry attempts
 BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 1440  # maximum retry delay (24 hours)
 ```
 
-… (previous configuration notes, truncated in this extract)
+## Configuration Options Reference
+
+### `BITTENSOR_NETWORK`
+- **Type:** `str`
+- **Default:** `'finney'`
+- **Description:** Specifies which [Bittensor network](https://docs.learnbittensor.org/concepts/bittensor-networks) to connect to
+
+---
+
+### `BLOCK_DUMPER_START_FROM_BLOCK`
+- **Type:** `str | int | None`
+- **Default:** `None`
+- **Valid Range:** `None`, `'current'`, or any positive integer
+- **Description:** Determines the starting block for processing when the scheduler first runs
+  - `None` → Resume from the last processed block stored in database
+  - `'current'` → Start from the current blockchain block (skips historical blocks)
+  - Integer → Start from a specific block number (e.g., `1000000`)
+
+```python
+BLOCK_DUMPER_START_FROM_BLOCK = 'current'
+```
+
+> **Performance Impact:** Starting from historical blocks may require significant processing time
+
+---
+
+### `BLOCK_DUMPER_POLL_INTERVAL`
+- **Type:** `int`
+- **Default:** `1`
+- **Valid Range:** `1` to `3600` (seconds)
+- **Description:** Seconds to wait between checking for new blocks
+
+```python
+BLOCK_DUMPER_POLL_INTERVAL = 5
+```
+
+> **Performance Impact:**
+> - Lower values (1-2s): Near real-time processing, higher CPU/network usage
+> - Higher values (10-60s): Reduced load but delayed processing
+> - Very low values (<1s): May cause rate limiting
+
+---
+
+### `BLOCK_DUMPER_MAX_ATTEMPTS`
+- **Type:** `int`
+- **Default:** `3`
+- **Valid Range:** `1` to `10`
+- **Description:** Maximum number of attempts to retry a failed task before giving up
+
+```python
+BLOCK_DUMPER_MAX_ATTEMPTS = 5
+```
+
+> **Performance Impact:** Higher values increase resilience but may delay failure detection
+
+---
+
+### `BLOCK_TASK_RETRY_BACKOFF`
+- **Type:** `int`
+- **Default:** `1`
+- **Valid Range:** `1` to `60` (minutes)
+- **Description:** Base number of minutes for exponential backoff retry delays
+- **Calculation:** Actual delay = `backoff ** attempt_count` minutes
   - Attempt 1: 2¹ = 2 minutes
-  - Attempt 2: 2² = 4 minutes
+  - Attempt 2: 2² = 4 minutes
   - Attempt 3: 2³ = 8 minutes
-- **Performance Impact**: Lower values retry faster but may overwhelm failing services
 
-
-
-
-
-
+```python
+BLOCK_TASK_RETRY_BACKOFF = 2
+```
+
+> **Performance Impact:** Lower values retry faster but may overwhelm failing services
+
+---
+
+### `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES`
+- **Type:** `int`
+- **Default:** `1440` (24 hours)
+- **Valid Range:** `1` to `10080` (1 minute to 1 week)
+- **Description:** Maximum delay (in minutes) between retry attempts, caps exponential backoff
+
+```python
+BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720  # 12 hours max
+```
+
+> **Performance Impact:** Prevents extremely long delays while maintaining backoff benefits
 
 
 ## Example Project
````
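For convenience, the configuration options documented in the new reference section above can be collected into a single Django settings excerpt. The snippet below is illustrative only: it restates the documented setting names with the defaults and example values shown above, and is not taken from the package itself.

```python
# Illustrative settings.py excerpt assembled from the documented options.
# Values are the documented defaults/examples, not recommendations from the package.
BITTENSOR_NETWORK = "finney"                  # which Bittensor network to connect to
BLOCK_DUMPER_START_FROM_BLOCK = "current"     # None, 'current', or a block number
BLOCK_DUMPER_POLL_INTERVAL = 5                # seconds between checks for new blocks
BLOCK_DUMPER_MAX_ATTEMPTS = 3                 # retry attempts before giving up
BLOCK_TASK_RETRY_BACKOFF = 2                  # base minutes for exponential backoff
BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720      # cap on the backoff delay (12 hours)
```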
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/README.md
RENAMED

````diff
@@ -76,6 +76,28 @@ INSTALLED_APPS = [
 python manage.py migrate
 ```
 
+4. **Configure Celery to discover block tasks:**
+
+In your project's `celery.py` file, add the following to ensure Celery workers can discover your `@block_task` decorated functions:
+
+```python
+from celery import Celery
+from celery.signals import worker_ready
+from django.conf import settings
+
+app = Celery('your_project')
+app.config_from_object('django.conf:settings', namespace='CELERY')
+app.autodiscover_tasks()
+
+@worker_ready.connect
+def on_worker_ready(**kwargs):
+    """Load block tasks when worker starts."""
+    from abstract_block_dumper.v1.celery import setup_celery_tasks
+    setup_celery_tasks()
+```
+
+> **Important:** Without this step, Celery workers will not recognize your `@block_task` decorated functions, and you'll see "Received unregistered task" errors.
+
 ## Usage
 
 ### 1. Define Block Processing Tasks
@@ -106,7 +128,7 @@ See examples below:
 Use the `@block_task` decorator with lambda conditions to create block processing tasks:
 
 ```python
-from abstract_block_dumper.
+from abstract_block_dumper.v1.decorators import block_task
 
 
 # Process every block
@@ -204,55 +226,92 @@ BLOCK_DUMPER_MAX_ATTEMPTS = 3  # maximum retry attempts
 BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 1440  # maximum retry delay (24 hours)
 ```
 
-… (previous configuration notes, truncated in this extract)
+## Configuration Options Reference
+
+### `BITTENSOR_NETWORK`
+- **Type:** `str`
+- **Default:** `'finney'`
+- **Description:** Specifies which [Bittensor network](https://docs.learnbittensor.org/concepts/bittensor-networks) to connect to
+
+---
+
+### `BLOCK_DUMPER_START_FROM_BLOCK`
+- **Type:** `str | int | None`
+- **Default:** `None`
+- **Valid Range:** `None`, `'current'`, or any positive integer
+- **Description:** Determines the starting block for processing when the scheduler first runs
+  - `None` → Resume from the last processed block stored in database
+  - `'current'` → Start from the current blockchain block (skips historical blocks)
+  - Integer → Start from a specific block number (e.g., `1000000`)
+
+```python
+BLOCK_DUMPER_START_FROM_BLOCK = 'current'
+```
+
+> **Performance Impact:** Starting from historical blocks may require significant processing time
+
+---
+
+### `BLOCK_DUMPER_POLL_INTERVAL`
+- **Type:** `int`
+- **Default:** `1`
+- **Valid Range:** `1` to `3600` (seconds)
+- **Description:** Seconds to wait between checking for new blocks
+
+```python
+BLOCK_DUMPER_POLL_INTERVAL = 5
+```
+
+> **Performance Impact:**
+> - Lower values (1-2s): Near real-time processing, higher CPU/network usage
+> - Higher values (10-60s): Reduced load but delayed processing
+> - Very low values (<1s): May cause rate limiting
+
+---
+
+### `BLOCK_DUMPER_MAX_ATTEMPTS`
+- **Type:** `int`
+- **Default:** `3`
+- **Valid Range:** `1` to `10`
+- **Description:** Maximum number of attempts to retry a failed task before giving up
+
+```python
+BLOCK_DUMPER_MAX_ATTEMPTS = 5
+```
+
+> **Performance Impact:** Higher values increase resilience but may delay failure detection
+
+---
+
+### `BLOCK_TASK_RETRY_BACKOFF`
+- **Type:** `int`
+- **Default:** `1`
+- **Valid Range:** `1` to `60` (minutes)
+- **Description:** Base number of minutes for exponential backoff retry delays
+- **Calculation:** Actual delay = `backoff ** attempt_count` minutes
   - Attempt 1: 2¹ = 2 minutes
-  - Attempt 2: 2² = 4 minutes
+  - Attempt 2: 2² = 4 minutes
   - Attempt 3: 2³ = 8 minutes
-- **Performance Impact**: Lower values retry faster but may overwhelm failing services
 
-
-
-
-
-
+```python
+BLOCK_TASK_RETRY_BACKOFF = 2
+```
+
+> **Performance Impact:** Lower values retry faster but may overwhelm failing services
+
+---
+
+### `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES`
+- **Type:** `int`
+- **Default:** `1440` (24 hours)
+- **Valid Range:** `1` to `10080` (1 minute to 1 week)
+- **Description:** Maximum delay (in minutes) between retry attempts, caps exponential backoff
+
+```python
+BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720  # 12 hours max
+```
+
+> **Performance Impact:** Prevents extremely long delays while maintaining backoff benefits
 
 
 ## Example Project
````
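The retry timing described under `BLOCK_TASK_RETRY_BACKOFF` and `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES` can be sanity-checked with a small sketch. The helper below is hypothetical (it is not part of abstract-block-dumper) and simply applies the documented formula `backoff ** attempt_count`, capped by the maximum delay.

```python
# Hypothetical helper: applies the documented retry-delay formula,
# delay = min(BLOCK_TASK_RETRY_BACKOFF ** attempt_count, BLOCK_TASK_MAX_RETRY_DELAY_MINUTES).
def retry_delay_minutes(attempt_count: int, backoff: int = 2, max_delay: int = 1440) -> int:
    return min(backoff ** attempt_count, max_delay)


# Matches the documented example: attempt 1 -> 2 min, attempt 2 -> 4 min, attempt 3 -> 8 min.
assert [retry_delay_minutes(n) for n in (1, 2, 3)] == [2, 4, 8]
# Very late attempts are capped by the maximum delay.
assert retry_delay_minutes(20, backoff=2, max_delay=1440) == 1440
```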
abstract_block_dumper-0.0.4/example_project/example_project/celery.py

```diff
@@ -0,0 +1,26 @@
+import os
+
+from celery import Celery
+from celery.signals import worker_ready
+from django.conf import settings
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.settings")
+
+app = Celery("example_project")
+
+app.config_from_object(settings, namespace="CELERY")
+
+app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
+
+
+@worker_ready.connect
+def on_worker_ready(**kwargs):
+    """
+    Load block tasks when Celery worker starts.
+
+    This is required for abstract-block-dumper to register @block_task
+    decorated functions so they can receive messages from the broker.
+    """
+    from abstract_block_dumper.v1.celery import setup_celery_tasks
+
+    setup_celery_tasks()
```
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/dal/memory_registry.py
RENAMED

```diff
@@ -30,7 +30,8 @@ class RegistryItem:
                 block_number=block_number,
                 exc_info=True,
             )
-
+            msg = f"Failed to evaluate condition: {e}"
+            raise ConditionEvaluationError(msg) from e
 
     def get_execution_args(self) -> list[dict[str, Any]]:
         """Get list of argument sets for execution."""
@@ -42,7 +43,7 @@ class RegistryItem:
         if hasattr(self.function, "name") and self.function.name is not None:
             return self.function.name
 
-        return "
+        return f"{self.function.__module__}.{self.function.__name__}"
 
     def requires_backfilling(self) -> bool:
         """Check if this item requires backfilling."""
@@ -76,7 +77,7 @@ class MemoryRegistry(BaseRegistry):
             "Registered function",
             function_name=item.function.__name__,
             executable_path=item.executable_path,
-
+            args_counter=len(item.args or []),
             backfilling_lookback=item.backfilling_lookback,
         )
@@ -86,12 +87,11 @@ class MemoryRegistry(BaseRegistry):
     def clear(self) -> None:
         self._functions = []
 
-    def get_by_executable_path(self, executable_path: str) -> RegistryItem:
+    def get_by_executable_path(self, executable_path: str) -> RegistryItem | None:
         for registry_item in self.get_functions():
             if registry_item.executable_path == executable_path:
                 return registry_item
-
-        raise Exception("Function Not Found")
+        return None
 
 
 task_registry = MemoryRegistry()
```
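The last hunk changes the lookup contract: `get_by_executable_path()` now returns `None` for an unknown path instead of raising a bare `Exception`. A standalone sketch of that caller contract, using simplified stand-in classes rather than the package's own, looks like this:

```python
from dataclasses import dataclass


@dataclass
class Item:
    executable_path: str


class Registry:
    """Simplified stand-in for MemoryRegistry, illustrating the Optional return."""

    def __init__(self) -> None:
        self._items: list[Item] = []

    def register(self, item: Item) -> None:
        self._items.append(item)

    def get_by_executable_path(self, executable_path: str) -> Item | None:
        for item in self._items:
            if item.executable_path == executable_path:
                return item
        return None  # callers branch on None instead of catching Exception


registry = Registry()
registry.register(Item("my_app.tasks.dump_block"))
assert registry.get_by_executable_path("missing.path") is None
```

This matches how the block processor below skips retry recovery when no registry item is found.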
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/services/block_processor.py
RENAMED

```diff
@@ -100,22 +100,22 @@ class BlockProcessor:
         This handles tasks that may have been lost due to scheduler restarts.
         """
         retry_count = 0
-        for
+        for retry_attempt in abd_dal.get_ready_to_retry_attempts():
             try:
                 # Find the registry item to get celery_kwargs
-                registry_item = self.registry.get_by_executable_path(
+                registry_item = self.registry.get_by_executable_path(retry_attempt.executable_path)
                 if not registry_item:
                     logger.warning(
                         "Registry item not found for failed task, skipping retry recovery",
-                        task_id=
-                        executable_path=
+                        task_id=retry_attempt.id,
+                        executable_path=retry_attempt.executable_path,
                     )
                     continue
 
                 # Use atomic transaction to prevent race conditions
                 with transaction.atomic():
                     # Re-fetch with select_for_update to prevent concurrent modifications
-                    task_attempt = TaskAttempt.objects.select_for_update(nowait=True).get(id=
+                    task_attempt = TaskAttempt.objects.select_for_update(nowait=True).get(id=retry_attempt.id)
 
                     # Verify task is still in FAILED state and ready for retry
                     if task_attempt.status == TaskAttempt.Status.SUCCESS:
@@ -150,16 +150,16 @@ class BlockProcessor:
             except Exception:
                 logger.error(
                     "Failed to recover retry",
-                    task_id=
+                    task_id=retry_attempt.id,
                     exc_info=True,
                 )
                 # Reload task to see current state after potential execution failure
                 try:
-
+                    retry_attempt.refresh_from_db()
                     # If task is still PENDING after error, revert to FAILED
                     # (execution may have failed before celery task could mark it)
-                    if
-                    abd_dal.revert_to_failed(
+                    if retry_attempt.status == TaskAttempt.Status.PENDING:
+                        abd_dal.revert_to_failed(retry_attempt)
                 except TaskAttempt.DoesNotExist:
                     # Task was deleted during recovery, nothing to revert
                     pass
```
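The recovery loop above depends on `select_for_update(nowait=True)` inside `transaction.atomic()` so that two scheduler instances cannot recover the same attempt. A minimal sketch of that locking pattern, assuming a configured Django project and a `TaskAttempt`-like model (it will not run outside such a project), might look like:

```python
# Sketch only: assumes a configured Django project and a TaskAttempt-like model.
from django.db import OperationalError, transaction


def recover_one(model, attempt_id) -> bool:
    """Try to lock and process a single attempt; skip it if another worker holds the lock."""
    try:
        with transaction.atomic():
            # nowait=True raises OperationalError immediately instead of blocking
            # when another transaction already holds the row lock.
            attempt = model.objects.select_for_update(nowait=True).get(id=attempt_id)
            # ... re-check attempt.status and re-dispatch while the row lock is held ...
            return True
    except OperationalError:
        return False  # already being processed elsewhere; safe to skip
```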
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_internal/services/scheduler.py
RENAMED

```diff
@@ -61,18 +61,20 @@ class TaskScheduler:
         logger.info("TaskScheduler stopped.")
 
     def initialize_last_block(self) -> None:
-
+        # Safe getattr in case setting is not defined
+        start_from_block_setting = getattr(settings, "BLOCK_DUMPER_START_FROM_BLOCK", None)
 
         if start_from_block_setting is not None:
             if start_from_block_setting == "current":
                 self.last_processed_block = self.subtensor.get_current_block()
-                logger.info(
+                logger.info("Starting from current blockchain block", block_number=self.last_processed_block)
 
             elif isinstance(start_from_block_setting, int):
                 self.last_processed_block = start_from_block_setting
-                logger.info(
+                logger.info("Starting from configured block", block_number=self.last_processed_block)
             else:
-
+                error_msg = f"Invalid BLOCK_DUMPER_START_FROM_BLOCK value: {start_from_block_setting}"
+                raise ValueError(error_msg)
         else:
             # Default behavior - resume from database
             last_block_number = abd_dal.get_the_latest_executed_block_number()
```
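The `initialize_last_block()` change reads `BLOCK_DUMPER_START_FROM_BLOCK` with a safe `getattr` default and rejects anything that is not `None`, `'current'`, or an `int`. A standalone sketch of that resolution logic (a hypothetical helper, not the package's API) is:

```python
# Hypothetical helper mirroring the start-block resolution shown above:
# None -> resume from the database, 'current' -> chain head, int -> fixed block.
def resolve_start_block(setting, current_block: int, last_db_block: int) -> int:
    if setting is None:
        return last_db_block
    if setting == "current":
        return current_block
    if isinstance(setting, int):
        return setting
    raise ValueError(f"Invalid BLOCK_DUMPER_START_FROM_BLOCK value: {setting}")


assert resolve_start_block(None, current_block=500, last_db_block=123) == 123
assert resolve_start_block("current", current_block=500, last_db_block=123) == 500
assert resolve_start_block(1_000_000, current_block=500, last_db_block=123) == 1_000_000
```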
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/_version.py
RENAMED

```diff
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.0.2'
-__version_tuple__ = version_tuple = (0, 0, 2)
+__version__ = version = '0.0.4'
+__version_tuple__ = version_tuple = (0, 0, 4)
 
 __commit_id__ = commit_id = None
```
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/management/commands/block_tasks_v1.py
RENAMED

```diff
@@ -9,6 +9,9 @@ class Command(BaseCommand):
     help = "Run the block scheduler daemon."
 
     def handle(self, *args, **options) -> None:
+        """
+        Handle the management command to start the block scheduler.
+        """
         self.stdout.write("Syncing decorated functions...")
         ensure_modules_loaded()
         functions_counter = len(task_registry.get_functions())
```
abstract_block_dumper-0.0.4/src/abstract_block_dumper/v1/celery.py

```diff
@@ -0,0 +1,53 @@
+"""
+Celery integration helpers for abstract-block-dumper.
+
+This module provides utilities to integrate @block_task decorated functions
+with Celery workers.
+"""
+
+from abstract_block_dumper._internal.discovery import ensure_modules_loaded
+
+
+def setup_celery_tasks() -> None:
+    """
+    Discover and register all @block_task decorated functions for Celery.
+
+    This function MUST be called when Celery workers start to ensure that
+    all @block_task decorated functions are registered and available to
+    receive tasks from the message broker.
+
+    Usage in your project's celery.py:
+
+        from celery import Celery
+        from celery.signals import worker_ready
+
+        app = Celery('your_project')
+        app.config_from_object('django.conf:settings', namespace='CELERY')
+        app.autodiscover_tasks()
+
+        @worker_ready.connect
+        def on_worker_ready(**kwargs):
+            '''Load block tasks when worker is ready.'''
+            from abstract_block_dumper.v1.celery import setup_celery_tasks
+            setup_celery_tasks()
+
+    Why is this needed?
+    -------------------
+    The @block_task decorator uses Celery's @shared_task, which requires
+    the decorated functions to be imported before workers can receive
+    messages for those tasks. Without calling this function, you'll see
+    errors like:
+
+        "Received unregistered task of type 'your_app.block_tasks.task_name'"
+
+    What does it do?
+    ----------------
+    - Automatically imports all 'tasks.py' and 'block_tasks.py' modules
+      from your INSTALLED_APPS
+    - Triggers @block_task decorator registration
+    - Makes tasks available to Celery workers
+    """
+    ensure_modules_loaded()
+
+
+__all__ = ["setup_celery_tasks"]
```
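After `setup_celery_tasks()` has run in a worker (or in a Django shell with the app configured), the wrapped tasks should appear in Celery's task registry. The check below is an informal sketch; the name filter is a rough heuristic, not the package's documented naming scheme.

```python
# Informal check, assuming a configured Django project with abstract_block_dumper installed.
from celery import current_app

from abstract_block_dumper.v1.celery import setup_celery_tasks

setup_celery_tasks()

# current_app.tasks maps task names to task instances; list anything that looks block-related.
registered = sorted(name for name in current_app.tasks if "block" in name)
print(registered)
```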
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/v1/decorators.py
RENAMED

```diff
@@ -62,7 +62,9 @@ def schedule_retry(task_attempt: TaskAttempt) -> None:
     )
 
 
-def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] | None:
+def _celery_task_wrapper(
+    func: Callable[..., Any], block_number: int, **kwargs: dict[str, Any]
+) -> dict[str, Any] | None:
     executable_path = abd_utils.get_executable_path(func)
 
     with transaction.atomic():
@@ -72,21 +74,15 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] | None:
                 executable_path=executable_path,
                 args_json=abd_utils.serialize_args(kwargs),
             )
-        except TaskAttempt.DoesNotExist:
-
-
-
-
-            )
-            raise CeleryTaskLockedError("TaskAttempt not found - task may have been canceled directly")
+        except TaskAttempt.DoesNotExist as exc:
+            msg = "TaskAttempt not found - task may have been canceled directly"
+            logger.warning(msg, block_number=block_number, executable_path=executable_path)
+            raise CeleryTaskLockedError(msg) from exc
+
         except OperationalError as e:
-
-
-
-            executable_path=executable_path,
-            operational_error=str(e),
-            )
-            raise CeleryTaskLockedError("Task already being processed by another worker")
+            msg = "Task already being processed by another worker"
+            logger.info(msg, block_number=block_number, executable_path=executable_path, operational_error=str(e))
+            raise CeleryTaskLockedError(msg) from e
 
     if task_attempt.status != TaskAttempt.Status.PENDING:
         logger.info(
@@ -117,11 +113,11 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] | None:
         logger.info("Task completed successfully", task_id=task_attempt.id)
         return {"result": result}
     except Exception as e:
-        logger.
+        logger.exception(
            "Task execution failed",
            task_id=task_attempt.id,
            error_type=type(e).__name__,
-
+            error_message=str(e),
         )
         abd_dal.task_mark_as_failed(task_attempt)
 
@@ -130,10 +126,9 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] | None:
     try:
         schedule_retry(task_attempt)
     except Exception:
-        logger.
+        logger.exception(
             "Failed to schedule retry",
             task_id=task_attempt.id,
-            exc_info=True,
         )
     return None
 
@@ -173,10 +168,11 @@ def block_task(
 
     def decorator(func: Callable[..., Any]) -> Any:
         if not callable(condition):
-
+            msg = "condition must be a callable."
+            raise TypeError(msg)
 
         # Celery task wrapper
-        def shared_celery_task(block_number: int, **kwargs) -> None | Any:
+        def shared_celery_task(block_number: int, **kwargs: dict[str, Any]) -> None | Any:
             """
             Wrapper that handles TaskAttempt tracking and executed the original
             function
@@ -193,13 +189,13 @@ def block_task(
         )(shared_celery_task)
 
         # Store original function referefence for introspection
-        celery_task._original_func = func
+        celery_task._original_func = func  # noqa: SLF001
 
         # Register the Celery task
         task_registry.register_item(
             RegistryItem(
                 condition=condition,
-                function=cast(Task, celery_task),
+                function=cast("Task", celery_task),
                 args=args,
                 backfilling_lookback=backfilling_lookback,
                 celery_kwargs=celery_kwargs or {},
```
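Several hunks above switch to the `msg = ...; raise SomeError(msg) from exc` style, which keeps the original exception attached as `__cause__`. A standalone illustration follows; the exception class here is a hypothetical stand-in, not the package's `CeleryTaskLockedError`.

```python
class TaskLockedSketchError(Exception):
    """Hypothetical stand-in for the package's CeleryTaskLockedError."""


def acquire(locked: bool) -> None:
    try:
        if locked:
            raise RuntimeError("row is locked by another transaction")
    except RuntimeError as exc:
        msg = "Task already being processed by another worker"
        raise TaskLockedSketchError(msg) from exc


try:
    acquire(locked=True)
except TaskLockedSketchError as err:
    # `from exc` preserves the low-level error for debugging via __cause__.
    assert isinstance(err.__cause__, RuntimeError)
```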
abstract_block_dumper-0.0.4/tests/unit/test_celery_integration.py

```diff
@@ -0,0 +1,17 @@
+"""Tests for Celery integration helpers."""
+
+from abstract_block_dumper.v1.celery import setup_celery_tasks
+
+
+def test_setup_celery_tasks_can_be_called():
+    """Test that setup_celery_tasks can be called without errors."""
+    # This should not raise any exceptions
+    setup_celery_tasks()
+
+
+def test_setup_celery_tasks_is_idempotent():
+    """Test that calling setup_celery_tasks multiple times is safe."""
+    # Should be safe to call multiple times (e.g., if worker restarts)
+    setup_celery_tasks()
+    setup_celery_tasks()
+    setup_celery_tasks()
```
abstract_block_dumper-0.0.2/example_project/example_project/celery.py

```diff
@@ -1,12 +0,0 @@
-import os
-
-from celery import Celery
-from django.conf import settings
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.settings")
-
-app = Celery("example_project")
-
-app.config_from_object(settings, namespace="CELERY")
-
-app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
```
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/admin.py
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/apps.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/models.py
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/tasks.py
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/tests.py
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/block_explorer/views.py
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/docker-compose.yml
RENAMED
|
File without changes
|
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/example_project/asgi.py
RENAMED
|
File without changes
|
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/example_project/urls.py
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/example_project/example_project/wsgi.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/admin.py
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/apps.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/models.py
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/py.typed
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/v1/__init__.py
RENAMED
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/src/abstract_block_dumper/v1/tasks.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{abstract_block_dumper-0.0.2 → abstract_block_dumper-0.0.4}/tests/integration/test_scheduler.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|