abstract-block-dumper 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstract_block_dumper/__init__.py +0 -0
- abstract_block_dumper/_version.py +34 -0
- abstract_block_dumper/admin.py +73 -0
- abstract_block_dumper/apps.py +7 -0
- abstract_block_dumper/dal/__init__.py +0 -0
- abstract_block_dumper/dal/django_dal.py +150 -0
- abstract_block_dumper/dal/memory_registry.py +105 -0
- abstract_block_dumper/decorators.py +211 -0
- abstract_block_dumper/discovery.py +24 -0
- abstract_block_dumper/exceptions.py +16 -0
- abstract_block_dumper/management/__init__.py +0 -0
- abstract_block_dumper/management/commands/block_tasks.py +19 -0
- abstract_block_dumper/migrations/0001_initial.py +54 -0
- abstract_block_dumper/migrations/__init__.py +0 -0
- abstract_block_dumper/models.py +59 -0
- abstract_block_dumper/py.typed +0 -0
- abstract_block_dumper/services/__init__.py +0 -0
- abstract_block_dumper/services/block_processor.py +190 -0
- abstract_block_dumper/services/executor.py +54 -0
- abstract_block_dumper/services/scheduler.py +92 -0
- abstract_block_dumper/services/utils.py +51 -0
- abstract_block_dumper/tasks.py +75 -0
- abstract_block_dumper-0.0.1.dist-info/METADATA +311 -0
- abstract_block_dumper-0.0.1.dist-info/RECORD +25 -0
- abstract_block_dumper-0.0.1.dist-info/WHEEL +4 -0
@@ -0,0 +1,311 @@
Metadata-Version: 2.4
Name: abstract-block-dumper
Version: 0.0.1
Project-URL: Source, https://github.com/bactensor/abstract-block-dumper
Project-URL: Issue Tracker, https://github.com/bactensor/abstract-block-dumper/issues
Author-email: Reef Technologies <opensource@reef.pl>
License: MIT
Classifier: Framework :: Django
Classifier: Framework :: Django :: 4.2
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Software Development :: Libraries
Requires-Python: >=3.11
Requires-Dist: bittensor>=9.10.1
Requires-Dist: celery>=5.5.3
Requires-Dist: django<6.0,>=3.2
Requires-Dist: structlog>=25.4.0
Description-Content-Type: text/markdown
# Abstract Block Dumper

[](https://github.com/bactensor/abstract-block-dumper/actions?query=workflow%3A%22Continuous+Integration%22) [](https://pypi.python.org/pypi/abstract_block_dumper) [](https://pypi.python.org/pypi/abstract_block_dumper) [](https://pypi.python.org/pypi/abstract_block_dumper)

This package provides a simplified framework for creating block processing tasks in Django applications.
Define tasks with lambda conditions using the `@block_task` decorator and run them asynchronously with Celery.

## Implementation Details

### General Workflow
Register functions -> detect new blocks -> evaluate conditions -> send to Celery -> execute -> track results -> handle retries.

### Workflow Steps
1. Register
   - Functions are discovered automatically when the scheduler starts.
   - Functions must live in a `tasks.py` or `block_tasks.py` file inside an installed app.
   - Functions marked with the `@block_task` decorator are stored in an in-memory registry.

2. Detect Blocks
   - The scheduler is run by the `block_tasks` management command.
   - The scheduler polls the blockchain, finds new blocks, and batches them.

3. Plan Tasks
   - For each block, lambda conditions are evaluated against the registered functions (see the sketch after this list).
   - Tasks are created for matching conditions (with optional multiple argument sets).

4. Queue
   Tasks are sent to Celery with the queue and timeout settings from `celery_kwargs`.

5. Execute
   Celery runs the function with the block info, capturing results and errors.

6. Track
   Task attempts are stored in the `TaskAttempt` model with retry logic and state tracking.
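
Conceptually, the planning and queueing steps boil down to a loop like the one below. This is an illustrative sketch only; the registry structure and function names here are hypothetical and do not reflect the package's internal API.

```python
# Illustrative sketch of steps 3-4 above; names and structures are hypothetical
# and do not reflect abstract_block_dumper's internals.
from typing import Any

# The in-memory registry conceptually maps each decorated function to its
# condition, optional argument sets, and Celery options.
Registry = list[dict[str, Any]]

def plan_block(block_number: int, registry: Registry) -> None:
    """Evaluate every registered condition for one block and run the matches."""
    for entry in registry:
        for kwargs in entry.get("args", [{}]):
            if entry["condition"](block_number, **kwargs):
                # In the real package this becomes a Celery submission tracked
                # in the TaskAttempt model; here we simply call the function.
                entry["func"](block_number, **kwargs)

# Example: a task that should run every 10 blocks.
registry: Registry = [
    {"condition": lambda bn: bn % 10 == 0,
     "func": lambda bn: print(f"processing block {bn}")},
]
plan_block(100, registry)  # prints "processing block 100"
plan_block(101, registry)  # condition not met, nothing runs
```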

## Prerequisites
- Django
- Celery
- Redis (for Celery broker and result backend)
- PostgreSQL (recommended for production)

## Installation

1. Install the package:
   ```bash
   pip install abstract_block_dumper
   ```

2. Add to your Django `INSTALLED_APPS`:
   ```python
   INSTALLED_APPS = [
       # ... other apps
       'abstract_block_dumper',
   ]
   ```

3. Run migrations:
   ```bash
   python manage.py migrate
   ```

## Usage

### 1. Define Block Processing Tasks
Create block processing tasks in a `tasks.py` or `block_tasks.py` file inside any of your installed Django apps.
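
For example, a minimal `tasks.py` could look like the sketch below; the app name `myapp` and the task name are only illustrative (the fuller examples further down show the complete decorator options):

```python
# myapp/tasks.py (or myapp/block_tasks.py) - discovered automatically when the
# scheduler starts; "myapp" stands for any of your installed Django apps.
from abstract_block_dumper.decorators import block_task


@block_task(condition=lambda bn: bn % 100 == 0)
def snapshot_every_100_blocks(block_number: int):
    print(f"Taking a snapshot at block {block_number}")
```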

### 2. Use Decorators to Register Tasks
- Use `@block_task` with lambda conditions to create custom block processing tasks

### 3. Start the Block Scheduler
Run the scheduler to start processing blocks:
```bash
$ python manage.py block_tasks
```

This command will:
- Automatically discover and register all decorated functions
- Start polling the blockchain for new blocks
- Schedule tasks based on your lambda conditions

### 4. Start Celery Workers
In separate terminals, start Celery workers to execute tasks:
```bash
$ celery -A your_project worker --loglevel=info
```

See examples below:

Use the `@block_task` decorator with lambda conditions to create block processing tasks:

```python
from abstract_block_dumper.decorators import block_task


# Process every block
@block_task(condition=lambda bn: True)
def process_every_block(block_number: int):
    print(f"Processing every block: {block_number}")

# Process every 10 blocks
@block_task(condition=lambda bn: bn % 10 == 0)
def process_every_10_blocks(block_number: int):
    print(f"Processing every 10 blocks: {block_number}")

# Process with multiple netuids
@block_task(
    condition=lambda bn, netuid: bn % 100 == 0,
    args=[{"netuid": 1}, {"netuid": 3}, {"netuid": 22}],
    backfilling_lookback=300,
    celery_kwargs={"queue": "high-priority"}
)
def process_multi_netuid_task(block_number: int, netuid: int):
    print(f"Processing block {block_number} for netuid: {netuid}")
```

## Maintenance Tasks

### Cleanup Old Task Attempts

The framework provides a maintenance task to clean up old task records and maintain database performance:

```python
from abstract_block_dumper.tasks import cleanup_old_tasks

# Delete tasks older than 7 days (default)
cleanup_old_tasks.delay()

# Delete tasks older than 30 days
cleanup_old_tasks.delay(days=30)
```

This task deletes all succeeded or unrecoverably failed tasks older than the specified number of days. It never deletes tasks with PENDING or RUNNING status, so ongoing work is preserved.

#### Running the Cleanup Task

**Option 1: Manual Execution**
```bash
# Using Django shell
python manage.py shell -c "from abstract_block_dumper.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
```

**Option 2: Cron Job (Recommended - once per day)**
```bash
# Add to crontab (daily at 2 AM)
0 2 * * * cd /path/to/your/project && python manage.py shell -c "from abstract_block_dumper.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
```

**Option 3: Celery Beat (Automated Scheduling)**

Add this to your Django `settings.py`:

```python
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    'cleanup-old-tasks': {
        'task': 'abstract_block_dumper.cleanup_old_tasks',
        'schedule': crontab(hour=2, minute=0),  # Daily at 2 AM
        'kwargs': {'days': 7},  # Customize retention period
    },
}
```

Then start the Celery beat scheduler:
```bash
celery -A your_project beat --loglevel=info
```

## Configuration

### Required Django Settings

Add these settings to your Django `settings.py`:

```python
# Celery Configuration
CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'

# Abstract Block Dumper specific settings
BITTENSOR_NETWORK = 'finney'  # Options: 'finney', 'local', 'testnet', 'mainnet'
BLOCK_DUMPER_START_FROM_BLOCK = 'current'  # Options: None, 'current', or int
BLOCK_DUMPER_POLL_INTERVAL = 1  # seconds between polling for new blocks
BLOCK_TASK_RETRY_BACKOFF = 2  # minutes for retry backoff base
BLOCK_DUMPER_MAX_ATTEMPTS = 3  # maximum retry attempts
BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 1440  # maximum retry delay (24 hours)
```

### Configuration Options Reference

#### Core Settings

**BITTENSOR_NETWORK** (str, default: `'finney'`) Specifies which [Bittensor network](https://docs.learnbittensor.org/concepts/bittensor-networks) to connect to.

**BLOCK_DUMPER_START_FROM_BLOCK** (str|int|None, default: `None`)
- **Purpose**: Determines the starting block for processing when the scheduler first runs
- **Valid Values**:
  - `None`: Resume from the last processed block stored in the database
  - `'current'`: Start from the current blockchain block (skips historical blocks)
  - `int`: Start from a specific block number (e.g., `1000000`)
- **Example**: `BLOCK_DUMPER_START_FROM_BLOCK = 'current'`
- **Performance Impact**: Starting from historical blocks may require significant processing time

#### Scheduler Settings

**BLOCK_DUMPER_POLL_INTERVAL** (int, default: `1`)
- **Purpose**: Seconds to wait between checks for new blocks
- **Valid Range**: `1` to `3600` (1 second to 1 hour)
- **Example**: `BLOCK_DUMPER_POLL_INTERVAL = 5`
- **Performance Impact**:
  - Lower values (1-2s): Near real-time processing, higher CPU/network usage
  - Higher values (10-60s): Reduced load but delayed processing
  - Very low values (<1s) may cause rate limiting

#### Retry and Error Handling Settings

**BLOCK_DUMPER_MAX_ATTEMPTS** (int, default: `3`)
- **Purpose**: Maximum number of attempts for a failed task before giving up
- **Valid Range**: `1` to `10`
- **Example**: `BLOCK_DUMPER_MAX_ATTEMPTS = 5`
- **Performance Impact**: Higher values increase resilience but may delay failure detection

**BLOCK_TASK_RETRY_BACKOFF** (int, default: `1`)
- **Purpose**: Base number of minutes for exponential backoff retry delays
- **Valid Range**: `1` to `60`
- **Example**: `BLOCK_TASK_RETRY_BACKOFF = 2`
- **Calculation**: Actual delay = `backoff ** attempt_count` minutes (see the sketch at the end of this section); with `BLOCK_TASK_RETRY_BACKOFF = 2`:
  - Attempt 1: 2¹ = 2 minutes
  - Attempt 2: 2² = 4 minutes
  - Attempt 3: 2³ = 8 minutes
- **Performance Impact**: Lower values retry faster but may overwhelm failing services

**BLOCK_TASK_MAX_RETRY_DELAY_MINUTES** (int, default: `1440`)
- **Purpose**: Maximum delay (in minutes) between retry attempts; caps the exponential backoff
- **Valid Range**: `1` to `10080` (1 minute to 1 week)
- **Example**: `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720` # 12 hours max
- **Performance Impact**: Prevents extremely long delays while maintaining backoff benefits
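
Putting `BLOCK_TASK_RETRY_BACKOFF`, `BLOCK_DUMPER_MAX_ATTEMPTS`, and `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES` together, the delay arithmetic described above works out as in this minimal sketch (the `retry_delay_minutes` helper is illustrative only, not part of the package API):

```python
# Illustrative sketch of the capped exponential backoff described above.
# retry_delay_minutes is a hypothetical helper, not abstract_block_dumper API.
BLOCK_TASK_RETRY_BACKOFF = 2               # base, in minutes
BLOCK_DUMPER_MAX_ATTEMPTS = 3              # attempts before giving up
BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 1440  # cap the delay at 24 hours

def retry_delay_minutes(attempt_count: int) -> int:
    """Exponential backoff in minutes, capped at the configured maximum."""
    return min(BLOCK_TASK_RETRY_BACKOFF ** attempt_count,
               BLOCK_TASK_MAX_RETRY_DELAY_MINUTES)

for attempt in range(1, BLOCK_DUMPER_MAX_ATTEMPTS + 1):
    print(f"attempt {attempt}: retry in {retry_delay_minutes(attempt)} minutes")
# attempt 1: retry in 2 minutes
# attempt 2: retry in 4 minutes
# attempt 3: retry in 8 minutes
```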

## Example Project

The repository includes a complete working example in the `example_project/` directory that demonstrates:

- Django application setup with abstract-block-dumper
- Multiple task types (`@every_block`, `@every_n_blocks` with different configurations)
- Error handling with a randomly failing task
- Docker Compose setup with all required services
- Monitoring with Flower (Celery monitoring tool)

### Running the Example

```bash
cd example_project
docker-compose up --build
```

This starts:
- **Django application** (http://localhost:8000) - Admin interface (user: `admin`, password: `admin`)
- **Celery workers** - Execute block processing tasks
- **Block scheduler** - Monitors the blockchain and schedules tasks
- **Flower monitoring** (http://localhost:5555) - Monitor Celery tasks
- **Redis & PostgreSQL** - Required services

## Development

Prerequisites:
- [uv](https://docs.astral.sh/uv/)
- [nox](https://nox.thea.codes/en/stable/)
- [docker](https://www.docker.com/) and [docker compose plugin](https://docs.docker.com/compose/)

Ideally, you should run `nox -t format lint` before every commit to ensure that the code is properly formatted and linted.
Before submitting a PR, make sure that tests pass as well; you can do so using:
```
nox -t check  # equivalent to `nox -t format lint test`
```

If you wish to install dependencies into `.venv` so your IDE can pick them up, you can do so using:
```
uv sync --all-extras --dev
```

### Release process

Run `nox -s make_release -- X.Y.Z`, where `X.Y.Z` is the version you're releasing, and follow the printed instructions.
@@ -0,0 +1,25 @@
abstract_block_dumper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
abstract_block_dumper/_version.py,sha256=qf6R-J7-UyuABBo8c0HgaquJ8bejVbf07HodXgwAwgQ,704
abstract_block_dumper/admin.py,sha256=3J3I_QOKFgfMNpTXW-rTQGO_q5Ls6uNuL0FkPVdIsYg,1654
abstract_block_dumper/apps.py,sha256=DXATdrjsL3T2IletTbKeD6unr8ScLaxg7wz0nAHTAns,215
abstract_block_dumper/decorators.py,sha256=lV1ueIlEbBNojnXVH5GQiRCbck3-SQgtWOil5OqeTHo,7061
abstract_block_dumper/discovery.py,sha256=kZlb8y-0ltJE-L-1GLxZ_xlziibY8AjggvHJ9sxsScw,728
abstract_block_dumper/exceptions.py,sha256=EunFH-H5eXNNkKl2CvHlhZ2wvtdry969Gle-CZc7YM0,315
abstract_block_dumper/models.py,sha256=l229tar4FdQ52eETLKGeskgkXHWa4ealF6DWbG8M4Mc,2012
abstract_block_dumper/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
abstract_block_dumper/tasks.py,sha256=8ppGWxML3krVdrS_08WnKuCpERRhB_6DIyVEkpYZMrw,2638
abstract_block_dumper/dal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
abstract_block_dumper/dal/django_dal.py,sha256=unAA4Mt5dBBaUhvyezfyC0VtWMD6Ru79NyjKaOMNNSw,5359
abstract_block_dumper/dal/memory_registry.py,sha256=rgU2CYGm2MHPgSZefgr-kuLxOtPu5wxINa3Y5ELgMUo,3029
abstract_block_dumper/management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
abstract_block_dumper/management/commands/block_tasks.py,sha256=dEfFnoZCIIDsrNL5vRPtIDrkpcJk36yev_aoGAScgoQ,758
abstract_block_dumper/migrations/0001_initial.py,sha256=ImPHC3G6kPkq4Xn_4YVAm4Labh1Xi7PkCRszYRGpTiI,2298
abstract_block_dumper/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
abstract_block_dumper/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
abstract_block_dumper/services/block_processor.py,sha256=4guYwtVYh-N1UewiqVN7xG5pM56adyGe8vPG_kCDmQI,8072
abstract_block_dumper/services/executor.py,sha256=TDbrtVGiz7GNGJwHYB6ZqqhrrTDDL7JGzxOehpF-QTY,1786
abstract_block_dumper/services/scheduler.py,sha256=zKY24zSwjcQSVk3wt39GBurSNXkfylWsdV7Mgmv1RO8,3443
abstract_block_dumper/services/utils.py,sha256=Iqa-9xhNxOCnvSWjGBclOUvmO4qsUhhievUllVh82I4,1286
abstract_block_dumper-0.0.1.dist-info/METADATA,sha256=g26Qm3r1ZwH5OCCVCndRdjwnn-VY4YdIxwQyGKuVHXA,11022
abstract_block_dumper-0.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
abstract_block_dumper-0.0.1.dist-info/RECORD,,