abstract-block-dumper 0.1.0__tar.gz → 0.1.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/CHANGELOG.md +10 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/PKG-INFO +1 -1
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/dal/django_dal.py +15 -13
- abstract_block_dumper-0.1.2/src/abstract_block_dumper/_internal/providers/bittensor_client.py +119 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/services/backfill_scheduler.py +9 -14
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/services/block_processor.py +24 -3
- abstract_block_dumper-0.1.2/src/abstract_block_dumper/_internal/services/scheduler.py +117 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_version.py +2 -2
- abstract_block_dumper-0.1.2/tests/conftest.py +115 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/integration/test_scheduler.py +34 -0
- abstract_block_dumper-0.1.2/tests/unit/test_decorator.py +0 -0
- abstract_block_dumper-0.1.0/src/abstract_block_dumper/_internal/services/scheduler.py +0 -178
- abstract_block_dumper-0.1.0/tests/conftest.py +0 -65
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/.cruft.json +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/.github/dependabot.yml +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/.github/workflows/ci.yml +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/.github/workflows/publish.yml +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/.gitignore +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/.pre-commit-config.yaml +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/.shellcheckrc +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/README.md +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/SECURITY.md +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/docs/3rd_party/cookiecutter-rt-pkg/CHANGELOG.md +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/.dockerignore +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/.gitignore +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/Dockerfile +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/README.md +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/admin.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/apps.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/management/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/management/commands/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/management/commands/create_admin.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/migrations/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/models.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/tasks.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/tests.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/block_explorer/views.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/docker-compose.yml +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/example_project/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/example_project/asgi.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/example_project/celery.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/example_project/settings.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/example_project/urls.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/example_project/wsgi.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/main.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/manage.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/pyproject.toml +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/pytest.ini +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/example_project/uv.lock +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/noxfile.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/pyproject.toml +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/dal/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/dal/memory_registry.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/discovery.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/exceptions.py +0 -0
- {abstract_block_dumper-0.1.0/src/abstract_block_dumper/_internal/services → abstract_block_dumper-0.1.2/src/abstract_block_dumper/_internal/providers}/__init__.py +0 -0
- {abstract_block_dumper-0.1.0/src/abstract_block_dumper/management → abstract_block_dumper-0.1.2/src/abstract_block_dumper/_internal/services}/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/services/executor.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/services/metrics.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/services/utils.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/admin.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/apps.py +0 -0
- {abstract_block_dumper-0.1.0/src/abstract_block_dumper/management/commands → abstract_block_dumper-0.1.2/src/abstract_block_dumper/management}/__init__.py +0 -0
- {abstract_block_dumper-0.1.0/src/abstract_block_dumper/migrations → abstract_block_dumper-0.1.2/src/abstract_block_dumper/management/commands}/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/management/commands/backfill_blocks_v1.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/management/commands/block_tasks_v1.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/migrations/0001_initial.py +0 -0
- {abstract_block_dumper-0.1.0/src/abstract_block_dumper/v1 → abstract_block_dumper-0.1.2/src/abstract_block_dumper/migrations}/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/models.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/py.typed +0 -0
- {abstract_block_dumper-0.1.0/tests → abstract_block_dumper-0.1.2/src/abstract_block_dumper/v1}/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/v1/celery.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/v1/decorators.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/v1/tasks.py +0 -0
- {abstract_block_dumper-0.1.0/tests/integration → abstract_block_dumper-0.1.2/tests}/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/django_fixtures.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/fatories.py +0 -0
- /abstract_block_dumper-0.1.0/tests/unit/test_decorator.py → /abstract_block_dumper-0.1.2/tests/integration/__init__.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/integration/test_backfill_scheduler.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/integration/test_block_processor.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/integration/test_concurrent_processing.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/integration/test_multi_arguments_tasks.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/integration/test_registered_celery_tasks.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/integration/test_task_registration.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/settings.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/unit/test_celery_integration.py +0 -0
- {abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/uv.lock +0 -0
{abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/CHANGELOG.md
RENAMED
@@ -9,6 +9,16 @@ upcoming release can be found in [changelog.d](changelog.d).
 
 <!-- towncrier release notes start -->
 
+## [0.1.2](https://github.com/bactensor/abstract-block-dumper/releases/tag/v0.1.2) - 2026-01-12
+
+No significant changes.
+
+
+## [0.1.1](https://github.com/bactensor/abstract-block-dumper/releases/tag/v0.1.1) - 2025-12-18
+
+No significant changes.
+
+
 ## [0.1.0](https://github.com/bactensor/abstract-block-dumper/releases/tag/v0.1.0) - 2025-12-10
 
 No significant changes.
{abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract-block-dumper
-Version: 0.1.0
+Version: 0.1.2
 Project-URL: Source, https://github.com/bactensor/abstract-block-dumper
 Project-URL: Issue Tracker, https://github.com/bactensor/abstract-block-dumper/issues
 Author-email: Reef Technologies <opensource@reef.pl>
{abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/dal/django_dal.py
RENAMED
@@ -1,8 +1,10 @@
+from collections.abc import Iterator
 from datetime import timedelta
 from typing import Any
 
 from django.conf import settings
 from django.db import transaction
+from django.db.models import Max
 from django.db.models.query import QuerySet
 from django.utils import timezone
 
@@ -10,19 +12,21 @@ import abstract_block_dumper._internal.services.utils as abd_utils
 import abstract_block_dumper.models as abd_models
 
 
-def get_ready_to_retry_attempts() ->
-    return
-
-
-
-
-
+def get_ready_to_retry_attempts() -> Iterator[abd_models.TaskAttempt]:
+    return (
+        abd_models.TaskAttempt.objects.filter(
+            next_retry_at__isnull=False,
+            next_retry_at__lte=timezone.now(),
+            attempt_count__lt=abd_utils.get_max_attempt_limit(),
+        )
+        .exclude(
+            status=abd_models.TaskAttempt.Status.SUCCESS,
+        )
+        .iterator()
     )
 
 
 def executed_block_numbers(executable_path: str, args_json: str, from_block: int, to_block: int) -> set[int]:
-    # Use iterator() to avoid Django's QuerySet caching which causes memory leaks
-    # during long-running backfill operations
     block_numbers = (
         abd_models.TaskAttempt.objects.filter(
             executable_path=executable_path,
@@ -151,7 +155,5 @@ def task_create_or_get_pending(
 
 
 def get_the_latest_executed_block_number() -> int | None:
-
-
-        return qs.block_number
-    return None
+    result = abd_models.TaskAttempt.objects.aggregate(max_block=Max("block_number"))
+    return result["max_block"]
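Note that get_ready_to_retry_attempts() now ends in .iterator(), so it hands back a streaming iterator instead of a cached QuerySet: results can only be consumed once and cannot be sliced with qs[:n], which is why block_processor.py below switches to itertools.islice. A minimal sketch of the difference (not package code; it assumes a configured Django project with this app installed):

# Hedged sketch: consuming the streaming retry query.
import itertools

import abstract_block_dumper.models as abd_models

qs = abd_models.TaskAttempt.objects.exclude(status=abd_models.TaskAttempt.Status.SUCCESS)
cached = list(qs)                    # a QuerySet caches its results and supports qs[:10]
streamed = qs.iterator()             # .iterator() streams rows: single pass, no cache, no slicing
first_ten = list(itertools.islice(streamed, 10))  # cap a plain iterator with islice instead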
abstract_block_dumper-0.1.2/src/abstract_block_dumper/_internal/providers/bittensor_client.py
@@ -0,0 +1,119 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import bittensor as bt
+import structlog
+
+import abstract_block_dumper._internal.services.utils as abd_utils
+
+if TYPE_CHECKING:
+    import types
+
+logger = structlog.get_logger(__name__)
+
+
+# Blocks older than this threshold from current head require archive network
+ARCHIVE_BLOCK_THRESHOLD = 300
+
+
+class BittensorConnectionClient:
+    """
+    Manages connections to regular and archive Bittensor subtensor networks.
+
+    Supports context manager protocol for safe connection cleanup:
+        with BittensorConnectionClient(network="finney") as client:
+            block = client.subtensor.get_current_block()
+    """
+
+    def __init__(self, network: str) -> None:
+        self.network = network
+        self._subtensor: bt.Subtensor | None = None
+        self._archive_subtensor: bt.Subtensor | None = None
+        self._current_block_cache: int | None = None
+
+    def __enter__(self) -> BittensorConnectionClient:
+        """Context manager entry."""
+        return self
+
+    def __exit__(
+        self,
+        _exc_type: type[BaseException] | None,
+        _exc_val: BaseException | None,
+        _exc_tb: types.TracebackType | None,
+    ) -> None:
+        """Context manager exit - ensures connections are closed."""
+        self.close()
+
+    def close(self) -> None:
+        """Close all subtensor connections to prevent memory leaks."""
+        if self._subtensor is not None:
+            try:
+                self._subtensor.close()
+            except Exception:
+                logger.warning("Error closing subtensor connection", exc_info=True)
+            self._subtensor = None
+
+        if self._archive_subtensor is not None:
+            try:
+                self._archive_subtensor.close()
+            except Exception:
+                logger.warning("Error closing archive subtensor connection", exc_info=True)
+            self._archive_subtensor = None
+
+        self._current_block_cache = None
+        logger.debug("Subtensor connections closed")
+
+    def get_for_block(self, block_number: int) -> bt.Subtensor:
+        """Get the appropriate subtensor client for the given block number."""
+        raise NotImplementedError
+
+    @property
+    def subtensor(self) -> bt.Subtensor:
+        """Get the regular subtensor connection, creating it if needed."""
+        if self._subtensor is None:
+            self._subtensor = abd_utils.get_bittensor_client(self.network)
+        return self._subtensor
+
+    @subtensor.setter
+    def subtensor(self, value: bt.Subtensor | None) -> None:
+        """Set or reset the subtensor connection."""
+        self._subtensor = value
+
+    @property
+    def archive_subtensor(self) -> bt.Subtensor:
+        """Get the archive subtensor connection, creating it if needed."""
+        if self._archive_subtensor is None:
+            self._archive_subtensor = abd_utils.get_bittensor_client("archive")
+        return self._archive_subtensor
+
+    @archive_subtensor.setter
+    def archive_subtensor(self, value: bt.Subtensor | None) -> None:
+        """Set or reset the archive subtensor connection."""
+        self._archive_subtensor = value
+
+    def get_subtensor_for_block(self, block_number: int) -> bt.Subtensor:
+        """
+        Get the appropriate subtensor for the given block number.
+
+        Uses archive network for blocks older than ARCHIVE_BLOCK_THRESHOLD
+        from the current head.
+        """
+        if self._current_block_cache is None:
+            self._current_block_cache = self.subtensor.get_current_block()
+
+        blocks_behind = self._current_block_cache - block_number
+
+        if blocks_behind > ARCHIVE_BLOCK_THRESHOLD:
+            logger.debug(
+                "Using archive network for old block",
+                block_number=block_number,
+                blocks_behind=blocks_behind,
+            )
+            return self.archive_subtensor
+        return self.subtensor
+
+    def refresh_connections(self) -> None:
+        """Close and reset all subtensor connections to force re-establishment."""
+        self.close()
+        logger.info("Subtensor connections refreshed")
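A short usage sketch based on the docstring and threshold above (not package code; it assumes the bittensor dependency is installed, the network is reachable, and the head is high enough for the subtraction to make sense):

# Hedged usage sketch for BittensorConnectionClient.
from abstract_block_dumper._internal.providers.bittensor_client import BittensorConnectionClient

with BittensorConnectionClient(network="finney") as client:
    head = client.subtensor.get_current_block()
    recent = client.get_subtensor_for_block(head)             # regular connection (lag <= 300)
    historical = client.get_subtensor_for_block(head - 400)   # archive connection (lag > 300)
# __exit__ calls close(), so both connections are released here.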
{abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/services/backfill_scheduler.py
RENAMED
@@ -15,7 +15,7 @@ import structlog
 
 import abstract_block_dumper._internal.dal.django_dal as abd_dal
 import abstract_block_dumper._internal.services.utils as abd_utils
-from abstract_block_dumper._internal.services.block_processor import
+from abstract_block_dumper._internal.services.block_processor import BaseBlockProcessor, block_processor_factory
 from abstract_block_dumper._internal.services.metrics import (
     BlockProcessingTimer,
     increment_archive_network_usage,
@@ -36,8 +36,6 @@ logger = structlog.get_logger(__name__)
 # Blocks older than this threshold from current head require archive network
 ARCHIVE_BLOCK_THRESHOLD = 300
 
-# Progress logging interval
-PROGRESS_LOG_INTERVAL = 100
 ARCHIVE_NETWORK = "archive"
 
 # Memory cleanup interval (every N blocks)
@@ -59,7 +57,7 @@ class BackfillScheduler:
 
     def __init__(
         self,
-        block_processor:
+        block_processor: BaseBlockProcessor,
         network: str,
         from_block: int,
         to_block: int,
@@ -281,16 +279,13 @@ class BackfillScheduler:
             if self._current_head_cache:
                 set_block_lag("backfill", self._current_head_cache - block_number)
 
-            # Log
-
-
-
-
-
-
-                progress_percent=f"{progress_pct:.1f}%",
-                current_block=block_number,
-            )
+            # Log each block being processed
+            progress_pct = (processed_count / total_blocks) * 100
+            logger.info(
+                "Backfilling block",
+                block=block_number,
+                progress=f"{processed_count}/{total_blocks} ({progress_pct:.1f}%)",
+            )
 
             # Rate limiting between block submissions
             if block_number < self.to_block and self.rate_limit > 0:
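The rewritten loop logs one structured "Backfilling block" event per block instead of sampling every PROGRESS_LOG_INTERVAL blocks. A hedged sketch of the call in isolation (counter values are made up for illustration; the exact rendered output depends on the configured structlog processors):

# Hedged sketch: shape of the per-block progress event emitted above.
import structlog

logger = structlog.get_logger(__name__)

processed_count, total_blocks, block_number = 42, 1000, 5123456
progress_pct = (processed_count / total_blocks) * 100
logger.info(
    "Backfilling block",
    block=block_number,
    progress=f"{processed_count}/{total_blocks} ({progress_pct:.1f}%)",
)
# With the default console renderer this prints something like:
#   ... [info] Backfilling block  block=5123456 progress=42/1000 (4.2%)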
{abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_internal/services/block_processor.py
RENAMED
@@ -1,4 +1,6 @@
+import itertools
 import time
+from typing import Protocol
 
 import structlog
 from django.db import transaction
@@ -12,6 +14,25 @@ from abstract_block_dumper.models import TaskAttempt
 logger = structlog.get_logger(__name__)
 
 
+class BaseBlockProcessor(Protocol):
+    """Protocol defining the interface for block processors."""
+
+    executor: CeleryExecutor
+    registry: BaseRegistry
+
+    def process_block(self, block_number: int) -> None:
+        """Process a single block - executes registered tasks for this block only."""
+        ...
+
+    def process_registry_item(self, registry_item: RegistryItem, block_number: int) -> None:
+        """Process a single registry item for a given block."""
+        ...
+
+    def recover_failed_retries(self, poll_interval: int, batch_size: int | None = None) -> None:
+        """Recover failed tasks that are ready to be retried."""
+        ...
+
+
 class BlockProcessor:
     def __init__(self, executor: CeleryExecutor, registry: BaseRegistry) -> None:
         self.executor = executor
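BaseBlockProcessor is a typing.Protocol, so any object whose attributes and methods match this shape satisfies it structurally; no inheritance is required (the MockedBlockProcessor added in tests/conftest.py further down relies on exactly that). A self-contained sketch of the mechanism with a deliberately trimmed protocol, so it runs without the package:

# Hedged, simplified sketch of structural typing with typing.Protocol.
# The real BaseBlockProcessor also declares executor/registry attributes and two more methods.
from typing import Protocol


class BlockProcessorLike(Protocol):
    def process_block(self, block_number: int) -> None: ...


class RecordingProcessor:  # note: does not inherit from the protocol
    def __init__(self) -> None:
        self.seen: list[int] = []

    def process_block(self, block_number: int) -> None:
        self.seen.append(block_number)


def drive(processor: BlockProcessorLike, block_number: int) -> None:
    processor.process_block(block_number)


drive(RecordingProcessor(), 123)  # accepted by type checkers because the shape matches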
@@ -59,9 +80,9 @@ class BlockProcessor:
         retry_count = 0
         retry_attempts = abd_dal.get_ready_to_retry_attempts()
 
-        # Apply batch size limit if specified
+        # Apply batch size limit if specified (use islice for iterator compatibility)
         if batch_size is not None:
-            retry_attempts = retry_attempts
+            retry_attempts = itertools.islice(retry_attempts, batch_size)
 
         for retry_attempt in retry_attempts:
             time.sleep(poll_interval)
@@ -147,7 +168,7 @@ class BlockProcessor:
 def block_processor_factory(
     executor: CeleryExecutor | None = None,
     registry: BaseRegistry | None = None,
-) ->
+) -> BaseBlockProcessor:
     return BlockProcessor(
         executor=executor or CeleryExecutor(),
         registry=registry or task_registry,
abstract_block_dumper-0.1.2/src/abstract_block_dumper/_internal/services/scheduler.py
@@ -0,0 +1,117 @@
+import time
+from typing import Protocol
+
+import structlog
+from django.conf import settings
+
+import abstract_block_dumper._internal.dal.django_dal as abd_dal
+from abstract_block_dumper._internal.providers.bittensor_client import BittensorConnectionClient
+from abstract_block_dumper._internal.services.block_processor import BaseBlockProcessor, block_processor_factory
+from abstract_block_dumper._internal.services.metrics import (
+    BlockProcessingTimer,
+    increment_blocks_processed,
+    set_block_lag,
+    set_current_block,
+    set_registered_tasks,
+)
+
+logger = structlog.get_logger(__name__)
+
+
+class BlockStateResolver(Protocol):
+    """Protocol defining the interface for block state resolvers."""
+
+    def get_starting_block(self) -> int:
+        """Determine which block to start processing from."""
+        ...
+
+
+class DefaultBlockStateResolver:
+    """Default implementation that reads from settings and database."""
+
+    def __init__(self, bittensor_client: BittensorConnectionClient) -> None:
+        self.bittensor_client = bittensor_client
+
+    def get_starting_block(self) -> int:
+        start_setting = getattr(settings, "BLOCK_DUMPER_START_FROM_BLOCK", None)
+        if start_setting == "current":
+            return self.bittensor_client.subtensor.get_current_block()
+        if isinstance(start_setting, int):
+            return start_setting
+
+        # Default: resume from DB or current
+        return abd_dal.get_the_latest_executed_block_number() or self.bittensor_client.subtensor.get_current_block()
+
+
+class TaskScheduler:
+    def __init__(
+        self,
+        block_processor: BaseBlockProcessor,
+        bittensor_client: BittensorConnectionClient,
+        state_resolver: BlockStateResolver,
+        poll_interval: int,
+    ) -> None:
+        self.block_processor = block_processor
+        self.poll_interval = poll_interval
+        self.bittensor_client = bittensor_client
+        self.last_processed_block = state_resolver.get_starting_block()
+        self.is_running = False
+
+    def start(self) -> None:
+        self.is_running = True
+
+        registered_tasks_count = len(self.block_processor.registry.get_functions())
+        set_registered_tasks(registered_tasks_count)
+
+        logger.info(
+            "TaskScheduler started",
+            last_processed_block=self.last_processed_block,
+            registry_functions=registered_tasks_count,
+        )
+
+        while self.is_running:
+            try:
+                current_block = self.bittensor_client.subtensor.get_current_block()
+
+                # Only process the current head block, skip if already processed
+                if current_block != self.last_processed_block:
+                    with BlockProcessingTimer(mode="realtime"):
+                        self.block_processor.process_block(current_block)
+
+                    set_current_block("realtime", current_block)
+                    increment_blocks_processed("realtime")
+                    set_block_lag("realtime", 0)  # Head-only mode has no lag
+                    self.last_processed_block = current_block
+
+                time.sleep(self.poll_interval)
+
+            except KeyboardInterrupt:
+                logger.info("TaskScheduler stopping due to KeyboardInterrupt.")
+                self.stop()
+                break
+            except Exception:
+                logger.exception("Error in TaskScheduler loop")
+                time.sleep(self.poll_interval)
+
+    def stop(self) -> None:
+        self.is_running = False
+        self.bittensor_client.close()
+        logger.info("TaskScheduler stopped.")
+
+
+def task_scheduler_factory(network: str = "finney") -> TaskScheduler:
+    """
+    Factory for TaskScheduler.
+
+    Args:
+        network (str): Bittensor network name. Defaults to "finney"
+
+    """
+    bittensor_client = BittensorConnectionClient(network=network)
+    state_resolver = DefaultBlockStateResolver(bittensor_client=bittensor_client)
+    return TaskScheduler(
+        block_processor=block_processor_factory(),
+        poll_interval=getattr(settings, "BLOCK_DUMPER_POLL_INTERVAL", 5),
+        bittensor_client=bittensor_client,
+        state_resolver=state_resolver,
+    )
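A hedged sketch of how this wires together in a deployment (not package code; the Django settings names are the ones read above and both are optional):

# Hedged usage sketch: building and running the head-following scheduler.
# Optional settings consulted by the module above:
#   BLOCK_DUMPER_START_FROM_BLOCK = "current", an int, or unset (resume from the DB)
#   BLOCK_DUMPER_POLL_INTERVAL    = seconds between head checks (defaults to 5)
from abstract_block_dumper._internal.services.scheduler import task_scheduler_factory

scheduler = task_scheduler_factory(network="finney")
scheduler.start()  # blocks: polls the chain head each interval and processes new blocks;
                   # Ctrl-C (KeyboardInterrupt) triggers stop(), which closes the connections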
{abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/src/abstract_block_dumper/_version.py
RENAMED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.1.0'
-__version_tuple__ = version_tuple = (0, 1, 0)
+__version__ = version = '0.1.2'
+__version_tuple__ = version_tuple = (0, 1, 2)
 
 __commit_id__ = commit_id = None
abstract_block_dumper-0.1.2/tests/conftest.py
@@ -0,0 +1,115 @@
+from collections.abc import Generator
+from typing import Any, NoReturn
+
+import django
+import pytest
+from celery import Celery
+from django.conf import settings
+
+from abstract_block_dumper._internal.dal.memory_registry import RegistryItem, task_registry
+from abstract_block_dumper.v1.decorators import block_task
+
+from .django_fixtures import *  # noqa: F401, F403
+
+# Ensure Django is set up
+if not settings.configured:
+    django.setup()
+
+
+class MockedBlockProcessor:
+    """Mock implementation of BaseBlockProcessor for testing."""
+
+    def __init__(self, executor: Any = None, registry: Any = None) -> None:
+        self.executor = executor
+        self.registry = registry or task_registry
+        self.processed_blocks: list[int] = []
+        self.processed_registry_items: list[tuple[RegistryItem, int]] = []
+        self.recover_failed_retries_calls: list[tuple[int, int | None]] = []
+
+    def process_block(self, block_number: int) -> None:
+        self.processed_blocks.append(block_number)
+
+    def process_registry_item(self, registry_item: RegistryItem, block_number: int) -> None:
+        self.processed_registry_items.append((registry_item, block_number))
+
+    def recover_failed_retries(self, poll_interval: int, batch_size: int | None = None) -> None:
+        self.recover_failed_retries_calls.append((poll_interval, batch_size))
+
+
+class MockedSubtensor:
+    """Mock implementation of bt.Subtensor for testing."""
+
+    def __init__(self, current_block: int = 1000) -> None:
+        self._current_block = current_block
+
+    def get_current_block(self) -> int:
+        return self._current_block
+
+    def set_current_block(self, block: int) -> None:
+        self._current_block = block
+
+
+@pytest.fixture(autouse=True)
+def celery_test_app() -> Generator[Celery, Any, None]:
+    """Configure Celery for testing with eager mode."""
+    app = Celery("test_app")
+    app.config_from_object(settings, namespace="CELERY")
+    return app
+
+
+def every_block_task_func(block_number: int):
+    """
+    Test function for every block execution.
+    """
+    return f"Processed block {block_number}"
+
+
+def modulo_task_func(block_number: int, netuid: int):
+    """
+    Test function for modulo condition execution.
+    """
+    return f"Modulo task processed block {block_number} for netuid {netuid}"
+
+
+def failing_task_func(block_number: int) -> NoReturn:
+    """
+    Test function that always fails.
+    """
+    raise ValueError("Test error")
+
+
+@pytest.fixture
+def setup_test_tasks():
+    # Register test tasks using decorators
+
+    # every block
+    block_task(condition=lambda bn: True)(every_block_task_func)
+
+    # every 5 blocks
+    block_task(condition=lambda bn, netuid: bn % 5 == 0, args=[{"netuid": 1}, {"netuid": 2}])(modulo_task_func)
+
+    yield
+
+
+@pytest.fixture(autouse=True)
+def cleanup_memory_registry():
+    task_registry.clear()
+
+    yield
+
+    task_registry.clear()
+
+
+@pytest.fixture
+def mock_block_processor() -> MockedBlockProcessor:
+    return MockedBlockProcessor()
+
+
+@pytest.fixture
+def mock_subtensor() -> MockedSubtensor:
+    return MockedSubtensor(current_block=1000)
+
+
+@pytest.fixture
+def mock_archive_subtensor() -> MockedSubtensor:
+    return MockedSubtensor(current_block=1000)
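A hedged sketch of a test that leans on these fixtures (illustrative only, not a test from the package; it assumes pytest-django's settings fixture is available, as the Django-based test setup suggests):

# Hedged sketch: an explicit start-block setting short-circuits the DB lookup.
from abstract_block_dumper._internal.providers.bittensor_client import BittensorConnectionClient
from abstract_block_dumper._internal.services.scheduler import DefaultBlockStateResolver


def test_resolver_honours_explicit_start_block(mock_subtensor, settings):
    settings.BLOCK_DUMPER_START_FROM_BLOCK = 500  # int value wins over DB/head
    client = BittensorConnectionClient(network="testnet")
    client._subtensor = mock_subtensor            # inject the fake connection (head = 1000)
    resolver = DefaultBlockStateResolver(bittensor_client=client)
    assert resolver.get_starting_block() == 500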
{abstract_block_dumper-0.1.0 → abstract_block_dumper-0.1.2}/tests/integration/test_scheduler.py
RENAMED
@@ -7,6 +7,7 @@ from django.utils import timezone
 import abstract_block_dumper._internal.dal.django_dal as abd_dal
 import abstract_block_dumper._internal.services.utils as abd_utils
 from abstract_block_dumper._internal.dal.memory_registry import task_registry
+from abstract_block_dumper._internal.providers.bittensor_client import BittensorConnectionClient
 from abstract_block_dumper._internal.services.block_processor import block_processor_factory
 from abstract_block_dumper.models import TaskAttempt
 from abstract_block_dumper.v1.decorators import block_task
@@ -151,3 +152,36 @@ def test_retry_recover_mechanism():
         status=TaskAttempt.Status.SUCCESS,
     )
     assert qs.count() == len(recover_ids)
+
+
+def test_bittensor_client_uses_archive_for_old_blocks(
+    mock_subtensor,
+    mock_archive_subtensor,
+) -> None:
+    """Test that BittensorConnectionClient uses archive_subtensor for blocks older than 300 from current head."""
+    current_block = 1000
+
+    client = BittensorConnectionClient(network="testnet")
+    client._subtensor = mock_subtensor
+    client._archive_subtensor = mock_archive_subtensor
+    client._current_block_cache = current_block
+
+    # For old block (400 behind), should return archive_subtensor
+    old_block = 600  # 400 blocks behind (> 300 threshold)
+    result = client.get_subtensor_for_block(old_block)
+    assert result is mock_archive_subtensor
+
+    # For recent block (within 300), should return regular subtensor
+    recent_block = 800  # 200 blocks behind (< 300 threshold)
+    result = client.get_subtensor_for_block(recent_block)
+    assert result is mock_subtensor
+
+    # For block exactly at threshold (300 behind), should return regular subtensor
+    threshold_block = 700  # exactly 300 blocks behind
+    result = client.get_subtensor_for_block(threshold_block)
+    assert result is mock_subtensor
+
+    # For block just over threshold (301 behind), should return archive_subtensor
+    just_over_threshold_block = 699  # 301 blocks behind
+    result = client.get_subtensor_for_block(just_over_threshold_block)
+    assert result is mock_archive_subtensor
abstract_block_dumper-0.1.0/src/abstract_block_dumper/_internal/services/scheduler.py
@@ -1,178 +0,0 @@
-import time
-
-import bittensor as bt
-import structlog
-from django.conf import settings
-
-import abstract_block_dumper._internal.dal.django_dal as abd_dal
-import abstract_block_dumper._internal.services.utils as abd_utils
-from abstract_block_dumper._internal.services.block_processor import BlockProcessor, block_processor_factory
-from abstract_block_dumper._internal.services.metrics import (
-    BlockProcessingTimer,
-    increment_blocks_processed,
-    set_block_lag,
-    set_current_block,
-    set_registered_tasks,
-)
-
-logger = structlog.get_logger(__name__)
-
-# Blocks older than this threshold from current head require archive network
-ARCHIVE_BLOCK_THRESHOLD = 300
-
-
-class TaskScheduler:
-    def __init__(
-        self,
-        block_processor: BlockProcessor,
-        network: str,
-        poll_interval: int,
-    ) -> None:
-        self.block_processor = block_processor
-        self.network = network
-        self.poll_interval = poll_interval
-        self.last_processed_block = -1
-        self.is_running = False
-        self._subtensor: bt.Subtensor | None = None
-        self._archive_subtensor: bt.Subtensor | None = None
-        self._current_block_cache: int | None = None
-
-    @property
-    def subtensor(self) -> bt.Subtensor:
-        """Get the regular subtensor connection, creating it if needed."""
-        if self._subtensor is None:
-            self._subtensor = abd_utils.get_bittensor_client(self.network)
-        return self._subtensor
-
-    @subtensor.setter
-    def subtensor(self, value: bt.Subtensor | None) -> None:
-        """Set or reset the subtensor connection."""
-        self._subtensor = value
-
-    @property
-    def archive_subtensor(self) -> bt.Subtensor:
-        """Get the archive subtensor connection, creating it if needed."""
-        if self._archive_subtensor is None:
-            self._archive_subtensor = abd_utils.get_bittensor_client("archive")
-        return self._archive_subtensor
-
-    @archive_subtensor.setter
-    def archive_subtensor(self, value: bt.Subtensor | None) -> None:
-        """Set or reset the archive subtensor connection."""
-        self._archive_subtensor = value
-
-    def get_subtensor_for_block(self, block_number: int) -> bt.Subtensor:
-        """
-        Get the appropriate subtensor for the given block number.
-
-        Uses archive network for blocks older than ARCHIVE_BLOCK_THRESHOLD
-        from the current head.
-        """
-        if self._current_block_cache is None:
-            self._current_block_cache = self.subtensor.get_current_block()
-
-        blocks_behind = self._current_block_cache - block_number
-
-        if blocks_behind > ARCHIVE_BLOCK_THRESHOLD:
-            logger.debug(
-                "Using archive network for old block",
-                block_number=block_number,
-                blocks_behind=blocks_behind,
-            )
-            return self.archive_subtensor
-        return self.subtensor
-
-    def refresh_connections(self) -> None:
-        """Reset all subtensor connections to force re-establishment."""
-        self._subtensor = None
-        self._archive_subtensor = None
-        self._current_block_cache = None
-        logger.info("Subtensor connections reset")
-
-    def start(self) -> None:
-        self.is_running = True
-
-        self.initialize_last_block()
-
-        registered_tasks_count = len(self.block_processor.registry.get_functions())
-        set_registered_tasks(registered_tasks_count)
-
-        logger.info(
-            "TaskScheduler started",
-            last_processed_block=self.last_processed_block,
-            registry_functions=registered_tasks_count,
-        )
-
-        while self.is_running:
-            try:
-                if self._current_block_cache is not None:
-                    self.subtensor = self.get_subtensor_for_block(self._current_block_cache)
-
-                # Update current block cache for archive network decision
-                self._current_block_cache = self.subtensor.get_current_block()
-                current_block = self._current_block_cache
-
-                # Only process the current head block, skip if already processed
-                if current_block != self.last_processed_block:
-                    with BlockProcessingTimer(mode="realtime"):
-                        self.block_processor.process_block(current_block)
-
-                    set_current_block("realtime", current_block)
-                    increment_blocks_processed("realtime")
-                    set_block_lag("realtime", 0)  # Head-only mode has no lag
-                    self.last_processed_block = current_block
-
-                time.sleep(self.poll_interval)
-
-            except KeyboardInterrupt:
-                logger.info("TaskScheduler stopping due to KeyboardInterrupt.")
-                self.stop()
-                break
-            except Exception:
-                logger.error("Fatal scheduler error", exc_info=True)
-                # resume the loop even if task failed
-                time.sleep(self.poll_interval)
-
-    def stop(self) -> None:
-        self.is_running = False
-        logger.info("TaskScheduler stopped.")
-
-    def initialize_last_block(self) -> None:
-        # Safe getattr in case setting is not defined
-        start_from_block_setting = getattr(settings, "BLOCK_DUMPER_START_FROM_BLOCK", None)
-
-        if start_from_block_setting is not None:
-            if start_from_block_setting == "current":
-                self.last_processed_block = self.subtensor.get_current_block()
-                logger.info("Starting from current blockchain block", block_number=self.last_processed_block)
-
-            elif isinstance(start_from_block_setting, int):
-                self.last_processed_block = start_from_block_setting
-                logger.info("Starting from configured block", block_number=self.last_processed_block)
-
-            else:
-                error_msg = f"Invalid BLOCK_DUMPER_START_FROM_BLOCK value: {start_from_block_setting}"
-                raise ValueError(error_msg)
-        else:
-            # Default behavior - resume from database
-            last_block_number = abd_dal.get_the_latest_executed_block_number()
-
-            self.last_processed_block = last_block_number or self.subtensor.get_current_block()
-            logger.info(
-                "Resume from the last database block or start from the current block",
-                last_processed_block=self.last_processed_block,
-            )
-
-def task_scheduler_factory(network: str = "finney") -> TaskScheduler:
-    """
-    Factory for TaskScheduler.
-
-    Args:
-        network (str): Bittensor network name. Defaults to "finney"
-
-    """
-    return TaskScheduler(
-        block_processor=block_processor_factory(),
-        network=network,
-        poll_interval=getattr(settings, "BLOCK_DUMPER_POLL_INTERVAL", 1),
-    )
abstract_block_dumper-0.1.0/tests/conftest.py
@@ -1,65 +0,0 @@
-import django
-import pytest
-from celery import Celery
-from django.conf import settings
-
-from abstract_block_dumper._internal.dal.memory_registry import task_registry
-
-from .django_fixtures import *  # noqa: F401, F403
-
-# Ensure Django is set up
-if not settings.configured:
-    django.setup()
-
-
-@pytest.fixture(autouse=True)
-def celery_test_app():
-    """Configure Celery for testing with eager mode."""
-    app = Celery("test_app")
-    app.config_from_object(settings, namespace="CELERY")
-
-    yield app
-
-
-def every_block_task_func(block_number: int):
-    """
-    Test function for every block execution.
-    """
-    return f"Processed block {block_number}"
-
-
-def modulo_task_func(block_number: int, netuid: int):
-    """
-    Test function for modulo condition execution.
-    """
-    return f"Modulo task processed block {block_number} for netuid {netuid}"
-
-
-def failing_task_func(block_number: int):
-    """
-    Test function that always fails.
-    """
-    raise ValueError("Test error")
-
-
-@pytest.fixture
-def setup_test_tasks():
-    # Register test tasks using decorators
-    from abstract_block_dumper.v1.decorators import block_task
-
-    # every block
-    block_task(condition=lambda bn: True)(every_block_task_func)
-
-    # every 5 blocks
-    block_task(condition=lambda bn, netuid: bn % 5 == 0, args=[{"netuid": 1}, {"netuid": 2}])(modulo_task_func)
-
-    yield
-
-
-@pytest.fixture(autouse=True)
-def cleanup_memory_registry():
-    task_registry.clear()
-
-    yield
-
-    task_registry.clear()
All remaining files in the listing at the top of this diff (those marked +0 -0) were only renamed from the abstract_block_dumper-0.1.0 prefix to abstract_block_dumper-0.1.2; their contents are unchanged.