abstract-block-dumper 0.0.5__py3-none-any.whl → 0.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. abstract_block_dumper/{dal → _internal/dal}/django_dal.py +4 -3
  2. abstract_block_dumper/{dal → _internal/dal}/memory_registry.py +12 -20
  3. abstract_block_dumper/{discovery.py → _internal/discovery.py} +1 -1
  4. abstract_block_dumper/{exceptions.py → _internal/exceptions.py} +1 -1
  5. abstract_block_dumper/_internal/services/__init__.py +0 -0
  6. abstract_block_dumper/{services → _internal/services}/block_processor.py +15 -14
  7. abstract_block_dumper/{services → _internal/services}/executor.py +2 -2
  8. abstract_block_dumper/_internal/services/scheduler.py +161 -0
  9. abstract_block_dumper/{services → _internal/services}/utils.py +4 -12
  10. abstract_block_dumper/_version.py +2 -2
  11. abstract_block_dumper/management/commands/__init__.py +0 -0
  12. abstract_block_dumper/management/commands/{block_tasks.py → block_tasks_v1.py} +6 -3
  13. abstract_block_dumper/models.py +1 -1
  14. abstract_block_dumper/v1/__init__.py +0 -0
  15. abstract_block_dumper/v1/celery.py +53 -0
  16. abstract_block_dumper/{decorators.py → v1/decorators.py} +56 -39
  17. abstract_block_dumper/{tasks.py → v1/tasks.py} +3 -3
  18. {abstract_block_dumper-0.0.5.dist-info → abstract_block_dumper-0.0.7.dist-info}/METADATA +132 -54
  19. abstract_block_dumper-0.0.7.dist-info/RECORD +29 -0
  20. {abstract_block_dumper-0.0.5.dist-info → abstract_block_dumper-0.0.7.dist-info}/WHEEL +1 -1
  21. abstract_block_dumper/services/scheduler.py +0 -92
  22. abstract_block_dumper-0.0.5.dist-info/RECORD +0 -25
  23. /abstract_block_dumper/{dal → _internal}/__init__.py +0 -0
  24. /abstract_block_dumper/{services → _internal/dal}/__init__.py +0 -0
@@ -6,8 +6,8 @@ from django.db import transaction
 from django.db.models.query import QuerySet
 from django.utils import timezone
 
+import abstract_block_dumper._internal.services.utils as abd_utils
 import abstract_block_dumper.models as abd_models
-import abstract_block_dumper.services.utils as abd_utils
 
 
 def get_ready_to_retry_attempts() -> QuerySet[abd_models.TaskAttempt]:
@@ -76,7 +76,7 @@ def task_mark_as_success(task: abd_models.TaskAttempt, result_data: dict) -> Non
     task.save()
 
 
-def task_mark_as_failed(task) -> None:
+def task_mark_as_failed(task: abd_models.TaskAttempt) -> None:
     DEFAULT_BLOCK_TASK_RETRY_BACKOFF = 1
     MAX_RETRY_DELAY_MINUTES = 1440 # 24 hours max delay
 
@@ -98,7 +98,7 @@ def task_mark_as_failed(task) -> None:
     task.save()
 
 
-def task_schedule_to_retry(task):
+def task_schedule_to_retry(task: abd_models.TaskAttempt) -> None:
     task.status = abd_models.TaskAttempt.Status.PENDING
     task.save()
 
@@ -110,6 +110,7 @@ def task_create_or_get_pending(
 ) -> tuple[abd_models.TaskAttempt, bool]:
     """
     Create or get a pending task attempt.
+
     Returns (task, created) where created indicates if a new task was created.
 
     For failed tasks that can retry:
@@ -6,7 +6,7 @@ from typing import Any
 import structlog
 from celery import Task
 
-from abstract_block_dumper.exceptions import ConditionEvaluationError
+from abstract_block_dumper._internal.exceptions import ConditionEvaluationError
 
 logger = structlog.getLogger(__name__)
 
@@ -19,10 +19,8 @@ class RegistryItem:
     backfilling_lookback: int | None = None
     celery_kwargs: dict[str, Any] = field(default_factory=dict)
 
-    def match_condition(self, block_number: int, **kwargs) -> bool:
-        """
-        Check if condition matches for given block and arguments
-        """
+    def match_condition(self, block_number: int, **kwargs: dict[str, Any]) -> bool:
+        """Check if condition matches for given block and arguments."""
         try:
             return self.condition(block_number, **kwargs)
         except Exception as e:
@@ -32,28 +30,23 @@ class RegistryItem:
                 block_number=block_number,
                 exc_info=True,
             )
-            raise ConditionEvaluationError(f"Failed to evaluate condition: {e}") from e
+            msg = f"Failed to evaluate condition: {e}"
+            raise ConditionEvaluationError(msg) from e
 
     def get_execution_args(self) -> list[dict[str, Any]]:
-        """
-        Get list of argument sets for execution
-        """
+        """Get list of argument sets for execution."""
         return self.args or [{}]
 
     @property
     def executable_path(self) -> str:
-        """
-        Get the importable path to the function.
-        """
+        """Get the importable path to the function."""
         if hasattr(self.function, "name") and self.function.name is not None:
             return self.function.name
 
-        return ".".join([self.function.__module__, self.function.__name__])
+        return f"{self.function.__module__}.{self.function.__name__}"
 
     def requires_backfilling(self) -> bool:
-        """
-        Check if this item requires backfilling.
-        """
+        """Check if this item requires backfilling."""
        return self.backfilling_lookback is not None
 
 
@@ -84,7 +77,7 @@ class MemoryRegistry(BaseRegistry):
             "Registered function",
             function_name=item.function.__name__,
             executable_path=item.executable_path,
-            args=item.args,
+            args_counter=len(item.args or []),
             backfilling_lookback=item.backfilling_lookback,
         )
 
@@ -94,12 +87,11 @@ class MemoryRegistry(BaseRegistry):
     def clear(self) -> None:
         self._functions = []
 
-    def get_by_executable_path(self, executable_path: str) -> RegistryItem:
+    def get_by_executable_path(self, executable_path: str) -> RegistryItem | None:
         for registry_item in self.get_functions():
             if registry_item.executable_path == executable_path:
                 return registry_item
-        # TODO: Improve this
-        raise Exception("Function Not Found")
+        return None
 
 
 task_registry = MemoryRegistry()
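
With this hunk, `get_by_executable_path` returns `None` for unknown paths instead of raising a bare `Exception`, so callers must now check the result (as `BlockProcessor.recover_failed_retries` does further down in this diff). A minimal sketch of the new contract; the executable path string below is a made-up example:

```python
# Hypothetical lookup against the 0.0.7 registry API.
item = task_registry.get_by_executable_path("myapp.block_tasks.dump_stats")
if item is None:
    # Pre-0.0.7 this raised Exception("Function Not Found"); now the caller
    # decides whether to skip, log, or raise.
    ...
```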
@@ -11,7 +11,7 @@ def ensure_modules_loaded() -> None:
 
     @block_task must be loaded, otherwise it won't be registered.
     """
-    from django.apps import apps
+    from django.apps import apps  # noqa: PLC0415
 
     for app_config in apps.get_app_configs():
         for module_suffix in ["tasks", "block_tasks"]:
@@ -10,7 +10,7 @@ class ConditionEvaluationError(AbstractBlockDumperError):
     pass
 
 
-class CeleryTaskLocked(Exception):
+class CeleryTaskLockedError(AbstractBlockDumperError):
     """Celery task execution is locked"""
 
     pass
File without changes
@@ -1,12 +1,12 @@
 import structlog
 from django.db import transaction
 
-import abstract_block_dumper.dal.django_dal as abd_dal
-from abstract_block_dumper.dal.memory_registry import BaseRegistry, RegistryItem, task_registry
-from abstract_block_dumper.exceptions import ConditionEvaluationError
+import abstract_block_dumper._internal.dal.django_dal as abd_dal
+from abstract_block_dumper._internal.dal.memory_registry import BaseRegistry, RegistryItem, task_registry
+from abstract_block_dumper._internal.exceptions import ConditionEvaluationError
+from abstract_block_dumper._internal.services.executor import CeleryExecutor
+from abstract_block_dumper._internal.services.utils import serialize_args
 from abstract_block_dumper.models import TaskAttempt
-from abstract_block_dumper.services.executor import CeleryExecutor
-from abstract_block_dumper.services.utils import serialize_args
 
 logger = structlog.get_logger(__name__)
 
@@ -100,22 +100,22 @@ class BlockProcessor:
         This handles tasks that may have been lost due to scheduler restarts.
         """
         retry_count = 0
-        for task_attempt in abd_dal.get_ready_to_retry_attempts():
+        for retry_attempt in abd_dal.get_ready_to_retry_attempts():
             try:
                 # Find the registry item to get celery_kwargs
-                registry_item = self.registry.get_by_executable_path(task_attempt.executable_path)
+                registry_item = self.registry.get_by_executable_path(retry_attempt.executable_path)
                 if not registry_item:
                     logger.warning(
                         "Registry item not found for failed task, skipping retry recovery",
-                        task_id=task_attempt.id,
-                        executable_path=task_attempt.executable_path,
+                        task_id=retry_attempt.id,
+                        executable_path=retry_attempt.executable_path,
                     )
                     continue
 
                 # Use atomic transaction to prevent race conditions
                 with transaction.atomic():
                     # Re-fetch with select_for_update to prevent concurrent modifications
-                    task_attempt = TaskAttempt.objects.select_for_update(nowait=True).get(id=task_attempt.id)
+                    task_attempt = TaskAttempt.objects.select_for_update(nowait=True).get(id=retry_attempt.id)
 
                     # Verify task is still in FAILED state and ready for retry
                     if task_attempt.status == TaskAttempt.Status.SUCCESS:
@@ -150,16 +150,16 @@ class BlockProcessor:
             except Exception:
                 logger.error(
                     "Failed to recover retry",
-                    task_id=task_attempt.id,
+                    task_id=retry_attempt.id,
                     exc_info=True,
                 )
                 # Reload task to see current state after potential execution failure
                 try:
-                    task_attempt.refresh_from_db()
+                    retry_attempt.refresh_from_db()
                     # If task is still PENDING after error, revert to FAILED
                     # (execution may have failed before celery task could mark it)
-                    if task_attempt.status == TaskAttempt.Status.PENDING:
-                        abd_dal.revert_to_failed(task_attempt)
+                    if retry_attempt.status == TaskAttempt.Status.PENDING:
+                        abd_dal.revert_to_failed(retry_attempt)
                 except TaskAttempt.DoesNotExist:
                     # Task was deleted during recovery, nothing to revert
                     pass
@@ -170,6 +170,7 @@ class BlockProcessor:
     def _cleanup_phantom_tasks(self) -> None:
         """
         Clean up tasks marked as SUCCESS but never actually started.
+
         Only removes tasks that were created recently (within last hour) to avoid
         deleting legitimate tasks marked as success by external processes.
         """
@@ -2,8 +2,8 @@ from typing import Any
 
 import structlog
 
-import abstract_block_dumper.dal.django_dal as abd_dal
-from abstract_block_dumper.dal.memory_registry import RegistryItem
+import abstract_block_dumper._internal.dal.django_dal as abd_dal
+from abstract_block_dumper._internal.dal.memory_registry import RegistryItem
 from abstract_block_dumper.models import TaskAttempt
 
 logger = structlog.get_logger(__name__)
@@ -0,0 +1,161 @@
+import time
+
+import bittensor as bt
+import structlog
+from django.conf import settings
+
+import abstract_block_dumper._internal.dal.django_dal as abd_dal
+import abstract_block_dumper._internal.services.utils as abd_utils
+from abstract_block_dumper._internal.services.block_processor import BlockProcessor, block_processor_factory
+
+logger = structlog.get_logger(__name__)
+
+# Blocks older than this threshold from current head require archive network
+ARCHIVE_BLOCK_THRESHOLD = 300
+
+
+class TaskScheduler:
+    def __init__(
+        self,
+        block_processor: BlockProcessor,
+        network: str,
+        poll_interval: int,
+    ) -> None:
+        self.block_processor = block_processor
+        self.network = network
+        self.poll_interval = poll_interval
+        self.last_processed_block = -1
+        self.is_running = False
+        self._subtensor: bt.Subtensor | None = None
+        self._archive_subtensor: bt.Subtensor | None = None
+        self._current_block_cache: int | None = None
+
+    @property
+    def subtensor(self) -> bt.Subtensor:
+        """Get the regular subtensor connection, creating it if needed."""
+        if self._subtensor is None:
+            self._subtensor = abd_utils.get_bittensor_client(self.network)
+        return self._subtensor
+
+    @subtensor.setter
+    def subtensor(self, value: bt.Subtensor | None) -> None:
+        """Set or reset the subtensor connection."""
+        self._subtensor = value
+
+    @property
+    def archive_subtensor(self) -> bt.Subtensor:
+        """Get the archive subtensor connection, creating it if needed."""
+        if self._archive_subtensor is None:
+            self._archive_subtensor = abd_utils.get_bittensor_client("archive")
+        return self._archive_subtensor
+
+    @archive_subtensor.setter
+    def archive_subtensor(self, value: bt.Subtensor | None) -> None:
+        """Set or reset the archive subtensor connection."""
+        self._archive_subtensor = value
+
+    def get_subtensor_for_block(self, block_number: int) -> bt.Subtensor:
+        """
+        Get the appropriate subtensor for the given block number.
+
+        Uses archive network for blocks older than ARCHIVE_BLOCK_THRESHOLD
+        from the current head.
+        """
+        if self._current_block_cache is None:
+            self._current_block_cache = self.subtensor.get_current_block()
+
+        blocks_behind = self._current_block_cache - block_number
+
+        if blocks_behind > ARCHIVE_BLOCK_THRESHOLD:
+            logger.debug(
+                "Using archive network for old block",
+                block_number=block_number,
+                blocks_behind=blocks_behind,
+            )
+            return self.archive_subtensor
+        return self.subtensor
+
+    def refresh_connections(self) -> None:
+        """Reset all subtensor connections to force re-establishment."""
+        self._subtensor = None
+        self._archive_subtensor = None
+        self._current_block_cache = None
+        logger.info("Subtensor connections reset")
+
+    def start(self) -> None:
+        self.is_running = True
+
+        self.initialize_last_block()
+
+        logger.info(
+            "TaskScheduler started",
+            last_processed_block=self.last_processed_block,
+            registry_functions=len(self.block_processor.registry.get_functions()),
+        )
+
+        while self.is_running:
+            try:
+                # Process lost retries first
+                self.block_processor.recover_failed_retries()
+
+                # Update current block cache for archive network decision
+                self._current_block_cache = self.subtensor.get_current_block()
+                current_block = self._current_block_cache
+
+                for block_number in range(self.last_processed_block + 1, current_block + 1):
+                    self.block_processor.process_block(block_number)
+                    time.sleep(self.poll_interval)
+                    self.last_processed_block = block_number
+
+            except KeyboardInterrupt:
+                logger.info("TaskScheduler stopping due to KeyboardInterrupt.")
+                self.stop()
+                break
+            except Exception:
+                logger.error("Fatal scheduler error", exc_info=True)
+                # resume the loop even if task failed
+                time.sleep(self.poll_interval)
+
+    def stop(self) -> None:
+        self.is_running = False
+        logger.info("TaskScheduler stopped.")
+
+    def initialize_last_block(self) -> None:
+        # Safe getattr in case setting is not defined
+        start_from_block_setting = getattr(settings, "BLOCK_DUMPER_START_FROM_BLOCK", None)
+
+        if start_from_block_setting is not None:
+            if start_from_block_setting == "current":
+                self.last_processed_block = self.subtensor.get_current_block()
+                logger.info("Starting from current blockchain block", block_number=self.last_processed_block)
+
+            elif isinstance(start_from_block_setting, int):
+                self.last_processed_block = start_from_block_setting
+                logger.info("Starting from configured block", block_number=self.last_processed_block)
+            else:
+                error_msg = f"Invalid BLOCK_DUMPER_START_FROM_BLOCK value: {start_from_block_setting}"
+                raise ValueError(error_msg)
+        else:
+            # Default behavior - resume from database
+            last_block_number = abd_dal.get_the_latest_executed_block_number()
+
+            self.last_processed_block = last_block_number or self.subtensor.get_current_block()
+            logger.info(
+                "Resume from the last database block or start from the current block",
+                last_processed_block=self.last_processed_block,
+            )
+
+
+def task_scheduler_factory(network: str = "finney") -> TaskScheduler:
+    """
+    Factory for TaskScheduler.
+
+    Args:
+        network (str): Bittensor network name. Defaults to "finney"
+
+    """
+    return TaskScheduler(
+        block_processor=block_processor_factory(),
+        network=network,
+        poll_interval=getattr(settings, "BLOCK_DUMPER_POLL_INTERVAL", 1),
+    )
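
The new scheduler is normally launched through the `block_tasks_v1` management command shown later in this diff, which builds it via `task_scheduler_factory()`. A minimal sketch of driving it directly under that assumption; note that the README's ApiVer section warns `_internal` modules may change in any release, so this import path is unsupported:

```python
# Sketch only -- the supported entry point is: python manage.py block_tasks_v1
# Assumes DJANGO_SETTINGS_MODULE is set; _internal imports carry no stability guarantees.
import django

django.setup()

from abstract_block_dumper._internal.services.scheduler import task_scheduler_factory

scheduler = task_scheduler_factory(network="finney")  # "finney" is the default
scheduler.start()  # blocks; polls every BLOCK_DUMPER_POLL_INTERVAL seconds, Ctrl-C stops it
```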
@@ -1,6 +1,5 @@
 import json
 from collections.abc import Callable
-from functools import cache
 
 import bittensor as bt
 import structlog
@@ -10,24 +9,19 @@ from django.conf import settings
 logger = structlog.get_logger(__name__)
 
 
-@cache
-def get_bittensor_client() -> bt.Subtensor:
+def get_bittensor_client(network: str = "finney") -> bt.Subtensor:
     """
     Get a cached bittensor client.
 
     The client is cached indefinitely since network configuration
     doesn't change during runtime.
     """
-    DEFAULT_BITTENSOR_NETWORK = "finney"
-    network = getattr(settings, "BITTENSOR_NETWORK", DEFAULT_BITTENSOR_NETWORK)
-    logger.info(f"Creating new bittensor client for network: {network}")
+    logger.info("Creating new bittensor client for network", network=network)
     return bt.subtensor(network=network)
 
 
 def get_current_celery_task_id() -> str:
-    """
-    Get current celery task id
-    """
+    """Get current celery task id."""
     try:
         celery_task_id = current_task.id
     except Exception:
@@ -36,9 +30,7 @@ def get_current_celery_task_id() -> str:
 
 
 def get_executable_path(func: Callable) -> str:
-    """
-    Get executable path for the callable `func`
-    """
+    """Get executable path for the callable `func`."""
     return ".".join([func.__module__, func.__name__])
 
 
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.0.5'
-__version_tuple__ = version_tuple = (0, 0, 5)
+__version__ = version = '0.0.7'
+__version_tuple__ = version_tuple = (0, 0, 7)
 
 __commit_id__ = commit_id = None
File without changes
@@ -1,14 +1,17 @@
 from django.core.management.base import BaseCommand
 
-from abstract_block_dumper.dal.memory_registry import task_registry
-from abstract_block_dumper.discovery import ensure_modules_loaded
-from abstract_block_dumper.services.scheduler import task_scheduler_factory
+from abstract_block_dumper._internal.dal.memory_registry import task_registry
+from abstract_block_dumper._internal.discovery import ensure_modules_loaded
+from abstract_block_dumper._internal.services.scheduler import task_scheduler_factory
 
 
 class Command(BaseCommand):
     help = "Run the block scheduler daemon."
 
     def handle(self, *args, **options) -> None:
+        """
+        Handle the management command to start the block scheduler.
+        """
         self.stdout.write("Syncing decorated functions...")
         ensure_modules_loaded()
         functions_counter = len(task_registry.get_functions())
@@ -3,7 +3,7 @@ from typing import Any
 
 from django.db import models
 
-import abstract_block_dumper.services.utils as abd_utils
+import abstract_block_dumper._internal.services.utils as abd_utils
 
 
 class TaskAttempt(models.Model):
File without changes
@@ -0,0 +1,53 @@
+"""
+Celery integration helpers for abstract-block-dumper.
+
+This module provides utilities to integrate @block_task decorated functions
+with Celery workers.
+"""
+
+from abstract_block_dumper._internal.discovery import ensure_modules_loaded
+
+
+def setup_celery_tasks() -> None:
+    """
+    Discover and register all @block_task decorated functions for Celery.
+
+    This function MUST be called when Celery workers start to ensure that
+    all @block_task decorated functions are registered and available to
+    receive tasks from the message broker.
+
+    Usage in your project's celery.py:
+
+        from celery import Celery
+        from celery.signals import worker_ready
+
+        app = Celery('your_project')
+        app.config_from_object('django.conf:settings', namespace='CELERY')
+        app.autodiscover_tasks()
+
+        @worker_ready.connect
+        def on_worker_ready(**kwargs):
+            '''Load block tasks when worker is ready.'''
+            from abstract_block_dumper.v1.celery import setup_celery_tasks
+            setup_celery_tasks()
+
+    Why is this needed?
+    -------------------
+    The @block_task decorator uses Celery's @shared_task, which requires
+    the decorated functions to be imported before workers can receive
+    messages for those tasks. Without calling this function, you'll see
+    errors like:
+
+        "Received unregistered task of type 'your_app.block_tasks.task_name'"
+
+    What does it do?
+    ----------------
+    - Automatically imports all 'tasks.py' and 'block_tasks.py' modules
+      from your INSTALLED_APPS
+    - Triggers @block_task decorator registration
+    - Makes tasks available to Celery workers
+    """
+    ensure_modules_loaded()
+
+
+__all__ = ["setup_celery_tasks"]
@@ -5,10 +5,10 @@ import structlog
 from celery import Task, shared_task
 from django.db import OperationalError, transaction
 
-import abstract_block_dumper.dal.django_dal as abd_dal
-import abstract_block_dumper.services.utils as abd_utils
-from abstract_block_dumper.dal.memory_registry import RegistryItem, task_registry
-from abstract_block_dumper.exceptions import CeleryTaskLocked
+import abstract_block_dumper._internal.dal.django_dal as abd_dal
+import abstract_block_dumper._internal.services.utils as abd_utils
+from abstract_block_dumper._internal.dal.memory_registry import RegistryItem, task_registry
+from abstract_block_dumper._internal.exceptions import CeleryTaskLockedError
 from abstract_block_dumper.models import TaskAttempt
 
 logger = structlog.get_logger(__name__)
@@ -20,7 +20,6 @@ def schedule_retry(task_attempt: TaskAttempt) -> None:
 
     Task must already be in FAILED state with next_retry_at set by mark_failed()
     """
-
     if not task_attempt.next_retry_at:
         logger.error(
             "Cannot schedule retry without next_retry_at",
@@ -63,7 +62,9 @@ def schedule_retry(task_attempt: TaskAttempt) -> None:
     )
 
 
-def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] | None:
+def _celery_task_wrapper(
+    func: Callable[..., Any], block_number: int, **kwargs: dict[str, Any]
+) -> dict[str, Any] | None:
     executable_path = abd_utils.get_executable_path(func)
 
     with transaction.atomic():
@@ -73,21 +74,15 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] |
                 executable_path=executable_path,
                 args_json=abd_utils.serialize_args(kwargs),
             )
-        except TaskAttempt.DoesNotExist:
-            logger.warning(
-                "TaskAttempt not found - task may have been canceled directly",
-                block_number=block_number,
-                executable_path=executable_path,
-            )
-            raise CeleryTaskLocked("TaskAttempt not found - task may have been canceled directly")
+        except TaskAttempt.DoesNotExist as exc:
+            msg = "TaskAttempt not found - task may have been canceled directly"
+            logger.warning(msg, block_number=block_number, executable_path=executable_path)
+            raise CeleryTaskLockedError(msg) from exc
+
         except OperationalError as e:
-            logger.info(
-                "Task already being processed by another worker",
-                block_number=block_number,
-                executable_path=executable_path,
-                operational_error=str(e),
-            )
-            raise CeleryTaskLocked("Task already being processed by another worker")
+            msg = "Task already being processed by another worker"
+            logger.info(msg, block_number=block_number, executable_path=executable_path, operational_error=str(e))
+            raise CeleryTaskLockedError(msg) from e
 
     if task_attempt.status != TaskAttempt.Status.PENDING:
         logger.info(
@@ -118,11 +113,11 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] |
         logger.info("Task completed successfully", task_id=task_attempt.id)
         return {"result": result}
     except Exception as e:
-        logger.error(
+        logger.exception(
             "Task execution failed",
             task_id=task_attempt.id,
             error_type=type(e).__name__,
-            exc_info=True,
+            error_message=str(e),
         )
         abd_dal.task_mark_as_failed(task_attempt)
 
@@ -131,33 +126,47 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] |
     try:
         schedule_retry(task_attempt)
     except Exception:
-        logger.error(
+        logger.exception(
             "Failed to schedule retry",
             task_id=task_attempt.id,
-            exc_info=True,
         )
     return None
 
 
 def block_task(
-    condition: Callable[..., bool],
+    func: Callable[..., Any] | None = None,
+    *,
+    condition: Callable[..., bool] | None = None,
     args: list[dict[str, Any]] | None = None,
     backfilling_lookback: int | None = None,
     celery_kwargs: dict[str, Any] | None = None,
 ) -> Callable[..., Any]:
     """
-    Decorator for registering block tasks.
+    Decorator to register a function as a block task.
+
+    Block task is a function that will be executed conditionally on each new block.
+    The condition is a callable that takes the block number and any additional arguments,
+    and returns a boolean indicating whether to execute the task.
 
     Args:
-        condition: Lambda function that determines when to execute
+        func: The function to decorate (used when decorator is applied without parentheses)
+        condition: Lambda function that determines when to execute the task. It should accept
+            block_number and any additional args as parameters and return a boolean.
+            Defaults to always True (run on every block).
         args: List of argument dictionaries for multi-execution
         backfilling_lookback: Number of blocks to backfill
         celery_kwargs: Additional Celery task parameters
 
     Examples:
-        @block_task(
-            condition=lambda bn: bn % 100 == 0
-        )
+        @block_task
+        def run_on_every_block(block_number: int):
+            pass
+
+        @block_task()
+        def also_runs_on_every_block(block_number: int):
+            pass
+
+        @block_task(condition=lambda bn: bn % 100 == 0)
         def simple_task(block_number: int):
             pass
 
@@ -171,36 +180,39 @@ def block_task(
         pass
 
     """
+    # Default condition: always run
+    effective_condition = condition if condition is not None else (lambda *_args, **_kwargs: True)
 
-    def decorator(func: Callable[..., Any]) -> Any:
-        if not callable(condition):
-            raise ValueError("condition must be a callable.")
+    def decorator(fn: Callable[..., Any]) -> Any:
+        if not callable(effective_condition):
+            msg = "condition must be a callable."
+            raise TypeError(msg)
 
         # Celery task wrapper
-        def shared_celery_task(block_number: int, **kwargs) -> None | Any:
+        def shared_celery_task(block_number: int, **kwargs: dict[str, Any]) -> None | Any:
             """
             Wrapper that handles TaskAttempt tracking and executed the original
             function
 
             This entire wrapper becomes a Celery task.
             """
-            return _celery_task_wrapper(func, block_number, **kwargs)
+            return _celery_task_wrapper(fn, block_number, **kwargs)
 
         # Wrap with celery shared_task
         celery_task = shared_task(
-            name=abd_utils.get_executable_path(func),
+            name=abd_utils.get_executable_path(fn),
             bind=False,
             **celery_kwargs or {},
         )(shared_celery_task)
 
         # Store original function referefence for introspection
-        celery_task._original_func = func
+        celery_task._original_func = fn  # noqa: SLF001
 
         # Register the Celery task
         task_registry.register_item(
             RegistryItem(
-                condition=condition,
-                function=cast(Task, celery_task),
+                condition=effective_condition,
+                function=cast("Task", celery_task),
                 args=args,
                 backfilling_lookback=backfilling_lookback,
                 celery_kwargs=celery_kwargs or {},
@@ -208,4 +220,9 @@ def block_task(
         )
         return celery_task
 
+    # If func is provided, decorator was used without parentheses: @block_task
+    if func is not None:
+        return decorator(func)
+
+    # Otherwise, decorator was used with parentheses: @block_task() or @block_task(condition=...)
     return decorator
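
These two hunks implement the standard dual-form decorator pattern: the positional `func=None` parameter receives the function when the decorator is applied bare, while the keyword-only parameters keep the parenthesized form working. A condensed, package-independent sketch of the pattern for reference (names are illustrative, not from this package):

```python
from collections.abc import Callable
from typing import Any


def dual_form(func: Callable[..., Any] | None = None, *, option: int = 0) -> Callable[..., Any]:
    """Generic sketch: usable both as @dual_form and as @dual_form(option=1)."""

    def decorator(fn: Callable[..., Any]) -> Callable[..., Any]:
        fn._option = option  # attach configuration for later introspection
        return fn

    if func is not None:
        # Bare form: Python called dual_form(fn) directly.
        return decorator(func)
    # Parenthesized form: dual_form(...) must return the real decorator.
    return decorator
```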
@@ -14,7 +14,7 @@ from django.utils import timezone
 from abstract_block_dumper.models import TaskAttempt
 
 
-@shared_task(name="abstract_block_dumper.cleanup_old_tasks")
+@shared_task(name="abstract_block_dumper.v1.cleanup_old_tasks")
 def cleanup_old_tasks(days: int = 7) -> dict[str, int | str]:
     """
     Delete all succeeded or unrecoverable failed tasks older than the specified number of days.
@@ -47,12 +47,12 @@ def cleanup_old_tasks(days: int = 7) -> dict[str, int | str]:
 
     Example cron (daily at 2 AM):
         0 2 * * * python manage.py shell -c \
-            "from abstract_block_dumper.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
+            "from abstract_block_dumper.v1.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
 
     Example Celery beat schedule (in settings.py):
         CELERY_BEAT_SCHEDULE = {
             'cleanup-old-tasks': {
-                'task': 'abstract_block_dumper.cleanup_old_tasks',
+                'task': 'abstract_block_dumper.v1.cleanup_old_tasks',
                 'schedule': crontab(hour=2, minute=0), # Daily at 2 AM
                 'kwargs': {'days': 7},
             },
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract-block-dumper
-Version: 0.0.5
+Version: 0.0.7
 Project-URL: Source, https://github.com/bactensor/abstract-block-dumper
 Project-URL: Issue Tracker, https://github.com/bactensor/abstract-block-dumper/issues
 Author-email: Reef Technologies <opensource@reef.pl>
@@ -15,7 +15,7 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: Software Development :: Libraries
 Requires-Python: >=3.11
 Requires-Dist: bittensor>=9.10.1
-Requires-Dist: celery>=5.5.3
+Requires-Dist: celery>=5.3
 Requires-Dist: django<6.0,>=3.2
 Requires-Dist: structlog>=25.4.0
 Description-Content-Type: text/markdown
@@ -26,6 +26,22 @@ Description-Content-Type: text/markdown
 This package provides a simplified framework for creating block processing tasks in Django applications.
 Define tasks with lambda conditions using the @block_task decorator and run them asynchronously with Celery.
 
+## Usage
+
+> [!IMPORTANT]
+> This package uses [ApiVer](#versioning), make sure to import `abstract_block_dumper.v1`.
+
+
+## Versioning
+
+This package uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+TL;DR you are safe to use [compatible release version specifier](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release) `~=MAJOR.MINOR` in your `pyproject.toml` or `requirements.txt`.
+
+Additionally, this package uses [ApiVer](https://www.youtube.com/watch?v=FgcoAKchPjk) to further reduce the risk of breaking changes.
+This means, the public API of this package is explicitly versioned, e.g. `abstract_block_dumper.v1`, and will not change in a backwards-incompatible way even when `abstract_block_dumper.v2` is released.
+
+Internal packages, i.e. prefixed by `abstract_block_dumper._` do not share these guarantees and may change in a backwards-incompatible way at any time even in patch releases.
+
 ## Implementation Details
 
 ### General Workflow:
@@ -82,6 +98,31 @@ INSTALLED_APPS = [
 python manage.py migrate
 ```
 
+4. **Configure Celery to discover block tasks:**
+
+In your project's `celery.py` file, add the following to ensure Celery workers can discover your `@block_task` decorated functions:
+
+```python
+from celery import Celery
+from celery.signals import celeryd_init
+from django.conf import settings
+
+app = Celery('your_project')
+app.config_from_object('django.conf:settings', namespace='CELERY')
+app.autodiscover_tasks()
+
+
+
+@celeryd_init.connect
+def on_worker_init(**kwargs) -> None:
+    """Load block tasks when worker initializes."""
+    from abstract_block_dumper.v1.celery import setup_celery_tasks
+    setup_celery_tasks()
+
+```
+
+> **Important:** Without this step, Celery workers will not recognize your `@block_task` decorated functions, and you'll see "Received unregistered task" errors.
+
 ## Usage
 
 ### 1. Define Block Processing Tasks
@@ -93,7 +134,7 @@ Create block processing tasks in `tasks.py` or `block_tasks.py` file inside any
 ### 3. Start the Block Scheduler
 Run the scheduler to start processing blocks:
 ```bash
-$ python manage.py block_tasks
+$ python manage.py block_tasks_v1
 ```
 
 This command will:
@@ -112,11 +153,11 @@ See examples below:
 Use the `@block_task` decorator with lambda conditions to create block processing tasks:
 
 ```python
-from abstract_block_dumper.decorators import block_task
+from abstract_block_dumper.v1.decorators import block_task
 
 
 # Process every block
-@block_task(condition=lambda bn: True)
+@block_task
 def process_every_block(block_number: int):
     print(f"Processing every block: {block_number}")
 
@@ -144,7 +185,7 @@ def process_multi_netuid_task(block_number: int, netuid: int):
 The framework provides a maintenance task to clean up old task records and maintain database performance:
 
 ```python
-from abstract_block_dumper.tasks import cleanup_old_tasks
+from abstract_block_dumper.v1.tasks import cleanup_old_tasks
 
 # Delete tasks older than 7 days (default)
 cleanup_old_tasks.delay()
@@ -160,13 +201,13 @@ This task deletes all succeeded or unrecoverable failed tasks older than the spe
 **Option 1: Manual Execution**
 ```bash
 # Using Django shell
-python manage.py shell -c "from abstract_block_dumper.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
+python manage.py shell -c "from abstract_block_dumper.v1.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
 ```
 
 **Option 2: Cron Job (Recommended - once per day)**
 ```bash
 # Add to crontab (daily at 2 AM)
-0 2 * * * cd /path/to/your/project && python manage.py shell -c "from abstract_block_dumper.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
+0 2 * * * cd /path/to/your/project && python manage.py shell -c "from abstract_block_dumper.v1.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
 ```
 
 **Option 3: Celery Beat (Automated Scheduling)**
@@ -210,55 +251,92 @@ BLOCK_DUMPER_MAX_ATTEMPTS = 3 # maximum retry attempts
 BLOCK_DUMPER_MAX_ATTEMPTS = 3 # maximum retry attempts
 BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 1440 # maximum retry delay (24 hours)
 ```
-### Configuration Options Reference
-
-#### Core Settings
-
-**BITTENSOR_NETWORK** (str, default: `'finney'`) Specifies which [Bittensor network](https://docs.learnbittensor.org/concepts/bittensor-networks) to connect to
-
-**BLOCK_DUMPER_START_FROM_BLOCK** (str|int|None, default: `None`)
-- **Purpose**: Determines the starting block for processing when the scheduler first runs
-- **Valid Values**:
-  - `None`: Resume from the last processed block stored in database
-  - `'current'`: Start from the current blockchain block (skips historical blocks)
-  - `int`: Start from a specific block number (e.g., `1000000`)
-- **Example**: `BLOCK_DUMPER_START_FROM_BLOCK = 'current'`
-- **Performance Impact**: Starting from historical blocks may require significant processing time
-
-#### Scheduler Settings
-
-**BLOCK_DUMPER_POLL_INTERVAL** (int, default: `1`)
-- **Purpose**: Seconds to wait between checking for new blocks
-- **Valid Range**: `1` to `3600` (1 second to 1 hour)
-- **Example**: `BLOCK_DUMPER_POLL_INTERVAL = 5`
-- **Performance Impact**:
-  - Lower values (1-2s): Near real-time processing, higher CPU/network usage
-  - Higher values (10-60s): Reduced load but delayed processing
-  - Very low values (<1s) may cause rate limiting
-
-#### Retry and Error Handling Settings
-
-**BLOCK_DUMPER_MAX_ATTEMPTS** (int, default: `3`)
-- **Purpose**: Maximum number of attempts to retry a failed task before giving up
-- **Valid Range**: `1` to `10`
-- **Example**: `BLOCK_DUMPER_MAX_ATTEMPTS = 5`
-- **Performance Impact**: Higher values increase resilience but may delay failure detection
-
-**BLOCK_TASK_RETRY_BACKOFF** (int, default: `1`)
-- **Purpose**: Base number of minutes for exponential backoff retry delays
-- **Valid Range**: `1` to `60`
-- **Example**: `BLOCK_TASK_RETRY_BACKOFF = 2`
-- **Calculation**: Actual delay = `backoff ** attempt_count` minutes
+## Configuration Options Reference
+
+### `BITTENSOR_NETWORK`
+- **Type:** `str`
+- **Default:** `'finney'`
+- **Description:** Specifies which [Bittensor network](https://docs.learnbittensor.org/concepts/bittensor-networks) to connect to
+
+---
+
+### `BLOCK_DUMPER_START_FROM_BLOCK`
+- **Type:** `str | int | None`
+- **Default:** `None`
+- **Valid Range:** `None`, `'current'`, or any positive integer
+- **Description:** Determines the starting block for processing when the scheduler first runs
+  - `None` → Resume from the last processed block stored in database
+  - `'current'` → Start from the current blockchain block (skips historical blocks)
+  - Integer → Start from a specific block number (e.g., `1000000`)
+
+```python
+BLOCK_DUMPER_START_FROM_BLOCK = 'current'
+```
+
+> **Performance Impact:** Starting from historical blocks may require significant processing time
+
+---
+
+### `BLOCK_DUMPER_POLL_INTERVAL`
+- **Type:** `int`
+- **Default:** `1`
+- **Valid Range:** `1` to `3600` (seconds)
+- **Description:** Seconds to wait between checking for new blocks
+
+```python
+BLOCK_DUMPER_POLL_INTERVAL = 5
+```
+
+> **Performance Impact:**
+> - Lower values (1-2s): Near real-time processing, higher CPU/network usage
+> - Higher values (10-60s): Reduced load but delayed processing
+> - Very low values (<1s): May cause rate limiting
+
+---
+
+### `BLOCK_DUMPER_MAX_ATTEMPTS`
+- **Type:** `int`
+- **Default:** `3`
+- **Valid Range:** `1` to `10`
+- **Description:** Maximum number of attempts to retry a failed task before giving up
+
+```python
+BLOCK_DUMPER_MAX_ATTEMPTS = 5
+```
+
+> **Performance Impact:** Higher values increase resilience but may delay failure detection
+
+---
+
+### `BLOCK_TASK_RETRY_BACKOFF`
+- **Type:** `int`
+- **Default:** `1`
+- **Valid Range:** `1` to `60` (minutes)
+- **Description:** Base number of minutes for exponential backoff retry delays
+- **Calculation:** Actual delay = `backoff ** attempt_count` minutes
   - Attempt 1: 2¹ = 2 minutes
-  - Attempt 2: 2² = 4 minutes
+  - Attempt 2: 2² = 4 minutes
   - Attempt 3: 2³ = 8 minutes
-- **Performance Impact**: Lower values retry faster but may overwhelm failing services
 
-**BLOCK_TASK_MAX_RETRY_DELAY_MINUTES** (int, default: `1440`)
-- **Purpose**: Maximum delay (in minutes) between retry attempts, caps exponential backoff
-- **Valid Range**: `1` to `10080` (1 minute to 1 week)
-- **Example**: `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720` # 12 hours max
-- **Performance Impact**: Prevents extremely long delays while maintaining backoff benefits
+```python
+BLOCK_TASK_RETRY_BACKOFF = 2
+```
+
+> **Performance Impact:** Lower values retry faster but may overwhelm failing services
+
+---
+
+### `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES`
+- **Type:** `int`
+- **Default:** `1440` (24 hours)
+- **Valid Range:** `1` to `10080` (1 minute to 1 week)
+- **Description:** Maximum delay (in minutes) between retry attempts, caps exponential backoff
+
+```python
+BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720 # 12 hours max
+```
+
+> **Performance Impact:** Prevents extremely long delays while maintaining backoff benefits
 
 
 ## Example Project
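
For reference, the options documented above can be combined into a single settings block; the following sketch is illustrative only, using just the names from this reference (values are placeholders, not recommendations):

```python
# settings.py -- illustrative values only; all names are documented above
BITTENSOR_NETWORK = "finney"
BLOCK_DUMPER_START_FROM_BLOCK = "current"  # or None, or an int such as 1000000
BLOCK_DUMPER_POLL_INTERVAL = 5             # seconds between new-block checks
BLOCK_DUMPER_MAX_ATTEMPTS = 3              # retries before a task is abandoned
BLOCK_TASK_RETRY_BACKOFF = 2               # delay = 2 ** attempt_count minutes
BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720   # cap on the exponential backoff
```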
@@ -0,0 +1,29 @@
+abstract_block_dumper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+abstract_block_dumper/_version.py,sha256=AV58KqMkBGaCvmPdbd3g9huyNXfIVxjw8QbCMdaeivU,704
+abstract_block_dumper/admin.py,sha256=3J3I_QOKFgfMNpTXW-rTQGO_q5Ls6uNuL0FkPVdIsYg,1654
+abstract_block_dumper/apps.py,sha256=DXATdrjsL3T2IletTbKeD6unr8ScLaxg7wz0nAHTAns,215
+abstract_block_dumper/models.py,sha256=MO9824dmHB6xF3PrFE_RERh7whVjQtS4tt6QA0wSbg0,2022
+abstract_block_dumper/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+abstract_block_dumper/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+abstract_block_dumper/_internal/discovery.py,sha256=sISOL8vq6rC0pOndrCfWKDZjyYwzzZIChG-BH9mteq0,745
+abstract_block_dumper/_internal/exceptions.py,sha256=jVXQ8b3gneno2XYvO0XisJPMlkAWb6H5u10egIpPJ4k,335
+abstract_block_dumper/_internal/dal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+abstract_block_dumper/_internal/dal/django_dal.py,sha256=pBGEFeo_U0ac2Za-dwzJvf04Ng8lP51aR60c_DUrGIw,5426
+abstract_block_dumper/_internal/dal/memory_registry.py,sha256=yMNF7jrvWGF-S1pqyR2zOCNLWwrdsImcvV6cGqu1wYE,2972
+abstract_block_dumper/_internal/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+abstract_block_dumper/_internal/services/block_processor.py,sha256=wB-zeft3Ys8zmqCdF_v12rXd6umNWvGfy2Ts6XSGkL8,8132
+abstract_block_dumper/_internal/services/executor.py,sha256=ZZmQ9TzoNEoAE4amiU8lHRsTfP7YusUkWXasrArfo2g,1806
+abstract_block_dumper/_internal/services/scheduler.py,sha256=lhkyJ6wXGVtFAijs2Edz4ZVXAT9RP6GAY_Dh_Yg-wd4,6113
+abstract_block_dumper/_internal/services/utils.py,sha256=QSs2hBHWOPgNgKPf_ZmADXuqEiqK5mWZp7JblvQgxZQ,1140
+abstract_block_dumper/management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+abstract_block_dumper/management/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+abstract_block_dumper/management/commands/block_tasks_v1.py,sha256=jSi04ahIKYwlm_dNKCUGL_cmALv1iP-ZjfXrmz0pn-4,880
+abstract_block_dumper/migrations/0001_initial.py,sha256=ImPHC3G6kPkq4Xn_4YVAm4Labh1Xi7PkCRszYRGpTiI,2298
+abstract_block_dumper/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+abstract_block_dumper/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+abstract_block_dumper/v1/celery.py,sha256=X4IqVs5i6ZpyY7fy1SqMZgsZy4SXP-jK2qG-FYnjU38,1722
+abstract_block_dumper/v1/decorators.py,sha256=SBl8XP9qhKyTdsKaRREW870BZGidEe0C_nmxnwh76lo,8156
+abstract_block_dumper/v1/tasks.py,sha256=u9iMYdDUqzYT3yPrNwZecHnlweZ3yFipV9BcIWHCbus,2647
+abstract_block_dumper-0.0.7.dist-info/METADATA,sha256=bg7lku8X3hdZI9DBoi_IfVHmP_pAuCKNWHjWenZyI2Q,12902
+abstract_block_dumper-0.0.7.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+abstract_block_dumper-0.0.7.dist-info/RECORD,,
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.27.0
+Generator: hatchling 1.28.0
 Root-Is-Purelib: true
 Tag: py3-none-any
@@ -1,92 +0,0 @@
-import time
-
-import bittensor as bt
-import structlog
-from django.conf import settings
-
-import abstract_block_dumper.dal.django_dal as abd_dal
-import abstract_block_dumper.services.utils as abd_utils
-from abstract_block_dumper.services.block_processor import BlockProcessor, block_processor_factory
-
-logger = structlog.get_logger(__name__)
-
-
-class TaskScheduler:
-    def __init__(
-        self,
-        block_processor: BlockProcessor,
-        subtensor: bt.Subtensor,
-        poll_interval: int,
-    ) -> None:
-        self.block_processor = block_processor
-        self.subtensor = subtensor
-        self.poll_interval = poll_interval
-        self.last_processed_block = -1
-        self.is_running = False
-
-    def start(self) -> None:
-        self.is_running = True
-
-        self.initialize_last_block()
-
-        logger.info(
-            "TaskScheduler started",
-            last_processed_block=self.last_processed_block,
-            registry_functions=len(self.block_processor.registry.get_functions()),
-        )
-
-        while self.is_running:
-            try:
-                # Process lost retries first
-                self.block_processor.recover_failed_retries()
-
-                current_block = self.subtensor.get_current_block()
-
-                for block_number in range(self.last_processed_block + 1, current_block + 1):
-                    self.block_processor.process_block(block_number)
-                    self.last_processed_block = block_number
-
-                time.sleep(self.poll_interval)
-            except KeyboardInterrupt:
-                logger.info("TaskScheduler stopping due to KeyboardInterrupt.")
-                self.stop()
-                break
-            except Exception:
-                logger.error("Fatal scheduler error", exc_info=True)
-                # resume the loop even if task failed
-                time.sleep(self.poll_interval)
-
-    def stop(self) -> None:
-        self.is_running = False
-        logger.info("TaskScheduler stopped.")
-
-    def initialize_last_block(self) -> None:
-        start_from_block_setting = getattr(settings, "BLOCK_DUMPER_START_FROM_BLOCK")
-
-        if start_from_block_setting is not None:
-            if start_from_block_setting == "current":
-                self.last_processed_block = self.subtensor.get_current_block()
-                logger.info(f"Starting from current blockchain block {self.last_processed_block}")
-
-            elif isinstance(start_from_block_setting, int):
-                self.last_processed_block = start_from_block_setting
-                logger.info(f"Starting from configured block {self.last_processed_block}")
-            else:
-                raise ValueError(f"Invalid BLOCK_DUMPER_START_FROM_BLOCK value: {start_from_block_setting}")
-        else:
-            # Default behavior - resume from database
-            last_block_number = abd_dal.get_the_latest_executed_block_number()
-
-            self.last_processed_block = last_block_number or self.subtensor.get_current_block()
-            logger.info(
-                "Resume from the last database block or start from the current block",
-                last_processed_block=self.last_processed_block,
-            )
-
-
-def task_scheduler_factory() -> TaskScheduler:
-    return TaskScheduler(
-        block_processor=block_processor_factory(),
-        subtensor=abd_utils.get_bittensor_client(),
-        poll_interval=getattr(settings, "BLOCK_DUMPER_POLL_INTERVAL", 1),
-    )
@@ -1,25 +0,0 @@
-abstract_block_dumper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-abstract_block_dumper/_version.py,sha256=YRV1ohn6CdKEhsUOmFFMmr5UTjMv4Ydw3WJGxF2BHBs,704
-abstract_block_dumper/admin.py,sha256=3J3I_QOKFgfMNpTXW-rTQGO_q5Ls6uNuL0FkPVdIsYg,1654
-abstract_block_dumper/apps.py,sha256=DXATdrjsL3T2IletTbKeD6unr8ScLaxg7wz0nAHTAns,215
-abstract_block_dumper/decorators.py,sha256=lV1ueIlEbBNojnXVH5GQiRCbck3-SQgtWOil5OqeTHo,7061
-abstract_block_dumper/discovery.py,sha256=kZlb8y-0ltJE-L-1GLxZ_xlziibY8AjggvHJ9sxsScw,728
-abstract_block_dumper/exceptions.py,sha256=EunFH-H5eXNNkKl2CvHlhZ2wvtdry969Gle-CZc7YM0,315
-abstract_block_dumper/models.py,sha256=l229tar4FdQ52eETLKGeskgkXHWa4ealF6DWbG8M4Mc,2012
-abstract_block_dumper/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-abstract_block_dumper/tasks.py,sha256=8ppGWxML3krVdrS_08WnKuCpERRhB_6DIyVEkpYZMrw,2638
-abstract_block_dumper/dal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-abstract_block_dumper/dal/django_dal.py,sha256=unAA4Mt5dBBaUhvyezfyC0VtWMD6Ru79NyjKaOMNNSw,5359
-abstract_block_dumper/dal/memory_registry.py,sha256=rgU2CYGm2MHPgSZefgr-kuLxOtPu5wxINa3Y5ELgMUo,3029
-abstract_block_dumper/management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-abstract_block_dumper/management/commands/block_tasks.py,sha256=dEfFnoZCIIDsrNL5vRPtIDrkpcJk36yev_aoGAScgoQ,758
-abstract_block_dumper/migrations/0001_initial.py,sha256=ImPHC3G6kPkq4Xn_4YVAm4Labh1Xi7PkCRszYRGpTiI,2298
-abstract_block_dumper/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-abstract_block_dumper/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-abstract_block_dumper/services/block_processor.py,sha256=4guYwtVYh-N1UewiqVN7xG5pM56adyGe8vPG_kCDmQI,8072
-abstract_block_dumper/services/executor.py,sha256=TDbrtVGiz7GNGJwHYB6ZqqhrrTDDL7JGzxOehpF-QTY,1786
-abstract_block_dumper/services/scheduler.py,sha256=zKY24zSwjcQSVk3wt39GBurSNXkfylWsdV7Mgmv1RO8,3443
-abstract_block_dumper/services/utils.py,sha256=Iqa-9xhNxOCnvSWjGBclOUvmO4qsUhhievUllVh82I4,1286
-abstract_block_dumper-0.0.5.dist-info/METADATA,sha256=ArtUpkxntGZ94Nbetr1tztCVywdeEDbhj4AcL7Qseg0,11022
-abstract_block_dumper-0.0.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-abstract_block_dumper-0.0.5.dist-info/RECORD,,
File without changes