abstract-block-dumper 0.0.1__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23) hide show
  1. abstract_block_dumper/{dal → _internal/dal}/django_dal.py +4 -3
  2. abstract_block_dumper/{dal → _internal/dal}/memory_registry.py +12 -20
  3. abstract_block_dumper/{discovery.py → _internal/discovery.py} +1 -1
  4. abstract_block_dumper/{exceptions.py → _internal/exceptions.py} +1 -1
  5. abstract_block_dumper/_internal/services/__init__.py +0 -0
  6. abstract_block_dumper/{services → _internal/services}/block_processor.py +15 -14
  7. abstract_block_dumper/{services → _internal/services}/executor.py +2 -2
  8. abstract_block_dumper/{services → _internal/services}/scheduler.py +9 -7
  9. abstract_block_dumper/{services → _internal/services}/utils.py +3 -7
  10. abstract_block_dumper/_version.py +2 -2
  11. abstract_block_dumper/management/commands/__init__.py +0 -0
  12. abstract_block_dumper/management/commands/{block_tasks.py → block_tasks_v1.py} +6 -3
  13. abstract_block_dumper/models.py +1 -1
  14. abstract_block_dumper/v1/__init__.py +0 -0
  15. abstract_block_dumper/v1/celery.py +53 -0
  16. abstract_block_dumper/{decorators.py → v1/decorators.py} +24 -29
  17. abstract_block_dumper/{tasks.py → v1/tasks.py} +3 -3
  18. {abstract_block_dumper-0.0.1.dist-info → abstract_block_dumper-0.0.4.dist-info}/METADATA +127 -52
  19. abstract_block_dumper-0.0.4.dist-info/RECORD +29 -0
  20. abstract_block_dumper-0.0.1.dist-info/RECORD +0 -25
  21. /abstract_block_dumper/{dal → _internal}/__init__.py +0 -0
  22. /abstract_block_dumper/{services → _internal/dal}/__init__.py +0 -0
  23. {abstract_block_dumper-0.0.1.dist-info → abstract_block_dumper-0.0.4.dist-info}/WHEEL +0 -0
@@ -6,8 +6,8 @@ from django.db import transaction
6
6
  from django.db.models.query import QuerySet
7
7
  from django.utils import timezone
8
8
 
9
+ import abstract_block_dumper._internal.services.utils as abd_utils
9
10
  import abstract_block_dumper.models as abd_models
10
- import abstract_block_dumper.services.utils as abd_utils
11
11
 
12
12
 
13
13
  def get_ready_to_retry_attempts() -> QuerySet[abd_models.TaskAttempt]:
@@ -76,7 +76,7 @@ def task_mark_as_success(task: abd_models.TaskAttempt, result_data: dict) -> Non
76
76
  task.save()
77
77
 
78
78
 
79
- def task_mark_as_failed(task) -> None:
79
+ def task_mark_as_failed(task: abd_models.TaskAttempt) -> None:
80
80
  DEFAULT_BLOCK_TASK_RETRY_BACKOFF = 1
81
81
  MAX_RETRY_DELAY_MINUTES = 1440 # 24 hours max delay
82
82
 
@@ -98,7 +98,7 @@ def task_mark_as_failed(task) -> None:
98
98
  task.save()
99
99
 
100
100
 
101
- def task_schedule_to_retry(task):
101
+ def task_schedule_to_retry(task: abd_models.TaskAttempt) -> None:
102
102
  task.status = abd_models.TaskAttempt.Status.PENDING
103
103
  task.save()
104
104
 
@@ -110,6 +110,7 @@ def task_create_or_get_pending(
110
110
  ) -> tuple[abd_models.TaskAttempt, bool]:
111
111
  """
112
112
  Create or get a pending task attempt.
113
+
113
114
  Returns (task, created) where created indicates if a new task was created.
114
115
 
115
116
  For failed tasks that can retry:
@@ -6,7 +6,7 @@ from typing import Any
6
6
  import structlog
7
7
  from celery import Task
8
8
 
9
- from abstract_block_dumper.exceptions import ConditionEvaluationError
9
+ from abstract_block_dumper._internal.exceptions import ConditionEvaluationError
10
10
 
11
11
  logger = structlog.getLogger(__name__)
12
12
 
@@ -19,10 +19,8 @@ class RegistryItem:
19
19
  backfilling_lookback: int | None = None
20
20
  celery_kwargs: dict[str, Any] = field(default_factory=dict)
21
21
 
22
- def match_condition(self, block_number: int, **kwargs) -> bool:
23
- """
24
- Check if condition matches for given block and arguments
25
- """
22
+ def match_condition(self, block_number: int, **kwargs: dict[str, Any]) -> bool:
23
+ """Check if condition matches for given block and arguments."""
26
24
  try:
27
25
  return self.condition(block_number, **kwargs)
28
26
  except Exception as e:
@@ -32,28 +30,23 @@ class RegistryItem:
32
30
  block_number=block_number,
33
31
  exc_info=True,
34
32
  )
35
- raise ConditionEvaluationError(f"Failed to evaluate condition: {e}") from e
33
+ msg = f"Failed to evaluate condition: {e}"
34
+ raise ConditionEvaluationError(msg) from e
36
35
 
37
36
  def get_execution_args(self) -> list[dict[str, Any]]:
38
- """
39
- Get list of argument sets for execution
40
- """
37
+ """Get list of argument sets for execution."""
41
38
  return self.args or [{}]
42
39
 
43
40
  @property
44
41
  def executable_path(self) -> str:
45
- """
46
- Get the importable path to the function.
47
- """
42
+ """Get the importable path to the function."""
48
43
  if hasattr(self.function, "name") and self.function.name is not None:
49
44
  return self.function.name
50
45
 
51
- return ".".join([self.function.__module__, self.function.__name__])
46
+ return f"{self.function.__module__}.{self.function.__name__}"
52
47
 
53
48
  def requires_backfilling(self) -> bool:
54
- """
55
- Check if this item requires backfilling.
56
- """
49
+ """Check if this item requires backfilling."""
57
50
  return self.backfilling_lookback is not None
58
51
 
59
52
 
@@ -84,7 +77,7 @@ class MemoryRegistry(BaseRegistry):
84
77
  "Registered function",
85
78
  function_name=item.function.__name__,
86
79
  executable_path=item.executable_path,
87
- args=item.args,
80
+ args_counter=len(item.args or []),
88
81
  backfilling_lookback=item.backfilling_lookback,
89
82
  )
90
83
 
@@ -94,12 +87,11 @@ class MemoryRegistry(BaseRegistry):
94
87
  def clear(self) -> None:
95
88
  self._functions = []
96
89
 
97
- def get_by_executable_path(self, executable_path: str) -> RegistryItem:
90
+ def get_by_executable_path(self, executable_path: str) -> RegistryItem | None:
98
91
  for registry_item in self.get_functions():
99
92
  if registry_item.executable_path == executable_path:
100
93
  return registry_item
101
- # TODO: Improve this
102
- raise Exception("Function Not Found")
94
+ return None
103
95
 
104
96
 
105
97
  task_registry = MemoryRegistry()
@@ -11,7 +11,7 @@ def ensure_modules_loaded() -> None:
11
11
 
12
12
  @block_task must be loaded, otherwise it won't be registered.
13
13
  """
14
- from django.apps import apps
14
+ from django.apps import apps # noqa: PLC0415
15
15
 
16
16
  for app_config in apps.get_app_configs():
17
17
  for module_suffix in ["tasks", "block_tasks"]:
@@ -10,7 +10,7 @@ class ConditionEvaluationError(AbstractBlockDumperError):
10
10
  pass
11
11
 
12
12
 
13
- class CeleryTaskLocked(Exception):
13
+ class CeleryTaskLockedError(AbstractBlockDumperError):
14
14
  """Celery task execution is locked"""
15
15
 
16
16
  pass
File without changes
@@ -1,12 +1,12 @@
1
1
  import structlog
2
2
  from django.db import transaction
3
3
 
4
- import abstract_block_dumper.dal.django_dal as abd_dal
5
- from abstract_block_dumper.dal.memory_registry import BaseRegistry, RegistryItem, task_registry
6
- from abstract_block_dumper.exceptions import ConditionEvaluationError
4
+ import abstract_block_dumper._internal.dal.django_dal as abd_dal
5
+ from abstract_block_dumper._internal.dal.memory_registry import BaseRegistry, RegistryItem, task_registry
6
+ from abstract_block_dumper._internal.exceptions import ConditionEvaluationError
7
+ from abstract_block_dumper._internal.services.executor import CeleryExecutor
8
+ from abstract_block_dumper._internal.services.utils import serialize_args
7
9
  from abstract_block_dumper.models import TaskAttempt
8
- from abstract_block_dumper.services.executor import CeleryExecutor
9
- from abstract_block_dumper.services.utils import serialize_args
10
10
 
11
11
  logger = structlog.get_logger(__name__)
12
12
 
@@ -100,22 +100,22 @@ class BlockProcessor:
100
100
  This handles tasks that may have been lost due to scheduler restarts.
101
101
  """
102
102
  retry_count = 0
103
- for task_attempt in abd_dal.get_ready_to_retry_attempts():
103
+ for retry_attempt in abd_dal.get_ready_to_retry_attempts():
104
104
  try:
105
105
  # Find the registry item to get celery_kwargs
106
- registry_item = self.registry.get_by_executable_path(task_attempt.executable_path)
106
+ registry_item = self.registry.get_by_executable_path(retry_attempt.executable_path)
107
107
  if not registry_item:
108
108
  logger.warning(
109
109
  "Registry item not found for failed task, skipping retry recovery",
110
- task_id=task_attempt.id,
111
- executable_path=task_attempt.executable_path,
110
+ task_id=retry_attempt.id,
111
+ executable_path=retry_attempt.executable_path,
112
112
  )
113
113
  continue
114
114
 
115
115
  # Use atomic transaction to prevent race conditions
116
116
  with transaction.atomic():
117
117
  # Re-fetch with select_for_update to prevent concurrent modifications
118
- task_attempt = TaskAttempt.objects.select_for_update(nowait=True).get(id=task_attempt.id)
118
+ task_attempt = TaskAttempt.objects.select_for_update(nowait=True).get(id=retry_attempt.id)
119
119
 
120
120
  # Verify task is still in FAILED state and ready for retry
121
121
  if task_attempt.status == TaskAttempt.Status.SUCCESS:
@@ -150,16 +150,16 @@ class BlockProcessor:
150
150
  except Exception:
151
151
  logger.error(
152
152
  "Failed to recover retry",
153
- task_id=task_attempt.id,
153
+ task_id=retry_attempt.id,
154
154
  exc_info=True,
155
155
  )
156
156
  # Reload task to see current state after potential execution failure
157
157
  try:
158
- task_attempt.refresh_from_db()
158
+ retry_attempt.refresh_from_db()
159
159
  # If task is still PENDING after error, revert to FAILED
160
160
  # (execution may have failed before celery task could mark it)
161
- if task_attempt.status == TaskAttempt.Status.PENDING:
162
- abd_dal.revert_to_failed(task_attempt)
161
+ if retry_attempt.status == TaskAttempt.Status.PENDING:
162
+ abd_dal.revert_to_failed(retry_attempt)
163
163
  except TaskAttempt.DoesNotExist:
164
164
  # Task was deleted during recovery, nothing to revert
165
165
  pass
@@ -170,6 +170,7 @@ class BlockProcessor:
170
170
  def _cleanup_phantom_tasks(self) -> None:
171
171
  """
172
172
  Clean up tasks marked as SUCCESS but never actually started.
173
+
173
174
  Only removes tasks that were created recently (within last hour) to avoid
174
175
  deleting legitimate tasks marked as success by external processes.
175
176
  """
@@ -2,8 +2,8 @@ from typing import Any
2
2
 
3
3
  import structlog
4
4
 
5
- import abstract_block_dumper.dal.django_dal as abd_dal
6
- from abstract_block_dumper.dal.memory_registry import RegistryItem
5
+ import abstract_block_dumper._internal.dal.django_dal as abd_dal
6
+ from abstract_block_dumper._internal.dal.memory_registry import RegistryItem
7
7
  from abstract_block_dumper.models import TaskAttempt
8
8
 
9
9
  logger = structlog.get_logger(__name__)
@@ -4,9 +4,9 @@ import bittensor as bt
4
4
  import structlog
5
5
  from django.conf import settings
6
6
 
7
- import abstract_block_dumper.dal.django_dal as abd_dal
8
- import abstract_block_dumper.services.utils as abd_utils
9
- from abstract_block_dumper.services.block_processor import BlockProcessor, block_processor_factory
7
+ import abstract_block_dumper._internal.dal.django_dal as abd_dal
8
+ import abstract_block_dumper._internal.services.utils as abd_utils
9
+ from abstract_block_dumper._internal.services.block_processor import BlockProcessor, block_processor_factory
10
10
 
11
11
  logger = structlog.get_logger(__name__)
12
12
 
@@ -61,18 +61,20 @@ class TaskScheduler:
61
61
  logger.info("TaskScheduler stopped.")
62
62
 
63
63
  def initialize_last_block(self) -> None:
64
- start_from_block_setting = getattr(settings, "BLOCK_DUMPER_START_FROM_BLOCK")
64
+ # Safe getattr in case setting is not defined
65
+ start_from_block_setting = getattr(settings, "BLOCK_DUMPER_START_FROM_BLOCK", None)
65
66
 
66
67
  if start_from_block_setting is not None:
67
68
  if start_from_block_setting == "current":
68
69
  self.last_processed_block = self.subtensor.get_current_block()
69
- logger.info(f"Starting from current blockchain block {self.last_processed_block}")
70
+ logger.info("Starting from current blockchain block", block_number=self.last_processed_block)
70
71
 
71
72
  elif isinstance(start_from_block_setting, int):
72
73
  self.last_processed_block = start_from_block_setting
73
- logger.info(f"Starting from configured block {self.last_processed_block}")
74
+ logger.info("Starting from configured block", block_number=self.last_processed_block)
74
75
  else:
75
- raise ValueError(f"Invalid BLOCK_DUMPER_START_FROM_BLOCK value: {start_from_block_setting}")
76
+ error_msg = f"Invalid BLOCK_DUMPER_START_FROM_BLOCK value: {start_from_block_setting}"
77
+ raise ValueError(error_msg)
76
78
  else:
77
79
  # Default behavior - resume from database
78
80
  last_block_number = abd_dal.get_the_latest_executed_block_number()
@@ -20,14 +20,12 @@ def get_bittensor_client() -> bt.Subtensor:
20
20
  """
21
21
  DEFAULT_BITTENSOR_NETWORK = "finney"
22
22
  network = getattr(settings, "BITTENSOR_NETWORK", DEFAULT_BITTENSOR_NETWORK)
23
- logger.info(f"Creating new bittensor client for network: {network}")
23
+ logger.info("Creating new bittensor client for network", network=network)
24
24
  return bt.subtensor(network=network)
25
25
 
26
26
 
27
27
  def get_current_celery_task_id() -> str:
28
- """
29
- Get current celery task id
30
- """
28
+ """Get current celery task id."""
31
29
  try:
32
30
  celery_task_id = current_task.id
33
31
  except Exception:
@@ -36,9 +34,7 @@ def get_current_celery_task_id() -> str:
36
34
 
37
35
 
38
36
  def get_executable_path(func: Callable) -> str:
39
- """
40
- Get executable path for the callable `func`
41
- """
37
+ """Get executable path for the callable `func`."""
42
38
  return ".".join([func.__module__, func.__name__])
43
39
 
44
40
 
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '0.0.1'
32
- __version_tuple__ = version_tuple = (0, 0, 1)
31
+ __version__ = version = '0.0.4'
32
+ __version_tuple__ = version_tuple = (0, 0, 4)
33
33
 
34
34
  __commit_id__ = commit_id = None
File without changes
@@ -1,14 +1,17 @@
1
1
  from django.core.management.base import BaseCommand
2
2
 
3
- from abstract_block_dumper.dal.memory_registry import task_registry
4
- from abstract_block_dumper.discovery import ensure_modules_loaded
5
- from abstract_block_dumper.services.scheduler import task_scheduler_factory
3
+ from abstract_block_dumper._internal.dal.memory_registry import task_registry
4
+ from abstract_block_dumper._internal.discovery import ensure_modules_loaded
5
+ from abstract_block_dumper._internal.services.scheduler import task_scheduler_factory
6
6
 
7
7
 
8
8
  class Command(BaseCommand):
9
9
  help = "Run the block scheduler daemon."
10
10
 
11
11
  def handle(self, *args, **options) -> None:
12
+ """
13
+ Handle the management command to start the block scheduler.
14
+ """
12
15
  self.stdout.write("Syncing decorated functions...")
13
16
  ensure_modules_loaded()
14
17
  functions_counter = len(task_registry.get_functions())
@@ -3,7 +3,7 @@ from typing import Any
3
3
 
4
4
  from django.db import models
5
5
 
6
- import abstract_block_dumper.services.utils as abd_utils
6
+ import abstract_block_dumper._internal.services.utils as abd_utils
7
7
 
8
8
 
9
9
  class TaskAttempt(models.Model):
File without changes
@@ -0,0 +1,53 @@
1
+ """
2
+ Celery integration helpers for abstract-block-dumper.
3
+
4
+ This module provides utilities to integrate @block_task decorated functions
5
+ with Celery workers.
6
+ """
7
+
8
+ from abstract_block_dumper._internal.discovery import ensure_modules_loaded
9
+
10
+
11
+ def setup_celery_tasks() -> None:
12
+ """
13
+ Discover and register all @block_task decorated functions for Celery.
14
+
15
+ This function MUST be called when Celery workers start to ensure that
16
+ all @block_task decorated functions are registered and available to
17
+ receive tasks from the message broker.
18
+
19
+ Usage in your project's celery.py:
20
+
21
+ from celery import Celery
22
+ from celery.signals import worker_ready
23
+
24
+ app = Celery('your_project')
25
+ app.config_from_object('django.conf:settings', namespace='CELERY')
26
+ app.autodiscover_tasks()
27
+
28
+ @worker_ready.connect
29
+ def on_worker_ready(**kwargs):
30
+ '''Load block tasks when worker is ready.'''
31
+ from abstract_block_dumper.v1.celery import setup_celery_tasks
32
+ setup_celery_tasks()
33
+
34
+ Why is this needed?
35
+ -------------------
36
+ The @block_task decorator uses Celery's @shared_task, which requires
37
+ the decorated functions to be imported before workers can receive
38
+ messages for those tasks. Without calling this function, you'll see
39
+ errors like:
40
+
41
+ "Received unregistered task of type 'your_app.block_tasks.task_name'"
42
+
43
+ What does it do?
44
+ ----------------
45
+ - Automatically imports all 'tasks.py' and 'block_tasks.py' modules
46
+ from your INSTALLED_APPS
47
+ - Triggers @block_task decorator registration
48
+ - Makes tasks available to Celery workers
49
+ """
50
+ ensure_modules_loaded()
51
+
52
+
53
+ __all__ = ["setup_celery_tasks"]
@@ -5,10 +5,10 @@ import structlog
5
5
  from celery import Task, shared_task
6
6
  from django.db import OperationalError, transaction
7
7
 
8
- import abstract_block_dumper.dal.django_dal as abd_dal
9
- import abstract_block_dumper.services.utils as abd_utils
10
- from abstract_block_dumper.dal.memory_registry import RegistryItem, task_registry
11
- from abstract_block_dumper.exceptions import CeleryTaskLocked
8
+ import abstract_block_dumper._internal.dal.django_dal as abd_dal
9
+ import abstract_block_dumper._internal.services.utils as abd_utils
10
+ from abstract_block_dumper._internal.dal.memory_registry import RegistryItem, task_registry
11
+ from abstract_block_dumper._internal.exceptions import CeleryTaskLockedError
12
12
  from abstract_block_dumper.models import TaskAttempt
13
13
 
14
14
  logger = structlog.get_logger(__name__)
@@ -20,7 +20,6 @@ def schedule_retry(task_attempt: TaskAttempt) -> None:
20
20
 
21
21
  Task must already be in FAILED state with next_retry_at set by mark_failed()
22
22
  """
23
-
24
23
  if not task_attempt.next_retry_at:
25
24
  logger.error(
26
25
  "Cannot schedule retry without next_retry_at",
@@ -63,7 +62,9 @@ def schedule_retry(task_attempt: TaskAttempt) -> None:
63
62
  )
64
63
 
65
64
 
66
- def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] | None:
65
+ def _celery_task_wrapper(
66
+ func: Callable[..., Any], block_number: int, **kwargs: dict[str, Any]
67
+ ) -> dict[str, Any] | None:
67
68
  executable_path = abd_utils.get_executable_path(func)
68
69
 
69
70
  with transaction.atomic():
@@ -73,21 +74,15 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] |
73
74
  executable_path=executable_path,
74
75
  args_json=abd_utils.serialize_args(kwargs),
75
76
  )
76
- except TaskAttempt.DoesNotExist:
77
- logger.warning(
78
- "TaskAttempt not found - task may have been canceled directly",
79
- block_number=block_number,
80
- executable_path=executable_path,
81
- )
82
- raise CeleryTaskLocked("TaskAttempt not found - task may have been canceled directly")
77
+ except TaskAttempt.DoesNotExist as exc:
78
+ msg = "TaskAttempt not found - task may have been canceled directly"
79
+ logger.warning(msg, block_number=block_number, executable_path=executable_path)
80
+ raise CeleryTaskLockedError(msg) from exc
81
+
83
82
  except OperationalError as e:
84
- logger.info(
85
- "Task already being processed by another worker",
86
- block_number=block_number,
87
- executable_path=executable_path,
88
- operational_error=str(e),
89
- )
90
- raise CeleryTaskLocked("Task already being processed by another worker")
83
+ msg = "Task already being processed by another worker"
84
+ logger.info(msg, block_number=block_number, executable_path=executable_path, operational_error=str(e))
85
+ raise CeleryTaskLockedError(msg) from e
91
86
 
92
87
  if task_attempt.status != TaskAttempt.Status.PENDING:
93
88
  logger.info(
@@ -118,11 +113,11 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] |
118
113
  logger.info("Task completed successfully", task_id=task_attempt.id)
119
114
  return {"result": result}
120
115
  except Exception as e:
121
- logger.error(
116
+ logger.exception(
122
117
  "Task execution failed",
123
118
  task_id=task_attempt.id,
124
119
  error_type=type(e).__name__,
125
- exc_info=True,
120
+ error_message=str(e),
126
121
  )
127
122
  abd_dal.task_mark_as_failed(task_attempt)
128
123
 
@@ -131,10 +126,9 @@ def _celery_task_wrapper(func, block_number: int, **kwargs) -> dict[str, Any] |
131
126
  try:
132
127
  schedule_retry(task_attempt)
133
128
  except Exception:
134
- logger.error(
129
+ logger.exception(
135
130
  "Failed to schedule retry",
136
131
  task_id=task_attempt.id,
137
- exc_info=True,
138
132
  )
139
133
  return None
140
134
 
@@ -146,7 +140,7 @@ def block_task(
146
140
  celery_kwargs: dict[str, Any] | None = None,
147
141
  ) -> Callable[..., Any]:
148
142
  """
149
- Decorator for registering block tasks.
143
+ Register a block task.
150
144
 
151
145
  Args:
152
146
  condition: Lambda function that determines when to execute
@@ -174,10 +168,11 @@ def block_task(
174
168
 
175
169
  def decorator(func: Callable[..., Any]) -> Any:
176
170
  if not callable(condition):
177
- raise ValueError("condition must be a callable.")
171
+ msg = "condition must be a callable."
172
+ raise TypeError(msg)
178
173
 
179
174
  # Celery task wrapper
180
- def shared_celery_task(block_number: int, **kwargs) -> None | Any:
175
+ def shared_celery_task(block_number: int, **kwargs: dict[str, Any]) -> None | Any:
181
176
  """
182
177
  Wrapper that handles TaskAttempt tracking and executes the original
183
178
  function
@@ -194,13 +189,13 @@ def block_task(
194
189
  )(shared_celery_task)
195
190
 
196
191
  # Store original function reference for introspection
197
- celery_task._original_func = func
192
+ celery_task._original_func = func # noqa: SLF001
198
193
 
199
194
  # Register the Celery task
200
195
  task_registry.register_item(
201
196
  RegistryItem(
202
197
  condition=condition,
203
- function=cast(Task, celery_task),
198
+ function=cast("Task", celery_task),
204
199
  args=args,
205
200
  backfilling_lookback=backfilling_lookback,
206
201
  celery_kwargs=celery_kwargs or {},
@@ -14,7 +14,7 @@ from django.utils import timezone
14
14
  from abstract_block_dumper.models import TaskAttempt
15
15
 
16
16
 
17
- @shared_task(name="abstract_block_dumper.cleanup_old_tasks")
17
+ @shared_task(name="abstract_block_dumper.v1.cleanup_old_tasks")
18
18
  def cleanup_old_tasks(days: int = 7) -> dict[str, int | str]:
19
19
  """
20
20
  Delete all succeeded or unrecoverable failed tasks older than the specified number of days.
@@ -47,12 +47,12 @@ def cleanup_old_tasks(days: int = 7) -> dict[str, int | str]:
47
47
 
48
48
  Example cron (daily at 2 AM):
49
49
  0 2 * * * python manage.py shell -c \
50
- "from abstract_block_dumper.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
50
+ "from abstract_block_dumper.v1.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
51
51
 
52
52
  Example Celery beat schedule (in settings.py):
53
53
  CELERY_BEAT_SCHEDULE = {
54
54
  'cleanup-old-tasks': {
55
- 'task': 'abstract_block_dumper.cleanup_old_tasks',
55
+ 'task': 'abstract_block_dumper.v1.cleanup_old_tasks',
56
56
  'schedule': crontab(hour=2, minute=0), # Daily at 2 AM
57
57
  'kwargs': {'days': 7},
58
58
  },
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: abstract-block-dumper
3
- Version: 0.0.1
3
+ Version: 0.0.4
4
4
  Project-URL: Source, https://github.com/bactensor/abstract-block-dumper
5
5
  Project-URL: Issue Tracker, https://github.com/bactensor/abstract-block-dumper/issues
6
6
  Author-email: Reef Technologies <opensource@reef.pl>
@@ -26,6 +26,22 @@ Description-Content-Type: text/markdown
26
26
  This package provides a simplified framework for creating block processing tasks in Django applications.
27
27
  Define tasks with lambda conditions using the @block_task decorator and run them asynchronously with Celery.
28
28
 
29
+ ## Usage
30
+
31
+ > [!IMPORTANT]
32
+ > This package uses [ApiVer](#versioning), make sure to import `abstract_block_dumper.v1`.
33
+
34
+
35
+ ## Versioning
36
+
37
+ This package uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
38
+ TL;DR you are safe to use [compatible release version specifier](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release) `~=MAJOR.MINOR` in your `pyproject.toml` or `requirements.txt`.
39
+
40
+ Additionally, this package uses [ApiVer](https://www.youtube.com/watch?v=FgcoAKchPjk) to further reduce the risk of breaking changes.
41
+ This means, the public API of this package is explicitly versioned, e.g. `abstract_block_dumper.v1`, and will not change in a backwards-incompatible way even when `abstract_block_dumper.v2` is released.
42
+
43
+ Internal packages, i.e. prefixed by `abstract_block_dumper._` do not share these guarantees and may change in a backwards-incompatible way at any time even in patch releases.
44
+
29
45
  ## Implementation Details
30
46
 
31
47
  ### General Workflow:
@@ -82,6 +98,28 @@ INSTALLED_APPS = [
82
98
  python manage.py migrate
83
99
  ```
84
100
 
101
+ 4. **Configure Celery to discover block tasks:**
102
+
103
+ In your project's `celery.py` file, add the following to ensure Celery workers can discover your `@block_task` decorated functions:
104
+
105
+ ```python
106
+ from celery import Celery
107
+ from celery.signals import worker_ready
108
+ from django.conf import settings
109
+
110
+ app = Celery('your_project')
111
+ app.config_from_object('django.conf:settings', namespace='CELERY')
112
+ app.autodiscover_tasks()
113
+
114
+ @worker_ready.connect
115
+ def on_worker_ready(**kwargs):
116
+ """Load block tasks when worker starts."""
117
+ from abstract_block_dumper.v1.celery import setup_celery_tasks
118
+ setup_celery_tasks()
119
+ ```
120
+
121
+ > **Important:** Without this step, Celery workers will not recognize your `@block_task` decorated functions, and you'll see "Received unregistered task" errors.
122
+
85
123
  ## Usage
86
124
 
87
125
  ### 1. Define Block Processing Tasks
@@ -93,7 +131,7 @@ Create block processing tasks in `tasks.py` or `block_tasks.py` file inside any
93
131
  ### 3. Start the Block Scheduler
94
132
  Run the scheduler to start processing blocks:
95
133
  ```bash
96
- $ python manage.py block_tasks
134
+ $ python manage.py block_tasks_v1
97
135
  ```
98
136
 
99
137
  This command will:
@@ -112,7 +150,7 @@ See examples below:
112
150
  Use the `@block_task` decorator with lambda conditions to create block processing tasks:
113
151
 
114
152
  ```python
115
- from abstract_block_dumper.decorators import block_task
153
+ from abstract_block_dumper.v1.decorators import block_task
116
154
 
117
155
 
118
156
  # Process every block
@@ -144,7 +182,7 @@ def process_multi_netuid_task(block_number: int, netuid: int):
144
182
  The framework provides a maintenance task to clean up old task records and maintain database performance:
145
183
 
146
184
  ```python
147
- from abstract_block_dumper.tasks import cleanup_old_tasks
185
+ from abstract_block_dumper.v1.tasks import cleanup_old_tasks
148
186
 
149
187
  # Delete tasks older than 7 days (default)
150
188
  cleanup_old_tasks.delay()
@@ -160,13 +198,13 @@ This task deletes all succeeded or unrecoverable failed tasks older than the spe
160
198
  **Option 1: Manual Execution**
161
199
  ```bash
162
200
  # Using Django shell
163
- python manage.py shell -c "from abstract_block_dumper.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
201
+ python manage.py shell -c "from abstract_block_dumper.v1.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
164
202
  ```
165
203
 
166
204
  **Option 2: Cron Job (Recommended - once per day)**
167
205
  ```bash
168
206
  # Add to crontab (daily at 2 AM)
169
- 0 2 * * * cd /path/to/your/project && python manage.py shell -c "from abstract_block_dumper.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
207
+ 0 2 * * * cd /path/to/your/project && python manage.py shell -c "from abstract_block_dumper.v1.tasks import cleanup_old_tasks; cleanup_old_tasks.delay()"
170
208
  ```
171
209
 
172
210
  **Option 3: Celery Beat (Automated Scheduling)**
@@ -210,55 +248,92 @@ BLOCK_DUMPER_MAX_ATTEMPTS = 3 # maximum retry attempts
210
248
  BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 1440 # maximum retry delay (24 hours)
211
249
  ```
212
250
 
213
- ### Configuration Options Reference
214
-
215
- #### Core Settings
216
-
217
- **BITTENSOR_NETWORK** (str, default: `'finney'`) Specifies which [Bittensor network](https://docs.learnbittensor.org/concepts/bittensor-networks) to connect to
218
-
219
- **BLOCK_DUMPER_START_FROM_BLOCK** (str|int|None, default: `None`)
220
- - **Purpose**: Determines the starting block for processing when the scheduler first runs
221
- - **Valid Values**:
222
- - `None`: Resume from the last processed block stored in database
223
- - `'current'`: Start from the current blockchain block (skips historical blocks)
224
- - `int`: Start from a specific block number (e.g., `1000000`)
225
- - **Example**: `BLOCK_DUMPER_START_FROM_BLOCK = 'current'`
226
- - **Performance Impact**: Starting from historical blocks may require significant processing time
227
-
228
- #### Scheduler Settings
229
-
230
- **BLOCK_DUMPER_POLL_INTERVAL** (int, default: `1`)
231
- - **Purpose**: Seconds to wait between checking for new blocks
232
- - **Valid Range**: `1` to `3600` (1 second to 1 hour)
233
- - **Example**: `BLOCK_DUMPER_POLL_INTERVAL = 5`
234
- - **Performance Impact**:
235
- - Lower values (1-2s): Near real-time processing, higher CPU/network usage
236
- - Higher values (10-60s): Reduced load but delayed processing
237
- - Very low values (<1s) may cause rate limiting
238
-
239
- #### Retry and Error Handling Settings
240
-
241
- **BLOCK_DUMPER_MAX_ATTEMPTS** (int, default: `3`)
242
- - **Purpose**: Maximum number of attempts to retry a failed task before giving up
243
- - **Valid Range**: `1` to `10`
244
- - **Example**: `BLOCK_DUMPER_MAX_ATTEMPTS = 5`
245
- - **Performance Impact**: Higher values increase resilience but may delay failure detection
246
-
247
- **BLOCK_TASK_RETRY_BACKOFF** (int, default: `1`)
248
- - **Purpose**: Base number of minutes for exponential backoff retry delays
249
- - **Valid Range**: `1` to `60`
250
- - **Example**: `BLOCK_TASK_RETRY_BACKOFF = 2`
251
- - **Calculation**: Actual delay = `backoff ** attempt_count` minutes
251
+ ## Configuration Options Reference
252
+
253
+ ### `BITTENSOR_NETWORK`
254
+ - **Type:** `str`
255
+ - **Default:** `'finney'`
256
+ - **Description:** Specifies which [Bittensor network](https://docs.learnbittensor.org/concepts/bittensor-networks) to connect to
257
+
258
+ ---
259
+
260
+ ### `BLOCK_DUMPER_START_FROM_BLOCK`
261
+ - **Type:** `str | int | None`
262
+ - **Default:** `None`
263
+ - **Valid Range:** `None`, `'current'`, or any positive integer
264
+ - **Description:** Determines the starting block for processing when the scheduler first runs
265
+ - `None` → Resume from the last processed block stored in database
266
+ - `'current'` → Start from the current blockchain block (skips historical blocks)
267
+ - Integer → Start from a specific block number (e.g., `1000000`)
268
+
269
+ ```python
270
+ BLOCK_DUMPER_START_FROM_BLOCK = 'current'
271
+ ```
272
+
273
+ > **Performance Impact:** Starting from historical blocks may require significant processing time
274
+
275
+ ---
276
+
277
+ ### `BLOCK_DUMPER_POLL_INTERVAL`
278
+ - **Type:** `int`
279
+ - **Default:** `1`
280
+ - **Valid Range:** `1` to `3600` (seconds)
281
+ - **Description:** Seconds to wait between checking for new blocks
282
+
283
+ ```python
284
+ BLOCK_DUMPER_POLL_INTERVAL = 5
285
+ ```
286
+
287
+ > **Performance Impact:**
288
+ > - Lower values (1-2s): Near real-time processing, higher CPU/network usage
289
+ > - Higher values (10-60s): Reduced load but delayed processing
290
+ > - Values at the low end of the range (1s minimum): May cause rate limiting
291
+
292
+ ---
293
+
294
+ ### `BLOCK_DUMPER_MAX_ATTEMPTS`
295
+ - **Type:** `int`
296
+ - **Default:** `3`
297
+ - **Valid Range:** `1` to `10`
298
+ - **Description:** Maximum number of attempts to retry a failed task before giving up
299
+
300
+ ```python
301
+ BLOCK_DUMPER_MAX_ATTEMPTS = 5
302
+ ```
303
+
304
+ > **Performance Impact:** Higher values increase resilience but may delay failure detection
305
+
306
+ ---
307
+
308
+ ### `BLOCK_TASK_RETRY_BACKOFF`
309
+ - **Type:** `int`
310
+ - **Default:** `1`
311
+ - **Valid Range:** `1` to `60` (minutes)
312
+ - **Description:** Base number of minutes for exponential backoff retry delays
313
+ - **Calculation:** Actual delay = `backoff ** attempt_count` minutes
252
314
  - Attempt 1: 2¹ = 2 minutes
253
- - Attempt 2: 2² = 4 minutes
315
+ - Attempt 2: 2² = 4 minutes
254
316
  - Attempt 3: 2³ = 8 minutes
255
- - **Performance Impact**: Lower values retry faster but may overwhelm failing services
256
317
 
257
- **BLOCK_TASK_MAX_RETRY_DELAY_MINUTES** (int, default: `1440`)
258
- - **Purpose**: Maximum delay (in minutes) between retry attempts, caps exponential backoff
259
- - **Valid Range**: `1` to `10080` (1 minute to 1 week)
260
- - **Example**: `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720` # 12 hours max
261
- - **Performance Impact**: Prevents extremely long delays while maintaining backoff benefits
318
+ ```python
319
+ BLOCK_TASK_RETRY_BACKOFF = 2
320
+ ```
321
+
322
+ > **Performance Impact:** Lower values retry faster but may overwhelm failing services
323
+
324
+ ---
325
+
326
+ ### `BLOCK_TASK_MAX_RETRY_DELAY_MINUTES`
327
+ - **Type:** `int`
328
+ - **Default:** `1440` (24 hours)
329
+ - **Valid Range:** `1` to `10080` (1 minute to 1 week)
330
+ - **Description:** Maximum delay (in minutes) between retry attempts, caps exponential backoff
331
+
332
+ ```python
333
+ BLOCK_TASK_MAX_RETRY_DELAY_MINUTES = 720 # 12 hours max
334
+ ```
335
+
336
+ > **Performance Impact:** Prevents extremely long delays while maintaining backoff benefits
262
337
 
263
338
 
264
339
  ## Example Project
@@ -0,0 +1,29 @@
1
+ abstract_block_dumper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ abstract_block_dumper/_version.py,sha256=QlXZ5JTjE_pgpDaeHk0GTExkc75xUZFmd0hA7kGYCJ0,704
3
+ abstract_block_dumper/admin.py,sha256=3J3I_QOKFgfMNpTXW-rTQGO_q5Ls6uNuL0FkPVdIsYg,1654
4
+ abstract_block_dumper/apps.py,sha256=DXATdrjsL3T2IletTbKeD6unr8ScLaxg7wz0nAHTAns,215
5
+ abstract_block_dumper/models.py,sha256=MO9824dmHB6xF3PrFE_RERh7whVjQtS4tt6QA0wSbg0,2022
6
+ abstract_block_dumper/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
+ abstract_block_dumper/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
+ abstract_block_dumper/_internal/discovery.py,sha256=sISOL8vq6rC0pOndrCfWKDZjyYwzzZIChG-BH9mteq0,745
9
+ abstract_block_dumper/_internal/exceptions.py,sha256=jVXQ8b3gneno2XYvO0XisJPMlkAWb6H5u10egIpPJ4k,335
10
+ abstract_block_dumper/_internal/dal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
+ abstract_block_dumper/_internal/dal/django_dal.py,sha256=pBGEFeo_U0ac2Za-dwzJvf04Ng8lP51aR60c_DUrGIw,5426
12
+ abstract_block_dumper/_internal/dal/memory_registry.py,sha256=yMNF7jrvWGF-S1pqyR2zOCNLWwrdsImcvV6cGqu1wYE,2972
13
+ abstract_block_dumper/_internal/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
+ abstract_block_dumper/_internal/services/block_processor.py,sha256=wB-zeft3Ys8zmqCdF_v12rXd6umNWvGfy2Ts6XSGkL8,8132
15
+ abstract_block_dumper/_internal/services/executor.py,sha256=ZZmQ9TzoNEoAE4amiU8lHRsTfP7YusUkWXasrArfo2g,1806
16
+ abstract_block_dumper/_internal/services/scheduler.py,sha256=NrT3t0oVR-osf50tWWqcxojkVkxhd2PHsk0PuXD5RMc,3593
17
+ abstract_block_dumper/_internal/services/utils.py,sha256=Y8b8KdKn53mcuWchw6b5EJq9ipO4p1FFf6g_Fpbg7cQ,1273
18
+ abstract_block_dumper/management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
19
+ abstract_block_dumper/management/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
+ abstract_block_dumper/management/commands/block_tasks_v1.py,sha256=jSi04ahIKYwlm_dNKCUGL_cmALv1iP-ZjfXrmz0pn-4,880
21
+ abstract_block_dumper/migrations/0001_initial.py,sha256=ImPHC3G6kPkq4Xn_4YVAm4Labh1Xi7PkCRszYRGpTiI,2298
22
+ abstract_block_dumper/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
23
+ abstract_block_dumper/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
+ abstract_block_dumper/v1/celery.py,sha256=X4IqVs5i6ZpyY7fy1SqMZgsZy4SXP-jK2qG-FYnjU38,1722
25
+ abstract_block_dumper/v1/decorators.py,sha256=i-CVanS-yiBMKCEOLjGmwmopefWePhyMdodlWNIfJFg,7002
26
+ abstract_block_dumper/v1/tasks.py,sha256=u9iMYdDUqzYT3yPrNwZecHnlweZ3yFipV9BcIWHCbus,2647
27
+ abstract_block_dumper-0.0.4.dist-info/METADATA,sha256=wBsIWl5439xp-EYInzV-mQaAKIoa_CDP97FJoeaB-ng,12916
28
+ abstract_block_dumper-0.0.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
29
+ abstract_block_dumper-0.0.4.dist-info/RECORD,,
@@ -1,25 +0,0 @@
1
- abstract_block_dumper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- abstract_block_dumper/_version.py,sha256=qf6R-J7-UyuABBo8c0HgaquJ8bejVbf07HodXgwAwgQ,704
3
- abstract_block_dumper/admin.py,sha256=3J3I_QOKFgfMNpTXW-rTQGO_q5Ls6uNuL0FkPVdIsYg,1654
4
- abstract_block_dumper/apps.py,sha256=DXATdrjsL3T2IletTbKeD6unr8ScLaxg7wz0nAHTAns,215
5
- abstract_block_dumper/decorators.py,sha256=lV1ueIlEbBNojnXVH5GQiRCbck3-SQgtWOil5OqeTHo,7061
6
- abstract_block_dumper/discovery.py,sha256=kZlb8y-0ltJE-L-1GLxZ_xlziibY8AjggvHJ9sxsScw,728
7
- abstract_block_dumper/exceptions.py,sha256=EunFH-H5eXNNkKl2CvHlhZ2wvtdry969Gle-CZc7YM0,315
8
- abstract_block_dumper/models.py,sha256=l229tar4FdQ52eETLKGeskgkXHWa4ealF6DWbG8M4Mc,2012
9
- abstract_block_dumper/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
- abstract_block_dumper/tasks.py,sha256=8ppGWxML3krVdrS_08WnKuCpERRhB_6DIyVEkpYZMrw,2638
11
- abstract_block_dumper/dal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
12
- abstract_block_dumper/dal/django_dal.py,sha256=unAA4Mt5dBBaUhvyezfyC0VtWMD6Ru79NyjKaOMNNSw,5359
13
- abstract_block_dumper/dal/memory_registry.py,sha256=rgU2CYGm2MHPgSZefgr-kuLxOtPu5wxINa3Y5ELgMUo,3029
14
- abstract_block_dumper/management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
- abstract_block_dumper/management/commands/block_tasks.py,sha256=dEfFnoZCIIDsrNL5vRPtIDrkpcJk36yev_aoGAScgoQ,758
16
- abstract_block_dumper/migrations/0001_initial.py,sha256=ImPHC3G6kPkq4Xn_4YVAm4Labh1Xi7PkCRszYRGpTiI,2298
17
- abstract_block_dumper/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
18
- abstract_block_dumper/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
19
- abstract_block_dumper/services/block_processor.py,sha256=4guYwtVYh-N1UewiqVN7xG5pM56adyGe8vPG_kCDmQI,8072
20
- abstract_block_dumper/services/executor.py,sha256=TDbrtVGiz7GNGJwHYB6ZqqhrrTDDL7JGzxOehpF-QTY,1786
21
- abstract_block_dumper/services/scheduler.py,sha256=zKY24zSwjcQSVk3wt39GBurSNXkfylWsdV7Mgmv1RO8,3443
22
- abstract_block_dumper/services/utils.py,sha256=Iqa-9xhNxOCnvSWjGBclOUvmO4qsUhhievUllVh82I4,1286
23
- abstract_block_dumper-0.0.1.dist-info/METADATA,sha256=g26Qm3r1ZwH5OCCVCndRdjwnn-VY4YdIxwQyGKuVHXA,11022
24
- abstract_block_dumper-0.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
25
- abstract_block_dumper-0.0.1.dist-info/RECORD,,
File without changes