eth-streams-py 0.1.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. eth_streams_py-0.1.4/PKG-INFO +22 -0
  2. eth_streams_py-0.1.4/pyproject.toml +38 -0
  3. eth_streams_py-0.1.4/setup.cfg +4 -0
  4. eth_streams_py-0.1.4/setup.py +13 -0
  5. eth_streams_py-0.1.4/src/eth_streams/.DS_Store +0 -0
  6. eth_streams_py-0.1.4/src/eth_streams/__init__.py +50 -0
  7. eth_streams_py-0.1.4/src/eth_streams/context.py +20 -0
  8. eth_streams_py-0.1.4/src/eth_streams/coordinator.py +102 -0
  9. eth_streams_py-0.1.4/src/eth_streams/db.py +29 -0
  10. eth_streams_py-0.1.4/src/eth_streams/eth/__init__.py +11 -0
  11. eth_streams_py-0.1.4/src/eth_streams/eth/blocks/__init__.py +6 -0
  12. eth_streams_py-0.1.4/src/eth_streams/eth/blocks/publisher.py +30 -0
  13. eth_streams_py-0.1.4/src/eth_streams/eth/blocks/source.py +60 -0
  14. eth_streams_py-0.1.4/src/eth_streams/eth/eth_call_vertex.py +32 -0
  15. eth_streams_py-0.1.4/src/eth_streams/eth/logs/__init__.py +18 -0
  16. eth_streams_py-0.1.4/src/eth_streams/eth/logs/add_block.py +56 -0
  17. eth_streams_py-0.1.4/src/eth_streams/eth/logs/address_filter.py +28 -0
  18. eth_streams_py-0.1.4/src/eth_streams/eth/logs/contract_event_vertex.py +43 -0
  19. eth_streams_py-0.1.4/src/eth_streams/eth/logs/db_loader.py +45 -0
  20. eth_streams_py-0.1.4/src/eth_streams/eth/logs/signals.py +6 -0
  21. eth_streams_py-0.1.4/src/eth_streams/eth/logs/sources/__init__.py +7 -0
  22. eth_streams_py-0.1.4/src/eth_streams/eth/logs/sources/backfill.py +81 -0
  23. eth_streams_py-0.1.4/src/eth_streams/eth/logs/sources/subscriber.py +73 -0
  24. eth_streams_py-0.1.4/src/eth_streams/eth/logs/vertex/__init__.py +7 -0
  25. eth_streams_py-0.1.4/src/eth_streams/eth/logs/vertex/block_transform.py +24 -0
  26. eth_streams_py-0.1.4/src/eth_streams/eth/logs/vertex/event_vertex.py +59 -0
  27. eth_streams_py-0.1.4/src/eth_streams/logger.py +3 -0
  28. eth_streams_py-0.1.4/src/eth_streams/models/__init__.py +9 -0
  29. eth_streams_py-0.1.4/src/eth_streams/models/block.py +12 -0
  30. eth_streams_py-0.1.4/src/eth_streams/models/checkpoint.py +15 -0
  31. eth_streams_py-0.1.4/src/eth_streams/models/dex.py +43 -0
  32. eth_streams_py-0.1.4/src/eth_streams/models/events.py +104 -0
  33. eth_streams_py-0.1.4/src/eth_streams/models/transaction.py +30 -0
  34. eth_streams_py-0.1.4/src/eth_streams/pipeline/__init__.py +11 -0
  35. eth_streams_py-0.1.4/src/eth_streams/pipeline/pipeline.py +126 -0
  36. eth_streams_py-0.1.4/src/eth_streams/pipeline/stage.py +38 -0
  37. eth_streams_py-0.1.4/src/eth_streams/pipeline/subscriber.py +34 -0
  38. eth_streams_py-0.1.4/src/eth_streams/pipeline/transformer.py +22 -0
  39. eth_streams_py-0.1.4/src/eth_streams/storage/__init__.py +8 -0
  40. eth_streams_py-0.1.4/src/eth_streams/storage/publisher.py +20 -0
  41. eth_streams_py-0.1.4/src/eth_streams/types/__init__.py +26 -0
  42. eth_streams_py-0.1.4/src/eth_streams/types/address.py +5 -0
  43. eth_streams_py-0.1.4/src/eth_streams/types/annotation.py +13 -0
  44. eth_streams_py-0.1.4/src/eth_streams/types/base.py +17 -0
  45. eth_streams_py-0.1.4/src/eth_streams/types/batch.py +24 -0
  46. eth_streams_py-0.1.4/src/eth_streams/types/callback.py +44 -0
  47. eth_streams_py-0.1.4/src/eth_streams/types/envelope.py +17 -0
  48. eth_streams_py-0.1.4/src/eth_streams/types/events.py +5 -0
  49. eth_streams_py-0.1.4/src/eth_streams/types/measure.py +0 -0
  50. eth_streams_py-0.1.4/src/eth_streams/types/monitor.py +29 -0
  51. eth_streams_py-0.1.4/src/eth_streams/types/sink.py +52 -0
  52. eth_streams_py-0.1.4/src/eth_streams/types/source.py +54 -0
  53. eth_streams_py-0.1.4/src/eth_streams/types/task.py +20 -0
  54. eth_streams_py-0.1.4/src/eth_streams/types/topic.py +71 -0
  55. eth_streams_py-0.1.4/src/eth_streams/types/vertex.py +45 -0
  56. eth_streams_py-0.1.4/src/eth_streams/utils/__init__.py +11 -0
  57. eth_streams_py-0.1.4/src/eth_streams/utils/db.py +32 -0
  58. eth_streams_py-0.1.4/src/eth_streams/utils/expiring_dict.py +236 -0
  59. eth_streams_py-0.1.4/src/eth_streams/utils/implicits.py +7 -0
  60. eth_streams_py-0.1.4/src/eth_streams/utils/iterator.py +61 -0
  61. eth_streams_py-0.1.4/src/eth_streams/utils/model.py +10 -0
  62. eth_streams_py-0.1.4/src/eth_streams/workers/__init__.py +20 -0
  63. eth_streams_py-0.1.4/src/eth_streams/workers/batcher.py +52 -0
  64. eth_streams_py-0.1.4/src/eth_streams/workers/combinator.py +29 -0
  65. eth_streams_py-0.1.4/src/eth_streams/workers/counter.py +28 -0
  66. eth_streams_py-0.1.4/src/eth_streams/workers/echo.py +34 -0
  67. eth_streams_py-0.1.4/src/eth_streams/workers/iterator.py +15 -0
  68. eth_streams_py-0.1.4/src/eth_streams/workers/range.py +17 -0
  69. eth_streams_py-0.1.4/src/eth_streams/workers/skipper.py +20 -0
  70. eth_streams_py-0.1.4/src/eth_streams/workers/throttler.py +60 -0
  71. eth_streams_py-0.1.4/src/eth_streams/workers/timer.py +44 -0
  72. eth_streams_py-0.1.4/src/eth_streams_py.egg-info/PKG-INFO +22 -0
  73. eth_streams_py-0.1.4/src/eth_streams_py.egg-info/SOURCES.txt +75 -0
  74. eth_streams_py-0.1.4/src/eth_streams_py.egg-info/dependency_links.txt +1 -0
  75. eth_streams_py-0.1.4/src/eth_streams_py.egg-info/requires.txt +22 -0
  76. eth_streams_py-0.1.4/src/eth_streams_py.egg-info/top_level.txt +1 -0
  77. eth_streams_py-0.1.4/tests/test_pass.py +16 -0
@@ -0,0 +1,22 @@
1
+ Metadata-Version: 2.1
2
+ Name: eth-streams-py
3
+ Version: 0.1.4
4
+ Requires-Python: >=3.10
5
+ Requires-Dist: eth_protocols_py
6
+ Requires-Dist: eth_typeshed_py
7
+ Requires-Dist: eth_rpc_py
8
+ Requires-Dist: tortoise-orm
9
+ Provides-Extra: lint
10
+ Requires-Dist: mypy; extra == "lint"
11
+ Requires-Dist: ruff; extra == "lint"
12
+ Provides-Extra: test
13
+ Requires-Dist: pytest==7.4.1; extra == "test"
14
+ Requires-Dist: pytest-cov==4.1.0; extra == "test"
15
+ Requires-Dist: coverage[toml]==7.3.1; extra == "test"
16
+ Provides-Extra: build
17
+ Requires-Dist: build[virtualenv]==1.0.3; extra == "build"
18
+ Provides-Extra: dev
19
+ Requires-Dist: tox; extra == "dev"
20
+ Requires-Dist: eth-streams-py[lint]; extra == "dev"
21
+ Requires-Dist: eth-streams-py[test]; extra == "dev"
22
+ Requires-Dist: eth-streams-py[build]; extra == "dev"
@@ -0,0 +1,38 @@
1
+ [build-system]
2
+ requires = ["setuptools>=68", "setuptools_scm[toml]>=8"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "eth-streams-py"
7
+ requires-python = ">=3.10"
8
+ dynamic = ["version"]
9
+ dependencies = [
10
+ "eth_protocols_py",
11
+ "eth_typeshed_py",
12
+ "eth_rpc_py",
13
+ "tortoise-orm",
14
+ ]
15
+
16
+ # Enables the usage of setuptools_scm
17
+ [tool.setuptools_scm]
18
+ root = "../../"
19
+
20
+ [project.optional-dependencies]
21
+ lint = [
22
+ "mypy",
23
+ "ruff",
24
+ ]
25
+ test = [
26
+ "pytest==7.4.1",
27
+ "pytest-cov==4.1.0",
28
+ "coverage[toml]==7.3.1",
29
+ ]
30
+ build = [
31
+ "build[virtualenv]==1.0.3",
32
+ ]
33
+ dev = [
34
+ "tox",
35
+ "eth-streams-py[lint]",
36
+ "eth-streams-py[test]",
37
+ "eth-streams-py[build]",
38
+ ]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,13 @@
1
"""Minimal setup.py shim; the real metadata lives in pyproject.toml."""

import os

from setuptools import setup


def get_version():
    """Return the package version from the PACKAGE_VERSION env var.

    Falls back to "0.0.0" when the variable is unset (e.g. local dev builds).
    """
    return os.getenv("PACKAGE_VERSION", "0.0.0")


setup(
    name="eth-streams-py",
    version=get_version(),
)
@@ -0,0 +1,50 @@
1
+ # flake8: noqa
2
+ from .context import Context
3
+ from .coordinator import Coordinator, CoordinatorContext
4
+ from .types import (
5
+ Address,
6
+ Callback,
7
+ Envelope,
8
+ FilterTopic,
9
+ Monitor,
10
+ Sink,
11
+ Source,
12
+ Task,
13
+ Topic,
14
+ )
15
+
16
+ Coordinator.model_rebuild()
17
+
18
+ from .eth import BlockSource, EventBackfillSource, ReorgError
19
+ from .pipeline import Pipeline, Stage, Transformer
20
+ from .utils import init_db
21
+ from .workers import Batch, Batcher, Combinator, Counter, Echo, Skipper, Throttler
22
+
23
+ __all__ = [
24
+ "Address",
25
+ "Batch",
26
+ "Batcher",
27
+ "BlockSource",
28
+ "Callback",
29
+ "Combinator",
30
+ "Context",
31
+ "Coordinator",
32
+ "CoordinatorContext",
33
+ "Counter",
34
+ "Echo",
35
+ "Envelope",
36
+ "EventBackfillSource",
37
+ "FilterTopic",
38
+ "Monitor",
39
+ "Pipeline",
40
+ "ReorgError",
41
+ "Source",
42
+ "Sink",
43
+ "Skipper",
44
+ "Stage",
45
+ "Task",
46
+ "Throttler",
47
+ "Topic",
48
+ "Transformer",
49
+ "init_db",
50
+ ]
@@ -0,0 +1,20 @@
1
+ from abc import ABC
2
+ from typing import Any
3
+
4
+ from pydantic import BaseModel
5
+
6
+ # TODO: Maintain staged events before commiting them
7
+ # This allows for reorgs to be handled gracefully
8
+
9
+
10
class Context(ABC, BaseModel):
    """
    This should store all state changes
    """

    @classmethod
    def load(cls, json_data: str) -> "Context":
        """Reconstruct a Context from its JSON serialization (pydantic)."""
        return cls.model_validate_json(json_data)

    def dump(self) -> dict[str, Any]:
        """Serialize this context's state to a plain dict."""
        return self.model_dump()
@@ -0,0 +1,102 @@
1
+ import asyncio
2
+ from collections import deque
3
+ from typing import TYPE_CHECKING, Any
4
+
5
+ from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
6
+ from tortoise import Tortoise
7
+
8
+ if TYPE_CHECKING:
9
+ from .types import Callback, Sink, Task
10
+
11
+
12
class Coordinator(BaseModel):
    """Registry of tasks, sinks and callbacks that drives their execution
    and exposes a shared concurrency semaphore.

    Usable as a context manager: entering pushes it onto the
    CoordinatorContext stack, exiting pops it off again.
    """

    model_config = ConfigDict(
        arbitrary_types_allowed=True,
    )

    callbacks: dict[str, "Callback"] = Field(default_factory=dict)
    tasks: dict[str, "Task"] = Field(default_factory=dict)
    sinks: dict[str, "Sink"] = Field(default_factory=dict)
    concurrency: int = Field(default=10)
    _semaphore: asyncio.Semaphore = PrivateAttr()

    def model_post_init(self, __context):
        # The semaphore bounds concurrent callback work across the coordinator.
        self._semaphore = asyncio.Semaphore(self.concurrency)

    @property
    def semaphore(self):
        return self._semaphore

    def add_task(self, task: "Task"):
        """Register a task; task names must be unique."""
        if task.name in self.tasks:
            raise NameError("task already exists")
        self.tasks[task.name] = task

    def add_sink(self, sink: "Sink"):
        """Register a sink; re-adding the very same sink object is a no-op."""
        if sink.name in self.sinks:
            if self.sinks[sink.name] != sink:
                raise NameError(f"Sink already exists: {sink.name}")
        self.sinks[sink.name] = sink

    def load_callbacks(self):
        print("Loading all the strategies")

    def load_callback(self, callback: "Callback"):
        """Register a callback; callback names must be unique."""
        if callback.name in self.callbacks:
            raise NameError("Callback already exists")
        self.callbacks[callback.name] = callback

    def on_safe_shutdown(self):
        # TODO: callbacks are not used yet
        # NOTE(review): tasks are fire-and-forget here — no references are
        # kept, so they may be garbage-collected before finishing; confirm.
        for callback in self.callbacks.values():
            asyncio.create_task(callback.shutdown(self.semaphore))

    @property
    def completed(self):
        """True once every registered sink has stopped."""
        return all(c.stopped for c in self.sinks.values())

    async def run(self):
        """Run all registered tasks concurrently until they all complete."""
        tasks = [asyncio.create_task(task.run()) for task in self.tasks.values()]
        await asyncio.gather(*tasks)

    async def close(self):
        await Tortoise.close_connections()

    def __enter__(self):
        CoordinatorContext.push_context_managed_coordinator(self)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # BUG FIX: this previously pushed `self` a second time, leaking stack
        # entries and leaving this coordinator "current" after the `with`
        # block ended; exiting must pop instead.
        CoordinatorContext.pop_context_managed_coordinator()
74
+
75
+
76
class CoordinatorContext:
    """Process-wide stack of context-managed coordinators plus shared implicits."""

    _context_managed_coordinators: deque[Coordinator] = deque()
    implicits: dict[str, Any] = {}

    @classmethod
    def set_implicits(cls, **kwargs):
        """Merge the given key/value pairs into the shared implicit values."""
        cls.implicits.update(kwargs)

    @classmethod
    def clear_implicits(cls):
        cls.implicits = {}

    @classmethod
    def push_context_managed_coordinator(cls, dag: Coordinator):
        cls._context_managed_coordinators.appendleft(dag)

    @classmethod
    def pop_context_managed_coordinator(cls) -> Coordinator | None:
        # BUG FIX: the annotation promises Coordinator | None, but popleft()
        # raised IndexError on an empty stack; return None instead, matching
        # get_current_coordinator's behavior.
        try:
            return cls._context_managed_coordinators.popleft()
        except IndexError:
            return None

    @classmethod
    def get_current_coordinator(cls) -> Coordinator | None:
        """Return the most recently pushed coordinator, or None if empty."""
        try:
            return cls._context_managed_coordinators[0]
        except IndexError:
            return None
@@ -0,0 +1,29 @@
1
+ from importlib.util import find_spec
2
+ from typing import Optional
3
+
4
+ from tortoise import Tortoise
5
+
6
+
7
+ async def init_db(
8
+ load_schema: bool = False,
9
+ db_url: str = "sqlite://db.sqlite3",
10
+ modules: Optional[list[str]] = None,
11
+ ):
12
+ if not modules:
13
+ spec = find_spec("__main__")
14
+ if spec:
15
+ modules = [spec.name.replace("__main__", "models")]
16
+ else:
17
+ modules = []
18
+
19
+ await Tortoise.init(
20
+ db_url=db_url,
21
+ modules={
22
+ "models": [
23
+ "eth_streams.models",
24
+ *modules,
25
+ ],
26
+ },
27
+ )
28
+ if load_schema:
29
+ await Tortoise.generate_schemas()
@@ -0,0 +1,11 @@
1
+ from .blocks.source import BlockSource, ReorgError
2
+ from .logs import ContractEventSink, EventBackfillSource, LogEventVertex, LogSubscriber
3
+
4
+ __all__ = [
5
+ "BlockSource",
6
+ "ContractEventSink",
7
+ "EventBackfillSource",
8
+ "LogSubscriber",
9
+ "LogEventVertex",
10
+ "ReorgError",
11
+ ]
@@ -0,0 +1,6 @@
1
+ from .source import BlockSource, ReorgError
2
+
3
+ __all__ = [
4
+ "BlockSource",
5
+ "ReorgError",
6
+ ]
@@ -0,0 +1,30 @@
1
+ from typing import Generic, TypeVar
2
+
3
+ from eth_rpc import Block
4
+ from eth_streams.models import Block as BlockModel
5
+ from eth_streams.types import Envelope, Sink
6
+
7
+ T = TypeVar("T", bound=Block)
8
+
9
+
10
class BlockSink(Sink[list[Block]], Generic[T]):
    """Persists batches of incoming blocks as BlockModel rows."""

    async def notify(self, envelope: Envelope[list[Block]]):
        """Converts a log to a contract event and writes it to the database"""
        # TODO: add batching logic
        rows = [
            BlockModel(
                number=blk.number,
                timestamp=blk.timestamp,
                chain_id=blk.network.chain_id,
                hash=blk.hash,
                parent_block_hash=blk.parent_hash,
                hot_block=False,
            )
            for blk in envelope.message
        ]
        if rows:
            await BlockModel.bulk_create(
                rows,
                ignore_conflicts=True,
            )
@@ -0,0 +1,60 @@
1
+ from collections.abc import AsyncIterator
2
+
3
+ from eth_rpc import Block, get_current_network
4
+ from eth_rpc.types import BLOCK_STRINGS, Network
5
+ from eth_streams.types import Source, Topic
6
+ from eth_streams.utils import ExpiringDict, get_implicit
7
+ from pydantic import BaseModel, ConfigDict, Field
8
+
9
+
10
class ReorgError(BaseModel):
    # Emitted on the reorg topic when a chain reorganization is detected;
    # block_number is the height from which re-indexing should resume.
    block_number: int
12
+
13
+
14
class BlockSource(Source[ReorgError | Block], BaseModel):
    """Streams blocks from a network, emitting a ReorgError (on the reorg
    topic) whenever a block's parent hash does not match the stored parent."""

    __name__ = "block-source"

    model_config = ConfigDict(arbitrary_types_allowed=True)
    reorg_topic: Topic[ReorgError]

    network: Network = Field(default_factory=get_current_network)
    start_block: int | BLOCK_STRINGS = Field(
        default_factory=lambda: get_implicit("start_block", "earliest")
    )
    # How many blocks to rewind and re-index after a detected reorg.
    reorg_distance: int = Field(5)
    # Recently seen blocks keyed by number, used for parent-hash checks.
    history: ExpiringDict[int, Block] = Field(
        default_factory=lambda: ExpiringDict(100, 12 * 100)
    )
    restart_point: int | None = Field(None)

    @property
    def block_topic(self):
        """Blocks are published on the source's default topic."""
        return self.default_topic

    def __class_getitem__(cls, network: Network):
        # BUG FIX: this previously returned None, so `BlockSource[network]`
        # evaluated to None and could not be instantiated. Record the network
        # on the class and return the class itself.
        cls.network = network
        return cls

    async def _run(self) -> AsyncIterator[tuple[Topic, ReorgError | Block]]:
        # Load the block preceding the start point so the first parent-hash
        # check has an entry in history to compare against.
        if self.start_block == "latest":
            latest = await Block[self.network].get_number()
            prev_block = await Block[self.network].load_by_number(
                block_number=latest - 1
            )
        else:
            # NOTE(review): assumes start_block is an int here; any other
            # BLOCK_STRINGS value (e.g. "earliest") would break on the
            # arithmetic below — confirm intended inputs.
            prev_block = await Block[self.network].load_by_number(
                block_number=self.start_block - 1
            )
        self.history[prev_block.number] = prev_block
        current_block: int = prev_block.number + 1

        while True:
            async for block in Block[self.network].subscribe_from(
                start_block=current_block
            ):
                self.history[block.number] = block
                # NOTE(review): if the parent entry has expired from history
                # this lookup raises — confirm the ExpiringDict TTL always
                # covers the subscription cadence.
                if self.history[block.number - 1].hash != block.parent_hash:
                    # go back the reorg_distance to reindex those blocks
                    current_block = block.number - self.reorg_distance
                    yield (self.reorg_topic, ReorgError(block_number=current_block))
                    break
                yield (self.block_topic, block)
@@ -0,0 +1,32 @@
1
+ from abc import abstractmethod
2
+ from collections.abc import AsyncIterator
3
+ from typing import Any, Generic, TypeVar, final
4
+
5
+ from eth_rpc import ContractFunc
6
+ from eth_streams.types import Envelope, Topic, Vertex
7
+
8
+ ArgType = TypeVar("ArgType")
9
+ ResponseType = TypeVar("ResponseType")
10
+ U = TypeVar("U")
11
+
12
+
13
class EthCallVertex(Vertex[Any, U], Generic[ArgType, ResponseType, U]):
    """Template vertex: performs an eth_call per envelope, and emits a
    subclass-shaped response only when a subclass-defined condition holds."""

    # The contract function to call and the (fixed) arguments to call it with.
    func: ContractFunc[ArgType, ResponseType]
    args: ArgType

    @final
    async def transform(
        self, envelope: Envelope[Any]
    ) -> AsyncIterator[tuple[Topic[U], U]]:
        # Execute the call, then delegate filtering and shaping to subclasses.
        result = await self.func(self.args)
        if await self.conditions(envelope, result):
            response = await self.modify(envelope, result)
            yield self.default_topic, response

    @abstractmethod
    async def conditions(self, envelope: Envelope[Any], result: ResponseType) -> bool:
        """Specify what condition needs to be met by the eth_call to alert the user"""

    @abstractmethod
    async def modify(self, envelope: Envelope[Any], result: ResponseType) -> U:
        """Specify what to return if the condition is met"""
@@ -0,0 +1,18 @@
1
+ from .add_block import AddBlockVertex
2
+ from .contract_event_vertex import ContractEventSink
3
+ from .db_loader import DBLoader
4
+ from .signals import AddAddress, RemoveAddress
5
+ from .sources import EventBackfillSource, LogSubscriber
6
+ from .vertex import BlockNumberToLogsVertex, LogEventVertex
7
+
8
+ __all__ = [
9
+ "AddAddress",
10
+ "AddBlockVertex",
11
+ "BlockNumberToLogsVertex",
12
+ "ContractEventSink",
13
+ "DBLoader",
14
+ "EventBackfillSource",
15
+ "LogEventVertex",
16
+ "LogSubscriber",
17
+ "RemoveAddress",
18
+ ]
@@ -0,0 +1,56 @@
1
+ from abc import abstractmethod
2
+ from collections.abc import AsyncIterator
3
+ from typing import ClassVar, Generic, TypeVar
4
+
5
+ from eth_rpc import Block, Log
6
+ from eth_rpc.types import Network
7
+ from eth_streams.types import Envelope, Topic, Vertex
8
+ from pydantic import BaseModel, Field
9
+
10
+ T = TypeVar("T")
11
+
12
+
13
class BlockWrap(BaseModel, Generic[T]):
    # Pairs an arbitrary payload with the Block it belongs to.
    data: T
    block: Block
16
+
17
+
18
class AddBlockVertex(Vertex[Log, BlockWrap[T]]):
    """
    A singleton class, depending on whether `with_tx_data` is set to True or False.
    This wraps the transaction into a BaseModel with the Block data.
    """

    # One cached instance per with_tx_data flag value.
    __instances: ClassVar[dict[bool, "AddBlockVertex"]] = {}

    # Cache of fetched blocks keyed by (block_number, network).
    blocks: dict[tuple[int, Network], Block] = Field(default_factory=dict)
    with_tx_data: bool = Field(default=True)

    def __new__(cls, with_tx_data=True, **kwargs):
        # Singleton per flag: reuse the existing instance when available.
        if cls.__instances.get(with_tx_data) is None:
            cls.__instances[with_tx_data] = super().__new__(cls)
        return cls.__instances[with_tx_data]

    def __init__(self, **kwargs):
        # Only initialize the first time this singleton is constructed;
        # a reused instance already has a populated __dict__.
        if not self.__dict__:
            super().__init__(**kwargs)

    @abstractmethod
    def get_block_number(self, envelope: Envelope[T]) -> tuple[int, Network]:
        # NOTE(review): annotation corrected from tuple[Block, Network] —
        # the result is unpacked as (block_number, network) below and used
        # as a key of `blocks: dict[tuple[int, Network], Block]`.
        ...

    async def transform(
        self, envelope: Envelope[T]
    ) -> AsyncIterator[tuple[Topic[BlockWrap[T]], BlockWrap[T]]]:
        """Fetch (and cache) the block for the envelope, emit it as BlockWrap."""
        key = self.get_block_number(envelope)
        block_number, network = key

        # Load the block lazily, once per (number, network) pair.
        if key not in self.blocks:
            self.blocks[key] = await Block[network].load_by_number(block_number)

        yield (
            self.default_topic,
            BlockWrap(
                data=envelope.message,
                block=self.blocks[key],
            ),
        )
@@ -0,0 +1,28 @@
1
+ from collections.abc import AsyncIterator
2
+ from typing import cast
3
+
4
+ from eth_rpc import Log
5
+ from eth_streams.types import Address, Batch, Envelope, Topic, Vertex
6
+ from eth_typing import HexAddress, HexStr
7
+ from pydantic import Field
8
+
9
+
10
class AddressFilterVertex(Vertex[Log | Batch[Log] | Address, Log | Batch[Log]]):
    """Forwards only logs whose address is in the tracked set; an incoming
    Address message adds that address to the set instead."""

    addresses: set[HexAddress] = Field(default_factory=set)

    async def transform(
        self, envelope: Envelope[Log | Batch[Log] | Address]
    ) -> AsyncIterator[tuple[Topic[Log | Batch[Log]], Log | Batch[Log]]]:
        message = envelope.message
        if isinstance(message, Address):
            # Control message: start tracking this address.
            self.addresses.add(HexAddress(HexStr(message)))
        elif isinstance(message, Log):
            # Single log: pass through only when its address is tracked.
            if message.address in self.addresses:
                yield (self.default_topic, message)
        elif isinstance(message, Batch):
            # Batch of logs: forward a filtered batch, only when non-empty.
            filtered = Batch[Log]()
            for entry in message:
                if cast(Log, entry).address in self.addresses:
                    filtered.append(entry)
            if len(filtered) > 0:
                yield (self.default_topic, filtered)
@@ -0,0 +1,43 @@
1
+ import asyncio
2
+ from typing import ClassVar, Generic, Optional, TypeVar
3
+
4
+ from eth_rpc import EventData
5
+ from eth_streams.models import ContractEvent
6
+ from pydantic import BaseModel
7
+
8
+ from .db_loader import DBLoader
9
+
10
+ T = TypeVar("T", bound=BaseModel)
11
+
12
+
13
class ContractEventSink(DBLoader[T, ContractEvent], Generic[T]):
    """Singleton DBLoader that persists decoded events as ContractEvent rows."""

    __instance: ClassVar[Optional["ContractEventSink"]] = None

    lock: ClassVar[asyncio.Lock] = asyncio.Lock()
    model: type[ContractEvent] = ContractEvent

    def __new__(cls, **kwargs):
        # this allows us to create a singleton
        if cls.__instance is None:
            cls.__instance = super().__new__(cls)
        return cls.__instance

    def __init__(self, **kwargs):
        # Only run initialization the first time; a reused singleton
        # instance already has a populated __dict__.
        if not self.__dict__:
            super().__init__(**kwargs)

    def _convert(self, event_data: EventData[T]) -> ContractEvent:
        """Map one decoded EventData onto a ContractEvent ORM row."""
        return ContractEvent(
            chain=event_data.network.chain_id,
            address=event_data.log.address,
            block_number=event_data.log.block_number,
            block_hash=event_data.log.block_hash,
            transaction_index=event_data.log.transaction_index,
            transaction_hash=event_data.log.transaction_hash,
            log_index=event_data.log.log_index,
            name=event_data.name,
            # Logs with no topics (anonymous events) store an empty topic0.
            topic0=event_data.log.topics[0] if len(event_data.log.topics) > 0 else "",
            event_type=event_data.log.__class__.__name__,
            event_data=event_data.event.model_dump(),
            confirmed=True,
        )
@@ -0,0 +1,45 @@
1
+ import asyncio
2
+ import sqlite3
3
+ from abc import abstractmethod
4
+ from typing import ClassVar, Generic, TypeVar
5
+
6
+ from eth_rpc import EventData, get_current_network
7
+ from eth_rpc.types import Network
8
+ from eth_streams.logger import logger
9
+ from eth_streams.types import Envelope, Sink
10
+ from eth_streams.workers import Batch
11
+ from pydantic import Field
12
+ from tortoise import Model
13
+
14
+ T = TypeVar("T", bound=EventData)
15
+ U = TypeVar("U", bound=Model)
16
+
17
+
18
class DBLoader(Sink[EventData[T] | list[EventData[T]]], Generic[T, U]):
    """Sink that converts EventData messages into ORM rows and bulk-inserts them."""

    network: Network = Field(default_factory=get_current_network)
    lock: ClassVar[asyncio.Lock] = asyncio.Lock()
    # Concrete ORM model the converted rows are inserted into.
    model: type[U]

    @abstractmethod
    def _convert(self, event_data: EventData[T]) -> U:
        """Convert the input type to the model"""

    async def _notify(self, envelope: Envelope[EventData[T] | list[EventData[T]]]):
        """Converts a log to a contract event and writes it to the database"""
        events = envelope.message
        # TODO: add batching logic
        batch: list[U] = []
        if isinstance(events, EventData):
            batch.append(self._convert(events))
        elif isinstance(events, (Batch, list)):
            # BUG FIX: plain lists (the annotated input type) were previously
            # dropped silently because only Batch was checked.
            batch = [self._convert(event_data) for event_data in events]
        if batch:
            logger.debug(f"WRITING BATCH: {len(batch)}")
            # Conflicting (duplicate) rows are skipped so re-processing the
            # same block range stays idempotent.
            await self.model.bulk_create(
                batch,
                ignore_conflicts=True,
            )
@@ -0,0 +1,6 @@
1
class AddAddress(str):
    """Add an address to the event filter"""


class RemoveAddress(str):
    """Remove an address from the event filter"""
@@ -0,0 +1,7 @@
1
+ from .backfill import EventBackfillSource
2
+ from .subscriber import LogSubscriber
3
+
4
+ __all__ = [
5
+ "EventBackfillSource",
6
+ "LogSubscriber",
7
+ ]