zexus 1.7.1 → 1.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/package.json +1 -1
- package/src/__init__.py +7 -0
- package/src/zexus/__init__.py +1 -1
- package/src/zexus/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/capability_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/debug_sanitizer.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/environment.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/error_reporter.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/input_validation.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/lexer.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/module_cache.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/module_manager.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/object.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/security.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/security_enforcement.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/syntax_validator.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/zexus_ast.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/zexus_token.cpython-312.pyc +0 -0
- package/src/zexus/access_control_system/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/access_control_system/__pycache__/access_control.cpython-312.pyc +0 -0
- package/src/zexus/advanced_types.py +17 -2
- package/src/zexus/blockchain/__init__.py +411 -0
- package/src/zexus/blockchain/accelerator.py +1160 -0
- package/src/zexus/blockchain/chain.py +660 -0
- package/src/zexus/blockchain/consensus.py +821 -0
- package/src/zexus/blockchain/contract_vm.py +1019 -0
- package/src/zexus/blockchain/crypto.py +79 -14
- package/src/zexus/blockchain/events.py +526 -0
- package/src/zexus/blockchain/loadtest.py +721 -0
- package/src/zexus/blockchain/monitoring.py +350 -0
- package/src/zexus/blockchain/mpt.py +716 -0
- package/src/zexus/blockchain/multichain.py +951 -0
- package/src/zexus/blockchain/multiprocess_executor.py +338 -0
- package/src/zexus/blockchain/network.py +886 -0
- package/src/zexus/blockchain/node.py +666 -0
- package/src/zexus/blockchain/rpc.py +1203 -0
- package/src/zexus/blockchain/rust_bridge.py +421 -0
- package/src/zexus/blockchain/storage.py +423 -0
- package/src/zexus/blockchain/tokens.py +750 -0
- package/src/zexus/blockchain/upgradeable.py +1004 -0
- package/src/zexus/blockchain/verification.py +1602 -0
- package/src/zexus/blockchain/wallet.py +621 -0
- package/src/zexus/cli/__pycache__/main.cpython-312.pyc +0 -0
- package/src/zexus/cli/main.py +300 -20
- package/src/zexus/cli/zpm.py +1 -1
- package/src/zexus/compiler/__pycache__/bytecode.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/lexer.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/parser.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/semantic.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/zexus_ast.cpython-312.pyc +0 -0
- package/src/zexus/compiler/lexer.py +10 -5
- package/src/zexus/concurrency_system.py +79 -0
- package/src/zexus/config.py +54 -0
- package/src/zexus/crypto_bridge.py +244 -8
- package/src/zexus/dap/__init__.py +10 -0
- package/src/zexus/dap/__main__.py +4 -0
- package/src/zexus/dap/dap_server.py +391 -0
- package/src/zexus/dap/debug_engine.py +298 -0
- package/src/zexus/environment.py +10 -1
- package/src/zexus/evaluator/__pycache__/bytecode_compiler.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/core.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/expressions.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/functions.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/resource_limiter.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/statements.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/unified_execution.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/utils.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/bytecode_compiler.py +441 -37
- package/src/zexus/evaluator/core.py +560 -49
- package/src/zexus/evaluator/expressions.py +122 -49
- package/src/zexus/evaluator/functions.py +417 -16
- package/src/zexus/evaluator/statements.py +521 -118
- package/src/zexus/evaluator/unified_execution.py +573 -72
- package/src/zexus/evaluator/utils.py +14 -2
- package/src/zexus/event_loop.py +186 -0
- package/src/zexus/lexer.py +742 -486
- package/src/zexus/lsp/__init__.py +1 -1
- package/src/zexus/lsp/definition_provider.py +163 -9
- package/src/zexus/lsp/server.py +22 -8
- package/src/zexus/lsp/symbol_provider.py +182 -9
- package/src/zexus/module_cache.py +237 -9
- package/src/zexus/object.py +64 -6
- package/src/zexus/parser/__pycache__/parser.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/strategy_context.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/strategy_structural.cpython-312.pyc +0 -0
- package/src/zexus/parser/parser.py +786 -285
- package/src/zexus/parser/strategy_context.py +407 -66
- package/src/zexus/parser/strategy_structural.py +117 -19
- package/src/zexus/persistence.py +15 -1
- package/src/zexus/renderer/__init__.py +15 -0
- package/src/zexus/renderer/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/renderer/__pycache__/backend.cpython-312.pyc +0 -0
- package/src/zexus/renderer/__pycache__/canvas.cpython-312.pyc +0 -0
- package/src/zexus/renderer/__pycache__/color_system.cpython-312.pyc +0 -0
- package/src/zexus/renderer/__pycache__/layout.cpython-312.pyc +0 -0
- package/src/zexus/renderer/__pycache__/main_renderer.cpython-312.pyc +0 -0
- package/src/zexus/renderer/__pycache__/painter.cpython-312.pyc +0 -0
- package/src/zexus/renderer/tk_backend.py +208 -0
- package/src/zexus/renderer/web_backend.py +260 -0
- package/src/zexus/runtime/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/runtime/__pycache__/async_runtime.cpython-312.pyc +0 -0
- package/src/zexus/runtime/__pycache__/load_manager.cpython-312.pyc +0 -0
- package/src/zexus/runtime/file_flags.py +137 -0
- package/src/zexus/safety/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/safety/__pycache__/memory_safety.cpython-312.pyc +0 -0
- package/src/zexus/security.py +424 -34
- package/src/zexus/stdlib/fs.py +23 -18
- package/src/zexus/stdlib/http.py +289 -186
- package/src/zexus/stdlib/sockets.py +207 -163
- package/src/zexus/stdlib/websockets.py +282 -0
- package/src/zexus/stdlib_integration.py +369 -2
- package/src/zexus/strategy_recovery.py +6 -3
- package/src/zexus/type_checker.py +423 -0
- package/src/zexus/virtual_filesystem.py +189 -2
- package/src/zexus/vm/__init__.py +113 -3
- package/src/zexus/vm/__pycache__/async_optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/bytecode.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/bytecode_converter.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/cache.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/compiler.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/gas_metering.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/jit.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/parallel_vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/async_optimizer.py +14 -1
- package/src/zexus/vm/binary_bytecode.py +659 -0
- package/src/zexus/vm/bytecode.py +28 -1
- package/src/zexus/vm/bytecode_converter.py +26 -12
- package/src/zexus/vm/cabi.c +1985 -0
- package/src/zexus/vm/cabi.cpython-312-x86_64-linux-gnu.so +0 -0
- package/src/zexus/vm/cabi.h +127 -0
- package/src/zexus/vm/cache.py +557 -17
- package/src/zexus/vm/compiler.py +703 -5
- package/src/zexus/vm/fastops.c +15743 -0
- package/src/zexus/vm/fastops.cpython-312-x86_64-linux-gnu.so +0 -0
- package/src/zexus/vm/fastops.pyx +288 -0
- package/src/zexus/vm/gas_metering.py +50 -9
- package/src/zexus/vm/jit.py +83 -2
- package/src/zexus/vm/native_jit_backend.py +1816 -0
- package/src/zexus/vm/native_runtime.cpp +1388 -0
- package/src/zexus/vm/native_runtime.cpython-312-x86_64-linux-gnu.so +0 -0
- package/src/zexus/vm/optimizer.py +161 -11
- package/src/zexus/vm/parallel_vm.py +118 -42
- package/src/zexus/vm/peephole_optimizer.py +82 -4
- package/src/zexus/vm/profiler.py +38 -18
- package/src/zexus/vm/register_allocator.py +16 -5
- package/src/zexus/vm/register_vm.py +8 -5
- package/src/zexus/vm/vm.py +3411 -573
- package/src/zexus/vm/wasm_compiler.py +658 -0
- package/src/zexus/zexus_ast.py +63 -11
- package/src/zexus/zexus_token.py +13 -5
- package/src/zexus/zpm/installer.py +55 -15
- package/src/zexus/zpm/package_manager.py +1 -1
- package/src/zexus/zpm/registry.py +257 -28
- package/src/zexus.egg-info/PKG-INFO +7 -4
- package/src/zexus.egg-info/SOURCES.txt +116 -9
- package/src/zexus.egg-info/entry_points.txt +1 -0
- package/src/zexus.egg-info/requires.txt +4 -0
|
@@ -10,6 +10,7 @@ Provides built-in functions for:
|
|
|
10
10
|
import hashlib
|
|
11
11
|
import hmac
|
|
12
12
|
import secrets
|
|
13
|
+
import os
|
|
13
14
|
from typing import Any, Optional
|
|
14
15
|
|
|
15
16
|
# Try to import cryptography library (optional for basic hashing)
|
|
@@ -24,6 +25,13 @@ except ImportError:
|
|
|
24
25
|
print("Warning: cryptography library not installed. Signature features will be limited.")
|
|
25
26
|
print("Install with: pip install cryptography")
|
|
26
27
|
|
|
28
|
+
# Real Keccak-256 from pycryptodome (different from SHA3-256!)
|
|
29
|
+
try:
|
|
30
|
+
from Crypto.Hash import keccak as _keccak_mod
|
|
31
|
+
_KECCAK_AVAILABLE = True
|
|
32
|
+
except ImportError:
|
|
33
|
+
_KECCAK_AVAILABLE = False
|
|
34
|
+
|
|
27
35
|
|
|
28
36
|
class CryptoPlugin:
|
|
29
37
|
"""
|
|
@@ -38,8 +46,23 @@ class CryptoPlugin:
|
|
|
38
46
|
'SHA3-512': hashlib.sha3_512,
|
|
39
47
|
'BLAKE2B': hashlib.blake2b,
|
|
40
48
|
'BLAKE2S': hashlib.blake2s,
|
|
41
|
-
|
|
49
|
+
# KECCAK256 is handled specially in hash_data() — NOT sha3_256
|
|
42
50
|
}
|
|
51
|
+
|
|
52
|
+
# Configurable blockchain address prefix (default Ethereum style)
|
|
53
|
+
ADDRESS_PREFIX = os.environ.get("ZEXUS_ADDRESS_PREFIX", "0x")
|
|
54
|
+
|
|
55
|
+
@classmethod
|
|
56
|
+
def set_address_prefix(cls, prefix: str) -> None:
|
|
57
|
+
"""Set the default prefix used by derive_address()."""
|
|
58
|
+
if not isinstance(prefix, str) or not prefix:
|
|
59
|
+
raise ValueError("Address prefix must be a non-empty string")
|
|
60
|
+
cls.ADDRESS_PREFIX = prefix
|
|
61
|
+
|
|
62
|
+
@classmethod
|
|
63
|
+
def get_address_prefix(cls) -> str:
|
|
64
|
+
"""Get the current default address prefix."""
|
|
65
|
+
return cls.ADDRESS_PREFIX
|
|
43
66
|
|
|
44
67
|
@staticmethod
|
|
45
68
|
def hash_data(data: Any, algorithm: str = 'SHA256') -> str:
|
|
@@ -54,6 +77,26 @@ class CryptoPlugin:
|
|
|
54
77
|
Hex-encoded hash
|
|
55
78
|
"""
|
|
56
79
|
algorithm = algorithm.upper()
|
|
80
|
+
|
|
81
|
+
# Special case: real Keccak-256 (NOT SHA3-256 — different padding)
|
|
82
|
+
if algorithm == 'KECCAK256':
|
|
83
|
+
if not _KECCAK_AVAILABLE:
|
|
84
|
+
raise RuntimeError(
|
|
85
|
+
"Keccak-256 requires the 'pycryptodome' package. "
|
|
86
|
+
"SHA3-256 uses different padding and is NOT compatible. "
|
|
87
|
+
"Install with: pip install pycryptodome"
|
|
88
|
+
)
|
|
89
|
+
# Convert data to bytes
|
|
90
|
+
if isinstance(data, bytes):
|
|
91
|
+
data_bytes = data
|
|
92
|
+
elif isinstance(data, str):
|
|
93
|
+
data_bytes = data.encode('utf-8')
|
|
94
|
+
else:
|
|
95
|
+
data_bytes = str(data).encode('utf-8')
|
|
96
|
+
k = _keccak_mod.new(digest_bits=256)
|
|
97
|
+
k.update(data_bytes)
|
|
98
|
+
return k.hexdigest()
|
|
99
|
+
|
|
57
100
|
if algorithm not in CryptoPlugin.HASH_ALGORITHMS:
|
|
58
101
|
raise ValueError(f"Unsupported hash algorithm: {algorithm}. "
|
|
59
102
|
f"Supported: {', '.join(CryptoPlugin.HASH_ALGORITHMS.keys())}")
|
|
@@ -285,7 +328,10 @@ class CryptoPlugin:
|
|
|
285
328
|
@staticmethod
|
|
286
329
|
def keccak256(data: Any) -> str:
|
|
287
330
|
"""
|
|
288
|
-
Ethereum-
|
|
331
|
+
Ethereum-compatible Keccak-256 hash.
|
|
332
|
+
|
|
333
|
+
NOTE: This uses real Keccak-256 (pre-NIST padding), NOT SHA3-256.
|
|
334
|
+
Requires pycryptodome.
|
|
289
335
|
|
|
290
336
|
Args:
|
|
291
337
|
data: Data to hash
|
|
@@ -309,19 +355,24 @@ class CryptoPlugin:
|
|
|
309
355
|
"""
|
|
310
356
|
return secrets.token_hex(length)
|
|
311
357
|
|
|
312
|
-
@
|
|
313
|
-
def derive_address(public_key_pem: str) -> str:
|
|
358
|
+
@classmethod
|
|
359
|
+
def derive_address(cls, public_key_pem: str, prefix: Optional[str] = None) -> str:
|
|
314
360
|
"""
|
|
315
|
-
Derive
|
|
361
|
+
Derive a blockchain address from a public key
|
|
316
362
|
|
|
317
363
|
Args:
|
|
318
364
|
public_key_pem: Public key in PEM format
|
|
365
|
+
prefix: Optional address prefix override (e.g. "0x", "Zx01")
|
|
319
366
|
|
|
320
367
|
Returns:
|
|
321
|
-
Address (
|
|
368
|
+
Address (prefix + 40 hex chars)
|
|
322
369
|
"""
|
|
323
370
|
if not CRYPTO_AVAILABLE:
|
|
324
371
|
raise RuntimeError("cryptography library not installed. Install with: pip install cryptography")
|
|
372
|
+
|
|
373
|
+
effective_prefix = cls.ADDRESS_PREFIX if prefix is None else prefix
|
|
374
|
+
if not isinstance(effective_prefix, str) or not effective_prefix:
|
|
375
|
+
raise ValueError("Address prefix must be a non-empty string")
|
|
325
376
|
|
|
326
377
|
# Load public key
|
|
327
378
|
public_key = serialization.load_pem_public_key(
|
|
@@ -335,12 +386,20 @@ class CryptoPlugin:
|
|
|
335
386
|
format=serialization.PublicFormat.UncompressedPoint
|
|
336
387
|
)
|
|
337
388
|
|
|
338
|
-
#
|
|
339
|
-
|
|
389
|
+
# Real Keccak-256 hash (Ethereum-compatible)
|
|
390
|
+
if _KECCAK_AVAILABLE:
|
|
391
|
+
k = _keccak_mod.new(digest_bits=256)
|
|
392
|
+
k.update(public_bytes[1:]) # Skip 0x04 prefix
|
|
393
|
+
hash_result = k.digest()
|
|
394
|
+
else:
|
|
395
|
+
raise RuntimeError(
|
|
396
|
+
"Ethereum-compatible address derivation requires Keccak-256 "
|
|
397
|
+
"from the 'pycryptodome' package. Install with: pip install pycryptodome"
|
|
398
|
+
)
|
|
340
399
|
|
|
341
400
|
# Take last 20 bytes as address
|
|
342
401
|
address = hash_result[-20:].hex()
|
|
343
|
-
return
|
|
402
|
+
return effective_prefix + address
|
|
344
403
|
|
|
345
404
|
|
|
346
405
|
def register_crypto_builtins(env):
|
|
@@ -356,7 +415,10 @@ def register_crypto_builtins(env):
|
|
|
356
415
|
- random_bytes(length?) -> string
|
|
357
416
|
- derive_address(public_key) -> string
|
|
358
417
|
"""
|
|
359
|
-
|
|
418
|
+
try:
|
|
419
|
+
from zexus.object import Function, String, Boolean, Hash, Integer, Error
|
|
420
|
+
except ImportError:
|
|
421
|
+
from src.zexus.object import Function, String, Boolean, Hash, Integer, Error
|
|
360
422
|
|
|
361
423
|
# hash(data, algorithm)
|
|
362
424
|
def builtin_hash(args):
|
|
@@ -439,15 +501,18 @@ def register_crypto_builtins(env):
|
|
|
439
501
|
except Exception as e:
|
|
440
502
|
return Error(f"Random bytes error: {str(e)}")
|
|
441
503
|
|
|
442
|
-
# derive_address(public_key)
|
|
504
|
+
# derive_address(public_key, [prefix])
|
|
443
505
|
def builtin_derive_address(args):
|
|
444
|
-
if len(args)
|
|
445
|
-
return Error("derive_address expects 1
|
|
506
|
+
if len(args) < 1 or len(args) > 2:
|
|
507
|
+
return Error("derive_address expects 1 or 2 arguments: public_key, [prefix]")
|
|
446
508
|
|
|
447
509
|
public_key = args[0].value if hasattr(args[0], 'value') else str(args[0])
|
|
510
|
+
prefix = None
|
|
511
|
+
if len(args) > 1:
|
|
512
|
+
prefix = args[1].value if hasattr(args[1], 'value') else str(args[1])
|
|
448
513
|
|
|
449
514
|
try:
|
|
450
|
-
result = CryptoPlugin.derive_address(public_key)
|
|
515
|
+
result = CryptoPlugin.derive_address(public_key, prefix=prefix)
|
|
451
516
|
return String(result)
|
|
452
517
|
except Exception as e:
|
|
453
518
|
return Error(f"Address derivation error: {str(e)}")
|
|
@@ -0,0 +1,526 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Event Indexing & Log Filtering for the Zexus Blockchain.
|
|
3
|
+
|
|
4
|
+
Provides:
|
|
5
|
+
- **BloomFilter**: Space-efficient probabilistic set for fast log matching.
|
|
6
|
+
- **EventLog**: Structured event model with indexed topics.
|
|
7
|
+
- **EventIndex**: Persistent event store (SQLite-backed) with multi-key
|
|
8
|
+
lookup by block range, address, topic, and event name.
|
|
9
|
+
- **LogFilter**: Composable filter object matching Ethereum-style
|
|
10
|
+
``getLogs`` semantics (fromBlock, toBlock, address, topics).
|
|
11
|
+
|
|
12
|
+
Usage (from RPCServer or BlockchainNode):
|
|
13
|
+
|
|
14
|
+
>>> idx = EventIndex(data_dir="/tmp/zexus")
|
|
15
|
+
>>> idx.index_block(block) # called after each block is added
|
|
16
|
+
>>> logs = idx.get_logs(LogFilter(from_block=0, to_block=10,
|
|
17
|
+
... address="0xabc..."))
|
|
18
|
+
|
|
19
|
+
Bloom filters are attached to each block header (``logs_bloom``) so
|
|
20
|
+
nodes can skip blocks that certainly do not contain matching logs.
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
from __future__ import annotations
|
|
24
|
+
|
|
25
|
+
import hashlib
|
|
26
|
+
import json
|
|
27
|
+
import math
|
|
28
|
+
import sqlite3
|
|
29
|
+
import time
|
|
30
|
+
from dataclasses import dataclass, field, asdict
|
|
31
|
+
from typing import Any, Dict, List, Optional, Set, Tuple
|
|
32
|
+
|
|
33
|
+
import logging
|
|
34
|
+
|
|
35
|
+
logger = logging.getLogger(__name__)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
# ══════════════════════════════════════════════════════════════════════
|
|
39
|
+
# Bloom Filter — 2048-bit (256-byte) per Ethereum Yellow Paper §4.3.1
|
|
40
|
+
# ══════════════════════════════════════════════════════════════════════
|
|
41
|
+
|
|
42
|
+
class BloomFilter:
    """A 2048-bit (256-byte) Bloom filter using 3 hash functions.

    The bitmap size (2048 bits) matches Ethereum's *logsBloom* field, so
    serialized blooms are the same length.  NOTE: bit positions here are
    derived from a single SHA-256 digest (three 2-byte slices), not from
    Keccak-256, so the bit patterns are NOT interchangeable with blooms
    produced by Ethereum clients — only the layout/size is shared.

    Standard Bloom-filter guarantee: membership tests have no false
    negatives, only (rare) false positives.
    """

    SIZE_BITS = 2048
    SIZE_BYTES = SIZE_BITS // 8  # 256
    NUM_HASHES = 3

    def __init__(self, data: Optional[bytes] = None):
        """Create an empty filter, or wrap an existing serialized bitmap.

        Args:
            data: Optional raw bloom exactly ``SIZE_BYTES`` long.

        Raises:
            ValueError: If ``data`` is given but has the wrong length.
        """
        if data is not None:
            if len(data) != self.SIZE_BYTES:
                raise ValueError(f"Bloom data must be {self.SIZE_BYTES} bytes")
            self._bits = bytearray(data)
        else:
            self._bits = bytearray(self.SIZE_BYTES)

    # ── Core ops ──────────────────────────────────────────────────

    def add(self, item: str) -> None:
        """Add an item (hex string or plain text) to the bloom."""
        for pos in self._bit_positions(item):
            byte_idx = pos // 8
            bit_idx = pos % 8
            self._bits[byte_idx] |= (1 << bit_idx)

    def contains(self, item: str) -> bool:
        """Probabilistic membership test (no false negatives)."""
        for pos in self._bit_positions(item):
            byte_idx = pos // 8
            bit_idx = pos % 8
            if not (self._bits[byte_idx] & (1 << bit_idx)):
                return False
        return True

    def merge(self, other: "BloomFilter") -> None:
        """OR another bloom into this one in place (set union)."""
        for i in range(self.SIZE_BYTES):
            self._bits[i] |= other._bits[i]

    # ── Serialization ─────────────────────────────────────────────

    def to_hex(self) -> str:
        """Serialize to a ``0x``-prefixed hex string."""
        return "0x" + self._bits.hex()

    @classmethod
    def from_hex(cls, hex_str: str) -> "BloomFilter":
        """Reconstruct a bloom from :meth:`to_hex` output (``0x`` optional)."""
        raw = hex_str.removeprefix("0x")
        return cls(bytes.fromhex(raw))

    def to_bytes(self) -> bytes:
        """Serialize to the raw 256-byte bitmap."""
        return bytes(self._bits)

    @property
    def is_empty(self) -> bool:
        """True when no bit is set (nothing has been added)."""
        return all(b == 0 for b in self._bits)

    # ── Internal ──────────────────────────────────────────────────

    def _bit_positions(self, item: str) -> List[int]:
        """Return the ``NUM_HASHES`` bit indices selected for *item*.

        One SHA-256 digest is computed; each "hash function" reads a
        distinct 2-byte big-endian slice of it, reduced mod ``SIZE_BITS``.
        """
        h = hashlib.sha256(item.encode("utf-8")).digest()
        positions = []
        for i in range(self.NUM_HASHES):
            # Take 2 bytes from the hash for each function
            val = int.from_bytes(h[2 * i: 2 * i + 2], "big")
            positions.append(val % self.SIZE_BITS)
        return positions

    def __or__(self, other: "BloomFilter") -> "BloomFilter":
        """Return a new bloom that is the union of ``self`` and ``other``."""
        result = BloomFilter(bytes(self._bits))
        result.merge(other)
        return result

    def __repr__(self) -> str:
        ones = sum(bin(b).count("1") for b in self._bits)
        return f"<BloomFilter bits_set={ones}/{self.SIZE_BITS}>"
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
# ══════════════════════════════════════════════════════════════════════
|
|
124
|
+
# EventLog — structured event model
|
|
125
|
+
# ══════════════════════════════════════════════════════════════════════
|
|
126
|
+
|
|
127
|
+
@dataclass
class EventLog:
    """One indexed event log entry.

    The field set mirrors Ethereum's log object so external tooling can
    consume it directly:
    - ``address``: contract that emitted the event.
    - ``topics``: indexed topic strings; ``topics[0]`` is the event signature.
    - ``data``: ABI-encoded (or JSON) event payload.
    - ``block_number`` / ``block_hash`` / ``tx_hash`` / ``tx_index`` /
      ``log_index``: position of the log within the chain.
    """

    address: str = ""
    topics: List[str] = field(default_factory=list)
    data: str = ""
    block_number: int = 0
    block_hash: str = ""
    tx_hash: str = ""
    tx_index: int = 0
    log_index: int = 0
    timestamp: float = 0.0
    removed: bool = False  # set when the log was reverted by a chain reorg

    @property
    def event_name(self) -> str:
        """Human-readable event name, i.e. topic[0] ("" when no topics)."""
        if not self.topics:
            return ""
        return self.topics[0]

    def topic_hash(self) -> str:
        """SHA-256 hex digest of the event signature (topic[0]); "" when no topics."""
        if self.topics:
            return hashlib.sha256(self.topics[0].encode()).hexdigest()
        return ""

    def to_dict(self) -> Dict[str, Any]:
        """Plain-dict form of this log (all dataclass fields)."""
        return asdict(self)

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "EventLog":
        """Build an EventLog from a dict, silently ignoring unknown keys."""
        known = cls.__dataclass_fields__
        return cls(**{key: value for key, value in d.items() if key in known})
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
# ══════════════════════════════════════════════════════════════════════
|
|
170
|
+
# LogFilter — composable log query
|
|
171
|
+
# ══════════════════════════════════════════════════════════════════════
|
|
172
|
+
|
|
173
|
+
@dataclass
class LogFilter:
    """Composable query filter over event logs (``eth_getLogs`` semantics).

    - ``from_block`` / ``to_block``: inclusive block range (``None`` → latest).
    - ``address``: one address string, or a list of candidate addresses.
    - ``topics``: positional topic constraints; each slot is ``None``
      (wildcard), a single topic, or a list of alternatives.  Positions
      are AND-ed together; alternatives within a position are OR-ed.
    - ``event_name``: convenience filter on the readable event name.
    """

    from_block: int = 0
    to_block: Optional[int] = None       # None → latest
    address: Optional[Any] = None        # str or List[str]
    topics: Optional[List[Optional[Any]]] = None   # [[t1,t2], None, [t3]]
    event_name: Optional[str] = None
    limit: int = 10_000

    def address_set(self) -> Optional[Set[str]]:
        """Normalize ``address`` into a set, or None when unfiltered."""
        if self.address is None:
            return None
        return {self.address} if isinstance(self.address, str) else set(self.address)

    def matches(self, log: EventLog) -> bool:
        """Return True when *log* satisfies every active constraint."""
        height = log.block_number
        if height < self.from_block:
            return False
        if self.to_block is not None and height > self.to_block:
            return False

        wanted = self.address_set()
        if wanted is not None and log.address not in wanted:
            return False

        if self.event_name and log.event_name != self.event_name:
            return False

        for position, constraint in enumerate(self.topics or []):
            if constraint is None:
                continue  # wildcard slot
            if position >= len(log.topics):
                return False
            actual = log.topics[position]
            if isinstance(constraint, list):
                if actual not in constraint:
                    return False
            elif actual != constraint:
                return False

        return True
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
# ══════════════════════════════════════════════════════════════════════
|
|
235
|
+
# EventIndex — persistent event store (SQLite)
|
|
236
|
+
# ══════════════════════════════════════════════════════════════════════
|
|
237
|
+
|
|
238
|
+
class EventIndex:
|
|
239
|
+
"""Persistent, indexed event/log store backed by SQLite.
|
|
240
|
+
|
|
241
|
+
Every time a block is finalized, call ``index_block(block)`` to
|
|
242
|
+
extract and persist all receipt logs. Queries via ``get_logs``
|
|
243
|
+
hit indexed columns and optionally check the per-block bloom
|
|
244
|
+
filter *before* scanning individual entries.
|
|
245
|
+
"""
|
|
246
|
+
|
|
247
|
+
def __init__(self, data_dir: Optional[str] = None):
|
|
248
|
+
self._db: Optional[sqlite3.Connection] = None
|
|
249
|
+
self._blooms: Dict[int, BloomFilter] = {} # block_height -> bloom
|
|
250
|
+
if data_dir:
|
|
251
|
+
import os
|
|
252
|
+
os.makedirs(data_dir, exist_ok=True)
|
|
253
|
+
self._init_db(os.path.join(data_dir, "events.db"))
|
|
254
|
+
|
|
255
|
+
def _init_db(self, db_path: str):
|
|
256
|
+
self._db = sqlite3.connect(db_path)
|
|
257
|
+
self._db.execute("""
|
|
258
|
+
CREATE TABLE IF NOT EXISTS event_logs (
|
|
259
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
260
|
+
block_number INTEGER NOT NULL,
|
|
261
|
+
block_hash TEXT NOT NULL,
|
|
262
|
+
tx_hash TEXT NOT NULL,
|
|
263
|
+
tx_index INTEGER NOT NULL,
|
|
264
|
+
log_index INTEGER NOT NULL,
|
|
265
|
+
address TEXT NOT NULL,
|
|
266
|
+
topic0 TEXT,
|
|
267
|
+
topic1 TEXT,
|
|
268
|
+
topic2 TEXT,
|
|
269
|
+
topic3 TEXT,
|
|
270
|
+
data TEXT,
|
|
271
|
+
timestamp REAL,
|
|
272
|
+
removed INTEGER DEFAULT 0
|
|
273
|
+
)
|
|
274
|
+
""")
|
|
275
|
+
self._db.execute("""
|
|
276
|
+
CREATE TABLE IF NOT EXISTS block_blooms (
|
|
277
|
+
block_number INTEGER PRIMARY KEY,
|
|
278
|
+
bloom_hex TEXT NOT NULL
|
|
279
|
+
)
|
|
280
|
+
""")
|
|
281
|
+
# Indices for fast lookups
|
|
282
|
+
self._db.execute("CREATE INDEX IF NOT EXISTS idx_logs_block ON event_logs(block_number)")
|
|
283
|
+
self._db.execute("CREATE INDEX IF NOT EXISTS idx_logs_address ON event_logs(address)")
|
|
284
|
+
self._db.execute("CREATE INDEX IF NOT EXISTS idx_logs_topic0 ON event_logs(topic0)")
|
|
285
|
+
self._db.execute("CREATE INDEX IF NOT EXISTS idx_logs_tx ON event_logs(tx_hash)")
|
|
286
|
+
self._db.commit()
|
|
287
|
+
|
|
288
|
+
# ── Indexing ──────────────────────────────────────────────────
|
|
289
|
+
|
|
290
|
+
def index_block(self, block) -> int:
|
|
291
|
+
"""Extract logs from a block's receipts and persist them.
|
|
292
|
+
|
|
293
|
+
Returns the number of new log entries indexed.
|
|
294
|
+
"""
|
|
295
|
+
bloom = BloomFilter()
|
|
296
|
+
count = 0
|
|
297
|
+
log_index = 0
|
|
298
|
+
|
|
299
|
+
for tx_idx, receipt in enumerate(block.receipts):
|
|
300
|
+
for raw_log in receipt.logs:
|
|
301
|
+
log = self._normalize_log(
|
|
302
|
+
raw_log, block, receipt, tx_idx, log_index
|
|
303
|
+
)
|
|
304
|
+
bloom.add(log.address)
|
|
305
|
+
for topic in log.topics:
|
|
306
|
+
bloom.add(topic)
|
|
307
|
+
|
|
308
|
+
self._persist_log(log)
|
|
309
|
+
count += 1
|
|
310
|
+
log_index += 1
|
|
311
|
+
|
|
312
|
+
self._blooms[block.header.height] = bloom
|
|
313
|
+
if self._db:
|
|
314
|
+
self._db.execute(
|
|
315
|
+
"INSERT OR REPLACE INTO block_blooms (block_number, bloom_hex) VALUES (?, ?)",
|
|
316
|
+
(block.header.height, bloom.to_hex()),
|
|
317
|
+
)
|
|
318
|
+
self._db.commit()
|
|
319
|
+
|
|
320
|
+
return count
|
|
321
|
+
|
|
322
|
+
def index_receipt_logs(self, receipt, block_number: int,
|
|
323
|
+
block_hash: str, tx_index: int) -> int:
|
|
324
|
+
"""Index logs from a single receipt (for incremental indexing)."""
|
|
325
|
+
count = 0
|
|
326
|
+
for log_idx, raw_log in enumerate(receipt.logs):
|
|
327
|
+
log = EventLog(
|
|
328
|
+
address=raw_log.get("contract", raw_log.get("address", "")),
|
|
329
|
+
topics=[raw_log.get("event", "")] + raw_log.get("topics", []),
|
|
330
|
+
data=json.dumps(raw_log.get("data", ""), default=str),
|
|
331
|
+
block_number=block_number,
|
|
332
|
+
block_hash=block_hash,
|
|
333
|
+
tx_hash=receipt.tx_hash,
|
|
334
|
+
tx_index=tx_index,
|
|
335
|
+
log_index=log_idx,
|
|
336
|
+
timestamp=raw_log.get("timestamp", 0.0),
|
|
337
|
+
)
|
|
338
|
+
self._persist_log(log)
|
|
339
|
+
count += 1
|
|
340
|
+
return count
|
|
341
|
+
|
|
342
|
+
# ── Querying ──────────────────────────────────────────────────
|
|
343
|
+
|
|
344
|
+
def get_logs(self, filt: LogFilter) -> List[EventLog]:
|
|
345
|
+
"""Query logs matching the given filter.
|
|
346
|
+
|
|
347
|
+
Uses bloom filters for block-level pre-filtering when available,
|
|
348
|
+
then applies full filter matching.
|
|
349
|
+
"""
|
|
350
|
+
# Fast path: SQL query if DB available
|
|
351
|
+
if self._db:
|
|
352
|
+
return self._query_db(filt)
|
|
353
|
+
|
|
354
|
+
# In-memory fallback (for tests without data_dir)
|
|
355
|
+
return []
|
|
356
|
+
|
|
357
|
+
def get_logs_for_tx(self, tx_hash: str) -> List[EventLog]:
|
|
358
|
+
"""Get all logs emitted by a specific transaction."""
|
|
359
|
+
if self._db:
|
|
360
|
+
rows = self._db.execute(
|
|
361
|
+
"SELECT * FROM event_logs WHERE tx_hash = ? ORDER BY log_index",
|
|
362
|
+
(tx_hash,)
|
|
363
|
+
).fetchall()
|
|
364
|
+
return [self._row_to_log(r) for r in rows]
|
|
365
|
+
return []
|
|
366
|
+
|
|
367
|
+
def get_logs_for_block(self, block_number: int) -> List[EventLog]:
|
|
368
|
+
"""Get all logs in a specific block."""
|
|
369
|
+
if self._db:
|
|
370
|
+
rows = self._db.execute(
|
|
371
|
+
"SELECT * FROM event_logs WHERE block_number = ? ORDER BY log_index",
|
|
372
|
+
(block_number,)
|
|
373
|
+
).fetchall()
|
|
374
|
+
return [self._row_to_log(r) for r in rows]
|
|
375
|
+
return []
|
|
376
|
+
|
|
377
|
+
def get_bloom(self, block_number: int) -> Optional[BloomFilter]:
|
|
378
|
+
"""Get the bloom filter for a specific block."""
|
|
379
|
+
if block_number in self._blooms:
|
|
380
|
+
return self._blooms[block_number]
|
|
381
|
+
if self._db:
|
|
382
|
+
row = self._db.execute(
|
|
383
|
+
"SELECT bloom_hex FROM block_blooms WHERE block_number = ?",
|
|
384
|
+
(block_number,)
|
|
385
|
+
).fetchone()
|
|
386
|
+
if row:
|
|
387
|
+
bloom = BloomFilter.from_hex(row[0])
|
|
388
|
+
self._blooms[block_number] = bloom
|
|
389
|
+
return bloom
|
|
390
|
+
return None
|
|
391
|
+
|
|
392
|
+
def count_logs(self, filt: Optional[LogFilter] = None) -> int:
    """Return the number of stored logs, restricted by *filt* when given.

    Returns 0 when no database is attached.
    """
    if not self._db:
        return 0
    if filt:
        where, params = self._build_where(filt)
        sql = f"SELECT COUNT(*) FROM event_logs {where}"
        return self._db.execute(sql, params).fetchone()[0]
    return self._db.execute("SELECT COUNT(*) FROM event_logs").fetchone()[0]
|
|
404
|
+
|
|
405
|
+
# ── Reorg handling ────────────────────────────────────────────
|
|
406
|
+
|
|
407
|
+
def mark_removed(self, block_number: int) -> int:
    """Flag every log at height >= *block_number* as removed (chain reorg).

    Returns the number of rows updated (0 when no database is attached).
    Rows are only flagged, not deleted — see prune_removed().
    """
    if not self._db:
        return 0
    cursor = self._db.execute(
        "UPDATE event_logs SET removed = 1 WHERE block_number >= ?",
        (block_number,),
    )
    self._db.commit()
    return cursor.rowcount
|
|
417
|
+
|
|
418
|
+
def prune_removed(self) -> int:
    """Permanently delete every log previously flagged by mark_removed().

    Returns the number of rows deleted (0 when no database is attached).
    """
    if not self._db:
        return 0
    cursor = self._db.execute("DELETE FROM event_logs WHERE removed = 1")
    self._db.commit()
    return cursor.rowcount
|
|
425
|
+
|
|
426
|
+
# ── Internal helpers ──────────────────────────────────────────
|
|
427
|
+
|
|
428
|
+
def _normalize_log(self, raw_log: Dict, block, receipt, tx_idx: int,
                   log_idx: int) -> EventLog:
    """Convert one raw receipt log dict into a structured EventLog.

    Topic order: an explicit "event" key comes first, followed by any
    declared "topics"; "name" is used only as a last-resort fallback
    when neither of the others produced a topic.
    """
    topics: List[str] = []
    if "event" in raw_log:
        topics.append(raw_log["event"])
    if "topics" in raw_log:
        topics += list(raw_log["topics"])
    if not topics and "name" in raw_log:
        topics.append(raw_log["name"])

    # "contract" takes precedence over "address" for the emitting address.
    emitter = raw_log.get("contract", raw_log.get("address", ""))
    # NOTE(review): default=str stringifies any non-JSON-serializable
    # payload rather than raising — presumably intentional; confirm.
    payload = json.dumps(raw_log.get("data", ""), default=str)

    return EventLog(
        address=emitter,
        topics=topics,
        data=payload,
        block_number=block.header.height,
        block_hash=block.hash,
        tx_hash=receipt.tx_hash,
        tx_index=tx_idx,
        log_index=log_idx,
        timestamp=raw_log.get("timestamp", block.header.timestamp),
    )
|
|
450
|
+
|
|
451
|
+
def _persist_log(self, log: EventLog):
|
|
452
|
+
if not self._db:
|
|
453
|
+
return
|
|
454
|
+
topics = log.topics + [None] * (4 - len(log.topics))
|
|
455
|
+
self._db.execute(
|
|
456
|
+
"""INSERT INTO event_logs
|
|
457
|
+
(block_number, block_hash, tx_hash, tx_index, log_index,
|
|
458
|
+
address, topic0, topic1, topic2, topic3, data, timestamp, removed)
|
|
459
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
|
460
|
+
(log.block_number, log.block_hash, log.tx_hash, log.tx_index,
|
|
461
|
+
log.log_index, log.address, topics[0], topics[1], topics[2],
|
|
462
|
+
topics[3], log.data, log.timestamp, int(log.removed)),
|
|
463
|
+
)
|
|
464
|
+
|
|
465
|
+
def _build_where(self, filt: LogFilter) -> Tuple[str, list]:
|
|
466
|
+
clauses = ["removed = 0"]
|
|
467
|
+
params: list = []
|
|
468
|
+
|
|
469
|
+
clauses.append("block_number >= ?")
|
|
470
|
+
params.append(filt.from_block)
|
|
471
|
+
if filt.to_block is not None:
|
|
472
|
+
clauses.append("block_number <= ?")
|
|
473
|
+
params.append(filt.to_block)
|
|
474
|
+
|
|
475
|
+
addr_set = filt.address_set()
|
|
476
|
+
if addr_set:
|
|
477
|
+
placeholders = ",".join("?" for _ in addr_set)
|
|
478
|
+
clauses.append(f"address IN ({placeholders})")
|
|
479
|
+
params.extend(addr_set)
|
|
480
|
+
|
|
481
|
+
if filt.event_name:
|
|
482
|
+
clauses.append("topic0 = ?")
|
|
483
|
+
params.append(filt.event_name)
|
|
484
|
+
|
|
485
|
+
if filt.topics:
|
|
486
|
+
for i, topic_filter in enumerate(filt.topics[:4]):
|
|
487
|
+
col = f"topic{i}"
|
|
488
|
+
if topic_filter is None:
|
|
489
|
+
continue
|
|
490
|
+
if isinstance(topic_filter, list):
|
|
491
|
+
ph = ",".join("?" for _ in topic_filter)
|
|
492
|
+
clauses.append(f"{col} IN ({ph})")
|
|
493
|
+
params.extend(topic_filter)
|
|
494
|
+
else:
|
|
495
|
+
clauses.append(f"{col} = ?")
|
|
496
|
+
params.append(topic_filter)
|
|
497
|
+
|
|
498
|
+
where = "WHERE " + " AND ".join(clauses) if clauses else ""
|
|
499
|
+
return where, params
|
|
500
|
+
|
|
501
|
+
def _query_db(self, filt: LogFilter) -> List[EventLog]:
|
|
502
|
+
where, params = self._build_where(filt)
|
|
503
|
+
sql = f"SELECT * FROM event_logs {where} ORDER BY block_number, log_index LIMIT ?"
|
|
504
|
+
params.append(filt.limit)
|
|
505
|
+
rows = self._db.execute(sql, params).fetchall()
|
|
506
|
+
return [self._row_to_log(r) for r in rows]
|
|
507
|
+
|
|
508
|
+
def _row_to_log(self, row) -> EventLog:
    """Rebuild an EventLog from an event_logs table row.

    Positional column layout (matches the INSERT in _persist_log, with
    row[0] presumably the table's own id column — confirm the schema):
    (id, block_number, block_hash, tx_hash, tx_index, log_index,
    address, topic0, topic1, topic2, topic3, data, timestamp, removed).
    """
    # Drop the NULL padding stored for unused topic slots.
    stored = (row[7], row[8], row[9], row[10])
    topics = [t for t in stored if t is not None]
    return EventLog(
        address=row[6],
        topics=topics,
        data=row[11] or "",
        block_number=row[1],
        block_hash=row[2],
        tx_hash=row[3],
        tx_index=row[4],
        log_index=row[5],
        timestamp=row[12] or 0.0,
        removed=bool(row[13]),
    )
|
|
522
|
+
|
|
523
|
+
def close(self):
    """Close the SQLite connection if one is open; safe to call repeatedly."""
    if not self._db:
        return
    self._db.close()
    self._db = None  # prevent further use of the closed handle
|