pyspiral 0.7.18__cp312-abi3-manylinux_2_28_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. pyspiral-0.7.18.dist-info/METADATA +52 -0
  2. pyspiral-0.7.18.dist-info/RECORD +110 -0
  3. pyspiral-0.7.18.dist-info/WHEEL +4 -0
  4. pyspiral-0.7.18.dist-info/entry_points.txt +3 -0
  5. spiral/__init__.py +55 -0
  6. spiral/_lib.abi3.so +0 -0
  7. spiral/adbc.py +411 -0
  8. spiral/api/__init__.py +78 -0
  9. spiral/api/admin.py +15 -0
  10. spiral/api/client.py +164 -0
  11. spiral/api/filesystems.py +134 -0
  12. spiral/api/key_space_indexes.py +23 -0
  13. spiral/api/organizations.py +77 -0
  14. spiral/api/projects.py +219 -0
  15. spiral/api/telemetry.py +19 -0
  16. spiral/api/text_indexes.py +56 -0
  17. spiral/api/types.py +23 -0
  18. spiral/api/workers.py +40 -0
  19. spiral/api/workloads.py +52 -0
  20. spiral/arrow_.py +216 -0
  21. spiral/cli/__init__.py +88 -0
  22. spiral/cli/__main__.py +4 -0
  23. spiral/cli/admin.py +14 -0
  24. spiral/cli/app.py +108 -0
  25. spiral/cli/console.py +95 -0
  26. spiral/cli/fs.py +76 -0
  27. spiral/cli/iceberg.py +97 -0
  28. spiral/cli/key_spaces.py +103 -0
  29. spiral/cli/login.py +25 -0
  30. spiral/cli/orgs.py +90 -0
  31. spiral/cli/printer.py +53 -0
  32. spiral/cli/projects.py +147 -0
  33. spiral/cli/state.py +7 -0
  34. spiral/cli/tables.py +197 -0
  35. spiral/cli/telemetry.py +17 -0
  36. spiral/cli/text.py +115 -0
  37. spiral/cli/types.py +50 -0
  38. spiral/cli/workloads.py +58 -0
  39. spiral/client.py +256 -0
  40. spiral/core/__init__.pyi +0 -0
  41. spiral/core/_tools/__init__.pyi +5 -0
  42. spiral/core/authn/__init__.pyi +21 -0
  43. spiral/core/client/__init__.pyi +285 -0
  44. spiral/core/config/__init__.pyi +35 -0
  45. spiral/core/expr/__init__.pyi +15 -0
  46. spiral/core/expr/images/__init__.pyi +3 -0
  47. spiral/core/expr/list_/__init__.pyi +4 -0
  48. spiral/core/expr/refs/__init__.pyi +4 -0
  49. spiral/core/expr/str_/__init__.pyi +3 -0
  50. spiral/core/expr/struct_/__init__.pyi +6 -0
  51. spiral/core/expr/text/__init__.pyi +5 -0
  52. spiral/core/expr/udf/__init__.pyi +14 -0
  53. spiral/core/expr/video/__init__.pyi +3 -0
  54. spiral/core/table/__init__.pyi +141 -0
  55. spiral/core/table/manifests/__init__.pyi +35 -0
  56. spiral/core/table/metastore/__init__.pyi +58 -0
  57. spiral/core/table/spec/__init__.pyi +215 -0
  58. spiral/dataloader.py +299 -0
  59. spiral/dataset.py +264 -0
  60. spiral/datetime_.py +27 -0
  61. spiral/debug/__init__.py +0 -0
  62. spiral/debug/manifests.py +87 -0
  63. spiral/debug/metrics.py +56 -0
  64. spiral/debug/scan.py +266 -0
  65. spiral/enrichment.py +306 -0
  66. spiral/expressions/__init__.py +274 -0
  67. spiral/expressions/base.py +167 -0
  68. spiral/expressions/file.py +17 -0
  69. spiral/expressions/http.py +17 -0
  70. spiral/expressions/list_.py +68 -0
  71. spiral/expressions/s3.py +16 -0
  72. spiral/expressions/str_.py +39 -0
  73. spiral/expressions/struct.py +59 -0
  74. spiral/expressions/text.py +62 -0
  75. spiral/expressions/tiff.py +222 -0
  76. spiral/expressions/udf.py +60 -0
  77. spiral/grpc_.py +32 -0
  78. spiral/iceberg.py +31 -0
  79. spiral/iterable_dataset.py +106 -0
  80. spiral/key_space_index.py +44 -0
  81. spiral/project.py +227 -0
  82. spiral/protogen/_/__init__.py +0 -0
  83. spiral/protogen/_/arrow/__init__.py +0 -0
  84. spiral/protogen/_/arrow/flight/__init__.py +0 -0
  85. spiral/protogen/_/arrow/flight/protocol/__init__.py +0 -0
  86. spiral/protogen/_/arrow/flight/protocol/sql/__init__.py +2548 -0
  87. spiral/protogen/_/google/__init__.py +0 -0
  88. spiral/protogen/_/google/protobuf/__init__.py +2310 -0
  89. spiral/protogen/_/message_pool.py +3 -0
  90. spiral/protogen/_/py.typed +0 -0
  91. spiral/protogen/_/scandal/__init__.py +190 -0
  92. spiral/protogen/_/spfs/__init__.py +72 -0
  93. spiral/protogen/_/spql/__init__.py +61 -0
  94. spiral/protogen/_/substrait/__init__.py +6196 -0
  95. spiral/protogen/_/substrait/extensions/__init__.py +169 -0
  96. spiral/protogen/__init__.py +0 -0
  97. spiral/protogen/util.py +41 -0
  98. spiral/py.typed +0 -0
  99. spiral/scan.py +363 -0
  100. spiral/server.py +17 -0
  101. spiral/settings.py +36 -0
  102. spiral/snapshot.py +56 -0
  103. spiral/streaming_/__init__.py +3 -0
  104. spiral/streaming_/reader.py +133 -0
  105. spiral/streaming_/stream.py +157 -0
  106. spiral/substrait_.py +274 -0
  107. spiral/table.py +224 -0
  108. spiral/text_index.py +17 -0
  109. spiral/transaction.py +155 -0
  110. spiral/types_.py +6 -0
spiral/table.py ADDED
@@ -0,0 +1,224 @@
+ from datetime import datetime
+ from typing import TYPE_CHECKING
+
+ from spiral.core.table import Table as CoreTable
+ from spiral.core.table.spec import Schema
+ from spiral.enrichment import Enrichment
+ from spiral.expressions.base import Expr, ExprLike
+ from spiral.settings import settings
+ from spiral.snapshot import Snapshot
+ from spiral.transaction import Transaction
+
+ if TYPE_CHECKING:
+     import duckdb
+     import polars as pl
+     import pyarrow.dataset as ds
+
+     from spiral.client import Spiral
+     from spiral.key_space_index import KeySpaceIndex
+     from spiral.streaming_ import SpiralStream
+
+
+ class Table(Expr):
+     """API for interacting with a SpiralDB Table.
+
+     A Spiral Table is a powerful and flexible way to store, analyze,
+     and query massive and/or multimodal datasets. The data model will feel familiar
+     to users of SQL- or DataFrame-style systems, yet it is designed to be more flexible, more powerful,
+     and more useful in the context of modern data processing.
+
+     Tables are stored and queried directly from object storage.
+     """
+
+     def __init__(self, spiral: "Spiral", core: CoreTable, *, identifier: str | None = None):
+         super().__init__(core.__expr__)
+
+         self.spiral = spiral
+         self.core = core
+
+         self._key_schema = core.key_schema
+         self._key_columns = set(self._key_schema.names)
+         self._identifier = identifier
+
+     @property
+     def table_id(self) -> str:
+         return self.core.id
+
+     @property
+     def identifier(self) -> str:
+         """Returns the fully qualified identifier of the table."""
+         return self._identifier or self.table_id
+
+     @property
+     def project(self) -> str | None:
+         """Returns the project of the table."""
+         if self._identifier is None:
+             return None
+         project, _, _ = self._identifier.split(".")
+         return project
+
+     @property
+     def dataset(self) -> str | None:
+         """Returns the dataset of the table."""
+         if self._identifier is None:
+             return None
+         _, dataset, _ = self._identifier.split(".")
+         return dataset
+
+     @property
+     def name(self) -> str | None:
+         """Returns the name of the table."""
+         if self._identifier is None:
+             return None
+         _, _, name = self._identifier.split(".")
+         return name
+
+     def last_modified_at(self) -> int:
+         return self.core.get_wal(asof=None).last_modified_at
+
+     def __str__(self):
+         return self.identifier
+
+     def __repr__(self):
+         return f'Table("{self.identifier}")'
+
+     def __getitem__(self, item: str | int | list[str]) -> Expr:
+         return super().__getitem__(item)
+
+     def select(self, *paths: str, exclude: list[str] | None = None) -> "Expr":
+         return super().select(*paths, exclude=exclude)
+
+     @property
+     def key_schema(self) -> Schema:
+         """Returns the key schema of the table."""
+         return self._key_schema
+
+     def schema(self) -> Schema:
+         """Returns the FULL schema of the table.
+
+         NOTE: This can be expensive for large tables.
+         """
+         return self.core.get_schema(asof=None)
+
+     def write(
+         self,
+         expr: ExprLike,
+         *,
+         partition_size_bytes: int | None = None,
+     ) -> None:
+         """Write an item to the table inside a single transaction.
+
+         :param expr: The expression to write. Must evaluate to a struct array.
+         :param partition_size_bytes: The maximum partition size in bytes.
+         """
+         with self.txn() as txn:
+             txn.write(
+                 expr,
+                 partition_size_bytes=partition_size_bytes,
+             )
+
+     def enrich(
+         self,
+         *projections: ExprLike,
+         where: ExprLike | None = None,
+     ) -> Enrichment:
+         """Returns an Enrichment object that, when applied, produces new columns.
+
+         An Enrichment can be applied in different ways, e.g. distributed.
+
+         :param projections: Projection expressions deriving new columns to write back.
+             Expressions can span multiple Spiral tables, but all tables, including
+             this one, must share the same key schema.
+         :param where: Optional filter expression to apply when reading the input tables.
+         """
+         from spiral import expressions as se
+
+         projection = se.merge(*projections)
+         if where is not None:
+             where = se.lift(where)
+
+         return Enrichment(self, projection, where)
+
+     def drop_columns(self, column_paths: list[str]) -> None:
+         """
+         Drops the specified columns from the table.
+
+
+         :param column_paths: Fully qualified column names (e.g., "column_name" or "nested.field").
+             All columns must exist; if a column does not exist, an error is raised.
+         """
+         with self.txn() as txn:
+             txn.drop_columns(column_paths)
+
+     def snapshot(self, asof: datetime | int | None = None) -> Snapshot:
+         """Returns a snapshot of the table at the given timestamp."""
+         if isinstance(asof, datetime):
+             asof = int(asof.timestamp() * 1_000_000)
+         return Snapshot(self, self.core.get_snapshot(asof=asof))
+
+     def txn(self, retries: int | None = 3) -> Transaction:
+         """Begins a new transaction. A transaction must be committed for writes to become visible.
+
+         :param retries: Maximum number of retry attempts on conflict (default: 3). Set to None for a single attempt.
+
+         While a transaction can be used to atomically write data to the table,
+         the primary key columns must be unique within the transaction.
+         The behavior is undefined if this is not the case.
+         """
+         return Transaction(self.spiral.core.transaction(self.core, settings().file_format, retries=retries))
+
+     def to_dataset(self) -> "ds.Dataset":
+         """Returns a PyArrow Dataset representing the table."""
+         return self.snapshot().to_dataset()
+
+     def to_polars(self) -> "pl.LazyFrame":
+         """Returns a Polars LazyFrame for the Spiral table."""
+         return self.snapshot().to_polars()
+
+     def to_duckdb(self) -> "duckdb.DuckDBPyRelation":
+         """Returns a DuckDB relation for the Spiral table."""
+         return self.snapshot().to_duckdb()
+
+     def to_streaming(
+         self,
+         index: "KeySpaceIndex",
+         *,
+         projection: Expr | None = None,
+         cache_dir: str | None = None,
+         shard_row_block_size: int | None = None,
+     ) -> "SpiralStream":
+         """Returns a stream to be used with MosaicML's StreamingDataset.
+
+         Requires the `streaming` package to be installed.
+
+         Args:
+             index: Prebuilt KeySpaceIndex to use when creating the stream.
+                 The index's `asof` will be used when scanning.
+             projection: Optional projection to use when scanning the table instead of the index's projection.
+                 The projection must be compatible with the index's projection for correctness.
+             cache_dir: Directory to use for caching data. If None, a temporary directory will be used.
+             shard_row_block_size: Number of rows per segment of a shard file. Defaults to 8192.
+                 Use a lower value for larger rows.
+         """
+         from spiral.streaming_ import SpiralStream
+
+         if index.table_id != self.table_id:
+             raise ValueError("Index must be built on the same table as the scan.")
+         if index.asof == 0:
+             raise ValueError("Index must be synced before it can be used.")
+
+         # The table referenced by the projection is known to the session, since this method is defined on it.
+         scan = self.spiral.scan(
+             projection if projection is not None else index.projection,
+             where=index.filter,
+             asof=index.asof,
+         )
+         shards = self.spiral.internal.key_space_index_shards(index=index.core)
+
+         return SpiralStream(
+             sp=self.spiral,
+             scan=scan,
+             shards=shards,
+             cache_dir=cache_dir,
+             shard_row_block_size=shard_row_block_size,
+         )
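A rough, non-authoritative sketch of how the Table API above fits together: Table.write wraps a single transaction, snapshot() pins a point in time, and to_polars()/to_duckdb()/to_dataset() all read through a snapshot. The client construction, the project/table lookup, and whether a plain dict of columns is accepted as an ExprLike are assumptions not confirmed by this diff; the identifiers are hypothetical.

    from datetime import datetime

    from spiral import Spiral  # assumed to be re-exported from spiral/__init__.py

    sp = Spiral()
    # Hypothetical lookup; the actual client/project API lives in spiral/client.py and spiral/project.py.
    table = sp.project("my-project").table("my-dataset.my-table")

    # write() opens a transaction, writes the evaluated expression, and commits.
    # Passing a dict of columns as an ExprLike is an assumption here.
    table.write({"id": [1, 2, 3], "body": ["a", "b", "c"]})

    # Reads go through a snapshot pinned at an asof timestamp.
    snap = table.snapshot(asof=datetime.now())
    df = snap.to_polars().collect()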
spiral/text_index.py ADDED
@@ -0,0 +1,17 @@
+ from spiral.core.client import TextIndex as CoreTextIndex
+ from spiral.expressions import Expr
+
+
+ class TextIndex(Expr):
+     def __init__(self, core: CoreTextIndex, *, name: str | None = None):
+         super().__init__(core.__expr__)
+         self.core = core
+         self._name = name
+
+     @property
+     def index_id(self) -> str:
+         return self.core.id
+
+     @property
+     def name(self) -> str:
+         return self._name or self.index_id
spiral/transaction.py ADDED
@@ -0,0 +1,155 @@
+ import logging
+ from pathlib import Path
+
+ from spiral.core.table import KeyRange
+ from spiral.core.table import Transaction as CoreTransaction
+ from spiral.core.table.spec import Operation
+ from spiral.expressions.base import ExprLike
+ from spiral.scan import Scan
+
+ logger = logging.getLogger(__name__)
+
+
+ class Transaction:
+     """Spiral table transaction.
+
+     While a transaction can be used to atomically write data to the table,
+     the primary key columns must be unique within the transaction.
+     """
+
+     def __init__(self, core: CoreTransaction):
+         self._core = core
+
+     @property
+     def status(self) -> str:
+         """The status of the transaction."""
+         return self._core.status
+
+     def is_empty(self) -> bool:
+         """Check if the transaction has no operations."""
+         return self._core.is_empty()
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, exc_type, exc_value, traceback):
+         if exc_type is None:
+             self._core.commit()
+         else:
+             self._core.abort()
+
+     def write(self, expr: ExprLike, *, partition_size_bytes: int | None = None):
+         """Write an item to the table inside a single transaction.
+
+         :param expr: The expression to write. Must evaluate to a struct array.
+         :param partition_size_bytes: The maximum partition size in bytes.
+             If not provided, the default partition size is used.
+         """
+         from spiral import expressions as se
+
+         record_batches = se.evaluate(expr)
+
+         self._core.write(record_batches, partition_size_bytes=partition_size_bytes)
+
+     def writeback(
+         self,
+         scan: Scan,
+         *,
+         key_range: KeyRange | None = None,
+         partition_size_bytes: int | None = None,
+         batch_readahead: int | None = None,
+     ):
+         """Write back the results of a scan to the table.
+
+         :param scan: The scan to write back.
+             The scan does NOT need to be over the same table as the transaction,
+             but it does need to have the same key schema.
+         :param key_range: Optional key range to limit the writeback to.
+         :param partition_size_bytes: The maximum partition size in bytes.
+         :param batch_readahead: The number of batches to read ahead when evaluating the scan.
+         """
+         self._core.writeback(
+             scan.core, key_range=key_range, partition_size_bytes=partition_size_bytes, batch_readahead=batch_readahead
+         )
+
+     def drop_columns(self, column_paths: list[str]):
+         """
+         Drops the specified columns from the table.
+
+
+         :param column_paths: Fully qualified column names (e.g., "column_name" or "nested.field").
+             All columns must exist; if a column does not exist, an error is raised.
+         """
+         self._core.drop_columns(column_paths)
+
+     def take(self) -> list[Operation]:
+         """Take the operations from the transaction.
+
+         The transaction can no longer be committed or aborted after calling this method.
+         """
+         return self._core.take()
+
+     def include(self, ops: list[Operation]):
+         """Include the given operations in the transaction.
+
+         Checks for conflicts between the included operations and any existing operations.
+         """
+         self._core.include(ops)
+
+     def commit(self, *, txn_dump: str | None = None, compact: bool = False, manifest_rows: int | None = None):
+         """Commit the transaction."""
+         if txn_dump is not None:
+             try:
+                 # Create parent directories if they don't exist.
+                 dump_path = Path(txn_dump)
+                 dump_path.parent.mkdir(parents=True, exist_ok=True)
+
+                 # Write operations to a JSONL file.
+                 with open(dump_path, "w") as f:
+                     for op in self._core.ops():
+                         f.write(op.to_json() + "\n")
+
+                 logger.info(f"Transaction dumped to {txn_dump}")
+             except Exception as e:
+                 logger.error(f"Failed to dump transaction to {txn_dump}: {e}")
+
+         self._core.commit(compact=compact, manifest_rows=manifest_rows)
+
+     @staticmethod
+     def load_dumps(*txn_dump: str) -> list[Operation]:
+         """Load operations from one or more transaction dump files."""
+         import json
+
+         dumps = list(txn_dump)
+         ops: list[Operation] = []
+
+         for dump in dumps:
+             with open(dump) as f:
+                 lines = f.readlines()
+
+             for line in lines:
+                 line = line.strip()
+                 if not line:
+                     continue
+
+                 # Each line may contain multiple JSON objects concatenated together.
+                 # This is due to a bug in the dump writing code.
+                 # Use JSONDecoder to parse them one by one.
+                 decoder = json.JSONDecoder()
+                 idx = 0
+                 while idx < len(line):
+                     try:
+                         obj, end_idx = decoder.raw_decode(line, idx)
+                         ops.append(Operation.from_json(json.dumps(obj)))
+                         idx = end_idx
+                         # Skip whitespace between JSON objects.
+                         while idx < len(line) and line[idx].isspace():
+                             idx += 1
+                     except json.JSONDecodeError as e:
+                         raise ValueError(f"Failed to parse JSON at position {idx} in line: {line}") from e
+
+         return ops
+
+     def abort(self):
+         """Abort the transaction."""
+         self._core.abort()
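A minimal sketch of how the Transaction API above composes, based only on the code shown. It assumes a `table` obtained through the Table API earlier in this diff; `some_struct_expr` is a hypothetical ExprLike that evaluates to a struct array.

    from spiral.transaction import Transaction

    # Context-manager form: __exit__ commits on success and aborts on an exception.
    with table.txn(retries=3) as txn:
        txn.write(some_struct_expr)

    # Manual form: dump committed operations to a JSONL file, then replay them elsewhere.
    txn = table.txn()
    txn.write(some_struct_expr)
    txn.commit(txn_dump="/tmp/txn.jsonl")

    ops = Transaction.load_dumps("/tmp/txn.jsonl")
    replay = table.txn()
    replay.include(ops)  # checks for conflicts with any existing operations
    replay.commit()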
spiral/types_.py ADDED
@@ -0,0 +1,6 @@
+ from typing import Annotated, TypeAlias
+
+ from pydantic import UrlConstraints
+
+ Uri: TypeAlias = Annotated[str, UrlConstraints()]
+ Timestamp: TypeAlias = int