pyspiral 0.8.9__cp311-abi3-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. pyspiral-0.8.9.dist-info/METADATA +53 -0
  2. pyspiral-0.8.9.dist-info/RECORD +114 -0
  3. pyspiral-0.8.9.dist-info/WHEEL +4 -0
  4. pyspiral-0.8.9.dist-info/entry_points.txt +3 -0
  5. spiral/__init__.py +55 -0
  6. spiral/_lib.abi3.so +0 -0
  7. spiral/adbc.py +411 -0
  8. spiral/api/__init__.py +78 -0
  9. spiral/api/admin.py +15 -0
  10. spiral/api/client.py +165 -0
  11. spiral/api/filesystems.py +152 -0
  12. spiral/api/key_space_indexes.py +23 -0
  13. spiral/api/organizations.py +78 -0
  14. spiral/api/projects.py +219 -0
  15. spiral/api/telemetry.py +19 -0
  16. spiral/api/text_indexes.py +56 -0
  17. spiral/api/types.py +23 -0
  18. spiral/api/workers.py +40 -0
  19. spiral/api/workloads.py +52 -0
  20. spiral/arrow_.py +202 -0
  21. spiral/cli/__init__.py +89 -0
  22. spiral/cli/__main__.py +4 -0
  23. spiral/cli/admin.py +33 -0
  24. spiral/cli/app.py +108 -0
  25. spiral/cli/console.py +95 -0
  26. spiral/cli/fs.py +109 -0
  27. spiral/cli/iceberg.py +97 -0
  28. spiral/cli/key_spaces.py +103 -0
  29. spiral/cli/login.py +25 -0
  30. spiral/cli/orgs.py +81 -0
  31. spiral/cli/printer.py +53 -0
  32. spiral/cli/projects.py +148 -0
  33. spiral/cli/state.py +7 -0
  34. spiral/cli/tables.py +225 -0
  35. spiral/cli/telemetry.py +17 -0
  36. spiral/cli/text.py +115 -0
  37. spiral/cli/types.py +50 -0
  38. spiral/cli/workloads.py +86 -0
  39. spiral/client.py +279 -0
  40. spiral/core/__init__.pyi +0 -0
  41. spiral/core/_tools/__init__.pyi +5 -0
  42. spiral/core/authn/__init__.pyi +21 -0
  43. spiral/core/client/__init__.pyi +270 -0
  44. spiral/core/config/__init__.pyi +35 -0
  45. spiral/core/expr/__init__.pyi +15 -0
  46. spiral/core/expr/images/__init__.pyi +3 -0
  47. spiral/core/expr/list_/__init__.pyi +4 -0
  48. spiral/core/expr/pushdown/__init__.pyi +3 -0
  49. spiral/core/expr/refs/__init__.pyi +4 -0
  50. spiral/core/expr/s3/__init__.pyi +3 -0
  51. spiral/core/expr/str_/__init__.pyi +3 -0
  52. spiral/core/expr/struct_/__init__.pyi +6 -0
  53. spiral/core/expr/text/__init__.pyi +5 -0
  54. spiral/core/expr/udf/__init__.pyi +14 -0
  55. spiral/core/expr/video/__init__.pyi +3 -0
  56. spiral/core/table/__init__.pyi +142 -0
  57. spiral/core/table/manifests/__init__.pyi +35 -0
  58. spiral/core/table/metastore/__init__.pyi +58 -0
  59. spiral/core/table/spec/__init__.pyi +214 -0
  60. spiral/dataloader.py +310 -0
  61. spiral/dataset.py +264 -0
  62. spiral/datetime_.py +27 -0
  63. spiral/debug/__init__.py +0 -0
  64. spiral/debug/manifests.py +103 -0
  65. spiral/debug/metrics.py +56 -0
  66. spiral/debug/scan.py +266 -0
  67. spiral/demo.py +100 -0
  68. spiral/enrichment.py +290 -0
  69. spiral/expressions/__init__.py +274 -0
  70. spiral/expressions/base.py +186 -0
  71. spiral/expressions/file.py +17 -0
  72. spiral/expressions/http.py +17 -0
  73. spiral/expressions/list_.py +77 -0
  74. spiral/expressions/pushdown.py +12 -0
  75. spiral/expressions/s3.py +16 -0
  76. spiral/expressions/str_.py +39 -0
  77. spiral/expressions/struct.py +59 -0
  78. spiral/expressions/text.py +62 -0
  79. spiral/expressions/tiff.py +225 -0
  80. spiral/expressions/udf.py +66 -0
  81. spiral/grpc_.py +32 -0
  82. spiral/iceberg.py +31 -0
  83. spiral/iterable_dataset.py +106 -0
  84. spiral/key_space_index.py +44 -0
  85. spiral/project.py +247 -0
  86. spiral/protogen/_/__init__.py +0 -0
  87. spiral/protogen/_/arrow/__init__.py +0 -0
  88. spiral/protogen/_/arrow/flight/__init__.py +0 -0
  89. spiral/protogen/_/arrow/flight/protocol/__init__.py +0 -0
  90. spiral/protogen/_/arrow/flight/protocol/sql/__init__.py +2548 -0
  91. spiral/protogen/_/google/__init__.py +0 -0
  92. spiral/protogen/_/google/protobuf/__init__.py +2310 -0
  93. spiral/protogen/_/message_pool.py +3 -0
  94. spiral/protogen/_/py.typed +0 -0
  95. spiral/protogen/_/scandal/__init__.py +190 -0
  96. spiral/protogen/_/spfs/__init__.py +72 -0
  97. spiral/protogen/_/spql/__init__.py +61 -0
  98. spiral/protogen/_/substrait/__init__.py +6196 -0
  99. spiral/protogen/_/substrait/extensions/__init__.py +169 -0
  100. spiral/protogen/__init__.py +0 -0
  101. spiral/protogen/util.py +41 -0
  102. spiral/py.typed +0 -0
  103. spiral/scan.py +383 -0
  104. spiral/server.py +37 -0
  105. spiral/settings.py +36 -0
  106. spiral/snapshot.py +61 -0
  107. spiral/streaming_/__init__.py +3 -0
  108. spiral/streaming_/reader.py +133 -0
  109. spiral/streaming_/stream.py +156 -0
  110. spiral/substrait_.py +274 -0
  111. spiral/table.py +216 -0
  112. spiral/text_index.py +17 -0
  113. spiral/transaction.py +156 -0
  114. spiral/types_.py +6 -0
spiral/expressions/tiff.py ADDED
@@ -0,0 +1,225 @@
+ import pyarrow as pa
+
+ from spiral.expressions.base import Expr, ExprLike
+
+ _TIFF_RES_DTYPE: pa.DataType = pa.struct(
+     [
+         pa.field("pixels", pa.large_binary()),
+         pa.field("height", pa.uint32()),
+         pa.field("width", pa.uint32()),
+         pa.field("channels", pa.uint8()),
+         pa.field("channel_bit_depth", pa.uint8()),
+     ]
+ )
+
+
+ def read(
+     expr: ExprLike,
+     indexes: ExprLike | int | None = None,
+     window: ExprLike | tuple[tuple[int, int], tuple[int, int]] | None = None,
+     boundless: ExprLike | bool | None = None,
+ ) -> Expr:
+     """
+     Read the referenced cell as a `TIFF` image. Requires `rasterio` to be installed.
+
+     Args:
+         expr: The referenced `TIFF` bytes.
+         indexes: The band indexes to read. Defaults to all bands.
+         window: The window to read, in the format (row_range_tuple, col_range_tuple). Defaults to the full window.
+         boundless: If `True`, windows that extend beyond the dataset's extent
+             are permitted, and partially or completely filled arrays are returned as appropriate.
+
+     Returns:
+         An array where each element is a decoded image with fields:
+             pixels: bytes of shape (channels, height, width).
+             width: Width of the image with type `pa.uint32()`.
+             height: Height of the image with type `pa.uint32()`.
+             channels: Number of channels of the image with type `pa.uint8()`.
+                 If `indexes` is not None, this is the length of `indexes`, or 1 if `indexes` is an int.
+             channel_bit_depth: Bit depth of each channel with type `pa.uint8()`.
+     """
+     try:
+         import rasterio  # noqa: F401
+     except ImportError:
+         raise ImportError("`rasterio` is required for tiff.read")
+
+     return TiffReadUDF()(expr, indexes, window, boundless)
+
+
+ def select(
+     expr: ExprLike,
+     shape: ExprLike | dict,
+     indexes: ExprLike | int | None = None,
+ ) -> Expr:
+     """
+     Select the given shape out of the referenced `TIFF` cell. Requires `rasterio` to be installed.
+
+     Args:
+         expr: The referenced `TIFF` bytes.
+         shape: A [GeoJSON-like](https://geojson.org/) shape.
+         indexes: The band indexes to read. Defaults to all bands.
+
+     Returns:
+         An array where each element is a decoded image with fields:
+             pixels: bytes of shape (channels, height, width).
+             width: Width of the image with type `pa.uint32()`.
+             height: Height of the image with type `pa.uint32()`.
+             channels: Number of channels of the image with type `pa.uint8()`.
+                 If `indexes` is not None, this is the length of `indexes`, or 1 if `indexes` is an int.
+             channel_bit_depth: Bit depth of each channel with type `pa.uint8()`.
+     """
+     try:
+         import rasterio  # noqa: F401
+     except ImportError:
+         raise ImportError("`rasterio` is required for tiff.select")
+
+     return TiffSelectUDF()(expr, shape, indexes)
+
+
+ class TiffReadUDF:
+     def __init__(self):
+         super().__init__("tiff.read")
+
+     def return_type(self, *input_types: pa.DataType) -> pa.DataType:
+         return _TIFF_RES_DTYPE
+
+     def invoke(self, fp, *input_args: pa.Array) -> pa.Array:
+         # `fp` is a file-like handle over the referenced TIFF bytes; `input_args`
+         # holds the evaluated argument arrays (expr, indexes, window, boundless).
+         try:
+             import rasterio
+         except ImportError:
+             raise ImportError("`rasterio` is required for tiff.read")
+
+         from rasterio.windows import Window
+
+         if len(input_args) != 4:
+             raise ValueError("tiff.read expects exactly 4 arguments: expr, indexes, window, boundless")
+
+         _, indexes, window, boundless = input_args
+
+         indexes = indexes[0].as_py()
+         if indexes is not None and not isinstance(indexes, int) and not isinstance(indexes, list):
+             raise ValueError(f"tiff.read expects indexes to be None or an int or a list, got {indexes}")
+
+         boundless = boundless[0].as_py()
+         if boundless is not None and not isinstance(boundless, bool):
+             raise ValueError(f"tiff.read expects boundless to be None or a bool, got {boundless}")
+
+         window = window[0].as_py()
+         if window is not None:
+             if len(window) != 2:
+                 raise ValueError(f"tiff.read window invalid, got {window}")
+             window = Window.from_slices(slice(*window[0]), slice(*window[1]), boundless=boundless or False)
+
+         opener = _VsiOpener(fp)
+         with rasterio.open("ref", opener=opener) as src:
+             src: rasterio.DatasetReader
+             # TODO(marko): We know the size and dtype so we should be able to preallocate the result and read into it.
+             #  This matters more if we want to rewrite this function to work with multiple inputs at once, in which
+             #  case we should first consider using Rust GDAL bindings - I believe rasterio uses GDAL under the hood.
+             result = src.read(indexes=indexes, window=window)
+             return _return_result(result, indexes)
+
+
+ class TiffSelectUDF:
+     def __init__(self):
+         super().__init__("tiff.select")
+
+     def return_type(self, *input_types: pa.DataType) -> pa.DataType:
+         return _TIFF_RES_DTYPE
+
+     def invoke(self, fp, *input_args: pa.Array) -> pa.Array:
+         # `fp` is a file-like handle over the referenced TIFF bytes; `input_args`
+         # holds the evaluated argument arrays (expr, shape, indexes).
+         try:
+             import rasterio
+         except ImportError:
+             raise ImportError("`rasterio` is required for tiff.select")
+
+         from rasterio.mask import raster_geometry_mask
+
+         if len(input_args) != 3:
+             raise ValueError("tiff.select expects exactly 3 arguments: expr, shape, indexes")
+
+         _, shape, indexes = input_args
+
+         shape = shape[0].as_py()
+         if shape is None:
+             raise ValueError("tiff.select expects shape to be a GeoJSON-like shape")
+
+         indexes = indexes[0].as_py()
+         if indexes is not None and not isinstance(indexes, int) and not isinstance(indexes, list):
+             raise ValueError(f"tiff.select expects indexes to be None or an int or a list, got {indexes}")
+
+         opener = _VsiOpener(fp)
+         with rasterio.open("ref", opener=opener) as src:
+             src: rasterio.DatasetReader
+
+             shape_mask, _, window = raster_geometry_mask(src, [shape], crop=True)
+             out_shape = (src.count,) + shape_mask.shape
+
+             result = src.read(window=window, indexes=indexes, out_shape=out_shape, masked=True)
+             return _return_result(result, indexes)
+
+
+ def _return_result(result, indexes) -> pa.Array:
+     import numpy as np
+
+     result: np.ndarray
+
+     channels = result.shape[0]
+     if indexes is None:
+         pass
+     elif isinstance(indexes, int):
+         assert channels == 1, f"Expected 1 channel, got {channels}"
+     else:
+         assert channels == len(indexes), f"Expected {len(indexes)} channels, got {channels}"
+
+     if result.dtype == np.uint8:
+         channel_bit_depth = 8
+     elif result.dtype == np.uint16:
+         channel_bit_depth = 16
+     else:
+         raise ValueError(f"Unsupported bit width: {result.dtype}")
+
+     return pa.array(
+         [
+             {
+                 "pixels": result.tobytes(),
+                 "height": result.shape[1],
+                 "width": result.shape[2],
+                 "channels": channels,
+                 "channel_bit_depth": channel_bit_depth,
+             }
+         ],
+         type=_TIFF_RES_DTYPE,
+     )
+
+
+ class _VsiOpener:
+     """
+     VSI file opener which returns a constant file-like on open.
+
+     Must match https://rasterio.readthedocs.io/en/stable/topics/vsi.html#python-file-and-filesystem-openers spec but
+     only `open` is needed when going through rasterio.
+     """
+
+     def __init__(self, file_like):
+         self._file_like = file_like
+
+     def open(self, _path, mode):
+         if mode not in {"r", "rb"}:
+             raise ValueError(f"Unsupported mode: {mode}")
+         return self._file_like
+
+     def isdir(self, _):
+         return False
+
+     def isfile(self, _):
+         return False
+
+     def mtime(self, _):
+         return 0
+
+     def size(self, _):
+         return self._file_like.size()
+
+     def modified(self, _):
+         raise NotImplementedError
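
A minimal usage sketch for the `read` function above. The table expression and column names are illustrative, and it assumes this module is importable as `spiral.expressions.tiff`; only the `read` signature comes from the source.

```python
# Hypothetical example: decode a 256x256 window of band 1 from referenced TIFF bytes.
from spiral.expressions import tiff

img = tiff.read(
    table["refs"]["tiff"],        # expression over referenced TIFF bytes (illustrative)
    indexes=1,                    # a single band, so the result has channels == 1
    window=((0, 256), (0, 256)),  # (row_range_tuple, col_range_tuple)
)
# Each element of `img` is a struct with pixels, height, width, channels, channel_bit_depth.
```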
spiral/expressions/udf.py ADDED
@@ -0,0 +1,66 @@
+ import abc
+
+ import pyarrow as pa
+
+ from spiral import _lib
+ from spiral.expressions.base import Expr, ExprLike
+
+
+ class UDF(abc.ABC):
+     """A User-Defined Function (UDF). This class should be subclassed to define custom UDFs.
+
+     Example:
+
+     ```python
+     import spiral
+     from spiral.demo import fineweb
+
+     sp = spiral.Spiral()
+     fineweb_table = fineweb(sp)
+
+     from spiral import expressions as se
+     import pyarrow as pa
+
+     class MyAdd(se.UDF):
+         def __init__(self):
+             super().__init__("my_add")
+
+         def return_type(self, scope: pa.DataType):
+             if not isinstance(scope, pa.StructType):
+                 raise ValueError("Expected struct type as input")
+             return scope.field(0).type
+
+         def invoke(self, scope: pa.Array):
+             if not isinstance(scope, pa.StructArray):
+                 raise ValueError("Expected struct array as input")
+             return pa.compute.add(scope.field(0), scope.field(1))
+
+     my_add = MyAdd()
+
+     expr = my_add(fineweb_table.select("first_arg", "second_arg"))
+     ```
+     """
+
+     def __init__(self, name: str):
+         self._udf = _lib.expr.udf.create(name, return_type=self.return_type, invoke=self.invoke)
+
+     def __call__(self, scope: ExprLike) -> Expr:
+         """Create an expression that calls this UDF with the given arguments."""
+         from spiral import expressions as se
+
+         return Expr(self._udf(se.lift(scope).__expr__))
+
+     @abc.abstractmethod
+     def return_type(self, scope: pa.DataType) -> pa.DataType:
+         """Must return the return type of the UDF given the input scope type.
+
+         All expressions in Spiral must return nullable (Arrow default) types,
+         including nested structs, meaning that all fields in structs must also be nullable,
+         and if those fields are structs, their fields must also be nullable, and so on.
+         """
+         ...
+
+     @abc.abstractmethod
+     def invoke(self, scope: pa.Array) -> pa.Array:
+         """Must implement the UDF logic given the input scope array."""
+         ...
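
The nullability rule in `return_type` is easy to trip over, so here is a hedged sketch of a UDF that returns a struct and leaves every field nullable (Arrow's default). The class and field names are invented for illustration; only `se.UDF`, `return_type`, and `invoke` come from the source above.

```python
import pyarrow as pa
from spiral import expressions as se

class Stats(se.UDF):
    def __init__(self):
        super().__init__("stats")

    def return_type(self, scope: pa.DataType) -> pa.DataType:
        # All fields nullable (the pa.field default), per the rule documented above.
        return pa.struct([pa.field("n", pa.int64()), pa.field("label", pa.string())])

    def invoke(self, scope: pa.Array) -> pa.Array:
        # One output row per input row, typed with the declared return type.
        n = len(scope)
        return pa.array([{"n": n, "label": "ok"}] * n, type=self.return_type(scope.type))
```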
spiral/grpc_.py ADDED
@@ -0,0 +1,32 @@
+ from collections.abc import AsyncIterator, Awaitable, Callable
+ from typing import TypeVar
+
+ R = TypeVar("R")
+ T = TypeVar("T")
+
+
+ async def paged(stub_fn: Callable[[R], Awaitable[T]], request: R, page_size: int | None = None) -> AsyncIterator[T]:
+     """Page through a gRPC paged API.
+
+     Assumes fields exist as per https://cloud.google.com/apis/design/design_patterns#list_pagination
+     """
+     next_page_token: str | None = None
+     while True:
+         request.page_size = page_size
+         request.page_token = next_page_token
+         res = await stub_fn(request)
+         if not res.next_page_token:
+             # No more items.
+             yield res
+             break
+
+         next_page_token = res.next_page_token
+         yield res
+
+
+ async def paged_items(
+     stub_fn: Callable[[R], Awaitable[T]], request: R, collection_name: str, page_size: int | None = None
+ ) -> AsyncIterator:
+     async for page in paged(stub_fn, request, page_size=page_size):
+         for item in getattr(page, collection_name):
+             yield item
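
A usage sketch for `paged_items`. The stub, request, and `projects` field are hypothetical stand-ins for any list method that follows the Google pagination pattern referenced in the docstring.

```python
from spiral.grpc_ import paged_items

async def list_all_projects(stub, request) -> list:
    # Walks ListProjects page by page, collecting items from the `projects` field.
    return [p async for p in paged_items(stub.ListProjects, request, "projects", page_size=100)]
```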
spiral/iceberg.py ADDED
@@ -0,0 +1,31 @@
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from pyiceberg.catalog import Catalog
+
+     from spiral.client import Spiral
+
+
+ class Iceberg:
+     """
+     Apache Iceberg is a powerful open-source table format designed for high-performance data lakes.
+     Iceberg brings reliability, scalability, and advanced features like time travel, schema evolution,
+     and ACID transactions to your warehouse.
+     """
+
+     def __init__(self, spiral: "Spiral"):
+         self._spiral = spiral
+         self._api = self._spiral.api
+
+     def catalog(self) -> "Catalog":
+         """Open the Iceberg catalog."""
+         from pyiceberg.catalog import load_catalog
+
+         return load_catalog(
+             "default",
+             **{
+                 "type": "rest",
+                 "uri": self._spiral.config.server_url + "/iceberg",
+                 "token": self._spiral.authn.token().expose_secret(),
+             },
+         )
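
A sketch of opening the catalog from a client. It assumes the `Spiral` client exposes this class as an `iceberg` accessor (the wiring lives in spiral/client.py, not shown here); `list_namespaces` is standard pyiceberg `Catalog` API.

```python
import spiral

sp = spiral.Spiral()
catalog = sp.iceberg.catalog()    # assumed accessor returning Iceberg(sp).catalog()
print(catalog.list_namespaces())  # browse namespaces exposed over the REST catalog
```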
spiral/iterable_dataset.py ADDED
@@ -0,0 +1,106 @@
+ from collections.abc import Callable, Iterator
+ from typing import TYPE_CHECKING
+
+ import pyarrow as pa
+
+ if TYPE_CHECKING:
+     import datasets.iterable_dataset as hf  # noqa
+     import streaming  # noqa
+     import torch.utils.data as torchdata  # noqa
+
+
+ def _hf_compatible_schema(schema: pa.Schema) -> pa.Schema:
+     """
+     Replace string-view and binary-view columns in the schema with strings/binary.
+     Recursively handles nested types (struct, list, etc).
+     We use this converted schema as Features in the returned Dataset.
+     Remove this method once we have https://github.com/huggingface/datasets/pull/7718
+     """
+
+     def _convert_type(dtype: pa.DataType) -> pa.DataType:
+         if dtype == pa.string_view():
+             return pa.string()
+         elif dtype == pa.binary_view():
+             return pa.binary()
+         elif pa.types.is_struct(dtype):
+             new_fields = [
+                 pa.field(field.name, _convert_type(field.type), nullable=field.nullable, metadata=field.metadata)
+                 for field in dtype
+             ]
+             return pa.struct(new_fields)
+         elif pa.types.is_list(dtype):
+             return pa.list_(_convert_type(dtype.value_type))
+         elif pa.types.is_large_list(dtype):
+             return pa.large_list(_convert_type(dtype.value_type))
+         elif pa.types.is_fixed_size_list(dtype):
+             return pa.list_(_convert_type(dtype.value_type), dtype.list_size)
+         elif pa.types.is_map(dtype):
+             return pa.map_(_convert_type(dtype.key_type), _convert_type(dtype.item_type))
+         else:
+             return dtype
+
+     new_fields = []
+     for field in schema:
+         new_type = _convert_type(field.type)
+         new_fields.append(pa.field(field.name, new_type, nullable=field.nullable, metadata=field.metadata))
+
+     return pa.schema(new_fields)
+
+
+ def to_iterable_dataset(stream: pa.RecordBatchReader) -> "hf.IterableDataset":
+     from datasets import DatasetInfo, Features
+     from datasets.builder import ArrowExamplesIterable
+     from datasets.iterable_dataset import IterableDataset
+
+     def _generate_tables(**kwargs) -> Iterator[tuple[int, pa.Table]]:
+         # This key is unused when training with IterableDataset.
+         # Default implementation returns shard id, e.g. parquet row group id.
+         for i, rb in enumerate(stream):
+             yield i, pa.Table.from_batches([rb], stream.schema)
+
+     # TODO(marko): This is temporary until we stop returning IterableDataset from this function.
+     class _IterableDataset(IterableDataset):
+         # Diff with datasets.iterable_dataset.IterableDataset:
+         # - Removes torch handling which attempts to handle worker processes.
+         # - Assumes arrow iterator.
+         def __iter__(self):
+             from datasets.formatting import get_formatter
+
+             prepared_ex_iterable = self._prepare_ex_iterable_for_iteration()
+             if self._formatting and (prepared_ex_iterable.iter_arrow or self._formatting.is_table):
+                 formatter = get_formatter(self._formatting.format_type, features=self.features)
+                 iterator = prepared_ex_iterable.iter_arrow()
+                 for key, pa_table in iterator:
+                     yield formatter.format_row(pa_table)
+                 return
+
+             for key, example in prepared_ex_iterable:
+                 # no need to format thanks to FormattedExamplesIterable
+                 yield example
+
+         def map(self, *args, **kwargs):
+             # Map constructs a new IterableDataset, so we need to "patch" it
+             base = super().map(*args, **kwargs)
+             if isinstance(base, IterableDataset):
+                 # Patch __iter__ to avoid torch handling
+                 base.__class__ = _IterableDataset  # type: ignore
+             return base
+
+     class _ArrowExamplesIterable(ArrowExamplesIterable):
+         def __init__(self, generate_tables_fn: Callable[..., Iterator[tuple[int, pa.Table]]], features: Features):
+             # NOTE: generate_tables_fn type annotations are wrong, return type must be an iterable of tuples.
+             super().__init__(generate_tables_fn, kwargs={})  # type: ignore
+             self._features = features
+
+         @property
+         def is_typed(self) -> bool:
+             return True
+
+         @property
+         def features(self) -> Features:
+             return self._features
+
+     target_features = Features.from_arrow_schema(_hf_compatible_schema(stream.schema))
+     ex_iterable = _ArrowExamplesIterable(_generate_tables, target_features)
+     info = DatasetInfo(features=target_features)
+     return _IterableDataset(ex_iterable=ex_iterable, info=info)
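
A self-contained sketch of `to_iterable_dataset`. An in-memory table stands in for the `RecordBatchReader` a Spiral scan would produce; everything else is the function's actual interface.

```python
import pyarrow as pa
from spiral.iterable_dataset import to_iterable_dataset

table = pa.table({"text": ["a", "b"], "score": [0.1, 0.2]})
reader = pa.RecordBatchReader.from_batches(table.schema, table.to_batches())

ds = to_iterable_dataset(reader)  # hf.IterableDataset backed by the Arrow stream
for example in ds:
    print(example)  # e.g. {'text': 'a', 'score': 0.1}
```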
spiral/key_space_index.py ADDED
@@ -0,0 +1,44 @@
+ from spiral.core.client import KeySpaceIndex as CoreKeySpaceIndex
+ from spiral.expressions import Expr
+ from spiral.types_ import Timestamp
+
+
+ class KeySpaceIndex:
+     """
+     KeySpaceIndex represents an optionally materialized key space, defined by a projection and a filter over a table.
+     It can be used to efficiently and precisely shard the table for parallel processing or distributed training.
+
+     An index is defined by:
+     - A granularity that defines the target size of key ranges in the index.
+       IMPORTANT: Actual key ranges may be smaller, but will not exceed twice the granularity.
+     - A projection expression that defines which columns are included in the resulting key space.
+     - An optional filter expression that defines which rows are included in the index.
+     """
+
+     def __init__(self, core: CoreKeySpaceIndex, *, name: str | None = None):
+         self.core = core
+         self._name = name
+
+     @property
+     def index_id(self) -> str:
+         return self.core.id
+
+     @property
+     def table_id(self) -> str:
+         return self.core.table_id
+
+     @property
+     def name(self) -> str:
+         return self._name or self.index_id
+
+     @property
+     def asof(self) -> Timestamp:
+         return self.core.asof
+
+     @property
+     def projection(self) -> Expr:
+         return Expr(self.core.projection)
+
+     @property
+     def filter(self) -> Expr | None:
+         return Expr(self.core.filter) if self.core.filter is not None else None
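
A brief sketch of how the wrapper is typically consumed. Obtaining a `CoreKeySpaceIndex` is environment-specific and not shown in this diff; the property accesses below are the class's actual surface.

```python
def describe(idx: "KeySpaceIndex") -> str:
    # Summarize an index: identity, snapshot time, and whether it filters rows.
    filtered = "filtered" if idx.filter is not None else "unfiltered"
    return f"{idx.name} (table={idx.table_id}, asof={idx.asof}, {filtered})"
```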