pyspiral-0.6.3-cp310-abi3-macosx_11_0_arm64.whl → pyspiral-0.6.5-cp310-abi3-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyspiral-0.6.3.dist-info → pyspiral-0.6.5.dist-info}/METADATA +3 -3
- {pyspiral-0.6.3.dist-info → pyspiral-0.6.5.dist-info}/RECORD +34 -32
- {pyspiral-0.6.3.dist-info → pyspiral-0.6.5.dist-info}/WHEEL +1 -1
- spiral/_lib.abi3.so +0 -0
- spiral/api/client.py +1 -1
- spiral/api/filesystems.py +9 -40
- spiral/api/projects.py +9 -2
- spiral/cli/app.py +42 -6
- spiral/cli/fs.py +25 -60
- spiral/cli/login.py +6 -3
- spiral/cli/projects.py +34 -22
- spiral/client.py +2 -2
- spiral/core/_tools/__init__.pyi +5 -0
- spiral/core/authn/__init__.pyi +1 -1
- spiral/core/client/__init__.pyi +14 -3
- spiral/core/table/__init__.pyi +3 -0
- spiral/debug/manifests.py +26 -18
- spiral/expressions/__init__.py +2 -2
- spiral/expressions/base.py +9 -3
- spiral/iterable_dataset.py +106 -0
- spiral/protogen/_/arrow/flight/protocol/sql/__init__.py +1 -1
- spiral/protogen/_/google/protobuf/__init__.py +121 -1
- spiral/protogen/_/scandal/__init__.py +1 -1
- spiral/protogen/_/spfs/__init__.py +1 -1
- spiral/protogen/_/spql/__init__.py +1 -1
- spiral/protogen/_/substrait/__init__.py +2 -2
- spiral/protogen/_/substrait/extensions/__init__.py +1 -1
- spiral/scan.py +22 -34
- spiral/settings.py +3 -1
- spiral/snapshot.py +16 -0
- spiral/streaming_/stream.py +7 -3
- spiral/table.py +53 -94
- spiral/transaction.py +1 -1
- {pyspiral-0.6.3.dist-info → pyspiral-0.6.5.dist-info}/entry_points.txt +0 -0
spiral/core/authn/__init__.pyi
CHANGED
@@ -8,7 +8,7 @@ class Authn:
     @staticmethod
     def from_token(token: Token) -> Authn: ...
     @staticmethod
-    def from_fallback() -> Authn: ...
+    def from_fallback(api_url: str) -> Authn: ...
     @staticmethod
     def from_device() -> Authn: ...
     def token(self) -> Token | None: ...
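A minimal usage sketch of the changed stub, assuming Authn is importable under the path of the stub file above; the URL value is illustrative and not taken from the diff.

    from spiral.core.authn import Authn  # import path taken from the stub file above

    # 0.6.5: from_fallback() now takes the API URL explicitly instead of resolving it itself.
    authn = Authn.from_fallback("https://api.example.com")  # illustrative URL
    token = authn.token()  # Token | None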
spiral/core/client/__init__.pyi
CHANGED
@@ -26,12 +26,12 @@ class Spiral:
         projection: Expr,
         filter: Expr | None = None,
         asof: int | None = None,
-        exclude_keys: bool =
+        exclude_keys: bool | None = None,
     ) -> Scan:
         """Construct a table scan."""
         ...

-    def transaction(self, table: Table, format: str | None = None) -> Transaction:
+    def transaction(self, table: Table, format: str | None = None, retries: int | None = 3) -> Transaction:
         """Being a table transaction."""
         ...

@@ -136,11 +136,18 @@ class ShuffleStrategy:
     # Externally provided shards to shuffle before reading rows.
     shards: list[Shard] | None

+    # Maximum number of rows to return in a single batch.
+    # If None, it is derived from the shuffle buffer size.
+    # IMPORTANT: The returned batch may be smaller than this size.
+    max_batch_size: int | None
+
     def __init__(
         self,
+        shuffle_buffer_size: int,
+        *,
         seed: int | None = None,
-        shuffle_buffer_size: int | None = None,
         shards: list[Shard] | None = None,
+        max_batch_size: int | None = None,
     ): ...

 class Operations:
@@ -205,3 +212,7 @@ class Operations:
         """
         ...
     def metrics(self) -> dict[str, Any]: ...
+
+def flush_telemetry() -> None:
+    """Flush telemetry data to the configured exporter."""
+    ...
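Below is a minimal sketch of constructing the 0.6.5 ShuffleStrategy implied by the stub above: shuffle_buffer_size is now required and positional, the remaining parameters are keyword-only, and max_batch_size is new. The import path follows the stub file; the concrete values are illustrative.

    from spiral.core.client import ShuffleStrategy  # path from spiral/core/client/__init__.pyi

    strategy = ShuffleStrategy(
        65_536,               # shuffle_buffer_size: required and positional in 0.6.5
        seed=42,              # keyword-only from here on
        max_batch_size=1024,  # new: upper bound on rows per batch; batches may be smaller
    )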
spiral/core/table/__init__.pyi
CHANGED
@@ -70,6 +70,9 @@ class Scan:
         self,
         strategy: ShuffleStrategy | None = None,
         batch_readahead: int | None = None,
+        num_workers: int | None = None,
+        worker_id: int | None = None,
+        infinite: bool = False,
     ) -> pa.RecordBatchReader: ...
     def metrics(self) -> dict[str, Any]: ...
     def _prepare_shard(
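The new num_workers/worker_id parameters point at per-worker sharding of the returned reader. A hedged sketch of deriving them from the PyTorch DataLoader worker context follows; the Scan method they feed is the one in the hunk above (its name falls outside the shown lines), so the helper only computes the arguments.

    import torch.utils.data as torchdata

    def scan_worker_args() -> dict[str, int | None]:
        # Assumption, not from the diff: fill num_workers/worker_id from the current
        # DataLoader worker, if the reader is being built inside one.
        info = torchdata.get_worker_info()
        if info is None:
            return {"num_workers": None, "worker_id": None}
        return {"num_workers": info.num_workers, "worker_id": info.id}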
spiral/debug/manifests.py
CHANGED
@@ -1,3 +1,6 @@
+from rich.console import Console
+from rich.table import Table
+
 from spiral import datetime_
 from spiral.core.table import Scan
 from spiral.core.table.manifests import FragmentManifest
@@ -42,23 +45,26 @@ def _table_of_fragments(manifest: FragmentManifest, title: str):
     avg_size = total_size / fragment_count if fragment_count > 0 else 0

     # Print title and summary
-
-    print(
+    console = Console()
+    console.print(f"\n\n{title}")
+    console.print(
         f"{fragment_count} fragments, "
         f"total: {_format_bytes(total_size)}, "
         f"avg: {_format_bytes(int(avg_size))}, "
         f"metadata: {_format_bytes(total_metadata_size)}"
     )
-    print("=" * 120)

-    #
-
-
-
-    )
-
+    # Create rich table
+    table = Table(title=None, show_header=True, header_style="bold")
+    table.add_column("ID", style="cyan", no_wrap=True)
+    table.add_column("Size (Metadata)", justify="right")
+    table.add_column("Format", justify="center")
+    table.add_column("Key Span", justify="center")
+    table.add_column("Level", justify="center")
+    table.add_column("Committed At", justify="center")
+    table.add_column("Compacted At", justify="center")

-    #
+    # Add each fragment as a row
     for fragment in manifest:
         committed_str = str(datetime_.from_timestamp_micros(fragment.committed_at)) if fragment.committed_at else "N/A"
         compacted_str = str(datetime_.from_timestamp_micros(fragment.compacted_at)) if fragment.compacted_at else "N/A"
@@ -68,12 +74,14 @@ def _table_of_fragments(manifest: FragmentManifest, title: str):
         )
         key_span = f"{fragment.key_span.begin}..{fragment.key_span.end}"

-
-
-
-
-
-
-
-
+        table.add_row(
+            fragment.id,
+            size_with_metadata,
+            str(fragment.format),
+            key_span,
+            str(fragment.level),
+            committed_str,
+            compacted_str,
         )
+
+    console.print(table)
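The rewrite above swaps manual print formatting for rich. For reference, a self-contained sketch of the same Console/Table pattern (column names reused from the diff, row values are dummies):

    from rich.console import Console
    from rich.table import Table

    console = Console()
    table = Table(show_header=True, header_style="bold")
    table.add_column("ID", style="cyan", no_wrap=True)
    table.add_column("Size (Metadata)", justify="right")
    table.add_row("frag-0001", "12.3 MiB (4.1 KiB)")  # dummy values for illustration
    console.print(table)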
spiral/expressions/__init__.py
CHANGED
@@ -92,7 +92,7 @@ def lift(expr: ExprLike) -> Expr:
         return lift(pa.array(expr))

     # Unpack tables and chunked arrays
-    if isinstance(expr, pa.Table):
+    if isinstance(expr, pa.Table | pa.RecordBatch):
         expr = expr.to_struct_array()
     if isinstance(expr, pa.ChunkedArray):
         expr = expr.combine_chunks()
@@ -104,7 +104,7 @@ def lift(expr: ExprLike) -> Expr:
     if isinstance(expr, pa.StructArray) and expr.null_count != 0:
         # raise ValueError("lift: cannot lift a struct array with nulls.")
         warnings.warn("found a struct array with nulls", stacklevel=2)
-    if isinstance(expr, pa.StructScalar) and not expr.is_valid
+    if isinstance(expr, pa.StructScalar) and not expr.is_valid:
         # raise ValueError("lift: cannot lift a struct scalar with nulls.")
         warnings.warn("found a struct scalar with nulls", stacklevel=2)
     return lift(arrow_.nest_structs(expr))
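A minimal sketch of the widened lift() input: in 0.6.5 a pyarrow RecordBatch is unpacked to a struct array just like a Table. The expressions import alias matches the one used in base.py below.

    import pyarrow as pa
    from spiral import expressions as se

    batch = pa.RecordBatch.from_pydict({"a": [1, 2, 3], "b": ["x", "y", "z"]})
    expr = se.lift(batch)  # 0.6.5: RecordBatch accepted alongside Table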
spiral/expressions/base.py
CHANGED
@@ -26,14 +26,16 @@ class Expr:
     def __repr__(self):
         return repr(self.__expr__)

-    def __getitem__(self, item: str | int) -> "Expr":
+    def __getitem__(self, item: str | int | list[str]) -> "Expr":
         """
         Get an item from a struct or list.

         Args:
             item: The key or index to get.
                 If item is a string, it is assumed to be a field in a struct. Dot-separated string is supported
-                to access nested fields.
+                to access nested fields.
+                If item is a list of strings, it is assumed to be a list of fields in a struct.
+                If item is an integer, it is assumed to be an index in a list.
         """
         from spiral import expressions as se

@@ -42,10 +44,14 @@ class Expr:
         if isinstance(item, int):
             # Assume list and get an element.
             expr = se.list_.element_at(expr, item)
-
+        elif isinstance(item, str):
             # Walk into the struct.
             for part in item.split("."):
                 expr = se.getitem(expr, part)
+        elif isinstance(item, list) and all(isinstance(i, str) for i in item):
+            expr = se.pack({k: expr[k] for k in item})
+        else:
+            raise TypeError(f"Invalid item type: {type(item)}")

         return expr

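A minimal sketch of the new __getitem__ modes, assuming expr is a struct-typed Expr obtained elsewhere (for example from a table projection) and that Expr is re-exported from spiral.expressions; the field names are illustrative.

    from spiral.expressions import Expr  # Expr as defined in base.py above

    def project_user(expr: Expr) -> Expr:
        subset = expr[["name", "age"]]  # new in 0.6.5: pack a subset of struct fields into a new struct
        nested = expr["address.city"]   # dot-separated path into nested structs (unchanged)
        first = expr["tags"][0]         # integer index into a list element (unchanged)
        return subset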
spiral/iterable_dataset.py
ADDED
@@ -0,0 +1,106 @@
+from collections.abc import Callable, Iterator
+from typing import TYPE_CHECKING
+
+import pyarrow as pa
+
+if TYPE_CHECKING:
+    import datasets.iterable_dataset as hf  # noqa
+    import streaming  # noqa
+    import torch.utils.data as torchdata  # noqa
+
+
+def _hf_compatible_schema(schema: pa.Schema) -> pa.Schema:
+    """
+    Replace string-view and binary-view columns in the schema with strings/binary.
+    Recursively handles nested types (struct, list, etc).
+    We use this converted schema as Features in the returned Dataset.
+    Remove this method once we have https://github.com/huggingface/datasets/pull/7718
+    """
+
+    def _convert_type(dtype: pa.DataType) -> pa.DataType:
+        if dtype == pa.string_view():
+            return pa.string()
+        elif dtype == pa.binary_view():
+            return pa.binary()
+        elif pa.types.is_struct(dtype):
+            new_fields = [
+                pa.field(field.name, _convert_type(field.type), nullable=field.nullable, metadata=field.metadata)
+                for field in dtype
+            ]
+            return pa.struct(new_fields)
+        elif pa.types.is_list(dtype):
+            return pa.list_(_convert_type(dtype.value_type))
+        elif pa.types.is_large_list(dtype):
+            return pa.large_list(_convert_type(dtype.value_type))
+        elif pa.types.is_fixed_size_list(dtype):
+            return pa.list_(_convert_type(dtype.value_type), dtype.list_size)
+        elif pa.types.is_map(dtype):
+            return pa.map_(_convert_type(dtype.key_type), _convert_type(dtype.item_type))
+        else:
+            return dtype
+
+    new_fields = []
+    for field in schema:
+        new_type = _convert_type(field.type)
+        new_fields.append(pa.field(field.name, new_type, nullable=field.nullable, metadata=field.metadata))
+
+    return pa.schema(new_fields)
+
+
+def to_iterable_dataset(stream: pa.RecordBatchReader) -> "hf.IterableDataset":
+    from datasets import DatasetInfo, Features
+    from datasets.builder import ArrowExamplesIterable
+    from datasets.iterable_dataset import IterableDataset
+
+    def _generate_tables(**kwargs) -> Iterator[tuple[int, pa.Table]]:
+        # This key is unused when training with IterableDataset.
+        # Default implementation returns shard id, e.g. parquet row group id.
+        for i, rb in enumerate(stream):
+            yield i, pa.Table.from_batches([rb], stream.schema)
+
+    # TODO(marko): This is temporary until we stop returning IterableDataset from this function.
+    class _IterableDataset(IterableDataset):
+        # Diff with datasets.iterable_dataset.IterableDataset:
+        # - Removes torch handling which attempts to handle worker processes.
+        # - Assumes arrow iterator.
+        def __iter__(self):
+            from datasets.formatting import get_formatter
+
+            prepared_ex_iterable = self._prepare_ex_iterable_for_iteration()
+            if self._formatting and (prepared_ex_iterable.iter_arrow or self._formatting.is_table):
+                formatter = get_formatter(self._formatting.format_type, features=self.features)
+                iterator = prepared_ex_iterable.iter_arrow()
+                for key, pa_table in iterator:
+                    yield formatter.format_row(pa_table)
+                return
+
+            for key, example in prepared_ex_iterable:
+                # no need to format thanks to FormattedExamplesIterable
+                yield example
+
+        def map(self, *args, **kwargs):
+            # Map constructs a new IterableDataset, so we need to "patch" it
+            base = super().map(*args, **kwargs)
+            if isinstance(base, IterableDataset):
+                # Patch __iter__ to avoid torch handling
+                base.__class__ = _IterableDataset  # type: ignore
+            return base
+
+    class _ArrowExamplesIterable(ArrowExamplesIterable):
+        def __init__(self, generate_tables_fn: Callable[..., Iterator[tuple[int, pa.Table]]], features: Features):
+            # NOTE: generate_tables_fn type annotations are wrong, return type must be an iterable of tuples.
+            super().__init__(generate_tables_fn, kwargs={})  # type: ignore
+            self._features = features
+
+        @property
+        def is_typed(self) -> bool:
+            return True
+
+        @property
+        def features(self) -> Features:
+            return self._features
+
+    target_features = Features.from_arrow_schema(_hf_compatible_schema(stream.schema))
+    ex_iterable = _ArrowExamplesIterable(_generate_tables, target_features)
+    info = DatasetInfo(features=target_features)
+    return _IterableDataset(ex_iterable=ex_iterable, info=info)
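A minimal sketch of driving the new helper with an in-memory reader (requires the datasets package); in real use the RecordBatchReader would come from a Spiral scan rather than being built by hand.

    import pyarrow as pa
    from spiral.iterable_dataset import to_iterable_dataset  # module added above

    batch = pa.RecordBatch.from_pydict({"text": ["a", "b"], "label": [0, 1]})
    reader = pa.RecordBatchReader.from_batches(batch.schema, [batch])

    ds = to_iterable_dataset(reader)  # datasets.IterableDataset backed by the Arrow stream
    for example in ds:
        print(example["text"], example["label"])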
spiral/protogen/_/google/protobuf/__init__.py
CHANGED
@@ -21,12 +21,15 @@ __all__ = (
     "FeatureSet",
     "FeatureSetDefaults",
     "FeatureSetDefaultsFeatureSetEditionDefault",
+    "FeatureSetEnforceNamingStyle",
     "FeatureSetEnumType",
     "FeatureSetFieldPresence",
     "FeatureSetJsonFormat",
     "FeatureSetMessageEncoding",
     "FeatureSetRepeatedFieldEncoding",
     "FeatureSetUtf8Validation",
+    "FeatureSetVisibilityFeature",
+    "FeatureSetVisibilityFeatureDefaultSymbolVisibility",
     "FieldDescriptorProto",
     "FieldDescriptorProtoLabel",
     "FieldDescriptorProtoType",
@@ -54,6 +57,7 @@ __all__ = (
     "ServiceOptions",
     "SourceCodeInfo",
     "SourceCodeInfoLocation",
+    "SymbolVisibility",
     "UninterpretedOption",
     "UninterpretedOptionNamePart",
 )
@@ -66,7 +70,7 @@ import betterproto2

 from ...message_pool import default_message_pool

-_COMPILER_VERSION = "0.
+_COMPILER_VERSION = "0.9.0"
 betterproto2.check_compiler_version(_COMPILER_VERSION)


@@ -174,6 +178,14 @@ class ExtensionRangeOptionsVerificationState(betterproto2.Enum):
     UNVERIFIED = 1


+class FeatureSetEnforceNamingStyle(betterproto2.Enum):
+    ENFORCE_NAMING_STYLE_UNKNOWN = 0
+
+    STYLE2024 = 1
+
+    STYLE_LEGACY = 2
+
+
 class FeatureSetEnumType(betterproto2.Enum):
     ENUM_TYPE_UNKNOWN = 0

@@ -224,6 +236,32 @@ class FeatureSetUtf8Validation(betterproto2.Enum):
     NONE = 3


+class FeatureSetVisibilityFeatureDefaultSymbolVisibility(betterproto2.Enum):
+    DEFAULT_SYMBOL_VISIBILITY_UNKNOWN = 0
+
+    EXPORT_ALL = 1
+    """
+    Default pre-EDITION_2024, all UNSET visibility are export.
+    """
+
+    EXPORT_TOP_LEVEL = 2
+    """
+    All top-level symbols default to export, nested default to local.
+    """
+
+    LOCAL_ALL = 3
+    """
+    All symbols default to local.
+    """
+
+    STRICT = 4
+    """
+    All symbols local by default. Nested types cannot be exported.
+    With special case caveat for message { enum {} reserved 1 to max; }
+    This is the recommended setting for new protos.
+    """
+
+
 class FieldDescriptorProtoLabel(betterproto2.Enum):
     OPTIONAL = 1
     """
@@ -512,6 +550,22 @@ class MethodOptionsIdempotencyLevel(betterproto2.Enum):
     """


+class SymbolVisibility(betterproto2.Enum):
+    """
+    Describes the 'visibility' of a symbol with respect to the proto import
+    system. Symbols can only be imported when the visibility rules do not prevent
+    it (ex: local symbols cannot be imported). Visibility modifiers can only set
+    on `message` and `enum` as they are the only types available to be referenced
+    from other files.
+    """
+
+    VISIBILITY_UNSET = 0
+
+    VISIBILITY_LOCAL = 1
+
+    VISIBILITY_EXPORT = 2
+
+
 @dataclass(eq=False, repr=False)
 class Any(betterproto2.Message):
     """
@@ -744,6 +798,13 @@ class DescriptorProto(betterproto2.Message):
     A given name may only be reserved once.
     """

+    visibility: "SymbolVisibility" = betterproto2.field(
+        11, betterproto2.TYPE_ENUM, default_factory=lambda: SymbolVisibility(0)
+    )
+    """
+    Support for `export` and `local` keywords on enums.
+    """
+

 default_message_pool.register_message("google.protobuf", "DescriptorProto", DescriptorProto)

@@ -835,6 +896,13 @@ class EnumDescriptorProto(betterproto2.Message):
     be reserved once.
     """

+    visibility: "SymbolVisibility" = betterproto2.field(
+        6, betterproto2.TYPE_ENUM, default_factory=lambda: SymbolVisibility(0)
+    )
+    """
+    Support for `export` and `local` keywords on enums.
+    """
+

 default_message_pool.register_message("google.protobuf", "EnumDescriptorProto", EnumDescriptorProto)

@@ -895,6 +963,9 @@ class EnumOptions(betterproto2.Message):
     features: "FeatureSet | None" = betterproto2.field(7, betterproto2.TYPE_MESSAGE, optional=True)
     """
     Any features defined in the specific edition.
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """

     uninterpreted_option: "list[UninterpretedOption]" = betterproto2.field(
@@ -942,6 +1013,9 @@ class EnumValueOptions(betterproto2.Message):
     features: "FeatureSet | None" = betterproto2.field(2, betterproto2.TYPE_MESSAGE, optional=True)
     """
     Any features defined in the specific edition.
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """

     debug_redact: "bool" = betterproto2.field(3, betterproto2.TYPE_BOOL)
@@ -1082,10 +1156,26 @@ class FeatureSet(betterproto2.Message):
         6, betterproto2.TYPE_ENUM, default_factory=lambda: FeatureSetJsonFormat(0)
     )

+    enforce_naming_style: "FeatureSetEnforceNamingStyle" = betterproto2.field(
+        7, betterproto2.TYPE_ENUM, default_factory=lambda: FeatureSetEnforceNamingStyle(0)
+    )
+
+    default_symbol_visibility: "FeatureSetVisibilityFeatureDefaultSymbolVisibility" = betterproto2.field(
+        8, betterproto2.TYPE_ENUM, default_factory=lambda: FeatureSetVisibilityFeatureDefaultSymbolVisibility(0)
+    )
+

 default_message_pool.register_message("google.protobuf", "FeatureSet", FeatureSet)


+@dataclass(eq=False, repr=False)
+class FeatureSetVisibilityFeature(betterproto2.Message):
+    pass
+
+
+default_message_pool.register_message("google.protobuf", "FeatureSet.VisibilityFeature", FeatureSetVisibilityFeature)
+
+
 @dataclass(eq=False, repr=False)
 class FeatureSetDefaults(betterproto2.Message):
     """
@@ -1340,6 +1430,9 @@ class FieldOptions(betterproto2.Message):
     features: "FeatureSet | None" = betterproto2.field(21, betterproto2.TYPE_MESSAGE, optional=True)
     """
     Any features defined in the specific edition.
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """

     feature_support: "FieldOptionsFeatureSupport | None" = betterproto2.field(
@@ -1438,6 +1531,12 @@ class FileDescriptorProto(betterproto2.Message):
     For Google-internal migration only. Do not use.
     """

+    option_dependency: "list[str]" = betterproto2.field(15, betterproto2.TYPE_STRING, repeated=True)
+    """
+    Names of files imported by this file purely for the purpose of providing
+    option extensions. These are excluded from the dependency list above.
+    """
+
     message_type: "list[DescriptorProto]" = betterproto2.field(4, betterproto2.TYPE_MESSAGE, repeated=True)
     """
     All top-level definitions in this file.
@@ -1465,11 +1564,17 @@ class FileDescriptorProto(betterproto2.Message):
     The supported values are "proto2", "proto3", and "editions".

     If `edition` is present, this value must be "editions".
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """

     edition: "Edition" = betterproto2.field(14, betterproto2.TYPE_ENUM, default_factory=lambda: Edition(0))
     """
     The edition of the proto file.
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """


@@ -1665,6 +1770,9 @@ class FileOptions(betterproto2.Message):
     features: "FeatureSet | None" = betterproto2.field(50, betterproto2.TYPE_MESSAGE, optional=True)
     """
     Any features defined in the specific edition.
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """

     uninterpreted_option: "list[UninterpretedOption]" = betterproto2.field(
@@ -1817,6 +1925,9 @@ class MessageOptions(betterproto2.Message):
     features: "FeatureSet | None" = betterproto2.field(12, betterproto2.TYPE_MESSAGE, optional=True)
     """
     Any features defined in the specific edition.
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """

     uninterpreted_option: "list[UninterpretedOption]" = betterproto2.field(
@@ -1889,6 +2000,9 @@ class MethodOptions(betterproto2.Message):
     features: "FeatureSet | None" = betterproto2.field(35, betterproto2.TYPE_MESSAGE, optional=True)
     """
     Any features defined in the specific edition.
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """

     uninterpreted_option: "list[UninterpretedOption]" = betterproto2.field(
@@ -1921,6 +2035,9 @@ class OneofOptions(betterproto2.Message):
     features: "FeatureSet | None" = betterproto2.field(1, betterproto2.TYPE_MESSAGE, optional=True)
     """
     Any features defined in the specific edition.
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """

     uninterpreted_option: "list[UninterpretedOption]" = betterproto2.field(
@@ -1955,6 +2072,9 @@ class ServiceOptions(betterproto2.Message):
     features: "FeatureSet | None" = betterproto2.field(34, betterproto2.TYPE_MESSAGE, optional=True)
     """
     Any features defined in the specific edition.
+    WARNING: This field should only be used by protobuf plugins or special
+    cases like the proto compiler. Other uses are discouraged and
+    developers should rely on the protoreflect APIs for their client language.
     """

     deprecated: "bool" = betterproto2.field(33, betterproto2.TYPE_BOOL)
@@ -246,7 +246,7 @@ import betterproto2

 from ..message_pool import default_message_pool

-_COMPILER_VERSION = "0.
+_COMPILER_VERSION = "0.9.0"
 betterproto2.check_compiler_version(_COMPILER_VERSION)


spiral/protogen/_/substrait/__init__.py
CHANGED
@@ -2885,7 +2885,7 @@ class ExpressionMaskExpression(betterproto2.Message):
     """
     A reference that takes an existing subtype and selectively removes fields
     from it. For example, one might initially have an inner struct with 100
-    fields but a
+    fields but a particular operation only needs to interact with only 2 of
     those 100 fields. In this situation, one would use a mask expression to
     eliminate the 98 fields that are not relevant to the rest of the operation
     pipeline.