atdata 0.2.3b1__py3-none-any.whl → 0.3.1b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atdata/.gitignore +1 -0
- atdata/__init__.py +39 -0
- atdata/_cid.py +0 -21
- atdata/_exceptions.py +168 -0
- atdata/_helpers.py +41 -15
- atdata/_hf_api.py +95 -11
- atdata/_logging.py +70 -0
- atdata/_protocols.py +77 -238
- atdata/_schema_codec.py +7 -6
- atdata/_stub_manager.py +5 -25
- atdata/_type_utils.py +28 -2
- atdata/atmosphere/__init__.py +31 -20
- atdata/atmosphere/_types.py +4 -4
- atdata/atmosphere/client.py +64 -12
- atdata/atmosphere/lens.py +11 -12
- atdata/atmosphere/records.py +12 -12
- atdata/atmosphere/schema.py +16 -18
- atdata/atmosphere/store.py +6 -7
- atdata/cli/__init__.py +161 -175
- atdata/cli/diagnose.py +2 -2
- atdata/cli/{local.py → infra.py} +11 -11
- atdata/cli/inspect.py +69 -0
- atdata/cli/preview.py +63 -0
- atdata/cli/schema.py +109 -0
- atdata/dataset.py +583 -328
- atdata/index/__init__.py +54 -0
- atdata/index/_entry.py +157 -0
- atdata/index/_index.py +1198 -0
- atdata/index/_schema.py +380 -0
- atdata/lens.py +9 -2
- atdata/lexicons/__init__.py +121 -0
- atdata/lexicons/ac.foundation.dataset.arrayFormat.json +16 -0
- atdata/lexicons/ac.foundation.dataset.getLatestSchema.json +78 -0
- atdata/lexicons/ac.foundation.dataset.lens.json +99 -0
- atdata/lexicons/ac.foundation.dataset.record.json +96 -0
- atdata/lexicons/ac.foundation.dataset.schema.json +107 -0
- atdata/lexicons/ac.foundation.dataset.schemaType.json +16 -0
- atdata/lexicons/ac.foundation.dataset.storageBlobs.json +24 -0
- atdata/lexicons/ac.foundation.dataset.storageExternal.json +25 -0
- atdata/lexicons/ndarray_shim.json +16 -0
- atdata/local/__init__.py +70 -0
- atdata/local/_repo_legacy.py +218 -0
- atdata/manifest/__init__.py +28 -0
- atdata/manifest/_aggregates.py +156 -0
- atdata/manifest/_builder.py +163 -0
- atdata/manifest/_fields.py +154 -0
- atdata/manifest/_manifest.py +146 -0
- atdata/manifest/_query.py +150 -0
- atdata/manifest/_writer.py +74 -0
- atdata/promote.py +18 -14
- atdata/providers/__init__.py +25 -0
- atdata/providers/_base.py +140 -0
- atdata/providers/_factory.py +69 -0
- atdata/providers/_postgres.py +214 -0
- atdata/providers/_redis.py +171 -0
- atdata/providers/_sqlite.py +191 -0
- atdata/repository.py +323 -0
- atdata/stores/__init__.py +23 -0
- atdata/stores/_disk.py +123 -0
- atdata/stores/_s3.py +349 -0
- atdata/testing.py +341 -0
- {atdata-0.2.3b1.dist-info → atdata-0.3.1b1.dist-info}/METADATA +5 -2
- atdata-0.3.1b1.dist-info/RECORD +67 -0
- atdata/local.py +0 -1720
- atdata-0.2.3b1.dist-info/RECORD +0 -28
- {atdata-0.2.3b1.dist-info → atdata-0.3.1b1.dist-info}/WHEEL +0 -0
- {atdata-0.2.3b1.dist-info → atdata-0.3.1b1.dist-info}/entry_points.txt +0 -0
- {atdata-0.2.3b1.dist-info → atdata-0.3.1b1.dist-info}/licenses/LICENSE +0 -0
atdata/index/_schema.py
ADDED
|
@@ -0,0 +1,380 @@
|
|
|
1
|
+
"""Schema models and helper functions for local storage."""
|
|
2
|
+
|
|
3
|
+
from atdata._type_utils import (
|
|
4
|
+
PRIMITIVE_TYPE_MAP,
|
|
5
|
+
unwrap_optional,
|
|
6
|
+
is_ndarray_type,
|
|
7
|
+
extract_ndarray_dtype,
|
|
8
|
+
parse_semver,
|
|
9
|
+
)
|
|
10
|
+
from atdata._protocols import Packable
|
|
11
|
+
|
|
12
|
+
from dataclasses import dataclass, fields, is_dataclass
|
|
13
|
+
from datetime import datetime, timezone
|
|
14
|
+
from typing import (
|
|
15
|
+
Any,
|
|
16
|
+
Type,
|
|
17
|
+
TypeVar,
|
|
18
|
+
Iterator,
|
|
19
|
+
Optional,
|
|
20
|
+
Literal,
|
|
21
|
+
get_type_hints,
|
|
22
|
+
get_origin,
|
|
23
|
+
get_args,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
T = TypeVar("T", bound=Packable)
|
|
27
|
+
|
|
28
|
+
# URI scheme prefixes
|
|
29
|
+
_ATDATA_URI_PREFIX = "atdata://local/schema/"
|
|
30
|
+
_LEGACY_URI_PREFIX = "local://schemas/"
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class SchemaNamespace:
    """Attribute-style container for schema types loaded through an index.

    Once ``index.load_schema(uri)`` has run, the loaded class is exposed
    here as an attribute. The namespace also behaves like a read-only
    mapping: it supports iteration, ``len()``, and ``in`` membership.

    Examples:
        >>> index.load_schema("atdata://local/schema/MySample@1.0.0")
        >>> MyType = index.types.MySample
        >>> sample = MyType(field1="hello", field2=42)

    Note:
        For full IDE autocomplete, enable ``auto_stubs=True`` and add
        ``index.stub_dir`` to your IDE's extraPaths.
    """

    def __init__(self) -> None:
        self._types: dict[str, Type[Packable]] = {}

    def _register(self, name: str, cls: Type[Packable]) -> None:
        """Expose *cls* under *name* in this namespace."""
        self._types[name] = cls

    def __getattr__(self, name: str) -> Any:
        # Typed as Any so IDEs don't flag dynamically-registered attributes.
        # For full static typing, import from the generated stub module.
        if name.startswith("_"):
            raise AttributeError(f"'{type(self).__name__}' has no attribute '{name}'")
        loaded = self._types.get(name)
        if loaded is None:
            raise AttributeError(
                f"Schema '{name}' not loaded. "
                f"Call index.load_schema() first to load the schema."
            )
        return loaded

    def __dir__(self) -> list[str]:
        return [*self._types, "_types", "_register", "get"]

    def __iter__(self) -> Iterator[str]:
        yield from self._types

    def __len__(self) -> int:
        return len(self._types)

    def __contains__(self, name: str) -> bool:
        return name in self._types

    def __repr__(self) -> str:
        if self._types:
            return f"SchemaNamespace({', '.join(sorted(self._types))})"
        return "SchemaNamespace(empty)"

    def get(self, name: str, default: T | None = None) -> Type[Packable] | T | None:
        """Look up a schema class by name without raising.

        Args:
            name: The schema class name to look up.
            default: Value to return if not found (default: None).

        Returns:
            The schema class, or default if not loaded.
        """
        return self._types.get(name, default)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
##
|
|
100
|
+
# Schema types
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@dataclass
|
|
104
|
+
class SchemaFieldType:
|
|
105
|
+
"""Schema field type definition for local storage.
|
|
106
|
+
|
|
107
|
+
Represents a type in the schema type system, supporting primitives,
|
|
108
|
+
ndarrays, arrays, and references to other schemas.
|
|
109
|
+
"""
|
|
110
|
+
|
|
111
|
+
kind: Literal["primitive", "ndarray", "ref", "array"]
|
|
112
|
+
"""The category of type."""
|
|
113
|
+
|
|
114
|
+
primitive: Optional[str] = None
|
|
115
|
+
"""For kind='primitive': one of 'str', 'int', 'float', 'bool', 'bytes'."""
|
|
116
|
+
|
|
117
|
+
dtype: Optional[str] = None
|
|
118
|
+
"""For kind='ndarray': numpy dtype string (e.g., 'float32')."""
|
|
119
|
+
|
|
120
|
+
ref: Optional[str] = None
|
|
121
|
+
"""For kind='ref': URI of referenced schema."""
|
|
122
|
+
|
|
123
|
+
items: Optional["SchemaFieldType"] = None
|
|
124
|
+
"""For kind='array': type of array elements."""
|
|
125
|
+
|
|
126
|
+
@classmethod
|
|
127
|
+
def from_dict(cls, data: dict) -> "SchemaFieldType":
|
|
128
|
+
"""Create from a dictionary (e.g., from Redis storage)."""
|
|
129
|
+
type_str = data.get("$type", "")
|
|
130
|
+
if "#" in type_str:
|
|
131
|
+
kind = type_str.split("#")[-1]
|
|
132
|
+
else:
|
|
133
|
+
kind = data.get("kind", "primitive")
|
|
134
|
+
|
|
135
|
+
items = None
|
|
136
|
+
if "items" in data and data["items"]:
|
|
137
|
+
items = cls.from_dict(data["items"])
|
|
138
|
+
|
|
139
|
+
return cls(
|
|
140
|
+
kind=kind, # type: ignore[arg-type]
|
|
141
|
+
primitive=data.get("primitive"),
|
|
142
|
+
dtype=data.get("dtype"),
|
|
143
|
+
ref=data.get("ref"),
|
|
144
|
+
items=items,
|
|
145
|
+
)
|
|
146
|
+
|
|
147
|
+
def to_dict(self) -> dict:
|
|
148
|
+
"""Convert to dictionary for storage."""
|
|
149
|
+
result: dict[str, Any] = {"$type": f"local#{self.kind}"}
|
|
150
|
+
if self.kind == "primitive":
|
|
151
|
+
result["primitive"] = self.primitive
|
|
152
|
+
elif self.kind == "ndarray":
|
|
153
|
+
result["dtype"] = self.dtype
|
|
154
|
+
elif self.kind == "ref":
|
|
155
|
+
result["ref"] = self.ref
|
|
156
|
+
elif self.kind == "array" and self.items:
|
|
157
|
+
result["items"] = self.items.to_dict()
|
|
158
|
+
return result
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
@dataclass
class SchemaField:
    """A single named field within a locally stored schema."""

    # Field name.
    name: str
    # Declared type of the field.
    field_type: SchemaFieldType
    # True when the field may hold None.
    optional: bool = False

    @classmethod
    def from_dict(cls, data: dict) -> "SchemaField":
        """Build a field definition from its stored dictionary form."""
        return cls(
            data["name"],
            SchemaFieldType.from_dict(data["fieldType"]),
            data.get("optional", False),
        )

    def to_dict(self) -> dict:
        """Serialize to the dictionary form used by the storage layer."""
        return dict(
            name=self.name,
            fieldType=self.field_type.to_dict(),
            optional=self.optional,
        )
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
@dataclass
class LocalSchemaRecord:
    """A versioned PackableSample schema stored in the local index.

    Mirrors the atmosphere SchemaRecord layout so local records can be
    promoted to the network without restructuring.
    """

    # Schema name (typically the sample class name).
    name: str
    # Semantic version string, e.g. "1.0.0".
    version: str
    # Ordered field definitions.
    fields: list[SchemaField]
    # Reference URI: atdata://local/schema/{name}@{version}.
    ref: str
    # Optional human-readable description.
    description: Optional[str] = None
    # Publication timestamp, if known.
    created_at: Optional[datetime] = None

    @classmethod
    def from_dict(cls, data: dict) -> "LocalSchemaRecord":
        """Build a record from its stored dictionary form (e.g. Redis)."""
        raw_ts = data.get("createdAt")
        try:
            # Tolerate missing or malformed timestamps from foreign writers.
            ts = datetime.fromisoformat(raw_ts) if raw_ts is not None else None
        except (ValueError, TypeError):
            ts = None

        return cls(
            name=data["name"],
            version=data["version"],
            fields=[SchemaField.from_dict(f) for f in data.get("fields", [])],
            ref=data.get("$ref", ""),
            description=data.get("description"),
            created_at=ts,
        )

    def to_dict(self) -> dict:
        """Serialize to the dictionary form used by the storage layer."""
        out: dict[str, Any] = {
            "name": self.name,
            "version": self.version,
            "fields": [f.to_dict() for f in self.fields],
            "$ref": self.ref,
        }
        if self.description:
            out["description"] = self.description
        if self.created_at:
            out["createdAt"] = self.created_at.isoformat()
        return out
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
##
|
|
253
|
+
# Schema helpers
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
def _kind_str_for_sample_type(st: Type[Packable]) -> str:
    """Return the fully-qualified ``module.name`` string for *st*."""
    return ".".join((st.__module__, st.__name__))
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def _schema_ref_from_type(sample_type: Type[Packable], version: str) -> str:
    """Build an 'atdata://local/schema/{name}@{version}' URI for a sample type."""
    name = sample_type.__name__
    return _make_schema_ref(name, version)
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
def _make_schema_ref(name: str, version: str) -> str:
    """Compose the schema reference URI for *name* at *version*."""
    return _ATDATA_URI_PREFIX + f"{name}@{version}"
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
def _parse_schema_ref(ref: str) -> tuple[str, str]:
    """Split a schema reference into ``(name, version)``.

    Accepts the current form 'atdata://local/schema/{name}@{version}' as
    well as the legacy form 'local://schemas/{module.Class}@{version}'.

    Raises:
        ValueError: If the prefix is unrecognized or no '@version' suffix
            is present.
    """
    for prefix in (_ATDATA_URI_PREFIX, _LEGACY_URI_PREFIX):
        if ref.startswith(prefix):
            path = ref.removeprefix(prefix)
            break
    else:
        raise ValueError(f"Invalid schema reference: {ref}")

    name, sep, version = path.rpartition("@")
    if not sep:
        raise ValueError(f"Schema reference must include version (@version): {ref}")

    # Legacy refs embed 'module.Class'; keep only the class name.
    return name.rsplit(".", 1)[-1], version
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
def _increment_patch(version: str) -> str:
    """Bump the patch component of a semver string: '1.0.0' -> '1.0.1'."""
    major, minor, patch = parse_semver(version)
    return ".".join(map(str, (major, minor, patch + 1)))
|
|
298
|
+
|
|
299
|
+
|
|
300
|
+
def _python_type_to_field_type(python_type: Any) -> dict:
    """Translate a Python annotation into a schema field-type dict.

    Handles primitives, NDArray annotations, and ``list[...]`` containers
    (recursing into the element type).

    Raises:
        TypeError: For nested dataclasses (not yet supported) or any other
            unrecognized annotation.
    """
    primitive = PRIMITIVE_TYPE_MAP.get(python_type)
    if primitive is not None:
        return {"$type": "local#primitive", "primitive": primitive}

    if is_ndarray_type(python_type):
        return {"$type": "local#ndarray", "dtype": extract_ndarray_dtype(python_type)}

    if get_origin(python_type) is list:
        element_args = get_args(python_type)
        if element_args:
            element = _python_type_to_field_type(element_args[0])
        else:
            # Bare ``list`` carries no element type; default to string items.
            element = {"$type": "local#primitive", "primitive": "str"}
        return {"$type": "local#array", "items": element}

    if is_dataclass(python_type):
        raise TypeError(
            f"Nested dataclass types not yet supported: {python_type.__name__}. "
            "Publish nested types separately and use references."
        )

    raise TypeError(f"Unsupported type for schema field: {python_type}")
|
|
328
|
+
|
|
329
|
+
|
|
330
|
+
def _build_schema_record(
    sample_type: Type[Packable],
    *,
    version: str,
    description: str | None = None,
) -> dict:
    """Introspect a PackableSample type into a schema record dict.

    Args:
        sample_type: The PackableSample subclass to introspect.
        version: Semantic version string.
        description: Optional human-readable description; when omitted, the
            class docstring is used instead.

    Returns:
        Schema record dict suitable for Redis storage.

    Raises:
        ValueError: If sample_type is not a dataclass.
        TypeError: If a field type is not supported.
    """
    if not is_dataclass(sample_type):
        raise ValueError(f"{sample_type.__name__} must be a dataclass (use @packable)")

    # Resolve string/forward-reference annotations to real types.
    hints = get_type_hints(sample_type)

    field_defs = []
    for f in fields(sample_type):
        annotation = hints.get(f.name, f.type)
        inner, is_optional = unwrap_optional(annotation)
        field_defs.append(
            {
                "name": f.name,
                "fieldType": _python_type_to_field_type(inner),
                "optional": is_optional,
            }
        )

    return {
        "name": sample_type.__name__,
        "version": version,
        "fields": field_defs,
        # Fall back to the class docstring when no description was given.
        "description": description if description is not None else sample_type.__doc__,
        "createdAt": datetime.now(timezone.utc).isoformat(),
    }
|
atdata/lens.py
CHANGED
|
@@ -61,6 +61,7 @@ if TYPE_CHECKING:
|
|
|
61
61
|
from .dataset import PackableSample
|
|
62
62
|
|
|
63
63
|
from ._protocols import Packable
|
|
64
|
+
from ._exceptions import LensNotFoundError
|
|
64
65
|
|
|
65
66
|
|
|
66
67
|
##
|
|
@@ -101,7 +102,8 @@ class Lens(Generic[S, V]):
|
|
|
101
102
|
... return FullData(name=view.name, age=source.age)
|
|
102
103
|
"""
|
|
103
104
|
|
|
104
|
-
#
|
|
105
|
+
# Note: The docstring uses "Parameters:" for type parameters as a workaround
|
|
106
|
+
# for quartodoc not supporting "Type Parameters:" sections.
|
|
105
107
|
|
|
106
108
|
def __init__(
|
|
107
109
|
self, get: LensGetter[S, V], put: Optional[LensPutter[S, V]] = None
|
|
@@ -290,7 +292,12 @@ class LensNetwork:
|
|
|
290
292
|
"""
|
|
291
293
|
ret = self._registry.get((source, view), None)
|
|
292
294
|
if ret is None:
|
|
293
|
-
|
|
295
|
+
available_targets = [
|
|
296
|
+
(sig[1], lens_obj.__name__)
|
|
297
|
+
for sig, lens_obj in self._registry.items()
|
|
298
|
+
if sig[0] is source and hasattr(lens_obj, "__name__")
|
|
299
|
+
]
|
|
300
|
+
raise LensNotFoundError(source, view, available_targets)
|
|
294
301
|
|
|
295
302
|
return ret
|
|
296
303
|
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
"""ATProto Lexicon definitions for the atdata federation.
|
|
2
|
+
|
|
3
|
+
This package contains the canonical Lexicon JSON files for the
|
|
4
|
+
``ac.foundation.dataset`` namespace. These define the ATProto record
|
|
5
|
+
types used by atdata for publishing schemas, datasets, and lenses
|
|
6
|
+
to the AT Protocol network.
|
|
7
|
+
|
|
8
|
+
Lexicons:
|
|
9
|
+
ac.foundation.dataset.schema
|
|
10
|
+
Versioned sample type definitions (PackableSample schemas).
|
|
11
|
+
ac.foundation.dataset.record
|
|
12
|
+
Dataset index records pointing to WebDataset storage.
|
|
13
|
+
ac.foundation.dataset.lens
|
|
14
|
+
Bidirectional transformations between schemas.
|
|
15
|
+
ac.foundation.dataset.schemaType
|
|
16
|
+
Extensible token for schema format identifiers.
|
|
17
|
+
ac.foundation.dataset.arrayFormat
|
|
18
|
+
Extensible token for array serialization formats.
|
|
19
|
+
ac.foundation.dataset.storageExternal
|
|
20
|
+
External URL-based storage (S3, HTTP, IPFS).
|
|
21
|
+
ac.foundation.dataset.storageBlobs
|
|
22
|
+
ATProto PDS blob-based storage.
|
|
23
|
+
ac.foundation.dataset.getLatestSchema
|
|
24
|
+
XRPC query for fetching the latest schema version.
|
|
25
|
+
|
|
26
|
+
The ``ndarray_shim.json`` file defines the standard NDArray type
|
|
27
|
+
for use within JSON Schema definitions.
|
|
28
|
+
|
|
29
|
+
Examples:
|
|
30
|
+
>>> from atdata.lexicons import load_lexicon
|
|
31
|
+
>>> schema_lex = load_lexicon("ac.foundation.dataset.schema")
|
|
32
|
+
>>> schema_lex["id"]
|
|
33
|
+
'ac.foundation.dataset.schema'
|
|
34
|
+
"""
|
|
35
|
+
|
|
36
|
+
import json
|
|
37
|
+
from importlib import resources
|
|
38
|
+
from functools import lru_cache
|
|
39
|
+
from typing import Any
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
NAMESPACE = "ac.foundation.dataset"
|
|
43
|
+
|
|
44
|
+
LEXICON_IDS = (
|
|
45
|
+
f"{NAMESPACE}.schema",
|
|
46
|
+
f"{NAMESPACE}.record",
|
|
47
|
+
f"{NAMESPACE}.lens",
|
|
48
|
+
f"{NAMESPACE}.schemaType",
|
|
49
|
+
f"{NAMESPACE}.arrayFormat",
|
|
50
|
+
f"{NAMESPACE}.storageExternal",
|
|
51
|
+
f"{NAMESPACE}.storageBlobs",
|
|
52
|
+
f"{NAMESPACE}.getLatestSchema",
|
|
53
|
+
)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
@lru_cache(maxsize=16)
def load_lexicon(lexicon_id: str) -> dict[str, Any]:
    """Load a lexicon definition by its NSID.

    Args:
        lexicon_id: The lexicon NSID, e.g. ``"ac.foundation.dataset.schema"``.

    Returns:
        Parsed JSON dictionary containing the lexicon definition.

    Raises:
        FileNotFoundError: If no lexicon file exists for the given ID.

    Examples:
        >>> lex = load_lexicon("ac.foundation.dataset.schema")
        >>> lex["defs"]["main"]["type"]
        'record'
    """
    filename = f"{lexicon_id}.json"
    ref = resources.files(__package__).joinpath(filename)
    try:
        text = ref.read_text(encoding="utf-8")
    except FileNotFoundError:
        # Name the exact file we looked for so the caller can diagnose
        # a missing/misnamed lexicon resource immediately.
        raise FileNotFoundError(
            f"No lexicon file found for '{lexicon_id}'. "
            f"Expected {filename} in {__package__}."
        ) from None
    return json.loads(text)
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
@lru_cache(maxsize=1)
def load_ndarray_shim() -> dict[str, Any]:
    """Load the NDArray JSON Schema shim definition.

    Returns:
        Parsed JSON dictionary containing the NDArray shim schema.

    Examples:
        >>> shim = load_ndarray_shim()
        >>> shim["$defs"]["ndarray"]["type"]
        'string'
    """
    shim_path = resources.files(__package__).joinpath("ndarray_shim.json")
    raw = shim_path.read_text(encoding="utf-8")
    return json.loads(raw)
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def list_lexicons() -> tuple[str, ...]:
    """Return every known lexicon NSID.

    Returns:
        Tuple of lexicon ID strings.

    Examples:
        >>> "ac.foundation.dataset.schema" in list_lexicons()
        True
    """
    # The module-level constant is already an immutable tuple; return it as-is.
    return LEXICON_IDS
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
__all__ = [
|
|
116
|
+
"NAMESPACE",
|
|
117
|
+
"LEXICON_IDS",
|
|
118
|
+
"load_lexicon",
|
|
119
|
+
"load_ndarray_shim",
|
|
120
|
+
"list_lexicons",
|
|
121
|
+
]
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
{
|
|
2
|
+
"lexicon": 1,
|
|
3
|
+
"id": "ac.foundation.dataset.arrayFormat",
|
|
4
|
+
"defs": {
|
|
5
|
+
"main": {
|
|
6
|
+
"type": "string",
|
|
7
|
+
"description": "Array serialization format identifier for NDArray fields in sample schemas. Known values correspond to token definitions in this Lexicon. Each format has versioned specifications maintained by foundation.ac at canonical URLs.",
|
|
8
|
+
"knownValues": ["ndarrayBytes"],
|
|
9
|
+
"maxLength": 50
|
|
10
|
+
},
|
|
11
|
+
"ndarrayBytes": {
|
|
12
|
+
"type": "token",
|
|
13
|
+
"description": "Numpy .npy binary format for NDArray serialization. Stores arrays with dtype and shape in binary header. Versions maintained at https://foundation.ac/schemas/atdata-ndarray-bytes/{version}/"
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
}
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
{
|
|
2
|
+
"lexicon": 1,
|
|
3
|
+
"id": "ac.foundation.dataset.getLatestSchema",
|
|
4
|
+
"defs": {
|
|
5
|
+
"main": {
|
|
6
|
+
"type": "query",
|
|
7
|
+
"description": "Get the latest version of a sample schema by its permanent NSID identifier",
|
|
8
|
+
"parameters": {
|
|
9
|
+
"type": "params",
|
|
10
|
+
"required": [
|
|
11
|
+
"schemaId"
|
|
12
|
+
],
|
|
13
|
+
"properties": {
|
|
14
|
+
"schemaId": {
|
|
15
|
+
"type": "string",
|
|
16
|
+
"description": "The permanent NSID identifier for the schema (the {NSID} part of the rkey {NSID}@{semver})",
|
|
17
|
+
"maxLength": 500
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
},
|
|
21
|
+
"output": {
|
|
22
|
+
"encoding": "application/json",
|
|
23
|
+
"schema": {
|
|
24
|
+
"type": "object",
|
|
25
|
+
"required": [
|
|
26
|
+
"uri",
|
|
27
|
+
"version",
|
|
28
|
+
"record"
|
|
29
|
+
],
|
|
30
|
+
"properties": {
|
|
31
|
+
"uri": {
|
|
32
|
+
"type": "string",
|
|
33
|
+
"description": "AT-URI of the latest schema version",
|
|
34
|
+
"maxLength": 500
|
|
35
|
+
},
|
|
36
|
+
"version": {
|
|
37
|
+
"type": "string",
|
|
38
|
+
"description": "Semantic version of the latest schema",
|
|
39
|
+
"maxLength": 20
|
|
40
|
+
},
|
|
41
|
+
"record": {
|
|
42
|
+
"type": "ref",
|
|
43
|
+
"ref": "ac.foundation.dataset.schema",
|
|
44
|
+
"description": "The full schema record"
|
|
45
|
+
},
|
|
46
|
+
"allVersions": {
|
|
47
|
+
"type": "array",
|
|
48
|
+
"description": "All available versions (optional, sorted by semver descending)",
|
|
49
|
+
"items": {
|
|
50
|
+
"type": "object",
|
|
51
|
+
"required": [
|
|
52
|
+
"uri",
|
|
53
|
+
"version"
|
|
54
|
+
],
|
|
55
|
+
"properties": {
|
|
56
|
+
"uri": {
|
|
57
|
+
"type": "string",
|
|
58
|
+
"maxLength": 500
|
|
59
|
+
},
|
|
60
|
+
"version": {
|
|
61
|
+
"type": "string",
|
|
62
|
+
"maxLength": 20
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
},
|
|
70
|
+
"errors": [
|
|
71
|
+
{
|
|
72
|
+
"name": "SchemaNotFound",
|
|
73
|
+
"description": "No schema found with the given NSID"
|
|
74
|
+
}
|
|
75
|
+
]
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
}
|