dyff-schema 0.10.2__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.


dyff/schema/annotations.py ADDED
@@ -0,0 +1,297 @@
1
+ # SPDX-FileCopyrightText: 2024 UL Research Institutes
2
+ # SPDX-License-Identifier: Apache-2.0
3
+
4
+ # mypy: disable-error-code="import-untyped"
5
+ import functools
6
+ import typing
7
+ from typing import Callable, Generic, Literal, NamedTuple, Optional, TypeVar
8
+
9
+ import pyarrow.dataset
10
+ import pydantic
11
+ from typing_extensions import ParamSpec
12
+
13
+ from dyff.schema.dataset import ReplicatedItem, arrow
14
+ from dyff.schema.platform import (
15
+ DataSchema,
16
+ Dataset,
17
+ Evaluation,
18
+ Measurement,
19
+ MeasurementLevel,
20
+ MeasurementSpec,
21
+ MethodImplementation,
22
+ MethodImplementationKind,
23
+ MethodImplementationPythonFunction,
24
+ MethodInput,
25
+ MethodInputKind,
26
+ MethodOutput,
27
+ MethodOutputKind,
28
+ MethodParameter,
29
+ MethodScope,
30
+ )
31
+ from dyff.schema.requests import MethodCreateRequest
32
+
33
+
34
+ def _fqn(obj) -> tuple[str, str]:
35
+ """See: https://stackoverflow.com/a/70693158"""
36
+ try:
37
+ module = obj.__module__
38
+ except AttributeError:
39
+ module = obj.__class__.__module__
40
+ try:
41
+ name = obj.__qualname__
42
+ except AttributeError:
43
+ name = obj.__class__.__qualname__
44
+ # if obj is a method of builtin class, then module will be None
45
+ if module == "builtins" or module is None:
46
+ raise AssertionError("should not be called on a builtin")
47
+ return module, name
48
+
49
+
50
+ class DataAnnotation(NamedTuple):
51
+ kind: str
52
+ direction: Literal["input", "output"]
53
+ level: Optional[MeasurementLevel] = None
54
+ schema: Optional[DataSchema] = None
55
+
56
+
57
+ def Input(input_type) -> DataAnnotation:
58
+ """Apply this annotation to parameters of a Method implementation to
59
+ indicate that the parameter expects a PyArrow dataset derived from the
60
+ specified type of entity, e.g.::
61
+
62
+ def my_method(input_data: Annotated[pyarrow.dataset.Dataset, Input(Evaluation)], ...
63
+
64
+ :param input_type: A Dyff entity type with associated input data; one of
65
+ {Dataset, Evaluation, Measurement}
66
+ :return: Annotation data
67
+ """
68
+ if input_type == Dataset:
69
+ return DataAnnotation(kind="Dataset", direction="input")
70
+ elif input_type == Evaluation:
71
+ return DataAnnotation(kind="Evaluation", direction="input")
72
+ elif input_type == Measurement:
73
+ return DataAnnotation(kind="Measurement", direction="input")
74
+ else:
75
+ raise TypeError()
76
+
77
+
78
+ # TODO: I think this could work if we ever upgrade to Python 3.12+. We need the
79
+ # type checker to accept `InputData[Evaluation]` and treat it as an alias for
80
+ # `pyarrow.dataset.Dataset`.
81
+ #
82
+ # if typing.TYPE_CHECKING:
83
+ # _InputDataType = TypeVar("_InputDataType")
84
+ # type InputData[_InputDataType] = pyarrow.dataset.Dataset
85
+ # else:
86
+ #
87
+ # class InputData:
88
+ # def __init__(self):
89
+ # raise NotImplementedError()
90
+ #
91
+ # def __class_getitem__(cls, input_type) -> typing.GenericAlias:
92
+ # return Annotated[pyarrow.dataset.Dataset, Input(input_type)]
93
+
94
+
95
+ def Output(output_type, *, schema, level: Optional[MeasurementLevel] = None):
96
+ """Apply this annotation to the return type of a Method to provide
97
+ metadata about the type of output created by the Method, e.g.::
98
+
99
+ def my_method(...) -> Annotated[
100
+ Iterable[pyarrow.RecordBatch],
101
+ Output(Measurement, schema=MyPydanticType, level=MeasurementLevel.Instance)
102
+ ]: ...
103
+
104
+ :param output_type: A Dyff entity type with associated output data; one of
105
+ {Measurement, SafetyCase}
106
+ :param schema: The schema of the output. Can be a type derived from
107
+ pydantic.BaseModel or an Arrow schema. The mandatory fields `_index_`
108
+ and `_replication_` will be *added* and should not be present.
109
+ :param level: The MeasurementLevel, if the output is a Measurement.
110
+ :return: Annotation data
111
+ """
112
+ if isinstance(schema, type) and issubclass(schema, pydantic.BaseModel):
113
+ RowSchema = pydantic.create_model(
114
+ "RowSchema", __base__=(schema, ReplicatedItem)
115
+ )
116
+ data_schema = DataSchema(
117
+ arrowSchema=arrow.encode_schema(arrow.arrow_schema(RowSchema))
118
+ )
119
+ elif isinstance(schema, pyarrow.Schema):
120
+ raise NotImplementedError()
121
+ # TODO: Add _index_ and _replication_
122
+ # data_schema = DataSchema(arrowSchema=arrow.encode_schema(schema))
123
+ else:
124
+ raise TypeError()
125
+
126
+ if output_type == Measurement:
127
+ if level is None:
128
+ raise ValueError("Must specify 'level' when output_type == Measurement")
129
+ return DataAnnotation(
130
+ kind="Measurement",
131
+ direction="output",
132
+ level=level,
133
+ schema=data_schema,
134
+ )
135
+ else:
136
+ raise TypeError()
137
+
138
+
139
+ # TODO: See comments about InputData above
140
+
141
+ # _OutputDataType = TypeVar("_OutputDataType")
142
+ # _OutputDataSchema = TypeVar("_OutputDataSchema")
143
+ # _OutputDataLevel = TypeVar("_OutputDataLevel")
144
+
145
+
146
+ # class OutputData(
147
+ # Generic[_OutputDataType, _OutputDataSchema, _OutputDataLevel],
148
+ # ):
149
+ # def __init__(self):
150
+ # raise NotImplementedError()
151
+
152
+ # def __class_getitem__(cls, args) -> typing.GenericAlias:
153
+ # return Annotated[Iterable[pyarrow.RecordBatch], Output(*args)]
154
+
155
+
156
+ P = ParamSpec("P")
157
+ R = TypeVar("R")
158
+
159
+
160
+ class MethodPrototype(Generic[P, R]):
161
+ """A wrapper for Python functions that implement Methods that knows how to create an
162
+ appropriate MethodCreateRequest based on the function signature."""
163
+
164
+ def __init__(
165
+ self,
166
+ f: Callable[P, R],
167
+ *,
168
+ scope: MethodScope,
169
+ description: Optional[str] = None,
170
+ ):
171
+ self.f = f
172
+ self.scope = scope
173
+ self.description = description
174
+ # This is similar to doing @functools.wraps() but it works with
175
+ # function objects
176
+ functools.update_wrapper(self, f)
177
+
178
+ def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R:
179
+ return self.f(*args, **kwargs)
180
+
181
+ def create_request(
182
+ self, *, account: str, modules: list[str]
183
+ ) -> MethodCreateRequest:
184
+ """Create a MethodCreateRequest for the wrapped function.
185
+
186
+ :param account: The .account field for the request
187
+ :param modules: The .modules field for the request. This should include at least
188
+ the module that contains the wrapped function.
189
+ """
190
+ name = self.f.__name__
191
+ hints = typing.get_type_hints(self.f, include_extras=True)
192
+
193
+ parameters: list[MethodParameter] = []
194
+ inputs: list[MethodInput] = []
195
+ output: Optional[MethodOutput] = None
196
+ for k, v in hints.items():
197
+ annotation = None
198
+ if metadata := getattr(v, "__metadata__", None):
199
+ for m in metadata:
200
+ if isinstance(m, DataAnnotation):
201
+ annotation = m
202
+ break
203
+ if k == "return":
204
+ if annotation is None:
205
+ continue
206
+ if annotation.level is None:
207
+ raise ValueError("Must specify .level for Output")
208
+ if annotation.schema is None:
209
+ raise ValueError("Must specify .schema for Output")
210
+ output = MethodOutput(
211
+ kind=MethodOutputKind(annotation.kind),
212
+ measurement=MeasurementSpec(
213
+ name=name,
214
+ description=self.description,
215
+ level=MeasurementLevel(annotation.level),
216
+ schema=annotation.schema,
217
+ ),
218
+ )
219
+ elif annotation is None:
220
+ parameters.append(MethodParameter(keyword=k))
221
+ else:
222
+ inputs.append(
223
+ MethodInput(kind=MethodInputKind(annotation.kind), keyword=k)
224
+ )
225
+
226
+ if output is None:
227
+ raise TypeError("Return type must be annotated with Output()")
228
+
229
+ return MethodCreateRequest(
230
+ account=account,
231
+ modules=modules,
232
+ name=name,
233
+ scope=self.scope,
234
+ description=self.description,
235
+ implementation=MethodImplementation(
236
+ kind=MethodImplementationKind.PythonFunction,
237
+ pythonFunction=MethodImplementationPythonFunction(
238
+ fullyQualifiedName=".".join(_fqn(self.f))
239
+ ),
240
+ ),
241
+ parameters=parameters,
242
+ inputs=inputs,
243
+ output=output,
244
+ )
245
+
246
+
247
+ def method(
248
+ *, scope: MethodScope, description: Optional[str] = None
249
+ ) -> Callable[[Callable[P, R]], MethodPrototype[P, R]]:
250
+ """Use this decorator to indicate that a Python function implements a
251
+ Dyff Method. This should be used in conjunction with appropriate type
252
+ annotations, e.g.::
253
+
254
+ @method
255
+ def my_method(
256
+ arg: str,
257
+ data: Annotated[pyarrow.dataset.Dataset, Input(Evaluation)]
258
+ ) -> Annotated[
259
+ Iterable[pyarrow.RecordBatch],
260
+ Output(Measurement, schema=MyPydanticType, level=MeasurementLevel.Instance)
261
+ ]:
262
+ ...
263
+
264
+ The wrapped function will be an instance of MethodPrototype, and you can
265
+ use its .create_request() member function to create an appropriate
266
+ MethodCreateRequest for the wrapped function.
267
+
268
+ :param scope: The .scope field for the Method
269
+ :param description: The .description field for the Method. If not specified,
270
+ the docstring of the wrapped function will be used.
271
+ :return: A decorator that returns a MethodPrototype.
272
+ """
273
+
274
+ def decorator(f: Callable[P, R]) -> MethodPrototype[P, R]:
275
+ nonlocal description
276
+ if description is None:
277
+ description = f.__doc__
278
+ return MethodPrototype(f, scope=scope, description=description)
279
+
280
+ return decorator
281
+
282
+
283
+ def method_request(
284
+ f: MethodPrototype, *, account: str, modules: list[str]
285
+ ) -> MethodCreateRequest:
286
+ return f.create_request(account=account, modules=modules)
287
+
288
+
289
+ __all__ = [
290
+ "Input",
291
+ # "InputData",
292
+ "MethodPrototype",
293
+ "Output",
294
+ # "OutputData",
295
+ "method",
296
+ "method_request",
297
+ ]
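
Taken together, the new `dyff/schema/annotations.py` module lets a plain Python function be described as a Dyff Method. The following is a minimal usage sketch based on the docstrings above; the `WordCount` row model, the account and module IDs, and the `MethodScope` member are illustrative placeholders, not values taken from this release.

```python
from typing import Annotated, Iterable

import pyarrow
import pyarrow.dataset
import pydantic

from dyff.schema.annotations import Input, Output, method
from dyff.schema.platform import (
    Evaluation,
    Measurement,
    MeasurementLevel,
    MethodScope,
)


class WordCount(pydantic.BaseModel):
    """Hypothetical per-item measurement schema; _index_/_replication_ are added automatically."""

    text: str
    words: int


@method(
    scope=MethodScope.InferenceService,  # placeholder member; see MethodScope in dyff.schema.platform
    description="Counts words in each system response.",
)
def word_count(
    min_words: int,  # plain parameters become MethodParameter entries
    outputs: Annotated[pyarrow.dataset.Dataset, Input(Evaluation)],
) -> Annotated[
    Iterable[pyarrow.RecordBatch],
    Output(Measurement, schema=WordCount, level=MeasurementLevel.Instance),
]:
    for batch in outputs.to_batches():
        ...  # yield pyarrow.RecordBatch objects whose rows match WordCount


# The wrapper can describe itself to the platform:
request = word_count.create_request(
    account="example-account",      # placeholder
    modules=["example-module-id"],  # placeholder; must include the module containing word_count
)
```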
dyff/schema/v0/r1/base.py CHANGED
@@ -558,11 +558,11 @@ def list_(
558
558
  return pydantic.conlist(item_type, min_items=list_size, max_items=list_size)
559
559
 
560
560
 
561
- # mypy gets confused because 'dict' is the name of a method in DyffDefaultSerializers
561
+ # mypy gets confused because 'dict' is the name of a method in DyffBaseModel
562
562
  _ModelAsDict = dict[str, Any]
563
563
 
564
564
 
565
- class DyffDefaultSerializers(pydantic.BaseModel):
565
+ class DyffBaseModel(pydantic.BaseModel):
566
566
  """This must be the base class for *all pydantic models* in the Dyff schema.
567
567
 
568
568
  Overrides serialization functions to serialize by alias, so that "round-trip"
@@ -571,6 +571,9 @@ class DyffDefaultSerializers(pydantic.BaseModel):
571
571
  Python reserved words like 'bytes' as field names.
572
572
  """
573
573
 
574
+ class Config:
575
+ extra = pydantic.Extra.forbid
576
+
574
577
  def dict(self, *, by_alias: bool = True, **kwargs) -> _ModelAsDict:
575
578
  return super().dict(by_alias=by_alias, **kwargs)
576
579
 
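
The practical effect of the new `Config` is that unknown fields now fail validation instead of being silently dropped. A minimal, self-contained illustration using plain pydantic v1 (which these models appear to target), rather than the Dyff classes themselves:

```python
import pydantic


class StrictExample(pydantic.BaseModel):
    """Mimics the new DyffBaseModel config: unexpected fields are rejected."""

    class Config:
        extra = pydantic.Extra.forbid

    name: str


StrictExample(name="ok")  # accepted

try:
    StrictExample(name="ok", unexpected=1)
except pydantic.ValidationError as err:
    print(err)  # "extra fields not permitted"
```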
@@ -586,7 +589,7 @@ class DyffDefaultSerializers(pydantic.BaseModel):
586
589
  ) -> _ModelAsDict:
587
590
  """Encode the object as a dict containing only JSON datatypes.
588
591
 
589
- .. deprecated::
592
+ .. deprecated:: 0.8.0
590
593
 
591
594
  FIXME: This emulates a Pydantic 2 feature, but the mode="json"
592
595
  option can only be implemented in an inefficient way. Remove when
@@ -603,10 +606,10 @@ class DyffDefaultSerializers(pydantic.BaseModel):
603
606
  # don't have timezones set currently for historical reasons. It's actually
604
607
  # better if all datetimes in the system are UTC, so that their JSON
605
608
  # representations (i.e., isoformat strings) are well-ordered.
606
- class DyffSchemaBaseModel(DyffDefaultSerializers):
609
+ class DyffSchemaBaseModel(DyffBaseModel):
607
610
  """This should be the base class for *almost all* non-request models in the Dyff
608
611
  schema. Models that do not inherit from this class *must* still inherit from
609
- DyffDefaultSerializers.
612
+ DyffBaseModel.
610
613
 
611
614
  Adds a root validator to ensure that all datetime fields are represented in the UTC
612
615
  timezone. This is necessary to avoid errors when comparing "naive" and "aware"
@@ -630,7 +633,7 @@ class DyffSchemaBaseModel(DyffDefaultSerializers):
630
633
  __all__ = [
631
634
  "DTYPE",
632
635
  "DType",
633
- "DyffDefaultSerializers",
636
+ "DyffBaseModel",
634
637
  "DyffSchemaBaseModel",
635
638
  "FixedWidthFloat",
636
639
  "FixedWidthInt",
dyff/schema/v0/r1/platform.py CHANGED
@@ -140,6 +140,22 @@ def _k8s_label_value_maxlen():
140
140
  return _k8s_label_maxlen()
141
141
 
142
142
 
143
+ def _oci_image_tag_regex():
144
+ """Regex matching valid image tags according to the OCI spec.
145
+
146
+ See: https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pull
147
+ """
148
+ return r"^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$"
149
+
150
+
151
+ def _oci_image_tag_maxlen():
152
+ """Max length of valid image tags according to the OCI spec.
153
+
154
+ See: https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pull
155
+ """
156
+ return 127
157
+
158
+
143
159
  class Entities(str, enum.Enum):
144
160
  """The kinds of entities in the dyff system."""
145
161
 
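
For reference, the tag pattern introduced above can be exercised directly with Python's `re`; the examples are illustrative and not taken from the package's tests.

```python
import re

# Pattern copied from _oci_image_tag_regex() above (OCI distribution spec).
OCI_TAG = re.compile(r"^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$")

assert OCI_TAG.match("latest")
assert OCI_TAG.match("v0.11.0")
assert OCI_TAG.match("sha-1a2b3c")
assert not OCI_TAG.match(".hidden")   # may not start with '.' or '-'
assert not OCI_TAG.match("a" * 129)   # at most 128 characters in total
```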
@@ -149,8 +165,10 @@ class Entities(str, enum.Enum):
149
165
  AuditProcedure = "AuditProcedure"
150
166
  DataSource = "DataSource"
151
167
  Dataset = "Dataset"
168
+ Documentation = "Documentation"
152
169
  Evaluation = "Evaluation"
153
170
  Family = "Family"
171
+ History = "History"
154
172
  InferenceService = "InferenceService"
155
173
  InferenceSession = "InferenceSession"
156
174
  Measurement = "Measurement"
@@ -158,8 +176,8 @@ class Entities(str, enum.Enum):
158
176
  Model = "Model"
159
177
  Module = "Module"
160
178
  Report = "Report"
179
+ Revision = "Revision"
161
180
  SafetyCase = "SafetyCase"
162
- Tag = "Tag"
163
181
 
164
182
 
165
183
  class Resources(str, enum.Enum):
@@ -170,8 +188,10 @@ class Resources(str, enum.Enum):
170
188
  AuditProcedure = "auditprocedures"
171
189
  Dataset = "datasets"
172
190
  DataSource = "datasources"
191
+ Documentation = "documentation"
173
192
  Evaluation = "evaluations"
174
193
  Family = "families"
194
+ History = "histories"
175
195
  InferenceService = "inferenceservices"
176
196
  InferenceSession = "inferencesessions"
177
197
  Measurement = "measurements"
@@ -179,8 +199,8 @@ class Resources(str, enum.Enum):
179
199
  Model = "models"
180
200
  Module = "modules"
181
201
  Report = "reports"
202
+ Revision = "revisions"
182
203
  SafetyCase = "safetycases"
183
- Tag = "tags"
184
204
 
185
205
  Task = "tasks"
186
206
  """
@@ -217,6 +237,11 @@ LabelValue: TypeAlias = Optional[ # type: ignore
217
237
  ]
218
238
 
219
239
 
240
+ TagName: TypeAlias = pydantic.constr( # type: ignore
241
+ regex=_oci_image_tag_regex(), max_length=_k8s_label_key_maxlen()
242
+ )
243
+
244
+
220
245
  class Label(DyffSchemaBaseModel):
221
246
  """A key-value label for a resource. Used to specify identifying attributes of
222
247
  resources that are meaningful to users but do not imply semantics in the dyff
@@ -331,67 +356,6 @@ class Documented(DyffSchemaBaseModel):
331
356
  )
332
357
 
333
358
 
334
- class FamilyMemberKind(str, enum.Enum):
335
- """The kinds of entities that can be members of a Family.
336
-
337
- These are resources for which it makes sense to have different versions or variants
338
- that evolve over time.
339
- """
340
-
341
- Dataset = "Dataset"
342
- InferenceService = "InferenceService"
343
- Method = "Method"
344
- Model = "Model"
345
- Module = "Module"
346
-
347
-
348
- class FamilyMember(DyffSchemaBaseModel):
349
- family: Optional[str] = pydantic.Field(
350
- default=None,
351
- description="ID of the Family to which the resource belongs.",
352
- )
353
-
354
-
355
- class TagBase(DyffSchemaBaseModel):
356
- tag: str = pydantic.Field(
357
- description="An interpretable identifier for the tag that is unique in"
358
- " the context of the corresponding Family."
359
- )
360
-
361
- resource: str = pydantic.Field(
362
- description="ID of the resource this tag references.",
363
- )
364
-
365
- description: str = pydantic.Field(
366
- description="A short description of the tag. Interpreted as Markdown."
367
- " This should include information about how the tagged version is"
368
- " different from other versions."
369
- )
370
-
371
-
372
- class Tag(TagBase):
373
- created: datetime = pydantic.Field(description="Tag creation time.")
374
-
375
-
376
- class Family(Labeled, SchemaVersion, DyffModelWithID):
377
- kind: Literal["Family"] = "Family"
378
-
379
- resourceKind: FamilyMemberKind = pydantic.Field(
380
- description="The kind of resource that comprises the family.",
381
- )
382
-
383
- tags: list[Tag] = pydantic.Field(
384
- default_factory=list,
385
- description="Tags mapping interpretable names to resource IDs.",
386
- )
387
-
388
- documentation: DocumentationBase = pydantic.Field(
389
- default_factory=DocumentationBase,
390
- description="Documentation of the resource family. The content is used"
391
- " to populate various views in the web UI.",
392
- )
393
-
394
-
395
359
  class DyffEntity(Status, Labeled, SchemaVersion, DyffModelWithID):
396
360
  kind: Literal[
397
361
  "Analysis",
@@ -400,6 +364,8 @@ class DyffEntity(Status, Labeled, SchemaVersion, DyffModelWithID):
400
364
  "DataSource",
401
365
  "Dataset",
402
366
  "Evaluation",
367
+ "Family",
368
+ "History",
403
369
  "InferenceService",
404
370
  "InferenceSession",
405
371
  "Measurement",
@@ -407,6 +373,7 @@ class DyffEntity(Status, Labeled, SchemaVersion, DyffModelWithID):
407
373
  "Model",
408
374
  "Module",
409
375
  "Report",
376
+ "Revision",
410
377
  "SafetyCase",
411
378
  ]
412
379
 
@@ -584,6 +551,89 @@ class Account(DyffSchemaBaseModel):
584
551
  creationTime: Optional[datetime] = None
585
552
 
586
553
 
554
+ class FamilyMemberKind(str, enum.Enum):
555
+ """The kinds of entities that can be members of a Family.
556
+
557
+ These are resources for which it makes sense to have different versions or variants
558
+ that evolve over time.
559
+ """
560
+
561
+ Dataset = "Dataset"
562
+ InferenceService = "InferenceService"
563
+ Method = "Method"
564
+ Model = "Model"
565
+ Module = "Module"
566
+
567
+
568
+ class FamilyMemberBase(DyffSchemaBaseModel):
569
+ name: TagName = pydantic.Field(
570
+ description="An interpretable identifier for the member that is unique"
571
+ " in the context of the corresponding Family.",
572
+ )
573
+
574
+ resource: str = pydantic.Field(
575
+ description="ID of the resource this member references.",
576
+ )
577
+
578
+ description: Optional[str] = pydantic.Field(
579
+ default=None,
580
+ description="A short description of the member. Interpreted as Markdown."
581
+ " This should include information about how this version of the resource"
582
+ " is different from other versions.",
583
+ )
584
+
585
+
586
+ class FamilyMember(FamilyMemberBase):
587
+ family: str = pydantic.Field(
588
+ description="Identifier of the Family containing this tag."
589
+ )
590
+
591
+ creationTime: datetime = pydantic.Field(
592
+ default=None, description="Tag creation time (assigned by system)"
593
+ )
594
+
595
+
596
+ class FamilyMembers(DyffSchemaBaseModel):
597
+ members: dict[TagName, FamilyMember] = pydantic.Field(
598
+ default_factory=dict,
599
+ description="Mapping of names to IDs of member resources.",
600
+ )
601
+
602
+
603
+ class FamilyBase(DyffSchemaBaseModel):
604
+ memberKind: FamilyMemberKind = pydantic.Field(
605
+ description="The kind of resource that comprises the family.",
606
+ )
607
+
608
+
609
+ class Family(DyffEntity, FamilyBase, FamilyMembers):
610
+ kind: Literal["Family"] = "Family"
611
+
612
+ documentation: DocumentationBase = pydantic.Field(
613
+ default_factory=DocumentationBase,
614
+ description="Documentation of the resource family. The content is used"
615
+ " to populate various views in the web UI.",
616
+ )
617
+
618
+
619
+ class RevisionMetadata(DyffSchemaBaseModel):
620
+ creationTime: datetime = pydantic.Field("The time when the Revision was created")
621
+
622
+
623
+ # Note: The 'Revision' class itself is defined all the way at the end of this
624
+ # file, because OpenAPI generation doesn't work with the Python < 3.10
625
+ # "ForwardDeclaration" syntax.
626
+
627
+
628
+ class History(DyffEntity):
629
+ kind: Literal["History"] = "History"
630
+
631
+ latest: str = pydantic.Field(description="The ID of the latest Revision")
632
+ revisions: dict[str, RevisionMetadata] = pydantic.Field(
633
+ description="The set of known Revisions"
634
+ )
635
+
636
+
587
637
  # ----------------------------------------------------------------------------
588
638
 
589
639
 
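
A hedged sketch of how the reworked Family membership models fit together, using placeholder IDs; field names follow the definitions above, and datetimes are given in UTC as required by DyffSchemaBaseModel:

```python
from datetime import datetime, timezone

from dyff.schema.platform import FamilyMember, FamilyMembers

member = FamilyMember(
    name="latest",                 # must satisfy the TagName pattern
    resource="0123456789abcdef",   # placeholder resource ID
    description="Most recent version of the dataset.",
    family="fedcba9876543210",     # placeholder Family ID
    creationTime=datetime.now(timezone.utc),
)

members = FamilyMembers(members={member.name: member})
```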
@@ -1011,13 +1061,11 @@ class ContainerImageSource(DyffSchemaBaseModel):
1011
1061
  " digest, even if 'tag' is specified.",
1012
1062
  regex=r"^sha256:[0-9a-f]{64}$",
1013
1063
  )
1014
- tag: Optional[str] = pydantic.Field(
1064
+ tag: Optional[TagName] = pydantic.Field(
1015
1065
  default=None,
1016
1066
  description="The tag of the image. Although the image is always pulled"
1017
1067
  " by digest, including the tag is strongly recommended as it is often"
1018
1068
  " the main source of versioning information.",
1019
- # https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pull
1020
- regex=r"^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$",
1021
1069
  )
1022
1070
 
1023
1071
  def url(self) -> str:
@@ -1908,12 +1956,14 @@ def entity_class(kind: Entities):
1908
1956
  return _ENTITY_CLASS[kind]
1909
1957
 
1910
1958
 
1911
- DyffEntityType = Union[
1959
+ DyffEntityTypeExceptRevision = Union[
1912
1960
  Audit,
1913
1961
  AuditProcedure,
1914
1962
  DataSource,
1915
1963
  Dataset,
1916
1964
  Evaluation,
1965
+ Family,
1966
+ History,
1917
1967
  InferenceService,
1918
1968
  InferenceSession,
1919
1969
  Measurement,
@@ -1925,6 +1975,35 @@ DyffEntityType = Union[
1925
1975
  ]
1926
1976
 
1927
1977
 
1978
+ # Note: This class is defined here because OpenAPI generation doesn't work
1979
+ # with the Python < 3.10 "ForwardDeclaration" syntax. You get an error like:
1980
+ #
1981
+ # Traceback (most recent call last):
1982
+ # File "/home/jessehostetler/dsri/code/dyff/dyff-api/./scripts/generate-openapi-definitions.py", line 15, in <module>
1983
+ # get_openapi(
1984
+ # File "/home/jessehostetler/dsri/code/dyff/venv/lib/python3.9/site-packages/fastapi/openapi/utils.py", line 422, in get_openapi
1985
+ # definitions = get_model_definitions(
1986
+ # File "/home/jessehostetler/dsri/code/dyff/venv/lib/python3.9/site-packages/fastapi/utils.py", line 60, in get_model_definitions
1987
+ # m_schema, m_definitions, m_nested_models = model_process_schema(
1988
+ # File "pydantic/schema.py", line 581, in pydantic.schema.model_process_schema
1989
+ # File "pydantic/schema.py", line 622, in pydantic.schema.model_type_schema
1990
+ # File "pydantic/schema.py", line 255, in pydantic.schema.field_schema
1991
+ # File "pydantic/schema.py", line 527, in pydantic.schema.field_type_schema
1992
+ # File "pydantic/schema.py", line 926, in pydantic.schema.field_singleton_schema
1993
+ # File "/home/jessehostetler/.asdf/installs/python/3.9.18/lib/python3.9/abc.py", line 123, in __subclasscheck__
1994
+ # return _abc_subclasscheck(cls, subclass)
1995
+ # TypeError: issubclass() arg 1 must be a class
1996
+ class Revision(DyffEntity, RevisionMetadata):
1997
+ kind: Literal["Revision"] = "Revision"
1998
+
1999
+ entity: DyffEntityTypeExceptRevision = pydantic.Field(
2000
+ description="The associated entity data",
2001
+ )
2002
+
2003
+
2004
+ DyffEntityType = Union[DyffEntityTypeExceptRevision, Revision]
2005
+
2006
+
1928
2007
  __all__ = [
1929
2008
  "Accelerator",
1930
2009
  "AcceleratorGPU",
@@ -1965,12 +2044,16 @@ __all__ = [
1965
2044
  "EvaluationBase",
1966
2045
  "ExtractorStep",
1967
2046
  "Family",
2047
+ "FamilyBase",
1968
2048
  "FamilyMember",
2049
+ "FamilyMemberBase",
1969
2050
  "FamilyMemberKind",
2051
+ "FamilyMembers",
1970
2052
  "ForeignInferenceService",
1971
2053
  "ForeignMethod",
1972
2054
  "ForeignModel",
1973
2055
  "Frameworks",
2056
+ "History",
1974
2057
  "Identity",
1975
2058
  "InferenceInterface",
1976
2059
  "InferenceService",
@@ -2025,13 +2108,14 @@ __all__ = [
2025
2108
  "Report",
2026
2109
  "ReportBase",
2027
2110
  "Resources",
2111
+ "Revision",
2112
+ "RevisionMetadata",
2028
2113
  "SafetyCase",
2029
2114
  "SafetyCaseSpec",
2030
2115
  "SchemaAdapter",
2031
2116
  "Status",
2032
2117
  "StorageSignedURL",
2033
- "Tag",
2034
- "TagBase",
2118
+ "TagName",
2035
2119
  "TaskSchema",
2036
2120
  "entity_class",
2037
2121
  "JobStatus",
dyff/schema/v0/r1/requests.py CHANGED
@@ -19,13 +19,15 @@ from typing import Optional, Union
19
19
 
20
20
  import pydantic
21
21
 
22
- from .base import DyffDefaultSerializers
22
+ from .base import DyffBaseModel
23
23
  from .platform import (
24
24
  AnalysisBase,
25
25
  DatasetBase,
26
26
  DataView,
27
27
  DocumentationBase,
28
28
  EvaluationBase,
29
+ FamilyBase,
30
+ FamilyMembers,
29
31
  InferenceServiceBase,
30
32
  InferenceSessionBase,
31
33
  Labeled,
@@ -33,12 +35,11 @@ from .platform import (
33
35
  ModelSpec,
34
36
  ModuleBase,
35
37
  ReportBase,
36
- TagBase,
37
38
  )
38
39
  from .version import SchemaVersion
39
40
 
40
41
 
41
- class DyffRequestDefaultValidators(DyffDefaultSerializers):
42
+ class DyffRequestDefaultValidators(DyffBaseModel):
42
43
  """This must be the base class for *all* request models in the Dyff schema.
43
44
 
44
45
  Adds a root validator to ensure that all user-provided datetime fields have a
@@ -136,6 +137,10 @@ class EvaluationCreateRequest(DyffEntityCreateRequest, EvaluationBase):
136
137
  return values
137
138
 
138
139
 
140
+ class FamilyCreateRequest(DyffEntityCreateRequest, FamilyBase):
141
+ pass
142
+
143
+
139
144
  class MethodCreateRequest(DyffEntityCreateRequest, MethodBase):
140
145
  pass
141
146
 
@@ -169,7 +174,7 @@ class ReportCreateRequest(DyffEntityCreateRequest, ReportBase):
169
174
  )
170
175
 
171
176
 
172
- class TagCreateRequest(DyffRequestBase, TagBase):
177
+ class FamilyMembersEditRequest(DyffRequestBase, FamilyMembers):
173
178
  pass
174
179
 
175
180
 
@@ -226,6 +231,10 @@ class EvaluationQueryRequest(DyffEntityQueryRequest):
226
231
  modelName: Optional[str] = pydantic.Field(default=None)
227
232
 
228
233
 
234
+ class FamilyQueryRequest(DyffEntityQueryRequest):
235
+ pass
236
+
237
+
229
238
  class InferenceServiceQueryRequest(DyffEntityQueryRequest):
230
239
  name: Optional[str] = pydantic.Field(default=None)
231
240
  model: Optional[str] = pydantic.Field(default=None)
@@ -282,6 +291,9 @@ __all__ = [
282
291
  "EvaluationCreateRequest",
283
292
  "EvaluationQueryRequest",
284
293
  "EvaluationInferenceSessionRequest",
294
+ "FamilyCreateRequest",
295
+ "FamilyMembersEditRequest",
296
+ "FamilyQueryRequest",
285
297
  "InferenceServiceCreateRequest",
286
298
  "InferenceServiceQueryRequest",
287
299
  "InferenceSessionCreateRequest",
@@ -298,5 +310,4 @@ __all__ = [
298
310
  "ReportCreateRequest",
299
311
  "ReportQueryRequest",
300
312
  "SafetyCaseQueryRequest",
301
- "TagCreateRequest",
302
313
  ]
dyff_schema-0.10.2.dist-info/METADATA → dyff_schema-0.11.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dyff-schema
3
- Version: 0.10.2
3
+ Version: 0.11.0
4
4
  Summary: Data models for the Dyff AI auditing platform.
5
5
  Author-email: Digital Safety Research Institute <contact@dsri.org>
6
6
  License: Apache-2.0
dyff_schema-0.10.2.dist-info/RECORD → dyff_schema-0.11.0.dist-info/RECORD RENAMED
@@ -1,5 +1,6 @@
1
1
  dyff/schema/__init__.py,sha256=JcpxaRHNYgLjJWLjVayLlqacb2GX49Pazpwb8m-BctM,1031
2
2
  dyff/schema/adapters.py,sha256=YMTHv_2VlLGFp-Kqwa6H51hjffHmk8gXjZilHysIF5Q,123
3
+ dyff/schema/annotations.py,sha256=nE6Jk1PLqlShj8uqjE_EzZC9zYnTDW5AVtQcjysiK8M,10018
3
4
  dyff/schema/base.py,sha256=jvaNtsSZyFfsdUZTcY_U-yfLY5_GyrMxSXhON2R9XR0,119
4
5
  dyff/schema/copydoc.py,sha256=B4ZRpQmbFxi-3l9LCHvaJiVKb9VxADgC5vey804Febc,1075
5
6
  dyff/schema/errors.py,sha256=ow3yiucU4wGkeLmapUURp3eyaebwGUwDaVTXpPcrA7M,1542
@@ -21,9 +22,9 @@ dyff/schema/io/vllm.py,sha256=2q05M_-lTzq9oywKXHPPpCFCSDVCSsRQqtmERzWTtio,123
21
22
  dyff/schema/v0/__init__.py,sha256=L5y8UhRnojerPYHumsxQJRcHCNz8Hj9NM8b47mewMNs,92
22
23
  dyff/schema/v0/r1/__init__.py,sha256=L5y8UhRnojerPYHumsxQJRcHCNz8Hj9NM8b47mewMNs,92
23
24
  dyff/schema/v0/r1/adapters.py,sha256=2t2oxsnGfSEDKKDIEYw4qqLXMH7qlFIwPVuLyUmbsHs,23552
24
- dyff/schema/v0/r1/base.py,sha256=QX1TfqX3jBafxpBnf2bUTcgP0sMyqZFFNJZQHhM48BI,19385
25
- dyff/schema/v0/r1/platform.py,sha256=XNrL-H1wuY9CaBpfbCVzDK7hqclS87OouyAnS9FlM4g,64134
26
- dyff/schema/v0/r1/requests.py,sha256=2Xu1iA2m1bCNzITPcwrnNagqAOtB8Fahav9zzV8oGJ0,10245
25
+ dyff/schema/v0/r1/base.py,sha256=i7eOKXDGS8_J9k2aVObUTpSOnA8CAgRW7Quj1fSbyRg,19403
26
+ dyff/schema/v0/r1/platform.py,sha256=QYsbEzo3o8dFqf85xw9f91EZ_6oU1J6N91L_nLAsjro,67231
27
+ dyff/schema/v0/r1/requests.py,sha256=3veChMw6QseXNvNBgqUGFq5fIK7S1JPOUMnVwstL5bI,10460
27
28
  dyff/schema/v0/r1/test.py,sha256=X6dUyVd5svcPCI-PBMOAqEfK9jv3bRDvkQTJzwS96c0,10720
28
29
  dyff/schema/v0/r1/version.py,sha256=isKAGuGxsdru8vDaYmI4YiZdJOu_wNxXK7u6QzD6FE4,392
29
30
  dyff/schema/v0/r1/dataset/__init__.py,sha256=LbVlkO2asyGYBKk2z49xjJYTM-pu9y9e4eQDXgTDLnM,2553
@@ -34,9 +35,9 @@ dyff/schema/v0/r1/dataset/text.py,sha256=nLIn91Zlt0tNdXUklSgjJ-kEDxoPX32ISLkiv2D
34
35
  dyff/schema/v0/r1/dataset/vision.py,sha256=aIe0fbfM_g3DsrDTdg2K803YKLjZBpurM_VJcJFuZLc,369
35
36
  dyff/schema/v0/r1/io/__init__.py,sha256=L5y8UhRnojerPYHumsxQJRcHCNz8Hj9NM8b47mewMNs,92
36
37
  dyff/schema/v0/r1/io/vllm.py,sha256=CUE9y8KthtUI7sD49S875rDmPvKotSXVIRaBS79aBZs,5320
37
- dyff_schema-0.10.2.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
38
- dyff_schema-0.10.2.dist-info/METADATA,sha256=EI2KtRIPtITmbu0w4pd8OLmEGJY1wmvaKKcKPODezwQ,3484
39
- dyff_schema-0.10.2.dist-info/NOTICE,sha256=YONACu0s_Ui6jNi-wtEsVQbTU1JIkh8wvLH6d1-Ni_w,43
40
- dyff_schema-0.10.2.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91
41
- dyff_schema-0.10.2.dist-info/top_level.txt,sha256=9e3VVdeX73t_sUJOPQPCcGtYO1JhoErhHIi3WoWGcFI,5
42
- dyff_schema-0.10.2.dist-info/RECORD,,
38
+ dyff_schema-0.11.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
39
+ dyff_schema-0.11.0.dist-info/METADATA,sha256=fGlr2PCwu0n_MefBk8e8L3EeVNIPzwEhW5O4qF-giZo,3484
40
+ dyff_schema-0.11.0.dist-info/NOTICE,sha256=YONACu0s_Ui6jNi-wtEsVQbTU1JIkh8wvLH6d1-Ni_w,43
41
+ dyff_schema-0.11.0.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91
42
+ dyff_schema-0.11.0.dist-info/top_level.txt,sha256=9e3VVdeX73t_sUJOPQPCcGtYO1JhoErhHIi3WoWGcFI,5
43
+ dyff_schema-0.11.0.dist-info/RECORD,,
dyff_schema-0.10.2.dist-info/WHEEL → dyff_schema-0.11.0.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (70.1.0)
2
+ Generator: setuptools (71.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5