dyff-schema 0.27.3__tar.gz → 0.27.4__tar.gz

This diff reflects the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of dyff-schema might be problematic.

Files changed (63):
  1. {dyff_schema-0.27.3/dyff_schema.egg-info → dyff_schema-0.27.4}/PKG-INFO +3 -2
  2. dyff_schema-0.27.4/dyff/schema/__init__.py +66 -0
  3. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/commands.py +0 -1
  4. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/platform.py +1 -3
  5. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/requests.py +24 -0
  6. {dyff_schema-0.27.3 → dyff_schema-0.27.4/dyff_schema.egg-info}/PKG-INFO +3 -2
  7. dyff_schema-0.27.3/dyff/schema/__init__.py +0 -39
  8. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/.gitignore +0 -0
  9. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/.gitlab-ci.yml +0 -0
  10. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/.licenserc.yaml +0 -0
  11. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/.pre-commit-config.yaml +0 -0
  12. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/.prettierignore +0 -0
  13. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/.secrets.baseline +0 -0
  14. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/CODE_OF_CONDUCT.md +0 -0
  15. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/LICENSE +0 -0
  16. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/NOTICE +0 -0
  17. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/README.md +0 -0
  18. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/adapters.py +0 -0
  19. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/annotations.py +0 -0
  20. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/base.py +0 -0
  21. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/commands.py +0 -0
  22. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/copydoc.py +0 -0
  23. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/dataset/__init__.py +0 -0
  24. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/dataset/arrow.py +0 -0
  25. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/dataset/binary.py +0 -0
  26. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/dataset/classification.py +0 -0
  27. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/dataset/embedding.py +0 -0
  28. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/dataset/text.py +0 -0
  29. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/dataset/vision.py +0 -0
  30. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/errors.py +0 -0
  31. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/ids.py +0 -0
  32. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/io/__init__.py +0 -0
  33. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/io/vllm.py +0 -0
  34. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/platform.py +0 -0
  35. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/py.typed +0 -0
  36. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/quantity.py +0 -0
  37. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/requests.py +0 -0
  38. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/test.py +0 -0
  39. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/__init__.py +0 -0
  40. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/__init__.py +0 -0
  41. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/adapters.py +0 -0
  42. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/base.py +0 -0
  43. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/dataset/__init__.py +0 -0
  44. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/dataset/arrow.py +0 -0
  45. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/dataset/binary.py +0 -0
  46. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/dataset/classification.py +0 -0
  47. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/dataset/embedding.py +0 -0
  48. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/dataset/text.py +0 -0
  49. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/dataset/vision.py +0 -0
  50. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/io/__init__.py +0 -0
  51. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/io/vllm.py +0 -0
  52. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/test.py +0 -0
  53. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/version.py +0 -0
  54. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/version.py +0 -0
  55. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff_schema.egg-info/SOURCES.txt +0 -0
  56. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff_schema.egg-info/dependency_links.txt +0 -0
  57. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff_schema.egg-info/requires.txt +0 -0
  58. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff_schema.egg-info/top_level.txt +0 -0
  59. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/makefile +0 -0
  60. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/pyproject.toml +0 -0
  61. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/setup.cfg +0 -0
  62. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/tests/test_adapters.py +0 -0
  63. {dyff_schema-0.27.3 → dyff_schema-0.27.4}/tests/test_import.py +0 -0

{dyff_schema-0.27.3/dyff_schema.egg-info → dyff_schema-0.27.4}/PKG-INFO

@@ -1,6 +1,6 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: dyff-schema
-Version: 0.27.3
+Version: 0.27.4
 Summary: Data models for the Dyff AI auditing platform.
 Author-email: Digital Safety Research Institute <contact@dsri.org>
 License: Apache-2.0
@@ -27,6 +27,7 @@ Requires-Dist: jsonpath-ng
 Requires-Dist: numpy<2
 Requires-Dist: pyarrow
 Requires-Dist: pydantic<2
+Dynamic: license-file
 
 # dyff-schema
 

dyff_schema-0.27.4/dyff/schema/__init__.py (new file)

@@ -0,0 +1,66 @@
+# SPDX-FileCopyrightText: 2024 UL Research Institutes
+# SPDX-License-Identifier: Apache-2.0
+
+import importlib
+from typing import Any, Iterable, Type, TypeVar, Union
+
+import pydantic
+
+from .base import DyffBaseModel, DyffSchemaBaseModel
+from .version import SomeSchemaVersion
+
+
+def _symbol(fully_qualified_name):
+    tokens = fully_qualified_name.split(".")
+    module_name = ".".join(tokens[:-1])
+    member = tokens[-1]
+    module = importlib.import_module(module_name)
+    return getattr(module, member)
+
+
+def product_schema(
+    schemas: Iterable[Type[DyffSchemaBaseModel]],
+) -> Type[DyffSchemaBaseModel]:
+    return pydantic.create_model("Product", __base__=tuple(schemas))
+
+
+# TODO: Should have a way of registering schema names rather than allowing
+# arbitrary imports.
+def named_data_schema(
+    name: str, schema_version: SomeSchemaVersion
+) -> Type[DyffSchemaBaseModel]:
+    version, revision = schema_version.split(".")
+    return _symbol(f"dyff.schema.v{version}.r{revision}.dataset.{name}")
+
+
+_UpcastTargetT = TypeVar("_UpcastTargetT", bound=DyffBaseModel)
+_UpcastSourceT = TypeVar("_UpcastSourceT", bound=DyffBaseModel)
+
+
+def upcast(
+    t: type[_UpcastTargetT], obj: Union[_UpcastSourceT, dict[str, Any]]
+) -> _UpcastTargetT:
+    """Perform a "structural upcast" on a Pydantic model instance.
+
+    An upcast is possible when the top-level fields of the target type are a subset of
+    the top-level fields of the source type, and the data in each source field validates
+    against the corresponding target field. In particular, an upcast is possible when
+    the source type is a Python subclass of the target type.
+
+    The upcast is "shallow" in the sense that sub-objects must validate as-is. In
+    particular, most Dyff schema types do not allow additional properties, so validation
+    will fail if a sub-object of the source object has fields that are not present in
+    the corresponding sub-object of the target type.
+    """
+    if not isinstance(obj, dict):
+        # Preserve the unset status
+        obj = obj.dict(exclude_unset=True)
+    fields = {k: v for k, v in obj.items() if k in t.__fields__}
+    return t.parse_obj(fields)
+
+
+__all__ = [
+    "named_data_schema",
+    "product_schema",
+    "upcast",
+]
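
For orientation, a minimal usage sketch of the new upcast helper added above. It is not taken from the package: the Animal/Dog models are hypothetical, and plain pydantic.BaseModel (pydantic<2, as pinned in the requirements) stands in for DyffBaseModel, which the helper is actually typed against.

# Hypothetical models illustrating dyff.schema.upcast (pydantic<2 API).
import pydantic

from dyff.schema import upcast


class Animal(pydantic.BaseModel):
    name: str


class Dog(Animal):
    breed: str


dog = Dog(name="Rex", breed="corgi")

# "Structural upcast": keep only the top-level fields declared on the target type.
animal = upcast(Animal, dog)
assert animal.dict() == {"name": "Rex"}

# A plain dict with compatible top-level fields is accepted as well.
also_animal = upcast(Animal, {"name": "Rex", "breed": "corgi"})

Comparing this file with the 0.27.3 version removed at the end of the diff, product_schema and named_data_schema are carried over unchanged; upcast, its supporting imports, and its entry in __all__ are the only additions.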

{dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/commands.py

@@ -15,7 +15,6 @@ from .base import DyffSchemaBaseModel, JsonMergePatchSemantics, Null
 from .platform import (
     DyffEntityType,
     EntityIdentifier,
-    EntityKindLiteral,
     FamilyMember,
     FamilyMembers,
     LabelKeyType,

{dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/platform.py

@@ -301,9 +301,7 @@ class EntityIdentifier(DyffSchemaBaseModel):
 
     id: str = pydantic.Field(description="The .id of the entity.")
     kind: EntityKindLiteral = pydantic.Field(
-        description="The .kind of the entity. This is optional because"
-        " sometimes you need to send a command without knowing the kind,"
-        " but it should be set whenever possible.",
+        description="The .kind of the entity.",
     )
 
 

{dyff_schema-0.27.3 → dyff_schema-0.27.4}/dyff/schema/v0/r1/requests.py

@@ -20,6 +20,7 @@ from typing import Any, Literal, Optional, Union
 
 import pydantic
 
+from ... import upcast
 from . import commands
 from .base import DyffBaseModel, JsonMergePatchSemantics, Null
 from .platform import (
@@ -29,6 +30,7 @@ from .platform import (
     DatasetBase,
     DataView,
     DocumentationBase,
+    Evaluation,
     EvaluationBase,
     FamilyBase,
     FamilyMemberBase,
@@ -197,6 +199,27 @@ class EvaluationCreateRequest(DyffEntityCreateRequest, EvaluationBase):
         )
         return values
 
+    @staticmethod
+    def repeat_of(evaluation: Evaluation) -> EvaluationCreateRequest:
+        """Return a request that will run an existing Evaluation again with the same
+        configuration."""
+        base = upcast(EvaluationBase, evaluation)
+        if evaluation.inferenceSessionReference:
+            return EvaluationCreateRequest(
+                account=evaluation.account,
+                inferenceSessionReference=evaluation.inferenceSessionReference,
+                **base.dict(),
+            )
+        else:
+            return EvaluationCreateRequest(
+                account=evaluation.account,
+                inferenceSession=EvaluationInferenceSessionRequest(
+                    inferenceService=evaluation.inferenceSession.inferenceService.id,
+                    **upcast(InferenceSessionBase, evaluation.inferenceSession).dict(),
+                ),
+                **base.dict(),
+            )
+
 
 class FamilyCreateRequest(DyffEntityCreateRequest, FamilyBase):
     pass
@@ -403,6 +426,7 @@ class ScoreQueryRequest(DyffRequestDefaultValidators):
 
     id: Optional[str] = pydantic.Field(default=None)
     name: Optional[str] = pydantic.Field(default=None)
+    analysis: Optional[str] = pydantic.Field(default=None)
     method: Optional[str] = pydantic.Field(default=None)
     methodName: Optional[str] = pydantic.Field(default=None)
     dataset: Optional[str] = pydantic.Field(default=None)
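
A hedged sketch of how the two request-level additions above might be used. Only EvaluationCreateRequest.repeat_of and the new ScoreQueryRequest.analysis field come from this diff; the import paths follow the package layout shown in the file list, the evaluation value and the ID string are placeholders, and the example assumes the remaining ScoreQueryRequest fields are optional, as the surrounding context lines suggest.

# Hypothetical sketch; only repeat_of() and the `analysis` query field are
# taken from this diff. The evaluation value and the ID are placeholders.
from dyff.schema.platform import Evaluation
from dyff.schema.requests import EvaluationCreateRequest, ScoreQueryRequest


def rerun_request(existing: Evaluation) -> EvaluationCreateRequest:
    # Copies the account and the evaluation/inference-session configuration
    # from an existing Evaluation record into a new create request.
    return EvaluationCreateRequest.repeat_of(existing)


# Scores can now also be filtered by the Analysis that produced them.
score_query = ScoreQueryRequest(analysis="0123456789abcdef0123456789abcdef")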

{dyff_schema-0.27.3 → dyff_schema-0.27.4/dyff_schema.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: dyff-schema
-Version: 0.27.3
+Version: 0.27.4
 Summary: Data models for the Dyff AI auditing platform.
 Author-email: Digital Safety Research Institute <contact@dsri.org>
 License: Apache-2.0
@@ -27,6 +27,7 @@ Requires-Dist: jsonpath-ng
 Requires-Dist: numpy<2
 Requires-Dist: pyarrow
 Requires-Dist: pydantic<2
+Dynamic: license-file
 
 # dyff-schema
 

dyff_schema-0.27.3/dyff/schema/__init__.py (deleted)

@@ -1,39 +0,0 @@
-# SPDX-FileCopyrightText: 2024 UL Research Institutes
-# SPDX-License-Identifier: Apache-2.0
-
-import importlib
-from typing import Iterable, Type
-
-import pydantic
-
-from .base import DyffSchemaBaseModel
-from .version import SomeSchemaVersion
-
-
-def _symbol(fully_qualified_name):
-    tokens = fully_qualified_name.split(".")
-    module_name = ".".join(tokens[:-1])
-    member = tokens[-1]
-    module = importlib.import_module(module_name)
-    return getattr(module, member)
-
-
-def product_schema(
-    schemas: Iterable[Type[DyffSchemaBaseModel]],
-) -> Type[DyffSchemaBaseModel]:
-    return pydantic.create_model("Product", __base__=tuple(schemas))
-
-
-# TODO: Should have a way of registering schema names rather than allowing
-# arbitrary imports.
-def named_data_schema(
-    name: str, schema_version: SomeSchemaVersion
-) -> Type[DyffSchemaBaseModel]:
-    version, revision = schema_version.split(".")
-    return _symbol(f"dyff.schema.v{version}.r{revision}.dataset.{name}")
-
-
-__all__ = [
-    "named_data_schema",
-    "product_schema",
-]