dyff-schema 0.36.9__tar.gz → 0.37.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dyff-schema might be problematic.

Files changed (68)
  1. {dyff_schema-0.36.9/dyff_schema.egg-info → dyff_schema-0.37.0}/PKG-INFO +1 -1
  2. dyff_schema-0.37.0/dyff/schema/_version.py +2 -0
  3. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/commands.py +12 -10
  4. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/platform.py +124 -1
  5. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/requests.py +16 -0
  6. {dyff_schema-0.36.9 → dyff_schema-0.37.0/dyff_schema.egg-info}/PKG-INFO +1 -1
  7. dyff_schema-0.36.9/dyff/schema/_version.py +0 -2
  8. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/.gitignore +0 -0
  9. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/.gitlab-ci.yml +0 -0
  10. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/.idea/dyff-schema.iml +0 -0
  11. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/.licenserc.yaml +0 -0
  12. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/.pre-commit-config.yaml +0 -0
  13. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/.prettierignore +0 -0
  14. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/.secrets.baseline +0 -0
  15. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/CODE_OF_CONDUCT.md +0 -0
  16. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/LICENSE +0 -0
  17. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/NOTICE +0 -0
  18. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/README.md +0 -0
  19. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/__init__.py +0 -0
  20. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/adapters.py +0 -0
  21. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/annotations.py +0 -0
  22. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/base.py +0 -0
  23. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/commands.py +0 -0
  24. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/copydoc.py +0 -0
  25. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/dataset/__init__.py +0 -0
  26. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/dataset/arrow.py +0 -0
  27. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/dataset/binary.py +0 -0
  28. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/dataset/classification.py +0 -0
  29. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/dataset/embedding.py +0 -0
  30. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/dataset/text.py +0 -0
  31. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/dataset/vision.py +0 -0
  32. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/errors.py +0 -0
  33. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/ids.py +0 -0
  34. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/io/__init__.py +0 -0
  35. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/io/vllm.py +0 -0
  36. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/platform.py +0 -0
  37. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/py.typed +0 -0
  38. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/quantity.py +0 -0
  39. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/requests.py +0 -0
  40. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/responses.py +0 -0
  41. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/test.py +0 -0
  42. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/__init__.py +0 -0
  43. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/__init__.py +0 -0
  44. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/adapters.py +0 -0
  45. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/base.py +0 -0
  46. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/dataset/__init__.py +0 -0
  47. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/dataset/arrow.py +0 -0
  48. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/dataset/binary.py +0 -0
  49. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/dataset/classification.py +0 -0
  50. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/dataset/embedding.py +0 -0
  51. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/dataset/text.py +0 -0
  52. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/dataset/vision.py +0 -0
  53. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/io/__init__.py +0 -0
  54. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/io/vllm.py +0 -0
  55. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/oci.py +0 -0
  56. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/responses.py +0 -0
  57. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/test.py +0 -0
  58. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/v0/r1/version.py +0 -0
  59. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff/schema/version.py +0 -0
  60. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff_schema.egg-info/SOURCES.txt +0 -0
  61. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff_schema.egg-info/dependency_links.txt +0 -0
  62. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff_schema.egg-info/requires.txt +0 -0
  63. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/dyff_schema.egg-info/top_level.txt +0 -0
  64. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/makefile +0 -0
  65. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/pyproject.toml +0 -0
  66. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/setup.cfg +0 -0
  67. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/tests/test_adapters.py +0 -0
  68. {dyff_schema-0.36.9 → dyff_schema-0.37.0}/tests/test_import.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dyff-schema
-Version: 0.36.9
+Version: 0.37.0
 Summary: Data models for the Dyff AI auditing platform.
 Author-email: Digital Safety Research Institute <contact@dsri.org>
 License: Apache-2.0
@@ -0,0 +1,2 @@
+__version__ = version = "0.37.0"
+__version_tuple__ = version_tuple = (0, 37, 0)
@@ -218,9 +218,6 @@ class ChallengeTaskRulesExecutionEnvironmentPatch(JsonMergePatchSemantics):
     choices: dict[str, Optional[ChallengeTaskExecutionEnvironment]] = pydantic.Field(
         default_factory=dict, description="Execution environment choices."
     )
-    default: Optional[str] = pydantic.Field(
-        default=None, description="The default execution environment."
-    )
 
 
 class ChallengeTaskRulesSchedulePatch(JsonMergePatchSemantics):
@@ -257,11 +254,7 @@ class ChallengeTaskRulesSchedulePatch(JsonMergePatchSemantics):
 
 
 class ChallengeTaskRulesPatch(DyffSchemaBaseModel):
-    """Same properties as ChallengeTaskRules, but assigning None to a field is
-    interpreted as a command to delete that field.
-
-    Fields that are not assigned explicitly remain unchanged.
-    """
+    """Edits to make to the different rules categories."""
 
     executionEnvironment: ChallengeTaskRulesExecutionEnvironmentPatch = pydantic.Field(
         default_factory=ChallengeTaskRulesExecutionEnvironmentPatch,
@@ -289,10 +282,18 @@ class ChallengeTaskRulesPatch(DyffSchemaBaseModel):
         return schedule.model_dump(mode=_info.mode)
 
 
+class EditChallengeTaskRulesPatch(DyffSchemaBaseModel):
+    """A Json Merge Patch for the rules of one task."""
+
+    rules: ChallengeTaskRulesPatch = pydantic.Field(
+        description="Edits to make to the task rules."
+    )
+
+
 class EditChallengeTaskRulesAttributes(DyffSchemaBaseModel):
     """Attributes for the EditChallengeTaskRules command."""
 
-    rules: ChallengeTaskRulesPatch = pydantic.Field(
+    tasks: dict[str, EditChallengeTaskRulesPatch] = pydantic.Field(
         description="Edits to make to the task rules."
     )
 
@@ -612,9 +613,10 @@ __all__ = [
     "EditChallengeContentAttributes",
     "EditChallengeContentData",
     "EditChallengeContentPatch",
+    "EditChallengeTaskRules",
     "EditChallengeTaskRulesAttributes",
     "EditChallengeTaskRulesData",
-    "EditChallengeTaskRules",
+    "EditChallengeTaskRulesPatch",
     "EditEntityDocumentation",
     "EditEntityDocumentationAttributes",
     "EditEntityDocumentationData",
@@ -19,13 +19,24 @@ We use the following naming convention:
 
 # fmt: on
 # mypy: disable-error-code="import-untyped"
+from __future__ import annotations
+
 import abc
 import enum
 import urllib.parse
 from datetime import datetime, timedelta, timezone
 from enum import Enum
 from pathlib import Path
-from typing import Any, Literal, NamedTuple, Optional, Type, TypeVar, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Literal,
+    NamedTuple,
+    Optional,
+    Type,
+    TypeVar,
+    Union,
+)
 
 import i18naddress
 import pyarrow
@@ -40,6 +51,12 @@ from .base import DyffSchemaBaseModel
 from .dataset import arrow, make_item_type, make_response_type
 from .version import SCHEMA_VERSION, SchemaVersion
 
+if TYPE_CHECKING:
+    from .requests import (
+        AnalysisCreateRequest,
+        EvaluationCreateRequest,
+    )
+
 SYSTEM_ATTRIBUTES = frozenset(["creationTime", "status", "reason"])
 
 
@@ -219,6 +236,8 @@ class Entities(str, enum.Enum):
     Method = "Method"
     Model = "Model"
     Module = "Module"
+    Pipeline = "Pipeline"
+    PipelineRun = "PipelineRun"
     Report = "Report"
     Revision = "Revision"
     SafetyCase = "SafetyCase"
@@ -258,6 +277,8 @@ class Resources(str, enum.Enum):
     Method = "methods"
     Model = "models"
     Module = "modules"
+    Pipeline = "pipelines"
+    PipelineRun = "pipelineruns"
     Report = "reports"
     Revision = "revisions"
     SafetyCase = "safetycases"
@@ -309,6 +330,8 @@ EntityKindLiteral = Literal[
     "Module",
     # FIXME: (schema v1) Rename to Artifact
     "OCIArtifact",
+    "Pipeline",
+    "PipelineRun",
     "Report",
     "Revision",
     "SafetyCase",
@@ -539,6 +562,8 @@ class DyffEntity(Status, Labeled, SchemaVersion, DyffModelWithID):
         "Module",
         # FIXME: (schema v1) Rename to Artifact
         "OCIArtifact",
+        "Pipeline",
+        "PipelineRun",
         "Report",
         "Revision",
         "SafetyCase",
@@ -2706,6 +2731,98 @@ class ChallengeSubmission(DyffEntity):
         return None
 
 
+# ---------------------------------------------------------------------------
+# Pipelines
+
+PipelineNodeRequest: TypeAlias = Union[
+    "AnalysisCreateRequest",
+    "EvaluationCreateRequest",
+]
+
+
+class PipelineNode(DyffSchemaBaseModel):
+    name: str = pydantic.Field(
+        description="The name of the node. Must be unique in the context of the pipeline."
+    )
+
+    request: PipelineNodeRequest = pydantic.Field(  # type: ignore[valid-type]
+        discriminator="kind",
+        description="The request template that will be executed when this node"
+        ' executes. You can use the syntax ``"$(node_name)"`` in request fields'
+        " that reference another entity to indicate that the request depends"
+        " on another node in the pipeline. The placeholder will be substituted"
+        " with the ID of the created entity once it is known. Dyff infers the"
+        " dependency graph structure from these placeholders.",
+    )
+
+
+class PipelineParameter(DyffSchemaBaseModel):
+    """Declares a parameter that can be passed to the pipeline to customize its
+    behavior."""
+
+    keyword: str = pydantic.Field(description="The keyword of the argument.")
+    destination: str = pydantic.Field(
+        description="The field in a pipeline node to substitute with the"
+        " parameter value. Should be a string like 'node_name.field1.field2'."
+    )
+    description: Optional[str] = pydantic.Field(
+        default=None, description="A description of the argument."
+    )
+
+
+class PipelineBase(DyffSchemaBaseModel):
+    name: str = pydantic.Field(description="The name of the Pipeline.")
+    nodes: dict[str, PipelineNode] = pydantic.Field(
+        description="The nodes in the pipeline graph.",
+        min_length=1,
+    )
+
+    @pydantic.field_validator("nodes", mode="after")
+    def validate_node_names_match(cls, nodes: dict[str, PipelineNode]):
+        for k, v in nodes.items():
+            if k != v.name:
+                raise ValueError(f"nodes[{k}]: dict key must match value.name")
+
+
+class Pipeline(DyffEntity, PipelineBase):
+    """A set of Dyff workflows that can be executed as a group.
+
+    The pipeline
+    is a directed acyclic graph representing data dependencies between
+    workflows. For example, a simple pipeline might run an Evaluation and
+    then create a SafetyCase from the evaluation output. This pipeline would
+    have a graph structure like ``evaluation -> safetycase``.
+    """
+
+    kind: Literal["Pipeline"] = Entities.Pipeline.value
+
+    def dependencies(self) -> list[str]:
+        return []
+
+    def resource_allocation(self) -> Optional[ResourceAllocation]:
+        return None
+
+
+class PipelineRunBase(DyffSchemaBaseModel):
+    """A pipeline run is an execution of a pipeline."""
+
+    pipeline: str = pydantic.Field(description="The ID of the pipeline that was run.")
+    arguments: dict[str, pydantic.JsonValue] = pydantic.Field(
+        default_factory=dict,
+        description="The arguments to pass to the pipeline.",
+    )
+
+
+class PipelineRun(DyffEntity, PipelineRunBase):
+    kind: Literal["PipelineRun"] = Entities.PipelineRun.value
+
+    def dependencies(self) -> list[str]:
+        return [self.pipeline]
+
+    def resource_allocation(self) -> Optional[ResourceAllocation]:
+        return None
+
+
 # ---------------------------------------------------------------------------
 # Status enumerations
 
@@ -3210,6 +3327,12 @@ __all__ = [
     "Module",
     "ModuleBase",
     "OCIArtifact",
+    "Pipeline",
+    "PipelineBase",
+    "PipelineNode",
+    "PipelineNodeRequest",
+    "PipelineRun",
+    "PipelineRunBase",
     "QueryableDyffEntity",
     "Report",
     "ReportBase",
@@ -42,6 +42,7 @@ from .platform import (
     MethodBase,
     ModelSpec,
     ModuleBase,
+    PipelineBase,
     ReportBase,
     TagNameType,
     TeamBase,
@@ -282,6 +283,19 @@ class ModuleCreateRequest(DyffEntityCreateRequest, ModuleBase):
     pass
 
 
+class PipelineCreateRequest(DyffEntityCreateRequest, PipelineBase):
+    pass
+
+
+class PipelineRunRequest(DyffRequestBase):
+    """A request to run a pipeline."""
+
+    arguments: dict[str, pydantic.JsonValue] = pydantic.Field(
+        default_factory=dict,
+        description="Arguments to pass to the pipeline run.",
+    )
+
+
 class ReportCreateRequest(DyffEntityCreateRequest, ReportBase):
     """A Report transforms raw model outputs into some useful statistics.
 
@@ -541,6 +555,8 @@ __all__ = [
     "ModelQueryRequest",
     "ModuleCreateRequest",
     "ModuleQueryRequest",
+    "PipelineCreateRequest",
+    "PipelineRunRequest",
     "QueryRequest",
     "ReportCreateRequest",
     "ReportQueryRequest",
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dyff-schema
-Version: 0.36.9
+Version: 0.37.0
 Summary: Data models for the Dyff AI auditing platform.
 Author-email: Digital Safety Research Institute <contact@dsri.org>
 License: Apache-2.0
@@ -1,2 +0,0 @@
-__version__ = version = "0.36.9"
-__version_tuple__ = version_tuple = (0, 36, 9)