dyff-schema 0.37.3__py3-none-any.whl → 0.38.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dyff-schema might be problematic.

dyff/schema/_version.py CHANGED
@@ -1,2 +1,2 @@
-__version__ = version = "0.37.3"
-__version_tuple__ = version_tuple = (0, 37, 3)
+__version__ = version = "0.38.1"
+__version_tuple__ = version_tuple = (0, 38, 1)
dyff/schema/v0/r1/platform.py CHANGED
@@ -28,7 +28,6 @@ from datetime import datetime, timedelta, timezone
 from enum import Enum
 from pathlib import Path
 from typing import (
-    TYPE_CHECKING,
     Any,
     Literal,
     NamedTuple,
@@ -51,12 +50,6 @@ from .base import DyffSchemaBaseModel
 from .dataset import arrow, make_item_type, make_response_type
 from .version import SCHEMA_VERSION, SchemaVersion
 
-if TYPE_CHECKING:
-    from .requests import (
-        AnalysisCreateRequest,
-        EvaluationCreateRequest,
-    )
-
 SYSTEM_ATTRIBUTES = frozenset(["creationTime", "status", "reason"])
 
 
@@ -1712,6 +1705,10 @@ class EvaluationClientConfiguration(DyffSchemaBaseModel):
     )
 
 
+class EvaluationInferenceSessionRequest(InferenceSessionBase):
+    inferenceService: str = pydantic.Field(description="InferenceService ID")
+
+
 class EvaluationBase(DyffSchemaBaseModel):
     dataset: str = pydantic.Field(description="The Dataset to evaluate on.")
 
@@ -1731,13 +1728,39 @@ class EvaluationBase(DyffSchemaBaseModel):
     )
 
 
+class EvaluationRequestBase(EvaluationBase):
+    """A description of how to run an InferenceService on a Dataset to obtain a set of
+    evaluation results."""
+
+    inferenceSession: Optional[EvaluationInferenceSessionRequest] = pydantic.Field(
+        default=None,
+        description="Specification of the InferenceSession that will perform inference for the evaluation.",
+    )
+
+    inferenceSessionReference: Optional[str] = pydantic.Field(
+        default=None,
+        description="The ID of a running inference session that will be used"
+        " for the evaluation, instead of starting a new one.",
+    )
+
+    @pydantic.model_validator(mode="after")
+    def check_session_exactly_one(self):
+        session = self.inferenceSession is not None
+        session_ref = self.inferenceSessionReference is not None
+        if not (session ^ session_ref):
+            raise ValueError(
+                "must specify exactly one of {inferenceSession, inferenceSessionReference}"
+            )
+        return self
+
+
 class Evaluation(DyffEntity, EvaluationBase):
     """A description of how to run an InferenceService on a Dataset to obtain a set of
     evaluation results."""
 
     kind: Literal["Evaluation"] = Entities.Evaluation.value
 
-    inferenceSession: InferenceSessionSpec = pydantic.Field(
+    inferenceSession: InferenceSessionSpec = pydantic.Field(  # type: ignore[assignment]
         description="Specification of the InferenceSession that will perform"
         " inference for the evaluation.",
     )
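The new EvaluationRequestBase validator requires that a request supply exactly one of inferenceSession and inferenceSessionReference. A minimal usage sketch, not part of the diff: the IDs are hypothetical, and the remaining EvaluationBase and InferenceSessionBase fields are assumed to have defaults.

    import pydantic

    from dyff.schema.v0.r1.platform import EvaluationInferenceSessionRequest
    from dyff.schema.v0.r1.requests import EvaluationCreateRequest

    # Valid: exactly one of the two session fields is set.
    request = EvaluationCreateRequest(
        account="account-id",  # hypothetical account ID
        dataset="dataset-id",  # hypothetical Dataset ID
        inferenceSession=EvaluationInferenceSessionRequest(
            inferenceService="inferenceservice-id",  # hypothetical InferenceService ID
        ),
    )

    # Invalid: neither (or both) session fields set; check_session_exactly_one
    # makes pydantic raise a ValidationError.
    try:
        EvaluationCreateRequest(account="account-id", dataset="dataset-id")
    except pydantic.ValidationError:
        pass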
@@ -2229,6 +2252,10 @@ class AnalysisBase(DyffSchemaBaseModel):
     )
 
 
+class AnalysisRequestBase(AnalysisBase):
+    method: EntityIDField = pydantic.Field(description="Method ID")
+
+
 class AnalysisData(DyffSchemaBaseModel):
     """Arbitrary additional data for the Analysis, specified as a key-value pair where
     the value is the data encoded in base64."""
@@ -2734,13 +2761,34 @@ class ChallengeSubmission(DyffEntity):
 # ---------------------------------------------------------------------------
 # Pipelines
 
+
+class PipelineEvaluationRequest(EvaluationRequestBase):
+    kind: Literal["PipelineEvaluationRequest"] = "PipelineEvaluationRequest"
+
+
+class PipelineMeasurementRequest(AnalysisRequestBase):
+    kind: Literal["PipelineMeasurementRequest"] = "PipelineMeasurementRequest"
+
+
+class PipelineSafetyCaseRequest(AnalysisRequestBase):
+    kind: Literal["PipelineSafetyCaseRequest"] = "PipelineSafetyCaseRequest"
+
+
 PipelineNodeRequest: TypeAlias = Union[
-    "AnalysisCreateRequest",
-    "EvaluationCreateRequest",
+    PipelineEvaluationRequest,
+    PipelineMeasurementRequest,
+    PipelineSafetyCaseRequest,
 ]
 
 
 class PipelineNode(DyffSchemaBaseModel):
+    """A node in the graph that defines the pipeline.
+
+    Each node contains a Dyff API request that might depend on the outcome of other
+    requests in the pipeline graph. When the pipeline runs, the requests are executed in
+    an order that respects these dependencies.
+    """
+
     name: str = pydantic.Field(
         description="The name of the node. Must be unique in the context of the pipeline."
     )
@@ -2748,7 +2796,7 @@ class PipelineNode(DyffSchemaBaseModel):
     request: PipelineNodeRequest = pydantic.Field(  # type: ignore[valid-type]
         discriminator="kind",
         description="The request template that will be executed when this node"
-        ' executes. You can use the syntax ``"$(node_name)"`` in request fields'
+        ' executes. You can use the syntax "$(node_name)" in request fields'
         " that reference another entity to indicate that the request depends"
        " on another node in the pipeline. The placeholder will be substituted"
         " with the ID of the created entity once it is known. Dyff infers the"
@@ -2766,7 +2814,9 @@ class PipelineParameter(DyffSchemaBaseModel):
         " parameter value. Should be a string like 'node_name.field1.field2'."
     )
     description: Optional[str] = pydantic.Field(
-        default=None, description="A description of the argument."
+        default=None,
+        description="A description of the argument.",
+        max_length=summary_maxlen(),
     )
 
 
@@ -2787,11 +2837,16 @@ class PipelineBase(DyffSchemaBaseModel):
 class Pipeline(DyffEntity, PipelineBase):
     """A set of Dyff workflows that can be executed as a group.
 
-    The pipeline
-    is a directed acyclic graph representing data dependencies between
-    workflows. For example, a simple pipeline might run an Evaluation and
-    then create a SafetyCase from the evaluation output. This pipeline would
+    The pipeline is a directed acyclic graph representing data dependencies
+    between workflows. For example, a simple pipeline might run an Evaluation
+    and then create a SafetyCase from the evaluation output. This pipeline would
     have a graph structure like ``evaluation -> safetycase``.
+
+    Each node in the pipeline contains the specification of a Dyff API request.
+    The request specifications may contain placeholders that reference other
+    nodes in the pipeline graph. When the pipeline is run, the nodes execute
+    in an order that respects their dependencies, and the placeholders are
+    replaced with concrete values once they are known.
     """
 
     kind: Literal["Pipeline"] = Entities.Pipeline.value
@@ -3085,8 +3140,6 @@ _ENTITY_CLASS = {
     Entities.Method: Method,
     Entities.Model: Model,
     Entities.Module: Module,
-    Entities.Pipeline: Pipeline,
-    Entities.PipelineRun: PipelineRun,
     Entities.Report: Report,
     Entities.SafetyCase: SafetyCase,
     Entities.Team: Team,
@@ -3218,6 +3271,7 @@ __all__ = [
     "AnalysisData",
     "AnalysisInput",
     "AnalysisOutputQueryFields",
+    "AnalysisRequestBase",
     "AnalysisScope",
     "Annotation",
     "APIFunctions",
@@ -3269,6 +3323,7 @@ __all__ = [
     "Evaluation",
     "EvaluationBase",
     "EvaluationClientConfiguration",
+    "EvaluationRequestBase",
     "ExtractorStep",
     "Family",
     "FamilyBase",
@@ -3340,10 +3395,13 @@ __all__ = [
     "OCIArtifact",
     "Pipeline",
     "PipelineBase",
+    "PipelineEvaluationRequest",
+    "PipelineMeasurementRequest",
     "PipelineNode",
-    "PipelineNodeRequest",
+    "PipelineParameter",
     "PipelineRun",
     "PipelineRunBase",
+    "PipelineSafetyCaseRequest",
     "QueryableDyffEntity",
     "Report",
     "ReportBase",
dyff/schema/v0/r1/requests.py CHANGED
@@ -21,10 +21,10 @@ from typing import Any, Literal, Optional, Union
 import pydantic
 
 from ... import upcast
-from . import commands, oci
+from . import commands
 from .base import DyffBaseModel, JsonMergePatchSemantics
 from .platform import (
-    AnalysisBase,
+    AnalysisRequestBase,
     AnalysisScope,
     ChallengeContent,
     ChallengeTaskBase,
@@ -32,9 +32,9 @@ from .platform import (
     DatasetBase,
     DataView,
     DocumentationBase,
-    EntityIDField,
     Evaluation,
-    EvaluationBase,
+    EvaluationInferenceSessionRequest,
+    EvaluationRequestBase,
     FamilyBase,
     FamilyMemberBase,
     InferenceServiceBase,
@@ -43,6 +43,7 @@ from .platform import (
     ModelSpec,
     ModuleBase,
     PipelineBase,
+    PipelineRunBase,
     ReportBase,
     TagNameType,
     TeamBase,
@@ -112,12 +113,10 @@ class DyffEntityCreateRequest(DyffRequestBase):
     account: str = pydantic.Field(description="Account that owns the entity")
 
 
-class AnalysisCreateRequest(DyffEntityCreateRequest, AnalysisBase):
+class AnalysisCreateRequest(DyffEntityCreateRequest, AnalysisRequestBase):
     """An Analysis transforms Datasets, Evaluations, and Measurements into new
     Measurements or SafetyCases."""
 
-    method: EntityIDField = pydantic.Field(description="Method ID")
-
     @pydantic.field_validator("scope", check_fields=False)
     def _validate_scope(cls, scope: AnalysisScope) -> AnalysisScope:
         # TODO: This has to be a validator function because we can't apply the
@@ -214,40 +213,15 @@ class InferenceSessionTokenCreateRequest(DyffRequestBase):
     )
 
 
-class EvaluationInferenceSessionRequest(InferenceSessionBase):
-    inferenceService: str = pydantic.Field(description="InferenceService ID")
-
-
-class EvaluationCreateRequest(DyffEntityCreateRequest, EvaluationBase):
+class EvaluationCreateRequest(DyffEntityCreateRequest, EvaluationRequestBase):
     """A description of how to run an InferenceService on a Dataset to obtain a set of
     evaluation results."""
 
-    inferenceSession: Optional[EvaluationInferenceSessionRequest] = pydantic.Field(
-        default=None,
-        description="Specification of the InferenceSession that will perform inference for the evaluation.",
-    )
-
-    inferenceSessionReference: Optional[str] = pydantic.Field(
-        default=None,
-        description="The ID of a running inference session that will be used"
-        " for the evaluation, instead of starting a new one.",
-    )
-
-    @pydantic.model_validator(mode="after")
-    def check_session_exactly_one(self):
-        session = self.inferenceSession is not None
-        session_ref = self.inferenceSessionReference is not None
-        if not (session ^ session_ref):
-            raise ValueError(
-                "must specify exactly one of {inferenceSession, inferenceSessionReference}"
-            )
-        return self
-
     @staticmethod
     def repeat_of(evaluation: Evaluation) -> EvaluationCreateRequest:
         """Return a request that will run an existing Evaluation again with the same
         configuration."""
-        base = upcast(EvaluationBase, evaluation)
+        base = upcast(EvaluationRequestBase, evaluation)
         if evaluation.inferenceSessionReference:
             return EvaluationCreateRequest(
                 account=evaluation.account,
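The repeat_of helper keeps its public signature and now upcasts to EvaluationRequestBase internally. A short usage sketch (existing_evaluation is a hypothetical Evaluation previously fetched from the Dyff API):

    # Build a request that re-runs an existing Evaluation with the same configuration.
    request = EvaluationCreateRequest.repeat_of(existing_evaluation)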
@@ -287,13 +261,8 @@ class PipelineCreateRequest(DyffEntityCreateRequest, PipelineBase):
     pass
 
 
-class PipelineRunRequest(DyffRequestBase):
-    """A request to run a pipeline."""
-
-    arguments: dict[str, pydantic.JsonValue] = pydantic.Field(
-        default_factory=dict,
-        description="Arguments to pass to the pipeline run.",
-    )
+class PipelineRunRequest(DyffEntityCreateRequest, PipelineRunBase):
+    pass
 
 
 class ReportCreateRequest(DyffEntityCreateRequest, ReportBase):
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dyff-schema
-Version: 0.37.3
+Version: 0.38.1
 Summary: Data models for the Dyff AI auditing platform.
 Author-email: Digital Safety Research Institute <contact@dsri.org>
 License: Apache-2.0
@@ -1,5 +1,5 @@
 dyff/schema/__init__.py,sha256=w7OWDFuyGKd6xt_yllNtKzHahPgywrfU4Ue02psYaMA,2244
-dyff/schema/_version.py,sha256=5nPrqHGvMXylMNMttG621ra1SKLUs9wn6ubCYwxMf7g,80
+dyff/schema/_version.py,sha256=OAbKv7VQqg-KzyqDjyeWStot6XPBinPJdHJGJ1MwAw4,80
 dyff/schema/adapters.py,sha256=YMTHv_2VlLGFp-Kqwa6H51hjffHmk8gXjZilHysIF5Q,123
 dyff/schema/annotations.py,sha256=nE6Jk1PLqlShj8uqjE_EzZC9zYnTDW5AVtQcjysiK8M,10018
 dyff/schema/base.py,sha256=jvaNtsSZyFfsdUZTcY_U-yfLY5_GyrMxSXhON2R9XR0,119
@@ -29,8 +29,8 @@ dyff/schema/v0/r1/adapters.py,sha256=hpwCSW8lkMkUKCLe0zaMUDu-VS_caSxJvPsECEi_XRA
 dyff/schema/v0/r1/base.py,sha256=1VJXVLKldOq3aH2HdVgxXXk4DJTZEIiaTa4GzMKzziU,20228
 dyff/schema/v0/r1/commands.py,sha256=jqbEvDRLpcVGWXqlbniuSKg3UIjlrpKt6_0hiwUpPfQ,20153
 dyff/schema/v0/r1/oci.py,sha256=YjHDVBJ2IIxqijll70OK6pM-qT6pq8tvU7D3YB9vGM0,6700
-dyff/schema/v0/r1/platform.py,sha256=Ck6nHK-k-Lm90Gz-fpz_I_6bSFixR1NYkuPWHFfR6d8,108134
-dyff/schema/v0/r1/requests.py,sha256=-KaM1VjwaqZEI0zgexeyetQccrRX78x4WxHNIxQqJog,19483
+dyff/schema/v0/r1/platform.py,sha256=Slh548oqJhfTfriz62YRut3WriGgE4FfwflG5oMwkTM,110503
+dyff/schema/v0/r1/requests.py,sha256=JMpdx9WBlUpePshacG85XMmlGPBkkaK5453VaK3kH9s,18321
 dyff/schema/v0/r1/responses.py,sha256=nxy7FPtfw2B_bljz5UGGuSE79HTkDQxKH56AJVmd4Qo,1287
 dyff/schema/v0/r1/test.py,sha256=X6dUyVd5svcPCI-PBMOAqEfK9jv3bRDvkQTJzwS96c0,10720
 dyff/schema/v0/r1/version.py,sha256=NONebgcv5Thsw_ymud6PacZdGjV6ndBrmLnap-obcpo,428
@@ -43,9 +43,9 @@ dyff/schema/v0/r1/dataset/text.py,sha256=MYG5seGODDryRSCy-g0Unh5dD0HCytmZ3FeElC-
 dyff/schema/v0/r1/dataset/vision.py,sha256=tJFF4dkhHX0UXTj1sPW-G22xTSI40gbYO465FuvmvAU,443
 dyff/schema/v0/r1/io/__init__.py,sha256=L5y8UhRnojerPYHumsxQJRcHCNz8Hj9NM8b47mewMNs,92
 dyff/schema/v0/r1/io/vllm.py,sha256=vWyLg-susbg0JDfv6VExBpgFdU2GHP2a14ChOdbckvs,5321
-dyff_schema-0.37.3.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-dyff_schema-0.37.3.dist-info/licenses/NOTICE,sha256=YONACu0s_Ui6jNi-wtEsVQbTU1JIkh8wvLH6d1-Ni_w,43
-dyff_schema-0.37.3.dist-info/METADATA,sha256=nuB3G6pomcu9ApybN9hIRUMxmFluT4bhC0CUfVokj4U,3734
-dyff_schema-0.37.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-dyff_schema-0.37.3.dist-info/top_level.txt,sha256=9e3VVdeX73t_sUJOPQPCcGtYO1JhoErhHIi3WoWGcFI,5
-dyff_schema-0.37.3.dist-info/RECORD,,
+dyff_schema-0.38.1.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+dyff_schema-0.38.1.dist-info/licenses/NOTICE,sha256=YONACu0s_Ui6jNi-wtEsVQbTU1JIkh8wvLH6d1-Ni_w,43
+dyff_schema-0.38.1.dist-info/METADATA,sha256=LwpS_MihYVnxHSp9cuKFpjY72GTrHqgHuR5b7ztQM9Y,3734
+dyff_schema-0.38.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dyff_schema-0.38.1.dist-info/top_level.txt,sha256=9e3VVdeX73t_sUJOPQPCcGtYO1JhoErhHIi3WoWGcFI,5
+dyff_schema-0.38.1.dist-info/RECORD,,