dyff-schema 0.3.4__tar.gz → 0.3.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dyff-schema might be problematic.

Files changed (55):
  1. {dyff-schema-0.3.4/dyff_schema.egg-info → dyff-schema-0.3.7}/PKG-INFO +1 -1
  2. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/platform.py +69 -42
  3. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/requests.py +3 -7
  4. {dyff-schema-0.3.4 → dyff-schema-0.3.7/dyff_schema.egg-info}/PKG-INFO +1 -1
  5. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/.gitignore +0 -0
  6. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/.gitlab-ci.yml +0 -0
  7. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/.licenserc.yaml +0 -0
  8. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/.pre-commit-config.yaml +0 -0
  9. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/.prettierignore +0 -0
  10. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/.secrets.baseline +0 -0
  11. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/CODE_OF_CONDUCT.md +0 -0
  12. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/LICENSE +0 -0
  13. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/NOTICE +0 -0
  14. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/README.md +0 -0
  15. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/__init__.py +0 -0
  16. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/adapters.py +0 -0
  17. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/base.py +0 -0
  18. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/copydoc.py +0 -0
  19. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/dataset/__init__.py +0 -0
  20. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/dataset/arrow.py +0 -0
  21. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/dataset/binary.py +0 -0
  22. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/dataset/classification.py +0 -0
  23. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/dataset/text.py +0 -0
  24. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/dataset/vision.py +0 -0
  25. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/ids.py +0 -0
  26. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/io/__init__.py +0 -0
  27. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/io/vllm.py +0 -0
  28. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/platform.py +0 -0
  29. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/py.typed +0 -0
  30. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/quantity.py +0 -0
  31. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/requests.py +0 -0
  32. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/test.py +0 -0
  33. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/__init__.py +0 -0
  34. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/__init__.py +0 -0
  35. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/adapters.py +0 -0
  36. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/base.py +0 -0
  37. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/dataset/__init__.py +0 -0
  38. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/dataset/arrow.py +0 -0
  39. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/dataset/binary.py +0 -0
  40. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/dataset/classification.py +0 -0
  41. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/dataset/text.py +0 -0
  42. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/dataset/vision.py +0 -0
  43. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/io/__init__.py +0 -0
  44. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/io/vllm.py +0 -0
  45. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/test.py +0 -0
  46. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/version.py +0 -0
  47. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/version.py +0 -0
  48. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff_schema.egg-info/SOURCES.txt +0 -0
  49. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff_schema.egg-info/dependency_links.txt +0 -0
  50. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff_schema.egg-info/requires.txt +0 -0
  51. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff_schema.egg-info/top_level.txt +0 -0
  52. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/makefile +0 -0
  53. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/pyproject.toml +0 -0
  54. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/setup.cfg +0 -0
  55. {dyff-schema-0.3.4 → dyff-schema-0.3.7}/tests/test_import.py +0 -0
{dyff-schema-0.3.4/dyff_schema.egg-info → dyff-schema-0.3.7}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dyff-schema
-Version: 0.3.4
+Version: 0.3.7
 Summary: Data models for the Dyff AI auditing platform.
 Author-email: Digital Safety Research Institute <contact@dsri.org>
 License: Apache-2.0
{dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/platform.py

@@ -1114,27 +1114,18 @@ class MeasurementLevel(str, enum.Enum):
 
 
 class AnalysisOutputQueryFields(DyffSchemaBaseModel):
-    method: QueryableDyffEntity = pydantic.Field(
-        description="Identifying information about the Method that was run to produce the output."
-    )
-
-    dataset: Optional[QueryableDyffEntity] = pydantic.Field(
+    analysis: str = pydantic.Field(
         default=None,
-        description="Identifying information about the Dataset being analyzed, if applicable.",
+        description="ID of the Analysis that produced the output.",
     )
 
-    evaluation: Optional[str] = pydantic.Field(
-        default=None,
-        description="ID of the Evaluation being analyzed, if applicable.",
+    method: QueryableDyffEntity = pydantic.Field(
+        description="Identifying information about the Method that was run to produce the output."
     )
 
-    inferenceService: Optional[QueryableDyffEntity] = pydantic.Field(
+    inputs: list[str] = pydantic.Field(
         default=None,
-        description="Identifying information about the InferenceService being analyzed, if applicable.",
-    )
-
-    model: Optional[QueryableDyffEntity] = pydantic.Field(
-        description="Identifying information about the Model being analyzed, if applicable",
+        description="IDs of resources that were inputs to the Analysis.",
     )
 
 
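For orientation, here is a rough sketch of the new output-query payload shape under this change. It is not an excerpt from the package; the values are placeholders, and the id/name structure of QueryableDyffEntity is not shown in this diff, so it is an assumption.

# Not an excerpt from the package; values are illustrative placeholders.
analysis_output_query_fields = {
    "analysis": "analysis-123",                           # ID of the producing Analysis
    "method": {"id": "method-456", "name": "my-method"},  # QueryableDyffEntity (id/name assumed)
    "inputs": ["evaluation-789", "dataset-012"],          # IDs of resources that were inputs to the Analysis
}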
@@ -1150,16 +1141,6 @@ class MeasurementSpec(DyffSchemaBaseModel):
     )
 
 
-class Measurement(DyffEntity, MeasurementSpec, AnalysisOutputQueryFields):
-    kind: Literal["Measurement"] = Entities.Measurement.value
-
-    def dependencies(self) -> list[str]:
-        return []
-
-    def resource_allocation(self) -> Optional[ResourceAllocation]:
-        return None
-
-
 class SafetyCaseSpec(DyffSchemaBaseModel):
     name: str = pydantic.Field(description="Descriptive name of the SafetyCase.")
     description: Optional[str] = pydantic.Field(
@@ -1167,16 +1148,6 @@ class SafetyCaseSpec(DyffSchemaBaseModel):
     )
 
 
-class SafetyCase(DyffEntity, SafetyCaseSpec, AnalysisOutputQueryFields):
-    kind: Literal["SafetyCase"] = Entities.SafetyCase.value
-
-    def dependencies(self) -> list[str]:
-        return []
-
-    def resource_allocation(self) -> Optional[ResourceAllocation]:
-        return None
-
-
 class MethodImplementationKind(str, enum.Enum):
     JupyterNotebook = "JupyterNotebook"
     PythonFunction = "PythonFunction"
@@ -1286,9 +1257,19 @@ class MethodOutput(DyffSchemaBaseModel):
     )
 
 
+class MethodScope(str, enum.Enum):
+    InferenceService = Entities.InferenceService.value
+    Evaluation = Entities.Evaluation.value
+
+
 class MethodBase(DyffSchemaBaseModel):
     name: str = pydantic.Field(description="Descriptive name of the Method.")
 
+    scope: MethodScope = pydantic.Field(
+        description="The scope of the Method. The Method produces outputs that"
+        " are specific to one entity of the type specified in the .scope field."
+    )
+
     description: Optional[str] = pydantic.Field(
         default=None, description="Long-form description, interpreted as Markdown."
    )
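A minimal, self-contained sketch of what the new scope field implies, using a stand-in enum rather than imports from dyff.schema. The string values assume Entities.InferenceService.value == "InferenceService" and Entities.Evaluation.value == "Evaluation", which matches the pattern of the kind literals elsewhere in this file.

import enum

# Stand-in mirroring the MethodScope enum added above.
class MethodScope(str, enum.Enum):
    InferenceService = "InferenceService"  # assumes Entities.InferenceService.value == "InferenceService"
    Evaluation = "Evaluation"              # assumes Entities.Evaluation.value == "Evaluation"

# A Method declared with scope=Evaluation produces outputs tied to exactly one
# Evaluation; per the AnalysisScope description later in this diff, an Analysis
# using such a Method must then set scope.evaluation.
method_scope = MethodScope.Evaluation
print(method_scope.value)  # "Evaluation"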
@@ -1327,7 +1308,7 @@ class Method(DyffEntity, MethodBase):
         return None
 
 
-class AnalysisInputMapping(DyffSchemaBaseModel):
+class AnalysisInput(DyffSchemaBaseModel):
     keyword: str = pydantic.Field(
         description="The 'keyword' specified for this input in the MethodSpec."
     )
@@ -1350,24 +1331,48 @@ class ForeignMethod(DyffModelWithID, MethodBase):
     pass
 
 
+class AnalysisScope(DyffSchemaBaseModel):
+    dataset: Optional[str]
+    inferenceService: Optional[str]
+    evaluation: Optional[str]
+    model: Optional[str]
+
+
 class AnalysisBase(DyffSchemaBaseModel):
+    scope: AnalysisScope = pydantic.Field(
+        description="The specific entities to which the analysis results apply."
+        " At a minimum, the field corresponding to method.scope must be set."
+    )
+
     arguments: list[AnalysisArgument] = pydantic.Field(
         default_factory=list,
         description="Arguments to pass to the Method implementation.",
     )
 
-    inputs: list[AnalysisInputMapping] = pydantic.Field(
+    inputs: list[AnalysisInput] = pydantic.Field(
         default_factory=list, description="Mapping of keywords to data entities."
     )
 
 
-class Analysis(DyffEntity, AnalysisBase):
-    kind: Literal["Analysis"] = Entities.Analysis.value
-
+class Analysis(AnalysisBase):
     method: ForeignMethod = pydantic.Field(
         description="The analysis Method to run.",
     )
 
+
+class Measurement(DyffEntity, MeasurementSpec, Analysis):
+    kind: Literal["Measurement"] = Entities.Measurement.value
+
+    def dependencies(self) -> list[str]:
+        return [self.method.id] + [x.entity for x in self.inputs]
+
+    def resource_allocation(self) -> Optional[ResourceAllocation]:
+        return None
+
+
+class SafetyCase(DyffEntity, SafetyCaseSpec, Analysis):
+    kind: Literal["SafetyCase"] = Entities.SafetyCase.value
+
     def dependencies(self) -> list[str]:
         return [self.method.id] + [x.entity for x in self.inputs]
 
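To make the inheritance change concrete, here is a self-contained sketch using stand-in models rather than imports from dyff.schema. It assumes pydantic is installed; the entity field on AnalysisInput is inferred from the dependencies() body above; and the real Measurement carries additional DyffEntity and MeasurementSpec fields that are omitted here. It only illustrates how a Measurement now holds its own scope, method, and inputs and resolves dependencies from them.

from typing import Optional

import pydantic  # this sketch works with either pydantic v1 or v2


class AnalysisInputStub(pydantic.BaseModel):
    keyword: str  # keyword declared for this input in the MethodSpec
    entity: str   # ID of the input entity (inferred from dependencies() above)


class ForeignMethodStub(pydantic.BaseModel):
    id: str


class AnalysisScopeStub(pydantic.BaseModel):
    dataset: Optional[str] = None
    inferenceService: Optional[str] = None
    evaluation: Optional[str] = None
    model: Optional[str] = None


class MeasurementStub(pydantic.BaseModel):
    kind: str = "Measurement"
    scope: AnalysisScopeStub
    method: ForeignMethodStub
    inputs: list[AnalysisInputStub] = []

    def dependencies(self) -> list[str]:
        # Same logic as Measurement.dependencies() in the new schema:
        # the producing Method plus every input entity.
        return [self.method.id] + [x.entity for x in self.inputs]


m = MeasurementStub(
    scope=AnalysisScopeStub(evaluation="evaluation-456"),
    method=ForeignMethodStub(id="method-123"),
    inputs=[AnalysisInputStub(keyword="outputs", entity="evaluation-456")],
)
print(m.dependencies())  # ['method-123', 'evaluation-456']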
@@ -1375,6 +1380,27 @@ class Analysis(DyffEntity, AnalysisBase):
         return None
 
 
+# class Analysis(DyffEntity, AnalysisBase):
+#     kind: Literal["Analysis"] = Entities.Analysis.value
+
+#     method: ForeignMethod = pydantic.Field(
+#         description="The analysis Method to run.",
+#     )
+
+#     def dependencies(self) -> list[str]:
+#         return [self.method.id] + [x.entity for x in self.inputs]
+
+#     def resource_allocation(self) -> Optional[ResourceAllocation]:
+#         return None
+
+
+# class AnalysisAndOutputs(DyffSchemaBaseModel):
+#     analysis: Analysis = pydantic.Field(description="The Analysis entity")
+#     outputs: list[AnalysisOutputType] = pydantic.Field(
+#         description="Concrete outputs of the Analysis"
+#     )
+
+
 # ---------------------------------------------------------------------------
 # Status enumerations
 
@@ -1638,7 +1664,6 @@ def entity_class(kind: Entities):
 
 
 DyffEntityType = Union[
-    Analysis,
     Audit,
     AuditProcedure,
     DataSource,
@@ -1662,7 +1687,7 @@ __all__ = [
     "Analysis",
     "AnalysisArgument",
     "AnalysisBase",
-    "AnalysisInputMapping",
+    "AnalysisInput",
     "AnalysisOutputQueryFields",
     "Annotation",
     "APIFunctions",
@@ -1689,6 +1714,7 @@ __all__ = [
     "EvaluationBase",
     "ExtractorStep",
     "ForeignInferenceService",
+    "ForeignMethod",
     "ForeignModel",
     "Frameworks",
     "InferenceInterface",
@@ -1722,6 +1748,7 @@ __all__ = [
     "MethodOutput",
     "MethodOutputKind",
     "MethodParameter",
+    "MethodScope",
     "Model",
     "ModelArtifact",
     "ModelArtifactHuggingFaceCache",
{dyff-schema-0.3.4 → dyff-schema-0.3.7}/dyff/schema/v0/r1/requests.py

@@ -131,21 +131,17 @@ class DyffEntityQueryRequest(DyffSchemaBaseModel):
 
 
 class _AnalysisProductQueryRequest(DyffEntityQueryRequest):
-    dataset: Optional[str] = pydantic.Field(default=None)
-    datasetName: Optional[str] = pydantic.Field(default=None)
-    evaluation: Optional[str] = pydantic.Field(default=None)
-    inferenceService: Optional[str] = pydantic.Field(default=None)
-    inferenceServiceName: Optional[str] = pydantic.Field(default=None)
+    analysis: Optional[str] = pydantic.Field(default=None)
     method: Optional[str] = pydantic.Field(default=None)
     methodName: Optional[str] = pydantic.Field(default=None)
-    model: Optional[str] = pydantic.Field(default=None)
-    modelName: Optional[str] = pydantic.Field(default=None)
+    inputsAnyOf: Optional[str] = pydantic.Field(default=None)
 
 
 class AnalysisQueryRequest(DyffEntityQueryRequest):
     method: Optional[str] = pydantic.Field(default=None)
     methodName: Optional[str] = pydantic.Field(default=None)
     methodOutputKind: Optional[str] = pydantic.Field(default=None)
+    inputsAnyOf: Optional[str] = pydantic.Field(default=None)
 
 
 class AuditQueryRequest(DyffEntityQueryRequest):
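A hedged sketch of how a query for analysis products changes shape under the new request fields, written as plain dicts rather than a real dyff client call. The exact matching semantics of inputsAnyOf are not documented in this diff; it presumably matches outputs whose inputs include the given ID.

# Illustrative placeholders only; not a real dyff client call.
old_measurement_query = {
    "evaluation": "evaluation-456",
    "inferenceService": "service-789",
}

new_measurement_query = {
    "analysis": "analysis-123",       # the exact Analysis that produced the output
    "inputsAnyOf": "evaluation-456",  # assumed: match outputs whose inputs include this ID
}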
{dyff-schema-0.3.4 → dyff-schema-0.3.7/dyff_schema.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dyff-schema
-Version: 0.3.4
+Version: 0.3.7
 Summary: Data models for the Dyff AI auditing platform.
 Author-email: Digital Safety Research Institute <contact@dsri.org>
 License: Apache-2.0