orca-sdk 0.0.95__py3-none-any.whl → 0.0.97__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. orca_sdk/__init__.py +1 -5
  2. orca_sdk/_generated_api_client/api/__init__.py +22 -2
  3. orca_sdk/_generated_api_client/api/{datasource/create_datasource_datasource_post.py → auth/create_org_plan_auth_org_plan_post.py} +32 -31
  4. orca_sdk/_generated_api_client/api/auth/get_org_plan_auth_org_plan_get.py +122 -0
  5. orca_sdk/_generated_api_client/api/auth/update_org_plan_auth_org_plan_put.py +168 -0
  6. orca_sdk/_generated_api_client/api/datasource/create_datasource_from_content_datasource_post.py +224 -0
  7. orca_sdk/_generated_api_client/api/datasource/create_datasource_from_files_datasource_upload_post.py +229 -0
  8. orca_sdk/_generated_api_client/api/task/list_tasks_task_get.py +21 -26
  9. orca_sdk/_generated_api_client/api/telemetry/generate_memory_suggestions_telemetry_prediction_prediction_id_memory_suggestions_post.py +239 -0
  10. orca_sdk/_generated_api_client/api/telemetry/get_action_recommendation_telemetry_prediction_prediction_id_action_get.py +192 -0
  11. orca_sdk/_generated_api_client/models/__init__.py +54 -4
  12. orca_sdk/_generated_api_client/models/action_recommendation.py +82 -0
  13. orca_sdk/_generated_api_client/models/action_recommendation_action.py +11 -0
  14. orca_sdk/_generated_api_client/models/add_memory_recommendations.py +85 -0
  15. orca_sdk/_generated_api_client/models/add_memory_suggestion.py +79 -0
  16. orca_sdk/_generated_api_client/models/body_create_datasource_from_files_datasource_upload_post.py +145 -0
  17. orca_sdk/_generated_api_client/models/class_representatives.py +92 -0
  18. orca_sdk/_generated_api_client/models/classification_model_metadata.py +14 -0
  19. orca_sdk/_generated_api_client/models/clone_memoryset_request.py +40 -0
  20. orca_sdk/_generated_api_client/models/constraint_violation_error_response.py +8 -7
  21. orca_sdk/_generated_api_client/models/constraint_violation_error_response_status_code.py +8 -0
  22. orca_sdk/_generated_api_client/models/create_classification_model_request.py +40 -0
  23. orca_sdk/_generated_api_client/models/create_datasource_from_content_request.py +101 -0
  24. orca_sdk/_generated_api_client/models/create_memoryset_request.py +40 -0
  25. orca_sdk/_generated_api_client/models/create_org_plan_request.py +73 -0
  26. orca_sdk/_generated_api_client/models/create_org_plan_request_tier.py +11 -0
  27. orca_sdk/_generated_api_client/models/create_regression_model_request.py +20 -0
  28. orca_sdk/_generated_api_client/models/embed_request.py +20 -0
  29. orca_sdk/_generated_api_client/models/embedding_evaluation_payload.py +28 -10
  30. orca_sdk/_generated_api_client/models/embedding_evaluation_request.py +28 -10
  31. orca_sdk/_generated_api_client/models/embedding_model_result.py +9 -0
  32. orca_sdk/_generated_api_client/models/filter_item.py +31 -23
  33. orca_sdk/_generated_api_client/models/filter_item_field_type_1_item_type_0.py +8 -0
  34. orca_sdk/_generated_api_client/models/filter_item_field_type_2_item_type_0.py +8 -0
  35. orca_sdk/_generated_api_client/models/filter_item_field_type_2_item_type_1.py +2 -0
  36. orca_sdk/_generated_api_client/models/internal_server_error_response.py +8 -7
  37. orca_sdk/_generated_api_client/models/internal_server_error_response_status_code.py +8 -0
  38. orca_sdk/_generated_api_client/models/labeled_memory.py +5 -5
  39. orca_sdk/_generated_api_client/models/labeled_memory_update.py +16 -16
  40. orca_sdk/_generated_api_client/models/labeled_memory_with_feedback_metrics.py +5 -5
  41. orca_sdk/_generated_api_client/models/lookup_request.py +20 -0
  42. orca_sdk/_generated_api_client/models/memory_metrics.py +98 -0
  43. orca_sdk/_generated_api_client/models/memoryset_analysis_configs.py +33 -0
  44. orca_sdk/_generated_api_client/models/memoryset_class_patterns_analysis_config.py +79 -0
  45. orca_sdk/_generated_api_client/models/memoryset_class_patterns_metrics.py +138 -0
  46. orca_sdk/_generated_api_client/models/memoryset_metadata.py +42 -0
  47. orca_sdk/_generated_api_client/models/memoryset_metrics.py +33 -0
  48. orca_sdk/_generated_api_client/models/memoryset_update.py +20 -0
  49. orca_sdk/_generated_api_client/models/not_found_error_response.py +6 -7
  50. orca_sdk/_generated_api_client/models/not_found_error_response_resource_type_0.py +1 -0
  51. orca_sdk/_generated_api_client/models/not_found_error_response_status_code.py +8 -0
  52. orca_sdk/_generated_api_client/models/org_plan.py +99 -0
  53. orca_sdk/_generated_api_client/models/org_plan_tier.py +11 -0
  54. orca_sdk/_generated_api_client/models/paginated_task.py +108 -0
  55. orca_sdk/_generated_api_client/models/predictive_model_update.py +20 -0
  56. orca_sdk/_generated_api_client/models/pretrained_embedding_model_metadata.py +8 -0
  57. orca_sdk/_generated_api_client/models/regression_model_metadata.py +14 -0
  58. orca_sdk/_generated_api_client/models/scored_memory_update.py +9 -9
  59. orca_sdk/_generated_api_client/models/service_unavailable_error_response.py +8 -7
  60. orca_sdk/_generated_api_client/models/service_unavailable_error_response_status_code.py +8 -0
  61. orca_sdk/_generated_api_client/models/telemetry_field_type_0_item_type_0.py +8 -0
  62. orca_sdk/_generated_api_client/models/telemetry_field_type_1_item_type_0.py +8 -0
  63. orca_sdk/_generated_api_client/models/telemetry_field_type_1_item_type_1.py +8 -0
  64. orca_sdk/_generated_api_client/models/telemetry_filter_item.py +42 -30
  65. orca_sdk/_generated_api_client/models/telemetry_sort_options.py +42 -30
  66. orca_sdk/_generated_api_client/models/unauthenticated_error_response.py +8 -7
  67. orca_sdk/_generated_api_client/models/unauthenticated_error_response_status_code.py +8 -0
  68. orca_sdk/_generated_api_client/models/unauthorized_error_response.py +8 -7
  69. orca_sdk/_generated_api_client/models/unauthorized_error_response_status_code.py +8 -0
  70. orca_sdk/_generated_api_client/models/update_org_plan_request.py +73 -0
  71. orca_sdk/_generated_api_client/models/update_org_plan_request_tier.py +11 -0
  72. orca_sdk/_shared/metrics.py +1 -1
  73. orca_sdk/classification_model.py +4 -1
  74. orca_sdk/classification_model_test.py +53 -0
  75. orca_sdk/credentials.py +15 -1
  76. orca_sdk/datasource.py +180 -41
  77. orca_sdk/datasource_test.py +194 -0
  78. orca_sdk/embedding_model.py +51 -13
  79. orca_sdk/embedding_model_test.py +27 -0
  80. orca_sdk/job.py +15 -14
  81. orca_sdk/job_test.py +34 -0
  82. orca_sdk/memoryset.py +47 -7
  83. orca_sdk/regression_model_test.py +0 -1
  84. orca_sdk/telemetry.py +94 -3
  85. {orca_sdk-0.0.95.dist-info → orca_sdk-0.0.97.dist-info}/METADATA +18 -1
  86. {orca_sdk-0.0.95.dist-info → orca_sdk-0.0.97.dist-info}/RECORD +87 -56
  87. orca_sdk/_generated_api_client/models/body_create_datasource_datasource_post.py +0 -207
  88. orca_sdk/_generated_api_client/models/labeled_memory_metrics.py +0 -246
  89. {orca_sdk-0.0.95.dist-info → orca_sdk-0.0.97.dist-info}/WHEEL +0 -0
orca_sdk/_generated_api_client/models/embedding_model_result.py
@@ -32,12 +32,14 @@ class EmbeddingModelResult:
  embedding_model_path (str):
  analysis_result (AnalyzeNeighborLabelsResult):
  memoryset_name (Union[None, Unset, str]):
+ is_finetuned (Union[Unset, bool]): Default: False.
  """

  embedding_model_name: str
  embedding_model_path: str
  analysis_result: "AnalyzeNeighborLabelsResult"
  memoryset_name: Union[None, Unset, str] = UNSET
+ is_finetuned: Union[Unset, bool] = False
  additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

  def to_dict(self) -> dict[str, Any]:
@@ -53,6 +55,8 @@ class EmbeddingModelResult:
  else:
  memoryset_name = self.memoryset_name

+ is_finetuned = self.is_finetuned
+
  field_dict: dict[str, Any] = {}
  field_dict.update(self.additional_properties)
  field_dict.update(
@@ -64,6 +68,8 @@ class EmbeddingModelResult:
  )
  if memoryset_name is not UNSET:
  field_dict["memoryset_name"] = memoryset_name
+ if is_finetuned is not UNSET:
+ field_dict["is_finetuned"] = is_finetuned

  return field_dict

@@ -87,11 +93,14 @@ class EmbeddingModelResult:

  memoryset_name = _parse_memoryset_name(d.pop("memoryset_name", UNSET))

+ is_finetuned = d.pop("is_finetuned", UNSET)
+
  embedding_model_result = cls(
  embedding_model_name=embedding_model_name,
  embedding_model_path=embedding_model_path,
  analysis_result=analysis_result,
  memoryset_name=memoryset_name,
+ is_finetuned=is_finetuned,
  )

  embedding_model_result.additional_properties = d
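
The new `is_finetuned` flag defaults to `False` and follows the generated client's optional-field convention: `to_dict` only emits keys whose values are not the `UNSET` sentinel. A minimal standalone sketch of that sentinel pattern (illustrative names only, not the SDK's own types):

```python
from typing import Any, Union


class Unset:
    """Sentinel type: distinguishes 'never provided' from an explicit False or None."""


UNSET = Unset()


def serialize(is_finetuned: Union[Unset, bool] = UNSET) -> dict[str, Any]:
    # Mirrors the generated to_dict(): a key is emitted only when the field was set.
    field_dict: dict[str, Any] = {}
    if not isinstance(is_finetuned, Unset):
        field_dict["is_finetuned"] = is_finetuned
    return field_dict


print(serialize())                   # {}
print(serialize(is_finetuned=True))  # {'is_finetuned': True}
```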
orca_sdk/_generated_api_client/models/filter_item.py
@@ -12,13 +12,15 @@ The main change is:

  import datetime
  from enum import Enum
- from typing import Any, List, Literal, Type, TypeVar, Union, cast
+ from typing import Any, List, Type, TypeVar, Union, cast

  from attrs import define as _attrs_define
  from attrs import field as _attrs_field
  from dateutil.parser import isoparse

  from ..models.filter_item_field_type_0_item import FilterItemFieldType0Item
+ from ..models.filter_item_field_type_1_item_type_0 import FilterItemFieldType1ItemType0
+ from ..models.filter_item_field_type_2_item_type_0 import FilterItemFieldType2ItemType0
  from ..models.filter_item_field_type_2_item_type_1 import FilterItemFieldType2ItemType1
  from ..models.filter_item_op import FilterItemOp

@@ -29,23 +31,23 @@ T = TypeVar("T", bound="FilterItem")
  class FilterItem:
  """
  Attributes:
- field (Union[List[FilterItemFieldType0Item], List[Union[FilterItemFieldType2ItemType1, Literal['metrics']]],
- List[Union[Literal['metadata'], str]]]):
+ field (Union[List[FilterItemFieldType0Item], List[Union[FilterItemFieldType1ItemType0, str]],
+ List[Union[FilterItemFieldType2ItemType0, FilterItemFieldType2ItemType1]]]):
  op (FilterItemOp):
  value (Union[List[bool], List[float], List[int], List[str], None, bool, datetime.datetime, float, int, str]):
  """

  field: Union[
  List[FilterItemFieldType0Item],
- List[Union[FilterItemFieldType2ItemType1, Literal["metrics"]]],
- List[Union[Literal["metadata"], str]],
+ List[Union[FilterItemFieldType1ItemType0, str]],
+ List[Union[FilterItemFieldType2ItemType0, FilterItemFieldType2ItemType1]],
  ]
  op: FilterItemOp
  value: Union[List[bool], List[float], List[int], List[str], None, bool, datetime.datetime, float, int, str]
  additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

  def to_dict(self) -> dict[str, Any]:
- field: Union[List[Union[Literal["metadata"], str]], List[Union[Literal["metrics"], str]], List[str]]
+ field: List[str]
  if isinstance(self.field, list):
  field = []
  for field_type_0_item_data in self.field:
@@ -85,8 +87,8 @@ class FilterItem:
  data: object,
  ) -> Union[
  List[FilterItemFieldType0Item],
- List[Union[FilterItemFieldType2ItemType1, Literal["metrics"]]],
- List[Union[Literal["metadata"], str]],
+ List[Union[FilterItemFieldType1ItemType0, str]],
+ List[Union[FilterItemFieldType2ItemType0, FilterItemFieldType2ItemType1]],
  ]:
  try:
  if not isinstance(data, list):
@@ -108,14 +110,16 @@ class FilterItem:
  _field_type_1 = data
  for field_type_1_item_data in _field_type_1:

- def _parse_field_type_1_item(data: object) -> Union[Literal["metadata"], str]:
- field_type_1_item_type_0 = cast(Literal["metadata"], data)
- if field_type_1_item_type_0 != "metadata":
- raise ValueError(
- f"field_type_1_item_type_0 must match const 'metadata', got '{field_type_1_item_type_0}'"
- )
- return field_type_1_item_type_0
- return cast(Union[Literal["metadata"], str], data)
+ def _parse_field_type_1_item(data: object) -> Union[FilterItemFieldType1ItemType0, str]:
+ try:
+ if not isinstance(data, str):
+ raise TypeError()
+ field_type_1_item_type_0 = FilterItemFieldType1ItemType0(data)
+
+ return field_type_1_item_type_0
+ except: # noqa: E722
+ pass
+ return cast(Union[FilterItemFieldType1ItemType0, str], data)

  field_type_1_item = _parse_field_type_1_item(field_type_1_item_data)

@@ -130,13 +134,17 @@ class FilterItem:
  _field_type_2 = data
  for field_type_2_item_data in _field_type_2:

- def _parse_field_type_2_item(data: object) -> Union[FilterItemFieldType2ItemType1, Literal["metrics"]]:
- field_type_2_item_type_0 = cast(Literal["metrics"], data)
- if field_type_2_item_type_0 != "metrics":
- raise ValueError(
- f"field_type_2_item_type_0 must match const 'metrics', got '{field_type_2_item_type_0}'"
- )
- return field_type_2_item_type_0
+ def _parse_field_type_2_item(
+ data: object,
+ ) -> Union[FilterItemFieldType2ItemType0, FilterItemFieldType2ItemType1]:
+ try:
+ if not isinstance(data, str):
+ raise TypeError()
+ field_type_2_item_type_0 = FilterItemFieldType2ItemType0(data)
+
+ return field_type_2_item_type_0
+ except: # noqa: E722
+ pass
  if not isinstance(data, str):
  raise TypeError()
  field_type_2_item_type_1 = FilterItemFieldType2ItemType1(data)
orca_sdk/_generated_api_client/models/filter_item_field_type_1_item_type_0.py (new file)
@@ -0,0 +1,8 @@
+ from enum import Enum
+
+
+ class FilterItemFieldType1ItemType0(str, Enum):
+ METADATA = "metadata"
+
+ def __str__(self) -> str:
+ return str(self.value)
orca_sdk/_generated_api_client/models/filter_item_field_type_2_item_type_0.py (new file)
@@ -0,0 +1,8 @@
+ from enum import Enum
+
+
+ class FilterItemFieldType2ItemType0(str, Enum):
+ METRICS = "metrics"
+
+ def __str__(self) -> str:
+ return str(self.value)
orca_sdk/_generated_api_client/models/filter_item_field_type_2_item_type_1.py
@@ -15,6 +15,8 @@ class FilterItemFieldType2ItemType1(str, Enum):
  NEIGHBOR_PREDICTED_LABEL_MATCHES_CURRENT_LABEL = "neighbor_predicted_label_matches_current_label"
  NORMALIZED_NEIGHBOR_LABEL_ENTROPY = "normalized_neighbor_label_entropy"
  POTENTIAL_DUPLICATE_MEMORY_IDS = "potential_duplicate_memory_ids"
+ SPREAD = "spread"
+ UNIFORMITY = "uniformity"

  def __str__(self) -> str:
  return str(self.value)
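
The net effect of these three files is that the `Literal['metadata']` and `Literal['metrics']` field prefixes are now proper `str`-based enums, and the metrics field enum gains `spread` and `uniformity` members. A small usage sketch (assuming the 0.0.97 wheel is installed; the import paths match the file list above):

```python
from orca_sdk._generated_api_client.models.filter_item_field_type_1_item_type_0 import (
    FilterItemFieldType1ItemType0,
)
from orca_sdk._generated_api_client.models.filter_item_field_type_2_item_type_0 import (
    FilterItemFieldType2ItemType0,
)
from orca_sdk._generated_api_client.models.filter_item_field_type_2_item_type_1 import (
    FilterItemFieldType2ItemType1,
)

# The enums subclass str, so they compare equal to the old literal values
# and serialize to the same JSON strings.
assert FilterItemFieldType1ItemType0.METADATA == "metadata"
assert str(FilterItemFieldType2ItemType0.METRICS) == "metrics"

# Raw strings parse back into enum members, which is what FilterItem.from_dict
# now does instead of the old const-equality check.
assert FilterItemFieldType2ItemType1("spread") is FilterItemFieldType2ItemType1.SPREAD
```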
orca_sdk/_generated_api_client/models/internal_server_error_response.py
@@ -10,11 +10,14 @@ The main change is:

  # flake8: noqa: C901

- from typing import Any, Literal, Type, TypeVar, cast
+ from enum import Enum
+ from typing import Any, Type, TypeVar

  from attrs import define as _attrs_define
  from attrs import field as _attrs_field

+ from ..models.internal_server_error_response_status_code import InternalServerErrorResponseStatusCode
+
  T = TypeVar("T", bound="InternalServerErrorResponse")


@@ -22,16 +25,16 @@ T = TypeVar("T", bound="InternalServerErrorResponse")
  class InternalServerErrorResponse:
  """
  Attributes:
- status_code (Literal[500]):
+ status_code (InternalServerErrorResponseStatusCode):
  message (str):
  """

- status_code: Literal[500]
+ status_code: InternalServerErrorResponseStatusCode
  message: str
  additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

  def to_dict(self) -> dict[str, Any]:
- status_code = self.status_code
+ status_code = self.status_code.value if isinstance(self.status_code, Enum) else self.status_code

  message = self.message

@@ -49,9 +52,7 @@ class InternalServerErrorResponse:
  @classmethod
  def from_dict(cls: Type[T], src_dict: dict[str, Any]) -> T:
  d = src_dict.copy()
- status_code = cast(Literal[500], d.pop("status_code"))
- if status_code != 500:
- raise ValueError(f"status_code must match const 500, got '{status_code}'")
+ status_code = InternalServerErrorResponseStatusCode(d.pop("status_code"))

  message = d.pop("message")

orca_sdk/_generated_api_client/models/internal_server_error_response_status_code.py (new file)
@@ -0,0 +1,8 @@
+ from enum import IntEnum
+
+
+ class InternalServerErrorResponseStatusCode(IntEnum):
+ VALUE_500 = 500
+
+ def __str__(self) -> str:
+ return str(self.value)
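
The same `Literal[...]`-to-enum migration applies to the error-response status codes (the file list shows matching `*_status_code.py` modules for the other error responses as well). A brief sketch of how the new `IntEnum` behaves (assuming the wheel is installed):

```python
from orca_sdk._generated_api_client.models.internal_server_error_response_status_code import (
    InternalServerErrorResponseStatusCode,
)

code = InternalServerErrorResponseStatusCode(500)  # from_dict parses the raw payload value this way
assert code == 500             # IntEnum members still compare equal to plain ints
assert code.value == 500
assert str(code) == "500"      # __str__ returns the numeric value as a string
```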
orca_sdk/_generated_api_client/models/labeled_memory.py
@@ -19,7 +19,7 @@ from dateutil.parser import isoparse

  if TYPE_CHECKING:
  from ..models.labeled_memory_metadata import LabeledMemoryMetadata
- from ..models.labeled_memory_metrics import LabeledMemoryMetrics
+ from ..models.memory_metrics import MemoryMetrics


  T = TypeVar("T", bound="LabeledMemory")
@@ -39,7 +39,7 @@ class LabeledMemory:
  created_at (datetime.datetime):
  updated_at (datetime.datetime):
  edited_at (datetime.datetime):
- metrics (LabeledMemoryMetrics): Metrics computed for a labeled memory.
+ metrics (MemoryMetrics):
  label (int):
  label_name (Union[None, str]):
  """
@@ -53,7 +53,7 @@ class LabeledMemory:
  created_at: datetime.datetime
  updated_at: datetime.datetime
  edited_at: datetime.datetime
- metrics: "LabeledMemoryMetrics"
+ metrics: "MemoryMetrics"
  label: int
  label_name: Union[None, str]
  additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
@@ -110,7 +110,7 @@ class LabeledMemory:

  @classmethod
  def from_dict(cls: Type[T], src_dict: dict[str, Any]) -> T:
  from ..models.labeled_memory_metadata import LabeledMemoryMetadata
- from ..models.labeled_memory_metrics import LabeledMemoryMetrics
+ from ..models.memory_metrics import MemoryMetrics

  d = src_dict.copy()

@@ -140,7 +140,7 @@ class LabeledMemory:

  edited_at = isoparse(d.pop("edited_at"))

- metrics = LabeledMemoryMetrics.from_dict(d.pop("metrics"))
+ metrics = MemoryMetrics.from_dict(d.pop("metrics"))

  label = d.pop("label")

orca_sdk/_generated_api_client/models/labeled_memory_update.py
@@ -17,8 +17,8 @@ from attrs import define as _attrs_define
  from ..types import UNSET, Unset

  if TYPE_CHECKING:
- from ..models.labeled_memory_metrics import LabeledMemoryMetrics
  from ..models.labeled_memory_update_metadata_type_0 import LabeledMemoryUpdateMetadataType0
+ from ..models.memory_metrics import MemoryMetrics


  T = TypeVar("T", bound="LabeledMemoryUpdate")
@@ -33,20 +33,20 @@ class LabeledMemoryUpdate:
  value (Union[Unset, str]):
  metadata (Union['LabeledMemoryUpdateMetadataType0', None, Unset]):
  source_id (Union[None, Unset, str]):
+ metrics (Union['MemoryMetrics', None, Unset]):
  label (Union[Unset, int]):
- metrics (Union['LabeledMemoryMetrics', None, Unset]):
  """

  memory_id: str
  value: Union[Unset, str] = UNSET
  metadata: Union["LabeledMemoryUpdateMetadataType0", None, Unset] = UNSET
  source_id: Union[None, Unset, str] = UNSET
+ metrics: Union["MemoryMetrics", None, Unset] = UNSET
  label: Union[Unset, int] = UNSET
- metrics: Union["LabeledMemoryMetrics", None, Unset] = UNSET

  def to_dict(self) -> dict[str, Any]:
- from ..models.labeled_memory_metrics import LabeledMemoryMetrics
  from ..models.labeled_memory_update_metadata_type_0 import LabeledMemoryUpdateMetadataType0
+ from ..models.memory_metrics import MemoryMetrics

  memory_id = self.memory_id

@@ -70,16 +70,16 @@ class LabeledMemoryUpdate:
  else:
  source_id = self.source_id

- label = self.label
-
  metrics: Union[Dict[str, Any], None, Unset]
  if isinstance(self.metrics, Unset):
  metrics = UNSET
- elif isinstance(self.metrics, LabeledMemoryMetrics):
+ elif isinstance(self.metrics, MemoryMetrics):
  metrics = self.metrics.to_dict()
  else:
  metrics = self.metrics

+ label = self.label
+
  field_dict: dict[str, Any] = {}
  field_dict.update(
  {
@@ -92,17 +92,17 @@ class LabeledMemoryUpdate:
  field_dict["metadata"] = metadata
  if source_id is not UNSET:
  field_dict["source_id"] = source_id
- if label is not UNSET:
- field_dict["label"] = label
  if metrics is not UNSET:
  field_dict["metrics"] = metrics
+ if label is not UNSET:
+ field_dict["label"] = label

  return field_dict

  @classmethod
  def from_dict(cls: Type[T], src_dict: dict[str, Any]) -> T:
- from ..models.labeled_memory_metrics import LabeledMemoryMetrics
  from ..models.labeled_memory_update_metadata_type_0 import LabeledMemoryUpdateMetadataType0
+ from ..models.memory_metrics import MemoryMetrics

  d = src_dict.copy()
  memory_id = d.pop("memory_id")
@@ -140,9 +140,7 @@ class LabeledMemoryUpdate:

  source_id = _parse_source_id(d.pop("source_id", UNSET))

- label = d.pop("label", UNSET)
-
- def _parse_metrics(data: object) -> Union["LabeledMemoryMetrics", None, Unset]:
+ def _parse_metrics(data: object) -> Union["MemoryMetrics", None, Unset]:
  if data is None:
  return data
  if isinstance(data, Unset):
@@ -150,22 +148,24 @@ class LabeledMemoryUpdate:
  try:
  if not isinstance(data, dict):
  raise TypeError()
- metrics_type_0 = LabeledMemoryMetrics.from_dict(data)
+ metrics_type_0 = MemoryMetrics.from_dict(data)

  return metrics_type_0
  except: # noqa: E722
  pass
- return cast(Union["LabeledMemoryMetrics", None, Unset], data)
+ return cast(Union["MemoryMetrics", None, Unset], data)

  metrics = _parse_metrics(d.pop("metrics", UNSET))

+ label = d.pop("label", UNSET)
+
  labeled_memory_update = cls(
  memory_id=memory_id,
  value=value,
  metadata=metadata,
  source_id=source_id,
- label=label,
  metrics=metrics,
+ label=label,
  )

  return labeled_memory_update
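
`LabeledMemoryUpdate.metrics` now carries the consolidated `MemoryMetrics` type instead of the removed `LabeledMemoryMetrics`, and optional fields are still omitted from the payload unless set. A usage sketch (assuming the wheel is installed; the memory id and metric values are illustrative):

```python
from orca_sdk._generated_api_client.models.labeled_memory_update import LabeledMemoryUpdate
from orca_sdk._generated_api_client.models.memory_metrics import MemoryMetrics

update = LabeledMemoryUpdate(
    memory_id="mem_123",  # hypothetical id
    label=2,
    metrics=MemoryMetrics(anomaly_score=0.97),
)

# Only explicitly set fields appear in the serialized body:
# roughly {'memory_id': 'mem_123', 'metrics': {'anomaly_score': 0.97}, 'label': 2}
print(update.to_dict())
```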
orca_sdk/_generated_api_client/models/labeled_memory_with_feedback_metrics.py
@@ -18,11 +18,11 @@ from attrs import field as _attrs_field
  from dateutil.parser import isoparse

  if TYPE_CHECKING:
- from ..models.labeled_memory_metrics import LabeledMemoryMetrics
  from ..models.labeled_memory_with_feedback_metrics_feedback_metrics import (
  LabeledMemoryWithFeedbackMetricsFeedbackMetrics,
  )
  from ..models.labeled_memory_with_feedback_metrics_metadata import LabeledMemoryWithFeedbackMetricsMetadata
+ from ..models.memory_metrics import MemoryMetrics


  T = TypeVar("T", bound="LabeledMemoryWithFeedbackMetrics")
@@ -41,7 +41,7 @@ class LabeledMemoryWithFeedbackMetrics:
  created_at (datetime.datetime):
  updated_at (datetime.datetime):
  edited_at (datetime.datetime):
- metrics (LabeledMemoryMetrics): Metrics computed for a labeled memory.
+ metrics (MemoryMetrics):
  label (int):
  label_name (Union[None, str]):
  feedback_metrics (LabeledMemoryWithFeedbackMetricsFeedbackMetrics):
@@ -57,7 +57,7 @@ class LabeledMemoryWithFeedbackMetrics:
  created_at: datetime.datetime
  updated_at: datetime.datetime
  edited_at: datetime.datetime
- metrics: "LabeledMemoryMetrics"
+ metrics: "MemoryMetrics"
  label: int
  label_name: Union[None, str]
  feedback_metrics: "LabeledMemoryWithFeedbackMetricsFeedbackMetrics"
@@ -121,11 +121,11 @@ class LabeledMemoryWithFeedbackMetrics:

  @classmethod
  def from_dict(cls: Type[T], src_dict: dict[str, Any]) -> T:
- from ..models.labeled_memory_metrics import LabeledMemoryMetrics
  from ..models.labeled_memory_with_feedback_metrics_feedback_metrics import (
  LabeledMemoryWithFeedbackMetricsFeedbackMetrics,
  )
  from ..models.labeled_memory_with_feedback_metrics_metadata import LabeledMemoryWithFeedbackMetricsMetadata
+ from ..models.memory_metrics import MemoryMetrics

  d = src_dict.copy()

@@ -155,7 +155,7 @@ class LabeledMemoryWithFeedbackMetrics:

  edited_at = isoparse(d.pop("edited_at"))

- metrics = LabeledMemoryMetrics.from_dict(d.pop("metrics"))
+ metrics = MemoryMetrics.from_dict(d.pop("metrics"))

  label = d.pop("label")

orca_sdk/_generated_api_client/models/lookup_request.py
@@ -26,10 +26,12 @@ class LookupRequest:
  Attributes:
  query (List[str]):
  count (Union[Unset, int]): Default: 1.
+ prompt (Union[None, Unset, str]):
  """

  query: List[str]
  count: Union[Unset, int] = 1
+ prompt: Union[None, Unset, str] = UNSET
  additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

  def to_dict(self) -> dict[str, Any]:
@@ -37,6 +39,12 @@ class LookupRequest:

  count = self.count

+ prompt: Union[None, Unset, str]
+ if isinstance(self.prompt, Unset):
+ prompt = UNSET
+ else:
+ prompt = self.prompt
+
  field_dict: dict[str, Any] = {}
  field_dict.update(self.additional_properties)
  field_dict.update(
@@ -46,6 +54,8 @@ class LookupRequest:
  )
  if count is not UNSET:
  field_dict["count"] = count
+ if prompt is not UNSET:
+ field_dict["prompt"] = prompt

  return field_dict

@@ -56,9 +66,19 @@ class LookupRequest:

  count = d.pop("count", UNSET)

+ def _parse_prompt(data: object) -> Union[None, Unset, str]:
+ if data is None:
+ return data
+ if isinstance(data, Unset):
+ return data
+ return cast(Union[None, Unset, str], data)
+
+ prompt = _parse_prompt(d.pop("prompt", UNSET))
+
  lookup_request = cls(
  query=query,
  count=count,
+ prompt=prompt,
  )

  lookup_request.additional_properties = d
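
`LookupRequest` gains an optional `prompt` string that is only serialized when provided, so existing lookup calls are unaffected. A usage sketch (assuming the wheel is installed; query and prompt text are illustrative):

```python
from orca_sdk._generated_api_client.models.lookup_request import LookupRequest

# Without a prompt the body looks the same as in 0.0.95.
req = LookupRequest(query=["what is the refund policy?"], count=3)
print(req.to_dict())  # {'query': ['what is the refund policy?'], 'count': 3}

# With a prompt, the new key is included.
req_with_prompt = LookupRequest(
    query=["what is the refund policy?"],
    count=3,
    prompt="Answer using only the retrieved memories.",
)
assert "prompt" in req_with_prompt.to_dict()
```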
orca_sdk/_generated_api_client/models/memory_metrics.py
@@ -31,6 +31,15 @@ class MemoryMetrics:
  cluster (Union[Unset, int]):
  embedding_2d (Union[Unset, List[float]]):
  anomaly_score (Union[Unset, float]):
+ neighbor_label_logits (Union[Unset, List[float]]):
+ neighbor_predicted_label (Union[Unset, int]):
+ neighbor_predicted_label_ambiguity (Union[Unset, float]):
+ neighbor_predicted_label_confidence (Union[Unset, float]):
+ current_label_neighbor_confidence (Union[Unset, float]):
+ normalized_neighbor_label_entropy (Union[Unset, float]):
+ neighbor_predicted_label_matches_current_label (Union[None, Unset, bool]):
+ spread (Union[Unset, float]):
+ uniformity (Union[Unset, float]):
  """

  is_duplicate: Union[Unset, bool] = UNSET
@@ -40,6 +49,15 @@ class MemoryMetrics:
  cluster: Union[Unset, int] = UNSET
  embedding_2d: Union[Unset, List[float]] = UNSET
  anomaly_score: Union[Unset, float] = UNSET
+ neighbor_label_logits: Union[Unset, List[float]] = UNSET
+ neighbor_predicted_label: Union[Unset, int] = UNSET
+ neighbor_predicted_label_ambiguity: Union[Unset, float] = UNSET
+ neighbor_predicted_label_confidence: Union[Unset, float] = UNSET
+ current_label_neighbor_confidence: Union[Unset, float] = UNSET
+ normalized_neighbor_label_entropy: Union[Unset, float] = UNSET
+ neighbor_predicted_label_matches_current_label: Union[None, Unset, bool] = UNSET
+ spread: Union[Unset, float] = UNSET
+ uniformity: Union[Unset, float] = UNSET
  additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

  def to_dict(self) -> dict[str, Any]:
@@ -72,6 +90,30 @@ class MemoryMetrics:

  anomaly_score = self.anomaly_score

+ neighbor_label_logits: Union[Unset, List[float]] = UNSET
+ if not isinstance(self.neighbor_label_logits, Unset):
+ neighbor_label_logits = self.neighbor_label_logits
+
+ neighbor_predicted_label = self.neighbor_predicted_label
+
+ neighbor_predicted_label_ambiguity = self.neighbor_predicted_label_ambiguity
+
+ neighbor_predicted_label_confidence = self.neighbor_predicted_label_confidence
+
+ current_label_neighbor_confidence = self.current_label_neighbor_confidence
+
+ normalized_neighbor_label_entropy = self.normalized_neighbor_label_entropy
+
+ neighbor_predicted_label_matches_current_label: Union[None, Unset, bool]
+ if isinstance(self.neighbor_predicted_label_matches_current_label, Unset):
+ neighbor_predicted_label_matches_current_label = UNSET
+ else:
+ neighbor_predicted_label_matches_current_label = self.neighbor_predicted_label_matches_current_label
+
+ spread = self.spread
+
+ uniformity = self.uniformity
+
  field_dict: dict[str, Any] = {}
  field_dict.update(self.additional_properties)
  field_dict.update({})
@@ -89,6 +131,26 @@ class MemoryMetrics:
  field_dict["embedding_2d"] = embedding_2d
  if anomaly_score is not UNSET:
  field_dict["anomaly_score"] = anomaly_score
+ if neighbor_label_logits is not UNSET:
+ field_dict["neighbor_label_logits"] = neighbor_label_logits
+ if neighbor_predicted_label is not UNSET:
+ field_dict["neighbor_predicted_label"] = neighbor_predicted_label
+ if neighbor_predicted_label_ambiguity is not UNSET:
+ field_dict["neighbor_predicted_label_ambiguity"] = neighbor_predicted_label_ambiguity
+ if neighbor_predicted_label_confidence is not UNSET:
+ field_dict["neighbor_predicted_label_confidence"] = neighbor_predicted_label_confidence
+ if current_label_neighbor_confidence is not UNSET:
+ field_dict["current_label_neighbor_confidence"] = current_label_neighbor_confidence
+ if normalized_neighbor_label_entropy is not UNSET:
+ field_dict["normalized_neighbor_label_entropy"] = normalized_neighbor_label_entropy
+ if neighbor_predicted_label_matches_current_label is not UNSET:
+ field_dict["neighbor_predicted_label_matches_current_label"] = (
+ neighbor_predicted_label_matches_current_label
+ )
+ if spread is not UNSET:
+ field_dict["spread"] = spread
+ if uniformity is not UNSET:
+ field_dict["uniformity"] = uniformity

  return field_dict

@@ -135,6 +197,33 @@ class MemoryMetrics:

  anomaly_score = d.pop("anomaly_score", UNSET)

+ neighbor_label_logits = cast(List[float], d.pop("neighbor_label_logits", UNSET))
+
+ neighbor_predicted_label = d.pop("neighbor_predicted_label", UNSET)
+
+ neighbor_predicted_label_ambiguity = d.pop("neighbor_predicted_label_ambiguity", UNSET)
+
+ neighbor_predicted_label_confidence = d.pop("neighbor_predicted_label_confidence", UNSET)
+
+ current_label_neighbor_confidence = d.pop("current_label_neighbor_confidence", UNSET)
+
+ normalized_neighbor_label_entropy = d.pop("normalized_neighbor_label_entropy", UNSET)
+
+ def _parse_neighbor_predicted_label_matches_current_label(data: object) -> Union[None, Unset, bool]:
+ if data is None:
+ return data
+ if isinstance(data, Unset):
+ return data
+ return cast(Union[None, Unset, bool], data)
+
+ neighbor_predicted_label_matches_current_label = _parse_neighbor_predicted_label_matches_current_label(
+ d.pop("neighbor_predicted_label_matches_current_label", UNSET)
+ )
+
+ spread = d.pop("spread", UNSET)
+
+ uniformity = d.pop("uniformity", UNSET)
+
  memory_metrics = cls(
  is_duplicate=is_duplicate,
  duplicate_memory_ids=duplicate_memory_ids,
@@ -143,6 +232,15 @@ class MemoryMetrics:
  cluster=cluster,
  embedding_2d=embedding_2d,
  anomaly_score=anomaly_score,
+ neighbor_label_logits=neighbor_label_logits,
+ neighbor_predicted_label=neighbor_predicted_label,
+ neighbor_predicted_label_ambiguity=neighbor_predicted_label_ambiguity,
+ neighbor_predicted_label_confidence=neighbor_predicted_label_confidence,
+ current_label_neighbor_confidence=current_label_neighbor_confidence,
+ normalized_neighbor_label_entropy=normalized_neighbor_label_entropy,
+ neighbor_predicted_label_matches_current_label=neighbor_predicted_label_matches_current_label,
+ spread=spread,
+ uniformity=uniformity,
  )

  memory_metrics.additional_properties = d
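
`MemoryMetrics` (which replaces the removed `LabeledMemoryMetrics`) adds the neighbor-label statistics plus `spread` and `uniformity`; all of them are optional and round-trip through `from_dict`/`to_dict` only when present. A parsing sketch (assuming the wheel is installed; payload values are illustrative):

```python
from orca_sdk._generated_api_client.models.memory_metrics import MemoryMetrics

payload = {
    "anomaly_score": 0.12,
    "neighbor_predicted_label": 1,
    "neighbor_predicted_label_confidence": 0.84,
    "neighbor_predicted_label_matches_current_label": True,
    "spread": 0.31,
    "uniformity": 0.77,
}

metrics = MemoryMetrics.from_dict(payload)
assert metrics.spread == 0.31
assert metrics.neighbor_predicted_label_matches_current_label is True

# Fields that were never set stay UNSET and are omitted on the way back out.
assert "neighbor_label_logits" not in metrics.to_dict()
```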