orca-sdk 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl
This diff compares publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- orca_sdk/__init__.py +1 -1
- orca_sdk/_utils/auth.py +12 -8
- orca_sdk/async_client.py +3942 -0
- orca_sdk/classification_model.py +218 -20
- orca_sdk/classification_model_test.py +96 -28
- orca_sdk/client.py +899 -712
- orca_sdk/conftest.py +37 -36
- orca_sdk/credentials.py +54 -14
- orca_sdk/credentials_test.py +92 -28
- orca_sdk/datasource.py +64 -12
- orca_sdk/datasource_test.py +144 -18
- orca_sdk/embedding_model.py +54 -37
- orca_sdk/embedding_model_test.py +27 -20
- orca_sdk/job.py +27 -21
- orca_sdk/memoryset.py +823 -205
- orca_sdk/memoryset_test.py +315 -33
- orca_sdk/regression_model.py +59 -15
- orca_sdk/regression_model_test.py +35 -26
- orca_sdk/telemetry.py +76 -26
- {orca_sdk-0.1.2.dist-info → orca_sdk-0.1.4.dist-info}/METADATA +1 -1
- orca_sdk-0.1.4.dist-info/RECORD +41 -0
- orca_sdk-0.1.2.dist-info/RECORD +0 -40
- {orca_sdk-0.1.2.dist-info → orca_sdk-0.1.4.dist-info}/WHEEL +0 -0
orca_sdk/client.py
CHANGED
@@ -6,15 +6,31 @@ import json
 import logging
 import os
 import uuid
+from contextlib import contextmanager
+from contextvars import ContextVar
 from string import Formatter
-from typing import
+from typing import (
+    Any,
+    Callable,
+    Generator,
+    Literal,
+    Mapping,
+    NotRequired,
+    Self,
+    TypedDict,
+    cast,
+    overload,
+)

 from httpx import (
     URL,
     USE_CLIENT_DEFAULT,
+    BaseTransport,
     Client,
     Headers,
     HTTPTransport,
+    Limits,
+    Proxy,
     Request,
     Response,
     Timeout,
@@ -117,6 +133,8 @@ class ClassificationEvaluationRequest(TypedDict):
     datasource_value_column: str
     record_telemetry: NotRequired[bool]
     telemetry_tags: NotRequired[list[str] | None]
+    subsample: NotRequired[int | float | None]
+    ignore_unlabeled: NotRequired[bool]


 class CleanupResponse(TypedDict):
@@ -145,6 +163,7 @@ class CountPredictionsRequest(TypedDict):
     prediction_ids: NotRequired[list[str] | None]
     start_timestamp: NotRequired[str | None]
     end_timestamp: NotRequired[str | None]
+    memory_id: NotRequired[str | None]


 class CreateApiKeyRequest(TypedDict):
@@ -175,6 +194,12 @@ class CreateOrgPlanRequest(TypedDict):
     tier: Literal["FREE", "PRO", "ENTERPRISE", "CANCELLED"]


+class DatasetFilterItem(TypedDict):
+    field: str
+    op: Literal["==", "!=", ">", ">=", "<", "<=", "in", "not in", "like"]
+    value: Any
+
+
 class DeleteMemoriesRequest(TypedDict):
     memory_ids: list[str]

@@ -192,7 +217,7 @@ class EmbedRequest(TypedDict):
 class EmbeddingEvaluationRequest(TypedDict):
     datasource_name_or_id: str
     eval_datasource_name_or_id: NotRequired[str | None]
-    subsample: NotRequired[int | None]
+    subsample: NotRequired[int | float | None]
     datasource_value_column: NotRequired[str]
     datasource_label_column: NotRequired[str | None]
     datasource_score_column: NotRequired[str | None]
@@ -201,7 +226,7 @@ class EmbeddingEvaluationRequest(TypedDict):
     weigh_memories: NotRequired[bool]


-EmbeddingFinetuningMethod = Literal["classification", "batch_triplet_loss"]
+EmbeddingFinetuningMethod = Literal["classification", "regression", "batch_triplet_loss"]


 class FeedbackMetrics(TypedDict):
@@ -215,7 +240,19 @@ FeedbackType = Literal["CONTINUOUS", "BINARY"]
 class FilterItem(TypedDict):
     field: list
     op: Literal["==", "!=", ">", ">=", "<", "<=", "in", "not in", "like"]
-    value: str | int | float | bool | list[str] | list[int] | list[float] | list[bool] | None
+    value: str | int | float | bool | list[str | None] | list[int] | list[float] | list[bool] | None
+
+
+class GetDatasourceRowCountRequest(TypedDict):
+    filters: NotRequired[list[DatasetFilterItem]]
+
+
+class GetDatasourceRowsRequest(TypedDict):
+    filters: NotRequired[list[DatasetFilterItem]]
+    limit: NotRequired[int]
+    offset: NotRequired[int]
+    shuffle: NotRequired[bool]
+    shuffle_seed: NotRequired[int | None]


 class GetMemoriesRequest(TypedDict):
@@ -236,6 +273,18 @@ class InternalServerErrorResponse(TypedDict):
     request_id: str


+JobStatus = Literal["INITIALIZED", "DISPATCHED", "WAITING", "PROCESSING", "COMPLETED", "FAILED", "ABORTING", "ABORTED"]
+
+
+class JobStatusInfo(TypedDict):
+    status: JobStatus
+    steps_total: int | None
+    steps_completed: int | None
+    exception: str | None
+    updated_at: str
+    created_at: str
+
+
 class LabelClassMetrics(TypedDict):
     label: int | None
     label_name: NotRequired[str | None]
@@ -243,6 +292,11 @@ class LabelClassMetrics(TypedDict):
     memory_count: int


+class LabelPercentage(TypedDict):
+    label: int | None
+    percentage: float
+
+
 class LabeledExample(TypedDict):
     text: str
     label_name: str
@@ -253,6 +307,7 @@ class LabeledMemoryInsert(TypedDict):
     value: str | bytes
     metadata: NotRequired[dict[str, str | int | float | bool | None]]
     source_id: NotRequired[str | None]
+    partition_id: NotRequired[str | None]
     label: int | None


@@ -317,8 +372,6 @@ class MemorysetClassPatternsMetrics(TypedDict):
 class MemorysetClusterAnalysisConfig(TypedDict):
     min_cluster_size: NotRequired[int | None]
     max_cluster_size: NotRequired[int | None]
-    clustering_method: NotRequired[Literal["density", "graph"]]
-    min_cluster_distance: NotRequired[float]
     partitioning_method: NotRequired[Literal["ng", "rb", "cpm"]]
     resolution: NotRequired[float | None]
     num_iterations: NotRequired[int]
@@ -335,16 +388,32 @@ class MemorysetClusterMetrics(TypedDict):
 class MemorysetConceptAnalysisConfig(TypedDict):
     high_level_description: NotRequired[str | None]
     max_sample_rows: NotRequired[int]
+    min_lookup_count: NotRequired[int]
+    max_lookup_count: NotRequired[int]
+    overlap_decay_tau: NotRequired[float]
+    target_lcc_fraction: NotRequired[float]
+    min_cluster_count: NotRequired[int]
+    max_cluster_count: NotRequired[int]
     max_trial_count: NotRequired[int]
-
-
-
-    noise_penalty: NotRequired[float]
+    resolution: NotRequired[float]
+    iterations: NotRequired[int]
+    use_generative_naming: NotRequired[bool]
     naming_examples_count: NotRequired[int]
     naming_counterexample_count: NotRequired[int]
+    primary_label_pct_threshold: NotRequired[float]
     seed: NotRequired[int]


+class MemorysetDistributionAnalysisConfig(TypedDict):
+    neighbor_counts: NotRequired[list[int]]
+    quantiles: NotRequired[list[float]]
+
+
+class MemorysetDistributionMetrics(TypedDict):
+    lookup_score_metrics: dict[str, LookupScoreMetrics]
+    updated_at: str
+
+
 class MemorysetDuplicateAnalysisConfig(TypedDict):
     potential_duplicate_threshold: NotRequired[float]

@@ -370,16 +439,6 @@ class MemorysetLabelMetrics(TypedDict):
     updated_at: str


-class MemorysetNeighborAnalysisConfig(TypedDict):
-    neighbor_counts: NotRequired[list[int]]
-    quantiles: NotRequired[list[float]]
-
-
-class MemorysetNeighborMetrics(TypedDict):
-    lookup_score_metrics: dict[str, LookupScoreMetrics]
-    updated_at: str
-
-
 class MemorysetProjectionAnalysisConfig(TypedDict):
     min_dist: NotRequired[float]
     spread: NotRequired[float]
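For orientation, here is a minimal sketch (not part of the diff) of how the new datasource-row request types added above could be populated. It assumes they are importable from orca_sdk.client, where this diff defines them; the field names and types come directly from the hunks shown here.

```python
# Illustrative sketch only, based on the TypedDicts shown in the diff above.
from orca_sdk.client import DatasetFilterItem, GetDatasourceRowsRequest

filters: list[DatasetFilterItem] = [
    {"field": "label", "op": "in", "value": ["spam", "ham"]},  # "value" is typed as Any
]
rows_request: GetDatasourceRowsRequest = {
    "filters": filters,   # NotRequired: may be omitted entirely
    "limit": 100,
    "offset": 0,
    "shuffle": True,
    "shuffle_seed": 42,   # int | None
}
```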
@@ -411,12 +470,12 @@ class NotFoundErrorResponse(TypedDict):
         "memory",
         "evaluation",
         "analysis",
-        "
+        "job",
         "pretrained_embedding_model",
         "finetuned_embedding_model",
         "feedback_category",
-        "embedding_evaluation",
         "org_plan",
+        "worker",
     ]
     | None
 )
@@ -525,6 +584,8 @@ class RegressionEvaluationRequest(TypedDict):
     datasource_value_column: str
     record_telemetry: NotRequired[bool]
     telemetry_tags: NotRequired[list[str] | None]
+    subsample: NotRequired[int | float | None]
+    ignore_unlabeled: NotRequired[bool]


 class RegressionMetrics(TypedDict):
@@ -567,12 +628,14 @@ class RegressionPredictionRequest(TypedDict):
     prompt: NotRequired[str | None]
     use_lookup_cache: NotRequired[bool]
     consistency_level: NotRequired[Literal["Bounded", "Session", "Strong", "Eventual"] | None]
+    ignore_unlabeled: NotRequired[bool]


 class ScorePredictionMemoryLookup(TypedDict):
     value: str | bytes
     embedding: list[float]
     source_id: str | None
+    partition_id: str | None
     metadata: dict[str, str | int | float | bool | None]
     memory_id: str
     memory_version: int
@@ -610,6 +673,7 @@ class ScoredMemory(TypedDict):
     value: str | bytes
     embedding: list[float]
     source_id: str | None
+    partition_id: str | None
     metadata: dict[str, str | int | float | bool | None]
     memory_id: str
     memory_version: int
@@ -625,6 +689,7 @@ class ScoredMemoryInsert(TypedDict):
     value: str | bytes
     metadata: NotRequired[dict[str, str | int | float | bool | None]]
     source_id: NotRequired[str | None]
+    partition_id: NotRequired[str | None]
     score: float | None


@@ -632,6 +697,7 @@ class ScoredMemoryLookup(TypedDict):
     value: str | bytes
     embedding: list[float]
     source_id: str | None
+    partition_id: str | None
     metadata: dict[str, str | int | float | bool | None]
     memory_id: str
     memory_version: int
@@ -648,6 +714,7 @@ class ScoredMemoryUpdate(TypedDict):
     value: NotRequired[str | bytes]
     metadata: NotRequired[dict[str, str | int | float | bool | None] | None]
     source_id: NotRequired[str | None]
+    partition_id: NotRequired[str | None]
     metrics: NotRequired[MemoryMetrics | None]
     score: NotRequired[float | None]

@@ -656,6 +723,7 @@ class ScoredMemoryWithFeedbackMetrics(TypedDict):
     value: str | bytes
     embedding: list[float]
     source_id: str | None
+    partition_id: str | None
     metadata: dict[str, str | int | float | bool | None]
     memory_id: str
     memory_version: int
@@ -681,18 +749,6 @@ class SubConceptMetrics(TypedDict):
     memory_count: int


-TaskStatus = Literal["INITIALIZED", "DISPATCHED", "WAITING", "PROCESSING", "COMPLETED", "FAILED", "ABORTING", "ABORTED"]
-
-
-class TaskStatusInfo(TypedDict):
-    status: TaskStatus
-    steps_total: int | None
-    steps_completed: int | None
-    exception: str | None
-    updated_at: str
-    created_at: str
-
-
 TelemetryField = list

@@ -733,6 +789,13 @@ class ValidationError(TypedDict):
     type: str


+WorkerStatus = Literal["IDLE", "BUSY", "DRAINING", "SHUTDOWN", "CRASHED"]
+
+
+class GetTestErrorByStatusCodeParams(TypedDict):
+    status_code: int | Literal["error", "warning"]
+
+
 class DeleteAuthApiKeyByNameOrIdParams(TypedDict):
     name_or_id: str

@@ -758,6 +821,10 @@ class DeleteMemorysetByNameOrIdParams(TypedDict):
     name_or_id: str


+class PostGpuMemorysetByNameOrIdLookupParams(TypedDict):
+    name_or_id: str
+
+
 class GetMemorysetByNameOrIdMemoryByMemoryIdParams(TypedDict):
     name_or_id: str
     memory_id: str
@@ -790,20 +857,35 @@ class PostMemorysetByNameOrIdMemoriesDeleteParams(TypedDict):
     name_or_id: str


+class PatchGpuMemorysetByNameOrIdMemoryParams(TypedDict):
+    name_or_id: str
+
+
+class PostGpuMemorysetByNameOrIdMemoryParams(TypedDict):
+    name_or_id: str
+
+
+PostGpuMemorysetByNameOrIdMemoryRequest = list[LabeledMemoryInsert] | list[ScoredMemoryInsert]
+
+
+class PatchGpuMemorysetByNameOrIdMemoriesParams(TypedDict):
+    name_or_id: str
+
+
 class PostMemorysetByNameOrIdAnalysisParams(TypedDict):
     name_or_id: str


 class GetMemorysetByNameOrIdAnalysisParams(TypedDict):
     name_or_id: str
-    status: NotRequired[
+    status: NotRequired[JobStatus | None]
     limit: NotRequired[int | None]
     offset: NotRequired[int | None]


-class
+class GetMemorysetByNameOrIdAnalysisByAnalysisJobIdParams(TypedDict):
     name_or_id: str
-
+    analysis_job_id: str


 class PostMemorysetByNameOrIdMemoryByMemoryIdCascadingEditsParams(TypedDict):
@@ -819,34 +901,50 @@ class DeleteFinetunedEmbeddingModelByNameOrIdParams(TypedDict):
     name_or_id: str


-class
+class PostGpuFinetunedEmbeddingModelByNameOrIdEmbeddingParams(TypedDict):
     name_or_id: str


-class
-
-    task_id: str
+class GetPretrainedEmbeddingModelByModelNameParams(TypedDict):
+    model_name: PretrainedEmbeddingModelName


-class
-
+class PostGpuPretrainedEmbeddingModelByModelNameEmbeddingParams(TypedDict):
+    model_name: PretrainedEmbeddingModelName


-class
-
+class PostFinetunedEmbeddingModelByNameOrIdEvaluationParams(TypedDict):
+    name_or_id: str


 class PostPretrainedEmbeddingModelByModelNameEvaluationParams(TypedDict):
     model_name: PretrainedEmbeddingModelName


-class
+class GetFinetunedEmbeddingModelByNameOrIdEvaluationByJobIdParams(TypedDict):
+    name_or_id: str
+    job_id: str
+
+
+class GetPretrainedEmbeddingModelByModelNameEvaluationByJobIdParams(TypedDict):
     model_name: PretrainedEmbeddingModelName
-
+    job_id: str
+
+
+class GetFinetunedEmbeddingModelByNameOrIdEvaluationsParams(TypedDict):
+    name_or_id: str
+    datasource: NotRequired[str | None]
+    value_column: NotRequired[str | None]
+    label_column: NotRequired[str | None]
+    score_column: NotRequired[str | None]


 class GetPretrainedEmbeddingModelByModelNameEvaluationsParams(TypedDict):
     model_name: PretrainedEmbeddingModelName
+    datasource: NotRequired[str | None]
+    value_column: NotRequired[str | None]
+    label_column: NotRequired[str | None]
+    score_column: NotRequired[str | None]


 class PostDatasourceUploadRequest(TypedDict):
@@ -868,20 +966,19 @@ class DeleteDatasourceByNameOrIdParams(TypedDict):
     name_or_id: str


-class
+class PostDatasourceByNameOrIdRowsParams(TypedDict):
     name_or_id: str


-class
+class PostDatasourceByNameOrIdRowsCountParams(TypedDict):
     name_or_id: str
-    status: NotRequired[TaskStatus | None]
-    limit: NotRequired[int | None]
-    offset: NotRequired[int | None]


-class
+class GetDatasourceByNameOrIdEmbeddingModelEvaluationsParams(TypedDict):
     name_or_id: str
-
+    value_column: NotRequired[str | None]
+    label_column: NotRequired[str | None]
+    score_column: NotRequired[str | None]


 class GetDatasourceByNameOrIdDownloadParams(TypedDict):
@@ -907,36 +1004,42 @@ class DeleteClassificationModelByNameOrIdParams(TypedDict):
     name_or_id: str


-class
-
+class PatchRegressionModelByNameOrIdParams(TypedDict):
+    name_or_id: str


-class
-
+class GetRegressionModelByNameOrIdParams(TypedDict):
+    name_or_id: str


-class
-
-    task_id: str
+class DeleteRegressionModelByNameOrIdParams(TypedDict):
+    name_or_id: str


-class
-
-    task_id: str
+class PostGpuClassificationModelByNameOrIdPredictionParams(TypedDict):
+    name_or_id: str


-class
+class PostClassificationModelByNameOrIdPredictionParams(TypedDict):
     name_or_id: str


-class
+class PostGpuRegressionModelByNameOrIdPredictionParams(TypedDict):
     name_or_id: str


-class
+class PostRegressionModelByNameOrIdPredictionParams(TypedDict):
     name_or_id: str


+class PostClassificationModelByModelNameOrIdEvaluationParams(TypedDict):
+    model_name_or_id: str
+
+
+class GetClassificationModelByModelNameOrIdEvaluationParams(TypedDict):
+    model_name_or_id: str
+
+
 class PostRegressionModelByModelNameOrIdEvaluationParams(TypedDict):
     model_name_or_id: str

@@ -945,26 +1048,36 @@ class GetRegressionModelByModelNameOrIdEvaluationParams(TypedDict):
     model_name_or_id: str


-class
+class GetClassificationModelByModelNameOrIdEvaluationByJobIdParams(TypedDict):
     model_name_or_id: str
-
+    job_id: str


-class
+class DeleteClassificationModelByModelNameOrIdEvaluationByJobIdParams(TypedDict):
     model_name_or_id: str
-
+    job_id: str


-class
-
+class GetRegressionModelByModelNameOrIdEvaluationByJobIdParams(TypedDict):
+    model_name_or_id: str
+    job_id: str


-class
-
+class DeleteRegressionModelByModelNameOrIdEvaluationByJobIdParams(TypedDict):
+    model_name_or_id: str
+    job_id: str
+
+
+class GetJobByJobIdParams(TypedDict):
+    job_id: str


-class
-
+class GetJobByJobIdStatusParams(TypedDict):
+    job_id: str
+
+
+class GetJobParams(TypedDict):
+    status: NotRequired[JobStatus | list[JobStatus] | None]
     type: NotRequired[str | list[str] | None]
     limit: NotRequired[int | None]
     offset: NotRequired[int]
@@ -972,8 +1085,18 @@ class GetTaskParams(TypedDict):
     end_timestamp: NotRequired[str | None]


-class
-
+class DeleteJobByJobIdAbortParams(TypedDict):
+    job_id: str
+
+
+class GetWorkerParams(TypedDict):
+    status: NotRequired[WorkerStatus | list[WorkerStatus] | None]
+    limit: NotRequired[int | None]
+    offset: NotRequired[int]
+
+
+class GetWorkerByWorkerIdParams(TypedDict):
+    worker_id: str


 class GetTelemetryPredictionByPredictionIdParams(TypedDict):
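As a hedged illustration of the Task-to-Job renaming visible above (TaskStatus/GetTaskParams become JobStatus/GetJobParams, with new worker queries), a job or worker listing query could be filled in like this. Only the field names come from the diff; the job "type" string is a hypothetical placeholder, since valid job type names are not part of this diff.

```python
# Illustrative sketch only; field names come from GetJobParams / GetWorkerParams above.
from orca_sdk.client import GetJobParams, GetWorkerParams

job_query: GetJobParams = {
    "status": ["PROCESSING", "WAITING"],  # JobStatus | list[JobStatus] | None
    "type": "analysis",                   # hypothetical job type string
    "limit": 20,
    "offset": 0,
}
worker_query: GetWorkerParams = {"status": "BUSY", "limit": 10}
```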
@@ -1019,43 +1142,8 @@ class DeleteTelemetryFeedbackCategoryByNameOrIdParams(TypedDict):
 PutTelemetryPredictionFeedbackRequest = list[PredictionFeedbackRequest]


-class
-
-
-
-class PostGpuMemorysetByNameOrIdLookupParams(TypedDict):
-    name_or_id: str
-
-
-class PatchGpuMemorysetByNameOrIdMemoryParams(TypedDict):
-    name_or_id: str
-
-
-class PostGpuMemorysetByNameOrIdMemoryParams(TypedDict):
-    name_or_id: str
-
-
-PostGpuMemorysetByNameOrIdMemoryRequest = list[LabeledMemoryInsert] | list[ScoredMemoryInsert]
-
-
-class PatchGpuMemorysetByNameOrIdMemoriesParams(TypedDict):
-    name_or_id: str
-
-
-class PostGpuClassificationModelByNameOrIdPredictionParams(TypedDict):
-    name_or_id: str
-
-
-class PostGpuRegressionModelByNameOrIdPredictionParams(TypedDict):
-    name_or_id: str
-
-
-class PostGpuFinetunedEmbeddingModelByNameOrIdEmbeddingParams(TypedDict):
-    name_or_id: str
-
-
-class PostGpuPretrainedEmbeddingModelByModelNameEmbeddingParams(TypedDict):
-    model_name: PretrainedEmbeddingModelName
+class GetAgentsBootstrapClassificationModelByJobIdParams(TypedDict):
+    job_id: str


 class FieldValidationError(TypedDict):
@@ -1127,6 +1215,7 @@ class ClassificationPredictionRequest(TypedDict):
     prompt: NotRequired[str | None]
     use_lookup_cache: NotRequired[bool]
     consistency_level: NotRequired[Literal["Bounded", "Session", "Strong", "Eventual"] | None]
+    ignore_unlabeled: NotRequired[bool]


 class CloneMemorysetRequest(TypedDict):
@@ -1143,6 +1232,7 @@ class ColumnInfo(TypedDict):
     name: str
     type: ColumnType
     enum_options: NotRequired[list[str] | None]
+    string_values: NotRequired[list[str] | None]
     int_values: NotRequired[list[int] | None]
     contains_nones: NotRequired[bool]

@@ -1154,6 +1244,7 @@ class ConceptMetrics(TypedDict):
     primary_label: int | None
     memory_count: int
     subconcepts: list[SubConceptMetrics]
+    label_percentages: list[LabelPercentage]


 class CreateClassificationModelRequest(TypedDict):
@@ -1187,6 +1278,9 @@ class CreateMemorysetRequest(TypedDict):
     index_params: NotRequired[dict[str, int | float | str]]
     prompt: NotRequired[str]
     hidden: NotRequired[bool]
+    batch_size: NotRequired[int]
+    subsample: NotRequired[int | float | None]
+    memory_type: NotRequired[MemoryType]


 class CreateRegressionModelRequest(TypedDict):
@@ -1198,25 +1292,6 @@ class CreateRegressionModelRequest(TypedDict):
     head_type: NotRequired[RARHeadType]


-class DatasourceEmbeddingEvaluationsRequest(TypedDict):
-    value_column: str
-    label_column: str
-    source_id_column: str | None
-    neighbor_count: NotRequired[int]
-    label_names: NotRequired[list[str] | None]
-    embedding_models: NotRequired[list[PretrainedEmbeddingModelName | str] | None]
-
-
-class DatasourceEmbeddingEvaluationsTaskPayload(TypedDict):
-    value_column: str
-    label_column: str
-    source_id_column: str | None
-    neighbor_count: NotRequired[int]
-    label_names: NotRequired[list[str] | None]
-    embedding_models: NotRequired[list[PretrainedEmbeddingModelName | str] | None]
-    datasource_id: str
-
-
 class DatasourceMetadata(TypedDict):
     id: str
     org_id: str
@@ -1230,56 +1305,52 @@ class DatasourceMetadata(TypedDict):


 class EmbeddingEvaluationResponse(TypedDict):
-
+    job_id: str
     org_id: str
-
+    finetuned_embedding_model_id: str | None
+    pretrained_embedding_model_name: PretrainedEmbeddingModelName | None
+    datasource_id: str
+    subsample: int | float | None
+    datasource_value_column: str
+    datasource_label_column: NotRequired[str | None]
+    datasource_score_column: NotRequired[str | None]
+    neighbor_count: int
+    weigh_memories: bool
+    status: JobStatus
     result: ClassificationMetrics | RegressionMetrics | None
     created_at: str
     updated_at: str
-
-
-class EmbeddingEvaluationResponseUnionClassificationMetricsRegressionMetrics(TypedDict):
     task_id: str
-    org_id: str
-    status: TaskStatus
-    result: ClassificationMetrics | RegressionMetrics | None
-    created_at: str
-    updated_at: str
-
-
-class EmbeddingModelResult(TypedDict):
-    embedding_model_name: str
-    embedding_model_path: str
-    analysis_result: MemorysetLabelMetrics
-    memoryset_name: NotRequired[str | None]
-    is_finetuned: NotRequired[bool]


 class EvaluationResponse(TypedDict):
-
+    job_id: str
     org_id: str
-    status:
+    status: JobStatus
     result: ClassificationMetrics | RegressionMetrics | None
     created_at: str
     updated_at: str
+    task_id: str


 class EvaluationResponseClassificationMetrics(TypedDict):
-
+    job_id: str
     org_id: str
-    status:
+    status: JobStatus
     result: ClassificationMetrics | None
     created_at: str
     updated_at: str
+    task_id: str


 class EvaluationResponseRegressionMetrics(TypedDict):
-
+    job_id: str
     org_id: str
-    status:
+    status: JobStatus
     result: RegressionMetrics | None
     created_at: str
     updated_at: str
+    task_id: str


 class FinetuneEmbeddingModelRequest(TypedDict):
@@ -1288,7 +1359,8 @@ class FinetuneEmbeddingModelRequest(TypedDict):
     train_memoryset_name_or_id: NotRequired[str | None]
     train_datasource_name_or_id: NotRequired[str | None]
     eval_datasource_name_or_id: NotRequired[str | None]
-    label_column: NotRequired[str]
+    label_column: NotRequired[str | None]
+    score_column: NotRequired[str | None]
     value_column: NotRequired[str]
     training_method: NotRequired[EmbeddingFinetuningMethod]
     training_args: NotRequired[dict[str, str | int | float | bool]]
@@ -1305,8 +1377,9 @@ class FinetunedEmbeddingModelMetadata(TypedDict):
     created_at: str
     updated_at: str
     base_model: PretrainedEmbeddingModelName
+    finetuning_job_id: str
+    finetuning_status: JobStatus
     finetuning_task_id: str
-    finetuning_status: TaskStatus


 class HTTPValidationError(TypedDict):
@@ -1318,10 +1391,28 @@ class InvalidInputErrorResponse(TypedDict):
     validation_issues: list[FieldValidationError]


+class Job(TypedDict):
+    status: JobStatus
+    steps_total: int | None
+    steps_completed: int | None
+    exception: str | None
+    updated_at: str
+    created_at: str
+    id: str
+    org_id: str
+    worker_id: str | None
+    type: str
+    payload: BaseModel
+    result: BaseModel | None
+    depends_on: NotRequired[list[str]]
+    lease_token: str | None
+
+
 class LabelPredictionMemoryLookup(TypedDict):
     value: str | bytes
     embedding: list[float]
     source_id: str | None
+    partition_id: str | None
     metadata: dict[str, str | int | float | bool | None]
     memory_id: str
     memory_version: int
@@ -1363,6 +1454,7 @@ class LabeledMemory(TypedDict):
     value: str | bytes
     embedding: list[float]
     source_id: str | None
+    partition_id: str | None
     metadata: dict[str, str | int | float | bool | None]
     memory_id: str
     memory_version: int
@@ -1378,6 +1470,7 @@ class LabeledMemoryLookup(TypedDict):
     value: str | bytes
     embedding: list[float]
     source_id: str | None
+    partition_id: str | None
     metadata: dict[str, str | int | float | bool | None]
     memory_id: str
     memory_version: int
@@ -1395,6 +1488,7 @@ class LabeledMemoryUpdate(TypedDict):
     value: NotRequired[str | bytes]
     metadata: NotRequired[dict[str, str | int | float | bool | None] | None]
     source_id: NotRequired[str | None]
+    partition_id: NotRequired[str | None]
     metrics: NotRequired[MemoryMetrics | None]
     label: NotRequired[int | None]

@@ -1403,6 +1497,7 @@ class LabeledMemoryWithFeedbackMetrics(TypedDict):
     value: str | bytes
     embedding: list[float]
     source_id: str | None
+    partition_id: str | None
     metadata: dict[str, str | int | float | bool | None]
     memory_id: str
     memory_version: int
@@ -1422,14 +1517,15 @@ class ListPredictionsRequest(TypedDict):
     prediction_ids: NotRequired[list[str] | None]
     start_timestamp: NotRequired[str | None]
     end_timestamp: NotRequired[str | None]
-
+    memory_id: NotRequired[str | None]
+    limit: NotRequired[int]
     offset: NotRequired[int | None]
     sort: NotRequired[PredictionSort]
     expected_label_match: NotRequired[bool | None]


 class MemorysetAnalysisConfigs(TypedDict):
-
+    distribution: NotRequired[MemorysetDistributionAnalysisConfig | None]
     label: NotRequired[MemorysetLabelAnalysisConfig | None]
     duplicate: NotRequired[MemorysetDuplicateAnalysisConfig | None]
     projection: NotRequired[MemorysetProjectionAnalysisConfig | None]
@@ -1452,7 +1548,7 @@ class MemorysetConceptMetrics(TypedDict):


 class MemorysetMetrics(TypedDict):
-
+    distribution: NotRequired[MemorysetDistributionMetrics | None]
     label: NotRequired[MemorysetLabelMetrics | None]
     duplicate: NotRequired[MemorysetDuplicateMetrics | None]
     projection: NotRequired[MemorysetProjectionMetrics | None]
@@ -1461,6 +1557,13 @@ class MemorysetMetrics(TypedDict):
     concepts: NotRequired[MemorysetConceptMetrics | None]


+class PaginatedJob(TypedDict):
+    items: list[Job]
+    total: int
+    offset: int
+    limit: int
+
+
 class PaginatedUnionLabeledMemoryWithFeedbackMetricsScoredMemoryWithFeedbackMetrics(TypedDict):
     items: list[LabeledMemoryWithFeedbackMetrics | ScoredMemoryWithFeedbackMetrics]
     total: int
@@ -1478,21 +1581,6 @@ class PretrainedEmbeddingModelMetadata(TypedDict):
     num_params: int


-class Task(TypedDict):
-    status: TaskStatus
-    steps_total: int | None
-    steps_completed: int | None
-    exception: str | None
-    updated_at: str
-    created_at: str
-    id: str
-    org_id: str
-    type: str
-    payload: BaseModel
-    result: BaseModel | None
-    depends_on: NotRequired[list[str]]
-
-
 class TelemetryMemoriesRequest(TypedDict):
     memoryset_id: str
     offset: NotRequired[int]
@@ -1501,6 +1589,16 @@ class TelemetryMemoriesRequest(TypedDict):
     sort: NotRequired[list[TelemetrySortOptions] | None]


+class WorkerInfo(TypedDict):
+    id: str
+    status: WorkerStatus
+    started_at: str
+    updated_at: str
+    version: str | None
+    git_sha: str
+    config: dict[str, str | float | int | bool | dict[str, str] | None]
+
+
 PatchGpuMemorysetByNameOrIdMemoryRequest = LabeledMemoryUpdate | ScoredMemoryUpdate


@@ -1513,15 +1611,11 @@ class CascadingEditSuggestion(TypedDict):
     lookup_score: float


-class EmbeddingEvaluationResult(TypedDict):
-    evaluation_results: list[EmbeddingModelResult]
-
-
 class MemorysetAnalysisResponse(TypedDict):
-
+    job_id: str
     org_id: str
     memoryset_id: str
-    status:
+    status: JobStatus
     lookup_count: int
     batch_size: int
     clear_metrics: bool
@@ -1529,6 +1623,7 @@ class MemorysetAnalysisResponse(TypedDict):
     results: MemorysetMetrics | None
     created_at: str
     updated_at: str
+    task_id: str


 class MemorysetMetadata(TypedDict):
@@ -1544,8 +1639,8 @@ class MemorysetMetadata(TypedDict):
     created_at: str
     updated_at: str
     memories_updated_at: str
-
-    insertion_status:
+    insertion_job_id: str
+    insertion_status: JobStatus
     metrics: MemorysetMetrics
     memory_type: MemoryType
     label_names: list[str] | None
@@ -1555,10 +1650,11 @@ class MemorysetMetadata(TypedDict):
     document_prompt_override: str | None
     query_prompt_override: str | None
     hidden: bool
+    insertion_task_id: str


-class
-    items: list[
+class PaginatedWorkerInfo(TypedDict):
+    items: list[WorkerInfo]
     total: int
     offset: int
     limit: int
@@ -1572,22 +1668,12 @@ class BootstrapClassificationModelMeta(TypedDict):


 class BootstrapClassificationModelResponse(TypedDict):
-
+    job_id: str
     org_id: str
-    status:
+    status: JobStatus
     result: BootstrapClassificationModelMeta | None
     input: BootstrapClassificationModelRequest | None
-
-
-class DatasourceEmbeddingEvaluationsResponse(TypedDict):
-    task_id: str
-    org_id: str
-    datasource_id: str
-    status: TaskStatus
-    result: EmbeddingEvaluationResult | None
-    payload: DatasourceEmbeddingEvaluationsTaskPayload
-    created_at: str
-    updated_at: str
+    task_id: str


 class OrcaClient(Client):
@@ -1604,11 +1690,11 @@ class OrcaClient(Client):
         return path_params, query_params

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/test_error/{status_code}"],
         *,
-        params:
+        params: GetTestErrorByStatusCodeParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1616,16 +1702,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """Cleanup orphaned milvus collections and blobs"""
+    ) -> Any:
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/check/alive"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1633,14 +1718,13 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """Delete an API key by name or ID."""
+    ) -> AliveResponse:
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/check/ready"],
         *,
         params: None = None,
         parse_as: Literal["json"] = "json",
@@ -1650,16 +1734,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """Deletes the org and all associated resources"""
+    ) -> ReadyResponse:
        pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/gpu/check/healthy"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1667,15 +1750,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> HealthyResponse:
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/check/healthy"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1683,15 +1766,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> HealthyResponse:
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/gpu/config"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1699,16 +1782,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) -> None:
-        """Delete a finetuned embedding model by name or ID."""
+    ) -> dict[str, str | float | int | bool | None]:
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/config"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1716,16 +1798,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) -> None:
-        """Delete a datasource by name or ID."""
+    ) -> dict[str, str | float | int | bool | None]:
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/auth/root"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1733,15 +1814,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> bool:
+        """Return true only when called with a valid root API key; otherwise 401 Unauthenticated."""
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/auth/api_key"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1749,15 +1831,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> list[ApiKeyMetadata]:
+        """List all API keys for the organization."""
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/auth"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1765,15 +1848,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> bool:
+        """Returns true if the api key header is valid for the org (will be false for admin api key)"""
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/auth/org/plan"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1781,15 +1865,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> OrgPlan:
+        """Get the organization plan."""
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/memoryset"],
         *,
-        params:
+        params: GetMemorysetParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1797,15 +1882,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> list[MemorysetMetadata]:
         pass

     @overload
-    def
+    def GET(
         self,
-        path: Literal["/
+        path: Literal["/memoryset/{name_or_id}"],
         *,
-        params:
+        params: GetMemorysetByNameOrIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1813,46 +1898,31 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """Delete a feedback category and all associated feedback records."""
+    ) -> MemorysetMetadata:
         pass

-
+    @overload
+    def GET(
         self,
-        path:
+        path: Literal["/memoryset/{name_or_id}/memory/{memory_id}"],
         *,
-        params:
-        parse_as: Literal["json"
+        params: GetMemorysetByNameOrIdMemoryByMemoryIdParams,
+        parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
         auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-
-        res = self.delete(
-            path.format(**path_params),
-            params=query_params,
-            headers=headers,
-            cookies=cookies,
-            auth=auth,
-            follow_redirects=follow_redirects,
-            timeout=timeout,
-            extensions=extensions,
-        ).raise_for_status()
-        return (
-            None
-            if res.status_code == 204
-            else res.json() if parse_as == "json" else res.text if parse_as == "text" else res.content
-        )
+    ) -> LabeledMemory | ScoredMemory:
+        pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/memoryset/{name_or_id}/potential_duplicate_groups"],
         *,
-        params:
+        params: GetMemorysetByNameOrIdPotentialDuplicateGroupsParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1860,15 +1930,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> list[list[LabeledMemory]] | list[list[ScoredMemory]]:
         pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/memoryset/{name_or_id}/analysis"],
         *,
-        params:
+        params: GetMemorysetByNameOrIdAnalysisParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1876,15 +1946,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> list[MemorysetAnalysisResponse]:
         pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/memoryset/{name_or_id}/analysis/{analysis_job_id}"],
         *,
-        params:
+        params: GetMemorysetByNameOrIdAnalysisByAnalysisJobIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1892,13 +1962,13 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> MemorysetAnalysisResponse:
         pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/finetuned_embedding_model"],
         *,
         params: None = None,
         parse_as: Literal["json"] = "json",
@@ -1908,15 +1978,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> list[FinetunedEmbeddingModelMetadata]:
+        """List all finetuned embedding models for the organization."""
         pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/finetuned_embedding_model/{name_or_id}"],
         *,
-        params:
+        params: GetFinetunedEmbeddingModelByNameOrIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1924,13 +1995,14 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> FinetunedEmbeddingModelMetadata:
+        """Get a finetuned embedding model by name or ID."""
         pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/pretrained_embedding_model"],
         *,
         params: None = None,
         parse_as: Literal["json"] = "json",
@@ -1940,47 +2012,50 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> list[PretrainedEmbeddingModelMetadata]:
+        """List all available pretrained embedding models."""
         pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/pretrained_embedding_model/{model_name}"],
         *,
-        params:
-        parse_as: Literal["
+        params: GetPretrainedEmbeddingModelByModelNameParams,
+        parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
         auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> PretrainedEmbeddingModelMetadata:
+        """Get metadata for a specific pretrained embedding model."""
         pass

     @overload
     def GET(
         self,
-        path: Literal["/"],
+        path: Literal["/finetuned_embedding_model/{name_or_id}/evaluation/{job_id}"],
         *,
-        params:
-        parse_as: Literal["
+        params: GetFinetunedEmbeddingModelByNameOrIdEvaluationByJobIdParams,
+        parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
         auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> EmbeddingEvaluationResponse:
+        """Get evaluation results for a finetuned embedding model by job ID."""
         pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/pretrained_embedding_model/{model_name}/evaluation/{job_id}"],
         *,
-        params:
+        params: GetPretrainedEmbeddingModelByModelNameEvaluationByJobIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -1988,16 +2063,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """
+    ) -> EmbeddingEvaluationResponse:
+        """Get evaluation results for a pretrained embedding model by job ID."""
         pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/finetuned_embedding_model/{name_or_id}/evaluations"],
         *,
-        params:
+        params: GetFinetunedEmbeddingModelByNameOrIdEvaluationsParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2005,16 +2080,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) -> list[
-        """List all
+    ) -> list[EmbeddingEvaluationResponse]:
+        """List all evaluation results for a finetuned embedding model."""
         pass

     @overload
     def GET(
         self,
-        path: Literal["/
+        path: Literal["/pretrained_embedding_model/{model_name}/evaluations"],
         *,
-        params:
+        params: GetPretrainedEmbeddingModelByModelNameEvaluationsParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2022,14 +2097,14
|
|
|
2022
2097
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2023
2098
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2024
2099
|
extensions: RequestExtensions | None = None,
|
|
2025
|
-
) ->
|
|
2026
|
-
"""
|
|
2100
|
+
) -> list[EmbeddingEvaluationResponse]:
|
|
2101
|
+
"""List all evaluation results for a pretrained embedding model."""
|
|
2027
2102
|
pass
|
|
2028
2103
|
|
|
2029
2104
|
@overload
|
|
2030
2105
|
def GET(
|
|
2031
2106
|
self,
|
|
2032
|
-
path: Literal["/
|
|
2107
|
+
path: Literal["/datasource"],
|
|
2033
2108
|
*,
|
|
2034
2109
|
params: None = None,
|
|
2035
2110
|
parse_as: Literal["json"] = "json",
|
|
@@ -2039,16 +2114,16 @@ class OrcaClient(Client):
|
|
|
2039
2114
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2040
2115
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2041
2116
|
extensions: RequestExtensions | None = None,
|
|
2042
|
-
) ->
|
|
2043
|
-
"""
|
|
2117
|
+
) -> list[DatasourceMetadata]:
|
|
2118
|
+
"""List all datasources for the organization."""
|
|
2044
2119
|
pass
|
|
2045
2120
|
|
|
2046
2121
|
@overload
|
|
2047
2122
|
def GET(
|
|
2048
2123
|
self,
|
|
2049
|
-
path: Literal["/
|
|
2124
|
+
path: Literal["/datasource/{name_or_id}"],
|
|
2050
2125
|
*,
|
|
2051
|
-
params:
|
|
2126
|
+
params: GetDatasourceByNameOrIdParams,
|
|
2052
2127
|
parse_as: Literal["json"] = "json",
|
|
2053
2128
|
headers: HeaderTypes | None = None,
|
|
2054
2129
|
cookies: CookieTypes | None = None,
|
|
@@ -2056,15 +2131,16 @@ class OrcaClient(Client):
|
|
|
2056
2131
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2057
2132
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2058
2133
|
extensions: RequestExtensions | None = None,
|
|
2059
|
-
) ->
|
|
2134
|
+
) -> DatasourceMetadata:
|
|
2135
|
+
"""Get a datasource by name or ID."""
|
|
2060
2136
|
pass
|
|
2061
2137
|
|
|
2062
2138
|
@overload
|
|
2063
2139
|
def GET(
|
|
2064
2140
|
self,
|
|
2065
|
-
path: Literal["/
|
|
2141
|
+
path: Literal["/datasource/{name_or_id}/embedding_model_evaluations"],
|
|
2066
2142
|
*,
|
|
2067
|
-
params:
|
|
2143
|
+
params: GetDatasourceByNameOrIdEmbeddingModelEvaluationsParams,
|
|
2068
2144
|
parse_as: Literal["json"] = "json",
|
|
2069
2145
|
headers: HeaderTypes | None = None,
|
|
2070
2146
|
cookies: CookieTypes | None = None,
|
|
@@ -2072,15 +2148,16 @@ class OrcaClient(Client):
|
|
|
2072
2148
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2073
2149
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2074
2150
|
extensions: RequestExtensions | None = None,
|
|
2075
|
-
) ->
|
|
2151
|
+
) -> list[EmbeddingEvaluationResponse]:
|
|
2152
|
+
"""List all evaluation results for a datasource."""
|
|
2076
2153
|
pass
|
|
2077
2154
|
|
|
2078
2155
|
@overload
|
|
2079
2156
|
def GET(
|
|
2080
2157
|
self,
|
|
2081
|
-
path: Literal["/
|
|
2158
|
+
path: Literal["/datasource/{name_or_id}/download"],
|
|
2082
2159
|
*,
|
|
2083
|
-
params:
|
|
2160
|
+
params: GetDatasourceByNameOrIdDownloadParams,
|
|
2084
2161
|
parse_as: Literal["json"] = "json",
|
|
2085
2162
|
headers: HeaderTypes | None = None,
|
|
2086
2163
|
cookies: CookieTypes | None = None,
|
|
@@ -2088,47 +2165,50 @@ class OrcaClient(Client):
|
|
|
2088
2165
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2089
2166
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2090
2167
|
extensions: RequestExtensions | None = None,
|
|
2091
|
-
) ->
|
|
2168
|
+
) -> list[dict[str, Any]]:
|
|
2169
|
+
"""Download datasource in the specified format."""
|
|
2092
2170
|
pass
|
|
2093
2171
|
|
|
2094
2172
|
@overload
|
|
2095
2173
|
def GET(
|
|
2096
2174
|
self,
|
|
2097
|
-
path: Literal["/
|
|
2175
|
+
path: Literal["/datasource/{name_or_id}/download"],
|
|
2098
2176
|
*,
|
|
2099
|
-
params:
|
|
2100
|
-
parse_as: Literal["
|
|
2177
|
+
params: GetDatasourceByNameOrIdDownloadParams,
|
|
2178
|
+
parse_as: Literal["text"],
|
|
2101
2179
|
headers: HeaderTypes | None = None,
|
|
2102
2180
|
cookies: CookieTypes | None = None,
|
|
2103
2181
|
auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2104
2182
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2105
2183
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2106
2184
|
extensions: RequestExtensions | None = None,
|
|
2107
|
-
) ->
|
|
2185
|
+
) -> str:
|
|
2186
|
+
"""Download datasource in the specified format."""
|
|
2108
2187
|
pass
|
|
2109
2188
|
|
|
2110
2189
|
@overload
|
|
2111
2190
|
def GET(
|
|
2112
2191
|
self,
|
|
2113
|
-
path: Literal["/
|
|
2192
|
+
path: Literal["/datasource/{name_or_id}/download"],
|
|
2114
2193
|
*,
|
|
2115
|
-
params:
|
|
2116
|
-
parse_as:
|
|
2194
|
+
params: GetDatasourceByNameOrIdDownloadParams,
|
|
2195
|
+
parse_as: None,
|
|
2117
2196
|
headers: HeaderTypes | None = None,
|
|
2118
2197
|
cookies: CookieTypes | None = None,
|
|
2119
2198
|
auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2120
2199
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2121
2200
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2122
2201
|
extensions: RequestExtensions | None = None,
|
|
2123
|
-
) ->
|
|
2202
|
+
) -> bytes:
|
|
2203
|
+
"""Download datasource in the specified format."""
|
|
2124
2204
|
pass
|
|
2125
2205
|
|
|
2126
2206
|
@overload
|
|
2127
2207
|
def GET(
|
|
2128
2208
|
self,
|
|
2129
|
-
path: Literal["/
|
|
2209
|
+
path: Literal["/classification_model"],
|
|
2130
2210
|
*,
|
|
2131
|
-
params:
|
|
2211
|
+
params: None = None,
|
|
2132
2212
|
parse_as: Literal["json"] = "json",
|
|
2133
2213
|
headers: HeaderTypes | None = None,
|
|
2134
2214
|
cookies: CookieTypes | None = None,
|
|
@@ -2136,13 +2216,13 @@ class OrcaClient(Client):
|
|
|
2136
2216
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2137
2217
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2138
2218
|
extensions: RequestExtensions | None = None,
|
|
2139
|
-
) ->
|
|
2219
|
+
) -> list[ClassificationModelMetadata]:
|
|
2140
2220
|
pass
|
|
2141
2221
|
|
|
2142
2222
|
@overload
|
|
2143
2223
|
def GET(
|
|
2144
2224
|
self,
|
|
2145
|
-
path: Literal["/
|
|
2225
|
+
path: Literal["/regression_model"],
|
|
2146
2226
|
*,
|
|
2147
2227
|
params: None = None,
|
|
2148
2228
|
parse_as: Literal["json"] = "json",
|
|
@@ -2152,16 +2232,15 @@ class OrcaClient(Client):
|
|
|
2152
2232
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2153
2233
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2154
2234
|
extensions: RequestExtensions | None = None,
|
|
2155
|
-
) -> list[
|
|
2156
|
-
"""List all finetuned embedding models for the organization."""
|
|
2235
|
+
) -> list[RegressionModelMetadata]:
|
|
2157
2236
|
pass
|
|
2158
2237
|
|
|
2159
2238
|
@overload
|
|
2160
2239
|
def GET(
|
|
2161
2240
|
self,
|
|
2162
|
-
path: Literal["/
|
|
2241
|
+
path: Literal["/classification_model/{name_or_id}"],
|
|
2163
2242
|
*,
|
|
2164
|
-
params:
|
|
2243
|
+
params: GetClassificationModelByNameOrIdParams,
|
|
2165
2244
|
parse_as: Literal["json"] = "json",
|
|
2166
2245
|
headers: HeaderTypes | None = None,
|
|
2167
2246
|
cookies: CookieTypes | None = None,
|
|
@@ -2169,16 +2248,15 @@ class OrcaClient(Client):
|
|
|
2169
2248
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2170
2249
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2171
2250
|
extensions: RequestExtensions | None = None,
|
|
2172
|
-
) ->
|
|
2173
|
-
"""Get a finetuned embedding model by name or ID."""
|
|
2251
|
+
) -> ClassificationModelMetadata:
|
|
2174
2252
|
pass
|
|
2175
2253
|
|
|
2176
2254
|
@overload
|
|
2177
2255
|
def GET(
|
|
2178
2256
|
self,
|
|
2179
|
-
path: Literal["/
|
|
2257
|
+
path: Literal["/regression_model/{name_or_id}"],
|
|
2180
2258
|
*,
|
|
2181
|
-
params:
|
|
2259
|
+
params: GetRegressionModelByNameOrIdParams,
|
|
2182
2260
|
parse_as: Literal["json"] = "json",
|
|
2183
2261
|
headers: HeaderTypes | None = None,
|
|
2184
2262
|
cookies: CookieTypes | None = None,
|
|
@@ -2186,16 +2264,15 @@ class OrcaClient(Client):
|
|
|
2186
2264
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2187
2265
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2188
2266
|
extensions: RequestExtensions | None = None,
|
|
2189
|
-
) ->
|
|
2190
|
-
"""Get evaluation results for a finetuned embedding model by task ID."""
|
|
2267
|
+
) -> RegressionModelMetadata:
|
|
2191
2268
|
pass
|
|
2192
2269
|
|
|
2193
2270
|
@overload
|
|
2194
2271
|
def GET(
|
|
2195
2272
|
self,
|
|
2196
|
-
path: Literal["/
|
|
2273
|
+
path: Literal["/predictive_model"],
|
|
2197
2274
|
*,
|
|
2198
|
-
params:
|
|
2275
|
+
params: None = None,
|
|
2199
2276
|
parse_as: Literal["json"] = "json",
|
|
2200
2277
|
headers: HeaderTypes | None = None,
|
|
2201
2278
|
cookies: CookieTypes | None = None,
|
|
@@ -2203,16 +2280,15 @@ class OrcaClient(Client):
|
|
|
2203
2280
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2204
2281
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2205
2282
|
extensions: RequestExtensions | None = None,
|
|
2206
|
-
) -> list[
|
|
2207
|
-
"""List all evaluation results for a finetuned embedding model."""
|
|
2283
|
+
) -> list[ClassificationModelMetadata | RegressionModelMetadata]:
|
|
2208
2284
|
pass
|
|
2209
2285
|
|
|
2210
2286
|
@overload
|
|
2211
2287
|
def GET(
|
|
2212
2288
|
self,
|
|
2213
|
-
path: Literal["/
|
|
2289
|
+
path: Literal["/classification_model/{model_name_or_id}/evaluation"],
|
|
2214
2290
|
*,
|
|
2215
|
-
params:
|
|
2291
|
+
params: GetClassificationModelByModelNameOrIdEvaluationParams,
|
|
2216
2292
|
parse_as: Literal["json"] = "json",
|
|
2217
2293
|
headers: HeaderTypes | None = None,
|
|
2218
2294
|
cookies: CookieTypes | None = None,
|
|
@@ -2220,16 +2296,15 @@ class OrcaClient(Client):
|
|
|
2220
2296
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2221
2297
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2222
2298
|
extensions: RequestExtensions | None = None,
|
|
2223
|
-
) -> list[
|
|
2224
|
-
"""List all available pretrained embedding models."""
|
|
2299
|
+
) -> list[EvaluationResponseClassificationMetrics]:
|
|
2225
2300
|
pass
|
|
2226
2301
|
|
|
2227
2302
|
@overload
|
|
2228
2303
|
def GET(
|
|
2229
2304
|
self,
|
|
2230
|
-
path: Literal["/
|
|
2305
|
+
path: Literal["/regression_model/{model_name_or_id}/evaluation"],
|
|
2231
2306
|
*,
|
|
2232
|
-
params:
|
|
2307
|
+
params: GetRegressionModelByModelNameOrIdEvaluationParams,
|
|
2233
2308
|
parse_as: Literal["json"] = "json",
|
|
2234
2309
|
headers: HeaderTypes | None = None,
|
|
2235
2310
|
cookies: CookieTypes | None = None,
|
|
@@ -2237,16 +2312,15 @@ class OrcaClient(Client):
|
|
|
2237
2312
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2238
2313
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2239
2314
|
extensions: RequestExtensions | None = None,
|
|
2240
|
-
) ->
|
|
2241
|
-
"""Get metadata for a specific pretrained embedding model."""
|
|
2315
|
+
) -> list[EvaluationResponseRegressionMetrics]:
|
|
2242
2316
|
pass
|
|
2243
2317
|
|
|
2244
2318
|
@overload
|
|
2245
2319
|
def GET(
|
|
2246
2320
|
self,
|
|
2247
|
-
path: Literal["/
|
|
2321
|
+
path: Literal["/classification_model/{model_name_or_id}/evaluation/{job_id}"],
|
|
2248
2322
|
*,
|
|
2249
|
-
params:
|
|
2323
|
+
params: GetClassificationModelByModelNameOrIdEvaluationByJobIdParams,
|
|
2250
2324
|
parse_as: Literal["json"] = "json",
|
|
2251
2325
|
headers: HeaderTypes | None = None,
|
|
2252
2326
|
cookies: CookieTypes | None = None,
|
|
@@ -2254,16 +2328,15 @@ class OrcaClient(Client):
|
|
|
2254
2328
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2255
2329
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2256
2330
|
extensions: RequestExtensions | None = None,
|
|
2257
|
-
) ->
|
|
2258
|
-
"""Get evaluation results for a pretrained embedding model by task ID."""
|
|
2331
|
+
) -> EvaluationResponseClassificationMetrics:
|
|
2259
2332
|
pass
|
|
2260
2333
|
|
|
2261
2334
|
@overload
|
|
2262
2335
|
def GET(
|
|
2263
2336
|
self,
|
|
2264
|
-
path: Literal["/
|
|
2337
|
+
path: Literal["/regression_model/{model_name_or_id}/evaluation/{job_id}"],
|
|
2265
2338
|
*,
|
|
2266
|
-
params:
|
|
2339
|
+
params: GetRegressionModelByModelNameOrIdEvaluationByJobIdParams,
|
|
2267
2340
|
parse_as: Literal["json"] = "json",
|
|
2268
2341
|
headers: HeaderTypes | None = None,
|
|
2269
2342
|
cookies: CookieTypes | None = None,
|
|
@@ -2271,16 +2344,15 @@ class OrcaClient(Client):
|
|
|
2271
2344
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2272
2345
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2273
2346
|
extensions: RequestExtensions | None = None,
|
|
2274
|
-
) ->
|
|
2275
|
-
"""List all evaluation results for a pretrained embedding model."""
|
|
2347
|
+
) -> EvaluationResponseRegressionMetrics:
|
|
2276
2348
|
pass
|
|
2277
2349
|
|
|
2278
2350
|
@overload
|
|
2279
2351
|
def GET(
|
|
2280
2352
|
self,
|
|
2281
|
-
path: Literal["/
|
|
2353
|
+
path: Literal["/job/{job_id}"],
|
|
2282
2354
|
*,
|
|
2283
|
-
params:
|
|
2355
|
+
params: GetJobByJobIdParams,
|
|
2284
2356
|
parse_as: Literal["json"] = "json",
|
|
2285
2357
|
headers: HeaderTypes | None = None,
|
|
2286
2358
|
cookies: CookieTypes | None = None,
|
|
@@ -2288,16 +2360,15 @@ class OrcaClient(Client):
|
|
|
2288
2360
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2289
2361
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2290
2362
|
extensions: RequestExtensions | None = None,
|
|
2291
|
-
) ->
|
|
2292
|
-
"""List all datasources for the organization."""
|
|
2363
|
+
) -> Job:
|
|
2293
2364
|
pass
|
|
2294
2365
|
|
|
2295
2366
|
@overload
|
|
2296
2367
|
def GET(
|
|
2297
2368
|
self,
|
|
2298
|
-
path: Literal["/
|
|
2369
|
+
path: Literal["/job/{job_id}/status"],
|
|
2299
2370
|
*,
|
|
2300
|
-
params:
|
|
2371
|
+
params: GetJobByJobIdStatusParams,
|
|
2301
2372
|
parse_as: Literal["json"] = "json",
|
|
2302
2373
|
headers: HeaderTypes | None = None,
|
|
2303
2374
|
cookies: CookieTypes | None = None,
|
|
@@ -2305,16 +2376,15 @@ class OrcaClient(Client):
|
|
|
2305
2376
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2306
2377
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2307
2378
|
extensions: RequestExtensions | None = None,
|
|
2308
|
-
) ->
|
|
2309
|
-
"""Get a datasource by name or ID."""
|
|
2379
|
+
) -> JobStatusInfo:
|
|
2310
2380
|
pass
|
|
2311
2381
|
|
|
2312
2382
|
@overload
|
|
2313
2383
|
def GET(
|
|
2314
2384
|
self,
|
|
2315
|
-
path: Literal["/
|
|
2385
|
+
path: Literal["/job"],
|
|
2316
2386
|
*,
|
|
2317
|
-
params:
|
|
2387
|
+
params: GetJobParams,
|
|
2318
2388
|
parse_as: Literal["json"] = "json",
|
|
2319
2389
|
headers: HeaderTypes | None = None,
|
|
2320
2390
|
cookies: CookieTypes | None = None,
|
|
@@ -2322,16 +2392,15 @@ class OrcaClient(Client):
|
|
|
2322
2392
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2323
2393
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2324
2394
|
extensions: RequestExtensions | None = None,
|
|
2325
|
-
) ->
|
|
2326
|
-
"""List embedding evaluation tasks for a datasource."""
|
|
2395
|
+
) -> PaginatedJob:
|
|
2327
2396
|
pass
|
|
2328
2397
|
|
|
2329
2398
|
@overload
|
|
2330
2399
|
def GET(
|
|
2331
2400
|
self,
|
|
2332
|
-
path: Literal["/
|
|
2401
|
+
path: Literal["/worker"],
|
|
2333
2402
|
*,
|
|
2334
|
-
params:
|
|
2403
|
+
params: GetWorkerParams,
|
|
2335
2404
|
parse_as: Literal["json"] = "json",
|
|
2336
2405
|
headers: HeaderTypes | None = None,
|
|
2337
2406
|
cookies: CookieTypes | None = None,
|
|
@@ -2339,16 +2408,20 @@ class OrcaClient(Client):
|
|
|
2339
2408
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2340
2409
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2341
2410
|
extensions: RequestExtensions | None = None,
|
|
2342
|
-
) ->
|
|
2343
|
-
"""
|
|
2411
|
+
) -> PaginatedWorkerInfo:
|
|
2412
|
+
"""
|
|
2413
|
+
List all workers in the system. Requires root access.
|
|
2414
|
+
|
|
2415
|
+
This endpoint automatically cleans up orphaned workers before returning results.
|
|
2416
|
+
"""
|
|
2344
2417
|
pass
|
|
2345
2418
|
|
|
2346
2419
|
@overload
|
|
2347
2420
|
def GET(
|
|
2348
2421
|
self,
|
|
2349
|
-
path: Literal["/
|
|
2422
|
+
path: Literal["/worker/{worker_id}"],
|
|
2350
2423
|
*,
|
|
2351
|
-
params:
|
|
2424
|
+
params: GetWorkerByWorkerIdParams,
|
|
2352
2425
|
parse_as: Literal["json"] = "json",
|
|
2353
2426
|
headers: HeaderTypes | None = None,
|
|
2354
2427
|
cookies: CookieTypes | None = None,
|
|
@@ -2356,50 +2429,50 @@ class OrcaClient(Client):
|
|
|
2356
2429
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2357
2430
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2358
2431
|
extensions: RequestExtensions | None = None,
|
|
2359
|
-
) ->
|
|
2360
|
-
"""
|
|
2432
|
+
) -> WorkerInfo:
|
|
2433
|
+
"""Get information about a specific worker. Requires root access."""
|
|
2361
2434
|
pass
|
|
2362
2435
|
|
|
2363
2436
|
@overload
|
|
2364
2437
|
def GET(
|
|
2365
2438
|
self,
|
|
2366
|
-
path: Literal["/
|
|
2439
|
+
path: Literal["/telemetry/prediction/{prediction_id}"],
|
|
2367
2440
|
*,
|
|
2368
|
-
params:
|
|
2369
|
-
parse_as: Literal["
|
|
2441
|
+
params: GetTelemetryPredictionByPredictionIdParams,
|
|
2442
|
+
parse_as: Literal["json"] = "json",
|
|
2370
2443
|
headers: HeaderTypes | None = None,
|
|
2371
2444
|
cookies: CookieTypes | None = None,
|
|
2372
2445
|
auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2373
2446
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2374
2447
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2375
2448
|
extensions: RequestExtensions | None = None,
|
|
2376
|
-
) ->
|
|
2377
|
-
"""
|
|
2449
|
+
) -> LabelPredictionWithMemoriesAndFeedback | ScorePredictionWithMemoriesAndFeedback:
|
|
2450
|
+
"""Get a specific prediction by ID."""
|
|
2378
2451
|
pass
|
|
2379
2452
|
|
|
2380
2453
|
@overload
|
|
2381
2454
|
def GET(
|
|
2382
2455
|
self,
|
|
2383
|
-
path: Literal["/
|
|
2456
|
+
path: Literal["/telemetry/prediction/{prediction_id}/explanation"],
|
|
2384
2457
|
*,
|
|
2385
|
-
params:
|
|
2386
|
-
parse_as:
|
|
2458
|
+
params: GetTelemetryPredictionByPredictionIdExplanationParams,
|
|
2459
|
+
parse_as: Literal["text"],
|
|
2387
2460
|
headers: HeaderTypes | None = None,
|
|
2388
2461
|
cookies: CookieTypes | None = None,
|
|
2389
2462
|
auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2390
2463
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2391
2464
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2392
2465
|
extensions: RequestExtensions | None = None,
|
|
2393
|
-
) ->
|
|
2394
|
-
"""
|
|
2466
|
+
) -> str:
|
|
2467
|
+
"""Get explanation for a prediction, optionally streaming the response."""
|
|
2395
2468
|
pass
|
|
2396
2469
|
|
|
2397
2470
|
@overload
|
|
2398
2471
|
def GET(
|
|
2399
2472
|
self,
|
|
2400
|
-
path: Literal["/
|
|
2473
|
+
path: Literal["/telemetry/prediction/{prediction_id}/action"],
|
|
2401
2474
|
*,
|
|
2402
|
-
params:
|
|
2475
|
+
params: GetTelemetryPredictionByPredictionIdActionParams,
|
|
2403
2476
|
parse_as: Literal["json"] = "json",
|
|
2404
2477
|
headers: HeaderTypes | None = None,
|
|
2405
2478
|
cookies: CookieTypes | None = None,
|
|
@@ -2407,15 +2480,16 @@ class OrcaClient(Client):
|
|
|
2407
2480
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2408
2481
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2409
2482
|
extensions: RequestExtensions | None = None,
|
|
2410
|
-
) ->
|
|
2483
|
+
) -> ActionRecommendation:
|
|
2484
|
+
"""Get action recommendation for improving a specific prediction."""
|
|
2411
2485
|
pass
|
|
2412
2486
|
|
|
2413
2487
|
@overload
|
|
2414
2488
|
def GET(
|
|
2415
2489
|
self,
|
|
2416
|
-
path: Literal["/
|
|
2490
|
+
path: Literal["/telemetry/prediction/{prediction_id}/memory_suggestions"],
|
|
2417
2491
|
*,
|
|
2418
|
-
params:
|
|
2492
|
+
params: GetTelemetryPredictionByPredictionIdMemorySuggestionsParams,
|
|
2419
2493
|
parse_as: Literal["json"] = "json",
|
|
2420
2494
|
headers: HeaderTypes | None = None,
|
|
2421
2495
|
cookies: CookieTypes | None = None,
|
|
@@ -2423,15 +2497,21 @@ class OrcaClient(Client):
|
|
|
2423
2497
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2424
2498
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2425
2499
|
extensions: RequestExtensions | None = None,
|
|
2426
|
-
) ->
|
|
2500
|
+
) -> AddMemoryRecommendations:
|
|
2501
|
+
"""
|
|
2502
|
+
Generate synthetic memory suggestions to improve a specific prediction.
|
|
2503
|
+
|
|
2504
|
+
The returned suggestions have labels as string representations of integer indices
|
|
2505
|
+
corresponding to the memoryset's label_names.
|
|
2506
|
+
"""
|
|
2427
2507
|
pass
|
|
2428
2508
|
|
|
2429
2509
|
@overload
|
|
2430
2510
|
def GET(
|
|
2431
2511
|
self,
|
|
2432
|
-
path: Literal["/
|
|
2512
|
+
path: Literal["/telemetry/feedback_category"],
|
|
2433
2513
|
*,
|
|
2434
|
-
params:
|
|
2514
|
+
params: None = None,
|
|
2435
2515
|
parse_as: Literal["json"] = "json",
|
|
2436
2516
|
headers: HeaderTypes | None = None,
|
|
2437
2517
|
cookies: CookieTypes | None = None,
|
|
@@ -2439,15 +2519,16 @@ class OrcaClient(Client):
|
|
|
2439
2519
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2440
2520
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2441
2521
|
extensions: RequestExtensions | None = None,
|
|
2442
|
-
) ->
|
|
2522
|
+
) -> list[PredictionFeedbackCategory]:
|
|
2523
|
+
"""List all feedback categories for the organization."""
|
|
2443
2524
|
pass
|
|
2444
2525
|
|
|
2445
2526
|
@overload
|
|
2446
2527
|
def GET(
|
|
2447
2528
|
self,
|
|
2448
|
-
path: Literal["/
|
|
2529
|
+
path: Literal["/telemetry/feedback_category/{name_or_id}"],
|
|
2449
2530
|
*,
|
|
2450
|
-
params:
|
|
2531
|
+
params: GetTelemetryFeedbackCategoryByNameOrIdParams,
|
|
2451
2532
|
parse_as: Literal["json"] = "json",
|
|
2452
2533
|
headers: HeaderTypes | None = None,
|
|
2453
2534
|
cookies: CookieTypes | None = None,
|
|
@@ -2455,15 +2536,16 @@ class OrcaClient(Client):
|
|
|
2455
2536
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2456
2537
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2457
2538
|
extensions: RequestExtensions | None = None,
|
|
2458
|
-
) ->
|
|
2539
|
+
) -> PredictionFeedbackCategory:
|
|
2540
|
+
"""Get a feedback category by name or ID."""
|
|
2459
2541
|
pass
|
|
2460
2542
|
|
|
2461
2543
|
@overload
|
|
2462
2544
|
def GET(
|
|
2463
2545
|
self,
|
|
2464
|
-
path: Literal["/
|
|
2546
|
+
path: Literal["/agents/bootstrap_classification_model/{job_id}"],
|
|
2465
2547
|
*,
|
|
2466
|
-
params:
|
|
2548
|
+
params: GetAgentsBootstrapClassificationModelByJobIdParams,
|
|
2467
2549
|
parse_as: Literal["json"] = "json",
|
|
2468
2550
|
headers: HeaderTypes | None = None,
|
|
2469
2551
|
cookies: CookieTypes | None = None,
|
|
@@ -2471,31 +2553,47 @@ class OrcaClient(Client):
|
|
|
2471
2553
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2472
2554
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2473
2555
|
extensions: RequestExtensions | None = None,
|
|
2474
|
-
) ->
|
|
2556
|
+
) -> BootstrapClassificationModelResponse:
|
|
2557
|
+
"""Get the status of a bootstrap classification model job"""
|
|
2475
2558
|
pass
|
|
2476
2559
|
|
|
2477
|
-
@overload
|
|
2478
2560
|
def GET(
|
|
2479
2561
|
self,
|
|
2480
|
-
path:
|
|
2562
|
+
path: str,
|
|
2481
2563
|
*,
|
|
2482
|
-
params: None = None,
|
|
2483
|
-
parse_as: Literal["json"] = "json",
|
|
2564
|
+
params: Mapping[str, Any] | None = None,
|
|
2565
|
+
parse_as: Literal["json", "text"] | None = "json",
|
|
2484
2566
|
headers: HeaderTypes | None = None,
|
|
2485
2567
|
cookies: CookieTypes | None = None,
|
|
2486
2568
|
auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2487
2569
|
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2488
2570
|
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
|
|
2489
2571
|
extensions: RequestExtensions | None = None,
|
|
2490
|
-
) ->
|
|
2491
|
-
|
|
2572
|
+
) -> Any:
|
|
2573
|
+
path_params, query_params = self._parse_params(params or {}, path)
|
|
2574
|
+
res = self.get(
|
|
2575
|
+
path.format(**path_params),
|
|
2576
|
+
params=query_params,
|
|
2577
|
+
headers=headers,
|
|
2578
|
+
cookies=cookies,
|
|
2579
|
+
auth=auth,
|
|
2580
|
+
follow_redirects=follow_redirects,
|
|
2581
|
+
timeout=timeout,
|
|
2582
|
+
extensions=extensions,
|
|
2583
|
+
)
|
|
2584
|
+
res.raise_for_status()
|
|
2585
|
+
return (
|
|
2586
|
+
None
|
|
2587
|
+
if res.status_code == 204
|
|
2588
|
+
else res.json() if parse_as == "json" else res.text if parse_as == "text" else res.content
|
|
2589
|
+
)
|
|
2492
2590
|
|
|
2493
2591
|
@overload
|
|
2494
|
-
def
|
|
2592
|
+
def DELETE(
|
|
2495
2593
|
self,
|
|
2496
|
-
path: Literal["/
|
|
2594
|
+
path: Literal["/cleanup"],
|
|
2497
2595
|
*,
|
|
2498
|
-
params:
|
|
2596
|
+
params: None = None,
|
|
2499
2597
|
parse_as: Literal["json"] = "json",
|
|
2500
2598
|
headers: HeaderTypes | None = None,
|
|
2501
2599
|
cookies: CookieTypes | None = None,
|
|
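Aside (not part of the diff): the catch-all GET implementation added above backs all of the typed overloads, substituting path placeholders from `params` and decoding the body according to `parse_as`. A minimal usage sketch, assuming an authenticated OrcaClient instance is constructed elsewhere in the SDK; the datasource name is a made-up placeholder.

    client: OrcaClient = ...  # assumed: configured with base URL and API key elsewhere

    # "/datasource" overload -> list[DatasourceMetadata]
    datasources = client.GET("/datasource")

    # path placeholders are filled from `params` via path.format(**path_params)
    datasource = client.GET("/datasource/{name_or_id}", params={"name_or_id": "my-datasource"})

    # parse_as selects the decoding: "json" (default), "text", or None for raw bytes
    blob = client.GET(
        "/datasource/{name_or_id}/download",
        params={"name_or_id": "my-datasource"},
        parse_as=None,
    )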
@@ -2503,15 +2601,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> CleanupResponse:
+        """Cleanup orphaned milvus collections and blobs"""
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/auth/api_key/{name_or_id}"],
         *,
-        params:
+        params: DeleteAuthApiKeyByNameOrIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2519,15 +2618,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> None:
+        """Delete an API key by name or ID."""
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/auth/org"],
         *,
-        params:
+        params: None = None,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2535,15 +2635,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> None:
+        """Deletes the org and all associated resources"""
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/memoryset/{name_or_id}"],
         *,
-        params:
+        params: DeleteMemorysetByNameOrIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2551,15 +2652,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> None:
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/memoryset/{name_or_id}/memory/{memory_id}"],
         *,
-        params:
+        params: DeleteMemorysetByNameOrIdMemoryByMemoryIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2567,15 +2668,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> None:
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/finetuned_embedding_model/{name_or_id}"],
         *,
-        params:
+        params: DeleteFinetunedEmbeddingModelByNameOrIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2583,15 +2684,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> None:
+        """Delete a finetuned embedding model by name or ID."""
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/datasource/{name_or_id}"],
         *,
-        params:
+        params: DeleteDatasourceByNameOrIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2599,33 +2701,32 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """
+    ) -> None:
+        """Delete a datasource by name or ID."""
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/classification_model/{name_or_id}"],
         *,
-        params:
-        parse_as: Literal["
+        params: DeleteClassificationModelByNameOrIdParams,
+        parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
         auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """Get explanation for a prediction, optionally streaming the response."""
+    ) -> None:
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/regression_model/{name_or_id}"],
         *,
-        params:
+        params: DeleteRegressionModelByNameOrIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2633,16 +2734,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """Get action recommendation for improving a specific prediction."""
+    ) -> None:
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/classification_model/{model_name_or_id}/evaluation/{job_id}"],
         *,
-        params:
+        params: DeleteClassificationModelByModelNameOrIdEvaluationByJobIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2650,21 +2750,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """
-        Generate synthetic memory suggestions to improve a specific prediction.
-
-        The returned suggestions have labels as string representations of integer indices
-        corresponding to the memoryset's label_names.
-        """
+    ) -> None:
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/regression_model/{model_name_or_id}/evaluation/{job_id}"],
         *,
-        params:
+        params: DeleteRegressionModelByModelNameOrIdEvaluationByJobIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2672,16 +2766,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """List all feedback categories for the organization."""
+    ) -> None:
         pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/job/{job_id}/abort"],
         *,
-        params:
+        params: DeleteJobByJobIdAbortParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2689,16 +2782,15 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """Get a feedback category by name or ID."""
+    ) -> None:
        pass
 
     @overload
-    def
+    def DELETE(
         self,
-        path: Literal["/
+        path: Literal["/telemetry/feedback_category/{name_or_id}"],
         *,
-        params:
+        params: DeleteTelemetryFeedbackCategoryByNameOrIdParams,
         parse_as: Literal["json"] = "json",
         headers: HeaderTypes | None = None,
         cookies: CookieTypes | None = None,
@@ -2706,11 +2798,11 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """
+    ) -> None:
+        """Delete a feedback category and all associated feedback records."""
         pass
 
-    def
+    def DELETE(
         self,
         path: str,
         *,
@@ -2724,7 +2816,7 @@ class OrcaClient(Client):
         extensions: RequestExtensions | None = None,
     ) -> Any:
         path_params, query_params = self._parse_params(params or {}, path)
-        res = self.
+        res = self.delete(
             path.format(**path_params),
             params=query_params,
             headers=headers,
@@ -2733,7 +2825,8 @@ class OrcaClient(Client):
             follow_redirects=follow_redirects,
             timeout=timeout,
             extensions=extensions,
-        )
+        )
+        res.raise_for_status()
         return (
             None
             if res.status_code == 204
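Aside (not part of the diff): both catch-all implementations now call `res.raise_for_status()`, so non-2xx responses surface as `httpx.HTTPStatusError` instead of being decoded. A caller-side sketch, with a hypothetical resource name:

    import httpx

    try:
        client.DELETE("/datasource/{name_or_id}", params={"name_or_id": "stale-datasource"})
    except httpx.HTTPStatusError as err:
        # raised by raise_for_status() for 4xx/5xx responses
        print(f"delete failed: HTTP {err.response.status_code}")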
@@ -2842,6 +2935,26 @@ class OrcaClient(Client):
     ) -> None:
         pass
 
+    @overload
+    def POST(
+        self,
+        path: Literal["/gpu/memoryset/{name_or_id}/lookup"],
+        *,
+        params: PostGpuMemorysetByNameOrIdLookupParams,
+        json: LookupRequest,
+        data: None = None,
+        files: None = None,
+        content: None = None,
+        parse_as: Literal["json"] = "json",
+        headers: HeaderTypes | None = None,
+        cookies: CookieTypes | None = None,
+        auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+        follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
+        timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+        extensions: RequestExtensions | None = None,
+    ) -> list[list[LabeledMemoryLookup | ScoredMemoryLookup]]:
+        pass
+
     @overload
     def POST(
         self,
@@ -2859,16 +2972,76 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) -> list[LabeledMemory] | list[ScoredMemory]:
+    ) -> list[LabeledMemory] | list[ScoredMemory]:
+        pass
+
+    @overload
+    def POST(
+        self,
+        path: Literal["/memoryset/{name_or_id}/memories"],
+        *,
+        params: PostMemorysetByNameOrIdMemoriesParams,
+        json: ListMemoriesRequest | None = None,
+        data: None = None,
+        files: None = None,
+        content: None = None,
+        parse_as: Literal["json"] = "json",
+        headers: HeaderTypes | None = None,
+        cookies: CookieTypes | None = None,
+        auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+        follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
+        timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+        extensions: RequestExtensions | None = None,
+    ) -> list[LabeledMemory] | list[ScoredMemory]:
+        pass
+
+    @overload
+    def POST(
+        self,
+        path: Literal["/memoryset/{name_or_id}/memories/delete"],
+        *,
+        params: PostMemorysetByNameOrIdMemoriesDeleteParams,
+        json: DeleteMemoriesRequest,
+        data: None = None,
+        files: None = None,
+        content: None = None,
+        parse_as: Literal["json"] = "json",
+        headers: HeaderTypes | None = None,
+        cookies: CookieTypes | None = None,
+        auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+        follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
+        timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+        extensions: RequestExtensions | None = None,
+    ) -> None:
+        pass
+
+    @overload
+    def POST(
+        self,
+        path: Literal["/gpu/memoryset/{name_or_id}/memory"],
+        *,
+        params: PostGpuMemorysetByNameOrIdMemoryParams,
+        json: PostGpuMemorysetByNameOrIdMemoryRequest,
+        data: None = None,
+        files: None = None,
+        content: None = None,
+        parse_as: Literal["json"] = "json",
+        headers: HeaderTypes | None = None,
+        cookies: CookieTypes | None = None,
+        auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+        follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
+        timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+        extensions: RequestExtensions | None = None,
+    ) -> list[str]:
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/memoryset/{name_or_id}/
+        path: Literal["/memoryset/{name_or_id}/analysis"],
         *,
-        params:
-        json:
+        params: PostMemorysetByNameOrIdAnalysisParams,
+        json: MemorysetAnalysisRequest,
         data: None = None,
         files: None = None,
         content: None = None,
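Aside (not part of the diff): the new memoryset POST overloads pair a typed JSON body with the endpoint path. A minimal sketch of the bulk-delete endpoint, whose `DeleteMemoriesRequest` body carries a `memory_ids` list; the memoryset name and IDs are placeholders.

    client.POST(
        "/memoryset/{name_or_id}/memories/delete",
        params={"name_or_id": "support-tickets"},
        json={"memory_ids": ["mem_123", "mem_456"]},
    )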
@@ -2879,16 +3052,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> MemorysetAnalysisResponse:
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/memoryset/{name_or_id}/
+        path: Literal["/memoryset/{name_or_id}/memory/{memory_id}/cascading_edits"],
         *,
-        params:
-        json:
+        params: PostMemorysetByNameOrIdMemoryByMemoryIdCascadingEditsParams,
+        json: CascadeEditSuggestionsRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -2899,16 +3072,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> list[CascadingEditSuggestion]:
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/finetuned_embedding_model"],
         *,
-        params:
-        json:
+        params: None = None,
+        json: FinetuneEmbeddingModelRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -2919,16 +3092,17 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> FinetunedEmbeddingModelMetadata:
+        """Create a finetuned embedding model."""
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/gpu/finetuned_embedding_model/{name_or_id}/embedding"],
         *,
-        params:
-        json:
+        params: PostGpuFinetunedEmbeddingModelByNameOrIdEmbeddingParams,
+        json: EmbedRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -2939,16 +3113,17 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) -> list[
+    ) -> list[list[float]]:
+        """Embed values using a finetuned embedding model."""
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/gpu/pretrained_embedding_model/{model_name}/embedding"],
         *,
-        params:
-        json:
+        params: PostGpuPretrainedEmbeddingModelByModelNameEmbeddingParams,
+        json: EmbedRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -2959,8 +3134,8 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """
+    ) -> list[list[float]]:
+        """Embed values using a pretrained embedding model."""
         pass
 
     @overload
@@ -3062,10 +3237,10 @@ class OrcaClient(Client):
     @overload
     def POST(
         self,
-        path: Literal["/datasource/{name_or_id}/
+        path: Literal["/datasource/{name_or_id}/rows"],
         *,
-        params:
-        json:
+        params: PostDatasourceByNameOrIdRowsParams,
+        json: GetDatasourceRowsRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3076,17 +3251,17 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """
+    ) -> list[dict[str, Any]]:
+        """Get rows from a specific datasource with optional filtering."""
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/datasource/{name_or_id}/rows/count"],
         *,
-        params:
-        json:
+        params: PostDatasourceByNameOrIdRowsCountParams,
+        json: GetDatasourceRowCountRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3097,16 +3272,17 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> int:
+        """Get row count from a specific datasource with optional filtering."""
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/classification_model
+        path: Literal["/classification_model"],
         *,
-        params:
-        json:
+        params: None = None,
+        json: CreateClassificationModelRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3117,7 +3293,7 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> ClassificationModelMetadata:
         pass
 
     @overload
@@ -3143,10 +3319,10 @@ class OrcaClient(Client):
     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/gpu/classification_model/{name_or_id}/prediction"],
         *,
-        params:
-        json:
+        params: PostGpuClassificationModelByNameOrIdPredictionParams,
+        json: ClassificationPredictionRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3157,16 +3333,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> list[BaseLabelPredictionResult]:
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/classification_model/{name_or_id}/prediction"],
         *,
-        params:
-        json:
+        params: PostClassificationModelByNameOrIdPredictionParams,
+        json: ClassificationPredictionRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3177,17 +3353,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) -> list[
-        """List predictions with optional filtering and sorting."""
+    ) -> list[BaseLabelPredictionResult]:
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/gpu/regression_model/{name_or_id}/prediction"],
         *,
-        params:
-        json:
+        params: PostGpuRegressionModelByNameOrIdPredictionParams,
+        json: RegressionPredictionRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3198,17 +3373,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """Count predictions with optional filtering."""
+    ) -> list[BaseScorePredictionResult]:
         pass
 
     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/regression_model/{name_or_id}/prediction"],
         *,
-        params:
-        json:
+        params: PostRegressionModelByNameOrIdPredictionParams,
+        json: RegressionPredictionRequest,
         data: None = None,
         files: None = None,
         content: None = None,
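Aside (not part of the diff): the prediction overloads above pair a `*PredictionRequest` JSON body with list-shaped results. A sketch of the classification case; the request body field name is an assumption for illustration, only the path, params, and result type come from the overloads:

    results = client.POST(
        "/classification_model/{name_or_id}/prediction",
        params={"name_or_id": "my-classifier"},
        json={"input_values": ["where is my order?"]},  # field name is an assumption
    )
    # results: list[BaseLabelPredictionResult]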
@@ -3219,21 +3393,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """
-        List memories with feedback metrics.
-        **Note**: This endpoint will ONLY return memories that have been used in a prediction.
-        If you want to query ALL memories WITHOUT feedback metrics, use the query_memoryset endpoint.
-        """
+    ) -> list[BaseScorePredictionResult]:
         pass

     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/classification_model/{model_name_or_id}/evaluation"],
         *,
-        params:
-        json:
+        params: PostClassificationModelByModelNameOrIdEvaluationParams,
+        json: ClassificationEvaluationRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3244,30 +3413,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """
-        Bootstrap a classification model by creating a memoryset with generated memories and a classification model.
-
-        This endpoint uses the bootstrap_classification_model agent to generate:
-        1. Memoryset configuration with appropriate settings
-        2. Model configuration with optimal parameters
-        3. High-quality training memories for each label
-
-        The process involves:
-        1. Calling the agent to generate configurations and memories
-        2. Creating a datasource from the generated memories
-        3. Creating a memoryset from the datasource
-        4. Creating a classification model from the memoryset
-        """
+    ) -> EvaluationResponse:
         pass

     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/regression_model/{model_name_or_id}/evaluation"],
         *,
-        params:
-        json:
+        params: PostRegressionModelByModelNameOrIdEvaluationParams,
+        json: RegressionEvaluationRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3278,16 +3433,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> EvaluationResponse:
         pass

     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/telemetry/prediction"],
         *,
-        params:
-        json:
+        params: None = None,
+        json: ListPredictionsRequest | None = None,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3298,16 +3453,17 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) -> list[
+    ) -> list[LabelPredictionWithMemoriesAndFeedback | ScorePredictionWithMemoriesAndFeedback]:
+        """List predictions with optional filtering and sorting."""
         pass

     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/telemetry/prediction/count"],
         *,
-        params:
-        json:
+        params: None = None,
+        json: CountPredictionsRequest | None = None,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3318,16 +3474,17 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> int:
+        """Count predictions with optional filtering."""
         pass

     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/telemetry/memories"],
         *,
-        params:
-        json:
+        params: None = None,
+        json: TelemetryMemoriesRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3338,16 +3495,21 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> PaginatedUnionLabeledMemoryWithFeedbackMetricsScoredMemoryWithFeedbackMetrics:
+        """
+        List memories with feedback metrics.
+        **Note**: This endpoint will ONLY return memories that have been used in a prediction.
+        If you want to query ALL memories WITHOUT feedback metrics, use the query_memoryset endpoint.
+        """
         pass

     @overload
     def POST(
         self,
-        path: Literal["/
+        path: Literal["/agents/bootstrap_classification_model"],
         *,
-        params:
-        json:
+        params: None = None,
+        json: BootstrapClassificationModelRequest,
         data: None = None,
         files: None = None,
         content: None = None,
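
Taken together, the overloads above give the telemetry endpoints concrete parameter and return types. A minimal sketch of how a caller might exercise two of them, assuming a reachable API and a configured client (the `OrcaClient` constructor is added later in this diff); both endpoints accept an omitted body, so no request fields are shown:

from orca_sdk.client import OrcaClient

client = OrcaClient()  # falls back to ORCA_API_KEY / ORCA_API_URL from the environment

# The overload types this as list[LabelPredictionWithMemoriesAndFeedback | ScorePredictionWithMemoriesAndFeedback]
predictions = client.POST("/telemetry/prediction")

# The overload types this as a plain int
total = client.POST("/telemetry/prediction/count")

print(f"fetched {len(predictions)} of {total} recorded predictions")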
@@ -3358,29 +3520,21 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """
-
+    ) -> BootstrapClassificationModelResponse:
+        """
+        Bootstrap a classification model by creating a memoryset with generated memories and a classification model.

-
-
-
-
-
-
-
-
-
-
-
-        headers: HeaderTypes | None = None,
-        cookies: CookieTypes | None = None,
-        auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
-        follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
-        timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
-        extensions: RequestExtensions | None = None,
-    ) -> list[list[float]]:
-        """Embed values using a pretrained embedding model."""
+        This endpoint uses the bootstrap_classification_model agent to generate:
+        1. Memoryset configuration with appropriate settings
+        2. Model configuration with optimal parameters
+        3. High-quality training memories for each label
+
+        The process involves:
+        1. Calling the agent to generate configurations and memories
+        2. Creating a datasource from the generated memories
+        3. Creating a memoryset from the datasource
+        4. Creating a classification model from the memoryset
+        """
         pass

     def POST(
@@ -3414,7 +3568,8 @@ class OrcaClient(Client):
             follow_redirects=follow_redirects,
             timeout=timeout,
             extensions=extensions,
-        )
+        )
+        res.raise_for_status()
         return (
             None
             if res.status_code == 204
@@ -3494,7 +3649,8 @@ class OrcaClient(Client):
             follow_redirects=follow_redirects,
             timeout=timeout,
             extensions=extensions,
-        )
+        )
+        res.raise_for_status()
         return (
             None
             if res.status_code == 204
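
The `res.raise_for_status()` line added in this hunk and the one above acts as a safety net: the response event hook registered in `__init__` at the end of this file already converts recognized error codes into Python exceptions, and `raise_for_status()` ensures any remaining non-2xx response fails loudly instead of being parsed as a success. A hedged sketch of the resulting failure path, reusing the `client` from the earlier example:

import httpx

try:
    total = client.POST("/telemetry/prediction/count")
except (ValueError, PermissionError, LookupError, RuntimeError) as err:
    # Raised by _raise_error_for_response for recognized statuses (401, 403, 404, 409, 422, ...)
    print(f"Orca API error: {err}")
except httpx.HTTPStatusError as err:
    # Raised by res.raise_for_status() for any other non-2xx status
    print(f"Unexpected HTTP status {err.response.status_code}")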
@@ -3524,10 +3680,10 @@ class OrcaClient(Client):
     @overload
     def PATCH(
         self,
-        path: Literal["/
+        path: Literal["/gpu/memoryset/{name_or_id}/memory"],
         *,
-        params:
-        json:
+        params: PatchGpuMemorysetByNameOrIdMemoryParams,
+        json: PatchGpuMemorysetByNameOrIdMemoryRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3538,16 +3694,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> LabeledMemory | ScoredMemory:
         pass

     @overload
     def PATCH(
         self,
-        path: Literal["/
+        path: Literal["/gpu/memoryset/{name_or_id}/memories"],
         *,
-        params:
-        json:
+        params: PatchGpuMemorysetByNameOrIdMemoriesParams,
+        json: PatchGpuMemorysetByNameOrIdMemoriesRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3558,16 +3714,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> list[LabeledMemory] | list[ScoredMemory]:
         pass

     @overload
     def PATCH(
         self,
-        path: Literal["/
+        path: Literal["/classification_model/{name_or_id}"],
         *,
-        params:
-        json:
+        params: PatchClassificationModelByNameOrIdParams,
+        json: PredictiveModelUpdate,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3578,17 +3734,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
-        """Update a prediction with new expected values, tags, or memory ID."""
+    ) -> ClassificationModelMetadata:
         pass

     @overload
     def PATCH(
         self,
-        path: Literal["/
+        path: Literal["/regression_model/{name_or_id}"],
         *,
-        params:
-        json:
+        params: PatchRegressionModelByNameOrIdParams,
+        json: PredictiveModelUpdate,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3599,16 +3754,16 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> RegressionModelMetadata:
         pass

     @overload
     def PATCH(
         self,
-        path: Literal["/
+        path: Literal["/telemetry/prediction/{prediction_id}"],
         *,
-        params:
-        json:
+        params: PatchTelemetryPredictionByPredictionIdParams,
+        json: UpdatePredictionRequest,
         data: None = None,
         files: None = None,
         content: None = None,
@@ -3619,7 +3774,8 @@ class OrcaClient(Client):
         follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
         timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
         extensions: RequestExtensions | None = None,
-    ) ->
+    ) -> Any:
+        """Update a prediction with new expected values, tags, or memory ID."""
         pass

     def PATCH(
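
The final hunk below is the largest change: it removes the module-level `orca_api` singleton and module-level hook functions in favor of a configurable `OrcaClient.__init__`, an `api_key` property, a `use()` context manager backed by a ContextVar, and a `_resolve_client()` fallback that lazily creates a default client. A minimal sketch of the resulting usage, based only on the code shown in that hunk; the API key and base URL values are placeholders, and `_resolve_client()` is an internal helper called here purely to illustrate the fallback:

from orca_sdk.client import OrcaClient

# Explicit, per-instance configuration instead of the old module-level `orca_api` client.
staging = OrcaClient(api_key="placeholder-key", base_url="https://staging.example.invalid/")

with staging.use():
    # Inside the block, SDK-internal lookups resolve to `staging` via the ContextVar.
    assert OrcaClient._resolve_client() is staging
    total = staging.POST("/telemetry/prediction/count")

# Outside any use() block, _resolve_client() falls back to (and lazily creates) a default
# client configured from ORCA_API_KEY / ORCA_API_URL.
default = OrcaClient._resolve_client()
print(default.api_key)  # exposed via the new api_key property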
@@ -3653,95 +3809,126 @@ class OrcaClient(Client):
             follow_redirects=follow_redirects,
             timeout=timeout,
             extensions=extensions,
-        )
+        )
+        res.raise_for_status()
         return (
             None
             if res.status_code == 204
             else res.json() if parse_as == "json" else res.text if parse_as == "text" else res.content
         )

-@
-def
-
-
-
-
+    @staticmethod
+    def _raise_error_for_response(response: Response) -> None:
+        if response.status_code == 401:
+            raise ValueError("Invalid API key")
+        # elif response.status_code == 402:
+        #     res = cast(QuotaExceededErrorResponse, json.loads(response.read().decode(response.encoding or "utf-8")))
+        #     raise RuntimeError(
+        #         f"{res['quota_type'].replace('_', ' ').title()} limit reached ({res['current']}/{res['quota_limit']})"
+        #     )
+        elif response.status_code == 403:
+            raise PermissionError(json.loads(response.read().decode(response.encoding or "utf-8"))["reason"])
+        elif response.status_code == 404:
+            res = cast(NotFoundErrorResponse, json.loads(response.read().decode(response.encoding or "utf-8")))
+            if res["resource"] is not None:
+                raise LookupError(f"The {res['resource']} you are looking for does not exist")
+            else:
+                raise RuntimeError(f"Unknown API route: {response.url}")
+        elif response.status_code == 405:
+            raise RuntimeError(f"Unknown method {response.request.method} for API route: {response.url}")
+        elif response.status_code == 409:
+            res = cast(
+                ConstraintViolationErrorResponse, json.loads(response.read().decode(response.encoding or "utf-8"))
+            )
+            raise RuntimeError(res["constraint"])
+        elif response.status_code == 422:
+            res = cast(InvalidInputErrorResponse, json.loads(response.read().decode(response.encoding or "utf-8")))
+            issues = [f"{issue['loc'][-1]}: {issue['msg']}" for issue in res["validation_issues"]]
+            raise ValueError("Invalid input:\n\t" + "\n\t".join(issues))
+        elif response.status_code == 500:
+            res = cast(InternalServerErrorResponse, json.loads(response.read().decode(response.encoding or "utf-8")))
+            raise RuntimeError(f"Unexpected server error: {res['message']}")
+        elif response.status_code == 503:
+            raise RuntimeError("Orca API is currently unavailable, please try again later")
+        elif response.status_code >= 400:
+            raise RuntimeError(f"Unexpected status code: {response.status_code}")
+
+    @staticmethod
+    def _instrument_request(request: Request) -> None:
+        request.headers["X-Request-ID"] = str(uuid.uuid4())
+
+    def __init__(
+        self,
+        *,
+        api_key: str | None = None,
+        base_url: URL | str = "",
+        headers: HeaderTypes | None = None,
+        transport: BaseTransport | None = None,
+        timeout: TimeoutTypes | None = None,
+        limits: Limits | None = None,
+        max_redirects: int = 20,
+        event_hooks: None | (Mapping[str, list[Callable[..., Any]]]) = None,
+        http1: bool = True,
+        http2: bool = False,
+        proxy: str | URL | Proxy | None = None,
+        log_level: int = logging.WARNING,
+    ) -> None:
+        """
+        Initialize an OrcaAPI httpx client

-
-
-
+        Params:
+            api_key: API key to use for authentication, will default to ORCA_API_KEY if not set.
+            base_url: URL of the OrcaAPI, will default to ORCA_API_URL or the cloud API URL if not set.
+        """
+        logging.getLogger("httpx").setLevel(log_level)
+        logging.getLogger("httpcore").setLevel(log_level)
+        super().__init__(
+            headers={"Api-Key": api_key or os.environ.get("ORCA_API_KEY", "")} | dict(Headers(headers or {}).items()),
+            http1=http1,
+            http2=http2,
+            proxy=proxy,
+            timeout=timeout or Timeout(connect=3, read=20, write=10, pool=5),
+            follow_redirects=True,
+            limits=limits or Limits(max_connections=100, max_keepalive_connections=20),
+            max_redirects=max_redirects,
+            event_hooks=event_hooks
+            or {"request": [self._instrument_request], "response": [self._raise_error_for_response]},
+            base_url=base_url or os.environ.get("ORCA_API_URL", "https://api.orcadb.ai/"),
+            transport=transport
+            or RetryTransport(
+                transport=HTTPTransport(),
+                retry=Retry(
+                    total=5,
+                    backoff_factor=0.5,
+                    allowed_methods=["GET", "POST", "PUT", "PATCH", "DELETE"],
+                    status_forcelist=[429, 500, 502, 503, 504],
+                ),
+            ),
+        )

     @property
-def
-
-
-
-
-
-
-
-
-
-
-def
-
-
-
-
-
-
-
-
-
-
-
-
-
-    elif response.status_code == 403:
-        raise PermissionError(_read_json_response(response)["reason"])
-    elif response.status_code == 404:
-        res = cast(NotFoundErrorResponse, _read_json_response(response))
-        if res["resource"] is not None:
-            raise LookupError(f"The {res['resource']} you are looking for does not exist")
-        else:
-            raise RuntimeError(f"Unknown API route: {response.url}")
-    elif response.status_code == 405:
-        raise RuntimeError(f"Unknown method {response.request.method} for API route: {response.url}")
-    elif response.status_code == 409:
-        res = cast(ConstraintViolationErrorResponse, _read_json_response(response))
-        raise RuntimeError(res["constraint"])
-    elif response.status_code == 422:
-        res = cast(InvalidInputErrorResponse, _read_json_response(response))
-        issues = [f"{issue['loc'][-1]}: {issue['msg']}" for issue in res["validation_issues"]]
-        raise ValueError("Invalid input:\n\t" + "\n\t".join(issues))
-    elif response.status_code == 500:
-        res = cast(InternalServerErrorResponse, _read_json_response(response))
-        raise RuntimeError(f"Unexpected server error: {res['message']}")
-    elif response.status_code == 503:
-        raise RuntimeError("Orca API is currently unavailable, please try again later")
-    elif response.status_code >= 400:
-        raise RuntimeError(f"Unexpected status code: {response.status_code}")
-
-
-def _instrument_request(request: Request) -> None:
-    request.headers["X-Request-ID"] = str(uuid.uuid4())
-
-
-logging.getLogger("httpx").setLevel(logging.WARNING)
-logging.getLogger("httpcore").setLevel(logging.ERROR)
-
-orca_api = OrcaClient(
-    transport=RetryTransport(
-        transport=HTTPTransport(),
-        retry=Retry(
-            total=5,
-            backoff_factor=0.5,
-            allowed_methods=["GET", "POST", "PUT", "PATCH", "DELETE"],
-            status_forcelist=[429, 500, 502, 503, 504],
-        ),
-    ),
-    event_hooks={"request": [_instrument_request], "response": [_raise_error_for_response]},
-    follow_redirects=True,
-    timeout=Timeout(connect=3, read=20, write=10, pool=5),
-)
-"""Typed client for the Orca API"""
+    def api_key(self) -> str:
+        return self.headers["Api-Key"]
+
+    @api_key.setter
+    def api_key(self, api_key: str) -> None:
+        self.headers.update(Headers({"Api-Key": api_key}))
+
+    client_ctx = ContextVar[Self | None]("orca_client", default=None)
+    default_client: Self | None = None
+
+    @contextmanager
+    def use(self) -> Generator[None, None, None]:
+        """Context manager to inject this client into any OrcaSDK methods"""
+        token = self.client_ctx.set(self)
+        try:
+            yield
+        finally:
+            self.client_ctx.reset(token)
+
+    @classmethod
+    def _resolve_client(cls, client: Self | None = None) -> Self:
+        client = client or cls.client_ctx.get() or cls.default_client
+        if not client:
+            client = cls.default_client = cls()
+        return client