worqhat 3.9.0-py3-none-any.whl → 3.10.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -16,11 +16,17 @@ from .tables import (
     AsyncTablesResourceWithStreamingResponse,
 )
 from ...types import (
+    db_cluster_params,
+    db_recommend_params,
+    db_find_similar_params,
     db_execute_batch_params,
     db_execute_query_params,
+    db_hybrid_search_params,
     db_insert_record_params,
     db_delete_records_params,
     db_update_records_params,
+    db_semantic_search_params,
+    db_detect_anomalies_params,
     db_process_nl_query_params,
 )
 from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
@@ -34,11 +40,17 @@ from ..._response import (
     async_to_streamed_response_wrapper,
 )
 from ..._base_client import make_request_options
+from ...types.db_cluster_response import DBClusterResponse
+from ...types.db_recommend_response import DBRecommendResponse
+from ...types.db_find_similar_response import DBFindSimilarResponse
 from ...types.db_execute_batch_response import DBExecuteBatchResponse
 from ...types.db_execute_query_response import DBExecuteQueryResponse
+from ...types.db_hybrid_search_response import DBHybridSearchResponse
 from ...types.db_insert_record_response import DBInsertRecordResponse
 from ...types.db_delete_records_response import DBDeleteRecordsResponse
 from ...types.db_update_records_response import DBUpdateRecordsResponse
+from ...types.db_semantic_search_response import DBSemanticSearchResponse
+from ...types.db_detect_anomalies_response import DBDetectAnomaliesResponse
 from ...types.db_process_nl_query_response import DBProcessNlQueryResponse
 
 __all__ = ["DBResource", "AsyncDBResource"]
@@ -68,6 +80,68 @@ class DBResource(SyncAPIResource):
         """
         return DBResourceWithStreamingResponse(self)
 
+    def cluster(
+        self,
+        *,
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        generate_labels: bool | Omit = omit,
+        max_clusters: float | Omit = omit,
+        min_clusters: float | Omit = omit,
+        num_clusters: float | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBClusterResponse:
+        """
+        Performs K-means clustering on vector embeddings to automatically group similar
+        records. Supports auto-detection of optimal cluster count and AI-generated
+        cluster labels. Returns cluster information including centroids, sample records,
+        and quality metrics.
+
+        Args:
+          table: Table to cluster
+
+          environment: Environment to cluster (development, staging, production)
+
+          generate_labels: Whether to generate AI labels for clusters
+
+          max_clusters: Maximum clusters for auto-detection
+
+          min_clusters: Minimum clusters for auto-detection
+
+          num_clusters: Number of clusters (auto-detected if not provided)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._post(
+            "/db/cluster",
+            body=maybe_transform(
+                {
+                    "table": table,
+                    "environment": environment,
+                    "generate_labels": generate_labels,
+                    "max_clusters": max_clusters,
+                    "min_clusters": min_clusters,
+                    "num_clusters": num_clusters,
+                },
+                db_cluster_params.DBClusterParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBClusterResponse,
+        )
+
     def delete_records(
         self,
         *,
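The new cluster endpoint is exposed on the synchronous resource as DBResource.cluster. A minimal usage sketch follows, assuming a configured worqhat client (here simply `client`) that exposes this resource as `client.db`; the table name and parameter values are illustrative:

# Hypothetical call to the new cluster() method; "products" is an example table name.
result = client.db.cluster(
    table="products",
    num_clusters=5,        # omit to let the API auto-detect the cluster count
    generate_labels=True,  # request AI-generated labels for each cluster
    environment="development",
)
print(result)  # DBClusterResponse with centroids, sample records, and quality metrics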
@@ -116,6 +190,64 @@ class DBResource(SyncAPIResource):
             cast_to=DBDeleteRecordsResponse,
         )
 
+    def detect_anomalies(
+        self,
+        *,
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        k: float | Omit = omit,
+        limit: float | Omit = omit,
+        threshold: float | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBDetectAnomaliesResponse:
+        """
+        Identifies anomalous or outlier records using K-nearest neighbors analysis on
+        vector embeddings. Useful for fraud detection, data quality checks, and
+        identifying unusual patterns. Returns anomaly scores and nearest neighbors for
+        each detected anomaly.
+
+        Args:
+          table: Table to analyze for anomalies
+
+          environment: Environment to analyze (development, staging, production)
+
+          k: Number of nearest neighbors to consider
+
+          limit: Maximum number of anomalies to return
+
+          threshold: Minimum anomaly score threshold
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._post(
+            "/db/detect-anomalies",
+            body=maybe_transform(
+                {
+                    "table": table,
+                    "environment": environment,
+                    "k": k,
+                    "limit": limit,
+                    "threshold": threshold,
+                },
+                db_detect_anomalies_params.DBDetectAnomaliesParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBDetectAnomaliesResponse,
+        )
+
     def execute_batch(
         self,
         *,
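A sketch of calling the new detect_anomalies() method, under the same assumption of a configured client exposing `client.db`; the table name and tuning values are placeholders:

# Hypothetical anomaly scan over a "transactions" table (example name).
anomalies = client.db.detect_anomalies(
    table="transactions",
    k=10,           # nearest neighbors considered per record
    threshold=0.8,  # minimum anomaly score to report
    limit=25,       # cap on returned anomalies
)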
@@ -216,6 +348,138 @@ class DBResource(SyncAPIResource):
             cast_to=DBExecuteQueryResponse,
         )
 
+    def find_similar(
+        self,
+        *,
+        record_id: Union[str, float],
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        exclude_self: bool | Omit = omit,
+        limit: float | Omit = omit,
+        target_table: str | Omit = omit,
+        threshold: float | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBFindSimilarResponse:
+        """
+        Finds records similar to a specific existing record using vector embeddings.
+        Useful for "More like this" functionality and cross-table similarity search.
+        Returns similarity scores and supports excluding the source record.
+
+        Args:
+          record_id: ID of the source record
+
+          table: Table containing the source record
+
+          environment: Environment to search in (development, staging, production)
+
+          exclude_self: Whether to exclude the source record from results
+
+          limit: Maximum number of similar records to return
+
+          target_table: Different table to search in (optional)
+
+          threshold: Minimum similarity score threshold
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._post(
+            "/db/find-similar",
+            body=maybe_transform(
+                {
+                    "record_id": record_id,
+                    "table": table,
+                    "environment": environment,
+                    "exclude_self": exclude_self,
+                    "limit": limit,
+                    "target_table": target_table,
+                    "threshold": threshold,
+                },
+                db_find_similar_params.DBFindSimilarParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBFindSimilarResponse,
+        )
+
+    def hybrid_search(
+        self,
+        *,
+        query: str,
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        keyword_weight: float | Omit = omit,
+        limit: float | Omit = omit,
+        semantic_weight: float | Omit = omit,
+        text_columns: SequenceNotStr[str] | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBHybridSearchResponse:
+        """Combines semantic search using vector embeddings with traditional keyword
+        search.
+
+        Provides weighted scoring between semantic similarity and keyword
+        matching. Ideal for queries that need both contextual understanding and exact
+        term matching.
+
+        Args:
+          query: Search query combining natural language and keywords
+
+          table: Table to search in
+
+          environment: Environment to search in (development, staging, production)
+
+          keyword_weight: Weight for keyword matching score
+
+          limit: Maximum number of results to return
+
+          semantic_weight: Weight for semantic similarity score
+
+          text_columns: Columns to include in keyword search
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._post(
+            "/db/hybrid-search",
+            body=maybe_transform(
+                {
+                    "query": query,
+                    "table": table,
+                    "environment": environment,
+                    "keyword_weight": keyword_weight,
+                    "limit": limit,
+                    "semantic_weight": semantic_weight,
+                    "text_columns": text_columns,
+                },
+                db_hybrid_search_params.DBHybridSearchParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBHybridSearchResponse,
+        )
+
     def insert_record(
         self,
         *,
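Usage sketches for the two new search-style methods added above, again assuming a configured client with a `db` resource; record IDs, table names, columns, and weights are placeholders:

# "More like this": records similar to an existing record.
similar = client.db.find_similar(
    record_id="rec_123",   # example source record ID
    table="articles",      # example table
    exclude_self=True,
    limit=10,
)

# Hybrid search: weighted blend of semantic similarity and keyword matching.
hits = client.db.hybrid_search(
    query="lightweight waterproof hiking jacket",
    table="products",
    semantic_weight=0.7,
    keyword_weight=0.3,
    text_columns=["title", "description"],  # example columns for the keyword side
)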
@@ -313,6 +577,137 @@ class DBResource(SyncAPIResource):
             cast_to=DBProcessNlQueryResponse,
         )
 
+    def recommend(
+        self,
+        *,
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        exclude_ids: SequenceNotStr[str] | Omit = omit,
+        limit: float | Omit = omit,
+        record_id: Union[str, float] | Omit = omit,
+        strategy: Literal["similar", "diverse", "popular"] | Omit = omit,
+        user_history: SequenceNotStr[str] | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBRecommendResponse:
+        """
+        Generates item recommendations using vector embeddings and collaborative
+        filtering. Supports multiple recommendation strategies including similar items,
+        diverse recommendations, and user history-based recommendations.
+
+        Args:
+          table: Table to generate recommendations from
+
+          environment: Environment to search in (development, staging, production)
+
+          exclude_ids: Record IDs to exclude from recommendations
+
+          limit: Maximum number of recommendations to return
+
+          record_id: Source item ID for item-to-item recommendations
+
+          strategy: Recommendation strategy to use
+
+          user_history: Array of record IDs the user has interacted with
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._post(
+            "/db/recommend",
+            body=maybe_transform(
+                {
+                    "table": table,
+                    "environment": environment,
+                    "exclude_ids": exclude_ids,
+                    "limit": limit,
+                    "record_id": record_id,
+                    "strategy": strategy,
+                    "user_history": user_history,
+                },
+                db_recommend_params.DBRecommendParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBRecommendResponse,
+        )
+
+    def semantic_search(
+        self,
+        *,
+        query: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        filters: Dict[str, object] | Omit = omit,
+        limit: float | Omit = omit,
+        table: str | Omit = omit,
+        tables: SequenceNotStr[str] | Omit = omit,
+        threshold: float | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBSemanticSearchResponse:
+        """
+        Performs semantic search across database tables using vector embeddings.
+        Supports both single table and cross-table searches with configurable similarity
+        thresholds. Returns records with similarity scores and metadata about matched
+        fields.
+
+        Args:
+          query: Natural language search query
+
+          environment: Environment to search in (development, staging, production)
+
+          filters: Additional WHERE conditions to apply
+
+          limit: Maximum number of results to return
+
+          table: Single table to search in (optional if tables is provided)
+
+          tables: Multiple tables to search across (optional if table is provided)
+
+          threshold: Minimum similarity score threshold
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return self._post(
+            "/db/semantic-search",
+            body=maybe_transform(
+                {
+                    "query": query,
+                    "environment": environment,
+                    "filters": filters,
+                    "limit": limit,
+                    "table": table,
+                    "tables": tables,
+                    "threshold": threshold,
+                },
+                db_semantic_search_params.DBSemanticSearchParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBSemanticSearchResponse,
+        )
+
     def update_records(
         self,
         *,
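Sketches for the remaining two new synchronous methods, under the same configured-client assumption; strategy values come from the method's Literal type, while IDs, tables, and the filters shape are examples:

# Item-to-item recommendations.
recs = client.db.recommend(
    table="products",
    record_id="rec_123",     # example source item
    strategy="similar",      # or "diverse" / "popular"
    exclude_ids=["rec_123"],
    limit=5,
)

# Semantic search across one or more tables.
matches = client.db.semantic_search(
    query="customers who complained about late delivery",
    tables=["tickets", "reviews"],  # example tables; a single `table` also works
    threshold=0.75,
    filters={"status": "open"},     # example WHERE-style filter
)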
@@ -390,6 +785,68 @@ class AsyncDBResource(AsyncAPIResource):
         """
         return AsyncDBResourceWithStreamingResponse(self)
 
+    async def cluster(
+        self,
+        *,
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        generate_labels: bool | Omit = omit,
+        max_clusters: float | Omit = omit,
+        min_clusters: float | Omit = omit,
+        num_clusters: float | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBClusterResponse:
+        """
+        Performs K-means clustering on vector embeddings to automatically group similar
+        records. Supports auto-detection of optimal cluster count and AI-generated
+        cluster labels. Returns cluster information including centroids, sample records,
+        and quality metrics.
+
+        Args:
+          table: Table to cluster
+
+          environment: Environment to cluster (development, staging, production)
+
+          generate_labels: Whether to generate AI labels for clusters
+
+          max_clusters: Maximum clusters for auto-detection
+
+          min_clusters: Minimum clusters for auto-detection
+
+          num_clusters: Number of clusters (auto-detected if not provided)
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return await self._post(
+            "/db/cluster",
+            body=await async_maybe_transform(
+                {
+                    "table": table,
+                    "environment": environment,
+                    "generate_labels": generate_labels,
+                    "max_clusters": max_clusters,
+                    "min_clusters": min_clusters,
+                    "num_clusters": num_clusters,
+                },
+                db_cluster_params.DBClusterParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBClusterResponse,
+        )
+
     async def delete_records(
         self,
         *,
@@ -438,6 +895,64 @@ class AsyncDBResource(AsyncAPIResource):
             cast_to=DBDeleteRecordsResponse,
         )
 
+    async def detect_anomalies(
+        self,
+        *,
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        k: float | Omit = omit,
+        limit: float | Omit = omit,
+        threshold: float | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBDetectAnomaliesResponse:
+        """
+        Identifies anomalous or outlier records using K-nearest neighbors analysis on
+        vector embeddings. Useful for fraud detection, data quality checks, and
+        identifying unusual patterns. Returns anomaly scores and nearest neighbors for
+        each detected anomaly.
+
+        Args:
+          table: Table to analyze for anomalies
+
+          environment: Environment to analyze (development, staging, production)
+
+          k: Number of nearest neighbors to consider
+
+          limit: Maximum number of anomalies to return
+
+          threshold: Minimum anomaly score threshold
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return await self._post(
+            "/db/detect-anomalies",
+            body=await async_maybe_transform(
+                {
+                    "table": table,
+                    "environment": environment,
+                    "k": k,
+                    "limit": limit,
+                    "threshold": threshold,
+                },
+                db_detect_anomalies_params.DBDetectAnomaliesParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBDetectAnomaliesResponse,
+        )
+
     async def execute_batch(
         self,
         *,
@@ -538,6 +1053,138 @@ class AsyncDBResource(AsyncAPIResource):
             cast_to=DBExecuteQueryResponse,
         )
 
+    async def find_similar(
+        self,
+        *,
+        record_id: Union[str, float],
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        exclude_self: bool | Omit = omit,
+        limit: float | Omit = omit,
+        target_table: str | Omit = omit,
+        threshold: float | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBFindSimilarResponse:
+        """
+        Finds records similar to a specific existing record using vector embeddings.
+        Useful for "More like this" functionality and cross-table similarity search.
+        Returns similarity scores and supports excluding the source record.
+
+        Args:
+          record_id: ID of the source record
+
+          table: Table containing the source record
+
+          environment: Environment to search in (development, staging, production)
+
+          exclude_self: Whether to exclude the source record from results
+
+          limit: Maximum number of similar records to return
+
+          target_table: Different table to search in (optional)
+
+          threshold: Minimum similarity score threshold
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return await self._post(
+            "/db/find-similar",
+            body=await async_maybe_transform(
+                {
+                    "record_id": record_id,
+                    "table": table,
+                    "environment": environment,
+                    "exclude_self": exclude_self,
+                    "limit": limit,
+                    "target_table": target_table,
+                    "threshold": threshold,
+                },
+                db_find_similar_params.DBFindSimilarParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBFindSimilarResponse,
+        )
+
+    async def hybrid_search(
+        self,
+        *,
+        query: str,
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        keyword_weight: float | Omit = omit,
+        limit: float | Omit = omit,
+        semantic_weight: float | Omit = omit,
+        text_columns: SequenceNotStr[str] | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBHybridSearchResponse:
+        """Combines semantic search using vector embeddings with traditional keyword
+        search.
+
+        Provides weighted scoring between semantic similarity and keyword
+        matching. Ideal for queries that need both contextual understanding and exact
+        term matching.
+
+        Args:
+          query: Search query combining natural language and keywords
+
+          table: Table to search in
+
+          environment: Environment to search in (development, staging, production)
+
+          keyword_weight: Weight for keyword matching score
+
+          limit: Maximum number of results to return
+
+          semantic_weight: Weight for semantic similarity score
+
+          text_columns: Columns to include in keyword search
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return await self._post(
+            "/db/hybrid-search",
+            body=await async_maybe_transform(
+                {
+                    "query": query,
+                    "table": table,
+                    "environment": environment,
+                    "keyword_weight": keyword_weight,
+                    "limit": limit,
+                    "semantic_weight": semantic_weight,
+                    "text_columns": text_columns,
+                },
+                db_hybrid_search_params.DBHybridSearchParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBHybridSearchResponse,
+        )
+
     async def insert_record(
         self,
         *,
@@ -635,6 +1282,137 @@ class AsyncDBResource(AsyncAPIResource):
             cast_to=DBProcessNlQueryResponse,
         )
 
+    async def recommend(
+        self,
+        *,
+        table: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        exclude_ids: SequenceNotStr[str] | Omit = omit,
+        limit: float | Omit = omit,
+        record_id: Union[str, float] | Omit = omit,
+        strategy: Literal["similar", "diverse", "popular"] | Omit = omit,
+        user_history: SequenceNotStr[str] | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBRecommendResponse:
+        """
+        Generates item recommendations using vector embeddings and collaborative
+        filtering. Supports multiple recommendation strategies including similar items,
+        diverse recommendations, and user history-based recommendations.
+
+        Args:
+          table: Table to generate recommendations from
+
+          environment: Environment to search in (development, staging, production)
+
+          exclude_ids: Record IDs to exclude from recommendations
+
+          limit: Maximum number of recommendations to return
+
+          record_id: Source item ID for item-to-item recommendations
+
+          strategy: Recommendation strategy to use
+
+          user_history: Array of record IDs the user has interacted with
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return await self._post(
+            "/db/recommend",
+            body=await async_maybe_transform(
+                {
+                    "table": table,
+                    "environment": environment,
+                    "exclude_ids": exclude_ids,
+                    "limit": limit,
+                    "record_id": record_id,
+                    "strategy": strategy,
+                    "user_history": user_history,
+                },
+                db_recommend_params.DBRecommendParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBRecommendResponse,
+        )
+
+    async def semantic_search(
+        self,
+        *,
+        query: str,
+        environment: Literal["development", "staging", "production"] | Omit = omit,
+        filters: Dict[str, object] | Omit = omit,
+        limit: float | Omit = omit,
+        table: str | Omit = omit,
+        tables: SequenceNotStr[str] | Omit = omit,
+        threshold: float | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> DBSemanticSearchResponse:
+        """
+        Performs semantic search across database tables using vector embeddings.
+        Supports both single table and cross-table searches with configurable similarity
+        thresholds. Returns records with similarity scores and metadata about matched
+        fields.
+
+        Args:
+          query: Natural language search query
+
+          environment: Environment to search in (development, staging, production)
+
+          filters: Additional WHERE conditions to apply
+
+          limit: Maximum number of results to return
+
+          table: Single table to search in (optional if tables is provided)
+
+          tables: Multiple tables to search across (optional if table is provided)
+
+          threshold: Minimum similarity score threshold
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return await self._post(
+            "/db/semantic-search",
+            body=await async_maybe_transform(
+                {
+                    "query": query,
+                    "environment": environment,
+                    "filters": filters,
+                    "limit": limit,
+                    "table": table,
+                    "tables": tables,
+                    "threshold": threshold,
+                },
+                db_semantic_search_params.DBSemanticSearchParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=DBSemanticSearchResponse,
+        )
+
     async def update_records(
         self,
         *,
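The asynchronous resource mirrors the synchronous signatures, returning awaitables. A minimal sketch, assuming an async-capable worqhat client (here `async_client`) exposing the same db resource; names and values are illustrative:

import asyncio

async def main() -> None:
    # Same parameters as the sync methods; the call is awaited.
    results = await async_client.db.semantic_search(
        query="open support tickets about billing",
        table="tickets",  # example table name
        limit=20,
    )
    print(results)

asyncio.run(main())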
@@ -692,21 +1470,39 @@ class DBResourceWithRawResponse:
     def __init__(self, db: DBResource) -> None:
         self._db = db
 
+        self.cluster = to_raw_response_wrapper(
+            db.cluster,
+        )
         self.delete_records = to_raw_response_wrapper(
             db.delete_records,
         )
+        self.detect_anomalies = to_raw_response_wrapper(
+            db.detect_anomalies,
+        )
         self.execute_batch = to_raw_response_wrapper(
             db.execute_batch,
         )
         self.execute_query = to_raw_response_wrapper(
             db.execute_query,
         )
+        self.find_similar = to_raw_response_wrapper(
+            db.find_similar,
+        )
+        self.hybrid_search = to_raw_response_wrapper(
+            db.hybrid_search,
+        )
         self.insert_record = to_raw_response_wrapper(
             db.insert_record,
         )
         self.process_nl_query = to_raw_response_wrapper(
             db.process_nl_query,
         )
+        self.recommend = to_raw_response_wrapper(
+            db.recommend,
+        )
+        self.semantic_search = to_raw_response_wrapper(
+            db.semantic_search,
+        )
         self.update_records = to_raw_response_wrapper(
             db.update_records,
         )
@@ -720,21 +1516,39 @@ class AsyncDBResourceWithRawResponse:
     def __init__(self, db: AsyncDBResource) -> None:
         self._db = db
 
+        self.cluster = async_to_raw_response_wrapper(
+            db.cluster,
+        )
         self.delete_records = async_to_raw_response_wrapper(
            db.delete_records,
         )
+        self.detect_anomalies = async_to_raw_response_wrapper(
+            db.detect_anomalies,
+        )
         self.execute_batch = async_to_raw_response_wrapper(
             db.execute_batch,
         )
         self.execute_query = async_to_raw_response_wrapper(
             db.execute_query,
         )
+        self.find_similar = async_to_raw_response_wrapper(
+            db.find_similar,
+        )
+        self.hybrid_search = async_to_raw_response_wrapper(
+            db.hybrid_search,
+        )
         self.insert_record = async_to_raw_response_wrapper(
             db.insert_record,
         )
         self.process_nl_query = async_to_raw_response_wrapper(
             db.process_nl_query,
         )
+        self.recommend = async_to_raw_response_wrapper(
+            db.recommend,
+        )
+        self.semantic_search = async_to_raw_response_wrapper(
+            db.semantic_search,
+        )
         self.update_records = async_to_raw_response_wrapper(
             db.update_records,
         )
@@ -748,21 +1562,39 @@ class DBResourceWithStreamingResponse:
     def __init__(self, db: DBResource) -> None:
         self._db = db
 
+        self.cluster = to_streamed_response_wrapper(
+            db.cluster,
+        )
         self.delete_records = to_streamed_response_wrapper(
             db.delete_records,
         )
+        self.detect_anomalies = to_streamed_response_wrapper(
+            db.detect_anomalies,
+        )
         self.execute_batch = to_streamed_response_wrapper(
             db.execute_batch,
         )
         self.execute_query = to_streamed_response_wrapper(
             db.execute_query,
         )
+        self.find_similar = to_streamed_response_wrapper(
+            db.find_similar,
+        )
+        self.hybrid_search = to_streamed_response_wrapper(
+            db.hybrid_search,
+        )
         self.insert_record = to_streamed_response_wrapper(
             db.insert_record,
         )
         self.process_nl_query = to_streamed_response_wrapper(
             db.process_nl_query,
         )
+        self.recommend = to_streamed_response_wrapper(
+            db.recommend,
+        )
+        self.semantic_search = to_streamed_response_wrapper(
+            db.semantic_search,
+        )
         self.update_records = to_streamed_response_wrapper(
             db.update_records,
         )
@@ -776,21 +1608,39 @@ class AsyncDBResourceWithStreamingResponse:
     def __init__(self, db: AsyncDBResource) -> None:
         self._db = db
 
+        self.cluster = async_to_streamed_response_wrapper(
+            db.cluster,
+        )
         self.delete_records = async_to_streamed_response_wrapper(
             db.delete_records,
         )
+        self.detect_anomalies = async_to_streamed_response_wrapper(
+            db.detect_anomalies,
+        )
         self.execute_batch = async_to_streamed_response_wrapper(
             db.execute_batch,
         )
         self.execute_query = async_to_streamed_response_wrapper(
             db.execute_query,
         )
+        self.find_similar = async_to_streamed_response_wrapper(
+            db.find_similar,
+        )
+        self.hybrid_search = async_to_streamed_response_wrapper(
+            db.hybrid_search,
+        )
         self.insert_record = async_to_streamed_response_wrapper(
             db.insert_record,
         )
         self.process_nl_query = async_to_streamed_response_wrapper(
             db.process_nl_query,
         )
+        self.recommend = async_to_streamed_response_wrapper(
+            db.recommend,
+        )
+        self.semantic_search = async_to_streamed_response_wrapper(
+            db.semantic_search,
+        )
         self.update_records = async_to_streamed_response_wrapper(
             db.update_records,
         )
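The raw and streaming wrapper classes above register the new methods alongside the existing ones (delete_records, execute_query, and so on), so the new endpoints should also be reachable through the resource's raw/streaming accessors. A sketch, assuming those accessors are wired up the same way as for the pre-existing methods:

# Hypothetical raw-response access for one of the new endpoints; the
# with_raw_response accessor and .parse() follow the SDK's existing wrapper pattern.
response = client.db.with_raw_response.hybrid_search(
    query="wireless noise cancelling headphones",
    table="products",  # example table name
)
parsed = response.parse()  # -> DBHybridSearchResponse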