graphlit-client 1.0.20241021001 → 1.0.20241104001

@@ -4908,6 +4908,7 @@ exports.GetSpecification = (0, graphql_tag_1.default) `
  retrievalStrategy {
  type
  contentLimit
+ disableFallback
  }
  rerankingStrategy {
  serviceType
@@ -5210,6 +5211,7 @@ exports.QuerySpecifications = (0, graphql_tag_1.default) `
  retrievalStrategy {
  type
  contentLimit
+ disableFallback
  }
  rerankingStrategy {
  serviceType
@@ -363,17 +363,31 @@ export declare enum AnthropicModels {
  Claude_2_0 = "CLAUDE_2_0",
  /** @deprecated Use Claude 3.x instead. */
  Claude_2_1 = "CLAUDE_2_1",
+ /** Claude 3.5 Haiku (Latest) */
+ Claude_3_5Haiku = "CLAUDE_3_5_HAIKU",
+ /** Claude 3.5 Haiku (10-22-2024 version) */
+ Claude_3_5Haiku_20241022 = "CLAUDE_3_5_HAIKU_20241022",
  /** Claude 3.5 Sonnet (Latest) */
  Claude_3_5Sonnet = "CLAUDE_3_5_SONNET",
+ /** Claude 3.5 Sonnet (06-20-2024 version) */
+ Claude_3_5Sonnet_20240620 = "CLAUDE_3_5_SONNET_20240620",
+ /** Claude 3.5 Sonnet (10-22-2024 version) */
+ Claude_3_5Sonnet_20241022 = "CLAUDE_3_5_SONNET_20241022",
  /** Claude 3 Haiku (Latest) */
  Claude_3Haiku = "CLAUDE_3_HAIKU",
+ /** Claude 3 Haiku (03-07-2024 version) */
+ Claude_3Haiku_20240307 = "CLAUDE_3_HAIKU_20240307",
  /** Claude 3 Opus (Latest) */
  Claude_3Opus = "CLAUDE_3_OPUS",
+ /** Claude 3 Opus (02-29-2024 version) */
+ Claude_3Opus_20240229 = "CLAUDE_3_OPUS_20240229",
  /** Claude 3 Sonnet (Latest) */
  Claude_3Sonnet = "CLAUDE_3_SONNET",
- /** @deprecated Use Claude 3 Haiku instead. */
+ /** Claude 3 Sonnet (02-29-2024 version) */
+ Claude_3Sonnet_20240229 = "CLAUDE_3_SONNET_20240229",
+ /** @deprecated Use Claude 3.5 Haiku instead. */
  ClaudeInstant_1 = "CLAUDE_INSTANT_1",
- /** @deprecated Use Claude 3 Haiku instead. */
+ /** @deprecated Use Claude 3.5 Haiku instead. */
  ClaudeInstant_1_2 = "CLAUDE_INSTANT_1_2",
  /** Developer-specified model */
  Custom = "CUSTOM"
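The date-pinned Anthropic members added above can be referenced like any other enum value, while the floating "Latest" aliases stay intact. A minimal sketch; the import path is an assumption inferred from the package layout, not something shown in this diff:

    // Import path assumed; adjust to wherever the package exposes its generated GraphQL types.
    import { AnthropicModels } from "graphlit-client/dist/generated/graphql-types.js";

    // Pin to an exact Claude 3.5 Sonnet snapshot rather than the floating alias:
    const pinned = AnthropicModels.Claude_3_5Sonnet_20241022; // "CLAUDE_3_5_SONNET_20241022"

    // The floating aliases remain available, now joined by the Claude 3.5 Haiku entries:
    const latestHaiku = AnthropicModels.Claude_3_5Haiku; // "CLAUDE_3_5_HAIKU"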
@@ -1793,7 +1807,7 @@ export type ConversationInput = {
  /** Filter content for conversation, optional. */
  filter?: InputMaybe<ContentCriteriaInput>;
  /** The conversation messages. */
- messages?: InputMaybe<Array<InputMaybe<ConversationMessageInput>>>;
+ messages?: InputMaybe<Array<ConversationMessageInput>>;
  /** The name of the conversation. */
  name: Scalars['String']['input'];
  /** The LLM specification used by this conversation, optional. */
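Because the messages element type no longer admits null, callers that previously tolerated holes in the array should filter them out before building the input. A sketch, with loadDraftMessages standing in for caller-side data (type imports omitted):

    // loadDraftMessages is illustrative; it represents caller data that may still contain nulls.
    const rawMessages: Array<ConversationMessageInput | null> = loadDraftMessages();

    const conversation: ConversationInput = {
      name: "Support chat",
      // Null entries are no longer accepted by the generated input type, so drop them up front.
      messages: rawMessages.filter((m): m is ConversationMessageInput => m != null),
    };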
@@ -1916,8 +1930,8 @@ export type ConversationToolCall = {
  __typename?: 'ConversationToolCall';
  /** The tool arguments. */
  arguments: Scalars['String']['output'];
- /** The tool identifier. */
- id?: Maybe<Scalars['String']['output']>;
+ /** The tool call identifier. */
+ id: Scalars['String']['output'];
  /** The tool name. */
  name: Scalars['String']['output'];
  };
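With id now guaranteed on ConversationToolCall, callers can drop the null guard on the identifier; the array elements in the generated operation types further below remain nullable. A sketch, where message and dispatchToolCall are illustrative names:

    for (const toolCall of message.toolCalls ?? []) {
      if (toolCall == null) continue; // array elements remain nullable
      // No need for "toolCall.id ?? fallback" anymore: id is a non-nullable string.
      dispatchToolCall(toolCall.id, toolCall.name, toolCall.arguments);
    }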
@@ -3795,7 +3809,10 @@ export declare enum GroqModels {
  Llama_3_2_1BPreview = "LLAMA_3_2_1B_PREVIEW",
  /** LLaMA 3.2 3b Preview */
  Llama_3_2_3BPreview = "LLAMA_3_2_3B_PREVIEW",
- /** LLaMA 3.2 11b Text Preview */
+ /**
+ * LLaMA 3.2 11b Text Preview
+ * @deprecated Use Llama 3.2 11b Vision Preview instead.
+ */
  Llama_3_2_11BTextPreview = "LLAMA_3_2_11B_TEXT_PREVIEW",
  /** LLaMA 3.2 11b Vision Preview */
  Llama_3_2_11BVisionPreview = "LLAMA_3_2_11B_VISION_PREVIEW",
@@ -3805,7 +3822,10 @@ export declare enum GroqModels {
  Llama_3_8B = "LLAMA_3_8B",
  /** LLaMA 3 70b */
  Llama_3_70B = "LLAMA_3_70B",
- /** LLaVA 1.5 7B Preview */
+ /**
+ * LLaVA 1.5 7B Preview
+ * @deprecated Use Llama 3.2 11b Vision Preview instead.
+ */
  Llava_1_5_7BPreview = "LLAVA_1_5_7B_PREVIEW",
  /** Mixtral 8x7b Instruct */
  Mixtral_8X7BInstruct = "MIXTRAL_8X7B_INSTRUCT"
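Both newly deprecated Groq previews point at the same replacement, so migrating is a one-line enum swap:

    // Before (now flagged @deprecated):
    //   GroqModels.Llama_3_2_11BTextPreview
    //   GroqModels.Llava_1_5_7BPreview

    // After, per the deprecation notes:
    const model = GroqModels.Llama_3_2_11BVisionPreview; // "LLAMA_3_2_11B_VISION_PREVIEW"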
@@ -10224,6 +10244,8 @@ export type RetrievalStrategy = {
  __typename?: 'RetrievalStrategy';
  /** The maximum number of content sources to provide with prompt context. Defaults to 25. */
  contentLimit?: Maybe<Scalars['Int']['output']>;
+ /** Whether to disable fallback to previous contents, when no contents are found by semantic search. Defaults to false. */
+ disableFallback?: Maybe<Scalars['Boolean']['output']>;
  /** The retrieval strategy type. */
  type: RetrievalStrategyTypes;
  };
@@ -10231,6 +10253,8 @@ export type RetrievalStrategy = {
  export type RetrievalStrategyInput = {
  /** The maximum number of content sources to provide with prompt context. Defaults to 25. */
  contentLimit?: InputMaybe<Scalars['Int']['input']>;
+ /** Whether to disable fallback to previous contents, when no contents are found by semantic search. Defaults to false. */
+ disableFallback?: InputMaybe<Scalars['Boolean']['input']>;
  /** The retrieval strategy type. */
  type: RetrievalStrategyTypes;
  };
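The new flag can be set on a specification's retrieval strategy to opt out of falling back to previously retrieved contents when semantic search returns nothing. A minimal sketch of the input shape; the RetrievalStrategyTypes member shown is an assumption, since the enum's values do not appear in this diff:

    const retrievalStrategy: RetrievalStrategyInput = {
      type: RetrievalStrategyTypes.Chunk, // member name assumed; use whichever strategy type applies
      contentLimit: 10,                   // defaults to 25 when omitted
      disableFallback: true,              // new in 1.0.20241104001; defaults to false
    };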
@@ -13585,7 +13609,7 @@ export type ContinueConversationMutation = {
  } | null> | null;
  toolCalls?: Array<{
  __typename?: 'ConversationToolCall';
- id?: string | null;
+ id: string;
  name: string;
  arguments: string;
  } | null> | null;
@@ -13831,7 +13855,7 @@ export type GetConversationQuery = {
  } | null> | null;
  toolCalls?: Array<{
  __typename?: 'ConversationToolCall';
- id?: string | null;
+ id: string;
  name: string;
  arguments: string;
  } | null> | null;
@@ -14159,7 +14183,7 @@ export type PromptConversationMutation = {
  } | null> | null;
  toolCalls?: Array<{
  __typename?: 'ConversationToolCall';
- id?: string | null;
+ id: string;
  name: string;
  arguments: string;
  } | null> | null;
@@ -14376,7 +14400,7 @@ export type QueryConversationsQuery = {
  } | null> | null;
  toolCalls?: Array<{
  __typename?: 'ConversationToolCall';
- id?: string | null;
+ id: string;
  name: string;
  arguments: string;
  } | null> | null;
@@ -17589,6 +17613,7 @@ export type GetSpecificationQuery = {
  __typename?: 'RetrievalStrategy';
  type: RetrievalStrategyTypes;
  contentLimit?: number | null;
+ disableFallback?: boolean | null;
  } | null;
  rerankingStrategy?: {
  __typename?: 'RerankingStrategy';
@@ -17879,7 +17904,7 @@ export type PromptSpecificationsMutation = {
  } | null> | null;
  toolCalls?: Array<{
  __typename?: 'ConversationToolCall';
- id?: string | null;
+ id: string;
  name: string;
  arguments: string;
  } | null> | null;
@@ -17928,6 +17953,7 @@ export type QuerySpecificationsQuery = {
  __typename?: 'RetrievalStrategy';
  type: RetrievalStrategyTypes;
  contentLimit?: number | null;
+ disableFallback?: boolean | null;
  } | null;
  rerankingStrategy?: {
  __typename?: 'RerankingStrategy';
@@ -18,17 +18,31 @@ var AnthropicModels;
  AnthropicModels["Claude_2_0"] = "CLAUDE_2_0";
  /** @deprecated Use Claude 3.x instead. */
  AnthropicModels["Claude_2_1"] = "CLAUDE_2_1";
+ /** Claude 3.5 Haiku (Latest) */
+ AnthropicModels["Claude_3_5Haiku"] = "CLAUDE_3_5_HAIKU";
+ /** Claude 3.5 Haiku (10-22-2024 version) */
+ AnthropicModels["Claude_3_5Haiku_20241022"] = "CLAUDE_3_5_HAIKU_20241022";
  /** Claude 3.5 Sonnet (Latest) */
  AnthropicModels["Claude_3_5Sonnet"] = "CLAUDE_3_5_SONNET";
+ /** Claude 3.5 Sonnet (06-20-2024 version) */
+ AnthropicModels["Claude_3_5Sonnet_20240620"] = "CLAUDE_3_5_SONNET_20240620";
+ /** Claude 3.5 Sonnet (10-22-2024 version) */
+ AnthropicModels["Claude_3_5Sonnet_20241022"] = "CLAUDE_3_5_SONNET_20241022";
  /** Claude 3 Haiku (Latest) */
  AnthropicModels["Claude_3Haiku"] = "CLAUDE_3_HAIKU";
+ /** Claude 3 Haiku (03-07-2024 version) */
+ AnthropicModels["Claude_3Haiku_20240307"] = "CLAUDE_3_HAIKU_20240307";
  /** Claude 3 Opus (Latest) */
  AnthropicModels["Claude_3Opus"] = "CLAUDE_3_OPUS";
+ /** Claude 3 Opus (02-29-2024 version) */
+ AnthropicModels["Claude_3Opus_20240229"] = "CLAUDE_3_OPUS_20240229";
  /** Claude 3 Sonnet (Latest) */
  AnthropicModels["Claude_3Sonnet"] = "CLAUDE_3_SONNET";
- /** @deprecated Use Claude 3 Haiku instead. */
+ /** Claude 3 Sonnet (02-29-2024 version) */
+ AnthropicModels["Claude_3Sonnet_20240229"] = "CLAUDE_3_SONNET_20240229";
+ /** @deprecated Use Claude 3.5 Haiku instead. */
  AnthropicModels["ClaudeInstant_1"] = "CLAUDE_INSTANT_1";
- /** @deprecated Use Claude 3 Haiku instead. */
+ /** @deprecated Use Claude 3.5 Haiku instead. */
  AnthropicModels["ClaudeInstant_1_2"] = "CLAUDE_INSTANT_1_2";
  /** Developer-specified model */
  AnthropicModels["Custom"] = "CUSTOM";
@@ -751,7 +765,10 @@ var GroqModels;
  GroqModels["Llama_3_2_1BPreview"] = "LLAMA_3_2_1B_PREVIEW";
  /** LLaMA 3.2 3b Preview */
  GroqModels["Llama_3_2_3BPreview"] = "LLAMA_3_2_3B_PREVIEW";
- /** LLaMA 3.2 11b Text Preview */
+ /**
+ * LLaMA 3.2 11b Text Preview
+ * @deprecated Use Llama 3.2 11b Vision Preview instead.
+ */
  GroqModels["Llama_3_2_11BTextPreview"] = "LLAMA_3_2_11B_TEXT_PREVIEW";
  /** LLaMA 3.2 11b Vision Preview */
  GroqModels["Llama_3_2_11BVisionPreview"] = "LLAMA_3_2_11B_VISION_PREVIEW";
@@ -761,7 +778,10 @@ var GroqModels;
  GroqModels["Llama_3_8B"] = "LLAMA_3_8B";
  /** LLaMA 3 70b */
  GroqModels["Llama_3_70B"] = "LLAMA_3_70B";
- /** LLaVA 1.5 7B Preview */
+ /**
+ * LLaVA 1.5 7B Preview
+ * @deprecated Use Llama 3.2 11b Vision Preview instead.
+ */
  GroqModels["Llava_1_5_7BPreview"] = "LLAVA_1_5_7B_PREVIEW";
  /** Mixtral 8x7b Instruct */
  GroqModels["Mixtral_8X7BInstruct"] = "MIXTRAL_8X7B_INSTRUCT";
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "graphlit-client",
- "version": "1.0.20241021001",
+ "version": "1.0.20241104001",
  "description": "Graphlit API TypeScript Client",
  "main": "dist/client.js",
  "types": "dist/client.d.ts",