graphlit-client 1.0.20240927001 → 1.0.20240930001
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4674,6 +4674,14 @@ exports.GetSpecification = (0, graphql_tag_1.default) `
       customRevision
       count
     }
+    azureAI {
+      tokenLimit
+      completionTokenLimit
+      key
+      endpoint
+      temperature
+      probability
+    }
     openAI {
       tokenLimit
       completionTokenLimit
@@ -4951,6 +4959,14 @@ exports.QuerySpecifications = (0, graphql_tag_1.default) `
       customRevision
       count
     }
+    azureAI {
+      tokenLimit
+      completionTokenLimit
+      key
+      endpoint
+      temperature
+      probability
+    }
     openAI {
       tokenLimit
       completionTokenLimit
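Both the GetSpecification and QuerySpecifications documents above gain the same azureAI selection set. For orientation only, here is a sketch of issuing that selection over plain HTTP; the endpoint URL and bearer token are placeholders, and the outer specification(id:) field is inferred from the operation name rather than shown in this diff.

// Sketch only: the outer query shape and the endpoint URL are assumptions;
// the azureAI selection matches the fields added in this release.
const GET_SPECIFICATION = /* GraphQL */ `
  query GetSpecification($id: ID!) {
    specification(id: $id) {
      azureAI {
        tokenLimit
        completionTokenLimit
        key
        endpoint
        temperature
        probability
      }
    }
  }
`;

async function fetchAzureAIProperties(id: string): Promise<unknown> {
    const response = await fetch("https://<graphlit-graphql-endpoint>", {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            Authorization: "Bearer <jwt>", // placeholder credential
        },
        body: JSON.stringify({ query: GET_SPECIFICATION, variables: { id } }),
    });
    const { data } = await response.json();
    return data?.specification?.azureAI ?? null;
}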
@@ -496,6 +496,52 @@ export type AudioMetadataInput = {
     /** The audio title. */
     title?: InputMaybe<Scalars['String']['input']>;
 };
+/** Represents Azure AI model properties. */
+export type AzureAiModelProperties = {
+    __typename?: 'AzureAIModelProperties';
+    /** The limit of tokens generated by prompt completion. */
+    completionTokenLimit?: Maybe<Scalars['Int']['output']>;
+    /** The Azure AI API endpoint. */
+    endpoint: Scalars['URL']['output'];
+    /** The Azure AI API key. */
+    key: Scalars['String']['output'];
+    /** The model token probability. */
+    probability?: Maybe<Scalars['Float']['output']>;
+    /** The model temperature. */
+    temperature?: Maybe<Scalars['Float']['output']>;
+    /** The number of tokens which can provided to the model. */
+    tokenLimit: Scalars['Int']['output'];
+};
+/** Represents Azure AI model properties. */
+export type AzureAiModelPropertiesInput = {
+    /** The limit of tokens generated by prompt completion. */
+    completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
+    /** The Azure AI API endpoint. */
+    endpoint: Scalars['URL']['input'];
+    /** The Azure AI API key. */
+    key: Scalars['String']['input'];
+    /** The model token probability. */
+    probability?: InputMaybe<Scalars['Float']['input']>;
+    /** The model temperature. */
+    temperature?: InputMaybe<Scalars['Float']['input']>;
+    /** The number of tokens which can provided to the model. */
+    tokenLimit: Scalars['Int']['input'];
+};
+/** Represents Azure AI model properties. */
+export type AzureAiModelPropertiesUpdateInput = {
+    /** The limit of tokens generated by prompt completion. */
+    completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
+    /** The Azure AI API endpoint. */
+    endpoint?: InputMaybe<Scalars['URL']['input']>;
+    /** The Azure AI API key. */
+    key?: InputMaybe<Scalars['String']['input']>;
+    /** The model token probability. */
+    probability?: InputMaybe<Scalars['Float']['input']>;
+    /** The model temperature. */
+    temperature?: InputMaybe<Scalars['Float']['input']>;
+    /** The number of tokens which can provided to the model. */
+    tokenLimit?: InputMaybe<Scalars['Int']['input']>;
+};
 /** Represents Azure blob feed properties. */
 export type AzureBlobFeedProperties = {
     __typename?: 'AzureBlobFeedProperties';
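The block above introduces three codegen variants of the same Azure AI properties: an output type (AzureAiModelProperties), a create input (AzureAiModelPropertiesInput, where endpoint, key, and tokenLimit are required), and an update input (AzureAiModelPropertiesUpdateInput, where every field is optional). A minimal sketch of populating the create input follows; the type is mirrored locally so the snippet stands alone, and the endpoint, key, and numeric values are placeholders rather than package defaults.

// Local mirror of the AzureAiModelPropertiesInput fields added above, kept
// inline so the example compiles on its own; in real code the type comes
// from the package's generated GraphQL types.
type AzureAiModelPropertiesInput = {
    completionTokenLimit?: number | null;
    endpoint: string; // 'URL' custom scalar in the generated schema
    key: string;
    probability?: number | null;
    temperature?: number | null;
    tokenLimit: number;
};

// Placeholder values: the endpoint and key are illustrative only.
const azureAI: AzureAiModelPropertiesInput = {
    endpoint: "https://my-model.eastus2.models.ai.azure.com",
    key: "<azure-ai-api-key>",
    tokenLimit: 128000,
    completionTokenLimit: 4096,
    temperature: 0.2,
    probability: 0.95,
};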
@@ -3633,6 +3679,8 @@ export declare enum GroqModels {
     Llama_3_2_3BPreview = "LLAMA_3_2_3B_PREVIEW",
     /** LLaMA 3.2 11b Text Preview */
     Llama_3_2_11BTextPreview = "LLAMA_3_2_11B_TEXT_PREVIEW",
+    /** LLaMA 3.2 11b Vision Preview */
+    Llama_3_2_11BVisionPreview = "LLAMA_3_2_11B_VISION_PREVIEW",
     /** LLaMA 3.2 90b Text Preview */
     Llama_3_2_90BTextPreview = "LLAMA_3_2_90B_TEXT_PREVIEW",
     /** LLaMA 3 8b */
@@ -6187,7 +6235,9 @@ export declare enum MistralModels {
     /** Mistral Small */
     MistralSmall = "MISTRAL_SMALL",
     /** Mixtral 8x7b Instruct */
-    Mixtral_8X7BInstruct = "MIXTRAL_8X7B_INSTRUCT"
+    Mixtral_8X7BInstruct = "MIXTRAL_8X7B_INSTRUCT",
+    /** Pixtral 12b (2024-09 version) */
+    Pixtral_12B_2409 = "PIXTRAL_12B_2409"
 }
 /** Represents the LLM document preparation properties. */
 export type ModelDocumentPreparationInputProperties = {
@@ -6215,6 +6265,8 @@ export type ModelImageExtractionPropertiesInput = {
 export declare enum ModelServiceTypes {
     /** Anthropic */
     Anthropic = "ANTHROPIC",
+    /** Azure AI */
+    AzureAi = "AZURE_AI",
     /** Azure OpenAI */
     AzureOpenAi = "AZURE_OPEN_AI",
     /** Cerebras */
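Three enum additions land in this release: GroqModels.Llama_3_2_11BVisionPreview, MistralModels.Pixtral_12B_2409, and ModelServiceTypes.AzureAi. A small sketch of consuming them follows; the enums are mirrored inline with only the members relevant here (the shipped enums are much larger), and the helper is purely illustrative.

// Inline mirrors of the relevant enum members; the shipped enums contain
// many more entries than shown here.
enum ModelServiceTypes {
    Anthropic = "ANTHROPIC",
    AzureAi = "AZURE_AI",
    AzureOpenAi = "AZURE_OPEN_AI",
}

enum GroqModels {
    Llama_3_2_11BTextPreview = "LLAMA_3_2_11B_TEXT_PREVIEW",
    Llama_3_2_11BVisionPreview = "LLAMA_3_2_11B_VISION_PREVIEW",
}

enum MistralModels {
    Mixtral_8X7BInstruct = "MIXTRAL_8X7B_INSTRUCT",
    Pixtral_12B_2409 = "PIXTRAL_12B_2409",
}

// Hypothetical helper: map a service type to the specification field that
// carries its model properties (field names per the Specification changes
// later in this diff).
function modelPropertiesField(service: ModelServiceTypes): string {
    switch (service) {
        case ModelServiceTypes.AzureAi:
            return "azureAI";
        case ModelServiceTypes.AzureOpenAi:
            return "azureOpenAI";
        case ModelServiceTypes.Anthropic:
            return "anthropic";
        default:
            return "unknown";
    }
}

// The newly added model identifiers resolve to their wire values.
const visionPreview: string = GroqModels.Llama_3_2_11BVisionPreview; // "LLAMA_3_2_11B_VISION_PREVIEW"
const pixtral: string = MistralModels.Pixtral_12B_2409;              // "PIXTRAL_12B_2409"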
@@ -10489,6 +10541,8 @@ export type Specification = {
     __typename?: 'Specification';
     /** The Anthropic model properties. */
     anthropic?: Maybe<AnthropicModelProperties>;
+    /** The Azure AI model properties. */
+    azureAI?: Maybe<AzureAiModelProperties>;
     /** The Azure OpenAI model properties. */
     azureOpenAI?: Maybe<AzureOpenAiModelProperties>;
     /** The Cerebras model properties. */
@@ -10497,9 +10551,9 @@ export type Specification = {
     cohere?: Maybe<CohereModelProperties>;
     /** The creation date of the specification. */
     creationDate: Scalars['DateTime']['output'];
-    /** Custom guidance which is injected into the LLM
+    /** Custom guidance which is injected into the LLM prompt. */
     customGuidance?: Maybe<Scalars['String']['output']>;
-    /** Custom instructions which are injected into the LLM
+    /** Custom instructions which are injected into the LLM prompt. */
     customInstructions?: Maybe<Scalars['String']['output']>;
     /** The Deepseek model properties. */
     deepseek?: Maybe<DeepseekModelProperties>;
@@ -10579,15 +10633,17 @@ export type SpecificationFilter = {
 export type SpecificationInput = {
     /** The Anthropic model properties. */
     anthropic?: InputMaybe<AnthropicModelPropertiesInput>;
+    /** The Azure AI model properties. */
+    azureAI?: InputMaybe<AzureAiModelPropertiesInput>;
     /** The Azure OpenAI model properties. */
     azureOpenAI?: InputMaybe<AzureOpenAiModelPropertiesInput>;
     /** The Cerebras model properties. */
     cerebras?: InputMaybe<CerebrasModelPropertiesInput>;
     /** The Cohere model properties. */
     cohere?: InputMaybe<CohereModelPropertiesInput>;
-    /** Custom guidance which is injected into the LLM
+    /** Custom guidance which is injected into the LLM prompt. */
     customGuidance?: InputMaybe<Scalars['String']['input']>;
-    /** Custom instructions which are injected into the LLM
+    /** Custom instructions which are injected into the LLM prompt. */
     customInstructions?: InputMaybe<Scalars['String']['input']>;
     /** The Deepseek model properties. */
     deepseek?: InputMaybe<DeepseekModelPropertiesInput>;
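SpecificationInput gains the same azureAI field via AzureAiModelPropertiesInput. A hedged sketch of a create payload using only fields visible in this hunk follows; the slice type stands in for the full generated SpecificationInput (which has additional fields not shown here), and all values are placeholders.

// Slice of SpecificationInput limited to fields shown in this diff; the
// nested shape mirrors AzureAiModelPropertiesInput added earlier.
type SpecificationInputSlice = {
    azureAI?: {
        endpoint: string; // 'URL' custom scalar
        key: string;
        tokenLimit: number;
        completionTokenLimit?: number | null;
        temperature?: number | null;
        probability?: number | null;
    } | null;
    customGuidance?: string | null;
    customInstructions?: string | null;
};

// Placeholder payload: endpoint, key, and instruction text are illustrative.
const createSpecification: SpecificationInputSlice = {
    customInstructions: "Answer using only the retrieved sources.",
    azureAI: {
        endpoint: "https://my-model.eastus2.models.ai.azure.com",
        key: "<azure-ai-api-key>",
        tokenLimit: 128000,
        temperature: 0.2,
    },
};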
@@ -10647,15 +10703,17 @@ export declare enum SpecificationTypes {
 export type SpecificationUpdateInput = {
     /** The Anthropic model properties. */
     anthropic?: InputMaybe<AnthropicModelPropertiesUpdateInput>;
+    /** The Azure AI model properties. */
+    azureAI?: InputMaybe<AzureAiModelPropertiesUpdateInput>;
     /** The Azure OpenAI model properties. */
     azureOpenAI?: InputMaybe<AzureOpenAiModelPropertiesUpdateInput>;
     /** The Cerebras model properties. */
     cerebras?: InputMaybe<CerebrasModelPropertiesUpdateInput>;
     /** The Cohere model properties. */
     cohere?: InputMaybe<CohereModelPropertiesUpdateInput>;
-    /** Custom guidance which is injected into the LLM
+    /** Custom guidance which is injected into the LLM prompt. */
     customGuidance?: InputMaybe<Scalars['String']['input']>;
-    /** Custom instructions which are injected into the LLM
+    /** Custom instructions which are injected into the LLM prompt. */
     customInstructions?: InputMaybe<Scalars['String']['input']>;
     /** The Deepseek model properties. */
     deepseek?: InputMaybe<DeepseekModelPropertiesUpdateInput>;
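In the update variant every Azure AI field is optional, so a partial patch is valid. A minimal sketch, again with a local slice type standing in for the generated SpecificationUpdateInput and placeholder values:

// Slice of SpecificationUpdateInput limited to the new azureAI field; in
// AzureAiModelPropertiesUpdateInput every property is optional.
type SpecificationUpdateSlice = {
    azureAI?: {
        endpoint?: string | null;
        key?: string | null;
        tokenLimit?: number | null;
        completionTokenLimit?: number | null;
        temperature?: number | null;
        probability?: number | null;
    } | null;
};

// Rotate the API key and lower the temperature, leaving other settings untouched.
const patch: SpecificationUpdateSlice = {
    azureAI: {
        key: "<rotated-azure-ai-api-key>",
        temperature: 0.1,
    },
};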
@@ -17010,6 +17068,15 @@ export type GetSpecificationQuery = {
             customRevision?: string | null;
             count?: number | null;
         } | null;
+        azureAI?: {
+            __typename?: 'AzureAIModelProperties';
+            tokenLimit: number;
+            completionTokenLimit?: number | null;
+            key: string;
+            endpoint: any;
+            temperature?: number | null;
+            probability?: number | null;
+        } | null;
         openAI?: {
             __typename?: 'OpenAIModelProperties';
             tokenLimit?: number | null;
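In the generated query result types the azureAI object is nullable and its endpoint is typed any (the URL custom scalar is emitted without a mapping). A small sketch of defensively reading those fields, using a structural type that mirrors the shape above:

// Structural mirror of the azureAI object as emitted in
// GetSpecificationQuery / QuerySpecificationsQuery.
type AzureAISelection = {
    tokenLimit: number;
    completionTokenLimit?: number | null;
    key: string;
    endpoint: any; // 'URL' custom scalar, emitted as any
    temperature?: number | null;
    probability?: number | null;
} | null;

function describeAzureAI(azureAI: AzureAISelection | undefined): string {
    if (!azureAI) {
        return "no Azure AI model properties configured";
    }
    const endpoint = String(azureAI.endpoint); // URL scalar arrives untyped
    const temperature = azureAI.temperature ?? "default";
    return `${endpoint} (tokenLimit=${azureAI.tokenLimit}, temperature=${temperature})`;
}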
@@ -17321,6 +17388,15 @@ export type QuerySpecificationsQuery = {
             customRevision?: string | null;
             count?: number | null;
         } | null;
+        azureAI?: {
+            __typename?: 'AzureAIModelProperties';
+            tokenLimit: number;
+            completionTokenLimit?: number | null;
+            key: string;
+            endpoint: any;
+            temperature?: number | null;
+            probability?: number | null;
+        } | null;
         openAI?: {
             __typename?: 'OpenAIModelProperties';
             tokenLimit?: number | null;
@@ -729,6 +729,8 @@ var GroqModels;
     GroqModels["Llama_3_2_3BPreview"] = "LLAMA_3_2_3B_PREVIEW";
     /** LLaMA 3.2 11b Text Preview */
     GroqModels["Llama_3_2_11BTextPreview"] = "LLAMA_3_2_11B_TEXT_PREVIEW";
+    /** LLaMA 3.2 11b Vision Preview */
+    GroqModels["Llama_3_2_11BVisionPreview"] = "LLAMA_3_2_11B_VISION_PREVIEW";
     /** LLaMA 3.2 90b Text Preview */
     GroqModels["Llama_3_2_90BTextPreview"] = "LLAMA_3_2_90B_TEXT_PREVIEW";
     /** LLaMA 3 8b */
@@ -1009,12 +1011,16 @@ var MistralModels;
     MistralModels["MistralSmall"] = "MISTRAL_SMALL";
     /** Mixtral 8x7b Instruct */
     MistralModels["Mixtral_8X7BInstruct"] = "MIXTRAL_8X7B_INSTRUCT";
+    /** Pixtral 12b (2024-09 version) */
+    MistralModels["Pixtral_12B_2409"] = "PIXTRAL_12B_2409";
 })(MistralModels || (exports.MistralModels = MistralModels = {}));
 /** Model service type */
 var ModelServiceTypes;
 (function (ModelServiceTypes) {
     /** Anthropic */
     ModelServiceTypes["Anthropic"] = "ANTHROPIC";
+    /** Azure AI */
+    ModelServiceTypes["AzureAi"] = "AZURE_AI";
     /** Azure OpenAI */
     ModelServiceTypes["AzureOpenAi"] = "AZURE_OPEN_AI";
     /** Cerebras */
|