graphlit-client 1.0.20240927002 → 1.0.20240930002

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4674,6 +4674,14 @@ exports.GetSpecification = (0, graphql_tag_1.default) `
4674
4674
  customRevision
4675
4675
  count
4676
4676
  }
4677
+ azureAI {
4678
+ tokenLimit
4679
+ completionTokenLimit
4680
+ key
4681
+ endpoint
4682
+ temperature
4683
+ probability
4684
+ }
4677
4685
  openAI {
4678
4686
  tokenLimit
4679
4687
  completionTokenLimit
@@ -4951,6 +4959,14 @@ exports.QuerySpecifications = (0, graphql_tag_1.default) `
4951
4959
  customRevision
4952
4960
  count
4953
4961
  }
4962
+ azureAI {
4963
+ tokenLimit
4964
+ completionTokenLimit
4965
+ key
4966
+ endpoint
4967
+ temperature
4968
+ probability
4969
+ }
4954
4970
  openAI {
4955
4971
  tokenLimit
4956
4972
  completionTokenLimit
@@ -496,6 +496,52 @@ export type AudioMetadataInput = {
496
496
  /** The audio title. */
497
497
  title?: InputMaybe<Scalars['String']['input']>;
498
498
  };
499
+ /** Represents Azure AI model properties. */
500
+ export type AzureAiModelProperties = {
501
+ __typename?: 'AzureAIModelProperties';
502
+ /** The limit of tokens generated by prompt completion. */
503
+ completionTokenLimit?: Maybe<Scalars['Int']['output']>;
504
+ /** The Azure AI API endpoint. */
505
+ endpoint: Scalars['URL']['output'];
506
+ /** The Azure AI API key. */
507
+ key: Scalars['String']['output'];
508
+ /** The model token probability. */
509
+ probability?: Maybe<Scalars['Float']['output']>;
510
+ /** The model temperature. */
511
+ temperature?: Maybe<Scalars['Float']['output']>;
512
+ /** The number of tokens which can be provided to the model. */
513
+ tokenLimit: Scalars['Int']['output'];
514
+ };
515
+ /** Represents Azure AI model properties. */
516
+ export type AzureAiModelPropertiesInput = {
517
+ /** The limit of tokens generated by prompt completion. */
518
+ completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
519
+ /** The Azure AI API endpoint. */
520
+ endpoint: Scalars['URL']['input'];
521
+ /** The Azure AI API key. */
522
+ key: Scalars['String']['input'];
523
+ /** The model token probability. */
524
+ probability?: InputMaybe<Scalars['Float']['input']>;
525
+ /** The model temperature. */
526
+ temperature?: InputMaybe<Scalars['Float']['input']>;
527
+ /** The number of tokens which can be provided to the model. */
528
+ tokenLimit: Scalars['Int']['input'];
529
+ };
530
+ /** Represents Azure AI model properties. */
531
+ export type AzureAiModelPropertiesUpdateInput = {
532
+ /** The limit of tokens generated by prompt completion. */
533
+ completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
534
+ /** The Azure AI API endpoint. */
535
+ endpoint?: InputMaybe<Scalars['URL']['input']>;
536
+ /** The Azure AI API key. */
537
+ key?: InputMaybe<Scalars['String']['input']>;
538
+ /** The model token probability. */
539
+ probability?: InputMaybe<Scalars['Float']['input']>;
540
+ /** The model temperature. */
541
+ temperature?: InputMaybe<Scalars['Float']['input']>;
542
+ /** The number of tokens which can be provided to the model. */
543
+ tokenLimit?: InputMaybe<Scalars['Int']['input']>;
544
+ };
499
545
  /** Represents Azure blob feed properties. */
500
546
  export type AzureBlobFeedProperties = {
501
547
  __typename?: 'AzureBlobFeedProperties';
@@ -3489,8 +3535,16 @@ export declare enum GoogleModels {
3489
3535
  Custom = "CUSTOM",
3490
3536
  /** Gemini 1.5 Flash (Latest) */
3491
3537
  Gemini_1_5Flash = "GEMINI_1_5_FLASH",
3538
+ /** Gemini 1.5 Flash (001 version) */
3539
+ Gemini_1_5Flash_001 = "GEMINI_1_5_FLASH_001",
3540
+ /** Gemini 1.5 Flash (002 version) */
3541
+ Gemini_1_5Flash_002 = "GEMINI_1_5_FLASH_002",
3492
3542
  /** Gemini 1.5 Pro (Latest) */
3493
- Gemini_1_5Pro = "GEMINI_1_5_PRO"
3543
+ Gemini_1_5Pro = "GEMINI_1_5_PRO",
3544
+ /** Gemini 1.5 Pro (001 version) */
3545
+ Gemini_1_5Pro_001 = "GEMINI_1_5_PRO_001",
3546
+ /** Gemini 1.5 Pro (002 version) */
3547
+ Gemini_1_5Pro_002 = "GEMINI_1_5_PRO_002"
3494
3548
  }
3495
3549
  /** Represents a knowledge graph. */
3496
3550
  export type Graph = {
@@ -6189,7 +6243,9 @@ export declare enum MistralModels {
6189
6243
  /** Mistral Small */
6190
6244
  MistralSmall = "MISTRAL_SMALL",
6191
6245
  /** Mixtral 8x7b Instruct */
6192
- Mixtral_8X7BInstruct = "MIXTRAL_8X7B_INSTRUCT"
6246
+ Mixtral_8X7BInstruct = "MIXTRAL_8X7B_INSTRUCT",
6247
+ /** Pixtral 12b (2024-09 version) */
6248
+ Pixtral_12B_2409 = "PIXTRAL_12B_2409"
6193
6249
  }
6194
6250
  /** Represents the LLM document preparation properties. */
6195
6251
  export type ModelDocumentPreparationInputProperties = {
@@ -6217,6 +6273,8 @@ export type ModelImageExtractionPropertiesInput = {
6217
6273
  export declare enum ModelServiceTypes {
6218
6274
  /** Anthropic */
6219
6275
  Anthropic = "ANTHROPIC",
6276
+ /** Azure AI */
6277
+ AzureAi = "AZURE_AI",
6220
6278
  /** Azure OpenAI */
6221
6279
  AzureOpenAi = "AZURE_OPEN_AI",
6222
6280
  /** Cerebras */
@@ -10491,6 +10549,8 @@ export type Specification = {
10491
10549
  __typename?: 'Specification';
10492
10550
  /** The Anthropic model properties. */
10493
10551
  anthropic?: Maybe<AnthropicModelProperties>;
10552
+ /** The Azure AI model properties. */
10553
+ azureAI?: Maybe<AzureAiModelProperties>;
10494
10554
  /** The Azure OpenAI model properties. */
10495
10555
  azureOpenAI?: Maybe<AzureOpenAiModelProperties>;
10496
10556
  /** The Cerebras model properties. */
@@ -10499,9 +10559,9 @@ export type Specification = {
10499
10559
  cohere?: Maybe<CohereModelProperties>;
10500
10560
  /** The creation date of the specification. */
10501
10561
  creationDate: Scalars['DateTime']['output'];
10502
- /** Custom guidance which is injected into the LLM conversation prompt. */
10562
+ /** Custom guidance which is injected into the LLM prompt. */
10503
10563
  customGuidance?: Maybe<Scalars['String']['output']>;
10504
- /** Custom instructions which are injected into the LLM conversation prompt. */
10564
+ /** Custom instructions which are injected into the LLM prompt. */
10505
10565
  customInstructions?: Maybe<Scalars['String']['output']>;
10506
10566
  /** The Deepseek model properties. */
10507
10567
  deepseek?: Maybe<DeepseekModelProperties>;
@@ -10581,15 +10641,17 @@ export type SpecificationFilter = {
10581
10641
  export type SpecificationInput = {
10582
10642
  /** The Anthropic model properties. */
10583
10643
  anthropic?: InputMaybe<AnthropicModelPropertiesInput>;
10644
+ /** The Azure AI model properties. */
10645
+ azureAI?: InputMaybe<AzureAiModelPropertiesInput>;
10584
10646
  /** The Azure OpenAI model properties. */
10585
10647
  azureOpenAI?: InputMaybe<AzureOpenAiModelPropertiesInput>;
10586
10648
  /** The Cerebras model properties. */
10587
10649
  cerebras?: InputMaybe<CerebrasModelPropertiesInput>;
10588
10650
  /** The Cohere model properties. */
10589
10651
  cohere?: InputMaybe<CohereModelPropertiesInput>;
10590
- /** Custom guidance which is injected into the LLM conversation prompt. */
10652
+ /** Custom guidance which is injected into the LLM prompt. */
10591
10653
  customGuidance?: InputMaybe<Scalars['String']['input']>;
10592
- /** Custom instructions which are injected into the LLM conversation prompt. */
10654
+ /** Custom instructions which are injected into the LLM prompt. */
10593
10655
  customInstructions?: InputMaybe<Scalars['String']['input']>;
10594
10656
  /** The Deepseek model properties. */
10595
10657
  deepseek?: InputMaybe<DeepseekModelPropertiesInput>;
@@ -10649,15 +10711,17 @@ export declare enum SpecificationTypes {
10649
10711
  export type SpecificationUpdateInput = {
10650
10712
  /** The Anthropic model properties. */
10651
10713
  anthropic?: InputMaybe<AnthropicModelPropertiesUpdateInput>;
10714
+ /** The Azure AI model properties. */
10715
+ azureAI?: InputMaybe<AzureAiModelPropertiesUpdateInput>;
10652
10716
  /** The Azure OpenAI model properties. */
10653
10717
  azureOpenAI?: InputMaybe<AzureOpenAiModelPropertiesUpdateInput>;
10654
10718
  /** The Cerebras model properties. */
10655
10719
  cerebras?: InputMaybe<CerebrasModelPropertiesUpdateInput>;
10656
10720
  /** The Cohere model properties. */
10657
10721
  cohere?: InputMaybe<CohereModelPropertiesUpdateInput>;
10658
- /** Custom guidance which is injected into the LLM conversation prompt. */
10722
+ /** Custom guidance which is injected into the LLM prompt. */
10659
10723
  customGuidance?: InputMaybe<Scalars['String']['input']>;
10660
- /** Custom instructions which are injected into the LLM conversation prompt. */
10724
+ /** Custom instructions which are injected into the LLM prompt. */
10661
10725
  customInstructions?: InputMaybe<Scalars['String']['input']>;
10662
10726
  /** The Deepseek model properties. */
10663
10727
  deepseek?: InputMaybe<DeepseekModelPropertiesUpdateInput>;
@@ -17012,6 +17076,15 @@ export type GetSpecificationQuery = {
17012
17076
  customRevision?: string | null;
17013
17077
  count?: number | null;
17014
17078
  } | null;
17079
+ azureAI?: {
17080
+ __typename?: 'AzureAIModelProperties';
17081
+ tokenLimit: number;
17082
+ completionTokenLimit?: number | null;
17083
+ key: string;
17084
+ endpoint: any;
17085
+ temperature?: number | null;
17086
+ probability?: number | null;
17087
+ } | null;
17015
17088
  openAI?: {
17016
17089
  __typename?: 'OpenAIModelProperties';
17017
17090
  tokenLimit?: number | null;
@@ -17323,6 +17396,15 @@ export type QuerySpecificationsQuery = {
17323
17396
  customRevision?: string | null;
17324
17397
  count?: number | null;
17325
17398
  } | null;
17399
+ azureAI?: {
17400
+ __typename?: 'AzureAIModelProperties';
17401
+ tokenLimit: number;
17402
+ completionTokenLimit?: number | null;
17403
+ key: string;
17404
+ endpoint: any;
17405
+ temperature?: number | null;
17406
+ probability?: number | null;
17407
+ } | null;
17326
17408
  openAI?: {
17327
17409
  __typename?: 'OpenAIModelProperties';
17328
17410
  tokenLimit?: number | null;
@@ -699,8 +699,16 @@ var GoogleModels;
699
699
  GoogleModels["Custom"] = "CUSTOM";
700
700
  /** Gemini 1.5 Flash (Latest) */
701
701
  GoogleModels["Gemini_1_5Flash"] = "GEMINI_1_5_FLASH";
702
+ /** Gemini 1.5 Flash (001 version) */
703
+ GoogleModels["Gemini_1_5Flash_001"] = "GEMINI_1_5_FLASH_001";
704
+ /** Gemini 1.5 Flash (002 version) */
705
+ GoogleModels["Gemini_1_5Flash_002"] = "GEMINI_1_5_FLASH_002";
702
706
  /** Gemini 1.5 Pro (Latest) */
703
707
  GoogleModels["Gemini_1_5Pro"] = "GEMINI_1_5_PRO";
708
+ /** Gemini 1.5 Pro (001 version) */
709
+ GoogleModels["Gemini_1_5Pro_001"] = "GEMINI_1_5_PRO_001";
710
+ /** Gemini 1.5 Pro (002 version) */
711
+ GoogleModels["Gemini_1_5Pro_002"] = "GEMINI_1_5_PRO_002";
704
712
  })(GoogleModels || (exports.GoogleModels = GoogleModels = {}));
705
713
  /** GraphRAG strategies */
706
714
  var GraphStrategyTypes;
@@ -1011,12 +1019,16 @@ var MistralModels;
1011
1019
  MistralModels["MistralSmall"] = "MISTRAL_SMALL";
1012
1020
  /** Mixtral 8x7b Instruct */
1013
1021
  MistralModels["Mixtral_8X7BInstruct"] = "MIXTRAL_8X7B_INSTRUCT";
1022
+ /** Pixtral 12b (2024-09 version) */
1023
+ MistralModels["Pixtral_12B_2409"] = "PIXTRAL_12B_2409";
1014
1024
  })(MistralModels || (exports.MistralModels = MistralModels = {}));
1015
1025
  /** Model service type */
1016
1026
  var ModelServiceTypes;
1017
1027
  (function (ModelServiceTypes) {
1018
1028
  /** Anthropic */
1019
1029
  ModelServiceTypes["Anthropic"] = "ANTHROPIC";
1030
+ /** Azure AI */
1031
+ ModelServiceTypes["AzureAi"] = "AZURE_AI";
1020
1032
  /** Azure OpenAI */
1021
1033
  ModelServiceTypes["AzureOpenAi"] = "AZURE_OPEN_AI";
1022
1034
  /** Cerebras */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "graphlit-client",
3
- "version": "1.0.20240927002",
3
+ "version": "1.0.20240930002",
4
4
  "description": "Graphlit API TypeScript Client",
5
5
  "main": "dist/client.js",
6
6
  "types": "dist/client.d.ts",