graphlit-client 1.0.20240927002 → 1.0.20240930001

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4674,6 +4674,14 @@ exports.GetSpecification = (0, graphql_tag_1.default) `
  customRevision
  count
  }
+ azureAI {
+ tokenLimit
+ completionTokenLimit
+ key
+ endpoint
+ temperature
+ probability
+ }
  openAI {
  tokenLimit
  completionTokenLimit
@@ -4951,6 +4959,14 @@ exports.QuerySpecifications = (0, graphql_tag_1.default) `
  customRevision
  count
  }
+ azureAI {
+ tokenLimit
+ completionTokenLimit
+ key
+ endpoint
+ temperature
+ probability
+ }
  openAI {
  tokenLimit
  completionTokenLimit
@@ -496,6 +496,52 @@ export type AudioMetadataInput = {
  /** The audio title. */
  title?: InputMaybe<Scalars['String']['input']>;
  };
+ /** Represents Azure AI model properties. */
+ export type AzureAiModelProperties = {
+ __typename?: 'AzureAIModelProperties';
+ /** The limit of tokens generated by prompt completion. */
+ completionTokenLimit?: Maybe<Scalars['Int']['output']>;
+ /** The Azure AI API endpoint. */
+ endpoint: Scalars['URL']['output'];
+ /** The Azure AI API key. */
+ key: Scalars['String']['output'];
+ /** The model token probability. */
+ probability?: Maybe<Scalars['Float']['output']>;
+ /** The model temperature. */
+ temperature?: Maybe<Scalars['Float']['output']>;
+ /** The number of tokens which can provided to the model. */
+ tokenLimit: Scalars['Int']['output'];
+ };
+ /** Represents Azure AI model properties. */
+ export type AzureAiModelPropertiesInput = {
+ /** The limit of tokens generated by prompt completion. */
+ completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
+ /** The Azure AI API endpoint. */
+ endpoint: Scalars['URL']['input'];
+ /** The Azure AI API key. */
+ key: Scalars['String']['input'];
+ /** The model token probability. */
+ probability?: InputMaybe<Scalars['Float']['input']>;
+ /** The model temperature. */
+ temperature?: InputMaybe<Scalars['Float']['input']>;
+ /** The number of tokens which can provided to the model. */
+ tokenLimit: Scalars['Int']['input'];
+ };
+ /** Represents Azure AI model properties. */
+ export type AzureAiModelPropertiesUpdateInput = {
+ /** The limit of tokens generated by prompt completion. */
+ completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
+ /** The Azure AI API endpoint. */
+ endpoint?: InputMaybe<Scalars['URL']['input']>;
+ /** The Azure AI API key. */
+ key?: InputMaybe<Scalars['String']['input']>;
+ /** The model token probability. */
+ probability?: InputMaybe<Scalars['Float']['input']>;
+ /** The model temperature. */
+ temperature?: InputMaybe<Scalars['Float']['input']>;
+ /** The number of tokens which can provided to the model. */
+ tokenLimit?: InputMaybe<Scalars['Int']['input']>;
+ };
  /** Represents Azure blob feed properties. */
  export type AzureBlobFeedProperties = {
  __typename?: 'AzureBlobFeedProperties';
@@ -6189,7 +6235,9 @@ export declare enum MistralModels {
  /** Mistral Small */
  MistralSmall = "MISTRAL_SMALL",
  /** Mixtral 8x7b Instruct */
- Mixtral_8X7BInstruct = "MIXTRAL_8X7B_INSTRUCT"
+ Mixtral_8X7BInstruct = "MIXTRAL_8X7B_INSTRUCT",
+ /** Pixtral 12b (2024-09 version) */
+ Pixtral_12B_2409 = "PIXTRAL_12B_2409"
  }
  /** Represents the LLM document preparation properties. */
  export type ModelDocumentPreparationInputProperties = {
@@ -6217,6 +6265,8 @@ export type ModelImageExtractionPropertiesInput = {
  export declare enum ModelServiceTypes {
  /** Anthropic */
  Anthropic = "ANTHROPIC",
+ /** Azure AI */
+ AzureAi = "AZURE_AI",
  /** Azure OpenAI */
  AzureOpenAi = "AZURE_OPEN_AI",
  /** Cerebras */
@@ -10491,6 +10541,8 @@ export type Specification = {
  __typename?: 'Specification';
  /** The Anthropic model properties. */
  anthropic?: Maybe<AnthropicModelProperties>;
+ /** The Azure AI model properties. */
+ azureAI?: Maybe<AzureAiModelProperties>;
  /** The Azure OpenAI model properties. */
  azureOpenAI?: Maybe<AzureOpenAiModelProperties>;
  /** The Cerebras model properties. */
@@ -10499,9 +10551,9 @@ export type Specification = {
  cohere?: Maybe<CohereModelProperties>;
  /** The creation date of the specification. */
  creationDate: Scalars['DateTime']['output'];
- /** Custom guidance which is injected into the LLM conversation prompt. */
+ /** Custom guidance which is injected into the LLM prompt. */
  customGuidance?: Maybe<Scalars['String']['output']>;
- /** Custom instructions which are injected into the LLM conversation prompt. */
+ /** Custom instructions which are injected into the LLM prompt. */
  customInstructions?: Maybe<Scalars['String']['output']>;
  /** The Deepseek model properties. */
  deepseek?: Maybe<DeepseekModelProperties>;
@@ -10581,15 +10633,17 @@ export type SpecificationFilter = {
  export type SpecificationInput = {
  /** The Anthropic model properties. */
  anthropic?: InputMaybe<AnthropicModelPropertiesInput>;
+ /** The Azure AI model properties. */
+ azureAI?: InputMaybe<AzureAiModelPropertiesInput>;
  /** The Azure OpenAI model properties. */
  azureOpenAI?: InputMaybe<AzureOpenAiModelPropertiesInput>;
  /** The Cerebras model properties. */
  cerebras?: InputMaybe<CerebrasModelPropertiesInput>;
  /** The Cohere model properties. */
  cohere?: InputMaybe<CohereModelPropertiesInput>;
- /** Custom guidance which is injected into the LLM conversation prompt. */
+ /** Custom guidance which is injected into the LLM prompt. */
  customGuidance?: InputMaybe<Scalars['String']['input']>;
- /** Custom instructions which are injected into the LLM conversation prompt. */
+ /** Custom instructions which are injected into the LLM prompt. */
  customInstructions?: InputMaybe<Scalars['String']['input']>;
  /** The Deepseek model properties. */
  deepseek?: InputMaybe<DeepseekModelPropertiesInput>;
@@ -10649,15 +10703,17 @@ export declare enum SpecificationTypes {
  export type SpecificationUpdateInput = {
  /** The Anthropic model properties. */
  anthropic?: InputMaybe<AnthropicModelPropertiesUpdateInput>;
+ /** The Azure AI model properties. */
+ azureAI?: InputMaybe<AzureAiModelPropertiesUpdateInput>;
  /** The Azure OpenAI model properties. */
  azureOpenAI?: InputMaybe<AzureOpenAiModelPropertiesUpdateInput>;
  /** The Cerebras model properties. */
  cerebras?: InputMaybe<CerebrasModelPropertiesUpdateInput>;
  /** The Cohere model properties. */
  cohere?: InputMaybe<CohereModelPropertiesUpdateInput>;
- /** Custom guidance which is injected into the LLM conversation prompt. */
+ /** Custom guidance which is injected into the LLM prompt. */
  customGuidance?: InputMaybe<Scalars['String']['input']>;
- /** Custom instructions which are injected into the LLM conversation prompt. */
+ /** Custom instructions which are injected into the LLM prompt. */
  customInstructions?: InputMaybe<Scalars['String']['input']>;
  /** The Deepseek model properties. */
  deepseek?: InputMaybe<DeepseekModelPropertiesUpdateInput>;
@@ -17012,6 +17068,15 @@ export type GetSpecificationQuery = {
  customRevision?: string | null;
  count?: number | null;
  } | null;
+ azureAI?: {
+ __typename?: 'AzureAIModelProperties';
+ tokenLimit: number;
+ completionTokenLimit?: number | null;
+ key: string;
+ endpoint: any;
+ temperature?: number | null;
+ probability?: number | null;
+ } | null;
  openAI?: {
  __typename?: 'OpenAIModelProperties';
  tokenLimit?: number | null;
@@ -17323,6 +17388,15 @@ export type QuerySpecificationsQuery = {
  customRevision?: string | null;
  count?: number | null;
  } | null;
+ azureAI?: {
+ __typename?: 'AzureAIModelProperties';
+ tokenLimit: number;
+ completionTokenLimit?: number | null;
+ key: string;
+ endpoint: any;
+ temperature?: number | null;
+ probability?: number | null;
+ } | null;
  openAI?: {
  __typename?: 'OpenAIModelProperties';
  tokenLimit?: number | null;
@@ -1011,12 +1011,16 @@ var MistralModels;
  MistralModels["MistralSmall"] = "MISTRAL_SMALL";
  /** Mixtral 8x7b Instruct */
  MistralModels["Mixtral_8X7BInstruct"] = "MIXTRAL_8X7B_INSTRUCT";
+ /** Pixtral 12b (2024-09 version) */
+ MistralModels["Pixtral_12B_2409"] = "PIXTRAL_12B_2409";
  })(MistralModels || (exports.MistralModels = MistralModels = {}));
  /** Model service type */
  var ModelServiceTypes;
  (function (ModelServiceTypes) {
  /** Anthropic */
  ModelServiceTypes["Anthropic"] = "ANTHROPIC";
+ /** Azure AI */
+ ModelServiceTypes["AzureAi"] = "AZURE_AI";
  /** Azure OpenAI */
  ModelServiceTypes["AzureOpenAi"] = "AZURE_OPEN_AI";
  /** Cerebras */
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "graphlit-client",
- "version": "1.0.20240927002",
+ "version": "1.0.20240930001",
  "description": "Graphlit API TypeScript Client",
  "main": "dist/client.js",
  "types": "dist/client.d.ts",