graphlit-client 1.0.20250414001 → 1.0.20250420001

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -7864,6 +7864,8 @@ exports.GetSpecification = (0, graphql_tag_1.default) `
  temperature
  probability
  chunkTokenLimit
+ enableThinking
+ thinkingTokenLimit
  }
  replicate {
  tokenLimit
@@ -8207,6 +8209,8 @@ exports.QuerySpecifications = (0, graphql_tag_1.default) `
  temperature
  probability
  chunkTokenLimit
+ enableThinking
+ thinkingTokenLimit
  }
  replicate {
  tokenLimit
@@ -4156,16 +4156,20 @@ export type GoogleDriveFeedPropertiesInput = {
  };
  /** Represents Google Drive properties. */
  export type GoogleDriveFeedPropertiesUpdateInput = {
- /** Google client identifier. */
+ /** Google Drive authentication type, defaults to User. */
+ authenticationType?: InputMaybe<GoogleDriveAuthenticationTypes>;
+ /** Google client identifier, when using User authentication type. */
  clientId?: InputMaybe<Scalars['String']['input']>;
- /** Google client secret. */
+ /** Google client secret, when using User authentication type. */
  clientSecret?: InputMaybe<Scalars['String']['input']>;
  /** Google Drive file identifiers. Takes precedence over folder identifier. */
  files?: InputMaybe<Array<InputMaybe<Scalars['String']['input']>>>;
  /** Google Drive folder identifier. */
  folderId?: InputMaybe<Scalars['String']['input']>;
- /** Google refresh token. */
+ /** Google refresh token, when using User authentication type. */
  refreshToken?: InputMaybe<Scalars['String']['input']>;
+ /** Google service account JSON, when using ServiceAccount authentication type. */
+ serviceAccountJson?: InputMaybe<Scalars['String']['input']>;
  };
  /** Represents Google Email feed properties. */
  export type GoogleEmailFeedProperties = {
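
A note on the change above: Google Drive feed updates can now authenticate either as a user (OAuth client credentials plus a refresh token) or as a service account via the new authenticationType and serviceAccountJson fields. A minimal TypeScript sketch of both shapes, assuming the generated GoogleDriveAuthenticationTypes enum exposes User and ServiceAccount members and that the generated types are exported from the package root (neither detail is shown in this hunk):

// Import path is an assumption; adjust to wherever your build exposes the generated types.
import { GoogleDriveAuthenticationTypes, GoogleDriveFeedPropertiesUpdateInput } from "graphlit-client";

// User authentication: OAuth client credentials plus a refresh token.
const userDrive: GoogleDriveFeedPropertiesUpdateInput = {
  authenticationType: GoogleDriveAuthenticationTypes.User,
  clientId: "<oauth-client-id>",
  clientSecret: "<oauth-client-secret>",
  refreshToken: "<oauth-refresh-token>",
  folderId: "<drive-folder-id>",
};

// ServiceAccount authentication: the new serviceAccountJson field carries the key file contents.
const serviceDrive: GoogleDriveFeedPropertiesUpdateInput = {
  authenticationType: GoogleDriveAuthenticationTypes.ServiceAccount,
  serviceAccountJson: "<service-account-key-json>",
  folderId: "<drive-folder-id>",
};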
@@ -4260,6 +4264,8 @@ export type GoogleModelProperties = {
  chunkTokenLimit?: Maybe<Scalars['Int']['output']>;
  /** The limit of tokens generated by prompt completion. */
  completionTokenLimit?: Maybe<Scalars['Int']['output']>;
+ /** Whether Gemini's extended thinking is enabled. Applies to Gemini Flash 2.5 or higher. */
+ enableThinking?: Maybe<Scalars['Boolean']['output']>;
  /** The Google API key, if using developer's own account. */
  key?: Maybe<Scalars['String']['output']>;
  /** The Google model, or custom, when using developer's own account. */
@@ -4270,6 +4276,8 @@ export type GoogleModelProperties = {
  probability?: Maybe<Scalars['Float']['output']>;
  /** The model temperature. */
  temperature?: Maybe<Scalars['Float']['output']>;
+ /** The limit of thinking tokens allowed for Gemini's internal reasoning process. */
+ thinkingTokenLimit?: Maybe<Scalars['Int']['output']>;
  /** The number of tokens which can provided to the Google model, if using developer's own account. */
  tokenLimit?: Maybe<Scalars['Int']['output']>;
  };
@@ -4279,6 +4287,8 @@ export type GoogleModelPropertiesInput = {
  chunkTokenLimit?: InputMaybe<Scalars['Int']['input']>;
  /** The limit of tokens generated by prompt completion. */
  completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
+ /** Whether Gemini's extended thinking is enabled. Applies to Gemini Flash 2.5 or higher. */
+ enableThinking?: InputMaybe<Scalars['Boolean']['input']>;
  /** The Google API key, if using developer's own account. */
  key?: InputMaybe<Scalars['String']['input']>;
  /** The Google model, or custom, when using developer's own account. */
@@ -4289,6 +4299,8 @@ export type GoogleModelPropertiesInput = {
  probability?: InputMaybe<Scalars['Float']['input']>;
  /** The model temperature. */
  temperature?: InputMaybe<Scalars['Float']['input']>;
+ /** The limit of thinking tokens allowed for Gemini's internal reasoning process. */
+ thinkingTokenLimit?: InputMaybe<Scalars['Int']['input']>;
  /** The number of tokens which can provided to the Google model, if using developer's own account. */
  tokenLimit?: InputMaybe<Scalars['Int']['input']>;
  };
@@ -4298,6 +4310,8 @@ export type GoogleModelPropertiesUpdateInput = {
  chunkTokenLimit?: InputMaybe<Scalars['Int']['input']>;
  /** The limit of tokens generated by prompt completion. */
  completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
+ /** Whether Gemini's extended thinking is enabled. Applies to Gemini Flash 2.5 or higher. */
+ enableThinking?: InputMaybe<Scalars['Boolean']['input']>;
  /** The Google API key, if using developer's own account. */
  key?: InputMaybe<Scalars['String']['input']>;
  /** The Google model, or custom, when using developer's own account. */
@@ -4308,6 +4322,8 @@ export type GoogleModelPropertiesUpdateInput = {
  probability?: InputMaybe<Scalars['Float']['input']>;
  /** The model temperature. */
  temperature?: InputMaybe<Scalars['Float']['input']>;
+ /** The limit of thinking tokens allowed for Gemini's internal reasoning process. */
+ thinkingTokenLimit?: InputMaybe<Scalars['Int']['input']>;
  /** The number of tokens which can provided to the Google model, if using developer's own account. */
  tokenLimit?: InputMaybe<Scalars['Int']['input']>;
  };
@@ -4346,6 +4362,8 @@ export declare enum GoogleModels {
  * @deprecated Use Gemini 2.5 Pro (Experimental) instead.
  */
  Gemini_2_0ProExperimental = "GEMINI_2_0_PRO_EXPERIMENTAL",
+ /** Gemini 2.5 Flash (Preview) */
+ Gemini_2_5FlashPreview = "GEMINI_2_5_FLASH_PREVIEW",
  /** Gemini 2.5 Pro (Experimental) */
  Gemini_2_5ProExperimental = "GEMINI_2_5_PRO_EXPERIMENTAL",
  /** Gemini 2.5 Pro (Preview) */
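
The five hunks above add the same pair of fields (enableThinking, thinkingTokenLimit) to the output, input, and update-input shapes of the Google model properties, and the enum hunk adds the Gemini 2.5 Flash (Preview) model they target. A minimal sketch of an input combining them, assuming the model is set through a model field (only its doc comment appears in this diff) and using the same import-path assumption as above:

import { GoogleModels, GoogleModelPropertiesInput } from "graphlit-client"; // path assumed

// Gemini 2.5 Flash with extended thinking enabled. Per the doc comments above,
// enableThinking applies to Gemini 2.5 Flash or higher, and thinkingTokenLimit
// caps the tokens spent on the model's internal reasoning.
const google: GoogleModelPropertiesInput = {
  model: GoogleModels.Gemini_2_5FlashPreview, // field name assumed from its doc comment
  temperature: 0.5,
  completionTokenLimit: 2048,
  enableThinking: true,
  thinkingTokenLimit: 1024,
};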
@@ -9275,10 +9293,18 @@ export declare enum OpenAiModels {
  O1Preview_128K = "O1_PREVIEW_128K",
  /** o1 Preview 128k (2024-09-12 version) */
  O1Preview_128K_20240912 = "O1_PREVIEW_128K_20240912",
+ /** o3 200k (Latest) */
+ O3_200K = "O3_200K",
+ /** o3 200k (2025-04-16 version) */
+ O3_200K_20250416 = "O3_200K_20250416",
  /** o3 Mini 200k (Latest) */
  O3Mini_200K = "O3_MINI_200K",
  /** o3 Mini 200k (2025-01-31 version) */
- O3Mini_200K_20250131 = "O3_MINI_200K_20250131"
+ O3Mini_200K_20250131 = "O3_MINI_200K_20250131",
+ /** o4 Mini 200k (Latest) */
+ O4Mini_200K = "O4_MINI_200K",
+ /** o4 Mini 200k (2025-04-16 version) */
+ O4Mini_200K_20250416 = "O4_MINI_200K_20250416"
  }
  /** OpenAI reasoning effort levels */
  export declare enum OpenAiReasoningEffortLevels {
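
The new OpenAI entries follow the existing pattern of a floating Latest member plus a date-pinned snapshot, so they are referenced the same way as the earlier o-series members:

import { OpenAiModels } from "graphlit-client"; // path assumed, as above

const latestO3 = OpenAiModels.O3_200K;                   // tracks the latest o3 200k snapshot
const pinnedO4Mini = OpenAiModels.O4Mini_200K_20250416;  // pins the 2025-04-16 o4 Mini snapshot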
@@ -22818,6 +22844,8 @@ export type GetSpecificationQuery = {
  temperature?: number | null;
  probability?: number | null;
  chunkTokenLimit?: number | null;
+ enableThinking?: boolean | null;
+ thinkingTokenLimit?: number | null;
  } | null;
  replicate?: {
  __typename?: 'ReplicateModelProperties';
@@ -23205,6 +23233,8 @@ export type QuerySpecificationsQuery = {
  temperature?: number | null;
  probability?: number | null;
  chunkTokenLimit?: number | null;
+ enableThinking?: boolean | null;
+ thinkingTokenLimit?: number | null;
  } | null;
  replicate?: {
  __typename?: 'ReplicateModelProperties';
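
On the read side, both query result types above now surface the same two fields, so callers can inspect whether thinking was configured on a stored specification. A rough sketch, assuming client is an instance of the package's Graphlit class and exposes a getSpecification helper returning the GetSpecificationQuery shape (only the result shape is confirmed by these hunks):

// Hypothetical read-back; only google.enableThinking / google.thinkingTokenLimit
// are confirmed by the hunks above.
const result = await client.getSpecification(specificationId);
const google = result?.specification?.google;
if (google?.enableThinking) {
  console.log(`Thinking enabled, limit: ${google.thinkingTokenLimit ?? "default"} tokens`);
}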
@@ -881,6 +881,8 @@ var GoogleModels;
  * @deprecated Use Gemini 2.5 Pro (Experimental) instead.
  */
  GoogleModels["Gemini_2_0ProExperimental"] = "GEMINI_2_0_PRO_EXPERIMENTAL";
+ /** Gemini 2.5 Flash (Preview) */
+ GoogleModels["Gemini_2_5FlashPreview"] = "GEMINI_2_5_FLASH_PREVIEW";
  /** Gemini 2.5 Pro (Experimental) */
  GoogleModels["Gemini_2_5ProExperimental"] = "GEMINI_2_5_PRO_EXPERIMENTAL";
  /** Gemini 2.5 Pro (Preview) */
@@ -1460,10 +1462,18 @@ var OpenAiModels;
  OpenAiModels["O1Preview_128K"] = "O1_PREVIEW_128K";
  /** o1 Preview 128k (2024-09-12 version) */
  OpenAiModels["O1Preview_128K_20240912"] = "O1_PREVIEW_128K_20240912";
+ /** o3 200k (Latest) */
+ OpenAiModels["O3_200K"] = "O3_200K";
+ /** o3 200k (2025-04-16 version) */
+ OpenAiModels["O3_200K_20250416"] = "O3_200K_20250416";
  /** o3 Mini 200k (Latest) */
  OpenAiModels["O3Mini_200K"] = "O3_MINI_200K";
  /** o3 Mini 200k (2025-01-31 version) */
  OpenAiModels["O3Mini_200K_20250131"] = "O3_MINI_200K_20250131";
+ /** o4 Mini 200k (Latest) */
+ OpenAiModels["O4Mini_200K"] = "O4_MINI_200K";
+ /** o4 Mini 200k (2025-04-16 version) */
+ OpenAiModels["O4Mini_200K_20250416"] = "O4_MINI_200K_20250416";
  })(OpenAiModels || (exports.OpenAiModels = OpenAiModels = {}));
  /** OpenAI reasoning effort levels */
  var OpenAiReasoningEffortLevels;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "graphlit-client",
- "version": "1.0.20250414001",
+ "version": "1.0.20250420001",
  "description": "Graphlit API TypeScript Client",
  "main": "dist/client.js",
  "types": "dist/client.d.ts",