graphlit-client 1.0.20251004001 → 1.0.20251006001

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -212,6 +212,14 @@ export const GetAlert = gql `
  id
  }
  }
+ openAIVideo {
+ model
+ seconds
+ size
+ seed {
+ id
+ }
+ }
  }
  summarySpecification {
  id
@@ -382,6 +390,14 @@ export const QueryAlerts = gql `
  id
  }
  }
+ openAIVideo {
+ model
+ seconds
+ size
+ seed {
+ id
+ }
+ }
  }
  summarySpecification {
  id
@@ -9274,7 +9290,6 @@ export const GetSpecification = gql `
  type
  contentLimit
  disableFallback
- expandRelatedContents
  }
  rerankingStrategy {
  serviceType
@@ -9649,7 +9664,6 @@ export const QuerySpecifications = gql `
  type
  contentLimit
  disableFallback
- expandRelatedContents
  }
  rerankingStrategy {
  serviceType
@@ -11156,6 +11170,7 @@ export const CreateWorkflow = gql `
  }
  enableEmailCollections
  enableFolderCollections
+ enableMessageCollections
  }
  indexing {
  jobs {
@@ -11320,6 +11335,7 @@ export const CreateWorkflow = gql `
  policy {
  type
  allowDuplicates
+ embeddingTypes
  }
  }
  actions {
@@ -11410,6 +11426,7 @@ export const GetWorkflow = gql `
  }
  enableEmailCollections
  enableFolderCollections
+ enableMessageCollections
  }
  indexing {
  jobs {
@@ -11574,6 +11591,7 @@ export const GetWorkflow = gql `
  policy {
  type
  allowDuplicates
+ embeddingTypes
  }
  }
  actions {
@@ -11637,6 +11655,7 @@ export const QueryWorkflows = gql `
  }
  enableEmailCollections
  enableFolderCollections
+ enableMessageCollections
  }
  indexing {
  jobs {
@@ -11801,6 +11820,7 @@ export const QueryWorkflows = gql `
  policy {
  type
  allowDuplicates
+ embeddingTypes
  }
  }
  actions {
@@ -11859,6 +11879,7 @@ export const UpdateWorkflow = gql `
  }
  enableEmailCollections
  enableFolderCollections
+ enableMessageCollections
  }
  indexing {
  jobs {
@@ -12023,6 +12044,7 @@ export const UpdateWorkflow = gql `
  policy {
  type
  allowDuplicates
+ embeddingTypes
  }
  }
  actions {
@@ -12080,6 +12102,7 @@ export const UpsertWorkflow = gql `
  }
  enableEmailCollections
  enableFolderCollections
+ enableMessageCollections
  }
  indexing {
  jobs {
@@ -12244,6 +12267,7 @@ export const UpsertWorkflow = gql `
  policy {
  type
  allowDuplicates
+ embeddingTypes
  }
  }
  actions {
@@ -1192,12 +1192,18 @@ export type CalendarAttendee = {
  };
  /** Represents a calendar event attendee input. */
  export type CalendarAttendeeInput = {
+ /** The attendee display name. */
+ displayName?: InputMaybe<Scalars['String']['input']>;
  /** The attendee email. */
  email?: InputMaybe<Scalars['String']['input']>;
  /** Whether the attendee is optional. */
  isOptional?: InputMaybe<Scalars['Boolean']['input']>;
  /** Whether the attendee is the organizer. */
  isOrganizer?: InputMaybe<Scalars['Boolean']['input']>;
+ /** Whether the attendee is required. */
+ isRequired?: InputMaybe<Scalars['Boolean']['input']>;
+ /** Whether the attendee is a resource. */
+ isResource?: InputMaybe<Scalars['Boolean']['input']>;
  /** The attendee name. */
  name?: InputMaybe<Scalars['String']['input']>;
  /** The attendee response status. */
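As a sketch, a CalendarAttendeeInput built against the widened shape could look like this (field names are taken from the diff above; the import path and all values are illustrative):

// Import path assumed; adjust to however your project pulls in the generated types.
import { CalendarAttendeeInput } from "graphlit-client";

const attendee: CalendarAttendeeInput = {
  displayName: "Jane Doe", // new in this release
  email: "jane@example.com",
  isOptional: false,
  isOrganizer: true,
  isRequired: true,        // new in this release
  isResource: false,       // new in this release
  name: "Jane Doe",
};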
@@ -2388,6 +2394,8 @@ export type ContentPublishingConnector = {
  googleImage?: Maybe<GoogleImagePublishingProperties>;
  /** The specific properties for OpenAI Image publishing. */
  openAIImage?: Maybe<OpenAiImagePublishingProperties>;
+ /** The specific properties for OpenAI Video publishing. */
+ openAIVideo?: Maybe<OpenAiVideoPublishingProperties>;
  /** The content publishing service type. */
  type: ContentPublishingServiceTypes;
  };
@@ -2401,6 +2409,8 @@ export type ContentPublishingConnectorInput = {
  googleImage?: InputMaybe<GoogleImagePublishingPropertiesInput>;
  /** The specific properties for OpenAI Image publishing. */
  openAIImage?: InputMaybe<OpenAiImagePublishingPropertiesInput>;
+ /** The specific properties for OpenAI Video publishing. */
+ openAIVideo?: InputMaybe<OpenAiVideoPublishingPropertiesInput>;
  /** The content publishing service type. */
  type: ContentPublishingServiceTypes;
  };
@@ -2414,6 +2424,8 @@ export type ContentPublishingConnectorUpdateInput = {
  googleImage?: InputMaybe<GoogleImagePublishingPropertiesInput>;
  /** The specific properties for OpenAI Image publishing. */
  openAIImage?: InputMaybe<OpenAiImagePublishingPropertiesInput>;
+ /** The specific properties for OpenAI Video publishing. */
+ openAIVideo?: InputMaybe<OpenAiVideoPublishingPropertiesInput>;
  /** The content publishing service type. */
  type?: InputMaybe<ContentPublishingServiceTypes>;
  };
@@ -2426,6 +2438,8 @@ export declare enum ContentPublishingFormats {
  Markdown = "MARKDOWN",
  /** MP3 */
  Mp3 = "MP3",
+ /** MP4 */
+ Mp4 = "MP4",
  /** PNG */
  Png = "PNG",
  /** Plain Text */
@@ -2441,6 +2455,8 @@ export declare enum ContentPublishingServiceTypes {
  GoogleImage = "GOOGLE_IMAGE",
  /** OpenAI Image publishing */
  OpenAiImage = "OPEN_AI_IMAGE",
+ /** OpenAI Video publishing */
+ OpenAiVideo = "OPEN_AI_VIDEO",
  /** Text publishing */
  Text = "TEXT"
  }
@@ -3609,6 +3625,17 @@ export type EmailPreparationPropertiesInput = {
  /** Whether to extract attachments from emails as ingested content. */
  includeAttachments?: InputMaybe<Scalars['Boolean']['input']>;
  };
+ /** Embedding type */
+ export declare enum EmbeddingTypes {
+ /** Audio embeddings */
+ Audio = "AUDIO",
+ /** Image embeddings */
+ Image = "IMAGE",
+ /** Text embeddings */
+ Text = "TEXT",
+ /** Video embeddings */
+ Video = "VIDEO"
+ }
  /** Represents the embeddings strategy. */
  export type EmbeddingsStrategy = {
  __typename?: 'EmbeddingsStrategy';
@@ -5233,7 +5260,7 @@ export type GoogleImagePublishingProperties = {
  count?: Maybe<Scalars['Int']['output']>;
  /** The Google Image model. */
  model?: Maybe<GoogleImageModels>;
- /** The seed image reference to use when generating image(s). */
+ /** The seed image reference to use when generating image(s), optional. */
  seed?: Maybe<EntityReference>;
  };
  /** Represents the Google Image publishing properties. */
@@ -5242,7 +5269,7 @@ export type GoogleImagePublishingPropertiesInput = {
  count?: InputMaybe<Scalars['Int']['input']>;
  /** The Google Image model. */
  model?: InputMaybe<GoogleImageModels>;
- /** The seed image reference to use when generating image(s). */
+ /** The seed image reference to use when generating image(s), optional. */
  seed?: InputMaybe<EntityReferenceInput>;
  };
  /** Represents Google model properties. */
@@ -5854,6 +5881,8 @@ export type IngestionWorkflowStage = {
  enableEmailCollections?: Maybe<Scalars['Boolean']['output']>;
  /** Whether to create collections for every site folder (i.e. '<parent>/<child>'). Disabled by default. */
  enableFolderCollections?: Maybe<Scalars['Boolean']['output']>;
+ /** Whether to create collections for every message conversation (i.e. Slack, Teams, Discord). Disabled by default. */
+ enableMessageCollections?: Maybe<Scalars['Boolean']['output']>;
  /** The ingestion filter. */
  if?: Maybe<IngestionContentFilter>;
  /** The observations to be assigned to ingested content. */
@@ -5867,6 +5896,8 @@ export type IngestionWorkflowStageInput = {
  enableEmailCollections?: InputMaybe<Scalars['Boolean']['input']>;
  /** Whether to create collections for every site folder (i.e. '<parent>/<child>'). Disabled by default. */
  enableFolderCollections?: InputMaybe<Scalars['Boolean']['input']>;
+ /** Whether to create collections for every message conversation (i.e. Slack, Teams, Discord). Disabled by default. */
+ enableMessageCollections?: InputMaybe<Scalars['Boolean']['input']>;
  /** The ingestion filter. */
  if?: InputMaybe<IngestionContentFilterInput>;
  /** The observations to be assigned to ingested content. */
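A minimal sketch of an ingestion stage that opts into the new per-conversation collections (only fields shown in this diff are used; the import path is assumed):

import { IngestionWorkflowStageInput } from "graphlit-client";

const ingestion: IngestionWorkflowStageInput = {
  enableEmailCollections: false,
  enableFolderCollections: true,
  // New in this release: one collection per message conversation (Slack, Teams, Discord).
  enableMessageCollections: true,
};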
@@ -10503,7 +10534,9 @@ export declare enum OpenAiImageModels {
  /** Developer-specified model */
  Custom = "CUSTOM",
  /** GPT Image-1 */
- GptImage_1 = "GPT_IMAGE_1"
+ GptImage_1 = "GPT_IMAGE_1",
+ /** GPT Image-1 Mini */
+ GptImage_1Mini = "GPT_IMAGE_1_MINI"
  }
  /** Represents the OpenAI Image publishing properties. */
  export type OpenAiImagePublishingProperties = {
@@ -10512,7 +10545,7 @@ export type OpenAiImagePublishingProperties = {
  count?: Maybe<Scalars['Int']['output']>;
  /** The OpenAI Image model. */
  model?: Maybe<OpenAiImageModels>;
- /** The seed image reference to use when generating image(s). */
+ /** The seed image reference to use when generating image(s), optional. */
  seed?: Maybe<EntityReference>;
  };
  /** Represents the OpenAI Image publishing properties. */
@@ -10521,7 +10554,7 @@ export type OpenAiImagePublishingPropertiesInput = {
  count?: InputMaybe<Scalars['Int']['input']>;
  /** The OpenAI Image model. */
  model?: InputMaybe<OpenAiImageModels>;
- /** The seed image reference to use when generating image(s). */
+ /** The seed image reference to use when generating image(s), optional. */
  seed?: InputMaybe<EntityReferenceInput>;
  };
  /** Represents OpenAI model properties. */
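The new image model could be selected as in this sketch (import path assumed; the seed identifier is a placeholder):

import { OpenAiImageModels, OpenAiImagePublishingPropertiesInput } from "graphlit-client";

const imageProperties: OpenAiImagePublishingPropertiesInput = {
  model: OpenAiImageModels.GptImage_1Mini, // new enum member
  count: 2,
  seed: { id: "SEED_IMAGE_CONTENT_ID" },   // optional seed image reference (placeholder id)
};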
@@ -10768,6 +10801,38 @@ export declare enum OpenAiReasoningEffortLevels {
  /** Medium effort */
  Medium = "MEDIUM"
  }
+ /** OpenAI Video model type */
+ export declare enum OpenAiVideoModels {
+ /** Developer-specified model */
+ Custom = "CUSTOM",
+ /** Sora 2 (Latest) - Fast generation */
+ Sora_2 = "SORA_2",
+ /** Sora 2 Pro - High quality production */
+ Sora_2Pro = "SORA_2_PRO"
+ }
+ /** Represents the OpenAI Video publishing properties. */
+ export type OpenAiVideoPublishingProperties = {
+ __typename?: 'OpenAIVideoPublishingProperties';
+ /** The OpenAI Video model. */
+ model?: Maybe<OpenAiVideoModels>;
+ /** The video duration in seconds, optional. Must be 4, 8, or 12. Defaults to 4. */
+ seconds?: Maybe<Scalars['Int']['output']>;
+ /** The seed image reference to use when generating video, optional. */
+ seed?: Maybe<EntityReference>;
+ /** The video resolution size, optional. */
+ size?: Maybe<VideoSizeTypes>;
+ };
+ /** Represents the OpenAI Video publishing properties. */
+ export type OpenAiVideoPublishingPropertiesInput = {
+ /** The OpenAI Video model. */
+ model?: InputMaybe<OpenAiVideoModels>;
+ /** The video duration in seconds, optional. Must be 4, 8, or 12. Defaults to 4. */
+ seconds?: InputMaybe<Scalars['Int']['input']>;
+ /** The seed image reference to use when generating video, optional. */
+ seed?: InputMaybe<EntityReferenceInput>;
+ /** The video resolution size, optional. */
+ size?: InputMaybe<VideoSizeTypes>;
+ };
  /** OpenAI vision model detail levels */
  export declare enum OpenAiVisionDetailLevels {
  /** High */
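Putting the video additions together, a publishing connector configured for OpenAI Video might look like the sketch below. Only the type and openAIVideo fields are confirmed by this diff; the format field and its MP4 value are assumed to pair with the new enum members, the import path is assumed, and the seed identifier is a placeholder:

import {
  ContentPublishingConnectorInput,
  ContentPublishingFormats,
  ContentPublishingServiceTypes,
  OpenAiVideoModels,
  VideoSizeTypes,
} from "graphlit-client";

const videoPublishing: ContentPublishingConnectorInput = {
  type: ContentPublishingServiceTypes.OpenAiVideo, // new service type
  format: ContentPublishingFormats.Mp4,            // assumed: pairs the connector with the new MP4 format
  openAIVideo: {
    model: OpenAiVideoModels.Sora_2,
    seconds: 8,                            // must be 4, 8, or 12; defaults to 4
    size: VideoSizeTypes.Size_1280X720,    // HD landscape
    seed: { id: "SEED_IMAGE_CONTENT_ID" }, // optional seed image reference (placeholder id)
  },
};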
@@ -13444,8 +13509,6 @@ export type RetrievalStrategy = {
  contentLimit?: Maybe<Scalars['Int']['output']>;
  /** Whether to disable fallback to previous contents, when no contents are found by semantic search. Defaults to false. */
  disableFallback?: Maybe<Scalars['Boolean']['output']>;
- /** Whether to expand related contents bidirectionally (child contents like extracted images, parent contents like source documents). Defaults to false. */
- expandRelatedContents?: Maybe<Scalars['Boolean']['output']>;
  /** The retrieval strategy type. */
  type: RetrievalStrategyTypes;
  };
@@ -13455,8 +13518,6 @@ export type RetrievalStrategyInput = {
  contentLimit?: InputMaybe<Scalars['Int']['input']>;
  /** Whether to disable fallback to previous contents, when no contents are found by semantic search. Defaults to false. */
  disableFallback?: InputMaybe<Scalars['Boolean']['input']>;
- /** Whether to expand related contents bidirectionally (child contents like extracted images, parent contents like source documents). Defaults to false. */
- expandRelatedContents?: InputMaybe<Scalars['Boolean']['input']>;
  /** The retrieval strategy type. */
  type: RetrievalStrategyTypes;
  };
@@ -13473,8 +13534,6 @@ export declare enum RetrievalStrategyTypes {
  export type RetrievalStrategyUpdateInput = {
  /** The maximum number of content sources to provide with prompt context. Defaults to 25. */
  contentLimit?: InputMaybe<Scalars['Int']['input']>;
- /** Whether to expand related contents bidirectionally (child contents like extracted images, parent contents like source documents). Defaults to false. */
- expandRelatedContents?: InputMaybe<Scalars['Boolean']['input']>;
  /** The retrieval strategy type. */
  type?: InputMaybe<RetrievalStrategyTypes>;
  };
@@ -13736,10 +13795,14 @@ export type SharePointLibraryResult = {
  libraryId?: Maybe<Scalars['ID']['output']>;
  /** The SharePoint library name. */
  libraryName?: Maybe<Scalars['String']['output']>;
+ /** The SharePoint library web URL. */
+ libraryWebUrl?: Maybe<Scalars['String']['output']>;
  /** The SharePoint site identifier. */
  siteId?: Maybe<Scalars['ID']['output']>;
  /** The SharePoint site name. */
  siteName?: Maybe<Scalars['String']['output']>;
+ /** The SharePoint site web URL. */
+ siteWebUrl?: Maybe<Scalars['String']['output']>;
  };
  /** Represents SharePoint libraries. */
  export type SharePointLibraryResults = {
@@ -14312,6 +14375,8 @@ export type StoragePolicy = {
  __typename?: 'StoragePolicy';
  /** Whether duplicate content (by URI, eTag, etc.) will be allowed upon ingestion, defaults to False. When disabled, content will be reingested in-place. */
  allowDuplicates?: Maybe<Scalars['Boolean']['output']>;
+ /** The types of embeddings to generate during enrichment. Defaults to all types if not specified. Specify an empty array to disable all embeddings. */
+ embeddingTypes?: Maybe<Array<Maybe<EmbeddingTypes>>>;
  /** The storage policy type. */
  type?: Maybe<StoragePolicyTypes>;
  };
@@ -14319,6 +14384,8 @@ export type StoragePolicy = {
  export type StoragePolicyInput = {
  /** Whether duplicate content (by URI, eTag, etc.) will be allowed upon ingestion, defaults to False. When disabled, content will be reingested in-place. */
  allowDuplicates?: InputMaybe<Scalars['Boolean']['input']>;
+ /** The types of embeddings to generate during enrichment. Defaults to all types if not specified. Specify an empty array to disable all embeddings. */
+ embeddingTypes?: InputMaybe<Array<InputMaybe<EmbeddingTypes>>>;
  /** The storage policy type. */
  type?: InputMaybe<StoragePolicyTypes>;
  };
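A minimal sketch of a storage policy that limits enrichment to text and image embeddings (only fields shown in this diff are used; import path assumed):

import { EmbeddingTypes, StoragePolicyInput } from "graphlit-client";

const storagePolicy: StoragePolicyInput = {
  allowDuplicates: false,
  // New in this release: omit for all embedding types, or pass [] to disable embeddings entirely.
  embeddingTypes: [EmbeddingTypes.Text, EmbeddingTypes.Image],
};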
@@ -14916,6 +14983,15 @@ export type VideoMetadataInput = {
  /** The video width. */
  width?: InputMaybe<Scalars['Int']['input']>;
  };
+ /** Video size (resolution) */
+ export declare enum VideoSizeTypes {
+ /** 1080x1920 (Full HD portrait) */
+ Size_1080X1920 = "SIZE_1080X1920",
+ /** 1280x720 (HD landscape) */
+ Size_1280X720 = "SIZE_1280X720",
+ /** 1920x1080 (Full HD landscape) */
+ Size_1920X1080 = "SIZE_1920X1080"
+ }
  /** Represents a view. */
  export type View = {
  __typename?: 'View';
@@ -15686,6 +15762,16 @@ export type GetAlertQuery = {
  id: string;
  } | null;
  } | null;
+ openAIVideo?: {
+ __typename?: 'OpenAIVideoPublishingProperties';
+ model?: OpenAiVideoModels | null;
+ seconds?: number | null;
+ size?: VideoSizeTypes | null;
+ seed?: {
+ __typename?: 'EntityReference';
+ id: string;
+ } | null;
+ } | null;
  };
  summarySpecification?: {
  __typename?: 'EntityReference';
@@ -15903,6 +15989,16 @@ export type QueryAlertsQuery = {
  id: string;
  } | null;
  } | null;
+ openAIVideo?: {
+ __typename?: 'OpenAIVideoPublishingProperties';
+ model?: OpenAiVideoModels | null;
+ seconds?: number | null;
+ size?: VideoSizeTypes | null;
+ seed?: {
+ __typename?: 'EntityReference';
+ id: string;
+ } | null;
+ } | null;
  };
  summarySpecification?: {
  __typename?: 'EntityReference';
@@ -26343,7 +26439,6 @@ export type GetSpecificationQuery = {
  type: RetrievalStrategyTypes;
  contentLimit?: number | null;
  disableFallback?: boolean | null;
- expandRelatedContents?: boolean | null;
  } | null;
  rerankingStrategy?: {
  __typename?: 'RerankingStrategy';
@@ -26764,7 +26859,6 @@ export type QuerySpecificationsQuery = {
  type: RetrievalStrategyTypes;
  contentLimit?: number | null;
  disableFallback?: boolean | null;
- expandRelatedContents?: boolean | null;
  } | null;
  rerankingStrategy?: {
  __typename?: 'RerankingStrategy';
@@ -28651,6 +28745,7 @@ export type CreateWorkflowMutation = {
  __typename?: 'IngestionWorkflowStage';
  enableEmailCollections?: boolean | null;
  enableFolderCollections?: boolean | null;
+ enableMessageCollections?: boolean | null;
  if?: {
  __typename?: 'IngestionContentFilter';
  types?: Array<ContentTypes> | null;
@@ -28880,6 +28975,7 @@ export type CreateWorkflowMutation = {
  __typename?: 'StoragePolicy';
  type?: StoragePolicyTypes | null;
  allowDuplicates?: boolean | null;
+ embeddingTypes?: Array<EmbeddingTypes | null> | null;
  } | null;
  } | null;
  actions?: Array<{
@@ -28972,6 +29068,7 @@ export type GetWorkflowQuery = {
  __typename?: 'IngestionWorkflowStage';
  enableEmailCollections?: boolean | null;
  enableFolderCollections?: boolean | null;
+ enableMessageCollections?: boolean | null;
  if?: {
  __typename?: 'IngestionContentFilter';
  types?: Array<ContentTypes> | null;
@@ -29201,6 +29298,7 @@ export type GetWorkflowQuery = {
  __typename?: 'StoragePolicy';
  type?: StoragePolicyTypes | null;
  allowDuplicates?: boolean | null;
+ embeddingTypes?: Array<EmbeddingTypes | null> | null;
  } | null;
  } | null;
  actions?: Array<{
@@ -29259,6 +29357,7 @@ export type QueryWorkflowsQuery = {
  __typename?: 'IngestionWorkflowStage';
  enableEmailCollections?: boolean | null;
  enableFolderCollections?: boolean | null;
+ enableMessageCollections?: boolean | null;
  if?: {
  __typename?: 'IngestionContentFilter';
  types?: Array<ContentTypes> | null;
@@ -29488,6 +29587,7 @@ export type QueryWorkflowsQuery = {
  __typename?: 'StoragePolicy';
  type?: StoragePolicyTypes | null;
  allowDuplicates?: boolean | null;
+ embeddingTypes?: Array<EmbeddingTypes | null> | null;
  } | null;
  } | null;
  actions?: Array<{
@@ -29538,6 +29638,7 @@ export type UpdateWorkflowMutation = {
  __typename?: 'IngestionWorkflowStage';
  enableEmailCollections?: boolean | null;
  enableFolderCollections?: boolean | null;
+ enableMessageCollections?: boolean | null;
  if?: {
  __typename?: 'IngestionContentFilter';
  types?: Array<ContentTypes> | null;
@@ -29767,6 +29868,7 @@ export type UpdateWorkflowMutation = {
  __typename?: 'StoragePolicy';
  type?: StoragePolicyTypes | null;
  allowDuplicates?: boolean | null;
+ embeddingTypes?: Array<EmbeddingTypes | null> | null;
  } | null;
  } | null;
  actions?: Array<{
@@ -29816,6 +29918,7 @@ export type UpsertWorkflowMutation = {
  __typename?: 'IngestionWorkflowStage';
  enableEmailCollections?: boolean | null;
  enableFolderCollections?: boolean | null;
+ enableMessageCollections?: boolean | null;
  if?: {
  __typename?: 'IngestionContentFilter';
  types?: Array<ContentTypes> | null;
@@ -30045,6 +30148,7 @@ export type UpsertWorkflowMutation = {
  __typename?: 'StoragePolicy';
  type?: StoragePolicyTypes | null;
  allowDuplicates?: boolean | null;
+ embeddingTypes?: Array<EmbeddingTypes | null> | null;
  } | null;
  } | null;
  actions?: Array<{
@@ -438,6 +438,8 @@ export var ContentPublishingFormats;
  ContentPublishingFormats["Markdown"] = "MARKDOWN";
  /** MP3 */
  ContentPublishingFormats["Mp3"] = "MP3";
+ /** MP4 */
+ ContentPublishingFormats["Mp4"] = "MP4";
  /** PNG */
  ContentPublishingFormats["Png"] = "PNG";
  /** Plain Text */
@@ -454,6 +456,8 @@ export var ContentPublishingServiceTypes;
  ContentPublishingServiceTypes["GoogleImage"] = "GOOGLE_IMAGE";
  /** OpenAI Image publishing */
  ContentPublishingServiceTypes["OpenAiImage"] = "OPEN_AI_IMAGE";
+ /** OpenAI Video publishing */
+ ContentPublishingServiceTypes["OpenAiVideo"] = "OPEN_AI_VIDEO";
  /** Text publishing */
  ContentPublishingServiceTypes["Text"] = "TEXT";
  })(ContentPublishingServiceTypes || (ContentPublishingServiceTypes = {}));
@@ -633,6 +637,18 @@ export var EmailListingTypes;
  /** Read past emails */
  EmailListingTypes["Past"] = "PAST";
  })(EmailListingTypes || (EmailListingTypes = {}));
+ /** Embedding type */
+ export var EmbeddingTypes;
+ (function (EmbeddingTypes) {
+ /** Audio embeddings */
+ EmbeddingTypes["Audio"] = "AUDIO";
+ /** Image embeddings */
+ EmbeddingTypes["Image"] = "IMAGE";
+ /** Text embeddings */
+ EmbeddingTypes["Text"] = "TEXT";
+ /** Video embeddings */
+ EmbeddingTypes["Video"] = "VIDEO";
+ })(EmbeddingTypes || (EmbeddingTypes = {}));
  /** Entity enrichment service types */
  export var EntityEnrichmentServiceTypes;
  (function (EntityEnrichmentServiceTypes) {
@@ -1651,6 +1667,8 @@ export var OpenAiImageModels;
  OpenAiImageModels["Custom"] = "CUSTOM";
  /** GPT Image-1 */
  OpenAiImageModels["GptImage_1"] = "GPT_IMAGE_1";
+ /** GPT Image-1 Mini */
+ OpenAiImageModels["GptImage_1Mini"] = "GPT_IMAGE_1_MINI";
  })(OpenAiImageModels || (OpenAiImageModels = {}));
  /** OpenAI model type */
  export var OpenAiModels;
@@ -1822,6 +1840,16 @@ export var OpenAiReasoningEffortLevels;
  /** Medium effort */
  OpenAiReasoningEffortLevels["Medium"] = "MEDIUM";
  })(OpenAiReasoningEffortLevels || (OpenAiReasoningEffortLevels = {}));
+ /** OpenAI Video model type */
+ export var OpenAiVideoModels;
+ (function (OpenAiVideoModels) {
+ /** Developer-specified model */
+ OpenAiVideoModels["Custom"] = "CUSTOM";
+ /** Sora 2 (Latest) - Fast generation */
+ OpenAiVideoModels["Sora_2"] = "SORA_2";
+ /** Sora 2 Pro - High quality production */
+ OpenAiVideoModels["Sora_2Pro"] = "SORA_2_PRO";
+ })(OpenAiVideoModels || (OpenAiVideoModels = {}));
  /** OpenAI vision model detail levels */
  export var OpenAiVisionDetailLevels;
  (function (OpenAiVisionDetailLevels) {
@@ -2324,6 +2352,16 @@ export var UserTypes;
  /** Human user */
  UserTypes["Human"] = "HUMAN";
  })(UserTypes || (UserTypes = {}));
+ /** Video size (resolution) */
+ export var VideoSizeTypes;
+ (function (VideoSizeTypes) {
+ /** 1080x1920 (Full HD portrait) */
+ VideoSizeTypes["Size_1080X1920"] = "SIZE_1080X1920";
+ /** 1280x720 (HD landscape) */
+ VideoSizeTypes["Size_1280X720"] = "SIZE_1280X720";
+ /** 1920x1080 (Full HD landscape) */
+ VideoSizeTypes["Size_1920X1080"] = "SIZE_1920X1080";
+ })(VideoSizeTypes || (VideoSizeTypes = {}));
  /** View type */
  export var ViewTypes;
  (function (ViewTypes) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "graphlit-client",
- "version": "1.0.20251004001",
+ "version": "1.0.20251006001",
  "description": "Graphlit API Client for TypeScript",
  "type": "module",
  "main": "./dist/client.js",