graphlit-client 1.0.20240725001 → 1.0.20240803001
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -617,7 +617,6 @@ exports.GetContent = (0, graphql_tag_1.default) `
 season
 publisher
 copyright
-language
 genre
 title
 description
@@ -712,6 +711,9 @@ exports.GetContent = (0, graphql_tag_1.default) `
 folderCount
 isEncrypted
 }
+language {
+languages
+}
 parent {
 id
 name
@@ -999,7 +1001,6 @@ exports.QueryContents = (0, graphql_tag_1.default) `
 season
 publisher
 copyright
-language
 genre
 title
 description
@@ -1094,6 +1095,9 @@ exports.QueryContents = (0, graphql_tag_1.default) `
 folderCount
 isEncrypted
 }
+language {
+languages
+}
 parent {
 id
 name
@@ -1238,7 +1242,6 @@ exports.QueryContentsFacets = (0, graphql_tag_1.default) `
 season
 publisher
 copyright
-language
 genre
 title
 description
@@ -1333,6 +1336,9 @@ exports.QueryContentsFacets = (0, graphql_tag_1.default) `
 folderCount
 isEncrypted
 }
+language {
+languages
+}
 parent {
 id
 name
@@ -1616,7 +1622,6 @@ exports.GetConversation = (0, graphql_tag_1.default) `
 season
 publisher
 copyright
-language
 genre
 title
 description
@@ -1812,7 +1817,6 @@ exports.PromptConversation = (0, graphql_tag_1.default) `
 season
 publisher
 copyright
-language
 genre
 title
 description
@@ -2003,7 +2007,6 @@ exports.QueryConversations = (0, graphql_tag_1.default) `
 season
 publisher
 copyright
-language
 genre
 title
 description
@@ -3279,6 +3282,7 @@ exports.LookupCredits = (0, graphql_tag_1.default) `
 credits
 storageRatio
 computeRatio
+indexingRatio
 preparationRatio
 extractionRatio
 enrichmentRatio
@@ -3332,6 +3336,7 @@ exports.QueryCredits = (0, graphql_tag_1.default) `
 credits
 storageRatio
 computeRatio
+indexingRatio
 preparationRatio
 extractionRatio
 enrichmentRatio
@@ -3687,6 +3692,35 @@ exports.GetSpecification = (0, graphql_tag_1.default) `
 temperature
 probability
 }
+mistral {
+tokenLimit
+completionTokenLimit
+model
+key
+modelName
+endpoint
+temperature
+probability
+}
+groq {
+tokenLimit
+completionTokenLimit
+model
+key
+modelName
+endpoint
+temperature
+probability
+}
+deepseek {
+tokenLimit
+completionTokenLimit
+model
+key
+modelName
+temperature
+probability
+}
 tools {
 name
 description
@@ -3755,7 +3789,6 @@ exports.PromptSpecifications = (0, graphql_tag_1.default) `
 season
 publisher
 copyright
-language
 genre
 title
 description
@@ -3915,6 +3948,35 @@ exports.QuerySpecifications = (0, graphql_tag_1.default) `
 temperature
 probability
 }
+mistral {
+tokenLimit
+completionTokenLimit
+model
+key
+modelName
+endpoint
+temperature
+probability
+}
+groq {
+tokenLimit
+completionTokenLimit
+model
+key
+modelName
+endpoint
+temperature
+probability
+}
+deepseek {
+tokenLimit
+completionTokenLimit
+model
+key
+modelName
+temperature
+probability
+}
 tools {
 name
 description
@@ -3960,6 +4022,15 @@ exports.CreateWorkflow = (0, graphql_tag_1.default) `
 id
 }
 }
+indexing {
+jobs {
+connector {
+type
+contentType
+fileType
+}
+}
+}
 preparation {
 disableSmartCapture
 summarizations {
@@ -4016,6 +4087,11 @@ exports.CreateWorkflow = (0, graphql_tag_1.default) `
 detailLevel
 customInstructions
 }
+modelImage {
+specification {
+id
+}
+}
 modelText {
 specification {
 id
@@ -4113,6 +4189,15 @@ exports.GetWorkflow = (0, graphql_tag_1.default) `
 id
 }
 }
+indexing {
+jobs {
+connector {
+type
+contentType
+fileType
+}
+}
+}
 preparation {
 disableSmartCapture
 summarizations {
@@ -4169,6 +4254,11 @@ exports.GetWorkflow = (0, graphql_tag_1.default) `
 detailLevel
 customInstructions
 }
+modelImage {
+specification {
+id
+}
+}
 modelText {
 specification {
 id
@@ -4239,6 +4329,15 @@ exports.QueryWorkflows = (0, graphql_tag_1.default) `
 id
 }
 }
+indexing {
+jobs {
+connector {
+type
+contentType
+fileType
+}
+}
+}
 preparation {
 disableSmartCapture
 summarizations {
@@ -4295,6 +4394,11 @@ exports.QueryWorkflows = (0, graphql_tag_1.default) `
 detailLevel
 customInstructions
 }
+modelImage {
+specification {
+id
+}
+}
 modelText {
 specification {
 id
@@ -4360,6 +4464,15 @@ exports.UpdateWorkflow = (0, graphql_tag_1.default) `
 id
 }
 }
+indexing {
+jobs {
+connector {
+type
+contentType
+fileType
+}
+}
+}
 preparation {
 disableSmartCapture
 summarizations {
@@ -4416,6 +4529,11 @@ exports.UpdateWorkflow = (0, graphql_tag_1.default) `
 detailLevel
 customInstructions
 }
+modelImage {
+specification {
+id
+}
+}
 modelText {
 specification {
 id
@@ -446,8 +446,6 @@ export type AudioMetadata = {
 genre?: Maybe<Scalars['String']['output']>;
 /** The episode keywords, if podcast episode. */
 keywords?: Maybe<Array<Maybe<Scalars['String']['output']>>>;
-/** The audio language. */
-language?: Maybe<Scalars['String']['output']>;
 /** The episode publisher, if podcast episode. */
 publisher?: Maybe<Scalars['String']['output']>;
 /** The audio sample rate. */
@@ -483,8 +481,6 @@ export type AudioMetadataInput = {
 genre?: InputMaybe<Scalars['String']['input']>;
 /** The episode keywords, if podcast episode. */
 keywords?: InputMaybe<Array<InputMaybe<Scalars['String']['input']>>>;
-/** The audio language. */
-language?: InputMaybe<Scalars['String']['input']>;
 /** The metadata geo-location. */
 location?: InputMaybe<PointInput>;
 /** The metadata modified date. */
@@ -1074,6 +1070,8 @@ export type Content = {
 issue?: Maybe<IssueMetadata>;
 /** The summarized content keywords or key phrases. */
 keywords?: Maybe<Array<Scalars['String']['output']>>;
+/** The content language metadata. */
+language?: Maybe<LanguageMetadata>;
 /** The extracted hyperlinks. */
 links?: Maybe<Array<LinkReferenceType>>;
 /** The geo-location of the content. */
@@ -1384,6 +1382,29 @@ export type ContentGraphInput = {
 /** The observable types. */
 types?: InputMaybe<Array<InputMaybe<ObservableTypes>>>;
 };
+/** Represents a content indexing connector. */
+export type ContentIndexingConnector = {
+__typename?: 'ContentIndexingConnector';
+/** The content type for filtering content indexing services. */
+contentType?: Maybe<ContentTypes>;
+/** The file type for filtering content indexing services. */
+fileType?: Maybe<FileTypes>;
+/** The content indexing service type. */
+type?: Maybe<ContentIndexingServiceTypes>;
+};
+/** Represents a content indexing connector. */
+export type ContentIndexingConnectorInput = {
+/** The content type for filtering content indexing services. */
+contentType?: InputMaybe<ContentTypes>;
+/** The file type for filtering content indexing services. */
+fileType?: InputMaybe<FileTypes>;
+/** The entity enrichment service type. */
+type?: InputMaybe<ContentIndexingServiceTypes>;
+};
+export declare enum ContentIndexingServiceTypes {
+/** Azure AI Language */
+AzureAiLanguage = "AZURE_AI_LANGUAGE"
+}
 /** Represents content. */
 export type ContentInput = {
 /** The content description. */
@@ -1505,6 +1526,8 @@ export type ContentUpdateInput = {
 issue?: InputMaybe<IssueMetadataInput>;
 /** The summarized content keywords or key phrases. */
 keywords?: InputMaybe<Array<Scalars['String']['input']>>;
+/** The content language metadata. */
+language?: InputMaybe<LanguageMetadataInput>;
 /** The name of the content. */
 name?: InputMaybe<Scalars['String']['input']>;
 /** The content package metadata. */
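Content and ContentUpdateInput now carry language as structured LanguageMetadata instead of the per-audio string removed from AudioMetadata above. A minimal sketch of tagging existing content with its languages; the package-root import and the id field on the update input are assumptions, not part of this diff:

import { ContentUpdateInput } from 'graphlit-client'; // assumed: generated types are re-exported from the package root

const update: ContentUpdateInput = {
    id: '00000000-0000-0000-0000-000000000000', // assumed: update inputs reference the existing content by id
    // New in 1.0.20240803001: content-level language metadata, ISO 639-1 codes per the LanguageMetadataInput doc comment.
    language: { languages: ['en', 'fr'] },
};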
@@ -2310,9 +2333,14 @@ export type EntityExtractionConnector = {
 extractedTypes?: Maybe<Array<ObservableTypes>>;
 /** The file types to allow for entity extraction. */
 fileTypes?: Maybe<Array<FileTypes>>;
+/** The specific properties for LLM image entity extraction. */
+modelImage?: Maybe<ModelImageExtractionProperties>;
 /** The specific properties for LLM text entity extraction. */
 modelText?: Maybe<ModelTextExtractionProperties>;
-/**
+/**
+* The specific properties for OpenAI image entity extraction.
+* @deprecated Use MODEL_IMAGE instead.
+*/
 openAIImage?: Maybe<OpenAiImageExtractionProperties>;
 /** The entity extraction connector service type. */
 type: EntityExtractionServiceTypes;
@@ -2331,22 +2359,27 @@ export type EntityExtractionConnectorInput = {
 extractedTypes?: InputMaybe<Array<ObservableTypes>>;
 /** The file types to allow for entity extraction. */
 fileTypes?: InputMaybe<Array<FileTypes>>;
+/** The specific properties for LLM image entity extraction. */
+modelImage?: InputMaybe<ModelImageExtractionPropertiesInput>;
 /** The specific properties for LLM text entity extraction. */
 modelText?: InputMaybe<ModelTextExtractionPropertiesInput>;
-/** The specific properties for OpenAI image entity extraction. */
-openAIImage?: InputMaybe<OpenAiImageExtractionPropertiesInput>;
 /** The entity extraction service type. */
 type: EntityExtractionServiceTypes;
 };
 /** Entity extraction service type */
 export declare enum EntityExtractionServiceTypes {
-/** Azure Cognitive Services Image */
+/** Azure AI Vision, fka Azure Cognitive Services Image */
 AzureCognitiveServicesImage = "AZURE_COGNITIVE_SERVICES_IMAGE",
-/** Azure Cognitive Services Text */
+/** Azure AI Language, fka Azure Cognitive Services Text */
 AzureCognitiveServicesText = "AZURE_COGNITIVE_SERVICES_TEXT",
+/** LLM Image */
+ModelImage = "MODEL_IMAGE",
 /** LLM Text */
 ModelText = "MODEL_TEXT",
-/**
+/**
+* OpenAI Image
+* @deprecated Use MODEL_IMAGE instead.
+*/
 OpenAiImage = "OPEN_AI_IMAGE",
 /** Roboflow Image */
 RoboflowImage = "ROBOFLOW_IMAGE"
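Entity extraction gains a provider-agnostic MODEL_IMAGE path: the connector now takes modelImage properties that point at an LLM specification, the OpenAI-specific openAIImage input is removed, and the OPEN_AI_IMAGE enum member survives only as a deprecated alias. A hedged sketch of the new input shape; the import path, the { id } shape of EntityReferenceInput, and the specification id are assumptions or placeholders:

import { EntityExtractionConnectorInput, EntityExtractionServiceTypes } from 'graphlit-client'; // assumed re-export

const extraction: EntityExtractionConnectorInput = {
    // MODEL_IMAGE replaces the removed openAIImage input for image entity extraction.
    type: EntityExtractionServiceTypes.ModelImage,
    modelImage: {
        specification: { id: 'llm-specification-id' }, // assumed: reference to an existing vision-capable specification
    },
};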
@@ -3602,6 +3635,28 @@ export declare enum ImageProjectionTypes {
 /** Equirectangular mage projection */
 Equirectangular = "EQUIRECTANGULAR"
 }
+/** Represents a indexing workflow job. */
+export type IndexingWorkflowJob = {
+__typename?: 'IndexingWorkflowJob';
+/** The content indexing connector. */
+connector?: Maybe<ContentIndexingConnector>;
+};
+/** Represents a indexing workflow job. */
+export type IndexingWorkflowJobInput = {
+/** The content indexing connector. */
+connector?: InputMaybe<ContentIndexingConnectorInput>;
+};
+/** Represents the indexing workflow stage. */
+export type IndexingWorkflowStage = {
+__typename?: 'IndexingWorkflowStage';
+/** The jobs for the indexing workflow stage. */
+jobs?: Maybe<Array<Maybe<IndexingWorkflowJob>>>;
+};
+/** Represents the indexing workflow stage. */
+export type IndexingWorkflowStageInput = {
+/** The jobs for the indexing workflow stage. */
+jobs?: InputMaybe<Array<InputMaybe<IndexingWorkflowJobInput>>>;
+};
 /** Represents an ingestion content filter. */
 export type IngestionContentFilter = {
 __typename?: 'IngestionContentFilter';
@@ -3872,6 +3927,17 @@ export type LabelUpdateInput = {
 /** The name of the label. */
 name?: InputMaybe<Scalars['String']['input']>;
 };
+/** Represents language metadata. */
+export type LanguageMetadata = {
+__typename?: 'LanguageMetadata';
+/** The content language(s) in ISO 639-1 format, i.e. 'en'. */
+languages?: Maybe<Array<Maybe<Scalars['String']['output']>>>;
+};
+/** Represents language metadata. */
+export type LanguageMetadataInput = {
+/** The content language(s) in ISO 639-1 format, i.e. 'en'. */
+languages?: InputMaybe<Array<InputMaybe<Scalars['String']['input']>>>;
+};
 /** Represents Linear feed properties. */
 export type LinearFeedProperties = {
 __typename?: 'LinearFeedProperties';
@@ -4342,6 +4408,17 @@ export declare enum MistralModels {
 /** Mixtral 8x7b Instruct */
 Mixtral_8X7BInstruct = "MIXTRAL_8X7B_INSTRUCT"
 }
+/** Represents an LLM image entity extraction connector. */
+export type ModelImageExtractionProperties = {
+__typename?: 'ModelImageExtractionProperties';
+/** The LLM specification used for entity extraction. */
+specification?: Maybe<EntityReference>;
+};
+/** Represents an LLM image entity extraction connector. */
+export type ModelImageExtractionPropertiesInput = {
+/** The LLM specification used for entity extraction. */
+specification?: InputMaybe<EntityReferenceInput>;
+};
 /** Model service type */
 export declare enum ModelServiceTypes {
 /** Anthropic */
@@ -5351,6 +5428,8 @@ export type OpenAiModelProperties = {
 __typename?: 'OpenAIModelProperties';
 /** The limit of tokens generated by prompt completion. */
 completionTokenLimit?: Maybe<Scalars['Int']['output']>;
+/** The OpenAI vision detail mode. Only applies when using OpenAI for image analysis. */
+detailLevel?: Maybe<OpenAiVisionDetailLevels>;
 /** The OpenAI API key, if using developer's own account. */
 key?: Maybe<Scalars['String']['output']>;
 /** The OpenAI model, or custom, when using developer's own account. */
@@ -5368,6 +5447,8 @@ export type OpenAiModelProperties = {
 export type OpenAiModelPropertiesInput = {
 /** The limit of tokens generated by prompt completion. */
 completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
+/** The OpenAI vision detail mode. Only applies when using OpenAI for image analysis. */
+detailLevel?: InputMaybe<OpenAiVisionDetailLevels>;
 /** The OpenAI API key, if using developer's own account. */
 key?: InputMaybe<Scalars['String']['input']>;
 /** The OpenAI model, or custom, when using developer's own account. */
@@ -5385,6 +5466,8 @@ export type OpenAiModelPropertiesInput = {
 export type OpenAiModelPropertiesUpdateInput = {
 /** The limit of tokens generated by prompt completion. */
 completionTokenLimit?: InputMaybe<Scalars['Int']['input']>;
+/** The OpenAI vision detail mode. Only applies when using OpenAI for image analysis. */
+detailLevel?: InputMaybe<OpenAiVisionDetailLevels>;
 /** The OpenAI API key, if using developer's own account. */
 key?: InputMaybe<Scalars['String']['input']>;
 /** The Azure OpenAI model, or custom, when using developer's own account. */
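All three OpenAI property shapes pick up an optional detailLevel whose doc comment limits it to image analysis. A sketch using Partial to sidestep the fields these hunks do not show; the High member of the pre-existing OpenAiVisionDetailLevels enum is an assumption, since its members are not part of this diff:

import { OpenAiModelPropertiesInput, OpenAiVisionDetailLevels } from 'graphlit-client'; // assumed re-export

// Only the new field is shown; merge into a full OpenAiModelPropertiesInput when building a specification.
const visionDetail: Partial<OpenAiModelPropertiesInput> = {
    detailLevel: OpenAiVisionDetailLevels.High, // assumed enum member; ignored outside image analysis
};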
@@ -6429,6 +6512,8 @@ export type ProjectCredits = {
 enrichmentRatio?: Maybe<Scalars['Decimal']['output']>;
 /** The content extraction ratio of credits. */
 extractionRatio?: Maybe<Scalars['Decimal']['output']>;
+/** The content indexing ratio of credits. */
+indexingRatio?: Maybe<Scalars['Decimal']['output']>;
 /** The tenant identifier. */
 ownerId?: Maybe<Scalars['ID']['output']>;
 /** The content preparation ratio of credits. */
@@ -8578,6 +8663,8 @@ export type Workflow = {
 extraction?: Maybe<ExtractionWorkflowStage>;
 /** The ID of the workflow. */
 id: Scalars['ID']['output'];
+/** The indexing stage of the content workflow. */
+indexing?: Maybe<IndexingWorkflowStage>;
 /** The ingestion stage of the content workflow. */
 ingestion?: Maybe<IngestionWorkflowStage>;
 /** The modified date of the workflow. */
@@ -8635,6 +8722,8 @@ export type WorkflowInput = {
 enrichment?: InputMaybe<EnrichmentWorkflowStageInput>;
 /** The extraction stage of the content workflow. */
 extraction?: InputMaybe<ExtractionWorkflowStageInput>;
+/** The indexing stage of the content workflow. */
+indexing?: InputMaybe<IndexingWorkflowStageInput>;
 /** The ingestion stage of the content workflow. */
 ingestion?: InputMaybe<IngestionWorkflowStageInput>;
 /** The name of the workflow. */
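Workflow, WorkflowInput, and WorkflowUpdateInput (next hunk) all gain the indexing stage built from the IndexingWorkflowStageInput and ContentIndexingConnectorInput types above, and the CreateWorkflow/GetWorkflow/QueryWorkflows/UpdateWorkflow documents in this release select the same fields back. A minimal sketch of a workflow input that runs the Azure AI Language indexer over everything it ingests; the import path and the required name field are assumptions:

import { ContentIndexingServiceTypes, WorkflowInput } from 'graphlit-client'; // assumed re-export

const workflow: WorkflowInput = {
    name: 'Indexing workflow', // assumed required on WorkflowInput
    // New stage in 1.0.20240803001; the connector's contentType/fileType filters are optional and omitted here.
    indexing: {
        jobs: [
            {
                connector: { type: ContentIndexingServiceTypes.AzureAiLanguage },
            },
        ],
    },
};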
@@ -8660,6 +8749,8 @@ export type WorkflowUpdateInput = {
 extraction?: InputMaybe<ExtractionWorkflowStageInput>;
 /** The ID of the workflow to update. */
 id: Scalars['ID']['input'];
+/** The indexing stage of the content workflow. */
+indexing?: InputMaybe<IndexingWorkflowStageInput>;
 /** The ingestion stage of the content workflow. */
 ingestion?: InputMaybe<IngestionWorkflowStageInput>;
 /** The name of the workflow. */
@@ -9507,7 +9598,6 @@ export type GetContentQuery = {
 season?: string | null;
 publisher?: string | null;
 copyright?: string | null;
-language?: string | null;
 genre?: string | null;
 title?: string | null;
 description?: string | null;
@@ -9611,6 +9701,10 @@ export type GetContentQuery = {
 folderCount?: number | null;
 isEncrypted?: boolean | null;
 } | null;
+language?: {
+__typename?: 'LanguageMetadata';
+languages?: Array<string | null> | null;
+} | null;
 parent?: {
 __typename?: 'Content';
 id: string;
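On the read side, the string-valued language field disappears from the audio metadata selection and a LanguageMetadata object appears on the content itself, so consumers move from the old per-audio string to content.language?.languages. A sketch against the regenerated GetContentQuery shape; the content root field name and import path are assumptions consistent with the hunk above:

import { GetContentQuery } from 'graphlit-client'; // assumed re-export

function contentLanguages(result: GetContentQuery): string[] {
    // languages is now an ISO 639-1 string array (possibly containing nulls) rather than a single string.
    return result.content?.language?.languages?.filter((code): code is string => code !== null) ?? [];
}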
@@ -9934,7 +10028,6 @@ export type QueryContentsQuery = {
 season?: string | null;
 publisher?: string | null;
 copyright?: string | null;
-language?: string | null;
 genre?: string | null;
 title?: string | null;
 description?: string | null;
@@ -10038,6 +10131,10 @@ export type QueryContentsQuery = {
 folderCount?: number | null;
 isEncrypted?: boolean | null;
 } | null;
+language?: {
+__typename?: 'LanguageMetadata';
+languages?: Array<string | null> | null;
+} | null;
 parent?: {
 __typename?: 'Content';
 id: string;
@@ -10206,7 +10303,6 @@ export type QueryContentsFacetsQuery = {
 season?: string | null;
 publisher?: string | null;
 copyright?: string | null;
-language?: string | null;
 genre?: string | null;
 title?: string | null;
 description?: string | null;
@@ -10310,6 +10406,10 @@ export type QueryContentsFacetsQuery = {
 folderCount?: number | null;
 isEncrypted?: boolean | null;
 } | null;
+language?: {
+__typename?: 'LanguageMetadata';
+languages?: Array<string | null> | null;
+} | null;
 parent?: {
 __typename?: 'Content';
 id: string;
@@ -10667,7 +10767,6 @@ export type GetConversationQuery = {
 season?: string | null;
 publisher?: string | null;
 copyright?: string | null;
-language?: string | null;
 genre?: string | null;
 title?: string | null;
 description?: string | null;
@@ -10899,7 +10998,6 @@ export type PromptConversationMutation = {
 season?: string | null;
 publisher?: string | null;
 copyright?: string | null;
-language?: string | null;
 genre?: string | null;
 title?: string | null;
 description?: string | null;
@@ -11110,7 +11208,6 @@ export type QueryConversationsQuery = {
 season?: string | null;
 publisher?: string | null;
 copyright?: string | null;
-language?: string | null;
 genre?: string | null;
 title?: string | null;
 description?: string | null;
@@ -12681,6 +12778,7 @@ export type LookupCreditsQuery = {
 credits?: any | null;
 storageRatio?: any | null;
 computeRatio?: any | null;
+indexingRatio?: any | null;
 preparationRatio?: any | null;
 extractionRatio?: any | null;
 enrichmentRatio?: any | null;
@@ -12741,6 +12839,7 @@ export type QueryCreditsQuery = {
 credits?: any | null;
 storageRatio?: any | null;
 computeRatio?: any | null;
+indexingRatio?: any | null;
 preparationRatio?: any | null;
 extractionRatio?: any | null;
 enrichmentRatio?: any | null;
@@ -13177,6 +13276,38 @@ export type GetSpecificationQuery = {
 temperature?: number | null;
 probability?: number | null;
 } | null;
+mistral?: {
+__typename?: 'MistralModelProperties';
+tokenLimit?: number | null;
+completionTokenLimit?: number | null;
+model: MistralModels;
+key?: string | null;
+modelName?: string | null;
+endpoint?: any | null;
+temperature?: number | null;
+probability?: number | null;
+} | null;
+groq?: {
+__typename?: 'GroqModelProperties';
+tokenLimit?: number | null;
+completionTokenLimit?: number | null;
+model: GroqModels;
+key?: string | null;
+modelName?: string | null;
+endpoint?: any | null;
+temperature?: number | null;
+probability?: number | null;
+} | null;
+deepseek?: {
+__typename?: 'DeepseekModelProperties';
+tokenLimit?: number | null;
+completionTokenLimit?: number | null;
+model: DeepseekModels;
+key?: string | null;
+modelName?: string | null;
+temperature?: number | null;
+probability?: number | null;
+} | null;
 tools?: Array<{
 __typename?: 'ToolDefinition';
 name: string;
@@ -13269,7 +13400,6 @@ export type PromptSpecificationsMutation = {
 season?: string | null;
 publisher?: string | null;
 copyright?: string | null;
-language?: string | null;
 genre?: string | null;
 title?: string | null;
 description?: string | null;
@@ -13434,6 +13564,38 @@ export type QuerySpecificationsQuery = {
 temperature?: number | null;
 probability?: number | null;
 } | null;
+mistral?: {
+__typename?: 'MistralModelProperties';
+tokenLimit?: number | null;
+completionTokenLimit?: number | null;
+model: MistralModels;
+key?: string | null;
+modelName?: string | null;
+endpoint?: any | null;
+temperature?: number | null;
+probability?: number | null;
+} | null;
+groq?: {
+__typename?: 'GroqModelProperties';
+tokenLimit?: number | null;
+completionTokenLimit?: number | null;
+model: GroqModels;
+key?: string | null;
+modelName?: string | null;
+endpoint?: any | null;
+temperature?: number | null;
+probability?: number | null;
+} | null;
+deepseek?: {
+__typename?: 'DeepseekModelProperties';
+tokenLimit?: number | null;
+completionTokenLimit?: number | null;
+model: DeepseekModels;
+key?: string | null;
+modelName?: string | null;
+temperature?: number | null;
+probability?: number | null;
+} | null;
 tools?: Array<{
 __typename?: 'ToolDefinition';
 name: string;
@@ -13492,6 +13654,18 @@ export type CreateWorkflowMutation = {
 id: string;
 } | null> | null;
 } | null;
+indexing?: {
+__typename?: 'IndexingWorkflowStage';
+jobs?: Array<{
+__typename?: 'IndexingWorkflowJob';
+connector?: {
+__typename?: 'ContentIndexingConnector';
+type?: ContentIndexingServiceTypes | null;
+contentType?: ContentTypes | null;
+fileType?: FileTypes | null;
+} | null;
+} | null> | null;
+} | null;
 preparation?: {
 __typename?: 'PreparationWorkflowStage';
 disableSmartCapture?: boolean | null;
@@ -13563,6 +13737,13 @@ export type CreateWorkflowMutation = {
 detailLevel?: OpenAiVisionDetailLevels | null;
 customInstructions?: string | null;
 } | null;
+modelImage?: {
+__typename?: 'ModelImageExtractionProperties';
+specification?: {
+__typename?: 'EntityReference';
+id: string;
+} | null;
+} | null;
 modelText?: {
 __typename?: 'ModelTextExtractionProperties';
 specification?: {
@@ -13686,6 +13867,18 @@ export type GetWorkflowQuery = {
 id: string;
 } | null> | null;
 } | null;
+indexing?: {
+__typename?: 'IndexingWorkflowStage';
+jobs?: Array<{
+__typename?: 'IndexingWorkflowJob';
+connector?: {
+__typename?: 'ContentIndexingConnector';
+type?: ContentIndexingServiceTypes | null;
+contentType?: ContentTypes | null;
+fileType?: FileTypes | null;
+} | null;
+} | null> | null;
+} | null;
 preparation?: {
 __typename?: 'PreparationWorkflowStage';
 disableSmartCapture?: boolean | null;
@@ -13757,6 +13950,13 @@ export type GetWorkflowQuery = {
 detailLevel?: OpenAiVisionDetailLevels | null;
 customInstructions?: string | null;
 } | null;
+modelImage?: {
+__typename?: 'ModelImageExtractionProperties';
+specification?: {
+__typename?: 'EntityReference';
+id: string;
+} | null;
+} | null;
 modelText?: {
 __typename?: 'ModelTextExtractionProperties';
 specification?: {
@@ -13846,6 +14046,18 @@ export type QueryWorkflowsQuery = {
 id: string;
 } | null> | null;
 } | null;
+indexing?: {
+__typename?: 'IndexingWorkflowStage';
+jobs?: Array<{
+__typename?: 'IndexingWorkflowJob';
+connector?: {
+__typename?: 'ContentIndexingConnector';
+type?: ContentIndexingServiceTypes | null;
+contentType?: ContentTypes | null;
+fileType?: FileTypes | null;
+} | null;
+} | null> | null;
+} | null;
 preparation?: {
 __typename?: 'PreparationWorkflowStage';
 disableSmartCapture?: boolean | null;
@@ -13917,6 +14129,13 @@ export type QueryWorkflowsQuery = {
 detailLevel?: OpenAiVisionDetailLevels | null;
 customInstructions?: string | null;
 } | null;
+modelImage?: {
+__typename?: 'ModelImageExtractionProperties';
+specification?: {
+__typename?: 'EntityReference';
+id: string;
+} | null;
+} | null;
 modelText?: {
 __typename?: 'ModelTextExtractionProperties';
 specification?: {
@@ -13999,6 +14218,18 @@ export type UpdateWorkflowMutation = {
 id: string;
 } | null> | null;
 } | null;
+indexing?: {
+__typename?: 'IndexingWorkflowStage';
+jobs?: Array<{
+__typename?: 'IndexingWorkflowJob';
+connector?: {
+__typename?: 'ContentIndexingConnector';
+type?: ContentIndexingServiceTypes | null;
+contentType?: ContentTypes | null;
+fileType?: FileTypes | null;
+} | null;
+} | null> | null;
+} | null;
 preparation?: {
 __typename?: 'PreparationWorkflowStage';
 disableSmartCapture?: boolean | null;
@@ -14070,6 +14301,13 @@ export type UpdateWorkflowMutation = {
 detailLevel?: OpenAiVisionDetailLevels | null;
 customInstructions?: string | null;
 } | null;
+modelImage?: {
+__typename?: 'ModelImageExtractionProperties';
+specification?: {
+__typename?: 'EntityReference';
+id: string;
+} | null;
+} | null;
 modelText?: {
 __typename?: 'ModelTextExtractionProperties';
 specification?: {
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-exports.YouTubeTypes = exports.UnitTypes = exports.TimedPolicyRecurrenceTypes = exports.TimeIntervalTypes = exports.TextTypes = exports.TextRoles = exports.SummarizationTypes = exports.SpecificationTypes = exports.SoftwareFacetTypes = exports.SiteTypes = exports.SharePointAuthenticationTypes = exports.SearchTypes = exports.SearchQueryTypes = exports.RevisionStrategyTypes = exports.RetrievalStrategyTypes = exports.ResourceConnectorTypes = exports.RerankingModelServiceTypes = exports.RepoFacetTypes = exports.ReplicateModels = exports.RenditionTypes = exports.PromptStrategyTypes = exports.ProductFacetTypes = exports.PolicyTimeTypes = exports.PlaceFacetTypes = exports.PersonFacetTypes = exports.OrientationTypes = exports.OrganizationFacetTypes = exports.OrderDirectionTypes = exports.OrderByTypes = exports.OperationTypes = exports.OpenAiVisionDetailLevels = exports.OpenAiModels = exports.OccurrenceTypes = exports.ObservableTypes = void 0;
+exports.ModelServiceTypes = exports.MistralModels = exports.MetadataTypes = exports.MailSensitivity = exports.MailPriority = exports.MailImportance = exports.LinkTypes = exports.LabelFacetTypes = exports.IntegrationServiceTypes = exports.ImageProjectionTypes = exports.H3ResolutionTypes = exports.GroqModels = exports.GraphStrategyTypes = exports.FileTypes = exports.FilePreparationServiceTypes = exports.FeedTypes = exports.FeedServiceTypes = exports.FeedListingTypes = exports.FeedConnectorTypes = exports.FacetValueTypes = exports.EventFacetTypes = exports.EnvironmentTypes = exports.EntityTypes = exports.EntityState = exports.EntityExtractionServiceTypes = exports.EntityEnrichmentServiceTypes = exports.EmailListingTypes = exports.ElevenLabsModels = exports.DeviceTypes = exports.DeepseekModels = exports.DeepgramModels = exports.ConversationTypes = exports.ConversationStrategyTypes = exports.ConversationSearchTypes = exports.ConversationRoleTypes = exports.ContentTypes = exports.ContentPublishingServiceTypes = exports.ContentPublishingFormats = exports.ContentIndexingServiceTypes = exports.ContentFacetTypes = exports.CollectionTypes = exports.CohereModels = exports.CategoryFacetTypes = exports.BillableMetrics = exports.AzureOpenAiModels = exports.AzureDocumentIntelligenceVersions = exports.AzureDocumentIntelligenceModels = exports.ApplyPolicy = exports.AnthropicModels = exports.AlertTypes = void 0;
+exports.YouTubeTypes = exports.UnitTypes = exports.TimedPolicyRecurrenceTypes = exports.TimeIntervalTypes = exports.TextTypes = exports.TextRoles = exports.SummarizationTypes = exports.SpecificationTypes = exports.SoftwareFacetTypes = exports.SiteTypes = exports.SharePointAuthenticationTypes = exports.SearchTypes = exports.SearchQueryTypes = exports.RevisionStrategyTypes = exports.RetrievalStrategyTypes = exports.ResourceConnectorTypes = exports.RerankingModelServiceTypes = exports.RepoFacetTypes = exports.ReplicateModels = exports.RenditionTypes = exports.PromptStrategyTypes = exports.ProductFacetTypes = exports.PolicyTimeTypes = exports.PlaceFacetTypes = exports.PersonFacetTypes = exports.OrientationTypes = exports.OrganizationFacetTypes = exports.OrderDirectionTypes = exports.OrderByTypes = exports.OperationTypes = exports.OpenAiVisionDetailLevels = exports.OpenAiModels = exports.OccurrenceTypes = exports.ObservableTypes = exports.NotionTypes = void 0;
 /** Alert type */
 var AlertTypes;
 (function (AlertTypes) {
@@ -187,6 +187,11 @@ var ContentFacetTypes;
 /** Video Software */
 ContentFacetTypes["VideoSoftware"] = "VIDEO_SOFTWARE";
 })(ContentFacetTypes || (exports.ContentFacetTypes = ContentFacetTypes = {}));
+var ContentIndexingServiceTypes;
+(function (ContentIndexingServiceTypes) {
+/** Azure AI Language */
+ContentIndexingServiceTypes["AzureAiLanguage"] = "AZURE_AI_LANGUAGE";
+})(ContentIndexingServiceTypes || (exports.ContentIndexingServiceTypes = ContentIndexingServiceTypes = {}));
 var ContentPublishingFormats;
 (function (ContentPublishingFormats) {
 /** HTML */
@@ -352,13 +357,18 @@ var EntityEnrichmentServiceTypes;
 /** Entity extraction service type */
 var EntityExtractionServiceTypes;
 (function (EntityExtractionServiceTypes) {
-/** Azure Cognitive Services Image */
+/** Azure AI Vision, fka Azure Cognitive Services Image */
 EntityExtractionServiceTypes["AzureCognitiveServicesImage"] = "AZURE_COGNITIVE_SERVICES_IMAGE";
-/** Azure Cognitive Services Text */
+/** Azure AI Language, fka Azure Cognitive Services Text */
 EntityExtractionServiceTypes["AzureCognitiveServicesText"] = "AZURE_COGNITIVE_SERVICES_TEXT";
+/** LLM Image */
+EntityExtractionServiceTypes["ModelImage"] = "MODEL_IMAGE";
 /** LLM Text */
 EntityExtractionServiceTypes["ModelText"] = "MODEL_TEXT";
-/**
+/**
+* OpenAI Image
+* @deprecated Use MODEL_IMAGE instead.
+*/
 EntityExtractionServiceTypes["OpenAiImage"] = "OPEN_AI_IMAGE";
 /** Roboflow Image */
 EntityExtractionServiceTypes["RoboflowImage"] = "ROBOFLOW_IMAGE";