graphlit-client 1.0.20250420001 → 1.0.20250423001
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -191,6 +191,9 @@ exports.GetAlert = (0, graphql_tag_1.default) `
         model
         voice
       }
+      openAIImage {
+        model
+      }
     }
     summarySpecification {
       id
@@ -323,6 +326,9 @@ exports.QueryAlerts = (0, graphql_tag_1.default) `
         model
         voice
       }
+      openAIImage {
+        model
+      }
     }
     summarySpecification {
       id
@@ -7387,6 +7393,7 @@ exports.LookupCredits = (0, graphql_tag_1.default) `
     computeRatio
     embeddingRatio
     completionRatio
+    generationRatio
     ingestionRatio
     indexingRatio
     preparationRatio
@@ -7445,6 +7452,7 @@ exports.QueryCredits = (0, graphql_tag_1.default) `
     computeRatio
     embeddingRatio
     completionRatio
+    generationRatio
     ingestionRatio
     indexingRatio
     preparationRatio
@@ -844,7 +844,7 @@ export type AzureOpenAiModelProperties = {
   probability?: Maybe<Scalars['Float']['output']>;
   /** The model temperature. */
   temperature?: Maybe<Scalars['Float']['output']>;
-  /** The number of tokens which can provided to the OpenAI model, if using developer's own account. */
+  /** The number of tokens which can provided to the OpenAI-compatible model, if using developer's own account. */
   tokenLimit?: Maybe<Scalars['Int']['output']>;
 };
 /** Represents Azure OpenAI model properties. */
@@ -1927,45 +1927,59 @@ export type ContentInput = {
 /** Represents a content publishing connector. */
 export type ContentPublishingConnector = {
   __typename?: 'ContentPublishingConnector';
-  /** The specific properties for ElevenLabs publishing. */
+  /** The specific properties for ElevenLabs Audio publishing. */
   elevenLabs?: Maybe<ElevenLabsPublishingProperties>;
   /** The content publishing format, i.e. MP3, Markdown. */
   format: ContentPublishingFormats;
+  /** The specific properties for OpenAI Image publishing. */
+  openAIImage?: Maybe<OpenAiImagePublishingProperties>;
   /** The content publishing service type. */
   type: ContentPublishingServiceTypes;
 };
 /** Represents a content publishing connector. */
 export type ContentPublishingConnectorInput = {
-  /** The specific properties for ElevenLabs publishing. */
+  /** The specific properties for ElevenLabs Audio publishing. */
   elevenLabs?: InputMaybe<ElevenLabsPublishingPropertiesInput>;
   /** The content publishing format, i.e. MP3, Markdown. */
   format: ContentPublishingFormats;
+  /** The specific properties for OpenAI Image publishing. */
+  openAIImage?: InputMaybe<OpenAiImagePublishingPropertiesInput>;
   /** The content publishing service type. */
   type: ContentPublishingServiceTypes;
 };
 /** Represents a content publishing connector. */
 export type ContentPublishingConnectorUpdateInput = {
-  /** The specific properties for ElevenLabs publishing. */
+  /** The specific properties for ElevenLabs Audio publishing. */
   elevenLabs?: InputMaybe<ElevenLabsPublishingPropertiesInput>;
   /** The content publishing format, i.e. MP3, Markdown. */
   format?: InputMaybe<ContentPublishingFormats>;
+  /** The specific properties for OpenAI Image publishing. */
+  openAIImage?: InputMaybe<OpenAiImagePublishingPropertiesInput>;
   /** The content publishing service type. */
   type?: InputMaybe<ContentPublishingServiceTypes>;
 };
 export declare enum ContentPublishingFormats {
   /** HTML */
   Html = "HTML",
+  /** JPEG */
+  Jpeg = "JPEG",
   /** Markdown */
   Markdown = "MARKDOWN",
   /** MP3 */
   Mp3 = "MP3",
+  /** PNG */
+  Png = "PNG",
   /** Plain Text */
-  Text = "TEXT"
+  Text = "TEXT",
+  /** WEBP */
+  Webp = "WEBP"
 }
 /** Content publishing service type */
 export declare enum ContentPublishingServiceTypes {
   /** ElevenLabs Audio publishing */
   ElevenLabsAudio = "ELEVEN_LABS_AUDIO",
+  /** OpenAI Image publishing */
+  OpenAiImage = "OPEN_AI_IMAGE",
   /** Text publishing */
   Text = "TEXT"
 }
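
The publishing connector additions above are the core of this release: a new OPEN_AI_IMAGE publishing service, image output formats (JPEG, PNG, WEBP), and an optional openAIImage property carrying the image model. As a rough sketch of how a caller might populate the new input fields — the type and enum names come from the declarations above, while the import path and variable name are assumptions and not part of this diff:

// Import path is an assumption about where the generated types are exposed.
import {
  ContentPublishingConnectorInput,
  ContentPublishingFormats,
  ContentPublishingServiceTypes,
  OpenAiImageModels,
} from "graphlit-client/dist/generated/graphql-types";

// Hypothetical connector input: publish content as a PNG rendered by GPT Image-1.
const publishing: ContentPublishingConnectorInput = {
  type: ContentPublishingServiceTypes.OpenAiImage,
  format: ContentPublishingFormats.Png,
  openAIImage: { model: OpenAiImageModels.GptImage_1 },
};

The openAIImage field is optional (InputMaybe) on all three connector input types, so existing ElevenLabs Audio and Text publishing inputs remain valid unchanged.
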
@@ -2860,7 +2874,7 @@ export declare enum ElevenLabsModels {
    */
   TurboV2_5 = "TURBO_V2_5"
 }
-/** Represents the ElevenLabs publishing properties. */
+/** Represents the ElevenLabs Audio publishing properties. */
 export type ElevenLabsPublishingProperties = {
   __typename?: 'ElevenLabsPublishingProperties';
   /** The ElevenLabs model. */
@@ -2868,7 +2882,7 @@ export type ElevenLabsPublishingProperties = {
   /** The ElevenLabs voice identifier. */
   voice?: Maybe<Scalars['String']['output']>;
 };
-/** Represents the ElevenLabs publishing properties. */
+/** Represents the ElevenLabs Audio publishing properties. */
 export type ElevenLabsPublishingPropertiesInput = {
   /** The ElevenLabs model. */
   model?: InputMaybe<ElevenLabsModels>;
@@ -9091,6 +9105,24 @@ export type OpenAiImageExtractionProperties = {
   /** The OpenAI vision detail mode. */
   detailLevel?: Maybe<OpenAiVisionDetailLevels>;
 };
+/** OpenAI Image model type */
+export declare enum OpenAiImageModels {
+  /** Developer-specified model */
+  Custom = "CUSTOM",
+  /** GPT Image-1 */
+  GptImage_1 = "GPT_IMAGE_1"
+}
+/** Represents the OpenAI Image publishing properties. */
+export type OpenAiImagePublishingProperties = {
+  __typename?: 'OpenAIImagePublishingProperties';
+  /** The OpenAI Image model. */
+  model?: Maybe<OpenAiImageModels>;
+};
+/** Represents the OpenAI Image publishing properties. */
+export type OpenAiImagePublishingPropertiesInput = {
+  /** The OpenAI Image model. */
+  model?: InputMaybe<OpenAiImageModels>;
+};
 /** Represents OpenAI model properties. */
 export type OpenAiModelProperties = {
   __typename?: 'OpenAIModelProperties';
@@ -9114,7 +9146,7 @@ export type OpenAiModelProperties = {
   reasoningEffort?: Maybe<OpenAiReasoningEffortLevels>;
   /** The model temperature. */
   temperature?: Maybe<Scalars['Float']['output']>;
-  /** The number of tokens which can provided to the OpenAI model, if using developer's own account. */
+  /** The number of tokens which can provided to the OpenAI-compatible model, if using developer's own account. */
   tokenLimit?: Maybe<Scalars['Int']['output']>;
 };
 /** Represents OpenAI model properties. */
@@ -9139,7 +9171,7 @@ export type OpenAiModelPropertiesInput = {
   reasoningEffort?: InputMaybe<OpenAiReasoningEffortLevels>;
   /** The model temperature. */
   temperature?: InputMaybe<Scalars['Float']['input']>;
-  /** The number of tokens which can provided to the OpenAI model, if using developer's own account. */
+  /** The number of tokens which can provided to the OpenAI-compatible model, if using developer's own account. */
   tokenLimit?: InputMaybe<Scalars['Int']['input']>;
 };
 /** Represents OpenAI model properties. */
@@ -9164,7 +9196,7 @@ export type OpenAiModelPropertiesUpdateInput = {
   reasoningEffort?: InputMaybe<OpenAiReasoningEffortLevels>;
   /** The model temperature. */
   temperature?: InputMaybe<Scalars['Float']['input']>;
-  /** The number of tokens which can provided to the OpenAI model, if using developer's own account. */
+  /** The number of tokens which can provided to the OpenAI-compatible model, if using developer's own account. */
   tokenLimit?: InputMaybe<Scalars['Int']['input']>;
 };
 /** OpenAI model type */
@@ -10319,6 +10351,8 @@ export type ProjectCredits = {
   enrichmentRatio?: Maybe<Scalars['Decimal']['output']>;
   /** The content extraction ratio of credits. */
   extractionRatio?: Maybe<Scalars['Decimal']['output']>;
+  /** The LLM generation ratio of credits. */
+  generationRatio?: Maybe<Scalars['Decimal']['output']>;
   /** The content indexing ratio of credits. */
   indexingRatio?: Maybe<Scalars['Decimal']['output']>;
   /** The content ingestion ratio of credits. */
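
ProjectCredits gains a generationRatio alongside the existing ratios, and the same field is requested by the LookupCredits and QueryCredits operations shown earlier. A minimal sketch of reading it defensively — the Decimal scalar surfaces as a loosely typed value in the generated result types, so coerce before doing arithmetic; the import path is an assumption:

import { ProjectCredits } from "graphlit-client/dist/generated/graphql-types";

// Normalize the Decimal-backed ratio to a number, or undefined when absent.
function generationRatio(credits: ProjectCredits): number | undefined {
  const value = credits.generationRatio;
  return value == null ? undefined : Number(value);
}
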
@@ -13783,6 +13817,10 @@ export type GetAlertQuery = {
       model?: ElevenLabsModels | null;
       voice?: string | null;
     } | null;
+    openAIImage?: {
+      __typename?: 'OpenAIImagePublishingProperties';
+      model?: OpenAiImageModels | null;
+    } | null;
   };
   summarySpecification?: {
     __typename?: 'EntityReference';
@@ -13953,6 +13991,10 @@ export type QueryAlertsQuery = {
       model?: ElevenLabsModels | null;
       voice?: string | null;
     } | null;
+    openAIImage?: {
+      __typename?: 'OpenAIImagePublishingProperties';
+      model?: OpenAiImageModels | null;
+    } | null;
   };
   summarySpecification?: {
     __typename?: 'EntityReference';
@@ -22267,6 +22309,7 @@ export type LookupCreditsQuery = {
     computeRatio?: any | null;
     embeddingRatio?: any | null;
     completionRatio?: any | null;
+    generationRatio?: any | null;
     ingestionRatio?: any | null;
     indexingRatio?: any | null;
     preparationRatio?: any | null;
@@ -22332,6 +22375,7 @@ export type QueryCreditsQuery = {
     computeRatio?: any | null;
     embeddingRatio?: any | null;
     completionRatio?: any | null;
+    generationRatio?: any | null;
     ingestionRatio?: any | null;
     indexingRatio?: any | null;
     preparationRatio?: any | null;
@@ -1,8 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.IntegrationServiceTypes = exports.ImageProjectionTypes = exports.H3ResolutionTypes = exports.GroqModels = exports.GraphStrategyTypes = exports.GoogleModels = exports.GoogleDriveAuthenticationTypes = exports.FileTypes = exports.FilePreparationServiceTypes = exports.FeedTypes = exports.FeedServiceTypes = exports.FeedListingTypes = exports.FeedConnectorTypes = exports.FacetValueTypes = exports.EventFacetTypes = exports.EnvironmentTypes = exports.EntityTypes = exports.EntityState = exports.EntityExtractionServiceTypes = exports.EntityEnrichmentServiceTypes = exports.EmailListingTypes = exports.ElevenLabsModels = exports.DeviceTypes = exports.DeepseekModels = exports.DeepgramModels = exports.ConversationTypes = exports.ConversationStrategyTypes = exports.ConversationSearchTypes = exports.ConversationRoleTypes = exports.ContentTypes = exports.ContentSourceTypes = exports.ContentPublishingServiceTypes = exports.ContentPublishingFormats = exports.ContentIndexingServiceTypes = exports.ContentFacetTypes = exports.ContentClassificationServiceTypes = exports.ConnectorTypes = exports.CollectionTypes = exports.CohereModels = exports.CerebrasModels = exports.CategoryFacetTypes = exports.BillableMetrics = exports.AzureOpenAiModels = exports.AzureDocumentIntelligenceVersions = exports.AzureDocumentIntelligenceModels = exports.AuthenticationServiceTypes = exports.AssemblyAiModels = exports.ApplyPolicy = exports.AnthropicModels = exports.AlertTypes = void 0;
-exports.
-exports.YouTubeTypes = exports.VoyageModels = exports.UserTypes = exports.UnitTypes = exports.TwitterListingTypes = exports.TrelloTypes = exports.TimedPolicyRecurrenceTypes = exports.TimeIntervalTypes = exports.TextTypes = exports.TextRoles = exports.SummarizationTypes = exports.StoragePolicyTypes = exports.SpecificationTypes = exports.SoftwareFacetTypes = exports.SiteTypes = void 0;
+exports.SearchTypes = exports.SearchServiceTypes = exports.SearchQueryTypes = exports.SdkTypes = exports.RevisionStrategyTypes = exports.RetrievalStrategyTypes = exports.ResourceConnectorTypes = exports.RerankingModelServiceTypes = exports.RepoFacetTypes = exports.ReplicateModels = exports.RenditionTypes = exports.RegexSourceTypes = exports.PromptStrategyTypes = exports.ProductFacetTypes = exports.PolicyTimeTypes = exports.PlaceFacetTypes = exports.PersonFacetTypes = exports.OrientationTypes = exports.OrganizationFacetTypes = exports.OrderDirectionTypes = exports.OrderByTypes = exports.OperationTypes = exports.OpenAiVisionDetailLevels = exports.OpenAiReasoningEffortLevels = exports.OpenAiModels = exports.OpenAiImageModels = exports.OccurrenceTypes = exports.ObservableTypes = exports.NotionTypes = exports.ModelTypes = exports.ModelServiceTypes = exports.MistralModels = exports.MetadataTypes = exports.MedicalTherapyFacetTypes = exports.MedicalTestFacetTypes = exports.MedicalStudyFacetTypes = exports.MedicalProcedureFacetTypes = exports.MedicalIndicationFacetTypes = exports.MedicalGuidelineFacetTypes = exports.MedicalDrugFacetTypes = exports.MedicalDrugClassFacetTypes = exports.MedicalDeviceFacetTypes = exports.MedicalContraindicationFacetTypes = exports.MedicalConditionFacetTypes = exports.MailSensitivity = exports.MailPriority = exports.MailImportance = exports.LinkTypes = exports.LabelFacetTypes = exports.JinaModels = void 0;
+exports.YouTubeTypes = exports.VoyageModels = exports.UserTypes = exports.UnitTypes = exports.TwitterListingTypes = exports.TrelloTypes = exports.TimedPolicyRecurrenceTypes = exports.TimeIntervalTypes = exports.TextTypes = exports.TextRoles = exports.SummarizationTypes = exports.StoragePolicyTypes = exports.SpecificationTypes = exports.SoftwareFacetTypes = exports.SiteTypes = exports.SharePointAuthenticationTypes = void 0;
 /** Alert type */
 var AlertTypes;
 (function (AlertTypes) {
@@ -302,18 +302,26 @@ var ContentPublishingFormats;
 (function (ContentPublishingFormats) {
     /** HTML */
     ContentPublishingFormats["Html"] = "HTML";
+    /** JPEG */
+    ContentPublishingFormats["Jpeg"] = "JPEG";
     /** Markdown */
     ContentPublishingFormats["Markdown"] = "MARKDOWN";
     /** MP3 */
     ContentPublishingFormats["Mp3"] = "MP3";
+    /** PNG */
+    ContentPublishingFormats["Png"] = "PNG";
     /** Plain Text */
     ContentPublishingFormats["Text"] = "TEXT";
+    /** WEBP */
+    ContentPublishingFormats["Webp"] = "WEBP";
 })(ContentPublishingFormats || (exports.ContentPublishingFormats = ContentPublishingFormats = {}));
 /** Content publishing service type */
 var ContentPublishingServiceTypes;
 (function (ContentPublishingServiceTypes) {
     /** ElevenLabs Audio publishing */
     ContentPublishingServiceTypes["ElevenLabsAudio"] = "ELEVEN_LABS_AUDIO";
+    /** OpenAI Image publishing */
+    ContentPublishingServiceTypes["OpenAiImage"] = "OPEN_AI_IMAGE";
     /** Text publishing */
     ContentPublishingServiceTypes["Text"] = "TEXT";
 })(ContentPublishingServiceTypes || (exports.ContentPublishingServiceTypes = ContentPublishingServiceTypes = {}));
@@ -1335,6 +1343,14 @@ var OccurrenceTypes;
     OccurrenceTypes["Text"] = "TEXT";
     OccurrenceTypes["Time"] = "TIME";
 })(OccurrenceTypes || (exports.OccurrenceTypes = OccurrenceTypes = {}));
+/** OpenAI Image model type */
+var OpenAiImageModels;
+(function (OpenAiImageModels) {
+    /** Developer-specified model */
+    OpenAiImageModels["Custom"] = "CUSTOM";
+    /** GPT Image-1 */
+    OpenAiImageModels["GptImage_1"] = "GPT_IMAGE_1";
+})(OpenAiImageModels || (exports.OpenAiImageModels = OpenAiImageModels = {}));
 /** OpenAI model type */
 var OpenAiModels;
 (function (OpenAiModels) {
|