@civitai/client 0.2.0-beta.53 → 0.2.0-beta.55

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,6 +2,7 @@ export {
2
2
  addWorkflowTag,
3
3
  deleteWorkflow,
4
4
  getBlob,
5
+ getBlobArchive,
5
6
  getBlobContent,
6
7
  getBlockedContent,
7
8
  getConsumerBlobUploadUrl,
@@ -15,6 +16,7 @@ export {
15
16
  invokeAceStepAudioStepTemplate,
16
17
  invokeAgeClassificationStepTemplate,
17
18
  invokeBatchOcrSafetyClassificationStepTemplate,
19
+ invokeBlobArchiveStepTemplate,
18
20
  invokeChatCompletionStepTemplate,
19
21
  invokeComfyStepTemplate,
20
22
  invokeConvertImageStepTemplate,
@@ -86,12 +88,18 @@ export {
86
88
  AnimalPoseEstimator,
87
89
  type AnimeRecognitionResult,
88
90
  AnylineMergeWith,
91
+ ArchiveFormat,
89
92
  type AssistantMessage,
90
93
  type AudioBlob,
91
94
  type BatchOcrSafetyClassificationInput,
92
95
  type BatchOcrSafetyClassificationOutput,
93
96
  type BatchOcrSafetyClassificationResult,
94
97
  type Blob,
98
+ type BlobArchiveEntry,
99
+ type BlobArchiveInput,
100
+ type BlobArchiveOutput,
101
+ type BlobArchiveStep,
102
+ type BlobArchiveStepTemplate,
95
103
  type BlurRegion,
96
104
  BlurRegionMode,
97
105
  type BlurTransform,
@@ -229,6 +237,10 @@ export {
229
237
  type Gemini25FlashEditImageGenInput,
230
238
  type Gemini25FlashImageGenInput,
231
239
  type GeminiImageGenInput,
240
+ type GetBlobArchiveData,
241
+ type GetBlobArchiveError,
242
+ type GetBlobArchiveErrors,
243
+ type GetBlobArchiveResponses,
232
244
  type GetBlobContentData,
233
245
  type GetBlobContentError,
234
246
  type GetBlobContentErrors,
@@ -277,6 +289,12 @@ export {
277
289
  type HaiperVideoGenInput,
278
290
  HaiperVideoGenModel,
279
291
  type HaiperVideoGenOutput,
292
+ type HappyHorseV1ImageToVideoInput,
293
+ type HappyHorseV1ReferenceToVideoInput,
294
+ type HappyHorseV1TextToVideoInput,
295
+ type HappyHorseV1VideoEditInput,
296
+ type HappyHorseV1VideoGenInput,
297
+ type HappyHorseVideoGenInput,
280
298
  type HeadBlobData,
281
299
  type HeadBlobError,
282
300
  type HeadBlobErrors,
@@ -338,6 +356,11 @@ export {
338
356
  type InvokeBatchOcrSafetyClassificationStepTemplateErrors,
339
357
  type InvokeBatchOcrSafetyClassificationStepTemplateResponse,
340
358
  type InvokeBatchOcrSafetyClassificationStepTemplateResponses,
359
+ type InvokeBlobArchiveStepTemplateData,
360
+ type InvokeBlobArchiveStepTemplateError,
361
+ type InvokeBlobArchiveStepTemplateErrors,
362
+ type InvokeBlobArchiveStepTemplateResponse,
363
+ type InvokeBlobArchiveStepTemplateResponses,
341
364
  type InvokeChatCompletionStepTemplateData,
342
365
  type InvokeChatCompletionStepTemplateError,
343
366
  type InvokeChatCompletionStepTemplateErrors,
@@ -574,6 +597,9 @@ export {
574
597
  type OpenAiGpt1CreateImageInput,
575
598
  type OpenAiGpt1EditImageInput,
576
599
  type OpenAiGpt1ImageGenInput,
600
+ type OpenAiGpt2CreateImageInput,
601
+ type OpenAiGpt2EditImageInput,
602
+ type OpenAiGpt2ImageGenInput,
577
603
  type OpenApiImageGenInput,
578
604
  OutputFormat,
579
605
  type PatchWorkflowData,
@@ -1,3 +1,3 @@
1
1
  // This file is auto-generated by @hey-api/openapi-ts
2
- export { addWorkflowTag, deleteWorkflow, getBlob, getBlobContent, getBlockedContent, getConsumerBlobUploadUrl, getResource, getStreamingBlob, getWorkflow, getWorkflowStep, headBlob, invalidateResource, invalidateUserCache, invokeAceStepAudioStepTemplate, invokeAgeClassificationStepTemplate, invokeBatchOcrSafetyClassificationStepTemplate, invokeChatCompletionStepTemplate, invokeComfyStepTemplate, invokeConvertImageStepTemplate, invokeEchoStepTemplate, invokeHumanoidImageMaskStepTemplate, invokeImageGenStepTemplate, invokeImageResourceTrainingStepTemplate, invokeImageUploadStepTemplate, invokeImageUpscalerStepTemplate, invokeMediaCaptioningStepTemplate, invokeMediaHashStepTemplate, invokeMediaRatingStepTemplate, invokeModelClamScanStepTemplate, invokeModelHashStepTemplate, invokeModelParseMetadataStepTemplate, invokeModelPickleScanStepTemplate, invokePreprocessImageStepTemplate, invokePromptEnhancementStepTemplate, invokeRepeatStepTemplate, invokeTextToImageStepTemplate, invokeTextToSpeechStepTemplate, invokeTrainingStepTemplate, invokeTranscodeStepTemplate, invokeTranscriptionStepTemplate, invokeTryOnUStepTemplate, invokeVideoEnhancementStepTemplate, invokeVideoFrameExtractionStepTemplate, invokeVideoGenStepTemplate, invokeVideoInterpolationStepTemplate, invokeVideoMetadataStepTemplate, invokeVideoUpscalerStepTemplate, invokeWdTaggingStepTemplate, invokeXGuardModerationStepTemplate, patchWorkflow, patchWorkflowStep, queryWorkflows, refreshBlob, removeAllWorkflowTags, removeWorkflowTag, submitWorkflow, updateWorkflow, updateWorkflowStep, uploadConsumerBlob, } from './sdk.gen';
3
- export { AnimalPoseBboxDetector, AnimalPoseEstimator, AnylineMergeWith, BlurRegionMode, BuzzClientAccount, CoarseMode, ComfySampler, ComfyScheduler, ContainerFormat, DensePoseColormap, DensePoseModel, DepthAnythingCheckpoint, DepthAnythingV2Checkpoint, DwPoseBboxDetector, DwPoseEstimator, FileFormat, HaiperVideoGenAspectRatio, HaiperVideoGenCameraMovement, HaiperVideoGenModel, HumanoidImageMaskCategory, ImageGenOutputFormat, ImageResouceTrainingModerationStatus, ImageTransformer, JobSupport, KlingMode, KlingModel, KlingV3Operation, KlingVideoGenAspectRatio, KlingVideoGenDuration, LeresBoost, LightricksAspectRatio, MediaHashType, Metric3dBackbone, MiniMaxVideoGenModel, NsfwLevel, OutputFormat, Priority, SafeMode, Scheduler, SdCppSampleMethod, SdCppSchedule, SdCppUCacheMode, SeedanceModel, SeedreamVersion, TrainingModerationStatus, TransactionType, UpdateWorkflowStatus, Veo3AspectRatio, Veo3GenerationMode, Veo3Version, ViduVideoGenModel, ViduVideoGenStyle, WorkflowStatus, WorkflowUpgradeMode, ZoeDepthEnvironment, } from './types.gen';
2
+ export { addWorkflowTag, deleteWorkflow, getBlob, getBlobArchive, getBlobContent, getBlockedContent, getConsumerBlobUploadUrl, getResource, getStreamingBlob, getWorkflow, getWorkflowStep, headBlob, invalidateResource, invalidateUserCache, invokeAceStepAudioStepTemplate, invokeAgeClassificationStepTemplate, invokeBatchOcrSafetyClassificationStepTemplate, invokeBlobArchiveStepTemplate, invokeChatCompletionStepTemplate, invokeComfyStepTemplate, invokeConvertImageStepTemplate, invokeEchoStepTemplate, invokeHumanoidImageMaskStepTemplate, invokeImageGenStepTemplate, invokeImageResourceTrainingStepTemplate, invokeImageUploadStepTemplate, invokeImageUpscalerStepTemplate, invokeMediaCaptioningStepTemplate, invokeMediaHashStepTemplate, invokeMediaRatingStepTemplate, invokeModelClamScanStepTemplate, invokeModelHashStepTemplate, invokeModelParseMetadataStepTemplate, invokeModelPickleScanStepTemplate, invokePreprocessImageStepTemplate, invokePromptEnhancementStepTemplate, invokeRepeatStepTemplate, invokeTextToImageStepTemplate, invokeTextToSpeechStepTemplate, invokeTrainingStepTemplate, invokeTranscodeStepTemplate, invokeTranscriptionStepTemplate, invokeTryOnUStepTemplate, invokeVideoEnhancementStepTemplate, invokeVideoFrameExtractionStepTemplate, invokeVideoGenStepTemplate, invokeVideoInterpolationStepTemplate, invokeVideoMetadataStepTemplate, invokeVideoUpscalerStepTemplate, invokeWdTaggingStepTemplate, invokeXGuardModerationStepTemplate, patchWorkflow, patchWorkflowStep, queryWorkflows, refreshBlob, removeAllWorkflowTags, removeWorkflowTag, submitWorkflow, updateWorkflow, updateWorkflowStep, uploadConsumerBlob, } from './sdk.gen';
3
+ export { AnimalPoseBboxDetector, AnimalPoseEstimator, AnylineMergeWith, ArchiveFormat, BlurRegionMode, BuzzClientAccount, CoarseMode, ComfySampler, ComfyScheduler, ContainerFormat, DensePoseColormap, DensePoseModel, DepthAnythingCheckpoint, DepthAnythingV2Checkpoint, DwPoseBboxDetector, DwPoseEstimator, FileFormat, HaiperVideoGenAspectRatio, HaiperVideoGenCameraMovement, HaiperVideoGenModel, HumanoidImageMaskCategory, ImageGenOutputFormat, ImageResouceTrainingModerationStatus, ImageTransformer, JobSupport, KlingMode, KlingModel, KlingV3Operation, KlingVideoGenAspectRatio, KlingVideoGenDuration, LeresBoost, LightricksAspectRatio, MediaHashType, Metric3dBackbone, MiniMaxVideoGenModel, NsfwLevel, OutputFormat, Priority, SafeMode, Scheduler, SdCppSampleMethod, SdCppSchedule, SdCppUCacheMode, SeedanceModel, SeedreamVersion, TrainingModerationStatus, TransactionType, UpdateWorkflowStatus, Veo3AspectRatio, Veo3GenerationMode, Veo3Version, ViduVideoGenModel, ViduVideoGenStyle, WorkflowStatus, WorkflowUpgradeMode, ZoeDepthEnvironment, } from './types.gen';
@@ -6,6 +6,9 @@ import type {
6
6
  DeleteWorkflowData,
7
7
  DeleteWorkflowErrors,
8
8
  DeleteWorkflowResponses,
9
+ GetBlobArchiveData,
10
+ GetBlobArchiveErrors,
11
+ GetBlobArchiveResponses,
9
12
  GetBlobContentData,
10
13
  GetBlobContentErrors,
11
14
  GetBlobContentResponses,
@@ -46,6 +49,9 @@ import type {
46
49
  InvokeBatchOcrSafetyClassificationStepTemplateData,
47
50
  InvokeBatchOcrSafetyClassificationStepTemplateErrors,
48
51
  InvokeBatchOcrSafetyClassificationStepTemplateResponses,
52
+ InvokeBlobArchiveStepTemplateData,
53
+ InvokeBlobArchiveStepTemplateErrors,
54
+ InvokeBlobArchiveStepTemplateResponses,
49
55
  InvokeChatCompletionStepTemplateData,
50
56
  InvokeChatCompletionStepTemplateErrors,
51
57
  InvokeChatCompletionStepTemplateResponses,
@@ -245,6 +251,18 @@ export declare const getBlobContent: <ThrowOnError extends boolean = false>(
245
251
  export declare const getBlockedContent: <ThrowOnError extends boolean = false>(
246
252
  options: Options<GetBlockedContentData, ThrowOnError>
247
253
  ) => import('./client').RequestResult<unknown, GetBlockedContentErrors, ThrowOnError, 'fields'>;
254
+ /**
255
+ * Streams an archive (zip or tar) of the blobs referenced by a signed archive token.
256
+ * Tokens are produced by the `blobArchive` workflow step.
257
+ */
258
+ export declare const getBlobArchive: <ThrowOnError extends boolean = false>(
259
+ options: Options<GetBlobArchiveData, ThrowOnError>
260
+ ) => import('./client').RequestResult<
261
+ GetBlobArchiveResponses,
262
+ GetBlobArchiveErrors,
263
+ ThrowOnError,
264
+ 'fields'
265
+ >;
248
266
  /**
249
267
  * Refresh a blob's URL and debounce its lifetime.
250
268
  * Returns a fresh presigned URL and resets the 30-day TTL.
@@ -300,6 +318,19 @@ export declare const invokeBatchOcrSafetyClassificationStepTemplate: <
300
318
  ThrowOnError,
301
319
  'fields'
302
320
  >;
321
+ /**
322
+ * Bundles a set of blobs into a single archive (zip or tar) that callers can
323
+ * download from a signed streaming URL. Runs in-process in the orchestrator;
324
+ * no worker job is dispatched.
325
+ */
326
+ export declare const invokeBlobArchiveStepTemplate: <ThrowOnError extends boolean = false>(
327
+ options?: Options<InvokeBlobArchiveStepTemplateData, ThrowOnError>
328
+ ) => import('./client').RequestResult<
329
+ InvokeBlobArchiveStepTemplateResponses,
330
+ InvokeBlobArchiveStepTemplateErrors,
331
+ ThrowOnError,
332
+ 'fields'
333
+ >;
303
334
  /**
304
335
  * ChatCompletion
305
336
  *
@@ -56,6 +56,17 @@ export const getBlockedContent = (options) => {
56
56
  ...options,
57
57
  });
58
58
  };
59
+ /**
60
+ * Streams an archive (zip or tar) of the blobs referenced by a signed archive token.
61
+ * Tokens are produced by the `blobArchive` workflow step.
62
+ */
63
+ export const getBlobArchive = (options) => {
64
+ var _a;
65
+ return ((_a = options.client) !== null && _a !== void 0 ? _a : client).get({
66
+ url: '/v2/consumer/blobs/archive/{encryptedToken}',
67
+ ...options,
68
+ });
69
+ };
59
70
  /**
60
71
  * Refresh a blob's URL and debounce its lifetime.
61
72
  * Returns a fresh presigned URL and resets the 30-day TTL.
@@ -113,6 +124,22 @@ export const invokeBatchOcrSafetyClassificationStepTemplate = (options) => {
113
124
  },
114
125
  });
115
126
  };
127
+ /**
128
+ * Bundles a set of blobs into a single archive (zip or tar) that callers can
129
+ * download from a signed streaming URL. Runs in-process in the orchestrator;
130
+ * no worker job is dispatched.
131
+ */
132
+ export const invokeBlobArchiveStepTemplate = (options) => {
133
+ var _a;
134
+ return ((_a = options === null || options === void 0 ? void 0 : options.client) !== null && _a !== void 0 ? _a : client).post({
135
+ url: '/v2/consumer/recipes/blobArchive',
136
+ ...options,
137
+ headers: {
138
+ 'Content-Type': 'application/json',
139
+ ...options === null || options === void 0 ? void 0 : options.headers,
140
+ },
141
+ });
142
+ };
116
143
  /**
117
144
  * ChatCompletion
118
145
  *
@@ -382,6 +382,17 @@ export declare const AnylineMergeWith: {
382
382
  readonly MANGA_LINE: 'manga_line';
383
383
  };
384
384
  export type AnylineMergeWith = (typeof AnylineMergeWith)[keyof typeof AnylineMergeWith];
385
+ /**
386
+ * Container format for a Civitai.Orchestration.Grains.Workflows.Steps.BlobArchive.BlobArchiveStep output.
387
+ */
388
+ export declare const ArchiveFormat: {
389
+ readonly ZIP: 'zip';
390
+ readonly TAR: 'tar';
391
+ };
392
+ /**
393
+ * Container format for a Civitai.Orchestration.Grains.Workflows.Steps.BlobArchive.BlobArchiveStep output.
394
+ */
395
+ export type ArchiveFormat = (typeof ArchiveFormat)[keyof typeof ArchiveFormat];
385
396
  /**
386
397
  * An assistant message representing a prior response.
387
398
  */
@@ -453,6 +464,72 @@ export type Blob = {
453
464
  */
454
465
  blockedReason?: null | string;
455
466
  };
467
+ /**
468
+ * A single entry within a Civitai.Orchestration.Grains.Workflows.Steps.BlobArchive.BlobArchiveStep.
469
+ */
470
+ export type BlobArchiveEntry = {
471
+ /**
472
+ * The blob ID to include in the archive.
473
+ */
474
+ blobId: string;
475
+ /**
476
+ * Optional filename to use inside the archive. When omitted, the blob ID is used.
477
+ * Path components and control characters are stripped.
478
+ */
479
+ fileName?: null | string;
480
+ };
481
+ /**
482
+ * Input configuration for the BlobArchive workflow step.
483
+ */
484
+ export type BlobArchiveInput = {
485
+ /**
486
+ * The blobs to include in the archive. Must contain at least 1 and at most 1000 entries.
487
+ */
488
+ entries: Array<BlobArchiveEntry>;
489
+ /**
490
+ * Optional filename for the archive itself (used in the Content-Disposition header).
491
+ * Defaults to "archive.zip" or "archive.tar" based on Civitai.Orchestration.Grains.Workflows.Steps.BlobArchive.BlobArchiveInput.Format.
492
+ */
493
+ archiveName?: null | string;
494
+ format?: ArchiveFormat;
495
+ };
496
+ /**
497
+ * Output produced by the BlobArchive workflow step.
498
+ */
499
+ export type BlobArchiveOutput = {
500
+ /**
501
+ * The signed URL that streams the archive when requested.
502
+ */
503
+ url: string;
504
+ /**
505
+ * The number of entries included in the archive.
506
+ */
507
+ entryCount: number;
508
+ format: ArchiveFormat;
509
+ /**
510
+ * The UTC time at which this URL expires.
511
+ */
512
+ expiresAt: string;
513
+ };
514
+ /**
515
+ * Bundles a set of blobs into a single archive (zip or tar) that callers can
516
+ * download from a signed streaming URL. Runs in-process in the orchestrator;
517
+ * no worker job is dispatched.
518
+ */
519
+ export type BlobArchiveStep = Omit<WorkflowStep, '$type'> & {
520
+ input: BlobArchiveInput;
521
+ output?: BlobArchiveOutput;
522
+ $type: 'blobArchive';
523
+ };
524
+ /**
525
+ * Bundles a set of blobs into a single archive (zip or tar) that callers can
526
+ * download from a signed streaming URL. Runs in-process in the orchestrator;
527
+ * no worker job is dispatched.
528
+ */
529
+ export type BlobArchiveStepTemplate = Omit<WorkflowStepTemplate, '$type'> & {
530
+ input: BlobArchiveInput;
531
+ $type: 'blobArchive';
532
+ };
456
533
  /**
457
534
  * A rectangular region defined by pixel coordinates.
458
535
  */
@@ -779,6 +856,7 @@ export type ComfyErnieStandardImageGenInput = Omit<
779
856
  loras?: {
780
857
  [key: string]: number;
781
858
  };
859
+ diffusionModel?: null | string;
782
860
  model: 'ernie';
783
861
  ecosystem: 'ernie';
784
862
  engine: 'comfy';
@@ -810,6 +888,7 @@ export type ComfyErnieTurboImageGenInput = Omit<
810
888
  loras?: {
811
889
  [key: string]: number;
812
890
  };
891
+ diffusionModel?: null | string;
813
892
  model: 'turbo';
814
893
  ecosystem: 'ernie';
815
894
  engine: 'comfy';
@@ -2057,6 +2136,82 @@ export type HaiperVideoGenOutput = VideoGenOutput & {
2057
2136
  externalTOSViolation?: null | boolean;
2058
2137
  message?: null | string;
2059
2138
  };
2139
+ /**
2140
+ * Animate a single source image (used as the first frame) into a video.
2141
+ */
2142
+ export type HappyHorseV1ImageToVideoInput = Omit<
2143
+ HappyHorseV1VideoGenInput,
2144
+ 'engine' | 'version' | 'operation'
2145
+ > & {
2146
+ /**
2147
+ * Either A URL, A DataURL or a Base64 string
2148
+ */
2149
+ image: string;
2150
+ operation: 'imageToVideo';
2151
+ version: 'v1.0';
2152
+ engine: 'happyHorse';
2153
+ };
2154
+ /**
2155
+ * Generate a video using 1–9 reference images for subject consistency.
2156
+ * Reference subjects via "character1" through "character9" placeholders in the prompt.
2157
+ */
2158
+ export type HappyHorseV1ReferenceToVideoInput = Omit<
2159
+ HappyHorseV1VideoGenInput,
2160
+ 'engine' | 'version' | 'operation'
2161
+ > & {
2162
+ images: Array<string>;
2163
+ aspectRatio?: '16:9' | '9:16' | '1:1' | '4:3' | '3:4';
2164
+ operation: 'referenceToVideo';
2165
+ version: 'v1.0';
2166
+ engine: 'happyHorse';
2167
+ };
2168
+ /**
2169
+ * Generate a video from a text prompt only.
2170
+ */
2171
+ export type HappyHorseV1TextToVideoInput = Omit<
2172
+ HappyHorseV1VideoGenInput,
2173
+ 'engine' | 'version' | 'operation'
2174
+ > & {
2175
+ aspectRatio?: '16:9' | '9:16' | '1:1' | '4:3' | '3:4';
2176
+ operation: 'textToVideo';
2177
+ version: 'v1.0';
2178
+ engine: 'happyHorse';
2179
+ };
2180
+ /**
2181
+ * Edit an existing video, optionally guided by reference images.
2182
+ * FAL bills both the input and output seconds — the per-second rate is doubled.
2183
+ */
2184
+ export type HappyHorseV1VideoEditInput = Omit<
2185
+ HappyHorseV1VideoGenInput,
2186
+ 'engine' | 'version' | 'operation'
2187
+ > & {
2188
+ sourceVideo: string;
2189
+ referenceImages?: Array<string>;
2190
+ audioSetting?: 'auto' | 'origin';
2191
+ operation: 'videoEdit';
2192
+ version: 'v1.0';
2193
+ engine: 'happyHorse';
2194
+ };
2195
+ /**
2196
+ * Version-level base for Happy-Horse v1.0. Carries common v1.0 parameters and the operation discriminator.
2197
+ */
2198
+ export type HappyHorseV1VideoGenInput = Omit<HappyHorseVideoGenInput, 'engine' | 'version'> & {
2199
+ operation: string;
2200
+ resolution?: '720p' | '1080p';
2201
+ duration?: number;
2202
+ seed?: null | number;
2203
+ enableSafetyChecker?: boolean;
2204
+ version: 'v1.0';
2205
+ engine: 'happyHorse';
2206
+ };
2207
+ /**
2208
+ * Engine-level base for Alibaba Happy-Horse video generation (FAL).
2209
+ * The version derived type carries the operation-level discriminator.
2210
+ */
2211
+ export type HappyHorseVideoGenInput = Omit<VideoGenInput, 'engine'> & {
2212
+ version: string;
2213
+ engine: 'happyHorse';
2214
+ };
2060
2215
  export declare const HumanoidImageMaskCategory: {
2061
2216
  readonly DRESSES: 'dresses';
2062
2217
  readonly UPPER_BODY: 'upperBody';
@@ -2907,7 +3062,7 @@ export type ModelHashOutput = {
2907
3062
  /**
2908
3063
  * SHA256 hash of the full file.
2909
3064
  */
2910
- shA256?: null | string;
3065
+ sha256?: null | string;
2911
3066
  /**
2912
3067
  * AutoV1 short hash (8 chars of SHA256 over a 64 KB block starting at 1 MB).
2913
3068
  */
@@ -2927,7 +3082,7 @@ export type ModelHashOutput = {
2927
3082
  /**
2928
3083
  * CRC32 of the full file.
2929
3084
  */
2930
- crC32?: null | string;
3085
+ crc32?: null | string;
2931
3086
  };
2932
3087
  /**
2933
3088
  * ModelHash
@@ -3217,6 +3372,49 @@ export type OpenAiGpt1ImageGenInput = Omit<OpenApiImageGenInput, 'engine' | 'mod
3217
3372
  model: 'gpt-image-1';
3218
3373
  engine: 'openai';
3219
3374
  };
3375
+ export type OpenAiGpt2CreateImageInput = Omit<
3376
+ OpenAiGpt2ImageGenInput,
3377
+ 'engine' | 'model' | 'operation'
3378
+ > & {
3379
+ operation: 'createImage';
3380
+ model: 'gpt-image-2';
3381
+ engine: 'openai';
3382
+ };
3383
+ export type OpenAiGpt2EditImageInput = Omit<
3384
+ OpenAiGpt2ImageGenInput,
3385
+ 'engine' | 'model' | 'operation'
3386
+ > & {
3387
+ images: Array<string>;
3388
+ /**
3389
+ * Either A URL, A DataURL or a Base64 string
3390
+ */
3391
+ maskImage?: null | string;
3392
+ /**
3393
+ * When null, fal infers output size from the input images (image_size: "auto").
3394
+ * When set, the requested width is sent to fal as image_size.width.
3395
+ * Both Width and Height must be set together, or both null.
3396
+ */
3397
+ width?: null | number;
3398
+ /**
3399
+ * When null, fal infers output size from the input images (image_size: "auto").
3400
+ * When set, the requested height is sent to fal as image_size.height.
3401
+ * Both Width and Height must be set together, or both null.
3402
+ */
3403
+ height?: null | number;
3404
+ operation: 'editImage';
3405
+ model: 'gpt-image-2';
3406
+ engine: 'openai';
3407
+ };
3408
+ export type OpenAiGpt2ImageGenInput = Omit<OpenApiImageGenInput, 'engine' | 'model'> & {
3409
+ operation: string;
3410
+ prompt: string;
3411
+ width?: null | number;
3412
+ height?: null | number;
3413
+ quantity?: number;
3414
+ quality?: 'low' | 'medium' | 'high';
3415
+ model: 'gpt-image-2';
3416
+ engine: 'openai';
3417
+ };
3220
3418
  export type OpenApiImageGenInput = Omit<ImageGenInput, 'engine'> & {
3221
3419
  model: string;
3222
3420
  prompt: string;
@@ -6909,6 +7107,14 @@ export type GetBlobData = {
6909
7107
  * A maximum nsfw level. If this is specified and the blob does not have a NSFW level specified or the NSFW level exceeds our max then we'll return an error
6910
7108
  */
6911
7109
  nsfwLevel?: NsfwLevel;
7110
+ /**
7111
+ * Optional filename to use in the Content-Disposition header when the blob is served. Path components and control characters are stripped.
7112
+ */
7113
+ filename?: string;
7114
+ /**
7115
+ * When true, the blob is served with Content-Disposition: attachment to prompt a download. Defaults to inline.
7116
+ */
7117
+ download?: boolean;
6912
7118
  };
6913
7119
  url: '/v2/consumer/blobs/{blobId}';
6914
7120
  };
@@ -7073,6 +7279,34 @@ export type GetBlockedContentErrors = {
7073
7279
  403: ProblemDetails;
7074
7280
  };
7075
7281
  export type GetBlockedContentError = GetBlockedContentErrors[keyof GetBlockedContentErrors];
7282
+ export type GetBlobArchiveData = {
7283
+ body?: never;
7284
+ path: {
7285
+ /**
7286
+ * The signed token containing the archive manifest.
7287
+ */
7288
+ encryptedToken: string;
7289
+ };
7290
+ query?: never;
7291
+ url: '/v2/consumer/blobs/archive/{encryptedToken}';
7292
+ };
7293
+ export type GetBlobArchiveErrors = {
7294
+ /**
7295
+ * Unauthorized
7296
+ */
7297
+ 401: ProblemDetails;
7298
+ /**
7299
+ * Gone
7300
+ */
7301
+ 410: ProblemDetails;
7302
+ };
7303
+ export type GetBlobArchiveError = GetBlobArchiveErrors[keyof GetBlobArchiveErrors];
7304
+ export type GetBlobArchiveResponses = {
7305
+ /**
7306
+ * OK
7307
+ */
7308
+ 200: unknown;
7309
+ };
7076
7310
  export type RefreshBlobData = {
7077
7311
  body?: never;
7078
7312
  path: {
@@ -7219,6 +7453,36 @@ export type InvokeBatchOcrSafetyClassificationStepTemplateResponses = {
7219
7453
  };
7220
7454
  export type InvokeBatchOcrSafetyClassificationStepTemplateResponse =
7221
7455
  InvokeBatchOcrSafetyClassificationStepTemplateResponses[keyof InvokeBatchOcrSafetyClassificationStepTemplateResponses];
7456
+ export type InvokeBlobArchiveStepTemplateData = {
7457
+ body?: BlobArchiveInput;
7458
+ path?: never;
7459
+ query?: {
7460
+ experimental?: boolean;
7461
+ allowMatureContent?: boolean;
7462
+ whatif?: boolean;
7463
+ };
7464
+ url: '/v2/consumer/recipes/blobArchive';
7465
+ };
7466
+ export type InvokeBlobArchiveStepTemplateErrors = {
7467
+ /**
7468
+ * Bad Request
7469
+ */
7470
+ 400: ProblemDetails;
7471
+ /**
7472
+ * Unauthorized
7473
+ */
7474
+ 401: ProblemDetails;
7475
+ };
7476
+ export type InvokeBlobArchiveStepTemplateError =
7477
+ InvokeBlobArchiveStepTemplateErrors[keyof InvokeBlobArchiveStepTemplateErrors];
7478
+ export type InvokeBlobArchiveStepTemplateResponses = {
7479
+ /**
7480
+ * OK
7481
+ */
7482
+ 200: BlobArchiveOutput;
7483
+ };
7484
+ export type InvokeBlobArchiveStepTemplateResponse =
7485
+ InvokeBlobArchiveStepTemplateResponses[keyof InvokeBlobArchiveStepTemplateResponses];
7222
7486
  export type InvokeChatCompletionStepTemplateData = {
7223
7487
  body?: ChatCompletionInput;
7224
7488
  path?: never;
@@ -16,6 +16,10 @@ export const AnylineMergeWith = {
16
16
  LINEART_ANIME: 'lineart_anime',
17
17
  MANGA_LINE: 'manga_line',
18
18
  };
19
+ /**
20
+ * Container format for a Civitai.Orchestration.Grains.Workflows.Steps.BlobArchive.BlobArchiveStep output.
21
+ */
22
+ export const ArchiveFormat = { ZIP: 'zip', TAR: 'tar' };
19
23
  /**
20
24
  * Determines how regions are applied to the blur operation.
21
25
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@civitai/client",
3
- "version": "0.2.0-beta.53",
3
+ "version": "0.2.0-beta.55",
4
4
  "description": "Civitai's javascript client for generating ai content",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",