fal-endpoint-types 1.1.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
package/readme.md
CHANGED
@@ -1,4 +1,8 @@
-
+Fal.ai model endpoints Input and Output ts shapes.
+Built from the Open API schemas provided by fal.ai.
+
+- https://fal.ai/api/models
+- https://fal.ai/api/openapi/queue/openapi.json?endpoint_id={endpointId}
 
 Usage:
 
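For reference, the second URL above is a template: substituting a concrete endpoint id for `{endpointId}` returns the queue OpenAPI document the shapes are generated from. A rough, hedged sketch of fetching one such document (the endpoint id is taken from the README example; whether the schema URL requires authentication is not stated in this diff):

```ts
// Hedged sketch: fetch the OpenAPI document for a single endpoint id.
// URL template comes from the README; availability/auth are not guaranteed here.
const endpointId = 'fal-ai/hyper3d/rodin';
const url = `https://fal.ai/api/openapi/queue/openapi.json?endpoint_id=${encodeURIComponent(endpointId)}`;

const openapi = await fetch(url).then((res) => res.json());
// List the component schemas the Input/Output interfaces are built from.
console.log(Object.keys(openapi.components?.schemas ?? {}));
```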
@@ -11,46 +15,39 @@ const output: fal.EndpointOutput<'fal-ai/hyper3d/rodin'> = {
 };
 ```
 
-
-const getMyMappedEndpointInput = (endpointId: Endpoint, userInputs: any): EndpointInput<typeof endpointId> => {
-  // ...
-  // Narrow down the endpoint id
-  // to infer the type of the returned value
-  switch (endpointId) {
-    case 'fal-ai/minimax/hailuo-02/standard/text-to-video':
-      return {
-        prompt: userInputs.prompt, // type safe, will be infered by the narrowed endpoint id
-      };
-    default:
-      throw new Error(`Unsupported endpoint: ${endpointId}`);
-  }
-}
-
-const endpointId = req.endpointId; // 'fal-ai/minimax/hailuo-02/standard/text-to-video'
-const input = getMyMappedEndpointInput(endpointId, req.userInputs);
+Usage example:
 
-
-
-
-
-
-
-
+```ts
+// ...
+// Narrow down the endpoint id,
+// to infer the type of the returned value,
+// this example uses an index signature.
+// The input type is infered in the function.
+const FalInputs: {
+  [x: number | string | symbol]: any;
+} & {
+  [K in fal.Endpoint]?: (userInputs: Record<string, any>) => fal.EndpointInput<K>;
+} = {
+  'fal-ai/flux-kontext/dev': (userInputs) => ({
+    // typed FluxKontextDevInput
+    prompt: typeof userInputs['prompt'] === 'string' ? userInputs['prompt'] : '',
+    image_url: typeof userInputs['image_url'] === 'string' ? userInputs['image_url'] : '',
   }),
-
-
-const result = await fetch('https://fal.ai/api/v1/endpoints/{endpointId}/requests/{res.requestId}', {
-  method: 'GET',
-}).then(res => res.json())
-const output = data.data as unknown as EndpointOutput<typeof endpointId>;
-
-// --------------------------
-// OR
-
-const res = await fal.queue.submit(endpointId, {input});
-
-// import('@fal/client').Result<EndpointOutput<typeof endpointId>>
-const result = await fal.queue.result(res.requestId);
+  // '...etc...': (userInputs) => ({}), // more endpoints you want to support
+};
 
-const
+const endpointIdString = String('fal-ai/flux-kontext/dev');
+// any | undefined (best instead of all possible inputs)
+const falInputAny = FalInputs[endpointIdString]?.({ prompt: 'Hello, world!' });
+// FluxKreaTrainerInput | BriaVideoBackgroundRemovalInput
+// | ... 100 more ... | undefined (expensive option, no benefits, not safe)
+const falInputAll =
+  isFalEndpoint(endpointIdString) ?
+    FalInputs[endpointIdString]?.({ prompt: 'Hello, world!' })
+  : undefined;
+
+//... Normal inference when the endpoint id is known.
+const endpointId = 'fal-ai/flux-kontext/dev';
+// FluxKontextDevInput | undefined
+const falInput = FalInputs[endpointId]?.({ prompt: 'Hello, world!' });
 ```
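The new README example calls an `isFalEndpoint` guard without defining it, and a types-only package cannot ship a runtime check, so the guard has to live in consumer code. A minimal sketch of one way it could be written, assuming you keep an explicit list of the endpoint ids you actually support (the constant and helper names below are illustrative, not part of the package):

```ts
// Illustrative only: a user-maintained list of supported endpoint ids,
// checked against the package's fal.Endpoint union at compile time.
const SUPPORTED_ENDPOINTS = [
  'fal-ai/flux-kontext/dev',
  'fal-ai/hyper3d/rodin',
] as const satisfies readonly fal.Endpoint[];

type SupportedEndpoint = (typeof SUPPORTED_ENDPOINTS)[number];

// Narrows an arbitrary string to one of the endpoint ids listed above.
function isFalEndpoint(id: string): id is SupportedEndpoint {
  return (SUPPORTED_ENDPOINTS as readonly string[]).includes(id);
}
```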
@@ -139822,3 +139822,5 @@ export interface AnimatediffSparsectrlLcmOutput {
     url: string;
   };
 }
+
+export {};
package/types/fal/index.d.ts
CHANGED
@@ -1,59 +1,11 @@
+/// <reference types="./endpoints/index.d.ts" />
+
 declare global {
   export namespace fal {
-    export type Endpoint = keyof fal.endpoints.Endpoints;
-
-    /**
-     * @example
-     * ```ts
-     *
-     * const getMyMappedEndpointInput = (endpointId: Endpoint, userInputs: any): EndpointInput<typeof endpointId> => {
-     *   // ...
-     *   // Narrow down the endpoint id
-     *   // to infer the type of the returned value
-     *   switch (endpointId) {
-     *     case 'fal-ai/minimax/hailuo-02/standard/text-to-video':
-     *       return {
-     *         prompt: userInputs.prompt, // type safe, will be infered by the narrowed endpoint id
-     *       };
-     *     default:
-     *       throw new Error(`Unsupported endpoint: ${endpointId}`);
-     *   }
-     * }
-     *
-     * const endpointId = req.endpointId; // 'fal-ai/minimax/hailuo-02/standard/text-to-video'
-     * const input = getMyMappedEndpointInput(endpointId, req.userInputs);
-     *
-     * // --------------------------
-     *
-     * const res = await fetch('https://fal.ai/api/v1/endpoints/{endpointId}', {
-     *   method: 'POST',
-     *   body: JSON.stringify({
-     *     input: {
-     *       ...input,
-     *   }),
-     * }).then(res => res.json());
-     * // check the api urls in the docs, this is just an example.
-     * const result = await fetch('https://fal.ai/api/v1/endpoints/{endpointId}/requests/{res.requestId}', {
-     *   method: 'GET',
-     * }).then(res => res.json())
-     * const output = data.data as unknown as EndpointOutput<typeof endpointId>;
-     *
-     * // --------------------------
-     * // OR
-     *
-     * const res = await fal.queue.submit(endpointId, {input});
-     *
-     * // import('@fal/client').Result<EndpointOutput<typeof endpointId>>
-     * const result = await fal.queue.result(res.requestId);
-     *
-     * const output = result.data as unknown as EndpointOutput<typeof endpointId>;
-     * ```
-     *
-     */
-    export type EndpointInput<T extends Endpoint> = fal.endpoints.Endpoints[T]['input'];
-    export type EndpointOutput<T extends Endpoint> = fal.endpoints.Endpoints[T]['output'];
+    export type Endpoint = keyof fal.endpoints.Endpoints & string;
+    export type EndpointInput<T extends fal.Endpoint> = fal.endpoints.Endpoints[T]['input'];
+    export type EndpointOutput<T extends fal.Endpoint> = fal.endpoints.Endpoints[T]['output'];
   }
-  export namespace fal.endpoints {}
 }
 
 export {};
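In 1.2.1 the helpers stay global, but `Endpoint` is now intersected with `string`, so ids can be used directly wherever a plain string is expected. A small sketch of how the three aliases are consumed (the endpoint id comes from the README example; nothing else is assumed):

```ts
// Resolve Input/Output shapes for one endpoint through the global fal namespace.
type RodinInput = fal.EndpointInput<'fal-ai/hyper3d/rodin'>;
type RodinOutput = fal.EndpointOutput<'fal-ai/hyper3d/rodin'>;

// Because Endpoint is `keyof Endpoints & string`, it is assignable to string.
function describeEndpoint(id: fal.Endpoint): string {
  return `calling ${id}`; // no cast needed thanks to the `& string` intersection
}
```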
@@ -1,20 +0,0 @@
-import type * as falEndpoints from './schema.js';
-
-declare global {
-  export namespace fal {}
-  export namespace fal.endpoints {
-    export interface Endpoints {
-
-
-
-      'fal-ai/hyper3d/rodin': {
-        input: falEndpoints.Hyper3dRodinInput;
-        output: falEndpoints.Hyper3dRodinOutput;
-      };
-
-
-    }
-  }
-}
-
-export {};
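The removed declarations above show the shape of the global registry: each key of `fal.endpoints.Endpoints` maps an endpoint id to an `input`/`output` pair. If an application ever needs to register an endpoint the generated schema does not cover, the same declaration-merging pattern could presumably be reused locally; a sketch with a deliberately made-up endpoint id:

```ts
// Hypothetical local augmentation; 'my-org/custom-model' and its shapes
// are examples only and not part of the published package.
interface MyCustomInput {
  prompt: string;
}
interface MyCustomOutput {
  url: string;
}

declare global {
  export namespace fal.endpoints {
    export interface Endpoints {
      'my-org/custom-model': {
        input: MyCustomInput;
        output: MyCustomOutput;
      };
    }
  }
}

export {};
```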
@@ -1,177 +0,0 @@
-export interface Hyper3dRodinInput {
-  /**
-   * Prompt
-   * @description A textual prompt to guide model generation. Required for Text-to-3D mode. Optional for Image-to-3D mode.
-   * @default
-   * @example A futuristic robot with sleek metallic design.
-   */
-  prompt?: string;
-  /**
-   * Condition Mode
-   * @description For fuse mode, One or more images are required.It will generate a model by extracting and fusing features of objects from multiple images.For concat mode, need to upload multiple multi-view images of the same object and generate the model. (You can upload multi-view images in any order, regardless of the order of view.)
-   * @default concat
-   * @enum {string}
-   */
-  condition_mode?: 'fuse' | 'concat';
-  /**
-   * Bbox Condition
-   * @description An array that specifies the dimensions and scaling factor of the bounding box. Typically, this array contains 3 elements, Length(X-axis), Width(Y-axis) and Height(Z-axis).
-   * @example [
-   *   100,
-   *   50,
-   *   150
-   * ]
-   */
-  bbox_condition?: number[];
-  /**
-   * Tier
-   * @description Tier of generation. For Rodin Sketch, set to Sketch. For Rodin Regular, set to Regular.
-   * @default Regular
-   * @enum {string}
-   */
-  tier?: 'Regular' | 'Sketch';
-  /**
-   * Quality
-   * @description Generation quality. Possible values: high, medium, low, extra-low. Default is medium.
-   * @default medium
-   * @enum {string}
-   */
-  quality?: 'high' | 'medium' | 'low' | 'extra-low';
-  /**
-   * T/A Pose
-   * @description When generating the human-like model, this parameter control the generation result to T/A Pose.
-   * @default false
-   */
-  TAPose?: boolean;
-  /**
-   * Input Image Urls
-   * @description URL of images to use while generating the 3D model. Required for Image-to-3D mode. Optional for Text-to-3D mode.
-   * @example https://storage.googleapis.com/falserverless/model_tests/video_models/robot.png
-   */
-  input_image_urls?: string[];
-  /**
-   * Geometry File Format
-   * @description Format of the geometry file. Possible values: glb, usdz, fbx, obj, stl. Default is glb.
-   * @default glb
-   * @enum {string}
-   */
-  geometry_file_format?: 'glb' | 'usdz' | 'fbx' | 'obj' | 'stl';
-  /**
-   * Use Hyper
-   * @description Whether to export the model using hyper mode. Default is false.
-   * @default false
-   */
-  use_hyper?: boolean;
-  /**
-   * Addons
-   * @description Generation add-on features. Default is []. Possible values are HighPack. The HighPack option will provide 4K resolution textures instead of the default 1K, as well as models with high-poly. It will cost triple the billable units.
-   * @enum {string}
-   */
-  addons?: 'HighPack';
-  /**
-   * Seed
-   * @description Seed value for randomization, ranging from 0 to 65535. Optional.
-   */
-  seed?: number;
-  /**
-   * Material
-   * @description Material type. Possible values: PBR, Shaded. Default is PBR.
-   * @default PBR
-   * @example Shaded
-   * @enum {string}
-   */
-  material?: 'PBR' | 'Shaded';
-}
-
-export interface Hyper3dRodinOutput {
-  /**
-   * Model Mesh
-   * @description Generated 3D object file.
-   * @example {
-   *   "url": "https://v3.fal.media/files/koala/VlX4JqNI8F9HO2ETp_B7t_base_basic_pbr.glb"
-   * }
-   */
-  model_mesh: {
-    /**
-     * File Size
-     * @description The size of the file in bytes.
-     * @example 4404019
-     */
-    file_size?: number;
-    /**
-     * File Name
-     * @description The name of the file. It will be auto-generated if not provided.
-     * @example z9RV14K95DvU.png
-     */
-    file_name?: string;
-    /**
-     * Content Type
-     * @description The mime type of the file.
-     * @example image/png
-     */
-    content_type?: string;
-    /**
-     * Url
-     * @description The URL where the file can be downloaded from.
-     */
-    url: string;
-    /**
-     * File Data
-     * Format: binary
-     * @description File data
-     */
-    file_data?: string;
-  };
-  /**
-   * Seed
-   * @description Seed value used for generation.
-   */
-  seed: number;
-  /**
-   * Textures
-   * @description Generated textures for the 3D object.
-   */
-  textures: {
-    /**
-     * Height
-     * @description The height of the image in pixels.
-     * @example 1024
-     */
-    height?: number;
-    /**
-     * File Size
-     * @description The size of the file in bytes.
-     * @example 4404019
-     */
-    file_size?: number;
-    /**
-     * Url
-     * @description The URL where the file can be downloaded from.
-     */
-    url: string;
-    /**
-     * Width
-     * @description The width of the image in pixels.
-     * @example 1024
-     */
-    width?: number;
-    /**
-     * File Name
-     * @description The name of the file. It will be auto-generated if not provided.
-     * @example z9RV14K95DvU.png
-     */
-    file_name?: string;
-    /**
-     * Content Type
-     * @description The mime type of the file.
-     * @example image/png
-     */
-    content_type?: string;
-    /**
-     * File Data
-     * Format: binary
-     * @description File data
-     */
-    file_data?: string;
-  }[];
-}