@n8n/n8n-nodes-langchain 2.18.3 → 2.18.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/node-definitions/.nodes-hash +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/index.ts +3 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_image/operation_edit.schema.js +6 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_image/operation_edit.schema.js +6 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_image/operation_edit.ts +6 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_image/operation_edit.schema.js +6 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_image/operation_edit.ts +6 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/index.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/index.ts +27 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/index.schema.js +24 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/index.ts +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_generate.schema.js +32 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_generate.ts +60 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_transcribe.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_transcribe.ts +46 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_translate.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_translate.ts +43 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/index.schema.js +26 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/index.ts +21 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_create.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_create.ts +54 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_get.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_get.ts +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_remove.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_remove.ts +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_update.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_update.ts +36 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/index.schema.js +24 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/index.ts +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_delete_file.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_delete_file.ts +33 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_list.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_list.ts +37 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_upload.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_upload.ts +43 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/index.schema.js +24 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/index.ts +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_analyze.schema.js +35 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_analyze.ts +74 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_edit.schema.js +39 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_edit.ts +117 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_generate.schema.js +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_generate.ts +89 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/index.schema.js +22 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/index.ts +15 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_classify.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_classify.ts +36 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_response.schema.js +34 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_response.ts +317 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/index.schema.js +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/index.ts +10 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/operation_generate.schema.js +33 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/operation_generate.ts +67 -0
- package/dist/nodes/vendors/OpenAi/OpenAi.node.js +2 -1
- package/dist/nodes/vendors/OpenAi/OpenAi.node.js.map +1 -1
- package/dist/nodes/vendors/OpenAi/methods/listSearch.js +18 -1
- package/dist/nodes/vendors/OpenAi/methods/listSearch.js.map +1 -1
- package/dist/nodes/vendors/OpenAi/v2/OpenAiV2.node.js +1 -1
- package/dist/nodes/vendors/OpenAi/v2/OpenAiV2.node.js.map +1 -1
- package/dist/nodes/vendors/OpenAi/v2/actions/image/edit.operation.js +261 -10
- package/dist/nodes/vendors/OpenAi/v2/actions/image/edit.operation.js.map +1 -1
- package/dist/typecheck.tsbuildinfo +1 -1
- package/dist/types/nodes.json +2 -2
- package/package.json +9 -9
|
@@ -1 +1 @@
|
|
|
1
|
-
|
|
1
|
+
5721f264ecfa053d6d51df1aac8b5daeb6309677216fba6f4dd304f8eafda40e
|
|
@@ -4,6 +4,7 @@
|
|
|
4
4
|
* Re-exports all version-specific types and provides combined union type.
|
|
5
5
|
*/
|
|
6
6
|
|
|
7
|
+
import type { LcOpenAiV23Node } from './v23';
|
|
7
8
|
import type { LcOpenAiV22Node } from './v22';
|
|
8
9
|
import type { LcOpenAiV21Node } from './v21';
|
|
9
10
|
import type { LcOpenAiV2Node } from './v2';
|
|
@@ -17,6 +18,7 @@ import type { LcOpenAiV12Node } from './v12';
|
|
|
17
18
|
import type { LcOpenAiV11Node } from './v11';
|
|
18
19
|
import type { LcOpenAiV1Node } from './v1';
|
|
19
20
|
|
|
21
|
+
export * from './v23';
|
|
20
22
|
export * from './v22';
|
|
21
23
|
export * from './v21';
|
|
22
24
|
export * from './v2';
|
|
@@ -31,4 +33,4 @@ export * from './v11';
|
|
|
31
33
|
export * from './v1';
|
|
32
34
|
|
|
33
35
|
// Combined union type for all versions
|
|
34
|
-
export type LcOpenAiNode = LcOpenAiV22Node | LcOpenAiV21Node | LcOpenAiV2Node | LcOpenAiV18Node | LcOpenAiV17Node | LcOpenAiV16Node | LcOpenAiV15Node | LcOpenAiV14Node | LcOpenAiV13Node | LcOpenAiV12Node | LcOpenAiV11Node | LcOpenAiV1Node;
|
|
36
|
+
export type LcOpenAiNode = LcOpenAiV23Node | LcOpenAiV22Node | LcOpenAiV21Node | LcOpenAiV2Node | LcOpenAiV18Node | LcOpenAiV17Node | LcOpenAiV16Node | LcOpenAiV15Node | LcOpenAiV14Node | LcOpenAiV13Node | LcOpenAiV12Node | LcOpenAiV11Node | LcOpenAiV1Node;
|
|
@@ -24,14 +24,14 @@ module.exports = function getSchema({ parameters, z, expressionSchema, stringOrE
|
|
|
24
24
|
operation: z.literal('edit'),
|
|
25
25
|
model: z.union([z.literal('dall-e-2'), z.literal('gpt-image-1'), expressionSchema]).optional(),
|
|
26
26
|
prompt: stringOrExpression.optional(),
|
|
27
|
-
images: resolveSchema({ parameters, schema: z.object({ values: z.array(z.object({ binaryPropertyName: stringOrExpression.optional() })).optional() }), required: false, displayOptions: {"show":{"/model":["gpt-image-1"]}} }),
|
|
28
|
-
binaryPropertyName: resolveSchema({ parameters, schema: stringOrExpression, required: false, displayOptions: {"show":{"/model":["dall-e-2"]}} }),
|
|
27
|
+
images: resolveSchema({ parameters, schema: z.object({ values: z.array(z.object({ binaryPropertyName: stringOrExpression.optional() })).optional() }), required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}} }),
|
|
28
|
+
binaryPropertyName: resolveSchema({ parameters, schema: stringOrExpression, required: false, displayOptions: {"show":{"/model":["dall-e-2"],"/modelId":[{"_cnd":{"includes":"dall-e"}}]}} }),
|
|
29
29
|
n: numberOrExpression.optional(),
|
|
30
30
|
size: z.union([z.literal('256x256'), z.literal('512x512'), z.literal('1024x1024'), z.literal('1024x1536'), z.literal('1536x1024'), z.literal('auto'), expressionSchema]).optional(),
|
|
31
|
-
quality: resolveSchema({ parameters, schema: z.union([z.literal('auto'), z.literal('high'), z.literal('medium'), z.literal('low'), z.literal('standard'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"]}} }),
|
|
32
|
-
responseFormat: resolveSchema({ parameters, schema: z.union([z.literal('url'), z.literal('b64_json'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["dall-e-2"]}} }),
|
|
33
|
-
outputFormat: resolveSchema({ parameters, schema: z.union([z.literal('png'), z.literal('jpeg'), z.literal('webp'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"]}} }),
|
|
34
|
-
outputCompression: resolveSchema({ parameters, schema: numberOrExpression, required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"outputFormat":["webp","jpeg"]}}, defaults: {"outputFormat":"png"} }),
|
|
31
|
+
quality: resolveSchema({ parameters, schema: z.union([z.literal('auto'), z.literal('high'), z.literal('medium'), z.literal('low'), z.literal('standard'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}} }),
|
|
32
|
+
responseFormat: resolveSchema({ parameters, schema: z.union([z.literal('url'), z.literal('b64_json'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["dall-e-2"],"/modelId":[{"_cnd":{"includes":"dall-e"}}]}} }),
|
|
33
|
+
outputFormat: resolveSchema({ parameters, schema: z.union([z.literal('png'), z.literal('jpeg'), z.literal('webp'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}} }),
|
|
34
|
+
outputCompression: resolveSchema({ parameters, schema: numberOrExpression, required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"outputFormat":["webp","jpeg"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}}, defaults: {"outputFormat":"png"} }),
|
|
35
35
|
options: z.object({ user: stringOrExpression.optional(), background: z.union([z.literal('auto'), z.literal('transparent'), z.literal('opaque'), expressionSchema]).optional(), inputFidelity: z.union([z.literal('low'), z.literal('high'), expressionSchema]).optional(), imageMask: stringOrExpression.optional() }).optional(),
|
|
36
36
|
}).optional(),
|
|
37
37
|
subnodes: subnodesSchema.optional(),
|
|
@@ -24,14 +24,14 @@ module.exports = function getSchema({ parameters, z, expressionSchema, stringOrE
|
|
|
24
24
|
operation: z.literal('edit'),
|
|
25
25
|
model: z.union([z.literal('dall-e-2'), z.literal('gpt-image-1'), expressionSchema]).optional(),
|
|
26
26
|
prompt: stringOrExpression.optional(),
|
|
27
|
-
images: resolveSchema({ parameters, schema: z.object({ values: z.array(z.object({ binaryPropertyName: stringOrExpression.optional() })).optional() }), required: false, displayOptions: {"show":{"/model":["gpt-image-1"]}} }),
|
|
28
|
-
binaryPropertyName: resolveSchema({ parameters, schema: stringOrExpression, required: false, displayOptions: {"show":{"/model":["dall-e-2"]}} }),
|
|
27
|
+
images: resolveSchema({ parameters, schema: z.object({ values: z.array(z.object({ binaryPropertyName: stringOrExpression.optional() })).optional() }), required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}} }),
|
|
28
|
+
binaryPropertyName: resolveSchema({ parameters, schema: stringOrExpression, required: false, displayOptions: {"show":{"/model":["dall-e-2"],"/modelId":[{"_cnd":{"includes":"dall-e"}}]}} }),
|
|
29
29
|
n: numberOrExpression.optional(),
|
|
30
30
|
size: z.union([z.literal('256x256'), z.literal('512x512'), z.literal('1024x1024'), z.literal('1024x1536'), z.literal('1536x1024'), z.literal('auto'), expressionSchema]).optional(),
|
|
31
|
-
quality: resolveSchema({ parameters, schema: z.union([z.literal('auto'), z.literal('high'), z.literal('medium'), z.literal('low'), z.literal('standard'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"]}} }),
|
|
32
|
-
responseFormat: resolveSchema({ parameters, schema: z.union([z.literal('url'), z.literal('b64_json'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["dall-e-2"]}} }),
|
|
33
|
-
outputFormat: resolveSchema({ parameters, schema: z.union([z.literal('png'), z.literal('jpeg'), z.literal('webp'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"]}} }),
|
|
34
|
-
outputCompression: resolveSchema({ parameters, schema: numberOrExpression, required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"outputFormat":["webp","jpeg"]}}, defaults: {"outputFormat":"png"} }),
|
|
31
|
+
quality: resolveSchema({ parameters, schema: z.union([z.literal('auto'), z.literal('high'), z.literal('medium'), z.literal('low'), z.literal('standard'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}} }),
|
|
32
|
+
responseFormat: resolveSchema({ parameters, schema: z.union([z.literal('url'), z.literal('b64_json'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["dall-e-2"],"/modelId":[{"_cnd":{"includes":"dall-e"}}]}} }),
|
|
33
|
+
outputFormat: resolveSchema({ parameters, schema: z.union([z.literal('png'), z.literal('jpeg'), z.literal('webp'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}} }),
|
|
34
|
+
outputCompression: resolveSchema({ parameters, schema: numberOrExpression, required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"outputFormat":["webp","jpeg"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}}, defaults: {"outputFormat":"png"} }),
|
|
35
35
|
options: z.object({ user: stringOrExpression.optional(), background: z.union([z.literal('auto'), z.literal('transparent'), z.literal('opaque'), expressionSchema]).optional(), inputFidelity: z.union([z.literal('low'), z.literal('high'), expressionSchema]).optional(), imageMask: stringOrExpression.optional() }).optional(),
|
|
36
36
|
}).optional(),
|
|
37
37
|
subnodes: subnodesSchema.optional(),
|
package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_image/operation_edit.ts
CHANGED
|
@@ -23,7 +23,7 @@ export type LcOpenAiV21ImageEditParams = {
|
|
|
23
23
|
prompt?: string | Expression<string> | PlaceholderValue;
|
|
24
24
|
/**
|
|
25
25
|
* Add one or more binary fields to include images with your prompt. Each image should be a png, webp, or jpg file less than 50MB. You can provide up to 16 images.
|
|
26
|
-
* @displayOptions.show { /model: ["gpt-image-1"] }
|
|
26
|
+
* @displayOptions.show { /model: ["gpt-image-1"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
|
|
27
27
|
* @default {"values":[{"binaryPropertyName":"data"}]}
|
|
28
28
|
*/
|
|
29
29
|
images?: {
|
|
@@ -39,7 +39,7 @@ export type LcOpenAiV21ImageEditParams = {
|
|
|
39
39
|
/**
|
|
40
40
|
* Name of the binary property which contains the image. It should be a square png file less than 4MB.
|
|
41
41
|
* @hint The name of the input field containing the binary file data to be processed
|
|
42
|
-
* @displayOptions.show { /model: ["dall-e-2"] }
|
|
42
|
+
* @displayOptions.show { /model: ["dall-e-2"], /modelId: [{"_cnd":{"includes":"dall-e"}}] }
|
|
43
43
|
* @default data
|
|
44
44
|
*/
|
|
45
45
|
binaryPropertyName?: string | Expression<string> | PlaceholderValue;
|
|
@@ -55,25 +55,25 @@ export type LcOpenAiV21ImageEditParams = {
|
|
|
55
55
|
size?: '256x256' | '512x512' | '1024x1024' | '1024x1536' | '1536x1024' | 'auto' | Expression<string>;
|
|
56
56
|
/**
|
|
57
57
|
* The quality of the image that will be generated
|
|
58
|
-
* @displayOptions.show { /model: ["gpt-image-1"] }
|
|
58
|
+
* @displayOptions.show { /model: ["gpt-image-1"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
|
|
59
59
|
* @default auto
|
|
60
60
|
*/
|
|
61
61
|
quality?: 'auto' | 'high' | 'medium' | 'low' | 'standard' | Expression<string>;
|
|
62
62
|
/**
|
|
63
63
|
* The format in which the generated images are returned. URLs are only valid for 60 minutes after generation.
|
|
64
|
-
* @displayOptions.show { /model: ["dall-e-2"] }
|
|
64
|
+
* @displayOptions.show { /model: ["dall-e-2"], /modelId: [{"_cnd":{"includes":"dall-e"}}] }
|
|
65
65
|
* @default url
|
|
66
66
|
*/
|
|
67
67
|
responseFormat?: 'url' | 'b64_json' | Expression<string>;
|
|
68
68
|
/**
|
|
69
69
|
* The format in which the generated images are returned. Only supported for gpt-image-1.
|
|
70
|
-
* @displayOptions.show { /model: ["gpt-image-1"] }
|
|
70
|
+
* @displayOptions.show { /model: ["gpt-image-1"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
|
|
71
71
|
* @default png
|
|
72
72
|
*/
|
|
73
73
|
outputFormat?: 'png' | 'jpeg' | 'webp' | Expression<string>;
|
|
74
74
|
/**
|
|
75
75
|
* The compression level (0-100%) for the generated images. Only supported for gpt-image-1 with webp or jpeg output formats.
|
|
76
|
-
* @displayOptions.show { /model: ["gpt-image-1"], outputFormat: ["webp", "jpeg"] }
|
|
76
|
+
* @displayOptions.show { /model: ["gpt-image-1"], outputFormat: ["webp", "jpeg"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
|
|
77
77
|
* @default 100
|
|
78
78
|
*/
|
|
79
79
|
outputCompression?: number | Expression<number>;
|
|
@@ -24,14 +24,14 @@ module.exports = function getSchema({ parameters, z, expressionSchema, stringOrE
|
|
|
24
24
|
operation: z.literal('edit'),
|
|
25
25
|
model: z.union([z.literal('dall-e-2'), z.literal('gpt-image-1'), expressionSchema]).optional(),
|
|
26
26
|
prompt: stringOrExpression.optional(),
|
|
27
|
-
images: resolveSchema({ parameters, schema: z.object({ values: z.array(z.object({ binaryPropertyName: stringOrExpression.optional() })).optional() }), required: false, displayOptions: {"show":{"/model":["gpt-image-1"]}} }),
|
|
28
|
-
binaryPropertyName: resolveSchema({ parameters, schema: stringOrExpression, required: false, displayOptions: {"show":{"/model":["dall-e-2"]}} }),
|
|
27
|
+
images: resolveSchema({ parameters, schema: z.object({ values: z.array(z.object({ binaryPropertyName: stringOrExpression.optional() })).optional() }), required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}} }),
|
|
28
|
+
binaryPropertyName: resolveSchema({ parameters, schema: stringOrExpression, required: false, displayOptions: {"show":{"/model":["dall-e-2"],"/modelId":[{"_cnd":{"includes":"dall-e"}}]}} }),
|
|
29
29
|
n: numberOrExpression.optional(),
|
|
30
30
|
size: z.union([z.literal('256x256'), z.literal('512x512'), z.literal('1024x1024'), z.literal('1024x1536'), z.literal('1536x1024'), z.literal('auto'), expressionSchema]).optional(),
|
|
31
|
-
quality: resolveSchema({ parameters, schema: z.union([z.literal('auto'), z.literal('high'), z.literal('medium'), z.literal('low'), z.literal('standard'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"]}} }),
|
|
32
|
-
responseFormat: resolveSchema({ parameters, schema: z.union([z.literal('url'), z.literal('b64_json'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["dall-e-2"]}} }),
|
|
33
|
-
outputFormat: resolveSchema({ parameters, schema: z.union([z.literal('png'), z.literal('jpeg'), z.literal('webp'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"]}} }),
|
|
34
|
-
outputCompression: resolveSchema({ parameters, schema: numberOrExpression, required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"outputFormat":["webp","jpeg"]}}, defaults: {"outputFormat":"png"} }),
|
|
31
|
+
quality: resolveSchema({ parameters, schema: z.union([z.literal('auto'), z.literal('high'), z.literal('medium'), z.literal('low'), z.literal('standard'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}} }),
|
|
32
|
+
responseFormat: resolveSchema({ parameters, schema: z.union([z.literal('url'), z.literal('b64_json'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["dall-e-2"],"/modelId":[{"_cnd":{"includes":"dall-e"}}]}} }),
|
|
33
|
+
outputFormat: resolveSchema({ parameters, schema: z.union([z.literal('png'), z.literal('jpeg'), z.literal('webp'), expressionSchema]), required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}} }),
|
|
34
|
+
outputCompression: resolveSchema({ parameters, schema: numberOrExpression, required: false, displayOptions: {"show":{"/model":["gpt-image-1"],"outputFormat":["webp","jpeg"],"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}}, defaults: {"outputFormat":"png"} }),
|
|
35
35
|
options: z.object({ user: stringOrExpression.optional(), background: z.union([z.literal('auto'), z.literal('transparent'), z.literal('opaque'), expressionSchema]).optional(), inputFidelity: z.union([z.literal('low'), z.literal('high'), expressionSchema]).optional(), imageMask: stringOrExpression.optional() }).optional(),
|
|
36
36
|
}).optional(),
|
|
37
37
|
subnodes: subnodesSchema.optional(),
|
package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_image/operation_edit.ts
CHANGED
|
@@ -23,7 +23,7 @@ export type LcOpenAiV22ImageEditParams = {
|
|
|
23
23
|
prompt?: string | Expression<string> | PlaceholderValue;
|
|
24
24
|
/**
|
|
25
25
|
* Add one or more binary fields to include images with your prompt. Each image should be a png, webp, or jpg file less than 50MB. You can provide up to 16 images.
|
|
26
|
-
* @displayOptions.show { /model: ["gpt-image-1"] }
|
|
26
|
+
* @displayOptions.show { /model: ["gpt-image-1"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
|
|
27
27
|
* @default {"values":[{"binaryPropertyName":"data"}]}
|
|
28
28
|
*/
|
|
29
29
|
images?: {
|
|
@@ -39,7 +39,7 @@ export type LcOpenAiV22ImageEditParams = {
|
|
|
39
39
|
/**
|
|
40
40
|
* Name of the binary property which contains the image. It should be a square png file less than 4MB.
|
|
41
41
|
* @hint The name of the input field containing the binary file data to be processed
|
|
42
|
-
* @displayOptions.show { /model: ["dall-e-2"] }
|
|
42
|
+
* @displayOptions.show { /model: ["dall-e-2"], /modelId: [{"_cnd":{"includes":"dall-e"}}] }
|
|
43
43
|
* @default data
|
|
44
44
|
*/
|
|
45
45
|
binaryPropertyName?: string | Expression<string> | PlaceholderValue;
|
|
@@ -55,25 +55,25 @@ export type LcOpenAiV22ImageEditParams = {
|
|
|
55
55
|
size?: '256x256' | '512x512' | '1024x1024' | '1024x1536' | '1536x1024' | 'auto' | Expression<string>;
|
|
56
56
|
/**
|
|
57
57
|
* The quality of the image that will be generated
|
|
58
|
-
* @displayOptions.show { /model: ["gpt-image-1"] }
|
|
58
|
+
* @displayOptions.show { /model: ["gpt-image-1"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
|
|
59
59
|
* @default auto
|
|
60
60
|
*/
|
|
61
61
|
quality?: 'auto' | 'high' | 'medium' | 'low' | 'standard' | Expression<string>;
|
|
62
62
|
/**
|
|
63
63
|
* The format in which the generated images are returned. URLs are only valid for 60 minutes after generation.
|
|
64
|
-
* @displayOptions.show { /model: ["dall-e-2"] }
|
|
64
|
+
* @displayOptions.show { /model: ["dall-e-2"], /modelId: [{"_cnd":{"includes":"dall-e"}}] }
|
|
65
65
|
* @default url
|
|
66
66
|
*/
|
|
67
67
|
responseFormat?: 'url' | 'b64_json' | Expression<string>;
|
|
68
68
|
/**
|
|
69
69
|
* The format in which the generated images are returned. Only supported for gpt-image-1.
|
|
70
|
-
* @displayOptions.show { /model: ["gpt-image-1"] }
|
|
70
|
+
* @displayOptions.show { /model: ["gpt-image-1"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
|
|
71
71
|
* @default png
|
|
72
72
|
*/
|
|
73
73
|
outputFormat?: 'png' | 'jpeg' | 'webp' | Expression<string>;
|
|
74
74
|
/**
|
|
75
75
|
* The compression level (0-100%) for the generated images. Only supported for gpt-image-1 with webp or jpeg output formats.
|
|
76
|
-
* @displayOptions.show { /model: ["gpt-image-1"], outputFormat: ["webp", "jpeg"] }
|
|
76
|
+
* @displayOptions.show { /model: ["gpt-image-1"], outputFormat: ["webp", "jpeg"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
|
|
77
77
|
* @default 100
|
|
78
78
|
*/
|
|
79
79
|
outputCompression?: number | Expression<number>;
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI Node - Version 2.3 - Zod Schema Factory
|
|
3
|
+
* Exports a factory that unions all discriminator schemas.
|
|
4
|
+
*
|
|
5
|
+
* Schema helpers (z, expressionSchema, etc.) are passed as parameters
|
|
6
|
+
* by the schema-validator, not imported from external files.
|
|
7
|
+
*
|
|
8
|
+
* @generated - CommonJS JavaScript for runtime loading
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const getTextSchema = require('./resource_text/index.schema');
|
|
12
|
+
const getImageSchema = require('./resource_image/index.schema');
|
|
13
|
+
const getAudioSchema = require('./resource_audio/index.schema');
|
|
14
|
+
const getFileSchema = require('./resource_file/index.schema');
|
|
15
|
+
const getConversationSchema = require('./resource_conversation/index.schema');
|
|
16
|
+
const getVideoSchema = require('./resource_video/index.schema');
|
|
17
|
+
|
|
18
|
+
module.exports = function getSchema(helpers) {
|
|
19
|
+
const { parameters, z } = helpers;
|
|
20
|
+
// Apply discriminator default if not set
|
|
21
|
+
const effectiveParams = parameters.resource === undefined ? { ...parameters, resource: 'text' } : parameters;
|
|
22
|
+
return z.union([
|
|
23
|
+
getTextSchema({ ...helpers, parameters: effectiveParams }),
|
|
24
|
+
getImageSchema({ ...helpers, parameters: effectiveParams }),
|
|
25
|
+
getAudioSchema({ ...helpers, parameters: effectiveParams }),
|
|
26
|
+
getFileSchema({ ...helpers, parameters: effectiveParams }),
|
|
27
|
+
getConversationSchema({ ...helpers, parameters: effectiveParams }),
|
|
28
|
+
getVideoSchema({ ...helpers, parameters: effectiveParams }),
|
|
29
|
+
]);
|
|
30
|
+
};
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI Node - Version 2.3
|
|
3
|
+
* Re-exports all discriminator combinations.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import type { LcOpenAiV23TextNode } from './resource_text';
|
|
7
|
+
import type { LcOpenAiV23ImageNode } from './resource_image';
|
|
8
|
+
import type { LcOpenAiV23AudioNode } from './resource_audio';
|
|
9
|
+
import type { LcOpenAiV23FileNode } from './resource_file';
|
|
10
|
+
import type { LcOpenAiV23ConversationNode } from './resource_conversation';
|
|
11
|
+
import type { LcOpenAiV23VideoNode } from './resource_video';
|
|
12
|
+
|
|
13
|
+
export * from './resource_text';
|
|
14
|
+
export * from './resource_image';
|
|
15
|
+
export * from './resource_audio';
|
|
16
|
+
export * from './resource_file';
|
|
17
|
+
export * from './resource_conversation';
|
|
18
|
+
export * from './resource_video';
|
|
19
|
+
|
|
20
|
+
export type LcOpenAiV23Node =
|
|
21
|
+
| LcOpenAiV23TextNode
|
|
22
|
+
| LcOpenAiV23ImageNode
|
|
23
|
+
| LcOpenAiV23AudioNode
|
|
24
|
+
| LcOpenAiV23FileNode
|
|
25
|
+
| LcOpenAiV23ConversationNode
|
|
26
|
+
| LcOpenAiV23VideoNode
|
|
27
|
+
;
|
package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/index.schema.js
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI - Audio Resource - Zod Schema Factory
|
|
3
|
+
* Exports a factory that unions all operation schemas for this resource.
|
|
4
|
+
*
|
|
5
|
+
* Schema helpers (z, expressionSchema, etc.) are passed as parameters
|
|
6
|
+
* by the schema-validator, not imported from external files.
|
|
7
|
+
*
|
|
8
|
+
* @generated - CommonJS JavaScript for runtime loading
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const getGenerateSchema = require('./operation_generate.schema');
|
|
12
|
+
const getTranscribeSchema = require('./operation_transcribe.schema');
|
|
13
|
+
const getTranslateSchema = require('./operation_translate.schema');
|
|
14
|
+
|
|
15
|
+
module.exports = function getSchema(helpers) {
|
|
16
|
+
const { parameters, z } = helpers;
|
|
17
|
+
// Apply operation default if not set
|
|
18
|
+
const effectiveParams = parameters.operation === undefined ? { ...parameters, operation: 'generate' } : parameters;
|
|
19
|
+
return z.union([
|
|
20
|
+
getGenerateSchema({ ...helpers, parameters: effectiveParams }),
|
|
21
|
+
getTranscribeSchema({ ...helpers, parameters: effectiveParams }),
|
|
22
|
+
getTranslateSchema({ ...helpers, parameters: effectiveParams }),
|
|
23
|
+
]);
|
|
24
|
+
};
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI - Audio Resource
|
|
3
|
+
* Re-exports all operation types for this resource.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import type { LcOpenAiV23AudioGenerateNode } from './operation_generate';
|
|
7
|
+
import type { LcOpenAiV23AudioTranscribeNode } from './operation_transcribe';
|
|
8
|
+
import type { LcOpenAiV23AudioTranslateNode } from './operation_translate';
|
|
9
|
+
|
|
10
|
+
export * from './operation_generate';
|
|
11
|
+
export * from './operation_transcribe';
|
|
12
|
+
export * from './operation_translate';
|
|
13
|
+
|
|
14
|
+
export type LcOpenAiV23AudioNode =
|
|
15
|
+
| LcOpenAiV23AudioGenerateNode
|
|
16
|
+
| LcOpenAiV23AudioTranscribeNode
|
|
17
|
+
| LcOpenAiV23AudioTranslateNode
|
|
18
|
+
;
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI Node - Version 2.3 - Zod Schema
|
|
3
|
+
* Discriminator: resource=audio, operation=generate
|
|
4
|
+
*
|
|
5
|
+
* Use .parse() for strict validation or .safeParse() for error handling.
|
|
6
|
+
*
|
|
7
|
+
* Schema helpers (z, expressionSchema, etc.) are passed as parameters
|
|
8
|
+
* by the schema-validator, not imported from external files.
|
|
9
|
+
*
|
|
10
|
+
* @generated - CommonJS JavaScript for runtime loading
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
module.exports = function getSchema({ parameters, z, expressionSchema, stringOrExpression, numberOrExpression, booleanOrExpression, resourceLocatorValueSchema, resourceMapperValueSchema, filterValueSchema, assignmentCollectionValueSchema, iDataObjectSchema, toolInstanceSchema, memoryInstanceSchema }) {
|
|
14
|
+
|
|
15
|
+
// Static subnode schema
|
|
16
|
+
const subnodesSchema = z.object({
|
|
17
|
+
tools: z.array(toolInstanceSchema).optional(),
|
|
18
|
+
memory: memoryInstanceSchema.optional(),
|
|
19
|
+
}).strict();
|
|
20
|
+
|
|
21
|
+
return z.object({
|
|
22
|
+
parameters: z.object({
|
|
23
|
+
resource: z.literal('audio'),
|
|
24
|
+
operation: z.literal('generate').default('generate'),
|
|
25
|
+
model: z.union([z.literal('tts-1'), z.literal('tts-1-hd'), expressionSchema]).optional(),
|
|
26
|
+
input: stringOrExpression.optional(),
|
|
27
|
+
voice: z.union([z.literal('alloy'), z.literal('echo'), z.literal('fable'), z.literal('nova'), z.literal('onyx'), z.literal('shimmer'), expressionSchema]).optional(),
|
|
28
|
+
options: z.object({ response_format: z.union([z.literal('mp3'), z.literal('opus'), z.literal('aac'), z.literal('flac'), expressionSchema]).optional(), speed: numberOrExpression.optional(), binaryPropertyOutput: stringOrExpression.optional() }).optional(),
|
|
29
|
+
}).optional(),
|
|
30
|
+
subnodes: subnodesSchema.optional(),
|
|
31
|
+
});
|
|
32
|
+
};
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
/**
 * OpenAI Node - Version 2.3
 * Discriminator: resource=audio, operation=generate
 */

// NOTE(review): CredentialReference, Expression, PlaceholderValue, NodeConfig,
// ToolInstance and MemoryInstance are not declared in this file — presumably
// ambient types from a shared definitions module; confirm against the package.

/** Credential slots this node variant accepts. */
interface Credentials {
  openAiApi: CredentialReference;
}

/** Creates audio from a text prompt */
export type LcOpenAiV23AudioGenerateParams = {
  resource: 'audio';
  operation: 'generate';
  /**
   * Model
   * @default tts-1
   */
  model?: 'tts-1' | 'tts-1-hd' | Expression<string>;
  /**
   * The text to generate audio for. The maximum length is 4096 characters.
   */
  input?: string | Expression<string> | PlaceholderValue;
  /**
   * The voice to use when generating the audio
   * @default alloy
   */
  voice?: 'alloy' | 'echo' | 'fable' | 'nova' | 'onyx' | 'shimmer' | Expression<string>;
  /**
   * Options
   * @default {}
   */
  options?: {
    /** Response Format
     * @default mp3
     */
    response_format?: 'mp3' | 'opus' | 'aac' | 'flac' | Expression<string>;
    /** Audio Speed
     * @default 1
     */
    speed?: number | Expression<number>;
    /** Put Output in Field
     * @hint The name of the output field to put the binary file data in
     * @default data
     */
    binaryPropertyOutput?: string | Expression<string> | PlaceholderValue;
  };
};

/** Sub-node connections accepted by this node variant. */
export interface LcOpenAiV23AudioGenerateSubnodeConfig {
  tools?: ToolInstance[];
  memory?: MemoryInstance;
}

/** Full node shape: type discriminator, version, credentials and config. */
export type LcOpenAiV23AudioGenerateNode = {
  type: '@n8n/n8n-nodes-langchain.openAi';
  version: 2.3;
  credentials?: Credentials;
  config: NodeConfig<LcOpenAiV23AudioGenerateParams> & { subnodes?: LcOpenAiV23AudioGenerateSubnodeConfig };
};
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI Node - Version 2.3 - Zod Schema
|
|
3
|
+
* Discriminator: resource=audio, operation=transcribe
|
|
4
|
+
*
|
|
5
|
+
* Use .parse() for strict validation or .safeParse() for error handling.
|
|
6
|
+
*
|
|
7
|
+
* Schema helpers (z, expressionSchema, etc.) are passed as parameters
|
|
8
|
+
* by the schema-validator, not imported from external files.
|
|
9
|
+
*
|
|
10
|
+
* @generated - CommonJS JavaScript for runtime loading
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
module.exports = function getSchema({ parameters, z, expressionSchema, stringOrExpression, numberOrExpression, booleanOrExpression, resourceLocatorValueSchema, resourceMapperValueSchema, filterValueSchema, assignmentCollectionValueSchema, iDataObjectSchema, toolInstanceSchema, memoryInstanceSchema }) {
|
|
14
|
+
|
|
15
|
+
// Static subnode schema
|
|
16
|
+
const subnodesSchema = z.object({
|
|
17
|
+
tools: z.array(toolInstanceSchema).optional(),
|
|
18
|
+
memory: memoryInstanceSchema.optional(),
|
|
19
|
+
}).strict();
|
|
20
|
+
|
|
21
|
+
return z.object({
|
|
22
|
+
parameters: z.object({
|
|
23
|
+
resource: z.literal('audio'),
|
|
24
|
+
operation: z.literal('transcribe'),
|
|
25
|
+
binaryPropertyName: stringOrExpression.optional(),
|
|
26
|
+
options: z.object({ language: stringOrExpression.optional(), temperature: numberOrExpression.optional() }).optional(),
|
|
27
|
+
}).optional(),
|
|
28
|
+
subnodes: subnodesSchema.optional(),
|
|
29
|
+
});
|
|
30
|
+
};
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
/**
 * OpenAI Node - Version 2.3
 * Discriminator: resource=audio, operation=transcribe
 */

// NOTE(review): CredentialReference, Expression, PlaceholderValue, NodeConfig,
// ToolInstance and MemoryInstance are not declared in this file — presumably
// ambient types from a shared definitions module; confirm against the package.

/** Credential slots this node variant accepts. */
interface Credentials {
  openAiApi: CredentialReference;
}

/** Transcribes audio into text */
export type LcOpenAiV23AudioTranscribeParams = {
  resource: 'audio';
  operation: 'transcribe';
  /**
   * Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm
   * @hint The name of the input field containing the binary file data to be processed
   * @default data
   */
  binaryPropertyName?: string | Expression<string> | PlaceholderValue;
  /**
   * Options
   * @default {}
   */
  options?: {
    /** The language of the input audio. Supplying the input language in <a href="https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes" target="_blank">ISO-639-1</a> format will improve accuracy and latency.
     */
    language?: string | Expression<string> | PlaceholderValue;
    /** Output Randomness (Temperature)
     * @default 0
     */
    temperature?: number | Expression<number>;
  };
};

/** Sub-node connections accepted by this node variant. */
export interface LcOpenAiV23AudioTranscribeSubnodeConfig {
  tools?: ToolInstance[];
  memory?: MemoryInstance;
}

/** Full node shape: type discriminator, version, credentials and config. */
export type LcOpenAiV23AudioTranscribeNode = {
  type: '@n8n/n8n-nodes-langchain.openAi';
  version: 2.3;
  credentials?: Credentials;
  config: NodeConfig<LcOpenAiV23AudioTranscribeParams> & { subnodes?: LcOpenAiV23AudioTranscribeSubnodeConfig };
};
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI Node - Version 2.3 - Zod Schema
|
|
3
|
+
* Discriminator: resource=audio, operation=translate
|
|
4
|
+
*
|
|
5
|
+
* Use .parse() for strict validation or .safeParse() for error handling.
|
|
6
|
+
*
|
|
7
|
+
* Schema helpers (z, expressionSchema, etc.) are passed as parameters
|
|
8
|
+
* by the schema-validator, not imported from external files.
|
|
9
|
+
*
|
|
10
|
+
* @generated - CommonJS JavaScript for runtime loading
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
module.exports = function getSchema({ parameters, z, expressionSchema, stringOrExpression, numberOrExpression, booleanOrExpression, resourceLocatorValueSchema, resourceMapperValueSchema, filterValueSchema, assignmentCollectionValueSchema, iDataObjectSchema, toolInstanceSchema, memoryInstanceSchema }) {
|
|
14
|
+
|
|
15
|
+
// Static subnode schema
|
|
16
|
+
const subnodesSchema = z.object({
|
|
17
|
+
tools: z.array(toolInstanceSchema).optional(),
|
|
18
|
+
memory: memoryInstanceSchema.optional(),
|
|
19
|
+
}).strict();
|
|
20
|
+
|
|
21
|
+
return z.object({
|
|
22
|
+
parameters: z.object({
|
|
23
|
+
resource: z.literal('audio'),
|
|
24
|
+
operation: z.literal('translate'),
|
|
25
|
+
binaryPropertyName: stringOrExpression.optional(),
|
|
26
|
+
options: z.object({ temperature: numberOrExpression.optional() }).optional(),
|
|
27
|
+
}).optional(),
|
|
28
|
+
subnodes: subnodesSchema.optional(),
|
|
29
|
+
});
|
|
30
|
+
};
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
/**
 * OpenAI Node - Version 2.3
 * Discriminator: resource=audio, operation=translate
 */

// NOTE(review): CredentialReference, Expression, PlaceholderValue, NodeConfig,
// ToolInstance and MemoryInstance are not declared in this file — presumably
// ambient types from a shared definitions module; confirm against the package.

/** Credential slots this node variant accepts. */
interface Credentials {
  openAiApi: CredentialReference;
}

/** Translates audio into text in English */
export type LcOpenAiV23AudioTranslateParams = {
  resource: 'audio';
  operation: 'translate';
  /**
   * Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm
   * @hint The name of the input field containing the binary file data to be processed
   * @default data
   */
  binaryPropertyName?: string | Expression<string> | PlaceholderValue;
  /**
   * Options
   * @default {}
   */
  options?: {
    /** Output Randomness (Temperature)
     * @default 0
     */
    temperature?: number | Expression<number>;
  };
};

/** Sub-node connections accepted by this node variant. */
export interface LcOpenAiV23AudioTranslateSubnodeConfig {
  tools?: ToolInstance[];
  memory?: MemoryInstance;
}

/** Full node shape: type discriminator, version, credentials and config. */
export type LcOpenAiV23AudioTranslateNode = {
  type: '@n8n/n8n-nodes-langchain.openAi';
  version: 2.3;
  credentials?: Credentials;
  config: NodeConfig<LcOpenAiV23AudioTranslateParams> & { subnodes?: LcOpenAiV23AudioTranslateSubnodeConfig };
};
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI - Conversation Resource - Zod Schema Factory
|
|
3
|
+
* Exports a factory that unions all operation schemas for this resource.
|
|
4
|
+
*
|
|
5
|
+
* Schema helpers (z, expressionSchema, etc.) are passed as parameters
|
|
6
|
+
* by the schema-validator, not imported from external files.
|
|
7
|
+
*
|
|
8
|
+
* @generated - CommonJS JavaScript for runtime loading
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const getCreateSchema = require('./operation_create.schema');
|
|
12
|
+
const getGetSchema = require('./operation_get.schema');
|
|
13
|
+
const getRemoveSchema = require('./operation_remove.schema');
|
|
14
|
+
const getUpdateSchema = require('./operation_update.schema');
|
|
15
|
+
|
|
16
|
+
module.exports = function getSchema(helpers) {
|
|
17
|
+
const { parameters, z } = helpers;
|
|
18
|
+
// Apply operation default if not set
|
|
19
|
+
const effectiveParams = parameters.operation === undefined ? { ...parameters, operation: 'generate' } : parameters;
|
|
20
|
+
return z.union([
|
|
21
|
+
getCreateSchema({ ...helpers, parameters: effectiveParams }),
|
|
22
|
+
getGetSchema({ ...helpers, parameters: effectiveParams }),
|
|
23
|
+
getRemoveSchema({ ...helpers, parameters: effectiveParams }),
|
|
24
|
+
getUpdateSchema({ ...helpers, parameters: effectiveParams }),
|
|
25
|
+
]);
|
|
26
|
+
};
|
package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/index.ts
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI - Conversation Resource
|
|
3
|
+
* Re-exports all operation types for this resource.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import type { LcOpenAiV23ConversationCreateNode } from './operation_create';
|
|
7
|
+
import type { LcOpenAiV23ConversationGetNode } from './operation_get';
|
|
8
|
+
import type { LcOpenAiV23ConversationRemoveNode } from './operation_remove';
|
|
9
|
+
import type { LcOpenAiV23ConversationUpdateNode } from './operation_update';
|
|
10
|
+
|
|
11
|
+
export * from './operation_create';
|
|
12
|
+
export * from './operation_get';
|
|
13
|
+
export * from './operation_remove';
|
|
14
|
+
export * from './operation_update';
|
|
15
|
+
|
|
16
|
+
export type LcOpenAiV23ConversationNode =
|
|
17
|
+
| LcOpenAiV23ConversationCreateNode
|
|
18
|
+
| LcOpenAiV23ConversationGetNode
|
|
19
|
+
| LcOpenAiV23ConversationRemoveNode
|
|
20
|
+
| LcOpenAiV23ConversationUpdateNode
|
|
21
|
+
;
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI Node - Version 2.3 - Zod Schema
|
|
3
|
+
* Discriminator: resource=conversation, operation=create
|
|
4
|
+
*
|
|
5
|
+
* Use .parse() for strict validation or .safeParse() for error handling.
|
|
6
|
+
*
|
|
7
|
+
* Schema helpers (z, expressionSchema, etc.) are passed as parameters
|
|
8
|
+
* by the schema-validator, not imported from external files.
|
|
9
|
+
*
|
|
10
|
+
* @generated - CommonJS JavaScript for runtime loading
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
module.exports = function getSchema({ parameters, z, expressionSchema, stringOrExpression, numberOrExpression, booleanOrExpression, resourceLocatorValueSchema, resourceMapperValueSchema, filterValueSchema, assignmentCollectionValueSchema, iDataObjectSchema, toolInstanceSchema, memoryInstanceSchema }) {
|
|
14
|
+
|
|
15
|
+
// Static subnode schema
|
|
16
|
+
const subnodesSchema = z.object({
|
|
17
|
+
tools: z.array(toolInstanceSchema).optional(),
|
|
18
|
+
memory: memoryInstanceSchema.optional(),
|
|
19
|
+
}).strict();
|
|
20
|
+
|
|
21
|
+
return z.object({
|
|
22
|
+
parameters: z.object({
|
|
23
|
+
resource: z.literal('conversation'),
|
|
24
|
+
operation: z.literal('create'),
|
|
25
|
+
messages: z.object({ values: z.array(z.object({ role: z.union([z.literal('user'), z.literal('assistant'), z.literal('system'), expressionSchema]).optional(), content: stringOrExpression.optional() })).optional() }).optional(),
|
|
26
|
+
options: z.object({ metadata: z.union([iDataObjectSchema, z.string()]).optional() }).optional(),
|
|
27
|
+
}).optional(),
|
|
28
|
+
subnodes: subnodesSchema.optional(),
|
|
29
|
+
});
|
|
30
|
+
};
|