@n8n/n8n-nodes-langchain 2.18.2 → 2.18.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. package/dist/node-definitions/.nodes-hash +1 -1
  2. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/index.ts +3 -1
  3. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/index.schema.js +32 -0
  4. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/index.ts +30 -0
  5. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/index.schema.js +22 -0
  6. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/index.ts +15 -0
  7. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/operation_analyze.schema.js +34 -0
  8. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/operation_analyze.ts +69 -0
  9. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/operation_transcribe.schema.js +33 -0
  10. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/operation_transcribe.ts +66 -0
  11. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_document/index.schema.js +18 -0
  12. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_document/index.ts +10 -0
  13. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_document/operation_analyze.schema.js +34 -0
  14. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_document/operation_analyze.ts +69 -0
  15. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file/index.schema.js +18 -0
  16. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file/index.ts +10 -0
  17. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file/operation_upload.schema.js +30 -0
  18. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file/operation_upload.ts +43 -0
  19. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/index.schema.js +26 -0
  20. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/index.ts +21 -0
  21. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_create_store.schema.js +28 -0
  22. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_create_store.ts +30 -0
  23. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_delete_store.schema.js +29 -0
  24. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_delete_store.ts +35 -0
  25. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_list_stores.schema.js +29 -0
  26. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_list_stores.ts +35 -0
  27. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_upload_to_store.schema.js +32 -0
  28. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_upload_to_store.ts +51 -0
  29. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/index.schema.js +24 -0
  30. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/index.ts +18 -0
  31. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_analyze.schema.js +34 -0
  32. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_analyze.ts +69 -0
  33. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_edit.schema.js +31 -0
  34. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_edit.ts +61 -0
  35. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_generate.schema.js +30 -0
  36. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_generate.ts +51 -0
  37. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_text/index.schema.js +18 -0
  38. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_text/index.ts +10 -0
  39. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_text/operation_message.schema.js +33 -0
  40. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_text/operation_message.ts +153 -0
  41. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/index.schema.js +24 -0
  42. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/index.ts +18 -0
  43. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_analyze.schema.js +34 -0
  44. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_analyze.ts +69 -0
  45. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_download.schema.js +29 -0
  46. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_download.ts +41 -0
  47. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_generate.schema.js +31 -0
  48. package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_generate.ts +68 -0
  49. package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleGemini/index.ts +3 -1
  50. package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleGemini/v11.schema.js +25 -0
  51. package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleGemini/v11.ts +69 -0
  52. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/index.ts +3 -1
  53. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_image/operation_edit.schema.js +6 -6
  54. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_image/operation_edit.schema.js +6 -6
  55. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_image/operation_edit.ts +6 -6
  56. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_image/operation_edit.schema.js +6 -6
  57. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_image/operation_edit.ts +6 -6
  58. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/index.schema.js +30 -0
  59. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/index.ts +27 -0
  60. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/index.schema.js +24 -0
  61. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/index.ts +18 -0
  62. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_generate.schema.js +32 -0
  63. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_generate.ts +60 -0
  64. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_transcribe.schema.js +30 -0
  65. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_transcribe.ts +46 -0
  66. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_translate.schema.js +30 -0
  67. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_translate.ts +43 -0
  68. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/index.schema.js +26 -0
  69. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/index.ts +21 -0
  70. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_create.schema.js +30 -0
  71. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_create.ts +54 -0
  72. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_get.schema.js +29 -0
  73. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_get.ts +31 -0
  74. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_remove.schema.js +29 -0
  75. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_remove.ts +31 -0
  76. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_update.schema.js +30 -0
  77. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_update.ts +36 -0
  78. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/index.schema.js +24 -0
  79. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/index.ts +18 -0
  80. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_delete_file.schema.js +29 -0
  81. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_delete_file.ts +33 -0
  82. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_list.schema.js +29 -0
  83. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_list.ts +37 -0
  84. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_upload.schema.js +30 -0
  85. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_upload.ts +43 -0
  86. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/index.schema.js +24 -0
  87. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/index.ts +18 -0
  88. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_analyze.schema.js +35 -0
  89. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_analyze.ts +74 -0
  90. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_edit.schema.js +39 -0
  91. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_edit.ts +117 -0
  92. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_generate.schema.js +31 -0
  93. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_generate.ts +89 -0
  94. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/index.schema.js +22 -0
  95. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/index.ts +15 -0
  96. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_classify.schema.js +30 -0
  97. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_classify.ts +36 -0
  98. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_response.schema.js +34 -0
  99. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_response.ts +317 -0
  100. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/index.schema.js +18 -0
  101. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/index.ts +10 -0
  102. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/operation_generate.schema.js +33 -0
  103. package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/operation_generate.ts +67 -0
  104. package/dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js +65 -48
  105. package/dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js.map +1 -1
  106. package/dist/nodes/vendors/GoogleGemini/actions/descriptions.js +3 -0
  107. package/dist/nodes/vendors/GoogleGemini/actions/descriptions.js.map +1 -1
  108. package/dist/nodes/vendors/GoogleGemini/actions/image/generate.operation.js +9 -1
  109. package/dist/nodes/vendors/GoogleGemini/actions/image/generate.operation.js.map +1 -1
  110. package/dist/nodes/vendors/GoogleGemini/actions/text/message.operation.js +6 -1
  111. package/dist/nodes/vendors/GoogleGemini/actions/text/message.operation.js.map +1 -1
  112. package/dist/nodes/vendors/GoogleGemini/actions/versionDescription.js +2 -2
  113. package/dist/nodes/vendors/GoogleGemini/actions/versionDescription.js.map +1 -1
  114. package/dist/nodes/vendors/GoogleGemini/methods/listSearch.js +3 -0
  115. package/dist/nodes/vendors/GoogleGemini/methods/listSearch.js.map +1 -1
  116. package/dist/nodes/vendors/OpenAi/OpenAi.node.js +2 -1
  117. package/dist/nodes/vendors/OpenAi/OpenAi.node.js.map +1 -1
  118. package/dist/nodes/vendors/OpenAi/methods/listSearch.js +18 -1
  119. package/dist/nodes/vendors/OpenAi/methods/listSearch.js.map +1 -1
  120. package/dist/nodes/vendors/OpenAi/v2/OpenAiV2.node.js +1 -1
  121. package/dist/nodes/vendors/OpenAi/v2/OpenAiV2.node.js.map +1 -1
  122. package/dist/nodes/vendors/OpenAi/v2/actions/image/edit.operation.js +261 -10
  123. package/dist/nodes/vendors/OpenAi/v2/actions/image/edit.operation.js.map +1 -1
  124. package/dist/typecheck.tsbuildinfo +1 -1
  125. package/dist/types/nodes.json +4 -4
  126. package/package.json +6 -6
@@ -0,0 +1,117 @@
1
+ /**
2
+ * OpenAI Node - Version 2.3
3
+ * Discriminator: resource=image, operation=edit
4
+ */
5
+
6
+
7
+ interface Credentials {
8
+ openAiApi: CredentialReference;
9
+ }
10
+
11
+ /** Edit an image */
12
+ export type LcOpenAiV23ImageEditParams = {
13
+ resource: 'image';
14
+ operation: 'edit';
15
+ /**
16
+ * Model
17
+ * @searchListMethod imageGenerateModelSearch
18
+ * @default {"mode":"list","value":"gpt-image-1"}
19
+ */
20
+ modelId?: { __rl: true; mode: 'list' | 'id'; value: string; cachedResultName?: string };
21
+ /**
22
+ * A text description of the desired image(s). Maximum 1000 characters for dall-e-2, 32000 characters for gpt-image-1.
23
+ */
24
+ prompt?: string | Expression<string> | PlaceholderValue;
25
+ /**
26
+ * Add one or more binary fields to include images with your prompt. Each image should be a png, webp, or jpg file less than 50MB. You can provide up to 16 images.
27
+ * @displayOptions.show { /model: ["gpt-image-1"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
28
+ * @default {"values":[{"binaryPropertyName":"data"}]}
29
+ */
30
+ images?: {
31
+ /** Image
32
+ */
33
+ values?: Array<{
34
+ /** The name of the binary field containing the image data
35
+ * @default data
36
+ */
37
+ binaryPropertyName?: string | Expression<string> | PlaceholderValue;
38
+ }>;
39
+ };
40
+ /**
41
+ * Name of the binary property which contains the image. It should be a square png file less than 4MB.
42
+ * @hint The name of the input field containing the binary file data to be processed
43
+ * @displayOptions.show { /model: ["dall-e-2"], /modelId: [{"_cnd":{"includes":"dall-e"}}] }
44
+ * @default data
45
+ */
46
+ binaryPropertyName?: string | Expression<string> | PlaceholderValue;
47
+ /**
48
+ * The number of images to generate. Must be between 1 and 10.
49
+ * @default 1
50
+ */
51
+ n?: number | Expression<number>;
52
+ /**
53
+ * The size of the generated images
54
+ * @default 1024x1024
55
+ */
56
+ size?: '256x256' | '512x512' | '1024x1024' | '1024x1536' | '1536x1024' | 'auto' | Expression<string>;
57
+ /**
58
+ * The quality of the image that will be generated
59
+ * @displayOptions.show { /model: ["gpt-image-1"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
60
+ * @default auto
61
+ */
62
+ quality?: 'auto' | 'high' | 'medium' | 'low' | 'standard' | Expression<string>;
63
+ /**
64
+ * The format in which the generated images are returned. URLs are only valid for 60 minutes after generation.
65
+ * @displayOptions.show { /model: ["dall-e-2"], /modelId: [{"_cnd":{"includes":"dall-e"}}] }
66
+ * @default url
67
+ */
68
+ responseFormat?: 'url' | 'b64_json' | Expression<string>;
69
+ /**
70
+ * The format in which the generated images are returned. Only supported for gpt-image-1.
71
+ * @displayOptions.show { /model: ["gpt-image-1"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
72
+ * @default png
73
+ */
74
+ outputFormat?: 'png' | 'jpeg' | 'webp' | Expression<string>;
75
+ /**
76
+ * The compression level (0-100%) for the generated images. Only supported for gpt-image-1 with webp or jpeg output formats.
77
+ * @displayOptions.show { /model: ["gpt-image-1"], outputFormat: ["webp", "jpeg"], /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
78
+ * @default 100
79
+ */
80
+ outputCompression?: number | Expression<number>;
81
+ /**
82
+ * Options
83
+ * @default {}
84
+ */
85
+ options?: {
86
+ /** A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse
87
+ */
88
+ user?: string | Expression<string> | PlaceholderValue;
89
+ /** Allows to set transparency for the background of the generated image(s). Only supported for gpt-image-1.
90
+ * @displayOptions.show { /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
91
+ * @default auto
92
+ */
93
+ background?: 'auto' | 'transparent' | 'opaque' | Expression<string>;
94
+ /** Control how much effort the model will exert to match the style and features of input images. Only supported for gpt-image-1.
95
+ * @displayOptions.show { /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
96
+ * @default low
97
+ */
98
+ inputFidelity?: 'low' | 'high' | Expression<string>;
99
+ /** Name of the binary property which contains the image. An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. If there are multiple images provided, the mask will be applied on the first image. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.
100
+ * @hint The name of the input field containing the binary file data to be processed
101
+ * @default data
102
+ */
103
+ imageMask?: string | Expression<string> | PlaceholderValue;
104
+ };
105
+ };
106
+
107
+ export interface LcOpenAiV23ImageEditSubnodeConfig {
108
+ tools?: ToolInstance[];
109
+ memory?: MemoryInstance;
110
+ }
111
+
112
+ export type LcOpenAiV23ImageEditNode = {
113
+ type: '@n8n/n8n-nodes-langchain.openAi';
114
+ version: 2.3;
115
+ credentials?: Credentials;
116
+ config: NodeConfig<LcOpenAiV23ImageEditParams> & { subnodes?: LcOpenAiV23ImageEditSubnodeConfig };
117
+ };
@@ -0,0 +1,31 @@
1
+ /**
2
+ * OpenAI Node - Version 2.3 - Zod Schema
3
+ * Discriminator: resource=image, operation=generate
4
+ *
5
+ * Use .parse() for strict validation or .safeParse() for error handling.
6
+ *
7
+ * Schema helpers (z, expressionSchema, etc.) are passed as parameters
8
+ * by the schema-validator, not imported from external files.
9
+ *
10
+ * @generated - CommonJS JavaScript for runtime loading
11
+ */
12
+
13
+ module.exports = function getSchema({ parameters, z, expressionSchema, stringOrExpression, numberOrExpression, booleanOrExpression, resourceLocatorValueSchema, resourceMapperValueSchema, filterValueSchema, assignmentCollectionValueSchema, iDataObjectSchema, toolInstanceSchema, memoryInstanceSchema }) {
14
+
15
+ // Static subnode schema
16
+ const subnodesSchema = z.object({
17
+ tools: z.array(toolInstanceSchema).optional(),
18
+ memory: memoryInstanceSchema.optional(),
19
+ }).strict();
20
+
21
+ return z.object({
22
+ parameters: z.object({
23
+ resource: z.literal('image'),
24
+ operation: z.literal('generate').default('generate'),
25
+ modelId: z.union([z.object({ __rl: z.literal(true), mode: z.union([z.literal('list'), z.literal('id')]), value: z.union([z.string(), z.number()]), cachedResultName: z.string().optional(), cachedResultUrl: z.string().optional() }), expressionSchema]).optional(),
26
+ prompt: stringOrExpression.optional(),
27
+ options: z.object({ n: numberOrExpression.optional(), dalleQuality: z.union([z.literal('hd'), z.literal('standard'), expressionSchema]).optional(), quality: z.union([z.literal('high'), z.literal('medium'), z.literal('low'), expressionSchema]).optional(), size: z.union([z.literal('256x256'), z.literal('512x512'), z.literal('1024x1024'), expressionSchema]).optional(), size: z.union([z.literal('1024x1024'), z.literal('1792x1024'), z.literal('1024x1792'), expressionSchema]).optional(), size: z.union([z.literal('1024x1024'), z.literal('1024x1536'), z.literal('1536x1024'), expressionSchema]).optional(), style: z.union([z.literal('natural'), z.literal('vivid'), expressionSchema]).optional(), returnImageUrls: booleanOrExpression.optional(), binaryPropertyOutput: stringOrExpression.optional() }).optional(),
28
+ }).optional(),
29
+ subnodes: subnodesSchema.optional(),
30
+ });
31
+ };
@@ -0,0 +1,89 @@
1
+ /**
2
+ * OpenAI Node - Version 2.3
3
+ * Discriminator: resource=image, operation=generate
4
+ */
5
+
6
+
7
+ interface Credentials {
8
+ openAiApi: CredentialReference;
9
+ }
10
+
11
+ /** Creates an image from a text prompt */
12
+ export type LcOpenAiV23ImageGenerateParams = {
13
+ resource: 'image';
14
+ operation: 'generate';
15
+ /**
16
+ * Model
17
+ * @searchListMethod imageGenerateModelSearch
18
+ * @default {"mode":"list","value":"gpt-image-1-mini"}
19
+ */
20
+ modelId?: { __rl: true; mode: 'list' | 'id'; value: string; cachedResultName?: string };
21
+ /**
22
+ * A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.
23
+ */
24
+ prompt?: string | Expression<string> | PlaceholderValue;
25
+ /**
26
+ * Options
27
+ * @default {}
28
+ */
29
+ options?: {
30
+ /** Number of images to generate
31
+ * @displayOptions.show { /modelId: ["dall-e-2"] }
32
+ * @default 1
33
+ */
34
+ n?: number | Expression<number>;
35
+ /** The quality of the image that will be generated, HD creates images with finer details and greater consistency across the image
36
+ * @displayOptions.show { /modelId: ["dall-e-3"] }
37
+ * @default standard
38
+ */
39
+ dalleQuality?: 'hd' | 'standard' | Expression<string>;
40
+ /** The quality of the image that will be generated, High creates images with finer details and greater consistency across the image
41
+ * @displayOptions.show { /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
42
+ * @default medium
43
+ */
44
+ quality?: 'high' | 'medium' | 'low' | Expression<string>;
45
+ /** Resolution
46
+ * @displayOptions.show { /modelId: ["dall-e-2"] }
47
+ * @default 1024x1024
48
+ */
49
+ size?: '256x256' | '512x512' | '1024x1024' | Expression<string>;
50
+ /** Resolution
51
+ * @displayOptions.show { /modelId: ["dall-e-3"] }
52
+ * @default 1024x1024
53
+ */
54
+ size?: '1024x1024' | '1792x1024' | '1024x1792' | Expression<string>;
55
+ /** Resolution
56
+ * @displayOptions.show { /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
57
+ * @default 1024x1024
58
+ */
59
+ size?: '1024x1024' | '1024x1536' | '1536x1024' | Expression<string>;
60
+ /** Style
61
+ * @displayOptions.show { /modelId: ["dall-e-3"] }
62
+ * @default vivid
63
+ */
64
+ style?: 'natural' | 'vivid' | Expression<string>;
65
+ /** Whether to return image URL(s) instead of binary file(s)
66
+ * @displayOptions.hide { /modelId: [{"_cnd":{"includes":"gpt-image"}}] }
67
+ * @default false
68
+ */
69
+ returnImageUrls?: boolean | Expression<boolean>;
70
+ /** Put Output in Field
71
+ * @hint The name of the output field to put the binary file data in
72
+ * @displayOptions.show { returnImageUrls: [false] }
73
+ * @default data
74
+ */
75
+ binaryPropertyOutput?: string | Expression<string> | PlaceholderValue;
76
+ };
77
+ };
78
+
79
+ export interface LcOpenAiV23ImageGenerateSubnodeConfig {
80
+ tools?: ToolInstance[];
81
+ memory?: MemoryInstance;
82
+ }
83
+
84
+ export type LcOpenAiV23ImageGenerateNode = {
85
+ type: '@n8n/n8n-nodes-langchain.openAi';
86
+ version: 2.3;
87
+ credentials?: Credentials;
88
+ config: NodeConfig<LcOpenAiV23ImageGenerateParams> & { subnodes?: LcOpenAiV23ImageGenerateSubnodeConfig };
89
+ };
@@ -0,0 +1,22 @@
1
+ /**
2
+ * OpenAI - Text Resource - Zod Schema Factory
3
+ * Exports a factory that unions all operation schemas for this resource.
4
+ *
5
+ * Schema helpers (z, expressionSchema, etc.) are passed as parameters
6
+ * by the schema-validator, not imported from external files.
7
+ *
8
+ * @generated - CommonJS JavaScript for runtime loading
9
+ */
10
+
11
+ const getClassifySchema = require('./operation_classify.schema');
12
+ const getResponseSchema = require('./operation_response.schema');
13
+
14
+ module.exports = function getSchema(helpers) {
15
+ const { parameters, z } = helpers;
16
+ // Apply operation default if not set
17
+ const effectiveParams = parameters.operation === undefined ? { ...parameters, operation: 'generate' } : parameters;
18
+ return z.union([
19
+ getClassifySchema({ ...helpers, parameters: effectiveParams }),
20
+ getResponseSchema({ ...helpers, parameters: effectiveParams }),
21
+ ]);
22
+ };
@@ -0,0 +1,15 @@
1
+ /**
2
+ * OpenAI - Text Resource
3
+ * Re-exports all operation types for this resource.
4
+ */
5
+
6
+ import type { LcOpenAiV23TextClassifyNode } from './operation_classify';
7
+ import type { LcOpenAiV23TextResponseNode } from './operation_response';
8
+
9
+ export * from './operation_classify';
10
+ export * from './operation_response';
11
+
12
+ export type LcOpenAiV23TextNode =
13
+ | LcOpenAiV23TextClassifyNode
14
+ | LcOpenAiV23TextResponseNode
15
+ ;
@@ -0,0 +1,30 @@
1
+ /**
2
+ * OpenAI Node - Version 2.3 - Zod Schema
3
+ * Discriminator: resource=text, operation=classify
4
+ *
5
+ * Use .parse() for strict validation or .safeParse() for error handling.
6
+ *
7
+ * Schema helpers (z, expressionSchema, etc.) are passed as parameters
8
+ * by the schema-validator, not imported from external files.
9
+ *
10
+ * @generated - CommonJS JavaScript for runtime loading
11
+ */
12
+
13
+ module.exports = function getSchema({ parameters, z, expressionSchema, stringOrExpression, numberOrExpression, booleanOrExpression, resourceLocatorValueSchema, resourceMapperValueSchema, filterValueSchema, assignmentCollectionValueSchema, iDataObjectSchema, toolInstanceSchema, memoryInstanceSchema }) {
14
+
15
+ // Static subnode schema
16
+ const subnodesSchema = z.object({
17
+ tools: z.array(toolInstanceSchema).optional(),
18
+ memory: memoryInstanceSchema.optional(),
19
+ }).strict();
20
+
21
+ return z.object({
22
+ parameters: z.object({
23
+ resource: z.literal('text').default('text'),
24
+ operation: z.literal('classify'),
25
+ input: stringOrExpression.optional(),
26
+ simplify: booleanOrExpression.optional(),
27
+ }).optional(),
28
+ subnodes: subnodesSchema.optional(),
29
+ });
30
+ };
@@ -0,0 +1,36 @@
1
+ /**
2
+ * OpenAI Node - Version 2.3
3
+ * Discriminator: resource=text, operation=classify
4
+ */
5
+
6
+
7
+ interface Credentials {
8
+ openAiApi: CredentialReference;
9
+ }
10
+
11
+ /** Check whether content complies with usage policies */
12
+ export type LcOpenAiV23TextClassifyParams = {
13
+ resource: 'text';
14
+ operation: 'classify';
15
+ /**
16
+ * The input text to classify, checking whether it violates the moderation policy
17
+ */
18
+ input?: string | Expression<string> | PlaceholderValue;
19
+ /**
20
+ * Whether to return a simplified version of the response instead of the raw data
21
+ * @default false
22
+ */
23
+ simplify?: boolean | Expression<boolean>;
24
+ };
25
+
26
+ export interface LcOpenAiV23TextClassifySubnodeConfig {
27
+ tools?: ToolInstance[];
28
+ memory?: MemoryInstance;
29
+ }
30
+
31
+ export type LcOpenAiV23TextClassifyNode = {
32
+ type: '@n8n/n8n-nodes-langchain.openAi';
33
+ version: 2.3;
34
+ credentials?: Credentials;
35
+ config: NodeConfig<LcOpenAiV23TextClassifyParams> & { subnodes?: LcOpenAiV23TextClassifySubnodeConfig };
36
+ };
@@ -0,0 +1,34 @@
1
+ /**
2
+ * OpenAI Node - Version 2.3 - Zod Schema
3
+ * Discriminator: resource=text, operation=response
4
+ *
5
+ * Use .parse() for strict validation or .safeParse() for error handling.
6
+ *
7
+ * Schema helpers (z, expressionSchema, etc.) are passed as parameters
8
+ * by the schema-validator, not imported from external files.
9
+ *
10
+ * @generated - CommonJS JavaScript for runtime loading
11
+ */
12
+
13
+ module.exports = function getSchema({ parameters, z, expressionSchema, stringOrExpression, numberOrExpression, booleanOrExpression, resourceLocatorValueSchema, resourceMapperValueSchema, filterValueSchema, assignmentCollectionValueSchema, iDataObjectSchema, resolveSchema, toolInstanceSchema, memoryInstanceSchema }) {
14
+
15
+ // Static subnode schema
16
+ const subnodesSchema = z.object({
17
+ tools: z.array(toolInstanceSchema).optional(),
18
+ memory: memoryInstanceSchema.optional(),
19
+ }).strict();
20
+
21
+ return z.object({
22
+ parameters: z.object({
23
+ resource: z.literal('text').default('text'),
24
+ operation: z.literal('response'),
25
+ modelId: z.union([z.object({ __rl: z.literal(true), mode: z.union([z.literal('list'), z.literal('id')]), value: z.union([z.string(), z.number()]), cachedResultName: z.string().optional(), cachedResultUrl: z.string().optional() }), expressionSchema]).optional(),
26
+ responses: z.object({ values: z.array(z.object({ type: z.union([z.literal('text'), z.literal('image'), z.literal('file'), expressionSchema]).optional(), role: z.union([z.literal('user'), z.literal('assistant'), z.literal('system'), expressionSchema]).optional(), content: stringOrExpression.optional(), imageType: z.union([z.literal('url'), z.literal('fileId'), z.literal('base64'), expressionSchema]).optional(), imageUrl: stringOrExpression.optional(), binaryPropertyName: stringOrExpression.optional(), fileId: stringOrExpression.optional(), imageDetail: z.union([z.literal('auto'), z.literal('low'), z.literal('high'), expressionSchema]).optional(), fileType: z.union([z.literal('url'), z.literal('fileId'), z.literal('base64'), expressionSchema]).optional(), fileUrl: stringOrExpression.optional(), fileId: stringOrExpression.optional(), binaryPropertyName: stringOrExpression.optional(), fileName: stringOrExpression.optional() })).optional() }).optional(),
27
+ simplify: booleanOrExpression.optional(),
28
+ hideTools: resolveSchema({ parameters, schema: z.unknown(), required: false, displayOptions: {"show":{"modelId":["gpt-3.5-turbo-16k-0613","dall-e-3","text-embedding-3-large","dall-e-2","whisper-1","tts-1-hd-1106","tts-1-hd","gpt-4-0314","text-embedding-3-small","gpt-4-32k-0314","gpt-3.5-turbo-0301","gpt-4-vision-preview","gpt-3.5-turbo-16k","gpt-3.5-turbo-instruct-0914","tts-1","davinci-002","gpt-3.5-turbo-instruct","babbage-002","tts-1-1106","text-embedding-ada-002"]}}, defaults: {"modelId":{"mode":"list","value":""}} }),
29
+ builtInTools: z.object({ webSearch: z.unknown().optional(), fileSearch: z.unknown().optional(), codeInterpreter: booleanOrExpression.optional() }).optional(),
30
+ options: z.object({ conversationId: stringOrExpression.optional(), include: z.array(z.union([z.literal('code_interpreter_call.outputs'), z.literal('computer_call_output.output.image_url'), z.literal('file_search_call.results'), z.literal('message.input_image.image_url'), z.literal('message.output_text.logprobs'), z.literal('reasoning.encrypted_content'), z.literal('web_search_call.action.sources')])).optional(), instructions: stringOrExpression.optional(), maxTokens: numberOrExpression.optional(), maxToolsIterations: numberOrExpression.optional(), maxToolCalls: numberOrExpression.optional(), metadata: z.union([iDataObjectSchema, z.string()]).optional(), parallelToolCalls: booleanOrExpression.optional(), previousResponseId: stringOrExpression.optional(), promptConfig: z.unknown().optional(), promptCacheKey: stringOrExpression.optional(), reasoning: z.unknown().optional(), safetyIdentifier: stringOrExpression.optional(), serviceTier: z.union([z.literal('auto'), z.literal('flex'), z.literal('default'), z.literal('priority'), expressionSchema]).optional(), store: booleanOrExpression.optional(), textFormat: z.unknown().optional(), topLogprobs: numberOrExpression.optional(), temperature: numberOrExpression.optional(), topP: numberOrExpression.optional(), truncation: booleanOrExpression.optional(), backgroundMode: z.unknown().optional() }).optional(),
31
+ }).optional(),
32
+ subnodes: subnodesSchema.optional(),
33
+ });
34
+ };