@inductiv/node-red-openai-api 1.0.1 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -2
- package/lib.js +96 -70
- package/locales/de-DE/node.json +1 -0
- package/locales/en-US/node.json +1 -0
- package/locales/ja/node.json +5 -0
- package/locales/zh-CN/node.json +1 -0
- package/node.html +2770 -2702
- package/node.js +7 -6
- package/package.json +2 -2
package/README.md
CHANGED
|
@@ -16,6 +16,12 @@ Welcome to _@inductiv/node-red-openai-api_, a versatile and configurable Node-RE
|
|
|
16
16
|
- **Configurable and Flexible**: Adapt to a wide range of project requirements, making it easy to integrate AI into your IoT solutions.
|
|
17
17
|
- **Powerful Combinations**: Utilize Node-RED's diverse nodes to build complex, AI-driven IoT workflows with ease.
|
|
18
18
|
|
|
19
|
+
## Release Notes (v1.1.0)
|
|
20
|
+
|
|
21
|
+
- **Fixed a bug** that prevented custom property paths from being honored. [#22](https://github.com/allanbunch/node-red-openai-api/issues/22)
|
|
22
|
+
- Added the ```vectorStores.fileBatches.uploadAndPoll``` endpoint to the **Vector Store File Batches** endpoint group.
|
|
23
|
+
- Updated the node's documentation panel.
|
|
24
|
+
|
|
19
25
|
## What's New in Version 1.0
|
|
20
26
|
|
|
21
27
|
Version 1.0 of the **node-red-openai-api** node brings significant enhancements and new possibilities, including:
|
|
@@ -52,10 +58,10 @@ npm i @inductiv/node-red-openai-api
|
|
|
52
58
|
|
|
53
59
|
## Usage
|
|
54
60
|
|
|
55
|
-
After installation, find your
|
|
61
|
+
After installation, find your node in the **AI** palette category labeled "OpenAI API". Here's how you can start integrating AI into your IoT projects:
|
|
56
62
|
|
|
57
63
|
1. Configure the node with your AI platform's API key (if required).
|
|
58
|
-
2.
|
|
64
|
+
2. Send [OpenAI documented](https://platform.openai.com/docs/api-reference/) API service configuration parameters to the node using the default `msg.payload` property, or configure your desired incoming object property reference on the node itself.
|
|
59
65
|
3. Explore the [examples](./examples/) directory for sample implementations.
|
|
60
66
|
|
|
61
67
|
## Contribute
|
package/lib.js
CHANGED
|
@@ -18,7 +18,7 @@ let OpenaiApi = (function () {
|
|
|
18
18
|
// Begin Vector Store File functions
|
|
19
19
|
async createVectorStoreFile(parameters) {
|
|
20
20
|
const openai = new OpenAI(this.clientParams);
|
|
21
|
-
const { vector_store_id, ...params } = parameters.
|
|
21
|
+
const { vector_store_id, ...params } = parameters.payload;
|
|
22
22
|
const response = await openai.beta.vectorStores.files.create(
|
|
23
23
|
vector_store_id,
|
|
24
24
|
params,
|
|
@@ -31,7 +31,7 @@ let OpenaiApi = (function () {
|
|
|
31
31
|
/* Returns a list of vector store files. */
|
|
32
32
|
|
|
33
33
|
const openai = new OpenAI(this.clientParams);
|
|
34
|
-
const { vector_store_id, ...params } = parameters.
|
|
34
|
+
const { vector_store_id, ...params } = parameters.payload;
|
|
35
35
|
const list = await openai.beta.vectorStores.files.list(
|
|
36
36
|
vector_store_id,
|
|
37
37
|
params,
|
|
@@ -44,7 +44,7 @@ let OpenaiApi = (function () {
|
|
|
44
44
|
/* Retrieves a vector store file. */
|
|
45
45
|
|
|
46
46
|
const openai = new OpenAI(this.clientParams);
|
|
47
|
-
const { vector_store_id, file_id } = parameters.
|
|
47
|
+
const { vector_store_id, file_id } = parameters.payload;
|
|
48
48
|
const response = openai.beta.vectorStores.files.retrieve(
|
|
49
49
|
vector_store_id,
|
|
50
50
|
file_id,
|
|
@@ -57,7 +57,7 @@ let OpenaiApi = (function () {
|
|
|
57
57
|
/* Removes a file from the vector store. */
|
|
58
58
|
|
|
59
59
|
const openai = new OpenAI(this.clientParams);
|
|
60
|
-
const { vector_store_id, file_id, ...params } = parameters.
|
|
60
|
+
const { vector_store_id, file_id, ...params } = parameters.payload;
|
|
61
61
|
const response = openai.beta.vectorStores.files.del(
|
|
62
62
|
vector_store_id,
|
|
63
63
|
file_id,
|
|
@@ -69,9 +69,11 @@ let OpenaiApi = (function () {
|
|
|
69
69
|
|
|
70
70
|
// End Vector Store File functions
|
|
71
71
|
|
|
72
|
+
// >>> Begin File Batch functions
|
|
73
|
+
|
|
72
74
|
async createVectorStoreFileBatch(parameters) {
|
|
73
75
|
const openai = new OpenAI(this.clientParams);
|
|
74
|
-
const { vector_store_id, ...params } = parameters.
|
|
76
|
+
const { vector_store_id, ...params } = parameters.payload;
|
|
75
77
|
const response = await openai.beta.vectorStores.fileBatches.create(
|
|
76
78
|
vector_store_id,
|
|
77
79
|
params,
|
|
@@ -82,7 +84,7 @@ let OpenaiApi = (function () {
|
|
|
82
84
|
|
|
83
85
|
async retrieveVectorStoreFileBatch(parameters) {
|
|
84
86
|
const openai = new OpenAI(this.clientParams);
|
|
85
|
-
const { vector_store_id, batch_id, ...params } = parameters.
|
|
87
|
+
const { vector_store_id, batch_id, ...params } = parameters.payload;
|
|
86
88
|
const response = await openai.beta.vectorStores.fileBatches.retrieve(
|
|
87
89
|
vector_store_id,
|
|
88
90
|
batch_id,
|
|
@@ -94,7 +96,7 @@ let OpenaiApi = (function () {
|
|
|
94
96
|
|
|
95
97
|
async cancelVectorStoreFileBatch(parameters) {
|
|
96
98
|
const openai = new OpenAI(this.clientParams);
|
|
97
|
-
const { vector_store_id, batch_id, ...params } = parameters.
|
|
99
|
+
const { vector_store_id, batch_id, ...params } = parameters.payload;
|
|
98
100
|
const response = await openai.beta.vectorStores.fileBatches.retrieve(
|
|
99
101
|
vector_store_id,
|
|
100
102
|
batch_id,
|
|
@@ -106,7 +108,7 @@ let OpenaiApi = (function () {
|
|
|
106
108
|
|
|
107
109
|
async listVectorStoreBatchFiles(parameters) {
|
|
108
110
|
const openai = new OpenAI(this.clientParams);
|
|
109
|
-
const { vector_store_id, batch_id, ...params } = parameters.
|
|
111
|
+
const { vector_store_id, batch_id, ...params } = parameters.payload;
|
|
110
112
|
const list = await openai.beta.vectorStores.fileBatches.listFiles(
|
|
111
113
|
vector_store_id,
|
|
112
114
|
batch_id,
|
|
@@ -117,10 +119,39 @@ let OpenaiApi = (function () {
|
|
|
117
119
|
return batchFiles;
|
|
118
120
|
}
|
|
119
121
|
|
|
122
|
+
async uploadAndPollVectorStoreFileBatch(parameters) {
|
|
123
|
+
const openai = new OpenAI(this.clientParams);
|
|
124
|
+
const { vector_store_id, files, file_ids, ...params } = parameters.payload;
|
|
125
|
+
|
|
126
|
+
if (!files || !Array.isArray(files)) {
|
|
127
|
+
throw new Error("Files is not defined or not an array");
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
// Validate file paths
|
|
131
|
+
files.forEach(path => {
|
|
132
|
+
if (!fs.existsSync(path)) {
|
|
133
|
+
throw new Error(`File does not exist: ${path}`);
|
|
134
|
+
}
|
|
135
|
+
});
|
|
136
|
+
|
|
137
|
+
const fileStreams = files.map(path => fs.createReadStream(path));
|
|
138
|
+
|
|
139
|
+
const response = await openai.beta.vectorStores.fileBatches.uploadAndPoll(
|
|
140
|
+
vector_store_id,
|
|
141
|
+
{files: fileStreams, fileIds: file_ids},
|
|
142
|
+
params
|
|
143
|
+
);
|
|
144
|
+
|
|
145
|
+
return response;
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
// <<< End File Batch Functions
|
|
150
|
+
|
|
120
151
|
async createVectorStore(parameters) {
|
|
121
152
|
const openai = new OpenAI(this.clientParams);
|
|
122
153
|
const response = await openai.beta.vectorStores.create(
|
|
123
|
-
parameters.
|
|
154
|
+
parameters.payload,
|
|
124
155
|
);
|
|
125
156
|
|
|
126
157
|
return response;
|
|
@@ -128,7 +159,7 @@ let OpenaiApi = (function () {
|
|
|
128
159
|
|
|
129
160
|
async listVectorStores(parameters) {
|
|
130
161
|
const openai = new OpenAI(this.clientParams);
|
|
131
|
-
const list = await openai.beta.vectorStores.list(parameters.
|
|
162
|
+
const list = await openai.beta.vectorStores.list(parameters.payload);
|
|
132
163
|
const vectorStores = [...list.data];
|
|
133
164
|
|
|
134
165
|
return vectorStores;
|
|
@@ -136,7 +167,7 @@ let OpenaiApi = (function () {
|
|
|
136
167
|
|
|
137
168
|
async retrieveVectorStore(parameters) {
|
|
138
169
|
const openai = new OpenAI(this.clientParams);
|
|
139
|
-
const { vector_store_id, ...params } = parameters.
|
|
170
|
+
const { vector_store_id, ...params } = parameters.payload;
|
|
140
171
|
const response = await openai.beta.vectorStores.retrieve(
|
|
141
172
|
vector_store_id,
|
|
142
173
|
params,
|
|
@@ -147,7 +178,7 @@ let OpenaiApi = (function () {
|
|
|
147
178
|
|
|
148
179
|
async modifyVectorStore(parameters) {
|
|
149
180
|
const openai = new OpenAI(this.clientParams);
|
|
150
|
-
const { vector_store_id, ...params } = parameters.
|
|
181
|
+
const { vector_store_id, ...params } = parameters.payload;
|
|
151
182
|
const response = await openai.beta.vectorStores.update(
|
|
152
183
|
vector_store_id,
|
|
153
184
|
params,
|
|
@@ -158,7 +189,7 @@ let OpenaiApi = (function () {
|
|
|
158
189
|
|
|
159
190
|
async deleteVectorStore(parameters) {
|
|
160
191
|
const openai = new OpenAI(this.clientParams);
|
|
161
|
-
const { vector_store_id, ...params } = parameters.
|
|
192
|
+
const { vector_store_id, ...params } = parameters.payload;
|
|
162
193
|
const response = await openai.beta.vectorStores.del(
|
|
163
194
|
vector_store_id,
|
|
164
195
|
params,
|
|
@@ -169,14 +200,14 @@ let OpenaiApi = (function () {
|
|
|
169
200
|
|
|
170
201
|
async createBatch(parameters) {
|
|
171
202
|
const openai = new OpenAI(this.clientParams);
|
|
172
|
-
const response = await openai.batches.create(parameters.
|
|
203
|
+
const response = await openai.batches.create(parameters.payload);
|
|
173
204
|
|
|
174
205
|
return response;
|
|
175
206
|
}
|
|
176
207
|
|
|
177
208
|
async retrieveBatch(parameters) {
|
|
178
209
|
const openai = new OpenAI(this.clientParams);
|
|
179
|
-
const { batch_id, ...params } = parameters.
|
|
210
|
+
const { batch_id, ...params } = parameters.payload;
|
|
180
211
|
const response = await openai.batches.retrieve(batch_id, params);
|
|
181
212
|
|
|
182
213
|
return response;
|
|
@@ -184,7 +215,7 @@ let OpenaiApi = (function () {
|
|
|
184
215
|
|
|
185
216
|
async listBatch(parameters) {
|
|
186
217
|
const openai = new OpenAI(this.clientParams);
|
|
187
|
-
const list = await openai.batches.list(parameters.
|
|
218
|
+
const list = await openai.batches.list(parameters.payload);
|
|
188
219
|
const batches = [...list.data];
|
|
189
220
|
|
|
190
221
|
return batches;
|
|
@@ -192,7 +223,7 @@ let OpenaiApi = (function () {
|
|
|
192
223
|
|
|
193
224
|
async cancelBatch(parameters) {
|
|
194
225
|
const openai = new OpenAI(this.clientParams);
|
|
195
|
-
const { batch_id, ...params } = parameters.
|
|
226
|
+
const { batch_id, ...params } = parameters.payload;
|
|
196
227
|
const response = await openai.batches.cancel(batch_id, params);
|
|
197
228
|
|
|
198
229
|
return response;
|
|
@@ -201,9 +232,9 @@ let OpenaiApi = (function () {
|
|
|
201
232
|
async createChatCompletion(parameters) {
|
|
202
233
|
const { _node, ...params } = parameters;
|
|
203
234
|
const openai = new OpenAI(this.clientParams);
|
|
204
|
-
const response = await openai.chat.completions.create(params.
|
|
235
|
+
const response = await openai.chat.completions.create(params.payload);
|
|
205
236
|
|
|
206
|
-
if (params.
|
|
237
|
+
if (params.payload.stream) {
|
|
207
238
|
_node.status({
|
|
208
239
|
fill: "green",
|
|
209
240
|
shape: "dot",
|
|
@@ -225,14 +256,14 @@ let OpenaiApi = (function () {
|
|
|
225
256
|
|
|
226
257
|
async createImage(parameters) {
|
|
227
258
|
const openai = new OpenAI(this.clientParams);
|
|
228
|
-
const response = await openai.images.generate(parameters.
|
|
259
|
+
const response = await openai.images.generate(parameters.payload);
|
|
229
260
|
|
|
230
261
|
return response.data;
|
|
231
262
|
}
|
|
232
263
|
|
|
233
264
|
async createImageEdit(parameters) {
|
|
234
265
|
const openai = new OpenAI(this.clientParams);
|
|
235
|
-
let { image, mask, ...params } = parameters.
|
|
266
|
+
let { image, mask, ...params } = parameters.payload;
|
|
236
267
|
|
|
237
268
|
params.image = fs.createReadStream(image);
|
|
238
269
|
if (mask) {
|
|
@@ -245,7 +276,7 @@ let OpenaiApi = (function () {
|
|
|
245
276
|
|
|
246
277
|
async createImageVariation(parameters) {
|
|
247
278
|
const openai = new OpenAI(this.clientParams);
|
|
248
|
-
let { image, ...params } = parameters.
|
|
279
|
+
let { image, ...params } = parameters.payload;
|
|
249
280
|
|
|
250
281
|
params.image = fs.createReadStream(image);
|
|
251
282
|
const response = await openai.images.createVariation(params);
|
|
@@ -255,14 +286,14 @@ let OpenaiApi = (function () {
|
|
|
255
286
|
|
|
256
287
|
async createEmbedding(parameters) {
|
|
257
288
|
const openai = new OpenAI(this.clientParams);
|
|
258
|
-
const response = await openai.embeddings.create(parameters.
|
|
289
|
+
const response = await openai.embeddings.create(parameters.payload);
|
|
259
290
|
|
|
260
291
|
return response.data;
|
|
261
292
|
}
|
|
262
293
|
|
|
263
294
|
async createSpeech(parameters) {
|
|
264
295
|
const openai = new OpenAI(this.clientParams);
|
|
265
|
-
const audio = await openai.audio.speech.create(parameters.
|
|
296
|
+
const audio = await openai.audio.speech.create(parameters.payload);
|
|
266
297
|
const response = Buffer.from(await audio.arrayBuffer());
|
|
267
298
|
|
|
268
299
|
return response;
|
|
@@ -270,7 +301,7 @@ let OpenaiApi = (function () {
|
|
|
270
301
|
|
|
271
302
|
async createTranscription(parameters) {
|
|
272
303
|
const openai = new OpenAI(this.clientParams);
|
|
273
|
-
let { file, ...params } = parameters.
|
|
304
|
+
let { file, ...params } = parameters.payload;
|
|
274
305
|
|
|
275
306
|
params.file = fs.createReadStream(file);
|
|
276
307
|
|
|
@@ -281,7 +312,7 @@ let OpenaiApi = (function () {
|
|
|
281
312
|
|
|
282
313
|
async createTranslation(parameters) {
|
|
283
314
|
const openai = new OpenAI(this.clientParams);
|
|
284
|
-
let { file, ...params } = parameters.
|
|
315
|
+
let { file, ...params } = parameters.payload;
|
|
285
316
|
|
|
286
317
|
params.file = fs.createReadStream(file);
|
|
287
318
|
|
|
@@ -292,14 +323,14 @@ let OpenaiApi = (function () {
|
|
|
292
323
|
|
|
293
324
|
async listFiles(parameters) {
|
|
294
325
|
const openai = new OpenAI(this.clientParams);
|
|
295
|
-
const list = await openai.files.list(parameters.
|
|
326
|
+
const list = await openai.files.list(parameters.payload);
|
|
296
327
|
|
|
297
328
|
return [...list.data];
|
|
298
329
|
}
|
|
299
330
|
|
|
300
331
|
async createFile(parameters) {
|
|
301
332
|
const openai = new OpenAI(this.clientParams);
|
|
302
|
-
let { file, ...params } = parameters.
|
|
333
|
+
let { file, ...params } = parameters.payload;
|
|
303
334
|
|
|
304
335
|
params.file = fs.createReadStream(file);
|
|
305
336
|
|
|
@@ -310,7 +341,7 @@ let OpenaiApi = (function () {
|
|
|
310
341
|
|
|
311
342
|
async deleteFile(parameters) {
|
|
312
343
|
const openai = new OpenAI(this.clientParams);
|
|
313
|
-
const { file_id, ...params } = parameters.
|
|
344
|
+
const { file_id, ...params } = parameters.payload;
|
|
314
345
|
const response = await openai.files.del(file_id, params);
|
|
315
346
|
|
|
316
347
|
return response;
|
|
@@ -318,7 +349,7 @@ let OpenaiApi = (function () {
|
|
|
318
349
|
|
|
319
350
|
async retrieveFile(parameters) {
|
|
320
351
|
const openai = new OpenAI(this.clientParams);
|
|
321
|
-
const { file_id, ...params } = parameters.
|
|
352
|
+
const { file_id, ...params } = parameters.payload;
|
|
322
353
|
const response = await openai.files.retrieve(file_id, params);
|
|
323
354
|
|
|
324
355
|
return response;
|
|
@@ -326,7 +357,7 @@ let OpenaiApi = (function () {
|
|
|
326
357
|
|
|
327
358
|
async downloadFile(parameters) {
|
|
328
359
|
const openai = new OpenAI(this.clientParams);
|
|
329
|
-
const { file_id, ...params } = parameters.
|
|
360
|
+
const { file_id, ...params } = parameters.payload;
|
|
330
361
|
const response = await openai.files.content(file_id, params);
|
|
331
362
|
|
|
332
363
|
return response;
|
|
@@ -335,7 +366,7 @@ let OpenaiApi = (function () {
|
|
|
335
366
|
async createFineTuningJob(parameters) {
|
|
336
367
|
const openai = new OpenAI(this.clientParams);
|
|
337
368
|
const response = await openai.fineTuning.jobs.create(
|
|
338
|
-
parameters.
|
|
369
|
+
parameters.payload,
|
|
339
370
|
);
|
|
340
371
|
|
|
341
372
|
return response;
|
|
@@ -343,14 +374,14 @@ let OpenaiApi = (function () {
|
|
|
343
374
|
|
|
344
375
|
async listPaginatedFineTuningJobs(parameters) {
|
|
345
376
|
const openai = new OpenAI(this.clientParams);
|
|
346
|
-
const list = await openai.fineTuning.jobs.list(parameters.
|
|
377
|
+
const list = await openai.fineTuning.jobs.list(parameters.payload);
|
|
347
378
|
|
|
348
379
|
return [...list.data];
|
|
349
380
|
}
|
|
350
381
|
|
|
351
382
|
async retrieveFineTuningJob(parameters) {
|
|
352
383
|
const openai = new OpenAI(this.clientParams);
|
|
353
|
-
const { fine_tuning_job_id, ...params } = parameters.
|
|
384
|
+
const { fine_tuning_job_id, ...params } = parameters.payload;
|
|
354
385
|
const response = await openai.fineTuning.jobs.retrieve(
|
|
355
386
|
fine_tuning_job_id,
|
|
356
387
|
params,
|
|
@@ -361,7 +392,7 @@ let OpenaiApi = (function () {
|
|
|
361
392
|
|
|
362
393
|
async listFineTuningEvents(parameters) {
|
|
363
394
|
const openai = new OpenAI(this.clientParams);
|
|
364
|
-
const { fine_tuning_job_id, ...params } = parameters.
|
|
395
|
+
const { fine_tuning_job_id, ...params } = parameters.payload;
|
|
365
396
|
const list = await openai.fineTuning.jobs.listEvents(
|
|
366
397
|
fine_tuning_job_id,
|
|
367
398
|
params,
|
|
@@ -372,7 +403,7 @@ let OpenaiApi = (function () {
|
|
|
372
403
|
|
|
373
404
|
async cancelFineTuningJob(parameters) {
|
|
374
405
|
const openai = new OpenAI(this.clientParams);
|
|
375
|
-
const { fine_tuning_job_id, ...params } = parameters.
|
|
406
|
+
const { fine_tuning_job_id, ...params } = parameters.payload;
|
|
376
407
|
const response = await openai.fineTuning.jobs.cancel(
|
|
377
408
|
fine_tuning_job_id,
|
|
378
409
|
params,
|
|
@@ -383,14 +414,14 @@ let OpenaiApi = (function () {
|
|
|
383
414
|
|
|
384
415
|
async listModels(parameters) {
|
|
385
416
|
const openai = new OpenAI(this.clientParams);
|
|
386
|
-
const list = await openai.models.list();
|
|
417
|
+
const list = await openai.models.list(parameters.payload);
|
|
387
418
|
|
|
388
419
|
return [...list.data];
|
|
389
420
|
}
|
|
390
421
|
|
|
391
422
|
async retrieveModel(parameters) {
|
|
392
423
|
const openai = new OpenAI(this.clientParams);
|
|
393
|
-
const model = parameters.
|
|
424
|
+
const model = parameters.payload.model;
|
|
394
425
|
const response = await openai.models.retrieve(model);
|
|
395
426
|
|
|
396
427
|
return response;
|
|
@@ -398,7 +429,7 @@ let OpenaiApi = (function () {
|
|
|
398
429
|
|
|
399
430
|
async deleteModel(parameters) {
|
|
400
431
|
const openai = new OpenAI(this.clientParams);
|
|
401
|
-
const model = parameters.
|
|
432
|
+
const model = parameters.payload.model;
|
|
402
433
|
const response = await openai.models.del(model);
|
|
403
434
|
|
|
404
435
|
return response;
|
|
@@ -406,13 +437,13 @@ let OpenaiApi = (function () {
|
|
|
406
437
|
|
|
407
438
|
async createModeration(parameters) {
|
|
408
439
|
const openai = new OpenAI(this.clientParams);
|
|
409
|
-
const response = await openai.moderations.create(parameters.
|
|
440
|
+
const response = await openai.moderations.create(parameters.payload);
|
|
410
441
|
return response;
|
|
411
442
|
}
|
|
412
443
|
|
|
413
444
|
async listAssistants(parameters) {
|
|
414
445
|
const openai = new OpenAI(this.clientParams);
|
|
415
|
-
const list = await openai.beta.assistants.list(parameters.
|
|
446
|
+
const list = await openai.beta.assistants.list(parameters.payload);
|
|
416
447
|
|
|
417
448
|
return [...list.data];
|
|
418
449
|
}
|
|
@@ -420,7 +451,7 @@ let OpenaiApi = (function () {
|
|
|
420
451
|
async createAssistant(parameters) {
|
|
421
452
|
const openai = new OpenAI(this.clientParams);
|
|
422
453
|
const response = await openai.beta.assistants.create(
|
|
423
|
-
parameters.
|
|
454
|
+
parameters.payload,
|
|
424
455
|
);
|
|
425
456
|
|
|
426
457
|
return response;
|
|
@@ -428,7 +459,7 @@ let OpenaiApi = (function () {
|
|
|
428
459
|
|
|
429
460
|
async getAssistant(parameters) {
|
|
430
461
|
const openai = new OpenAI(this.clientParams);
|
|
431
|
-
const { assistant_id, ...params } = parameters.
|
|
462
|
+
const { assistant_id, ...params } = parameters.payload;
|
|
432
463
|
const response = await openai.beta.assistants.retrieve(
|
|
433
464
|
assistant_id,
|
|
434
465
|
params,
|
|
@@ -439,7 +470,7 @@ let OpenaiApi = (function () {
|
|
|
439
470
|
|
|
440
471
|
async modifyAssistant(parameters) {
|
|
441
472
|
const openai = new OpenAI(this.clientParams);
|
|
442
|
-
const { assistant_id, ...params } = parameters.
|
|
473
|
+
const { assistant_id, ...params } = parameters.payload;
|
|
443
474
|
const response = await openai.beta.assistants.update(
|
|
444
475
|
assistant_id,
|
|
445
476
|
params,
|
|
@@ -450,7 +481,7 @@ let OpenaiApi = (function () {
|
|
|
450
481
|
|
|
451
482
|
async deleteAssistant(parameters) {
|
|
452
483
|
const openai = new OpenAI(this.clientParams);
|
|
453
|
-
const { assistant_id, ...params } = parameters.
|
|
484
|
+
const { assistant_id, ...params } = parameters.payload;
|
|
454
485
|
const response = await openai.beta.assistants.del(assistant_id, params);
|
|
455
486
|
|
|
456
487
|
return response;
|
|
@@ -458,14 +489,14 @@ let OpenaiApi = (function () {
|
|
|
458
489
|
|
|
459
490
|
async createThread(parameters) {
|
|
460
491
|
const openai = new OpenAI(this.clientParams);
|
|
461
|
-
const response = await openai.beta.threads.create(parameters.
|
|
492
|
+
const response = await openai.beta.threads.create(parameters.payload);
|
|
462
493
|
|
|
463
494
|
return response;
|
|
464
495
|
}
|
|
465
496
|
|
|
466
497
|
async getThread(parameters) {
|
|
467
498
|
const openai = new OpenAI(this.clientParams);
|
|
468
|
-
const { thread_id, ...params } = parameters.
|
|
499
|
+
const { thread_id, ...params } = parameters.payload;
|
|
469
500
|
const response = await openai.beta.threads.retrieve(thread_id, params);
|
|
470
501
|
|
|
471
502
|
return response;
|
|
@@ -473,7 +504,7 @@ let OpenaiApi = (function () {
|
|
|
473
504
|
|
|
474
505
|
async modifyThread(parameters) {
|
|
475
506
|
const openai = new OpenAI(this.clientParams);
|
|
476
|
-
const { thread_id, ...params } = parameters.
|
|
507
|
+
const { thread_id, ...params } = parameters.payload;
|
|
477
508
|
const response = await openai.beta.threads.update(thread_id, params);
|
|
478
509
|
|
|
479
510
|
return response;
|
|
@@ -481,7 +512,7 @@ let OpenaiApi = (function () {
|
|
|
481
512
|
|
|
482
513
|
async deleteThread(parameters) {
|
|
483
514
|
const openai = new OpenAI(this.clientParams);
|
|
484
|
-
const { thread_id, ...params } = parameters.
|
|
515
|
+
const { thread_id, ...params } = parameters.payload;
|
|
485
516
|
const response = await openai.beta.threads.del(thread_id, params);
|
|
486
517
|
|
|
487
518
|
return response;
|
|
@@ -489,7 +520,7 @@ let OpenaiApi = (function () {
|
|
|
489
520
|
|
|
490
521
|
async listMessages(parameters) {
|
|
491
522
|
const openai = new OpenAI(this.clientParams);
|
|
492
|
-
const { thread_id, ...params } = parameters.
|
|
523
|
+
const { thread_id, ...params } = parameters.payload;
|
|
493
524
|
const list = await openai.beta.threads.messages.list(thread_id, params);
|
|
494
525
|
|
|
495
526
|
return [...list.data];
|
|
@@ -497,7 +528,7 @@ let OpenaiApi = (function () {
|
|
|
497
528
|
|
|
498
529
|
async createMessage(parameters) {
|
|
499
530
|
const openai = new OpenAI(this.clientParams);
|
|
500
|
-
const { thread_id, ...params } = parameters.
|
|
531
|
+
const { thread_id, ...params } = parameters.payload;
|
|
501
532
|
const response = await openai.beta.threads.messages.create(
|
|
502
533
|
thread_id,
|
|
503
534
|
params,
|
|
@@ -508,7 +539,7 @@ let OpenaiApi = (function () {
|
|
|
508
539
|
|
|
509
540
|
async getMessage(parameters) {
|
|
510
541
|
const openai = new OpenAI(this.clientParams);
|
|
511
|
-
const { thread_id, message_id, ...params } = parameters.
|
|
542
|
+
const { thread_id, message_id, ...params } = parameters.payload;
|
|
512
543
|
const response = await openai.beta.threads.messages.retrieve(
|
|
513
544
|
thread_id,
|
|
514
545
|
message_id,
|
|
@@ -520,7 +551,7 @@ let OpenaiApi = (function () {
|
|
|
520
551
|
|
|
521
552
|
async modifyMessage(parameters) {
|
|
522
553
|
const openai = new OpenAI(this.clientParams);
|
|
523
|
-
const { thread_id, message_id, ...params } = parameters.
|
|
554
|
+
const { thread_id, message_id, ...params } = parameters.payload;
|
|
524
555
|
const response = await openai.beta.threads.messages.update(
|
|
525
556
|
thread_id,
|
|
526
557
|
message_id,
|
|
@@ -532,13 +563,10 @@ let OpenaiApi = (function () {
|
|
|
532
563
|
|
|
533
564
|
async createThreadAndRun(parameters) {
|
|
534
565
|
const openai = new OpenAI(this.clientParams);
|
|
535
|
-
|
|
536
566
|
const { _node, ...params } = parameters;
|
|
537
|
-
const response = await openai.beta.threads.createAndRun(
|
|
538
|
-
params.msg.payload,
|
|
539
|
-
);
|
|
567
|
+
const response = await openai.beta.threads.createAndRun(params.payload);
|
|
540
568
|
|
|
541
|
-
if (params.
|
|
569
|
+
if (params.payload.stream) {
|
|
542
570
|
_node.status({
|
|
543
571
|
fill: "green",
|
|
544
572
|
shape: "dot",
|
|
@@ -560,7 +588,7 @@ let OpenaiApi = (function () {
|
|
|
560
588
|
|
|
561
589
|
async listRuns(parameters) {
|
|
562
590
|
const openai = new OpenAI(this.clientParams);
|
|
563
|
-
const { thread_id, ...params } = parameters.
|
|
591
|
+
const { thread_id, ...params } = parameters.payload;
|
|
564
592
|
const list = await openai.beta.threads.runs.list(thread_id, params);
|
|
565
593
|
|
|
566
594
|
return [...list.data];
|
|
@@ -568,9 +596,8 @@ let OpenaiApi = (function () {
|
|
|
568
596
|
|
|
569
597
|
async createRun(parameters) {
|
|
570
598
|
const openai = new OpenAI(this.clientParams);
|
|
571
|
-
|
|
572
599
|
const { _node, ..._params } = parameters;
|
|
573
|
-
const { thread_id, ...params } = _params.
|
|
600
|
+
const { thread_id, ...params } = _params.payload;
|
|
574
601
|
const response = await openai.beta.threads.runs.create(thread_id, params);
|
|
575
602
|
|
|
576
603
|
if (params.stream) {
|
|
@@ -595,7 +622,7 @@ let OpenaiApi = (function () {
|
|
|
595
622
|
|
|
596
623
|
async getRun(parameters) {
|
|
597
624
|
const openai = new OpenAI(this.clientParams);
|
|
598
|
-
const { thread_id, run_id, ...params } = parameters.
|
|
625
|
+
const { thread_id, run_id, ...params } = parameters.payload;
|
|
599
626
|
const response = await openai.beta.threads.runs.retrieve(
|
|
600
627
|
thread_id,
|
|
601
628
|
run_id,
|
|
@@ -607,7 +634,7 @@ let OpenaiApi = (function () {
|
|
|
607
634
|
|
|
608
635
|
async modifyRun(parameters) {
|
|
609
636
|
const openai = new OpenAI(this.clientParams);
|
|
610
|
-
const { thread_id, run_id, ...params } = parameters.
|
|
637
|
+
const { thread_id, run_id, ...params } = parameters.payload;
|
|
611
638
|
const response = await openai.beta.threads.runs.update(
|
|
612
639
|
thread_id,
|
|
613
640
|
run_id,
|
|
@@ -619,9 +646,8 @@ let OpenaiApi = (function () {
|
|
|
619
646
|
|
|
620
647
|
async submitToolOutputsToRun(parameters) {
|
|
621
648
|
const openai = new OpenAI(this.clientParams);
|
|
622
|
-
|
|
623
|
-
const {
|
|
624
|
-
const { thread_id, run_id, ...params } = _params.msg.payload;
|
|
649
|
+
const { _node, ..._params } = parameters;
|
|
650
|
+
const { thread_id, run_id, ...params } = _params.payload;
|
|
625
651
|
const response = await openai.beta.threads.runs.submitToolOutputs(
|
|
626
652
|
thread_id,
|
|
627
653
|
run_id,
|
|
@@ -650,7 +676,7 @@ let OpenaiApi = (function () {
|
|
|
650
676
|
|
|
651
677
|
async cancelRun(parameters) {
|
|
652
678
|
const openai = new OpenAI(this.clientParams);
|
|
653
|
-
const { thread_id, run_id, ...params } = parameters.
|
|
679
|
+
const { thread_id, run_id, ...params } = parameters.payload;
|
|
654
680
|
const response = await openai.beta.threads.runs.cancel(
|
|
655
681
|
thread_id,
|
|
656
682
|
run_id,
|
|
@@ -662,15 +688,15 @@ let OpenaiApi = (function () {
|
|
|
662
688
|
|
|
663
689
|
async listRunSteps(parameters) {
|
|
664
690
|
const openai = new OpenAI(this.clientParams);
|
|
665
|
-
const { thread_id, run_id, ...params } = parameters.
|
|
666
|
-
const list = await openai.beta.threads.runs.steps.list(run_id, params);
|
|
691
|
+
const { thread_id, run_id, ...params } = parameters.payload;
|
|
692
|
+
const list = await openai.beta.threads.runs.steps.list(thread_id, run_id, params);
|
|
667
693
|
|
|
668
694
|
return [...list.data];
|
|
669
695
|
}
|
|
670
696
|
|
|
671
697
|
async getRunStep(parameters) {
|
|
672
698
|
const openai = new OpenAI(this.clientParams);
|
|
673
|
-
const { thread_id, run_id, step_id, ...params } = parameters.
|
|
699
|
+
const { thread_id, run_id, step_id, ...params } = parameters.payload;
|
|
674
700
|
const response = await openai.beta.threads.runs.steps.retrieve(
|
|
675
701
|
thread_id,
|
|
676
702
|
run_id,
|
package/locales/de-DE/node.json
CHANGED
|
@@ -24,6 +24,7 @@
|
|
|
24
24
|
"retrieveVectorStoreFileBatch": "retrieve vector store file batch",
|
|
25
25
|
"cancelVectorStoreFileBatch": "cancel vector store file batch",
|
|
26
26
|
"listVectorStoreBatchFiles": "list vector store batch files",
|
|
27
|
+
"uploadAndPollVectorStoreFileBatch": "upload and poll file batch",
|
|
27
28
|
"createVectorStore": "create vector store",
|
|
28
29
|
"listVectorStores": "list vector stores",
|
|
29
30
|
"retrieveVectorStore": "retrieve vector store",
|
package/locales/en-US/node.json
CHANGED
|
@@ -24,6 +24,7 @@
|
|
|
24
24
|
"retrieveVectorStoreFileBatch": "retrieve vector store file batch",
|
|
25
25
|
"cancelVectorStoreFileBatch": "cancel vector store file batch",
|
|
26
26
|
"listVectorStoreBatchFiles": "list vector store batch files",
|
|
27
|
+
"uploadAndPollVectorStoreFileBatch": "upload and poll file batch",
|
|
27
28
|
"createVectorStore": "create vector store",
|
|
28
29
|
"listVectorStores": "list vector stores",
|
|
29
30
|
"retrieveVectorStore": "retrieve vector store",
|
package/locales/ja/node.json
CHANGED
|
@@ -16,10 +16,15 @@
|
|
|
16
16
|
"streaming": "streaming"
|
|
17
17
|
},
|
|
18
18
|
"parameters": {
|
|
19
|
+
"createVectorStoreFile": "create vector store file",
|
|
20
|
+
"listVectorStoreFiles": "list vector store files",
|
|
21
|
+
"retrieveVectorStoreFile": "retrieve vector store file",
|
|
22
|
+
"deleteVectorStoreFile": "delete vector store file",
|
|
19
23
|
"createVectorStoreFileBatch": "create vector stoe file batch",
|
|
20
24
|
"retrieveVectorStoreFileBatch": "retrieve vector store file batch",
|
|
21
25
|
"cancelVectorStoreFileBatch": "cancel vector store file batch",
|
|
22
26
|
"listVectorStoreBatchFiles": "list vector store batch files",
|
|
27
|
+
"uploadAndPollVectorStoreFileBatch": "upload and poll file batch",
|
|
23
28
|
"createVectorStore": "create vector store",
|
|
24
29
|
"listVectorStores": "list vector stores",
|
|
25
30
|
"retrieveVectorStore": "retrieve vector store",
|
package/locales/zh-CN/node.json
CHANGED
|
@@ -24,6 +24,7 @@
|
|
|
24
24
|
"retrieveVectorStoreFileBatch": "retrieve vector store file batch",
|
|
25
25
|
"cancelVectorStoreFileBatch": "cancel vector store file batch",
|
|
26
26
|
"listVectorStoreBatchFiles": "list vector store batch files",
|
|
27
|
+
"uploadAndPollVectorStoreFileBatch": "upload and poll file batch",
|
|
27
28
|
"createVectorStore": "create vector store",
|
|
28
29
|
"listVectorStores": "list vector stores",
|
|
29
30
|
"retrieveVectorStore": "retrieve vector store",
|