@jsonstudio/llms 0.4.2 → 0.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/conversion/conversion-v3/config/default-configs.js +3 -79
- package/dist/conversion/conversion-v3/nodes/process/tool-process-node.d.ts +4 -0
- package/dist/conversion/conversion-v3/nodes/process/tool-process-node.js +70 -9
- package/dist/conversion/shared/responses-id-utils.js +18 -5
- package/package.json +2 -2

package/dist/conversion/conversion-v3/config/default-configs.js

@@ -644,23 +644,6 @@ export const DEFAULT_PIPELINE_DOCUMENT = {
             id: 'pipeline-responses',
             name: 'OpenAI Responses Pipeline',
             entryEndpoints: ['/v1/responses', '/responses'],
-            processMode: 'chat',
-            streaming: 'always',
-            nodes: [
-                { id: 'responses-sse-input', kind: 'sse-input', implementation: 'sse-input' },
-                { id: 'responses-input', kind: 'input', implementation: 'responses-input' },
-                { id: 'responses-process', kind: 'process', implementation: 'passthrough-process' },
-                { id: 'responses-output', kind: 'output', implementation: 'responses-output' },
-                { id: 'responses-sse-output', kind: 'sse-output', implementation: 'sse-output' }
-            ]
-        },
-        {
-            id: 'pipeline-responses-passthrough',
-            name: 'OpenAI Responses Passthrough Pipeline',
-            entryEndpoints: ['/v1/responses', '/responses'],
-            providerProtocols: ['openai-responses'],
-            processMode: 'passthrough',
-            mode: 'passthrough',
             streaming: 'always',
             nodes: [
                 { id: 'responses-sse-input', kind: 'sse-input', implementation: 'sse-input' },
@@ -670,37 +653,7 @@ export const DEFAULT_PIPELINE_DOCUMENT = {
                 { id: 'responses-sse-output', kind: 'sse-output', implementation: 'sse-output' }
             ]
         },
-        {
-            id: 'pipeline-responses-openai-chat',
-            name: 'OpenAI Responses Pipeline (Chat Provider)',
-            entryEndpoints: ['/v1/responses', '/responses'],
-            providerProtocols: ['openai'],
-            processMode: 'chat',
-            streaming: 'always',
-            nodes: [
-                { id: 'responses-openai-sse-input', kind: 'sse-input', implementation: 'sse-input' },
-                { id: 'responses-openai-input', kind: 'input', implementation: 'responses-input' },
-                { id: 'responses-openai-process', kind: 'process', implementation: 'chat-process' },
-                { id: 'responses-openai-output', kind: 'output', implementation: 'openai-output' },
-                { id: 'responses-openai-sse-output', kind: 'sse-output', implementation: 'sse-output' }
-            ]
-        },
-        {
-            id: 'pipeline-responses-openai-chat-passthrough',
-            name: 'OpenAI Responses Pipeline (Chat Provider Passthrough)',
-            entryEndpoints: ['/v1/responses', '/responses'],
-            providerProtocols: ['openai'],
-            processMode: 'passthrough',
-            mode: 'passthrough',
-            streaming: 'never',
-            nodes: [
-                { id: 'responses-openai-sse-input', kind: 'sse-input', implementation: 'sse-input' },
-                { id: 'responses-openai-input', kind: 'input', implementation: 'responses-input' },
-                { id: 'responses-openai-process', kind: 'process', implementation: 'chat-process' },
-                { id: 'responses-openai-output', kind: 'output', implementation: 'openai-output' },
-                { id: 'responses-openai-sse-output', kind: 'sse-output', implementation: 'sse-output' }
-            ]
-        },
+        // Chat providers (openai-compatible) reuse the default response pipeline above.
         {
             id: 'pipeline-openai-chat-response',
             name: 'OpenAI Chat Response Pipeline',
@@ -729,21 +682,7 @@ export const DEFAULT_PIPELINE_DOCUMENT = {
             id: 'pipeline-responses-response',
             name: 'OpenAI Responses Response Pipeline',
             entryEndpoints: ['/v1/responses#response', '/responses#response'],
-            processMode: 'chat',
             streaming: 'always',
-            nodes: [
-                { id: 'responses-response-input', kind: 'input', implementation: 'responses-response-input' },
-                { id: 'responses-response-sse-pass', kind: 'process', implementation: 'sse-passthrough' },
-                { id: 'responses-response-sse-output', kind: 'sse-output', implementation: 'sse-output' }
-            ]
-        },
-        {
-            id: 'pipeline-responses-response-passthrough',
-            name: 'OpenAI Responses Response Pipeline (Passthrough)',
-            entryEndpoints: ['/v1/responses#response', '/responses#response'],
-            processMode: 'passthrough',
-            mode: 'passthrough',
-            streaming: 'never',
             nodes: [
                 { id: 'responses-response-input', kind: 'input', implementation: 'responses-response-input' },
                 { id: 'responses-response-process', kind: 'process', implementation: 'response-process' },
@@ -756,25 +695,10 @@ export const DEFAULT_PIPELINE_DOCUMENT = {
             name: 'OpenAI Responses Response Pipeline (Chat Provider)',
             entryEndpoints: ['/v1/responses#response', '/responses#response'],
             providerProtocols: ['openai'],
-
-            streaming: 'never',
-            nodes: [
-                { id: 'responses-openai-response-input', kind: 'input', implementation: 'openai-response-input' },
-                { id: 'responses-openai-response-sse-pass', kind: 'process', implementation: 'sse-passthrough' },
-                { id: 'responses-openai-response-sse-output', kind: 'sse-output', implementation: 'sse-output' }
-            ]
-        },
-        {
-            id: 'pipeline-responses-openai-chat-response-passthrough',
-            name: 'OpenAI Responses Response Pipeline (Chat Provider Passthrough)',
-            entryEndpoints: ['/v1/responses#response', '/responses#response'],
-            providerProtocols: ['openai'],
-            processMode: 'passthrough',
-            mode: 'passthrough',
-            streaming: 'auto',
+            streaming: 'always',
             nodes: [
                 { id: 'responses-openai-response-input', kind: 'input', implementation: 'openai-response-input' },
-                { id: 'responses-openai-response-
+                { id: 'responses-openai-response-process', kind: 'process', implementation: 'response-process' },
                 { id: 'responses-openai-response-sse-output', kind: 'sse-output', implementation: 'sse-output' }
             ]
         },
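
Net effect of the default-configs.js hunks: the passthrough and chat-provider variants of the Responses pipelines are removed, leaving a single default pipeline per Responses entry endpoint. A minimal sketch of how one might verify this against the installed package, assuming DEFAULT_PIPELINE_DOCUMENT keeps its pipelines under a `pipelines` array and that the deep dist path is importable (neither is confirmed by this diff):

    // Hypothetical check (assumptions noted above): list pipelines still registered
    // for the /v1/responses entry endpoint after upgrading to 0.4.3.
    import { DEFAULT_PIPELINE_DOCUMENT } from '@jsonstudio/llms/dist/conversion/conversion-v3/config/default-configs.js';

    const remaining = (DEFAULT_PIPELINE_DOCUMENT.pipelines ?? [])
        .filter(p => (p.entryEndpoints ?? []).includes('/v1/responses'))
        .map(p => ({ id: p.id, streaming: p.streaming }));

    // Per this diff, only 'pipeline-responses' should remain for /v1/responses;
    // the '-passthrough' and '-openai-chat' variants are gone.
    console.log(remaining);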

package/dist/conversion/conversion-v3/nodes/process/tool-process-node.d.ts

@@ -22,5 +22,9 @@ export declare class ToolProcessNode extends BasePipelineNode {
     private ensureArgumentsString;
     private ensureToolChoiceIntegrity;
     private buildToolCallId;
+    private sanitizeToolIdentifier;
+    private ensureUniqueToolCallId;
+    private generateToolCallId;
+    private resolveToolCallId;
     private normalizeResponse;
 }

package/dist/conversion/conversion-v3/nodes/process/tool-process-node.js

@@ -160,6 +160,8 @@ export class ToolProcessNode extends BasePipelineNode {
             return undefined;
         }
         const normalized = [];
+        const toolIdRegistry = new Set();
+        const toolIdAliasMap = new Map();
         let lastToolCallId = null;
         let callCounter = 0;
         for (const msg of messages) {
@@ -169,17 +171,18 @@ export class ToolProcessNode extends BasePipelineNode {
             if (Array.isArray(next.tool_calls) && next.tool_calls.length > 0) {
                 next.content = null;
                 next.tool_calls = next.tool_calls
-                    .map(call => this.normalizeToolCall(call, callCounter
+                    .map(call => this.normalizeToolCall(call, callCounter++, toolIdRegistry, toolIdAliasMap))
                     .filter(Boolean);
                 if (next.tool_calls.length) {
                     lastToolCallId = next.tool_calls[next.tool_calls.length - 1].id;
                 }
             }
             if (next.role === 'tool') {
-
-
-
-                next.tool_call_id
+                if (!lastToolCallId) {
+                    lastToolCallId = this.generateToolCallId(undefined, this.buildToolCallId(callCounter++), toolIdRegistry, toolIdAliasMap);
+                }
+                const resolvedId = this.resolveToolCallId(next.tool_call_id, lastToolCallId, toolIdRegistry, toolIdAliasMap);
+                next.tool_call_id = resolvedId;
                 if (next.content !== null && typeof next.content !== 'string') {
                     try {
                         next.content = JSON.stringify(next.content);
@@ -228,12 +231,10 @@ export class ToolProcessNode extends BasePipelineNode {
         }
         return ordered;
     }
-    normalizeToolCall(call, counter) {
+    normalizeToolCall(call, counter, registry, aliasMap) {
         if (!call || typeof call !== 'object')
             return null;
-        const id =
-            ? call.id.trim()
-            : this.buildToolCallId(counter);
+        const id = this.generateToolCallId(call.id, this.buildToolCallId(counter), registry, aliasMap);
         const fn = call.function || { name: '', arguments: '' };
         const name = typeof fn.name === 'string' ? fn.name.trim() : '';
         const args = this.ensureArgumentsString(fn.arguments);
@@ -283,6 +284,66 @@ export class ToolProcessNode extends BasePipelineNode {
     buildToolCallId(counter) {
         return `${TOOL_CALL_ID_PREFIX}_${counter}_${Date.now()}`;
     }
+    sanitizeToolIdentifier(value) {
+        if (!value)
+            return '';
+        const trimmed = value.trim();
+        if (!trimmed)
+            return '';
+        const sanitized = trimmed.replace(/[^A-Za-z0-9_-]/g, '_');
+        return sanitized || '';
+    }
+    ensureUniqueToolCallId(base, registry) {
+        if (!registry.has(base)) {
+            return base;
+        }
+        let suffix = 1;
+        let candidate = `${base}_${suffix}`;
+        while (registry.has(candidate)) {
+            suffix += 1;
+            candidate = `${base}_${suffix}`;
+        }
+        return candidate;
+    }
+    generateToolCallId(rawCandidate, fallbackBase, registry, aliasMap) {
+        const candidate = typeof rawCandidate === 'string' ? rawCandidate.trim() : '';
+        const sanitizedCandidate = this.sanitizeToolIdentifier(candidate);
+        const sanitizedFallback = this.sanitizeToolIdentifier(fallbackBase) || this.buildToolCallId(Date.now());
+        const base = sanitizedCandidate || sanitizedFallback;
+        const unique = this.ensureUniqueToolCallId(base, registry);
+        registry.add(unique);
+        if (candidate)
+            aliasMap.set(candidate, unique);
+        aliasMap.set(unique, unique);
+        if (sanitizedCandidate && sanitizedCandidate !== candidate) {
+            aliasMap.set(sanitizedCandidate, unique);
+        }
+        return unique;
+    }
+    resolveToolCallId(rawCandidate, fallbackId, registry, aliasMap) {
+        const candidate = typeof rawCandidate === 'string' ? rawCandidate.trim() : '';
+        if (candidate) {
+            const cached = aliasMap.get(candidate);
+            if (cached) {
+                return cached;
+            }
+            const sanitized = this.sanitizeToolIdentifier(candidate);
+            if (sanitized) {
+                const cachedSanitized = aliasMap.get(sanitized);
+                if (cachedSanitized) {
+                    aliasMap.set(candidate, cachedSanitized);
+                    return cachedSanitized;
+                }
+                const unique = this.ensureUniqueToolCallId(sanitized, registry);
+                registry.add(unique);
+                aliasMap.set(candidate, unique);
+                aliasMap.set(sanitized, unique);
+                aliasMap.set(unique, unique);
+                return unique;
+            }
+        }
+        return fallbackId;
+    }
     normalizeResponse(response) {
         if (!response?.message)
             return;
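
The new private helpers above maintain a registry of emitted tool-call IDs plus an alias map from the original (possibly unsanitized or duplicated) IDs to the IDs actually used, so tool messages keep pointing at the right assistant tool call. A simplified standalone sketch of that behavior, adapted from the methods in this hunk (these are private methods, not an exported API of @jsonstudio/llms):

    // Standalone reproduction of the ID handling added to ToolProcessNode.
    const registry = new Set();
    const aliasMap = new Map();

    function sanitize(value) {
        return value ? value.trim().replace(/[^A-Za-z0-9_-]/g, '_') : '';
    }

    function ensureUnique(base) {
        if (!registry.has(base)) return base;
        let suffix = 1;
        while (registry.has(`${base}_${suffix}`)) suffix += 1;
        return `${base}_${suffix}`;
    }

    function generate(rawCandidate, fallbackBase) {
        const candidate = typeof rawCandidate === 'string' ? rawCandidate.trim() : '';
        const base = sanitize(candidate) || sanitize(fallbackBase) || `call_${Date.now()}`;
        const unique = ensureUnique(base);
        registry.add(unique);
        if (candidate) aliasMap.set(candidate, unique);
        aliasMap.set(unique, unique);
        return unique;
    }

    // Two assistant tool calls whose IDs collide after sanitization get distinct IDs,
    // and a tool message carrying the original unsanitized ID still resolves via the alias map.
    const first = generate('call#1', 'call_0');            // 'call_1'
    const second = generate('call_1', 'call_1');           // 'call_1_1'
    const resolvedToolMessageId = aliasMap.get('call#1');  // 'call_1'
    console.log(first, second, resolvedToolMessageId);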

package/dist/conversion/shared/responses-id-utils.js

@@ -1,16 +1,29 @@
+function sanitizeCore(value) {
+    return value
+        .replace(/[^A-Za-z0-9_-]/g, '_')
+        .replace(/_{2,}/g, '_')
+        .replace(/^_+/, '')
+        .replace(/_+$/, '');
+}
 function stripPrefix(value) {
     if (typeof value !== 'string')
         return null;
     const trimmed = value.trim();
     if (!trimmed)
         return null;
-
-
+    let sanitized = sanitizeCore(trimmed);
+    if (!sanitized)
+        return null;
+    if (/^fc[_-]/i.test(sanitized)) {
+        sanitized = sanitized.replace(/^fc[_-]?/i, '');
+    }
+    else if (/^call[_-]/i.test(sanitized)) {
+        sanitized = sanitized.replace(/^call[_-]?/i, '');
     }
-    if (
-
+    if (!sanitized) {
+        sanitized = Math.random().toString(36).slice(2, 10);
     }
-    return `fc_${
+    return `fc_${sanitized}`;
 }
 function normalizeWithFallback(options) {
     const normalized = stripPrefix(options.callId);