@jsonstudio/llms 0.4.6 → 0.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/conversion/codecs/anthropic-openai-codec.js +28 -2
- package/dist/conversion/codecs/gemini-openai-codec.js +23 -0
- package/dist/conversion/codecs/responses-openai-codec.js +8 -1
- package/dist/conversion/hub/node-support.js +14 -1
- package/dist/conversion/hub/pipeline/hub-pipeline.d.ts +66 -0
- package/dist/conversion/hub/pipeline/hub-pipeline.js +284 -193
- package/dist/conversion/hub/pipeline/stages/req_inbound/req_inbound_stage1_format_parse/index.d.ts +11 -0
- package/dist/conversion/hub/pipeline/stages/req_inbound/req_inbound_stage1_format_parse/index.js +6 -0
- package/dist/conversion/hub/pipeline/stages/req_inbound/req_inbound_stage2_semantic_map/index.d.ts +16 -0
- package/dist/conversion/hub/pipeline/stages/req_inbound/req_inbound_stage2_semantic_map/index.js +17 -0
- package/dist/conversion/hub/pipeline/stages/req_inbound/req_inbound_stage3_context_capture/context-factories.d.ts +5 -0
- package/dist/conversion/hub/pipeline/stages/req_inbound/req_inbound_stage3_context_capture/context-factories.js +17 -0
- package/dist/conversion/hub/pipeline/stages/req_inbound/req_inbound_stage3_context_capture/index.d.ts +19 -0
- package/dist/conversion/hub/pipeline/stages/req_inbound/req_inbound_stage3_context_capture/index.js +269 -0
- package/dist/conversion/hub/pipeline/stages/req_outbound/req_outbound_stage1_semantic_map/index.d.ts +18 -0
- package/dist/conversion/hub/pipeline/stages/req_outbound/req_outbound_stage1_semantic_map/index.js +141 -0
- package/dist/conversion/hub/pipeline/stages/req_outbound/req_outbound_stage2_format_build/index.d.ts +11 -0
- package/dist/conversion/hub/pipeline/stages/req_outbound/req_outbound_stage2_format_build/index.js +29 -0
- package/dist/conversion/hub/pipeline/stages/req_process/req_process_stage1_tool_governance/index.d.ts +16 -0
- package/dist/conversion/hub/pipeline/stages/req_process/req_process_stage1_tool_governance/index.js +15 -0
- package/dist/conversion/hub/pipeline/stages/req_process/req_process_stage2_route_select/index.d.ts +17 -0
- package/dist/conversion/hub/pipeline/stages/req_process/req_process_stage2_route_select/index.js +18 -0
- package/dist/conversion/hub/pipeline/stages/resp_inbound/resp_inbound_stage1_sse_decode/index.d.ts +17 -0
- package/dist/conversion/hub/pipeline/stages/resp_inbound/resp_inbound_stage1_sse_decode/index.js +63 -0
- package/dist/conversion/hub/pipeline/stages/resp_inbound/resp_inbound_stage2_format_parse/index.d.ts +11 -0
- package/dist/conversion/hub/pipeline/stages/resp_inbound/resp_inbound_stage2_format_parse/index.js +6 -0
- package/dist/conversion/hub/pipeline/stages/resp_inbound/resp_inbound_stage3_semantic_map/index.d.ts +12 -0
- package/dist/conversion/hub/pipeline/stages/resp_inbound/resp_inbound_stage3_semantic_map/index.js +6 -0
- package/dist/conversion/hub/pipeline/stages/resp_outbound/resp_outbound_stage1_client_remap/index.d.ts +13 -0
- package/dist/conversion/hub/pipeline/stages/resp_outbound/resp_outbound_stage1_client_remap/index.js +43 -0
- package/dist/conversion/hub/pipeline/stages/resp_outbound/resp_outbound_stage2_sse_stream/index.d.ts +17 -0
- package/dist/conversion/hub/pipeline/stages/resp_outbound/resp_outbound_stage2_sse_stream/index.js +22 -0
- package/dist/conversion/hub/pipeline/stages/resp_process/resp_process_stage1_tool_governance/index.d.ts +16 -0
- package/dist/conversion/hub/pipeline/stages/resp_process/resp_process_stage1_tool_governance/index.js +19 -0
- package/dist/conversion/hub/pipeline/stages/resp_process/resp_process_stage2_finalize/index.d.ts +17 -0
- package/dist/conversion/hub/pipeline/stages/resp_process/resp_process_stage2_finalize/index.js +19 -0
- package/dist/conversion/hub/pipeline/stages/utils.d.ts +2 -0
- package/dist/conversion/hub/pipeline/stages/utils.js +11 -0
- package/dist/conversion/hub/pipeline/target-utils.d.ts +5 -0
- package/dist/conversion/hub/pipeline/target-utils.js +87 -0
- package/dist/conversion/hub/process/chat-process.js +23 -17
- package/dist/conversion/hub/response/provider-response.js +69 -122
- package/dist/conversion/hub/response/response-mappers.d.ts +19 -0
- package/dist/conversion/hub/response/response-mappers.js +22 -2
- package/dist/conversion/hub/response/response-runtime.d.ts +8 -0
- package/dist/conversion/hub/response/response-runtime.js +239 -6
- package/dist/conversion/hub/semantic-mappers/anthropic-mapper.d.ts +8 -0
- package/dist/conversion/hub/semantic-mappers/anthropic-mapper.js +135 -55
- package/dist/conversion/hub/semantic-mappers/chat-mapper.js +80 -40
- package/dist/conversion/hub/semantic-mappers/gemini-mapper.js +5 -29
- package/dist/conversion/hub/semantic-mappers/responses-mapper.js +16 -13
- package/dist/conversion/hub/snapshot-recorder.d.ts +13 -0
- package/dist/conversion/hub/snapshot-recorder.js +90 -50
- package/dist/conversion/hub/standardized-bridge.js +49 -38
- package/dist/conversion/hub/types/chat-envelope.d.ts +68 -0
- package/dist/conversion/hub/types/standardized.d.ts +97 -0
- package/dist/conversion/pipeline/codecs/v2/anthropic-openai-pipeline.js +29 -2
- package/dist/conversion/pipeline/codecs/v2/responses-openai-pipeline.js +68 -1
- package/dist/conversion/responses/responses-openai-bridge.d.ts +6 -1
- package/dist/conversion/responses/responses-openai-bridge.js +132 -10
- package/dist/conversion/shared/anthropic-message-utils.d.ts +9 -1
- package/dist/conversion/shared/anthropic-message-utils.js +414 -26
- package/dist/conversion/shared/bridge-actions.js +267 -95
- package/dist/conversion/shared/bridge-message-utils.js +54 -8
- package/dist/conversion/shared/bridge-policies.js +21 -2
- package/dist/conversion/shared/chat-envelope-validator.d.ts +8 -0
- package/dist/conversion/shared/chat-envelope-validator.js +128 -0
- package/dist/conversion/shared/chat-request-filters.js +109 -28
- package/dist/conversion/shared/mcp-injection.js +41 -20
- package/dist/conversion/shared/openai-finalizer.d.ts +11 -0
- package/dist/conversion/shared/openai-finalizer.js +73 -0
- package/dist/conversion/shared/openai-message-normalize.js +32 -31
- package/dist/conversion/shared/protocol-state.d.ts +4 -0
- package/dist/conversion/shared/protocol-state.js +23 -0
- package/dist/conversion/shared/reasoning-normalizer.d.ts +1 -0
- package/dist/conversion/shared/reasoning-normalizer.js +50 -18
- package/dist/conversion/shared/responses-output-builder.d.ts +1 -1
- package/dist/conversion/shared/responses-output-builder.js +76 -25
- package/dist/conversion/shared/responses-reasoning-registry.d.ts +8 -0
- package/dist/conversion/shared/responses-reasoning-registry.js +61 -0
- package/dist/conversion/shared/responses-response-utils.js +32 -2
- package/dist/conversion/shared/responses-tool-utils.js +28 -2
- package/dist/conversion/shared/snapshot-hooks.d.ts +9 -0
- package/dist/conversion/shared/snapshot-hooks.js +60 -6
- package/dist/conversion/shared/snapshot-utils.d.ts +16 -0
- package/dist/conversion/shared/snapshot-utils.js +84 -0
- package/dist/conversion/shared/tool-filter-pipeline.js +46 -7
- package/dist/conversion/shared/tool-mapping.js +13 -2
- package/dist/filters/index.d.ts +18 -0
- package/dist/filters/index.js +0 -1
- package/dist/filters/special/request-streaming-to-nonstreaming.d.ts +13 -0
- package/dist/filters/special/request-streaming-to-nonstreaming.js +13 -1
- package/dist/filters/special/request-tool-choice-policy.js +3 -1
- package/dist/filters/special/request-tool-list-filter.d.ts +11 -0
- package/dist/filters/special/request-tool-list-filter.js +20 -7
- package/dist/sse/shared/responses-output-normalizer.js +5 -4
- package/dist/sse/sse-to-json/builders/response-builder.js +24 -1
- package/dist/sse/types/responses-types.d.ts +2 -0
- package/package.json +1 -1
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { StageRecorder } from '../../../../format-adapters/index.js';
import type { JsonObject } from '../../../../types/json.js';
import type { AdapterContext } from '../../../../types/chat-envelope.js';
// Protocols a client may have spoken on the inbound request; the stage remaps
// the chat-shaped payload back into one of these.
type ClientProtocol = 'openai-chat' | 'openai-responses' | 'anthropic-messages';
/**
 * Inputs for resp_outbound stage 1 (client remap).
 */
export interface RespOutboundStage1ClientRemapOptions {
    /** Chat-shaped response payload to remap toward the client protocol. */
    payload: JsonObject;
    /** Protocol the client used; selects the remap strategy. */
    clientProtocol: ClientProtocol;
    /** Correlation id threaded through the pipeline for this request. */
    requestId: string;
    /** Optional adapter context; carries the anthropic tool-name alias map. */
    adapterContext?: AdapterContext;
    /** Optional recorder that captures a snapshot of this stage's output. */
    stageRecorder?: StageRecorder;
}
/**
 * Remaps a finalized chat-shaped payload into the client's protocol.
 * 'openai-chat' passes through unchanged; other protocols are rebuilt
 * via the corresponding response builders.
 */
export declare function runRespOutboundStage1ClientRemap(options: RespOutboundStage1ClientRemapOptions): JsonObject;
export {};
|
package/dist/conversion/hub/pipeline/stages/resp_outbound/resp_outbound_stage1_client_remap/index.js
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import { buildAnthropicResponseFromChat } from '../../../../response/response-runtime.js';
|
|
2
|
+
import { buildResponsesPayloadFromChat } from '../../../../../responses/responses-openai-bridge.js';
|
|
3
|
+
import { recordStage } from '../../../stages/utils.js';
|
|
4
|
+
/**
 * resp_outbound stage 1: remap the finalized chat payload into the protocol
 * the client originally spoke.
 *
 * - 'openai-chat' is a passthrough (no remap needed).
 * - 'anthropic-messages' rebuilds an Anthropic response, applying any
 *   tool-name alias map found on the adapter context.
 * - everything else is treated as the Responses protocol.
 *
 * The remapped payload is recorded via the optional stage recorder and returned.
 */
export function runRespOutboundStage1ClientRemap(options) {
    const { payload, clientProtocol } = options;
    let remapped;
    switch (clientProtocol) {
        case 'openai-chat':
            // Client already speaks chat format; pass the payload straight through.
            remapped = payload;
            break;
        case 'anthropic-messages':
            remapped = buildAnthropicResponseFromChat(payload, {
                aliasMap: resolveAliasMapFromContext(options.adapterContext)
            });
            break;
        default:
            // Remaining protocol ('openai-responses') goes through the Responses bridge.
            remapped = buildResponsesPayloadFromChat(payload, {
                requestId: options.requestId
            });
            break;
    }
    recordStage(options.stageRecorder, 'resp_outbound_stage1_client_remap', remapped);
    return remapped;
}
|
|
22
|
+
/**
 * Extracts a sanitized tool-name alias map from the adapter context.
 *
 * Only `anthropicToolNameMap` entries where both key and value are non-empty
 * strings (after trimming) survive; keys/values are stored trimmed.
 * Returns undefined when the context is absent, the candidate is not a plain
 * object, or no valid entries remain.
 */
function resolveAliasMapFromContext(adapterContext) {
    const candidate = adapterContext ? adapterContext.anthropicToolNameMap : undefined;
    if (!candidate || typeof candidate !== 'object' || Array.isArray(candidate)) {
        return undefined;
    }
    const validEntries = Object.entries(candidate)
        .filter(([alias, original]) => typeof alias === 'string' && typeof original === 'string')
        .map(([alias, original]) => [alias.trim(), original.trim()])
        .filter(([alias, original]) => alias.length > 0 && original.length > 0);
    return validEntries.length > 0 ? Object.fromEntries(validEntries) : undefined;
}
|
package/dist/conversion/hub/pipeline/stages/resp_outbound/resp_outbound_stage2_sse_stream/index.d.ts
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { Readable } from 'node:stream';
import type { StageRecorder } from '../../../../format-adapters/index.js';
import type { JsonObject } from '../../../../types/json.js';
// Protocols for which an SSE codec may be looked up when streaming is requested.
type ClientProtocol = 'openai-chat' | 'openai-responses' | 'anthropic-messages';
/**
 * Inputs for resp_outbound stage 2 (SSE stream encode).
 */
export interface RespOutboundStage2SseStreamOptions {
    /** Client-protocol payload produced by stage 1 (client remap). */
    clientPayload: JsonObject;
    /** Protocol of the client; selects the SSE codec. */
    clientProtocol: ClientProtocol;
    /** Correlation id forwarded to the SSE codec. */
    requestId: string;
    /** When false the payload is returned as a JSON body instead of a stream. */
    wantsStream: boolean;
    /** Optional recorder that captures a snapshot of this stage's output. */
    stageRecorder?: StageRecorder;
}
/**
 * Result of stage 2: exactly one of `body` (non-streaming) or `stream`
 * (SSE-encoded) is populated.
 */
export interface RespOutboundStage2SseStreamResult {
    body?: JsonObject;
    stream?: Readable;
}
/** Encodes the client payload as SSE when streaming was requested; otherwise returns it as a body. */
export declare function runRespOutboundStage2SseStream(options: RespOutboundStage2SseStreamOptions): Promise<RespOutboundStage2SseStreamResult>;
export {};
|
package/dist/conversion/hub/pipeline/stages/resp_outbound/resp_outbound_stage2_sse_stream/index.js
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { defaultSseCodecRegistry } from '../../../../../../sse/index.js';
|
|
2
|
+
import { recordStage } from '../../../stages/utils.js';
|
|
3
|
+
/**
 * resp_outbound stage 2: optionally encode the client payload as an SSE stream.
 *
 * When the client did not request streaming, the JSON payload is returned as a
 * body. Otherwise the SSE codec for the client protocol converts the payload
 * into a readable SSE stream.
 *
 * Fix: the codec registry lookup can miss; previously a missing codec crashed
 * with an opaque TypeError on `codec.convertJsonToSse`. Fail with a
 * descriptive error instead so misconfiguration is diagnosable.
 */
export async function runRespOutboundStage2SseStream(options) {
    if (!options.wantsStream) {
        recordStage(options.stageRecorder, 'resp_outbound_stage2_sse_stream', {
            passthrough: false,
            protocol: options.clientProtocol,
            payload: options.clientPayload
        });
        return { body: options.clientPayload };
    }
    const codec = defaultSseCodecRegistry.get(options.clientProtocol);
    // Guard: surface registry misconfiguration explicitly rather than via a
    // TypeError from calling a method on undefined.
    if (!codec) {
        throw new Error(`No SSE codec registered for client protocol "${options.clientProtocol}" (requestId: ${options.requestId})`);
    }
    const stream = await codec.convertJsonToSse(options.clientPayload, {
        requestId: options.requestId
    });
    recordStage(options.stageRecorder, 'resp_outbound_stage2_sse_stream', {
        passthrough: false,
        protocol: options.clientProtocol,
        payload: options.clientPayload
    });
    return { stream };
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import type { StageRecorder } from '../../../../format-adapters/index.js';
import type { JsonObject } from '../../../../types/json.js';
import type { ChatCompletionLike } from '../../../../response/response-mappers.js';
// Client protocol forwarded to the governance engine so it can tailor its output.
type ClientProtocol = 'openai-chat' | 'openai-responses' | 'anthropic-messages';
/**
 * Inputs for resp_process stage 1 (response-side tool governance).
 */
export interface RespProcessStage1ToolGovernanceOptions {
    /** Chat-completion-shaped response to filter and govern. */
    payload: ChatCompletionLike;
    /** Endpoint the request entered on; passed to the tool filters. */
    entryEndpoint: string;
    /** Correlation id for this request. */
    requestId: string;
    /** Protocol of the originating client. */
    clientProtocol: ClientProtocol;
    /** Optional recorder that captures filter and governance output. */
    stageRecorder?: StageRecorder;
}
/** Result of stage 1: the payload after tool filters and governance ran. */
export interface RespProcessStage1ToolGovernanceResult {
    governedPayload: JsonObject;
}
/** Runs response tool filters, then the tool-governance engine, over the payload. */
export declare function runRespProcessStage1ToolGovernance(options: RespProcessStage1ToolGovernanceOptions): Promise<RespProcessStage1ToolGovernanceResult>;
export {};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { runChatResponseToolFilters } from '../../../../../shared/tool-filter-pipeline.js';
|
|
2
|
+
import { ToolGovernanceEngine } from '../../../../tool-governance/index.js';
|
|
3
|
+
import { recordStage } from '../../../stages/utils.js';
|
|
4
|
+
// Single shared engine instance; reused across all requests handled by this stage.
const toolGovernanceEngine = new ToolGovernanceEngine();
/**
 * resp_process stage 1: apply response-side tool filters, then run the
 * tool-governance engine over the filtered payload.
 *
 * Records both the filtered and governed payloads (plus the governance
 * summary) to the optional stage recorder, and returns the governed payload.
 */
export async function runRespProcessStage1ToolGovernance(options) {
    const { payload, entryEndpoint, requestId, clientProtocol, stageRecorder } = options;
    const filteredPayload = await runChatResponseToolFilters(payload, {
        entryEndpoint,
        requestId,
        profile: 'openai-chat'
    });
    const governance = toolGovernanceEngine.governResponse(filteredPayload, clientProtocol);
    recordStage(stageRecorder, 'resp_process_stage1_tool_governance', {
        summary: governance.summary,
        applied: governance.summary?.applied,
        filteredPayload,
        governedPayload: governance.payload
    });
    return { governedPayload: governance.payload };
}
|
package/dist/conversion/hub/pipeline/stages/resp_process/resp_process_stage2_finalize/index.d.ts
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { StageRecorder } from '../../../../format-adapters/index.js';
import type { JsonObject } from '../../../../types/json.js';
import type { ChatReasoningMode } from '../../../../../shared/openai-finalizer.js';
import type { ProcessedRequest } from '../../../../types/standardized.js';
/**
 * Inputs for resp_process stage 2 (finalize).
 */
export interface RespProcessStage2FinalizeOptions {
    /** Governed chat payload from stage 1 to finalize. */
    payload: JsonObject;
    /** Endpoint the request entered on; forwarded to the finalizer. */
    entryEndpoint: string;
    /** Correlation id for this request. */
    requestId: string;
    /** Whether the client requested a streaming response. */
    wantsStream: boolean;
    /** How reasoning content is handled by the finalizer (keep/drop/append). */
    reasoningMode: ChatReasoningMode;
    /** Optional recorder that captures a snapshot of this stage's output. */
    stageRecorder?: StageRecorder;
}
/** Result of stage 2: the finalized payload and the derived processed request. */
export interface RespProcessStage2FinalizeResult {
    finalizedPayload: JsonObject;
    processedRequest: ProcessedRequest;
}
/** Finalizes the OpenAI-chat payload and builds a ProcessedRequest from it. */
export declare function runRespProcessStage2Finalize(options: RespProcessStage2FinalizeOptions): Promise<RespProcessStage2FinalizeResult>;
|
package/dist/conversion/hub/pipeline/stages/resp_process/resp_process_stage2_finalize/index.js
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { finalizeOpenAIChatResponse } from '../../../../../shared/openai-finalizer.js';
|
|
2
|
+
import { buildProcessedRequestFromChatResponse } from '../../../../response/chat-response-utils.js';
|
|
3
|
+
import { recordStage } from '../../../stages/utils.js';
|
|
4
|
+
/**
 * resp_process stage 2: finalize the OpenAI-chat payload and derive the
 * processed-request view from it.
 *
 * Runs the shared OpenAI finalizer (honoring stream intent and the reasoning
 * mode), builds a ProcessedRequest from the finalized payload, and records a
 * small summary (model + stream flag) to the optional stage recorder.
 */
export async function runRespProcessStage2Finalize(options) {
    const { payload, entryEndpoint, requestId, wantsStream, reasoningMode, stageRecorder } = options;
    const finalizedPayload = await finalizeOpenAIChatResponse(payload, {
        requestId,
        endpoint: entryEndpoint,
        stream: wantsStream,
        reasoningMode
    });
    const processedRequest = buildProcessedRequestFromChatResponse(finalizedPayload, {
        stream: wantsStream
    });
    recordStage(stageRecorder, 'resp_process_stage2_finalize', {
        model: finalizedPayload.model,
        stream: wantsStream
    });
    return { finalizedPayload, processedRequest };
}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import type { ProcessedRequest, StandardizedRequest } from '../types/standardized.js';
import type { TargetMetadata } from '../../../router/virtual-router/types.js';
/**
 * Mutates `metadata` in place with routing/target fields (providerKey,
 * providerType, modelId, processMode, …). `originalModelId`/`clientModelId`
 * are only filled when not already set; no-op when `metadata` is undefined.
 */
export declare function applyTargetMetadata(metadata: Record<string, unknown> | undefined, target: TargetMetadata, routeName?: string, originalModel?: string): void;
/**
 * Mutates a request subject in place: sets `model`/`parameters.model` from the
 * target and stamps target metadata. No-op when no model can be derived.
 */
export declare function applyTargetToSubject(subject: StandardizedRequest | ProcessedRequest, target: TargetMetadata, originalModel?: string): void;
/**
 * Derives the model id from a target: explicit `modelId`, else the
 * providerKey suffix after `runtimeKey.`, else after the first dot; null if none.
 */
export declare function extractModelFromTarget(target: TargetMetadata): string | null;
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
/**
 * Stamps routing/target information onto a mutable metadata record.
 *
 * Always overwrites routeName, pipelineId, target, providerKey, providerType,
 * modelId and processMode (defaulting to 'chat'). toolCallIdStyle is copied
 * only when present on the target's responsesConfig. originalModelId and
 * clientModelId are filled from `originalModel` only when not already a
 * non-empty string; assignedModelId mirrors the target's trimmed modelId.
 * No-op when `metadata` is not an object.
 */
export function applyTargetMetadata(metadata, target, routeName, originalModel) {
    if (!metadata || typeof metadata !== 'object') {
        return;
    }
    Object.assign(metadata, {
        routeName,
        pipelineId: target.providerKey,
        target,
        providerKey: target.providerKey,
        providerType: target.providerType,
        modelId: target.modelId,
        processMode: target.processMode || 'chat'
    });
    const idStyle = target.responsesConfig?.toolCallIdStyle;
    if (idStyle) {
        metadata.toolCallIdStyle = idStyle;
    }
    const trimmedOriginal = typeof originalModel === 'string' ? originalModel.trim() : '';
    if (trimmedOriginal) {
        // Preserve any ids set earlier in the pipeline; only fill gaps.
        for (const field of ['originalModelId', 'clientModelId']) {
            if (typeof metadata[field] !== 'string' || !metadata[field]) {
                metadata[field] = trimmedOriginal;
            }
        }
    }
    const assigned = typeof target.modelId === 'string' ? target.modelId.trim() : '';
    if (assigned) {
        metadata.assignedModelId = assigned;
    }
}
|
|
28
|
+
/**
 * Rewrites a request subject (StandardizedRequest/ProcessedRequest) in place
 * to point at the routed target.
 *
 * Derives the model via extractModelFromTarget; if none can be derived the
 * subject is left untouched. Otherwise sets subject.model, merges the model
 * into subject.parameters, and stamps provider/process metadata (creating a
 * default metadata record with originalEndpoint '/v1/chat/completions' when
 * absent). originalModelId/clientModelId are only filled when not already
 * non-empty strings; assignedModelId always reflects the derived model.
 */
export function applyTargetToSubject(subject, target, originalModel) {
    if (!subject || typeof subject !== 'object') {
        return;
    }
    const assignedModel = extractModelFromTarget(target);
    if (!assignedModel) {
        // Nothing to assign; leave the subject exactly as-is.
        return;
    }
    subject.model = assignedModel;
    const priorParams = subject.parameters && typeof subject.parameters === 'object'
        ? subject.parameters
        : {};
    subject.parameters = { ...priorParams, model: assignedModel };
    const meta = subject.metadata && typeof subject.metadata === 'object'
        ? subject.metadata
        : { originalEndpoint: '/v1/chat/completions' };
    meta.providerKey = target.providerKey;
    meta.providerType = target.providerType;
    meta.processMode = target.processMode || 'chat';
    subject.metadata = meta;
    const trimmedOriginal = typeof originalModel === 'string' ? originalModel.trim() : '';
    if (trimmedOriginal) {
        // Fill ids only when not already set by an earlier stage.
        if (typeof meta.originalModelId !== 'string' || !meta.originalModelId) {
            meta.originalModelId = trimmedOriginal;
        }
        if (typeof meta.clientModelId !== 'string' || !meta.clientModelId) {
            meta.clientModelId = trimmedOriginal;
        }
    }
    meta.assignedModelId = assignedModel;
}
|
|
65
|
+
/**
 * Derives a model id from routed target metadata.
 *
 * Resolution order:
 *   1. explicit target.modelId (trimmed) when non-empty;
 *   2. the providerKey suffix after the `runtimeKey.` prefix, when the
 *      runtimeKey is set and providerKey starts with it;
 *   3. the providerKey substring after its first dot.
 * Returns null when no non-empty candidate exists or providerKey is missing.
 */
export function extractModelFromTarget(target) {
    if (!target || typeof target.providerKey !== 'string') {
        return null;
    }
    const key = target.providerKey;
    // 1. An explicit model id always wins.
    if (typeof target.modelId === 'string') {
        const explicit = target.modelId.trim();
        if (explicit.length > 0) {
            return explicit;
        }
    }
    // 2. Strip a known runtime prefix ("runtimeKey.") from the provider key.
    if (typeof target.runtimeKey === 'string' && target.runtimeKey.length > 0) {
        const prefix = `${target.runtimeKey}.`;
        if (key.startsWith(prefix)) {
            const suffix = key.slice(prefix.length).trim();
            if (suffix.length > 0) {
                return suffix;
            }
        }
    }
    // 3. Fall back to whatever follows the first dot in the provider key.
    const dot = key.indexOf('.');
    if (dot > 0 && dot < key.length - 1) {
        const tail = key.slice(dot + 1).trim();
        if (tail.length > 0) {
            return tail;
        }
    }
    return null;
}
|
|
@@ -29,7 +29,7 @@ async function applyRequestToolGovernance(request, context) {
|
|
|
29
29
|
const providerProtocol = readString(metadata.providerProtocol) ?? readString(metadata.provider) ?? 'openai-chat';
|
|
30
30
|
const metadataToolHints = metadata.toolFilterHints;
|
|
31
31
|
const metadataStreamFlag = metadata.stream;
|
|
32
|
-
const
|
|
32
|
+
const inboundStreamIntent = typeof metadataStreamFlag === 'boolean' ? metadataStreamFlag : request.parameters?.stream === true;
|
|
33
33
|
const shaped = {
|
|
34
34
|
model: request.model,
|
|
35
35
|
messages: deepClone(request.messages),
|
|
@@ -43,10 +43,11 @@ async function applyRequestToolGovernance(request, context) {
|
|
|
43
43
|
requestId: context.requestId,
|
|
44
44
|
model: request.model,
|
|
45
45
|
profile: providerProtocol,
|
|
46
|
-
stream:
|
|
46
|
+
stream: inboundStreamIntent,
|
|
47
47
|
toolFilterHints: metadataToolHints
|
|
48
48
|
});
|
|
49
49
|
const governed = normalizeRecord(governedPayload);
|
|
50
|
+
const providerStreamIntent = typeof governed.stream === 'boolean' ? governed.stream : undefined;
|
|
50
51
|
const merged = {
|
|
51
52
|
...request,
|
|
52
53
|
messages: Array.isArray(governed.messages)
|
|
@@ -60,14 +61,19 @@ async function applyRequestToolGovernance(request, context) {
|
|
|
60
61
|
metadata: {
|
|
61
62
|
...request.metadata,
|
|
62
63
|
toolChoice: readToolChoice(governed.tool_choice),
|
|
63
|
-
originalStream:
|
|
64
|
-
stream:
|
|
65
|
-
|
|
66
|
-
: request.parameters?.stream,
|
|
64
|
+
originalStream: inboundStreamIntent,
|
|
65
|
+
stream: inboundStreamIntent,
|
|
66
|
+
providerStream: providerStreamIntent,
|
|
67
67
|
governedTools: governed.tools !== undefined,
|
|
68
68
|
governanceTimestamp: Date.now()
|
|
69
69
|
}
|
|
70
70
|
};
|
|
71
|
+
if (typeof inboundStreamIntent === 'boolean') {
|
|
72
|
+
merged.metadata = {
|
|
73
|
+
...merged.metadata,
|
|
74
|
+
inboundStream: inboundStreamIntent
|
|
75
|
+
};
|
|
76
|
+
}
|
|
71
77
|
if (typeof governed.stream === 'boolean') {
|
|
72
78
|
merged.parameters = {
|
|
73
79
|
...merged.parameters,
|
|
@@ -220,17 +226,17 @@ function castCustomTool(tool) {
|
|
|
220
226
|
}
|
|
221
227
|
function castToolParameters(value) {
|
|
222
228
|
if (isRecord(value)) {
|
|
223
|
-
const
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
}
|
|
229
|
+
const cloned = deepClone(value);
|
|
230
|
+
if (cloned && typeof cloned === 'object' && !Array.isArray(cloned)) {
|
|
231
|
+
const schema = cloned;
|
|
232
|
+
if (!('type' in schema)) {
|
|
233
|
+
schema.type = 'object';
|
|
234
|
+
}
|
|
235
|
+
if (!isRecord(schema.properties)) {
|
|
236
|
+
schema.properties = {};
|
|
237
|
+
}
|
|
238
|
+
return schema;
|
|
239
|
+
}
|
|
234
240
|
}
|
|
235
241
|
return {
|
|
236
242
|
type: 'object',
|
|
@@ -1,28 +1,31 @@
|
|
|
1
|
-
import { Readable } from 'node:stream';
|
|
2
|
-
import { runChatResponseToolFilters } from '../../shared/tool-filter-pipeline.js';
|
|
3
|
-
import { finalizeOpenAIChatResponse } from '../../shared/openai-finalizer.js';
|
|
4
|
-
import { buildProcessedRequestFromChatResponse } from './chat-response-utils.js';
|
|
5
|
-
import { buildAnthropicResponseFromChat } from './response-runtime.js';
|
|
6
|
-
import { buildResponsesPayloadFromChat } from '../../responses/responses-openai-bridge.js';
|
|
7
|
-
import { defaultSseCodecRegistry } from '../../../sse/registry/sse-codec-registry.js';
|
|
8
|
-
import { ToolGovernanceEngine } from '../tool-governance/index.js';
|
|
9
1
|
import { ChatFormatAdapter } from '../format-adapters/chat-format-adapter.js';
|
|
10
2
|
import { ResponsesFormatAdapter } from '../format-adapters/responses-format-adapter.js';
|
|
11
3
|
import { AnthropicFormatAdapter } from '../format-adapters/anthropic-format-adapter.js';
|
|
12
4
|
import { GeminiFormatAdapter } from '../format-adapters/gemini-format-adapter.js';
|
|
13
5
|
import { OpenAIChatResponseMapper, ResponsesResponseMapper, AnthropicResponseMapper, GeminiResponseMapper } from './response-mappers.js';
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
6
|
+
import { runRespInboundStage1SseDecode } from '../pipeline/stages/resp_inbound/resp_inbound_stage1_sse_decode/index.js';
|
|
7
|
+
import { runRespInboundStage2FormatParse } from '../pipeline/stages/resp_inbound/resp_inbound_stage2_format_parse/index.js';
|
|
8
|
+
import { runRespInboundStage3SemanticMap } from '../pipeline/stages/resp_inbound/resp_inbound_stage3_semantic_map/index.js';
|
|
9
|
+
import { runRespProcessStage1ToolGovernance } from '../pipeline/stages/resp_process/resp_process_stage1_tool_governance/index.js';
|
|
10
|
+
import { runRespProcessStage2Finalize } from '../pipeline/stages/resp_process/resp_process_stage2_finalize/index.js';
|
|
11
|
+
import { runRespOutboundStage1ClientRemap } from '../pipeline/stages/resp_outbound/resp_outbound_stage1_client_remap/index.js';
|
|
12
|
+
import { runRespOutboundStage2SseStream } from '../pipeline/stages/resp_outbound/resp_outbound_stage2_sse_stream/index.js';
|
|
13
|
+
/**
 * Resolves the chat reasoning mode from environment configuration.
 *
 * Reads ROUTECODEX_CHAT_REASONING_MODE (falling back to
 * RCC_CHAT_REASONING_MODE), normalizes it (trim + lowercase), and maps
 * recognized aliases to one of 'keep' | 'drop' | 'append_to_content'.
 * Unset or unrecognized values default to 'keep'.
 *
 * Note: `entryEndpoint` is currently unused but kept for interface stability.
 */
function resolveChatReasoningMode(entryEndpoint) {
    const aliases = {
        keep: 'keep',
        drop: 'drop',
        discard: 'drop',
        text: 'append_to_content',
        append: 'append_to_content',
        append_text: 'append_to_content',
        append_to_content: 'append_to_content'
    };
    const raw = process.env.ROUTECODEX_CHAT_REASONING_MODE || process.env.RCC_CHAT_REASONING_MODE || '';
    const normalized = raw.trim().toLowerCase();
    const resolved = normalized ? aliases[normalized] : undefined;
    return resolved ? resolved : 'keep';
}
|
|
26
29
|
const PROVIDER_RESPONSE_REGISTRY = {
|
|
27
30
|
'openai-chat': {
|
|
28
31
|
protocol: 'openai-chat',
|
|
@@ -45,33 +48,6 @@ const PROVIDER_RESPONSE_REGISTRY = {
|
|
|
45
48
|
createMapper: () => new GeminiResponseMapper()
|
|
46
49
|
}
|
|
47
50
|
};
|
|
48
|
-
function recordStage(recorder, stage, payload) {
|
|
49
|
-
if (!recorder)
|
|
50
|
-
return;
|
|
51
|
-
try {
|
|
52
|
-
recorder.record(stage, payload);
|
|
53
|
-
}
|
|
54
|
-
catch {
|
|
55
|
-
/* ignore */
|
|
56
|
-
}
|
|
57
|
-
}
|
|
58
|
-
function extractSseStream(payload) {
|
|
59
|
-
if (!payload || typeof payload !== 'object') {
|
|
60
|
-
return undefined;
|
|
61
|
-
}
|
|
62
|
-
const direct = payload.__sse_responses || payload.__sse_stream;
|
|
63
|
-
if (direct && typeof direct.pipe === 'function') {
|
|
64
|
-
return direct;
|
|
65
|
-
}
|
|
66
|
-
const nested = payload.data;
|
|
67
|
-
if (nested && typeof nested === 'object') {
|
|
68
|
-
const inner = nested.__sse_responses || nested.__sse_stream;
|
|
69
|
-
if (inner && typeof inner.pipe === 'function') {
|
|
70
|
-
return inner;
|
|
71
|
-
}
|
|
72
|
-
}
|
|
73
|
-
return undefined;
|
|
74
|
-
}
|
|
75
51
|
function resolveClientProtocol(entryEndpoint) {
|
|
76
52
|
const lowered = (entryEndpoint || '').toLowerCase();
|
|
77
53
|
if (lowered.includes('/v1/responses'))
|
|
@@ -80,14 +56,6 @@ function resolveClientProtocol(entryEndpoint) {
|
|
|
80
56
|
return 'anthropic-messages';
|
|
81
57
|
return 'openai-chat';
|
|
82
58
|
}
|
|
83
|
-
async function maybeCreateSseStream(protocol, payload, requestId) {
|
|
84
|
-
const codec = defaultSseCodecRegistry.get(protocol);
|
|
85
|
-
if (!codec) {
|
|
86
|
-
const stream = new Readable({ read() { this.push(null); } });
|
|
87
|
-
return stream;
|
|
88
|
-
}
|
|
89
|
-
return codec.convertJsonToSse(payload, { requestId });
|
|
90
|
-
}
|
|
91
59
|
function supportsSseProtocol(protocol) {
|
|
92
60
|
return protocol === 'openai-chat' || protocol === 'openai-responses' || protocol === 'anthropic-messages' || protocol === 'gemini-chat';
|
|
93
61
|
}
|
|
@@ -117,86 +85,65 @@ function applyModelOverride(payload, model) {
|
|
|
117
85
|
}
|
|
118
86
|
export async function convertProviderResponse(options) {
|
|
119
87
|
const clientProtocol = resolveClientProtocol(options.entryEndpoint);
|
|
120
|
-
const passthroughStream = extractSseStream(options.providerResponse);
|
|
121
|
-
const protocolsMatch = clientProtocol === options.providerProtocol;
|
|
122
|
-
const modelHint = typeof options.context.modelId === 'string' ? options.context.modelId : undefined;
|
|
123
88
|
const displayModel = extractDisplayModel(options.context);
|
|
124
|
-
let providerPayload = options.providerResponse;
|
|
125
|
-
if (passthroughStream) {
|
|
126
|
-
if (options.wantsStream && protocolsMatch && supportsSseProtocol(clientProtocol)) {
|
|
127
|
-
recordStage(options.stageRecorder, STAGES.clientMap, { passthrough: true, protocol: clientProtocol });
|
|
128
|
-
return { __sse_responses: passthroughStream };
|
|
129
|
-
}
|
|
130
|
-
if (supportsSseProtocol(options.providerProtocol)) {
|
|
131
|
-
try {
|
|
132
|
-
const codec = defaultSseCodecRegistry.get(options.providerProtocol);
|
|
133
|
-
const decoded = await codec.convertSseToJson(passthroughStream, {
|
|
134
|
-
requestId: options.context.requestId,
|
|
135
|
-
model: modelHint
|
|
136
|
-
});
|
|
137
|
-
if (decoded && typeof decoded === 'object') {
|
|
138
|
-
providerPayload = decoded;
|
|
139
|
-
recordStage(options.stageRecorder, STAGES.sseDecode, { protocol: options.providerProtocol });
|
|
140
|
-
}
|
|
141
|
-
}
|
|
142
|
-
catch (error) {
|
|
143
|
-
recordStage(options.stageRecorder, STAGES.sseDecode, {
|
|
144
|
-
protocol: options.providerProtocol,
|
|
145
|
-
error: error instanceof Error ? error.message : String(error)
|
|
146
|
-
});
|
|
147
|
-
}
|
|
148
|
-
}
|
|
149
|
-
}
|
|
150
89
|
const plan = PROVIDER_RESPONSE_REGISTRY[options.providerProtocol];
|
|
151
90
|
if (!plan) {
|
|
152
91
|
throw new Error(`Unknown provider protocol: ${options.providerProtocol}`);
|
|
153
92
|
}
|
|
93
|
+
const inboundStage1 = await runRespInboundStage1SseDecode({
|
|
94
|
+
providerProtocol: options.providerProtocol,
|
|
95
|
+
payload: options.providerResponse,
|
|
96
|
+
adapterContext: options.context,
|
|
97
|
+
wantsStream: options.wantsStream,
|
|
98
|
+
stageRecorder: options.stageRecorder
|
|
99
|
+
});
|
|
154
100
|
const formatAdapter = plan.createFormatAdapter();
|
|
155
101
|
const mapper = plan.createMapper();
|
|
156
|
-
const formatEnvelope = await
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
102
|
+
const formatEnvelope = await runRespInboundStage2FormatParse({
|
|
103
|
+
adapterContext: options.context,
|
|
104
|
+
payload: inboundStage1.payload,
|
|
105
|
+
formatAdapter,
|
|
106
|
+
stageRecorder: options.stageRecorder
|
|
107
|
+
});
|
|
108
|
+
const chatResponse = await runRespInboundStage3SemanticMap({
|
|
109
|
+
adapterContext: options.context,
|
|
110
|
+
formatEnvelope,
|
|
111
|
+
mapper,
|
|
112
|
+
stageRecorder: options.stageRecorder
|
|
113
|
+
});
|
|
114
|
+
const governanceResult = await runRespProcessStage1ToolGovernance({
|
|
115
|
+
payload: chatResponse,
|
|
161
116
|
entryEndpoint: options.entryEndpoint,
|
|
162
117
|
requestId: options.context.requestId,
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
const { payload: governed, summary: responseGovernance } = toolGovernanceEngine.governResponse(filtered, clientProtocol);
|
|
166
|
-
recordStage(options.stageRecorder, STAGES.governance, {
|
|
167
|
-
...governed,
|
|
168
|
-
__toolGovernanceSummary: responseGovernance
|
|
118
|
+
clientProtocol,
|
|
119
|
+
stageRecorder: options.stageRecorder
|
|
169
120
|
});
|
|
170
|
-
const
|
|
121
|
+
const finalizeResult = await runRespProcessStage2Finalize({
|
|
122
|
+
payload: governanceResult.governedPayload,
|
|
123
|
+
entryEndpoint: options.entryEndpoint,
|
|
171
124
|
requestId: options.context.requestId,
|
|
172
|
-
|
|
173
|
-
|
|
125
|
+
wantsStream: options.wantsStream,
|
|
126
|
+
reasoningMode: resolveChatReasoningMode(options.entryEndpoint),
|
|
127
|
+
stageRecorder: options.stageRecorder
|
|
174
128
|
});
|
|
175
|
-
applyModelOverride(
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
129
|
+
applyModelOverride(finalizeResult.finalizedPayload, displayModel);
|
|
130
|
+
const clientPayload = runRespOutboundStage1ClientRemap({
|
|
131
|
+
payload: finalizeResult.finalizedPayload,
|
|
132
|
+
clientProtocol,
|
|
133
|
+
requestId: options.context.requestId,
|
|
134
|
+
adapterContext: options.context,
|
|
135
|
+
stageRecorder: options.stageRecorder
|
|
179
136
|
});
|
|
180
|
-
recordStage(options.stageRecorder, STAGES.processed, processedRequest);
|
|
181
|
-
let clientPayload;
|
|
182
|
-
if (clientProtocol === 'openai-chat') {
|
|
183
|
-
clientPayload = finalized;
|
|
184
|
-
}
|
|
185
|
-
else if (clientProtocol === 'anthropic-messages') {
|
|
186
|
-
clientPayload = buildAnthropicResponseFromChat(finalized);
|
|
187
|
-
}
|
|
188
|
-
else {
|
|
189
|
-
clientPayload = buildResponsesPayloadFromChat(finalized, {
|
|
190
|
-
requestId: options.context.requestId
|
|
191
|
-
});
|
|
192
|
-
}
|
|
193
137
|
applyModelOverride(clientPayload, displayModel);
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
138
|
+
const outbound = await runRespOutboundStage2SseStream({
|
|
139
|
+
clientPayload,
|
|
140
|
+
clientProtocol,
|
|
141
|
+
requestId: options.context.requestId,
|
|
142
|
+
wantsStream: options.wantsStream,
|
|
143
|
+
stageRecorder: options.stageRecorder
|
|
144
|
+
});
|
|
145
|
+
if (outbound.stream) {
|
|
146
|
+
return { __sse_responses: outbound.stream, format: clientProtocol };
|
|
197
147
|
}
|
|
198
|
-
|
|
199
|
-
const sseStream = await maybeCreateSseStream(clientProtocol, clientPayload, options.context.requestId);
|
|
200
|
-
recordStage(options.stageRecorder, STAGES.ssePost, {});
|
|
201
|
-
return { __sse_responses: sseStream, format: clientProtocol };
|
|
148
|
+
return { body: clientPayload, format: clientProtocol };
|
|
202
149
|
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type { FormatEnvelope } from '../types/format-envelope.js';
|
|
2
|
+
import type { AdapterContext } from '../types/chat-envelope.js';
|
|
3
|
+
import type { JsonObject } from '../types/json.js';
|
|
4
|
+
export type ChatCompletionLike = JsonObject;
|
|
5
|
+
export interface ResponseMapper {
|
|
6
|
+
toChatCompletion(format: FormatEnvelope, ctx: AdapterContext): Promise<ChatCompletionLike> | ChatCompletionLike;
|
|
7
|
+
}
|
|
8
|
+
export declare class OpenAIChatResponseMapper implements ResponseMapper {
|
|
9
|
+
toChatCompletion(format: FormatEnvelope, _ctx: AdapterContext): ChatCompletionLike;
|
|
10
|
+
}
|
|
11
|
+
export declare class ResponsesResponseMapper implements ResponseMapper {
|
|
12
|
+
toChatCompletion(format: FormatEnvelope, _ctx: AdapterContext): ChatCompletionLike;
|
|
13
|
+
}
|
|
14
|
+
export declare class AnthropicResponseMapper implements ResponseMapper {
|
|
15
|
+
toChatCompletion(format: FormatEnvelope, ctx: AdapterContext): ChatCompletionLike;
|
|
16
|
+
}
|
|
17
|
+
export declare class GeminiResponseMapper implements ResponseMapper {
|
|
18
|
+
toChatCompletion(format: FormatEnvelope, _ctx: AdapterContext): ChatCompletionLike;
|
|
19
|
+
}
|
|
@@ -12,8 +12,9 @@ export class ResponsesResponseMapper {
|
|
|
12
12
|
}
|
|
13
13
|
}
|
|
14
14
|
export class AnthropicResponseMapper {
|
|
15
|
-
toChatCompletion(format,
|
|
16
|
-
|
|
15
|
+
toChatCompletion(format, ctx) {
|
|
16
|
+
const aliasMap = extractAnthropicAliasMap(ctx);
|
|
17
|
+
return buildOpenAIChatFromAnthropicMessage(format.payload ?? {}, { aliasMap });
|
|
17
18
|
}
|
|
18
19
|
}
|
|
19
20
|
export class GeminiResponseMapper {
|
|
@@ -21,3 +22,22 @@ export class GeminiResponseMapper {
|
|
|
21
22
|
return buildOpenAIChatFromGeminiResponse(format.payload ?? {});
|
|
22
23
|
}
|
|
23
24
|
}
|
|
25
|
+
function extractAnthropicAliasMap(ctx) {
|
|
26
|
+
const candidate = ctx?.anthropicToolNameMap;
|
|
27
|
+
if (!candidate || typeof candidate !== 'object' || Array.isArray(candidate)) {
|
|
28
|
+
return undefined;
|
|
29
|
+
}
|
|
30
|
+
const normalized = {};
|
|
31
|
+
for (const [key, value] of Object.entries(candidate)) {
|
|
32
|
+
if (typeof key !== 'string' || typeof value !== 'string') {
|
|
33
|
+
continue;
|
|
34
|
+
}
|
|
35
|
+
const trimmedKey = key.trim();
|
|
36
|
+
const trimmedValue = value.trim();
|
|
37
|
+
if (!trimmedKey.length || !trimmedValue.length) {
|
|
38
|
+
continue;
|
|
39
|
+
}
|
|
40
|
+
normalized[trimmedKey] = trimmedValue;
|
|
41
|
+
}
|
|
42
|
+
return Object.keys(normalized).length ? normalized : undefined;
|
|
43
|
+
}
|