@jsonstudio/llms 0.4.4 → 0.4.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/conversion/codec-registry.js +11 -1
- package/dist/conversion/codecs/anthropic-openai-codec.d.ts +13 -0
- package/dist/conversion/codecs/anthropic-openai-codec.js +18 -473
- package/dist/conversion/codecs/gemini-openai-codec.js +91 -48
- package/dist/conversion/codecs/responses-openai-codec.js +9 -2
- package/dist/conversion/hub/format-adapters/anthropic-format-adapter.js +3 -0
- package/dist/conversion/hub/format-adapters/chat-format-adapter.js +3 -0
- package/dist/conversion/hub/format-adapters/gemini-format-adapter.js +3 -0
- package/dist/conversion/hub/format-adapters/responses-format-adapter.d.ts +19 -0
- package/dist/conversion/hub/format-adapters/responses-format-adapter.js +9 -0
- package/dist/conversion/hub/node-support.js +3 -1
- package/dist/conversion/hub/pipeline/hub-pipeline.js +37 -32
- package/dist/conversion/hub/response/provider-response.js +1 -1
- package/dist/conversion/hub/response/response-mappers.js +1 -1
- package/dist/conversion/hub/response/response-runtime.js +109 -10
- package/dist/conversion/hub/semantic-mappers/anthropic-mapper.js +70 -156
- package/dist/conversion/hub/semantic-mappers/chat-mapper.js +63 -52
- package/dist/conversion/hub/semantic-mappers/gemini-mapper.js +76 -143
- package/dist/conversion/hub/semantic-mappers/responses-mapper.js +40 -160
- package/dist/conversion/hub/standardized-bridge.js +3 -0
- package/dist/conversion/hub/tool-governance/rules.js +2 -2
- package/dist/conversion/index.d.ts +5 -0
- package/dist/conversion/index.js +5 -0
- package/dist/conversion/pipeline/codecs/v2/anthropic-openai-pipeline.d.ts +12 -0
- package/dist/conversion/pipeline/codecs/v2/anthropic-openai-pipeline.js +100 -0
- package/dist/conversion/pipeline/codecs/v2/openai-openai-pipeline.d.ts +15 -0
- package/dist/conversion/pipeline/codecs/v2/openai-openai-pipeline.js +174 -0
- package/dist/conversion/pipeline/codecs/v2/responses-openai-pipeline.d.ts +14 -0
- package/dist/conversion/pipeline/codecs/v2/responses-openai-pipeline.js +166 -0
- package/dist/conversion/pipeline/codecs/v2/shared/openai-chat-helpers.d.ts +13 -0
- package/dist/conversion/pipeline/codecs/v2/shared/openai-chat-helpers.js +66 -0
- package/dist/conversion/pipeline/hooks/adapter-context.d.ts +7 -0
- package/dist/conversion/pipeline/hooks/adapter-context.js +18 -0
- package/dist/conversion/pipeline/hooks/protocol-hooks.d.ts +67 -0
- package/dist/conversion/pipeline/hooks/protocol-hooks.js +1 -0
- package/dist/conversion/pipeline/index.d.ts +35 -0
- package/dist/conversion/pipeline/index.js +103 -0
- package/dist/conversion/pipeline/meta/meta-bag.d.ts +20 -0
- package/dist/conversion/pipeline/meta/meta-bag.js +81 -0
- package/dist/conversion/pipeline/schema/canonical-chat.d.ts +18 -0
- package/dist/conversion/pipeline/schema/canonical-chat.js +1 -0
- package/dist/conversion/pipeline/schema/index.d.ts +1 -0
- package/dist/conversion/pipeline/schema/index.js +1 -0
- package/dist/conversion/responses/responses-openai-bridge.d.ts +48 -0
- package/dist/conversion/responses/responses-openai-bridge.js +157 -1146
- package/dist/conversion/shared/anthropic-message-utils.d.ts +12 -0
- package/dist/conversion/shared/anthropic-message-utils.js +587 -0
- package/dist/conversion/shared/bridge-actions.d.ts +39 -0
- package/dist/conversion/shared/bridge-actions.js +709 -0
- package/dist/conversion/shared/bridge-conversation-store.d.ts +41 -0
- package/dist/conversion/shared/bridge-conversation-store.js +279 -0
- package/dist/conversion/shared/bridge-id-utils.d.ts +7 -0
- package/dist/conversion/shared/bridge-id-utils.js +42 -0
- package/dist/conversion/shared/bridge-instructions.d.ts +1 -0
- package/dist/conversion/shared/bridge-instructions.js +113 -0
- package/dist/conversion/shared/bridge-message-types.d.ts +39 -0
- package/dist/conversion/shared/bridge-message-types.js +1 -0
- package/dist/conversion/shared/bridge-message-utils.d.ts +22 -0
- package/dist/conversion/shared/bridge-message-utils.js +473 -0
- package/dist/conversion/shared/bridge-metadata.d.ts +1 -0
- package/dist/conversion/shared/bridge-metadata.js +1 -0
- package/dist/conversion/shared/bridge-policies.d.ts +18 -0
- package/dist/conversion/shared/bridge-policies.js +276 -0
- package/dist/conversion/shared/bridge-request-adapter.d.ts +28 -0
- package/dist/conversion/shared/bridge-request-adapter.js +430 -0
- package/dist/conversion/shared/chat-output-normalizer.d.ts +4 -0
- package/dist/conversion/shared/chat-output-normalizer.js +56 -0
- package/dist/conversion/shared/chat-request-filters.js +24 -1
- package/dist/conversion/shared/gemini-tool-utils.d.ts +5 -0
- package/dist/conversion/shared/gemini-tool-utils.js +130 -0
- package/dist/conversion/shared/metadata-passthrough.d.ts +11 -0
- package/dist/conversion/shared/metadata-passthrough.js +57 -0
- package/dist/conversion/shared/output-content-normalizer.d.ts +12 -0
- package/dist/conversion/shared/output-content-normalizer.js +119 -0
- package/dist/conversion/shared/reasoning-normalizer.d.ts +21 -0
- package/dist/conversion/shared/reasoning-normalizer.js +368 -0
- package/dist/conversion/shared/reasoning-tool-normalizer.d.ts +12 -0
- package/dist/conversion/shared/reasoning-tool-normalizer.js +132 -0
- package/dist/conversion/shared/reasoning-tool-parser.d.ts +10 -0
- package/dist/conversion/shared/reasoning-tool-parser.js +95 -0
- package/dist/conversion/shared/reasoning-utils.d.ts +2 -0
- package/dist/conversion/shared/reasoning-utils.js +42 -0
- package/dist/conversion/shared/responses-conversation-store.js +5 -11
- package/dist/conversion/shared/responses-message-utils.d.ts +15 -0
- package/dist/conversion/shared/responses-message-utils.js +206 -0
- package/dist/conversion/shared/responses-output-builder.d.ts +15 -0
- package/dist/conversion/shared/responses-output-builder.js +179 -0
- package/dist/conversion/shared/responses-output-utils.d.ts +7 -0
- package/dist/conversion/shared/responses-output-utils.js +108 -0
- package/dist/conversion/shared/responses-request-adapter.d.ts +28 -0
- package/dist/conversion/shared/responses-request-adapter.js +9 -40
- package/dist/conversion/shared/responses-response-utils.d.ts +3 -0
- package/dist/conversion/shared/responses-response-utils.js +209 -0
- package/dist/conversion/shared/responses-tool-utils.d.ts +12 -0
- package/dist/conversion/shared/responses-tool-utils.js +90 -0
- package/dist/conversion/shared/responses-types.d.ts +33 -0
- package/dist/conversion/shared/responses-types.js +1 -0
- package/dist/conversion/shared/tool-call-utils.d.ts +11 -0
- package/dist/conversion/shared/tool-call-utils.js +56 -0
- package/dist/conversion/shared/tool-mapping.d.ts +19 -0
- package/dist/conversion/shared/tool-mapping.js +124 -0
- package/dist/conversion/shared/tool-normalizers.d.ts +4 -0
- package/dist/conversion/shared/tool-normalizers.js +84 -0
- package/dist/router/virtual-router/bootstrap.js +18 -3
- package/dist/router/virtual-router/provider-registry.js +4 -2
- package/dist/router/virtual-router/types.d.ts +212 -0
- package/dist/sse/index.d.ts +38 -2
- package/dist/sse/index.js +27 -0
- package/dist/sse/json-to-sse/anthropic-json-to-sse-converter.d.ts +14 -0
- package/dist/sse/json-to-sse/anthropic-json-to-sse-converter.js +106 -73
- package/dist/sse/json-to-sse/chat-json-to-sse-converter.js +6 -2
- package/dist/sse/json-to-sse/gemini-json-to-sse-converter.d.ts +14 -0
- package/dist/sse/json-to-sse/gemini-json-to-sse-converter.js +99 -0
- package/dist/sse/json-to-sse/index.d.ts +7 -0
- package/dist/sse/json-to-sse/index.js +2 -0
- package/dist/sse/json-to-sse/sequencers/anthropic-sequencer.d.ts +13 -0
- package/dist/sse/json-to-sse/sequencers/anthropic-sequencer.js +150 -0
- package/dist/sse/json-to-sse/sequencers/chat-sequencer.d.ts +39 -0
- package/dist/sse/json-to-sse/sequencers/chat-sequencer.js +49 -3
- package/dist/sse/json-to-sse/sequencers/gemini-sequencer.d.ts +10 -0
- package/dist/sse/json-to-sse/sequencers/gemini-sequencer.js +95 -0
- package/dist/sse/json-to-sse/sequencers/responses-sequencer.js +31 -5
- package/dist/sse/registry/sse-codec-registry.d.ts +32 -0
- package/dist/sse/registry/sse-codec-registry.js +30 -1
- package/dist/sse/shared/reasoning-dispatcher.d.ts +10 -0
- package/dist/sse/shared/reasoning-dispatcher.js +25 -0
- package/dist/sse/shared/responses-output-normalizer.d.ts +12 -0
- package/dist/sse/shared/responses-output-normalizer.js +45 -0
- package/dist/sse/shared/serializers/anthropic-event-serializer.d.ts +2 -0
- package/dist/sse/shared/serializers/anthropic-event-serializer.js +9 -0
- package/dist/sse/shared/serializers/gemini-event-serializer.d.ts +2 -0
- package/dist/sse/shared/serializers/gemini-event-serializer.js +5 -0
- package/dist/sse/shared/serializers/index.d.ts +41 -0
- package/dist/sse/shared/serializers/index.js +2 -0
- package/dist/sse/shared/writer.d.ts +127 -0
- package/dist/sse/shared/writer.js +37 -1
- package/dist/sse/sse-to-json/anthropic-sse-to-json-converter.d.ts +11 -0
- package/dist/sse/sse-to-json/anthropic-sse-to-json-converter.js +92 -127
- package/dist/sse/sse-to-json/builders/anthropic-response-builder.d.ts +16 -0
- package/dist/sse/sse-to-json/builders/anthropic-response-builder.js +151 -0
- package/dist/sse/sse-to-json/builders/response-builder.d.ts +165 -0
- package/dist/sse/sse-to-json/builders/response-builder.js +27 -6
- package/dist/sse/sse-to-json/chat-sse-to-json-converter.d.ts +114 -0
- package/dist/sse/sse-to-json/chat-sse-to-json-converter.js +79 -3
- package/dist/sse/sse-to-json/gemini-sse-to-json-converter.d.ts +13 -0
- package/dist/sse/sse-to-json/gemini-sse-to-json-converter.js +160 -0
- package/dist/sse/sse-to-json/index.d.ts +7 -0
- package/dist/sse/sse-to-json/index.js +2 -0
- package/dist/sse/sse-to-json/parsers/sse-parser.js +53 -1
- package/dist/sse/types/anthropic-types.d.ts +170 -0
- package/dist/sse/types/anthropic-types.js +8 -5
- package/dist/sse/types/chat-types.d.ts +10 -0
- package/dist/sse/types/chat-types.js +2 -1
- package/dist/sse/types/core-interfaces.d.ts +1 -1
- package/dist/sse/types/gemini-types.d.ts +116 -0
- package/dist/sse/types/gemini-types.js +5 -0
- package/dist/sse/types/index.d.ts +5 -2
- package/dist/sse/types/index.js +2 -0
- package/package.json +1 -1
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import { ProtocolConversionPipeline } from '../../index.js';
|
|
2
|
+
import { chatEnvelopeToStandardized } from '../../../hub/standardized-bridge.js';
|
|
3
|
+
import { AnthropicFormatAdapter } from '../../../hub/format-adapters/anthropic-format-adapter.js';
|
|
4
|
+
import { AnthropicSemanticMapper } from '../../../hub/semantic-mappers/anthropic-mapper.js';
|
|
5
|
+
import { buildAnthropicFromOpenAIChat } from '../../../codecs/anthropic-openai-codec.js';
|
|
6
|
+
import { buildAdapterContextFromPipeline } from '../../hooks/adapter-context.js';
|
|
7
|
+
import { runStandardChatRequestFilters } from '../../../index.js';
|
|
8
|
+
import { canonicalizeOpenAIChatResponse, convertStandardizedToOpenAIChat as convertCanonicalToOpenAIChat, OPENAI_PROTOCOL } from './shared/openai-chat-helpers.js';
|
|
9
|
+
// Default entry endpoint and protocol identifier for the Anthropic codec.
const DEFAULT_ANTHROPIC_ENDPOINT = '/v1/messages';
const ANTHROPIC_PROTOCOL = 'anthropic-messages';
/**
 * Ensures `value` is a plain JSON object (non-null, non-array) and returns it.
 * @param value - candidate payload
 * @param stage - pipeline stage name, used in the error message
 * @throws {Error} when the payload is not a plain object
 */
function assertJsonObject(value, stage) {
    const isPlainObject = Boolean(value) && typeof value === 'object' && !Array.isArray(value);
    if (!isPlainObject) {
        throw new Error(`Anthropic pipeline codec requires JSON object payload at ${stage}`);
    }
    return value;
}
|
|
17
|
+
/**
 * Builds the inbound/outbound protocol hooks for the Anthropic <-> OpenAI
 * chat pipeline. Inbound parses an Anthropic wire request into canonical
 * chat form; outbound serializes a canonical chat response back into the
 * Anthropic payload shape.
 */
function createAnthropicHooks() {
    const adapter = new AnthropicFormatAdapter();
    const mapper = new AnthropicSemanticMapper();
    const parse = async ({ wire, context }) => {
        const adapterContext = buildAdapterContextFromPipeline(context, {
            defaultEntryEndpoint: DEFAULT_ANTHROPIC_ENDPOINT,
            overrideProtocol: ANTHROPIC_PROTOCOL
        });
        const formatEnvelope = await adapter.parseRequest(wire, adapterContext);
        const chatEnvelope = await mapper.toChat(formatEnvelope, adapterContext);
        const canonical = chatEnvelopeToStandardized(chatEnvelope, {
            adapterContext,
            endpoint: context.entryEndpoint ?? DEFAULT_ANTHROPIC_ENDPOINT,
            requestId: context.requestId
        });
        return { canonical };
    };
    const serialize = async ({ canonical }) => ({
        payload: assertJsonObject(buildAnthropicFromOpenAIChat(canonical), 'anthropic_outbound_serialize')
    });
    return {
        id: 'anthropic-openai-v2',
        protocol: ANTHROPIC_PROTOCOL,
        inbound: { parse },
        outbound: { serialize }
    };
}
|
|
47
|
+
/**
 * Derives the protocol pipeline context from a conversion profile and the
 * caller-supplied conversion context, falling back to Anthropic defaults.
 * NOTE(review): here `context.targetProtocol` takes precedence over
 * `profile.incomingProtocol` for providerProtocol, the opposite order to the
 * sibling OpenAI/Responses codecs — confirm this asymmetry is intentional.
 */
function buildPipelineContext(profile, context) {
    const entryEndpoint = context.entryEndpoint ?? context.endpoint ?? DEFAULT_ANTHROPIC_ENDPOINT;
    const providerProtocol = context.targetProtocol ?? profile.incomingProtocol ?? ANTHROPIC_PROTOCOL;
    const targetProtocol = profile.outgoingProtocol ?? context.targetProtocol ?? OPENAI_PROTOCOL;
    const { requestId, stream, metadata } = context;
    return {
        requestId,
        entryEndpoint,
        providerProtocol,
        targetProtocol,
        profileId: profile.id,
        stream,
        metadata
    };
}
|
|
58
|
+
/**
 * Codec that accepts Anthropic Messages requests, converts them to OpenAI
 * chat-completions requests for the provider, and maps OpenAI chat responses
 * back into Anthropic payloads. Must be initialize()d before use.
 */
export class AnthropicOpenAIPipelineCodec {
    id = 'anthropic-openai-v2';
    pipeline;
    initialized = false;
    constructor() {
        this.pipeline = new ProtocolConversionPipeline(createAnthropicHooks());
    }
    /** Marks the codec ready; no asynchronous setup is currently performed. */
    async initialize() {
        this.initialized = true;
    }
    /** @throws {Error} when called before initialize(). */
    ensureInitialized() {
        if (this.initialized) {
            return;
        }
        throw new Error('AnthropicOpenAIPipelineCodec must be initialized before use');
    }
    /**
     * Converts an Anthropic request payload into an OpenAI chat request and
     * runs the standard chat request filters on the result.
     */
    async convertRequest(payload, profile, context) {
        this.ensureInitialized();
        const inbound = await this.pipeline.convertInbound({
            payload: assertJsonObject(payload, 'anthropic_inbound_request'),
            context: buildPipelineContext(profile, context)
        });
        const openaiPayload = await convertCanonicalToOpenAIChat(inbound.canonical, inbound.context);
        const filterContext = {
            ...context,
            requestId: context.requestId ?? inbound.context.requestId ?? `req_${Date.now()}`,
            entryEndpoint: context.entryEndpoint ?? context.endpoint ?? DEFAULT_ANTHROPIC_ENDPOINT,
            endpoint: context.endpoint ?? context.entryEndpoint ?? DEFAULT_ANTHROPIC_ENDPOINT
        };
        return runStandardChatRequestFilters(openaiPayload, profile, filterContext);
    }
    /**
     * Converts a provider (OpenAI chat) response back into an Anthropic
     * Messages payload.
     */
    async convertResponse(payload, profile, context) {
        this.ensureInitialized();
        const sanitized = await canonicalizeOpenAIChatResponse(assertJsonObject(payload, 'openai_chat_response'), context);
        const outbound = await this.pipeline.convertOutbound({
            canonical: sanitized,
            context: buildPipelineContext(profile, context)
        });
        return outbound.payload;
    }
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import type { ConversionCodec, ConversionContext, ConversionProfile } from '../../../types.js';
import type { JsonObject } from '../../../hub/types/json.js';
/**
 * OpenAI-to-OpenAI pass-through pipeline codec: normalizes OpenAI chat
 * requests and responses through the canonical conversion pipeline while
 * carrying request-scoped metadata between the request and response legs.
 */
export declare class OpenAIOpenAIPipelineCodec implements ConversionCodec {
    readonly id = "openai-openai-v2";
    private readonly pipeline;
    /** Per-request meta snapshots keyed by request id; consumed on the response leg. */
    private readonly requestMetaStore;
    private initialized;
    constructor();
    /** Must be awaited before convertRequest/convertResponse are used. */
    initialize(): Promise<void>;
    private ensureInitialized;
    private stashMeta;
    private consumeMeta;
    convertRequest(payload: unknown, profile: ConversionProfile, context: ConversionContext): Promise<JsonObject>;
    convertResponse(payload: unknown, profile: ConversionProfile, context: ConversionContext): Promise<JsonObject>;
}
|
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
import { ProtocolConversionPipeline } from '../../index.js';
|
|
2
|
+
import { buildAdapterContextFromPipeline } from '../../hooks/adapter-context.js';
|
|
3
|
+
import { chatEnvelopeToStandardized } from '../../../hub/standardized-bridge.js';
|
|
4
|
+
import { ChatFormatAdapter } from '../../../hub/format-adapters/chat-format-adapter.js';
|
|
5
|
+
import { ChatSemanticMapper } from '../../../hub/semantic-mappers/chat-mapper.js';
|
|
6
|
+
import { runStandardChatRequestFilters } from '../../../index.js';
|
|
7
|
+
import { canonicalizeOpenAIChatResponse, convertStandardizedToOpenAIChat as convertCanonicalToOpenAIChat, DEFAULT_OPENAI_ENDPOINT, OPENAI_PROTOCOL } from './shared/openai-chat-helpers.js';
|
|
8
|
+
/**
 * Validates that `value` is a non-null, non-array object and returns it.
 * @throws {Error} naming `stage` when validation fails.
 */
function assertJsonObject(value, stage) {
    if (value !== null && value !== undefined && typeof value === 'object' && !Array.isArray(value)) {
        return value;
    }
    throw new Error(`OpenAI pipeline codec requires JSON payload at ${stage}`);
}
|
|
14
|
+
function restoreToolCallIndexes(targetMessages, sourceMessages) {
|
|
15
|
+
if (!Array.isArray(targetMessages) || !Array.isArray(sourceMessages)) {
|
|
16
|
+
return;
|
|
17
|
+
}
|
|
18
|
+
const sourceIndexMap = new Map();
|
|
19
|
+
for (const message of sourceMessages) {
|
|
20
|
+
if (!message || typeof message !== 'object')
|
|
21
|
+
continue;
|
|
22
|
+
const toolCalls = message.tool_calls;
|
|
23
|
+
if (!Array.isArray(toolCalls))
|
|
24
|
+
continue;
|
|
25
|
+
toolCalls.forEach((toolCall) => {
|
|
26
|
+
if (!toolCall || typeof toolCall !== 'object')
|
|
27
|
+
return;
|
|
28
|
+
const id = typeof toolCall.id === 'string'
|
|
29
|
+
? String(toolCall.id)
|
|
30
|
+
: undefined;
|
|
31
|
+
if (!id || sourceIndexMap.has(id))
|
|
32
|
+
return;
|
|
33
|
+
const idxValue = toolCall.index;
|
|
34
|
+
if (typeof idxValue !== 'number')
|
|
35
|
+
return;
|
|
36
|
+
sourceIndexMap.set(id, idxValue);
|
|
37
|
+
});
|
|
38
|
+
}
|
|
39
|
+
for (const message of targetMessages) {
|
|
40
|
+
if (!message || typeof message !== 'object')
|
|
41
|
+
continue;
|
|
42
|
+
const toolCalls = message.tool_calls;
|
|
43
|
+
if (!Array.isArray(toolCalls))
|
|
44
|
+
continue;
|
|
45
|
+
toolCalls.forEach((toolCall) => {
|
|
46
|
+
if (!toolCall || typeof toolCall !== 'object')
|
|
47
|
+
return;
|
|
48
|
+
const id = typeof toolCall.id === 'string'
|
|
49
|
+
? String(toolCall.id)
|
|
50
|
+
: undefined;
|
|
51
|
+
if (!id)
|
|
52
|
+
return;
|
|
53
|
+
const sourceIndex = sourceIndexMap.get(id);
|
|
54
|
+
if (sourceIndex === undefined)
|
|
55
|
+
return;
|
|
56
|
+
toolCall.index = sourceIndex;
|
|
57
|
+
});
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
/**
 * Builds the protocol hooks for the OpenAI chat pass-through pipeline.
 * Inbound normalizes an OpenAI wire request into canonical form; outbound
 * emits the canonical response as-is (it is already OpenAI-shaped), after a
 * JSON-object sanity check.
 */
function createOpenAIHooks() {
    const adapter = new ChatFormatAdapter();
    const mapper = new ChatSemanticMapper();
    const parse = async ({ wire, context }) => {
        const adapterContext = buildAdapterContextFromPipeline(context, {
            defaultEntryEndpoint: DEFAULT_OPENAI_ENDPOINT,
            overrideProtocol: OPENAI_PROTOCOL
        });
        const formatEnvelope = await adapter.parseRequest(wire, adapterContext);
        const chatEnvelope = await mapper.toChat(formatEnvelope, adapterContext);
        const canonical = chatEnvelopeToStandardized(chatEnvelope, {
            adapterContext,
            endpoint: adapterContext.entryEndpoint ?? DEFAULT_OPENAI_ENDPOINT,
            requestId: adapterContext.requestId
        });
        return { canonical };
    };
    const serialize = async ({ canonical }) => ({
        payload: assertJsonObject(canonical, 'openai_outbound_serialize')
    });
    return {
        id: 'openai-openai-v2',
        protocol: OPENAI_PROTOCOL,
        inbound: { parse },
        outbound: { serialize }
    };
}
|
|
89
|
+
/**
 * Derives the protocol pipeline context from the conversion profile and the
 * caller-supplied context, defaulting both protocol legs to OpenAI chat.
 */
function buildPipelineContext(profile, context) {
    const entryEndpoint = context.entryEndpoint ?? context.endpoint ?? DEFAULT_OPENAI_ENDPOINT;
    const providerProtocol = profile.incomingProtocol ?? context.targetProtocol ?? OPENAI_PROTOCOL;
    const targetProtocol = profile.outgoingProtocol ?? context.targetProtocol ?? OPENAI_PROTOCOL;
    const { requestId, stream, metadata } = context;
    return {
        requestId,
        entryEndpoint,
        providerProtocol,
        targetProtocol,
        profileId: profile.id,
        stream,
        metadata
    };
}
|
|
100
|
+
/**
 * Pass-through codec for OpenAI chat: requests and responses are round-tripped
 * through the canonical pipeline so the standard filters/normalizers apply,
 * while tool definitions, tool_choice, and streaming tool-call indexes from
 * the original request are preserved.
 */
export class OpenAIOpenAIPipelineCodec {
    id = 'openai-openai-v2';
    pipeline;
    // Per-request meta snapshots, keyed by request id and consumed on the
    // response leg. NOTE(review): entries are only removed by convertResponse;
    // a request that never sees a response leaves its entry behind — confirm
    // an eviction path exists upstream.
    requestMetaStore = new Map();
    initialized = false;
    constructor() {
        this.pipeline = new ProtocolConversionPipeline(createOpenAIHooks());
    }
    /** Marks the codec ready; no asynchronous setup is currently performed. */
    async initialize() {
        this.initialized = true;
    }
    /** @throws {Error} when called before initialize(). */
    ensureInitialized() {
        if (this.initialized) {
            return;
        }
        throw new Error('OpenAIOpenAIPipelineCodec must be initialized before use');
    }
    /** Snapshots the inbound meta bag for later use by convertResponse. */
    stashMeta(requestId, bag) {
        this.requestMetaStore.set(requestId, bag.snapshot());
    }
    /** Removes and returns the stored meta snapshot, if any (one-shot). */
    consumeMeta(requestId) {
        const stored = this.requestMetaStore.get(requestId);
        if (!stored) {
            return undefined;
        }
        this.requestMetaStore.delete(requestId);
        return stored;
    }
    /**
     * Normalizes an OpenAI chat request through the canonical pipeline,
     * re-attaches tools/tool_choice the round-trip may have dropped, applies
     * the standard request filters, and restores tool-call indexes.
     */
    async convertRequest(payload, profile, context) {
        this.ensureInitialized();
        const inboundContext = buildPipelineContext(profile, context);
        const requestId = context.requestId ?? inboundContext.requestId ?? `req_${Date.now()}`;
        inboundContext.requestId = requestId;
        const inboundPayload = assertJsonObject(payload, 'openai_inbound_request');
        const inbound = await this.pipeline.convertInbound({ payload: inboundPayload, context: inboundContext });
        this.stashMeta(requestId, inbound.meta);
        const openaiPayload = await convertCanonicalToOpenAIChat(inbound.canonical, inbound.context);
        // Re-attach tool metadata lost in the canonical round-trip.
        if (!Array.isArray(openaiPayload.tools) && Array.isArray(inboundPayload.tools)) {
            openaiPayload.tools = inboundPayload.tools;
        }
        if (openaiPayload.tool_choice === undefined && inboundPayload.tool_choice !== undefined) {
            openaiPayload.tool_choice = inboundPayload.tool_choice;
        }
        const filterContext = {
            ...context,
            requestId,
            entryEndpoint: context.entryEndpoint ?? DEFAULT_OPENAI_ENDPOINT,
            endpoint: context.endpoint ?? DEFAULT_OPENAI_ENDPOINT
        };
        const filtered = await runStandardChatRequestFilters(openaiPayload, profile, filterContext);
        if (filtered && typeof filtered === 'object') {
            restoreToolCallIndexes(filtered.messages, inboundPayload.messages);
        }
        return filtered;
    }
    /**
     * Canonicalizes an OpenAI chat response and serializes it back out,
     * reusing any meta stashed by the matching convertRequest call.
     */
    async convertResponse(payload, profile, context) {
        this.ensureInitialized();
        const pipelineContext = buildPipelineContext(profile, context);
        const requestId = context.requestId ?? pipelineContext.requestId ?? `req_${Date.now()}`;
        pipelineContext.requestId = requestId;
        const storedMeta = this.consumeMeta(requestId);
        const sanitized = await canonicalizeOpenAIChatResponse(assertJsonObject(payload, 'openai_chat_response'), context);
        const outbound = await this.pipeline.convertOutbound({
            canonical: sanitized,
            context: pipelineContext,
            meta: storedMeta
        });
        return outbound.payload;
    }
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import type { ConversionCodec, ConversionContext, ConversionProfile } from '../../../types.js';
import type { JsonObject } from '../../../hub/types/json.js';
/**
 * Codec that converts OpenAI Responses API requests into OpenAI chat
 * requests and maps chat responses back into Responses payloads, carrying
 * request-scoped metadata between the two legs.
 */
export declare class ResponsesOpenAIPipelineCodec implements ConversionCodec {
    readonly id = "responses-openai-v2";
    private readonly pipeline;
    /** Per-request meta snapshots keyed by request id; read back on the response leg. */
    private readonly requestMetaStore;
    private initialized;
    constructor();
    /** Must be awaited before convertRequest/convertResponse are used. */
    initialize(): Promise<void>;
    private ensureInitialized;
    private stashMeta;
    convertRequest(payload: unknown, profile: ConversionProfile, context: ConversionContext): Promise<JsonObject>;
    convertResponse(payload: unknown, profile: ConversionProfile, context: ConversionContext): Promise<JsonObject>;
}
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
import { ProtocolConversionPipeline } from '../../index.js';
|
|
2
|
+
import { chatEnvelopeToStandardized } from '../../../hub/standardized-bridge.js';
|
|
3
|
+
import { ResponsesFormatAdapter } from '../../../hub/format-adapters/responses-format-adapter.js';
|
|
4
|
+
import { ResponsesSemanticMapper } from '../../../hub/semantic-mappers/responses-mapper.js';
|
|
5
|
+
import { captureResponsesContext, buildChatRequestFromResponses, buildResponsesPayloadFromChat } from '../../../responses/responses-openai-bridge.js';
|
|
6
|
+
import { buildAdapterContextFromPipeline } from '../../hooks/adapter-context.js';
|
|
7
|
+
import { runStandardChatRequestFilters } from '../../../index.js';
|
|
8
|
+
import { canonicalizeOpenAIChatResponse, convertStandardizedToOpenAIChat as convertCanonicalToOpenAIChat, OPENAI_PROTOCOL } from './shared/openai-chat-helpers.js';
|
|
9
|
+
// Default entry endpoint and protocol identifier for the Responses codec.
const DEFAULT_RESPONSES_ENDPOINT = '/v1/responses';
const RESPONSES_PROTOCOL = 'openai-responses';
/**
 * Ensures `value` is a plain JSON object (non-null, non-array) and returns it.
 * @throws {Error} naming `stage` otherwise.
 */
function assertJsonObject(value, stage) {
    const ok = Boolean(value) && typeof value === 'object' && !Array.isArray(value);
    if (ok) {
        return value;
    }
    throw new Error(`Responses pipeline codec requires JSON payload at ${stage}`);
}
|
|
17
|
+
function cloneResponsesContext(context) {
|
|
18
|
+
try {
|
|
19
|
+
return JSON.parse(JSON.stringify(context ?? {}));
|
|
20
|
+
}
|
|
21
|
+
catch {
|
|
22
|
+
return {};
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
/**
 * Returns the stored meta value when it is an object (arrays included, to
 * match the original check); otherwise undefined.
 */
function restoreResponsesContext(value) {
    const usable = Boolean(value) && typeof value === 'object';
    return usable ? value : undefined;
}
|
|
31
|
+
function captureToolResults(payload) {
|
|
32
|
+
const results = [];
|
|
33
|
+
const inputArr = Array.isArray(payload?.input) ? payload.input : [];
|
|
34
|
+
for (const it of inputArr) {
|
|
35
|
+
if (!it || typeof it !== 'object')
|
|
36
|
+
continue;
|
|
37
|
+
const t = String(it.type || '').toLowerCase();
|
|
38
|
+
if (t === 'tool_result' || t === 'tool_message' || t === 'function_call_output') {
|
|
39
|
+
const tool_call_id = it.tool_call_id || it.call_id || it.tool_use_id;
|
|
40
|
+
let output = undefined;
|
|
41
|
+
const rawOut = it.output;
|
|
42
|
+
if (typeof rawOut === 'string')
|
|
43
|
+
output = rawOut;
|
|
44
|
+
else if (rawOut && typeof rawOut === 'object') {
|
|
45
|
+
try {
|
|
46
|
+
output = JSON.stringify(rawOut);
|
|
47
|
+
}
|
|
48
|
+
catch { /* ignore */ }
|
|
49
|
+
}
|
|
50
|
+
results.push({ tool_call_id, output });
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
return results;
|
|
54
|
+
}
|
|
55
|
+
/**
 * Builds the protocol hooks for the Responses <-> OpenAI chat pipeline.
 * Inbound additionally captures Responses-specific request context (tool
 * normalization, tool results) into the meta bag so the outbound leg can
 * rebuild a faithful Responses payload from the OpenAI chat response.
 */
function createResponsesHooks() {
    const adapter = new ResponsesFormatAdapter();
    const mapper = new ResponsesSemanticMapper();
    const parse = async ({ wire, context, meta }) => {
        const adapterContext = buildAdapterContextFromPipeline(context, {
            defaultEntryEndpoint: DEFAULT_RESPONSES_ENDPOINT,
            overrideProtocol: RESPONSES_PROTOCOL
        });
        const formatEnvelope = await adapter.parseRequest(wire, adapterContext);
        const chatEnvelope = await mapper.toChat(formatEnvelope, adapterContext);
        const canonical = chatEnvelopeToStandardized(chatEnvelope, {
            adapterContext,
            endpoint: adapterContext.entryEndpoint,
            requestId: adapterContext.requestId
        });
        // Capture Responses-specific context for outbound mapping parity.
        const responsesContext = captureResponsesContext(wire, { route: { requestId: adapterContext.requestId } });
        const built = buildChatRequestFromResponses(wire, responsesContext);
        if (built.toolsNormalized) {
            responsesContext.toolsNormalized = built.toolsNormalized;
        }
        const capturedResults = captureToolResults(wire);
        if (capturedResults.length > 0) {
            responsesContext.__captured_tool_results = capturedResults;
        }
        meta.set('responsesContext', cloneResponsesContext(responsesContext));
        return { canonical };
    };
    const serialize = async ({ canonical, meta }) => {
        const responsesContext = restoreResponsesContext(meta.consume('responsesContext'));
        const payload = buildResponsesPayloadFromChat(canonical, responsesContext);
        return { payload: assertJsonObject(payload, 'responses_outbound_serialize') };
    };
    return {
        id: 'responses-openai-v2',
        protocol: RESPONSES_PROTOCOL,
        inbound: { parse },
        outbound: { serialize }
    };
}
|
|
98
|
+
/**
 * Derives the protocol pipeline context for a Responses request: the
 * provider side defaults to the Responses protocol, the target side to
 * OpenAI chat.
 */
function buildPipelineContext(profile, context) {
    const entryEndpoint = context.entryEndpoint ?? context.endpoint ?? DEFAULT_RESPONSES_ENDPOINT;
    const providerProtocol = profile.incomingProtocol ?? RESPONSES_PROTOCOL;
    const targetProtocol = profile.outgoingProtocol ?? context.targetProtocol ?? OPENAI_PROTOCOL;
    const { requestId, stream, metadata } = context;
    return {
        requestId,
        entryEndpoint,
        providerProtocol,
        targetProtocol,
        profileId: profile.id,
        stream,
        metadata
    };
}
|
|
109
|
+
/**
 * Codec wiring the Responses pipeline hooks: convertRequest turns a Responses
 * payload into a filtered OpenAI chat request, convertResponse maps the chat
 * response back into a Responses payload using meta stashed at request time.
 */
export class ResponsesOpenAIPipelineCodec {
    id = 'responses-openai-v2';
    pipeline;
    // Per-request meta snapshots; read and cleared in convertResponse.
    requestMetaStore = new Map();
    initialized = false;
    constructor() {
        this.pipeline = new ProtocolConversionPipeline(createResponsesHooks());
    }
    /** Marks the codec ready; no asynchronous setup is currently performed. */
    async initialize() {
        this.initialized = true;
    }
    /** @throws {Error} when called before initialize(). */
    ensureInitialized() {
        if (this.initialized) {
            return;
        }
        throw new Error('ResponsesOpenAIPipelineCodec must be initialized before use');
    }
    /** Snapshots the inbound meta bag for the response leg. */
    stashMeta(requestId, bag) {
        this.requestMetaStore.set(requestId, bag.snapshot());
    }
    /**
     * Converts a Responses request into an OpenAI chat request and runs the
     * standard chat request filters on the result.
     */
    async convertRequest(payload, profile, context) {
        this.ensureInitialized();
        const inboundContext = buildPipelineContext(profile, context);
        const requestId = context.requestId ?? inboundContext.requestId ?? `req_${Date.now()}`;
        inboundContext.requestId = requestId;
        const inbound = await this.pipeline.convertInbound({
            payload: assertJsonObject(payload, 'responses_inbound_request'),
            context: inboundContext
        });
        this.stashMeta(requestId, inbound.meta);
        const openaiPayload = await convertCanonicalToOpenAIChat(inbound.canonical, inbound.context);
        const filterContext = {
            ...context,
            requestId,
            entryEndpoint: context.entryEndpoint ?? DEFAULT_RESPONSES_ENDPOINT,
            endpoint: context.endpoint ?? DEFAULT_RESPONSES_ENDPOINT
        };
        return runStandardChatRequestFilters(openaiPayload, profile, filterContext);
    }
    /**
     * Maps an OpenAI chat response back into a Responses payload, consuming
     * the meta snapshot stashed by the matching convertRequest call.
     */
    async convertResponse(payload, profile, context) {
        this.ensureInitialized();
        const pipelineContext = buildPipelineContext(profile, context);
        const requestId = context.requestId ?? pipelineContext.requestId ?? `req_${Date.now()}`;
        pipelineContext.requestId = requestId;
        // One-shot retrieval of the snapshot stashed in convertRequest.
        const storedMeta = this.requestMetaStore.get(requestId);
        if (storedMeta) {
            this.requestMetaStore.delete(requestId);
        }
        const sanitized = await canonicalizeOpenAIChatResponse(assertJsonObject(payload, 'responses_openai_response'), context);
        const outbound = await this.pipeline.convertOutbound({
            canonical: sanitized,
            context: pipelineContext,
            meta: storedMeta
        });
        return outbound.payload;
    }
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { JsonObject } from '../../../../hub/types/json.js';
import type { ConversionContext } from '../../../../types.js';
import type { CanonicalChatRequest } from '../../../schema/index.js';
import type { ProtocolPipelineContext } from '../../../hooks/protocol-hooks.js';
/** Default OpenAI chat-completions endpoint used when callers supply none. */
export declare const DEFAULT_OPENAI_ENDPOINT = "/v1/chat/completions";
/** Protocol identifier for the OpenAI chat wire format. */
export declare const OPENAI_PROTOCOL = "openai-chat";
/**
 * Convert a canonical (standardized) chat request into an OpenAI
 * chat-completions request payload.
 *
 * @param standardized Canonical chat request to render.
 * @param context Pipeline context supplying requestId/endpoint hints.
 * @param options Optional endpoint fallback.
 */
export declare function convertStandardizedToOpenAIChat(standardized: CanonicalChatRequest, context: ProtocolPipelineContext, options?: {
    /** Fallback entry endpoint; DEFAULT_OPENAI_ENDPOINT when omitted. */
    defaultEndpoint?: string;
}): Promise<JsonObject>;
/**
 * Run an OpenAI chat-completions response through the response filter chain
 * and return the normalized payload.
 *
 * @param payload Raw provider response (JSON object).
 * @param context Conversion context supplying requestId/endpoint hints.
 * @param options Optional endpoint fallback and filter profile id.
 */
export declare function canonicalizeOpenAIChatResponse(payload: JsonObject, context: ConversionContext, options?: {
    /** Fallback endpoint used in the filter context when none is in `context`. */
    defaultEndpoint?: string;
    /** Filter profile id; OPENAI_PROTOCOL when omitted. */
    profile?: string;
}): Promise<JsonObject>;
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { standardizedToChatEnvelope } from '../../../../hub/standardized-bridge.js';
|
|
2
|
+
import { buildAdapterContextFromPipeline } from '../../../hooks/adapter-context.js';
|
|
3
|
+
import { ChatFormatAdapter } from '../../../../hub/format-adapters/chat-format-adapter.js';
|
|
4
|
+
import { ChatSemanticMapper } from '../../../../hub/semantic-mappers/chat-mapper.js';
|
|
5
|
+
import { FilterEngine, ResponseToolTextCanonicalizeFilter, ResponseToolArgumentsStringifyFilter, ResponseFinishInvariantsFilter } from '../../../../../filters/index.js';
|
|
6
|
+
import { normalizeChatResponseReasoningTools } from '../../../../shared/reasoning-tool-normalizer.js';
|
|
7
|
+
// Default OpenAI chat-completions endpoint when the caller provides none.
export const DEFAULT_OPENAI_ENDPOINT = '/v1/chat/completions';
// Protocol identifier for the OpenAI chat wire format.
export const OPENAI_PROTOCOL = 'openai-chat';
// Adapter/mapper instances shared across every call in this module.
// NOTE(review): assumes both are stateless across requests — confirm.
const sharedChatFormatAdapter = new ChatFormatAdapter();
const sharedChatSemanticMapper = new ChatSemanticMapper();
|
|
11
|
+
/**
 * Narrow an arbitrary value to a plain JSON object.
 * Rejects null/undefined, primitives and arrays; otherwise returns the value
 * untouched. `stage` names the call site in the thrown error message.
 */
function ensureJsonObject(value, stage) {
    const isPlainObject = Boolean(value) && typeof value === 'object' && !Array.isArray(value);
    if (isPlainObject) {
        return value;
    }
    throw new Error(`OpenAI chat helper requires JSON object payload at ${stage}`);
}
|
|
17
|
+
/**
 * Render a canonical (standardized) chat request as an OpenAI
 * chat-completions payload: build an adapter context, bridge to the chat
 * envelope, map semantics, then serialize through the chat format adapter.
 * Throws if the adapter yields something other than a JSON object.
 */
export async function convertStandardizedToOpenAIChat(standardized, context, options) {
    const fallbackEndpoint = options?.defaultEndpoint ?? DEFAULT_OPENAI_ENDPOINT;
    const adapterContext = buildAdapterContextFromPipeline(context, {
        defaultEntryEndpoint: fallbackEndpoint,
        overrideProtocol: OPENAI_PROTOCOL
    });
    const envelope = standardizedToChatEnvelope(standardized, { adapterContext });
    const mapped = await sharedChatSemanticMapper.fromChat(envelope, adapterContext);
    const request = await sharedChatFormatAdapter.buildRequest(mapped, adapterContext);
    return ensureJsonObject(request, 'openai_request_build');
}
|
|
27
|
+
/**
 * Normalize an OpenAI chat-completions response before it is converted back
 * to the Responses wire format.
 *
 * Runs the payload through the response filter chain (tool-text
 * canonicalization, optional TOON argument decode, argument stringify,
 * finish-reason invariants) across the `response_pre` → `response_map` →
 * `response_post` stages, then applies best-effort reasoning-tool
 * normalization.
 *
 * @param payload JSON object response from the OpenAI-compatible provider.
 * @param context Conversion context supplying requestId/endpoint hints.
 * @param options Optional `defaultEndpoint` fallback and filter `profile` id.
 * @returns The filtered, normalized response object.
 * @throws If the filter chain yields something other than a JSON object.
 */
export async function canonicalizeOpenAIChatResponse(payload, context, options) {
    const filterContext = {
        requestId: context.requestId ?? `req_${Date.now()}`,
        model: typeof payload.model === 'string' ? payload.model : undefined,
        endpoint: context.entryEndpoint ?? context.endpoint ?? options?.defaultEndpoint ?? DEFAULT_OPENAI_ENDPOINT,
        profile: options?.profile ?? OPENAI_PROTOCOL,
        // Filters expect a debug sink; swallow emissions here.
        debug: { emit: () => { } }
    };
    const engine = new FilterEngine();
    engine.registerFilter(new ResponseToolTextCanonicalizeFilter());
    try {
        // Loaded dynamically: the decode filter is optional in some builds.
        const { ResponseToolArgumentsToonDecodeFilter } = await import('../../../../../filters/index.js');
        engine.registerFilter(new ResponseToolArgumentsToonDecodeFilter());
    }
    catch {
        // optional decode filter
    }
    engine.registerFilter(new ResponseToolArgumentsStringifyFilter());
    engine.registerFilter(new ResponseFinishInvariantsFilter());
    // Fix: the previous version built a full pipeline DTO (pipelineId,
    // processingTime, stages) whose only read fields were requestId and the
    // payload itself — the dead wrapper is dropped.
    let current = payload;
    for (const stage of ['response_pre', 'response_map', 'response_post']) {
        current = await engine.run(stage, current, filterContext);
    }
    const normalized = ensureJsonObject(current, 'openai_response_filters');
    try {
        normalizeChatResponseReasoningTools(normalized, { idPrefixBase: 'reasoning_choice' });
    }
    catch {
        // reasoning normalization best-effort
    }
    return normalized;
}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { AdapterContext } from '../../hub/types/chat-envelope.js';
import type { ProtocolPipelineContext } from './protocol-hooks.js';
/** Options for deriving an AdapterContext from a protocol pipeline context. */
export interface AdapterContextOptions {
    /**
     * Endpoint to use when resolving the entry endpoint.
     * NOTE(review): the current implementation applies this BEFORE
     * `context.entryEndpoint`, so it behaves as an override rather than a
     * default — confirm which precedence is intended.
     */
    defaultEntryEndpoint?: string;
    /** Protocol identifier that takes precedence over any in the context. */
    overrideProtocol?: string;
}
export declare function buildAdapterContextFromPipeline(context: ProtocolPipelineContext, options?: AdapterContextOptions): AdapterContext;
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/**
 * Build an AdapterContext for the hub format adapters from a protocol
 * pipeline context.
 *
 * @param context Pipeline context carrying requestId/endpoint/protocol hints.
 * @param options Optional behavior tweaks:
 *   - `defaultEntryEndpoint`: used only when the context has no entryEndpoint.
 *     (Fix: it previously took precedence over `context.entryEndpoint`, so a
 *     caller-supplied entry endpoint was silently ignored whenever the option
 *     was set — inconsistent with its name and with `overrideProtocol`, which
 *     is the deliberate override.)
 *   - `overrideProtocol`: takes precedence over any protocol in the context.
 * @returns AdapterContext with requestId (generated when absent),
 *   entryEndpoint, providerProtocol, profileId, and a streamingHint derived
 *   from `context.stream` (true → 'force', false → 'disable', else 'auto').
 */
export function buildAdapterContextFromPipeline(context, options) {
    const requestId = context.requestId ?? `req_${Date.now()}`;
    // The context's own endpoint wins; the option is only a fallback default.
    const entryEndpoint = context.entryEndpoint ??
        options?.defaultEntryEndpoint ??
        '/v1/chat/completions';
    const providerProtocol = options?.overrideProtocol ??
        context.providerProtocol ??
        context.targetProtocol ??
        'openai-chat';
    const streamingHint = context.stream === true ? 'force' : context.stream === false ? 'disable' : 'auto';
    return {
        requestId,
        entryEndpoint,
        providerProtocol,
        profileId: context.profileId,
        streamingHint
    };
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import type { JsonObject } from '../../hub/types/json.js';
import type { CanonicalChatRequest, CanonicalChatResponse } from '../schema/index.js';
import type { ConversionMetaBag, ConversionMetaRecord } from '../meta/meta-bag.js';
/**
 * Context threaded through every protocol pipeline hook invocation.
 * All fields are optional; hook implementations supply their own fallbacks.
 */
export interface ProtocolPipelineContext {
    requestId?: string;
    entryEndpoint?: string;
    profileId?: string;
    providerProtocol?: string;
    targetProtocol?: string;
    stream?: boolean;
    metadata?: Record<string, unknown>;
}
/** Inbound hook input: raw wire payload plus the shared meta bag and context. */
export interface InboundHookArgs<TWire = JsonObject> {
    wire: TWire;
    meta: ConversionMetaBag;
    context: ProtocolPipelineContext;
}
/** Hook input carrying a canonical (protocol-neutral) value. */
export interface CanonicalHookArgs<TCanonical> {
    canonical: TCanonical;
    meta: ConversionMetaBag;
    context: ProtocolPipelineContext;
}
/** Outbound hook input: the serialized wire payload about to be returned. */
export interface OutboundWireHookArgs<TWire = JsonObject> {
    wire: TWire;
    meta: ConversionMetaBag;
    context: ProtocolPipelineContext;
}
/** Result of parsing a wire payload into canonical form, with optional meta. */
export interface CanonicalParseResult<TCanonical> {
    canonical: TCanonical;
    meta?: ConversionMetaRecord;
}
/** Parses an inbound wire payload into the canonical representation. */
export type ProtocolParserHook<TWire, TCanonical> = (args: InboundHookArgs<TWire>) => Promise<CanonicalParseResult<TCanonical>> | CanonicalParseResult<TCanonical>;
/** Optional post-parse cleanup; returns nothing, so any effect is via mutation. */
export type ProtocolCleanupHook<TCanonical> = (args: CanonicalHookArgs<TCanonical>) => Promise<void> | void;
/** Result of serializing a canonical value back to the wire format. */
export interface ProtocolSerializationResult<TWire> {
    payload: TWire;
    meta?: ConversionMetaRecord;
}
/** Serializes a canonical value into the outbound wire payload. */
export type ProtocolSerializerHook<TCanonical, TWire> = (args: CanonicalHookArgs<TCanonical>) => Promise<ProtocolSerializationResult<TWire>> | ProtocolSerializationResult<TWire>;
/** Optional pre-serialization augmentation; effect is via mutation only. */
export type ProtocolAugmentationHook<TCanonical> = (args: CanonicalHookArgs<TCanonical>) => Promise<void> | void;
/** A single validation failure reported by a validation hook. */
export interface ProtocolValidationErrorDetail {
    path?: string;
    message: string;
    code?: string;
}
/** Aggregate validation outcome; `errors` presumably only populated when `ok` is false. */
export interface ProtocolValidationResult {
    ok: boolean;
    errors?: ProtocolValidationErrorDetail[];
}
/** Validation hook; may return a result or nothing (void). */
export type ProtocolValidationHook<TArgs> = (args: TArgs) => Promise<void | ProtocolValidationResult> | void | ProtocolValidationResult;
export type InboundValidationHook<TWire> = ProtocolValidationHook<InboundHookArgs<TWire>>;
export type OutboundValidationHook<TWire> = ProtocolValidationHook<OutboundWireHookArgs<TWire>>;
/** Hook set for the inbound (wire → canonical) half of a protocol. */
export interface ProtocolInboundHooks<TInboundWire = JsonObject, TCanonical = CanonicalChatRequest> {
    preValidate?: InboundValidationHook<TInboundWire>;
    parse: ProtocolParserHook<TInboundWire, TCanonical>;
    cleanup?: ProtocolCleanupHook<TCanonical>;
}
/** Hook set for the outbound (canonical → wire) half of a protocol. */
export interface ProtocolOutboundHooks<TCanonical = CanonicalChatResponse, TOutboundWire = JsonObject> {
    augment?: ProtocolAugmentationHook<TCanonical>;
    serialize: ProtocolSerializerHook<TCanonical, TOutboundWire>;
    postValidate?: OutboundValidationHook<TOutboundWire>;
}
/** Complete hook bundle identifying and implementing one protocol. */
export interface ProtocolPipelineHooks<TInboundWire = JsonObject, TOutboundWire = JsonObject, TCanonicalInbound = CanonicalChatRequest, TCanonicalOutbound = CanonicalChatResponse> {
    id: string;
    protocol: string;
    inbound: ProtocolInboundHooks<TInboundWire, TCanonicalInbound>;
    outbound: ProtocolOutboundHooks<TCanonicalOutbound, TOutboundWire>;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Intentionally empty runtime module: `export {}` marks this file as an ES
// module while it contributes no runtime values.
export {};
|