@animalabs/membrane 0.1.20 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,4 +5,5 @@ export { AnthropicAdapter, toAnthropicContent, fromAnthropicContent, type Anthro
5
5
  export { OpenRouterAdapter, toOpenRouterMessages, fromOpenRouterMessage, type OpenRouterAdapterConfig, } from './openrouter.js';
6
6
  export { OpenAIAdapter, toOpenAIContent, fromOpenAIContent, type OpenAIAdapterConfig, } from './openai.js';
7
7
  export { OpenAICompatibleAdapter, toOpenAIMessages, fromOpenAIMessage, type OpenAICompatibleAdapterConfig, } from './openai-compatible.js';
8
+ export { OpenAICompletionsAdapter, type OpenAICompletionsAdapterConfig, } from './openai-completions.js';
8
9
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/providers/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EACL,gBAAgB,EAChB,kBAAkB,EAClB,oBAAoB,EACpB,KAAK,sBAAsB,GAC5B,MAAM,gBAAgB,CAAC;AAExB,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,qBAAqB,EACrB,KAAK,uBAAuB,GAC7B,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EACL,aAAa,EACb,eAAe,EACf,iBAAiB,EACjB,KAAK,mBAAmB,GACzB,MAAM,aAAa,CAAC;AAErB,OAAO,EACL,uBAAuB,EACvB,gBAAgB,EAChB,iBAAiB,EACjB,KAAK,6BAA6B,GACnC,MAAM,wBAAwB,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/providers/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EACL,gBAAgB,EAChB,kBAAkB,EAClB,oBAAoB,EACpB,KAAK,sBAAsB,GAC5B,MAAM,gBAAgB,CAAC;AAExB,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,qBAAqB,EACrB,KAAK,uBAAuB,GAC7B,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EACL,aAAa,EACb,eAAe,EACf,iBAAiB,EACjB,KAAK,mBAAmB,GACzB,MAAM,aAAa,CAAC;AAErB,OAAO,EACL,uBAAuB,EACvB,gBAAgB,EAChB,iBAAiB,EACjB,KAAK,6BAA6B,GACnC,MAAM,wBAAwB,CAAC;AAEhC,OAAO,EACL,wBAAwB,EACxB,KAAK,8BAA8B,GACpC,MAAM,yBAAyB,CAAC"}
@@ -5,4 +5,5 @@ export { AnthropicAdapter, toAnthropicContent, fromAnthropicContent, } from './a
5
5
  export { OpenRouterAdapter, toOpenRouterMessages, fromOpenRouterMessage, } from './openrouter.js';
6
6
  export { OpenAIAdapter, toOpenAIContent, fromOpenAIContent, } from './openai.js';
7
7
  export { OpenAICompatibleAdapter, toOpenAIMessages, fromOpenAIMessage, } from './openai-compatible.js';
8
+ export { OpenAICompletionsAdapter, } from './openai-completions.js';
8
9
  //# sourceMappingURL=index.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/providers/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EACL,gBAAgB,EAChB,kBAAkB,EAClB,oBAAoB,GAErB,MAAM,gBAAgB,CAAC;AAExB,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,qBAAqB,GAEtB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EACL,aAAa,EACb,eAAe,EACf,iBAAiB,GAElB,MAAM,aAAa,CAAC;AAErB,OAAO,EACL,uBAAuB,EACvB,gBAAgB,EAChB,iBAAiB,GAElB,MAAM,wBAAwB,CAAC"}
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/providers/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EACL,gBAAgB,EAChB,kBAAkB,EAClB,oBAAoB,GAErB,MAAM,gBAAgB,CAAC;AAExB,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,qBAAqB,GAEtB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EACL,aAAa,EACb,eAAe,EACf,iBAAiB,GAElB,MAAM,aAAa,CAAC;AAErB,OAAO,EACL,uBAAuB,EACvB,gBAAgB,EAChB,iBAAiB,GAElB,MAAM,wBAAwB,CAAC;AAEhC,OAAO,EACL,wBAAwB,GAEzB,MAAM,yBAAyB,CAAC"}
@@ -0,0 +1,78 @@
1
+ /**
2
+ * OpenAI-Compatible Completions adapter for base models
3
+ *
4
+ * For true base/completion models that use the `/v1/completions` endpoint:
5
+ * - No chat formatting built-in
6
+ * - Single text prompt input
7
+ * - Raw completion output
8
+ * - No image support
9
+ *
10
+ * Serializes conversations to Human:/Assistant: format.
11
+ */
12
+ import type { ProviderAdapter, ProviderRequest, ProviderRequestOptions, ProviderResponse, StreamCallbacks } from '../types/index.js';
13
+ export interface OpenAICompletionsAdapterConfig {
14
+ /** Base URL for the API (required, e.g., 'http://localhost:8000/v1') */
15
+ baseURL: string;
16
+ /** API key (optional for local servers) */
17
+ apiKey?: string;
18
+ /** Provider name for logging/identification (default: 'openai-completions') */
19
+ providerName?: string;
20
+ /** Default max tokens */
21
+ defaultMaxTokens?: number;
22
+ /** Additional headers to include with requests */
23
+ extraHeaders?: Record<string, string>;
24
+ /**
25
+ * Name of the assistant participant (default: 'Assistant')
26
+ * Used to identify which messages are from the assistant and to
27
+ * add the final prompt prefix for completion.
28
+ */
29
+ assistantName?: string;
30
+ /**
31
+ * Additional stop sequences beyond auto-generated participant-based ones.
32
+ * By default, stop sequences are generated from participant names in the
33
+ * conversation (e.g., "\n\nAlice:", "\nBob:").
34
+ */
35
+ extraStopSequences?: string[];
36
+ /**
37
+ * Whether to warn when images are stripped from context (default: true)
38
+ */
39
+ warnOnImageStrip?: boolean;
40
+ }
41
+ export declare class OpenAICompletionsAdapter implements ProviderAdapter {
42
+ readonly name: string;
43
+ private baseURL;
44
+ private apiKey;
45
+ private defaultMaxTokens;
46
+ private extraHeaders;
47
+ private assistantName;
48
+ private extraStopSequences;
49
+ private warnOnImageStrip;
50
+ constructor(config: OpenAICompletionsAdapterConfig);
51
+ supportsModel(_modelId: string): boolean;
52
+ complete(request: ProviderRequest, options?: ProviderRequestOptions): Promise<ProviderResponse>;
53
+ stream(request: ProviderRequest, callbacks: StreamCallbacks, options?: ProviderRequestOptions): Promise<ProviderResponse>;
54
+ /**
55
+ * Serialize messages to "Participant: content" format for base models.
56
+ * Uses actual participant names from messages.
57
+ * Images are stripped from content.
58
+ */
59
+ serializeToPrompt(messages: any[]): {
60
+ prompt: string;
61
+ participants: Set<string>;
62
+ };
63
+ /**
64
+ * Generate stop sequences from participant names.
65
+ * Prevents the model from generating turns for other participants.
66
+ */
67
+ private generateStopSequences;
68
+ private extractTextContent;
69
+ private getHeaders;
70
+ private buildRequest;
71
+ private makeRequest;
72
+ private parseResponse;
73
+ private buildStreamedResponse;
74
+ private textToContent;
75
+ private mapFinishReason;
76
+ private handleError;
77
+ }
78
+ //# sourceMappingURL=openai-completions.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openai-completions.d.ts","sourceRoot":"","sources":["../../src/providers/openai-completions.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAEH,OAAO,KAAK,EACV,eAAe,EACf,eAAe,EACf,sBAAsB,EACtB,gBAAgB,EAChB,eAAe,EAEhB,MAAM,mBAAmB,CAAC;AA2C3B,MAAM,WAAW,8BAA8B;IAC7C,wEAAwE;IACxE,OAAO,EAAE,MAAM,CAAC;IAEhB,2CAA2C;IAC3C,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB,+EAA+E;IAC/E,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB,yBAAyB;IACzB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAE1B,kDAAkD;IAClD,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEtC;;;;OAIG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,OAAO,CAAC;CAC5B;AAMD,qBAAa,wBAAyB,YAAW,eAAe;IAC9D,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,gBAAgB,CAAS;IACjC,OAAO,CAAC,YAAY,CAAyB;IAC7C,OAAO,CAAC,aAAa,CAAS;IAC9B,OAAO,CAAC,kBAAkB,CAAW;IACrC,OAAO,CAAC,gBAAgB,CAAU;gBAEtB,MAAM,EAAE,8BAA8B;IAelD,aAAa,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO;IAIlC,QAAQ,CACZ,OAAO,EAAE,eAAe,EACxB,OAAO,CAAC,EAAE,sBAAsB,GAC/B,OAAO,CAAC,gBAAgB,CAAC;IAWtB,MAAM,CACV,OAAO,EAAE,eAAe,EACxB,SAAS,EAAE,eAAe,EAC1B,OAAO,CAAC,EAAE,sBAAsB,GAC/B,OAAO,CAAC,gBAAgB,CAAC;IAkE5B;;;;OAIG;IACH,iBAAiB,CAAC,QAAQ,EAAE,GAAG,EAAE,GAAG;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,YAAY,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;KAAE;IAkCjF;;;OAGG;IACH,OAAO,CAAC,qBAAqB;IAe7B,OAAO,CAAC,kBAAkB;IA4B1B,OAAO,CAAC,UAAU;IAalB,OAAO,CAAC,YAAY;YAgCN,WAAW;IAgBzB,OAAO,CAAC,aAAa;IAkBrB,OAAO,CAAC,qBAAqB;IAoB7B,OAAO,CAAC,aAAa;IASrB,OAAO,CAAC,eAAe;IAWvB,OAAO,CAAC,WAAW;CAqCpB"}
@@ -0,0 +1,305 @@
1
+ /**
2
+ * OpenAI-Compatible Completions adapter for base models
3
+ *
4
+ * For true base/completion models that use the `/v1/completions` endpoint:
5
+ * - No chat formatting built-in
6
+ * - Single text prompt input
7
+ * - Raw completion output
8
+ * - No image support
9
+ *
10
+ * Serializes conversations to Human:/Assistant: format.
11
+ */
12
+ import { MembraneError, rateLimitError, contextLengthError, authError, serverError, abortError, networkError, } from '../types/index.js';
13
+ // ============================================================================
14
+ // OpenAI Completions Adapter
15
+ // ============================================================================
16
+ export class OpenAICompletionsAdapter {
17
+ name;
18
+ baseURL;
19
+ apiKey;
20
+ defaultMaxTokens;
21
+ extraHeaders;
22
+ assistantName;
23
+ extraStopSequences;
24
+ warnOnImageStrip;
25
+ constructor(config) {
26
+ if (!config.baseURL) {
27
+ throw new Error('OpenAI completions adapter requires baseURL');
28
+ }
29
+ this.name = config.providerName ?? 'openai-completions';
30
+ this.baseURL = config.baseURL.replace(/\/$/, ''); // Remove trailing slash
31
+ this.apiKey = config.apiKey ?? '';
32
+ this.defaultMaxTokens = config.defaultMaxTokens ?? 4096;
33
+ this.extraHeaders = config.extraHeaders ?? {};
34
+ this.assistantName = config.assistantName ?? 'Assistant';
35
+ this.extraStopSequences = config.extraStopSequences ?? [];
36
+ this.warnOnImageStrip = config.warnOnImageStrip ?? true;
37
+ }
38
+ supportsModel(_modelId) {
39
+ return true;
40
+ }
41
+ async complete(request, options) {
42
+ const completionsRequest = this.buildRequest(request);
43
+ try {
44
+ const response = await this.makeRequest(completionsRequest, options);
45
+ return this.parseResponse(response, request.model, completionsRequest);
46
+ }
47
+ catch (error) {
48
+ throw this.handleError(error, completionsRequest);
49
+ }
50
+ }
51
+ async stream(request, callbacks, options) {
52
+ const completionsRequest = this.buildRequest(request);
53
+ completionsRequest.stream = true;
54
+ try {
55
+ const response = await fetch(`${this.baseURL}/completions`, {
56
+ method: 'POST',
57
+ headers: this.getHeaders(),
58
+ body: JSON.stringify(completionsRequest),
59
+ signal: options?.signal,
60
+ });
61
+ if (!response.ok) {
62
+ const errorText = await response.text();
63
+ throw new Error(`API error: ${response.status} ${errorText}`);
64
+ }
65
+ const reader = response.body?.getReader();
66
+ if (!reader) {
67
+ throw new Error('No response body');
68
+ }
69
+ const decoder = new TextDecoder();
70
+ let accumulated = '';
71
+ let finishReason = 'stop';
72
+ while (true) {
73
+ const { done, value } = await reader.read();
74
+ if (done)
75
+ break;
76
+ const chunk = decoder.decode(value, { stream: true });
77
+ const lines = chunk.split('\n').filter(line => line.startsWith('data: '));
78
+ for (const line of lines) {
79
+ const data = line.slice(6);
80
+ if (data === '[DONE]')
81
+ continue;
82
+ try {
83
+ const parsed = JSON.parse(data);
84
+ const text = parsed.choices?.[0]?.text;
85
+ if (text) {
86
+ accumulated += text;
87
+ callbacks.onChunk(text);
88
+ }
89
+ if (parsed.choices?.[0]?.finish_reason) {
90
+ finishReason = parsed.choices[0].finish_reason;
91
+ }
92
+ }
93
+ catch {
94
+ // Ignore parse errors in stream
95
+ }
96
+ }
97
+ }
98
+ return this.buildStreamedResponse(accumulated, finishReason, request.model, completionsRequest);
99
+ }
100
+ catch (error) {
101
+ throw this.handleError(error, completionsRequest);
102
+ }
103
+ }
104
+ // ============================================================================
105
+ // Prompt Serialization
106
+ // ============================================================================
107
+ /**
108
+ * Serialize messages to "Participant: content" format for base models.
109
+ * Uses actual participant names from messages.
110
+ * Images are stripped from content.
111
+ */
112
+ serializeToPrompt(messages) {
113
+ const parts = [];
114
+ const participants = new Set();
115
+ let hasStrippedImages = false;
116
+ for (const msg of messages) {
117
+ // Get participant name (supports both 'participant' and 'role' fields)
118
+ const participant = msg.participant || msg.role || 'Unknown';
119
+ participants.add(participant);
120
+ // Extract text content, strip images
121
+ const textContent = this.extractTextContent(msg.content);
122
+ if (textContent.hadImages) {
123
+ hasStrippedImages = true;
124
+ }
125
+ if (textContent.text) {
126
+ parts.push(`${participant}: ${textContent.text}`);
127
+ }
128
+ }
129
+ if (hasStrippedImages && this.warnOnImageStrip) {
130
+ console.warn('[OpenAICompletionsAdapter] Images were stripped from context (not supported in completions mode)');
131
+ }
132
+ // Add final assistant prefix to prompt completion
133
+ parts.push(`${this.assistantName}:`);
134
+ return {
135
+ prompt: parts.join('\n\n'),
136
+ participants,
137
+ };
138
+ }
139
+ /**
140
+ * Generate stop sequences from participant names.
141
+ * Prevents the model from generating turns for other participants.
142
+ */
143
+ generateStopSequences(participants) {
144
+ const stops = [];
145
+ for (const participant of participants) {
146
+ // Skip the assistant - we don't want to stop on its own name
147
+ if (participant === this.assistantName)
148
+ continue;
149
+ // Add both "\n\nName:" and "\nName:" variants
150
+ stops.push(`\n\n${participant}:`);
151
+ stops.push(`\n${participant}:`);
152
+ }
153
+ return stops;
154
+ }
155
+ extractTextContent(content) {
156
+ if (typeof content === 'string') {
157
+ return { text: content, hadImages: false };
158
+ }
159
+ if (Array.isArray(content)) {
160
+ const textParts = [];
161
+ let hadImages = false;
162
+ for (const block of content) {
163
+ if (block.type === 'text') {
164
+ textParts.push(block.text);
165
+ }
166
+ else if (block.type === 'image' || block.type === 'image_url') {
167
+ hadImages = true;
168
+ }
169
+ // Skip tool_use, tool_result, thinking blocks for base models
170
+ }
171
+ return { text: textParts.join('\n'), hadImages };
172
+ }
173
+ return { text: '', hadImages: false };
174
+ }
175
+ // ============================================================================
176
+ // Private Methods
177
+ // ============================================================================
178
+ getHeaders() {
179
+ const headers = {
180
+ 'Content-Type': 'application/json',
181
+ ...this.extraHeaders,
182
+ };
183
+ if (this.apiKey) {
184
+ headers['Authorization'] = `Bearer ${this.apiKey}`;
185
+ }
186
+ return headers;
187
+ }
188
+ buildRequest(request) {
189
+ const { prompt, participants } = this.serializeToPrompt(request.messages);
190
+ const params = {
191
+ model: request.model,
192
+ prompt,
193
+ max_tokens: request.maxTokens || this.defaultMaxTokens,
194
+ };
195
+ if (request.temperature !== undefined) {
196
+ params.temperature = request.temperature;
197
+ }
198
+ // Generate stop sequences from participant names + any extras
199
+ const stopSequences = [
200
+ ...this.generateStopSequences(participants),
201
+ ...this.extraStopSequences,
202
+ ...(request.stopSequences || []),
203
+ ];
204
+ if (stopSequences.length > 0) {
205
+ params.stop = stopSequences;
206
+ }
207
+ // Apply extra params (but not messages/tools which don't apply)
208
+ if (request.extra) {
209
+ const { messages, tools, ...rest } = request.extra;
210
+ Object.assign(params, rest);
211
+ }
212
+ return params;
213
+ }
214
+ async makeRequest(request, options) {
215
+ const response = await fetch(`${this.baseURL}/completions`, {
216
+ method: 'POST',
217
+ headers: this.getHeaders(),
218
+ body: JSON.stringify(request),
219
+ signal: options?.signal,
220
+ });
221
+ if (!response.ok) {
222
+ const errorText = await response.text();
223
+ throw new Error(`API error: ${response.status} ${errorText}`);
224
+ }
225
+ return response.json();
226
+ }
227
+ parseResponse(response, requestedModel, rawRequest) {
228
+ const choice = response.choices[0];
229
+ const text = choice?.text ?? '';
230
+ return {
231
+ content: this.textToContent(text),
232
+ stopReason: this.mapFinishReason(choice?.finish_reason),
233
+ stopSequence: undefined,
234
+ usage: {
235
+ inputTokens: response.usage?.prompt_tokens ?? 0,
236
+ outputTokens: response.usage?.completion_tokens ?? 0,
237
+ },
238
+ model: response.model ?? requestedModel,
239
+ rawRequest,
240
+ raw: response,
241
+ };
242
+ }
243
+ buildStreamedResponse(accumulated, finishReason, requestedModel, rawRequest) {
244
+ return {
245
+ content: this.textToContent(accumulated),
246
+ stopReason: this.mapFinishReason(finishReason),
247
+ stopSequence: undefined,
248
+ usage: {
249
+ inputTokens: 0, // Not available in streaming
250
+ outputTokens: 0,
251
+ },
252
+ model: requestedModel,
253
+ rawRequest,
254
+ raw: { text: accumulated, finish_reason: finishReason },
255
+ };
256
+ }
257
+ textToContent(text) {
258
+ // Trim leading whitespace (model often starts with space after "Assistant:")
259
+ const trimmed = text.replace(/^\s+/, '');
260
+ if (!trimmed)
261
+ return [];
262
+ return [{ type: 'text', text: trimmed }];
263
+ }
264
+ mapFinishReason(reason) {
265
+ switch (reason) {
266
+ case 'stop':
267
+ return 'end_turn';
268
+ case 'length':
269
+ return 'max_tokens';
270
+ default:
271
+ return 'end_turn';
272
+ }
273
+ }
274
+ handleError(error, rawRequest) {
275
+ if (error instanceof Error) {
276
+ const message = error.message;
277
+ if (message.includes('429') || message.includes('rate')) {
278
+ return rateLimitError(message, undefined, error, rawRequest);
279
+ }
280
+ if (message.includes('401') || message.includes('auth') || message.includes('Unauthorized')) {
281
+ return authError(message, error, rawRequest);
282
+ }
283
+ if (message.includes('context') || message.includes('too long') || message.includes('maximum context')) {
284
+ return contextLengthError(message, error, rawRequest);
285
+ }
286
+ if (message.includes('500') || message.includes('502') || message.includes('503')) {
287
+ return serverError(message, undefined, error, rawRequest);
288
+ }
289
+ if (error.name === 'AbortError') {
290
+ return abortError(undefined, rawRequest);
291
+ }
292
+ if (message.includes('network') || message.includes('fetch') || message.includes('ECONNREFUSED')) {
293
+ return networkError(message, error, rawRequest);
294
+ }
295
+ }
296
+ return new MembraneError({
297
+ type: 'unknown',
298
+ message: error instanceof Error ? error.message : String(error),
299
+ retryable: false,
300
+ rawError: error,
301
+ rawRequest,
302
+ });
303
+ }
304
+ }
305
+ //# sourceMappingURL=openai-completions.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openai-completions.js","sourceRoot":"","sources":["../../src/providers/openai-completions.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAUH,OAAO,EACL,aAAa,EACb,cAAc,EACd,kBAAkB,EAClB,SAAS,EACT,WAAW,EACX,UAAU,EACV,YAAY,GACb,MAAM,mBAAmB,CAAC;AAsE3B,+EAA+E;AAC/E,6BAA6B;AAC7B,+EAA+E;AAE/E,MAAM,OAAO,wBAAwB;IAC1B,IAAI,CAAS;IACd,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,gBAAgB,CAAS;IACzB,YAAY,CAAyB;IACrC,aAAa,CAAS;IACtB,kBAAkB,CAAW;IAC7B,gBAAgB,CAAU;IAElC,YAAY,MAAsC;QAChD,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;YACpB,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAC;QACjE,CAAC;QAED,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,YAAY,IAAI,oBAAoB,CAAC;QACxD,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,wBAAwB;QAC1E,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAC;QAClC,IAAI,CAAC,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,IAAI,IAAI,CAAC;QACxD,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,YAAY,IAAI,EAAE,CAAC;QAC9C,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,WAAW,CAAC;QACzD,IAAI,CAAC,kBAAkB,GAAG,MAAM,CAAC,kBAAkB,IAAI,EAAE,CAAC;QAC1D,IAAI,CAAC,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,IAAI,IAAI,CAAC;IAC1D,CAAC;IAED,aAAa,CAAC,QAAgB;QAC5B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC,QAAQ,CACZ,OAAwB,EACxB,OAAgC;QAEhC,MAAM,kBAAkB,GAAG,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAEtD,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,kBAAkB,EAAE,OAAO,CAAC,CAAC;YACrE,OAAO,IAAI,CAAC,aAAa,CAAC,QAAQ,EAAE,OAAO,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;QACzE,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;QACpD,CAAC;IACH,CAAC;IAED,KAAK,CAAC,MAAM,CACV,OAAwB,EACxB,SAA0B,EAC1B,OAAgC;QAEhC,MAAM,kBAAkB,GAAG,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QACtD,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;QAEjC,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,IAAI,CAAC,OAAO,cAAc,EAAE;gBAC1D,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE;gBAC1B,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC;gBACxC,MAAM,EAAE,OAAO,EAAE,MAAM;aACxB,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACjB,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;gBACxC
,MAAM,IAAI,KAAK,CAAC,cAAc,QAAQ,CAAC,MAAM,IAAI,SAAS,EAAE,CAAC,CAAC;YAChE,CAAC;YAED,MAAM,MAAM,GAAG,QAAQ,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC;YAC1C,IAAI,CAAC,MAAM,EAAE,CAAC;gBACZ,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC,CAAC;YACtC,CAAC;YAED,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAClC,IAAI,WAAW,GAAG,EAAE,CAAC;YACrB,IAAI,YAAY,GAAG,MAAM,CAAC;YAE1B,OAAO,IAAI,EAAE,CAAC;gBACZ,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;gBAC5C,IAAI,IAAI;oBAAE,MAAM;gBAEhB,MAAM,KAAK,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC;gBACtD,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAE1E,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;oBACzB,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;oBAC3B,IAAI,IAAI,KAAK,QAAQ;wBAAE,SAAS;oBAEhC,IAAI,CAAC;wBACH,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBAChC,MAAM,IAAI,GAAG,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC;wBAEvC,IAAI,IAAI,EAAE,CAAC;4BACT,WAAW,IAAI,IAAI,CAAC;4BACpB,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;wBAC1B,CAAC;wBAED,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,aAAa,EAAE,CAAC;4BACvC,YAAY,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC;wBACjD,CAAC;oBACH,CAAC;oBAAC,MAAM,CAAC;wBACP,gCAAgC;oBAClC,CAAC;gBACH,CAAC;YACH,CAAC;YAED,OAAO,IAAI,CAAC,qBAAqB,CAAC,WAAW,EAAE,YAAY,EAAE,OAAO,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;QAElG,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;QACpD,CAAC;IACH,CAAC;IAED,+EAA+E;IAC/E,uBAAuB;IACvB,+EAA+E;IAE/E;;;;OAIG;IACH,iBAAiB,CAAC,QAAe;QAC/B,MAAM,KAAK,GAAa,EAAE,CAAC;QAC3B,MAAM,YAAY,GAAG,IAAI,GAAG,EAAU,CAAC;QACvC,IAAI,iBAAiB,GAAG,KAAK,CAAC;QAE9B,KAAK,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;YAC3B,uEAAuE;YACvE,MAAM,WAAW,GAAG,GAAG,CAAC,WAAW,IAAI,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC;YAC7D,YAAY,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;YAE9B,qCAAqC;YACrC,MAAM,WAAW,GAAG,IAAI,CAAC,kBAAkB,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,WAAW,CAAC,SAAS,EAAE,CAAC;gBAC1B,iBAAiB,GAAG,IAAI,CAAC;YAC3B,CAAC;YAED,IAAI,WAAW,CAAC,IAAI,EAAE,CAAC;
gBACrB,KAAK,CAAC,IAAI,CAAC,GAAG,WAAW,KAAK,WAAW,CAAC,IAAI,EAAE,CAAC,CAAC;YACpD,CAAC;QACH,CAAC;QAED,IAAI,iBAAiB,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;YAC/C,OAAO,CAAC,IAAI,CAAC,kGAAkG,CAAC,CAAC;QACnH,CAAC;QAED,kDAAkD;QAClD,KAAK,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;QAErC,OAAO;YACL,MAAM,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC;YAC1B,YAAY;SACb,CAAC;IACJ,CAAC;IAED;;;OAGG;IACK,qBAAqB,CAAC,YAAyB;QACrD,MAAM,KAAK,GAAa,EAAE,CAAC;QAE3B,KAAK,MAAM,WAAW,IAAI,YAAY,EAAE,CAAC;YACvC,6DAA6D;YAC7D,IAAI,WAAW,KAAK,IAAI,CAAC,aAAa;gBAAE,SAAS;YAEjD,8CAA8C;YAC9C,KAAK,CAAC,IAAI,CAAC,OAAO,WAAW,GAAG,CAAC,CAAC;YAClC,KAAK,CAAC,IAAI,CAAC,KAAK,WAAW,GAAG,CAAC,CAAC;QAClC,CAAC;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAEO,kBAAkB,CAAC,OAAY;QACrC,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;YAChC,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,CAAC;QAC7C,CAAC;QAED,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;YAC3B,MAAM,SAAS,GAAa,EAAE,CAAC;YAC/B,IAAI,SAAS,GAAG,KAAK,CAAC;YAEtB,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;gBAC5B,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAC1B,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBAC7B,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,IAAI,KAAK,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;oBAChE,SAAS,GAAG,IAAI,CAAC;gBACnB,CAAC;gBACD,8DAA8D;YAChE,CAAC;YAED,OAAO,EAAE,IAAI,EAAE,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,CAAC;QACnD,CAAC;QAED,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,CAAC;IACxC,CAAC;IAED,+EAA+E;IAC/E,kBAAkB;IAClB,+EAA+E;IAEvE,UAAU;QAChB,MAAM,OAAO,GAA2B;YACtC,cAAc,EAAE,kBAAkB;YAClC,GAAG,IAAI,CAAC,YAAY;SACrB,CAAC;QAEF,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,MAAM,EAAE,CAAC;QACrD,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAEO,YAAY,CAAC,OAAwB;QAC3C,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,QAAiB,CAAC,CAAC;QAEnF,MAAM,MAAM,GAAuB;YACjC,KAAK,EAAE,OAAO,CAAC,KAAK;YACpB,MAAM;YACN,UAAU,EAAE,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,gBAAgB;SACvD,CAAC;QAEF,IAAI,OAAO,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;YACtC,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QAC3C,CAAC;QAED,8DAA8D;QAC9D,
MAAM,aAAa,GAAG;YACpB,GAAG,IAAI,CAAC,qBAAqB,CAAC,YAAY,CAAC;YAC3C,GAAG,IAAI,CAAC,kBAAkB;YAC1B,GAAG,CAAC,OAAO,CAAC,aAAa,IAAI,EAAE,CAAC;SACjC,CAAC;QACF,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC7B,MAAM,CAAC,IAAI,GAAG,aAAa,CAAC;QAC9B,CAAC;QAED,gEAAgE;QAChE,IAAI,OAAO,CAAC,KAAK,EAAE,CAAC;YAClB,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,GAAG,IAAI,EAAE,GAAG,OAAO,CAAC,KAAY,CAAC;YAC1D,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAC9B,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAEO,KAAK,CAAC,WAAW,CAAC,OAA2B,EAAE,OAAgC;QACrF,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,IAAI,CAAC,OAAO,cAAc,EAAE;YAC1D,MAAM,EAAE,MAAM;YACd,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE;YAC1B,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC;YAC7B,MAAM,EAAE,OAAO,EAAE,MAAM;SACxB,CAAC,CAAC;QAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;YACjB,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;YACxC,MAAM,IAAI,KAAK,CAAC,cAAc,QAAQ,CAAC,MAAM,IAAI,SAAS,EAAE,CAAC,CAAC;QAChE,CAAC;QAED,OAAO,QAAQ,CAAC,IAAI,EAAkC,CAAC;IACzD,CAAC;IAEO,aAAa,CAAC,QAA6B,EAAE,cAAsB,EAAE,UAAmB;QAC9F,MAAM,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QACnC,MAAM,IAAI,GAAG,MAAM,EAAE,IAAI,IAAI,EAAE,CAAC;QAEhC,OAAO;YACL,OAAO,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC;YACjC,UAAU,EAAE,IAAI,CAAC,eAAe,CAAC,MAAM,EAAE,aAAa,CAAC;YACvD,YAAY,EAAE,SAAS;YACvB,KAAK,EAAE;gBACL,WAAW,EAAE,QAAQ,CAAC,KAAK,EAAE,aAAa,IAAI,CAAC;gBAC/C,YAAY,EAAE,QAAQ,CAAC,KAAK,EAAE,iBAAiB,IAAI,CAAC;aACrD;YACD,KAAK,EAAE,QAAQ,CAAC,KAAK,IAAI,cAAc;YACvC,UAAU;YACV,GAAG,EAAE,QAAQ;SACd,CAAC;IACJ,CAAC;IAEO,qBAAqB,CAC3B,WAAmB,EACnB,YAAoB,EACpB,cAAsB,EACtB,UAAoB;QAEpB,OAAO;YACL,OAAO,EAAE,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC;YACxC,UAAU,EAAE,IAAI,CAAC,eAAe,CAAC,YAAY,CAAC;YAC9C,YAAY,EAAE,SAAS;YACvB,KAAK,EAAE;gBACL,WAAW,EAAE,CAAC,EAAE,6BAA6B;gBAC7C,YAAY,EAAE,CAAC;aAChB;YACD,KAAK,EAAE,cAAc;YACrB,UAAU;YACV,GAAG,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,aAAa,EAAE,YAAY,EAAE;SACxD,CAAC;IACJ,CAAC;IAEO,aAAa,CAAC,IAAY;QAChC,6EAA6E;QAC7E,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;QAEzC,IAAI,CAAC,OAAO;YAAE,OAAO,EAAE,CAAC;QAExB,OAAO,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EA
AE,CAAC,CAAC;IAC3C,CAAC;IAEO,eAAe,CAAC,MAA0B;QAChD,QAAQ,MAAM,EAAE,CAAC;YACf,KAAK,MAAM;gBACT,OAAO,UAAU,CAAC;YACpB,KAAK,QAAQ;gBACX,OAAO,YAAY,CAAC;YACtB;gBACE,OAAO,UAAU,CAAC;QACtB,CAAC;IACH,CAAC;IAEO,WAAW,CAAC,KAAc,EAAE,UAAoB;QACtD,IAAI,KAAK,YAAY,KAAK,EAAE,CAAC;YAC3B,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;YAE9B,IAAI,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;gBACxD,OAAO,cAAc,CAAC,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC;YAC/D,CAAC;YAED,IAAI,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC;gBAC5F,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC;YAC/C,CAAC;YAED,IAAI,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,iBAAiB,CAAC,EAAE,CAAC;gBACvG,OAAO,kBAAkB,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC;YACxD,CAAC;YAED,IAAI,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;gBAClF,OAAO,WAAW,CAAC,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC;YAC5D,CAAC;YAED,IAAI,KAAK,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;gBAChC,OAAO,UAAU,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC;YAC3C,CAAC;YAED,IAAI,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC;gBACjG,OAAO,YAAY,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC;YAClD,CAAC;QACH,CAAC;QAED,OAAO,IAAI,aAAa,CAAC;YACvB,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;YAC/D,SAAS,EAAE,KAAK;YAChB,QAAQ,EAAE,KAAK;YACf,UAAU;SACX,CAAC,CAAC;IACL,CAAC;CACF"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@animalabs/membrane",
3
- "version": "0.1.20",
3
+ "version": "0.2.1",
4
4
  "description": "LLM middleware - a selective boundary that transforms what passes through",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -29,3 +29,8 @@ export {
29
29
  fromOpenAIMessage,
30
30
  type OpenAICompatibleAdapterConfig,
31
31
  } from './openai-compatible.js';
32
+
33
+ export {
34
+ OpenAICompletionsAdapter,
35
+ type OpenAICompletionsAdapterConfig,
36
+ } from './openai-completions.js';
@@ -0,0 +1,458 @@
1
+ /**
2
+ * OpenAI-Compatible Completions adapter for base models
3
+ *
4
+ * For true base/completion models that use the `/v1/completions` endpoint:
5
+ * - No chat formatting built-in
6
+ * - Single text prompt input
7
+ * - Raw completion output
8
+ * - No image support
9
+ *
10
+ * Serializes conversations to Human:/Assistant: format.
11
+ */
12
+
13
+ import type {
14
+ ProviderAdapter,
15
+ ProviderRequest,
16
+ ProviderRequestOptions,
17
+ ProviderResponse,
18
+ StreamCallbacks,
19
+ ContentBlock,
20
+ } from '../types/index.js';
21
+ import {
22
+ MembraneError,
23
+ rateLimitError,
24
+ contextLengthError,
25
+ authError,
26
+ serverError,
27
+ abortError,
28
+ networkError,
29
+ } from '../types/index.js';
30
+
31
+ // ============================================================================
32
+ // Types
33
+ // ============================================================================
34
+
35
/** Wire format of a POST `/v1/completions` request body. */
interface CompletionsRequest {
  /** Model identifier as understood by the serving backend. */
  model: string;
  /** Serialized "Name: text" transcript, ending with the assistant prefix. */
  prompt: string;
  max_tokens?: number;
  temperature?: number;
  /** Sequences that terminate generation (participant prefixes plus extras). */
  stop?: string[];
  /** When true, the server responds with SSE chunks instead of one JSON body. */
  stream?: boolean;
}
43
+
44
/** Wire format of a non-streaming `/v1/completions` response body. */
interface CompletionsResponse {
  id: string;
  model: string;
  /** One entry per requested completion; this adapter reads only choices[0]. */
  choices: {
    index: number;
    text: string;
    finish_reason: string;
  }[];
  /** Token accounting — optional because some servers omit it. */
  usage?: {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
  };
}
58
+
59
+ // ============================================================================
60
+ // Adapter Configuration
61
+ // ============================================================================
62
+
63
/**
 * Construction options for {@link OpenAICompletionsAdapter}.
 *
 * Only `baseURL` is required; everything else has a sensible default for
 * local OpenAI-compatible completion servers.
 */
export interface OpenAICompletionsAdapterConfig {
  /** Base URL for the API (required, e.g., 'http://localhost:8000/v1') */
  baseURL: string;

  /** API key (optional for local servers) */
  apiKey?: string;

  /** Provider name for logging/identification (default: 'openai-completions') */
  providerName?: string;

  /** Default max tokens */
  defaultMaxTokens?: number;

  /** Additional headers to include with requests */
  extraHeaders?: Record<string, string>;

  /**
   * Name of the assistant participant (default: 'Assistant')
   * Used to identify which messages are from the assistant and to
   * add the final prompt prefix for completion.
   */
  assistantName?: string;

  /**
   * Additional stop sequences beyond auto-generated participant-based ones.
   * By default, stop sequences are generated from participant names in the
   * conversation (e.g., "\n\nAlice:", "\nBob:").
   */
  extraStopSequences?: string[];

  /**
   * Whether to warn when images are stripped from context (default: true)
   */
  warnOnImageStrip?: boolean;
}
98
+
99
+ // ============================================================================
100
+ // OpenAI Completions Adapter
101
+ // ============================================================================
102
+
103
+ export class OpenAICompletionsAdapter implements ProviderAdapter {
104
+ readonly name: string;
105
+ private baseURL: string;
106
+ private apiKey: string;
107
+ private defaultMaxTokens: number;
108
+ private extraHeaders: Record<string, string>;
109
+ private assistantName: string;
110
+ private extraStopSequences: string[];
111
+ private warnOnImageStrip: boolean;
112
+
113
+ constructor(config: OpenAICompletionsAdapterConfig) {
114
+ if (!config.baseURL) {
115
+ throw new Error('OpenAI completions adapter requires baseURL');
116
+ }
117
+
118
+ this.name = config.providerName ?? 'openai-completions';
119
+ this.baseURL = config.baseURL.replace(/\/$/, ''); // Remove trailing slash
120
+ this.apiKey = config.apiKey ?? '';
121
+ this.defaultMaxTokens = config.defaultMaxTokens ?? 4096;
122
+ this.extraHeaders = config.extraHeaders ?? {};
123
+ this.assistantName = config.assistantName ?? 'Assistant';
124
+ this.extraStopSequences = config.extraStopSequences ?? [];
125
+ this.warnOnImageStrip = config.warnOnImageStrip ?? true;
126
+ }
127
+
128
+ supportsModel(_modelId: string): boolean {
129
+ return true;
130
+ }
131
+
132
+ async complete(
133
+ request: ProviderRequest,
134
+ options?: ProviderRequestOptions
135
+ ): Promise<ProviderResponse> {
136
+ const completionsRequest = this.buildRequest(request);
137
+
138
+ try {
139
+ const response = await this.makeRequest(completionsRequest, options);
140
+ return this.parseResponse(response, request.model, completionsRequest);
141
+ } catch (error) {
142
+ throw this.handleError(error, completionsRequest);
143
+ }
144
+ }
145
+
146
+ async stream(
147
+ request: ProviderRequest,
148
+ callbacks: StreamCallbacks,
149
+ options?: ProviderRequestOptions
150
+ ): Promise<ProviderResponse> {
151
+ const completionsRequest = this.buildRequest(request);
152
+ completionsRequest.stream = true;
153
+
154
+ try {
155
+ const response = await fetch(`${this.baseURL}/completions`, {
156
+ method: 'POST',
157
+ headers: this.getHeaders(),
158
+ body: JSON.stringify(completionsRequest),
159
+ signal: options?.signal,
160
+ });
161
+
162
+ if (!response.ok) {
163
+ const errorText = await response.text();
164
+ throw new Error(`API error: ${response.status} ${errorText}`);
165
+ }
166
+
167
+ const reader = response.body?.getReader();
168
+ if (!reader) {
169
+ throw new Error('No response body');
170
+ }
171
+
172
+ const decoder = new TextDecoder();
173
+ let accumulated = '';
174
+ let finishReason = 'stop';
175
+
176
+ while (true) {
177
+ const { done, value } = await reader.read();
178
+ if (done) break;
179
+
180
+ const chunk = decoder.decode(value, { stream: true });
181
+ const lines = chunk.split('\n').filter(line => line.startsWith('data: '));
182
+
183
+ for (const line of lines) {
184
+ const data = line.slice(6);
185
+ if (data === '[DONE]') continue;
186
+
187
+ try {
188
+ const parsed = JSON.parse(data);
189
+ const text = parsed.choices?.[0]?.text;
190
+
191
+ if (text) {
192
+ accumulated += text;
193
+ callbacks.onChunk(text);
194
+ }
195
+
196
+ if (parsed.choices?.[0]?.finish_reason) {
197
+ finishReason = parsed.choices[0].finish_reason;
198
+ }
199
+ } catch {
200
+ // Ignore parse errors in stream
201
+ }
202
+ }
203
+ }
204
+
205
+ return this.buildStreamedResponse(accumulated, finishReason, request.model, completionsRequest);
206
+
207
+ } catch (error) {
208
+ throw this.handleError(error, completionsRequest);
209
+ }
210
+ }
211
+
212
+ // ============================================================================
213
+ // Prompt Serialization
214
+ // ============================================================================
215
+
216
+ /**
217
+ * Serialize messages to "Participant: content" format for base models.
218
+ * Uses actual participant names from messages.
219
+ * Images are stripped from content.
220
+ */
221
+ serializeToPrompt(messages: any[]): { prompt: string; participants: Set<string> } {
222
+ const parts: string[] = [];
223
+ const participants = new Set<string>();
224
+ let hasStrippedImages = false;
225
+
226
+ for (const msg of messages) {
227
+ // Get participant name (supports both 'participant' and 'role' fields)
228
+ const participant = msg.participant || msg.role || 'Unknown';
229
+ participants.add(participant);
230
+
231
+ // Extract text content, strip images
232
+ const textContent = this.extractTextContent(msg.content);
233
+ if (textContent.hadImages) {
234
+ hasStrippedImages = true;
235
+ }
236
+
237
+ if (textContent.text) {
238
+ parts.push(`${participant}: ${textContent.text}`);
239
+ }
240
+ }
241
+
242
+ if (hasStrippedImages && this.warnOnImageStrip) {
243
+ console.warn('[OpenAICompletionsAdapter] Images were stripped from context (not supported in completions mode)');
244
+ }
245
+
246
+ // Add final assistant prefix to prompt completion
247
+ parts.push(`${this.assistantName}:`);
248
+
249
+ return {
250
+ prompt: parts.join('\n\n'),
251
+ participants,
252
+ };
253
+ }
254
+
255
+ /**
256
+ * Generate stop sequences from participant names.
257
+ * Prevents the model from generating turns for other participants.
258
+ */
259
+ private generateStopSequences(participants: Set<string>): string[] {
260
+ const stops: string[] = [];
261
+
262
+ for (const participant of participants) {
263
+ // Skip the assistant - we don't want to stop on its own name
264
+ if (participant === this.assistantName) continue;
265
+
266
+ // Add both "\n\nName:" and "\nName:" variants
267
+ stops.push(`\n\n${participant}:`);
268
+ stops.push(`\n${participant}:`);
269
+ }
270
+
271
+ return stops;
272
+ }
273
+
274
+ private extractTextContent(content: any): { text: string; hadImages: boolean } {
275
+ if (typeof content === 'string') {
276
+ return { text: content, hadImages: false };
277
+ }
278
+
279
+ if (Array.isArray(content)) {
280
+ const textParts: string[] = [];
281
+ let hadImages = false;
282
+
283
+ for (const block of content) {
284
+ if (block.type === 'text') {
285
+ textParts.push(block.text);
286
+ } else if (block.type === 'image' || block.type === 'image_url') {
287
+ hadImages = true;
288
+ }
289
+ // Skip tool_use, tool_result, thinking blocks for base models
290
+ }
291
+
292
+ return { text: textParts.join('\n'), hadImages };
293
+ }
294
+
295
+ return { text: '', hadImages: false };
296
+ }
297
+
298
+ // ============================================================================
299
+ // Private Methods
300
+ // ============================================================================
301
+
302
+ private getHeaders(): Record<string, string> {
303
+ const headers: Record<string, string> = {
304
+ 'Content-Type': 'application/json',
305
+ ...this.extraHeaders,
306
+ };
307
+
308
+ if (this.apiKey) {
309
+ headers['Authorization'] = `Bearer ${this.apiKey}`;
310
+ }
311
+
312
+ return headers;
313
+ }
314
+
315
+ private buildRequest(request: ProviderRequest): CompletionsRequest {
316
+ const { prompt, participants } = this.serializeToPrompt(request.messages as any[]);
317
+
318
+ const params: CompletionsRequest = {
319
+ model: request.model,
320
+ prompt,
321
+ max_tokens: request.maxTokens || this.defaultMaxTokens,
322
+ };
323
+
324
+ if (request.temperature !== undefined) {
325
+ params.temperature = request.temperature;
326
+ }
327
+
328
+ // Generate stop sequences from participant names + any extras
329
+ const stopSequences = [
330
+ ...this.generateStopSequences(participants),
331
+ ...this.extraStopSequences,
332
+ ...(request.stopSequences || []),
333
+ ];
334
+ if (stopSequences.length > 0) {
335
+ params.stop = stopSequences;
336
+ }
337
+
338
+ // Apply extra params (but not messages/tools which don't apply)
339
+ if (request.extra) {
340
+ const { messages, tools, ...rest } = request.extra as any;
341
+ Object.assign(params, rest);
342
+ }
343
+
344
+ return params;
345
+ }
346
+
347
+ private async makeRequest(request: CompletionsRequest, options?: ProviderRequestOptions): Promise<CompletionsResponse> {
348
+ const response = await fetch(`${this.baseURL}/completions`, {
349
+ method: 'POST',
350
+ headers: this.getHeaders(),
351
+ body: JSON.stringify(request),
352
+ signal: options?.signal,
353
+ });
354
+
355
+ if (!response.ok) {
356
+ const errorText = await response.text();
357
+ throw new Error(`API error: ${response.status} ${errorText}`);
358
+ }
359
+
360
+ return response.json() as Promise<CompletionsResponse>;
361
+ }
362
+
363
+ private parseResponse(response: CompletionsResponse, requestedModel: string, rawRequest: unknown): ProviderResponse {
364
+ const choice = response.choices[0];
365
+ const text = choice?.text ?? '';
366
+
367
+ return {
368
+ content: this.textToContent(text),
369
+ stopReason: this.mapFinishReason(choice?.finish_reason),
370
+ stopSequence: undefined,
371
+ usage: {
372
+ inputTokens: response.usage?.prompt_tokens ?? 0,
373
+ outputTokens: response.usage?.completion_tokens ?? 0,
374
+ },
375
+ model: response.model ?? requestedModel,
376
+ rawRequest,
377
+ raw: response,
378
+ };
379
+ }
380
+
381
+ private buildStreamedResponse(
382
+ accumulated: string,
383
+ finishReason: string,
384
+ requestedModel: string,
385
+ rawRequest?: unknown
386
+ ): ProviderResponse {
387
+ return {
388
+ content: this.textToContent(accumulated),
389
+ stopReason: this.mapFinishReason(finishReason),
390
+ stopSequence: undefined,
391
+ usage: {
392
+ inputTokens: 0, // Not available in streaming
393
+ outputTokens: 0,
394
+ },
395
+ model: requestedModel,
396
+ rawRequest,
397
+ raw: { text: accumulated, finish_reason: finishReason },
398
+ };
399
+ }
400
+
401
+ private textToContent(text: string): ContentBlock[] {
402
+ // Trim leading whitespace (model often starts with space after "Assistant:")
403
+ const trimmed = text.replace(/^\s+/, '');
404
+
405
+ if (!trimmed) return [];
406
+
407
+ return [{ type: 'text', text: trimmed }];
408
+ }
409
+
410
+ private mapFinishReason(reason: string | undefined): string {
411
+ switch (reason) {
412
+ case 'stop':
413
+ return 'end_turn';
414
+ case 'length':
415
+ return 'max_tokens';
416
+ default:
417
+ return 'end_turn';
418
+ }
419
+ }
420
+
421
+ private handleError(error: unknown, rawRequest?: unknown): MembraneError {
422
+ if (error instanceof Error) {
423
+ const message = error.message;
424
+
425
+ if (message.includes('429') || message.includes('rate')) {
426
+ return rateLimitError(message, undefined, error, rawRequest);
427
+ }
428
+
429
+ if (message.includes('401') || message.includes('auth') || message.includes('Unauthorized')) {
430
+ return authError(message, error, rawRequest);
431
+ }
432
+
433
+ if (message.includes('context') || message.includes('too long') || message.includes('maximum context')) {
434
+ return contextLengthError(message, error, rawRequest);
435
+ }
436
+
437
+ if (message.includes('500') || message.includes('502') || message.includes('503')) {
438
+ return serverError(message, undefined, error, rawRequest);
439
+ }
440
+
441
+ if (error.name === 'AbortError') {
442
+ return abortError(undefined, rawRequest);
443
+ }
444
+
445
+ if (message.includes('network') || message.includes('fetch') || message.includes('ECONNREFUSED')) {
446
+ return networkError(message, error, rawRequest);
447
+ }
448
+ }
449
+
450
+ return new MembraneError({
451
+ type: 'unknown',
452
+ message: error instanceof Error ? error.message : String(error),
453
+ retryable: false,
454
+ rawError: error,
455
+ rawRequest,
456
+ });
457
+ }
458
+ }