@oai2lmapi/opencode-provider 0.1.0-prerelease.20260115130629.e7d4f72

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,340 @@
1
+ # @oai2lmapi/opencode-provider
2
+
3
+ OpenAI-compatible provider for [OpenCode](https://github.com/anomalyco/opencode), built with the Vercel AI SDK.
4
+
5
+ ## Features
6
+
7
+ - **Auto-Discovery**: Automatically discovers models from your API's `/models` endpoint
8
+ - **Smart Configuration**: Automatically detects model capabilities (tool calling, vision, context limits)
9
+ - **Flexible Overrides**: Per-model configuration via OpenCode settings
10
+ - **Based on AI SDK**: Built on top of Vercel AI SDK's `@ai-sdk/openai-compatible`
11
+
12
+ > **Note**: Chain-of-thought handling (`<think>` tags) and prompt-based tool calling are still under active development. The configuration options for these features are documented below, but their runtime behavior may not be fully implemented in this release.
13
+
14
+ ## Installation
15
+
16
+ ```bash
17
+ npm install @oai2lmapi/opencode-provider
18
+ # or
19
+ pnpm add @oai2lmapi/opencode-provider
20
+ # or
21
+ yarn add @oai2lmapi/opencode-provider
22
+ ```
23
+
24
+ ## Usage
25
+
26
+ ### Basic Setup
27
+
28
+ Create a provider configuration file for OpenCode (e.g., `opencode.config.ts`):
29
+
30
+ ```typescript
31
+ import { createOAI2LMProvider } from '@oai2lmapi/opencode-provider';
32
+
33
+ export default {
34
+ providers: {
35
+ myapi: createOAI2LMProvider({
36
+ apiKey: process.env.MY_API_KEY,
37
+ baseURL: 'https://api.example.com/v1',
38
+ }),
39
+ },
40
+ };
41
+ ```
42
+
43
+ ### With Model Auto-Discovery
44
+
45
+ The provider will automatically fetch available models on initialization:
46
+
47
+ ```typescript
48
+ import { createOAI2LMProvider } from '@oai2lmapi/opencode-provider';
49
+
50
+ const provider = await createOAI2LMProvider({
51
+ apiKey: process.env.MY_API_KEY,
52
+ baseURL: 'https://api.example.com/v1',
53
+ autoDiscoverModels: true, // default
54
+ });
55
+
56
+ // Use with OpenCode
57
+ const result = await generateText({
58
+ model: provider('gpt-4'),
59
+ prompt: 'Hello, world!',
60
+ });
61
+ ```
62
+
63
+ ### Model Overrides
64
+
65
+ Configure per-model settings:
66
+
67
+ ```typescript
68
+ const provider = createOAI2LMProvider({
69
+ apiKey: process.env.MY_API_KEY,
70
+ baseURL: 'https://api.example.com/v1',
71
+ modelOverrides: {
72
+ 'deepseek-*': {
73
+ // Use prompt-based tool calling for DeepSeek models
74
+ usePromptBasedToolCalling: true,
75
+ // Strip chain-of-thought tags
76
+ suppressChainOfThought: true,
77
+ },
78
+ 'o1-*': {
79
+ // Enable reasoning capture for o1 models
80
+ captureReasoning: true,
81
+ },
82
+ 'gpt-4-vision': {
83
+ // Override capabilities
84
+ supportsImageInput: true,
85
+ maxInputTokens: 128000,
86
+ },
87
+ },
88
+ });
89
+ ```
90
+
91
+ ### Chain-of-Thought Handling
92
+
93
+ For reasoning models that output `<think>` tags:
94
+
95
+ ```typescript
96
+ const provider = createOAI2LMProvider({
97
+ apiKey: process.env.MY_API_KEY,
98
+ baseURL: 'https://api.example.com/v1',
99
+ modelOverrides: {
100
+ 'reasoning-model-*': {
101
+ // Capture and expose chain-of-thought
102
+ captureReasoning: true,
103
+ // Or suppress it from output
104
+ suppressChainOfThought: false,
105
+ },
106
+ },
107
+ });
108
+
109
+ const result = await generateText({
110
+ model: provider('reasoning-model-v1'),
111
+ prompt: 'Solve this puzzle...',
112
+ });
113
+
114
+ // Access reasoning if captured
115
+ console.log(result.reasoning); // Chain-of-thought content
116
+ console.log(result.text); // Final answer without <think> tags
117
+ ```
118
+
119
+ ### Prompt-Based Tool Calling
120
+
121
+ For models without native function calling:
122
+
123
+ ```typescript
124
+ const provider = createOAI2LMProvider({
125
+ apiKey: process.env.MY_API_KEY,
126
+ baseURL: 'https://api.example.com/v1',
127
+ modelOverrides: {
128
+ 'legacy-model': {
129
+ usePromptBasedToolCalling: true,
130
+ },
131
+ },
132
+ });
133
+
134
+ // Tools are automatically converted to XML format in system prompt
135
+ const result = await generateText({
136
+ model: provider('legacy-model'),
137
+ prompt: 'What is the weather in Tokyo?',
138
+ tools: {
139
+ getWeather: {
140
+ description: 'Get current weather',
141
+ parameters: z.object({
142
+ location: z.string(),
143
+ }),
144
+ execute: async ({ location }) => {
145
+ // ... fetch weather
146
+ },
147
+ },
148
+ },
149
+ });
150
+ ```
151
+
152
+ ## Configuration Options
153
+
154
+ ### Provider Settings
155
+
156
+ ```typescript
157
+ interface OAI2LMProviderSettings {
158
+ /** API key for authentication */
159
+ apiKey: string;
160
+
161
+ /** Base URL for API calls (e.g., 'https://api.example.com/v1') */
162
+ baseURL: string;
163
+
164
+ /** Provider name (defaults to 'oai2lm') */
165
+ name?: string;
166
+
167
+ /** Custom headers */
168
+ headers?: Record<string, string>;
169
+
170
+ /** Auto-discover models on initialization (default: true) */
171
+ autoDiscoverModels?: boolean;
172
+
173
+ /** Per-model configuration overrides */
174
+ modelOverrides?: Record<string, ModelOverride>;
175
+
176
+ /** Custom fetch implementation */
177
+ fetch?: typeof fetch;
178
+ }
179
+ ```
180
+
181
+ ### Model Override Options
182
+
183
+ ```typescript
184
+ interface ModelOverride {
185
+ /** Max input tokens */
186
+ maxInputTokens?: number;
187
+
188
+ /** Max output tokens */
189
+ maxOutputTokens?: number;
190
+
191
+ /** Supports native tool/function calling */
192
+ supportsToolCalling?: boolean;
193
+
194
+ /** Supports image inputs */
195
+ supportsImageInput?: boolean;
196
+
197
+ /** Default temperature */
198
+ temperature?: number;
199
+
200
+ /** Use XML-based prompt engineering for tools */
201
+ usePromptBasedToolCalling?: boolean;
202
+
203
+ /** Strip <think>...</think> blocks from output */
204
+ suppressChainOfThought?: boolean;
205
+
206
+ /** Capture reasoning content separately */
207
+ captureReasoning?: boolean;
208
+
209
+ /** Thinking level: token budget or 'low'/'medium'/'high' */
210
+ thinkingLevel?: number | 'low' | 'medium' | 'high' | 'auto';
211
+ }
212
+ ```
213
+
214
+ ## How It Works
215
+
216
+ 1. **Model Discovery**: On initialization, the provider fetches the `/models` endpoint
217
+ 2. **Capability Detection**: Analyzes model metadata to determine capabilities
218
+ 3. **Metadata Caching**: Model info is cached to reduce API calls
219
+ 4. **Override Application**: User-defined overrides are applied on top of discovered capabilities
220
+ 5. **Request Translation**: Converts AI SDK requests to OpenAI-compatible format
221
+ 6. **Response Parsing**: Handles special formats like `<think>` tags and XML tool calls
222
+
223
+ ## Configuration with OpenCode
224
+
225
+ This provider integrates with OpenCode's data directory for configuration. By default (following the XDG base directory spec), it looks for a config file at:
226
+
227
+ - `~/.local/share/opencode/oai2lm.json` (primary location, checked first — corresponds to `$XDG_DATA_HOME/opencode/oai2lm.json` with the common default of `~/.local/share`)
228
+ - `~/.config/opencode/oai2lm.json` (alternative location, checked second as a fallback — corresponds to `$XDG_CONFIG_HOME/opencode/oai2lm.json` with the common default of `~/.config`)
229
+
230
+ If you are on a non-standard system or use custom XDG paths, you can override these locations by setting `XDG_DATA_HOME` and/or `XDG_CONFIG_HOME`. The provider will then resolve the config file as `$XDG_DATA_HOME/opencode/oai2lm.json` and `$XDG_CONFIG_HOME/opencode/oai2lm.json` respectively, and it will still search these locations in the order shown, using the first config file it finds (so the data directory location takes precedence if both files exist).
231
+
232
+ ### Config File Format
233
+
234
+ ```json
235
+ {
236
+ "apiKey": "your-api-key",
237
+ "baseURL": "https://api.example.com/v1",
238
+ "name": "my-provider",
239
+ "autoDiscoverModels": true,
240
+ "modelOverrides": {
241
+ "deepseek-*": {
242
+ "usePromptBasedToolCalling": true,
243
+ "suppressChainOfThought": true
244
+ },
245
+ "gpt-4-vision": {
246
+ "supportsImageInput": true,
247
+ "maxInputTokens": 128000
248
+ }
249
+ }
250
+ }
251
+ ```
252
+
253
+ ### Using Config File
254
+
255
+ ```typescript
256
+ import { createOAI2LMProviderFromConfig, OAI2LMProvider } from '@oai2lmapi/opencode-provider';
257
+
258
+ // Create provider from config file
259
+ const provider = createOAI2LMProviderFromConfig();
260
+
261
+ // Or use the static method
262
+ const provider2 = OAI2LMProvider.fromConfig();
263
+
264
+ // Override specific settings
265
+ const provider3 = createOAI2LMProviderFromConfig({
266
+ baseURL: 'https://api.custom.com/v1', // Override base URL
267
+ });
268
+ ```
269
+
270
+ ### Environment Variables
271
+
272
+ You can also configure via environment variables:
273
+
274
+ - `OAI2LM_API_KEY` - API key for authentication
275
+ - `OAI2LM_BASE_URL` - Base URL for API calls
276
+
277
+ Priority order (highest to lowest):
278
+ 1. Explicit settings passed to function
279
+ 2. Environment variables
280
+ 3. Config file values
281
+
282
+ ## Integration with OpenCode
283
+
284
+ This provider is designed to work seamlessly with OpenCode's configuration system:
285
+
286
+ ```javascript
287
+ // ~/.opencode/config.js
288
+ export default {
289
+ providers: {
290
+ myapi: {
291
+ type: '@oai2lmapi/opencode-provider',
292
+ apiKey: process.env.MY_API_KEY,
293
+ baseURL: 'https://api.example.com/v1',
294
+ modelOverrides: {
295
+ // Configure models as needed
296
+ },
297
+ },
298
+ },
299
+ models: {
300
+ default: 'myapi:gpt-4',
301
+ },
302
+ };
303
+ ```
304
+
305
+ ## Examples
306
+
307
+ ### Using with Multiple Providers
308
+
309
+ ```typescript
310
+ import { createOAI2LMProvider } from '@oai2lmapi/opencode-provider';
311
+
312
+ const openai = createOAI2LMProvider({
313
+ name: 'openai',
314
+ apiKey: process.env.OPENAI_API_KEY,
315
+ baseURL: 'https://api.openai.com/v1',
316
+ });
317
+
318
+ const deepseek = createOAI2LMProvider({
319
+ name: 'deepseek',
320
+ apiKey: process.env.DEEPSEEK_API_KEY,
321
+ baseURL: 'https://api.deepseek.com/v1',
322
+ modelOverrides: {
323
+ '*': {
324
+ usePromptBasedToolCalling: true,
325
+ },
326
+ },
327
+ });
328
+
329
+ // Use either provider
330
+ await generateText({ model: openai('gpt-4'), prompt: '...' });
331
+ await generateText({ model: deepseek('deepseek-chat'), prompt: '...' });
332
+ ```
333
+
334
+ ## License
335
+
336
+ MIT
337
+
338
+ ## Contributing
339
+
340
+ Contributions are welcome! Please see the main repository for guidelines.
@@ -0,0 +1,96 @@
1
+ /**
2
+ * Configuration loading for OpenCode plugin
3
+ *
4
+ * Reads configuration from:
5
+ * 1. ~/.local/share/opencode/oai2lm.json (primary config file)
6
+ * 2. ~/.config/opencode/oai2lm.json (alternative config location)
7
+ *
8
+ * This follows OpenCode's convention where:
9
+ * - ~/.local/share/opencode/ contains data files (auth.json, etc.)
10
+ * - ~/.config/opencode/ contains user configuration
11
+ */
12
+ import { OAI2LMProviderSettings, ModelOverride } from './types.js';
13
/**
 * Configuration file structure for oai2lm.json
 *
 * All fields are optional; missing values fall back to environment
 * variables and built-in defaults (see resolveApiKey / resolveBaseURL).
 */
export interface OAI2LMConfig {
    /** API key for authentication (can also be read from env) */
    apiKey?: string;
    /** Base URL for API calls */
    baseURL?: string;
    /** Provider name (defaults to 'oai2lm') */
    name?: string;
    /**
     * Custom headers applied to all requests.
     *
     * When combined with override headers (e.g. from model overrides or
     * runtime settings), config file headers are applied first and then
     * override headers are spread on top. This means any override header
     * with the same key will replace the corresponding config file value.
     */
    headers?: Record<string, string>;
    /** Auto-discover models on initialization (default: true) */
    autoDiscoverModels?: boolean;
    /** Per-model configuration overrides (supports wildcards) */
    modelOverrides?: Record<string, ModelOverride>;
}
/**
 * Get the OpenCode data directory path
 * Follows XDG Base Directory Specification
 * Note: XDG spec requires absolute paths; relative paths are ignored
 * (a relative $XDG_DATA_HOME falls back to ~/.local/share)
 */
export declare function getDataDir(): string;
/**
 * Get the OpenCode config directory path
 * Follows XDG Base Directory Specification
 * Note: XDG spec requires absolute paths; relative paths are ignored
 * (a relative $XDG_CONFIG_HOME falls back to ~/.config)
 */
export declare function getConfigDir(): string;
/**
 * Config file name for this plugin
 */
export declare const CONFIG_FILENAME = "oai2lm.json";
/**
 * Load configuration from oai2lm.json
 *
 * Search order (by precedence):
 * 1. ~/.local/share/opencode/oai2lm.json (data directory)
 * 2. ~/.config/opencode/oai2lm.json (config directory)
 *
 * The data directory location takes precedence over the config directory;
 * the first readable file found in this order is used. Returns undefined
 * when no readable config file exists.
 */
export declare function loadConfig(): OAI2LMConfig | undefined;
/**
 * Load API key from environment or config
 *
 * Priority:
 * 1. Explicit apiKey in settings
 * 2. Environment variable OAI2LM_API_KEY
 * 3. Config file apiKey
 *
 * Returns undefined when no source provides a key.
 */
export declare function resolveApiKey(explicitKey?: string, config?: OAI2LMConfig): string | undefined;
/**
 * Load base URL from environment or config
 *
 * Priority:
 * 1. Explicit baseURL in settings
 * 2. Environment variable OAI2LM_BASE_URL
 * 3. Config file baseURL
 * 4. Default: https://api.openai.com/v1
 *
 * Always returns a URL (the default guarantees a non-empty result).
 */
export declare function resolveBaseURL(explicitURL?: string, config?: OAI2LMConfig): string;
/**
 * Create provider settings from config file and overrides
 *
 * This is a convenience function that:
 * 1. Loads config from oai2lm.json
 * 2. Applies explicit settings as overrides
 * 3. Returns complete settings ready for provider creation
 *
 * Throws when no API key can be resolved from any source.
 */
export declare function createSettingsFromConfig(overrides?: Partial<OAI2LMProviderSettings>): OAI2LMProviderSettings;
/**
 * Get the path to the config file (for user guidance)
 * Points at the data-directory location, the primary search path.
 */
export declare function getConfigFilePath(): string;
96
+ //# sourceMappingURL=config.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAKH,OAAO,EAAE,sBAAsB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAC;AAEnE;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,6DAA6D;IAC7D,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,6BAA6B;IAC7B,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,2CAA2C;IAC3C,IAAI,CAAC,EAAE,MAAM,CAAC;IACd;;;;;;;OAOG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,6DAA6D;IAC7D,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,6DAA6D;IAC7D,cAAc,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC;CAChD;AAED;;;;GAIG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAMnC;AAED;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAMrC;AAED;;GAEG;AACH,eAAO,MAAM,eAAe,gBAAgB,CAAC;AAuB7C;;;;;;;;;GASG;AACH,wBAAgB,UAAU,IAAI,YAAY,GAAG,SAAS,CAcrD;AAED;;;;;;;GAOG;AACH,wBAAgB,aAAa,CAC3B,WAAW,CAAC,EAAE,MAAM,EACpB,MAAM,CAAC,EAAE,YAAY,GACpB,MAAM,GAAG,SAAS,CAYpB;AAED;;;;;;;;GAQG;AACH,wBAAgB,cAAc,CAC5B,WAAW,CAAC,EAAE,MAAM,EACpB,MAAM,CAAC,EAAE,YAAY,GACpB,MAAM,CAkBR;AAED;;;;;;;GAOG;AACH,wBAAgB,wBAAwB,CACtC,SAAS,CAAC,EAAE,OAAO,CAAC,sBAAsB,CAAC,GAC1C,sBAAsB,CAkCxB;AAED;;GAEG;AACH,wBAAgB,iBAAiB,IAAI,MAAM,CAE1C"}
package/dist/config.js ADDED
@@ -0,0 +1,175 @@
1
+ /**
2
+ * Configuration loading for OpenCode plugin
3
+ *
4
+ * Reads configuration from:
5
+ * 1. ~/.local/share/opencode/oai2lm.json (primary config file)
6
+ * 2. ~/.config/opencode/oai2lm.json (alternative config location)
7
+ *
8
+ * This follows OpenCode's convention where:
9
+ * - ~/.local/share/opencode/ contains data files (auth.json, etc.)
10
+ * - ~/.config/opencode/ contains user configuration
11
+ */
12
+ import { readFileSync, existsSync } from 'node:fs';
13
+ import { homedir } from 'node:os';
14
+ import { join, isAbsolute } from 'node:path';
15
/**
 * Resolve the OpenCode data directory.
 *
 * Honors $XDG_DATA_HOME when it is set to an absolute path; the XDG Base
 * Directory spec says relative values must be ignored, in which case we
 * fall back to the conventional ~/.local/share location. The provider
 * keeps its files under the "opencode" subdirectory.
 */
export function getDataDir() {
    const fromEnv = process.env['XDG_DATA_HOME'];
    return fromEnv && isAbsolute(fromEnv)
        ? join(fromEnv, 'opencode')
        : join(homedir(), '.local', 'share', 'opencode');
}
27
/**
 * Resolve the OpenCode configuration directory.
 *
 * Honors $XDG_CONFIG_HOME when it is set to an absolute path; the XDG
 * Base Directory spec says relative values must be ignored, in which
 * case we fall back to the conventional ~/.config location. The provider
 * keeps its files under the "opencode" subdirectory.
 */
export function getConfigDir() {
    const fromEnv = process.env['XDG_CONFIG_HOME'];
    return fromEnv && isAbsolute(fromEnv)
        ? join(fromEnv, 'opencode')
        : join(homedir(), '.config', 'opencode');
}
39
/** Name of this plugin's config file inside the OpenCode directories. */
export const CONFIG_FILENAME = 'oai2lm.json';
43
/**
 * Attempt to read and JSON-parse the file at `filepath`.
 *
 * Never throws: returns undefined when the file is missing, unreadable,
 * or contains invalid JSON. Parse and read failures are reported via
 * console.warn so a broken config file does not crash initialization.
 */
function readJsonFile(filepath) {
    try {
        if (!existsSync(filepath)) {
            return undefined;
        }
        const text = readFileSync(filepath, 'utf-8');
        return JSON.parse(text);
    }
    catch (error) {
        // Distinguish malformed JSON (most common user error) from I/O failures.
        if (error instanceof SyntaxError) {
            console.warn(`Failed to parse JSON in config file ${filepath}: ${error.message}`);
        }
        else {
            console.warn(`Failed to read config file ${filepath}:`, error);
        }
        return undefined;
    }
}
65
/**
 * Load the plugin configuration from oai2lm.json.
 *
 * Search order (by precedence):
 * 1. ~/.local/share/opencode/oai2lm.json (data directory)
 * 2. ~/.config/opencode/oai2lm.json (config directory)
 *
 * The first file that can be read and parsed wins, so the data-directory
 * location takes precedence when both exist. Returns undefined when no
 * usable config file is found.
 */
export function loadConfig() {
    const searchDirs = [getDataDir(), getConfigDir()];
    for (const dir of searchDirs) {
        const parsed = readJsonFile(join(dir, CONFIG_FILENAME));
        if (parsed) {
            return parsed;
        }
    }
    return undefined;
}
88
/**
 * Resolve the API key from the available sources.
 *
 * Priority:
 * 1. Explicit apiKey in settings (trimmed)
 * 2. Environment variable OAI2LM_API_KEY (trimmed)
 * 3. Config file apiKey (as-is)
 *
 * Whitespace-only values at steps 1 and 2 count as "not set" so the
 * search falls through to the next source. Returns undefined when no
 * source provides a key.
 */
export function resolveApiKey(explicitKey, config) {
    const explicit = typeof explicitKey === 'string' ? explicitKey.trim() : '';
    if (explicit.length > 0) {
        return explicit;
    }
    const rawEnv = process.env['OAI2LM_API_KEY'];
    const fromEnv = typeof rawEnv === 'string' ? rawEnv.trim() : '';
    if (fromEnv.length > 0) {
        return fromEnv;
    }
    return config?.apiKey;
}
107
/**
 * Resolve the base URL from the available sources.
 *
 * Priority:
 * 1. Explicit baseURL in settings
 * 2. Environment variable OAI2LM_BASE_URL
 * 3. Config file baseURL
 * 4. Default: https://api.openai.com/v1
 *
 * Each candidate is used only if it is a non-empty string after trimming,
 * so blank values fall through to the next source. Always returns a URL.
 */
export function resolveBaseURL(explicitURL, config) {
    const candidates = [explicitURL, process.env['OAI2LM_BASE_URL'], config?.baseURL];
    for (const candidate of candidates) {
        if (typeof candidate === 'string') {
            const trimmed = candidate.trim();
            if (trimmed.length > 0) {
                return trimmed;
            }
        }
    }
    return 'https://api.openai.com/v1';
}
132
/**
 * Build complete provider settings from the config file plus explicit overrides.
 *
 * Resolution order for every field is: explicit override, then config file
 * value, then built-in default. Headers and per-model overrides are merged
 * (config first, explicit overrides on top). Throws when no API key can be
 * resolved from any source, since the provider cannot authenticate without one.
 */
export function createSettingsFromConfig(overrides) {
    const config = loadConfig();
    const apiKey = resolveApiKey(overrides?.apiKey, config);
    if (!apiKey) {
        // Point the user at every place a key could have been supplied.
        const dataPath = join(getDataDir(), CONFIG_FILENAME);
        const configPath = join(getConfigDir(), CONFIG_FILENAME);
        throw new Error('API key not found. Please set OAI2LM_API_KEY environment variable, ' +
            'or add apiKey to ' + dataPath + ' or ' + configPath + ', ' +
            'or pass apiKey in settings.');
    }
    // Per-model overrides: explicit settings win over config file entries.
    const mergedModelOverrides = Object.assign({}, config?.modelOverrides, overrides?.modelOverrides);
    return {
        apiKey,
        baseURL: resolveBaseURL(overrides?.baseURL, config),
        name: overrides?.name ?? config?.name ?? 'oai2lm',
        // Headers merge the same way: config values first, overrides on top.
        headers: Object.assign({}, config?.headers, overrides?.headers),
        autoDiscoverModels: overrides?.autoDiscoverModels ?? config?.autoDiscoverModels ?? true,
        // Omit the key entirely when neither source defined any overrides.
        modelOverrides: Object.keys(mergedModelOverrides).length > 0 ? mergedModelOverrides : undefined,
        fetch: overrides?.fetch,
    };
}
169
/**
 * Get the path to the config file (for user guidance).
 * Points at the data-directory location, which is the primary search path.
 */
export function getConfigFilePath() {
    const dir = getDataDir();
    return join(dir, CONFIG_FILENAME);
}
175
+ //# sourceMappingURL=config.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"config.js","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAEH,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AACnD,OAAO,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AAClC,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,WAAW,CAAC;AA4B7C;;;;GAIG;AACH,MAAM,UAAU,UAAU;IACxB,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IACjD,IAAI,WAAW,IAAI,UAAU,CAAC,WAAW,CAAC,EAAE,CAAC;QAC3C,OAAO,IAAI,CAAC,WAAW,EAAE,UAAU,CAAC,CAAC;IACvC,CAAC;IACD,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;AACxD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;IACrD,IAAI,aAAa,IAAI,UAAU,CAAC,aAAa,CAAC,EAAE,CAAC;QAC/C,OAAO,IAAI,CAAC,aAAa,EAAE,UAAU,CAAC,CAAC;IACzC,CAAC;IACD,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;AAChD,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,eAAe,GAAG,aAAa,CAAC;AAE7C;;GAEG;AACH,SAAS,YAAY,CAAI,QAAgB;IACvC,IAAI,CAAC;QACH,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;YAC1B,OAAO,SAAS,CAAC;QACnB,CAAC;QACD,MAAM,OAAO,GAAG,YAAY,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QAChD,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,CAAM,CAAC;IAClC,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,KAAK,YAAY,WAAW,EAAE,CAAC;YACjC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;YAC9B,OAAO,CAAC,IAAI,CAAC,uCAAuC,QAAQ,KAAK,OAAO,EAAE,CAAC,CAAC;QAC9E,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,IAAI,CAAC,8BAA8B,QAAQ,GAAG,EAAE,KAAK,CAAC,CAAC;QACjE,CAAC;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;AACH,CAAC;AAED;;;;;;;;;GASG;AACH,MAAM,UAAU,UAAU;IACxB,MAAM,KAAK,GAAG;QACZ,IAAI,CAAC,UAAU,EAAE,EAAE,eAAe,CAAC;QACnC,IAAI,CAAC,YAAY,EAAE,EAAE,eAAe,CAAC;KACtC,CAAC;IAEF,KAAK,MAAM,UAAU,IAAI,KAAK,EAAE,CAAC;QAC/B,MAAM,MAAM,GAAG,YAAY,CAAe,UAAU,CAAC,CAAC;QACtD,IAAI,MAAM,EAAE,CAAC;YACX,OAAO,MAAM,CAAC;QAChB,CAAC;IACH,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,aAAa,CAC3B,WAAoB,EACpB,MAAqB;IAErB,IAAI,OAAO,WAAW,KAAK,QAAQ,IAAI,WAAW,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACrE,OAAO,WAAW,CAAC,IAAI,EAAE,CAAC;IAC5B,CAAC;IAED,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;IAC7C,6
EAA6E;IAC7E,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE,CAAC;QACvD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;IACvB,CAAC;IAED,OAAO,MAAM,EAAE,MAAM,CAAC;AACxB,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,cAAc,CAC5B,WAAoB,EACpB,MAAqB;IAErB,gEAAgE;IAChE,IAAI,OAAO,WAAW,KAAK,QAAQ,IAAI,WAAW,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACrE,OAAO,WAAW,CAAC,IAAI,EAAE,CAAC;IAC5B,CAAC;IAED,gFAAgF;IAChF,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;IAC9C,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3D,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;IACvB,CAAC;IAED,+EAA+E;IAC/E,IAAI,OAAO,MAAM,EAAE,OAAO,KAAK,QAAQ,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC5E,OAAO,MAAM,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC;IAC/B,CAAC;IAED,OAAO,2BAA2B,CAAC;AACrC,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,wBAAwB,CACtC,SAA2C;IAE3C,MAAM,MAAM,GAAG,UAAU,EAAE,CAAC;IAE5B,MAAM,MAAM,GAAG,aAAa,CAAC,SAAS,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;IACxD,IAAI,CAAC,MAAM,EAAE,CAAC;QACZ,MAAM,QAAQ,GAAG,IAAI,CAAC,UAAU,EAAE,EAAE,eAAe,CAAC,CAAC;QACrD,MAAM,UAAU,GAAG,IAAI,CAAC,YAAY,EAAE,EAAE,eAAe,CAAC,CAAC;QACzD,MAAM,IAAI,KAAK,CACb,qEAAqE;YACrE,mBAAmB,GAAG,QAAQ,GAAG,MAAM,GAAG,UAAU,GAAG,IAAI;YAC3D,6BAA6B,CAC9B,CAAC;IACJ,CAAC;IAED,MAAM,OAAO,GAAG,cAAc,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAE3D,qDAAqD;IACrD,MAAM,cAAc,GAAkC;QACpD,GAAG,CAAC,MAAM,EAAE,cAAc,IAAI,EAAE,CAAC;QACjC,GAAG,CAAC,SAAS,EAAE,cAAc,IAAI,EAAE,CAAC;KACrC,CAAC;IAEF,OAAO;QACL,MAAM;QACN,OAAO;QACP,IAAI,EAAE,SAAS,EAAE,IAAI,IAAI,MAAM,EAAE,IAAI,IAAI,QAAQ;QACjD,OAAO,EAAE;YACP,GAAG,CAAC,MAAM,EAAE,OAAO,IAAI,EAAE,CAAC;YAC1B,GAAG,CAAC,SAAS,EAAE,OAAO,IAAI,EAAE,CAAC;SAC9B;QACD,kBAAkB,EAAE,SAAS,EAAE,kBAAkB,IAAI,MAAM,EAAE,kBAAkB,IAAI,IAAI;QACvF,cAAc,EAAE,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,SAAS;QACnF,KAAK,EAAE,SAAS,EAAE,KAAK;KACxB,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,iBAAiB;IAC/B,OAAO,IAAI,CAAC,UAAU,EAAE,EAAE,eAAe,CAAC,CAAC;AAC7C,CAAC"}
@@ -0,0 +1,12 @@
1
+ /**
2
+ * @oai2lmapi/opencode-provider
3
+ *
4
+ * OpenAI-compatible provider for OpenCode with auto-discovery and advanced features
5
+ */
6
+ export { createOAI2LMProvider, createOAI2LMProviderFromConfig, OAI2LMProvider } from './provider.js';
7
+ export type { OAI2LMProviderSettings, ModelOverride, ModelMetadata, ModelInfo, } from './types.js';
8
+ export { getModelMetadataFromPatterns, DEFAULT_MODEL_METADATA } from './modelMetadata.js';
9
+ export { ModelDiscovery } from './modelDiscovery.js';
10
+ export { loadConfig, createSettingsFromConfig, getConfigFilePath, getDataDir, getConfigDir, resolveApiKey, resolveBaseURL, CONFIG_FILENAME, } from './config.js';
11
+ export type { OAI2LMConfig } from './config.js';
12
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,oBAAoB,EAAE,8BAA8B,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AACrG,YAAY,EACV,sBAAsB,EACtB,aAAa,EACb,aAAa,EACb,SAAS,GACV,MAAM,YAAY,CAAC;AACpB,OAAO,EAAE,4BAA4B,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAC1F,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EACL,UAAU,EACV,wBAAwB,EACxB,iBAAiB,EACjB,UAAU,EACV,YAAY,EACZ,aAAa,EACb,cAAc,EACd,eAAe,GAChB,MAAM,aAAa,CAAC;AACrB,YAAY,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,10 @@
1
+ /**
2
+ * @oai2lmapi/opencode-provider
3
+ *
4
+ * OpenAI-compatible provider for OpenCode with auto-discovery and advanced features
5
+ */
6
+ export { createOAI2LMProvider, createOAI2LMProviderFromConfig, OAI2LMProvider } from './provider.js';
7
+ export { getModelMetadataFromPatterns, DEFAULT_MODEL_METADATA } from './modelMetadata.js';
8
+ export { ModelDiscovery } from './modelDiscovery.js';
9
+ export { loadConfig, createSettingsFromConfig, getConfigFilePath, getDataDir, getConfigDir, resolveApiKey, resolveBaseURL, CONFIG_FILENAME, } from './config.js';
10
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,oBAAoB,EAAE,8BAA8B,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AAOrG,OAAO,EAAE,4BAA4B,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAC1F,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EACL,UAAU,EACV,wBAAwB,EACxB,iBAAiB,EACjB,UAAU,EACV,YAAY,EACZ,aAAa,EACb,cAAc,EACd,eAAe,GAChB,MAAM,aAAa,CAAC"}
@@ -0,0 +1,31 @@
1
+ /**
2
+ * Model discovery: fetch and cache models from API
3
+ */
4
+ import { ModelInfo, ModelMetadata } from './types.js';
5
export declare class ModelDiscovery {
    private readonly baseURL;
    private readonly apiKey;
    private readonly headers;
    private readonly fetchFn;
    // Cached model list — presumably populated by fetchModels(); confirm in src/modelDiscovery.ts.
    private modelsCache;
    // Timestamp of the last fetch, compared against cacheDuration to decide
    // when to refresh. (Units not visible in this declaration file —
    // presumably epoch milliseconds; confirm in src/modelDiscovery.ts.)
    private lastFetchTime;
    // How long cached results remain valid before a re-fetch is attempted.
    private readonly cacheDuration;
    constructor(baseURL: string, apiKey: string, headers: Record<string, string>, fetchFn?: typeof fetch);
    /**
     * Fetch models from API /models endpoint
     */
    fetchModels(): Promise<ModelInfo[]>;
    /**
     * Get metadata for a specific model
     * Resolves to undefined when the model is not known to the API.
     */
    getModelMetadata(modelId: string): Promise<ModelMetadata | undefined>;
    /**
     * Extract metadata from model object returned by API
     */
    private extractMetadataFromModel;
    /**
     * Clear cache
     */
    clearCache(): void;
}
31
+ //# sourceMappingURL=modelDiscovery.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"modelDiscovery.d.ts","sourceRoot":"","sources":["../src/modelDiscovery.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,SAAS,EAAsB,aAAa,EAAE,MAAM,YAAY,CAAC;AAG1E,qBAAa,cAAc;IAMvB,OAAO,CAAC,QAAQ,CAAC,OAAO;IACxB,OAAO,CAAC,QAAQ,CAAC,MAAM;IACvB,OAAO,CAAC,QAAQ,CAAC,OAAO;IACxB,OAAO,CAAC,QAAQ,CAAC,OAAO;IAR1B,OAAO,CAAC,WAAW,CAAqC;IACxD,OAAO,CAAC,aAAa,CAAa;IAClC,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAiB;gBAG5B,OAAO,EAAE,MAAM,EACf,MAAM,EAAE,MAAM,EACd,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,EAC/B,OAAO,GAAE,OAAO,KAAa;IAGhD;;OAEG;IACG,WAAW,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAwCzC;;OAEG;IACG,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,aAAa,GAAG,SAAS,CAAC;IAkB3E;;OAEG;IACH,OAAO,CAAC,wBAAwB;IA2ChC;;OAEG;IACH,UAAU,IAAI,IAAI;CAInB"}