@lssm/lib.ai-providers 0.0.0-canary-20251217050226

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,90 @@
1
+ # @lssm/lib.ai-providers
2
+
3
+ **Unified AI provider abstraction** for ContractSpec applications.
4
+
5
+ ## Overview
6
+
7
+ This library provides a consistent interface for working with multiple LLM providers across ContractSpec. It's used by:
8
+
9
+ - `@lssm/module.ai-chat` - Vibe coding chat
10
+ - `@lssm/bundle.contractspec-workspace` - CLI AI features
11
+ - `@lssm/lib.ai-agent` - Agent orchestration
12
+
13
+ ## Supported Providers
14
+
15
+ | Provider | Local | BYOK | Managed |
16
+ |----------|-------|------|---------|
17
+ | Ollama | ✅ | - | - |
18
+ | OpenAI | - | ✅ | ✅ |
19
+ | Anthropic | - | ✅ | ✅ |
20
+ | Mistral | - | ✅ | ✅ |
21
+ | Google Gemini | - | ✅ | ✅ |
22
+
23
+ ## Usage
24
+
25
+ ### Basic Provider Creation
26
+
27
+ ```typescript
28
+ import { createProvider, type ProviderConfig } from '@lssm/lib.ai-providers';
29
+
30
+ // Ollama (local)
31
+ const ollamaProvider = createProvider({
32
+ provider: 'ollama',
33
+ model: 'llama3.2',
34
+ });
35
+
36
+ // OpenAI (BYOK)
37
+ const openaiProvider = createProvider({
38
+ provider: 'openai',
39
+ apiKey: process.env.OPENAI_API_KEY,
40
+ model: 'gpt-4o',
41
+ });
42
+
43
+ // Use the model
44
+ const model = openaiProvider.getModel();
45
+ ```
46
+
47
+ ### From Environment
48
+
49
+ ```typescript
50
+ import { createProviderFromEnv } from '@lssm/lib.ai-providers';
51
+
52
+ // Reads from CONTRACTSPEC_AI_PROVIDER, OPENAI_API_KEY, etc.
53
+ const provider = createProviderFromEnv();
54
+ ```
55
+
56
+ ### Legacy Config Support
57
+
58
+ ```typescript
59
+ import { getAIProvider } from '@lssm/lib.ai-providers';
60
+ import type { Config } from '@lssm/bundle.contractspec-workspace';
61
+
62
+ // Backwards compatible with existing Config type
63
+ const model = getAIProvider(config);
64
+ ```
65
+
66
+ ## Provider Modes
67
+
68
+ - **Local**: Run models locally (Ollama only)
69
+ - **BYOK**: Bring Your Own Key for cloud providers
70
+ - **Managed**: Use ContractSpec-managed keys via API proxy
71
+
72
+ ## API
73
+
74
+ ### Types
75
+
76
+ - `ProviderName` - Supported provider names
77
+ - `ProviderMode` - local | byok | managed
78
+ - `ProviderConfig` - Configuration for creating a provider
79
+ - `Provider` - Provider interface with getModel()
80
+ - `ModelInfo` - Model metadata (context window, capabilities)
81
+
82
+ ### Functions
83
+
84
+ - `createProvider(config)` - Create a provider instance
85
+ - `createProviderFromEnv()` - Create from environment variables
86
+ - `getAIProvider(config)` - Legacy compatibility function
87
+ - `validateProvider(config)` - Check if provider is properly configured
88
+ - `getRecommendedModels(provider)` - Get recommended models for a provider
89
+ - `getAvailableProviders()` - List available providers with status
90
+
package/dist/factory.js ADDED
@@ -0,0 +1 @@
1
import { DEFAULT_MODELS, getModelsForProvider } from './models.js';
import { anthropic } from '@ai-sdk/anthropic';
import { google } from '@ai-sdk/google';
import { mistral } from '@ai-sdk/mistral';
import { openai } from '@ai-sdk/openai';
import { ollama } from 'ollama-ai-provider';

const OLLAMA_DEFAULT_URL = 'http://localhost:11434';

/**
 * Create a model through the OpenAI SDK while OPENAI_BASE_URL temporarily
 * points at the managed proxy; the previous env value is restored afterwards
 * so process.env is not permanently mutated.
 *
 * NOTE(review): managed mode routes anthropic/mistral/gemini through the
 * OpenAI SDK as well — presumably the proxy speaks the OpenAI wire format;
 * confirm against the proxy implementation.
 */
function createProxiedOpenAIModel(modelId, proxyUrl) {
  const previous = process.env.OPENAI_BASE_URL;
  if (proxyUrl) {
    process.env.OPENAI_BASE_URL = proxyUrl;
  }
  const model = openai(modelId);
  if (previous === undefined) {
    if (proxyUrl) {
      delete process.env.OPENAI_BASE_URL;
    }
  } else {
    process.env.OPENAI_BASE_URL = previous;
  }
  return model;
}

/**
 * Same temporary-env technique for Ollama: OLLAMA_BASE_URL is only
 * overridden when a non-default base URL was configured, then restored.
 */
function createOllamaModel(modelId, baseUrl) {
  const previous = process.env.OLLAMA_BASE_URL;
  const overridden = Boolean(baseUrl) && baseUrl !== OLLAMA_DEFAULT_URL;
  if (overridden) {
    process.env.OLLAMA_BASE_URL = baseUrl;
  }
  const model = ollama(modelId);
  if (previous === undefined) {
    if (overridden) {
      delete process.env.OLLAMA_BASE_URL;
    }
  } else {
    process.env.OLLAMA_BASE_URL = previous;
  }
  return model;
}

/**
 * Unified wrapper over the per-vendor AI SDK providers.
 * Exposes getModel() (lazy, cached), listModels(), and validate().
 */
class Provider {
  name;
  model;
  mode;
  config;
  cachedModel = null;

  constructor(config) {
    this.name = config.provider;
    // Fall back to the provider's default model when none is configured.
    this.model = config.model ?? DEFAULT_MODELS[config.provider];
    this.mode = this.determineMode(config);
    this.config = config;
  }

  /** 'local' for ollama, 'byok' when an API key is supplied, else 'managed'. */
  determineMode(config) {
    if (config.provider === 'ollama') {
      return 'local';
    }
    return config.apiKey ? 'byok' : 'managed';
  }

  /** Lazily build the underlying SDK model and cache it for reuse. */
  getModel() {
    this.cachedModel ||= this.createModel();
    return this.cachedModel;
  }

  /** Instantiate the vendor SDK model for this provider/mode. */
  createModel() {
    const { baseUrl, proxyUrl } = this.config;
    switch (this.name) {
      case 'ollama':
        return createOllamaModel(this.model, baseUrl);
      case 'openai':
        return this.mode === 'managed'
          ? createProxiedOpenAIModel(this.model, proxyUrl)
          : openai(this.model);
      case 'anthropic':
        return this.mode === 'managed'
          ? createProxiedOpenAIModel(this.model, proxyUrl)
          : anthropic(this.model);
      case 'mistral':
        return this.mode === 'managed'
          ? createProxiedOpenAIModel(this.model, proxyUrl)
          : mistral(this.model);
      case 'gemini':
        return this.mode === 'managed'
          ? createProxiedOpenAIModel(this.model, proxyUrl)
          : google(this.model);
      default:
        throw new Error(`Unknown provider: ${this.name}`);
    }
  }

  /** Installed Ollama models for local mode; the static catalog otherwise. */
  async listModels() {
    if (this.name === 'ollama') {
      return this.listOllamaModels();
    }
    return getModelsForProvider(this.name);
  }

  /** Query the Ollama server; fall back to the static catalog on any failure. */
  async listOllamaModels() {
    const baseUrl = this.config.baseUrl ?? OLLAMA_DEFAULT_URL;
    try {
      const response = await fetch(`${baseUrl}/api/tags`);
      if (!response.ok) {
        return getModelsForProvider('ollama');
      }
      const payload = await response.json();
      return (payload.models ?? []).map((entry) => ({
        id: entry.name,
        name: entry.name,
        provider: 'ollama',
        // Context window is unknown for arbitrary local models; 8000 is a
        // conservative placeholder, as are the capability flags.
        contextWindow: 8000,
        capabilities: { vision: false, tools: false, reasoning: false, streaming: true },
      }));
    } catch {
      return getModelsForProvider('ollama');
    }
  }

  /** Configuration sanity check; only the Ollama path touches the network. */
  async validate() {
    if (this.name === 'ollama') {
      return this.validateOllama();
    }
    if (this.mode === 'byok' && !this.config.apiKey) {
      return { valid: false, error: `API key required for ${this.name}` };
    }
    if (this.mode === 'managed' && !this.config.proxyUrl && !this.config.organizationId) {
      return { valid: false, error: `Managed mode requires proxyUrl or organizationId` };
    }
    return { valid: true };
  }

  /** Check that the Ollama server is reachable and has the configured model. */
  async validateOllama() {
    const baseUrl = this.config.baseUrl ?? OLLAMA_DEFAULT_URL;
    try {
      const response = await fetch(`${baseUrl}/api/tags`);
      if (!response.ok) {
        return { valid: false, error: `Ollama server returned ${response.status}` };
      }
      const models = (await response.json()).models ?? [];
      if (models.some((entry) => entry.name === this.model)) {
        return { valid: true };
      }
      return {
        valid: false,
        error: `Model "${this.model}" not found. Available: ${models.map((entry) => entry.name).join(`, `)}`,
      };
    } catch (err) {
      const detail = err instanceof Error ? err.message : String(err);
      return { valid: false, error: `Cannot connect to Ollama at ${baseUrl}: ${detail}` };
    }
  }
}

/** Create a provider instance from an explicit configuration object. */
export function createProvider(config) {
  return new Provider(config);
}

// Per-provider API-key lookups used by createProviderFromEnv.
const ENV_API_KEYS = {
  openai: () => process.env.OPENAI_API_KEY,
  anthropic: () => process.env.ANTHROPIC_API_KEY,
  mistral: () => process.env.MISTRAL_API_KEY,
  gemini: () => process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY,
};

/**
 * Create a provider from environment variables:
 * CONTRACTSPEC_AI_PROVIDER (default 'openai'), CONTRACTSPEC_AI_MODEL,
 * the provider's API-key variable, OLLAMA_BASE_URL,
 * CONTRACTSPEC_AI_PROXY_URL, and CONTRACTSPEC_ORG_ID.
 */
export function createProviderFromEnv() {
  const provider = process.env.CONTRACTSPEC_AI_PROVIDER ?? `openai`;
  return createProvider({
    provider,
    model: process.env.CONTRACTSPEC_AI_MODEL,
    apiKey: ENV_API_KEYS[provider]?.(),
    baseUrl: process.env.OLLAMA_BASE_URL,
    proxyUrl: process.env.CONTRACTSPEC_AI_PROXY_URL,
    organizationId: process.env.CONTRACTSPEC_ORG_ID,
  });
}

/**
 * Describe every supported provider with availability info: Ollama is always
 * available locally; cloud providers are available with an API key (byok)
 * or when a managed proxy URL is configured (managed).
 */
export function getAvailableProviders() {
  const proxyConfigured = Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL);
  const cloudProviders = [
    [`openai`, process.env.OPENAI_API_KEY, `OPENAI_API_KEY`],
    [`anthropic`, process.env.ANTHROPIC_API_KEY, `ANTHROPIC_API_KEY`],
    [`mistral`, process.env.MISTRAL_API_KEY, `MISTRAL_API_KEY`],
    [`gemini`, process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY, `GOOGLE_API_KEY`],
  ];
  const providers = [{ provider: `ollama`, available: true, mode: `local` }];
  for (const [provider, apiKey, envVar] of cloudProviders) {
    providers.push({
      provider,
      available: Boolean(apiKey) || proxyConfigured,
      mode: apiKey ? `byok` : `managed`,
      reason: apiKey ? undefined : `Set ${envVar} for BYOK mode`,
    });
  }
  return providers;
}
package/dist/index.js ADDED
@@ -0,0 +1 @@
1
/**
 * Public entry point for @lssm/lib.ai-providers — re-exports the model
 * catalog, the provider factory, validation helpers, and the legacy shims.
 */
export {
  DEFAULT_MODELS,
  MODELS,
  getDefaultModel,
  getModelInfo,
  getModelsForProvider,
  getRecommendedModels,
} from './models.js';
export {
  createProvider,
  createProviderFromEnv,
  getAvailableProviders,
} from './factory.js';
export {
  getEnvVarName,
  hasCredentials,
  isOllamaRunning,
  listOllamaModels,
  validateProvider,
} from './validation.js';
// The legacy validateProvider is renamed so it cannot shadow the modern one.
export {
  getAIProvider,
  validateProvider as validateLegacyProvider,
} from './legacy.js';
package/dist/legacy.js ADDED
@@ -0,0 +1 @@
1
import { getRecommendedModels } from './models.js';
import { createProvider } from './factory.js';

/** Map legacy config provider names onto canonical provider names. */
function normalizeProviderName(name) {
  if (name === `claude`) {
    return `anthropic`;
  }
  if (name === `custom`) {
    return `openai`;
  }
  return name;
}

/**
 * Legacy shim: build an AI SDK model from the old workspace Config shape
 * ({ aiProvider, aiModel, customEndpoint }), reading API keys from env.
 */
function getAIProvider(config) {
  const provider = normalizeProviderName(config.aiProvider);
  let apiKey;
  switch (provider) {
    case `openai`:
      apiKey = process.env.OPENAI_API_KEY;
      break;
    case `anthropic`:
      apiKey = process.env.ANTHROPIC_API_KEY;
      break;
    case `mistral`:
      apiKey = process.env.MISTRAL_API_KEY;
      break;
    case `gemini`:
      apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
      break;
  }
  return createProvider({
    provider,
    model: config.aiModel,
    apiKey,
    baseUrl: config.customEndpoint,
  }).getModel();
}

// Env vars that satisfy each provider's credential requirement (any one
// of the listed names is sufficient). Ollama needs none.
const REQUIRED_ENV_VARS = {
  anthropic: [`ANTHROPIC_API_KEY`],
  openai: [`OPENAI_API_KEY`],
  mistral: [`MISTRAL_API_KEY`],
  gemini: [`GOOGLE_API_KEY`, `GEMINI_API_KEY`],
};

/**
 * Legacy shim: report whether credentials exist for the configured provider.
 * Returns { success } or { success: false, error } — never throws.
 */
async function validateProvider(config) {
  const provider = normalizeProviderName(config.aiProvider);
  if (provider === `ollama`) {
    return { success: true };
  }
  const required = REQUIRED_ENV_VARS[provider];
  if (required && !required.some((name) => process.env[name])) {
    return {
      success: false,
      error: `${required.join(` or `)} environment variable not set`,
    };
  }
  return { success: true };
}

export { getAIProvider, getRecommendedModels, validateProvider };
package/dist/models.js ADDED
@@ -0,0 +1 @@
1
// NOTE: the minified top-level binding names of the published bundle are
// preserved (e = DEFAULT_MODELS, t = MODELS, n = getModelsForProvider,
// r = getModelInfo, i = getRecommendedModels, a = getDefaultModel); only
// formatting, structure, and documentation differ.

/** Default model id per provider (exported as DEFAULT_MODELS). */
const e = {
  ollama: `llama3.2`,
  openai: `gpt-4o`,
  anthropic: `claude-sonnet-4-20250514`,
  mistral: `mistral-large-latest`,
  gemini: `gemini-2.0-flash`,
};

/**
 * Static model catalog (exported as MODELS): id, display name, provider,
 * context window, capability flags, and — for cloud models — USD cost per
 * million tokens.
 */
const t = [
  // Ollama (local)
  { id: `llama3.2`, name: `Llama 3.2`, provider: `ollama`, contextWindow: 128000, capabilities: { vision: false, tools: true, reasoning: false, streaming: true } },
  { id: `codellama`, name: `Code Llama`, provider: `ollama`, contextWindow: 16000, capabilities: { vision: false, tools: false, reasoning: false, streaming: true } },
  { id: `deepseek-coder`, name: `DeepSeek Coder`, provider: `ollama`, contextWindow: 16000, capabilities: { vision: false, tools: false, reasoning: false, streaming: true } },
  { id: `mistral`, name: `Mistral 7B`, provider: `ollama`, contextWindow: 32000, capabilities: { vision: false, tools: false, reasoning: false, streaming: true } },
  // OpenAI
  { id: `gpt-4o`, name: `GPT-4o`, provider: `openai`, contextWindow: 128000, capabilities: { vision: true, tools: true, reasoning: false, streaming: true }, costPerMillion: { input: 2.5, output: 10 } },
  { id: `gpt-4o-mini`, name: `GPT-4o Mini`, provider: `openai`, contextWindow: 128000, capabilities: { vision: true, tools: true, reasoning: false, streaming: true }, costPerMillion: { input: 0.15, output: 0.6 } },
  { id: `o1`, name: `o1`, provider: `openai`, contextWindow: 200000, capabilities: { vision: true, tools: true, reasoning: true, streaming: true }, costPerMillion: { input: 15, output: 60 } },
  { id: `o1-mini`, name: `o1 Mini`, provider: `openai`, contextWindow: 128000, capabilities: { vision: false, tools: true, reasoning: true, streaming: true }, costPerMillion: { input: 3, output: 12 } },
  // Anthropic
  { id: `claude-sonnet-4-20250514`, name: `Claude Sonnet 4`, provider: `anthropic`, contextWindow: 200000, capabilities: { vision: true, tools: true, reasoning: true, streaming: true }, costPerMillion: { input: 3, output: 15 } },
  { id: `claude-3-5-sonnet-20241022`, name: `Claude 3.5 Sonnet`, provider: `anthropic`, contextWindow: 200000, capabilities: { vision: true, tools: true, reasoning: false, streaming: true }, costPerMillion: { input: 3, output: 15 } },
  { id: `claude-3-5-haiku-20241022`, name: `Claude 3.5 Haiku`, provider: `anthropic`, contextWindow: 200000, capabilities: { vision: true, tools: true, reasoning: false, streaming: true }, costPerMillion: { input: 0.8, output: 4 } },
  // Mistral
  { id: `mistral-large-latest`, name: `Mistral Large`, provider: `mistral`, contextWindow: 128000, capabilities: { vision: false, tools: true, reasoning: false, streaming: true }, costPerMillion: { input: 2, output: 6 } },
  { id: `codestral-latest`, name: `Codestral`, provider: `mistral`, contextWindow: 32000, capabilities: { vision: false, tools: true, reasoning: false, streaming: true }, costPerMillion: { input: 0.2, output: 0.6 } },
  { id: `mistral-small-latest`, name: `Mistral Small`, provider: `mistral`, contextWindow: 32000, capabilities: { vision: false, tools: true, reasoning: false, streaming: true }, costPerMillion: { input: 0.2, output: 0.6 } },
  // Google Gemini
  { id: `gemini-2.0-flash`, name: `Gemini 2.0 Flash`, provider: `gemini`, contextWindow: 1000000, capabilities: { vision: true, tools: true, reasoning: false, streaming: true }, costPerMillion: { input: 0.075, output: 0.3 } },
  { id: `gemini-2.5-pro-preview-06-05`, name: `Gemini 2.5 Pro`, provider: `gemini`, contextWindow: 1000000, capabilities: { vision: true, tools: true, reasoning: true, streaming: true }, costPerMillion: { input: 1.25, output: 10 } },
  { id: `gemini-2.5-flash-preview-05-20`, name: `Gemini 2.5 Flash`, provider: `gemini`, contextWindow: 1000000, capabilities: { vision: true, tools: true, reasoning: true, streaming: true }, costPerMillion: { input: 0.15, output: 0.6 } },
];

/** getModelsForProvider: catalog entries for one provider, in catalog order. */
function n(provider) {
  return t.filter((model) => model.provider === provider);
}

/** getModelInfo: catalog entry for a model id, or undefined when unknown. */
function r(modelId) {
  return t.find((model) => model.id === modelId);
}

/**
 * getRecommendedModels: model ids for a provider; accepts the legacy
 * aliases 'claude' (→ anthropic) and 'custom' (→ openai).
 */
function i(provider) {
  let canonical = provider;
  if (provider === `claude`) {
    canonical = `anthropic`;
  } else if (provider === `custom`) {
    canonical = `openai`;
  }
  return n(canonical).map((model) => model.id);
}

/** getDefaultModel: default model id for a provider (undefined if unknown). */
function a(provider) {
  return e[provider];
}

export {
  e as DEFAULT_MODELS,
  t as MODELS,
  a as getDefaultModel,
  r as getModelInfo,
  n as getModelsForProvider,
  i as getRecommendedModels,
};
package/dist/types.js ADDED
File without changes
package/dist/validation.js ADDED
@@ -0,0 +1 @@
1
import { createProvider } from './factory.js';

const DEFAULT_OLLAMA_URL = `http://localhost:11434`;

/** Validate a provider config by delegating to the provider's own validate(). */
async function validateProvider(config) {
  return createProvider(config).validate();
}

/**
 * True when credentials for the provider are present in the environment.
 * Ollama needs no credentials; unknown providers report false.
 */
function hasCredentials(provider) {
  switch (provider) {
    case `ollama`:
      return true;
    case `openai`:
      return Boolean(process.env.OPENAI_API_KEY);
    case `anthropic`:
      return Boolean(process.env.ANTHROPIC_API_KEY);
    case `mistral`:
      return Boolean(process.env.MISTRAL_API_KEY);
    case `gemini`:
      return Boolean(process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY);
    default:
      return false;
  }
}

/**
 * Name of the env var holding the provider's API key; null for ollama and
 * unknown providers. (For gemini this reports GOOGLE_API_KEY even though
 * hasCredentials also accepts GEMINI_API_KEY.)
 */
function getEnvVarName(provider) {
  switch (provider) {
    case `openai`:
      return `OPENAI_API_KEY`;
    case `anthropic`:
      return `ANTHROPIC_API_KEY`;
    case `mistral`:
      return `MISTRAL_API_KEY`;
    case `gemini`:
      return `GOOGLE_API_KEY`;
    default:
      // Includes `ollama`, which has no API-key env var.
      return null;
  }
}

/** Probe the Ollama server's /api/tags endpoint; false on any failure. */
async function isOllamaRunning(baseUrl = DEFAULT_OLLAMA_URL) {
  try {
    const response = await fetch(`${baseUrl}/api/tags`);
    return response.ok;
  } catch {
    return false;
  }
}

/** Names of locally installed Ollama models; empty array on any failure. */
async function listOllamaModels(baseUrl = DEFAULT_OLLAMA_URL) {
  try {
    const response = await fetch(`${baseUrl}/api/tags`);
    if (!response.ok) {
      return [];
    }
    const payload = await response.json();
    return (payload.models ?? []).map((model) => model.name);
  } catch {
    return [];
  }
}

export {
  getEnvVarName,
  hasCredentials,
  isOllamaRunning,
  listOllamaModels,
  validateProvider,
};
package/package.json ADDED
@@ -0,0 +1,61 @@
1
+ {
2
+ "name": "@lssm/lib.ai-providers",
3
+ "version": "0.0.0-canary-20251217050226",
4
+ "type": "module",
5
+ "main": "./dist/index.js",
6
+ "module": "./dist/index.js",
7
+ "types": "./dist/index.d.ts",
8
+ "files": [
9
+ "dist",
10
+ "README.md"
11
+ ],
12
+ "scripts": {
13
+ "publish:pkg": "bun publish --tolerate-republish --ignore-scripts --verbose",
14
+ "publish:pkg:canary": "bun publish:pkg --canary",
15
+ "build": "bun build:bundle && bun build:types",
16
+ "build:bundle": "tsdown",
17
+ "build:types": "tsc --noEmit",
18
+ "dev": "bun build:bundle --watch",
19
+ "clean": "rimraf dist .turbo",
20
+ "lint": "bun lint:fix",
21
+ "lint:fix": "eslint src --fix",
22
+ "lint:check": "eslint src",
23
+ "test": "bun test"
24
+ },
25
+ "dependencies": {
26
+ "@ai-sdk/anthropic": "beta",
27
+ "@ai-sdk/google": "beta",
28
+ "@ai-sdk/mistral": "beta",
29
+ "@ai-sdk/openai": "beta",
30
+ "ai": "beta",
31
+ "ollama-ai-provider": "^1.2.0",
32
+ "zod": "^4.1.13"
33
+ },
34
+ "devDependencies": {
35
+ "@lssm/tool.tsdown": "0.0.0-canary-20251217050226",
36
+ "@lssm/tool.typescript": "0.0.0-canary-20251217050226",
37
+ "tsdown": "^0.17.4",
38
+ "typescript": "^5.9.3"
39
+ },
40
+ "exports": {
41
+ ".": "./src/index.ts",
42
+ "./factory": "./src/factory.ts",
43
+ "./legacy": "./src/legacy.ts",
44
+ "./models": "./src/models.ts",
45
+ "./types": "./src/types.ts",
46
+ "./validation": "./src/validation.ts",
47
+ "./*": "./*"
48
+ },
49
+ "publishConfig": {
50
+ "access": "public",
51
+ "exports": {
52
+ ".": "./dist/index.js",
53
+ "./factory": "./dist/factory.js",
54
+ "./legacy": "./dist/legacy.js",
55
+ "./models": "./dist/models.js",
56
+ "./types": "./dist/types.js",
57
+ "./validation": "./dist/validation.js",
58
+ "./*": "./*"
59
+ }
60
+ }
61
+ }