@contractspec/lib.ai-providers 3.7.6 → 3.7.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,103 +1,76 @@
1
1
  # @contractspec/lib.ai-providers
2
2
 
3
- Website: https://contractspec.io/
3
+ Website: https://contractspec.io
4
4
 
5
- **Unified AI provider abstraction** for ContractSpec applications.
5
+ **Unified AI provider abstraction layer.**
6
6
 
7
- ## Overview
7
+ ## What It Provides
8
8
 
9
- This library provides a consistent interface for working with multiple LLM providers across ContractSpec. It's used by:
9
+ - **Layer**: lib.
10
+ - **Consumers**: ai-agent, content-gen, image-gen, voice.
11
+ - Related ContractSpec packages include `@contractspec/lib.provider-ranking`, `@contractspec/tool.bun`, `@contractspec/tool.typescript`.
12
+ - Related ContractSpec packages include `@contractspec/lib.provider-ranking`, `@contractspec/tool.bun`, `@contractspec/tool.typescript`.
10
13
 
11
- - `@contractspec/module.ai-chat` - Vibe coding chat
12
- - `@contractspec/bundle.workspace` - CLI AI features
13
- - `@contractspec/lib.ai-agent` - Agent orchestration
14
+ ## Installation
14
15
 
15
- ## Supported Providers
16
+ `npm install @contractspec/lib.ai-providers`
16
17
 
17
- | Provider | Local | BYOK | Managed |
18
- | ------------- | ----- | ---- | ------- |
19
- | Ollama | ✅ | - | - |
20
- | OpenAI | - | ✅ | ✅ |
21
- | Anthropic | - | ✅ | ✅ |
22
- | Mistral | - | ✅ | ✅ |
23
- | Google Gemini | - | ✅ | ✅ |
18
+ or
24
19
 
25
- ### Mistral model presets
26
-
27
- The bundled Mistral model catalog includes current families used in ContractSpec flows:
28
-
29
- - `mistral-large-latest`
30
- - `mistral-medium-latest`
31
- - `mistral-small-latest`
32
- - `codestral-latest`
33
- - `devstral-small-latest`
34
- - `magistral-medium-latest`
35
- - `pixtral-large-latest`
20
+ `bun add @contractspec/lib.ai-providers`
36
21
 
37
22
  ## Usage
38
23
 
39
- ### Basic Provider Creation
40
-
41
- ```typescript
42
- import { createProvider, type ProviderConfig } from '@contractspec/lib.ai-providers';
43
-
44
- // Ollama (local)
45
- const ollamaProvider = createProvider({
46
- provider: 'ollama',
47
- model: 'llama3.2',
48
- });
49
-
50
- // OpenAI (BYOK)
51
- const openaiProvider = createProvider({
52
- provider: 'openai',
53
- apiKey: process.env.OPENAI_API_KEY,
54
- model: 'gpt-4o',
55
- });
56
-
57
- // Use the model
58
- const model = openaiProvider.getModel();
59
- ```
60
-
61
- ### From Environment
62
-
63
- ```typescript
64
- import { createProviderFromEnv } from '@contractspec/lib.ai-providers';
65
-
66
- // Reads from CONTRACTSPEC_AI_PROVIDER, OPENAI_API_KEY, etc.
67
- const provider = createProviderFromEnv();
68
- ```
69
-
70
- ### Legacy Config Support
71
-
72
- ```typescript
73
- import { getAIProvider } from '@contractspec/lib.ai-providers';
74
- import type { Config } from '@contractspec/bundle.workspace';
75
-
76
- // Backwards compatible with existing Config type
77
- const model = getAIProvider(config);
78
- ```
79
-
80
- ## Provider Modes
81
-
82
- - **Local**: Run models locally (Ollama only)
83
- - **BYOK**: Bring Your Own Key for cloud providers
84
- - **Managed**: Use ContractSpec-managed keys via API proxy
85
-
86
- ## API
87
-
88
- ### Types
89
-
90
- - `ProviderName` - Supported provider names
91
- - `ProviderMode` - local | byok | managed
92
- - `ProviderConfig` - Configuration for creating a provider
93
- - `Provider` - Provider interface with getModel()
94
- - `ModelInfo` - Model metadata (context window, capabilities)
95
-
96
- ### Functions
97
-
98
- - `createProvider(config)` - Create a provider instance
99
- - `createProviderFromEnv()` - Create from environment variables
100
- - `getAIProvider(config)` - Legacy compatibility function
101
- - `validateProvider(config)` - Check if provider is properly configured
102
- - `getRecommendedModels(provider)` - Get recommended models for a provider
103
- - `getAvailableProviders()` - List available providers with status
24
+ Import the root entrypoint from `@contractspec/lib.ai-providers`, or choose a documented subpath when you only need one part of the package surface.
25
+
26
+ ## Architecture
27
+
28
+ - `src/factory.ts` is part of the package's public or composition surface.
29
+ - `src/index.ts` is the root public barrel and package entrypoint.
30
+ - `src/legacy.ts` is part of the package's public or composition surface.
31
+ - `src/models.test.ts` is part of the package's public or composition surface.
32
+ - `src/models.ts` is part of the package's public or composition surface.
33
+ - `src/selector-types.ts` is part of the package's public or composition surface.
34
+ - `src/selector.ts` is part of the package's public or composition surface.
35
+ - `src/types.ts` is shared public type definitions.
36
+
37
+ ## Public Entry Points
38
+
39
+ - Export `.` resolves through `./src/index.ts`.
40
+ - Export `./factory` resolves through `./src/factory.ts`.
41
+ - Export `./legacy` resolves through `./src/legacy.ts`.
42
+ - Export `./models` resolves through `./src/models.ts`.
43
+ - Export `./selector` resolves through `./src/selector.ts`.
44
+ - Export `./selector-types` resolves through `./src/selector-types.ts`.
45
+ - Export `./types` resolves through `./src/types.ts`.
46
+ - Export `./validation` resolves through `./src/validation.ts`.
47
+
48
+ ## Local Commands
49
+
50
+ - `bun run dev` — contractspec-bun-build dev
51
+ - `bun run build` — bun run prebuild && bun run build:bundle && bun run build:types
52
+ - `bun run test` — bun test --pass-with-no-tests
53
+ - `bun run lint` — bun lint:fix
54
+ - `bun run lint:check` — biome check .
55
+ - `bun run lint:fix` — biome check --write --unsafe --only=nursery/useSortedClasses . && biome check --write .
56
+ - `bun run typecheck` — tsc --noEmit
57
+ - `bun run publish:pkg` — bun publish --tolerate-republish --ignore-scripts --verbose
58
+ - `bun run publish:pkg:canary` — bun publish:pkg --tag canary
59
+ - `bun run clean` — rimraf dist .turbo
60
+ - `bun run build:bundle` — contractspec-bun-build transpile
61
+ - `bun run build:types` — contractspec-bun-build types
62
+ - `bun run prebuild` — contractspec-bun-build prebuild
63
+
64
+ ## Recent Updates
65
+
66
+ - Replace eslint+prettier by biomejs to optimize speed.
67
+ - Add latest models and align defaults.
68
+ - Resolve lint, build, and type errors across nine packages.
69
+ - Add first-class transport, auth, versioning, and BYOK support across all integrations.
70
+ - Add AI provider ranking system with ranking-driven model selection.
71
+ - Add first-class mistral provider support.
72
+
73
+ ## Notes
74
+
75
+ - Provider interface is consumed by all AI-powered libs; breaking changes cascade widely.
76
+ - Adding new providers must not break existing factory signatures.
@@ -671,61 +671,73 @@ function getAvailableProviders() {
671
671
  });
672
672
  return providers;
673
673
  }
674
- // src/validation.ts
675
- async function validateProvider(config) {
676
- const provider = createProvider(config);
677
- return provider.validate();
678
- }
679
- function hasCredentials(provider) {
680
- switch (provider) {
681
- case "ollama":
682
- return true;
683
- case "openai":
684
- return Boolean(process.env.OPENAI_API_KEY);
685
- case "anthropic":
686
- return Boolean(process.env.ANTHROPIC_API_KEY);
687
- case "mistral":
688
- return Boolean(process.env.MISTRAL_API_KEY);
689
- case "gemini":
690
- return Boolean(process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY);
674
+
675
+ // src/legacy.ts
676
+ function mapLegacyProvider(legacy) {
677
+ switch (legacy) {
678
+ case "claude":
679
+ return "anthropic";
680
+ case "custom":
681
+ return "openai";
691
682
  default:
692
- return false;
683
+ return legacy;
693
684
  }
694
685
  }
695
- function getEnvVarName(provider) {
686
+ function getAIProvider(config) {
687
+ const provider = mapLegacyProvider(config.aiProvider);
688
+ let apiKey;
696
689
  switch (provider) {
697
- case "ollama":
698
- return null;
699
690
  case "openai":
700
- return "OPENAI_API_KEY";
691
+ apiKey = process.env.OPENAI_API_KEY;
692
+ break;
701
693
  case "anthropic":
702
- return "ANTHROPIC_API_KEY";
694
+ apiKey = process.env.ANTHROPIC_API_KEY;
695
+ break;
703
696
  case "mistral":
704
- return "MISTRAL_API_KEY";
697
+ apiKey = process.env.MISTRAL_API_KEY;
698
+ break;
705
699
  case "gemini":
706
- return "GOOGLE_API_KEY";
707
- default:
708
- return null;
700
+ apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
701
+ break;
709
702
  }
703
+ const instance = createProvider({
704
+ provider,
705
+ model: config.aiModel,
706
+ apiKey,
707
+ baseUrl: config.customEndpoint
708
+ });
709
+ return instance.getModel();
710
710
  }
711
- async function isOllamaRunning(baseUrl = "http://localhost:11434") {
712
- try {
713
- const response = await fetch(`${baseUrl}/api/tags`);
714
- return response.ok;
715
- } catch {
716
- return false;
711
+ async function validateProvider(config) {
712
+ const provider = mapLegacyProvider(config.aiProvider);
713
+ if (provider === "ollama") {
714
+ return { success: true };
717
715
  }
718
- }
719
- async function listOllamaModels(baseUrl = "http://localhost:11434") {
720
- try {
721
- const response = await fetch(`${baseUrl}/api/tags`);
722
- if (!response.ok)
723
- return [];
724
- const data = await response.json();
725
- return (data.models ?? []).map((m) => m.name);
726
- } catch {
727
- return [];
716
+ if (provider === "anthropic" && !process.env.ANTHROPIC_API_KEY) {
717
+ return {
718
+ success: false,
719
+ error: "ANTHROPIC_API_KEY environment variable not set"
720
+ };
721
+ }
722
+ if (provider === "openai" && !process.env.OPENAI_API_KEY) {
723
+ return {
724
+ success: false,
725
+ error: "OPENAI_API_KEY environment variable not set"
726
+ };
727
+ }
728
+ if (provider === "mistral" && !process.env.MISTRAL_API_KEY) {
729
+ return {
730
+ success: false,
731
+ error: "MISTRAL_API_KEY environment variable not set"
732
+ };
733
+ }
734
+ if (provider === "gemini" && !process.env.GOOGLE_API_KEY && !process.env.GEMINI_API_KEY) {
735
+ return {
736
+ success: false,
737
+ error: "GOOGLE_API_KEY or GEMINI_API_KEY environment variable not set"
738
+ };
728
739
  }
740
+ return { success: true };
729
741
  }
730
742
  // src/selector.ts
731
743
  function createModelSelector(options) {
@@ -895,77 +907,65 @@ function mergeConstraints(defaults, overrides) {
895
907
  return defaults;
896
908
  return { ...defaults, ...overrides };
897
909
  }
898
-
899
- // src/legacy.ts
900
- function mapLegacyProvider(legacy) {
901
- switch (legacy) {
902
- case "claude":
903
- return "anthropic";
904
- case "custom":
905
- return "openai";
910
+ // src/validation.ts
911
+ async function validateProvider2(config) {
912
+ const provider = createProvider(config);
913
+ return provider.validate();
914
+ }
915
+ function hasCredentials(provider) {
916
+ switch (provider) {
917
+ case "ollama":
918
+ return true;
919
+ case "openai":
920
+ return Boolean(process.env.OPENAI_API_KEY);
921
+ case "anthropic":
922
+ return Boolean(process.env.ANTHROPIC_API_KEY);
923
+ case "mistral":
924
+ return Boolean(process.env.MISTRAL_API_KEY);
925
+ case "gemini":
926
+ return Boolean(process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY);
906
927
  default:
907
- return legacy;
928
+ return false;
908
929
  }
909
930
  }
910
- function getAIProvider(config) {
911
- const provider = mapLegacyProvider(config.aiProvider);
912
- let apiKey;
931
+ function getEnvVarName(provider) {
913
932
  switch (provider) {
933
+ case "ollama":
934
+ return null;
914
935
  case "openai":
915
- apiKey = process.env.OPENAI_API_KEY;
916
- break;
936
+ return "OPENAI_API_KEY";
917
937
  case "anthropic":
918
- apiKey = process.env.ANTHROPIC_API_KEY;
919
- break;
938
+ return "ANTHROPIC_API_KEY";
920
939
  case "mistral":
921
- apiKey = process.env.MISTRAL_API_KEY;
922
- break;
940
+ return "MISTRAL_API_KEY";
923
941
  case "gemini":
924
- apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
925
- break;
942
+ return "GOOGLE_API_KEY";
943
+ default:
944
+ return null;
926
945
  }
927
- const instance = createProvider({
928
- provider,
929
- model: config.aiModel,
930
- apiKey,
931
- baseUrl: config.customEndpoint
932
- });
933
- return instance.getModel();
934
946
  }
935
- async function validateProvider2(config) {
936
- const provider = mapLegacyProvider(config.aiProvider);
937
- if (provider === "ollama") {
938
- return { success: true };
939
- }
940
- if (provider === "anthropic" && !process.env.ANTHROPIC_API_KEY) {
941
- return {
942
- success: false,
943
- error: "ANTHROPIC_API_KEY environment variable not set"
944
- };
945
- }
946
- if (provider === "openai" && !process.env.OPENAI_API_KEY) {
947
- return {
948
- success: false,
949
- error: "OPENAI_API_KEY environment variable not set"
950
- };
951
- }
952
- if (provider === "mistral" && !process.env.MISTRAL_API_KEY) {
953
- return {
954
- success: false,
955
- error: "MISTRAL_API_KEY environment variable not set"
956
- };
947
+ async function isOllamaRunning(baseUrl = "http://localhost:11434") {
948
+ try {
949
+ const response = await fetch(`${baseUrl}/api/tags`);
950
+ return response.ok;
951
+ } catch {
952
+ return false;
957
953
  }
958
- if (provider === "gemini" && !process.env.GOOGLE_API_KEY && !process.env.GEMINI_API_KEY) {
959
- return {
960
- success: false,
961
- error: "GOOGLE_API_KEY or GEMINI_API_KEY environment variable not set"
962
- };
954
+ }
955
+ async function listOllamaModels(baseUrl = "http://localhost:11434") {
956
+ try {
957
+ const response = await fetch(`${baseUrl}/api/tags`);
958
+ if (!response.ok)
959
+ return [];
960
+ const data = await response.json();
961
+ return (data.models ?? []).map((m) => m.name);
962
+ } catch {
963
+ return [];
963
964
  }
964
- return { success: true };
965
965
  }
966
966
  export {
967
- validateProvider,
968
- validateProvider2 as validateLegacyProvider,
967
+ validateProvider2 as validateProvider,
968
+ validateProvider as validateLegacyProvider,
969
969
  listOllamaModels,
970
970
  isOllamaRunning,
971
971
  hasCredentials,
package/dist/factory.d.ts CHANGED
@@ -1,3 +1,6 @@
1
+ /**
2
+ * Provider factory and creation utilities
3
+ */
1
4
  import type { Provider, ProviderAvailability, ProviderConfig } from './types';
2
5
  /**
3
6
  * Create a provider from configuration
package/dist/index.d.ts CHANGED
@@ -3,11 +3,11 @@
3
3
  *
4
4
  * Unified AI provider abstraction for ContractSpec applications.
5
5
  */
6
- export * from './types';
7
6
  export * from './factory';
7
+ export { getAIProvider, validateProvider as validateLegacyProvider, } from './legacy';
8
8
  export * from './models';
9
- export * from './validation';
10
- export * from './selector-types';
11
- export { createModelSelector } from './selector';
12
9
  export type { ModelSelectorOptions } from './selector';
13
- export { getAIProvider, validateProvider as validateLegacyProvider, } from './legacy';
10
+ export { createModelSelector } from './selector';
11
+ export * from './selector-types';
12
+ export * from './types';
13
+ export * from './validation';
package/dist/index.js CHANGED
@@ -672,61 +672,73 @@ function getAvailableProviders() {
672
672
  });
673
673
  return providers;
674
674
  }
675
- // src/validation.ts
676
- async function validateProvider(config) {
677
- const provider = createProvider(config);
678
- return provider.validate();
679
- }
680
- function hasCredentials(provider) {
681
- switch (provider) {
682
- case "ollama":
683
- return true;
684
- case "openai":
685
- return Boolean(process.env.OPENAI_API_KEY);
686
- case "anthropic":
687
- return Boolean(process.env.ANTHROPIC_API_KEY);
688
- case "mistral":
689
- return Boolean(process.env.MISTRAL_API_KEY);
690
- case "gemini":
691
- return Boolean(process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY);
675
+
676
+ // src/legacy.ts
677
+ function mapLegacyProvider(legacy) {
678
+ switch (legacy) {
679
+ case "claude":
680
+ return "anthropic";
681
+ case "custom":
682
+ return "openai";
692
683
  default:
693
- return false;
684
+ return legacy;
694
685
  }
695
686
  }
696
- function getEnvVarName(provider) {
687
+ function getAIProvider(config) {
688
+ const provider = mapLegacyProvider(config.aiProvider);
689
+ let apiKey;
697
690
  switch (provider) {
698
- case "ollama":
699
- return null;
700
691
  case "openai":
701
- return "OPENAI_API_KEY";
692
+ apiKey = process.env.OPENAI_API_KEY;
693
+ break;
702
694
  case "anthropic":
703
- return "ANTHROPIC_API_KEY";
695
+ apiKey = process.env.ANTHROPIC_API_KEY;
696
+ break;
704
697
  case "mistral":
705
- return "MISTRAL_API_KEY";
698
+ apiKey = process.env.MISTRAL_API_KEY;
699
+ break;
706
700
  case "gemini":
707
- return "GOOGLE_API_KEY";
708
- default:
709
- return null;
701
+ apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
702
+ break;
710
703
  }
704
+ const instance = createProvider({
705
+ provider,
706
+ model: config.aiModel,
707
+ apiKey,
708
+ baseUrl: config.customEndpoint
709
+ });
710
+ return instance.getModel();
711
711
  }
712
- async function isOllamaRunning(baseUrl = "http://localhost:11434") {
713
- try {
714
- const response = await fetch(`${baseUrl}/api/tags`);
715
- return response.ok;
716
- } catch {
717
- return false;
712
+ async function validateProvider(config) {
713
+ const provider = mapLegacyProvider(config.aiProvider);
714
+ if (provider === "ollama") {
715
+ return { success: true };
718
716
  }
719
- }
720
- async function listOllamaModels(baseUrl = "http://localhost:11434") {
721
- try {
722
- const response = await fetch(`${baseUrl}/api/tags`);
723
- if (!response.ok)
724
- return [];
725
- const data = await response.json();
726
- return (data.models ?? []).map((m) => m.name);
727
- } catch {
728
- return [];
717
+ if (provider === "anthropic" && !process.env.ANTHROPIC_API_KEY) {
718
+ return {
719
+ success: false,
720
+ error: "ANTHROPIC_API_KEY environment variable not set"
721
+ };
722
+ }
723
+ if (provider === "openai" && !process.env.OPENAI_API_KEY) {
724
+ return {
725
+ success: false,
726
+ error: "OPENAI_API_KEY environment variable not set"
727
+ };
728
+ }
729
+ if (provider === "mistral" && !process.env.MISTRAL_API_KEY) {
730
+ return {
731
+ success: false,
732
+ error: "MISTRAL_API_KEY environment variable not set"
733
+ };
734
+ }
735
+ if (provider === "gemini" && !process.env.GOOGLE_API_KEY && !process.env.GEMINI_API_KEY) {
736
+ return {
737
+ success: false,
738
+ error: "GOOGLE_API_KEY or GEMINI_API_KEY environment variable not set"
739
+ };
729
740
  }
741
+ return { success: true };
730
742
  }
731
743
  // src/selector.ts
732
744
  function createModelSelector(options) {
@@ -896,77 +908,65 @@ function mergeConstraints(defaults, overrides) {
896
908
  return defaults;
897
909
  return { ...defaults, ...overrides };
898
910
  }
899
-
900
- // src/legacy.ts
901
- function mapLegacyProvider(legacy) {
902
- switch (legacy) {
903
- case "claude":
904
- return "anthropic";
905
- case "custom":
906
- return "openai";
911
+ // src/validation.ts
912
+ async function validateProvider2(config) {
913
+ const provider = createProvider(config);
914
+ return provider.validate();
915
+ }
916
+ function hasCredentials(provider) {
917
+ switch (provider) {
918
+ case "ollama":
919
+ return true;
920
+ case "openai":
921
+ return Boolean(process.env.OPENAI_API_KEY);
922
+ case "anthropic":
923
+ return Boolean(process.env.ANTHROPIC_API_KEY);
924
+ case "mistral":
925
+ return Boolean(process.env.MISTRAL_API_KEY);
926
+ case "gemini":
927
+ return Boolean(process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY);
907
928
  default:
908
- return legacy;
929
+ return false;
909
930
  }
910
931
  }
911
- function getAIProvider(config) {
912
- const provider = mapLegacyProvider(config.aiProvider);
913
- let apiKey;
932
+ function getEnvVarName(provider) {
914
933
  switch (provider) {
934
+ case "ollama":
935
+ return null;
915
936
  case "openai":
916
- apiKey = process.env.OPENAI_API_KEY;
917
- break;
937
+ return "OPENAI_API_KEY";
918
938
  case "anthropic":
919
- apiKey = process.env.ANTHROPIC_API_KEY;
920
- break;
939
+ return "ANTHROPIC_API_KEY";
921
940
  case "mistral":
922
- apiKey = process.env.MISTRAL_API_KEY;
923
- break;
941
+ return "MISTRAL_API_KEY";
924
942
  case "gemini":
925
- apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
926
- break;
943
+ return "GOOGLE_API_KEY";
944
+ default:
945
+ return null;
927
946
  }
928
- const instance = createProvider({
929
- provider,
930
- model: config.aiModel,
931
- apiKey,
932
- baseUrl: config.customEndpoint
933
- });
934
- return instance.getModel();
935
947
  }
936
- async function validateProvider2(config) {
937
- const provider = mapLegacyProvider(config.aiProvider);
938
- if (provider === "ollama") {
939
- return { success: true };
940
- }
941
- if (provider === "anthropic" && !process.env.ANTHROPIC_API_KEY) {
942
- return {
943
- success: false,
944
- error: "ANTHROPIC_API_KEY environment variable not set"
945
- };
946
- }
947
- if (provider === "openai" && !process.env.OPENAI_API_KEY) {
948
- return {
949
- success: false,
950
- error: "OPENAI_API_KEY environment variable not set"
951
- };
952
- }
953
- if (provider === "mistral" && !process.env.MISTRAL_API_KEY) {
954
- return {
955
- success: false,
956
- error: "MISTRAL_API_KEY environment variable not set"
957
- };
948
+ async function isOllamaRunning(baseUrl = "http://localhost:11434") {
949
+ try {
950
+ const response = await fetch(`${baseUrl}/api/tags`);
951
+ return response.ok;
952
+ } catch {
953
+ return false;
958
954
  }
959
- if (provider === "gemini" && !process.env.GOOGLE_API_KEY && !process.env.GEMINI_API_KEY) {
960
- return {
961
- success: false,
962
- error: "GOOGLE_API_KEY or GEMINI_API_KEY environment variable not set"
963
- };
955
+ }
956
+ async function listOllamaModels(baseUrl = "http://localhost:11434") {
957
+ try {
958
+ const response = await fetch(`${baseUrl}/api/tags`);
959
+ if (!response.ok)
960
+ return [];
961
+ const data = await response.json();
962
+ return (data.models ?? []).map((m) => m.name);
963
+ } catch {
964
+ return [];
964
965
  }
965
- return { success: true };
966
966
  }
967
967
  export {
968
- validateProvider,
969
- validateProvider2 as validateLegacyProvider,
968
+ validateProvider2 as validateProvider,
969
+ validateProvider as validateLegacyProvider,
970
970
  listOllamaModels,
971
971
  isOllamaRunning,
972
972
  hasCredentials,
package/dist/legacy.d.ts CHANGED
@@ -5,8 +5,8 @@
5
5
  * that uses the old provider API from contractspec-workspace.
6
6
  */
7
7
  import type { LanguageModel } from 'ai';
8
- import type { LegacyConfig } from './types';
9
8
  import { getRecommendedModels as getModels } from './models';
9
+ import type { LegacyConfig } from './types';
10
10
  /**
11
11
  * Get AI provider from legacy Config type
12
12
  *
@@ -671,61 +671,73 @@ function getAvailableProviders() {
671
671
  });
672
672
  return providers;
673
673
  }
674
- // src/validation.ts
675
- async function validateProvider(config) {
676
- const provider = createProvider(config);
677
- return provider.validate();
678
- }
679
- function hasCredentials(provider) {
680
- switch (provider) {
681
- case "ollama":
682
- return true;
683
- case "openai":
684
- return Boolean(process.env.OPENAI_API_KEY);
685
- case "anthropic":
686
- return Boolean(process.env.ANTHROPIC_API_KEY);
687
- case "mistral":
688
- return Boolean(process.env.MISTRAL_API_KEY);
689
- case "gemini":
690
- return Boolean(process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY);
674
+
675
+ // src/legacy.ts
676
+ function mapLegacyProvider(legacy) {
677
+ switch (legacy) {
678
+ case "claude":
679
+ return "anthropic";
680
+ case "custom":
681
+ return "openai";
691
682
  default:
692
- return false;
683
+ return legacy;
693
684
  }
694
685
  }
695
- function getEnvVarName(provider) {
686
+ function getAIProvider(config) {
687
+ const provider = mapLegacyProvider(config.aiProvider);
688
+ let apiKey;
696
689
  switch (provider) {
697
- case "ollama":
698
- return null;
699
690
  case "openai":
700
- return "OPENAI_API_KEY";
691
+ apiKey = process.env.OPENAI_API_KEY;
692
+ break;
701
693
  case "anthropic":
702
- return "ANTHROPIC_API_KEY";
694
+ apiKey = process.env.ANTHROPIC_API_KEY;
695
+ break;
703
696
  case "mistral":
704
- return "MISTRAL_API_KEY";
697
+ apiKey = process.env.MISTRAL_API_KEY;
698
+ break;
705
699
  case "gemini":
706
- return "GOOGLE_API_KEY";
707
- default:
708
- return null;
700
+ apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
701
+ break;
709
702
  }
703
+ const instance = createProvider({
704
+ provider,
705
+ model: config.aiModel,
706
+ apiKey,
707
+ baseUrl: config.customEndpoint
708
+ });
709
+ return instance.getModel();
710
710
  }
711
- async function isOllamaRunning(baseUrl = "http://localhost:11434") {
712
- try {
713
- const response = await fetch(`${baseUrl}/api/tags`);
714
- return response.ok;
715
- } catch {
716
- return false;
711
+ async function validateProvider(config) {
712
+ const provider = mapLegacyProvider(config.aiProvider);
713
+ if (provider === "ollama") {
714
+ return { success: true };
717
715
  }
718
- }
719
- async function listOllamaModels(baseUrl = "http://localhost:11434") {
720
- try {
721
- const response = await fetch(`${baseUrl}/api/tags`);
722
- if (!response.ok)
723
- return [];
724
- const data = await response.json();
725
- return (data.models ?? []).map((m) => m.name);
726
- } catch {
727
- return [];
716
+ if (provider === "anthropic" && !process.env.ANTHROPIC_API_KEY) {
717
+ return {
718
+ success: false,
719
+ error: "ANTHROPIC_API_KEY environment variable not set"
720
+ };
721
+ }
722
+ if (provider === "openai" && !process.env.OPENAI_API_KEY) {
723
+ return {
724
+ success: false,
725
+ error: "OPENAI_API_KEY environment variable not set"
726
+ };
727
+ }
728
+ if (provider === "mistral" && !process.env.MISTRAL_API_KEY) {
729
+ return {
730
+ success: false,
731
+ error: "MISTRAL_API_KEY environment variable not set"
732
+ };
733
+ }
734
+ if (provider === "gemini" && !process.env.GOOGLE_API_KEY && !process.env.GEMINI_API_KEY) {
735
+ return {
736
+ success: false,
737
+ error: "GOOGLE_API_KEY or GEMINI_API_KEY environment variable not set"
738
+ };
728
739
  }
740
+ return { success: true };
729
741
  }
730
742
  // src/selector.ts
731
743
  function createModelSelector(options) {
@@ -895,77 +907,65 @@ function mergeConstraints(defaults, overrides) {
895
907
  return defaults;
896
908
  return { ...defaults, ...overrides };
897
909
  }
898
-
899
- // src/legacy.ts
900
- function mapLegacyProvider(legacy) {
901
- switch (legacy) {
902
- case "claude":
903
- return "anthropic";
904
- case "custom":
905
- return "openai";
910
+ // src/validation.ts
911
+ async function validateProvider2(config) {
912
+ const provider = createProvider(config);
913
+ return provider.validate();
914
+ }
915
+ function hasCredentials(provider) {
916
+ switch (provider) {
917
+ case "ollama":
918
+ return true;
919
+ case "openai":
920
+ return Boolean(process.env.OPENAI_API_KEY);
921
+ case "anthropic":
922
+ return Boolean(process.env.ANTHROPIC_API_KEY);
923
+ case "mistral":
924
+ return Boolean(process.env.MISTRAL_API_KEY);
925
+ case "gemini":
926
+ return Boolean(process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY);
906
927
  default:
907
- return legacy;
928
+ return false;
908
929
  }
909
930
  }
910
- function getAIProvider(config) {
911
- const provider = mapLegacyProvider(config.aiProvider);
912
- let apiKey;
931
+ function getEnvVarName(provider) {
913
932
  switch (provider) {
933
+ case "ollama":
934
+ return null;
914
935
  case "openai":
915
- apiKey = process.env.OPENAI_API_KEY;
916
- break;
936
+ return "OPENAI_API_KEY";
917
937
  case "anthropic":
918
- apiKey = process.env.ANTHROPIC_API_KEY;
919
- break;
938
+ return "ANTHROPIC_API_KEY";
920
939
  case "mistral":
921
- apiKey = process.env.MISTRAL_API_KEY;
922
- break;
940
+ return "MISTRAL_API_KEY";
923
941
  case "gemini":
924
- apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
925
- break;
942
+ return "GOOGLE_API_KEY";
943
+ default:
944
+ return null;
926
945
  }
927
- const instance = createProvider({
928
- provider,
929
- model: config.aiModel,
930
- apiKey,
931
- baseUrl: config.customEndpoint
932
- });
933
- return instance.getModel();
934
946
  }
935
- async function validateProvider2(config) {
936
- const provider = mapLegacyProvider(config.aiProvider);
937
- if (provider === "ollama") {
938
- return { success: true };
939
- }
940
- if (provider === "anthropic" && !process.env.ANTHROPIC_API_KEY) {
941
- return {
942
- success: false,
943
- error: "ANTHROPIC_API_KEY environment variable not set"
944
- };
945
- }
946
- if (provider === "openai" && !process.env.OPENAI_API_KEY) {
947
- return {
948
- success: false,
949
- error: "OPENAI_API_KEY environment variable not set"
950
- };
951
- }
952
- if (provider === "mistral" && !process.env.MISTRAL_API_KEY) {
953
- return {
954
- success: false,
955
- error: "MISTRAL_API_KEY environment variable not set"
956
- };
947
+ async function isOllamaRunning(baseUrl = "http://localhost:11434") {
948
+ try {
949
+ const response = await fetch(`${baseUrl}/api/tags`);
950
+ return response.ok;
951
+ } catch {
952
+ return false;
957
953
  }
958
- if (provider === "gemini" && !process.env.GOOGLE_API_KEY && !process.env.GEMINI_API_KEY) {
959
- return {
960
- success: false,
961
- error: "GOOGLE_API_KEY or GEMINI_API_KEY environment variable not set"
962
- };
954
+ }
955
+ async function listOllamaModels(baseUrl = "http://localhost:11434") {
956
+ try {
957
+ const response = await fetch(`${baseUrl}/api/tags`);
958
+ if (!response.ok)
959
+ return [];
960
+ const data = await response.json();
961
+ return (data.models ?? []).map((m) => m.name);
962
+ } catch {
963
+ return [];
963
964
  }
964
- return { success: true };
965
965
  }
966
966
  export {
967
- validateProvider,
968
- validateProvider2 as validateLegacyProvider,
967
+ validateProvider2 as validateProvider,
968
+ validateProvider as validateLegacyProvider,
969
969
  listOllamaModels,
970
970
  isOllamaRunning,
971
971
  hasCredentials,
@@ -1,6 +1,6 @@
1
+ import type { BenchmarkDimension, DimensionWeightConfig } from '@contractspec/lib.provider-ranking';
1
2
  import type { LanguageModel } from 'ai';
2
3
  import type { ModelCapabilities, ProviderName } from './types';
3
- import type { BenchmarkDimension, DimensionWeightConfig } from '@contractspec/lib.provider-ranking';
4
4
  export interface ModelConstraints {
5
5
  maxCostPerMillionInput?: number;
6
6
  maxCostPerMillionOutput?: number;
@@ -1,6 +1,6 @@
1
1
  import type { ProviderRankingStore } from '@contractspec/lib.provider-ranking';
2
- import type { ModelInfo } from './types';
3
2
  import type { ModelConstraints, ModelSelectionContext, ModelSelectionResult, ModelSelector } from './selector-types';
3
+ import type { ModelInfo } from './types';
4
4
  export interface ModelSelectorOptions {
5
5
  store: ProviderRankingStore;
6
6
  fallbackModels?: ModelInfo[];
@@ -13,4 +13,4 @@ export interface ModelSelectorOptions {
13
13
  * MultiObjectiveSelector when `priorities` are set.
14
14
  */
15
15
  export declare function createModelSelector(options: ModelSelectorOptions): ModelSelector;
16
- export type { ModelSelector, ModelSelectionContext, ModelSelectionResult, ModelConstraints, };
16
+ export type { ModelConstraints, ModelSelectionContext, ModelSelectionResult, ModelSelector, };
@@ -1,7 +1,7 @@
1
1
  /**
2
2
  * Provider validation utilities
3
3
  */
4
- import type { ProviderName, ProviderConfig } from './types';
4
+ import type { ProviderConfig, ProviderName } from './types';
5
5
  /**
6
6
  * Validation result
7
7
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@contractspec/lib.ai-providers",
3
- "version": "3.7.6",
3
+ "version": "3.7.9",
4
4
  "description": "Unified AI provider abstraction layer",
5
5
  "keywords": [
6
6
  "contractspec",
@@ -26,8 +26,8 @@
26
26
  "dev": "contractspec-bun-build dev",
27
27
  "clean": "rimraf dist .turbo",
28
28
  "lint": "bun lint:fix",
29
- "lint:fix": "eslint src --fix",
30
- "lint:check": "eslint src",
29
+ "lint:fix": "biome check --write --unsafe --only=nursery/useSortedClasses . && biome check --write .",
30
+ "lint:check": "biome check .",
31
31
  "test": "bun test --pass-with-no-tests",
32
32
  "prebuild": "contractspec-bun-build prebuild",
33
33
  "typecheck": "tsc --noEmit"
@@ -37,15 +37,15 @@
37
37
  "@ai-sdk/google": "3.0.43",
38
38
  "@ai-sdk/mistral": "3.0.24",
39
39
  "@ai-sdk/openai": "3.0.41",
40
- "@contractspec/lib.provider-ranking": "0.7.6",
40
+ "@contractspec/lib.provider-ranking": "0.7.9",
41
41
  "ai": "6.0.116",
42
42
  "ollama-ai-provider": "^1.2.0",
43
43
  "zod": "^4.3.5"
44
44
  },
45
45
  "devDependencies": {
46
- "@contractspec/tool.typescript": "3.7.6",
46
+ "@contractspec/tool.typescript": "3.7.9",
47
47
  "typescript": "^5.9.3",
48
- "@contractspec/tool.bun": "3.7.6"
48
+ "@contractspec/tool.bun": "3.7.9"
49
49
  },
50
50
  "exports": {
51
51
  ".": {