cognitive-modules 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. package/README.md +165 -0
  2. package/dist/cli.d.ts +16 -0
  3. package/dist/cli.js +335 -0
  4. package/dist/commands/add.d.ts +34 -0
  5. package/dist/commands/add.js +229 -0
  6. package/dist/commands/index.d.ts +11 -0
  7. package/dist/commands/index.js +11 -0
  8. package/dist/commands/init.d.ts +5 -0
  9. package/dist/commands/init.js +78 -0
  10. package/dist/commands/list.d.ts +5 -0
  11. package/dist/commands/list.js +28 -0
  12. package/dist/commands/pipe.d.ts +9 -0
  13. package/dist/commands/pipe.js +59 -0
  14. package/dist/commands/remove.d.ts +10 -0
  15. package/dist/commands/remove.js +47 -0
  16. package/dist/commands/run.d.ts +12 -0
  17. package/dist/commands/run.js +65 -0
  18. package/dist/commands/update.d.ts +14 -0
  19. package/dist/commands/update.js +105 -0
  20. package/dist/commands/versions.d.ts +13 -0
  21. package/dist/commands/versions.js +60 -0
  22. package/dist/index.d.ts +9 -0
  23. package/dist/index.js +11 -0
  24. package/dist/modules/index.d.ts +5 -0
  25. package/dist/modules/index.js +5 -0
  26. package/dist/modules/loader.d.ts +12 -0
  27. package/dist/modules/loader.js +197 -0
  28. package/dist/modules/runner.d.ts +12 -0
  29. package/dist/modules/runner.js +229 -0
  30. package/dist/providers/anthropic.d.ts +14 -0
  31. package/dist/providers/anthropic.js +70 -0
  32. package/dist/providers/base.d.ts +11 -0
  33. package/dist/providers/base.js +19 -0
  34. package/dist/providers/deepseek.d.ts +14 -0
  35. package/dist/providers/deepseek.js +66 -0
  36. package/dist/providers/gemini.d.ts +19 -0
  37. package/dist/providers/gemini.js +94 -0
  38. package/dist/providers/index.d.ts +19 -0
  39. package/dist/providers/index.js +74 -0
  40. package/dist/providers/minimax.d.ts +14 -0
  41. package/dist/providers/minimax.js +64 -0
  42. package/dist/providers/moonshot.d.ts +14 -0
  43. package/dist/providers/moonshot.js +65 -0
  44. package/dist/providers/ollama.d.ts +13 -0
  45. package/dist/providers/ollama.js +64 -0
  46. package/dist/providers/openai.d.ts +14 -0
  47. package/dist/providers/openai.js +67 -0
  48. package/dist/providers/qwen.d.ts +14 -0
  49. package/dist/providers/qwen.js +65 -0
  50. package/dist/types.d.ts +136 -0
  51. package/dist/types.js +5 -0
  52. package/package.json +48 -0
  53. package/src/cli.ts +375 -0
  54. package/src/commands/add.ts +315 -0
  55. package/src/commands/index.ts +12 -0
  56. package/src/commands/init.ts +94 -0
  57. package/src/commands/list.ts +33 -0
  58. package/src/commands/pipe.ts +76 -0
  59. package/src/commands/remove.ts +57 -0
  60. package/src/commands/run.ts +80 -0
  61. package/src/commands/update.ts +130 -0
  62. package/src/commands/versions.ts +79 -0
  63. package/src/index.ts +44 -0
  64. package/src/modules/index.ts +6 -0
  65. package/src/modules/loader.ts +219 -0
  66. package/src/modules/runner.ts +278 -0
  67. package/src/providers/anthropic.ts +89 -0
  68. package/src/providers/base.ts +29 -0
  69. package/src/providers/deepseek.ts +83 -0
  70. package/src/providers/gemini.ts +117 -0
  71. package/src/providers/index.ts +78 -0
  72. package/src/providers/minimax.ts +81 -0
  73. package/src/providers/moonshot.ts +82 -0
  74. package/src/providers/ollama.ts +83 -0
  75. package/src/providers/openai.ts +84 -0
  76. package/src/providers/qwen.ts +82 -0
  77. package/src/types.ts +184 -0
  78. package/tsconfig.json +17 -0
package/dist/providers/anthropic.js
@@ -0,0 +1,70 @@
+/**
+ * Anthropic Provider - Claude API
+ */
+import { BaseProvider } from './base.js';
+export class AnthropicProvider extends BaseProvider {
+    name = 'anthropic';
+    apiKey;
+    model;
+    baseUrl = 'https://api.anthropic.com/v1';
+    constructor(apiKey, model = 'claude-sonnet-4-5-20250929') {
+        super();
+        this.apiKey = apiKey || process.env.ANTHROPIC_API_KEY || '';
+        this.model = model;
+    }
+    isConfigured() {
+        return !!this.apiKey;
+    }
+    async invoke(params) {
+        if (!this.isConfigured()) {
+            throw new Error('Anthropic API key not configured. Set ANTHROPIC_API_KEY environment variable.');
+        }
+        const url = `${this.baseUrl}/messages`;
+        // Extract system message
+        const systemMessage = params.messages.find(m => m.role === 'system');
+        const otherMessages = params.messages.filter(m => m.role !== 'system');
+        // Add JSON schema instruction if provided
+        let messages = otherMessages;
+        if (params.jsonSchema) {
+            const lastUserIdx = messages.findLastIndex(m => m.role === 'user');
+            if (lastUserIdx >= 0) {
+                messages = [...messages];
+                messages[lastUserIdx] = {
+                    ...messages[lastUserIdx],
+                    content: messages[lastUserIdx].content + this.buildJsonPrompt(params.jsonSchema),
+                };
+            }
+        }
+        const body = {
+            model: this.model,
+            messages: messages.map(m => ({ role: m.role, content: m.content })),
+            max_tokens: params.maxTokens ?? 4096,
+        };
+        if (systemMessage) {
+            body.system = systemMessage.content;
+        }
+        const response = await fetch(url, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'x-api-key': this.apiKey,
+                'anthropic-version': '2023-06-01',
+            },
+            body: JSON.stringify(body),
+        });
+        if (!response.ok) {
+            const error = await response.text();
+            throw new Error(`Anthropic API error: ${response.status} - ${error}`);
+        }
+        const data = await response.json();
+        const content = data.content?.[0]?.text || '';
+        return {
+            content,
+            usage: data.usage ? {
+                promptTokens: data.usage.input_tokens || 0,
+                completionTokens: data.usage.output_tokens || 0,
+                totalTokens: (data.usage.input_tokens || 0) + (data.usage.output_tokens || 0),
+            } : undefined,
+        };
+    }
+}
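
The Anthropic client above lifts any system message into the top-level `system` field and, when a `jsonSchema` is supplied, appends a prompt-level JSON instruction to the last user message. A minimal usage sketch follows; it is not part of the package, and the bare-specifier import assumes the package's root entry re-exports the provider classes (otherwise import from the dist/providers path).

```ts
// Hypothetical usage sketch of AnthropicProvider.invoke with a JSON schema.
import { AnthropicProvider } from 'cognitive-modules'; // assumption: root entry re-exports providers

const provider = new AnthropicProvider(); // falls back to ANTHROPIC_API_KEY

const result = await provider.invoke({
  messages: [
    { role: 'system', content: 'You are a terse classifier.' }, // becomes the top-level `system` field
    { role: 'user', content: 'Classify the sentiment of: "the build finally passed"' },
  ],
  maxTokens: 256,
  // With jsonSchema set, buildJsonPrompt() is appended to the last user message.
  jsonSchema: {
    type: 'object',
    properties: { sentiment: { type: 'string' }, confidence: { type: 'number' } },
    required: ['sentiment'],
  },
});

console.log(result.content, result.usage?.totalTokens);
```
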
package/dist/providers/base.d.ts
@@ -0,0 +1,11 @@
+/**
+ * Base Provider - Abstract class for all LLM providers
+ */
+import type { Provider, InvokeParams, InvokeResult } from '../types.js';
+export declare abstract class BaseProvider implements Provider {
+    abstract name: string;
+    abstract invoke(params: InvokeParams): Promise<InvokeResult>;
+    abstract isConfigured(): boolean;
+    protected buildJsonPrompt(schema: object): string;
+    protected parseJsonResponse(content: string): unknown;
+}
package/dist/providers/base.js
@@ -0,0 +1,19 @@
+/**
+ * Base Provider - Abstract class for all LLM providers
+ */
+export class BaseProvider {
+    buildJsonPrompt(schema) {
+        return `\n\nYou MUST respond with valid JSON matching this schema:\n${JSON.stringify(schema, null, 2)}\n\nRespond with ONLY the JSON, no markdown code blocks.`;
+    }
+    parseJsonResponse(content) {
+        // Try to extract JSON from markdown code blocks
+        const jsonMatch = content.match(/```(?:json)?\s*([\s\S]*?)\s*```/);
+        const jsonStr = jsonMatch ? jsonMatch[1] : content;
+        try {
+            return JSON.parse(jsonStr.trim());
+        }
+        catch {
+            throw new Error(`Failed to parse JSON response: ${content.substring(0, 200)}`);
+        }
+    }
+}
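
BaseProvider centralizes the two JSON helpers every concrete provider reuses: buildJsonPrompt appends the "respond with valid JSON matching this schema" instruction, and parseJsonResponse strips an optional fenced json block before JSON.parse. Below is a hedged sketch of a custom provider built on those hooks; EchoProvider is hypothetical, and the bare-specifier imports assume the package root re-exports BaseProvider and the types.

```ts
import { BaseProvider } from 'cognitive-modules';          // assumption: re-exported at the root
import type { InvokeParams, InvokeResult } from 'cognitive-modules';

// Hypothetical provider: echoes the prompt instead of calling an API.
class EchoProvider extends BaseProvider {
  name = 'echo';

  isConfigured(): boolean {
    return true; // nothing to configure
  }

  async invoke(params: InvokeParams): Promise<InvokeResult> {
    const last = params.messages[params.messages.length - 1];
    // Reuse the shared helper to append the JSON-schema instruction, exactly as
    // the HTTP-backed providers do before sending their requests.
    const prompt = params.jsonSchema
      ? last.content + this.buildJsonPrompt(params.jsonSchema)
      : last.content;
    // parseJsonResponse tolerates replies wrapped in a fenced json block and
    // throws with a 200-character excerpt when parsing fails.
    const parsed = this.parseJsonResponse('{"echo": true}');
    return { content: `${prompt.length} chars in, parsed: ${JSON.stringify(parsed)}` };
  }
}

console.log((await new EchoProvider().invoke({ messages: [{ role: 'user', content: 'hi' }] })).content);
```
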
package/dist/providers/deepseek.d.ts
@@ -0,0 +1,14 @@
+/**
+ * DeepSeek Provider - DeepSeek API
+ */
+import { BaseProvider } from './base.js';
+import type { InvokeParams, InvokeResult } from '../types.js';
+export declare class DeepSeekProvider extends BaseProvider {
+    name: string;
+    private apiKey;
+    private model;
+    private baseUrl;
+    constructor(apiKey?: string, model?: string);
+    isConfigured(): boolean;
+    invoke(params: InvokeParams): Promise<InvokeResult>;
+}
package/dist/providers/deepseek.js
@@ -0,0 +1,66 @@
+/**
+ * DeepSeek Provider - DeepSeek API
+ */
+import { BaseProvider } from './base.js';
+export class DeepSeekProvider extends BaseProvider {
+    name = 'deepseek';
+    apiKey;
+    model;
+    baseUrl = 'https://api.deepseek.com/v1';
+    constructor(apiKey, model = 'deepseek-chat') {
+        // deepseek-chat automatically maps to the latest DeepSeek-V3.2
+        super();
+        this.apiKey = apiKey || process.env.DEEPSEEK_API_KEY || '';
+        this.model = model;
+    }
+    isConfigured() {
+        return !!this.apiKey;
+    }
+    async invoke(params) {
+        if (!this.isConfigured()) {
+            throw new Error('DeepSeek API key not configured. Set DEEPSEEK_API_KEY environment variable.');
+        }
+        const url = `${this.baseUrl}/chat/completions`;
+        const body = {
+            model: this.model,
+            messages: params.messages.map(m => ({ role: m.role, content: m.content })),
+            temperature: params.temperature ?? 0.7,
+            max_tokens: params.maxTokens ?? 4096,
+        };
+        // Add JSON mode if schema provided
+        if (params.jsonSchema) {
+            body.response_format = { type: 'json_object' };
+            const lastUserIdx = params.messages.findLastIndex(m => m.role === 'user');
+            if (lastUserIdx >= 0) {
+                const messages = [...params.messages];
+                messages[lastUserIdx] = {
+                    ...messages[lastUserIdx],
+                    content: messages[lastUserIdx].content + this.buildJsonPrompt(params.jsonSchema),
+                };
+                body.messages = messages.map(m => ({ role: m.role, content: m.content }));
+            }
+        }
+        const response = await fetch(url, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'Authorization': `Bearer ${this.apiKey}`,
+            },
+            body: JSON.stringify(body),
+        });
+        if (!response.ok) {
+            const error = await response.text();
+            throw new Error(`DeepSeek API error: ${response.status} - ${error}`);
+        }
+        const data = await response.json();
+        const content = data.choices?.[0]?.message?.content || '';
+        return {
+            content,
+            usage: data.usage ? {
+                promptTokens: data.usage.prompt_tokens || 0,
+                completionTokens: data.usage.completion_tokens || 0,
+                totalTokens: data.usage.total_tokens || 0,
+            } : undefined,
+        };
+    }
+}
package/dist/providers/gemini.d.ts
@@ -0,0 +1,19 @@
+/**
+ * Gemini Provider - Google Gemini API
+ */
+import { BaseProvider } from './base.js';
+import type { InvokeParams, InvokeResult } from '../types.js';
+export declare class GeminiProvider extends BaseProvider {
+    name: string;
+    private apiKey;
+    private model;
+    private baseUrl;
+    constructor(apiKey?: string, model?: string);
+    isConfigured(): boolean;
+    /**
+     * Clean JSON Schema for Gemini API compatibility
+     * Removes unsupported fields like additionalProperties
+     */
+    private cleanSchemaForGemini;
+    invoke(params: InvokeParams): Promise<InvokeResult>;
+}
package/dist/providers/gemini.js
@@ -0,0 +1,94 @@
+/**
+ * Gemini Provider - Google Gemini API
+ */
+import { BaseProvider } from './base.js';
+export class GeminiProvider extends BaseProvider {
+    name = 'gemini';
+    apiKey;
+    model;
+    baseUrl = 'https://generativelanguage.googleapis.com/v1beta';
+    constructor(apiKey, model = 'gemini-3-flash') {
+        super();
+        this.apiKey = apiKey || process.env.GEMINI_API_KEY || '';
+        this.model = model;
+    }
+    isConfigured() {
+        return !!this.apiKey;
+    }
+    /**
+     * Clean JSON Schema for Gemini API compatibility
+     * Removes unsupported fields like additionalProperties
+     */
+    cleanSchemaForGemini(schema) {
+        const unsupportedFields = ['additionalProperties', '$schema', 'default', 'examples'];
+        const clean = (obj) => {
+            if (Array.isArray(obj)) {
+                return obj.map(clean);
+            }
+            if (obj && typeof obj === 'object') {
+                const result = {};
+                for (const [key, value] of Object.entries(obj)) {
+                    if (!unsupportedFields.includes(key)) {
+                        result[key] = clean(value);
+                    }
+                }
+                return result;
+            }
+            return obj;
+        };
+        return clean(schema);
+    }
+    async invoke(params) {
+        if (!this.isConfigured()) {
+            throw new Error('Gemini API key not configured. Set GEMINI_API_KEY environment variable.');
+        }
+        const url = `${this.baseUrl}/models/${this.model}:generateContent?key=${this.apiKey}`;
+        // Convert messages to Gemini format
+        const contents = params.messages
+            .filter(m => m.role !== 'system')
+            .map(m => ({
+                role: m.role === 'assistant' ? 'model' : 'user',
+                parts: [{ text: m.content }]
+            }));
+        // Add system instruction if present
+        const systemMessage = params.messages.find(m => m.role === 'system');
+        const body = {
+            contents,
+            generationConfig: {
+                temperature: params.temperature ?? 0.7,
+                maxOutputTokens: params.maxTokens ?? 8192,
+            }
+        };
+        if (systemMessage) {
+            body.systemInstruction = { parts: [{ text: systemMessage.content }] };
+        }
+        // Add JSON schema constraint if provided
+        if (params.jsonSchema) {
+            const cleanedSchema = this.cleanSchemaForGemini(params.jsonSchema);
+            body.generationConfig = {
+                ...body.generationConfig,
+                responseMimeType: 'application/json',
+                responseSchema: cleanedSchema,
+            };
+        }
+        const response = await fetch(url, {
+            method: 'POST',
+            headers: { 'Content-Type': 'application/json' },
+            body: JSON.stringify(body),
+        });
+        if (!response.ok) {
+            const error = await response.text();
+            throw new Error(`Gemini API error: ${response.status} - ${error}`);
+        }
+        const data = await response.json();
+        const content = data.candidates?.[0]?.content?.parts?.[0]?.text || '';
+        return {
+            content,
+            usage: data.usageMetadata ? {
+                promptTokens: data.usageMetadata.promptTokenCount || 0,
+                completionTokens: data.usageMetadata.candidatesTokenCount || 0,
+                totalTokens: data.usageMetadata.totalTokenCount || 0,
+            } : undefined,
+        };
+    }
+}
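
Unlike the other providers, the Gemini client uses native structured output (responseMimeType + responseSchema) and therefore strips schema keywords the endpoint rejects. A hedged illustration of that cleaning step; the schema values below are hypothetical, not taken from the package.

```ts
// Input schema as a module author might write it.
const draftSchema = {
  $schema: 'http://json-schema.org/draft-07/schema#',
  type: 'object',
  additionalProperties: false,
  properties: {
    title: { type: 'string', default: 'untitled' },
    tags: { type: 'array', items: { type: 'string' } },
  },
};

// After cleanSchemaForGemini(draftSchema), the request's responseSchema would
// carry roughly this shape: additionalProperties, $schema, and default are
// removed recursively, everything else passes through unchanged.
const expectedCleaned = {
  type: 'object',
  properties: {
    title: { type: 'string' },
    tags: { type: 'array', items: { type: 'string' } },
  },
};

console.log(JSON.stringify(expectedCleaned, null, 2), JSON.stringify(draftSchema, null, 2));
```
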
package/dist/providers/index.d.ts
@@ -0,0 +1,19 @@
+/**
+ * Provider Registry
+ */
+import type { Provider } from '../types.js';
+export { BaseProvider } from './base.js';
+export { GeminiProvider } from './gemini.js';
+export { OpenAIProvider } from './openai.js';
+export { AnthropicProvider } from './anthropic.js';
+export { MiniMaxProvider } from './minimax.js';
+export { DeepSeekProvider } from './deepseek.js';
+export { MoonshotProvider } from './moonshot.js';
+export { QwenProvider } from './qwen.js';
+export { OllamaProvider } from './ollama.js';
+export declare function getProvider(name?: string, model?: string): Provider;
+export declare function listProviders(): Array<{
+    name: string;
+    configured: boolean;
+    model: string;
+}>;
package/dist/providers/index.js
@@ -0,0 +1,74 @@
+/**
+ * Provider Registry
+ */
+import { GeminiProvider } from './gemini.js';
+import { OpenAIProvider } from './openai.js';
+import { AnthropicProvider } from './anthropic.js';
+import { MiniMaxProvider } from './minimax.js';
+import { DeepSeekProvider } from './deepseek.js';
+import { MoonshotProvider } from './moonshot.js';
+import { QwenProvider } from './qwen.js';
+import { OllamaProvider } from './ollama.js';
+export { BaseProvider } from './base.js';
+export { GeminiProvider } from './gemini.js';
+export { OpenAIProvider } from './openai.js';
+export { AnthropicProvider } from './anthropic.js';
+export { MiniMaxProvider } from './minimax.js';
+export { DeepSeekProvider } from './deepseek.js';
+export { MoonshotProvider } from './moonshot.js';
+export { QwenProvider } from './qwen.js';
+export { OllamaProvider } from './ollama.js';
+const providers = {
+    gemini: (model) => new GeminiProvider(undefined, model),
+    openai: (model) => new OpenAIProvider(undefined, model),
+    anthropic: (model) => new AnthropicProvider(undefined, model),
+    minimax: (model) => new MiniMaxProvider(undefined, model),
+    deepseek: (model) => new DeepSeekProvider(undefined, model),
+    moonshot: (model) => new MoonshotProvider(undefined, model),
+    kimi: (model) => new MoonshotProvider(undefined, model), // Alias
+    qwen: (model) => new QwenProvider(undefined, model),
+    tongyi: (model) => new QwenProvider(undefined, model), // Alias
+    dashscope: (model) => new QwenProvider(undefined, model), // Alias
+    ollama: (model) => new OllamaProvider(model),
+    local: (model) => new OllamaProvider(model), // Alias
+};
+export function getProvider(name, model) {
+    // Check for model override from environment
+    const modelOverride = model || process.env.COG_MODEL;
+    // Auto-detect if not specified
+    if (!name) {
+        if (process.env.GEMINI_API_KEY)
+            return new GeminiProvider(undefined, modelOverride);
+        if (process.env.OPENAI_API_KEY)
+            return new OpenAIProvider(undefined, modelOverride);
+        if (process.env.ANTHROPIC_API_KEY)
+            return new AnthropicProvider(undefined, modelOverride);
+        if (process.env.DEEPSEEK_API_KEY)
+            return new DeepSeekProvider(undefined, modelOverride);
+        if (process.env.MINIMAX_API_KEY)
+            return new MiniMaxProvider(undefined, modelOverride);
+        if (process.env.MOONSHOT_API_KEY)
+            return new MoonshotProvider(undefined, modelOverride);
+        if (process.env.DASHSCOPE_API_KEY || process.env.QWEN_API_KEY)
+            return new QwenProvider(undefined, modelOverride);
+        // Ollama is always available as fallback if nothing else is configured
+        return new OllamaProvider(modelOverride);
+    }
+    const factory = providers[name.toLowerCase()];
+    if (!factory) {
+        throw new Error(`Unknown provider: ${name}. Available: ${Object.keys(providers).join(', ')}`);
+    }
+    return factory(modelOverride);
+}
+export function listProviders() {
+    return [
+        { name: 'gemini', configured: !!process.env.GEMINI_API_KEY, model: 'gemini-3-flash' },
+        { name: 'openai', configured: !!process.env.OPENAI_API_KEY, model: 'gpt-5.2' },
+        { name: 'anthropic', configured: !!process.env.ANTHROPIC_API_KEY, model: 'claude-sonnet-4.5' },
+        { name: 'deepseek', configured: !!process.env.DEEPSEEK_API_KEY, model: 'deepseek-v3.2' },
+        { name: 'minimax', configured: !!process.env.MINIMAX_API_KEY, model: 'MiniMax-M2.1' },
+        { name: 'moonshot', configured: !!process.env.MOONSHOT_API_KEY, model: 'kimi-k2.5' },
+        { name: 'qwen', configured: !!(process.env.DASHSCOPE_API_KEY || process.env.QWEN_API_KEY), model: 'qwen3-max' },
+        { name: 'ollama', configured: true, model: 'llama4 (local)' },
+    ];
+}
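
The registry resolves provider names (including aliases), applies the COG_MODEL override, and falls back to environment-based auto-detection. A hedged usage sketch; the bare-specifier import assumes getProvider and listProviders are re-exported from the package root.

```ts
import { getProvider, listProviders } from 'cognitive-modules'; // assumption: re-exported at the root

// Explicit provider and model (COG_MODEL in the environment would also override the model).
const deepseek = getProvider('deepseek', 'deepseek-chat');

// Aliases map to the same factories: 'kimi' -> Moonshot, 'tongyi'/'dashscope' -> Qwen, 'local' -> Ollama.
const kimi = getProvider('kimi');

// No name: the first configured key wins (GEMINI, OPENAI, ANTHROPIC, DEEPSEEK,
// MINIMAX, MOONSHOT, then DASHSCOPE/QWEN); otherwise the local Ollama provider is returned.
const auto = getProvider();

for (const p of listProviders()) {
  console.log(`${p.name}\t${p.configured ? 'configured' : 'missing key'}\t${p.model}`);
}
console.log(deepseek.name, kimi.name, auto.name);
```
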
package/dist/providers/minimax.d.ts
@@ -0,0 +1,14 @@
+/**
+ * MiniMax Provider - MiniMax API
+ */
+import { BaseProvider } from './base.js';
+import type { InvokeParams, InvokeResult } from '../types.js';
+export declare class MiniMaxProvider extends BaseProvider {
+    name: string;
+    private apiKey;
+    private model;
+    private baseUrl;
+    constructor(apiKey?: string, model?: string);
+    isConfigured(): boolean;
+    invoke(params: InvokeParams): Promise<InvokeResult>;
+}
package/dist/providers/minimax.js
@@ -0,0 +1,64 @@
+/**
+ * MiniMax Provider - MiniMax API
+ */
+import { BaseProvider } from './base.js';
+export class MiniMaxProvider extends BaseProvider {
+    name = 'minimax';
+    apiKey;
+    model;
+    baseUrl = 'https://api.minimax.chat/v1';
+    constructor(apiKey, model = 'MiniMax-M2.1') {
+        super();
+        this.apiKey = apiKey || process.env.MINIMAX_API_KEY || '';
+        this.model = model;
+    }
+    isConfigured() {
+        return !!this.apiKey;
+    }
+    async invoke(params) {
+        if (!this.isConfigured()) {
+            throw new Error('MiniMax API key not configured. Set MINIMAX_API_KEY environment variable.');
+        }
+        const url = `${this.baseUrl}/text/chatcompletion_v2`;
+        const body = {
+            model: this.model,
+            messages: params.messages.map(m => ({ role: m.role, content: m.content })),
+            temperature: params.temperature ?? 0.7,
+            max_tokens: params.maxTokens ?? 4096,
+        };
+        // Add JSON mode if schema provided
+        if (params.jsonSchema) {
+            const lastUserIdx = params.messages.findLastIndex(m => m.role === 'user');
+            if (lastUserIdx >= 0) {
+                const messages = [...params.messages];
+                messages[lastUserIdx] = {
+                    ...messages[lastUserIdx],
+                    content: messages[lastUserIdx].content + this.buildJsonPrompt(params.jsonSchema),
+                };
+                body.messages = messages.map(m => ({ role: m.role, content: m.content }));
+            }
+        }
+        const response = await fetch(url, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'Authorization': `Bearer ${this.apiKey}`,
+            },
+            body: JSON.stringify(body),
+        });
+        if (!response.ok) {
+            const error = await response.text();
+            throw new Error(`MiniMax API error: ${response.status} - ${error}`);
+        }
+        const data = await response.json();
+        const content = data.choices?.[0]?.message?.content || '';
+        return {
+            content,
+            usage: data.usage ? {
+                promptTokens: data.usage.prompt_tokens || 0,
+                completionTokens: data.usage.completion_tokens || 0,
+                totalTokens: data.usage.total_tokens || 0,
+            } : undefined,
+        };
+    }
+}
package/dist/providers/moonshot.d.ts
@@ -0,0 +1,14 @@
+/**
+ * Moonshot Provider - Moonshot AI (Kimi) API
+ */
+import { BaseProvider } from './base.js';
+import type { InvokeParams, InvokeResult } from '../types.js';
+export declare class MoonshotProvider extends BaseProvider {
+    name: string;
+    private apiKey;
+    private model;
+    private baseUrl;
+    constructor(apiKey?: string, model?: string);
+    isConfigured(): boolean;
+    invoke(params: InvokeParams): Promise<InvokeResult>;
+}
package/dist/providers/moonshot.js
@@ -0,0 +1,65 @@
+/**
+ * Moonshot Provider - Moonshot AI (Kimi) API
+ */
+import { BaseProvider } from './base.js';
+export class MoonshotProvider extends BaseProvider {
+    name = 'moonshot';
+    apiKey;
+    model;
+    baseUrl = 'https://api.moonshot.cn/v1';
+    constructor(apiKey, model = 'kimi-k2.5') {
+        super();
+        this.apiKey = apiKey || process.env.MOONSHOT_API_KEY || '';
+        this.model = model;
+    }
+    isConfigured() {
+        return !!this.apiKey;
+    }
+    async invoke(params) {
+        if (!this.isConfigured()) {
+            throw new Error('Moonshot API key not configured. Set MOONSHOT_API_KEY environment variable.');
+        }
+        const url = `${this.baseUrl}/chat/completions`;
+        const body = {
+            model: this.model,
+            messages: params.messages.map(m => ({ role: m.role, content: m.content })),
+            temperature: params.temperature ?? 0.7,
+            max_tokens: params.maxTokens ?? 4096,
+        };
+        // Add JSON mode if schema provided
+        if (params.jsonSchema) {
+            body.response_format = { type: 'json_object' };
+            const lastUserIdx = params.messages.findLastIndex(m => m.role === 'user');
+            if (lastUserIdx >= 0) {
+                const messages = [...params.messages];
+                messages[lastUserIdx] = {
+                    ...messages[lastUserIdx],
+                    content: messages[lastUserIdx].content + this.buildJsonPrompt(params.jsonSchema),
+                };
+                body.messages = messages.map(m => ({ role: m.role, content: m.content }));
+            }
+        }
+        const response = await fetch(url, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'Authorization': `Bearer ${this.apiKey}`,
+            },
+            body: JSON.stringify(body),
+        });
+        if (!response.ok) {
+            const error = await response.text();
+            throw new Error(`Moonshot API error: ${response.status} - ${error}`);
+        }
+        const data = await response.json();
+        const content = data.choices?.[0]?.message?.content || '';
+        return {
+            content,
+            usage: data.usage ? {
+                promptTokens: data.usage.prompt_tokens || 0,
+                completionTokens: data.usage.completion_tokens || 0,
+                totalTokens: data.usage.total_tokens || 0,
+            } : undefined,
+        };
+    }
+}
package/dist/providers/ollama.d.ts
@@ -0,0 +1,13 @@
+/**
+ * Ollama Provider - Local LLM via Ollama
+ */
+import { BaseProvider } from './base.js';
+import type { InvokeParams, InvokeResult } from '../types.js';
+export declare class OllamaProvider extends BaseProvider {
+    name: string;
+    private model;
+    private baseUrl;
+    constructor(model?: string, baseUrl?: string);
+    isConfigured(): boolean;
+    invoke(params: InvokeParams): Promise<InvokeResult>;
+}
package/dist/providers/ollama.js
@@ -0,0 +1,64 @@
+/**
+ * Ollama Provider - Local LLM via Ollama
+ */
+import { BaseProvider } from './base.js';
+export class OllamaProvider extends BaseProvider {
+    name = 'ollama';
+    model;
+    baseUrl;
+    constructor(model = 'llama4', baseUrl = 'http://localhost:11434') {
+        super();
+        this.model = process.env.OLLAMA_MODEL || model;
+        this.baseUrl = process.env.OLLAMA_HOST || baseUrl;
+    }
+    isConfigured() {
+        return true; // Ollama doesn't need API key
+    }
+    async invoke(params) {
+        const url = `${this.baseUrl}/api/chat`;
+        let messages = params.messages.map(m => ({ role: m.role, content: m.content }));
+        // Add JSON mode if schema provided
+        if (params.jsonSchema) {
+            const lastUserIdx = messages.findLastIndex(m => m.role === 'user');
+            if (lastUserIdx >= 0) {
+                messages = [...messages];
+                messages[lastUserIdx] = {
+                    ...messages[lastUserIdx],
+                    content: messages[lastUserIdx].content + this.buildJsonPrompt(params.jsonSchema),
+                };
+            }
+        }
+        const body = {
+            model: this.model,
+            messages,
+            stream: false,
+            options: {
+                temperature: params.temperature ?? 0.7,
+                num_predict: params.maxTokens ?? 4096,
+            },
+        };
+        // Request JSON format
+        if (params.jsonSchema) {
+            body.format = 'json';
+        }
+        const response = await fetch(url, {
+            method: 'POST',
+            headers: { 'Content-Type': 'application/json' },
+            body: JSON.stringify(body),
+        });
+        if (!response.ok) {
+            const error = await response.text();
+            throw new Error(`Ollama API error: ${response.status} - ${error}`);
+        }
+        const data = await response.json();
+        const content = data.message?.content || '';
+        return {
+            content,
+            usage: {
+                promptTokens: data.prompt_eval_count || 0,
+                completionTokens: data.eval_count || 0,
+                totalTokens: (data.prompt_eval_count || 0) + (data.eval_count || 0),
+            },
+        };
+    }
+}
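
The Ollama client is the keyless local fallback: model and host come from OLLAMA_MODEL / OLLAMA_HOST, then default to 'llama4' at http://localhost:11434. A hedged usage sketch; the model tag is an arbitrary example and the bare-specifier import assumes the root entry re-exports the provider classes.

```ts
import { OllamaProvider } from 'cognitive-modules'; // assumption: re-exported at the root

// 'qwen3:8b' is only an illustrative tag for a locally pulled model.
const local = new OllamaProvider('qwen3:8b');

const { content, usage } = await local.invoke({
  messages: [{ role: 'user', content: 'Summarize: Ollama serves models over a local HTTP API.' }],
  temperature: 0.2,
  maxTokens: 512,
  // Supplying jsonSchema would also set format: 'json' on the request body.
});

console.log(content, usage?.totalTokens);
```
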
package/dist/providers/openai.d.ts
@@ -0,0 +1,14 @@
+/**
+ * OpenAI Provider - OpenAI API (and compatible APIs)
+ */
+import { BaseProvider } from './base.js';
+import type { InvokeParams, InvokeResult } from '../types.js';
+export declare class OpenAIProvider extends BaseProvider {
+    name: string;
+    private apiKey;
+    private model;
+    private baseUrl;
+    constructor(apiKey?: string, model?: string, baseUrl?: string);
+    isConfigured(): boolean;
+    invoke(params: InvokeParams): Promise<InvokeResult>;
+}
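
Only the declaration of the OpenAI provider appears in this diff view, but its third constructor parameter suggests it can be pointed at OpenAI-compatible endpoints. A heavily hedged sketch based on that signature alone; the key variable, model name, and base URL are placeholders, and the fallback-to-OPENAI_API_KEY behaviour is inferred by analogy with the other providers rather than confirmed by the shown source.

```ts
import { OpenAIProvider } from 'cognitive-modules'; // assumption: re-exported at the root

const compatible = new OpenAIProvider(
  process.env.MY_GATEWAY_KEY,             // placeholder; presumably falls back to OPENAI_API_KEY when omitted
  'my-gateway-model',                     // placeholder model name
  'https://llm-gateway.example.com/v1',   // placeholder base URL for an OpenAI-compatible API
);

if (compatible.isConfigured()) {
  const result = await compatible.invoke({
    messages: [{ role: 'user', content: 'ping' }],
    maxTokens: 16,
  });
  console.log(result.content);
}
```
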