@nahisaho/katashiro-llm 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. package/LICENSE +21 -0
  2. package/dist/LLMClient.d.ts +64 -0
  3. package/dist/LLMClient.d.ts.map +1 -0
  4. package/dist/LLMClient.js +139 -0
  5. package/dist/LLMClient.js.map +1 -0
  6. package/dist/PromptManager.d.ts +66 -0
  7. package/dist/PromptManager.d.ts.map +1 -0
  8. package/dist/PromptManager.js +121 -0
  9. package/dist/PromptManager.js.map +1 -0
  10. package/dist/TokenCounter.d.ts +43 -0
  11. package/dist/TokenCounter.d.ts.map +1 -0
  12. package/dist/TokenCounter.js +100 -0
  13. package/dist/TokenCounter.js.map +1 -0
  14. package/dist/index.d.ts +12 -0
  15. package/dist/index.d.ts.map +1 -0
  16. package/dist/index.js +17 -0
  17. package/dist/index.js.map +1 -0
  18. package/dist/providers/AzureOpenAILLMProvider.d.ts +82 -0
  19. package/dist/providers/AzureOpenAILLMProvider.d.ts.map +1 -0
  20. package/dist/providers/AzureOpenAILLMProvider.js +339 -0
  21. package/dist/providers/AzureOpenAILLMProvider.js.map +1 -0
  22. package/dist/providers/BaseLLMProvider.d.ts +51 -0
  23. package/dist/providers/BaseLLMProvider.d.ts.map +1 -0
  24. package/dist/providers/BaseLLMProvider.js +72 -0
  25. package/dist/providers/BaseLLMProvider.js.map +1 -0
  26. package/dist/providers/LLMFactory.d.ts +75 -0
  27. package/dist/providers/LLMFactory.d.ts.map +1 -0
  28. package/dist/providers/LLMFactory.js +149 -0
  29. package/dist/providers/LLMFactory.js.map +1 -0
  30. package/dist/providers/MockLLMProvider.d.ts +57 -0
  31. package/dist/providers/MockLLMProvider.d.ts.map +1 -0
  32. package/dist/providers/MockLLMProvider.js +120 -0
  33. package/dist/providers/MockLLMProvider.js.map +1 -0
  34. package/dist/providers/OllamaLLMProvider.d.ts +73 -0
  35. package/dist/providers/OllamaLLMProvider.d.ts.map +1 -0
  36. package/dist/providers/OllamaLLMProvider.js +242 -0
  37. package/dist/providers/OllamaLLMProvider.js.map +1 -0
  38. package/dist/providers/OpenAILLMProvider.d.ts +87 -0
  39. package/dist/providers/OpenAILLMProvider.d.ts.map +1 -0
  40. package/dist/providers/OpenAILLMProvider.js +349 -0
  41. package/dist/providers/OpenAILLMProvider.js.map +1 -0
  42. package/dist/providers/index.d.ts +17 -0
  43. package/dist/providers/index.d.ts.map +1 -0
  44. package/dist/providers/index.js +19 -0
  45. package/dist/providers/index.js.map +1 -0
  46. package/dist/types.d.ts +251 -0
  47. package/dist/types.d.ts.map +1 -0
  48. package/dist/types.js +8 -0
  49. package/dist/types.js.map +1 -0
  50. package/package.json +51 -0
  51. package/src/LLMClient.ts +171 -0
  52. package/src/PromptManager.ts +156 -0
  53. package/src/TokenCounter.ts +114 -0
  54. package/src/index.ts +35 -0
  55. package/src/providers/AzureOpenAILLMProvider.ts +494 -0
  56. package/src/providers/BaseLLMProvider.ts +110 -0
  57. package/src/providers/LLMFactory.ts +216 -0
  58. package/src/providers/MockLLMProvider.ts +173 -0
  59. package/src/providers/OllamaLLMProvider.ts +322 -0
  60. package/src/providers/OpenAILLMProvider.ts +500 -0
  61. package/src/providers/index.ts +35 -0
  62. package/src/types.ts +268 -0
@@ -0,0 +1,110 @@
1
+ /**
2
+ * Base LLM Provider
3
+ *
4
+ * @requirement REQ-LLM-001
5
+ * @design DES-KATASHIRO-003-LLM §3.1.1
6
+ */
7
+
8
+ import type { z, ZodType } from 'zod';
9
+ import type {
10
+ LLMProvider,
11
+ ProviderConfig,
12
+ GenerateRequest,
13
+ GenerateResponse,
14
+ StreamChunk,
15
+ } from '../types.js';
16
+
17
+ /**
18
+ * 抽象LLMプロバイダー基底クラス
19
+ */
20
+ export abstract class BaseLLMProvider implements LLMProvider {
21
+ abstract readonly name: string;
22
+ abstract readonly supportedModels: string[];
23
+
24
+ protected config: ProviderConfig;
25
+ protected defaultModel: string;
26
+
27
+ constructor(config: ProviderConfig = {}) {
28
+ this.config = {
29
+ timeout: 30000,
30
+ maxRetries: 3,
31
+ ...config,
32
+ };
33
+ this.defaultModel = config.defaultModel ?? this.getDefaultModel();
34
+ }
35
+
36
+ /**
37
+ * デフォルトモデル取得
38
+ */
39
+ protected abstract getDefaultModel(): string;
40
+
41
+ /**
42
+ * テキスト生成(抽象メソッド)
43
+ */
44
+ abstract generate(request: GenerateRequest): Promise<GenerateResponse>;
45
+
46
+ /**
47
+ * ストリーミング生成(抽象メソッド)
48
+ */
49
+ abstract generateStream(request: GenerateRequest): AsyncGenerator<StreamChunk>;
50
+
51
+ /**
52
+ * 構造化出力生成
53
+ */
54
+ async generateStructured<T extends ZodType>(
55
+ request: GenerateRequest,
56
+ schema: T
57
+ ): Promise<z.infer<T>> {
58
+ const jsonSchema = this.zodToJsonSchema(schema);
59
+
60
+ const enhancedRequest: GenerateRequest = {
61
+ ...request,
62
+ responseFormat: { type: 'json_object' },
63
+ messages: [
64
+ ...request.messages,
65
+ {
66
+ role: 'user',
67
+ content: `Respond with valid JSON matching this schema:\n${JSON.stringify(jsonSchema, null, 2)}`,
68
+ },
69
+ ],
70
+ };
71
+
72
+ const response = await this.generate(enhancedRequest);
73
+ const parsed = JSON.parse(response.content);
74
+ return schema.parse(parsed);
75
+ }
76
+
77
+ /**
78
+ * トークン数カウント(デフォルト実装)
79
+ */
80
+ async countTokens(text: string, _model?: string): Promise<number> {
81
+ // 簡易実装: 4文字 ≈ 1トークン
82
+ return Math.ceil(text.length / 4);
83
+ }
84
+
85
+ /**
86
+ * モデルサポート確認
87
+ */
88
+ isModelSupported(model: string): boolean {
89
+ return this.supportedModels.includes(model);
90
+ }
91
+
92
+ /**
93
+ * モデル取得(デフォルトまたは指定)
94
+ */
95
+ protected getModel(request: GenerateRequest): string {
96
+ return request.model ?? this.defaultModel;
97
+ }
98
+
99
+ /**
100
+ * Zodスキーマ→JSONスキーマ変換(簡易実装)
101
+ */
102
+ protected zodToJsonSchema(schema: ZodType): Record<string, unknown> {
103
+ // 実際にはzod-to-json-schemaライブラリを使用
104
+ const description = (schema as { description?: string }).description;
105
+ return {
106
+ type: 'object',
107
+ description: description ?? 'Generated schema',
108
+ };
109
+ }
110
+ }
@@ -0,0 +1,216 @@
1
+ /**
2
+ * LLM Provider Factory
3
+ *
4
+ * Esperanto-style factory for creating LLM providers
5
+ *
6
+ * @requirement REQ-LLM-001
7
+ * @design DES-KATASHIRO-003-LLM
8
+ */
9
+
10
+ import type { LLMProvider, ProviderConfig } from '../types.js';
11
+ import { MockLLMProvider } from './MockLLMProvider.js';
12
+ import { OllamaLLMProvider } from './OllamaLLMProvider.js';
13
+ import type { OllamaProviderConfig } from './OllamaLLMProvider.js';
14
+ import { OpenAILLMProvider } from './OpenAILLMProvider.js';
15
+ import type { OpenAIProviderConfig } from './OpenAILLMProvider.js';
16
+ import { AzureOpenAILLMProvider } from './AzureOpenAILLMProvider.js';
17
+ import type { AzureOpenAIProviderConfig } from './AzureOpenAILLMProvider.js';
18
+
19
+ /**
20
+ * プロバイダータイプ
21
+ */
22
+ export type LLMProviderType =
23
+ | 'mock'
24
+ | 'ollama'
25
+ | 'openai'
26
+ | 'openai-compatible'
27
+ | 'azure-openai';
28
+
29
+ /**
30
+ * プロバイダー設定マップ
31
+ */
32
+ export interface LLMProviderConfigMap {
33
+ mock: ProviderConfig;
34
+ ollama: OllamaProviderConfig;
35
+ openai: OpenAIProviderConfig;
36
+ 'openai-compatible': OpenAIProviderConfig;
37
+ 'azure-openai': AzureOpenAIProviderConfig;
38
+ }
39
+
40
+ /**
41
+ * 環境変数から設定を取得
42
+ */
43
+ function getConfigFromEnv(
44
+ provider: LLMProviderType
45
+ ): Partial<LLMProviderConfigMap[LLMProviderType]> {
46
+ switch (provider) {
47
+ case 'ollama':
48
+ return {
49
+ baseUrl: process.env.OLLAMA_BASE_URL ?? process.env.OLLAMA_HOST,
50
+ model: process.env.OLLAMA_MODEL,
51
+ };
52
+
53
+ case 'openai':
54
+ return {
55
+ apiKey: process.env.OPENAI_API_KEY,
56
+ organization: process.env.OPENAI_ORGANIZATION,
57
+ baseUrl: process.env.OPENAI_BASE_URL,
58
+ model: process.env.OPENAI_MODEL,
59
+ };
60
+
61
+ case 'openai-compatible':
62
+ return {
63
+ apiKey:
64
+ process.env.OPENAI_COMPATIBLE_API_KEY_LLM ??
65
+ process.env.OPENAI_COMPATIBLE_API_KEY,
66
+ baseUrl:
67
+ process.env.OPENAI_COMPATIBLE_BASE_URL_LLM ??
68
+ process.env.OPENAI_COMPATIBLE_BASE_URL,
69
+ };
70
+
71
+ case 'azure-openai':
72
+ return {
73
+ endpoint: process.env.AZURE_OPENAI_ENDPOINT,
74
+ apiKey: process.env.AZURE_OPENAI_API_KEY,
75
+ deploymentName: process.env.AZURE_OPENAI_DEPLOYMENT,
76
+ apiVersion: process.env.AZURE_OPENAI_API_VERSION,
77
+ };
78
+
79
+ default:
80
+ return {};
81
+ }
82
+ }
83
+
84
+ /**
85
+ * LLM Provider Factory
86
+ *
87
+ * Esperantoスタイルのファクトリクラス
88
+ *
89
+ * @example
90
+ * ```typescript
91
+ * // 利用可能なプロバイダーを確認
92
+ * const providers = LLMFactory.getAvailableProviders();
93
+ *
94
+ * // プロバイダーを作成
95
+ * const provider = LLMFactory.create('ollama', {
96
+ * baseUrl: 'http://192.168.224.1:11434',
97
+ * model: 'llama3.2',
98
+ * });
99
+ *
100
+ * // または環境変数から自動設定
101
+ * const provider = LLMFactory.create('openai');
102
+ * ```
103
+ */
104
+ export class LLMFactory {
105
+ /**
106
+ * プロバイダー作成
107
+ */
108
+ static create<T extends LLMProviderType>(
109
+ provider: T,
110
+ config?: Partial<LLMProviderConfigMap[T]>
111
+ ): LLMProvider {
112
+ // 環境変数からの設定とマージ
113
+ const envConfig = getConfigFromEnv(provider);
114
+ const mergedConfig = { ...envConfig, ...config };
115
+
116
+ switch (provider) {
117
+ case 'mock':
118
+ return new MockLLMProvider(mergedConfig as unknown as import('./MockLLMProvider.js').MockProviderConfig);
119
+
120
+ case 'ollama':
121
+ return new OllamaLLMProvider(mergedConfig as OllamaProviderConfig);
122
+
123
+ case 'openai':
124
+ case 'openai-compatible':
125
+ return new OpenAILLMProvider(mergedConfig as OpenAIProviderConfig);
126
+
127
+ case 'azure-openai':
128
+ return new AzureOpenAILLMProvider(
129
+ mergedConfig as AzureOpenAIProviderConfig
130
+ );
131
+
132
+ default:
133
+ throw new Error(`Unknown LLM provider: ${provider}`);
134
+ }
135
+ }
136
+
137
+ /**
138
+ * 利用可能なプロバイダー一覧
139
+ */
140
+ static getAvailableProviders(): LLMProviderType[] {
141
+ return ['mock', 'ollama', 'openai', 'openai-compatible', 'azure-openai'];
142
+ }
143
+
144
+ /**
145
+ * デフォルトプロバイダー取得
146
+ *
147
+ * 環境変数から自動判定
148
+ */
149
+ static getDefaultProvider(): LLMProvider {
150
+ // 優先順位: AZURE > OPENAI > OLLAMA > MOCK
151
+ if (
152
+ process.env.AZURE_OPENAI_ENDPOINT &&
153
+ process.env.AZURE_OPENAI_API_KEY
154
+ ) {
155
+ return this.create('azure-openai');
156
+ }
157
+
158
+ if (process.env.OPENAI_API_KEY) {
159
+ return this.create('openai');
160
+ }
161
+
162
+ if (process.env.OLLAMA_BASE_URL || process.env.OLLAMA_HOST) {
163
+ return this.create('ollama');
164
+ }
165
+
166
+ // フォールバック: モック
167
+ return this.create('mock');
168
+ }
169
+
170
+ /**
171
+ * プロバイダーの利用可能確認
172
+ */
173
+ static isProviderConfigured(provider: LLMProviderType): boolean {
174
+ switch (provider) {
175
+ case 'mock':
176
+ return true;
177
+
178
+ case 'ollama':
179
+ return !!(process.env.OLLAMA_BASE_URL || process.env.OLLAMA_HOST);
180
+
181
+ case 'openai':
182
+ return !!process.env.OPENAI_API_KEY;
183
+
184
+ case 'openai-compatible':
185
+ return !!(
186
+ process.env.OPENAI_COMPATIBLE_BASE_URL_LLM ||
187
+ process.env.OPENAI_COMPATIBLE_BASE_URL
188
+ );
189
+
190
+ case 'azure-openai':
191
+ return !!(
192
+ process.env.AZURE_OPENAI_ENDPOINT && process.env.AZURE_OPENAI_API_KEY
193
+ );
194
+
195
+ default:
196
+ return false;
197
+ }
198
+ }
199
+ }
200
+
201
+ /**
202
+ * 便利関数: LLMプロバイダー作成
203
+ */
204
+ export function createLLMProvider<T extends LLMProviderType>(
205
+ provider: T,
206
+ config?: Partial<LLMProviderConfigMap[T]>
207
+ ): LLMProvider {
208
+ return LLMFactory.create(provider, config);
209
+ }
210
+
211
+ /**
212
+ * 便利関数: デフォルトプロバイダー取得
213
+ */
214
+ export function getDefaultLLMProvider(): LLMProvider {
215
+ return LLMFactory.getDefaultProvider();
216
+ }
@@ -0,0 +1,173 @@
1
+ /**
2
+ * Mock LLM Provider - テスト用モックプロバイダー
3
+ *
4
+ * @requirement REQ-LLM-001
5
+ * @design DES-KATASHIRO-003-LLM §3.1
6
+ */
7
+
8
+ import type { ZodType, z } from 'zod';
9
+ import type {
10
+ LLMProvider,
11
+ GenerateRequest,
12
+ GenerateResponse,
13
+ StreamChunk,
14
+ } from '../types.js';
15
+
16
+ /**
17
+ * モックプロバイダー設定
18
+ */
19
+ export interface MockProviderConfig {
20
+ /** デフォルトレスポンス */
21
+ defaultResponse?: string;
22
+ /** 遅延(ミリ秒) */
23
+ delay?: number;
24
+ /** エラーを発生させるか */
25
+ shouldFail?: boolean;
26
+ /** エラーメッセージ */
27
+ errorMessage?: string;
28
+ /** カスタムレスポンス生成関数 */
29
+ responseGenerator?: (request: GenerateRequest) => string;
30
+ }
31
+
32
+ /**
33
+ * テスト用モックLLMプロバイダー
34
+ */
35
+ export class MockLLMProvider implements LLMProvider {
36
+ readonly name = 'mock';
37
+ readonly supportedModels = ['mock-model', 'mock-model-v2'];
38
+
39
+ private config: MockProviderConfig;
40
+ private callCount = 0;
41
+ private callHistory: GenerateRequest[] = [];
42
+
43
+ constructor(config: MockProviderConfig = {}) {
44
+ this.config = {
45
+ defaultResponse: 'This is a mock response.',
46
+ delay: 0,
47
+ shouldFail: false,
48
+ ...config,
49
+ };
50
+ }
51
+
52
+ /**
53
+ * 呼び出し回数取得
54
+ */
55
+ getCallCount(): number {
56
+ return this.callCount;
57
+ }
58
+
59
+ /**
60
+ * 呼び出し履歴取得
61
+ */
62
+ getCallHistory(): GenerateRequest[] {
63
+ return [...this.callHistory];
64
+ }
65
+
66
+ /**
67
+ * リセット
68
+ */
69
+ reset(): void {
70
+ this.callCount = 0;
71
+ this.callHistory = [];
72
+ }
73
+
74
+ /**
75
+ * 設定更新
76
+ */
77
+ updateConfig(config: Partial<MockProviderConfig>): void {
78
+ this.config = { ...this.config, ...config };
79
+ }
80
+
81
+ async generate(request: GenerateRequest): Promise<GenerateResponse> {
82
+ this.callCount++;
83
+ this.callHistory.push(request);
84
+
85
+ if (this.config.delay && this.config.delay > 0) {
86
+ await this.sleep(this.config.delay);
87
+ }
88
+
89
+ if (this.config.shouldFail) {
90
+ throw new Error(this.config.errorMessage ?? 'Mock provider error');
91
+ }
92
+
93
+ const content = this.config.responseGenerator
94
+ ? this.config.responseGenerator(request)
95
+ : this.config.defaultResponse ?? '';
96
+
97
+ return {
98
+ id: `mock-${Date.now()}-${this.callCount}`,
99
+ model: request.model ?? 'mock-model',
100
+ content,
101
+ usage: {
102
+ promptTokens: this.estimateTokens(request.messages.map((m) =>
103
+ typeof m.content === 'string' ? m.content : ''
104
+ ).join('')),
105
+ completionTokens: this.estimateTokens(content),
106
+ totalTokens: 0, // 後で計算
107
+ },
108
+ finishReason: 'stop',
109
+ };
110
+ }
111
+
112
+ async *generateStream(request: GenerateRequest): AsyncGenerator<StreamChunk> {
113
+ this.callCount++;
114
+ this.callHistory.push(request);
115
+
116
+ if (this.config.shouldFail) {
117
+ throw new Error(this.config.errorMessage ?? 'Mock provider error');
118
+ }
119
+
120
+ const content = this.config.responseGenerator
121
+ ? this.config.responseGenerator(request)
122
+ : this.config.defaultResponse ?? '';
123
+
124
+ // 文字ごとにストリーミング
125
+ const words = content.split(' ');
126
+ for (let i = 0; i < words.length; i++) {
127
+ if (this.config.delay && this.config.delay > 0) {
128
+ await this.sleep(this.config.delay / words.length);
129
+ }
130
+ yield { type: 'content', content: (i > 0 ? ' ' : '') + words[i] };
131
+ }
132
+
133
+ yield {
134
+ type: 'done',
135
+ usage: {
136
+ promptTokens: this.estimateTokens(request.messages.map((m) =>
137
+ typeof m.content === 'string' ? m.content : ''
138
+ ).join('')),
139
+ completionTokens: this.estimateTokens(content),
140
+ totalTokens: 0,
141
+ },
142
+ };
143
+ }
144
+
145
+ async generateStructured<T extends ZodType>(
146
+ request: GenerateRequest,
147
+ schema: T
148
+ ): Promise<z.infer<T>> {
149
+ this.callCount++;
150
+ this.callHistory.push(request);
151
+
152
+ if (this.config.shouldFail) {
153
+ throw new Error(this.config.errorMessage ?? 'Mock provider error');
154
+ }
155
+
156
+ // デフォルトの空オブジェクトを返す(実際のテストでは適切なデータを設定)
157
+ const defaultValue = {};
158
+ return schema.parse(defaultValue);
159
+ }
160
+
161
+ async countTokens(text: string, _model?: string): Promise<number> {
162
+ return this.estimateTokens(text);
163
+ }
164
+
165
+ private estimateTokens(text: string): number {
166
+ // 簡易トークン推定: 4文字 ≈ 1トークン
167
+ return Math.ceil(text.length / 4);
168
+ }
169
+
170
+ private sleep(ms: number): Promise<void> {
171
+ return new Promise((resolve) => setTimeout(resolve, ms));
172
+ }
173
+ }