genai-lite 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/README.md +508 -30
  2. package/dist/config/presets.json +121 -17
  3. package/dist/index.d.ts +3 -3
  4. package/dist/index.js +4 -3
  5. package/dist/llm/LLMService.createMessages.test.d.ts +4 -0
  6. package/dist/llm/LLMService.createMessages.test.js +364 -0
  7. package/dist/llm/LLMService.d.ts +49 -47
  8. package/dist/llm/LLMService.js +208 -303
  9. package/dist/llm/LLMService.original.d.ts +147 -0
  10. package/dist/llm/LLMService.original.js +656 -0
  11. package/dist/llm/LLMService.prepareMessage.test.d.ts +1 -0
  12. package/dist/llm/LLMService.prepareMessage.test.js +303 -0
  13. package/dist/llm/LLMService.sendMessage.preset.test.d.ts +1 -0
  14. package/dist/llm/LLMService.sendMessage.preset.test.js +153 -0
  15. package/dist/llm/LLMService.test.js +275 -0
  16. package/dist/llm/clients/AnthropicClientAdapter.js +64 -10
  17. package/dist/llm/clients/AnthropicClientAdapter.test.js +11 -1
  18. package/dist/llm/clients/GeminiClientAdapter.js +70 -11
  19. package/dist/llm/clients/GeminiClientAdapter.test.js +125 -1
  20. package/dist/llm/clients/MockClientAdapter.js +9 -3
  21. package/dist/llm/clients/MockClientAdapter.test.js +11 -1
  22. package/dist/llm/clients/OpenAIClientAdapter.js +26 -10
  23. package/dist/llm/clients/OpenAIClientAdapter.test.js +11 -1
  24. package/dist/llm/config.js +117 -2
  25. package/dist/llm/config.test.js +17 -0
  26. package/dist/llm/services/AdapterRegistry.d.ts +59 -0
  27. package/dist/llm/services/AdapterRegistry.js +113 -0
  28. package/dist/llm/services/AdapterRegistry.test.d.ts +1 -0
  29. package/dist/llm/services/AdapterRegistry.test.js +239 -0
  30. package/dist/llm/services/ModelResolver.d.ts +35 -0
  31. package/dist/llm/services/ModelResolver.js +116 -0
  32. package/dist/llm/services/ModelResolver.test.d.ts +1 -0
  33. package/dist/llm/services/ModelResolver.test.js +158 -0
  34. package/dist/llm/services/PresetManager.d.ts +27 -0
  35. package/dist/llm/services/PresetManager.js +50 -0
  36. package/dist/llm/services/PresetManager.test.d.ts +1 -0
  37. package/dist/llm/services/PresetManager.test.js +210 -0
  38. package/dist/llm/services/RequestValidator.d.ts +31 -0
  39. package/dist/llm/services/RequestValidator.js +122 -0
  40. package/dist/llm/services/RequestValidator.test.d.ts +1 -0
  41. package/dist/llm/services/RequestValidator.test.js +159 -0
  42. package/dist/llm/services/SettingsManager.d.ts +32 -0
  43. package/dist/llm/services/SettingsManager.js +223 -0
  44. package/dist/llm/services/SettingsManager.test.d.ts +1 -0
  45. package/dist/llm/services/SettingsManager.test.js +266 -0
  46. package/dist/llm/types.d.ts +107 -0
  47. package/dist/prompting/builder.d.ts +4 -0
  48. package/dist/prompting/builder.js +12 -61
  49. package/dist/prompting/content.js +3 -9
  50. package/dist/prompting/index.d.ts +2 -3
  51. package/dist/prompting/index.js +4 -5
  52. package/dist/prompting/parser.d.ts +80 -0
  53. package/dist/prompting/parser.js +133 -0
  54. package/dist/prompting/parser.test.js +348 -0
  55. package/dist/prompting/template.d.ts +8 -0
  56. package/dist/prompting/template.js +89 -6
  57. package/dist/prompting/template.test.js +116 -0
  58. package/package.json +3 -2
  59. package/src/config/presets.json +122 -17
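
The bulk of this release is a decomposition of LLMService into dedicated services under dist/llm/services/ (AdapterRegistry, ModelResolver, PresetManager, RequestValidator, SettingsManager) plus a new createMessages template API; the declaration diff below shows the resulting public surface. As a rough orientation only, constructing the service in 0.3.0 might look like the sketch below: the constructor signature and the presetMode option are taken from the diff, while the root import path and the ApiKeyProvider shape are assumptions.

```typescript
// Sketch only: the import path and ApiKeyProvider shape are assumed, not confirmed by this diff.
import { LLMService, type PresetMode } from 'genai-lite';

// Assumed ApiKeyProvider: an async lookup from provider id to API key (or null).
const getApiKey = async (providerId: string): Promise<string | null> =>
  process.env[`${providerId.toUpperCase()}_API_KEY`] ?? null;

// presetMode comes from LLMServiceOptions: 'extend' (the default) layers custom presets
// over the built-in set, 'replace' uses only the custom presets.
const presetMode: PresetMode = 'extend';
const llm = new LLMService(getApiKey, { presetMode });
```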
package/dist/llm/LLMService.d.ts
@@ -1,13 +1,8 @@
 import type { ApiKeyProvider } from '../types';
-import type { LLMChatRequest, LLMResponse, LLMFailureResponse, ProviderInfo, ModelInfo, ApiProviderId } from "./types";
-import type { ILLMClientAdapter } from "./clients/types";
+import type { LLMChatRequest, LLMChatRequestWithPreset, LLMResponse, LLMFailureResponse, ProviderInfo, ModelInfo, ApiProviderId, LLMSettings, ModelContext, LLMMessage } from "./types";
 import type { ModelPreset } from "../types/presets";
-/**
- * Defines how custom presets interact with the default presets.
- * 'replace': Use only the custom presets provided. The default set is ignored.
- * 'extend': Use the default presets, and add/override them with the custom presets. This is the default behavior.
- */
-export type PresetMode = 'replace' | 'extend';
+import { type PresetMode } from "./services/PresetManager";
+export type { PresetMode };
 /**
  * Options for configuring the LLMService
  */
@@ -17,6 +12,17 @@ export interface LLMServiceOptions {
     /** The strategy for integrating custom presets. Defaults to 'extend'. */
     presetMode?: PresetMode;
 }
+/**
+ * Result from createMessages method
+ */
+export interface CreateMessagesResult {
+    /** The parsed messages with role assignments */
+    messages: LLMMessage[];
+    /** Model context variables that were injected during template rendering */
+    modelContext: ModelContext | null;
+    /** Settings extracted from the template's <META> block */
+    settings: Partial<LLMSettings>;
+}
 /**
  * Main process service for LLM operations
  *
@@ -30,9 +36,11 @@ export interface LLMServiceOptions {
  */
 export declare class LLMService {
     private getApiKey;
-    private clientAdapters;
-    private mockClientAdapter;
-    private presets;
+    private presetManager;
+    private adapterRegistry;
+    private requestValidator;
+    private settingsManager;
+    private modelResolver;
     constructor(getApiKey: ApiKeyProvider, options?: LLMServiceOptions);
     /**
      * Gets list of supported LLM providers
@@ -53,58 +61,52 @@ export declare class LLMService {
      * @param request - The LLM chat request
      * @returns Promise resolving to either success or failure response
      */
-    sendMessage(request: LLMChatRequest): Promise<LLMResponse | LLMFailureResponse>;
+    sendMessage(request: LLMChatRequest | LLMChatRequestWithPreset): Promise<LLMResponse | LLMFailureResponse>;
     /**
-     * Validates basic LLM request structure
+     * Gets all configured model presets
      *
-     * @param request - The request to validate
-     * @returns LLMFailureResponse if validation fails, null if valid
+     * @returns Array of model presets
      */
-    private validateRequestStructure;
+    getPresets(): ModelPreset[];
     /**
-     * Merges request settings with model-specific and global defaults
+     * Creates messages from a template with role tags and model-aware variable substitution
      *
-     * @param modelId - The model ID to get defaults for
-     * @param providerId - The provider ID to get defaults for
-     * @param requestSettings - Settings from the request
-     * @returns Complete settings object with all required fields
-     */
-    private mergeSettingsForModel;
-    /**
-     * Gets the appropriate client adapter for a provider
+     * This unified method combines the functionality of template rendering, model context
+     * injection, and role tag parsing into a single, intuitive API. It replaces the need
+     * to chain prepareMessage and buildMessagesFromTemplate for model-aware multi-turn prompts.
      *
-     * @param providerId - The provider ID
-     * @returns The client adapter to use
-     */
-    private getClientAdapter;
-    /**
-     * Registers a client adapter for a specific provider
+     * @param options Options for creating messages
+     * @returns Promise resolving to parsed messages and model context
     *
-     * @param providerId - The provider ID
-     * @param adapter - The client adapter implementation
+     * @example
+     * ```typescript
+     * const { messages } = await llm.createMessages({
+     *   template: `
+     *     <SYSTEM>You are a {{ thinking_enabled ? "thoughtful" : "helpful" }} assistant.</SYSTEM>
+     *     <USER>Help me with {{ task }}</USER>
+     *   `,
+     *   variables: { task: 'understanding async/await' },
+     *   presetId: 'openai-gpt-4.1-default'
+     * });
+     * ```
      */
-    registerClientAdapter(providerId: ApiProviderId, adapter: ILLMClientAdapter): void;
+    createMessages(options: {
+        template: string;
+        variables?: Record<string, any>;
+        presetId?: string;
+        providerId?: string;
+        modelId?: string;
+    }): Promise<CreateMessagesResult>;
     /**
      * Gets information about registered adapters
      *
      * @returns Map of provider IDs to adapter info
      */
-    getRegisteredAdapters(): Map<ApiProviderId, any>;
+    getRegisteredAdapters(): Map<string, import("./services/AdapterRegistry").AdapterInfo>;
     /**
      * Gets a summary of available providers and their adapter status
     *
      * @returns Summary of provider availability
      */
-    getProviderSummary(): {
-        totalProviders: number;
-        providersWithAdapters: number;
-        availableProviders: string[];
-        unavailableProviders: string[];
-    };
-    /**
-     * Gets all configured model presets
-     *
-     * @returns Array of model presets
-     */
-    getPresets(): ModelPreset[];
+    getProviderSummary(): import("./services/AdapterRegistry").ProviderSummary;
 }
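
Read together, the new declarations suggest a two-step flow: render a role-tagged template into messages with createMessages, then hand the result to sendMessage, which now also accepts an LLMChatRequestWithPreset. The sketch below (continuing the `llm` instance from the earlier sketch) is inferred from the .d.ts diff above; the template syntax and preset id come from the JSDoc example, while the request field names passed to sendMessage are assumptions for illustration.

```typescript
// Sketch inferred from the declarations above, not from the package documentation.
const { messages, settings } = await llm.createMessages({
  template: `
    <SYSTEM>You are a {{ thinking_enabled ? "thoughtful" : "helpful" }} assistant.</SYSTEM>
    <USER>Help me with {{ task }}</USER>
  `,
  variables: { task: 'understanding async/await' },
  presetId: 'openai-gpt-4.1-default',
});

// sendMessage now accepts LLMChatRequest | LLMChatRequestWithPreset; the exact
// field names used here (presetId, messages, settings) are assumed.
const response = await llm.sendMessage({
  presetId: 'openai-gpt-4.1-default',
  messages,
  settings,
});
// response is LLMResponse | LLMFailureResponse; narrow on its discriminant before use.
```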