@oxog/npm-llms 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,452 @@
+ import { g as Plugin, s as ExtractorContext, u as AIProvider, t as CompletionOptions, r as ParsedChangelog, q as ChangelogEntry } from '../kernel-I4Zn2uXv.cjs';
+ export { j as EventHandler, K as Kernel, h as PluginCategory, i as PluginInfo, y as composePlugins, w as createKernel, x as definePlugin } from '../kernel-I4Zn2uXv.cjs';
+
+ /**
+ * Types Resolver Plugin
+ * Automatically fetches @types/* packages when main package lacks .d.ts files
+ * @module plugins/core/types-resolver
+ */
+
+ /**
+ * Types Resolver Plugin
+ * Fetches @types/* package when main package has no .d.ts files
+ */
+ declare const typesResolverPlugin: Plugin<ExtractorContext>;
+
+ /**
+ * DTS Parser Plugin
+ * Parses TypeScript declaration files to extract API information
+ * @module plugins/core/dts-parser
+ */
+
+ /**
+ * DTS Parser Plugin
+ * Parses .d.ts files to extract API entries
+ */
+ declare const dtsParserPlugin: Plugin<ExtractorContext>;
+
+ /**
+ * TypeScript Source Parser Plugin
+ * Fallback parser for packages without .d.ts files
+ * @module plugins/core/ts-source-parser
+ */
+
+ /**
+ * TypeScript Source Parser Plugin
+ * Parses .ts/.tsx files when .d.ts files are not available
+ */
+ declare const tsSourceParserPlugin: Plugin<ExtractorContext>;
+
+ /**
+ * README Parser Plugin
+ * Parses README.md to extract documentation content
+ * @module plugins/core/readme-parser
+ */
+
+ /**
+ * README Parser Plugin
+ * Extracts structured content from package README
+ */
+ declare const readmeParserPlugin: Plugin<ExtractorContext>;
+
+ /**
+ * llms.txt Output Plugin
+ * Generates concise LLM-optimized documentation
+ * @module plugins/core/llms-output
+ */
+
+ /**
+ * llms.txt Output Plugin
+ * Generates llms.txt within token limits
+ */
+ declare const llmsOutputPlugin: Plugin<ExtractorContext>;
+
+ /**
+ * llms-full.txt Output Plugin
+ * Generates complete LLM-optimized documentation
+ * @module plugins/core/llms-full-output
+ */
+
+ /**
+ * llms-full.txt Output Plugin
+ * Generates complete API documentation without token limits
+ */
+ declare const llmsFullOutputPlugin: Plugin<ExtractorContext>;
+
+ /**
+ * Markdown Output Plugin
+ * Generates API documentation in Markdown format
+ * @module plugins/core/markdown-output
+ */
+
+ /**
+ * Markdown Output Plugin
+ * Generates formatted API documentation
+ */
+ declare const markdownOutputPlugin: Plugin<ExtractorContext>;
+
+ /**
+ * JSON Output Plugin
+ * Generates structured JSON documentation
+ * @module plugins/core/json-output
+ */
+
+ /**
+ * JSON Output Plugin
+ * Generates structured JSON for programmatic use
+ */
+ declare const jsonOutputPlugin: Plugin<ExtractorContext>;
+
+ /**
+ * Core plugins for @oxog/npm-llms
+ * @module plugins/core
+ */
+
+ /**
+ * All core parser plugins
+ * Note: typesResolverPlugin must come first to fetch @types/* before parsing
+ */
+ declare const coreParserPlugins: Plugin<ExtractorContext>[];
+ /**
+ * All core output plugins
+ */
+ declare const coreOutputPlugins: Plugin<ExtractorContext>[];
+ /**
+ * All core plugins
+ */
+ declare const corePlugins: Plugin<ExtractorContext>[];
+
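// --- Usage sketch (editorial example, not part of the published declarations) ---
// Composing the core plugin arrays with an optional AI enrichment step. The
// '@oxog/npm-llms/plugins' specifier is an assumption; this file does not state
// the public subpath. coreParserPlugins already places typesResolverPlugin
// first, so keep the parser spread ahead of enrichment and output plugins.
import { coreParserPlugins, coreOutputPlugins, createClaudePlugin } from '@oxog/npm-llms/plugins';

const plugins = [
  ...coreParserPlugins,   // types resolution + .d.ts/.ts/README parsing
  createClaudePlugin(),   // optional enrichment; reads ANTHROPIC_API_KEY
  ...coreOutputPlugins,   // llms.txt, llms-full.txt, Markdown, JSON outputs
];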
+ /**
+ * AI Base Plugin
+ * Provides common infrastructure for AI-powered documentation enrichment
+ * @module plugins/optional/ai-base
+ */
+
+ /**
+ * AI enrichment task types
+ */
+ type AIEnrichmentTask = 'descriptions' | 'examples' | 'summary' | 'params' | 'returns';
+ /**
+ * AI enrichment options
+ */
+ interface AIEnrichmentOptions {
+ /** Tasks to perform */
+ tasks: AIEnrichmentTask[];
+ /** Maximum tokens per request */
+ maxTokensPerRequest?: number;
+ /** Temperature for generation */
+ temperature?: number;
+ /** Batch size for processing entries */
+ batchSize?: number;
+ /** Whether to skip entries that already have content */
+ skipExisting?: boolean;
+ }
+ /**
+ * Create AI enrichment plugin
+ * @param provider - AI provider implementation
+ * @param options - Enrichment options
+ * @returns Plugin instance
+ */
+ declare function createAIEnrichmentPlugin(provider: AIProvider, options?: Partial<AIEnrichmentOptions>): Plugin;
+ /**
+ * Create a simple AI provider from a completion function
+ */
+ declare function createSimpleProvider(name: string, completeFn: (prompt: string, options?: CompletionOptions) => Promise<string>, isAvailableFn?: () => boolean): AIProvider;
+
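// --- Usage sketch (editorial example, not part of the published declarations) ---
// Wrapping a custom completion function as an AIProvider and passing it to the
// generic enrichment plugin. `callMyModel` is a hypothetical stand-in for your
// own HTTP client; only option names shown in AIEnrichmentOptions are used.
declare function callMyModel(prompt: string): Promise<string>; // placeholder

const provider = createSimpleProvider(
  'my-local-model',
  (prompt) => callMyModel(prompt),
  () => Boolean(process.env.MY_MODEL_URL),
);

const enrichment = createAIEnrichmentPlugin(provider, {
  tasks: ['descriptions', 'examples'],
  batchSize: 5,
  skipExisting: true,
});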
+ /**
+ * Claude API configuration
+ */
+ interface ClaudeConfig {
+ /** API key (or set ANTHROPIC_API_KEY env var) */
+ apiKey?: string;
+ /** Model to use */
+ model?: 'claude-opus-4-5-20251101' | 'claude-sonnet-4-5-20250929' | 'claude-haiku-4-5-20251001' | 'claude-opus-4-5' | 'claude-sonnet-4-5' | 'claude-haiku-4-5' | string;
+ /** API base URL */
+ baseUrl?: string;
+ /** Request timeout in ms */
+ timeout?: number;
+ }
+ /**
+ * Create Claude AI provider
+ * @param config - Claude configuration
+ * @returns AI provider instance
+ */
+ declare function createClaudeProvider(config?: ClaudeConfig): AIProvider;
+ /**
+ * Create Claude AI enrichment plugin
+ * @param config - Claude configuration
+ * @param enrichmentOptions - Enrichment options
+ * @returns Plugin instance
+ */
+ declare function createClaudePlugin(config?: ClaudeConfig, enrichmentOptions?: Partial<AIEnrichmentOptions>): Plugin<ExtractorContext>;
+
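// --- Usage sketch (editorial example, not part of the published declarations) ---
// Claude enrichment with an explicit model and a small task list. With no apiKey
// in the config, the provider falls back to the ANTHROPIC_API_KEY env var as
// documented in ClaudeConfig above.
const claudePlugin = createClaudePlugin(
  { model: 'claude-sonnet-4-5', timeout: 30_000 },
  { tasks: ['descriptions', 'summary'], temperature: 0.2 },
);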
+ /**
+ * OpenAI-compatible provider presets
+ */
+ declare const OPENAI_COMPATIBLE_PRESETS: {
+ readonly openai: {
+ readonly name: "openai";
+ readonly baseUrl: "https://api.openai.com/v1";
+ readonly envKey: "OPENAI_API_KEY";
+ readonly defaultModel: "gpt-4.1-nano";
+ };
+ readonly xai: {
+ readonly name: "xai";
+ readonly baseUrl: "https://api.x.ai/v1";
+ readonly envKey: "XAI_API_KEY";
+ readonly defaultModel: "grok-3-mini-fast";
+ };
+ readonly zai: {
+ readonly name: "zai";
+ readonly baseUrl: "https://api.z.ai/api/paas/v4";
+ readonly envKey: "ZAI_API_KEY";
+ readonly defaultModel: "glm-4.7";
+ };
+ readonly together: {
+ readonly name: "together";
+ readonly baseUrl: "https://api.together.xyz/v1";
+ readonly envKey: "TOGETHER_API_KEY";
+ readonly defaultModel: "meta-llama/Llama-3.3-70B-Instruct-Turbo";
+ };
+ readonly perplexity: {
+ readonly name: "perplexity";
+ readonly baseUrl: "https://api.perplexity.ai";
+ readonly envKey: "PERPLEXITY_API_KEY";
+ readonly defaultModel: "sonar-pro";
+ };
+ readonly openrouter: {
+ readonly name: "openrouter";
+ readonly baseUrl: "https://openrouter.ai/api/v1";
+ readonly envKey: "OPENROUTER_API_KEY";
+ readonly defaultModel: "anthropic/claude-3.5-sonnet";
+ };
+ readonly deepseek: {
+ readonly name: "deepseek";
+ readonly baseUrl: "https://api.deepseek.com/v1";
+ readonly envKey: "DEEPSEEK_API_KEY";
+ readonly defaultModel: "deepseek-chat";
+ };
+ readonly mistral: {
+ readonly name: "mistral";
+ readonly baseUrl: "https://api.mistral.ai/v1";
+ readonly envKey: "MISTRAL_API_KEY";
+ readonly defaultModel: "mistral-small-latest";
+ };
+ };
+ type OpenAICompatiblePreset = keyof typeof OPENAI_COMPATIBLE_PRESETS;
+ /**
+ * OpenAI API configuration
+ */
+ interface OpenAIConfig {
+ /** API key (or set OPENAI_API_KEY env var) */
+ apiKey?: string;
+ /** Model to use */
+ model?: 'gpt-4.1' | 'gpt-4.1-mini' | 'gpt-4.1-nano' | 'gpt-4o' | 'gpt-4o-mini' | 'o4-mini' | 'o3' | 'o3-mini' | 'o3-pro' | 'o1' | 'o1-pro' | string;
+ /** API base URL (with /v1 suffix for chat/completions endpoint) */
+ baseUrl?: string;
+ /** Request timeout in ms */
+ timeout?: number;
+ /** Organization ID (OpenAI only) */
+ organization?: string;
+ /** Provider preset for OpenAI-compatible APIs */
+ preset?: OpenAICompatiblePreset;
+ }
+ /**
+ * Create OpenAI provider
+ * @param config - OpenAI configuration
+ * @returns AI provider instance
+ */
+ declare function createOpenAIProvider(config?: OpenAIConfig): AIProvider;
+ /**
+ * Create OpenAI enrichment plugin
+ * @param config - OpenAI configuration
+ * @param enrichmentOptions - Enrichment options
+ * @returns Plugin instance
+ */
+ declare function createOpenAIPlugin(config?: OpenAIConfig, enrichmentOptions?: Partial<AIEnrichmentOptions>): Plugin<ExtractorContext>;
+
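// --- Usage sketch (editorial example, not part of the published declarations) ---
// The preset field points the provider at an OpenAI-compatible API. Per the
// OPENAI_COMPATIBLE_PRESETS table above, 'deepseek' reads DEEPSEEK_API_KEY and
// defaults to the 'deepseek-chat' model unless one is given explicitly.
const deepseekPlugin = createOpenAIPlugin(
  { preset: 'deepseek' },
  { tasks: ['descriptions'], maxTokensPerRequest: 1024 },
);

// Plain OpenAI works the same way without a preset; pin a model if desired.
const openaiPlugin = createOpenAIPlugin({ model: 'gpt-4.1-mini' });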
+ /**
+ * Gemini AI plugin for @oxog/npm-llms
+ * Uses Google's Gemini API for documentation enrichment
+ * @module plugins/optional/gemini-ai
+ */
+
+ /**
+ * Gemini configuration options
+ */
+ interface GeminiConfig {
+ /** Gemini API key (defaults to GOOGLE_API_KEY or GEMINI_API_KEY env var) */
+ apiKey?: string;
+ /** Model to use */
+ model?: 'gemini-3-pro-preview' | 'gemini-3-pro-image-preview' | 'gemini-3-flash-preview' | 'gemini-2.5-flash-image' | 'gemini-2.5-pro' | 'gemini-2.5-flash' | 'gemini-2.5-flash-lite' | 'gemini-2.0-flash' | 'gemini-2.0-flash-lite' | 'gemini-flash-latest' | 'gemini-flash-lite-latest' | string;
+ /** API base URL */
+ baseUrl?: string;
+ /** Request timeout in ms */
+ timeout?: number;
+ /** Maximum tokens to generate */
+ maxTokens?: number;
+ /** Temperature for generation */
+ temperature?: number;
+ }
+ /**
+ * Create a Gemini AI provider
+ * @param config - Gemini configuration
+ * @returns AI provider instance
+ */
+ declare function createGeminiProvider(config?: GeminiConfig): AIProvider;
+ /**
+ * Create a Gemini enrichment plugin
+ * @param config - Gemini configuration
+ * @param enrichmentOptions - Enrichment options
+ * @returns Gemini plugin
+ */
+ declare function createGeminiPlugin(config?: GeminiConfig, enrichmentOptions?: Partial<AIEnrichmentOptions>): Plugin;
+ /**
+ * Check if Gemini is available (API key is set)
+ * @returns True if Gemini is available
+ */
+ declare function checkGeminiAvailable(): boolean;
+
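// --- Usage sketch (editorial example, not part of the published declarations) ---
// checkGeminiAvailable() only reports whether an API key is set (GOOGLE_API_KEY
// or GEMINI_API_KEY), so it can gate whether the plugin is added at all.
const geminiPlugins = checkGeminiAvailable()
  ? [createGeminiPlugin({ model: 'gemini-2.5-flash' }, { tasks: ['examples'] })]
  : [];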
+ /**
+ * Ollama configuration
+ */
+ interface OllamaConfig {
+ /** Model to use */
+ model?: string;
+ /** Ollama API base URL */
+ baseUrl?: string;
+ /** Request timeout in ms */
+ timeout?: number;
+ }
+ /**
+ * Create Ollama provider
+ * @param config - Ollama configuration
+ * @returns AI provider instance
+ */
+ declare function createOllamaProvider(config?: OllamaConfig): AIProvider;
+ /**
+ * Create Ollama enrichment plugin
+ * @param config - Ollama configuration
+ * @param enrichmentOptions - Enrichment options
+ * @returns Plugin instance
+ */
+ declare function createOllamaPlugin(config?: OllamaConfig, enrichmentOptions?: Partial<AIEnrichmentOptions>): Plugin<ExtractorContext>;
+ /**
+ * Check if Ollama is available
+ * @param baseUrl - Ollama base URL
+ * @returns True if Ollama is running
+ */
+ declare function checkOllamaAvailable(baseUrl?: string): Promise<boolean>;
+ /**
+ * List available Ollama models
+ * @param baseUrl - Ollama base URL
+ * @returns List of model names
+ */
+ declare function listOllamaModels(baseUrl?: string): Promise<string[]>;
+
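// --- Usage sketch (editorial example, not part of the published declarations) ---
// Probe a local Ollama daemon before wiring it in and pick an installed model.
// Assumes an async context; the URL is Ollama's conventional localhost port,
// since this file does not state the default baseUrl.
const ollamaUrl = 'http://localhost:11434';
if (await checkOllamaAvailable(ollamaUrl)) {
  const [firstModel] = await listOllamaModels(ollamaUrl);
  const ollamaPlugin = createOllamaPlugin(
    { model: firstModel, baseUrl: ollamaUrl },
    { tasks: ['descriptions'] },
  );
}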
+ /**
+ * Groq API configuration
+ */
+ interface GroqConfig {
+ /** API key (or set GROQ_API_KEY env var) */
+ apiKey?: string;
+ /** Model to use */
+ model?: 'llama-3.3-70b-versatile' | 'llama-3.1-8b-instant' | 'meta-llama/llama-4-maverick-17b-128e-instruct' | 'meta-llama/llama-4-scout-17b-16e-instruct' | 'qwen/qwen3-32b' | 'moonshotai/kimi-k2-instruct-0905' | 'groq/compound' | 'groq/compound-mini' | 'openai/gpt-oss-120b' | 'openai/gpt-oss-20b' | string;
+ /** API base URL */
+ baseUrl?: string;
+ /** Request timeout in ms */
+ timeout?: number;
+ }
+ /**
+ * Create Groq provider
+ * @param config - Groq configuration
+ * @returns AI provider instance
+ */
+ declare function createGroqProvider(config?: GroqConfig): AIProvider;
+ /**
+ * Create Groq enrichment plugin
+ * @param config - Groq configuration
+ * @param enrichmentOptions - Enrichment options
+ * @returns Plugin instance
+ */
+ declare function createGroqPlugin(config?: GroqConfig, enrichmentOptions?: Partial<AIEnrichmentOptions>): Plugin<ExtractorContext>;
+
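// --- Usage sketch (editorial example, not part of the published declarations) ---
// Groq enrichment with one of the models listed in GroqConfig; the key comes
// from GROQ_API_KEY when apiKey is omitted.
const groqPlugin = createGroqPlugin(
  { model: 'llama-3.3-70b-versatile' },
  { tasks: ['descriptions', 'returns'] },
);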
+ /**
+ * Changelog parser plugin for @oxog/npm-llms
+ * Parses CHANGELOG.md files in Keep a Changelog format
+ * @module plugins/optional/changelog-parser
+ */
+
+ /**
+ * Parse a changelog file into structured data
+ * @param content - Raw changelog content
+ * @returns Parsed changelog structure
+ */
+ declare function parseChangelog(content: string): ParsedChangelog;
+ /**
+ * Find changelog file in package files
+ * @param files - Map of file paths to content
+ * @returns Changelog content or undefined
+ */
+ declare function findChangelog(files: Map<string, string>): string | undefined;
+ /**
+ * Get latest version from changelog
+ * @param changelog - Parsed changelog
+ * @returns Latest version entry or undefined
+ */
+ declare function getLatestVersion(changelog: ParsedChangelog): ChangelogEntry | undefined;
+ /**
+ * Get version by number
+ * @param changelog - Parsed changelog
+ * @param version - Version number to find
+ * @returns Version entry or undefined
+ */
+ declare function getVersion(changelog: ParsedChangelog, version: string): ChangelogEntry | undefined;
+ /**
+ * Format changelog entry as text
+ * @param entry - Changelog entry
+ * @returns Formatted text
+ */
+ declare function formatChangelogEntry(entry: ChangelogEntry): string;
+ /**
+ * Create changelog parser plugin
+ * @returns Changelog parser plugin
+ */
+ declare function createChangelogParserPlugin(): Plugin;
+
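// --- Usage sketch (editorial example, not part of the published declarations) ---
// The changelog helpers also work outside the plugin pipeline. `packageFiles`
// is a hypothetical Map of file paths to file contents obtained elsewhere.
declare const packageFiles: Map<string, string>;

const rawChangelog = findChangelog(packageFiles);
if (rawChangelog) {
  const changelog = parseChangelog(rawChangelog);
  const latest = getLatestVersion(changelog);
  if (latest) {
    console.log(formatChangelogEntry(latest));
  }
}

// Or register the plugin and let it run during extraction:
const changelogPlugin = createChangelogParserPlugin();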
+ /**
+ * HTML output plugin for @oxog/npm-llms
+ * Generates HTML documentation from API entries
+ * @module plugins/optional/html-output
+ */
+
+ /**
+ * HTML output options
+ */
+ interface HTMLOutputOptions {
+ /** Page title (uses package name if not provided) */
+ title?: string;
+ /** Include inline styles */
+ includeStyles?: boolean;
+ /** Include syntax highlighting CSS classes */
+ syntaxHighlighting?: boolean;
+ /** Dark mode support */
+ darkMode?: boolean;
+ /** Custom CSS to inject */
+ customCSS?: string;
+ }
+ /**
+ * Generate full HTML document
+ * @param ctx - Extractor context
+ * @param options - HTML options
+ * @returns HTML string
+ */
+ declare function generateHTML(ctx: ExtractorContext, options?: HTMLOutputOptions): string;
+ /**
+ * Create HTML output plugin
+ * @param options - HTML options
+ * @returns HTML output plugin
+ */
+ declare function createHTMLOutputPlugin(options?: HTMLOutputOptions): Plugin;
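// --- Usage sketch (editorial example, not part of the published declarations) ---
// HTML output with dark mode and a small custom stylesheet. generateHTML(ctx)
// can also be called directly if an ExtractorContext is already in hand.
const htmlPlugin = createHTMLOutputPlugin({
  title: 'My Package API',
  includeStyles: true,
  darkMode: true,
  customCSS: 'code { font-size: 0.9rem; }',
});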
+
+ export { type AIEnrichmentOptions, type AIEnrichmentTask, AIProvider, type ClaudeConfig, CompletionOptions, ExtractorContext, type GeminiConfig, type GroqConfig, type HTMLOutputOptions, type OllamaConfig, type OpenAIConfig, Plugin, checkGeminiAvailable, checkOllamaAvailable, coreOutputPlugins, coreParserPlugins, corePlugins, createAIEnrichmentPlugin, createChangelogParserPlugin, createClaudePlugin, createClaudeProvider, createGeminiPlugin, createGeminiProvider, createGroqPlugin, createGroqProvider, createHTMLOutputPlugin, createOllamaPlugin, createOllamaProvider, createOpenAIPlugin, createOpenAIProvider, createSimpleProvider, dtsParserPlugin, findChangelog, formatChangelogEntry, generateHTML, getLatestVersion, getVersion, jsonOutputPlugin, listOllamaModels, llmsFullOutputPlugin, llmsOutputPlugin, markdownOutputPlugin, parseChangelog, readmeParserPlugin, tsSourceParserPlugin, typesResolverPlugin };