skilld 0.15.2 → 0.15.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/_chunks/{detect-imports.mjs → agent.mjs} +42 -6
- package/dist/_chunks/agent.mjs.map +1 -0
- package/dist/_chunks/{storage.mjs → cache.mjs} +81 -1
- package/dist/_chunks/cache.mjs.map +1 -0
- package/dist/_chunks/cache2.mjs +6 -0
- package/dist/_chunks/config.mjs +23 -0
- package/dist/_chunks/config.mjs.map +1 -1
- package/dist/_chunks/config2.mjs +12 -0
- package/dist/_chunks/{embedding-cache2.mjs → embedding-cache.mjs} +1 -1
- package/dist/_chunks/embedding-cache.mjs.map +1 -0
- package/dist/_chunks/formatting.mjs +86 -0
- package/dist/_chunks/formatting.mjs.map +1 -0
- package/dist/_chunks/{version.d.mts → index.d.mts} +1 -1
- package/dist/_chunks/index.d.mts.map +1 -0
- package/dist/_chunks/{utils.d.mts → index2.d.mts} +1 -1
- package/dist/_chunks/index2.d.mts.map +1 -0
- package/dist/_chunks/install.mjs +15 -0
- package/dist/_chunks/list.mjs +13 -0
- package/dist/_chunks/markdown.mjs +7 -0
- package/dist/_chunks/markdown.mjs.map +1 -1
- package/dist/_chunks/{pool2.mjs → pool.mjs} +1 -1
- package/dist/_chunks/pool.mjs.map +1 -0
- package/dist/_chunks/prompts.mjs +232 -0
- package/dist/_chunks/prompts.mjs.map +1 -1
- package/dist/_chunks/remove.mjs +12 -0
- package/dist/_chunks/sanitize.mjs +71 -0
- package/dist/_chunks/sanitize.mjs.map +1 -1
- package/dist/_chunks/search-interactive.mjs +14 -0
- package/dist/_chunks/search-interactive2.mjs +236 -0
- package/dist/_chunks/search-interactive2.mjs.map +1 -0
- package/dist/_chunks/search.mjs +171 -0
- package/dist/_chunks/search.mjs.map +1 -0
- package/dist/_chunks/search2.mjs +13 -0
- package/dist/_chunks/shared.mjs +4 -0
- package/dist/_chunks/shared.mjs.map +1 -1
- package/dist/_chunks/skills.mjs +552 -0
- package/dist/_chunks/skills.mjs.map +1 -0
- package/dist/_chunks/{npm.mjs → sources.mjs} +401 -4
- package/dist/_chunks/sources.mjs.map +1 -0
- package/dist/_chunks/status.mjs +13 -0
- package/dist/_chunks/sync.mjs +2026 -0
- package/dist/_chunks/sync.mjs.map +1 -0
- package/dist/_chunks/sync2.mjs +14 -0
- package/dist/_chunks/uninstall.mjs +15 -0
- package/dist/_chunks/validate.mjs +3 -0
- package/dist/_chunks/validate.mjs.map +1 -1
- package/dist/_chunks/yaml.mjs +19 -0
- package/dist/_chunks/yaml.mjs.map +1 -1
- package/dist/agent/index.d.mts +1 -1
- package/dist/agent/index.mjs +4 -3
- package/dist/cache/index.d.mts +2 -2
- package/dist/cache/index.mjs +2 -1
- package/dist/cli.mjs +173 -3082
- package/dist/cli.mjs.map +1 -1
- package/dist/index.d.mts +2 -3
- package/dist/index.mjs +4 -4
- package/dist/retriv/index.d.mts.map +1 -1
- package/dist/retriv/index.mjs +26 -5
- package/dist/retriv/index.mjs.map +1 -1
- package/dist/retriv/worker.mjs +3 -3
- package/dist/sources/index.d.mts +2 -2
- package/dist/sources/index.mjs +2 -1
- package/dist/types.d.mts +2 -3
- package/package.json +10 -10
- package/dist/_chunks/detect-imports.mjs.map +0 -1
- package/dist/_chunks/embedding-cache2.mjs.map +0 -1
- package/dist/_chunks/npm.mjs.map +0 -1
- package/dist/_chunks/pool2.mjs.map +0 -1
- package/dist/_chunks/storage.mjs.map +0 -1
- package/dist/_chunks/utils.d.mts.map +0 -1
- package/dist/_chunks/version.d.mts.map +0 -1
package/dist/index.d.mts
CHANGED
@@ -1,6 +1,5 @@
-import { C as writeToCache, D as CachedPackage, E as CachedDoc, O as CACHE_DIR, T as CacheConfig, _ as listCached, a as clearAllCache, d as isCached, k as REFERENCES_DIR, n as getCacheKey, o as clearCache, r as getVersionKey, s as ensureCacheDir, t as getCacheDir, y as readCachedDocs } from "./_chunks/
+import { C as writeToCache, D as CachedPackage, E as CachedDoc, O as CACHE_DIR, T as CacheConfig, _ as listCached, a as clearAllCache, d as isCached, k as REFERENCES_DIR, n as getCacheKey, o as clearCache, r as getVersionKey, s as ensureCacheDir, t as getCacheDir, y as readCachedDocs } from "./_chunks/index.mjs";
 import { c as SearchResult, l as SearchSnippet, n as Document, o as SearchFilter, r as IndexConfig, s as SearchOptions } from "./_chunks/types.mjs";
 import { createIndex, search, searchSnippets } from "./retriv/index.mjs";
-import { J as normalizeLlmsLinks, K as fetchLlmsTxt, L as readLocalDependencies, M as fetchNpmPackage, V as resolvePackageDocs, W as downloadLlmsDocs, Y as parseMarkdownLinks, dt as FetchedDoc, ft as LlmsContent, ht as NpmPackageInfo, mt as LocalDependency, ot as fetchReadmeContent, pt as LlmsLink, vt as ResolvedPackage } from "./_chunks/
-import "./sources/index.mjs";
+import { J as normalizeLlmsLinks, K as fetchLlmsTxt, L as readLocalDependencies, M as fetchNpmPackage, V as resolvePackageDocs, W as downloadLlmsDocs, Y as parseMarkdownLinks, dt as FetchedDoc, ft as LlmsContent, ht as NpmPackageInfo, mt as LocalDependency, ot as fetchReadmeContent, pt as LlmsLink, vt as ResolvedPackage } from "./_chunks/index2.mjs";
 export { CACHE_DIR, type CacheConfig, type CachedDoc, type CachedPackage, type Document, type FetchedDoc, type IndexConfig, type LlmsContent, type LlmsLink, type LocalDependency, type NpmPackageInfo, REFERENCES_DIR, type ResolvedPackage, type SearchFilter, type SearchOptions, type SearchResult, type SearchSnippet, clearAllCache, clearCache, createIndex, downloadLlmsDocs, ensureCacheDir, fetchLlmsTxt, fetchNpmPackage, fetchReadmeContent, getCacheDir, getCacheKey, getVersionKey, isCached, listCached, normalizeLlmsLinks, parseMarkdownLinks, readCachedDocs, readLocalDependencies, resolvePackageDocs, search, searchSnippets, writeToCache };
package/dist/index.mjs
CHANGED
@@ -1,9 +1,9 @@
 import { c as getVersionKey, n as REFERENCES_DIR, o as getCacheDir, s as getCacheKey, t as CACHE_DIR } from "./_chunks/config.mjs";
-import
-import "./cache
+import "./_chunks/sanitize.mjs";
+import { h as readCachedDocs, n as clearCache, p as listCached, r as ensureCacheDir, s as isCached, t as clearAllCache, y as writeToCache } from "./_chunks/cache.mjs";
+import "./_chunks/yaml.mjs";
 import "./_chunks/markdown.mjs";
 import { createIndex, search, searchSnippets } from "./retriv/index.mjs";
 import "./_chunks/shared.mjs";
-import { C as downloadLlmsDocs, D as normalizeLlmsLinks, O as parseMarkdownLinks, T as fetchLlmsTxt, d as resolvePackageDocs, n as fetchNpmPackage, s as readLocalDependencies, v as fetchReadmeContent } from "./_chunks/
-import "./sources/index.mjs";
+import { C as downloadLlmsDocs, D as normalizeLlmsLinks, O as parseMarkdownLinks, T as fetchLlmsTxt, d as resolvePackageDocs, n as fetchNpmPackage, s as readLocalDependencies, v as fetchReadmeContent } from "./_chunks/sources.mjs";
 export { CACHE_DIR, REFERENCES_DIR, clearAllCache, clearCache, createIndex, downloadLlmsDocs, ensureCacheDir, fetchLlmsTxt, fetchNpmPackage, fetchReadmeContent, getCacheDir, getCacheKey, getVersionKey, isCached, listCached, normalizeLlmsLinks, parseMarkdownLinks, readCachedDocs, readLocalDependencies, resolvePackageDocs, search, searchSnippets, writeToCache };
package/dist/retriv/index.d.mts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.mts","names":[],"sources":["../../src/retriv/index.ts"],"mappings":";;;;KAKK,cAAA,GAAiB,OAAA,CAAQ,UAAA,QAAkB,KAAA;AAAA,iBAGjC,KAAA,CAAM,MAAA,EAAQ,IAAA,CAAK,WAAA,cAAsB,OAAA,CAAvB,MAAA,CAAuB,cAAA;;;;;;;iBA4BlC,iBAAA,CACpB,SAAA,EAAW,QAAA,IACX,MAAA,EAAQ,WAAA,GACP,OAAA;;;;;iBAUmB,WAAA,CACpB,SAAA,EAAW,QAAA,IACX,MAAA,EAAQ,WAAA,GACP,OAAA;AAAA,iBAMmB,MAAA,CACpB,KAAA,UACA,MAAA,EAAQ,WAAA,EACR,OAAA,GAAS,aAAA,GACR,OAAA,CAAQ,YAAA;AAzD0C;;;AAAA,iBA8E/B,cAAA,CACpB,KAAA,UACA,MAAA,EAAQ,WAAA,EACR,OAAA,GAAS,aAAA,GACR,OAAA,CAAQ,aAAA;AAAA,iBA2BW,QAAA,CAAS,OAAA,aAAoB,OAAA,CAAQ,GAAA,SAAY,cAAA;AAAA,iBASjD,YAAA,CACpB,KAAA,UACA,IAAA,EAAM,GAAA,SAAY,cAAA,GAClB,OAAA,GAAS,aAAA,GACR,OAAA,CAAQ,aAAA;AAAA,
+
{"version":3,"file":"index.d.mts","names":[],"sources":["../../src/retriv/index.ts"],"mappings":";;;;KAKK,cAAA,GAAiB,OAAA,CAAQ,UAAA,QAAkB,KAAA;AAAA,iBAGjC,KAAA,CAAM,MAAA,EAAQ,IAAA,CAAK,WAAA,cAAsB,OAAA,CAAvB,MAAA,CAAuB,cAAA;;;;;;;iBA4BlC,iBAAA,CACpB,SAAA,EAAW,QAAA,IACX,MAAA,EAAQ,WAAA,GACP,OAAA;;;;;iBAUmB,WAAA,CACpB,SAAA,EAAW,QAAA,IACX,MAAA,EAAQ,WAAA,GACP,OAAA;AAAA,iBAMmB,MAAA,CACpB,KAAA,UACA,MAAA,EAAQ,WAAA,EACR,OAAA,GAAS,aAAA,GACR,OAAA,CAAQ,YAAA;AAzD0C;;;AAAA,iBA8E/B,cAAA,CACpB,KAAA,UACA,MAAA,EAAQ,WAAA,EACR,OAAA,GAAS,aAAA,GACR,OAAA,CAAQ,aAAA;AAAA,iBA2BW,QAAA,CAAS,OAAA,aAAoB,OAAA,CAAQ,GAAA,SAAY,cAAA;AAAA,iBASjD,YAAA,CACpB,KAAA,UACA,IAAA,EAAM,GAAA,SAAY,cAAA,GAClB,OAAA,GAAS,aAAA,GACR,OAAA,CAAQ,aAAA;AAAA,iBAkCW,SAAA,CAAU,IAAA,EAAM,GAAA,SAAY,cAAA,IAAkB,OAAA"}
package/dist/retriv/index.mjs
CHANGED
@@ -1,3 +1,4 @@
+import "../_chunks/yaml.mjs";
 import { a as stripFrontmatter } from "../_chunks/markdown.mjs";
 async function getDb(config) {
 const [{ createRetriv }, { autoChunker }, sqliteMod, { transformersJs }, { cachedEmbeddings }] = await Promise.all([
@@ -5,7 +6,7 @@ async function getDb(config) {
 import("retriv/chunkers/auto"),
 import("retriv/db/sqlite"),
 import("retriv/embeddings/transformers-js"),
-import("../_chunks/embedding-
+import("../_chunks/embedding-cache.mjs").then((n) => n.r)
 ]);
 const embeddings = await cachedEmbeddings(transformersJs());
 return createRetriv({
@@ -16,13 +17,21 @@ async function getDb(config) {
 chunking: autoChunker()
 });
 }
+/**
+* Index documents in-process (no worker thread).
+* Preferred for tests and environments where worker_threads is unreliable.
+*/
 async function createIndexDirect(documents, config) {
 const db = await getDb(config);
 await db.index(documents, { onProgress: config.onProgress });
 await db.close?.();
 }
+/**
+* Index documents in a background worker thread.
+* Falls back to direct indexing if worker fails to spawn.
+*/
 async function createIndex(documents, config) {
-const { createIndexInWorker } = await import("../_chunks/
+const { createIndexInWorker } = await import("../_chunks/pool.mjs").then((n) => n.t);
 return createIndexInWorker(documents, config);
 }
 async function search(query, config, options = {}) {
@@ -47,6 +56,9 @@ async function search(query, config, options = {}) {
 scope: r._chunk?.scope
 }));
 }
+/**
+* Search and return formatted snippets
+*/
 async function searchSnippets(query, config, options = {}) {
 return toSnippets(await search(query, config, options));
 }
@@ -78,9 +90,10 @@ async function openPool(dbPaths) {
 }
 async function searchPooled(query, pool, options = {}) {
 const { limit = 10, filter } = options;
-
+const fetchLimit = limit * 2;
+const allResults = await Promise.all([...pool.values()].map(async (db) => {
 return (await db.search(query, {
-limit,
+limit: fetchLimit,
 filter,
 returnContent: true,
 returnMetadata: true,
@@ -95,7 +108,15 @@ async function searchPooled(query, pool, options = {}) {
 entities: r._chunk?.entities,
 scope: r._chunk?.scope
 }));
-}))
+}));
+const seen = /* @__PURE__ */ new Set();
+return toSnippets(allResults.flat().sort((a, b) => b.score - a.score).filter((r) => {
+const lr = r.lineRange;
+const key = `${r.metadata.source || r.id}:${lr?.[0]}-${lr?.[1]}`;
+if (seen.has(key)) return false;
+seen.add(key);
+return true;
+}).slice(0, limit));
 }
 async function closePool(pool) {
 await Promise.all([...pool.values()].map((db) => db.close?.()));
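The behavioral change in `searchPooled` above: each pooled database is now queried with double the requested limit, and the merged results are deduplicated by source file plus line range before being cut back down to `limit`. A minimal sketch of that merge step, with the package's `SearchResult` type reduced to just the fields this logic touches:

```ts
// Simplified stand-in for the package's SearchResult type.
interface SearchResult {
  id: string
  score: number
  metadata: { source?: string }
  lineRange?: [number, number]
}

// Merge per-database result lists: highest score first, drop chunks
// that duplicate an already-seen source + line range, keep `limit`.
function mergeResults(allResults: SearchResult[][], limit: number): SearchResult[] {
  const seen = new Set<string>()
  return allResults
    .flat()
    .sort((a, b) => b.score - a.score)
    .filter((r) => {
      const lr = r.lineRange
      const key = `${r.metadata.source || r.id}:${lr?.[0]}-${lr?.[1]}`
      if (seen.has(key))
        return false
      seen.add(key)
      return true
    })
    .slice(0, limit)
}
```

The over-fetch (`fetchLimit = limit * 2`) compensates for results lost to deduplication, so the final slice can still fill the requested `limit`.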
package/dist/retriv/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.mjs","names":[],"sources":["../../src/retriv/index.ts"],"sourcesContent":["import type { ChunkEntity, Document, IndexConfig, IndexPhase, IndexProgress, SearchFilter, SearchOptions, SearchResult, SearchSnippet } from './types.ts'\nimport { stripFrontmatter } from '../core/markdown.ts'\n\nexport type { ChunkEntity, Document, IndexConfig, IndexPhase, IndexProgress, SearchFilter, SearchOptions, SearchResult, SearchSnippet }\n\ntype RetrivInstance = Awaited<ReturnType<typeof getDb>>\n\n// Dynamic imports: retriv/chunkers/auto eagerly loads typescript which may not be installed (e.g. npx)\nasync function getDb(config: Pick<IndexConfig, 'dbPath'>) {\n const [\n { createRetriv },\n { autoChunker },\n sqliteMod,\n { transformersJs },\n { cachedEmbeddings },\n ] = await Promise.all([\n import('retriv'),\n import('retriv/chunkers/auto'),\n import('retriv/db/sqlite'),\n import('retriv/embeddings/transformers-js'),\n import('./embedding-cache.ts'),\n ])\n const embeddings = await cachedEmbeddings(transformersJs())\n return createRetriv({\n driver: sqliteMod.default({\n path: config.dbPath,\n embeddings,\n }),\n chunking: autoChunker(),\n })\n}\n\n/**\n * Index documents in-process (no worker thread).\n * Preferred for tests and environments where worker_threads is unreliable.\n */\nexport async function createIndexDirect(\n documents: Document[],\n config: IndexConfig,\n): Promise<void> {\n const db = await getDb(config)\n await db.index(documents, { onProgress: config.onProgress })\n await db.close?.()\n}\n\n/**\n * Index documents in a background worker thread.\n * Falls back to direct indexing if worker fails to spawn.\n */\nexport async function createIndex(\n documents: Document[],\n config: IndexConfig,\n): Promise<void> {\n // Dynamic import justified: search/searchSnippets shouldn't pull in worker_threads\n const { createIndexInWorker } = await import('./pool.ts')\n return createIndexInWorker(documents, config)\n}\n\nexport async function search(\n query: string,\n config: IndexConfig,\n options: SearchOptions = {},\n): Promise<SearchResult[]> {\n const { limit = 10, filter } = options\n const db = await getDb(config)\n const results = await db.search(query, { limit, filter, returnContent: true, returnMetadata: true, returnMeta: true })\n await db.close?.()\n\n return results.map(r => ({\n id: r.id,\n content: r.content ?? '',\n score: r.score,\n metadata: r.metadata ?? {},\n highlights: r._meta?.highlights ?? [],\n lineRange: r._chunk?.lineRange,\n entities: r._chunk?.entities,\n scope: r._chunk?.scope,\n }))\n}\n\n/**\n * Search and return formatted snippets\n */\nexport async function searchSnippets(\n query: string,\n config: IndexConfig,\n options: SearchOptions = {},\n): Promise<SearchSnippet[]> {\n const results = await search(query, config, options)\n return toSnippets(results)\n}\n\nfunction toSnippets(results: SearchResult[]): SearchSnippet[] {\n return results.map((r) => {\n const content = stripFrontmatter(r.content)\n const source = r.metadata.source || r.id\n const lines = content.split('\\n').length\n\n return {\n package: r.metadata.package || 'unknown',\n source,\n lineStart: r.lineRange?.[0] ?? 1,\n lineEnd: r.lineRange?.[1] ?? 
lines,\n content,\n score: r.score,\n highlights: r.highlights,\n entities: r.entities,\n scope: r.scope,\n }\n })\n}\n\n// ── Pooled DB access for interactive search ──\n\nexport async function openPool(dbPaths: string[]): Promise<Map<string, RetrivInstance>> {\n const pool = new Map<string, RetrivInstance>()\n await Promise.all(dbPaths.map(async (dbPath) => {\n const db = await getDb({ dbPath })\n pool.set(dbPath, db)\n }))\n return pool\n}\n\nexport async function searchPooled(\n query: string,\n pool: Map<string, RetrivInstance>,\n options: SearchOptions = {},\n): Promise<SearchSnippet[]> {\n const { limit = 10, filter } = options\n const allResults = await Promise.all(\n [...pool.values()].map(async (db) => {\n const results = await db.search(query, { limit, filter, returnContent: true, returnMetadata: true, returnMeta: true })\n return results.map(r => ({\n id: r.id,\n content: r.content ?? '',\n score: r.score,\n metadata: r.metadata ?? {},\n highlights: r._meta?.highlights ?? [],\n lineRange: r._chunk?.lineRange as [number, number] | undefined,\n entities: r._chunk?.entities,\n scope: r._chunk?.scope,\n }))\n }),\n )\n const merged = allResults.flat().sort((a, b) => b.score - a.score).slice(0, limit)\n return toSnippets(merged)\n}\n\nexport async function closePool(pool: Map<string, RetrivInstance>): Promise<void> {\n await Promise.all([...pool.values()].map(db => db.close?.()))\n pool.clear()\n}\n"],"mappings":"
+
{"version":3,"file":"index.mjs","names":[],"sources":["../../src/retriv/index.ts"],"sourcesContent":["import type { ChunkEntity, Document, IndexConfig, IndexPhase, IndexProgress, SearchFilter, SearchOptions, SearchResult, SearchSnippet } from './types.ts'\nimport { stripFrontmatter } from '../core/markdown.ts'\n\nexport type { ChunkEntity, Document, IndexConfig, IndexPhase, IndexProgress, SearchFilter, SearchOptions, SearchResult, SearchSnippet }\n\ntype RetrivInstance = Awaited<ReturnType<typeof getDb>>\n\n// Dynamic imports: retriv/chunkers/auto eagerly loads typescript which may not be installed (e.g. npx)\nasync function getDb(config: Pick<IndexConfig, 'dbPath'>) {\n const [\n { createRetriv },\n { autoChunker },\n sqliteMod,\n { transformersJs },\n { cachedEmbeddings },\n ] = await Promise.all([\n import('retriv'),\n import('retriv/chunkers/auto'),\n import('retriv/db/sqlite'),\n import('retriv/embeddings/transformers-js'),\n import('./embedding-cache.ts'),\n ])\n const embeddings = await cachedEmbeddings(transformersJs())\n return createRetriv({\n driver: sqliteMod.default({\n path: config.dbPath,\n embeddings,\n }),\n chunking: autoChunker(),\n })\n}\n\n/**\n * Index documents in-process (no worker thread).\n * Preferred for tests and environments where worker_threads is unreliable.\n */\nexport async function createIndexDirect(\n documents: Document[],\n config: IndexConfig,\n): Promise<void> {\n const db = await getDb(config)\n await db.index(documents, { onProgress: config.onProgress })\n await db.close?.()\n}\n\n/**\n * Index documents in a background worker thread.\n * Falls back to direct indexing if worker fails to spawn.\n */\nexport async function createIndex(\n documents: Document[],\n config: IndexConfig,\n): Promise<void> {\n // Dynamic import justified: search/searchSnippets shouldn't pull in worker_threads\n const { createIndexInWorker } = await import('./pool.ts')\n return createIndexInWorker(documents, config)\n}\n\nexport async function search(\n query: string,\n config: IndexConfig,\n options: SearchOptions = {},\n): Promise<SearchResult[]> {\n const { limit = 10, filter } = options\n const db = await getDb(config)\n const results = await db.search(query, { limit, filter, returnContent: true, returnMetadata: true, returnMeta: true })\n await db.close?.()\n\n return results.map(r => ({\n id: r.id,\n content: r.content ?? '',\n score: r.score,\n metadata: r.metadata ?? {},\n highlights: r._meta?.highlights ?? [],\n lineRange: r._chunk?.lineRange,\n entities: r._chunk?.entities,\n scope: r._chunk?.scope,\n }))\n}\n\n/**\n * Search and return formatted snippets\n */\nexport async function searchSnippets(\n query: string,\n config: IndexConfig,\n options: SearchOptions = {},\n): Promise<SearchSnippet[]> {\n const results = await search(query, config, options)\n return toSnippets(results)\n}\n\nfunction toSnippets(results: SearchResult[]): SearchSnippet[] {\n return results.map((r) => {\n const content = stripFrontmatter(r.content)\n const source = r.metadata.source || r.id\n const lines = content.split('\\n').length\n\n return {\n package: r.metadata.package || 'unknown',\n source,\n lineStart: r.lineRange?.[0] ?? 1,\n lineEnd: r.lineRange?.[1] ?? 
lines,\n content,\n score: r.score,\n highlights: r.highlights,\n entities: r.entities,\n scope: r.scope,\n }\n })\n}\n\n// ── Pooled DB access for interactive search ──\n\nexport async function openPool(dbPaths: string[]): Promise<Map<string, RetrivInstance>> {\n const pool = new Map<string, RetrivInstance>()\n await Promise.all(dbPaths.map(async (dbPath) => {\n const db = await getDb({ dbPath })\n pool.set(dbPath, db)\n }))\n return pool\n}\n\nexport async function searchPooled(\n query: string,\n pool: Map<string, RetrivInstance>,\n options: SearchOptions = {},\n): Promise<SearchSnippet[]> {\n const { limit = 10, filter } = options\n const fetchLimit = limit * 2 // Over-fetch to compensate for dedup\n const allResults = await Promise.all(\n [...pool.values()].map(async (db) => {\n const results = await db.search(query, { limit: fetchLimit, filter, returnContent: true, returnMetadata: true, returnMeta: true })\n return results.map(r => ({\n id: r.id,\n content: r.content ?? '',\n score: r.score,\n metadata: r.metadata ?? {},\n highlights: r._meta?.highlights ?? [],\n lineRange: r._chunk?.lineRange as [number, number] | undefined,\n entities: r._chunk?.entities,\n scope: r._chunk?.scope,\n }))\n }),\n )\n // Deduplicate by source+lineRange (overlapping chunks from same doc)\n const seen = new Set<string>()\n const merged = allResults.flat()\n .sort((a, b) => b.score - a.score)\n .filter((r) => {\n const lr = r.lineRange\n const key = `${r.metadata.source || r.id}:${lr?.[0]}-${lr?.[1]}`\n if (seen.has(key))\n return false\n seen.add(key)\n return true\n })\n .slice(0, limit)\n return toSnippets(merged)\n}\n\nexport async function closePool(pool: Map<string, RetrivInstance>): Promise<void> {\n await Promise.all([...pool.values()].map(db => db.close?.()))\n 
pool.clear()\n}\n"],"mappings":";;AAQA,eAAe,MAAM,QAAqC;CACxD,MAAM,CACJ,EAAE,gBACF,EAAE,eACF,WACA,EAAE,kBACF,EAAE,sBACA,MAAM,QAAQ,IAAI;EACpB,OAAO;EACP,OAAO;EACP,OAAO;EACP,OAAO;EACP,OAAO,kCAAA,MAAA,MAAA,EAAA,EAAA;EACR,CAAC;CACF,MAAM,aAAa,MAAM,iBAAiB,gBAAgB,CAAC;AAC3D,QAAO,aAAa;EAClB,QAAQ,UAAU,QAAQ;GACxB,MAAM,OAAO;GACb;GACD,CAAC;EACF,UAAU,aAAA;EACX,CAAC;;;;;;AAOJ,eAAsB,kBACpB,WACA,QACe;CACf,MAAM,KAAK,MAAM,MAAM,OAAO;AAC9B,OAAM,GAAG,MAAM,WAAW,EAAE,YAAY,OAAO,YAAY,CAAC;AAC5D,OAAM,GAAG,SAAS;;;;;;AAOpB,eAAsB,YACpB,WACA,QACe;CAEf,MAAM,EAAE,wBAAwB,MAAM,OAAO,uBAAA,MAAA,MAAA,EAAA,EAAA;AAC7C,QAAO,oBAAoB,WAAW,OAAO;;AAG/C,eAAsB,OACpB,OACA,QACA,UAAyB,EAAE,EACF;CACzB,MAAM,EAAE,QAAQ,IAAI,WAAW;CAC/B,MAAM,KAAK,MAAM,MAAM,OAAO;CAC9B,MAAM,UAAU,MAAM,GAAG,OAAO,OAAO;EAAE;EAAO;EAAQ,eAAe;EAAM,gBAAgB;EAAM,YAAY;EAAM,CAAC;AACtH,OAAM,GAAG,SAAS;AAElB,QAAO,QAAQ,KAAI,OAAM;EACvB,IAAI,EAAE;EACN,SAAS,EAAE,WAAW;EACtB,OAAO,EAAE;EACT,UAAU,EAAE,YAAY,EAAE;EAC1B,YAAY,EAAE,OAAO,cAAc,EAAE;EACrC,WAAW,EAAE,QAAQ;EACrB,UAAU,EAAE,QAAQ;EACpB,OAAO,EAAE,QAAQ;EAClB,EAAE;;;;;AAML,eAAsB,eACpB,OACA,QACA,UAAyB,EAAE,EACD;AAE1B,QAAO,WADS,MAAM,OAAO,OAAO,QAAQ,QAAQ,CAC1B;;AAG5B,SAAS,WAAW,SAA0C;AAC5D,QAAO,QAAQ,KAAK,MAAM;EACxB,MAAM,UAAU,iBAAiB,EAAE,QAAQ;EAC3C,MAAM,SAAS,EAAE,SAAS,UAAU,EAAE;EACtC,MAAM,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAElC,SAAO;GACL,SAAS,EAAE,SAAS,WAAW;GAC/B;GACA,WAAW,EAAE,YAAY,MAAM;GAC/B,SAAS,EAAE,YAAY,MAAM;GAC7B;GACA,OAAO,EAAE;GACT,YAAY,EAAE;GACd,UAAU,EAAE;GACZ,OAAO,EAAE;GACV;GACD;;AAKJ,eAAsB,SAAS,SAAyD;CACtF,MAAM,uBAAO,IAAI,KAA6B;AAC9C,OAAM,QAAQ,IAAI,QAAQ,IAAI,OAAO,WAAW;EAC9C,MAAM,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AAClC,OAAK,IAAI,QAAQ,GAAG;GACpB,CAAC;AACH,QAAO;;AAGT,eAAsB,aACpB,OACA,MACA,UAAyB,EAAE,EACD;CAC1B,MAAM,EAAE,QAAQ,IAAI,WAAW;CAC/B,MAAM,aAAa,QAAQ;CAC3B,MAAM,aAAa,MAAM,QAAQ,IAC/B,CAAC,GAAG,KAAK,QAAQ,CAAC,CAAC,IAAI,OAAO,OAAO;AAEnC,UADgB,MAAM,GAAG,OAAO,OAAO;GAAE,OAAO;GAAY;GAAQ,eAAe;GAAM,gBAAgB;GAAM,YAAY;GAAM,CAAC,EACnH,KAAI,OAAM;GACvB,IAAI,EAAE;GACN,SAAS,EAAE,WAAW;GACtB,OAAO,EAAE;GACT,UAAU,EAAE,YAAY,EAAE;GAC1B,YAAY,EAAE,OAAO,cAAc,EAAE;GACrC,WAAW,EAAE,QAAQ;GACrB,UAAU,EAAE,QAAQ;GACpB,OAAO,EAAE,QAAQ;GAClB,EAAE;GACH,CACH;CAED,MAAM,uBAAO,IAAI,KAAa;AAY9B,QAAO,WAXQ,WAAW,MAAM,CAC7B,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,QAAQ,MAAM;EACb,MAAM,KAAK,EAAE;EACb,MAAM,MAAM,GAAG,EAAE,SAAS,UAAU,EAAE,GAAG,GAAG,KAAK,GAAG,GAAG,KAAK;AAC5D,MAAI,KAAK,IAAI,IAAI,CACf,QAAO;AACT,OAAK,IAAI,IAAI;AACb,SAAO;GACP,CACD,MAAM,GAAG,MAAM,CACO;;AAG3B,eAAsB,UAAU,MAAkD;AAChF,OAAM,QAAQ,IAAI,CAAC,GAAG,KAAK,QAAQ,CAAC,CAAC,KAAI,OAAM,GAAG,SAAS,CAAC,CAAC;AAC7D,MAAK,OAAO"}
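The sourcemap above embeds the full TypeScript source of `src/retriv/index.ts`, including the JSDoc added in this release distinguishing `createIndexDirect` (in-process indexing) from `createIndex` (worker thread). A usage sketch based on the signatures visible there; the `Document` shape and the root `skilld` import are assumptions, since neither is spelled out in this diff:

```ts
import { createIndex, search } from 'skilld'

// Assumed minimal document shape; the real Document type lives in
// dist/_chunks/types.mjs and is not expanded in this diff.
const docs = [{ id: 'readme', content: '# Example docs' }]

// Index in a background worker thread, then query the same dbPath.
await createIndex(docs, { dbPath: './index.sqlite' })
const results = await search('example', { dbPath: './index.sqlite' }, { limit: 5 })
```

`createIndex` and `search` are confirmed exports of the package root (`dist/index.mjs`), and `dbPath`, `limit`, and `filter` appear in the embedded source; everything else here is illustrative.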
package/dist/retriv/worker.mjs
CHANGED
@@ -1,7 +1,7 @@
-import "../_chunks/chunk.mjs";
 import "../_chunks/config.mjs";
-import "../_chunks/
-import
+import "../_chunks/sanitize.mjs";
+import "../_chunks/cache.mjs";
+import { t as cachedEmbeddings } from "../_chunks/embedding-cache.mjs";
 import { parentPort } from "node:worker_threads";
 import { createRetriv } from "retriv";
 import { autoChunker } from "retriv/chunkers/auto";
package/dist/sources/index.d.mts
CHANGED
@@ -1,2 +1,2 @@
-
import { $ as generateIssueIndex, A as ResolveStep, At as generateDiscussionIndex, B as resolveLocalPackageDocs, C as getFilePatterns, Ct as parseSkillFrontmatterName, D as getRepoKeyForPackage, Dt as GitHubDiscussion, E as getRepoEntry, Et as generateDocsIndex, F as getInstalledSkillVersion, G as extractSections, H as resolvePackageDocsWithAttempts, I as parseVersionSpecifier, J as normalizeLlmsLinks, K as fetchLlmsTxt, L as readLocalDependencies, M as fetchNpmPackage, Mt as toCrawlPattern, N as fetchNpmRegistryMeta, Nt as fetchBlogReleases, O as LocalPackageInfo, Ot as fetchGitHubDiscussions, P as fetchPkgDist, Q as formatIssueAsMarkdown, R as readLocalPackageInfo, S as getDocOverride, St as parseGitSkillInput, T as getRelatedPackages, Tt as resolveEntryFiles, U as searchNpmPackages, V as resolvePackageDocs, W as downloadLlmsDocs, X as GitHubIssue, Y as parseMarkdownLinks, Z as fetchGitHubIssues, _ as BlogPreset, _t as ResolveResult, a as normalizeRepoUrl, at as fetchReadme, b as getBlogPreset, bt as RemoteSkill, c as verifyUrl, ct as isShallowGitDocs, d as SemVer, dt as FetchedDoc, et as isGhAvailable, f as compareSemver, ft as LlmsContent, g as parseSemver, gt as ResolveAttempt, h as isPrerelease, ht as NpmPackageInfo, i as isGitHubRepoUrl, it as fetchGitHubRepoMeta, j as fetchLatestVersion, jt as fetchCrawledDocs, k as ResolveOptions, kt as formatDiscussionAsMarkdown, l as GitHubRelease, lt as resolveGitHubRepo, m as generateReleaseIndex, mt as LocalDependency, n as extractBranchHint, nt as MIN_GIT_DOCS, o as parseGitHubUrl, ot as fetchReadmeContent, p as fetchReleaseNotes, pt as LlmsLink, q as fetchLlmsUrl, r as fetchText, rt as fetchGitDocs, s as parsePackageSpec, st as filterFrameworkDocs, t as $fetch, tt as GitDocsResult, u as ReleaseIndexOptions, ut as validateGitDocsWithLlms, v as BlogRelease, vt as ResolvedPackage, w as getPrereleaseChangelogRef, wt as EntryFile, x as getCrawlUrl, xt as fetchGitSkills, y as DocOverride, yt as GitSkillSource, z as resolveInstalledVersion } from "../_chunks/
-
export { $fetch,
+
import { $ as generateIssueIndex, A as ResolveStep, At as generateDiscussionIndex, B as resolveLocalPackageDocs, C as getFilePatterns, Ct as parseSkillFrontmatterName, D as getRepoKeyForPackage, Dt as GitHubDiscussion, E as getRepoEntry, Et as generateDocsIndex, F as getInstalledSkillVersion, G as extractSections, H as resolvePackageDocsWithAttempts, I as parseVersionSpecifier, J as normalizeLlmsLinks, K as fetchLlmsTxt, L as readLocalDependencies, M as fetchNpmPackage, Mt as toCrawlPattern, N as fetchNpmRegistryMeta, Nt as fetchBlogReleases, O as LocalPackageInfo, Ot as fetchGitHubDiscussions, P as fetchPkgDist, Q as formatIssueAsMarkdown, R as readLocalPackageInfo, S as getDocOverride, St as parseGitSkillInput, T as getRelatedPackages, Tt as resolveEntryFiles, U as searchNpmPackages, V as resolvePackageDocs, W as downloadLlmsDocs, X as GitHubIssue, Y as parseMarkdownLinks, Z as fetchGitHubIssues, _ as BlogPreset, _t as ResolveResult, a as normalizeRepoUrl, at as fetchReadme, b as getBlogPreset, bt as RemoteSkill, c as verifyUrl, ct as isShallowGitDocs, d as SemVer, dt as FetchedDoc, et as isGhAvailable, f as compareSemver, ft as LlmsContent, g as parseSemver, gt as ResolveAttempt, h as isPrerelease, ht as NpmPackageInfo, i as isGitHubRepoUrl, it as fetchGitHubRepoMeta, j as fetchLatestVersion, jt as fetchCrawledDocs, k as ResolveOptions, kt as formatDiscussionAsMarkdown, l as GitHubRelease, lt as resolveGitHubRepo, m as generateReleaseIndex, mt as LocalDependency, n as extractBranchHint, nt as MIN_GIT_DOCS, o as parseGitHubUrl, ot as fetchReadmeContent, p as fetchReleaseNotes, pt as LlmsLink, q as fetchLlmsUrl, r as fetchText, rt as fetchGitDocs, s as parsePackageSpec, st as filterFrameworkDocs, t as $fetch, tt as GitDocsResult, u as ReleaseIndexOptions, ut as validateGitDocsWithLlms, v as BlogRelease, vt as ResolvedPackage, w as getPrereleaseChangelogRef, wt as EntryFile, x as getCrawlUrl, xt as fetchGitSkills, y as DocOverride, yt as GitSkillSource, z as resolveInstalledVersion } from "../_chunks/index2.mjs";
+
export { $fetch, BlogPreset, BlogRelease, DocOverride, EntryFile, FetchedDoc, GitDocsResult, GitHubDiscussion, GitHubIssue, GitHubRelease, GitSkillSource, LlmsContent, LlmsLink, LocalDependency, LocalPackageInfo, MIN_GIT_DOCS, NpmPackageInfo, ReleaseIndexOptions, RemoteSkill, ResolveAttempt, ResolveOptions, ResolveResult, ResolveStep, ResolvedPackage, SemVer, compareSemver, downloadLlmsDocs, extractBranchHint, extractSections, fetchBlogReleases, fetchCrawledDocs, fetchGitDocs, fetchGitHubDiscussions, fetchGitHubIssues, fetchGitHubRepoMeta, fetchGitSkills, fetchLatestVersion, fetchLlmsTxt, fetchLlmsUrl, fetchNpmPackage, fetchNpmRegistryMeta, fetchPkgDist, fetchReadme, fetchReadmeContent, fetchReleaseNotes, fetchText, filterFrameworkDocs, formatDiscussionAsMarkdown, formatIssueAsMarkdown, generateDiscussionIndex, generateDocsIndex, generateIssueIndex, generateReleaseIndex, getBlogPreset, getCrawlUrl, getDocOverride, getFilePatterns, getInstalledSkillVersion, getPrereleaseChangelogRef, getRelatedPackages, getRepoEntry, getRepoKeyForPackage, isGhAvailable, isGitHubRepoUrl, isPrerelease, isShallowGitDocs, normalizeLlmsLinks, normalizeRepoUrl, parseGitHubUrl, parseGitSkillInput, parseMarkdownLinks, parsePackageSpec, parseSemver, parseSkillFrontmatterName, parseVersionSpecifier, readLocalDependencies, readLocalPackageInfo, resolveEntryFiles, resolveGitHubRepo, resolveInstalledVersion, resolveLocalPackageDocs, resolvePackageDocs, resolvePackageDocsWithAttempts, searchNpmPackages, toCrawlPattern, validateGitDocsWithLlms, verifyUrl };
package/dist/sources/index.mjs
CHANGED
@@ -1,5 +1,6 @@
 import "../_chunks/config.mjs";
+import "../_chunks/yaml.mjs";
 import "../_chunks/markdown.mjs";
 import { c as getDocOverride, d as getPrereleaseChangelogRef, f as getRelatedPackages, l as getFilePatterns, m as getRepoKeyForPackage, o as getBlogPreset, p as getRepoEntry, s as getCrawlUrl } from "../_chunks/shared.mjs";
-
import { $ as fetchGitHubIssues, A as parseGitSkillInput, B as compareSemver, C as downloadLlmsDocs, D as normalizeLlmsLinks, E as fetchLlmsUrl, F as formatDiscussionAsMarkdown, G as $fetch, H as generateReleaseIndex, I as generateDiscussionIndex, J as isGitHubRepoUrl, K as extractBranchHint, L as fetchCrawledDocs, M as resolveEntryFiles, N as generateDocsIndex, O as parseMarkdownLinks, P as fetchGitHubDiscussions, Q as verifyUrl, R as toCrawlPattern, S as validateGitDocsWithLlms, T as fetchLlmsTxt, U as isPrerelease, V as fetchReleaseNotes, W as parseSemver, X as parseGitHubUrl, Y as normalizeRepoUrl, Z as parsePackageSpec, _ as fetchReadme, a as getInstalledSkillVersion, b as isShallowGitDocs, c as readLocalPackageInfo, d as resolvePackageDocs, et as formatIssueAsMarkdown, f as resolvePackageDocsWithAttempts, g as fetchGitHubRepoMeta, h as fetchGitDocs, i as fetchPkgDist, j as parseSkillFrontmatterName, k as fetchGitSkills, l as resolveInstalledVersion, m as MIN_GIT_DOCS, n as fetchNpmPackage, nt as isGhAvailable, o as parseVersionSpecifier, p as searchNpmPackages, q as fetchText, r as fetchNpmRegistryMeta, s as readLocalDependencies, t as fetchLatestVersion, tt as generateIssueIndex, u as resolveLocalPackageDocs, v as fetchReadmeContent, w as extractSections, x as resolveGitHubRepo, y as filterFrameworkDocs, z as fetchBlogReleases } from "../_chunks/
+
import { $ as fetchGitHubIssues, A as parseGitSkillInput, B as compareSemver, C as downloadLlmsDocs, D as normalizeLlmsLinks, E as fetchLlmsUrl, F as formatDiscussionAsMarkdown, G as $fetch, H as generateReleaseIndex, I as generateDiscussionIndex, J as isGitHubRepoUrl, K as extractBranchHint, L as fetchCrawledDocs, M as resolveEntryFiles, N as generateDocsIndex, O as parseMarkdownLinks, P as fetchGitHubDiscussions, Q as verifyUrl, R as toCrawlPattern, S as validateGitDocsWithLlms, T as fetchLlmsTxt, U as isPrerelease, V as fetchReleaseNotes, W as parseSemver, X as parseGitHubUrl, Y as normalizeRepoUrl, Z as parsePackageSpec, _ as fetchReadme, a as getInstalledSkillVersion, b as isShallowGitDocs, c as readLocalPackageInfo, d as resolvePackageDocs, et as formatIssueAsMarkdown, f as resolvePackageDocsWithAttempts, g as fetchGitHubRepoMeta, h as fetchGitDocs, i as fetchPkgDist, j as parseSkillFrontmatterName, k as fetchGitSkills, l as resolveInstalledVersion, m as MIN_GIT_DOCS, n as fetchNpmPackage, nt as isGhAvailable, o as parseVersionSpecifier, p as searchNpmPackages, q as fetchText, r as fetchNpmRegistryMeta, s as readLocalDependencies, t as fetchLatestVersion, tt as generateIssueIndex, u as resolveLocalPackageDocs, v as fetchReadmeContent, w as extractSections, x as resolveGitHubRepo, y as filterFrameworkDocs, z as fetchBlogReleases } from "../_chunks/sources.mjs";
export { $fetch, MIN_GIT_DOCS, compareSemver, downloadLlmsDocs, extractBranchHint, extractSections, fetchBlogReleases, fetchCrawledDocs, fetchGitDocs, fetchGitHubDiscussions, fetchGitHubIssues, fetchGitHubRepoMeta, fetchGitSkills, fetchLatestVersion, fetchLlmsTxt, fetchLlmsUrl, fetchNpmPackage, fetchNpmRegistryMeta, fetchPkgDist, fetchReadme, fetchReadmeContent, fetchReleaseNotes, fetchText, filterFrameworkDocs, formatDiscussionAsMarkdown, formatIssueAsMarkdown, generateDiscussionIndex, generateDocsIndex, generateIssueIndex, generateReleaseIndex, getBlogPreset, getCrawlUrl, getDocOverride, getFilePatterns, getInstalledSkillVersion, getPrereleaseChangelogRef, getRelatedPackages, getRepoEntry, getRepoKeyForPackage, isGhAvailable, isGitHubRepoUrl, isPrerelease, isShallowGitDocs, normalizeLlmsLinks, normalizeRepoUrl, parseGitHubUrl, parseGitSkillInput, parseMarkdownLinks, parsePackageSpec, parseSemver, parseSkillFrontmatterName, parseVersionSpecifier, readLocalDependencies, readLocalPackageInfo, resolveEntryFiles, resolveGitHubRepo, resolveInstalledVersion, resolveLocalPackageDocs, resolvePackageDocs, resolvePackageDocsWithAttempts, searchNpmPackages, toCrawlPattern, validateGitDocsWithLlms, verifyUrl };
package/dist/types.d.mts
CHANGED
@@ -1,6 +1,5 @@
-import { D as CachedPackage, T as CacheConfig } from "./_chunks/
+import { D as CachedPackage, T as CacheConfig } from "./_chunks/index.mjs";
 import { c as SearchResult, l as SearchSnippet, n as Document, o as SearchFilter, r as IndexConfig, s as SearchOptions } from "./_chunks/types.mjs";
 import "./retriv/index.mjs";
-import { dt as FetchedDoc, ft as LlmsContent, pt as LlmsLink } from "./_chunks/
-import "./sources/index.mjs";
+import { dt as FetchedDoc, ft as LlmsContent, pt as LlmsLink } from "./_chunks/index2.mjs";
 export { type CacheConfig, type CachedPackage, type Document, type FetchedDoc, type IndexConfig, type LlmsContent, type LlmsLink, type SearchFilter, type SearchOptions, type SearchResult, type SearchSnippet };
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
 "name": "skilld",
 "type": "module",
-"version": "0.15.
+"version": "0.15.4",
 "description": "Generate AI agent skills from npm package documentation",
 "author": {
 "name": "Harlan Wilton",
@@ -49,34 +49,34 @@
 "citty": "^0.2.1",
 "consola": "^3.4.2",
 "giget": "^3.1.2",
-"
-"
-"mdast-util-from-markdown": "^2.0.2",
+"log-update": "^7.2.0",
+"mdast-util-from-markdown": "^2.0.3",
 "mdast-util-frontmatter": "^2.0.1",
 "mdast-util-to-string": "^4.0.0",
 "mdream": "^0.16.0",
 "micromark-extension-frontmatter": "^2.0.0",
 "mlly": "^1.8.0",
 "ofetch": "^1.5.1",
-"oxc-parser": "^0.
+"oxc-parser": "^0.115.0",
 "p-limit": "^7.3.0",
 "pathe": "^2.0.3",
-"retriv": "^0.10.
+"retriv": "^0.10.3",
 "semver": "^7.7.4",
 "sqlite-vec": "^0.1.7-alpha.10",
 "std-env": "^3.10.0",
+"tinyglobby": "^0.2.15",
 "typescript": "6.0.0-beta",
 "unagent": "^0.0.8",
-"unist-util-visit": "^5.
+"unist-util-visit": "^5.1.0"
 },
 "devDependencies": {
-"@antfu/eslint-config": "^7.
-"@types/node": "^25.2
+"@antfu/eslint-config": "^7.6.1",
+"@types/node": "^25.3.2",
 "@types/semver": "^7.7.1",
 "@vitest/coverage-v8": "^4.0.18",
 "bumpp": "^10.4.1",
 "evalite": "^0.19.0",
-"obuild": "^0.4.
+"obuild": "^0.4.31",
 "tsx": "^4.21.0",
 "vitest": "^4.0.18"
 },
package/dist/_chunks/detect-imports.mjs.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"detect-imports.mjs","names":["cli","agentId","models","buildArgs","parseLine","cli","agentId","models","buildArgs","parseLine","claude","gemini","codex","claude.buildArgs","gemini.buildArgs","codex.buildArgs","claude.parseLine","gemini.parseLine","codex.parseLine","agents","delay"],"sources":["../../src/agent/clis/claude.ts","../../src/agent/clis/codex.ts","../../src/agent/clis/gemini.ts","../../src/agent/clis/index.ts","../../src/agent/detect-presets.ts","../../src/agent/detect-imports.ts"],"sourcesContent":["/**\n * Claude Code CLI — token-level streaming via --include-partial-messages\n */\n\nimport type { CliModelEntry, ParsedEvent } from './types.ts'\nimport { join } from 'pathe'\n\nexport const cli = 'claude' as const\nexport const agentId = 'claude-code' as const\n\nexport const models: Record<string, CliModelEntry> = {\n opus: { model: 'opus', name: 'Opus 4.6', hint: 'Most capable for complex work' },\n sonnet: { model: 'sonnet', name: 'Sonnet 4.6', hint: 'Best for everyday tasks' },\n haiku: { model: 'haiku', name: 'Haiku 4.5', hint: 'Fastest for quick answers', recommended: true },\n}\n\nexport function buildArgs(model: string, skillDir: string, symlinkDirs: string[]): string[] {\n const skilldDir = join(skillDir, '.skilld')\n const readDirs = [skillDir, ...symlinkDirs]\n const allowedTools = [\n ...readDirs.flatMap(d => [`Read(${d}/**)`, `Glob(${d}/**)`, `Grep(${d}/**)`]),\n `Write(${skilldDir}/**)`,\n `Bash(*skilld search*)`,\n `Bash(*skilld validate*)`,\n ].join(' ')\n return [\n '-p',\n '--model',\n model,\n '--output-format',\n 'stream-json',\n '--verbose',\n '--include-partial-messages',\n '--allowedTools',\n allowedTools,\n '--disallowedTools',\n 'WebSearch WebFetch Task',\n '--add-dir',\n skillDir,\n ...symlinkDirs.flatMap(d => ['--add-dir', d]),\n '--no-session-persistence',\n ]\n}\n\n/**\n * Parse claude stream-json events\n *\n * Event types:\n * - stream_event/content_block_delta/text_delta → token streaming\n * - stream_event/content_block_start/tool_use → tool invocation starting\n * - assistant message with tool_use content → tool name + input\n * - assistant message with text content → full text (non-streaming fallback)\n * - result → usage, cost, turns\n */\nexport function parseLine(line: string): ParsedEvent {\n try {\n const obj = JSON.parse(line)\n\n // Token-level streaming (--include-partial-messages)\n if (obj.type === 'stream_event') {\n const evt = obj.event\n if (!evt)\n return {}\n\n // Text delta — the main streaming path\n if (evt.type === 'content_block_delta' && evt.delta?.type === 'text_delta') {\n return { textDelta: evt.delta.text }\n }\n\n return {}\n }\n\n // Full assistant message (complete turn, after streaming)\n if (obj.type === 'assistant' && obj.message?.content) {\n const content = obj.message.content as any[]\n\n // Extract tool uses with inputs for progress hints\n const tools = content.filter((c: any) => c.type === 'tool_use')\n if (tools.length) {\n const names = tools.map((t: any) => t.name)\n // Extract useful hint from tool input (file path, query, etc)\n const hint = tools.map((t: any) => {\n const input = t.input || {}\n return input.file_path || input.path || input.pattern || input.query || input.command || ''\n }).filter(Boolean).join(', ')\n // Capture Write content as fallback if permission is denied\n const writeTool = tools.find((t: any) => t.name === 'Write' && t.input?.content)\n return { toolName: names.join(', '), toolHint: hint || undefined, writeContent: writeTool?.input?.content }\n }\n\n // 
Text content (fallback for non-partial mode)\n const text = content\n .filter((c: any) => c.type === 'text')\n .map((c: any) => c.text)\n .join('')\n if (text)\n return { fullText: text }\n }\n\n // Final result\n if (obj.type === 'result') {\n const u = obj.usage\n return {\n done: true,\n usage: u ? { input: u.input_tokens ?? u.inputTokens ?? 0, output: u.output_tokens ?? u.outputTokens ?? 0 } : undefined,\n cost: obj.total_cost_usd,\n turns: obj.num_turns,\n }\n }\n }\n catch {}\n return {}\n}\n","/**\n * OpenAI Codex CLI — exec subcommand with JSON output\n * Prompt passed via stdin with `-` sentinel\n *\n * Event types:\n * - turn.started / turn.completed → turn lifecycle + usage\n * - item.started → command_execution in progress\n * - item.completed → agent_message (text), command_execution (result), file_change (apply_patch)\n * - error / turn.failed → errors\n */\n\nimport type { CliModelEntry, ParsedEvent } from './types.ts'\n\nexport const cli = 'codex' as const\nexport const agentId = 'codex' as const\n\nexport const models: Record<string, CliModelEntry> = {\n 'gpt-5.3-codex': { model: 'gpt-5.3-codex', name: 'GPT-5.3 Codex', hint: 'Latest frontier Codex model' },\n 'gpt-5.3-codex-spark': { model: 'gpt-5.3-codex-spark', name: 'GPT-5.3 Codex Spark', hint: 'Faster GPT-5.3 Codex variant', recommended: true },\n 'gpt-5.2-codex': { model: 'gpt-5.2-codex', name: 'GPT-5.2 Codex', hint: 'Frontier agentic coding model' },\n}\n\nexport function buildArgs(model: string, _skillDir: string, _symlinkDirs: string[]): string[] {\n return [\n 'exec',\n '--json',\n '--ephemeral',\n '--model',\n model,\n // Permissions aligned with Claude's scoped model:\n // --full-auto = --sandbox workspace-write + --ask-for-approval on-request\n // → writes scoped to CWD (.skilld/, set in spawn), reads unrestricted, network blocked\n // Shell remains enabled for `skilld` / `npx -y skilld` search/validate (no per-command allowlist in Codex)\n // --ephemeral → no session persistence (equivalent to Claude's --no-session-persistence)\n '--full-auto',\n '-',\n ]\n}\n\nexport function parseLine(line: string): ParsedEvent {\n try {\n const obj = JSON.parse(line)\n\n if (obj.type === 'item.completed' && obj.item) {\n const item = obj.item\n // Agent message — the main text output\n if (item.type === 'agent_message' && item.text)\n return { fullText: item.text }\n // Command execution completed — log as tool progress\n // If the command writes to a file (redirect or cat >), capture output as writeContent fallback\n if (item.type === 'command_execution' && item.aggregated_output) {\n const cmd = item.command || ''\n const writeContent = (/^cat\\s*>|>/.test(cmd)) ? item.aggregated_output : undefined\n return { toolName: 'Bash', toolHint: `(${item.aggregated_output.length} chars output)`, writeContent }\n }\n // apply_patch completed — file written directly to disk\n if (item.type === 'file_change' && item.changes?.length) {\n const paths = item.changes.map((c: { path: string, kind: string }) => c.path).join(', ')\n return { toolName: 'Write', toolHint: paths }\n }\n }\n\n // Command starting — show progress\n if (obj.type === 'item.started' && obj.item?.type === 'command_execution') {\n return { toolName: 'Bash', toolHint: obj.item.command }\n }\n\n // Turn completed — usage stats\n if (obj.type === 'turn.completed' && obj.usage) {\n return {\n done: true,\n usage: {\n input: obj.usage.input_tokens ?? 0,\n output: obj.usage.output_tokens ?? 
0,\n },\n }\n }\n\n // Error events\n if (obj.type === 'turn.failed' || obj.type === 'error') {\n return { done: true }\n }\n }\n catch {}\n return {}\n}\n","/**\n * Gemini CLI — turn-level streaming via -o stream-json\n * Write scoping: relies on cwd being set to .skilld/ (no native --writeable-dirs)\n */\n\nimport type { CliModelEntry, ParsedEvent } from './types.ts'\nimport { resolveSkilldCommand } from '../../core/shared.ts'\n\nexport const cli = 'gemini' as const\nexport const agentId = 'gemini-cli' as const\n\nexport const models: Record<string, CliModelEntry> = {\n 'gemini-3-pro': { model: 'gemini-3-pro-preview', name: 'Gemini 3 Pro', hint: 'Most capable' },\n 'gemini-3-flash': { model: 'gemini-3-flash-preview', name: 'Gemini 3 Flash', hint: 'Balanced', recommended: true },\n}\n\nexport function buildArgs(model: string, skillDir: string, symlinkDirs: string[]): string[] {\n return [\n '-o',\n 'stream-json',\n '-m',\n model,\n '--allowed-tools',\n `read_file,write_file,glob_tool,list_directory,search_file_content,run_shell_command(${resolveSkilldCommand()}),run_shell_command(grep),run_shell_command(head)`,\n '--include-directories',\n skillDir,\n ...symlinkDirs.flatMap(d => ['--include-directories', d]),\n ]\n}\n\n/** Parse gemini stream-json events — turn level (full message per event) */\nexport function parseLine(line: string): ParsedEvent {\n try {\n const obj = JSON.parse(line)\n\n // Text message (delta or full)\n if (obj.type === 'message' && obj.role === 'assistant' && obj.content) {\n return obj.delta ? { textDelta: obj.content } : { fullText: obj.content }\n }\n\n // Tool invocation\n if (obj.type === 'tool_use' || obj.type === 'tool_call') {\n const name = obj.tool_name || obj.name || obj.tool || 'tool'\n const params = obj.parameters || obj.args || obj.input || {}\n const hint = params.file_path || params.path || params.dir_path || params.pattern || params.query || params.command || ''\n // Capture write_file content as fallback (matches Claude's Write tool behavior)\n if (name === 'write_file' && params.content) {\n return { toolName: name, toolHint: hint || undefined, writeContent: params.content }\n }\n return { toolName: name, toolHint: hint || undefined }\n }\n\n // Final result\n if (obj.type === 'result') {\n const s = obj.stats\n return {\n done: true,\n usage: s ? { input: s.input_tokens ?? s.input ?? 0, output: s.output_tokens ?? s.output ?? 
0 } : undefined,\n turns: s?.tool_calls,\n }\n }\n }\n catch {}\n return {}\n}\n","/**\n * CLI orchestrator — spawns per-CLI processes for skill generation\n * Each CLI (claude, gemini, codex) has its own buildArgs + parseLine in separate files\n */\n\nimport type { SkillSection } from '../prompts/index.ts'\nimport type { AgentType } from '../types.ts'\nimport type { CliModelConfig, CliName, OptimizeDocsOptions, OptimizeModel, OptimizeResult, ParsedEvent, SectionResult, StreamProgress, ValidationWarning } from './types.ts'\nimport { exec, spawn } from 'node:child_process'\nimport { createHash } from 'node:crypto'\nimport { existsSync, lstatSync, mkdirSync, readdirSync, readFileSync, realpathSync, unlinkSync, writeFileSync } from 'node:fs'\nimport { homedir } from 'node:os'\nimport { setTimeout as delay } from 'node:timers/promises'\nimport { promisify } from 'node:util'\nimport { dirname, join } from 'pathe'\nimport { isWindows } from 'std-env'\nimport { readCachedSection, writeSections } from '../../cache/index.ts'\nimport { sanitizeMarkdown } from '../../core/sanitize.ts'\nimport { detectInstalledAgents } from '../detect.ts'\nimport { buildAllSectionPrompts, getSectionValidator, SECTION_MERGE_ORDER, SECTION_OUTPUT_FILES } from '../prompts/index.ts'\nimport { agents } from '../registry.ts'\nimport * as claude from './claude.ts'\nimport * as codex from './codex.ts'\nimport * as gemini from './gemini.ts'\n\nexport { buildAllSectionPrompts, buildSectionPrompt, SECTION_MERGE_ORDER, SECTION_OUTPUT_FILES } from '../prompts/index.ts'\nexport type { CustomPrompt, SkillSection } from '../prompts/index.ts'\nexport type { CliModelConfig, CliName, ModelInfo, OptimizeDocsOptions, OptimizeModel, OptimizeResult, StreamProgress } from './types.ts'\n\n// ── Tool progress display ────────────────────────────────────────────\n\nconst TOOL_VERBS: Record<string, string> = {\n // Claude\n Read: 'Reading',\n Glob: 'Searching',\n Grep: 'Searching',\n Write: 'Writing',\n Bash: 'Running',\n // Gemini\n read_file: 'Reading',\n glob_tool: 'Searching',\n write_file: 'Writing',\n list_directory: 'Listing',\n search_file_content: 'Searching',\n run_shell_command: 'Running',\n}\n\ninterface ToolProgressLog {\n message: (msg: string) => void\n}\n\n/** Create a progress callback that emits one line per tool call, Claude Code style */\nexport function createToolProgress(log: ToolProgressLog): (progress: StreamProgress) => void {\n let lastMsg = ''\n let repeatCount = 0\n\n function emit(msg: string) {\n if (msg === lastMsg) {\n repeatCount++\n log.message(`${msg} \\x1B[90m(+${repeatCount})\\x1B[0m`)\n }\n else {\n lastMsg = msg\n repeatCount = 0\n log.message(msg)\n }\n }\n\n return ({ type, chunk, section }) => {\n if (type === 'text') {\n emit(`${section ? `\\x1B[90m[${section}]\\x1B[0m ` : ''}Writing...`)\n return\n }\n if (type !== 'reasoning' || !chunk.startsWith('['))\n return\n\n // Parse individual tool names and hints from \"[Read: path]\" or \"[Read, Glob: path1, path2]\"\n const match = chunk.match(/^\\[([^:[\\]]+)(?::\\s(.+))?\\]$/)\n if (!match)\n return\n\n const names = match[1]!.split(',').map(n => n.trim())\n const hints = match[2]?.split(',').map(h => h.trim()) ?? []\n\n for (let i = 0; i < names.length; i++) {\n const rawName = names[i]!\n const hint = hints[i] ?? hints[0] ?? ''\n const verb = TOOL_VERBS[rawName] ?? rawName\n const prefix = section ? 
`\\x1B[90m[${section}]\\x1B[0m ` : ''\n\n if ((rawName === 'Bash' || rawName === 'run_shell_command') && hint) {\n const searchMatch = hint.match(/skilld search\\s+\"([^\"]+)\"/)\n if (searchMatch) {\n emit(`${prefix}Searching \\x1B[36m\"${searchMatch[1]}\"\\x1B[0m`)\n }\n else if (hint.includes('skilld validate')) {\n emit(`${prefix}Validating...`)\n }\n else {\n const shortened = shortenCommand(hint)\n emit(`${prefix}Running ${shortened.length > 50 ? `${shortened.slice(0, 47)}...` : shortened}`)\n }\n }\n else {\n const path = shortenPath(hint || '...')\n emit(`${prefix}${verb} \\x1B[90m${path}\\x1B[0m`)\n }\n }\n }\n}\n\n// ── Per-CLI dispatch ─────────────────────────────────────────────────\n\nconst CLI_DEFS = [claude, gemini, codex] as const\n\nconst CLI_BUILD_ARGS: Record<CliName, (model: string, skillDir: string, symlinkDirs: string[]) => string[]> = {\n claude: claude.buildArgs,\n gemini: gemini.buildArgs,\n codex: codex.buildArgs,\n}\n\nconst CLI_PARSE_LINE: Record<CliName, (line: string) => ParsedEvent> = {\n claude: claude.parseLine,\n gemini: gemini.parseLine,\n codex: codex.parseLine,\n}\n\n// ── Assemble CLI_MODELS from per-CLI model definitions ───────────────\n\nexport const CLI_MODELS: Partial<Record<OptimizeModel, CliModelConfig>> = Object.fromEntries(\n CLI_DEFS.flatMap(def =>\n Object.entries(def.models).map(([id, entry]) => [\n id,\n { ...entry, cli: def.cli, agentId: def.agentId },\n ]),\n ),\n)\n\n// ── Model helpers ────────────────────────────────────────────────────\n\nexport function getModelName(id: OptimizeModel): string {\n return CLI_MODELS[id]?.name ?? id\n}\n\nexport function getModelLabel(id: OptimizeModel): string {\n const config = CLI_MODELS[id]\n if (!config)\n return id\n const agentName = agents[config.agentId]?.displayName ?? config.cli\n return `${agentName} · ${config.name}`\n}\n\nexport async function getAvailableModels(): Promise<import('./types.ts').ModelInfo[]> {\n const execAsync = promisify(exec)\n const lookupCmd = isWindows ? 'where' : 'which'\n\n const installedAgents = detectInstalledAgents()\n const agentsWithCli = installedAgents.filter(id => agents[id].cli)\n\n const cliChecks = await Promise.all(\n agentsWithCli.map(async (agentId) => {\n const cli = agents[agentId].cli!\n try {\n await execAsync(`${lookupCmd} ${cli}`)\n return agentId\n }\n catch { return null }\n }),\n )\n const availableAgentIds = new Set(cliChecks.filter((id): id is AgentType => id != null))\n\n return (Object.entries(CLI_MODELS) as [OptimizeModel, CliModelConfig][])\n .filter(([_, config]) => availableAgentIds.has(config.agentId))\n .map(([id, config]) => ({\n id,\n name: config.name,\n hint: config.hint,\n recommended: config.recommended,\n agentId: config.agentId,\n agentName: agents[config.agentId]?.displayName ?? config.agentId,\n }))\n}\n\n// ── Reference dirs ───────────────────────────────────────────────────\n\n/** Resolve symlinks in .skilld/ to get real paths for --add-dir */\nfunction resolveReferenceDirs(skillDir: string): string[] {\n const refsDir = join(skillDir, '.skilld')\n if (!existsSync(refsDir))\n return []\n const resolved = readdirSync(refsDir)\n .map(entry => join(refsDir, entry))\n .filter(p => lstatSync(p).isSymbolicLink() && existsSync(p))\n .map(p => realpathSync(p))\n\n // Include parent directories so CLIs can search across all references at once\n // (e.g. 
Gemini's sandbox requires the parent dir to be explicitly included)\n const parents = new Set<string>()\n for (const p of resolved) {\n const parent = dirname(p)\n if (!resolved.includes(parent))\n parents.add(parent)\n }\n\n return [...resolved, ...parents]\n}\n\n// ── Cache ────────────────────────────────────────────────────────────\n\nconst CACHE_DIR = join(homedir(), '.skilld', 'llm-cache')\n\n/** Strip absolute paths from prompt so the hash is project-independent */\nfunction normalizePromptForHash(prompt: string): string {\n return prompt.replace(/\\/[^\\s`]*\\.(?:claude|codex|gemini)\\/skills\\/[^\\s/`]+/g, '<SKILL_DIR>')\n}\n\nfunction hashPrompt(prompt: string, model: OptimizeModel, section: SkillSection): string {\n return createHash('sha256').update(`exec:${model}:${section}:${normalizePromptForHash(prompt)}`).digest('hex').slice(0, 16)\n}\n\nfunction getCached(prompt: string, model: OptimizeModel, section: SkillSection, maxAge = 7 * 24 * 60 * 60 * 1000): string | null {\n const path = join(CACHE_DIR, `${hashPrompt(prompt, model, section)}.json`)\n if (!existsSync(path))\n return null\n try {\n const { text, timestamp } = JSON.parse(readFileSync(path, 'utf-8'))\n return Date.now() - timestamp > maxAge ? null : text\n }\n catch { return null }\n}\n\nfunction setCache(prompt: string, model: OptimizeModel, section: SkillSection, text: string): void {\n mkdirSync(CACHE_DIR, { recursive: true, mode: 0o700 })\n writeFileSync(\n join(CACHE_DIR, `${hashPrompt(prompt, model, section)}.json`),\n JSON.stringify({ text, model, section, timestamp: Date.now() }),\n { mode: 0o600 },\n )\n}\n\n// ── Per-section spawn ────────────────────────────────────────────────\n\ninterface OptimizeSectionOptions {\n section: SkillSection\n prompt: string\n outputFile: string\n skillDir: string\n model: OptimizeModel\n packageName: string\n onProgress?: (progress: StreamProgress) => void\n timeout: number\n debug?: boolean\n preExistingFiles: Set<string>\n}\n\n/** Spawn a single CLI process for one section */\nfunction optimizeSection(opts: OptimizeSectionOptions): Promise<SectionResult> {\n const { section, prompt, outputFile, skillDir, model, onProgress, timeout, debug, preExistingFiles } = opts\n\n const cliConfig = CLI_MODELS[model]\n if (!cliConfig) {\n return Promise.resolve({ section, content: '', wasOptimized: false, error: `No CLI mapping for model: ${model}` })\n }\n\n const { cli, model: cliModel } = cliConfig\n const symlinkDirs = resolveReferenceDirs(skillDir)\n const args = CLI_BUILD_ARGS[cli](cliModel, skillDir, symlinkDirs)\n const parseLine = CLI_PARSE_LINE[cli]\n\n const skilldDir = join(skillDir, '.skilld')\n const outputPath = join(skilldDir, outputFile)\n\n // Remove stale output so we don't read a leftover from a previous run\n if (existsSync(outputPath))\n unlinkSync(outputPath)\n\n // Write prompt for debugging\n writeFileSync(join(skilldDir, `PROMPT_${section}.md`), prompt)\n\n return new Promise<SectionResult>((resolve) => {\n const proc = spawn(cli, args, {\n cwd: skilldDir,\n stdio: ['pipe', 'pipe', 'pipe'],\n timeout,\n env: { ...process.env, NO_COLOR: '1' },\n shell: isWindows,\n })\n\n let buffer = ''\n let accumulatedText = ''\n let lastWriteContent = ''\n let usage: { input: number, output: number } | undefined\n let cost: number | undefined\n const rawLines: string[] = []\n\n onProgress?.({ chunk: '[starting...]', type: 'reasoning', text: '', reasoning: '', section })\n\n proc.stdin.write(prompt)\n proc.stdin.end()\n\n proc.stdout.on('data', (chunk: Buffer) => {\n buffer 
+= chunk.toString()\n const lines = buffer.split('\\n')\n buffer = lines.pop() || ''\n\n for (const line of lines) {\n if (!line.trim())\n continue\n if (debug)\n rawLines.push(line)\n const evt = parseLine(line)\n\n if (evt.textDelta)\n accumulatedText += evt.textDelta\n if (evt.fullText)\n accumulatedText = evt.fullText\n\n if (evt.writeContent)\n lastWriteContent = evt.writeContent\n\n if (evt.toolName) {\n const hint = evt.toolHint\n ? `[${evt.toolName}: ${evt.toolHint}]`\n : `[${evt.toolName}]`\n onProgress?.({ chunk: hint, type: 'reasoning', text: '', reasoning: hint, section })\n }\n\n if (evt.usage)\n usage = evt.usage\n if (evt.cost != null)\n cost = evt.cost\n }\n })\n\n let stderr = ''\n proc.stderr.on('data', (chunk: Buffer) => {\n stderr += chunk.toString()\n })\n\n proc.on('close', (code) => {\n // Drain remaining buffer for metadata\n if (buffer.trim()) {\n const evt = parseLine(buffer)\n if (evt.textDelta)\n accumulatedText += evt.textDelta\n if (evt.fullText)\n accumulatedText = evt.fullText\n if (evt.writeContent)\n lastWriteContent = evt.writeContent\n if (evt.usage)\n usage = evt.usage\n if (evt.cost != null)\n cost = evt.cost\n }\n\n // Remove unexpected files the LLM may have written (prompt injection defense)\n // Only clean files not in the pre-existing snapshot and not our expected output\n for (const entry of readdirSync(skilldDir)) {\n if (entry !== outputFile && !preExistingFiles.has(entry)) {\n // Allow other section output files and debug prompts\n if (Object.values(SECTION_OUTPUT_FILES).includes(entry))\n continue\n if (entry.startsWith('PROMPT_') || entry === 'logs')\n continue\n try {\n unlinkSync(join(skilldDir, entry))\n }\n catch {}\n }\n }\n\n // Prefer file written by LLM, fall back to Write tool content (if denied), then accumulated stdout\n const raw = (existsSync(outputPath) ? readFileSync(outputPath, 'utf-8') : lastWriteContent || accumulatedText).trim()\n\n // Always write stderr on failure; write all logs in debug mode\n const logsDir = join(skilldDir, 'logs')\n const logName = section.toUpperCase().replace(/-/g, '_')\n if (debug || (stderr && (!raw || code !== 0))) {\n mkdirSync(logsDir, { recursive: true })\n if (stderr)\n writeFileSync(join(logsDir, `${logName}.stderr.log`), stderr)\n }\n if (debug) {\n mkdirSync(logsDir, { recursive: true })\n if (rawLines.length)\n writeFileSync(join(logsDir, `${logName}.jsonl`), rawLines.join('\\n'))\n if (raw)\n writeFileSync(join(logsDir, `${logName}.md`), raw)\n }\n\n if (!raw && code !== 0) {\n resolve({ section, content: '', wasOptimized: false, error: stderr.trim() || `CLI exited with code ${code}` })\n return\n }\n\n // Clean the section output (strip markdown fences, frontmatter, sanitize)\n const content = raw ? cleanSectionOutput(raw) : ''\n\n if (content) {\n // Write cleaned content back to the output file for debugging\n writeFileSync(outputPath, content)\n }\n\n const validator = getSectionValidator(section)\n const rawWarnings = content && validator ? validator(content) : []\n const warnings: ValidationWarning[] = rawWarnings.map(w => ({ section, warning: w.warning }))\n\n resolve({\n section,\n content,\n wasOptimized: !!content,\n warnings: warnings?.length ? 
warnings : undefined,\n usage,\n cost,\n })\n })\n\n proc.on('error', (err) => {\n resolve({ section, content: '', wasOptimized: false, error: err.message })\n })\n })\n}\n\n// ── Main orchestrator ────────────────────────────────────────────────\n\nexport async function optimizeDocs(opts: OptimizeDocsOptions): Promise<OptimizeResult> {\n const { packageName, skillDir, model = 'sonnet', version, hasGithub, hasReleases, hasChangelog, docFiles, docsType, hasShippedDocs, onProgress, timeout = 180000, debug, noCache, sections, customPrompt, features, pkgFiles } = opts\n\n const selectedSections = sections ?? ['api-changes', 'best-practices'] as SkillSection[]\n\n // Build all section prompts\n const sectionPrompts = buildAllSectionPrompts({\n packageName,\n skillDir,\n version,\n hasIssues: hasGithub,\n hasDiscussions: hasGithub,\n hasReleases,\n hasChangelog,\n docFiles,\n docsType,\n hasShippedDocs,\n customPrompt,\n features,\n pkgFiles,\n sections: selectedSections,\n })\n\n if (sectionPrompts.size === 0) {\n return { optimized: '', wasOptimized: false, error: 'No valid sections to generate' }\n }\n\n const cliConfig = CLI_MODELS[model]\n if (!cliConfig) {\n return { optimized: '', wasOptimized: false, error: `No CLI mapping for model: ${model}` }\n }\n\n // Check per-section cache: references dir first (version-keyed), then LLM cache (prompt-hashed)\n const cachedResults: SectionResult[] = []\n const uncachedSections: Array<{ section: SkillSection, prompt: string }> = []\n\n for (const [section, prompt] of sectionPrompts) {\n if (!noCache) {\n // Check global references dir (cross-project, version-keyed)\n if (version) {\n const outputFile = SECTION_OUTPUT_FILES[section]\n const refCached = readCachedSection(packageName, version, outputFile)\n if (refCached) {\n onProgress?.({ chunk: `[${section}: cached]`, type: 'text', text: refCached, reasoning: '', section })\n cachedResults.push({ section, content: refCached, wasOptimized: true })\n continue\n }\n }\n\n // Check LLM prompt-hash cache\n const cached = getCached(prompt, model, section)\n if (cached) {\n onProgress?.({ chunk: `[${section}: cached]`, type: 'text', text: cached, reasoning: '', section })\n cachedResults.push({ section, content: cached, wasOptimized: true })\n continue\n }\n }\n uncachedSections.push({ section, prompt })\n }\n\n // Prepare .skilld/ dir and snapshot before spawns\n const skilldDir = join(skillDir, '.skilld')\n mkdirSync(skilldDir, { recursive: true })\n\n // Pre-flight: warn about broken symlinks in .skilld/ (avoids wasting tokens on missing refs)\n for (const entry of readdirSync(skilldDir)) {\n const entryPath = join(skilldDir, entry)\n try {\n if (lstatSync(entryPath).isSymbolicLink() && !existsSync(entryPath))\n onProgress?.({ chunk: `[warn: broken symlink .skilld/${entry}]`, type: 'reasoning', text: '', reasoning: '' })\n }\n catch {}\n }\n\n const preExistingFiles = new Set(readdirSync(skilldDir))\n\n // Spawn uncached sections with staggered starts to avoid rate-limit collisions\n const STAGGER_MS = 3000\n const spawnResults = uncachedSections.length > 0\n ? 
await Promise.allSettled(\n uncachedSections.map(({ section, prompt }, i) => {\n const outputFile = SECTION_OUTPUT_FILES[section]\n const run = () => optimizeSection({\n section,\n prompt,\n outputFile,\n skillDir,\n model,\n packageName,\n onProgress,\n timeout,\n debug,\n preExistingFiles,\n })\n // Stagger: first section starts immediately, rest delayed\n if (i === 0)\n return run()\n return delay(i * STAGGER_MS).then(run)\n }),\n )\n : []\n\n // Collect results, retry failed sections once\n const allResults: SectionResult[] = [...cachedResults]\n let totalUsage: { input: number, output: number } | undefined\n let totalCost = 0\n const retryQueue: Array<{ index: number, section: SkillSection, prompt: string }> = []\n\n for (let i = 0; i < spawnResults.length; i++) {\n const r = spawnResults[i]!\n const { section, prompt } = uncachedSections[i]!\n if (r.status === 'fulfilled' && r.value.wasOptimized) {\n allResults.push(r.value)\n if (r.value.usage) {\n totalUsage = totalUsage ?? { input: 0, output: 0 }\n totalUsage.input += r.value.usage.input\n totalUsage.output += r.value.usage.output\n }\n if (r.value.cost != null)\n totalCost += r.value.cost\n if (!noCache)\n setCache(prompt, model, section, r.value.content)\n }\n else {\n retryQueue.push({ index: i, section, prompt })\n }\n }\n\n // Retry failed sections once (sequential to avoid rate limits)\n for (const { section, prompt } of retryQueue) {\n onProgress?.({ chunk: `[${section}: retrying...]`, type: 'reasoning', text: '', reasoning: '', section })\n await delay(STAGGER_MS)\n const result = await optimizeSection({\n section,\n prompt,\n outputFile: SECTION_OUTPUT_FILES[section],\n skillDir,\n model,\n packageName,\n onProgress,\n timeout,\n debug,\n preExistingFiles,\n }).catch((err: Error) => ({ section, content: '', wasOptimized: false, error: err.message }) as SectionResult)\n\n allResults.push(result)\n if (result.wasOptimized && !noCache)\n setCache(prompt, model, section, result.content)\n if (result.usage) {\n totalUsage = totalUsage ?? { input: 0, output: 0 }\n totalUsage.input += result.usage.input\n totalUsage.output += result.usage.output\n }\n if (result.cost != null)\n totalCost += result.cost\n }\n\n // Write successful sections to global references dir for cross-project reuse\n if (version) {\n const sectionFiles = allResults\n .filter(r => r.wasOptimized && r.content)\n .map(r => ({ file: SECTION_OUTPUT_FILES[r.section], content: r.content }))\n if (sectionFiles.length > 0) {\n writeSections(packageName, version, sectionFiles)\n }\n }\n\n // Merge results in SECTION_MERGE_ORDER\n const mergedParts: string[] = []\n for (const section of SECTION_MERGE_ORDER) {\n const result = allResults.find(r => r.section === section)\n if (result?.wasOptimized && result.content) {\n mergedParts.push(result.content)\n }\n }\n\n const optimized = mergedParts.join('\\n\\n')\n const wasOptimized = mergedParts.length > 0\n\n const usageResult = totalUsage\n ? { inputTokens: totalUsage.input, outputTokens: totalUsage.output, totalTokens: totalUsage.input + totalUsage.output }\n : undefined\n\n // Collect errors and warnings from sections\n const errors = allResults.filter(r => r.error).map(r => `${r.section}: ${r.error}`)\n const warnings = allResults.flatMap(r => r.warnings ?? []).map(w => `${w.section}: ${w.warning}`)\n\n const debugLogsDir = debug && uncachedSections.length > 0\n ? join(skillDir, '.skilld', 'logs')\n : undefined\n\n return {\n optimized,\n wasOptimized,\n error: errors.length > 0 ? 
errors.join('; ') : undefined,\n warnings: warnings.length > 0 ? warnings : undefined,\n finishReason: wasOptimized ? 'stop' : 'error',\n usage: usageResult,\n cost: totalCost || undefined,\n debugLogsDir,\n }\n}\n\n// ── Helpers ──────────────────────────────────────────────────────────\n\n/** Shorten absolute paths for display: /home/user/project/.claude/skills/vue/SKILL.md → .claude/.../SKILL.md */\nfunction shortenPath(p: string): string {\n const refIdx = p.indexOf('.skilld/')\n if (refIdx !== -1)\n return p.slice(refIdx + '.skilld/'.length)\n // Keep just filename for other paths\n const parts = p.split('/')\n return parts.length > 2 ? `.../${parts.slice(-2).join('/')}` : p\n}\n\n/** Replace absolute paths in a command string with shortened versions */\nfunction shortenCommand(cmd: string): string {\n return cmd.replace(/\\/[^\\s\"']+/g, (match) => {\n // Only shorten paths that look like they're inside a project\n if (match.includes('.claude/') || match.includes('.skilld/') || match.includes('node_modules/'))\n return `.../${match.split('/').slice(-2).join('/')}`\n return match\n })\n}\n\n/** Clean a single section's LLM output: strip markdown fences, frontmatter, sanitize */\nexport function cleanSectionOutput(content: string): string {\n let cleaned = content.trim()\n\n // Strip wrapping fences if output is wrapped in ```markdown, ```md, or bare ```\n // Requires matched open+close pair to avoid stripping internal code blocks\n const wrapMatch = cleaned.match(/^```(?:markdown|md)?[^\\S\\n]*\\n([\\s\\S]+)\\n```[^\\S\\n]*$/)\n if (wrapMatch) {\n const inner = wrapMatch[1]!.trim()\n // For bare ``` wrappers (no markdown/md tag), verify inner looks like section output\n const isExplicitWrapper = /^```(?:markdown|md)/.test(cleaned)\n if (isExplicitWrapper || /^##\\s/m.test(inner) || /^- (?:BREAKING|DEPRECATED|NEW): /m.test(inner)) {\n cleaned = inner\n }\n }\n\n // Strip accidental frontmatter or leading horizontal rules\n const fmMatch = cleaned.match(/^-{3,}\\n/)\n if (fmMatch) {\n const afterOpen = fmMatch[0].length\n const closeMatch = cleaned.slice(afterOpen).match(/\\n-{3,}/)\n if (closeMatch) {\n cleaned = cleaned.slice(afterOpen + closeMatch.index! 
+ closeMatch[0].length).trim()\n }\n else {\n cleaned = cleaned.slice(afterOpen).trim()\n }\n }\n\n // Strip raw code preamble before first section marker (defense against LLMs dumping source)\n // Section markers: ## heading, BREAKING/DEPRECATED/NEW labels\n const firstMarker = cleaned.match(/^(##\\s|- (?:BREAKING|DEPRECATED|NEW): )/m)\n if (firstMarker?.index && firstMarker.index > 0) {\n const preamble = cleaned.slice(0, firstMarker.index)\n // Only strip if preamble looks like code (contains function/const/export/return patterns)\n if (/\\b(?:function|const |let |var |export |return |import |async |class )\\b/.test(preamble)) {\n cleaned = cleaned.slice(firstMarker.index).trim()\n }\n }\n\n // Strip duplicate section headings (LLM echoing the format example before real content)\n // Handles headings separated by blank lines or boilerplate text\n const headingMatch = cleaned.match(/^(## .+)\\n/)\n if (headingMatch) {\n const heading = headingMatch[1]!\n const afterFirst = headingMatch[0].length\n const secondIdx = cleaned.indexOf(heading, afterFirst)\n if (secondIdx !== -1) {\n // Only strip if the gap between duplicates is small (< 200 chars of boilerplate)\n if (secondIdx - afterFirst < 200)\n cleaned = cleaned.slice(secondIdx).trim()\n }\n }\n\n // Normalize source link paths: ensure .skilld/ prefix is present\n // LLMs sometimes emit [source](./docs/...) instead of [source](./.skilld/docs/...)\n cleaned = cleaned.replace(\n /\\[source\\]\\(\\.\\/((docs|issues|discussions|releases|pkg|guide)\\/)/g,\n '[source](./.skilld/$1',\n )\n\n cleaned = sanitizeMarkdown(cleaned)\n\n // Reject content that lacks any section structure — likely leaked LLM reasoning/narration\n // Valid sections contain headings (##), API change labels, or source-linked items\n if (!/^##\\s/m.test(cleaned) && !/^- (?:BREAKING|DEPRECATED|NEW): /m.test(cleaned) && !/\\[source\\]/.test(cleaned)) {\n return ''\n }\n\n return cleaned\n}\n","/**\n * Detect packages from framework presets (e.g., Nuxt modules in nuxt.config)\n * These are string literals in config arrays, not imports — the import scanner misses them.\n */\n\nimport type { PackageUsage } from './detect-imports.ts'\nimport { readFile } from 'node:fs/promises'\nimport { parseSync } from 'oxc-parser'\nimport { join } from 'pathe'\n\nconst NUXT_CONFIG_FILES = ['nuxt.config.ts', 'nuxt.config.js', 'nuxt.config.mjs']\nconst NUXT_ECOSYSTEM = ['vue', 'nitro', 'h3']\n\nasync function findNuxtConfig(cwd: string): Promise<{ path: string, content: string } | null> {\n for (const name of NUXT_CONFIG_FILES) {\n const path = join(cwd, name)\n const content = await readFile(path, 'utf8').catch(() => null)\n if (content)\n return { path, content }\n }\n return null\n}\n\n/**\n * Walk AST node to find all string values inside a `modules` array property.\n * Handles: defineNuxtConfig({ modules: [...] }) and export default { modules: [...] 
}\n */\nexport function extractModuleStrings(node: any): string[] {\n if (!node || typeof node !== 'object')\n return []\n\n // Found a Property with key \"modules\" and an ArrayExpression value\n if (node.type === 'Property' && !node.computed\n && (node.key?.type === 'Identifier' && node.key.name === 'modules')\n && node.value?.type === 'ArrayExpression') { return node.value.elements.filter((el: any) => el?.type === 'Literal' && typeof el.value === 'string').map((el: any) => el.value as string) }\n\n // Recurse into arrays and object values\n const results: string[] = []\n if (Array.isArray(node)) {\n for (const child of node)\n results.push(...extractModuleStrings(child))\n }\n else {\n for (const key of Object.keys(node)) {\n if (key === 'start' || key === 'end' || key === 'type')\n continue\n const val = node[key]\n if (val && typeof val === 'object')\n results.push(...extractModuleStrings(val))\n }\n }\n return results\n}\n\n/**\n * Detect Nuxt modules from nuxt.config.{ts,js,mjs}\n */\nexport async function detectNuxtModules(cwd: string): Promise<PackageUsage[]> {\n const config = await findNuxtConfig(cwd)\n if (!config)\n return []\n\n const result = parseSync(config.path, config.content)\n const modules = extractModuleStrings(result.program)\n\n // Dedupe and build results\n const seen = new Set<string>()\n const packages: PackageUsage[] = []\n\n for (const mod of modules) {\n if (!seen.has(mod)) {\n seen.add(mod)\n packages.push({ name: mod, count: 0, source: 'preset' })\n }\n }\n\n // Add core ecosystem packages\n for (const pkg of NUXT_ECOSYSTEM) {\n if (!seen.has(pkg)) {\n seen.add(pkg)\n packages.push({ name: pkg, count: 0, source: 'preset' })\n }\n }\n\n return packages\n}\n\n/**\n * Run all preset detectors and merge results\n */\nexport async function detectPresetPackages(cwd: string): Promise<PackageUsage[]> {\n // Currently only Nuxt, but extensible for other frameworks\n return detectNuxtModules(cwd)\n}\n","/**\n * Detect directly-used npm packages by scanning source files\n * Uses mlly for proper ES module parsing + globby for gitignore support\n */\n\nimport { readFile } from 'node:fs/promises'\nimport { globby } from 'globby'\nimport { findDynamicImports, findStaticImports } from 'mlly'\nimport { detectPresetPackages } from './detect-presets.ts'\n\nexport interface PackageUsage {\n name: string\n count: number\n source?: 'import' | 'preset'\n}\n\nexport interface DetectResult {\n packages: PackageUsage[]\n error?: string\n}\n\nconst PATTERNS = ['**/*.{ts,js,vue,mjs,cjs,tsx,jsx,mts,cts}']\nconst IGNORE = ['**/node_modules/**', '**/dist/**', '**/.nuxt/**', '**/.output/**', '**/coverage/**']\n\nfunction addPackage(counts: Map<string, number>, specifier: string | undefined) {\n if (!specifier || specifier.startsWith('.') || specifier.startsWith('/'))\n return\n\n // Extract package name (handle subpaths like 'pkg/subpath')\n const name = specifier.startsWith('@')\n ? 
specifier.split('/').slice(0, 2).join('/')\n : specifier.split('/')[0]!\n\n if (!isNodeBuiltin(name)) {\n counts.set(name, (counts.get(name) || 0) + 1)\n }\n}\n\n/**\n * Scan source files to detect all directly-imported npm packages\n * Async with gitignore support for proper spinner animation\n */\nexport async function detectImportedPackages(cwd: string = process.cwd()): Promise<DetectResult> {\n try {\n const counts = new Map<string, number>()\n\n const files = await globby(PATTERNS, {\n cwd,\n ignore: IGNORE,\n gitignore: true,\n absolute: true,\n })\n\n await Promise.all(files.map(async (file) => {\n const content = await readFile(file, 'utf8')\n\n // Static: import x from 'pkg'\n for (const imp of findStaticImports(content)) {\n addPackage(counts, imp.specifier)\n }\n\n // Dynamic: import('pkg') - expression is the string literal\n for (const imp of findDynamicImports(content)) {\n // expression includes quotes, extract string value\n const match = imp.expression.match(/^['\"]([^'\"]+)['\"]$/)\n if (match)\n addPackage(counts, match[1]!)\n }\n }))\n\n // Sort by usage count (descending), then alphabetically\n const packages: PackageUsage[] = [...counts.entries()]\n .map(([name, count]) => ({ name, count, source: 'import' as const }))\n .sort((a, b) => b.count - a.count || a.name.localeCompare(b.name))\n\n // Merge preset-detected packages (imports take priority)\n const presets = await detectPresetPackages(cwd)\n const importNames = new Set(packages.map(p => p.name))\n for (const preset of presets) {\n if (!importNames.has(preset.name))\n packages.push(preset)\n }\n\n return { packages }\n }\n catch (err) {\n return { packages: [], error: String(err) }\n }\n}\n\nconst NODE_BUILTINS = new Set([\n 'assert',\n 'buffer',\n 'child_process',\n 'cluster',\n 'console',\n 'constants',\n 'crypto',\n 'dgram',\n 'dns',\n 'domain',\n 'events',\n 'fs',\n 'http',\n 'https',\n 'module',\n 'net',\n 'os',\n 'path',\n 'perf_hooks',\n 'process',\n 'punycode',\n 'querystring',\n 'readline',\n 'repl',\n 'stream',\n 'string_decoder',\n 'sys',\n 'timers',\n 'tls',\n 'tty',\n 'url',\n 'util',\n 'v8',\n 'vm',\n 'wasi',\n 'worker_threads',\n 'zlib',\n])\n\nfunction isNodeBuiltin(pkg: string): boolean {\n const base = pkg.startsWith('node:') ? 
pkg.slice(5) : pkg\n return NODE_BUILTINS.has(base.split('/')[0]!)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAOA,MAAaA,QAAM;AACnB,MAAaC,YAAU;AAEvB,MAAaC,WAAwC;CACnD,MAAM;EAAE,OAAO;EAAQ,MAAM;EAAY,MAAM;EAAiC;CAChF,QAAQ;EAAE,OAAO;EAAU,MAAM;EAAc,MAAM;EAA2B;CAChF,OAAO;EAAE,OAAO;EAAS,MAAM;EAAa,MAAM;EAA6B,aAAa;;CAC7F;AAED,SAAgBC,YAAU,OAAe,UAAkB,aAAiC;CAC1F,MAAM,YAAY,KAAK,UAAU,UAAU;AAQ3C,QAAO;EACL;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EAdmB;GACnB,GAFe,CAAC,UAAU,GAAG,YAAY,CAE7B,SAAQ,MAAK;IAAC,QAAQ,EAAE;IAAO,QAAQ,EAAE;IAAO,QAAQ,EAAE;IAAM,CAAC;GAC7E,SAAS,UAAU;GACnB;GACA;GACD,CAAC,KAAK,IAAI;EAWT;EACA;EACA;EACA;EACA,GAAG,YAAY,SAAQ,MAAK,CAAC,aAAa,EAAE,CAAC;EAC7C;EACD;;AAaH,SAAgBC,YAAU,MAA2B;AACnD,KAAI;EACF,MAAM,MAAM,KAAK,MAAM,KAAK;AAG5B,MAAI,IAAI,SAAS,gBAAgB;GAC/B,MAAM,MAAM,IAAI;AAChB,OAAI,CAAC,IACH,QAAO,EAAE;AAGX,OAAI,IAAI,SAAS,yBAAyB,IAAI,OAAO,SAAS,aAC5D,QAAO,EAAE,WAAW,IAAI,MAAM,MAAM;AAGtC,UAAO,EAAE;;AAIX,MAAI,IAAI,SAAS,eAAe,IAAI,SAAS,SAAS;GACpD,MAAM,UAAU,IAAI,QAAQ;GAG5B,MAAM,QAAQ,QAAQ,QAAQ,MAAW,EAAE,SAAS,WAAW;AAC/D,OAAI,MAAM,QAAQ;IAChB,MAAM,QAAQ,MAAM,KAAK,MAAW,EAAE,KAAK;IAE3C,MAAM,OAAO,MAAM,KAAK,MAAW;KACjC,MAAM,QAAQ,EAAE,SAAS,EAAE;AAC3B,YAAO,MAAM,aAAa,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS,MAAM,WAAW;MACzF,CAAC,OAAO,QAAQ,CAAC,KAAK,KAAK;IAE7B,MAAM,YAAY,MAAM,MAAM,MAAW,EAAE,SAAS,WAAW,EAAE,OAAO,QAAQ;AAChF,WAAO;KAAE,UAAU,MAAM,KAAK,KAAK;KAAE,UAAU,QAAQ,KAAA;KAAW,cAAc,WAAW,OAAO;KAAS;;GAI7G,MAAM,OAAO,QACV,QAAQ,MAAW,EAAE,SAAS,OAAO,CACrC,KAAK,MAAW,EAAE,KAAK,CACvB,KAAK,GAAG;AACX,OAAI,KACF,QAAO,EAAE,UAAU,MAAM;;AAI7B,MAAI,IAAI,SAAS,UAAU;GACzB,MAAM,IAAI,IAAI;AACd,UAAO;IACL,MAAM;IACN,OAAO,IAAI;KAAE,OAAO,EAAE,gBAAgB,EAAE,eAAe;KAAG,QAAQ,EAAE,iBAAiB,EAAE,gBAAgB;KAAG,GAAG,KAAA;IAC7G,MAAM,IAAI;IACV,OAAO,IAAI;IACZ;;SAGC;AACN,QAAO,EAAE;;;;;;;;;AClGX,MAAaC,QAAM;AACnB,MAAaC,YAAU;AAEvB,MAAaC,WAAwC;CACnD,iBAAiB;EAAE,OAAO;EAAiB,MAAM;EAAiB,MAAM;EAA+B;CACvG,uBAAuB;EAAE,OAAO;EAAuB,MAAM;EAAuB,MAAM;EAAgC,aAAa;EAAM;CAC7I,iBAAiB;EAAE,OAAO;EAAiB,MAAM;EAAiB,MAAM;;CACzE;AAED,SAAgBC,YAAU,OAAe,WAAmB,cAAkC;AAC5F,QAAO;EACL;EACA;EACA;EACA;EACA;EAMA;EACA;EACD;;AAGH,SAAgBC,YAAU,MAA2B;AACnD,KAAI;EACF,MAAM,MAAM,KAAK,MAAM,KAAK;AAE5B,MAAI,IAAI,SAAS,oBAAoB,IAAI,MAAM;GAC7C,MAAM,OAAO,IAAI;AAEjB,OAAI,KAAK,SAAS,mBAAmB,KAAK,KACxC,QAAO,EAAE,UAAU,KAAK,MAAM;AAGhC,OAAI,KAAK,SAAS,uBAAuB,KAAK,mBAAmB;IAC/D,MAAM,MAAM,KAAK,WAAW;IAC5B,MAAM,eAAgB,aAAa,KAAK,IAAI,GAAI,KAAK,oBAAoB,KAAA;AACzE,WAAO;KAAE,UAAU;KAAQ,UAAU,IAAI,KAAK,kBAAkB,OAAO;KAAiB;KAAc;;AAGxG,OAAI,KAAK,SAAS,iBAAiB,KAAK,SAAS,OAE/C,QAAO;IAAE,UAAU;IAAS,UADd,KAAK,QAAQ,KAAK,MAAsC,EAAE,KAAK,CAAC,KAAK,KAAA;IACtC;;AAKjD,MAAI,IAAI,SAAS,kBAAkB,IAAI,MAAM,SAAS,oBACpD,QAAO;GAAE,UAAU;GAAQ,UAAU,IAAI,KAAK;GAAS;AAIzD,MAAI,IAAI,SAAS,oBAAoB,IAAI,MACvC,QAAO;GACL,MAAM;GACN,OAAO;IACL,OAAO,IAAI,MAAM,gBAAgB;IACjC,QAAQ,IAAI,MAAM,iBAAiB;;GAEtC;AAIH,MAAI,IAAI,SAAS,iBAAiB,IAAI,SAAS,QAC7C,QAAO,EAAE,MAAM,MAAM;SAGnB;AACN,QAAO,EAAE;;;;;;;;;AC5EX,MAAa,MAAM;AACnB,MAAa,UAAU;AAEvB,MAAa,SAAwC;CACnD,gBAAgB;EAAE,OAAO;EAAwB,MAAM;EAAgB,MAAM;EAAgB;CAC7F,kBAAkB;EAAE,OAAO;EAA0B,MAAM;EAAkB,MAAM;EAAY,aAAa;;CAC7G;AAED,SAAgB,UAAU,OAAe,UAAkB,aAAiC;AAC1F,QAAO;EACL;EACA;EACA;EACA;EACA;EACA,uFAAuF,sBAAsB,CAAC;EAC9G;EACA;EACA,GAAG,YAAY,SAAQ,MAAK,CAAC,yBAAyB,EAAE,CAAA;EACzD;;AAIH,SAAgB,UAAU,MAA2B;AACnD,KAAI;EACF,MAAM,MAAM,KAAK,MAAM,KAAK;AAG5B,MAAI,IAAI,SAAS,aAAa,IAAI,SAAS,eAAe,IAAI,QAC5D,QAAO,IAAI,QAAQ,EAAE,WAAW,IAAI,SAAS,GAAG,EAAE,UAAU,IAAI,SAAS;AAI3E,MAAI,IAAI,SAAS,cAAc,IAAI,SAAS,aAAa;GACvD,MAAM,OAAO,IAAI,aAAa,IAAI,QAAQ,IAAI,QAAQ;GACtD,MAAM,SAAS,IAAI,cAAc,IAAI,QAAQ,IAAI,SAAS,EAAE;GAC5D,MAAM,OAAO,OAAO,aAAa,OAAO,QAAQ,OAAO,YAAY,OAAO,WAAW,OAAO,SAAS,OAAO,WAAW;AAEvH,OAAI,SA
AS,gBAAgB,OAAO,QAClC,QAAO;IAAE,UAAU;IAAM,UAAU,QAAQ,KAAA;IAAW,cAAc,OAAO;IAAS;AAEtF,UAAO;IAAE,UAAU;IAAM,UAAU,QAAQ,KAAA;IAAW;;AAIxD,MAAI,IAAI,SAAS,UAAU;GACzB,MAAM,IAAI,IAAI;AACd,UAAO;IACL,MAAM;IACN,OAAO,IAAI;KAAE,OAAO,EAAE,gBAAgB,EAAE,SAAS;KAAG,QAAQ,EAAE,iBAAiB,EAAE,UAAU;KAAG,GAAG,KAAA;IACjG,OAAO,GAAG;IACX;;SAGC;AACN,QAAO,EAAE;;AChCX,MAAM,aAAqC;CAEzC,MAAM;CACN,MAAM;CACN,MAAM;CACN,OAAO;CACP,MAAM;CAEN,WAAW;CACX,WAAW;CACX,YAAY;CACZ,gBAAgB;CAChB,qBAAqB;CACrB,mBAAmB;CACpB;AAOD,SAAgB,mBAAmB,KAA0D;CAC3F,IAAI,UAAU;CACd,IAAI,cAAc;CAElB,SAAS,KAAK,KAAa;AACzB,MAAI,QAAQ,SAAS;AACnB;AACA,OAAI,QAAQ,GAAG,IAAI,aAAa,YAAY,UAAU;SAEnD;AACH,aAAU;AACV,iBAAc;AACd,OAAI,QAAQ,IAAI;;;AAIpB,SAAQ,EAAE,MAAM,OAAO,cAAc;AACnC,MAAI,SAAS,QAAQ;AACnB,QAAK,GAAG,UAAU,YAAY,QAAQ,aAAa,GAAG,YAAY;AAClE;;AAEF,MAAI,SAAS,eAAe,CAAC,MAAM,WAAW,IAAI,CAChD;EAGF,MAAM,QAAQ,MAAM,MAAM,+BAA+B;AACzD,MAAI,CAAC,MACH;EAEF,MAAM,QAAQ,MAAM,GAAI,MAAM,IAAI,CAAC,KAAI,MAAK,EAAE,MAAM,CAAC;EACrD,MAAM,QAAQ,MAAM,IAAI,MAAM,IAAI,CAAC,KAAI,MAAK,EAAE,MAAM,CAAC,IAAI,EAAE;AAE3D,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,UAAU,MAAM;GACtB,MAAM,OAAO,MAAM,MAAM,MAAM,MAAM;GACrC,MAAM,OAAO,WAAW,YAAY;GACpC,MAAM,SAAS,UAAU,YAAY,QAAQ,aAAa;AAE1D,QAAK,YAAY,UAAU,YAAY,wBAAwB,MAAM;IACnE,MAAM,cAAc,KAAK,MAAM,4BAA4B;AAC3D,QAAI,YACF,MAAK,GAAG,OAAO,qBAAqB,YAAY,GAAG,UAAU;aAEtD,KAAK,SAAS,kBAAkB,CACvC,MAAK,GAAG,OAAO,eAAe;SAE3B;KACH,MAAM,YAAY,eAAe,KAAK;AACtC,UAAK,GAAG,OAAO,UAAU,UAAU,SAAS,KAAK,GAAG,UAAU,MAAM,GAAG,GAAG,CAAC,OAAO,YAAY;;SAKhG,MAAK,GAAG,SAAS,KAAK,WADT,YAAY,QAAQ,MAAM,CACD,SAAS;;;;AAQvD,MAAM,WAAW;CAACC;CAAQC;CAAQC;CAAM;AAExC,MAAM,iBAAwG;CAC5G,QAAQC;CACR,QAAQC;CACR,OAAOC;CACR;AAED,MAAM,iBAAiE;CACrE,QAAQC;CACR,QAAQC;CACR,OAAOC;CACR;AAID,MAAa,aAA6D,OAAO,YAC/E,SAAS,SAAQ,QACf,OAAO,QAAQ,IAAI,OAAO,CAAC,KAAK,CAAC,IAAI,WAAW,CAC9C,IACA;CAAE,GAAG;CAAO,KAAK,IAAI;CAAK,SAAS,IAAI;CAAS,CACjD,CAAC,CACH,CACF;AAID,SAAgB,aAAa,IAA2B;AACtD,QAAO,WAAW,KAAK,QAAQ;;AAGjC,SAAgB,cAAc,IAA2B;CACvD,MAAM,SAAS,WAAW;AAC1B,KAAI,CAAC,OACH,QAAO;AAET,QAAO,GADWC,QAAO,OAAO,UAAU,eAAe,OAAO,IAC5C,KAAK,OAAO;;AAGlC,eAAsB,qBAAgE;CACpF,MAAM,YAAY,UAAU,KAAK;CACjC,MAAM,YAAY,YAAY,UAAU;CAGxC,MAAM,gBADkB,uBAAuB,CACT,QAAO,OAAMA,QAAO,IAAI,IAAI;CAElE,MAAM,YAAY,MAAM,QAAQ,IAC9B,cAAc,IAAI,OAAO,YAAY;EACnC,MAAM,MAAMA,QAAO,SAAS;AAC5B,MAAI;AACF,SAAM,UAAU,GAAG,UAAU,GAAG,MAAM;AACtC,UAAO;UAEH;AAAE,UAAO;;GACf,CACH;CACD,MAAM,oBAAoB,IAAI,IAAI,UAAU,QAAQ,OAAwB,MAAM,KAAK,CAAC;AAExF,QAAQ,OAAO,QAAQ,WAAW,CAC/B,QAAQ,CAAC,GAAG,YAAY,kBAAkB,IAAI,OAAO,QAAQ,CAAC,CAC9D,KAAK,CAAC,IAAI,aAAa;EACtB;EACA,MAAM,OAAO;EACb,MAAM,OAAO;EACb,aAAa,OAAO;EACpB,SAAS,OAAO;EAChB,WAAWA,QAAO,OAAO,UAAU,eAAe,OAAO;EAC1D,EAAE;;AAMP,SAAS,qBAAqB,UAA4B;CACxD,MAAM,UAAU,KAAK,UAAU,UAAU;AACzC,KAAI,CAAC,WAAW,QAAQ,CACtB,QAAO,EAAE;CACX,MAAM,WAAW,YAAY,QAAQ,CAClC,KAAI,UAAS,KAAK,SAAS,MAAM,CAAC,CAClC,QAAO,MAAK,UAAU,EAAE,CAAC,gBAAgB,IAAI,WAAW,EAAE,CAAC,CAC3D,KAAI,MAAK,aAAa,EAAE,CAAC;CAI5B,MAAM,0BAAU,IAAI,KAAa;AACjC,MAAK,MAAM,KAAK,UAAU;EACxB,MAAM,SAAS,QAAQ,EAAE;AACzB,MAAI,CAAC,SAAS,SAAS,OAAO,CAC5B,SAAQ,IAAI,OAAO;;AAGvB,QAAO,CAAC,GAAG,UAAU,GAAG,QAAQ;;AAKlC,MAAM,YAAY,KAAK,SAAS,EAAE,WAAW,YAAY;AAGzD,SAAS,uBAAuB,QAAwB;AACtD,QAAO,OAAO,QAAQ,yDAAyD,cAAc;;AAG/F,SAAS,WAAW,QAAgB,OAAsB,SAA+B;AACvF,QAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,MAAM,GAAG,QAAQ,GAAG,uBAAuB,OAAO,GAAG,CAAC,OAAO,MAAM,CAAC,MAAM,GAAG,GAAG;;AAG7H,SAAS,UAAU,QAAgB,OAAsB,SAAuB,SAAS,QAAc,KAAK,KAAqB;CAC/H,MAAM,OAAO,KAAK,WAAW,GAAG,WAAW,QAAQ,OAAO,QAAQ,CAAC,OAAO;AAC1E,KAAI,CAAC,WAAW,KAAK,CACnB,QAAO;AACT,KAAI;EACF,MAAM,EAAE,MAAM,cAAc,KAAK,MAAM,aAAa,MAAM,QAAQ,CAAC;AACnE,SAAO,KAAK,KAAK,GAAG,YAAY,SAAS,OAAO;SAE5C;AAAE,SAAO;;;AAGjB,SAAS,SAAS,QAAgB,OAAsB,SAAuB,MAAoB;AACj
G,WAAU,WAAW;EAAE,WAAW;EAAM,MAAM;EAAO,CAAC;AACtD,eACE,KAAK,WAAW,GAAG,WAAW,QAAQ,OAAO,QAAQ,CAAC,OAAO,EAC7D,KAAK,UAAU;EAAE;EAAM;EAAO;EAAS,WAAW,KAAK,KAAA;EAAO,CAAC,EAC/D,EAAE,MAAM,KAAO,CAChB;;AAmBH,SAAS,gBAAgB,MAAsD;CAC7E,MAAM,EAAE,SAAS,QAAQ,YAAY,UAAU,OAAO,YAAY,SAAS,OAAO,qBAAqB;CAEvG,MAAM,YAAY,WAAW;AAC7B,KAAI,CAAC,UACH,QAAO,QAAQ,QAAQ;EAAE;EAAS,SAAS;EAAI,cAAc;EAAO,OAAO,6BAA6B;EAAS,CAAC;CAGpH,MAAM,EAAE,KAAK,OAAO,aAAa;CACjC,MAAM,cAAc,qBAAqB,SAAS;CAClD,MAAM,OAAO,eAAe,KAAK,UAAU,UAAU,YAAY;CACjE,MAAM,YAAY,eAAe;CAEjC,MAAM,YAAY,KAAK,UAAU,UAAU;CAC3C,MAAM,aAAa,KAAK,WAAW,WAAW;AAG9C,KAAI,WAAW,WAAW,CACxB,YAAW,WAAW;AAGxB,eAAc,KAAK,WAAW,UAAU,QAAQ,KAAK,EAAE,OAAO;AAE9D,QAAO,IAAI,SAAwB,YAAY;EAC7C,MAAM,OAAO,MAAM,KAAK,MAAM;GAC5B,KAAK;GACL,OAAO;IAAC;IAAQ;IAAQ;IAAO;GAC/B;GACA,KAAK;IAAE,GAAG,QAAQ;IAAK,UAAU;IAAK;GACtC,OAAO;GACR,CAAC;EAEF,IAAI,SAAS;EACb,IAAI,kBAAkB;EACtB,IAAI,mBAAmB;EACvB,IAAI;EACJ,IAAI;EACJ,MAAM,WAAqB,EAAE;AAE7B,eAAa;GAAE,OAAO;GAAiB,MAAM;GAAa,MAAM;GAAI,WAAW;GAAI;GAAS,CAAC;AAE7F,OAAK,MAAM,MAAM,OAAO;AACxB,OAAK,MAAM,KAAK;AAEhB,OAAK,OAAO,GAAG,SAAS,UAAkB;AACxC,aAAU,MAAM,UAAU;GAC1B,MAAM,QAAQ,OAAO,MAAM,KAAK;AAChC,YAAS,MAAM,KAAK,IAAI;AAExB,QAAK,MAAM,QAAQ,OAAO;AACxB,QAAI,CAAC,KAAK,MAAM,CACd;AACF,QAAI,MACF,UAAS,KAAK,KAAK;IACrB,MAAM,MAAM,UAAU,KAAK;AAE3B,QAAI,IAAI,UACN,oBAAmB,IAAI;AACzB,QAAI,IAAI,SACN,mBAAkB,IAAI;AAExB,QAAI,IAAI,aACN,oBAAmB,IAAI;AAEzB,QAAI,IAAI,UAAU;KAChB,MAAM,OAAO,IAAI,WACb,IAAI,IAAI,SAAS,IAAI,IAAI,SAAS,KAClC,IAAI,IAAI,SAAS;AACrB,kBAAa;MAAE,OAAO;MAAM,MAAM;MAAa,MAAM;MAAI,WAAW;MAAM;MAAS,CAAC;;AAGtF,QAAI,IAAI,MACN,SAAQ,IAAI;AACd,QAAI,IAAI,QAAQ,KACd,QAAO,IAAI;;IAEf;EAEF,IAAI,SAAS;AACb,OAAK,OAAO,GAAG,SAAS,UAAkB;AACxC,aAAU,MAAM,UAAU;IAC1B;AAEF,OAAK,GAAG,UAAU,SAAS;AAEzB,OAAI,OAAO,MAAM,EAAE;IACjB,MAAM,MAAM,UAAU,OAAO;AAC7B,QAAI,IAAI,UACN,oBAAmB,IAAI;AACzB,QAAI,IAAI,SACN,mBAAkB,IAAI;AACxB,QAAI,IAAI,aACN,oBAAmB,IAAI;AACzB,QAAI,IAAI,MACN,SAAQ,IAAI;AACd,QAAI,IAAI,QAAQ,KACd,QAAO,IAAI;;AAKf,QAAK,MAAM,SAAS,YAAY,UAAU,CACxC,KAAI,UAAU,cAAc,CAAC,iBAAiB,IAAI,MAAM,EAAE;AAExD,QAAI,OAAO,OAAO,qBAAqB,CAAC,SAAS,MAAM,CACrD;AACF,QAAI,MAAM,WAAW,UAAU,IAAI,UAAU,OAC3C;AACF,QAAI;AACF,gBAAW,KAAK,WAAW,MAAM,CAAC;YAE9B;;GAKV,MAAM,OAAO,WAAW,WAAW,GAAG,aAAa,YAAY,QAAQ,GAAG,oBAAoB,iBAAiB,MAAM;GAGrH,MAAM,UAAU,KAAK,WAAW,OAAO;GACvC,MAAM,UAAU,QAAQ,aAAa,CAAC,QAAQ,MAAM,IAAI;AACxD,OAAI,SAAU,WAAW,CAAC,OAAO,SAAS,IAAK;AAC7C,cAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AACvC,QAAI,OACF,eAAc,KAAK,SAAS,GAAG,QAAQ,aAAa,EAAE,OAAO;;AAEjE,OAAI,OAAO;AACT,cAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AACvC,QAAI,SAAS,OACX,eAAc,KAAK,SAAS,GAAG,QAAQ,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACvE,QAAI,IACF,eAAc,KAAK,SAAS,GAAG,QAAQ,KAAK,EAAE,IAAI;;AAGtD,OAAI,CAAC,OAAO,SAAS,GAAG;AACtB,YAAQ;KAAE;KAAS,SAAS;KAAI,cAAc;KAAO,OAAO,OAAO,MAAM,IAAI,wBAAwB;KAAQ,CAAC;AAC9G;;GAIF,MAAM,UAAU,MAAM,mBAAmB,IAAI,GAAG;AAEhD,OAAI,QAEF,eAAc,YAAY,QAAQ;GAGpC,MAAM,YAAY,oBAAoB,QAAQ;GAE9C,MAAM,YADc,WAAW,YAAY,UAAU,QAAQ,GAAG,EAAE,EAChB,KAAI,OAAM;IAAE;IAAS,SAAS,EAAE;IAAS,EAAE;AAE7F,WAAQ;IACN;IACA;IACA,cAAc,CAAC,CAAC;IAChB,UAAU,UAAU,SAAS,WAAW,KAAA;IACxC;IACA;IACD,CAAC;IACF;AAEF,OAAK,GAAG,UAAU,QAAQ;AACxB,WAAQ;IAAE;IAAS,SAAS;IAAI,cAAc;IAAO,OAAO,IAAI;IAAS,CAAC;IAC1E;GACF;;AAKJ,eAAsB,aAAa,MAAoD;CACrF,MAAM,EAAE,aAAa,UAAU,QAAQ,UAAU,SAAS,WAAW,aAAa,cAAc,UAAU,UAAU,gBAAgB,YAAY,UAAU,MAAQ,OAAO,SAAS,UAAU,cAAc,UAAU,aAAa;CAKjO,MAAM,iBAAiB,uBAAuB;EAC5C;EACA;EACA;EACA,WAAW;EACX,gBAAgB;EAChB;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA,UAjBuB,YAAY,CAAC,eAAe,iBAAA;EAkBpD,CAAC;AAEF,KAAI,eAAe,SAAS,EAC1B,QAAO;EAAE,WAAW;EAAI,cAAc;EAAO,OAAO;EAAiC;AAIvF,KAAI,CADc,WAAW,OAE3B,QAAO;EAAE,WAAW;EAAI,cAAc;EAAO,OAAO,6BAA6B;EAAS;CAI5F,MAAM,gBAAiC,EAAE;CACzC,MAAM,mBAAqE,EAAE;AA
E7E,MAAK,MAAM,CAAC,SAAS,WAAW,gBAAgB;AAC9C,MAAI,CAAC,SAAS;AAEZ,OAAI,SAAS;IACX,MAAM,aAAa,qBAAqB;IACxC,MAAM,YAAY,kBAAkB,aAAa,SAAS,WAAW;AACrE,QAAI,WAAW;AACb,kBAAa;MAAE,OAAO,IAAI,QAAQ;MAAY,MAAM;MAAQ,MAAM;MAAW,WAAW;MAAI;MAAS,CAAC;AACtG,mBAAc,KAAK;MAAE;MAAS,SAAS;MAAW,cAAc;MAAM,CAAC;AACvE;;;GAKJ,MAAM,SAAS,UAAU,QAAQ,OAAO,QAAQ;AAChD,OAAI,QAAQ;AACV,iBAAa;KAAE,OAAO,IAAI,QAAQ;KAAY,MAAM;KAAQ,MAAM;KAAQ,WAAW;KAAI;KAAS,CAAC;AACnG,kBAAc,KAAK;KAAE;KAAS,SAAS;KAAQ,cAAc;KAAM,CAAC;AACpE;;;AAGJ,mBAAiB,KAAK;GAAE;GAAS;GAAQ,CAAC;;CAI5C,MAAM,YAAY,KAAK,UAAU,UAAU;AAC3C,WAAU,WAAW,EAAE,WAAW,MAAM,CAAC;AAGzC,MAAK,MAAM,SAAS,YAAY,UAAU,EAAE;EAC1C,MAAM,YAAY,KAAK,WAAW,MAAM;AACxC,MAAI;AACF,OAAI,UAAU,UAAU,CAAC,gBAAgB,IAAI,CAAC,WAAW,UAAU,CACjE,cAAa;IAAE,OAAO,iCAAiC,MAAM;IAAI,MAAM;IAAa,MAAM;IAAI,WAAW;IAAI,CAAC;UAE5G;;CAGR,MAAM,mBAAmB,IAAI,IAAI,YAAY,UAAU,CAAC;CAGxD,MAAM,aAAa;CACnB,MAAM,eAAe,iBAAiB,SAAS,IAC3C,MAAM,QAAQ,WACZ,iBAAiB,KAAK,EAAE,SAAS,UAAU,MAAM;EAC/C,MAAM,aAAa,qBAAqB;EACxC,MAAM,YAAY,gBAAgB;GAChC;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACD,CAAC;AAEF,MAAI,MAAM,EACR,QAAO,KAAK;AACd,SAAOC,WAAM,IAAI,WAAW,CAAC,KAAK,IAAI;GACtC,CACH,GACD,EAAE;CAGN,MAAM,aAA8B,CAAC,GAAG,cAAc;CACtD,IAAI;CACJ,IAAI,YAAY;CAChB,MAAM,aAA8E,EAAE;AAEtF,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;EAC5C,MAAM,IAAI,aAAa;EACvB,MAAM,EAAE,SAAS,WAAW,iBAAiB;AAC7C,MAAI,EAAE,WAAW,eAAe,EAAE,MAAM,cAAc;AACpD,cAAW,KAAK,EAAE,MAAM;AACxB,OAAI,EAAE,MAAM,OAAO;AACjB,iBAAa,cAAc;KAAE,OAAO;KAAG,QAAQ;KAAG;AAClD,eAAW,SAAS,EAAE,MAAM,MAAM;AAClC,eAAW,UAAU,EAAE,MAAM,MAAM;;AAErC,OAAI,EAAE,MAAM,QAAQ,KAClB,cAAa,EAAE,MAAM;AACvB,OAAI,CAAC,QACH,UAAS,QAAQ,OAAO,SAAS,EAAE,MAAM,QAAQ;QAGnD,YAAW,KAAK;GAAE,OAAO;GAAG;GAAS;GAAQ,CAAC;;AAKlD,MAAK,MAAM,EAAE,SAAS,YAAY,YAAY;AAC5C,eAAa;GAAE,OAAO,IAAI,QAAQ;GAAiB,MAAM;GAAa,MAAM;GAAI,WAAW;GAAI;GAAS,CAAC;AACzG,QAAMA,WAAM,WAAW;EACvB,MAAM,SAAS,MAAM,gBAAgB;GACnC;GACA;GACA,YAAY,qBAAqB;GACjC;GACA;GACA;GACA;GACA;GACA;GACA;GACD,CAAC,CAAC,OAAO,SAAgB;GAAE;GAAS,SAAS;GAAI,cAAc;GAAO,OAAO,IAAI;GAAS,EAAmB;AAE9G,aAAW,KAAK,OAAO;AACvB,MAAI,OAAO,gBAAgB,CAAC,QAC1B,UAAS,QAAQ,OAAO,SAAS,OAAO,QAAQ;AAClD,MAAI,OAAO,OAAO;AAChB,gBAAa,cAAc;IAAE,OAAO;IAAG,QAAQ;IAAG;AAClD,cAAW,SAAS,OAAO,MAAM;AACjC,cAAW,UAAU,OAAO,MAAM;;AAEpC,MAAI,OAAO,QAAQ,KACjB,cAAa,OAAO;;AAIxB,KAAI,SAAS;EACX,MAAM,eAAe,WAClB,QAAO,MAAK,EAAE,gBAAgB,EAAE,QAAQ,CACxC,KAAI,OAAM;GAAE,MAAM,qBAAqB,EAAE;GAAU,SAAS,EAAE;GAAS,EAAE;AAC5E,MAAI,aAAa,SAAS,EACxB,eAAc,aAAa,SAAS,aAAa;;CAKrD,MAAM,cAAwB,EAAE;AAChC,MAAK,MAAM,WAAW,qBAAqB;EACzC,MAAM,SAAS,WAAW,MAAK,MAAK,EAAE,YAAY,QAAQ;AAC1D,MAAI,QAAQ,gBAAgB,OAAO,QACjC,aAAY,KAAK,OAAO,QAAQ;;CAIpC,MAAM,YAAY,YAAY,KAAK,OAAO;CAC1C,MAAM,eAAe,YAAY,SAAS;CAE1C,MAAM,cAAc,aAChB;EAAE,aAAa,WAAW;EAAO,cAAc,WAAW;EAAQ,aAAa,WAAW,QAAQ,WAAW;EAAQ,GACrH,KAAA;CAGJ,MAAM,SAAS,WAAW,QAAO,MAAK,EAAE,MAAM,CAAC,KAAI,MAAK,GAAG,EAAE,QAAQ,IAAI,EAAE,QAAQ;CACnF,MAAM,WAAW,WAAW,SAAQ,MAAK,EAAE,YAAY,EAAE,CAAC,CAAC,KAAI,MAAK,GAAG,EAAE,QAAQ,IAAI,EAAE,UAAU;CAEjG,MAAM,eAAe,SAAS,iBAAiB,SAAS,IACpD,KAAK,UAAU,WAAW,OAAO,GACjC,KAAA;AAEJ,QAAO;EACL;EACA;EACA,OAAO,OAAO,SAAS,IAAI,OAAO,KAAK,KAAK,GAAG,KAAA;EAC/C,UAAU,SAAS,SAAS,IAAI,WAAW,KAAA;EAC3C,cAAc,eAAe,SAAS;EACtC,OAAO;EACP,MAAM,aAAa,KAAA;EACnB;EACD;;AAMH,SAAS,YAAY,GAAmB;CACtC,MAAM,SAAS,EAAE,QAAQ,WAAW;AACpC,KAAI,WAAW,GACb,QAAO,EAAE,MAAM,SAAS,EAAkB;CAE5C,MAAM,QAAQ,EAAE,MAAM,IAAI;AAC1B,QAAO,MAAM,SAAS,IAAI,OAAO,MAAM,MAAM,GAAG,CAAC,KAAK,IAAI,KAAK;;AAIjE,SAAS,eAAe,KAAqB;AAC3C,QAAO,IAAI,QAAQ,gBAAgB,UAAU;AAE3C,MAAI,MAAM,SAAS,WAAW,IAAI,MAAM,SAAS,WAAW,IAAI,MAAM,SAAS,gBAAgB,CAC7F,QAAO,OAAO,MAAM,MAAM,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,IAAI;AACpD,SAAO;GACP;;AAIJ,SAAgB,mBAAmB,SAAyB;CAC1D,IAAI,UAAU,QAAQ,MAAM;CAI5B,MAAM,
YAAY,QAAQ,MAAM,wDAAwD;AACxF,KAAI,WAAW;EACb,MAAM,QAAQ,UAAU,GAAI,MAAM;AAGlC,MAD0B,sBAAsB,KAAK,QAAQ,IACpC,SAAS,KAAK,MAAM,IAAI,oCAAoC,KAAK,MAAM,CAC9F,WAAU;;CAKd,MAAM,UAAU,QAAQ,MAAM,WAAW;AACzC,KAAI,SAAS;EACX,MAAM,YAAY,QAAQ,GAAG;EAC7B,MAAM,aAAa,QAAQ,MAAM,UAAU,CAAC,MAAM,UAAU;AAC5D,MAAI,WACF,WAAU,QAAQ,MAAM,YAAY,WAAW,QAAS,WAAW,GAAG,OAAO,CAAC,MAAM;MAGpF,WAAU,QAAQ,MAAM,UAAU,CAAC,MAAM;;CAM7C,MAAM,cAAc,QAAQ,MAAM,2CAA2C;AAC7E,KAAI,aAAa,SAAS,YAAY,QAAQ,GAAG;EAC/C,MAAM,WAAW,QAAQ,MAAM,GAAG,YAAY,MAAM;AAEpD,MAAI,0EAA0E,KAAK,SAAS,CAC1F,WAAU,QAAQ,MAAM,YAAY,MAAM,CAAC,MAAM;;CAMrD,MAAM,eAAe,QAAQ,MAAM,aAAa;AAChD,KAAI,cAAc;EAChB,MAAM,UAAU,aAAa;EAC7B,MAAM,aAAa,aAAa,GAAG;EACnC,MAAM,YAAY,QAAQ,QAAQ,SAAS,WAAW;AACtD,MAAI,cAAc;OAEZ,YAAY,aAAa,IAC3B,WAAU,QAAQ,MAAM,UAAU,CAAC,MAAM;;;AAM/C,WAAU,QAAQ,QAChB,qEACA,wBACD;AAED,WAAU,iBAAiB,QAAQ;AAInC,KAAI,CAAC,SAAS,KAAK,QAAQ,IAAI,CAAC,oCAAoC,KAAK,QAAQ,IAAI,CAAC,aAAa,KAAK,QAAQ,CAC9G,QAAO;AAGT,QAAO;;ACrsBT,MAAM,oBAAoB;CAAC;CAAkB;CAAkB;CAAkB;AACjF,MAAM,iBAAiB;CAAC;CAAO;CAAS;CAAK;AAE7C,eAAe,eAAe,KAAgE;AAC5F,MAAK,MAAM,QAAQ,mBAAmB;EACpC,MAAM,OAAO,KAAK,KAAK,KAAK;EAC5B,MAAM,UAAU,MAAM,SAAS,MAAM,OAAO,CAAC,YAAY,KAAK;AAC9D,MAAI,QACF,QAAO;GAAE;GAAM;GAAS;;AAE5B,QAAO;;AAOT,SAAgB,qBAAqB,MAAqB;AACxD,KAAI,CAAC,QAAQ,OAAO,SAAS,SAC3B,QAAO,EAAE;AAGX,KAAI,KAAK,SAAS,cAAc,CAAC,KAAK,YAChC,KAAK,KAAK,SAAS,gBAAgB,KAAK,IAAI,SAAS,aACtD,KAAK,OAAO,SAAS,kBAAqB,QAAO,KAAK,MAAM,SAAS,QAAQ,OAAY,IAAI,SAAS,aAAa,OAAO,GAAG,UAAU,SAAS,CAAC,KAAK,OAAY,GAAG,MAAgB;CAG1L,MAAM,UAAoB,EAAE;AAC5B,KAAI,MAAM,QAAQ,KAAK,CACrB,MAAK,MAAM,SAAS,KAClB,SAAQ,KAAK,GAAG,qBAAqB,MAAM,CAAC;KAG9C,MAAK,MAAM,OAAO,OAAO,KAAK,KAAK,EAAE;AACnC,MAAI,QAAQ,WAAW,QAAQ,SAAS,QAAQ,OAC9C;EACF,MAAM,MAAM,KAAK;AACjB,MAAI,OAAO,OAAO,QAAQ,SACxB,SAAQ,KAAK,GAAG,qBAAqB,IAAI,CAAC;;AAGhD,QAAO;;AAMT,eAAsB,kBAAkB,KAAsC;CAC5E,MAAM,SAAS,MAAM,eAAe,IAAI;AACxC,KAAI,CAAC,OACH,QAAO,EAAE;CAGX,MAAM,UAAU,qBADD,UAAU,OAAO,MAAM,OAAO,QAAQ,CACT,QAAQ;CAGpD,MAAM,uBAAO,IAAI,KAAa;CAC9B,MAAM,WAA2B,EAAE;AAEnC,MAAK,MAAM,OAAO,QAChB,KAAI,CAAC,KAAK,IAAI,IAAI,EAAE;AAClB,OAAK,IAAI,IAAI;AACb,WAAS,KAAK;GAAE,MAAM;GAAK,OAAO;GAAG,QAAQ;GAAU,CAAC;;AAK5D,MAAK,MAAM,OAAO,eAChB,KAAI,CAAC,KAAK,IAAI,IAAI,EAAE;AAClB,OAAK,IAAI,IAAI;AACb,WAAS,KAAK;GAAE,MAAM;GAAK,OAAO;GAAG,QAAQ;GAAU,CAAC;;AAI5D,QAAO;;AAMT,eAAsB,qBAAqB,KAAsC;AAE/E,QAAO,kBAAkB,IAAI;;ACvE/B,MAAM,WAAW,CAAC,2CAA2C;AAC7D,MAAM,SAAS;CAAC;CAAsB;CAAc;CAAe;CAAiB;CAAiB;AAErG,SAAS,WAAW,QAA6B,WAA+B;AAC9E,KAAI,CAAC,aAAa,UAAU,WAAW,IAAI,IAAI,UAAU,WAAW,IAAI,CACtE;CAGF,MAAM,OAAO,UAAU,WAAW,IAAI,GAClC,UAAU,MAAM,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,IAAI,GAC1C,UAAU,MAAM,IAAI,CAAC;AAEzB,KAAI,CAAC,cAAc,KAAK,CACtB,QAAO,IAAI,OAAO,OAAO,IAAI,KAAK,IAAI,KAAK,EAAE;;AAQjD,eAAsB,uBAAuB,MAAc,QAAQ,KAAK,EAAyB;AAC/F,KAAI;EACF,MAAM,yBAAS,IAAI,KAAqB;EAExC,MAAM,QAAQ,MAAM,OAAO,UAAU;GACnC;GACA,QAAQ;GACR,WAAW;GACX,UAAU;GACX,CAAC;AAEF,QAAM,QAAQ,IAAI,MAAM,IAAI,OAAO,SAAS;GAC1C,MAAM,UAAU,MAAM,SAAS,MAAM,OAAO;AAG5C,QAAK,MAAM,OAAO,kBAAkB,QAAQ,CAC1C,YAAW,QAAQ,IAAI,UAAU;AAInC,QAAK,MAAM,OAAO,mBAAmB,QAAQ,EAAE;IAE7C,MAAM,QAAQ,IAAI,WAAW,MAAM,qBAAqB;AACxD,QAAI,MACF,YAAW,QAAQ,MAAM,GAAI;;IAEjC,CAAC;EAGH,MAAM,WAA2B,CAAC,GAAG,OAAO,SAAS,CAAC,CACnD,KAAK,CAAC,MAAM,YAAY;GAAE;GAAM;GAAO,QAAQ;GAAmB,EAAE,CACpE,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,KAAK,cAAc,EAAE,KAAK,CAAC;EAGpE,MAAM,UAAU,MAAM,qBAAqB,IAAI;EAC/C,MAAM,cAAc,IAAI,IAAI,SAAS,KAAI,MAAK,EAAE,KAAK,CAAC;AACtD,OAAK,MAAM,UAAU,QACnB,KAAI,CAAC,YAAY,IAAI,OAAO,KAAK,CAC/B,UAAS,KAAK,OAAO;AAGzB,SAAO,EAAE,UAAU;UAEd,KAAK;AACV,SAAO;GAAE,UAAU,EAAE;GAAE,OAAO,OAAO,IAAA;GAAM;;;AAI/C,MAAM,gBAAgB,IAAI,IAAI;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CAC
A;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,SAAS,cAAc,KAAsB;CAC3C,MAAM,OAAO,IAAI,WAAW,QAAQ,GAAG,IAAI,MAAM,EAAE,GAAG;AACtD,QAAO,cAAc,IAAI,KAAK,MAAM,IAAI,CAAC,GAAI"}
package/dist/_chunks/embedding-cache2.mjs.map DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"embedding-cache2.mjs","names":[],"sources":["../../src/retriv/embedding-cache.ts"],"sourcesContent":["import type { DatabaseSync } from 'node:sqlite'\nimport type { Embedding } from 'retriv'\nimport { rmSync } from 'node:fs'\nimport { join } from 'pathe'\nimport { CACHE_DIR } from '../cache/index.ts'\n\ninterface EmbeddingConfig {\n resolve: () => Promise<{ embedder: (texts: string[]) => Promise<Embedding[]>, dimensions: number, maxTokens?: number }>\n}\n\nconst EMBEDDINGS_DB_PATH = join(CACHE_DIR, 'embeddings.db')\n\nfunction openDb(): DatabaseSync {\n // eslint-disable-next-line ts/no-require-imports\n const { DatabaseSync: DB } = require('node:sqlite') as typeof import('node:sqlite')\n const db = new DB(EMBEDDINGS_DB_PATH)\n db.exec('PRAGMA journal_mode=WAL')\n db.exec('PRAGMA busy_timeout=5000')\n db.exec(`CREATE TABLE IF NOT EXISTS embeddings (text_hash TEXT PRIMARY KEY, embedding BLOB NOT NULL)`)\n db.exec(`CREATE TABLE IF NOT EXISTS meta (key TEXT PRIMARY KEY, value TEXT NOT NULL)`)\n return db\n}\n\nfunction createSqliteStorage(db: DatabaseSync) {\n const getStmt = db.prepare('SELECT embedding FROM embeddings WHERE text_hash = ?')\n const setStmt = db.prepare('INSERT OR IGNORE INTO embeddings (text_hash, embedding) VALUES (?, ?)')\n\n return {\n get: (hash: string): Embedding | null => {\n const row = getStmt.get(hash) as { embedding: Buffer } | undefined\n if (!row)\n return null\n return new Float32Array(row.embedding.buffer, row.embedding.byteOffset, row.embedding.byteLength / 4)\n },\n set: (hash: string, embedding: Embedding): void => {\n const arr = embedding instanceof Float32Array ? embedding : new Float32Array(embedding)\n setStmt.run(hash, Buffer.from(arr.buffer, arr.byteOffset, arr.byteLength))\n },\n }\n}\n\nexport async function cachedEmbeddings(config: EmbeddingConfig): Promise<EmbeddingConfig> {\n const { cachedEmbeddings: retrivCached } = await import('retriv/embeddings/cached')\n const db = openDb()\n const storage = createSqliteStorage(db)\n\n const originalResolve = config.resolve\n const validatedConfig: EmbeddingConfig = {\n async resolve() {\n const resolved = await originalResolve()\n const getMetaStmt = db.prepare('SELECT value FROM meta WHERE key = ?')\n const setMetaStmt = db.prepare('INSERT OR REPLACE INTO meta (key, value) VALUES (?, ?)')\n\n const storedDims = getMetaStmt.get('dimensions') as { value: string } | undefined\n if (storedDims && Number(storedDims.value) !== resolved.dimensions) {\n db.exec('DELETE FROM embeddings')\n }\n setMetaStmt.run('dimensions', String(resolved.dimensions))\n\n return resolved\n },\n }\n\n return retrivCached(validatedConfig, { storage })\n}\n\nexport function clearEmbeddingCache(): void {\n rmSync(EMBEDDINGS_DB_PATH, { force: true 
})\n}\n"],"mappings":";;;;;;;;AAUA,MAAM,qBAAqB,KAAK,WAAW,gBAAgB;AAE3D,SAAS,SAAuB;CAE9B,MAAM,EAAE,cAAc,OAAA,UAAe,cAAc;CACnD,MAAM,KAAK,IAAI,GAAG,mBAAmB;AACrC,IAAG,KAAK,0BAA0B;AAClC,IAAG,KAAK,2BAA2B;AACnC,IAAG,KAAK,8FAA8F;AACtG,IAAG,KAAK,8EAA8E;AACtF,QAAO;;AAGT,SAAS,oBAAoB,IAAkB;CAC7C,MAAM,UAAU,GAAG,QAAQ,uDAAuD;CAClF,MAAM,UAAU,GAAG,QAAQ,wEAAwE;AAEnG,QAAO;EACL,MAAM,SAAmC;GACvC,MAAM,MAAM,QAAQ,IAAI,KAAK;AAC7B,OAAI,CAAC,IACH,QAAO;AACT,UAAO,IAAI,aAAa,IAAI,UAAU,QAAQ,IAAI,UAAU,YAAY,IAAI,UAAU,aAAa,EAAE;;EAEvG,MAAM,MAAc,cAA+B;GACjD,MAAM,MAAM,qBAAqB,eAAe,YAAY,IAAI,aAAa,UAAU;AACvF,WAAQ,IAAI,MAAM,OAAO,KAAK,IAAI,QAAQ,IAAI,YAAY,IAAI,WAAW,CAAC;;EAE7E;;AAGH,eAAsB,iBAAiB,QAAmD;CACxF,MAAM,EAAE,kBAAkB,iBAAiB,MAAM,OAAO;CACxD,MAAM,KAAK,QAAQ;CACnB,MAAM,UAAU,oBAAoB,GAAG;CAEvC,MAAM,kBAAkB,OAAO;AAiB/B,QAAO,aAhBkC,EACvC,MAAM,UAAU;EACd,MAAM,WAAW,MAAM,iBAAiB;EACxC,MAAM,cAAc,GAAG,QAAQ,uCAAuC;EACtE,MAAM,cAAc,GAAG,QAAQ,yDAAyD;EAExF,MAAM,aAAa,YAAY,IAAI,aAAa;AAChD,MAAI,cAAc,OAAO,WAAW,MAAM,KAAK,SAAS,WACtD,IAAG,KAAK,yBAAyB;AAEnC,cAAY,IAAI,cAAc,OAAO,SAAS,WAAW,CAAC;AAE1D,SAAO;IAEV,EAEoC,EAAE,SAAS,CAAC;;AAGnD,SAAgB,sBAA4B;AAC1C,QAAO,oBAAoB,EAAE,OAAO,MAAM,CAAC"}