@fragno-dev/cli 0.1.20 → 0.1.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +6 -6
- package/CHANGELOG.md +35 -0
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +96 -26
- package/dist/cli.js.map +1 -1
- package/package.json +4 -4
- package/src/cli.ts +1 -1
- package/src/commands/db/generate.ts +2 -2
- package/src/commands/db/info.ts +12 -27
- package/src/utils/find-fragno-databases.ts +27 -23
- package/src/utils/load-config.test.ts +233 -0
- package/src/utils/load-config.ts +115 -0
package/.turbo/turbo-build.log
CHANGED
@@ -1,5 +1,5 @@
 
-> @fragno-dev/cli@0.1.
+> @fragno-dev/cli@0.1.22 build /home/runner/work/fragno/fragno/apps/fragno-cli
 > tsdown
 
 ℹ tsdown v0.15.12 powered by rolldown v1.0.0-beta.45
@@ -10,9 +10,9 @@
 ℹ Build start
 ℹ Granting execute permission to dist/cli.d.ts
 ℹ Granting execute permission to dist/cli.js
-ℹ dist/cli.js
-ℹ dist/cli.js.map
-ℹ dist/cli.d.ts.map  0.83 kB │ gzip: 0.
+ℹ dist/cli.js 37.36 kB │ gzip: 9.31 kB
+ℹ dist/cli.js.map 75.92 kB │ gzip: 18.59 kB
+ℹ dist/cli.d.ts.map  0.83 kB │ gzip: 0.38 kB
 ℹ dist/cli.d.ts  2.01 kB │ gzip: 0.49 kB
-ℹ 4 files, total:
-✔ Build complete in
+ℹ 4 files, total: 116.12 kB
+✔ Build complete in 13114ms
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,40 @@
 # @fragno-dev/cli
 
+## 0.1.22
+
+### Patch Changes
+
+- Updated dependencies [aecfa70]
+- Updated dependencies [3faac77]
+- Updated dependencies [01a9c6d]
+- Updated dependencies [5028ad3]
+- Updated dependencies [20d824a]
+  - @fragno-dev/db@0.2.1
+  - @fragno-dev/corpus@0.0.7
+
+## 0.1.21
+
+### Patch Changes
+
+- ca0db15: fix: resolve TSConfig paths when loading Fragment files
+- fcce048: feat: use "dry run" mode when searching for database schemas to be more lenient when
+  instantiating a Fragment
+- f9ae2d3: fix: database namespace generation
+- Updated dependencies [8429960]
+- Updated dependencies [4d897c9]
+- Updated dependencies [a46b59c]
+- Updated dependencies [bc072dd]
+- Updated dependencies [e46d2a7]
+- Updated dependencies [fcce048]
+- Updated dependencies [147bdd6]
+- Updated dependencies [f9ae2d3]
+- Updated dependencies [f3b7084]
+- Updated dependencies [c3870ec]
+- Updated dependencies [75e298f]
+  - @fragno-dev/db@0.2.0
+  - @fragno-dev/core@0.1.11
+  - @fragno-dev/corpus@0.0.7
+
 ## 0.1.20
 
 ### Patch Changes
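
The fcce048 entry above corresponds to the `FRAGNO_INIT_DRY_RUN` environment variable that the new `importFragmentFile` in `dist/cli.js` (further down in this diff) sets while it evaluates a fragment file and deletes again afterwards. A minimal sketch of one way a fragment entry point could react to it; the file name and the `connectToDatabase` helper are hypothetical, only the environment variable name comes from this diff:

```ts
// fragment.ts (hypothetical) - loaded by `fragno-cli db generate/migrate/info`.
// While the CLI searches this module for database schemas it sets
// FRAGNO_INIT_DRY_RUN=true, so side-effectful setup can be skipped while the
// exported schema and adapter metadata stay visible.
const isDryRun = process.env["FRAGNO_INIT_DRY_RUN"] === "true";

// Placeholder for whatever real connection logic the fragment normally runs;
// not part of the Fragno API.
function connectToDatabase(): { connected: boolean } | undefined {
  if (isDryRun) return undefined; // be lenient: no live connection during a dry run
  return { connected: true };
}

export const connection = connectToDatabase();
```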
package/dist/cli.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"cli.d.ts","names":[],"sources":["../src/commands/db/generate.ts","../src/commands/db/migrate.ts","../src/commands/db/info.ts","../src/commands/search.ts","../src/commands/corpus.ts","../src/cli.ts"],"sourcesContent":[],"mappings":";;;;;cAOa,iBAyGX,OAAA,CAzG0B;;;;;EAAf,CAAA;;;;ICFA,WAAA,EAqEX,MAAA;;;;ICtEW,KAAA,EAAA,
+{"version":3,"file":"cli.d.ts","names":[],"sources":["../src/commands/db/generate.ts","../src/commands/db/migrate.ts","../src/commands/db/info.ts","../src/commands/search.ts","../src/commands/corpus.ts","../src/cli.ts"],"sourcesContent":[],"mappings":";;;;;cAOa,iBAyGX,OAAA,CAzG0B;;;;;EAAf,CAAA;;;;ICFA,WAAA,EAqEX,MAAA;;;;ICtEW,KAAA,EAAA,MA6GX;;;;IC7FW,IAAA,EAAA,QAoFX;;;;ACsaF,CAAA,CAAA;;;cHzgBa,gBAqEX,OAAA,CArEyB;;;cCDd,aA6GX,OAAA,CA7GsB;;;cCgBX,eAoFX,OAAA,CApFwB;;;;;EHbb,CAAA;;;;ICFA,OAAA,EAAA,KAqEX;;;;ICtEW,WA6GX,EAAA,MAAA;;;;IC7FW,IAAA,EAAA,QAoFX;;;;ACsaF,CAAA,CAAA;;;AD1fa,cC0fA,aDtaX,EC+gBA,OAAA,CAzGwB,OD1fA,CAAA;;;;IC0fb,WAAA,EAyGX,MAAA;;;;IChmBW,KAAA,EAAA,MAGX;IAGW,WAGX,EAAA,MAAA;EAEoB,CAAA;;;;;;;;;;;;;;;;;;;;;cAXT,WAAS,OAAA,CAAA,QAGpB,OAAA,CAHoB,IAAA;cAMT,aAAW,OAAA,CAAA,QAGtB,OAAA,CAHsB,IAAA;iBAKF,GAAA,CAAA,GAAG"}
package/dist/cli.js
CHANGED
@@ -1,6 +1,6 @@
 #!/usr/bin/env node
 import { cli, define } from "gunshi";
-import { mkdir, writeFile } from "node:fs/promises";
+import { access, mkdir, readFile, writeFile } from "node:fs/promises";
 import { dirname, join, relative, resolve } from "node:path";
 import { executeMigrations, generateMigrationsOrSchema } from "@fragno-dev/db/generation-engine";
 import { FragnoDatabase, isFragnoDatabase } from "@fragno-dev/db";
@@ -8,23 +8,99 @@ import { fragnoDatabaseAdapterNameFakeSymbol, fragnoDatabaseAdapterVersionFakeSy
 import { instantiatedFragmentFakeSymbol } from "@fragno-dev/core/internal/symbols";
 import "@fragno-dev/core";
 import { loadConfig } from "c12";
+import { constants, readFileSync } from "node:fs";
 import { getAllSubjectIdsInOrder, getAllSubjects, getCategoryTitle, getSubject, getSubjectChildren, getSubjectParent, getSubjects, isCategory } from "@fragno-dev/corpus";
 import { marked } from "marked";
 import { markedTerminal } from "marked-terminal";
 import { stripVTControlCharacters } from "node:util";
-import { readFileSync } from "node:fs";
 import { fileURLToPath } from "node:url";
 
+//#region src/utils/load-config.ts
+/**
+* Checks if a file exists using async API.
+*/
+async function fileExists(path) {
+  try {
+    await access(path, constants.F_OK);
+    return true;
+  } catch {
+    return false;
+  }
+}
+/**
+* Walks up the directory tree from the target path to find a tsconfig.json file.
+*/
+async function findTsconfig(startPath) {
+  let currentDir = dirname(startPath);
+  const root = resolve("/");
+  while (currentDir !== root) {
+    const tsconfigPath = join(currentDir, "tsconfig.json");
+    if (await fileExists(tsconfigPath)) return tsconfigPath;
+    currentDir = dirname(currentDir);
+  }
+  return null;
+}
+/**
+* Strips comments from JSONC (JSON with Comments) content.
+*/
+function stripJsonComments(jsonc) {
+  let result = jsonc.replace(/\/\/[^\n]*/g, "");
+  result = result.replace(/\/\*[\s\S]*?\*\//g, "");
+  return result;
+}
+/**
+* Converts TypeScript path aliases to jiti alias format.
+* Strips trailing '*' from aliases and paths, and resolves paths relative to baseUrl.
+*/
+function convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrlResolved) {
+  return Object.fromEntries(Object.entries(tsconfigPaths).map(([_alias, paths]) => {
+    const pathsArray = paths;
+    return [_alias.endsWith("*") ? _alias.slice(0, -1) : _alias, resolve(baseUrlResolved, pathsArray[0].endsWith("*") ? pathsArray[0].slice(0, -1) : pathsArray[0])];
+  }));
+}
+/**
+* Resolves tsconfig path aliases for use with jiti.
+*/
+async function resolveTsconfigAliases(targetPath) {
+  const tsconfigPath = await findTsconfig(targetPath);
+  if (!tsconfigPath) return {};
+  try {
+    const jsonContent = stripJsonComments(await readFile(tsconfigPath, "utf-8"));
+    const tsconfig = JSON.parse(jsonContent);
+    const tsconfigPaths = tsconfig?.compilerOptions?.paths;
+    if (!tsconfigPaths || typeof tsconfigPaths !== "object") return {};
+    return convertTsconfigPathsToJitiAlias(tsconfigPaths, resolve(dirname(tsconfigPath), tsconfig?.compilerOptions?.baseUrl || "."));
+  } catch (error) {
+    console.warn(`Warning: Failed to parse tsconfig at ${tsconfigPath}:`, error);
+    return {};
+  }
+}
+/**
+* Loads a config file using c12 with automatic tsconfig path alias resolution.
+*/
+async function loadConfig$1(path) {
+  const { config } = await loadConfig({
+    configFile: path,
+    jitiOptions: { alias: await resolveTsconfigAliases(path) }
+  });
+  return config;
+}
+
+//#endregion
 //#region src/utils/find-fragno-databases.ts
 async function importFragmentFile(path) {
-
-
-
-
-
-
-
-
+  process.env["FRAGNO_INIT_DRY_RUN"] = "true";
+  try {
+    const databases = findFragnoDatabases(await loadConfig$1(path));
+    const adapterNames = databases.map((db) => `${db.adapter[fragnoDatabaseAdapterNameFakeSymbol]}@${db.adapter[fragnoDatabaseAdapterVersionFakeSymbol]}`);
+    if ([...new Set(adapterNames)].length > 1) throw new Error(`All Fragno databases must use the same adapter name and version. Found mismatch: (${adapterNames.join(", ")})`);
+    return {
+      adapter: databases[0].adapter,
+      databases
+    };
+  } finally {
+    delete process.env["FRAGNO_INIT_DRY_RUN"];
+  }
 }
 /**
 * Imports multiple fragment files and validates they all use the same adapter.
@@ -89,12 +165,12 @@ function findFragnoDatabases(targetModule) {
     const options = internal.options;
     if (!deps["db"] || !deps["schema"]) continue;
     const schema = deps["schema"];
+    const namespace = deps["namespace"];
     const databaseAdapter = options["databaseAdapter"];
     if (!databaseAdapter) {
      console.warn(`Warning: Fragment '${value.name}' appears to be a database fragment but no databaseAdapter found in options.`);
      continue;
    }
-    const namespace = value.name + "-db";
     fragnoDatabases.push(new FragnoDatabase({
      namespace,
      schema,
@@ -138,7 +214,7 @@ const generateCommand = define({
    const fromVersion = ctx.values.from;
    const prefix = ctx.values.prefix;
    const { databases: allFragnoDatabases, adapter } = await importFragmentFiles(targets.map((target) => resolve(process.cwd(), target)));
-    if (!adapter.createSchemaGenerator && !adapter.
+    if (!adapter.createSchemaGenerator && !adapter.prepareMigrations) throw new Error("The adapter does not support schema generation. Please use an adapter that implements either createSchemaGenerator or prepareMigrations.");
    console.log("Generating schema...");
    let results;
    try {
@@ -228,19 +304,13 @@ const infoCommand = define({
      const info = {
        namespace: fragnoDb.namespace,
        schemaVersion: fragnoDb.schema.version,
-        migrationSupport: !!fragnoDb.adapter.
+        migrationSupport: !!fragnoDb.adapter.prepareMigrations
      };
-      if (fragnoDb.adapter.
-
-        info.currentVersion =
-        info.
-
-        else if (info.pendingVersions === 0) info.status = "Up to date";
-      } catch (error) {
-        info.error = error instanceof Error ? error.message : String(error);
-        info.status = "Error";
-      }
-      else info.status = "Schema only";
+      if (fragnoDb.adapter.prepareMigrations) {
+        info.currentVersion = await fragnoDb.adapter.getSchemaVersion(fragnoDb.namespace);
+        if (info.schemaVersion.toString() !== info.currentVersion) info.status = `Migrations pending`;
+        else info.status = "Up to date";
+      } else info.status = "Schema only";
      return info;
    }));
    const hasMigrationSupport = dbInfos.some((info) => info.migrationSupport);
@@ -266,7 +336,7 @@ const infoCommand = define({
    if (!hasMigrationSupport) {
      console.log("Note: These adapters do not support migrations.");
      console.log("Use 'fragno-cli db generate' to generate schema files.");
-    } else
+    } else console.log("Run 'fragno-cli db migrate <target>' to apply pending migrations.");
  }
 });
 
@@ -886,7 +956,7 @@ async function run() {
      process.exit(1);
    }
  } catch (error) {
-    console.error(
+    console.error(error);
    process.exit(1);
  }
 }
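
The `convertTsconfigPathsToJitiAlias` helper bundled above (part of the ca0db15 change) turns `compilerOptions.paths` entries into the flat alias map that jiti, the loader used by c12, accepts: the trailing `*` is stripped from both the alias and its target, and only the first target of each entry is used. A standalone sketch of the same mapping, assuming a hypothetical project at `/repo`:

```ts
import { dirname, resolve } from "node:path";

// Mirrors the helper in dist/cli.js above: drop a trailing "*" from the alias and
// its first target, then resolve the target against the tsconfig baseUrl.
function convertTsconfigPathsToJitiAlias(
  tsconfigPaths: Record<string, string[]>,
  baseUrlResolved: string,
): Record<string, string> {
  return Object.fromEntries(
    Object.entries(tsconfigPaths).map(([alias, paths]): [string, string] => [
      alias.endsWith("*") ? alias.slice(0, -1) : alias,
      resolve(baseUrlResolved, paths[0].endsWith("*") ? paths[0].slice(0, -1) : paths[0]),
    ]),
  );
}

// Hypothetical /repo/tsconfig.json:
//   { "compilerOptions": { "baseUrl": ".", "paths": { "@/*": ["./src/*"], "#db": ["./src/db/index.ts"] } } }
const aliases = convertTsconfigPathsToJitiAlias(
  { "@/*": ["./src/*"], "#db": ["./src/db/index.ts"] },
  resolve(dirname("/repo/tsconfig.json"), "."),
);
// -> { "@/": "/repo/src", "#db": "/repo/src/db/index.ts" }
console.log(aliases);
```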
package/dist/cli.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"cli.js","names":["allDatabases: FragnoDatabase<AnySchema>[]","adapter: DatabaseAdapter | undefined","firstAdapterFile: string | undefined","fragnoDatabases: FragnoDatabase<AnySchema>[]","results: { schema: string; path: string; namespace: string }[]","results: ExecuteMigrationResult[]","info: {\n namespace: string;\n schemaVersion: number;\n migrationSupport: boolean;\n currentVersion?: number;\n pendingVersions?: number;\n status?: string;\n error?: string;\n }","lines: string[]","matches: CodeBlockMatch[]","startLine: number | undefined","endLine: number | undefined"],"sources":["../src/utils/find-fragno-databases.ts","../src/commands/db/generate.ts","../src/commands/db/migrate.ts","../src/commands/db/info.ts","../src/utils/format-search-results.ts","../src/commands/search.ts","../src/commands/corpus.ts","../src/cli.ts"],"sourcesContent":["import { isFragnoDatabase, type DatabaseAdapter, FragnoDatabase } from \"@fragno-dev/db\";\nimport {\n fragnoDatabaseAdapterNameFakeSymbol,\n fragnoDatabaseAdapterVersionFakeSymbol,\n} from \"@fragno-dev/db/adapters\";\nimport type { AnySchema } from \"@fragno-dev/db/schema\";\nimport { instantiatedFragmentFakeSymbol } from \"@fragno-dev/core/internal/symbols\";\nimport { type FragnoInstantiatedFragment } from \"@fragno-dev/core\";\nimport { loadConfig } from \"c12\";\nimport { relative } from \"node:path\";\n\nexport async function importFragmentFile(path: string): Promise<Record<string, unknown>> {\n const { config } = await loadConfig({\n configFile: path,\n });\n\n const databases = findFragnoDatabases(config);\n const adapterNames = databases.map(\n (db) =>\n `${db.adapter[fragnoDatabaseAdapterNameFakeSymbol]}@${db.adapter[fragnoDatabaseAdapterVersionFakeSymbol]}`,\n );\n const uniqueAdapterNames = [...new Set(adapterNames)];\n\n if (uniqueAdapterNames.length > 1) {\n throw new Error(\n `All Fragno databases must use the same adapter name and version. 
` +\n `Found mismatch: (${adapterNames.join(\", \")})`,\n );\n }\n\n return {\n adapter: databases[0].adapter,\n databases,\n };\n}\n\n/**\n * Imports multiple fragment files and validates they all use the same adapter.\n * Returns the combined databases from all files.\n */\nexport async function importFragmentFiles(paths: string[]): Promise<{\n adapter: DatabaseAdapter;\n databases: FragnoDatabase<AnySchema>[];\n}> {\n // De-duplicate paths (in case same file was specified multiple times)\n const uniquePaths = Array.from(new Set(paths));\n\n if (uniquePaths.length === 0) {\n throw new Error(\"No fragment files provided\");\n }\n\n const allDatabases: FragnoDatabase<AnySchema>[] = [];\n let adapter: DatabaseAdapter | undefined;\n let firstAdapterFile: string | undefined;\n const cwd = process.cwd();\n\n for (const path of uniquePaths) {\n const relativePath = relative(cwd, path);\n\n try {\n const result = await importFragmentFile(path);\n const databases = result[\"databases\"] as FragnoDatabase<AnySchema>[];\n const fileAdapter = result[\"adapter\"] as DatabaseAdapter;\n\n if (databases.length === 0) {\n console.warn(\n `Warning: No FragnoDatabase instances found in ${relativePath}.\\n` +\n `Make sure you export either:\\n` +\n ` - A FragnoDatabase instance created with .create(adapter)\\n` +\n ` - An instantiated fragment with embedded database definition\\n`,\n );\n continue;\n }\n\n // Set the adapter from the first file with databases\n if (!adapter) {\n adapter = fileAdapter;\n firstAdapterFile = relativePath;\n }\n\n // Validate all files use the same adapter name and version\n const firstAdapterName = adapter[fragnoDatabaseAdapterNameFakeSymbol];\n const firstAdapterVersion = adapter[fragnoDatabaseAdapterVersionFakeSymbol];\n const fileAdapterName = fileAdapter[fragnoDatabaseAdapterNameFakeSymbol];\n const fileAdapterVersion = fileAdapter[fragnoDatabaseAdapterVersionFakeSymbol];\n\n if (firstAdapterName !== fileAdapterName || firstAdapterVersion !== fileAdapterVersion) {\n const firstAdapterInfo = `${firstAdapterName}@${firstAdapterVersion}`;\n const fileAdapterInfo = `${fileAdapterName}@${fileAdapterVersion}`;\n\n throw new Error(\n `All fragments must use the same database adapter. Mixed adapters found:\\n` +\n ` - ${firstAdapterFile}: ${firstAdapterInfo}\\n` +\n ` - ${relativePath}: ${fileAdapterInfo}\\n\\n` +\n `Make sure all fragments use the same adapter name and version.`,\n );\n }\n\n allDatabases.push(...databases);\n console.log(` Found ${databases.length} database(s) in ${relativePath}`);\n } catch (error) {\n throw new Error(\n `Failed to import fragment file ${relativePath}: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n }\n\n if (allDatabases.length === 0) {\n throw new Error(\n `No FragnoDatabase instances found in any of the target files.\\n` +\n `Make sure your files export either:\\n` +\n ` - A FragnoDatabase instance created with .create(adapter)\\n` +\n ` - An instantiated fragment with embedded database definition\\n`,\n );\n }\n\n if (!adapter) {\n throw new Error(\"No adapter found in any of the fragment files\");\n }\n\n return {\n adapter,\n databases: allDatabases,\n };\n}\n\nfunction isNewFragnoInstantiatedFragment(\n value: unknown,\n): value is FragnoInstantiatedFragment<\n [],\n unknown,\n Record<string, unknown>,\n Record<string, unknown>,\n Record<string, unknown>,\n unknown,\n Record<string, unknown>\n> {\n return (\n typeof value === \"object\" &&\n value !== null &&\n instantiatedFragmentFakeSymbol in value &&\n value[instantiatedFragmentFakeSymbol] === instantiatedFragmentFakeSymbol\n );\n}\n\n/**\n * Finds all FragnoDatabase instances in a module, including those embedded\n * in instantiated fragments.\n */\nexport function findFragnoDatabases(\n targetModule: Record<string, unknown>,\n): FragnoDatabase<AnySchema>[] {\n const fragnoDatabases: FragnoDatabase<AnySchema>[] = [];\n\n for (const [_key, value] of Object.entries(targetModule)) {\n if (isFragnoDatabase(value)) {\n fragnoDatabases.push(value);\n } else if (isNewFragnoInstantiatedFragment(value)) {\n // Handle new fragment API\n const internal = value.$internal;\n const deps = internal.deps as Record<string, unknown>;\n const options = internal.options as Record<string, unknown>;\n\n // Check if this is a database fragment by looking for implicit database dependencies\n if (!deps[\"db\"] || !deps[\"schema\"]) {\n continue;\n }\n\n const schema = deps[\"schema\"] as AnySchema;\n const databaseAdapter = options[\"databaseAdapter\"] as DatabaseAdapter | undefined;\n\n if (!databaseAdapter) {\n console.warn(\n `Warning: Fragment '${value.name}' appears to be a database fragment but no databaseAdapter found in options.`,\n );\n continue;\n }\n\n // Derive namespace from fragment name (follows convention: fragmentName + \"-db\")\n const namespace = value.name + \"-db\";\n\n fragnoDatabases.push(\n new FragnoDatabase({\n namespace,\n schema,\n adapter: databaseAdapter,\n }),\n );\n }\n }\n\n return fragnoDatabases;\n}\n","import { writeFile, mkdir } from \"node:fs/promises\";\nimport { resolve, dirname } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { generateMigrationsOrSchema } from \"@fragno-dev/db/generation-engine\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\n\n// Define the db generate command with type safety\nexport const generateCommand = define({\n name: \"generate\",\n description: \"Generate schema files from FragnoDatabase definitions\",\n args: {\n output: {\n type: \"string\",\n short: \"o\",\n description:\n \"Output path: for single file, exact file path; for multiple files, output directory (default: current directory)\",\n },\n from: {\n type: \"number\",\n short: \"f\",\n description: \"Source version to generate migration from (default: current database version)\",\n },\n to: {\n type: \"number\",\n short: \"t\",\n description: \"Target version to generate migration to (default: latest schema version)\",\n },\n prefix: {\n type: \"string\",\n short: \"p\",\n description: \"String to prepend to the generated file (e.g., '/* eslint-disable */')\",\n },\n },\n run: async (ctx) => {\n // With `define()` and `multiple: 
true`, targets is properly typed as string[]\n const targets = ctx.positionals;\n const output = ctx.values.output;\n const toVersion = ctx.values.to;\n const fromVersion = ctx.values.from;\n const prefix = ctx.values.prefix;\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files and validate they use the same adapter\n const { databases: allFragnoDatabases, adapter } = await importFragmentFiles(targetPaths);\n\n // Check if adapter supports any form of schema generation\n if (!adapter.createSchemaGenerator && !adapter.createMigrationEngine) {\n throw new Error(\n `The adapter does not support schema generation. ` +\n `Please use an adapter that implements either createSchemaGenerator or createMigrationEngine.`,\n );\n }\n\n // Generate schema for all fragments\n console.log(\"Generating schema...\");\n\n let results: { schema: string; path: string; namespace: string }[];\n try {\n results = await generateMigrationsOrSchema(allFragnoDatabases, {\n path: output,\n toVersion,\n fromVersion,\n });\n } catch (error) {\n throw new Error(\n `Failed to generate schema: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Write all generated files\n for (const result of results) {\n // For single file: use output as exact file path\n // For multiple files: use output as base directory\n const finalOutputPath =\n output && results.length === 1\n ? resolve(process.cwd(), output)\n : output\n ? resolve(process.cwd(), output, result.path)\n : resolve(process.cwd(), result.path);\n\n // Ensure parent directory exists\n const parentDir = dirname(finalOutputPath);\n try {\n await mkdir(parentDir, { recursive: true });\n } catch (error) {\n throw new Error(\n `Failed to create directory: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Write schema to file\n try {\n const content = prefix ? `${prefix}\\n${result.schema}` : result.schema;\n await writeFile(finalOutputPath, content, { encoding: \"utf-8\" });\n } catch (error) {\n throw new Error(\n `Failed to write schema file: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n\n console.log(`✓ Generated: ${finalOutputPath}`);\n }\n\n console.log(`\\n✓ Schema generated successfully!`);\n console.log(` Files generated: ${results.length}`);\n console.log(` Fragments:`);\n for (const db of allFragnoDatabases) {\n console.log(` - ${db.namespace} (version ${db.schema.version})`);\n }\n },\n});\n","import { resolve } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\nimport { executeMigrations, type ExecuteMigrationResult } from \"@fragno-dev/db/generation-engine\";\n\nexport const migrateCommand = define({\n name: \"migrate\",\n description: \"Run database migrations for all fragments to their latest versions\",\n args: {},\n run: async (ctx) => {\n const targets = ctx.positionals;\n\n if (targets.length === 0) {\n throw new Error(\"At least one target file path is required\");\n }\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files and validate they use the same adapter\n const { databases: allFragnoDatabases } = await importFragmentFiles(targetPaths);\n\n console.log(\"\\nMigrating all fragments to their latest versions...\\n\");\n\n let results: ExecuteMigrationResult[];\n try {\n results = await executeMigrations(allFragnoDatabases);\n } catch (error) {\n throw new Error(\n `Migration failed: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Display progress for each result\n for (const result of results) {\n console.log(`Fragment: ${result.namespace}`);\n console.log(` Current version: ${result.fromVersion}`);\n console.log(` Target version: ${result.toVersion}`);\n\n if (result.didMigrate) {\n console.log(` ✓ Migration completed: v${result.fromVersion} → v${result.toVersion}\\n`);\n } else {\n console.log(` ✓ Already at latest version. 
No migration needed.\\n`);\n }\n }\n\n // Summary\n console.log(\"═══════════════════════════════════════\");\n console.log(\"Migration Summary\");\n console.log(\"═══════════════════════════════════════\");\n\n const migrated = results.filter((r) => r.didMigrate);\n const skipped = results.filter((r) => !r.didMigrate);\n\n if (migrated.length > 0) {\n console.log(`\\n✓ Migrated ${migrated.length} fragment(s):`);\n for (const r of migrated) {\n console.log(` - ${r.namespace}: v${r.fromVersion} → v${r.toVersion}`);\n }\n }\n\n if (skipped.length > 0) {\n console.log(`\\n○ Skipped ${skipped.length} fragment(s) (already up-to-date):`);\n for (const r of skipped) {\n console.log(` - ${r.namespace}: v${r.toVersion}`);\n }\n }\n\n for (const db of allFragnoDatabases) {\n await db.adapter.close();\n }\n\n console.log(\"\\n✓ All migrations completed successfully\");\n },\n});\n","import { resolve } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\n\nexport const infoCommand = define({\n name: \"info\",\n description: \"Display database information and migration status\",\n args: {},\n run: async (ctx) => {\n const targets = ctx.positionals;\n\n if (targets.length === 0) {\n throw new Error(\"At least one target file path is required\");\n }\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files\n const { databases: allFragnoDatabases } = await importFragmentFiles(targetPaths);\n\n // Collect database information\n const dbInfos = await Promise.all(\n allFragnoDatabases.map(async (fragnoDb) => {\n const info: {\n namespace: string;\n schemaVersion: number;\n migrationSupport: boolean;\n currentVersion?: number;\n pendingVersions?: number;\n status?: string;\n error?: string;\n } = {\n namespace: fragnoDb.namespace,\n schemaVersion: fragnoDb.schema.version,\n migrationSupport: !!fragnoDb.adapter.createMigrationEngine,\n };\n\n // Get current database version if migrations are supported\n if (fragnoDb.adapter.createMigrationEngine) {\n try {\n const migrator = fragnoDb.adapter.createMigrationEngine(\n fragnoDb.schema,\n fragnoDb.namespace,\n );\n const currentVersion = await migrator.getVersion();\n info.currentVersion = currentVersion;\n info.pendingVersions = fragnoDb.schema.version - currentVersion;\n\n if (info.pendingVersions > 0) {\n info.status = `Pending (${info.pendingVersions} migration(s))`;\n } else if (info.pendingVersions === 0) {\n info.status = \"Up to date\";\n }\n } catch (error) {\n info.error = error instanceof Error ? 
error.message : String(error);\n info.status = \"Error\";\n }\n } else {\n info.status = \"Schema only\";\n }\n\n return info;\n }),\n );\n\n // Determine if any database supports migrations\n const hasMigrationSupport = dbInfos.some((info) => info.migrationSupport);\n\n // Print compact table\n console.log(\"\");\n console.log(`Database Information:`);\n console.log(\"\");\n\n // Table header\n const namespaceHeader = \"Namespace\";\n const versionHeader = \"Schema\";\n const currentHeader = \"Current\";\n const statusHeader = \"Status\";\n\n const maxNamespaceLen = Math.max(\n namespaceHeader.length,\n ...dbInfos.map((info) => info.namespace.length),\n );\n const namespaceWidth = Math.max(maxNamespaceLen + 2, 20);\n const versionWidth = 8;\n const currentWidth = 9;\n const statusWidth = 25;\n\n // Print table\n console.log(\n namespaceHeader.padEnd(namespaceWidth) +\n versionHeader.padEnd(versionWidth) +\n (hasMigrationSupport ? currentHeader.padEnd(currentWidth) : \"\") +\n statusHeader,\n );\n console.log(\n \"-\".repeat(namespaceWidth) +\n \"-\".repeat(versionWidth) +\n (hasMigrationSupport ? \"-\".repeat(currentWidth) : \"\") +\n \"-\".repeat(statusWidth),\n );\n\n for (const info of dbInfos) {\n const currentVersionStr =\n info.currentVersion !== undefined ? String(info.currentVersion) : \"-\";\n console.log(\n info.namespace.padEnd(namespaceWidth) +\n String(info.schemaVersion).padEnd(versionWidth) +\n (hasMigrationSupport ? currentVersionStr.padEnd(currentWidth) : \"\") +\n (info.status || \"-\"),\n );\n }\n\n // Print help text\n console.log(\"\");\n if (!hasMigrationSupport) {\n console.log(\"Note: These adapters do not support migrations.\");\n console.log(\"Use 'fragno-cli db generate' to generate schema files.\");\n } else {\n const hasPendingMigrations = dbInfos.some(\n (info) => info.pendingVersions && info.pendingVersions > 0,\n );\n if (hasPendingMigrations) {\n console.log(\"Run 'fragno-cli db migrate <target>' to apply pending migrations.\");\n }\n }\n },\n});\n","interface SearchResult {\n id: string;\n type: \"page\" | \"heading\" | \"text\";\n content: string;\n breadcrumbs?: string[];\n contentWithHighlights?: Array<{\n type: string;\n content: string;\n styles?: { highlight?: boolean };\n }>;\n url: string;\n}\n\ninterface MergedResult {\n url: string;\n urlWithMd: string;\n fullUrl: string;\n fullUrlWithMd: string;\n title?: string;\n breadcrumbs?: string[];\n type: \"page\" | \"heading\" | \"text\";\n sections: Array<{\n content: string;\n type: \"page\" | \"heading\" | \"text\";\n }>;\n}\n\n/**\n * Merge search results by URL, grouping sections and content under each URL (without hash)\n */\nexport function mergeResultsByUrl(results: SearchResult[], baseUrl: string): MergedResult[] {\n const mergedMap = new Map<string, MergedResult>();\n\n for (const result of results) {\n // Strip hash to get base URL for merging\n const baseUrlWithoutHash = result.url.split(\"#\")[0];\n const existing = mergedMap.get(baseUrlWithoutHash);\n\n if (existing) {\n // Add this result as a section\n existing.sections.push({\n content: result.content,\n type: result.type,\n });\n } else {\n // Create new merged result\n const urlWithMd = `${baseUrlWithoutHash}.md`;\n\n const fullUrl = `https://${baseUrl}${baseUrlWithoutHash}`;\n const fullUrlWithMd = `https://${baseUrl}${urlWithMd}`;\n\n mergedMap.set(baseUrlWithoutHash, {\n url: baseUrlWithoutHash,\n urlWithMd,\n fullUrl,\n fullUrlWithMd,\n title: result.type === \"page\" ? 
result.content : undefined,\n breadcrumbs: result.breadcrumbs,\n type: result.type,\n sections: [\n {\n content: result.content,\n type: result.type,\n },\n ],\n });\n }\n }\n\n return Array.from(mergedMap.values());\n}\n\n/**\n * Format merged results as markdown\n */\nexport function formatAsMarkdown(mergedResults: MergedResult[]): string {\n const lines: string[] = [];\n\n for (const result of mergedResults) {\n // Title (use first section content if it's a page, or just use content)\n const title = result.title || result.sections[0]?.content || \"Untitled\";\n lines.push(`## Page: '${title}'`);\n // Breadcrumbs\n if (result.breadcrumbs && result.breadcrumbs.length > 0) {\n lines.push(\" \" + result.breadcrumbs.join(\" > \"));\n lines.push(\"\");\n }\n\n // Both URLs\n lines.push(\"URLs:\");\n lines.push(` - ${result.fullUrl}`);\n lines.push(` - ${result.fullUrlWithMd}`);\n lines.push(\"\");\n\n // Show all sections found on this page\n if (result.sections.length > 1) {\n lines.push(\"Relevant sections:\");\n for (let i = 0; i < result.sections.length; i++) {\n const section = result.sections[i];\n // Skip the first section if it's just the page title repeated\n if (i === 0 && result.type === \"page\" && section.content === result.title) {\n continue;\n }\n lines.push(` - ${section.content}`);\n }\n lines.push(\"\");\n }\n\n lines.push(\"---\");\n lines.push(\"\");\n }\n\n return lines.join(\"\\n\");\n}\n\n/**\n * Format merged results as JSON\n */\nexport function formatAsJson(mergedResults: MergedResult[]): string {\n return JSON.stringify(mergedResults, null, 2);\n}\n","import { define } from \"gunshi\";\nimport {\n mergeResultsByUrl,\n formatAsMarkdown,\n formatAsJson,\n} from \"../utils/format-search-results.js\";\n\ninterface SearchResult {\n id: string;\n type: \"page\" | \"heading\" | \"text\";\n content: string;\n breadcrumbs?: string[];\n contentWithHighlights?: Array<{\n type: string;\n content: string;\n styles?: { highlight?: boolean };\n }>;\n url: string;\n}\n\nexport const searchCommand = define({\n name: \"search\",\n description: \"Search the Fragno documentation\",\n args: {\n limit: {\n type: \"number\",\n description: \"Maximum number of results to show\",\n default: 10,\n },\n json: {\n type: \"boolean\",\n description: \"Output results in JSON format\",\n default: false,\n },\n markdown: {\n type: \"boolean\",\n description: \"Output results in Markdown format (default)\",\n default: true,\n },\n \"base-url\": {\n type: \"string\",\n description: \"Base URL for the documentation site\",\n default: \"fragno.dev\",\n },\n },\n run: async (ctx) => {\n const query = ctx.positionals.join(\" \");\n\n if (!query || query.trim().length === 0) {\n throw new Error(\"Please provide a search query\");\n }\n\n // Determine output mode\n const jsonMode = ctx.values.json as boolean;\n const baseUrl = ctx.values[\"base-url\"] as string;\n\n if (!jsonMode) {\n console.log(`Searching for: \"${query}\"\\n`);\n }\n\n try {\n // Make request to the docs search API\n const encodedQuery = encodeURIComponent(query);\n const response = await fetch(`https://${baseUrl}/api/search?query=${encodedQuery}`);\n\n if (!response.ok) {\n throw new Error(`API request failed with status ${response.status}`);\n }\n\n const results = (await response.json()) as SearchResult[];\n\n // Apply limit\n const limit = ctx.values.limit as number;\n const limitedResults = results.slice(0, limit);\n\n if (limitedResults.length === 0) {\n if (jsonMode) {\n console.log(\"[]\");\n } else {\n console.log(\"No results 
found.\");\n }\n return;\n }\n\n // Merge results by URL\n const mergedResults = mergeResultsByUrl(limitedResults, baseUrl);\n\n // Output based on mode\n if (jsonMode) {\n console.log(formatAsJson(mergedResults));\n } else {\n // Markdown mode (default)\n console.log(\n `Found ${results.length} result${results.length === 1 ? \"\" : \"s\"}${results.length > limit ? ` (showing ${limit})` : \"\"}\\n`,\n );\n console.log(formatAsMarkdown(mergedResults));\n }\n } catch (error) {\n if (error instanceof Error) {\n throw new Error(`Search failed: ${error.message}`);\n }\n throw new Error(\"Search failed: An unknown error occurred\");\n }\n },\n});\n","import { define } from \"gunshi\";\nimport {\n getSubjects,\n getSubject,\n getAllSubjects,\n getSubjectParent,\n getSubjectChildren,\n getAllSubjectIdsInOrder,\n isCategory,\n getCategoryTitle,\n} from \"@fragno-dev/corpus\";\nimport type { Subject, Example } from \"@fragno-dev/corpus\";\nimport { marked } from \"marked\";\n// @ts-expect-error - marked-terminal types are outdated for v7\nimport { markedTerminal } from \"marked-terminal\";\nimport { stripVTControlCharacters } from \"node:util\";\n\n// Always configure marked to use terminal renderer\nmarked.use(markedTerminal());\n\ninterface PrintOptions {\n showLineNumbers: boolean;\n startLine?: number;\n endLine?: number;\n headingsOnly: boolean;\n}\n\n/**\n * Build markdown content for multiple subjects\n */\nexport function buildSubjectsMarkdown(subjects: Subject[]): string {\n let fullMarkdown = \"\";\n\n for (const subject of subjects) {\n fullMarkdown += `# ${subject.title}\\n\\n`;\n\n if (subject.description) {\n fullMarkdown += `${subject.description}\\n\\n`;\n }\n\n // Add imports block if present\n if (subject.imports) {\n fullMarkdown += `### Imports\\n\\n\\`\\`\\`typescript\\n${subject.imports}\\n\\`\\`\\`\\n\\n`;\n }\n\n // Add prelude blocks if present\n if (subject.prelude.length > 0) {\n fullMarkdown += `### Prelude\\n\\n`;\n for (const block of subject.prelude) {\n // Don't include the directive in the displayed code fence\n fullMarkdown += `\\`\\`\\`typescript\\n${block.code}\\n\\`\\`\\`\\n\\n`;\n }\n }\n\n // Add all sections\n for (const section of subject.sections) {\n fullMarkdown += `## ${section.heading}\\n\\n${section.content}\\n\\n`;\n }\n }\n\n return fullMarkdown;\n}\n\n/**\n * Add line numbers to content\n */\nexport function addLineNumbers(content: string, startFrom: number = 1): string {\n const lines = content.split(\"\\n\");\n const maxDigits = String(startFrom + lines.length - 1).length;\n\n return lines\n .map((line, index) => {\n const lineNum = startFrom + index;\n const paddedNum = String(lineNum).padStart(maxDigits, \" \");\n return `${paddedNum}│ ${line}`;\n })\n .join(\"\\n\");\n}\n\n/**\n * Filter content by line range\n */\nexport function filterByLineRange(content: string, startLine: number, endLine: number): string {\n const lines = content.split(\"\\n\");\n // Convert to 0-based index\n const start = Math.max(0, startLine - 1);\n const end = Math.min(lines.length, endLine);\n return lines.slice(start, end).join(\"\\n\");\n}\n\n/**\n * Extract headings and code block information with line numbers\n */\nexport function extractHeadingsAndBlocks(subjects: Subject[]): string {\n let output = \"\";\n let currentLine = 1;\n let lastOutputLine = 0;\n\n // Helper to add a gap indicator if we skipped lines\n const addGapIfNeeded = () => {\n if (lastOutputLine > 0 && currentLine > lastOutputLine + 1) {\n output += ` │\\n`;\n }\n };\n\n // Add instruction 
header\n output += \"Use --start N --end N flags to show specific line ranges\\n\\n\";\n\n for (const subject of subjects) {\n // Title\n addGapIfNeeded();\n output += `${currentLine.toString().padStart(4, \" \")}│ # ${subject.title}\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n\n // Empty line after title - SHOW IT\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n\n // Description - show full text\n if (subject.description) {\n const descLines = subject.description.split(\"\\n\");\n for (const line of descLines) {\n output += `${currentLine.toString().padStart(4, \" \")}│ ${line}\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n }\n // Empty line after description - SHOW IT\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n }\n\n // Imports block - show full code\n if (subject.imports) {\n addGapIfNeeded();\n output += `${currentLine.toString().padStart(4, \" \")}│ ### Imports\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n // Empty line after heading - SHOW IT\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n output += `${currentLine.toString().padStart(4, \" \")}│ \\`\\`\\`typescript\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n const importLines = subject.imports.split(\"\\n\");\n for (const line of importLines) {\n output += `${currentLine.toString().padStart(4, \" \")}│ ${line}\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n }\n output += `${currentLine.toString().padStart(4, \" \")}│ \\`\\`\\`\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n // Empty line after code block - SHOW IT\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n }\n\n // Prelude blocks - show as list\n if (subject.prelude.length > 0) {\n addGapIfNeeded();\n output += `${currentLine.toString().padStart(4, \" \")}│ ### Prelude\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n // Empty line after heading\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n\n for (const block of subject.prelude) {\n const id = block.id || \"(no-id)\";\n const blockStartLine = currentLine + 1; // +1 for opening ```\n const codeLines = block.code.split(\"\\n\").length;\n const blockEndLine = currentLine + 1 + codeLines; // opening ``` + code lines\n output += `${currentLine.toString().padStart(4, \" \")}│ - id: \\`${id}\\`, L${blockStartLine}-${blockEndLine}\\n`;\n lastOutputLine = currentLine;\n currentLine += codeLines + 3; // opening ```, code, closing ```, blank line\n }\n // Update lastOutputLine to current position to avoid gap indicator\n lastOutputLine = currentLine - 1;\n }\n\n // Sections - show headings and any example IDs that belong to them\n const sectionToExamples = new Map<string, Example[]>();\n\n // Group examples by their rough section (based on heading appearance in explanations)\n for (const example of subject.examples) {\n // Try to match the example to a section based on context\n // For now, we'll list all example IDs under the sections where they appear\n for (const section of subject.sections) {\n // Check if the section contains references to this example\n if (\n section.content.includes(example.code.substring(0, Math.min(50, example.code.length)))\n ) {\n if (!sectionToExamples.has(section.heading)) {\n 
sectionToExamples.set(section.heading, []);\n }\n sectionToExamples.get(section.heading)!.push(example);\n break;\n }\n }\n }\n\n for (const section of subject.sections) {\n addGapIfNeeded();\n output += `${currentLine.toString().padStart(4, \" \")}│ ## ${section.heading}\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n\n // Show code block IDs as a list if any examples match this section\n const examples = sectionToExamples.get(section.heading) || [];\n if (examples.length > 0) {\n // We need to parse the section content to find where each example appears\n const sectionStartLine = currentLine;\n const lines = section.content.split(\"\\n\");\n\n for (const example of examples) {\n const id = example.id || \"(no-id)\";\n // Find the code block in section content\n let blockStartLine = sectionStartLine;\n let blockEndLine = sectionStartLine;\n let inCodeBlock = false;\n let foundBlock = false;\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n if (line.trim().startsWith(\"```\") && !inCodeBlock) {\n // Check if next lines match the example\n const codeStart = i + 1;\n let matches = true;\n const exampleLines = example.code.split(\"\\n\");\n for (let j = 0; j < Math.min(3, exampleLines.length); j++) {\n if (lines[codeStart + j]?.trim() !== exampleLines[j]?.trim()) {\n matches = false;\n break;\n }\n }\n if (matches) {\n blockStartLine = sectionStartLine + i + 1; // +1 to skip opening ```\n blockEndLine = sectionStartLine + i + exampleLines.length;\n foundBlock = true;\n break;\n }\n }\n }\n\n if (foundBlock) {\n output += `${currentLine.toString().padStart(4, \" \")}│ - id: \\`${id}\\`, L${blockStartLine}-${blockEndLine}\\n`;\n } else {\n output += `${currentLine.toString().padStart(4, \" \")}│ - id: \\`${id}\\`\\n`;\n }\n lastOutputLine = currentLine;\n }\n }\n\n // Count lines\n const sectionLines = section.content.split(\"\\n\");\n for (const _line of sectionLines) {\n currentLine += 1;\n }\n currentLine += 1; // blank line after section\n // Update lastOutputLine to current position to avoid gap indicator\n lastOutputLine = currentLine - 1;\n }\n }\n\n return output;\n}\n\n/**\n * Print subjects with the given options\n */\nasync function printSubjects(subjects: Subject[], options: PrintOptions): Promise<void> {\n if (options.headingsOnly) {\n // Show only headings and code block IDs\n const headingsOutput = extractHeadingsAndBlocks(subjects);\n console.log(headingsOutput);\n return;\n }\n\n // Build the full markdown content\n const markdown = buildSubjectsMarkdown(subjects);\n\n // Render markdown to terminal for nice formatting\n let output = await marked.parse(markdown);\n\n // Apply line range filter if specified (after rendering)\n const startLine = options.startLine ?? 1;\n if (options.startLine !== undefined || options.endLine !== undefined) {\n const end = options.endLine ?? output.split(\"\\n\").length;\n output = filterByLineRange(output, startLine, end);\n }\n\n // Add line numbers after rendering (if requested)\n // Line numbers correspond to the rendered output that agents interact with\n if (options.showLineNumbers) {\n output = addLineNumbers(output, startLine);\n }\n\n console.log(output);\n}\n\n/**\n * Find and print code blocks by ID\n */\nasync function printCodeBlockById(\n id: string,\n topics: string[],\n showLineNumbers: boolean,\n): Promise<void> {\n // If topics are specified, search only those; otherwise search all subjects\n const subjects = topics.length > 0 ? 
getSubject(...topics) : getAllSubjects();\n\n interface CodeBlockMatch {\n subjectId: string;\n subjectTitle: string;\n section: string;\n code: string;\n type: \"prelude\" | \"example\";\n startLine?: number;\n endLine?: number;\n }\n\n const matches: CodeBlockMatch[] = [];\n\n for (const subject of subjects) {\n // Build the rendered markdown to get correct line numbers (matching --start/--end behavior)\n const fullMarkdown = buildSubjectsMarkdown([subject]);\n const renderedOutput = await marked.parse(fullMarkdown);\n const renderedLines = renderedOutput.split(\"\\n\");\n\n // Search in prelude blocks\n for (const block of subject.prelude) {\n if (block.id === id) {\n // Find line numbers in the rendered output\n let startLine: number | undefined;\n let endLine: number | undefined;\n\n // Search for the prelude code in the rendered output\n const codeLines = block.code.split(\"\\n\");\n const firstCodeLine = codeLines[0].trim();\n\n for (let i = 0; i < renderedLines.length; i++) {\n // Strip ANSI codes before comparing\n if (stripVTControlCharacters(renderedLines[i]).trim() === firstCodeLine) {\n // Found the start of the code\n startLine = i + 1; // 1-based line numbers\n endLine = i + codeLines.length;\n break;\n }\n }\n\n matches.push({\n subjectId: subject.id,\n subjectTitle: subject.title,\n section: \"Prelude\",\n code: block.code,\n type: \"prelude\",\n startLine,\n endLine,\n });\n }\n }\n\n // Search in examples\n for (const example of subject.examples) {\n if (example.id === id) {\n // Try to find which section this example belongs to\n let sectionName = \"Unknown Section\";\n let startLine: number | undefined;\n let endLine: number | undefined;\n\n for (const section of subject.sections) {\n if (\n section.content.includes(example.code.substring(0, Math.min(50, example.code.length)))\n ) {\n sectionName = section.heading;\n\n // Find line numbers in the rendered output\n const codeLines = example.code.split(\"\\n\");\n const firstCodeLine = codeLines[0].trim();\n\n for (let i = 0; i < renderedLines.length; i++) {\n // Strip ANSI codes before comparing\n if (stripVTControlCharacters(renderedLines[i]).trim() === firstCodeLine) {\n // Found the start of the code\n startLine = i + 1; // 1-based line numbers\n endLine = i + codeLines.length;\n break;\n }\n }\n break;\n }\n }\n\n matches.push({\n subjectId: subject.id,\n subjectTitle: subject.title,\n section: sectionName,\n code: example.code,\n type: \"example\",\n startLine,\n endLine,\n });\n }\n }\n }\n\n if (matches.length === 0) {\n console.error(`Error: No code block found with id \"${id}\"`);\n if (topics.length > 0) {\n console.error(`Searched in topics: ${topics.join(\", \")}`);\n } else {\n console.error(\"Searched in all available topics\");\n }\n process.exit(1);\n }\n\n // Build markdown output\n for (let i = 0; i < matches.length; i++) {\n const match = matches[i];\n\n if (matches.length > 1 && i > 0) {\n console.log(\"\\n---\\n\");\n }\n\n // Build markdown for this match\n let matchMarkdown = `# ${match.subjectTitle}\\n\\n`;\n matchMarkdown += `## ${match.section}\\n\\n`;\n\n // Add line number info if available and requested (as plain text, not in markdown)\n if (showLineNumbers && match.startLine && match.endLine) {\n console.log(`Lines ${match.startLine}-${match.endLine} (use with --start/--end)\\n`);\n }\n\n matchMarkdown += `\\`\\`\\`typescript\\n${match.code}\\n\\`\\`\\`\\n`;\n\n // Render the markdown\n const rendered = await marked.parse(matchMarkdown);\n console.log(rendered);\n }\n}\n\n/**\n * Print only 
the topic tree\n */\nfunction printTopicTree(): void {\n const subjects = getSubjects();\n const subjectMap = new Map(subjects.map((s) => [s.id, s]));\n\n // Helper function to get title for any subject ID (including categories)\n function getTitle(subjectId: string): string {\n if (isCategory(subjectId)) {\n return getCategoryTitle(subjectId);\n }\n const subject = subjectMap.get(subjectId);\n return subject ? subject.title : subjectId;\n }\n\n // Helper function to recursively display tree\n function displayNode(subjectId: string, indent: string, isLast: boolean, isRoot: boolean): void {\n const title = getTitle(subjectId);\n\n if (isRoot) {\n console.log(` ${subjectId.padEnd(30)} ${title}`);\n } else {\n const connector = isLast ? \"└─\" : \"├─\";\n console.log(`${indent}${connector} ${subjectId.padEnd(26)} ${title}`);\n }\n\n const children = getSubjectChildren(subjectId);\n if (children.length > 0) {\n const childIndent = isRoot ? \" \" : indent + (isLast ? \" \" : \"│ \");\n for (let i = 0; i < children.length; i++) {\n displayNode(children[i], childIndent, i === children.length - 1, false);\n }\n }\n }\n\n // Get all root subject IDs (including categories)\n const allIds = getAllSubjectIdsInOrder();\n const rootIds = allIds.filter((id) => !getSubjectParent(id));\n\n // Display root subjects\n for (const subjectId of rootIds) {\n displayNode(subjectId, \"\", false, true);\n }\n}\n\n/**\n * Print information about the corpus command\n */\nfunction printCorpusHelp(): void {\n console.log(\"Fragno Corpus - Code examples and documentation (similar to LLMs.txt\");\n console.log(\"\");\n console.log(\"Usage: fragno-cli corpus [options] [topic...]\");\n console.log(\"\");\n console.log(\"Options:\");\n console.log(\" -n, --no-line-numbers Hide line numbers (shown by default)\");\n console.log(\" -s, --start N Starting line number to display from\");\n console.log(\" -e, --end N Ending line number to display to\");\n console.log(\" --headings Show only headings and code block IDs\");\n console.log(\" --id <id> Retrieve a specific code block by ID\");\n console.log(\" --tree Show only the topic tree\");\n console.log(\"\");\n console.log(\"Examples:\");\n console.log(\" fragno-cli corpus # List all available topics\");\n console.log(\" fragno-cli corpus --tree # Show only the topic tree\");\n console.log(\" fragno-cli corpus defining-routes # Show route definition examples\");\n console.log(\" fragno-cli corpus --headings database-querying\");\n console.log(\" # Show structure overview\");\n console.log(\" fragno-cli corpus --start 10 --end 50 database-querying\");\n console.log(\" # Show specific lines\");\n console.log(\" fragno-cli corpus --id create-user # Get code block by ID\");\n console.log(\" fragno-cli corpus database-adapters kysely-adapter\");\n console.log(\" # Show multiple topics\");\n console.log(\"\");\n console.log(\"Available topics:\");\n\n printTopicTree();\n}\n\nexport const corpusCommand = define({\n name: \"corpus\",\n description: \"View code examples and documentation for Fragno\",\n args: {\n \"no-line-numbers\": {\n type: \"boolean\",\n short: \"n\",\n description: \"Hide line numbers (line numbers are shown by default)\",\n },\n start: {\n type: \"number\",\n short: \"s\",\n description: \"Starting line number (1-based) to display from\",\n },\n end: {\n type: \"number\",\n short: \"e\",\n description: \"Ending line number (1-based) to display to\",\n },\n headings: {\n type: \"boolean\",\n description: \"Show only section headings and code block IDs with line 
numbers\",\n },\n id: {\n type: \"string\",\n description: \"Retrieve a specific code block by ID\",\n },\n tree: {\n type: \"boolean\",\n description: \"Show only the topic tree (without help text)\",\n },\n },\n run: async (ctx) => {\n const topics = ctx.positionals;\n const showLineNumbers = !(ctx.values[\"no-line-numbers\"] ?? false);\n const startLine = ctx.values.start;\n const endLine = ctx.values.end;\n const headingsOnly = ctx.values.headings ?? false;\n const codeBlockId = ctx.values.id;\n const treeOnly = ctx.values.tree ?? false;\n\n // Handle --id flag\n if (codeBlockId) {\n await printCodeBlockById(codeBlockId, topics, showLineNumbers);\n return;\n }\n\n // Handle --tree flag\n if (treeOnly) {\n printTopicTree();\n return;\n }\n\n // No topics provided - show help\n if (topics.length === 0) {\n printCorpusHelp();\n return;\n }\n\n // Validate line range\n if (startLine !== undefined && endLine !== undefined && startLine > endLine) {\n console.error(\"Error: --start must be less than or equal to --end\");\n process.exit(1);\n }\n\n // Load and display requested topics\n try {\n const subjects = getSubject(...topics);\n\n await printSubjects(subjects, {\n showLineNumbers,\n startLine,\n endLine,\n headingsOnly,\n });\n } catch (error) {\n if (error instanceof Error && error.message.includes(\"ENOENT\")) {\n // Extract the subject name from the error message or use the topics array\n const missingTopics = topics.filter((topic) => {\n try {\n getSubject(topic);\n return false;\n } catch {\n return true;\n }\n });\n\n if (missingTopics.length === 1) {\n console.error(`Error: Subject '${missingTopics[0]}' not found.`);\n } else if (missingTopics.length > 1) {\n console.error(\n `Error: Subjects not found: ${missingTopics.map((t) => `'${t}'`).join(\", \")}`,\n );\n } else {\n console.error(\"Error: One or more subjects not found.\");\n }\n console.log(\"\\nAvailable topics:\");\n printTopicTree();\n } else {\n console.error(\"Error loading topics:\", error instanceof Error ? 
error.message : error);\n console.log(\"\\nRun 'fragno-cli corpus' to see available topics.\");\n }\n process.exit(1);\n }\n },\n});\n","#!/usr/bin/env node\n\nimport { cli, define } from \"gunshi\";\nimport { generateCommand } from \"./commands/db/generate.js\";\nimport { migrateCommand } from \"./commands/db/migrate.js\";\nimport { infoCommand } from \"./commands/db/info.js\";\nimport { searchCommand } from \"./commands/search.js\";\nimport { corpusCommand } from \"./commands/corpus.js\";\nimport { readFileSync } from \"node:fs\";\nimport { fileURLToPath } from \"node:url\";\nimport { dirname, join } from \"node:path\";\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\nconst packageJson = JSON.parse(readFileSync(join(__dirname, \"../package.json\"), \"utf-8\"));\nconst version = packageJson.version;\n\n// Create a Map of db sub-commands\nconst dbSubCommands = new Map();\ndbSubCommands.set(\"generate\", generateCommand);\ndbSubCommands.set(\"migrate\", migrateCommand);\ndbSubCommands.set(\"info\", infoCommand);\n\n// Define the db command with nested subcommands\nexport const dbCommand = define({\n name: \"db\",\n description: \"Database management commands\",\n});\n\n// Define the main command\nexport const mainCommand = define({\n name: \"fragno-cli\",\n description: \"Tools for working with Fragno fragments\",\n});\n\nexport async function run() {\n try {\n const args = process.argv.slice(2);\n\n // Manual routing for top-level commands\n if (args[0] === \"search\") {\n // Run search command directly\n await cli(args.slice(1), searchCommand, {\n name: \"fragno-cli search\",\n version,\n });\n } else if (args[0] === \"corpus\") {\n // Run corpus command directly\n await cli(args.slice(1), corpusCommand, {\n name: \"fragno-cli corpus\",\n version,\n });\n } else if (args[0] === \"db\") {\n // Handle db subcommands\n const subCommandName = args[1];\n\n if (!subCommandName || subCommandName === \"--help\" || subCommandName === \"-h\") {\n // Show db help with subcommands\n console.log(\"Database management commands\");\n console.log(\"\");\n console.log(\"USAGE:\");\n console.log(\" fragno-cli db <COMMAND>\");\n console.log(\"\");\n console.log(\"COMMANDS:\");\n console.log(\n \" generate Generate schema files from FragnoDatabase definitions\",\n );\n console.log(\" migrate Run database migrations\");\n console.log(\" info Display database information and migration status\");\n console.log(\"\");\n console.log(\"For more info, run any command with the `--help` flag:\");\n console.log(\" fragno-cli db generate --help\");\n console.log(\" fragno-cli db migrate --help\");\n console.log(\" fragno-cli db info --help\");\n console.log(\"\");\n console.log(\"OPTIONS:\");\n console.log(\" -h, --help Display this help message\");\n console.log(\" -v, --version Display this version\");\n } else if (subCommandName === \"--version\" || subCommandName === \"-v\") {\n console.log(version);\n } else {\n // Route to specific db subcommand\n const subCommand = dbSubCommands.get(subCommandName);\n\n if (!subCommand) {\n console.error(`Unknown command: ${subCommandName}`);\n console.log(\"\");\n console.log(\"Run 'fragno-cli db --help' for available commands.\");\n process.exit(1);\n }\n\n // Run the subcommand\n await cli(args.slice(2), subCommand, {\n name: `fragno-cli db ${subCommandName}`,\n version,\n });\n }\n } else if (!args.length || args[0] === \"--help\" || args[0] === \"-h\") {\n // Show main help\n console.log(\"Tools for working with Fragno\");\n console.log(\"\");\n 
console.log(\"USAGE:\");\n console.log(\" fragno-cli <COMMAND>\");\n console.log(\"\");\n console.log(\"COMMANDS:\");\n console.log(\" db Database management commands\");\n console.log(\" search Search the Fragno documentation\");\n console.log(\" corpus View code examples and documentation for Fragno\");\n console.log(\"\");\n console.log(\"For more info, run any command with the `--help` flag:\");\n console.log(\" fragno-cli db --help\");\n console.log(\" fragno-cli search --help\");\n console.log(\" fragno-cli corpus --help\");\n console.log(\"\");\n console.log(\"OPTIONS:\");\n console.log(\" -h, --help Display this help message\");\n console.log(\" -v, --version Display this version\");\n } else if (args[0] === \"--version\" || args[0] === \"-v\") {\n console.log(version);\n } else {\n // Unknown command\n console.error(`Unknown command: ${args[0]}`);\n console.log(\"\");\n console.log(\"Run 'fragno-cli --help' for available commands.\");\n process.exit(1);\n }\n } catch (error) {\n console.error(\"Error:\", error instanceof Error ? error.message : error);\n process.exit(1);\n }\n}\n\nif (import.meta.main) {\n await run();\n}\n\nexport { generateCommand, migrateCommand, infoCommand, searchCommand, corpusCommand };\n"],"mappings":";;;;;;;;;;;;;;;;;;AAWA,eAAsB,mBAAmB,MAAgD;CACvF,MAAM,EAAE,WAAW,MAAM,WAAW,EAClC,YAAY,MACb,CAAC;CAEF,MAAM,YAAY,oBAAoB,OAAO;CAC7C,MAAM,eAAe,UAAU,KAC5B,OACC,GAAG,GAAG,QAAQ,qCAAqC,GAAG,GAAG,QAAQ,0CACpE;AAGD,KAF2B,CAAC,GAAG,IAAI,IAAI,aAAa,CAAC,CAE9B,SAAS,EAC9B,OAAM,IAAI,MACR,qFACsB,aAAa,KAAK,KAAK,CAAC,GAC/C;AAGH,QAAO;EACL,SAAS,UAAU,GAAG;EACtB;EACD;;;;;;AAOH,eAAsB,oBAAoB,OAGvC;CAED,MAAM,cAAc,MAAM,KAAK,IAAI,IAAI,MAAM,CAAC;AAE9C,KAAI,YAAY,WAAW,EACzB,OAAM,IAAI,MAAM,6BAA6B;CAG/C,MAAMA,eAA4C,EAAE;CACpD,IAAIC;CACJ,IAAIC;CACJ,MAAM,MAAM,QAAQ,KAAK;AAEzB,MAAK,MAAM,QAAQ,aAAa;EAC9B,MAAM,eAAe,SAAS,KAAK,KAAK;AAExC,MAAI;GACF,MAAM,SAAS,MAAM,mBAAmB,KAAK;GAC7C,MAAM,YAAY,OAAO;GACzB,MAAM,cAAc,OAAO;AAE3B,OAAI,UAAU,WAAW,GAAG;AAC1B,YAAQ,KACN,iDAAiD,aAAa,gKAI/D;AACD;;AAIF,OAAI,CAAC,SAAS;AACZ,cAAU;AACV,uBAAmB;;GAIrB,MAAM,mBAAmB,QAAQ;GACjC,MAAM,sBAAsB,QAAQ;GACpC,MAAM,kBAAkB,YAAY;GACpC,MAAM,qBAAqB,YAAY;AAEvC,OAAI,qBAAqB,mBAAmB,wBAAwB,oBAAoB;IACtF,MAAM,mBAAmB,GAAG,iBAAiB,GAAG;IAChD,MAAM,kBAAkB,GAAG,gBAAgB,GAAG;AAE9C,UAAM,IAAI,MACR,gFACS,iBAAiB,IAAI,iBAAiB,QACtC,aAAa,IAAI,gBAAgB,oEAE3C;;AAGH,gBAAa,KAAK,GAAG,UAAU;AAC/B,WAAQ,IAAI,WAAW,UAAU,OAAO,kBAAkB,eAAe;WAClE,OAAO;AACd,SAAM,IAAI,MACR,kCAAkC,aAAa,IAAI,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC1G;;;AAIL,KAAI,aAAa,WAAW,EAC1B,OAAM,IAAI,MACR,oOAID;AAGH,KAAI,CAAC,QACH,OAAM,IAAI,MAAM,gDAAgD;AAGlE,QAAO;EACL;EACA,WAAW;EACZ;;AAGH,SAAS,gCACP,OASA;AACA,QACE,OAAO,UAAU,YACjB,UAAU,QACV,kCAAkC,SAClC,MAAM,oCAAoC;;;;;;AAQ9C,SAAgB,oBACd,cAC6B;CAC7B,MAAMC,kBAA+C,EAAE;AAEvD,MAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,aAAa,CACtD,KAAI,iBAAiB,MAAM,CACzB,iBAAgB,KAAK,MAAM;UAClB,gCAAgC,MAAM,EAAE;EAEjD,MAAM,WAAW,MAAM;EACvB,MAAM,OAAO,SAAS;EACtB,MAAM,UAAU,SAAS;AAGzB,MAAI,CAAC,KAAK,SAAS,CAAC,KAAK,UACvB;EAGF,MAAM,SAAS,KAAK;EACpB,MAAM,kBAAkB,QAAQ;AAEhC,MAAI,CAAC,iBAAiB;AACpB,WAAQ,KACN,sBAAsB,MAAM,KAAK,8EAClC;AACD;;EAIF,MAAM,YAAY,MAAM,OAAO;AAE/B,kBAAgB,KACd,IAAI,eAAe;GACjB;GACA;GACA,SAAS;GACV,CAAC,CACH;;AAIL,QAAO;;;;;ACxLT,MAAa,kBAAkB,OAAO;CACpC,MAAM;CACN,aAAa;CACb,MAAM;EACJ,QAAQ;GACN,MAAM;GACN,OAAO;GACP,aACE;GACH;EACD,MAAM;GACJ,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,IAAI;GACF,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,QAAQ;GACN,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACF;CACD,KAAK,OAAO,QAAQ;EAElB,MAAM,UAAU,IAAI;EACpB,MAAM,SAAS,IAAI,OAAO;EAC1B,MAAM,YAAY,IAAI,OAAO;EAC7B,MAAM,cAAc,IAAI,OAAO;EAC/B,MAAM,SAAS,IAAI,OAAO
;EAM1B,MAAM,EAAE,WAAW,oBAAoB,YAAY,MAAM,oBAHrC,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGc;AAGzF,MAAI,CAAC,QAAQ,yBAAyB,CAAC,QAAQ,sBAC7C,OAAM,IAAI,MACR,+IAED;AAIH,UAAQ,IAAI,uBAAuB;EAEnC,IAAIC;AACJ,MAAI;AACF,aAAU,MAAM,2BAA2B,oBAAoB;IAC7D,MAAM;IACN;IACA;IACD,CAAC;WACK,OAAO;AACd,SAAM,IAAI,MACR,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACrF;;AAIH,OAAK,MAAM,UAAU,SAAS;GAG5B,MAAM,kBACJ,UAAU,QAAQ,WAAW,IACzB,QAAQ,QAAQ,KAAK,EAAE,OAAO,GAC9B,SACE,QAAQ,QAAQ,KAAK,EAAE,QAAQ,OAAO,KAAK,GAC3C,QAAQ,QAAQ,KAAK,EAAE,OAAO,KAAK;GAG3C,MAAM,YAAY,QAAQ,gBAAgB;AAC1C,OAAI;AACF,UAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;YACpC,OAAO;AACd,UAAM,IAAI,MACR,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACtF;;AAIH,OAAI;AAEF,UAAM,UAAU,iBADA,SAAS,GAAG,OAAO,IAAI,OAAO,WAAW,OAAO,QACtB,EAAE,UAAU,SAAS,CAAC;YACzD,OAAO;AACd,UAAM,IAAI,MACR,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACvF;;AAGH,WAAQ,IAAI,gBAAgB,kBAAkB;;AAGhD,UAAQ,IAAI,qCAAqC;AACjD,UAAQ,IAAI,sBAAsB,QAAQ,SAAS;AACnD,UAAQ,IAAI,eAAe;AAC3B,OAAK,MAAM,MAAM,mBACf,SAAQ,IAAI,SAAS,GAAG,UAAU,YAAY,GAAG,OAAO,QAAQ,GAAG;;CAGxE,CAAC;;;;AC3GF,MAAa,iBAAiB,OAAO;CACnC,MAAM;CACN,aAAa;CACb,MAAM,EAAE;CACR,KAAK,OAAO,QAAQ;EAClB,MAAM,UAAU,IAAI;AAEpB,MAAI,QAAQ,WAAW,EACrB,OAAM,IAAI,MAAM,4CAA4C;EAO9D,MAAM,EAAE,WAAW,uBAAuB,MAAM,oBAH5B,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGK;AAEhF,UAAQ,IAAI,0DAA0D;EAEtE,IAAIC;AACJ,MAAI;AACF,aAAU,MAAM,kBAAkB,mBAAmB;WAC9C,OAAO;AACd,SAAM,IAAI,MACR,qBAAqB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC5E;;AAIH,OAAK,MAAM,UAAU,SAAS;AAC5B,WAAQ,IAAI,aAAa,OAAO,YAAY;AAC5C,WAAQ,IAAI,sBAAsB,OAAO,cAAc;AACvD,WAAQ,IAAI,qBAAqB,OAAO,YAAY;AAEpD,OAAI,OAAO,WACT,SAAQ,IAAI,6BAA6B,OAAO,YAAY,MAAM,OAAO,UAAU,IAAI;OAEvF,SAAQ,IAAI,wDAAwD;;AAKxE,UAAQ,IAAI,0CAA0C;AACtD,UAAQ,IAAI,oBAAoB;AAChC,UAAQ,IAAI,0CAA0C;EAEtD,MAAM,WAAW,QAAQ,QAAQ,MAAM,EAAE,WAAW;EACpD,MAAM,UAAU,QAAQ,QAAQ,MAAM,CAAC,EAAE,WAAW;AAEpD,MAAI,SAAS,SAAS,GAAG;AACvB,WAAQ,IAAI,gBAAgB,SAAS,OAAO,eAAe;AAC3D,QAAK,MAAM,KAAK,SACd,SAAQ,IAAI,OAAO,EAAE,UAAU,KAAK,EAAE,YAAY,MAAM,EAAE,YAAY;;AAI1E,MAAI,QAAQ,SAAS,GAAG;AACtB,WAAQ,IAAI,eAAe,QAAQ,OAAO,oCAAoC;AAC9E,QAAK,MAAM,KAAK,QACd,SAAQ,IAAI,OAAO,EAAE,UAAU,KAAK,EAAE,YAAY;;AAItD,OAAK,MAAM,MAAM,mBACf,OAAM,GAAG,QAAQ,OAAO;AAG1B,UAAQ,IAAI,4CAA4C;;CAE3D,CAAC;;;;ACtEF,MAAa,cAAc,OAAO;CAChC,MAAM;CACN,aAAa;CACb,MAAM,EAAE;CACR,KAAK,OAAO,QAAQ;EAClB,MAAM,UAAU,IAAI;AAEpB,MAAI,QAAQ,WAAW,EACrB,OAAM,IAAI,MAAM,4CAA4C;EAO9D,MAAM,EAAE,WAAW,uBAAuB,MAAM,oBAH5B,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGK;EAGhF,MAAM,UAAU,MAAM,QAAQ,IAC5B,mBAAmB,IAAI,OAAO,aAAa;GACzC,MAAMC,OAQF;IACF,WAAW,SAAS;IACpB,eAAe,SAAS,OAAO;IAC/B,kBAAkB,CAAC,CAAC,SAAS,QAAQ;IACtC;AAGD,OAAI,SAAS,QAAQ,sBACnB,KAAI;IAKF,MAAM,iBAAiB,MAJN,SAAS,QAAQ,sBAChC,SAAS,QACT,SAAS,UACV,CACqC,YAAY;AAClD,SAAK,iBAAiB;AACtB,SAAK,kBAAkB,SAAS,OAAO,UAAU;AAEjD,QAAI,KAAK,kBAAkB,EACzB,MAAK,SAAS,YAAY,KAAK,gBAAgB;aACtC,KAAK,oBAAoB,EAClC,MAAK,SAAS;YAET,OAAO;AACd,SAAK,QAAQ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACnE,SAAK,SAAS;;OAGhB,MAAK,SAAS;AAGhB,UAAO;IACP,CACH;EAGD,MAAM,sBAAsB,QAAQ,MAAM,SAAS,KAAK,iBAAiB;AAGzE,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,wBAAwB;AACpC,UAAQ,IAAI,GAAG;EAGf,MAAM,kBAAkB;EACxB,MAAM,gBAAgB;EACtB,MAAM,gBAAgB;EACtB,MAAM,eAAe;EAErB,MAAM,kBAAkB,KAAK,IAC3B,GACA,GAAG,QAAQ,KAAK,SAAS,KAAK,UAAU,OAAO,CAChD;EACD,MAAM,iBAAiB,KAAK,IAAI,kBAAkB,GAAG,GAAG;EACxD,MAAM,eAAe;EACrB,MAAM,eAAe;EACrB,MAAM,cAAc;AAGpB,UAAQ,IACN,gBAAgB,OAAO,eAAe,GACpC,cAAc,OAAO,aAAa,IACjC,sBAAsB,cAAc,OAAO,aAAa,GAAG,MAC5D,aACH;AACD,UAAQ,IACN,IAAI,OAAO,eAAe,GACxB,IAAI,OAAO,aAAa,IACvB,sBAAsB,IAAI,OAAO,aAAa,GAAG,MAClD,IAAI,OAAO,YAAY,CAC1B;AAED,OAAK,MAAM,QAAQ,SAAS;GAC1B,MAAM,oBACJ,KAAK,mBAAmB,SAAY,OAAO,KAAK,eAAe,GAAG;AACpE,WAAQ,IACN,KAAK,UAAU,OA
AO,eAAe,GACnC,OAAO,KAAK,cAAc,CAAC,OAAO,aAAa,IAC9C,sBAAsB,kBAAkB,OAAO,aAAa,GAAG,OAC/D,KAAK,UAAU,KACnB;;AAIH,UAAQ,IAAI,GAAG;AACf,MAAI,CAAC,qBAAqB;AACxB,WAAQ,IAAI,kDAAkD;AAC9D,WAAQ,IAAI,yDAAyD;aAExC,QAAQ,MAClC,SAAS,KAAK,mBAAmB,KAAK,kBAAkB,EAC1D,CAEC,SAAQ,IAAI,oEAAoE;;CAIvF,CAAC;;;;;;;AClGF,SAAgB,kBAAkB,SAAyB,SAAiC;CAC1F,MAAM,4BAAY,IAAI,KAA2B;AAEjD,MAAK,MAAM,UAAU,SAAS;EAE5B,MAAM,qBAAqB,OAAO,IAAI,MAAM,IAAI,CAAC;EACjD,MAAM,WAAW,UAAU,IAAI,mBAAmB;AAElD,MAAI,SAEF,UAAS,SAAS,KAAK;GACrB,SAAS,OAAO;GAChB,MAAM,OAAO;GACd,CAAC;OACG;GAEL,MAAM,YAAY,GAAG,mBAAmB;GAExC,MAAM,UAAU,WAAW,UAAU;GACrC,MAAM,gBAAgB,WAAW,UAAU;AAE3C,aAAU,IAAI,oBAAoB;IAChC,KAAK;IACL;IACA;IACA;IACA,OAAO,OAAO,SAAS,SAAS,OAAO,UAAU;IACjD,aAAa,OAAO;IACpB,MAAM,OAAO;IACb,UAAU,CACR;KACE,SAAS,OAAO;KAChB,MAAM,OAAO;KACd,CACF;IACF,CAAC;;;AAIN,QAAO,MAAM,KAAK,UAAU,QAAQ,CAAC;;;;;AAMvC,SAAgB,iBAAiB,eAAuC;CACtE,MAAMC,QAAkB,EAAE;AAE1B,MAAK,MAAM,UAAU,eAAe;EAElC,MAAM,QAAQ,OAAO,SAAS,OAAO,SAAS,IAAI,WAAW;AAC7D,QAAM,KAAK,aAAa,MAAM,GAAG;AAEjC,MAAI,OAAO,eAAe,OAAO,YAAY,SAAS,GAAG;AACvD,SAAM,KAAK,QAAQ,OAAO,YAAY,KAAK,MAAM,CAAC;AAClD,SAAM,KAAK,GAAG;;AAIhB,QAAM,KAAK,QAAQ;AACnB,QAAM,KAAK,OAAO,OAAO,UAAU;AACnC,QAAM,KAAK,OAAO,OAAO,gBAAgB;AACzC,QAAM,KAAK,GAAG;AAGd,MAAI,OAAO,SAAS,SAAS,GAAG;AAC9B,SAAM,KAAK,qBAAqB;AAChC,QAAK,IAAI,IAAI,GAAG,IAAI,OAAO,SAAS,QAAQ,KAAK;IAC/C,MAAM,UAAU,OAAO,SAAS;AAEhC,QAAI,MAAM,KAAK,OAAO,SAAS,UAAU,QAAQ,YAAY,OAAO,MAClE;AAEF,UAAM,KAAK,OAAO,QAAQ,UAAU;;AAEtC,SAAM,KAAK,GAAG;;AAGhB,QAAM,KAAK,MAAM;AACjB,QAAM,KAAK,GAAG;;AAGhB,QAAO,MAAM,KAAK,KAAK;;;;;AAMzB,SAAgB,aAAa,eAAuC;AAClE,QAAO,KAAK,UAAU,eAAe,MAAM,EAAE;;;;;ACnG/C,MAAa,gBAAgB,OAAO;CAClC,MAAM;CACN,aAAa;CACb,MAAM;EACJ,OAAO;GACL,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,MAAM;GACJ,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,UAAU;GACR,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,YAAY;GACV,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACF;CACD,KAAK,OAAO,QAAQ;EAClB,MAAM,QAAQ,IAAI,YAAY,KAAK,IAAI;AAEvC,MAAI,CAAC,SAAS,MAAM,MAAM,CAAC,WAAW,EACpC,OAAM,IAAI,MAAM,gCAAgC;EAIlD,MAAM,WAAW,IAAI,OAAO;EAC5B,MAAM,UAAU,IAAI,OAAO;AAE3B,MAAI,CAAC,SACH,SAAQ,IAAI,mBAAmB,MAAM,KAAK;AAG5C,MAAI;GAEF,MAAM,eAAe,mBAAmB,MAAM;GAC9C,MAAM,WAAW,MAAM,MAAM,WAAW,QAAQ,oBAAoB,eAAe;AAEnF,OAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MAAM,kCAAkC,SAAS,SAAS;GAGtE,MAAM,UAAW,MAAM,SAAS,MAAM;GAGtC,MAAM,QAAQ,IAAI,OAAO;GACzB,MAAM,iBAAiB,QAAQ,MAAM,GAAG,MAAM;AAE9C,OAAI,eAAe,WAAW,GAAG;AAC/B,QAAI,SACF,SAAQ,IAAI,KAAK;QAEjB,SAAQ,IAAI,oBAAoB;AAElC;;GAIF,MAAM,gBAAgB,kBAAkB,gBAAgB,QAAQ;AAGhE,OAAI,SACF,SAAQ,IAAI,aAAa,cAAc,CAAC;QACnC;AAEL,YAAQ,IACN,SAAS,QAAQ,OAAO,SAAS,QAAQ,WAAW,IAAI,KAAK,MAAM,QAAQ,SAAS,QAAQ,aAAa,MAAM,KAAK,GAAG,IACxH;AACD,YAAQ,IAAI,iBAAiB,cAAc,CAAC;;WAEvC,OAAO;AACd,OAAI,iBAAiB,MACnB,OAAM,IAAI,MAAM,kBAAkB,MAAM,UAAU;AAEpD,SAAM,IAAI,MAAM,2CAA2C;;;CAGhE,CAAC;;;;ACtFF,OAAO,IAAI,gBAAgB,CAAC;;;;AAY5B,SAAgB,sBAAsB,UAA6B;CACjE,IAAI,eAAe;AAEnB,MAAK,MAAM,WAAW,UAAU;AAC9B,kBAAgB,KAAK,QAAQ,MAAM;AAEnC,MAAI,QAAQ,YACV,iBAAgB,GAAG,QAAQ,YAAY;AAIzC,MAAI,QAAQ,QACV,iBAAgB,oCAAoC,QAAQ,QAAQ;AAItE,MAAI,QAAQ,QAAQ,SAAS,GAAG;AAC9B,mBAAgB;AAChB,QAAK,MAAM,SAAS,QAAQ,QAE1B,iBAAgB,qBAAqB,MAAM,KAAK;;AAKpD,OAAK,MAAM,WAAW,QAAQ,SAC5B,iBAAgB,MAAM,QAAQ,QAAQ,MAAM,QAAQ,QAAQ;;AAIhE,QAAO;;;;;AAMT,SAAgB,eAAe,SAAiB,YAAoB,GAAW;CAC7E,MAAM,QAAQ,QAAQ,MAAM,KAAK;CACjC,MAAM,YAAY,OAAO,YAAY,MAAM,SAAS,EAAE,CAAC;AAEvD,QAAO,MACJ,KAAK,MAAM,UAAU;EACpB,MAAM,UAAU,YAAY;AAE5B,SAAO,GADW,OAAO,QAAQ,CAAC,SAAS,WAAW,IAAI,CACtC,IAAI;GACxB,CACD,KAAK,KAAK;;;;;AAMf,SAAgB,kBAAkB,SAAiB,WAAmB,SAAyB;CAC7F,MAAM,QAAQ,QAAQ,MAAM,KAAK;CAEjC,MAAM,QAAQ,KAAK,IAAI,GAAG,YAAY,EAAE;CACxC,MAAM,MAAM,KAAK,IAAI,MAAM,QAAQ,QAAQ;AAC3C,QAAO,MAAM,MAAM,OAAO,IAAI,CAAC,KAAK,KAAK;;;;;AAM3C,SAAgB,yBAAyB,UAA6B;CA
CpE,IAAI,SAAS;CACb,IAAI,cAAc;CAClB,IAAI,iBAAiB;CAGrB,MAAM,uBAAuB;AAC3B,MAAI,iBAAiB,KAAK,cAAc,iBAAiB,EACvD,WAAU;;AAKd,WAAU;AAEV,MAAK,MAAM,WAAW,UAAU;AAE9B,kBAAgB;AAChB,YAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,QAAQ,MAAM;AACzE,mBAAiB;AACjB,iBAAe;AAGf,YAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,mBAAiB;AACjB,iBAAe;AAGf,MAAI,QAAQ,aAAa;GACvB,MAAM,YAAY,QAAQ,YAAY,MAAM,KAAK;AACjD,QAAK,MAAM,QAAQ,WAAW;AAC5B,cAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,KAAK;AAC9D,qBAAiB;AACjB,mBAAe;;AAGjB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;;AAIjB,MAAI,QAAQ,SAAS;AACnB,mBAAgB;AAChB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AAEf,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AACf,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;GACf,MAAM,cAAc,QAAQ,QAAQ,MAAM,KAAK;AAC/C,QAAK,MAAM,QAAQ,aAAa;AAC9B,cAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,KAAK;AAC9D,qBAAiB;AACjB,mBAAe;;AAEjB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AAEf,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;;AAIjB,MAAI,QAAQ,QAAQ,SAAS,GAAG;AAC9B,mBAAgB;AAChB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AAEf,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AAEf,QAAK,MAAM,SAAS,QAAQ,SAAS;IACnC,MAAM,KAAK,MAAM,MAAM;IACvB,MAAM,iBAAiB,cAAc;IACrC,MAAM,YAAY,MAAM,KAAK,MAAM,KAAK,CAAC;IACzC,MAAM,eAAe,cAAc,IAAI;AACvC,cAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,cAAc,GAAG,OAAO,eAAe,GAAG,aAAa;AAC5G,qBAAiB;AACjB,mBAAe,YAAY;;AAG7B,oBAAiB,cAAc;;EAIjC,MAAM,oCAAoB,IAAI,KAAwB;AAGtD,OAAK,MAAM,WAAW,QAAQ,SAG5B,MAAK,MAAM,WAAW,QAAQ,SAE5B,KACE,QAAQ,QAAQ,SAAS,QAAQ,KAAK,UAAU,GAAG,KAAK,IAAI,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC,EACtF;AACA,OAAI,CAAC,kBAAkB,IAAI,QAAQ,QAAQ,CACzC,mBAAkB,IAAI,QAAQ,SAAS,EAAE,CAAC;AAE5C,qBAAkB,IAAI,QAAQ,QAAQ,CAAE,KAAK,QAAQ;AACrD;;AAKN,OAAK,MAAM,WAAW,QAAQ,UAAU;AACtC,mBAAgB;AAChB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,OAAO,QAAQ,QAAQ;AAC5E,oBAAiB;AACjB,kBAAe;GAGf,MAAM,WAAW,kBAAkB,IAAI,QAAQ,QAAQ,IAAI,EAAE;AAC7D,OAAI,SAAS,SAAS,GAAG;IAEvB,MAAM,mBAAmB;IACzB,MAAM,QAAQ,QAAQ,QAAQ,MAAM,KAAK;AAEzC,SAAK,MAAM,WAAW,UAAU;KAC9B,MAAM,KAAK,QAAQ,MAAM;KAEzB,IAAI,iBAAiB;KACrB,IAAI,eAAe;KAEnB,IAAI,aAAa;AAEjB,UAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAEhC,KADa,MAAM,GACV,MAAM,CAAC,WAAW,MAAM,IAAI,MAAc;MAEjD,MAAM,YAAY,IAAI;MACtB,IAAI,UAAU;MACd,MAAM,eAAe,QAAQ,KAAK,MAAM,KAAK;AAC7C,WAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,GAAG,aAAa,OAAO,EAAE,IACpD,KAAI,MAAM,YAAY,IAAI,MAAM,KAAK,aAAa,IAAI,MAAM,EAAE;AAC5D,iBAAU;AACV;;AAGJ,UAAI,SAAS;AACX,wBAAiB,mBAAmB,IAAI;AACxC,sBAAe,mBAAmB,IAAI,aAAa;AACnD,oBAAa;AACb;;;AAKN,SAAI,WACF,WAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,cAAc,GAAG,OAAO,eAAe,GAAG,aAAa;SAE5G,WAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,cAAc,GAAG;AAExE,sBAAiB;;;GAKrB,MAAM,eAAe,QAAQ,QAAQ,MAAM,KAAK;AAChD,QAAK,MAAM,SAAS,aAClB,gBAAe;AAEjB,kBAAe;AAEf,oBAAiB,cAAc;;;AAInC,QAAO;;;;;AAMT,eAAe,cAAc,UAAqB,SAAsC;AACtF,KAAI,QAAQ,cAAc;EAExB,MAAM,iBAAiB,yBAAyB,SAAS;AACzD,UAAQ,IAAI,eAAe;AAC3B;;CAIF,MAAM,WAAW,sBAAsB,SAAS;CAGhD,IAAI,SAAS,MAAM,OAAO,MAAM,SAAS;CAGzC,MAAM,YAAY,QAAQ,aAAa;AACvC,KAAI,QAAQ,cAAc,UAAa,QAAQ,YAAY,QAAW;EACpE,MAAM,MAAM,QAAQ,WAAW,OAAO,MAAM,KAAK,CAAC;AAClD,WAAS,kBAAkB,QAAQ,WAAW,IAAI;;AAKpD,KAAI,QAAQ,gBACV,UAAS,eAAe,QAAQ,UAAU;AAG5C,SAAQ,IAAI,OAAO;;;;;AAMrB,eAAe,mBACb,IACA,QACA,iBACe;CAEf,MAAM,WAAW,OAAO,SAAS,IAAI,WAAW,GAAG,OAAO,GAAG,gBAAgB;CAY7E,MAAMC,UAA4B,EAAE;AAEpC,MAAK,MAAM,WAAW,UAAU;EAE9B,MAAM,eAAe,sBAAsB,CAAC,QAAQ,CAAC;EAErD,MAAM,iBADiB,MAAM,OAAO,MAAM,aAAa,EAClB,MAAM,KAAK;AAGhD,OAA
K,MAAM,SAAS,QAAQ,QAC1B,KAAI,MAAM,OAAO,IAAI;GAEnB,IAAIC;GACJ,IAAIC;GAGJ,MAAM,YAAY,MAAM,KAAK,MAAM,KAAK;GACxC,MAAM,gBAAgB,UAAU,GAAG,MAAM;AAEzC,QAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,IAExC,KAAI,yBAAyB,cAAc,GAAG,CAAC,MAAM,KAAK,eAAe;AAEvE,gBAAY,IAAI;AAChB,cAAU,IAAI,UAAU;AACxB;;AAIJ,WAAQ,KAAK;IACX,WAAW,QAAQ;IACnB,cAAc,QAAQ;IACtB,SAAS;IACT,MAAM,MAAM;IACZ,MAAM;IACN;IACA;IACD,CAAC;;AAKN,OAAK,MAAM,WAAW,QAAQ,SAC5B,KAAI,QAAQ,OAAO,IAAI;GAErB,IAAI,cAAc;GAClB,IAAID;GACJ,IAAIC;AAEJ,QAAK,MAAM,WAAW,QAAQ,SAC5B,KACE,QAAQ,QAAQ,SAAS,QAAQ,KAAK,UAAU,GAAG,KAAK,IAAI,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC,EACtF;AACA,kBAAc,QAAQ;IAGtB,MAAM,YAAY,QAAQ,KAAK,MAAM,KAAK;IAC1C,MAAM,gBAAgB,UAAU,GAAG,MAAM;AAEzC,SAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,IAExC,KAAI,yBAAyB,cAAc,GAAG,CAAC,MAAM,KAAK,eAAe;AAEvE,iBAAY,IAAI;AAChB,eAAU,IAAI,UAAU;AACxB;;AAGJ;;AAIJ,WAAQ,KAAK;IACX,WAAW,QAAQ;IACnB,cAAc,QAAQ;IACtB,SAAS;IACT,MAAM,QAAQ;IACd,MAAM;IACN;IACA;IACD,CAAC;;;AAKR,KAAI,QAAQ,WAAW,GAAG;AACxB,UAAQ,MAAM,uCAAuC,GAAG,GAAG;AAC3D,MAAI,OAAO,SAAS,EAClB,SAAQ,MAAM,uBAAuB,OAAO,KAAK,KAAK,GAAG;MAEzD,SAAQ,MAAM,mCAAmC;AAEnD,UAAQ,KAAK,EAAE;;AAIjB,MAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;EACvC,MAAM,QAAQ,QAAQ;AAEtB,MAAI,QAAQ,SAAS,KAAK,IAAI,EAC5B,SAAQ,IAAI,UAAU;EAIxB,IAAI,gBAAgB,KAAK,MAAM,aAAa;AAC5C,mBAAiB,MAAM,MAAM,QAAQ;AAGrC,MAAI,mBAAmB,MAAM,aAAa,MAAM,QAC9C,SAAQ,IAAI,SAAS,MAAM,UAAU,GAAG,MAAM,QAAQ,6BAA6B;AAGrF,mBAAiB,qBAAqB,MAAM,KAAK;EAGjD,MAAM,WAAW,MAAM,OAAO,MAAM,cAAc;AAClD,UAAQ,IAAI,SAAS;;;;;;AAOzB,SAAS,iBAAuB;CAC9B,MAAM,WAAW,aAAa;CAC9B,MAAM,aAAa,IAAI,IAAI,SAAS,KAAK,MAAM,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC;CAG1D,SAAS,SAAS,WAA2B;AAC3C,MAAI,WAAW,UAAU,CACvB,QAAO,iBAAiB,UAAU;EAEpC,MAAM,UAAU,WAAW,IAAI,UAAU;AACzC,SAAO,UAAU,QAAQ,QAAQ;;CAInC,SAAS,YAAY,WAAmB,QAAgB,QAAiB,QAAuB;EAC9F,MAAM,QAAQ,SAAS,UAAU;AAEjC,MAAI,OACF,SAAQ,IAAI,KAAK,UAAU,OAAO,GAAG,CAAC,GAAG,QAAQ;OAC5C;GACL,MAAM,YAAY,SAAS,OAAO;AAClC,WAAQ,IAAI,GAAG,SAAS,UAAU,GAAG,UAAU,OAAO,GAAG,CAAC,GAAG,QAAQ;;EAGvE,MAAM,WAAW,mBAAmB,UAAU;AAC9C,MAAI,SAAS,SAAS,GAAG;GACvB,MAAM,cAAc,SAAS,SAAS,UAAU,SAAS,QAAQ;AACjE,QAAK,IAAI,IAAI,GAAG,IAAI,SAAS,QAAQ,IACnC,aAAY,SAAS,IAAI,aAAa,MAAM,SAAS,SAAS,GAAG,MAAM;;;CAO7E,MAAM,UADS,yBAAyB,CACjB,QAAQ,OAAO,CAAC,iBAAiB,GAAG,CAAC;AAG5D,MAAK,MAAM,aAAa,QACtB,aAAY,WAAW,IAAI,OAAO,KAAK;;;;;AAO3C,SAAS,kBAAwB;AAC/B,SAAQ,IAAI,uEAAuE;AACnF,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,gDAAgD;AAC5D,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,WAAW;AACvB,SAAQ,IAAI,oEAAoE;AAChF,SAAQ,IAAI,oEAAoE;AAChF,SAAQ,IAAI,gEAAgE;AAC5E,SAAQ,IAAI,qEAAqE;AACjF,SAAQ,IAAI,oEAAoE;AAChF,SAAQ,IAAI,wDAAwD;AACpE,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,YAAY;AACxB,SAAQ,IAAI,4EAA4E;AACxF,SAAQ,IAAI,2EAA2E;AACvF,SAAQ,IAAI,iFAAiF;AAC7F,SAAQ,IAAI,mDAAmD;AAC/D,SAAQ,IAAI,0EAA0E;AACtF,SAAQ,IAAI,4DAA4D;AACxE,SAAQ,IAAI,sEAAsE;AAClF,SAAQ,IAAI,uEAAuE;AACnF,SAAQ,IAAI,uDAAuD;AACnE,SAAQ,IAAI,uEAAuE;AACnF,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,oBAAoB;AAEhC,iBAAgB;;AAGlB,MAAa,gBAAgB,OAAO;CAClC,MAAM;CACN,aAAa;CACb,MAAM;EACJ,mBAAmB;GACjB,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,OAAO;GACL,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,KAAK;GACH,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,UAAU;GACR,MAAM;GACN,aAAa;GACd;EACD,IAAI;GACF,MAAM;GACN,aAAa;GACd;EACD,MAAM;GACJ,MAAM;GACN,aAAa;GACd;EACF;CACD,KAAK,OAAO,QAAQ;EAClB,MAAM,SAAS,IAAI;EACnB,MAAM,kBAAkB,EAAE,IAAI,OAAO,sBAAsB;EAC3D,MAAM,YAAY,IAAI,OAAO;EAC7B,MAAM,UAAU,IAAI,OAAO;EAC3B,MAAM,eAAe,IAAI,OAAO,YAAY;EAC5C,MAAM,cAAc,IAAI,OAAO;EAC/B,MAAM,WAAW,IAAI,OAAO,QAAQ;AAGpC,MAAI,aAAa;AACf,SAAM,mBAAmB,aAAa,QAAQ,gBAAgB;AAC9D;;AAIF,MAAI,UAAU;AACZ,mBAAgB;AAChB;;AAIF,MAAI,OAAO,WAAW,GAAG;AACvB,oBAAiB;AACjB;;AAIF,MAAI,cAAc,UAAa,YAAY,UAAa,YAAY,SAAS;AAC3E,WAAQ,MAAM,qDAAqD;AACnE,WAAQ,KAAK,EAAE;;AAIjB,MAAI;AAGF,SAAM,cA
FW,WAAW,GAAG,OAAO,EAER;IAC5B;IACA;IACA;IACA;IACD,CAAC;WACK,OAAO;AACd,OAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,SAAS,EAAE;IAE9D,MAAM,gBAAgB,OAAO,QAAQ,UAAU;AAC7C,SAAI;AACF,iBAAW,MAAM;AACjB,aAAO;aACD;AACN,aAAO;;MAET;AAEF,QAAI,cAAc,WAAW,EAC3B,SAAQ,MAAM,mBAAmB,cAAc,GAAG,cAAc;aACvD,cAAc,SAAS,EAChC,SAAQ,MACN,8BAA8B,cAAc,KAAK,MAAM,IAAI,EAAE,GAAG,CAAC,KAAK,KAAK,GAC5E;QAED,SAAQ,MAAM,yCAAyC;AAEzD,YAAQ,IAAI,sBAAsB;AAClC,oBAAgB;UACX;AACL,YAAQ,MAAM,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,MAAM;AACtF,YAAQ,IAAI,qDAAqD;;AAEnE,WAAQ,KAAK,EAAE;;;CAGpB,CAAC;;;;AC3mBF,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;AAEzD,MAAM,UADc,KAAK,MAAM,aAAa,KAAK,WAAW,kBAAkB,EAAE,QAAQ,CAAC,CAC7D;AAG5B,MAAM,gCAAgB,IAAI,KAAK;AAC/B,cAAc,IAAI,YAAY,gBAAgB;AAC9C,cAAc,IAAI,WAAW,eAAe;AAC5C,cAAc,IAAI,QAAQ,YAAY;AAGtC,MAAa,YAAY,OAAO;CAC9B,MAAM;CACN,aAAa;CACd,CAAC;AAGF,MAAa,cAAc,OAAO;CAChC,MAAM;CACN,aAAa;CACd,CAAC;AAEF,eAAsB,MAAM;AAC1B,KAAI;EACF,MAAM,OAAO,QAAQ,KAAK,MAAM,EAAE;AAGlC,MAAI,KAAK,OAAO,SAEd,OAAM,IAAI,KAAK,MAAM,EAAE,EAAE,eAAe;GACtC,MAAM;GACN;GACD,CAAC;WACO,KAAK,OAAO,SAErB,OAAM,IAAI,KAAK,MAAM,EAAE,EAAE,eAAe;GACtC,MAAM;GACN;GACD,CAAC;WACO,KAAK,OAAO,MAAM;GAE3B,MAAM,iBAAiB,KAAK;AAE5B,OAAI,CAAC,kBAAkB,mBAAmB,YAAY,mBAAmB,MAAM;AAE7E,YAAQ,IAAI,+BAA+B;AAC3C,YAAQ,IAAI,GAAG;AACf,YAAQ,IAAI,SAAS;AACrB,YAAQ,IAAI,4BAA4B;AACxC,YAAQ,IAAI,GAAG;AACf,YAAQ,IAAI,YAAY;AACxB,YAAQ,IACN,gFACD;AACD,YAAQ,IAAI,kDAAkD;AAC9D,YAAQ,IAAI,4EAA4E;AACxF,YAAQ,IAAI,GAAG;AACf,YAAQ,IAAI,yDAAyD;AACrE,YAAQ,IAAI,kCAAkC;AAC9C,YAAQ,IAAI,iCAAiC;AAC7C,YAAQ,IAAI,8BAA8B;AAC1C,YAAQ,IAAI,GAAG;AACf,YAAQ,IAAI,WAAW;AACvB,YAAQ,IAAI,qDAAqD;AACjE,YAAQ,IAAI,gDAAgD;cACnD,mBAAmB,eAAe,mBAAmB,KAC9D,SAAQ,IAAI,QAAQ;QACf;IAEL,MAAM,aAAa,cAAc,IAAI,eAAe;AAEpD,QAAI,CAAC,YAAY;AACf,aAAQ,MAAM,oBAAoB,iBAAiB;AACnD,aAAQ,IAAI,GAAG;AACf,aAAQ,IAAI,qDAAqD;AACjE,aAAQ,KAAK,EAAE;;AAIjB,UAAM,IAAI,KAAK,MAAM,EAAE,EAAE,YAAY;KACnC,MAAM,iBAAiB;KACvB;KACD,CAAC;;aAEK,CAAC,KAAK,UAAU,KAAK,OAAO,YAAY,KAAK,OAAO,MAAM;AAEnE,WAAQ,IAAI,gCAAgC;AAC5C,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,SAAS;AACrB,WAAQ,IAAI,yBAAyB;AACrC,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,YAAY;AACxB,WAAQ,IAAI,uDAAuD;AACnE,WAAQ,IAAI,0DAA0D;AACtE,WAAQ,IAAI,0EAA0E;AACtF,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,yDAAyD;AACrE,WAAQ,IAAI,yBAAyB;AACrC,WAAQ,IAAI,6BAA6B;AACzC,WAAQ,IAAI,6BAA6B;AACzC,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,WAAW;AACvB,WAAQ,IAAI,qDAAqD;AACjE,WAAQ,IAAI,gDAAgD;aACnD,KAAK,OAAO,eAAe,KAAK,OAAO,KAChD,SAAQ,IAAI,QAAQ;OACf;AAEL,WAAQ,MAAM,oBAAoB,KAAK,KAAK;AAC5C,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,kDAAkD;AAC9D,WAAQ,KAAK,EAAE;;UAEV,OAAO;AACd,UAAQ,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,MAAM;AACvE,UAAQ,KAAK,EAAE;;;AAInB,IAAI,OAAO,KAAK,KACd,OAAM,KAAK"}
|
|
1
|
+
{"version":3,"file":"cli.js","names":["loadConfig","c12LoadConfig","loadConfig","allDatabases: FragnoDatabase<AnySchema>[]","adapter: DatabaseAdapter | undefined","firstAdapterFile: string | undefined","fragnoDatabases: FragnoDatabase<AnySchema>[]","results: { schema: string; path: string; namespace: string }[]","results: ExecuteMigrationResult[]","info: {\n namespace: string;\n schemaVersion: number;\n migrationSupport: boolean;\n currentVersion?: string;\n pendingVersions?: string;\n status?: string;\n }","lines: string[]","matches: CodeBlockMatch[]","startLine: number | undefined","endLine: number | undefined"],"sources":["../src/utils/load-config.ts","../src/utils/find-fragno-databases.ts","../src/commands/db/generate.ts","../src/commands/db/migrate.ts","../src/commands/db/info.ts","../src/utils/format-search-results.ts","../src/commands/search.ts","../src/commands/corpus.ts","../src/cli.ts"],"sourcesContent":["import { loadConfig as c12LoadConfig } from \"c12\";\nimport { readFile, access } from \"node:fs/promises\";\nimport { dirname, resolve, join } from \"node:path\";\nimport { constants } from \"node:fs\";\n\n/**\n * Checks if a file exists using async API.\n */\nasync function fileExists(path: string): Promise<boolean> {\n try {\n await access(path, constants.F_OK);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Walks up the directory tree from the target path to find a tsconfig.json file.\n */\nasync function findTsconfig(startPath: string): Promise<string | null> {\n let currentDir = dirname(startPath);\n const root = resolve(\"/\");\n\n while (currentDir !== root) {\n const tsconfigPath = join(currentDir, \"tsconfig.json\");\n if (await fileExists(tsconfigPath)) {\n return tsconfigPath;\n }\n currentDir = dirname(currentDir);\n }\n\n return null;\n}\n\n/**\n * Strips comments from JSONC (JSON with Comments) content.\n */\nexport function stripJsonComments(jsonc: string): string {\n // Remove single-line comments (// ...)\n let result = jsonc.replace(/\\/\\/[^\\n]*/g, \"\");\n\n // Remove multi-line comments (/* ... */)\n result = result.replace(/\\/\\*[\\s\\S]*?\\*\\//g, \"\");\n\n return result;\n}\n\n/**\n * Converts TypeScript path aliases to jiti alias format.\n * Strips trailing '*' from aliases and paths, and resolves paths relative to baseUrl.\n */\nexport function convertTsconfigPathsToJitiAlias(\n tsconfigPaths: Record<string, string[]>,\n baseUrlResolved: string,\n): Record<string, string> {\n return Object.fromEntries(\n Object.entries(tsconfigPaths).map(([_alias, paths]) => {\n const pathsArray = paths as string[];\n // trim '*' if present and resolve the actual path\n const aliasKey = _alias.endsWith(\"*\") ? _alias.slice(0, -1) : _alias;\n const pathValue = pathsArray[0].endsWith(\"*\") ? 
pathsArray[0].slice(0, -1) : pathsArray[0];\n return [aliasKey, resolve(baseUrlResolved, pathValue)];\n }),\n );\n}\n\n/**\n * Resolves tsconfig path aliases for use with jiti.\n */\nasync function resolveTsconfigAliases(targetPath: string): Promise<Record<string, string>> {\n const tsconfigPath = await findTsconfig(targetPath);\n\n if (!tsconfigPath) {\n return {};\n }\n\n try {\n const tsconfigContent = await readFile(tsconfigPath, \"utf-8\");\n // Strip comments to handle JSONC format\n const jsonContent = stripJsonComments(tsconfigContent);\n const tsconfig = JSON.parse(jsonContent);\n const tsconfigPaths = tsconfig?.compilerOptions?.paths;\n\n if (!tsconfigPaths || typeof tsconfigPaths !== \"object\") {\n return {};\n }\n\n const tsconfigDir = dirname(tsconfigPath);\n const baseUrl = tsconfig?.compilerOptions?.baseUrl || \".\";\n const baseUrlResolved = resolve(tsconfigDir, baseUrl);\n\n // Convert tsconfig paths to jiti alias format\n return convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrlResolved);\n } catch (error) {\n console.warn(`Warning: Failed to parse tsconfig at ${tsconfigPath}:`, error);\n return {};\n }\n}\n\n/**\n * Loads a config file using c12 with automatic tsconfig path alias resolution.\n */\nexport async function loadConfig(path: string): Promise<Record<string, unknown>> {\n const alias = await resolveTsconfigAliases(path);\n\n const { config } = await c12LoadConfig({\n configFile: path,\n jitiOptions: {\n alias,\n },\n });\n\n return config as Record<string, unknown>;\n}\n","import { isFragnoDatabase, type DatabaseAdapter, FragnoDatabase } from \"@fragno-dev/db\";\nimport {\n fragnoDatabaseAdapterNameFakeSymbol,\n fragnoDatabaseAdapterVersionFakeSymbol,\n} from \"@fragno-dev/db/adapters\";\nimport type { AnySchema } from \"@fragno-dev/db/schema\";\nimport { instantiatedFragmentFakeSymbol } from \"@fragno-dev/core/internal/symbols\";\nimport { type FragnoInstantiatedFragment } from \"@fragno-dev/core\";\nimport { loadConfig } from \"./load-config\";\nimport { relative } from \"node:path\";\n\nexport async function importFragmentFile(path: string): Promise<Record<string, unknown>> {\n // Enable dry run mode for database schema extraction\n process.env[\"FRAGNO_INIT_DRY_RUN\"] = \"true\";\n\n try {\n const config = await loadConfig(path);\n\n const databases = findFragnoDatabases(config);\n const adapterNames = databases.map(\n (db) =>\n `${db.adapter[fragnoDatabaseAdapterNameFakeSymbol]}@${db.adapter[fragnoDatabaseAdapterVersionFakeSymbol]}`,\n );\n const uniqueAdapterNames = [...new Set(adapterNames)];\n\n if (uniqueAdapterNames.length > 1) {\n throw new Error(\n `All Fragno databases must use the same adapter name and version. 
` +\n `Found mismatch: (${adapterNames.join(\", \")})`,\n );\n }\n\n return {\n adapter: databases[0].adapter,\n databases,\n };\n } finally {\n // Clean up after loading\n delete process.env[\"FRAGNO_INIT_DRY_RUN\"];\n }\n}\n\n/**\n * Imports multiple fragment files and validates they all use the same adapter.\n * Returns the combined databases from all files.\n */\nexport async function importFragmentFiles(paths: string[]): Promise<{\n adapter: DatabaseAdapter;\n databases: FragnoDatabase<AnySchema>[];\n}> {\n // De-duplicate paths (in case same file was specified multiple times)\n const uniquePaths = Array.from(new Set(paths));\n\n if (uniquePaths.length === 0) {\n throw new Error(\"No fragment files provided\");\n }\n\n const allDatabases: FragnoDatabase<AnySchema>[] = [];\n let adapter: DatabaseAdapter | undefined;\n let firstAdapterFile: string | undefined;\n const cwd = process.cwd();\n\n for (const path of uniquePaths) {\n const relativePath = relative(cwd, path);\n\n try {\n const result = await importFragmentFile(path);\n const databases = result[\"databases\"] as FragnoDatabase<AnySchema>[];\n const fileAdapter = result[\"adapter\"] as DatabaseAdapter;\n\n if (databases.length === 0) {\n console.warn(\n `Warning: No FragnoDatabase instances found in ${relativePath}.\\n` +\n `Make sure you export either:\\n` +\n ` - A FragnoDatabase instance created with .create(adapter)\\n` +\n ` - An instantiated fragment with embedded database definition\\n`,\n );\n continue;\n }\n\n // Set the adapter from the first file with databases\n if (!adapter) {\n adapter = fileAdapter;\n firstAdapterFile = relativePath;\n }\n\n // Validate all files use the same adapter name and version\n const firstAdapterName = adapter[fragnoDatabaseAdapterNameFakeSymbol];\n const firstAdapterVersion = adapter[fragnoDatabaseAdapterVersionFakeSymbol];\n const fileAdapterName = fileAdapter[fragnoDatabaseAdapterNameFakeSymbol];\n const fileAdapterVersion = fileAdapter[fragnoDatabaseAdapterVersionFakeSymbol];\n\n if (firstAdapterName !== fileAdapterName || firstAdapterVersion !== fileAdapterVersion) {\n const firstAdapterInfo = `${firstAdapterName}@${firstAdapterVersion}`;\n const fileAdapterInfo = `${fileAdapterName}@${fileAdapterVersion}`;\n\n throw new Error(\n `All fragments must use the same database adapter. Mixed adapters found:\\n` +\n ` - ${firstAdapterFile}: ${firstAdapterInfo}\\n` +\n ` - ${relativePath}: ${fileAdapterInfo}\\n\\n` +\n `Make sure all fragments use the same adapter name and version.`,\n );\n }\n\n allDatabases.push(...databases);\n console.log(` Found ${databases.length} database(s) in ${relativePath}`);\n } catch (error) {\n throw new Error(\n `Failed to import fragment file ${relativePath}: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n }\n\n if (allDatabases.length === 0) {\n throw new Error(\n `No FragnoDatabase instances found in any of the target files.\\n` +\n `Make sure your files export either:\\n` +\n ` - A FragnoDatabase instance created with .create(adapter)\\n` +\n ` - An instantiated fragment with embedded database definition\\n`,\n );\n }\n\n if (!adapter) {\n throw new Error(\"No adapter found in any of the fragment files\");\n }\n\n return {\n adapter,\n databases: allDatabases,\n };\n}\n\nfunction isNewFragnoInstantiatedFragment(\n value: unknown,\n): value is FragnoInstantiatedFragment<\n [],\n unknown,\n Record<string, unknown>,\n Record<string, unknown>,\n Record<string, unknown>,\n unknown,\n Record<string, unknown>\n> {\n return (\n typeof value === \"object\" &&\n value !== null &&\n instantiatedFragmentFakeSymbol in value &&\n value[instantiatedFragmentFakeSymbol] === instantiatedFragmentFakeSymbol\n );\n}\n\n/**\n * Finds all FragnoDatabase instances in a module, including those embedded\n * in instantiated fragments.\n */\nexport function findFragnoDatabases(\n targetModule: Record<string, unknown>,\n): FragnoDatabase<AnySchema>[] {\n const fragnoDatabases: FragnoDatabase<AnySchema>[] = [];\n\n for (const [_key, value] of Object.entries(targetModule)) {\n if (isFragnoDatabase(value)) {\n fragnoDatabases.push(value);\n } else if (isNewFragnoInstantiatedFragment(value)) {\n // Handle new fragment API\n const internal = value.$internal;\n const deps = internal.deps as Record<string, unknown>;\n const options = internal.options as Record<string, unknown>;\n\n // Check if this is a database fragment by looking for implicit database dependencies\n if (!deps[\"db\"] || !deps[\"schema\"]) {\n continue;\n }\n\n const schema = deps[\"schema\"] as AnySchema;\n const namespace = deps[\"namespace\"] as string;\n const databaseAdapter = options[\"databaseAdapter\"] as DatabaseAdapter | undefined;\n\n if (!databaseAdapter) {\n console.warn(\n `Warning: Fragment '${value.name}' appears to be a database fragment but no databaseAdapter found in options.`,\n );\n continue;\n }\n\n fragnoDatabases.push(\n new FragnoDatabase({\n namespace,\n schema,\n adapter: databaseAdapter,\n }),\n );\n }\n }\n\n return fragnoDatabases;\n}\n","import { writeFile, mkdir } from \"node:fs/promises\";\nimport { resolve, dirname } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { generateMigrationsOrSchema } from \"@fragno-dev/db/generation-engine\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\n\n// Define the db generate command with type safety\nexport const generateCommand = define({\n name: \"generate\",\n description: \"Generate schema files from FragnoDatabase definitions\",\n args: {\n output: {\n type: \"string\",\n short: \"o\",\n description:\n \"Output path: for single file, exact file path; for multiple files, output directory (default: current directory)\",\n },\n from: {\n type: \"number\",\n short: \"f\",\n description: \"Source version to generate migration from (default: current database version)\",\n },\n to: {\n type: \"number\",\n short: \"t\",\n description: \"Target version to generate migration to (default: latest schema version)\",\n },\n prefix: {\n type: \"string\",\n short: \"p\",\n description: \"String to prepend to the generated file (e.g., '/* eslint-disable */')\",\n },\n },\n run: async (ctx) => {\n // With `define()` and `multiple: true`, targets is properly typed as string[]\n const targets = ctx.positionals;\n 
const output = ctx.values.output;\n const toVersion = ctx.values.to;\n const fromVersion = ctx.values.from;\n const prefix = ctx.values.prefix;\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files and validate they use the same adapter\n const { databases: allFragnoDatabases, adapter } = await importFragmentFiles(targetPaths);\n\n // Check if adapter supports any form of schema generation\n if (!adapter.createSchemaGenerator && !adapter.prepareMigrations) {\n throw new Error(\n `The adapter does not support schema generation. ` +\n `Please use an adapter that implements either createSchemaGenerator or prepareMigrations.`,\n );\n }\n\n // Generate schema for all fragments\n console.log(\"Generating schema...\");\n\n let results: { schema: string; path: string; namespace: string }[];\n try {\n results = await generateMigrationsOrSchema(allFragnoDatabases, {\n path: output,\n toVersion,\n fromVersion,\n });\n } catch (error) {\n throw new Error(\n `Failed to generate schema: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Write all generated files\n for (const result of results) {\n // For single file: use output as exact file path\n // For multiple files: use output as base directory\n const finalOutputPath =\n output && results.length === 1\n ? resolve(process.cwd(), output)\n : output\n ? resolve(process.cwd(), output, result.path)\n : resolve(process.cwd(), result.path);\n\n // Ensure parent directory exists\n const parentDir = dirname(finalOutputPath);\n try {\n await mkdir(parentDir, { recursive: true });\n } catch (error) {\n throw new Error(\n `Failed to create directory: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Write schema to file\n try {\n const content = prefix ? `${prefix}\\n${result.schema}` : result.schema;\n await writeFile(finalOutputPath, content, { encoding: \"utf-8\" });\n } catch (error) {\n throw new Error(\n `Failed to write schema file: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n console.log(`✓ Generated: ${finalOutputPath}`);\n }\n\n console.log(`\\n✓ Schema generated successfully!`);\n console.log(` Files generated: ${results.length}`);\n console.log(` Fragments:`);\n for (const db of allFragnoDatabases) {\n console.log(` - ${db.namespace} (version ${db.schema.version})`);\n }\n },\n});\n","import { resolve } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\nimport { executeMigrations, type ExecuteMigrationResult } from \"@fragno-dev/db/generation-engine\";\n\nexport const migrateCommand = define({\n name: \"migrate\",\n description: \"Run database migrations for all fragments to their latest versions\",\n args: {},\n run: async (ctx) => {\n const targets = ctx.positionals;\n\n if (targets.length === 0) {\n throw new Error(\"At least one target file path is required\");\n }\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files and validate they use the same adapter\n const { databases: allFragnoDatabases } = await importFragmentFiles(targetPaths);\n\n console.log(\"\\nMigrating all fragments to their latest versions...\\n\");\n\n let results: ExecuteMigrationResult[];\n try {\n results = await executeMigrations(allFragnoDatabases);\n } catch (error) {\n throw new Error(\n `Migration failed: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n\n // Display progress for each result\n for (const result of results) {\n console.log(`Fragment: ${result.namespace}`);\n console.log(` Current version: ${result.fromVersion}`);\n console.log(` Target version: ${result.toVersion}`);\n\n if (result.didMigrate) {\n console.log(` ✓ Migration completed: v${result.fromVersion} → v${result.toVersion}\\n`);\n } else {\n console.log(` ✓ Already at latest version. No migration needed.\\n`);\n }\n }\n\n // Summary\n console.log(\"═══════════════════════════════════════\");\n console.log(\"Migration Summary\");\n console.log(\"═══════════════════════════════════════\");\n\n const migrated = results.filter((r) => r.didMigrate);\n const skipped = results.filter((r) => !r.didMigrate);\n\n if (migrated.length > 0) {\n console.log(`\\n✓ Migrated ${migrated.length} fragment(s):`);\n for (const r of migrated) {\n console.log(` - ${r.namespace}: v${r.fromVersion} → v${r.toVersion}`);\n }\n }\n\n if (skipped.length > 0) {\n console.log(`\\n○ Skipped ${skipped.length} fragment(s) (already up-to-date):`);\n for (const r of skipped) {\n console.log(` - ${r.namespace}: v${r.toVersion}`);\n }\n }\n\n for (const db of allFragnoDatabases) {\n await db.adapter.close();\n }\n\n console.log(\"\\n✓ All migrations completed successfully\");\n },\n});\n","import { resolve } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\n\nexport const infoCommand = define({\n name: \"info\",\n description: \"Display database information and migration status\",\n args: {},\n run: async (ctx) => {\n const targets = ctx.positionals;\n\n if (targets.length === 0) {\n throw new Error(\"At least one target file path is required\");\n }\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files\n const { databases: allFragnoDatabases } = await importFragmentFiles(targetPaths);\n\n // Collect database information\n const dbInfos = await Promise.all(\n allFragnoDatabases.map(async (fragnoDb) => {\n const info: {\n namespace: string;\n schemaVersion: number;\n migrationSupport: boolean;\n currentVersion?: string;\n pendingVersions?: string;\n status?: string;\n } = {\n namespace: fragnoDb.namespace,\n schemaVersion: fragnoDb.schema.version,\n migrationSupport: !!fragnoDb.adapter.prepareMigrations,\n };\n\n // Get current database version if migrations are supported\n if (fragnoDb.adapter.prepareMigrations) {\n const currentVersion = await fragnoDb.adapter.getSchemaVersion(fragnoDb.namespace);\n info.currentVersion = currentVersion;\n // info.pendingVersions = fragnoDb.schema.version - currentVersion;\n\n if (info.schemaVersion.toString() !== info.currentVersion) {\n info.status = `Migrations pending`;\n } else {\n info.status = \"Up to date\";\n }\n } else {\n info.status = \"Schema only\";\n }\n\n return info;\n }),\n );\n\n // Determine if any database supports migrations\n const hasMigrationSupport = dbInfos.some((info) => info.migrationSupport);\n\n // Print compact table\n console.log(\"\");\n console.log(`Database Information:`);\n console.log(\"\");\n\n // Table header\n const namespaceHeader = \"Namespace\";\n const versionHeader = \"Schema\";\n const currentHeader = \"Current\";\n const statusHeader = \"Status\";\n\n const maxNamespaceLen = Math.max(\n namespaceHeader.length,\n ...dbInfos.map((info) => info.namespace.length),\n );\n const namespaceWidth = 
Math.max(maxNamespaceLen + 2, 20);\n const versionWidth = 8;\n const currentWidth = 9;\n const statusWidth = 25;\n\n // Print table\n console.log(\n namespaceHeader.padEnd(namespaceWidth) +\n versionHeader.padEnd(versionWidth) +\n (hasMigrationSupport ? currentHeader.padEnd(currentWidth) : \"\") +\n statusHeader,\n );\n console.log(\n \"-\".repeat(namespaceWidth) +\n \"-\".repeat(versionWidth) +\n (hasMigrationSupport ? \"-\".repeat(currentWidth) : \"\") +\n \"-\".repeat(statusWidth),\n );\n\n for (const info of dbInfos) {\n const currentVersionStr =\n info.currentVersion !== undefined ? String(info.currentVersion) : \"-\";\n console.log(\n info.namespace.padEnd(namespaceWidth) +\n String(info.schemaVersion).padEnd(versionWidth) +\n (hasMigrationSupport ? currentVersionStr.padEnd(currentWidth) : \"\") +\n (info.status || \"-\"),\n );\n }\n\n // Print help text\n console.log(\"\");\n if (!hasMigrationSupport) {\n console.log(\"Note: These adapters do not support migrations.\");\n console.log(\"Use 'fragno-cli db generate' to generate schema files.\");\n } else {\n console.log(\"Run 'fragno-cli db migrate <target>' to apply pending migrations.\");\n }\n },\n});\n","interface SearchResult {\n id: string;\n type: \"page\" | \"heading\" | \"text\";\n content: string;\n breadcrumbs?: string[];\n contentWithHighlights?: Array<{\n type: string;\n content: string;\n styles?: { highlight?: boolean };\n }>;\n url: string;\n}\n\ninterface MergedResult {\n url: string;\n urlWithMd: string;\n fullUrl: string;\n fullUrlWithMd: string;\n title?: string;\n breadcrumbs?: string[];\n type: \"page\" | \"heading\" | \"text\";\n sections: Array<{\n content: string;\n type: \"page\" | \"heading\" | \"text\";\n }>;\n}\n\n/**\n * Merge search results by URL, grouping sections and content under each URL (without hash)\n */\nexport function mergeResultsByUrl(results: SearchResult[], baseUrl: string): MergedResult[] {\n const mergedMap = new Map<string, MergedResult>();\n\n for (const result of results) {\n // Strip hash to get base URL for merging\n const baseUrlWithoutHash = result.url.split(\"#\")[0];\n const existing = mergedMap.get(baseUrlWithoutHash);\n\n if (existing) {\n // Add this result as a section\n existing.sections.push({\n content: result.content,\n type: result.type,\n });\n } else {\n // Create new merged result\n const urlWithMd = `${baseUrlWithoutHash}.md`;\n\n const fullUrl = `https://${baseUrl}${baseUrlWithoutHash}`;\n const fullUrlWithMd = `https://${baseUrl}${urlWithMd}`;\n\n mergedMap.set(baseUrlWithoutHash, {\n url: baseUrlWithoutHash,\n urlWithMd,\n fullUrl,\n fullUrlWithMd,\n title: result.type === \"page\" ? 
result.content : undefined,\n breadcrumbs: result.breadcrumbs,\n type: result.type,\n sections: [\n {\n content: result.content,\n type: result.type,\n },\n ],\n });\n }\n }\n\n return Array.from(mergedMap.values());\n}\n\n/**\n * Format merged results as markdown\n */\nexport function formatAsMarkdown(mergedResults: MergedResult[]): string {\n const lines: string[] = [];\n\n for (const result of mergedResults) {\n // Title (use first section content if it's a page, or just use content)\n const title = result.title || result.sections[0]?.content || \"Untitled\";\n lines.push(`## Page: '${title}'`);\n // Breadcrumbs\n if (result.breadcrumbs && result.breadcrumbs.length > 0) {\n lines.push(\" \" + result.breadcrumbs.join(\" > \"));\n lines.push(\"\");\n }\n\n // Both URLs\n lines.push(\"URLs:\");\n lines.push(` - ${result.fullUrl}`);\n lines.push(` - ${result.fullUrlWithMd}`);\n lines.push(\"\");\n\n // Show all sections found on this page\n if (result.sections.length > 1) {\n lines.push(\"Relevant sections:\");\n for (let i = 0; i < result.sections.length; i++) {\n const section = result.sections[i];\n // Skip the first section if it's just the page title repeated\n if (i === 0 && result.type === \"page\" && section.content === result.title) {\n continue;\n }\n lines.push(` - ${section.content}`);\n }\n lines.push(\"\");\n }\n\n lines.push(\"---\");\n lines.push(\"\");\n }\n\n return lines.join(\"\\n\");\n}\n\n/**\n * Format merged results as JSON\n */\nexport function formatAsJson(mergedResults: MergedResult[]): string {\n return JSON.stringify(mergedResults, null, 2);\n}\n","import { define } from \"gunshi\";\nimport {\n mergeResultsByUrl,\n formatAsMarkdown,\n formatAsJson,\n} from \"../utils/format-search-results.js\";\n\ninterface SearchResult {\n id: string;\n type: \"page\" | \"heading\" | \"text\";\n content: string;\n breadcrumbs?: string[];\n contentWithHighlights?: Array<{\n type: string;\n content: string;\n styles?: { highlight?: boolean };\n }>;\n url: string;\n}\n\nexport const searchCommand = define({\n name: \"search\",\n description: \"Search the Fragno documentation\",\n args: {\n limit: {\n type: \"number\",\n description: \"Maximum number of results to show\",\n default: 10,\n },\n json: {\n type: \"boolean\",\n description: \"Output results in JSON format\",\n default: false,\n },\n markdown: {\n type: \"boolean\",\n description: \"Output results in Markdown format (default)\",\n default: true,\n },\n \"base-url\": {\n type: \"string\",\n description: \"Base URL for the documentation site\",\n default: \"fragno.dev\",\n },\n },\n run: async (ctx) => {\n const query = ctx.positionals.join(\" \");\n\n if (!query || query.trim().length === 0) {\n throw new Error(\"Please provide a search query\");\n }\n\n // Determine output mode\n const jsonMode = ctx.values.json as boolean;\n const baseUrl = ctx.values[\"base-url\"] as string;\n\n if (!jsonMode) {\n console.log(`Searching for: \"${query}\"\\n`);\n }\n\n try {\n // Make request to the docs search API\n const encodedQuery = encodeURIComponent(query);\n const response = await fetch(`https://${baseUrl}/api/search?query=${encodedQuery}`);\n\n if (!response.ok) {\n throw new Error(`API request failed with status ${response.status}`);\n }\n\n const results = (await response.json()) as SearchResult[];\n\n // Apply limit\n const limit = ctx.values.limit as number;\n const limitedResults = results.slice(0, limit);\n\n if (limitedResults.length === 0) {\n if (jsonMode) {\n console.log(\"[]\");\n } else {\n console.log(\"No results 
found.\");\n }\n return;\n }\n\n // Merge results by URL\n const mergedResults = mergeResultsByUrl(limitedResults, baseUrl);\n\n // Output based on mode\n if (jsonMode) {\n console.log(formatAsJson(mergedResults));\n } else {\n // Markdown mode (default)\n console.log(\n `Found ${results.length} result${results.length === 1 ? \"\" : \"s\"}${results.length > limit ? ` (showing ${limit})` : \"\"}\\n`,\n );\n console.log(formatAsMarkdown(mergedResults));\n }\n } catch (error) {\n if (error instanceof Error) {\n throw new Error(`Search failed: ${error.message}`);\n }\n throw new Error(\"Search failed: An unknown error occurred\");\n }\n },\n});\n","import { define } from \"gunshi\";\nimport {\n getSubjects,\n getSubject,\n getAllSubjects,\n getSubjectParent,\n getSubjectChildren,\n getAllSubjectIdsInOrder,\n isCategory,\n getCategoryTitle,\n} from \"@fragno-dev/corpus\";\nimport type { Subject, Example } from \"@fragno-dev/corpus\";\nimport { marked } from \"marked\";\n// @ts-expect-error - marked-terminal types are outdated for v7\nimport { markedTerminal } from \"marked-terminal\";\nimport { stripVTControlCharacters } from \"node:util\";\n\n// Always configure marked to use terminal renderer\nmarked.use(markedTerminal());\n\ninterface PrintOptions {\n showLineNumbers: boolean;\n startLine?: number;\n endLine?: number;\n headingsOnly: boolean;\n}\n\n/**\n * Build markdown content for multiple subjects\n */\nexport function buildSubjectsMarkdown(subjects: Subject[]): string {\n let fullMarkdown = \"\";\n\n for (const subject of subjects) {\n fullMarkdown += `# ${subject.title}\\n\\n`;\n\n if (subject.description) {\n fullMarkdown += `${subject.description}\\n\\n`;\n }\n\n // Add imports block if present\n if (subject.imports) {\n fullMarkdown += `### Imports\\n\\n\\`\\`\\`typescript\\n${subject.imports}\\n\\`\\`\\`\\n\\n`;\n }\n\n // Add prelude blocks if present\n if (subject.prelude.length > 0) {\n fullMarkdown += `### Prelude\\n\\n`;\n for (const block of subject.prelude) {\n // Don't include the directive in the displayed code fence\n fullMarkdown += `\\`\\`\\`typescript\\n${block.code}\\n\\`\\`\\`\\n\\n`;\n }\n }\n\n // Add all sections\n for (const section of subject.sections) {\n fullMarkdown += `## ${section.heading}\\n\\n${section.content}\\n\\n`;\n }\n }\n\n return fullMarkdown;\n}\n\n/**\n * Add line numbers to content\n */\nexport function addLineNumbers(content: string, startFrom: number = 1): string {\n const lines = content.split(\"\\n\");\n const maxDigits = String(startFrom + lines.length - 1).length;\n\n return lines\n .map((line, index) => {\n const lineNum = startFrom + index;\n const paddedNum = String(lineNum).padStart(maxDigits, \" \");\n return `${paddedNum}│ ${line}`;\n })\n .join(\"\\n\");\n}\n\n/**\n * Filter content by line range\n */\nexport function filterByLineRange(content: string, startLine: number, endLine: number): string {\n const lines = content.split(\"\\n\");\n // Convert to 0-based index\n const start = Math.max(0, startLine - 1);\n const end = Math.min(lines.length, endLine);\n return lines.slice(start, end).join(\"\\n\");\n}\n\n/**\n * Extract headings and code block information with line numbers\n */\nexport function extractHeadingsAndBlocks(subjects: Subject[]): string {\n let output = \"\";\n let currentLine = 1;\n let lastOutputLine = 0;\n\n // Helper to add a gap indicator if we skipped lines\n const addGapIfNeeded = () => {\n if (lastOutputLine > 0 && currentLine > lastOutputLine + 1) {\n output += ` │\\n`;\n }\n };\n\n // Add instruction 
header\n output += \"Use --start N --end N flags to show specific line ranges\\n\\n\";\n\n for (const subject of subjects) {\n // Title\n addGapIfNeeded();\n output += `${currentLine.toString().padStart(4, \" \")}│ # ${subject.title}\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n\n // Empty line after title - SHOW IT\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n\n // Description - show full text\n if (subject.description) {\n const descLines = subject.description.split(\"\\n\");\n for (const line of descLines) {\n output += `${currentLine.toString().padStart(4, \" \")}│ ${line}\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n }\n // Empty line after description - SHOW IT\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n }\n\n // Imports block - show full code\n if (subject.imports) {\n addGapIfNeeded();\n output += `${currentLine.toString().padStart(4, \" \")}│ ### Imports\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n // Empty line after heading - SHOW IT\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n output += `${currentLine.toString().padStart(4, \" \")}│ \\`\\`\\`typescript\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n const importLines = subject.imports.split(\"\\n\");\n for (const line of importLines) {\n output += `${currentLine.toString().padStart(4, \" \")}│ ${line}\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n }\n output += `${currentLine.toString().padStart(4, \" \")}│ \\`\\`\\`\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n // Empty line after code block - SHOW IT\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n }\n\n // Prelude blocks - show as list\n if (subject.prelude.length > 0) {\n addGapIfNeeded();\n output += `${currentLine.toString().padStart(4, \" \")}│ ### Prelude\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n // Empty line after heading\n output += `${currentLine.toString().padStart(4, \" \")}│\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n\n for (const block of subject.prelude) {\n const id = block.id || \"(no-id)\";\n const blockStartLine = currentLine + 1; // +1 for opening ```\n const codeLines = block.code.split(\"\\n\").length;\n const blockEndLine = currentLine + 1 + codeLines; // opening ``` + code lines\n output += `${currentLine.toString().padStart(4, \" \")}│ - id: \\`${id}\\`, L${blockStartLine}-${blockEndLine}\\n`;\n lastOutputLine = currentLine;\n currentLine += codeLines + 3; // opening ```, code, closing ```, blank line\n }\n // Update lastOutputLine to current position to avoid gap indicator\n lastOutputLine = currentLine - 1;\n }\n\n // Sections - show headings and any example IDs that belong to them\n const sectionToExamples = new Map<string, Example[]>();\n\n // Group examples by their rough section (based on heading appearance in explanations)\n for (const example of subject.examples) {\n // Try to match the example to a section based on context\n // For now, we'll list all example IDs under the sections where they appear\n for (const section of subject.sections) {\n // Check if the section contains references to this example\n if (\n section.content.includes(example.code.substring(0, Math.min(50, example.code.length)))\n ) {\n if (!sectionToExamples.has(section.heading)) {\n 
sectionToExamples.set(section.heading, []);\n }\n sectionToExamples.get(section.heading)!.push(example);\n break;\n }\n }\n }\n\n for (const section of subject.sections) {\n addGapIfNeeded();\n output += `${currentLine.toString().padStart(4, \" \")}│ ## ${section.heading}\\n`;\n lastOutputLine = currentLine;\n currentLine += 1;\n\n // Show code block IDs as a list if any examples match this section\n const examples = sectionToExamples.get(section.heading) || [];\n if (examples.length > 0) {\n // We need to parse the section content to find where each example appears\n const sectionStartLine = currentLine;\n const lines = section.content.split(\"\\n\");\n\n for (const example of examples) {\n const id = example.id || \"(no-id)\";\n // Find the code block in section content\n let blockStartLine = sectionStartLine;\n let blockEndLine = sectionStartLine;\n let inCodeBlock = false;\n let foundBlock = false;\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n if (line.trim().startsWith(\"```\") && !inCodeBlock) {\n // Check if next lines match the example\n const codeStart = i + 1;\n let matches = true;\n const exampleLines = example.code.split(\"\\n\");\n for (let j = 0; j < Math.min(3, exampleLines.length); j++) {\n if (lines[codeStart + j]?.trim() !== exampleLines[j]?.trim()) {\n matches = false;\n break;\n }\n }\n if (matches) {\n blockStartLine = sectionStartLine + i + 1; // +1 to skip opening ```\n blockEndLine = sectionStartLine + i + exampleLines.length;\n foundBlock = true;\n break;\n }\n }\n }\n\n if (foundBlock) {\n output += `${currentLine.toString().padStart(4, \" \")}│ - id: \\`${id}\\`, L${blockStartLine}-${blockEndLine}\\n`;\n } else {\n output += `${currentLine.toString().padStart(4, \" \")}│ - id: \\`${id}\\`\\n`;\n }\n lastOutputLine = currentLine;\n }\n }\n\n // Count lines\n const sectionLines = section.content.split(\"\\n\");\n for (const _line of sectionLines) {\n currentLine += 1;\n }\n currentLine += 1; // blank line after section\n // Update lastOutputLine to current position to avoid gap indicator\n lastOutputLine = currentLine - 1;\n }\n }\n\n return output;\n}\n\n/**\n * Print subjects with the given options\n */\nasync function printSubjects(subjects: Subject[], options: PrintOptions): Promise<void> {\n if (options.headingsOnly) {\n // Show only headings and code block IDs\n const headingsOutput = extractHeadingsAndBlocks(subjects);\n console.log(headingsOutput);\n return;\n }\n\n // Build the full markdown content\n const markdown = buildSubjectsMarkdown(subjects);\n\n // Render markdown to terminal for nice formatting\n let output = await marked.parse(markdown);\n\n // Apply line range filter if specified (after rendering)\n const startLine = options.startLine ?? 1;\n if (options.startLine !== undefined || options.endLine !== undefined) {\n const end = options.endLine ?? output.split(\"\\n\").length;\n output = filterByLineRange(output, startLine, end);\n }\n\n // Add line numbers after rendering (if requested)\n // Line numbers correspond to the rendered output that agents interact with\n if (options.showLineNumbers) {\n output = addLineNumbers(output, startLine);\n }\n\n console.log(output);\n}\n\n/**\n * Find and print code blocks by ID\n */\nasync function printCodeBlockById(\n id: string,\n topics: string[],\n showLineNumbers: boolean,\n): Promise<void> {\n // If topics are specified, search only those; otherwise search all subjects\n const subjects = topics.length > 0 ? 
getSubject(...topics) : getAllSubjects();\n\n interface CodeBlockMatch {\n subjectId: string;\n subjectTitle: string;\n section: string;\n code: string;\n type: \"prelude\" | \"example\";\n startLine?: number;\n endLine?: number;\n }\n\n const matches: CodeBlockMatch[] = [];\n\n for (const subject of subjects) {\n // Build the rendered markdown to get correct line numbers (matching --start/--end behavior)\n const fullMarkdown = buildSubjectsMarkdown([subject]);\n const renderedOutput = await marked.parse(fullMarkdown);\n const renderedLines = renderedOutput.split(\"\\n\");\n\n // Search in prelude blocks\n for (const block of subject.prelude) {\n if (block.id === id) {\n // Find line numbers in the rendered output\n let startLine: number | undefined;\n let endLine: number | undefined;\n\n // Search for the prelude code in the rendered output\n const codeLines = block.code.split(\"\\n\");\n const firstCodeLine = codeLines[0].trim();\n\n for (let i = 0; i < renderedLines.length; i++) {\n // Strip ANSI codes before comparing\n if (stripVTControlCharacters(renderedLines[i]).trim() === firstCodeLine) {\n // Found the start of the code\n startLine = i + 1; // 1-based line numbers\n endLine = i + codeLines.length;\n break;\n }\n }\n\n matches.push({\n subjectId: subject.id,\n subjectTitle: subject.title,\n section: \"Prelude\",\n code: block.code,\n type: \"prelude\",\n startLine,\n endLine,\n });\n }\n }\n\n // Search in examples\n for (const example of subject.examples) {\n if (example.id === id) {\n // Try to find which section this example belongs to\n let sectionName = \"Unknown Section\";\n let startLine: number | undefined;\n let endLine: number | undefined;\n\n for (const section of subject.sections) {\n if (\n section.content.includes(example.code.substring(0, Math.min(50, example.code.length)))\n ) {\n sectionName = section.heading;\n\n // Find line numbers in the rendered output\n const codeLines = example.code.split(\"\\n\");\n const firstCodeLine = codeLines[0].trim();\n\n for (let i = 0; i < renderedLines.length; i++) {\n // Strip ANSI codes before comparing\n if (stripVTControlCharacters(renderedLines[i]).trim() === firstCodeLine) {\n // Found the start of the code\n startLine = i + 1; // 1-based line numbers\n endLine = i + codeLines.length;\n break;\n }\n }\n break;\n }\n }\n\n matches.push({\n subjectId: subject.id,\n subjectTitle: subject.title,\n section: sectionName,\n code: example.code,\n type: \"example\",\n startLine,\n endLine,\n });\n }\n }\n }\n\n if (matches.length === 0) {\n console.error(`Error: No code block found with id \"${id}\"`);\n if (topics.length > 0) {\n console.error(`Searched in topics: ${topics.join(\", \")}`);\n } else {\n console.error(\"Searched in all available topics\");\n }\n process.exit(1);\n }\n\n // Build markdown output\n for (let i = 0; i < matches.length; i++) {\n const match = matches[i];\n\n if (matches.length > 1 && i > 0) {\n console.log(\"\\n---\\n\");\n }\n\n // Build markdown for this match\n let matchMarkdown = `# ${match.subjectTitle}\\n\\n`;\n matchMarkdown += `## ${match.section}\\n\\n`;\n\n // Add line number info if available and requested (as plain text, not in markdown)\n if (showLineNumbers && match.startLine && match.endLine) {\n console.log(`Lines ${match.startLine}-${match.endLine} (use with --start/--end)\\n`);\n }\n\n matchMarkdown += `\\`\\`\\`typescript\\n${match.code}\\n\\`\\`\\`\\n`;\n\n // Render the markdown\n const rendered = await marked.parse(matchMarkdown);\n console.log(rendered);\n }\n}\n\n/**\n * Print only 
the topic tree\n */\nfunction printTopicTree(): void {\n const subjects = getSubjects();\n const subjectMap = new Map(subjects.map((s) => [s.id, s]));\n\n // Helper function to get title for any subject ID (including categories)\n function getTitle(subjectId: string): string {\n if (isCategory(subjectId)) {\n return getCategoryTitle(subjectId);\n }\n const subject = subjectMap.get(subjectId);\n return subject ? subject.title : subjectId;\n }\n\n // Helper function to recursively display tree\n function displayNode(subjectId: string, indent: string, isLast: boolean, isRoot: boolean): void {\n const title = getTitle(subjectId);\n\n if (isRoot) {\n console.log(` ${subjectId.padEnd(30)} ${title}`);\n } else {\n const connector = isLast ? \"└─\" : \"├─\";\n console.log(`${indent}${connector} ${subjectId.padEnd(26)} ${title}`);\n }\n\n const children = getSubjectChildren(subjectId);\n if (children.length > 0) {\n const childIndent = isRoot ? \" \" : indent + (isLast ? \" \" : \"│ \");\n for (let i = 0; i < children.length; i++) {\n displayNode(children[i], childIndent, i === children.length - 1, false);\n }\n }\n }\n\n // Get all root subject IDs (including categories)\n const allIds = getAllSubjectIdsInOrder();\n const rootIds = allIds.filter((id) => !getSubjectParent(id));\n\n // Display root subjects\n for (const subjectId of rootIds) {\n displayNode(subjectId, \"\", false, true);\n }\n}\n\n/**\n * Print information about the corpus command\n */\nfunction printCorpusHelp(): void {\n console.log(\"Fragno Corpus - Code examples and documentation (similar to LLMs.txt\");\n console.log(\"\");\n console.log(\"Usage: fragno-cli corpus [options] [topic...]\");\n console.log(\"\");\n console.log(\"Options:\");\n console.log(\" -n, --no-line-numbers Hide line numbers (shown by default)\");\n console.log(\" -s, --start N Starting line number to display from\");\n console.log(\" -e, --end N Ending line number to display to\");\n console.log(\" --headings Show only headings and code block IDs\");\n console.log(\" --id <id> Retrieve a specific code block by ID\");\n console.log(\" --tree Show only the topic tree\");\n console.log(\"\");\n console.log(\"Examples:\");\n console.log(\" fragno-cli corpus # List all available topics\");\n console.log(\" fragno-cli corpus --tree # Show only the topic tree\");\n console.log(\" fragno-cli corpus defining-routes # Show route definition examples\");\n console.log(\" fragno-cli corpus --headings database-querying\");\n console.log(\" # Show structure overview\");\n console.log(\" fragno-cli corpus --start 10 --end 50 database-querying\");\n console.log(\" # Show specific lines\");\n console.log(\" fragno-cli corpus --id create-user # Get code block by ID\");\n console.log(\" fragno-cli corpus database-adapters kysely-adapter\");\n console.log(\" # Show multiple topics\");\n console.log(\"\");\n console.log(\"Available topics:\");\n\n printTopicTree();\n}\n\nexport const corpusCommand = define({\n name: \"corpus\",\n description: \"View code examples and documentation for Fragno\",\n args: {\n \"no-line-numbers\": {\n type: \"boolean\",\n short: \"n\",\n description: \"Hide line numbers (line numbers are shown by default)\",\n },\n start: {\n type: \"number\",\n short: \"s\",\n description: \"Starting line number (1-based) to display from\",\n },\n end: {\n type: \"number\",\n short: \"e\",\n description: \"Ending line number (1-based) to display to\",\n },\n headings: {\n type: \"boolean\",\n description: \"Show only section headings and code block IDs with line 
numbers\",\n },\n id: {\n type: \"string\",\n description: \"Retrieve a specific code block by ID\",\n },\n tree: {\n type: \"boolean\",\n description: \"Show only the topic tree (without help text)\",\n },\n },\n run: async (ctx) => {\n const topics = ctx.positionals;\n const showLineNumbers = !(ctx.values[\"no-line-numbers\"] ?? false);\n const startLine = ctx.values.start;\n const endLine = ctx.values.end;\n const headingsOnly = ctx.values.headings ?? false;\n const codeBlockId = ctx.values.id;\n const treeOnly = ctx.values.tree ?? false;\n\n // Handle --id flag\n if (codeBlockId) {\n await printCodeBlockById(codeBlockId, topics, showLineNumbers);\n return;\n }\n\n // Handle --tree flag\n if (treeOnly) {\n printTopicTree();\n return;\n }\n\n // No topics provided - show help\n if (topics.length === 0) {\n printCorpusHelp();\n return;\n }\n\n // Validate line range\n if (startLine !== undefined && endLine !== undefined && startLine > endLine) {\n console.error(\"Error: --start must be less than or equal to --end\");\n process.exit(1);\n }\n\n // Load and display requested topics\n try {\n const subjects = getSubject(...topics);\n\n await printSubjects(subjects, {\n showLineNumbers,\n startLine,\n endLine,\n headingsOnly,\n });\n } catch (error) {\n if (error instanceof Error && error.message.includes(\"ENOENT\")) {\n // Extract the subject name from the error message or use the topics array\n const missingTopics = topics.filter((topic) => {\n try {\n getSubject(topic);\n return false;\n } catch {\n return true;\n }\n });\n\n if (missingTopics.length === 1) {\n console.error(`Error: Subject '${missingTopics[0]}' not found.`);\n } else if (missingTopics.length > 1) {\n console.error(\n `Error: Subjects not found: ${missingTopics.map((t) => `'${t}'`).join(\", \")}`,\n );\n } else {\n console.error(\"Error: One or more subjects not found.\");\n }\n console.log(\"\\nAvailable topics:\");\n printTopicTree();\n } else {\n console.error(\"Error loading topics:\", error instanceof Error ? 
error.message : error);\n console.log(\"\\nRun 'fragno-cli corpus' to see available topics.\");\n }\n process.exit(1);\n }\n },\n});\n","#!/usr/bin/env node\n\nimport { cli, define } from \"gunshi\";\nimport { generateCommand } from \"./commands/db/generate.js\";\nimport { migrateCommand } from \"./commands/db/migrate.js\";\nimport { infoCommand } from \"./commands/db/info.js\";\nimport { searchCommand } from \"./commands/search.js\";\nimport { corpusCommand } from \"./commands/corpus.js\";\nimport { readFileSync } from \"node:fs\";\nimport { fileURLToPath } from \"node:url\";\nimport { dirname, join } from \"node:path\";\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\nconst packageJson = JSON.parse(readFileSync(join(__dirname, \"../package.json\"), \"utf-8\"));\nconst version = packageJson.version;\n\n// Create a Map of db sub-commands\nconst dbSubCommands = new Map();\ndbSubCommands.set(\"generate\", generateCommand);\ndbSubCommands.set(\"migrate\", migrateCommand);\ndbSubCommands.set(\"info\", infoCommand);\n\n// Define the db command with nested subcommands\nexport const dbCommand = define({\n name: \"db\",\n description: \"Database management commands\",\n});\n\n// Define the main command\nexport const mainCommand = define({\n name: \"fragno-cli\",\n description: \"Tools for working with Fragno fragments\",\n});\n\nexport async function run() {\n try {\n const args = process.argv.slice(2);\n\n // Manual routing for top-level commands\n if (args[0] === \"search\") {\n // Run search command directly\n await cli(args.slice(1), searchCommand, {\n name: \"fragno-cli search\",\n version,\n });\n } else if (args[0] === \"corpus\") {\n // Run corpus command directly\n await cli(args.slice(1), corpusCommand, {\n name: \"fragno-cli corpus\",\n version,\n });\n } else if (args[0] === \"db\") {\n // Handle db subcommands\n const subCommandName = args[1];\n\n if (!subCommandName || subCommandName === \"--help\" || subCommandName === \"-h\") {\n // Show db help with subcommands\n console.log(\"Database management commands\");\n console.log(\"\");\n console.log(\"USAGE:\");\n console.log(\" fragno-cli db <COMMAND>\");\n console.log(\"\");\n console.log(\"COMMANDS:\");\n console.log(\n \" generate Generate schema files from FragnoDatabase definitions\",\n );\n console.log(\" migrate Run database migrations\");\n console.log(\" info Display database information and migration status\");\n console.log(\"\");\n console.log(\"For more info, run any command with the `--help` flag:\");\n console.log(\" fragno-cli db generate --help\");\n console.log(\" fragno-cli db migrate --help\");\n console.log(\" fragno-cli db info --help\");\n console.log(\"\");\n console.log(\"OPTIONS:\");\n console.log(\" -h, --help Display this help message\");\n console.log(\" -v, --version Display this version\");\n } else if (subCommandName === \"--version\" || subCommandName === \"-v\") {\n console.log(version);\n } else {\n // Route to specific db subcommand\n const subCommand = dbSubCommands.get(subCommandName);\n\n if (!subCommand) {\n console.error(`Unknown command: ${subCommandName}`);\n console.log(\"\");\n console.log(\"Run 'fragno-cli db --help' for available commands.\");\n process.exit(1);\n }\n\n // Run the subcommand\n await cli(args.slice(2), subCommand, {\n name: `fragno-cli db ${subCommandName}`,\n version,\n });\n }\n } else if (!args.length || args[0] === \"--help\" || args[0] === \"-h\") {\n // Show main help\n console.log(\"Tools for working with Fragno\");\n console.log(\"\");\n 
console.log(\"USAGE:\");\n console.log(\" fragno-cli <COMMAND>\");\n console.log(\"\");\n console.log(\"COMMANDS:\");\n console.log(\" db Database management commands\");\n console.log(\" search Search the Fragno documentation\");\n console.log(\" corpus View code examples and documentation for Fragno\");\n console.log(\"\");\n console.log(\"For more info, run any command with the `--help` flag:\");\n console.log(\" fragno-cli db --help\");\n console.log(\" fragno-cli search --help\");\n console.log(\" fragno-cli corpus --help\");\n console.log(\"\");\n console.log(\"OPTIONS:\");\n console.log(\" -h, --help Display this help message\");\n console.log(\" -v, --version Display this version\");\n } else if (args[0] === \"--version\" || args[0] === \"-v\") {\n console.log(version);\n } else {\n // Unknown command\n console.error(`Unknown command: ${args[0]}`);\n console.log(\"\");\n console.log(\"Run 'fragno-cli --help' for available commands.\");\n process.exit(1);\n }\n } catch (error) {\n console.error(error);\n process.exit(1);\n }\n}\n\nif (import.meta.main) {\n await run();\n}\n\nexport { generateCommand, migrateCommand, infoCommand, searchCommand, corpusCommand };\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAQA,eAAe,WAAW,MAAgC;AACxD,KAAI;AACF,QAAM,OAAO,MAAM,UAAU,KAAK;AAClC,SAAO;SACD;AACN,SAAO;;;;;;AAOX,eAAe,aAAa,WAA2C;CACrE,IAAI,aAAa,QAAQ,UAAU;CACnC,MAAM,OAAO,QAAQ,IAAI;AAEzB,QAAO,eAAe,MAAM;EAC1B,MAAM,eAAe,KAAK,YAAY,gBAAgB;AACtD,MAAI,MAAM,WAAW,aAAa,CAChC,QAAO;AAET,eAAa,QAAQ,WAAW;;AAGlC,QAAO;;;;;AAMT,SAAgB,kBAAkB,OAAuB;CAEvD,IAAI,SAAS,MAAM,QAAQ,eAAe,GAAG;AAG7C,UAAS,OAAO,QAAQ,qBAAqB,GAAG;AAEhD,QAAO;;;;;;AAOT,SAAgB,gCACd,eACA,iBACwB;AACxB,QAAO,OAAO,YACZ,OAAO,QAAQ,cAAc,CAAC,KAAK,CAAC,QAAQ,WAAW;EACrD,MAAM,aAAa;AAInB,SAAO,CAFU,OAAO,SAAS,IAAI,GAAG,OAAO,MAAM,GAAG,GAAG,GAAG,QAE5C,QAAQ,iBADR,WAAW,GAAG,SAAS,IAAI,GAAG,WAAW,GAAG,MAAM,GAAG,GAAG,GAAG,WAAW,GACnC,CAAC;GACtD,CACH;;;;;AAMH,eAAe,uBAAuB,YAAqD;CACzF,MAAM,eAAe,MAAM,aAAa,WAAW;AAEnD,KAAI,CAAC,aACH,QAAO,EAAE;AAGX,KAAI;EAGF,MAAM,cAAc,kBAFI,MAAM,SAAS,cAAc,QAAQ,CAEP;EACtD,MAAM,WAAW,KAAK,MAAM,YAAY;EACxC,MAAM,gBAAgB,UAAU,iBAAiB;AAEjD,MAAI,CAAC,iBAAiB,OAAO,kBAAkB,SAC7C,QAAO,EAAE;AAQX,SAAO,gCAAgC,eAHf,QAFJ,QAAQ,aAAa,EACzB,UAAU,iBAAiB,WAAW,IACD,CAGiB;UAC/D,OAAO;AACd,UAAQ,KAAK,wCAAwC,aAAa,IAAI,MAAM;AAC5E,SAAO,EAAE;;;;;;AAOb,eAAsBA,aAAW,MAAgD;CAG/E,MAAM,EAAE,WAAW,MAAMC,WAAc;EACrC,YAAY;EACZ,aAAa,EACX,OALU,MAAM,uBAAuB,KAAK,EAM7C;EACF,CAAC;AAEF,QAAO;;;;;ACtGT,eAAsB,mBAAmB,MAAgD;AAEvF,SAAQ,IAAI,yBAAyB;AAErC,KAAI;EAGF,MAAM,YAAY,oBAFH,MAAMC,aAAW,KAAK,CAEQ;EAC7C,MAAM,eAAe,UAAU,KAC5B,OACC,GAAG,GAAG,QAAQ,qCAAqC,GAAG,GAAG,QAAQ,0CACpE;AAGD,MAF2B,CAAC,GAAG,IAAI,IAAI,aAAa,CAAC,CAE9B,SAAS,EAC9B,OAAM,IAAI,MACR,qFACsB,aAAa,KAAK,KAAK,CAAC,GAC/C;AAGH,SAAO;GACL,SAAS,UAAU,GAAG;GACtB;GACD;WACO;AAER,SAAO,QAAQ,IAAI;;;;;;;AAQvB,eAAsB,oBAAoB,OAGvC;CAED,MAAM,cAAc,MAAM,KAAK,IAAI,IAAI,MAAM,CAAC;AAE9C,KAAI,YAAY,WAAW,EACzB,OAAM,IAAI,MAAM,6BAA6B;CAG/C,MAAMC,eAA4C,EAAE;CACpD,IAAIC;CACJ,IAAIC;CACJ,MAAM,MAAM,QAAQ,KAAK;AAEzB,MAAK,MAAM,QAAQ,aAAa;EAC9B,MAAM,eAAe,SAAS,KAAK,KAAK;AAExC,MAAI;GACF,MAAM,SAAS,MAAM,mBAAmB,KAAK;GAC7C,MAAM,YAAY,OAAO;GACzB,MAAM,cAAc,OAAO;AAE3B,OAAI,UAAU,WAAW,GAAG;AAC1B,YAAQ,KACN,iDAAiD,aAAa,gKAI/D;AACD;;AAIF,OAAI,CAAC,SAAS;AACZ,cAAU;AACV,uBAAmB;;GAIrB,MAAM,mBAAmB,QAAQ;GACjC,MAAM,sBAAsB,QAAQ;GACpC,MAAM,kBAAkB,YAAY;GACpC,MAAM,qBAAqB,YAAY;AAEvC,OAAI,qBAAqB,mBAAmB,wBAAwB,oBAAoB;IACtF,MAAM,mBAAmB,GAAG,iBAAiB,GAAG;IAChD,MAAM,kBAAkB,GAAG,gBAAgB,GAAG;AAE9C,UAAM,IAAI,MACR,gFACS,iBAAiB,IAAI,iBAAiB,QACtC,aAAa,IAAI,gBAAgB,oEAE3C;;AAGH,gBAAa,KAAK,GAAG,UAAU;AAC/B,WAAQ,IAAI,WAAW,UAAU,OAAO,kB
AAkB,eAAe;WAClE,OAAO;AACd,SAAM,IAAI,MACR,kCAAkC,aAAa,IAAI,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC1G;;;AAIL,KAAI,aAAa,WAAW,EAC1B,OAAM,IAAI,MACR,oOAID;AAGH,KAAI,CAAC,QACH,OAAM,IAAI,MAAM,gDAAgD;AAGlE,QAAO;EACL;EACA,WAAW;EACZ;;AAGH,SAAS,gCACP,OASA;AACA,QACE,OAAO,UAAU,YACjB,UAAU,QACV,kCAAkC,SAClC,MAAM,oCAAoC;;;;;;AAQ9C,SAAgB,oBACd,cAC6B;CAC7B,MAAMC,kBAA+C,EAAE;AAEvD,MAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,aAAa,CACtD,KAAI,iBAAiB,MAAM,CACzB,iBAAgB,KAAK,MAAM;UAClB,gCAAgC,MAAM,EAAE;EAEjD,MAAM,WAAW,MAAM;EACvB,MAAM,OAAO,SAAS;EACtB,MAAM,UAAU,SAAS;AAGzB,MAAI,CAAC,KAAK,SAAS,CAAC,KAAK,UACvB;EAGF,MAAM,SAAS,KAAK;EACpB,MAAM,YAAY,KAAK;EACvB,MAAM,kBAAkB,QAAQ;AAEhC,MAAI,CAAC,iBAAiB;AACpB,WAAQ,KACN,sBAAsB,MAAM,KAAK,8EAClC;AACD;;AAGF,kBAAgB,KACd,IAAI,eAAe;GACjB;GACA;GACA,SAAS;GACV,CAAC,CACH;;AAIL,QAAO;;;;;AC5LT,MAAa,kBAAkB,OAAO;CACpC,MAAM;CACN,aAAa;CACb,MAAM;EACJ,QAAQ;GACN,MAAM;GACN,OAAO;GACP,aACE;GACH;EACD,MAAM;GACJ,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,IAAI;GACF,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,QAAQ;GACN,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACF;CACD,KAAK,OAAO,QAAQ;EAElB,MAAM,UAAU,IAAI;EACpB,MAAM,SAAS,IAAI,OAAO;EAC1B,MAAM,YAAY,IAAI,OAAO;EAC7B,MAAM,cAAc,IAAI,OAAO;EAC/B,MAAM,SAAS,IAAI,OAAO;EAM1B,MAAM,EAAE,WAAW,oBAAoB,YAAY,MAAM,oBAHrC,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGc;AAGzF,MAAI,CAAC,QAAQ,yBAAyB,CAAC,QAAQ,kBAC7C,OAAM,IAAI,MACR,2IAED;AAIH,UAAQ,IAAI,uBAAuB;EAEnC,IAAIC;AACJ,MAAI;AACF,aAAU,MAAM,2BAA2B,oBAAoB;IAC7D,MAAM;IACN;IACA;IACD,CAAC;WACK,OAAO;AACd,SAAM,IAAI,MACR,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACrF;;AAIH,OAAK,MAAM,UAAU,SAAS;GAG5B,MAAM,kBACJ,UAAU,QAAQ,WAAW,IACzB,QAAQ,QAAQ,KAAK,EAAE,OAAO,GAC9B,SACE,QAAQ,QAAQ,KAAK,EAAE,QAAQ,OAAO,KAAK,GAC3C,QAAQ,QAAQ,KAAK,EAAE,OAAO,KAAK;GAG3C,MAAM,YAAY,QAAQ,gBAAgB;AAC1C,OAAI;AACF,UAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;YACpC,OAAO;AACd,UAAM,IAAI,MACR,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACtF;;AAIH,OAAI;AAEF,UAAM,UAAU,iBADA,SAAS,GAAG,OAAO,IAAI,OAAO,WAAW,OAAO,QACtB,EAAE,UAAU,SAAS,CAAC;YACzD,OAAO;AACd,UAAM,IAAI,MACR,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACvF;;AAGH,WAAQ,IAAI,gBAAgB,kBAAkB;;AAGhD,UAAQ,IAAI,qCAAqC;AACjD,UAAQ,IAAI,sBAAsB,QAAQ,SAAS;AACnD,UAAQ,IAAI,eAAe;AAC3B,OAAK,MAAM,MAAM,mBACf,SAAQ,IAAI,SAAS,GAAG,UAAU,YAAY,GAAG,OAAO,QAAQ,GAAG;;CAGxE,CAAC;;;;AC3GF,MAAa,iBAAiB,OAAO;CACnC,MAAM;CACN,aAAa;CACb,MAAM,EAAE;CACR,KAAK,OAAO,QAAQ;EAClB,MAAM,UAAU,IAAI;AAEpB,MAAI,QAAQ,WAAW,EACrB,OAAM,IAAI,MAAM,4CAA4C;EAO9D,MAAM,EAAE,WAAW,uBAAuB,MAAM,oBAH5B,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGK;AAEhF,UAAQ,IAAI,0DAA0D;EAEtE,IAAIC;AACJ,MAAI;AACF,aAAU,MAAM,kBAAkB,mBAAmB;WAC9C,OAAO;AACd,SAAM,IAAI,MACR,qBAAqB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC5E;;AAIH,OAAK,MAAM,UAAU,SAAS;AAC5B,WAAQ,IAAI,aAAa,OAAO,YAAY;AAC5C,WAAQ,IAAI,sBAAsB,OAAO,cAAc;AACvD,WAAQ,IAAI,qBAAqB,OAAO,YAAY;AAEpD,OAAI,OAAO,WACT,SAAQ,IAAI,6BAA6B,OAAO,YAAY,MAAM,OAAO,UAAU,IAAI;OAEvF,SAAQ,IAAI,wDAAwD;;AAKxE,UAAQ,IAAI,0CAA0C;AACtD,UAAQ,IAAI,oBAAoB;AAChC,UAAQ,IAAI,0CAA0C;EAEtD,MAAM,WAAW,QAAQ,QAAQ,MAAM,EAAE,WAAW;EACpD,MAAM,UAAU,QAAQ,QAAQ,MAAM,CAAC,EAAE,WAAW;AAEpD,MAAI,SAAS,SAAS,GAAG;AACvB,WAAQ,IAAI,gBAAgB,SAAS,OAAO,eAAe;AAC3D,QAAK,MAAM,KAAK,SACd,SAAQ,IAAI,OAAO,EAAE,UAAU,KAAK,EAAE,YAAY,MAAM,EAAE,YAAY;;AAI1E,MAAI,QAAQ,SAAS,GAAG;AACtB,WAAQ,IAAI,eAAe,QAAQ,OAAO,oCAAoC;AAC9E,QAAK,MAAM,KAAK,QACd,SAAQ,IAAI,OAAO,EAAE,UAAU,KAAK,EAAE,YAAY;;AAItD,OAAK,MAAM,MAAM,mBACf,OAAM,GAAG,QAAQ,OAAO;AAG1B,UAAQ,IAAI,4CAA4C;;CAE3D,CAAC;;;;ACtEF,MAAa,cAAc,OAAO;CAChC,MAAM;CACN,aAAa;CACb,MAAM,EAAE;CACR,KAAK,OAAO,QAAQ;EAClB,MAAM,UAAU,IAAI;AAEpB,MAAI,QAAQ,WAAW,EACrB,OAAM,IAAI,MAAM,4CAA4C;EAO9D,MAAM,EAAE,WAAW,uBAAuB,MAAM,oBAH5B,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CA
AC,CAGK;EAGhF,MAAM,UAAU,MAAM,QAAQ,IAC5B,mBAAmB,IAAI,OAAO,aAAa;GACzC,MAAMC,OAOF;IACF,WAAW,SAAS;IACpB,eAAe,SAAS,OAAO;IAC/B,kBAAkB,CAAC,CAAC,SAAS,QAAQ;IACtC;AAGD,OAAI,SAAS,QAAQ,mBAAmB;AAEtC,SAAK,iBADkB,MAAM,SAAS,QAAQ,iBAAiB,SAAS,UAAU;AAIlF,QAAI,KAAK,cAAc,UAAU,KAAK,KAAK,eACzC,MAAK,SAAS;QAEd,MAAK,SAAS;SAGhB,MAAK,SAAS;AAGhB,UAAO;IACP,CACH;EAGD,MAAM,sBAAsB,QAAQ,MAAM,SAAS,KAAK,iBAAiB;AAGzE,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,wBAAwB;AACpC,UAAQ,IAAI,GAAG;EAGf,MAAM,kBAAkB;EACxB,MAAM,gBAAgB;EACtB,MAAM,gBAAgB;EACtB,MAAM,eAAe;EAErB,MAAM,kBAAkB,KAAK,IAC3B,GACA,GAAG,QAAQ,KAAK,SAAS,KAAK,UAAU,OAAO,CAChD;EACD,MAAM,iBAAiB,KAAK,IAAI,kBAAkB,GAAG,GAAG;EACxD,MAAM,eAAe;EACrB,MAAM,eAAe;EACrB,MAAM,cAAc;AAGpB,UAAQ,IACN,gBAAgB,OAAO,eAAe,GACpC,cAAc,OAAO,aAAa,IACjC,sBAAsB,cAAc,OAAO,aAAa,GAAG,MAC5D,aACH;AACD,UAAQ,IACN,IAAI,OAAO,eAAe,GACxB,IAAI,OAAO,aAAa,IACvB,sBAAsB,IAAI,OAAO,aAAa,GAAG,MAClD,IAAI,OAAO,YAAY,CAC1B;AAED,OAAK,MAAM,QAAQ,SAAS;GAC1B,MAAM,oBACJ,KAAK,mBAAmB,SAAY,OAAO,KAAK,eAAe,GAAG;AACpE,WAAQ,IACN,KAAK,UAAU,OAAO,eAAe,GACnC,OAAO,KAAK,cAAc,CAAC,OAAO,aAAa,IAC9C,sBAAsB,kBAAkB,OAAO,aAAa,GAAG,OAC/D,KAAK,UAAU,KACnB;;AAIH,UAAQ,IAAI,GAAG;AACf,MAAI,CAAC,qBAAqB;AACxB,WAAQ,IAAI,kDAAkD;AAC9D,WAAQ,IAAI,yDAAyD;QAErE,SAAQ,IAAI,oEAAoE;;CAGrF,CAAC;;;;;;;ACnFF,SAAgB,kBAAkB,SAAyB,SAAiC;CAC1F,MAAM,4BAAY,IAAI,KAA2B;AAEjD,MAAK,MAAM,UAAU,SAAS;EAE5B,MAAM,qBAAqB,OAAO,IAAI,MAAM,IAAI,CAAC;EACjD,MAAM,WAAW,UAAU,IAAI,mBAAmB;AAElD,MAAI,SAEF,UAAS,SAAS,KAAK;GACrB,SAAS,OAAO;GAChB,MAAM,OAAO;GACd,CAAC;OACG;GAEL,MAAM,YAAY,GAAG,mBAAmB;GAExC,MAAM,UAAU,WAAW,UAAU;GACrC,MAAM,gBAAgB,WAAW,UAAU;AAE3C,aAAU,IAAI,oBAAoB;IAChC,KAAK;IACL;IACA;IACA;IACA,OAAO,OAAO,SAAS,SAAS,OAAO,UAAU;IACjD,aAAa,OAAO;IACpB,MAAM,OAAO;IACb,UAAU,CACR;KACE,SAAS,OAAO;KAChB,MAAM,OAAO;KACd,CACF;IACF,CAAC;;;AAIN,QAAO,MAAM,KAAK,UAAU,QAAQ,CAAC;;;;;AAMvC,SAAgB,iBAAiB,eAAuC;CACtE,MAAMC,QAAkB,EAAE;AAE1B,MAAK,MAAM,UAAU,eAAe;EAElC,MAAM,QAAQ,OAAO,SAAS,OAAO,SAAS,IAAI,WAAW;AAC7D,QAAM,KAAK,aAAa,MAAM,GAAG;AAEjC,MAAI,OAAO,eAAe,OAAO,YAAY,SAAS,GAAG;AACvD,SAAM,KAAK,QAAQ,OAAO,YAAY,KAAK,MAAM,CAAC;AAClD,SAAM,KAAK,GAAG;;AAIhB,QAAM,KAAK,QAAQ;AACnB,QAAM,KAAK,OAAO,OAAO,UAAU;AACnC,QAAM,KAAK,OAAO,OAAO,gBAAgB;AACzC,QAAM,KAAK,GAAG;AAGd,MAAI,OAAO,SAAS,SAAS,GAAG;AAC9B,SAAM,KAAK,qBAAqB;AAChC,QAAK,IAAI,IAAI,GAAG,IAAI,OAAO,SAAS,QAAQ,KAAK;IAC/C,MAAM,UAAU,OAAO,SAAS;AAEhC,QAAI,MAAM,KAAK,OAAO,SAAS,UAAU,QAAQ,YAAY,OAAO,MAClE;AAEF,UAAM,KAAK,OAAO,QAAQ,UAAU;;AAEtC,SAAM,KAAK,GAAG;;AAGhB,QAAM,KAAK,MAAM;AACjB,QAAM,KAAK,GAAG;;AAGhB,QAAO,MAAM,KAAK,KAAK;;;;;AAMzB,SAAgB,aAAa,eAAuC;AAClE,QAAO,KAAK,UAAU,eAAe,MAAM,EAAE;;;;;ACnG/C,MAAa,gBAAgB,OAAO;CAClC,MAAM;CACN,aAAa;CACb,MAAM;EACJ,OAAO;GACL,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,MAAM;GACJ,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,UAAU;GACR,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,YAAY;GACV,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACF;CACD,KAAK,OAAO,QAAQ;EAClB,MAAM,QAAQ,IAAI,YAAY,KAAK,IAAI;AAEvC,MAAI,CAAC,SAAS,MAAM,MAAM,CAAC,WAAW,EACpC,OAAM,IAAI,MAAM,gCAAgC;EAIlD,MAAM,WAAW,IAAI,OAAO;EAC5B,MAAM,UAAU,IAAI,OAAO;AAE3B,MAAI,CAAC,SACH,SAAQ,IAAI,mBAAmB,MAAM,KAAK;AAG5C,MAAI;GAEF,MAAM,eAAe,mBAAmB,MAAM;GAC9C,MAAM,WAAW,MAAM,MAAM,WAAW,QAAQ,oBAAoB,eAAe;AAEnF,OAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MAAM,kCAAkC,SAAS,SAAS;GAGtE,MAAM,UAAW,MAAM,SAAS,MAAM;GAGtC,MAAM,QAAQ,IAAI,OAAO;GACzB,MAAM,iBAAiB,QAAQ,MAAM,GAAG,MAAM;AAE9C,OAAI,eAAe,WAAW,GAAG;AAC/B,QAAI,SACF,SAAQ,IAAI,KAAK;QAEjB,SAAQ,IAAI,oBAAoB;AAElC;;GAIF,MAAM,gBAAgB,kBAAkB,gBAAgB,QAAQ;AAGhE,OAAI,SACF,SAAQ,IAAI,aAAa,cAAc,CAAC;QACnC;AAEL,YAAQ,IACN,SAAS,QAAQ,OAAO,SAAS,QAAQ,WAAW,IAAI,KAAK,MAAM,QAAQ,SAAS,QAAQ,aAAa,MAAM,KAAK,GAAG,IACxH;AACD,YAAQ,IAAI,iBAAiB,cAAc,CAAC;;WAEvC,OAAO;AACd,OAAI,iBAAiB,MACnB,OA
AM,IAAI,MAAM,kBAAkB,MAAM,UAAU;AAEpD,SAAM,IAAI,MAAM,2CAA2C;;;CAGhE,CAAC;;;;ACtFF,OAAO,IAAI,gBAAgB,CAAC;;;;AAY5B,SAAgB,sBAAsB,UAA6B;CACjE,IAAI,eAAe;AAEnB,MAAK,MAAM,WAAW,UAAU;AAC9B,kBAAgB,KAAK,QAAQ,MAAM;AAEnC,MAAI,QAAQ,YACV,iBAAgB,GAAG,QAAQ,YAAY;AAIzC,MAAI,QAAQ,QACV,iBAAgB,oCAAoC,QAAQ,QAAQ;AAItE,MAAI,QAAQ,QAAQ,SAAS,GAAG;AAC9B,mBAAgB;AAChB,QAAK,MAAM,SAAS,QAAQ,QAE1B,iBAAgB,qBAAqB,MAAM,KAAK;;AAKpD,OAAK,MAAM,WAAW,QAAQ,SAC5B,iBAAgB,MAAM,QAAQ,QAAQ,MAAM,QAAQ,QAAQ;;AAIhE,QAAO;;;;;AAMT,SAAgB,eAAe,SAAiB,YAAoB,GAAW;CAC7E,MAAM,QAAQ,QAAQ,MAAM,KAAK;CACjC,MAAM,YAAY,OAAO,YAAY,MAAM,SAAS,EAAE,CAAC;AAEvD,QAAO,MACJ,KAAK,MAAM,UAAU;EACpB,MAAM,UAAU,YAAY;AAE5B,SAAO,GADW,OAAO,QAAQ,CAAC,SAAS,WAAW,IAAI,CACtC,IAAI;GACxB,CACD,KAAK,KAAK;;;;;AAMf,SAAgB,kBAAkB,SAAiB,WAAmB,SAAyB;CAC7F,MAAM,QAAQ,QAAQ,MAAM,KAAK;CAEjC,MAAM,QAAQ,KAAK,IAAI,GAAG,YAAY,EAAE;CACxC,MAAM,MAAM,KAAK,IAAI,MAAM,QAAQ,QAAQ;AAC3C,QAAO,MAAM,MAAM,OAAO,IAAI,CAAC,KAAK,KAAK;;;;;AAM3C,SAAgB,yBAAyB,UAA6B;CACpE,IAAI,SAAS;CACb,IAAI,cAAc;CAClB,IAAI,iBAAiB;CAGrB,MAAM,uBAAuB;AAC3B,MAAI,iBAAiB,KAAK,cAAc,iBAAiB,EACvD,WAAU;;AAKd,WAAU;AAEV,MAAK,MAAM,WAAW,UAAU;AAE9B,kBAAgB;AAChB,YAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,QAAQ,MAAM;AACzE,mBAAiB;AACjB,iBAAe;AAGf,YAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,mBAAiB;AACjB,iBAAe;AAGf,MAAI,QAAQ,aAAa;GACvB,MAAM,YAAY,QAAQ,YAAY,MAAM,KAAK;AACjD,QAAK,MAAM,QAAQ,WAAW;AAC5B,cAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,KAAK;AAC9D,qBAAiB;AACjB,mBAAe;;AAGjB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;;AAIjB,MAAI,QAAQ,SAAS;AACnB,mBAAgB;AAChB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AAEf,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AACf,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;GACf,MAAM,cAAc,QAAQ,QAAQ,MAAM,KAAK;AAC/C,QAAK,MAAM,QAAQ,aAAa;AAC9B,cAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,KAAK;AAC9D,qBAAiB;AACjB,mBAAe;;AAEjB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AAEf,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;;AAIjB,MAAI,QAAQ,QAAQ,SAAS,GAAG;AAC9B,mBAAgB;AAChB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AAEf,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC;AACrD,oBAAiB;AACjB,kBAAe;AAEf,QAAK,MAAM,SAAS,QAAQ,SAAS;IACnC,MAAM,KAAK,MAAM,MAAM;IACvB,MAAM,iBAAiB,cAAc;IACrC,MAAM,YAAY,MAAM,KAAK,MAAM,KAAK,CAAC;IACzC,MAAM,eAAe,cAAc,IAAI;AACvC,cAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,cAAc,GAAG,OAAO,eAAe,GAAG,aAAa;AAC5G,qBAAiB;AACjB,mBAAe,YAAY;;AAG7B,oBAAiB,cAAc;;EAIjC,MAAM,oCAAoB,IAAI,KAAwB;AAGtD,OAAK,MAAM,WAAW,QAAQ,SAG5B,MAAK,MAAM,WAAW,QAAQ,SAE5B,KACE,QAAQ,QAAQ,SAAS,QAAQ,KAAK,UAAU,GAAG,KAAK,IAAI,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC,EACtF;AACA,OAAI,CAAC,kBAAkB,IAAI,QAAQ,QAAQ,CACzC,mBAAkB,IAAI,QAAQ,SAAS,EAAE,CAAC;AAE5C,qBAAkB,IAAI,QAAQ,QAAQ,CAAE,KAAK,QAAQ;AACrD;;AAKN,OAAK,MAAM,WAAW,QAAQ,UAAU;AACtC,mBAAgB;AAChB,aAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,OAAO,QAAQ,QAAQ;AAC5E,oBAAiB;AACjB,kBAAe;GAGf,MAAM,WAAW,kBAAkB,IAAI,QAAQ,QAAQ,IAAI,EAAE;AAC7D,OAAI,SAAS,SAAS,GAAG;IAEvB,MAAM,mBAAmB;IACzB,MAAM,QAAQ,QAAQ,QAAQ,MAAM,KAAK;AAEzC,SAAK,MAAM,WAAW,UAAU;KAC9B,MAAM,KAAK,QAAQ,MAAM;KAEzB,IAAI,iBAAiB;KACrB,IAAI,eAAe;KAEnB,IAAI,aAAa;AAEjB,UAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAEhC,KADa,MAAM,GACV,MAAM,CAAC,WAAW,MAAM,IAAI,MAAc;MAEjD,MAAM,YAAY,IAAI;MACtB,IAAI,UAAU;MACd,MAAM,eAAe,QAAQ,KAAK,MAAM,KAAK;AAC7C,WAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,GAAG,aAAa,OAAO,EAAE,IACpD,KAAI,MAAM,YAAY,IAAI,MAAM,KAAK,aAAa,IAAI,MAAM,EAAE;AAC5D,iBAAU;AACV;;AAGJ,UAAI,SAAS;AACX,wBAAiB,mBAAmB,IAAI;AACxC,sB
AAe,mBAAmB,IAAI,aAAa;AACnD,oBAAa;AACb;;;AAKN,SAAI,WACF,WAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,cAAc,GAAG,OAAO,eAAe,GAAG,aAAa;SAE5G,WAAU,GAAG,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,cAAc,GAAG;AAExE,sBAAiB;;;GAKrB,MAAM,eAAe,QAAQ,QAAQ,MAAM,KAAK;AAChD,QAAK,MAAM,SAAS,aAClB,gBAAe;AAEjB,kBAAe;AAEf,oBAAiB,cAAc;;;AAInC,QAAO;;;;;AAMT,eAAe,cAAc,UAAqB,SAAsC;AACtF,KAAI,QAAQ,cAAc;EAExB,MAAM,iBAAiB,yBAAyB,SAAS;AACzD,UAAQ,IAAI,eAAe;AAC3B;;CAIF,MAAM,WAAW,sBAAsB,SAAS;CAGhD,IAAI,SAAS,MAAM,OAAO,MAAM,SAAS;CAGzC,MAAM,YAAY,QAAQ,aAAa;AACvC,KAAI,QAAQ,cAAc,UAAa,QAAQ,YAAY,QAAW;EACpE,MAAM,MAAM,QAAQ,WAAW,OAAO,MAAM,KAAK,CAAC;AAClD,WAAS,kBAAkB,QAAQ,WAAW,IAAI;;AAKpD,KAAI,QAAQ,gBACV,UAAS,eAAe,QAAQ,UAAU;AAG5C,SAAQ,IAAI,OAAO;;;;;AAMrB,eAAe,mBACb,IACA,QACA,iBACe;CAEf,MAAM,WAAW,OAAO,SAAS,IAAI,WAAW,GAAG,OAAO,GAAG,gBAAgB;CAY7E,MAAMC,UAA4B,EAAE;AAEpC,MAAK,MAAM,WAAW,UAAU;EAE9B,MAAM,eAAe,sBAAsB,CAAC,QAAQ,CAAC;EAErD,MAAM,iBADiB,MAAM,OAAO,MAAM,aAAa,EAClB,MAAM,KAAK;AAGhD,OAAK,MAAM,SAAS,QAAQ,QAC1B,KAAI,MAAM,OAAO,IAAI;GAEnB,IAAIC;GACJ,IAAIC;GAGJ,MAAM,YAAY,MAAM,KAAK,MAAM,KAAK;GACxC,MAAM,gBAAgB,UAAU,GAAG,MAAM;AAEzC,QAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,IAExC,KAAI,yBAAyB,cAAc,GAAG,CAAC,MAAM,KAAK,eAAe;AAEvE,gBAAY,IAAI;AAChB,cAAU,IAAI,UAAU;AACxB;;AAIJ,WAAQ,KAAK;IACX,WAAW,QAAQ;IACnB,cAAc,QAAQ;IACtB,SAAS;IACT,MAAM,MAAM;IACZ,MAAM;IACN;IACA;IACD,CAAC;;AAKN,OAAK,MAAM,WAAW,QAAQ,SAC5B,KAAI,QAAQ,OAAO,IAAI;GAErB,IAAI,cAAc;GAClB,IAAID;GACJ,IAAIC;AAEJ,QAAK,MAAM,WAAW,QAAQ,SAC5B,KACE,QAAQ,QAAQ,SAAS,QAAQ,KAAK,UAAU,GAAG,KAAK,IAAI,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC,EACtF;AACA,kBAAc,QAAQ;IAGtB,MAAM,YAAY,QAAQ,KAAK,MAAM,KAAK;IAC1C,MAAM,gBAAgB,UAAU,GAAG,MAAM;AAEzC,SAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,IAExC,KAAI,yBAAyB,cAAc,GAAG,CAAC,MAAM,KAAK,eAAe;AAEvE,iBAAY,IAAI;AAChB,eAAU,IAAI,UAAU;AACxB;;AAGJ;;AAIJ,WAAQ,KAAK;IACX,WAAW,QAAQ;IACnB,cAAc,QAAQ;IACtB,SAAS;IACT,MAAM,QAAQ;IACd,MAAM;IACN;IACA;IACD,CAAC;;;AAKR,KAAI,QAAQ,WAAW,GAAG;AACxB,UAAQ,MAAM,uCAAuC,GAAG,GAAG;AAC3D,MAAI,OAAO,SAAS,EAClB,SAAQ,MAAM,uBAAuB,OAAO,KAAK,KAAK,GAAG;MAEzD,SAAQ,MAAM,mCAAmC;AAEnD,UAAQ,KAAK,EAAE;;AAIjB,MAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;EACvC,MAAM,QAAQ,QAAQ;AAEtB,MAAI,QAAQ,SAAS,KAAK,IAAI,EAC5B,SAAQ,IAAI,UAAU;EAIxB,IAAI,gBAAgB,KAAK,MAAM,aAAa;AAC5C,mBAAiB,MAAM,MAAM,QAAQ;AAGrC,MAAI,mBAAmB,MAAM,aAAa,MAAM,QAC9C,SAAQ,IAAI,SAAS,MAAM,UAAU,GAAG,MAAM,QAAQ,6BAA6B;AAGrF,mBAAiB,qBAAqB,MAAM,KAAK;EAGjD,MAAM,WAAW,MAAM,OAAO,MAAM,cAAc;AAClD,UAAQ,IAAI,SAAS;;;;;;AAOzB,SAAS,iBAAuB;CAC9B,MAAM,WAAW,aAAa;CAC9B,MAAM,aAAa,IAAI,IAAI,SAAS,KAAK,MAAM,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC;CAG1D,SAAS,SAAS,WAA2B;AAC3C,MAAI,WAAW,UAAU,CACvB,QAAO,iBAAiB,UAAU;EAEpC,MAAM,UAAU,WAAW,IAAI,UAAU;AACzC,SAAO,UAAU,QAAQ,QAAQ;;CAInC,SAAS,YAAY,WAAmB,QAAgB,QAAiB,QAAuB;EAC9F,MAAM,QAAQ,SAAS,UAAU;AAEjC,MAAI,OACF,SAAQ,IAAI,KAAK,UAAU,OAAO,GAAG,CAAC,GAAG,QAAQ;OAC5C;GACL,MAAM,YAAY,SAAS,OAAO;AAClC,WAAQ,IAAI,GAAG,SAAS,UAAU,GAAG,UAAU,OAAO,GAAG,CAAC,GAAG,QAAQ;;EAGvE,MAAM,WAAW,mBAAmB,UAAU;AAC9C,MAAI,SAAS,SAAS,GAAG;GACvB,MAAM,cAAc,SAAS,SAAS,UAAU,SAAS,QAAQ;AACjE,QAAK,IAAI,IAAI,GAAG,IAAI,SAAS,QAAQ,IACnC,aAAY,SAAS,IAAI,aAAa,MAAM,SAAS,SAAS,GAAG,MAAM;;;CAO7E,MAAM,UADS,yBAAyB,CACjB,QAAQ,OAAO,CAAC,iBAAiB,GAAG,CAAC;AAG5D,MAAK,MAAM,aAAa,QACtB,aAAY,WAAW,IAAI,OAAO,KAAK;;;;;AAO3C,SAAS,kBAAwB;AAC/B,SAAQ,IAAI,uEAAuE;AACnF,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,gDAAgD;AAC5D,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,WAAW;AACvB,SAAQ,IAAI,oEAAoE;AAChF,SAAQ,IAAI,oEAAoE;AAChF,SAAQ,IAAI,gEAAgE;AAC5E,SAAQ,IAAI,qEAAqE;AACjF,SAAQ,IAAI,oEAAoE;AAChF,SAAQ,IAAI,wDAAwD;AACpE,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,YAAY;AACxB,SAAQ,IAAI,4EAA4E;AACxF,SAAQ,IAAI,2EAA2E;AACvF,SAAQ,IAAI,iFAAiF;AAC7F,SAAQ,IAAI,mDAAmD;AAC/D,SAAQ,IAAI,0E
AA0E;AACtF,SAAQ,IAAI,4DAA4D;AACxE,SAAQ,IAAI,sEAAsE;AAClF,SAAQ,IAAI,uEAAuE;AACnF,SAAQ,IAAI,uDAAuD;AACnE,SAAQ,IAAI,uEAAuE;AACnF,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,oBAAoB;AAEhC,iBAAgB;;AAGlB,MAAa,gBAAgB,OAAO;CAClC,MAAM;CACN,aAAa;CACb,MAAM;EACJ,mBAAmB;GACjB,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,OAAO;GACL,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,KAAK;GACH,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,UAAU;GACR,MAAM;GACN,aAAa;GACd;EACD,IAAI;GACF,MAAM;GACN,aAAa;GACd;EACD,MAAM;GACJ,MAAM;GACN,aAAa;GACd;EACF;CACD,KAAK,OAAO,QAAQ;EAClB,MAAM,SAAS,IAAI;EACnB,MAAM,kBAAkB,EAAE,IAAI,OAAO,sBAAsB;EAC3D,MAAM,YAAY,IAAI,OAAO;EAC7B,MAAM,UAAU,IAAI,OAAO;EAC3B,MAAM,eAAe,IAAI,OAAO,YAAY;EAC5C,MAAM,cAAc,IAAI,OAAO;EAC/B,MAAM,WAAW,IAAI,OAAO,QAAQ;AAGpC,MAAI,aAAa;AACf,SAAM,mBAAmB,aAAa,QAAQ,gBAAgB;AAC9D;;AAIF,MAAI,UAAU;AACZ,mBAAgB;AAChB;;AAIF,MAAI,OAAO,WAAW,GAAG;AACvB,oBAAiB;AACjB;;AAIF,MAAI,cAAc,UAAa,YAAY,UAAa,YAAY,SAAS;AAC3E,WAAQ,MAAM,qDAAqD;AACnE,WAAQ,KAAK,EAAE;;AAIjB,MAAI;AAGF,SAAM,cAFW,WAAW,GAAG,OAAO,EAER;IAC5B;IACA;IACA;IACA;IACD,CAAC;WACK,OAAO;AACd,OAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,SAAS,EAAE;IAE9D,MAAM,gBAAgB,OAAO,QAAQ,UAAU;AAC7C,SAAI;AACF,iBAAW,MAAM;AACjB,aAAO;aACD;AACN,aAAO;;MAET;AAEF,QAAI,cAAc,WAAW,EAC3B,SAAQ,MAAM,mBAAmB,cAAc,GAAG,cAAc;aACvD,cAAc,SAAS,EAChC,SAAQ,MACN,8BAA8B,cAAc,KAAK,MAAM,IAAI,EAAE,GAAG,CAAC,KAAK,KAAK,GAC5E;QAED,SAAQ,MAAM,yCAAyC;AAEzD,YAAQ,IAAI,sBAAsB;AAClC,oBAAgB;UACX;AACL,YAAQ,MAAM,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,MAAM;AACtF,YAAQ,IAAI,qDAAqD;;AAEnE,WAAQ,KAAK,EAAE;;;CAGpB,CAAC;;;;AC3mBF,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;AAEzD,MAAM,UADc,KAAK,MAAM,aAAa,KAAK,WAAW,kBAAkB,EAAE,QAAQ,CAAC,CAC7D;AAG5B,MAAM,gCAAgB,IAAI,KAAK;AAC/B,cAAc,IAAI,YAAY,gBAAgB;AAC9C,cAAc,IAAI,WAAW,eAAe;AAC5C,cAAc,IAAI,QAAQ,YAAY;AAGtC,MAAa,YAAY,OAAO;CAC9B,MAAM;CACN,aAAa;CACd,CAAC;AAGF,MAAa,cAAc,OAAO;CAChC,MAAM;CACN,aAAa;CACd,CAAC;AAEF,eAAsB,MAAM;AAC1B,KAAI;EACF,MAAM,OAAO,QAAQ,KAAK,MAAM,EAAE;AAGlC,MAAI,KAAK,OAAO,SAEd,OAAM,IAAI,KAAK,MAAM,EAAE,EAAE,eAAe;GACtC,MAAM;GACN;GACD,CAAC;WACO,KAAK,OAAO,SAErB,OAAM,IAAI,KAAK,MAAM,EAAE,EAAE,eAAe;GACtC,MAAM;GACN;GACD,CAAC;WACO,KAAK,OAAO,MAAM;GAE3B,MAAM,iBAAiB,KAAK;AAE5B,OAAI,CAAC,kBAAkB,mBAAmB,YAAY,mBAAmB,MAAM;AAE7E,YAAQ,IAAI,+BAA+B;AAC3C,YAAQ,IAAI,GAAG;AACf,YAAQ,IAAI,SAAS;AACrB,YAAQ,IAAI,4BAA4B;AACxC,YAAQ,IAAI,GAAG;AACf,YAAQ,IAAI,YAAY;AACxB,YAAQ,IACN,gFACD;AACD,YAAQ,IAAI,kDAAkD;AAC9D,YAAQ,IAAI,4EAA4E;AACxF,YAAQ,IAAI,GAAG;AACf,YAAQ,IAAI,yDAAyD;AACrE,YAAQ,IAAI,kCAAkC;AAC9C,YAAQ,IAAI,iCAAiC;AAC7C,YAAQ,IAAI,8BAA8B;AAC1C,YAAQ,IAAI,GAAG;AACf,YAAQ,IAAI,WAAW;AACvB,YAAQ,IAAI,qDAAqD;AACjE,YAAQ,IAAI,gDAAgD;cACnD,mBAAmB,eAAe,mBAAmB,KAC9D,SAAQ,IAAI,QAAQ;QACf;IAEL,MAAM,aAAa,cAAc,IAAI,eAAe;AAEpD,QAAI,CAAC,YAAY;AACf,aAAQ,MAAM,oBAAoB,iBAAiB;AACnD,aAAQ,IAAI,GAAG;AACf,aAAQ,IAAI,qDAAqD;AACjE,aAAQ,KAAK,EAAE;;AAIjB,UAAM,IAAI,KAAK,MAAM,EAAE,EAAE,YAAY;KACnC,MAAM,iBAAiB;KACvB;KACD,CAAC;;aAEK,CAAC,KAAK,UAAU,KAAK,OAAO,YAAY,KAAK,OAAO,MAAM;AAEnE,WAAQ,IAAI,gCAAgC;AAC5C,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,SAAS;AACrB,WAAQ,IAAI,yBAAyB;AACrC,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,YAAY;AACxB,WAAQ,IAAI,uDAAuD;AACnE,WAAQ,IAAI,0DAA0D;AACtE,WAAQ,IAAI,0EAA0E;AACtF,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,yDAAyD;AACrE,WAAQ,IAAI,yBAAyB;AACrC,WAAQ,IAAI,6BAA6B;AACzC,WAAQ,IAAI,6BAA6B;AACzC,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,WAAW;AACvB,WAAQ,IAAI,qDAAqD;AACjE,WAAQ,IAAI,gDAAgD;aACnD,KAAK,OAAO,eAAe,KAAK,OAAO,KAChD,SAAQ,IAAI,QAAQ;OACf;AAEL,WAAQ,MAAM,oBAAoB,KAAK,KAAK;AAC5C,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,kDAAkD;AAC9D,WAAQ,KAAK,EAAE;;UAEV,OAAO;AACd,UAAQ,MAAM,MAAM;AACpB,UAAQ,KAAK,EAAE;;;AAInB,IAAI,OAAO,KAAK,KACd,OAAM,KAAK"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fragno-dev/cli",
-  "version": "0.1.
+  "version": "0.1.22",
   "exports": {
     ".": {
       "development": "./src/cli.ts",
@@ -27,9 +27,9 @@
     "gunshi": "^0.26.3",
     "marked": "^15.0.12",
     "marked-terminal": "^7.3.0",
-    "@fragno-dev/core": "0.1.
-    "@fragno-dev/corpus": "0.0.
-    "@fragno-dev/db": "0.1
+    "@fragno-dev/core": "0.1.11",
+    "@fragno-dev/corpus": "0.0.7",
+    "@fragno-dev/db": "0.2.1"
   },
   "main": "./dist/cli.js",
   "module": "./dist/cli.js",
package/src/cli.ts
CHANGED
package/src/commands/db/generate.ts
CHANGED
@@ -46,10 +46,10 @@ export const generateCommand = define({
     const { databases: allFragnoDatabases, adapter } = await importFragmentFiles(targetPaths);
 
     // Check if adapter supports any form of schema generation
-    if (!adapter.createSchemaGenerator && !adapter.
+    if (!adapter.createSchemaGenerator && !adapter.prepareMigrations) {
       throw new Error(
         `The adapter does not support schema generation. ` +
-          `Please use an adapter that implements either createSchemaGenerator or
+          `Please use an adapter that implements either createSchemaGenerator or prepareMigrations.`,
       );
     }
 
package/src/commands/db/info.ts
CHANGED
@@ -26,35 +26,25 @@ export const infoCommand = define({
       namespace: string;
       schemaVersion: number;
       migrationSupport: boolean;
-      currentVersion?:
-      pendingVersions?:
+      currentVersion?: string;
+      pendingVersions?: string;
       status?: string;
-      error?: string;
     } = {
       namespace: fragnoDb.namespace,
       schemaVersion: fragnoDb.schema.version,
-      migrationSupport: !!fragnoDb.adapter.
+      migrationSupport: !!fragnoDb.adapter.prepareMigrations,
     };
 
     // Get current database version if migrations are supported
-    if (fragnoDb.adapter.
-
-
-
-        fragnoDb.namespace,
-      );
-      const currentVersion = await migrator.getVersion();
-      info.currentVersion = currentVersion;
-      info.pendingVersions = fragnoDb.schema.version - currentVersion;
+    if (fragnoDb.adapter.prepareMigrations) {
+      const currentVersion = await fragnoDb.adapter.getSchemaVersion(fragnoDb.namespace);
+      info.currentVersion = currentVersion;
+      // info.pendingVersions = fragnoDb.schema.version - currentVersion;
 
-
-
-
-
-      }
-    } catch (error) {
-      info.error = error instanceof Error ? error.message : String(error);
-      info.status = "Error";
+      if (info.schemaVersion.toString() !== info.currentVersion) {
+        info.status = `Migrations pending`;
+      } else {
+        info.status = "Up to date";
       }
     } else {
       info.status = "Schema only";
@@ -118,12 +108,7 @@ export const infoCommand = define({
       console.log("Note: These adapters do not support migrations.");
       console.log("Use 'fragno-cli db generate' to generate schema files.");
     } else {
-
-        (info) => info.pendingVersions && info.pendingVersions > 0,
-      );
-      if (hasPendingMigrations) {
-        console.log("Run 'fragno-cli db migrate <target>' to apply pending migrations.");
-      }
+      console.log("Run 'fragno-cli db migrate <target>' to apply pending migrations.");
     }
   },
 });
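For reference, the status logic introduced in the hunk above boils down to comparing the schema's declared version with the version string reported by the adapter. A minimal standalone sketch of that comparison (illustrative only; the function and sample values below are not part of the package):

```ts
// Sketch of the new status decision in `fragno-cli db info`.
// migrationSupport mirrors `!!fragnoDb.adapter.prepareMigrations`;
// currentVersion mirrors what the adapter reports for the namespace.
function migrationStatus(opts: {
  migrationSupport: boolean;
  schemaVersion: number;
  currentVersion?: string;
}): string {
  if (!opts.migrationSupport) {
    return "Schema only";
  }
  return opts.schemaVersion.toString() !== opts.currentVersion
    ? "Migrations pending"
    : "Up to date";
}

// Example: the schema declares version 3 while the database reports "2".
console.log(migrationStatus({ migrationSupport: true, schemaVersion: 3, currentVersion: "2" }));
// -> "Migrations pending"
```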
package/src/utils/find-fragno-databases.ts
CHANGED
@@ -6,32 +6,38 @@ import {
 import type { AnySchema } from "@fragno-dev/db/schema";
 import { instantiatedFragmentFakeSymbol } from "@fragno-dev/core/internal/symbols";
 import { type FragnoInstantiatedFragment } from "@fragno-dev/core";
-import { loadConfig } from "
+import { loadConfig } from "./load-config";
 import { relative } from "node:path";
 
 export async function importFragmentFile(path: string): Promise<Record<string, unknown>> {
-
-
-  });
-
-  const databases = findFragnoDatabases(config);
-  const adapterNames = databases.map(
-    (db) =>
-      `${db.adapter[fragnoDatabaseAdapterNameFakeSymbol]}@${db.adapter[fragnoDatabaseAdapterVersionFakeSymbol]}`,
-  );
-  const uniqueAdapterNames = [...new Set(adapterNames)];
+  // Enable dry run mode for database schema extraction
+  process.env["FRAGNO_INIT_DRY_RUN"] = "true";
 
-
-
-
-
+  try {
+    const config = await loadConfig(path);
+
+    const databases = findFragnoDatabases(config);
+    const adapterNames = databases.map(
+      (db) =>
+        `${db.adapter[fragnoDatabaseAdapterNameFakeSymbol]}@${db.adapter[fragnoDatabaseAdapterVersionFakeSymbol]}`,
     );
-
+    const uniqueAdapterNames = [...new Set(adapterNames)];
 
-
-
-
-
+    if (uniqueAdapterNames.length > 1) {
+      throw new Error(
+        `All Fragno databases must use the same adapter name and version. ` +
+          `Found mismatch: (${adapterNames.join(", ")})`,
+      );
+    }
+
+    return {
+      adapter: databases[0].adapter,
+      databases,
+    };
+  } finally {
+    // Clean up after loading
+    delete process.env["FRAGNO_INIT_DRY_RUN"];
+  }
 }
 
 /**
@@ -167,6 +173,7 @@ export function findFragnoDatabases(
     }
 
     const schema = deps["schema"] as AnySchema;
+    const namespace = deps["namespace"] as string;
     const databaseAdapter = options["databaseAdapter"] as DatabaseAdapter | undefined;
 
     if (!databaseAdapter) {
@@ -176,9 +183,6 @@ export function findFragnoDatabases(
       continue;
     }
 
-    // Derive namespace from fragment name (follows convention: fragmentName + "-db")
-    const namespace = value.name + "-db";
-
     fragnoDatabases.push(
       new FragnoDatabase({
         namespace,
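The first hunk above wraps Fragment loading in a FRAGNO_INIT_DRY_RUN environment flag for database schema extraction. As a rough standalone sketch of that pattern (the withDryRun helper is invented for illustration; only the variable name and the try/finally shape come from the diff):

```ts
// Set a process-wide flag for the duration of one async operation,
// then always remove it so later code in the same process is unaffected.
async function withDryRun<T>(fn: () => Promise<T>): Promise<T> {
  process.env["FRAGNO_INIT_DRY_RUN"] = "true";
  try {
    return await fn();
  } finally {
    delete process.env["FRAGNO_INIT_DRY_RUN"];
  }
}

// Hypothetical usage with the loadConfig helper added in this release:
// const config = await withDryRun(() => loadConfig("./src/my-fragment.ts"));
```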
package/src/utils/load-config.test.ts
ADDED
@@ -0,0 +1,233 @@
+import { describe, it, expect } from "vitest";
+import { stripJsonComments, convertTsconfigPathsToJitiAlias } from "./load-config";
+import { resolve } from "node:path";
+
+describe("stripJsonComments", () => {
+  it("should strip single-line comments", () => {
+    const input = `{
+      // This is a comment
+      "key": "value"
+    }`;
+    const expected = `{
+
+      "key": "value"
+    }`;
+    expect(stripJsonComments(input)).toBe(expected);
+  });
+
+  it("should strip multiple single-line comments", () => {
+    const input = `{
+      // First comment
+      "key1": "value1", // Inline comment
+      // Second comment
+      "key2": "value2"
+    }`;
+    const expected = `{
+
+      "key1": "value1",
+
+      "key2": "value2"
+    }`;
+    expect(stripJsonComments(input)).toBe(expected);
+  });
+
+  it("should strip multi-line comments", () => {
+    const input = `{
+      /* This is a
+      multi-line comment */
+      "key": "value"
+    }`;
+    const expected = `{
+
+      "key": "value"
+    }`;
+    expect(stripJsonComments(input)).toBe(expected);
+  });
+
+  it("should strip multiple multi-line comments", () => {
+    const input = `{
+      /* Comment 1 */
+      "key1": "value1",
+      /* Comment 2
+      spanning lines */
+      "key2": "value2"
+    }`;
+    const expected = `{
+
+      "key1": "value1",
+
+      "key2": "value2"
+    }`;
+    expect(stripJsonComments(input)).toBe(expected);
+  });
+
+  it("should strip both single-line and multi-line comments", () => {
+    const input = `{
+      // Single line comment
+      "key1": "value1",
+      /* Multi-line
+      comment */
+      "key2": "value2" // Another single line
+    }`;
+    const expected = `{
+
+      "key1": "value1",
+
+      "key2": "value2"
+    }`;
+    expect(stripJsonComments(input)).toBe(expected);
+  });
+
+  it("should handle strings with comment-like content", () => {
+    const input = `{
+      "url": "https://example.com",
+      "comment": "This // is not a comment"
+    }`;
+    // Note: This is a known limitation - the simple regex approach
+    // will strip what looks like comments even inside strings
+    // For tsconfig.json files this is typically fine since URLs/strings
+    // with comment syntax are rare
+    const result = stripJsonComments(input);
+    expect(result).toContain('"url": "https:');
+  });
+
+  it("should handle empty input", () => {
+    expect(stripJsonComments("")).toBe("");
+  });
+
+  it("should handle input with no comments", () => {
+    const input = `{
+      "key": "value",
+      "nested": {
+        "key2": "value2"
+      }
+    }`;
+    expect(stripJsonComments(input)).toBe(input);
+  });
+
+  it("should handle real tsconfig.json example", () => {
+    const input = `{
+      "compilerOptions": {
+        // Enable latest features
+        "target": "ESNext",
+        "module": "ESNext",
+        /* Bundler mode */
+        "moduleResolution": "bundler",
+        // Best practices
+        "strict": true
+      }
+    }`;
+    const result = stripJsonComments(input);
+    expect(() => JSON.parse(result)).not.toThrow();
+    const parsed = JSON.parse(result);
+    expect(parsed.compilerOptions.target).toBe("ESNext");
+    expect(parsed.compilerOptions.module).toBe("ESNext");
+    expect(parsed.compilerOptions.moduleResolution).toBe("bundler");
+    expect(parsed.compilerOptions.strict).toBe(true);
+  });
+});
+
+describe("convertTsconfigPathsToJitiAlias", () => {
+  it("should convert simple path alias", () => {
+    const tsconfigPaths = {
+      "@/*": ["./src/*"],
+    };
+    const baseUrl = "/project";
+    const result = convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrl);
+
+    expect(result).toEqual({
+      "@/": resolve(baseUrl, "./src/"),
+    });
+  });
+
+  it("should convert multiple path aliases", () => {
+    const tsconfigPaths = {
+      "@/*": ["./src/*"],
+      "@components/*": ["./src/components/*"],
+      "@utils/*": ["./src/utils/*"],
+    };
+    const baseUrl = "/project";
+    const result = convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrl);
+
+    expect(result).toEqual({
+      "@/": resolve(baseUrl, "./src/"),
+      "@components/": resolve(baseUrl, "./src/components/"),
+      "@utils/": resolve(baseUrl, "./src/utils/"),
+    });
+  });
+
+  it("should handle absolute paths", () => {
+    const tsconfigPaths = {
+      "@lib/*": ["/absolute/path/to/lib/*"],
+    };
+    const baseUrl = "/project";
+    const result = convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrl);
+
+    expect(result).toEqual({
+      "@lib/": resolve(baseUrl, "/absolute/path/to/lib/"),
+    });
+  });
+
+  it("should handle nested paths", () => {
+    const tsconfigPaths = {
+      "@/components/*": ["./src/app/components/*"],
+    };
+    const baseUrl = "/project";
+    const result = convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrl);
+
+    expect(result).toEqual({
+      "@/components/": resolve(baseUrl, "./src/app/components/"),
+    });
+  });
+
+  it("should handle empty paths object", () => {
+    const tsconfigPaths = {};
+    const baseUrl = "/project";
+    const result = convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrl);
+
+    expect(result).toEqual({});
+  });
+
+  it("should only use first path when multiple paths are provided", () => {
+    const tsconfigPaths = {
+      "@/*": ["./src/*", "./lib/*", "./dist/*"],
+    };
+    const baseUrl = "/project";
+    const result = convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrl);
+
+    // Should only use the first path
+    expect(result).toEqual({
+      "@/": resolve(baseUrl, "./src/"),
+    });
+  });
+
+  it("should handle real-world tsconfig paths", () => {
+    const tsconfigPaths = {
+      "@/.source": ["./.source/index.ts"],
+      "@/*": ["./*"],
+    };
+    const baseUrl = "/project/apps/docs";
+    const result = convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrl);
+
+    expect(result).toEqual({
+      "@/.source": resolve(baseUrl, "./.source/index.ts"),
+      "@/": resolve(baseUrl, "./"),
+    });
+  });
+
+  it("should strip trailing asterisk from both alias and path", () => {
+    const tsconfigPaths = {
+      "~/*": ["./custom/*"],
+    };
+    const baseUrl = "/project";
+    const result = convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrl);
+
+    expect(result).toEqual({
+      "~/": resolve(baseUrl, "./custom/"),
+    });
+
+    // Verify no asterisks in result
+    expect(Object.keys(result)[0]).not.toContain("*");
+    expect(Object.values(result)[0]).not.toContain("*");
+  });
+});
package/src/utils/load-config.ts
ADDED
@@ -0,0 +1,115 @@
+import { loadConfig as c12LoadConfig } from "c12";
+import { readFile, access } from "node:fs/promises";
+import { dirname, resolve, join } from "node:path";
+import { constants } from "node:fs";
+
+/**
+ * Checks if a file exists using async API.
+ */
+async function fileExists(path: string): Promise<boolean> {
+  try {
+    await access(path, constants.F_OK);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+/**
+ * Walks up the directory tree from the target path to find a tsconfig.json file.
+ */
+async function findTsconfig(startPath: string): Promise<string | null> {
+  let currentDir = dirname(startPath);
+  const root = resolve("/");
+
+  while (currentDir !== root) {
+    const tsconfigPath = join(currentDir, "tsconfig.json");
+    if (await fileExists(tsconfigPath)) {
+      return tsconfigPath;
+    }
+    currentDir = dirname(currentDir);
+  }
+
+  return null;
+}
+
+/**
+ * Strips comments from JSONC (JSON with Comments) content.
+ */
+export function stripJsonComments(jsonc: string): string {
+  // Remove single-line comments (// ...)
+  let result = jsonc.replace(/\/\/[^\n]*/g, "");
+
+  // Remove multi-line comments (/* ... */)
+  result = result.replace(/\/\*[\s\S]*?\*\//g, "");
+
+  return result;
+}
+
+/**
+ * Converts TypeScript path aliases to jiti alias format.
+ * Strips trailing '*' from aliases and paths, and resolves paths relative to baseUrl.
+ */
+export function convertTsconfigPathsToJitiAlias(
+  tsconfigPaths: Record<string, string[]>,
+  baseUrlResolved: string,
+): Record<string, string> {
+  return Object.fromEntries(
+    Object.entries(tsconfigPaths).map(([_alias, paths]) => {
+      const pathsArray = paths as string[];
+      // trim '*' if present and resolve the actual path
+      const aliasKey = _alias.endsWith("*") ? _alias.slice(0, -1) : _alias;
+      const pathValue = pathsArray[0].endsWith("*") ? pathsArray[0].slice(0, -1) : pathsArray[0];
+      return [aliasKey, resolve(baseUrlResolved, pathValue)];
+    }),
+  );
+}
+
+/**
+ * Resolves tsconfig path aliases for use with jiti.
+ */
+async function resolveTsconfigAliases(targetPath: string): Promise<Record<string, string>> {
+  const tsconfigPath = await findTsconfig(targetPath);
+
+  if (!tsconfigPath) {
+    return {};
+  }
+
+  try {
+    const tsconfigContent = await readFile(tsconfigPath, "utf-8");
+    // Strip comments to handle JSONC format
+    const jsonContent = stripJsonComments(tsconfigContent);
+    const tsconfig = JSON.parse(jsonContent);
+    const tsconfigPaths = tsconfig?.compilerOptions?.paths;
+
+    if (!tsconfigPaths || typeof tsconfigPaths !== "object") {
+      return {};
+    }
+
+    const tsconfigDir = dirname(tsconfigPath);
+    const baseUrl = tsconfig?.compilerOptions?.baseUrl || ".";
+    const baseUrlResolved = resolve(tsconfigDir, baseUrl);
+
+    // Convert tsconfig paths to jiti alias format
+    return convertTsconfigPathsToJitiAlias(tsconfigPaths, baseUrlResolved);
+  } catch (error) {
+    console.warn(`Warning: Failed to parse tsconfig at ${tsconfigPath}:`, error);
+    return {};
+  }
+}
+
+/**
+ * Loads a config file using c12 with automatic tsconfig path alias resolution.
+ */
+export async function loadConfig(path: string): Promise<Record<string, unknown>> {
+  const alias = await resolveTsconfigAliases(path);
+
+  const { config } = await c12LoadConfig({
+    configFile: path,
+    jitiOptions: {
+      alias,
+    },
+  });
+
+  return config as Record<string, unknown>;
+}
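Taken together, the new module finds the nearest tsconfig.json, strips JSONC comments, converts compilerOptions.paths into jiti aliases, and passes them to c12 when loading a Fragment file. A small usage sketch of the two exported helpers (the tsconfig content and project path below are invented for the example):

```ts
import { stripJsonComments, convertTsconfigPathsToJitiAlias } from "./load-config";
import { resolve } from "node:path";

// A JSONC tsconfig like the ones findTsconfig() locates by walking up the tree.
const tsconfigJsonc = `{
  "compilerOptions": {
    // alias used by the Fragment's source files
    "baseUrl": ".",
    "paths": { "@/*": ["./src/*"] }
  }
}`;

const tsconfig = JSON.parse(stripJsonComments(tsconfigJsonc));
const alias = convertTsconfigPathsToJitiAlias(
  tsconfig.compilerOptions.paths,
  resolve("/project"), // the resolved baseUrl
);

console.log(alias); // -> { "@/": "/project/src" }, ready to pass as jitiOptions.alias
```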