@fragno-dev/cli 0.1.13 → 0.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +7 -7
- package/CHANGELOG.md +30 -0
- package/dist/cli.d.ts +34 -7
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +219 -3
- package/dist/cli.js.map +1 -1
- package/package.json +4 -3
- package/src/cli.ts +16 -4
- package/src/commands/corpus.ts +102 -0
- package/src/commands/search.ts +105 -0
- package/src/utils/format-search-results.ts +121 -0
package/.turbo/turbo-build.log
CHANGED

@@ -1,5 +1,5 @@
 
-> @fragno-dev/cli@0.1.
+> @fragno-dev/cli@0.1.14 build /home/runner/work/fragno/fragno/apps/fragno-cli
 > tsdown
 
 ℹ tsdown v0.15.12 powered by rolldown v1.0.0-beta.45
@@ -10,9 +10,9 @@
 ℹ Build start
 ℹ Granting execute permission to dist/cli.d.ts
 ℹ Granting execute permission to dist/cli.js
-ℹ dist/cli.js
-ℹ dist/cli.js.map
-ℹ dist/cli.d.ts.map  0.
-ℹ dist/cli.d.ts
-ℹ 4 files, total:
-✔ Build complete in
+ℹ dist/cli.js 22.08 kB │ gzip: 5.68 kB
+ℹ dist/cli.js.map 44.37 kB │ gzip: 11.22 kB
+ℹ dist/cli.d.ts.map  0.71 kB │ gzip: 0.33 kB
+ℹ dist/cli.d.ts  1.51 kB │ gzip: 0.39 kB
+ℹ 4 files, total: 68.67 kB
+✔ Build complete in 9366ms

package/CHANGELOG.md
CHANGED

@@ -1,5 +1,35 @@
 # @fragno-dev/cli
 
+## 0.1.14
+
+### Patch Changes
+
+- 4ec7e78: feat: add search command for documentation lookup
+
+  Add a new `search` command to fragno-cli that searches the Fragno documentation directly from the
+  command line. Results are automatically grouped by page URL with all relevant sections displayed
+  together.
+
+- 4ec7e78: feat: add corpus command to view code examples and documentation
+
+  The new `corpus` command allows users and LLMs to access tested, type-checked code examples
+  organized by subject. Examples include route definition, database querying, and adapter setup. Use
+  `fragno-cli corpus [topic...]` to view one or more topics.
+
+- 27cc540: fix: Corpus dependency issue
+- Updated dependencies [be1a630]
+- Updated dependencies [b2a88aa]
+- Updated dependencies [2900bfa]
+- Updated dependencies [27cc540]
+- Updated dependencies [059a249]
+- Updated dependencies [f3f7bc2]
+- Updated dependencies [a9f8159]
+- Updated dependencies [9d4cd3a]
+- Updated dependencies [fdb5aaf]
+  - @fragno-dev/core@0.1.6
+  - @fragno-dev/db@0.1.12
+  - @fragno-dev/corpus@0.0.2
+
 ## 0.1.13
 
 ### Patch Changes

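The two commands added in this release are dispatched through gunshi and are also exported from the bundle (see `dist/cli.d.ts` below). As a rough sketch of what the bin entry does internally — assuming the package's exports are importable as a module, which this diff does not show — they can be driven programmatically with gunshi's `cli()`:

```typescript
// Sketch only: mirrors how dist/cli.js dispatches `fragno-cli search` and `fragno-cli corpus`.
// The "@fragno-dev/cli" import specifier is an assumption; only the dist exports appear in this diff.
import { cli } from "gunshi";
import { corpusCommand, searchCommand } from "@fragno-dev/cli";

// Equivalent of: fragno-cli search defining routes --limit 5 --json
await cli(["defining", "routes", "--limit", "5", "--json"], searchCommand);

// Equivalent of: fragno-cli corpus defining-routes
await cli(["defining-routes"], corpusCommand);
```

The `--limit`, `--json`, `--markdown`, and `--base-url` flags come straight from the `searchCommand` args added in `dist/cli.js`; `defining-routes` is one of the topics listed when `fragno-cli corpus` is run with no arguments.
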
package/dist/cli.d.ts
CHANGED

@@ -1,8 +1,8 @@
 #!/usr/bin/env node
-import * as
+import * as gunshi5 from "gunshi";
 
 //#region src/commands/db/generate.d.ts
-declare const generateCommand:
+declare const generateCommand: gunshi5.Command<{
   output: {
     type: "string";
     short: string;
@@ -26,14 +26,41 @@ declare const generateCommand: gunshi4.Command<{
 }>;
 //#endregion
 //#region src/commands/db/migrate.d.ts
-declare const migrateCommand:
+declare const migrateCommand: gunshi5.Command<{}>;
 //#endregion
 //#region src/commands/db/info.d.ts
-declare const infoCommand:
+declare const infoCommand: gunshi5.Command<{}>;
+//#endregion
+//#region src/commands/search.d.ts
+declare const searchCommand: gunshi5.Command<{
+  limit: {
+    type: "number";
+    description: string;
+    default: number;
+  };
+  json: {
+    type: "boolean";
+    description: string;
+    default: false;
+  };
+  markdown: {
+    type: "boolean";
+    description: string;
+    default: true;
+  };
+  "base-url": {
+    type: "string";
+    description: string;
+    default: string;
+  };
+}>;
+//#endregion
+//#region src/commands/corpus.d.ts
+declare const corpusCommand: gunshi5.Command<gunshi5.Args>;
 //#endregion
 //#region src/cli.d.ts
-declare const dbCommand:
-declare const mainCommand:
+declare const dbCommand: gunshi5.Command<gunshi5.Args>;
+declare const mainCommand: gunshi5.Command<gunshi5.Args>;
 //#endregion
-export { dbCommand, generateCommand, infoCommand, mainCommand, migrateCommand };
+export { corpusCommand, dbCommand, generateCommand, infoCommand, mainCommand, migrateCommand, searchCommand };
 //# sourceMappingURL=cli.d.ts.map

package/dist/cli.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"cli.d.ts","names":[],"sources":["../src/commands/db/generate.ts","../src/commands/db/migrate.ts","../src/commands/db/info.ts","../src/cli.ts"],"sourcesContent":[],"mappings":";;;;cAOa,iBAyGX,OAAA,CAzG0B;;;;IAAf,WAAA,EAAA,MAyGX;;;;IC3GW,KAAA,EAAA,MAqEX;;;;ICtEW,IAAA,EAAA,QA4HX;;;;
+
{"version":3,"file":"cli.d.ts","names":[],"sources":["../src/commands/db/generate.ts","../src/commands/db/migrate.ts","../src/commands/db/info.ts","../src/commands/search.ts","../src/commands/corpus.ts","../src/cli.ts"],"sourcesContent":[],"mappings":";;;;cAOa,iBAyGX,OAAA,CAzG0B;;;;IAAf,WAAA,EAAA,MAyGX;;;;IC3GW,KAAA,EAAA,MAqEX;;;;ICtEW,IAAA,EAAA,QA4HX;;;;EC5GW,MAAA,EAAA;;;;ECoDA,CAAA;;;;cHnEA,gBAqEX,OAAA,CArEyB;;;cCDd,aA4HX,OAAA,CA5HsB;;;cCgBX,eAoFX,OAAA,CApFwB;;;;IHbb,OAAA,EAAA,MAyGX;;;;IC3GW,WAAA,EAqEX,MAAA;;;;ICtEW,IAAA,EAAA,SA4HX;;;;EC5GW,UAAA,EAAA;;;;ECoDA,CAAA;;;;cAAA,eAAa,OAAA,CAAA,QA6BxB,OAAA,CA7BwB,IAAA;;;cC1Cb,WAAS,OAAA,CAAA,QAIpB,OAAA,CAJoB,IAAA;cAaT,aAAW,OAAA,CAAA,QAetB,OAAA,CAfsB,IAAA"}

package/dist/cli.js
CHANGED

@@ -7,6 +7,7 @@ import { FragnoDatabase, isFragnoDatabase } from "@fragno-dev/db";
 import { fragnoDatabaseAdapterNameFakeSymbol, fragnoDatabaseAdapterVersionFakeSymbol } from "@fragno-dev/db/adapters";
 import { instantiatedFragmentFakeSymbol } from "@fragno-dev/core/api/fragment-instantiation";
 import { loadConfig } from "c12";
+import { getSubject, getSubjects } from "@fragno-dev/corpus";
 
 //#region src/utils/find-fragno-databases.ts
 async function importFragmentFile(path) {
@@ -258,6 +259,215 @@ const infoCommand = define({
   }
 });
 
+//#endregion
+//#region src/utils/format-search-results.ts
+/**
+* Merge search results by URL, grouping sections and content under each URL (without hash)
+*/
+function mergeResultsByUrl(results, baseUrl) {
+  const mergedMap = /* @__PURE__ */ new Map();
+  for (const result of results) {
+    const baseUrlWithoutHash = result.url.split("#")[0];
+    const existing = mergedMap.get(baseUrlWithoutHash);
+    if (existing) existing.sections.push({
+      content: result.content,
+      type: result.type
+    });
+    else {
+      const urlWithMd = `${baseUrlWithoutHash}.md`;
+      const fullUrl = `https://${baseUrl}${baseUrlWithoutHash}`;
+      const fullUrlWithMd = `https://${baseUrl}${urlWithMd}`;
+      mergedMap.set(baseUrlWithoutHash, {
+        url: baseUrlWithoutHash,
+        urlWithMd,
+        fullUrl,
+        fullUrlWithMd,
+        title: result.type === "page" ? result.content : void 0,
+        breadcrumbs: result.breadcrumbs,
+        type: result.type,
+        sections: [{
+          content: result.content,
+          type: result.type
+        }]
+      });
+    }
+  }
+  return Array.from(mergedMap.values());
+}
+/**
+* Format merged results as markdown
+*/
+function formatAsMarkdown(mergedResults) {
+  const lines = [];
+  for (const result of mergedResults) {
+    const title = result.title || result.sections[0]?.content || "Untitled";
+    lines.push(`## Page: '${title}'`);
+    if (result.breadcrumbs && result.breadcrumbs.length > 0) {
+      lines.push(" " + result.breadcrumbs.join(" > "));
+      lines.push("");
+    }
+    lines.push("URLs:");
+    lines.push(` - ${result.fullUrl}`);
+    lines.push(` - ${result.fullUrlWithMd}`);
+    lines.push("");
+    if (result.sections.length > 1) {
+      lines.push("Relevant sections:");
+      for (let i = 0; i < result.sections.length; i++) {
+        const section = result.sections[i];
+        if (i === 0 && result.type === "page" && section.content === result.title) continue;
+        lines.push(` - ${section.content}`);
+      }
+      lines.push("");
+    }
+    lines.push("---");
+    lines.push("");
+  }
+  return lines.join("\n");
+}
+/**
+* Format merged results as JSON
+*/
+function formatAsJson(mergedResults) {
+  return JSON.stringify(mergedResults, null, 2);
+}
+
+//#endregion
+//#region src/commands/search.ts
+const searchCommand = define({
+  name: "search",
+  description: "Search the Fragno documentation",
+  args: {
+    limit: {
+      type: "number",
+      description: "Maximum number of results to show",
+      default: 10
+    },
+    json: {
+      type: "boolean",
+      description: "Output results in JSON format",
+      default: false
+    },
+    markdown: {
+      type: "boolean",
+      description: "Output results in Markdown format (default)",
+      default: true
+    },
+    "base-url": {
+      type: "string",
+      description: "Base URL for the documentation site",
+      default: "fragno.dev"
+    }
+  },
+  run: async (ctx) => {
+    const query = ctx.positionals.join(" ");
+    if (!query || query.trim().length === 0) throw new Error("Please provide a search query");
+    const jsonMode = ctx.values.json;
+    const baseUrl = ctx.values["base-url"];
+    if (!jsonMode) console.log(`Searching for: "${query}"\n`);
+    try {
+      const encodedQuery = encodeURIComponent(query);
+      const response = await fetch(`https://${baseUrl}/api/search?query=${encodedQuery}`);
+      if (!response.ok) throw new Error(`API request failed with status ${response.status}`);
+      const results = await response.json();
+      const limit = ctx.values.limit;
+      const limitedResults = results.slice(0, limit);
+      if (limitedResults.length === 0) {
+        if (jsonMode) console.log("[]");
+        else console.log("No results found.");
+        return;
+      }
+      const mergedResults = mergeResultsByUrl(limitedResults, baseUrl);
+      if (jsonMode) console.log(formatAsJson(mergedResults));
+      else {
+        console.log(`Found ${results.length} result${results.length === 1 ? "" : "s"}${results.length > limit ? ` (showing ${limit})` : ""}\n`);
+        console.log(formatAsMarkdown(mergedResults));
+      }
+    } catch (error) {
+      if (error instanceof Error) throw new Error(`Search failed: ${error.message}`);
+      throw new Error("Search failed: An unknown error occurred");
+    }
+  }
+});
+
+//#endregion
+//#region src/commands/corpus.ts
+/**
+* Print a subject with its examples
+*/
+function printSubject(subject) {
+  console.log(`\n${"=".repeat(60)}`);
+  console.log(`${subject.title}`);
+  console.log(`${"=".repeat(60)}\n`);
+  if (subject.description) {
+    console.log(subject.description);
+    console.log();
+  }
+  if (subject.imports) {
+    console.log("### Imports\n");
+    console.log("```typescript");
+    console.log(subject.imports);
+    console.log("```\n");
+  }
+  if (subject.init) {
+    console.log("### Initialization\n");
+    console.log("```typescript");
+    console.log(subject.init);
+    console.log("```\n");
+  }
+  for (let i = 0; i < subject.examples.length; i++) {
+    const example = subject.examples[i];
+    console.log(`### Example ${i + 1}\n`);
+    console.log("```typescript");
+    console.log(example.code);
+    console.log("```");
+    if (example.explanation) {
+      console.log();
+      console.log(example.explanation);
+    }
+    console.log();
+  }
+}
+/**
+* Print information about the corpus command
+*/
+function printCorpusHelp() {
+  console.log("Fragno Corpus - Code examples and documentation");
+  console.log("");
+  console.log("Usage: fragno-cli corpus [topic...]");
+  console.log("");
+  console.log("Examples:");
+  console.log(" fragno-cli corpus # List all available topics");
+  console.log(" fragno-cli corpus defining-routes # Show route definition examples");
+  console.log(" fragno-cli corpus database-adapters kysely-adapter");
+  console.log(" # Show multiple topics");
+  console.log("");
+  console.log("Available topics:");
+  const subjects = getSubjects();
+  for (const subject of subjects) console.log(` ${subject.id.padEnd(30)} ${subject.title}`);
+}
+const corpusCommand = define({
+  name: "corpus",
+  description: "View code examples and documentation for Fragno",
+  run: (ctx) => {
+    const topics = ctx.positionals;
+    if (topics.length === 0) {
+      printCorpusHelp();
+      return;
+    }
+    try {
+      const subjects = getSubject(...topics);
+      for (const subject of subjects) printSubject(subject);
+      console.log(`${"=".repeat(60)}`);
+      console.log(`Displayed ${subjects.length} topic(s)`);
+      console.log(`${"=".repeat(60)}\n`);
+    } catch (error) {
+      console.error("Error loading topics:", error instanceof Error ? error.message : error);
+      console.log("\nRun 'fragno-cli corpus' to see available topics.");
+      process.exit(1);
+    }
+  }
+});
+
 //#endregion
 //#region src/cli.ts
 const dbSubCommands = /* @__PURE__ */ new Map();
@@ -283,6 +493,8 @@ const dbCommand = define({
 });
 const rootSubCommands = /* @__PURE__ */ new Map();
 rootSubCommands.set("db", dbCommand);
+rootSubCommands.set("search", searchCommand);
+rootSubCommands.set("corpus", corpusCommand);
 const mainCommand = define({
   name: "fragno-cli",
   description: "Fragno CLI - Tools for working with Fragno fragments",
@@ -292,14 +504,18 @@ const mainCommand = define({
     console.log("Usage: fragno-cli <command> [options]");
     console.log("");
     console.log("Commands:");
-    console.log(" db
+    console.log(" db Database management commands");
+    console.log(" search Search the Fragno documentation");
+    console.log(" corpus View code examples and documentation");
     console.log("");
     console.log("Run 'fragno-cli <command> --help' for more information.");
   }
 });
 if (import.meta.main) try {
   const args = process.argv.slice(2);
-  if (args[0] === "
+  if (args[0] === "search") await cli(args.slice(1), searchCommand);
+  else if (args[0] === "corpus") await cli(args.slice(1), corpusCommand);
+  else if (args[0] === "db" && args.length > 1) {
     const subCommandName = args[1];
     if (subCommandName === "--help" || subCommandName === "-h") {
       printDbHelp();
@@ -329,5 +545,5 @@ if (import.meta.main) try {
 }
 
 //#endregion
-export { dbCommand, generateCommand, infoCommand, mainCommand, migrateCommand };
+export { corpusCommand, dbCommand, generateCommand, infoCommand, mainCommand, migrateCommand, searchCommand };
 //# sourceMappingURL=cli.js.map

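The grouping that `search` performs is implemented by `mergeResultsByUrl` in the added `src/utils/format-search-results.ts` region above: hits whose URLs differ only by `#fragment` collapse into a single entry whose `sections` array collects every hit for that page. A small illustration with made-up hits (the field shapes follow the `SearchResult`/`MergedResult` interfaces visible in the updated source map below, trimmed to the fields used here):

```typescript
// Hypothetical search hits; the URLs and contents are invented for illustration.
interface SearchResult {
  id: string;
  type: "page" | "heading" | "text";
  content: string;
  breadcrumbs?: string[];
  url: string;
}

const hits: SearchResult[] = [
  { id: "1", type: "page", content: "Database Adapters", url: "/docs/database#top" },
  { id: "2", type: "heading", content: "Kysely adapter", url: "/docs/database#kysely" },
  { id: "3", type: "page", content: "Defining Routes", url: "/docs/routes" },
];

// mergeResultsByUrl(hits, "fragno.dev") strips the "#..." part before grouping, so the
// two /docs/database hits merge into one result with two sections:
//
// {
//   url: "/docs/database",
//   urlWithMd: "/docs/database.md",
//   fullUrl: "https://fragno.dev/docs/database",
//   fullUrlWithMd: "https://fragno.dev/docs/database.md",
//   title: "Database Adapters",   // type === "page", so content becomes the title
//   sections: [
//     { content: "Database Adapters", type: "page" },
//     { content: "Kysely adapter", type: "heading" },
//   ],
// }
//
// formatAsMarkdown() then prints one "## Page: '...'" block per merged result, and
// formatAsJson() emits the merged array verbatim when --json is passed.
```
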
package/dist/cli.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"cli.js","names":["allDatabases: FragnoDatabase<AnySchema>[]","adapter: DatabaseAdapter | undefined","firstAdapterFile: string | undefined","fragnoDatabases: FragnoDatabase<AnySchema>[]","results: { schema: string; path: string; namespace: string }[]","results: ExecuteMigrationResult[]","info: {\n namespace: string;\n schemaVersion: number;\n migrationSupport: boolean;\n currentVersion?: number;\n pendingVersions?: number;\n status?: string;\n error?: string;\n }"],"sources":["../src/utils/find-fragno-databases.ts","../src/commands/db/generate.ts","../src/commands/db/migrate.ts","../src/commands/db/info.ts","../src/cli.ts"],"sourcesContent":["import { isFragnoDatabase, type DatabaseAdapter, FragnoDatabase } from \"@fragno-dev/db\";\nimport {\n fragnoDatabaseAdapterNameFakeSymbol,\n fragnoDatabaseAdapterVersionFakeSymbol,\n} from \"@fragno-dev/db/adapters\";\nimport type { AnySchema } from \"@fragno-dev/db/schema\";\nimport {\n instantiatedFragmentFakeSymbol,\n type FragnoInstantiatedFragment,\n} from \"@fragno-dev/core/api/fragment-instantiation\";\nimport { loadConfig } from \"c12\";\nimport { relative } from \"node:path\";\n\nexport async function importFragmentFile(path: string): Promise<Record<string, unknown>> {\n const { config } = await loadConfig({\n configFile: path,\n });\n\n const databases = findFragnoDatabases(config);\n const adapterNames = databases.map(\n (db) =>\n `${db.adapter[fragnoDatabaseAdapterNameFakeSymbol]}@${db.adapter[fragnoDatabaseAdapterVersionFakeSymbol]}`,\n );\n const uniqueAdapterNames = [...new Set(adapterNames)];\n\n if (uniqueAdapterNames.length > 1) {\n throw new Error(\n `All Fragno databases must use the same adapter name and version. ` +\n `Found mismatch: (${adapterNames.join(\", \")})`,\n );\n }\n\n return {\n adapter: databases[0].adapter,\n databases,\n };\n}\n\n/**\n * Imports multiple fragment files and validates they all use the same adapter.\n * Returns the combined databases from all files.\n */\nexport async function importFragmentFiles(paths: string[]): Promise<{\n adapter: DatabaseAdapter;\n databases: FragnoDatabase<AnySchema>[];\n}> {\n // De-duplicate paths (in case same file was specified multiple times)\n const uniquePaths = Array.from(new Set(paths));\n\n if (uniquePaths.length === 0) {\n throw new Error(\"No fragment files provided\");\n }\n\n const allDatabases: FragnoDatabase<AnySchema>[] = [];\n let adapter: DatabaseAdapter | undefined;\n let firstAdapterFile: string | undefined;\n const cwd = process.cwd();\n\n for (const path of uniquePaths) {\n const relativePath = relative(cwd, path);\n\n try {\n const result = await importFragmentFile(path);\n const databases = result[\"databases\"] as FragnoDatabase<AnySchema>[];\n const fileAdapter = result[\"adapter\"] as DatabaseAdapter;\n\n if (databases.length === 0) {\n console.warn(\n `Warning: No FragnoDatabase instances found in ${relativePath}.\\n` +\n `Make sure you export either:\\n` +\n ` - A FragnoDatabase instance created with .create(adapter)\\n` +\n ` - An instantiated fragment with embedded database definition\\n`,\n );\n continue;\n }\n\n // Set the adapter from the first file with databases\n if (!adapter) {\n adapter = fileAdapter;\n firstAdapterFile = relativePath;\n }\n\n // Validate all files use the same adapter name and version\n const firstAdapterName = adapter[fragnoDatabaseAdapterNameFakeSymbol];\n const firstAdapterVersion = adapter[fragnoDatabaseAdapterVersionFakeSymbol];\n const fileAdapterName = 
fileAdapter[fragnoDatabaseAdapterNameFakeSymbol];\n const fileAdapterVersion = fileAdapter[fragnoDatabaseAdapterVersionFakeSymbol];\n\n if (firstAdapterName !== fileAdapterName || firstAdapterVersion !== fileAdapterVersion) {\n const firstAdapterInfo = `${firstAdapterName}@${firstAdapterVersion}`;\n const fileAdapterInfo = `${fileAdapterName}@${fileAdapterVersion}`;\n\n throw new Error(\n `All fragments must use the same database adapter. Mixed adapters found:\\n` +\n ` - ${firstAdapterFile}: ${firstAdapterInfo}\\n` +\n ` - ${relativePath}: ${fileAdapterInfo}\\n\\n` +\n `Make sure all fragments use the same adapter name and version.`,\n );\n }\n\n allDatabases.push(...databases);\n console.log(` Found ${databases.length} database(s) in ${relativePath}`);\n } catch (error) {\n throw new Error(\n `Failed to import fragment file ${relativePath}: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n }\n\n if (allDatabases.length === 0) {\n throw new Error(\n `No FragnoDatabase instances found in any of the target files.\\n` +\n `Make sure your files export either:\\n` +\n ` - A FragnoDatabase instance created with .create(adapter)\\n` +\n ` - An instantiated fragment with embedded database definition\\n`,\n );\n }\n\n if (!adapter) {\n throw new Error(\"No adapter found in any of the fragment files\");\n }\n\n return {\n adapter,\n databases: allDatabases,\n };\n}\n\nfunction isFragnoInstantiatedFragment(\n value: unknown,\n): value is FragnoInstantiatedFragment<[], {}, {}, {}> {\n return (\n typeof value === \"object\" &&\n value !== null &&\n instantiatedFragmentFakeSymbol in value &&\n value[instantiatedFragmentFakeSymbol] === instantiatedFragmentFakeSymbol\n );\n}\n\nfunction additionalContextIsDatabaseContext(additionalContext: unknown): additionalContext is {\n databaseSchema: AnySchema;\n databaseNamespace: string;\n databaseAdapter: DatabaseAdapter;\n} {\n return (\n typeof additionalContext === \"object\" &&\n additionalContext !== null &&\n \"databaseSchema\" in additionalContext &&\n \"databaseNamespace\" in additionalContext &&\n \"databaseAdapter\" in additionalContext\n );\n}\n\n/**\n * Finds all FragnoDatabase instances in a module, including those embedded\n * in instantiated fragments.\n */\nexport function findFragnoDatabases(\n targetModule: Record<string, unknown>,\n): FragnoDatabase<AnySchema>[] {\n const fragnoDatabases: FragnoDatabase<AnySchema>[] = [];\n\n for (const [_key, value] of Object.entries(targetModule)) {\n if (isFragnoDatabase(value)) {\n fragnoDatabases.push(value);\n } else if (isFragnoInstantiatedFragment(value)) {\n const additionalContext = value.additionalContext;\n\n if (!additionalContext || !additionalContextIsDatabaseContext(additionalContext)) {\n continue;\n }\n\n // Extract database schema, namespace, and adapter from instantiated fragment's additionalContext\n const { databaseSchema, databaseNamespace, databaseAdapter } = additionalContext;\n\n fragnoDatabases.push(\n new FragnoDatabase({\n namespace: databaseNamespace,\n schema: databaseSchema,\n adapter: databaseAdapter,\n }),\n );\n }\n }\n\n return fragnoDatabases;\n}\n","import { writeFile, mkdir } from \"node:fs/promises\";\nimport { resolve, dirname } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { generateMigrationsOrSchema } from \"@fragno-dev/db/generation-engine\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\n\n// Define the db generate command with type safety\nexport const generateCommand = define({\n name: 
\"generate\",\n description: \"Generate schema files from FragnoDatabase definitions\",\n args: {\n output: {\n type: \"string\",\n short: \"o\",\n description:\n \"Output path: for single file, exact file path; for multiple files, output directory (default: current directory)\",\n },\n from: {\n type: \"number\",\n short: \"f\",\n description: \"Source version to generate migration from (default: current database version)\",\n },\n to: {\n type: \"number\",\n short: \"t\",\n description: \"Target version to generate migration to (default: latest schema version)\",\n },\n prefix: {\n type: \"string\",\n short: \"p\",\n description: \"String to prepend to the generated file (e.g., '/* eslint-disable */')\",\n },\n },\n run: async (ctx) => {\n // With `define()` and `multiple: true`, targets is properly typed as string[]\n const targets = ctx.positionals;\n const output = ctx.values.output;\n const toVersion = ctx.values.to;\n const fromVersion = ctx.values.from;\n const prefix = ctx.values.prefix;\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files and validate they use the same adapter\n const { databases: allFragnoDatabases, adapter } = await importFragmentFiles(targetPaths);\n\n // Check if adapter supports any form of schema generation\n if (!adapter.createSchemaGenerator && !adapter.createMigrationEngine) {\n throw new Error(\n `The adapter does not support schema generation. ` +\n `Please use an adapter that implements either createSchemaGenerator or createMigrationEngine.`,\n );\n }\n\n // Generate schema for all fragments\n console.log(\"Generating schema...\");\n\n let results: { schema: string; path: string; namespace: string }[];\n try {\n results = await generateMigrationsOrSchema(allFragnoDatabases, {\n path: output,\n toVersion,\n fromVersion,\n });\n } catch (error) {\n throw new Error(\n `Failed to generate schema: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Write all generated files\n for (const result of results) {\n // For single file: use output as exact file path\n // For multiple files: use output as base directory\n const finalOutputPath =\n output && results.length === 1\n ? resolve(process.cwd(), output)\n : output\n ? resolve(process.cwd(), output, result.path)\n : resolve(process.cwd(), result.path);\n\n // Ensure parent directory exists\n const parentDir = dirname(finalOutputPath);\n try {\n await mkdir(parentDir, { recursive: true });\n } catch (error) {\n throw new Error(\n `Failed to create directory: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Write schema to file\n try {\n const content = prefix ? `${prefix}\\n${result.schema}` : result.schema;\n await writeFile(finalOutputPath, content, { encoding: \"utf-8\" });\n } catch (error) {\n throw new Error(\n `Failed to write schema file: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n\n console.log(`✓ Generated: ${finalOutputPath}`);\n }\n\n console.log(`\\n✓ Schema generated successfully!`);\n console.log(` Files generated: ${results.length}`);\n console.log(` Fragments:`);\n for (const db of allFragnoDatabases) {\n console.log(` - ${db.namespace} (version ${db.schema.version})`);\n }\n },\n});\n","import { resolve } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\nimport { executeMigrations, type ExecuteMigrationResult } from \"@fragno-dev/db/generation-engine\";\n\nexport const migrateCommand = define({\n name: \"migrate\",\n description: \"Run database migrations for all fragments to their latest versions\",\n args: {},\n run: async (ctx) => {\n const targets = ctx.positionals;\n\n if (targets.length === 0) {\n throw new Error(\"At least one target file path is required\");\n }\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files and validate they use the same adapter\n const { databases: allFragnoDatabases } = await importFragmentFiles(targetPaths);\n\n console.log(\"\\nMigrating all fragments to their latest versions...\\n\");\n\n let results: ExecuteMigrationResult[];\n try {\n results = await executeMigrations(allFragnoDatabases);\n } catch (error) {\n throw new Error(\n `Migration failed: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Display progress for each result\n for (const result of results) {\n console.log(`Fragment: ${result.namespace}`);\n console.log(` Current version: ${result.fromVersion}`);\n console.log(` Target version: ${result.toVersion}`);\n\n if (result.didMigrate) {\n console.log(` ✓ Migration completed: v${result.fromVersion} → v${result.toVersion}\\n`);\n } else {\n console.log(` ✓ Already at latest version. 
No migration needed.\\n`);\n }\n }\n\n // Summary\n console.log(\"═══════════════════════════════════════\");\n console.log(\"Migration Summary\");\n console.log(\"═══════════════════════════════════════\");\n\n const migrated = results.filter((r) => r.didMigrate);\n const skipped = results.filter((r) => !r.didMigrate);\n\n if (migrated.length > 0) {\n console.log(`\\n✓ Migrated ${migrated.length} fragment(s):`);\n for (const r of migrated) {\n console.log(` - ${r.namespace}: v${r.fromVersion} → v${r.toVersion}`);\n }\n }\n\n if (skipped.length > 0) {\n console.log(`\\n○ Skipped ${skipped.length} fragment(s) (already up-to-date):`);\n for (const r of skipped) {\n console.log(` - ${r.namespace}: v${r.toVersion}`);\n }\n }\n\n for (const db of allFragnoDatabases) {\n await db.adapter.close();\n }\n\n console.log(\"\\n✓ All migrations completed successfully\");\n },\n});\n","import { resolve } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\n\nexport const infoCommand = define({\n name: \"info\",\n description: \"Display database information and migration status\",\n args: {},\n run: async (ctx) => {\n const targets = ctx.positionals;\n\n if (targets.length === 0) {\n throw new Error(\"At least one target file path is required\");\n }\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files\n const { databases: allFragnoDatabases } = await importFragmentFiles(targetPaths);\n\n // Collect database information\n const dbInfos = await Promise.all(\n allFragnoDatabases.map(async (fragnoDb) => {\n const info: {\n namespace: string;\n schemaVersion: number;\n migrationSupport: boolean;\n currentVersion?: number;\n pendingVersions?: number;\n status?: string;\n error?: string;\n } = {\n namespace: fragnoDb.namespace,\n schemaVersion: fragnoDb.schema.version,\n migrationSupport: !!fragnoDb.adapter.createMigrationEngine,\n };\n\n // Get current database version if migrations are supported\n if (fragnoDb.adapter.createMigrationEngine) {\n try {\n const migrator = fragnoDb.adapter.createMigrationEngine(\n fragnoDb.schema,\n fragnoDb.namespace,\n );\n const currentVersion = await migrator.getVersion();\n info.currentVersion = currentVersion;\n info.pendingVersions = fragnoDb.schema.version - currentVersion;\n\n if (info.pendingVersions > 0) {\n info.status = `Pending (${info.pendingVersions} migration(s))`;\n } else if (info.pendingVersions === 0) {\n info.status = \"Up to date\";\n }\n } catch (error) {\n info.error = error instanceof Error ? 
error.message : String(error);\n info.status = \"Error\";\n }\n } else {\n info.status = \"Schema only\";\n }\n\n return info;\n }),\n );\n\n // Determine if any database supports migrations\n const hasMigrationSupport = dbInfos.some((info) => info.migrationSupport);\n\n // Print compact table\n console.log(\"\");\n console.log(`Database Information:`);\n console.log(\"\");\n\n // Table header\n const namespaceHeader = \"Namespace\";\n const versionHeader = \"Schema\";\n const currentHeader = \"Current\";\n const statusHeader = \"Status\";\n\n const maxNamespaceLen = Math.max(\n namespaceHeader.length,\n ...dbInfos.map((info) => info.namespace.length),\n );\n const namespaceWidth = Math.max(maxNamespaceLen + 2, 20);\n const versionWidth = 8;\n const currentWidth = 9;\n const statusWidth = 25;\n\n // Print table\n console.log(\n namespaceHeader.padEnd(namespaceWidth) +\n versionHeader.padEnd(versionWidth) +\n (hasMigrationSupport ? currentHeader.padEnd(currentWidth) : \"\") +\n statusHeader,\n );\n console.log(\n \"-\".repeat(namespaceWidth) +\n \"-\".repeat(versionWidth) +\n (hasMigrationSupport ? \"-\".repeat(currentWidth) : \"\") +\n \"-\".repeat(statusWidth),\n );\n\n for (const info of dbInfos) {\n const currentVersionStr =\n info.currentVersion !== undefined ? String(info.currentVersion) : \"-\";\n console.log(\n info.namespace.padEnd(namespaceWidth) +\n String(info.schemaVersion).padEnd(versionWidth) +\n (hasMigrationSupport ? currentVersionStr.padEnd(currentWidth) : \"\") +\n (info.status || \"-\"),\n );\n }\n\n // Print help text\n console.log(\"\");\n if (!hasMigrationSupport) {\n console.log(\"Note: These adapters do not support migrations.\");\n console.log(\"Use 'fragno-cli db generate' to generate schema files.\");\n } else {\n const hasPendingMigrations = dbInfos.some(\n (info) => info.pendingVersions && info.pendingVersions > 0,\n );\n if (hasPendingMigrations) {\n console.log(\"Run 'fragno-cli db migrate <target>' to apply pending migrations.\");\n }\n }\n },\n});\n","#!/usr/bin/env node\n\nimport { cli, define, parseArgs, resolveArgs } from \"gunshi\";\nimport { generateCommand } from \"./commands/db/generate.js\";\nimport { migrateCommand } from \"./commands/db/migrate.js\";\nimport { infoCommand } from \"./commands/db/info.js\";\n\n// Create a Map of db sub-commands\nconst dbSubCommands = new Map();\ndbSubCommands.set(\"generate\", generateCommand);\ndbSubCommands.set(\"migrate\", migrateCommand);\ndbSubCommands.set(\"info\", infoCommand);\n\n// Helper function to print db command help\nfunction printDbHelp() {\n console.log(\"Database management commands for Fragno\");\n console.log(\"\");\n console.log(\"Usage: fragno-cli db <command> [options]\");\n console.log(\"\");\n console.log(\"Commands:\");\n console.log(\" generate Generate schema files from FragnoDatabase definitions\");\n console.log(\" migrate Run database migrations\");\n console.log(\" info Display database information and migration status\");\n console.log(\"\");\n console.log(\"Run 'fragno-cli db <command> --help' for more information.\");\n}\n\n// Define the db command with type safety\nexport const dbCommand = define({\n name: \"db\",\n description: \"Database management commands\",\n run: printDbHelp,\n});\n\n// Create a Map of root sub-commands\nconst rootSubCommands = new Map();\nrootSubCommands.set(\"db\", dbCommand);\n\n// Define the main command with type safety\nexport const mainCommand = define({\n name: \"fragno-cli\",\n description: \"Fragno CLI - Tools for working with Fragno fragments\",\n 
run: () => {\n console.log(\"Fragno CLI - Tools for working with Fragno fragments\");\n console.log(\"\");\n console.log(\"Usage: fragno-cli <command> [options]\");\n console.log(\"\");\n console.log(\"Commands:\");\n console.log(\" db Database management commands\");\n console.log(\"\");\n console.log(\"Run 'fragno-cli <command> --help' for more information.\");\n },\n});\n\nif (import.meta.main) {\n try {\n // Parse arguments to handle nested subcommands\n const args = process.argv.slice(2);\n\n // Check if we're calling a db subcommand directly\n if (args[0] === \"db\" && args.length > 1) {\n const subCommandName = args[1];\n\n // Check if it's a help request\n if (subCommandName === \"--help\" || subCommandName === \"-h\") {\n printDbHelp();\n process.exit(0);\n }\n\n const subCommand = dbSubCommands.get(subCommandName);\n\n if (!subCommand) {\n console.error(`Unknown command: ${subCommandName}`);\n console.log(\"\");\n printDbHelp();\n process.exit(1);\n }\n\n // Run the specific subcommand with its args\n const subArgs = args.slice(2);\n const isSubCommandHelp = subArgs.includes(\"--help\") || subArgs.includes(\"-h\");\n\n // Check for validation errors before running\n let hasValidationError = false;\n if (!isSubCommandHelp && subCommand.args) {\n const tokens = parseArgs(subArgs);\n const resolved = resolveArgs(subCommand.args, tokens);\n hasValidationError = !!resolved.error;\n }\n\n // Run the command (let gunshi handle printing errors/help)\n await cli(subArgs, subCommand);\n\n // Exit with error code if there was a validation error\n if (hasValidationError) {\n process.exit(1);\n }\n } else if (args[0] === \"db\") {\n // \"db\" command with no subcommand - show db help\n printDbHelp();\n } else {\n // Run the main CLI\n await cli(args, mainCommand, {\n subCommands: rootSubCommands,\n });\n }\n } catch (error) {\n console.error(\"Error:\", error instanceof Error ? 
error.message : error);\n process.exit(1);\n }\n}\n\nexport { generateCommand, migrateCommand, infoCommand };\n"],"mappings":";;;;;;;;;;;AAaA,eAAsB,mBAAmB,MAAgD;CACvF,MAAM,EAAE,WAAW,MAAM,WAAW,EAClC,YAAY,MACb,CAAC;CAEF,MAAM,YAAY,oBAAoB,OAAO;CAC7C,MAAM,eAAe,UAAU,KAC5B,OACC,GAAG,GAAG,QAAQ,qCAAqC,GAAG,GAAG,QAAQ,0CACpE;AAGD,KAF2B,CAAC,GAAG,IAAI,IAAI,aAAa,CAAC,CAE9B,SAAS,EAC9B,OAAM,IAAI,MACR,qFACsB,aAAa,KAAK,KAAK,CAAC,GAC/C;AAGH,QAAO;EACL,SAAS,UAAU,GAAG;EACtB;EACD;;;;;;AAOH,eAAsB,oBAAoB,OAGvC;CAED,MAAM,cAAc,MAAM,KAAK,IAAI,IAAI,MAAM,CAAC;AAE9C,KAAI,YAAY,WAAW,EACzB,OAAM,IAAI,MAAM,6BAA6B;CAG/C,MAAMA,eAA4C,EAAE;CACpD,IAAIC;CACJ,IAAIC;CACJ,MAAM,MAAM,QAAQ,KAAK;AAEzB,MAAK,MAAM,QAAQ,aAAa;EAC9B,MAAM,eAAe,SAAS,KAAK,KAAK;AAExC,MAAI;GACF,MAAM,SAAS,MAAM,mBAAmB,KAAK;GAC7C,MAAM,YAAY,OAAO;GACzB,MAAM,cAAc,OAAO;AAE3B,OAAI,UAAU,WAAW,GAAG;AAC1B,YAAQ,KACN,iDAAiD,aAAa,gKAI/D;AACD;;AAIF,OAAI,CAAC,SAAS;AACZ,cAAU;AACV,uBAAmB;;GAIrB,MAAM,mBAAmB,QAAQ;GACjC,MAAM,sBAAsB,QAAQ;GACpC,MAAM,kBAAkB,YAAY;GACpC,MAAM,qBAAqB,YAAY;AAEvC,OAAI,qBAAqB,mBAAmB,wBAAwB,oBAAoB;IACtF,MAAM,mBAAmB,GAAG,iBAAiB,GAAG;IAChD,MAAM,kBAAkB,GAAG,gBAAgB,GAAG;AAE9C,UAAM,IAAI,MACR,gFACS,iBAAiB,IAAI,iBAAiB,QACtC,aAAa,IAAI,gBAAgB,oEAE3C;;AAGH,gBAAa,KAAK,GAAG,UAAU;AAC/B,WAAQ,IAAI,WAAW,UAAU,OAAO,kBAAkB,eAAe;WAClE,OAAO;AACd,SAAM,IAAI,MACR,kCAAkC,aAAa,IAAI,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC1G;;;AAIL,KAAI,aAAa,WAAW,EAC1B,OAAM,IAAI,MACR,oOAID;AAGH,KAAI,CAAC,QACH,OAAM,IAAI,MAAM,gDAAgD;AAGlE,QAAO;EACL;EACA,WAAW;EACZ;;AAGH,SAAS,6BACP,OACqD;AACrD,QACE,OAAO,UAAU,YACjB,UAAU,QACV,kCAAkC,SAClC,MAAM,oCAAoC;;AAI9C,SAAS,mCAAmC,mBAI1C;AACA,QACE,OAAO,sBAAsB,YAC7B,sBAAsB,QACtB,oBAAoB,qBACpB,uBAAuB,qBACvB,qBAAqB;;;;;;AAQzB,SAAgB,oBACd,cAC6B;CAC7B,MAAMC,kBAA+C,EAAE;AAEvD,MAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,aAAa,CACtD,KAAI,iBAAiB,MAAM,CACzB,iBAAgB,KAAK,MAAM;UAClB,6BAA6B,MAAM,EAAE;EAC9C,MAAM,oBAAoB,MAAM;AAEhC,MAAI,CAAC,qBAAqB,CAAC,mCAAmC,kBAAkB,CAC9E;EAIF,MAAM,EAAE,gBAAgB,mBAAmB,oBAAoB;AAE/D,kBAAgB,KACd,IAAI,eAAe;GACjB,WAAW;GACX,QAAQ;GACR,SAAS;GACV,CAAC,CACH;;AAIL,QAAO;;;;;AClLT,MAAa,kBAAkB,OAAO;CACpC,MAAM;CACN,aAAa;CACb,MAAM;EACJ,QAAQ;GACN,MAAM;GACN,OAAO;GACP,aACE;GACH;EACD,MAAM;GACJ,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,IAAI;GACF,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,QAAQ;GACN,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACF;CACD,KAAK,OAAO,QAAQ;EAElB,MAAM,UAAU,IAAI;EACpB,MAAM,SAAS,IAAI,OAAO;EAC1B,MAAM,YAAY,IAAI,OAAO;EAC7B,MAAM,cAAc,IAAI,OAAO;EAC/B,MAAM,SAAS,IAAI,OAAO;EAM1B,MAAM,EAAE,WAAW,oBAAoB,YAAY,MAAM,oBAHrC,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGc;AAGzF,MAAI,CAAC,QAAQ,yBAAyB,CAAC,QAAQ,sBAC7C,OAAM,IAAI,MACR,+IAED;AAIH,UAAQ,IAAI,uBAAuB;EAEnC,IAAIC;AACJ,MAAI;AACF,aAAU,MAAM,2BAA2B,oBAAoB;IAC7D,MAAM;IACN;IACA;IACD,CAAC;WACK,OAAO;AACd,SAAM,IAAI,MACR,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACrF;;AAIH,OAAK,MAAM,UAAU,SAAS;GAG5B,MAAM,kBACJ,UAAU,QAAQ,WAAW,IACzB,QAAQ,QAAQ,KAAK,EAAE,OAAO,GAC9B,SACE,QAAQ,QAAQ,KAAK,EAAE,QAAQ,OAAO,KAAK,GAC3C,QAAQ,QAAQ,KAAK,EAAE,OAAO,KAAK;GAG3C,MAAM,YAAY,QAAQ,gBAAgB;AAC1C,OAAI;AACF,UAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;YACpC,OAAO;AACd,UAAM,IAAI,MACR,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACtF;;AAIH,OAAI;AAEF,UAAM,UAAU,iBADA,SAAS,GAAG,OAAO,IAAI,OAAO,WAAW,OAAO,QACtB,EAAE,UAAU,SAAS,CAAC;YACzD,OAAO;AACd,UAAM,IAAI,MACR,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACvF;;AAGH,WAAQ,IAAI,gBAAgB,kBAAkB;;AAGhD,UAAQ,IAAI,qCAAqC;AACjD,UAAQ,IAAI,sBAAsB,QAAQ,SAAS;AACnD,UAAQ,IAAI,eAAe;AAC3B,OAAK,MAAM,MAAM,mBACf,SAAQ,IAAI,SAAS,GAAG,UAAU,YAAY,GAAG,OAAO,QAAQ,GAAG;;CAGxE,CAAC;;;;AC3GF,MAAa,iBAAiB,OAAO;CACnC,MAAM;CACN,aAAa;CACb,MAAM,EAAE;CACR,KAAK,OAAO,QAAQ;EAClB,MAAM,UAAU,IAAI;AAEpB,MAAI,QAAQ,WAAW,
EACrB,OAAM,IAAI,MAAM,4CAA4C;EAO9D,MAAM,EAAE,WAAW,uBAAuB,MAAM,oBAH5B,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGK;AAEhF,UAAQ,IAAI,0DAA0D;EAEtE,IAAIC;AACJ,MAAI;AACF,aAAU,MAAM,kBAAkB,mBAAmB;WAC9C,OAAO;AACd,SAAM,IAAI,MACR,qBAAqB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC5E;;AAIH,OAAK,MAAM,UAAU,SAAS;AAC5B,WAAQ,IAAI,aAAa,OAAO,YAAY;AAC5C,WAAQ,IAAI,sBAAsB,OAAO,cAAc;AACvD,WAAQ,IAAI,qBAAqB,OAAO,YAAY;AAEpD,OAAI,OAAO,WACT,SAAQ,IAAI,6BAA6B,OAAO,YAAY,MAAM,OAAO,UAAU,IAAI;OAEvF,SAAQ,IAAI,wDAAwD;;AAKxE,UAAQ,IAAI,0CAA0C;AACtD,UAAQ,IAAI,oBAAoB;AAChC,UAAQ,IAAI,0CAA0C;EAEtD,MAAM,WAAW,QAAQ,QAAQ,MAAM,EAAE,WAAW;EACpD,MAAM,UAAU,QAAQ,QAAQ,MAAM,CAAC,EAAE,WAAW;AAEpD,MAAI,SAAS,SAAS,GAAG;AACvB,WAAQ,IAAI,gBAAgB,SAAS,OAAO,eAAe;AAC3D,QAAK,MAAM,KAAK,SACd,SAAQ,IAAI,OAAO,EAAE,UAAU,KAAK,EAAE,YAAY,MAAM,EAAE,YAAY;;AAI1E,MAAI,QAAQ,SAAS,GAAG;AACtB,WAAQ,IAAI,eAAe,QAAQ,OAAO,oCAAoC;AAC9E,QAAK,MAAM,KAAK,QACd,SAAQ,IAAI,OAAO,EAAE,UAAU,KAAK,EAAE,YAAY;;AAItD,OAAK,MAAM,MAAM,mBACf,OAAM,GAAG,QAAQ,OAAO;AAG1B,UAAQ,IAAI,4CAA4C;;CAE3D,CAAC;;;;ACtEF,MAAa,cAAc,OAAO;CAChC,MAAM;CACN,aAAa;CACb,MAAM,EAAE;CACR,KAAK,OAAO,QAAQ;EAClB,MAAM,UAAU,IAAI;AAEpB,MAAI,QAAQ,WAAW,EACrB,OAAM,IAAI,MAAM,4CAA4C;EAO9D,MAAM,EAAE,WAAW,uBAAuB,MAAM,oBAH5B,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGK;EAGhF,MAAM,UAAU,MAAM,QAAQ,IAC5B,mBAAmB,IAAI,OAAO,aAAa;GACzC,MAAMC,OAQF;IACF,WAAW,SAAS;IACpB,eAAe,SAAS,OAAO;IAC/B,kBAAkB,CAAC,CAAC,SAAS,QAAQ;IACtC;AAGD,OAAI,SAAS,QAAQ,sBACnB,KAAI;IAKF,MAAM,iBAAiB,MAJN,SAAS,QAAQ,sBAChC,SAAS,QACT,SAAS,UACV,CACqC,YAAY;AAClD,SAAK,iBAAiB;AACtB,SAAK,kBAAkB,SAAS,OAAO,UAAU;AAEjD,QAAI,KAAK,kBAAkB,EACzB,MAAK,SAAS,YAAY,KAAK,gBAAgB;aACtC,KAAK,oBAAoB,EAClC,MAAK,SAAS;YAET,OAAO;AACd,SAAK,QAAQ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACnE,SAAK,SAAS;;OAGhB,MAAK,SAAS;AAGhB,UAAO;IACP,CACH;EAGD,MAAM,sBAAsB,QAAQ,MAAM,SAAS,KAAK,iBAAiB;AAGzE,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,wBAAwB;AACpC,UAAQ,IAAI,GAAG;EAGf,MAAM,kBAAkB;EACxB,MAAM,gBAAgB;EACtB,MAAM,gBAAgB;EACtB,MAAM,eAAe;EAErB,MAAM,kBAAkB,KAAK,IAC3B,GACA,GAAG,QAAQ,KAAK,SAAS,KAAK,UAAU,OAAO,CAChD;EACD,MAAM,iBAAiB,KAAK,IAAI,kBAAkB,GAAG,GAAG;EACxD,MAAM,eAAe;EACrB,MAAM,eAAe;EACrB,MAAM,cAAc;AAGpB,UAAQ,IACN,gBAAgB,OAAO,eAAe,GACpC,cAAc,OAAO,aAAa,IACjC,sBAAsB,cAAc,OAAO,aAAa,GAAG,MAC5D,aACH;AACD,UAAQ,IACN,IAAI,OAAO,eAAe,GACxB,IAAI,OAAO,aAAa,IACvB,sBAAsB,IAAI,OAAO,aAAa,GAAG,MAClD,IAAI,OAAO,YAAY,CAC1B;AAED,OAAK,MAAM,QAAQ,SAAS;GAC1B,MAAM,oBACJ,KAAK,mBAAmB,SAAY,OAAO,KAAK,eAAe,GAAG;AACpE,WAAQ,IACN,KAAK,UAAU,OAAO,eAAe,GACnC,OAAO,KAAK,cAAc,CAAC,OAAO,aAAa,IAC9C,sBAAsB,kBAAkB,OAAO,aAAa,GAAG,OAC/D,KAAK,UAAU,KACnB;;AAIH,UAAQ,IAAI,GAAG;AACf,MAAI,CAAC,qBAAqB;AACxB,WAAQ,IAAI,kDAAkD;AAC9D,WAAQ,IAAI,yDAAyD;aAExC,QAAQ,MAClC,SAAS,KAAK,mBAAmB,KAAK,kBAAkB,EAC1D,CAEC,SAAQ,IAAI,oEAAoE;;CAIvF,CAAC;;;;ACxHF,MAAM,gCAAgB,IAAI,KAAK;AAC/B,cAAc,IAAI,YAAY,gBAAgB;AAC9C,cAAc,IAAI,WAAW,eAAe;AAC5C,cAAc,IAAI,QAAQ,YAAY;AAGtC,SAAS,cAAc;AACrB,SAAQ,IAAI,0CAA0C;AACtD,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,2CAA2C;AACvD,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,YAAY;AACxB,SAAQ,IAAI,sEAAsE;AAClF,SAAQ,IAAI,wCAAwC;AACpD,SAAQ,IAAI,kEAAkE;AAC9E,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,6DAA6D;;AAI3E,MAAa,YAAY,OAAO;CAC9B,MAAM;CACN,aAAa;CACb,KAAK;CACN,CAAC;AAGF,MAAM,kCAAkB,IAAI,KAAK;AACjC,gBAAgB,IAAI,MAAM,UAAU;AAGpC,MAAa,cAAc,OAAO;CAChC,MAAM;CACN,aAAa;CACb,WAAW;AACT,UAAQ,IAAI,uDAAuD;AACnE,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,wCAAwC;AACpD,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,YAAY;AACxB,UAAQ,IAAI,uCAAuC;AACnD,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,0DAA0D;;CAEzE,CAAC;AAEF,IAAI,OAAO,KAAK,KACd,KAAI;CAEF,MAAM,OAAO,QAAQ,KAAK,MAAM,EAAE;AAGlC,KAAI,KAAK,OAAO,QAAQ,KAAK,SAAS,GAAG;EACvC,MAAM,iBAAiB,KAAK;AAG5B,MAAI,mBAAmB,YAAY,mBAAmB,MAAM;AAC1D,gBAAa;AACb,
WAAQ,KAAK,EAAE;;EAGjB,MAAM,aAAa,cAAc,IAAI,eAAe;AAEpD,MAAI,CAAC,YAAY;AACf,WAAQ,MAAM,oBAAoB,iBAAiB;AACnD,WAAQ,IAAI,GAAG;AACf,gBAAa;AACb,WAAQ,KAAK,EAAE;;EAIjB,MAAM,UAAU,KAAK,MAAM,EAAE;EAC7B,MAAM,mBAAmB,QAAQ,SAAS,SAAS,IAAI,QAAQ,SAAS,KAAK;EAG7E,IAAI,qBAAqB;AACzB,MAAI,CAAC,oBAAoB,WAAW,MAAM;GACxC,MAAM,SAAS,UAAU,QAAQ;AAEjC,wBAAqB,CAAC,CADL,YAAY,WAAW,MAAM,OAAO,CACrB;;AAIlC,QAAM,IAAI,SAAS,WAAW;AAG9B,MAAI,mBACF,SAAQ,KAAK,EAAE;YAER,KAAK,OAAO,KAErB,cAAa;KAGb,OAAM,IAAI,MAAM,aAAa,EAC3B,aAAa,iBACd,CAAC;SAEG,OAAO;AACd,SAAQ,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,MAAM;AACvE,SAAQ,KAAK,EAAE"}
+
{"version":3,"file":"cli.js","names":["allDatabases: FragnoDatabase<AnySchema>[]","adapter: DatabaseAdapter | undefined","firstAdapterFile: string | undefined","fragnoDatabases: FragnoDatabase<AnySchema>[]","results: { schema: string; path: string; namespace: string }[]","results: ExecuteMigrationResult[]","info: {\n namespace: string;\n schemaVersion: number;\n migrationSupport: boolean;\n currentVersion?: number;\n pendingVersions?: number;\n status?: string;\n error?: string;\n }","lines: string[]"],"sources":["../src/utils/find-fragno-databases.ts","../src/commands/db/generate.ts","../src/commands/db/migrate.ts","../src/commands/db/info.ts","../src/utils/format-search-results.ts","../src/commands/search.ts","../src/commands/corpus.ts","../src/cli.ts"],"sourcesContent":["import { isFragnoDatabase, type DatabaseAdapter, FragnoDatabase } from \"@fragno-dev/db\";\nimport {\n fragnoDatabaseAdapterNameFakeSymbol,\n fragnoDatabaseAdapterVersionFakeSymbol,\n} from \"@fragno-dev/db/adapters\";\nimport type { AnySchema } from \"@fragno-dev/db/schema\";\nimport {\n instantiatedFragmentFakeSymbol,\n type FragnoInstantiatedFragment,\n} from \"@fragno-dev/core/api/fragment-instantiation\";\nimport { loadConfig } from \"c12\";\nimport { relative } from \"node:path\";\n\nexport async function importFragmentFile(path: string): Promise<Record<string, unknown>> {\n const { config } = await loadConfig({\n configFile: path,\n });\n\n const databases = findFragnoDatabases(config);\n const adapterNames = databases.map(\n (db) =>\n `${db.adapter[fragnoDatabaseAdapterNameFakeSymbol]}@${db.adapter[fragnoDatabaseAdapterVersionFakeSymbol]}`,\n );\n const uniqueAdapterNames = [...new Set(adapterNames)];\n\n if (uniqueAdapterNames.length > 1) {\n throw new Error(\n `All Fragno databases must use the same adapter name and version. 
` +\n `Found mismatch: (${adapterNames.join(\", \")})`,\n );\n }\n\n return {\n adapter: databases[0].adapter,\n databases,\n };\n}\n\n/**\n * Imports multiple fragment files and validates they all use the same adapter.\n * Returns the combined databases from all files.\n */\nexport async function importFragmentFiles(paths: string[]): Promise<{\n adapter: DatabaseAdapter;\n databases: FragnoDatabase<AnySchema>[];\n}> {\n // De-duplicate paths (in case same file was specified multiple times)\n const uniquePaths = Array.from(new Set(paths));\n\n if (uniquePaths.length === 0) {\n throw new Error(\"No fragment files provided\");\n }\n\n const allDatabases: FragnoDatabase<AnySchema>[] = [];\n let adapter: DatabaseAdapter | undefined;\n let firstAdapterFile: string | undefined;\n const cwd = process.cwd();\n\n for (const path of uniquePaths) {\n const relativePath = relative(cwd, path);\n\n try {\n const result = await importFragmentFile(path);\n const databases = result[\"databases\"] as FragnoDatabase<AnySchema>[];\n const fileAdapter = result[\"adapter\"] as DatabaseAdapter;\n\n if (databases.length === 0) {\n console.warn(\n `Warning: No FragnoDatabase instances found in ${relativePath}.\\n` +\n `Make sure you export either:\\n` +\n ` - A FragnoDatabase instance created with .create(adapter)\\n` +\n ` - An instantiated fragment with embedded database definition\\n`,\n );\n continue;\n }\n\n // Set the adapter from the first file with databases\n if (!adapter) {\n adapter = fileAdapter;\n firstAdapterFile = relativePath;\n }\n\n // Validate all files use the same adapter name and version\n const firstAdapterName = adapter[fragnoDatabaseAdapterNameFakeSymbol];\n const firstAdapterVersion = adapter[fragnoDatabaseAdapterVersionFakeSymbol];\n const fileAdapterName = fileAdapter[fragnoDatabaseAdapterNameFakeSymbol];\n const fileAdapterVersion = fileAdapter[fragnoDatabaseAdapterVersionFakeSymbol];\n\n if (firstAdapterName !== fileAdapterName || firstAdapterVersion !== fileAdapterVersion) {\n const firstAdapterInfo = `${firstAdapterName}@${firstAdapterVersion}`;\n const fileAdapterInfo = `${fileAdapterName}@${fileAdapterVersion}`;\n\n throw new Error(\n `All fragments must use the same database adapter. Mixed adapters found:\\n` +\n ` - ${firstAdapterFile}: ${firstAdapterInfo}\\n` +\n ` - ${relativePath}: ${fileAdapterInfo}\\n\\n` +\n `Make sure all fragments use the same adapter name and version.`,\n );\n }\n\n allDatabases.push(...databases);\n console.log(` Found ${databases.length} database(s) in ${relativePath}`);\n } catch (error) {\n throw new Error(\n `Failed to import fragment file ${relativePath}: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n }\n\n if (allDatabases.length === 0) {\n throw new Error(\n `No FragnoDatabase instances found in any of the target files.\\n` +\n `Make sure your files export either:\\n` +\n ` - A FragnoDatabase instance created with .create(adapter)\\n` +\n ` - An instantiated fragment with embedded database definition\\n`,\n );\n }\n\n if (!adapter) {\n throw new Error(\"No adapter found in any of the fragment files\");\n }\n\n return {\n adapter,\n databases: allDatabases,\n };\n}\n\nfunction isFragnoInstantiatedFragment(\n value: unknown,\n): value is FragnoInstantiatedFragment<[], {}, {}, {}> {\n return (\n typeof value === \"object\" &&\n value !== null &&\n instantiatedFragmentFakeSymbol in value &&\n value[instantiatedFragmentFakeSymbol] === instantiatedFragmentFakeSymbol\n );\n}\n\nfunction additionalContextIsDatabaseContext(additionalContext: unknown): additionalContext is {\n databaseSchema: AnySchema;\n databaseNamespace: string;\n databaseAdapter: DatabaseAdapter;\n} {\n return (\n typeof additionalContext === \"object\" &&\n additionalContext !== null &&\n \"databaseSchema\" in additionalContext &&\n \"databaseNamespace\" in additionalContext &&\n \"databaseAdapter\" in additionalContext\n );\n}\n\n/**\n * Finds all FragnoDatabase instances in a module, including those embedded\n * in instantiated fragments.\n */\nexport function findFragnoDatabases(\n targetModule: Record<string, unknown>,\n): FragnoDatabase<AnySchema>[] {\n const fragnoDatabases: FragnoDatabase<AnySchema>[] = [];\n\n for (const [_key, value] of Object.entries(targetModule)) {\n if (isFragnoDatabase(value)) {\n fragnoDatabases.push(value);\n } else if (isFragnoInstantiatedFragment(value)) {\n const additionalContext = value.additionalContext;\n\n if (!additionalContext || !additionalContextIsDatabaseContext(additionalContext)) {\n continue;\n }\n\n // Extract database schema, namespace, and adapter from instantiated fragment's additionalContext\n const { databaseSchema, databaseNamespace, databaseAdapter } = additionalContext;\n\n fragnoDatabases.push(\n new FragnoDatabase({\n namespace: databaseNamespace,\n schema: databaseSchema,\n adapter: databaseAdapter,\n }),\n );\n }\n }\n\n return fragnoDatabases;\n}\n","import { writeFile, mkdir } from \"node:fs/promises\";\nimport { resolve, dirname } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { generateMigrationsOrSchema } from \"@fragno-dev/db/generation-engine\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\n\n// Define the db generate command with type safety\nexport const generateCommand = define({\n name: \"generate\",\n description: \"Generate schema files from FragnoDatabase definitions\",\n args: {\n output: {\n type: \"string\",\n short: \"o\",\n description:\n \"Output path: for single file, exact file path; for multiple files, output directory (default: current directory)\",\n },\n from: {\n type: \"number\",\n short: \"f\",\n description: \"Source version to generate migration from (default: current database version)\",\n },\n to: {\n type: \"number\",\n short: \"t\",\n description: \"Target version to generate migration to (default: latest schema version)\",\n },\n prefix: {\n type: \"string\",\n short: \"p\",\n description: \"String to prepend to the generated file (e.g., '/* eslint-disable */')\",\n },\n },\n run: async (ctx) => {\n // With `define()` and `multiple: true`, targets is properly typed as string[]\n const targets = ctx.positionals;\n const output = 
ctx.values.output;\n const toVersion = ctx.values.to;\n const fromVersion = ctx.values.from;\n const prefix = ctx.values.prefix;\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files and validate they use the same adapter\n const { databases: allFragnoDatabases, adapter } = await importFragmentFiles(targetPaths);\n\n // Check if adapter supports any form of schema generation\n if (!adapter.createSchemaGenerator && !adapter.createMigrationEngine) {\n throw new Error(\n `The adapter does not support schema generation. ` +\n `Please use an adapter that implements either createSchemaGenerator or createMigrationEngine.`,\n );\n }\n\n // Generate schema for all fragments\n console.log(\"Generating schema...\");\n\n let results: { schema: string; path: string; namespace: string }[];\n try {\n results = await generateMigrationsOrSchema(allFragnoDatabases, {\n path: output,\n toVersion,\n fromVersion,\n });\n } catch (error) {\n throw new Error(\n `Failed to generate schema: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Write all generated files\n for (const result of results) {\n // For single file: use output as exact file path\n // For multiple files: use output as base directory\n const finalOutputPath =\n output && results.length === 1\n ? resolve(process.cwd(), output)\n : output\n ? resolve(process.cwd(), output, result.path)\n : resolve(process.cwd(), result.path);\n\n // Ensure parent directory exists\n const parentDir = dirname(finalOutputPath);\n try {\n await mkdir(parentDir, { recursive: true });\n } catch (error) {\n throw new Error(\n `Failed to create directory: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n // Write schema to file\n try {\n const content = prefix ? `${prefix}\\n${result.schema}` : result.schema;\n await writeFile(finalOutputPath, content, { encoding: \"utf-8\" });\n } catch (error) {\n throw new Error(\n `Failed to write schema file: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n console.log(`✓ Generated: ${finalOutputPath}`);\n }\n\n console.log(`\\n✓ Schema generated successfully!`);\n console.log(` Files generated: ${results.length}`);\n console.log(` Fragments:`);\n for (const db of allFragnoDatabases) {\n console.log(` - ${db.namespace} (version ${db.schema.version})`);\n }\n },\n});\n","import { resolve } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\nimport { executeMigrations, type ExecuteMigrationResult } from \"@fragno-dev/db/generation-engine\";\n\nexport const migrateCommand = define({\n name: \"migrate\",\n description: \"Run database migrations for all fragments to their latest versions\",\n args: {},\n run: async (ctx) => {\n const targets = ctx.positionals;\n\n if (targets.length === 0) {\n throw new Error(\"At least one target file path is required\");\n }\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files and validate they use the same adapter\n const { databases: allFragnoDatabases } = await importFragmentFiles(targetPaths);\n\n console.log(\"\\nMigrating all fragments to their latest versions...\\n\");\n\n let results: ExecuteMigrationResult[];\n try {\n results = await executeMigrations(allFragnoDatabases);\n } catch (error) {\n throw new Error(\n `Migration failed: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n\n // Display progress for each result\n for (const result of results) {\n console.log(`Fragment: ${result.namespace}`);\n console.log(` Current version: ${result.fromVersion}`);\n console.log(` Target version: ${result.toVersion}`);\n\n if (result.didMigrate) {\n console.log(` ✓ Migration completed: v${result.fromVersion} → v${result.toVersion}\\n`);\n } else {\n console.log(` ✓ Already at latest version. No migration needed.\\n`);\n }\n }\n\n // Summary\n console.log(\"═══════════════════════════════════════\");\n console.log(\"Migration Summary\");\n console.log(\"═══════════════════════════════════════\");\n\n const migrated = results.filter((r) => r.didMigrate);\n const skipped = results.filter((r) => !r.didMigrate);\n\n if (migrated.length > 0) {\n console.log(`\\n✓ Migrated ${migrated.length} fragment(s):`);\n for (const r of migrated) {\n console.log(` - ${r.namespace}: v${r.fromVersion} → v${r.toVersion}`);\n }\n }\n\n if (skipped.length > 0) {\n console.log(`\\n○ Skipped ${skipped.length} fragment(s) (already up-to-date):`);\n for (const r of skipped) {\n console.log(` - ${r.namespace}: v${r.toVersion}`);\n }\n }\n\n for (const db of allFragnoDatabases) {\n await db.adapter.close();\n }\n\n console.log(\"\\n✓ All migrations completed successfully\");\n },\n});\n","import { resolve } from \"node:path\";\nimport { define } from \"gunshi\";\nimport { importFragmentFiles } from \"../../utils/find-fragno-databases\";\n\nexport const infoCommand = define({\n name: \"info\",\n description: \"Display database information and migration status\",\n args: {},\n run: async (ctx) => {\n const targets = ctx.positionals;\n\n if (targets.length === 0) {\n throw new Error(\"At least one target file path is required\");\n }\n\n // Resolve all target paths\n const targetPaths = targets.map((target) => resolve(process.cwd(), target));\n\n // Import all fragment files\n const { databases: allFragnoDatabases } = await importFragmentFiles(targetPaths);\n\n // Collect database information\n const dbInfos = await Promise.all(\n allFragnoDatabases.map(async (fragnoDb) => {\n const info: {\n namespace: string;\n schemaVersion: number;\n migrationSupport: boolean;\n currentVersion?: number;\n pendingVersions?: number;\n status?: string;\n error?: string;\n } = {\n namespace: fragnoDb.namespace,\n schemaVersion: fragnoDb.schema.version,\n migrationSupport: !!fragnoDb.adapter.createMigrationEngine,\n };\n\n // Get current database version if migrations are supported\n if (fragnoDb.adapter.createMigrationEngine) {\n try {\n const migrator = fragnoDb.adapter.createMigrationEngine(\n fragnoDb.schema,\n fragnoDb.namespace,\n );\n const currentVersion = await migrator.getVersion();\n info.currentVersion = currentVersion;\n info.pendingVersions = fragnoDb.schema.version - currentVersion;\n\n if (info.pendingVersions > 0) {\n info.status = `Pending (${info.pendingVersions} migration(s))`;\n } else if (info.pendingVersions === 0) {\n info.status = \"Up to date\";\n }\n } catch (error) {\n info.error = error instanceof Error ? 
error.message : String(error);\n info.status = \"Error\";\n }\n } else {\n info.status = \"Schema only\";\n }\n\n return info;\n }),\n );\n\n // Determine if any database supports migrations\n const hasMigrationSupport = dbInfos.some((info) => info.migrationSupport);\n\n // Print compact table\n console.log(\"\");\n console.log(`Database Information:`);\n console.log(\"\");\n\n // Table header\n const namespaceHeader = \"Namespace\";\n const versionHeader = \"Schema\";\n const currentHeader = \"Current\";\n const statusHeader = \"Status\";\n\n const maxNamespaceLen = Math.max(\n namespaceHeader.length,\n ...dbInfos.map((info) => info.namespace.length),\n );\n const namespaceWidth = Math.max(maxNamespaceLen + 2, 20);\n const versionWidth = 8;\n const currentWidth = 9;\n const statusWidth = 25;\n\n // Print table\n console.log(\n namespaceHeader.padEnd(namespaceWidth) +\n versionHeader.padEnd(versionWidth) +\n (hasMigrationSupport ? currentHeader.padEnd(currentWidth) : \"\") +\n statusHeader,\n );\n console.log(\n \"-\".repeat(namespaceWidth) +\n \"-\".repeat(versionWidth) +\n (hasMigrationSupport ? \"-\".repeat(currentWidth) : \"\") +\n \"-\".repeat(statusWidth),\n );\n\n for (const info of dbInfos) {\n const currentVersionStr =\n info.currentVersion !== undefined ? String(info.currentVersion) : \"-\";\n console.log(\n info.namespace.padEnd(namespaceWidth) +\n String(info.schemaVersion).padEnd(versionWidth) +\n (hasMigrationSupport ? currentVersionStr.padEnd(currentWidth) : \"\") +\n (info.status || \"-\"),\n );\n }\n\n // Print help text\n console.log(\"\");\n if (!hasMigrationSupport) {\n console.log(\"Note: These adapters do not support migrations.\");\n console.log(\"Use 'fragno-cli db generate' to generate schema files.\");\n } else {\n const hasPendingMigrations = dbInfos.some(\n (info) => info.pendingVersions && info.pendingVersions > 0,\n );\n if (hasPendingMigrations) {\n console.log(\"Run 'fragno-cli db migrate <target>' to apply pending migrations.\");\n }\n }\n },\n});\n","interface SearchResult {\n id: string;\n type: \"page\" | \"heading\" | \"text\";\n content: string;\n breadcrumbs?: string[];\n contentWithHighlights?: Array<{\n type: string;\n content: string;\n styles?: { highlight?: boolean };\n }>;\n url: string;\n}\n\ninterface MergedResult {\n url: string;\n urlWithMd: string;\n fullUrl: string;\n fullUrlWithMd: string;\n title?: string;\n breadcrumbs?: string[];\n type: \"page\" | \"heading\" | \"text\";\n sections: Array<{\n content: string;\n type: \"page\" | \"heading\" | \"text\";\n }>;\n}\n\n/**\n * Merge search results by URL, grouping sections and content under each URL (without hash)\n */\nexport function mergeResultsByUrl(results: SearchResult[], baseUrl: string): MergedResult[] {\n const mergedMap = new Map<string, MergedResult>();\n\n for (const result of results) {\n // Strip hash to get base URL for merging\n const baseUrlWithoutHash = result.url.split(\"#\")[0];\n const existing = mergedMap.get(baseUrlWithoutHash);\n\n if (existing) {\n // Add this result as a section\n existing.sections.push({\n content: result.content,\n type: result.type,\n });\n } else {\n // Create new merged result\n const urlWithMd = `${baseUrlWithoutHash}.md`;\n\n const fullUrl = `https://${baseUrl}${baseUrlWithoutHash}`;\n const fullUrlWithMd = `https://${baseUrl}${urlWithMd}`;\n\n mergedMap.set(baseUrlWithoutHash, {\n url: baseUrlWithoutHash,\n urlWithMd,\n fullUrl,\n fullUrlWithMd,\n title: result.type === \"page\" ? 
result.content : undefined,\n breadcrumbs: result.breadcrumbs,\n type: result.type,\n sections: [\n {\n content: result.content,\n type: result.type,\n },\n ],\n });\n }\n }\n\n return Array.from(mergedMap.values());\n}\n\n/**\n * Format merged results as markdown\n */\nexport function formatAsMarkdown(mergedResults: MergedResult[]): string {\n const lines: string[] = [];\n\n for (const result of mergedResults) {\n // Title (use first section content if it's a page, or just use content)\n const title = result.title || result.sections[0]?.content || \"Untitled\";\n lines.push(`## Page: '${title}'`);\n // Breadcrumbs\n if (result.breadcrumbs && result.breadcrumbs.length > 0) {\n lines.push(\" \" + result.breadcrumbs.join(\" > \"));\n lines.push(\"\");\n }\n\n // Both URLs\n lines.push(\"URLs:\");\n lines.push(` - ${result.fullUrl}`);\n lines.push(` - ${result.fullUrlWithMd}`);\n lines.push(\"\");\n\n // Show all sections found on this page\n if (result.sections.length > 1) {\n lines.push(\"Relevant sections:\");\n for (let i = 0; i < result.sections.length; i++) {\n const section = result.sections[i];\n // Skip the first section if it's just the page title repeated\n if (i === 0 && result.type === \"page\" && section.content === result.title) {\n continue;\n }\n lines.push(` - ${section.content}`);\n }\n lines.push(\"\");\n }\n\n lines.push(\"---\");\n lines.push(\"\");\n }\n\n return lines.join(\"\\n\");\n}\n\n/**\n * Format merged results as JSON\n */\nexport function formatAsJson(mergedResults: MergedResult[]): string {\n return JSON.stringify(mergedResults, null, 2);\n}\n","import { define } from \"gunshi\";\nimport {\n mergeResultsByUrl,\n formatAsMarkdown,\n formatAsJson,\n} from \"../utils/format-search-results.js\";\n\ninterface SearchResult {\n id: string;\n type: \"page\" | \"heading\" | \"text\";\n content: string;\n breadcrumbs?: string[];\n contentWithHighlights?: Array<{\n type: string;\n content: string;\n styles?: { highlight?: boolean };\n }>;\n url: string;\n}\n\nexport const searchCommand = define({\n name: \"search\",\n description: \"Search the Fragno documentation\",\n args: {\n limit: {\n type: \"number\",\n description: \"Maximum number of results to show\",\n default: 10,\n },\n json: {\n type: \"boolean\",\n description: \"Output results in JSON format\",\n default: false,\n },\n markdown: {\n type: \"boolean\",\n description: \"Output results in Markdown format (default)\",\n default: true,\n },\n \"base-url\": {\n type: \"string\",\n description: \"Base URL for the documentation site\",\n default: \"fragno.dev\",\n },\n },\n run: async (ctx) => {\n const query = ctx.positionals.join(\" \");\n\n if (!query || query.trim().length === 0) {\n throw new Error(\"Please provide a search query\");\n }\n\n // Determine output mode\n const jsonMode = ctx.values.json as boolean;\n const baseUrl = ctx.values[\"base-url\"] as string;\n\n if (!jsonMode) {\n console.log(`Searching for: \"${query}\"\\n`);\n }\n\n try {\n // Make request to the docs search API\n const encodedQuery = encodeURIComponent(query);\n const response = await fetch(`https://${baseUrl}/api/search?query=${encodedQuery}`);\n\n if (!response.ok) {\n throw new Error(`API request failed with status ${response.status}`);\n }\n\n const results = (await response.json()) as SearchResult[];\n\n // Apply limit\n const limit = ctx.values.limit as number;\n const limitedResults = results.slice(0, limit);\n\n if (limitedResults.length === 0) {\n if (jsonMode) {\n console.log(\"[]\");\n } else {\n console.log(\"No results 
found.\");\n }\n return;\n }\n\n // Merge results by URL\n const mergedResults = mergeResultsByUrl(limitedResults, baseUrl);\n\n // Output based on mode\n if (jsonMode) {\n console.log(formatAsJson(mergedResults));\n } else {\n // Markdown mode (default)\n console.log(\n `Found ${results.length} result${results.length === 1 ? \"\" : \"s\"}${results.length > limit ? ` (showing ${limit})` : \"\"}\\n`,\n );\n console.log(formatAsMarkdown(mergedResults));\n }\n } catch (error) {\n if (error instanceof Error) {\n throw new Error(`Search failed: ${error.message}`);\n }\n throw new Error(\"Search failed: An unknown error occurred\");\n }\n },\n});\n","import { define } from \"gunshi\";\nimport { getSubjects, getSubject } from \"@fragno-dev/corpus\";\n\n/**\n * Print a subject with its examples\n */\nfunction printSubject(subject: ReturnType<typeof getSubject>[number]): void {\n console.log(`\\n${\"=\".repeat(60)}`);\n console.log(`${subject.title}`);\n console.log(`${\"=\".repeat(60)}\\n`);\n\n if (subject.description) {\n console.log(subject.description);\n console.log();\n }\n\n // Print imports block if present\n if (subject.imports) {\n console.log(\"### Imports\\n\");\n console.log(\"```typescript\");\n console.log(subject.imports);\n console.log(\"```\\n\");\n }\n\n // Print init block if present\n if (subject.init) {\n console.log(\"### Initialization\\n\");\n console.log(\"```typescript\");\n console.log(subject.init);\n console.log(\"```\\n\");\n }\n\n // Print examples\n for (let i = 0; i < subject.examples.length; i++) {\n const example = subject.examples[i];\n\n console.log(`### Example ${i + 1}\\n`);\n console.log(\"```typescript\");\n console.log(example.code);\n console.log(\"```\");\n\n if (example.explanation) {\n console.log();\n console.log(example.explanation);\n }\n\n console.log();\n }\n}\n\n/**\n * Print information about the corpus command\n */\nfunction printCorpusHelp(): void {\n console.log(\"Fragno Corpus - Code examples and documentation\");\n console.log(\"\");\n console.log(\"Usage: fragno-cli corpus [topic...]\");\n console.log(\"\");\n console.log(\"Examples:\");\n console.log(\" fragno-cli corpus # List all available topics\");\n console.log(\" fragno-cli corpus defining-routes # Show route definition examples\");\n console.log(\" fragno-cli corpus database-adapters kysely-adapter\");\n console.log(\" # Show multiple topics\");\n console.log(\"\");\n console.log(\"Available topics:\");\n\n const subjects = getSubjects();\n for (const subject of subjects) {\n console.log(` ${subject.id.padEnd(30)} ${subject.title}`);\n }\n}\n\nexport const corpusCommand = define({\n name: \"corpus\",\n description: \"View code examples and documentation for Fragno\",\n run: (ctx) => {\n const topics = ctx.positionals;\n\n // No topics provided - show help\n if (topics.length === 0) {\n printCorpusHelp();\n return;\n }\n\n // Load and display requested topics\n try {\n const subjects = getSubject(...topics);\n\n for (const subject of subjects) {\n printSubject(subject);\n }\n\n console.log(`${\"=\".repeat(60)}`);\n console.log(`Displayed ${subjects.length} topic(s)`);\n console.log(`${\"=\".repeat(60)}\\n`);\n } catch (error) {\n console.error(\"Error loading topics:\", error instanceof Error ? 
error.message : error);\n console.log(\"\\nRun 'fragno-cli corpus' to see available topics.\");\n process.exit(1);\n }\n },\n});\n","#!/usr/bin/env node\n\nimport { cli, define, parseArgs, resolveArgs } from \"gunshi\";\nimport { generateCommand } from \"./commands/db/generate.js\";\nimport { migrateCommand } from \"./commands/db/migrate.js\";\nimport { infoCommand } from \"./commands/db/info.js\";\nimport { searchCommand } from \"./commands/search.js\";\nimport { corpusCommand } from \"./commands/corpus.js\";\n\n// Create a Map of db sub-commands\nconst dbSubCommands = new Map();\ndbSubCommands.set(\"generate\", generateCommand);\ndbSubCommands.set(\"migrate\", migrateCommand);\ndbSubCommands.set(\"info\", infoCommand);\n\n// Helper function to print db command help\nfunction printDbHelp() {\n console.log(\"Database management commands for Fragno\");\n console.log(\"\");\n console.log(\"Usage: fragno-cli db <command> [options]\");\n console.log(\"\");\n console.log(\"Commands:\");\n console.log(\" generate Generate schema files from FragnoDatabase definitions\");\n console.log(\" migrate Run database migrations\");\n console.log(\" info Display database information and migration status\");\n console.log(\"\");\n console.log(\"Run 'fragno-cli db <command> --help' for more information.\");\n}\n\n// Define the db command with type safety\nexport const dbCommand = define({\n name: \"db\",\n description: \"Database management commands\",\n run: printDbHelp,\n});\n\n// Create a Map of root sub-commands\nconst rootSubCommands = new Map();\nrootSubCommands.set(\"db\", dbCommand);\nrootSubCommands.set(\"search\", searchCommand);\nrootSubCommands.set(\"corpus\", corpusCommand);\n\n// Define the main command with type safety\nexport const mainCommand = define({\n name: \"fragno-cli\",\n description: \"Fragno CLI - Tools for working with Fragno fragments\",\n run: () => {\n console.log(\"Fragno CLI - Tools for working with Fragno fragments\");\n console.log(\"\");\n console.log(\"Usage: fragno-cli <command> [options]\");\n console.log(\"\");\n console.log(\"Commands:\");\n console.log(\" db Database management commands\");\n console.log(\" search Search the Fragno documentation\");\n console.log(\" corpus View code examples and documentation\");\n console.log(\"\");\n console.log(\"Run 'fragno-cli <command> --help' for more information.\");\n },\n});\n\nif (import.meta.main) {\n try {\n // Parse arguments to handle nested subcommands\n const args = process.argv.slice(2);\n\n // Check if we're calling the search or corpus command directly\n if (args[0] === \"search\") {\n const searchArgs = args.slice(1);\n await cli(searchArgs, searchCommand);\n } else if (args[0] === \"corpus\") {\n const corpusArgs = args.slice(1);\n await cli(corpusArgs, corpusCommand);\n } else if (args[0] === \"db\" && args.length > 1) {\n const subCommandName = args[1];\n\n // Check if it's a help request\n if (subCommandName === \"--help\" || subCommandName === \"-h\") {\n printDbHelp();\n process.exit(0);\n }\n\n const subCommand = dbSubCommands.get(subCommandName);\n\n if (!subCommand) {\n console.error(`Unknown command: ${subCommandName}`);\n console.log(\"\");\n printDbHelp();\n process.exit(1);\n }\n\n // Run the specific subcommand with its args\n const subArgs = args.slice(2);\n const isSubCommandHelp = subArgs.includes(\"--help\") || subArgs.includes(\"-h\");\n\n // Check for validation errors before running\n let hasValidationError = false;\n if (!isSubCommandHelp && subCommand.args) {\n const tokens = 
parseArgs(subArgs);\n const resolved = resolveArgs(subCommand.args, tokens);\n hasValidationError = !!resolved.error;\n }\n\n // Run the command (let gunshi handle printing errors/help)\n await cli(subArgs, subCommand);\n\n // Exit with error code if there was a validation error\n if (hasValidationError) {\n process.exit(1);\n }\n } else if (args[0] === \"db\") {\n // \"db\" command with no subcommand - show db help\n printDbHelp();\n } else {\n // Run the main CLI\n await cli(args, mainCommand, {\n subCommands: rootSubCommands,\n });\n }\n } catch (error) {\n console.error(\"Error:\", error instanceof Error ? error.message : error);\n process.exit(1);\n }\n}\n\nexport { generateCommand, migrateCommand, infoCommand, searchCommand, corpusCommand };\n"],"mappings":";;;;;;;;;;;;AAaA,eAAsB,mBAAmB,MAAgD;CACvF,MAAM,EAAE,WAAW,MAAM,WAAW,EAClC,YAAY,MACb,CAAC;CAEF,MAAM,YAAY,oBAAoB,OAAO;CAC7C,MAAM,eAAe,UAAU,KAC5B,OACC,GAAG,GAAG,QAAQ,qCAAqC,GAAG,GAAG,QAAQ,0CACpE;AAGD,KAF2B,CAAC,GAAG,IAAI,IAAI,aAAa,CAAC,CAE9B,SAAS,EAC9B,OAAM,IAAI,MACR,qFACsB,aAAa,KAAK,KAAK,CAAC,GAC/C;AAGH,QAAO;EACL,SAAS,UAAU,GAAG;EACtB;EACD;;;;;;AAOH,eAAsB,oBAAoB,OAGvC;CAED,MAAM,cAAc,MAAM,KAAK,IAAI,IAAI,MAAM,CAAC;AAE9C,KAAI,YAAY,WAAW,EACzB,OAAM,IAAI,MAAM,6BAA6B;CAG/C,MAAMA,eAA4C,EAAE;CACpD,IAAIC;CACJ,IAAIC;CACJ,MAAM,MAAM,QAAQ,KAAK;AAEzB,MAAK,MAAM,QAAQ,aAAa;EAC9B,MAAM,eAAe,SAAS,KAAK,KAAK;AAExC,MAAI;GACF,MAAM,SAAS,MAAM,mBAAmB,KAAK;GAC7C,MAAM,YAAY,OAAO;GACzB,MAAM,cAAc,OAAO;AAE3B,OAAI,UAAU,WAAW,GAAG;AAC1B,YAAQ,KACN,iDAAiD,aAAa,gKAI/D;AACD;;AAIF,OAAI,CAAC,SAAS;AACZ,cAAU;AACV,uBAAmB;;GAIrB,MAAM,mBAAmB,QAAQ;GACjC,MAAM,sBAAsB,QAAQ;GACpC,MAAM,kBAAkB,YAAY;GACpC,MAAM,qBAAqB,YAAY;AAEvC,OAAI,qBAAqB,mBAAmB,wBAAwB,oBAAoB;IACtF,MAAM,mBAAmB,GAAG,iBAAiB,GAAG;IAChD,MAAM,kBAAkB,GAAG,gBAAgB,GAAG;AAE9C,UAAM,IAAI,MACR,gFACS,iBAAiB,IAAI,iBAAiB,QACtC,aAAa,IAAI,gBAAgB,oEAE3C;;AAGH,gBAAa,KAAK,GAAG,UAAU;AAC/B,WAAQ,IAAI,WAAW,UAAU,OAAO,kBAAkB,eAAe;WAClE,OAAO;AACd,SAAM,IAAI,MACR,kCAAkC,aAAa,IAAI,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC1G;;;AAIL,KAAI,aAAa,WAAW,EAC1B,OAAM,IAAI,MACR,oOAID;AAGH,KAAI,CAAC,QACH,OAAM,IAAI,MAAM,gDAAgD;AAGlE,QAAO;EACL;EACA,WAAW;EACZ;;AAGH,SAAS,6BACP,OACqD;AACrD,QACE,OAAO,UAAU,YACjB,UAAU,QACV,kCAAkC,SAClC,MAAM,oCAAoC;;AAI9C,SAAS,mCAAmC,mBAI1C;AACA,QACE,OAAO,sBAAsB,YAC7B,sBAAsB,QACtB,oBAAoB,qBACpB,uBAAuB,qBACvB,qBAAqB;;;;;;AAQzB,SAAgB,oBACd,cAC6B;CAC7B,MAAMC,kBAA+C,EAAE;AAEvD,MAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,aAAa,CACtD,KAAI,iBAAiB,MAAM,CACzB,iBAAgB,KAAK,MAAM;UAClB,6BAA6B,MAAM,EAAE;EAC9C,MAAM,oBAAoB,MAAM;AAEhC,MAAI,CAAC,qBAAqB,CAAC,mCAAmC,kBAAkB,CAC9E;EAIF,MAAM,EAAE,gBAAgB,mBAAmB,oBAAoB;AAE/D,kBAAgB,KACd,IAAI,eAAe;GACjB,WAAW;GACX,QAAQ;GACR,SAAS;GACV,CAAC,CACH;;AAIL,QAAO;;;;;AClLT,MAAa,kBAAkB,OAAO;CACpC,MAAM;CACN,aAAa;CACb,MAAM;EACJ,QAAQ;GACN,MAAM;GACN,OAAO;GACP,aACE;GACH;EACD,MAAM;GACJ,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,IAAI;GACF,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,QAAQ;GACN,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACF;CACD,KAAK,OAAO,QAAQ;EAElB,MAAM,UAAU,IAAI;EACpB,MAAM,SAAS,IAAI,OAAO;EAC1B,MAAM,YAAY,IAAI,OAAO;EAC7B,MAAM,cAAc,IAAI,OAAO;EAC/B,MAAM,SAAS,IAAI,OAAO;EAM1B,MAAM,EAAE,WAAW,oBAAoB,YAAY,MAAM,oBAHrC,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGc;AAGzF,MAAI,CAAC,QAAQ,yBAAyB,CAAC,QAAQ,sBAC7C,OAAM,IAAI,MACR,+IAED;AAIH,UAAQ,IAAI,uBAAuB;EAEnC,IAAIC;AACJ,MAAI;AACF,aAAU,MAAM,2BAA2B,oBAAoB;IAC7D,MAAM;IACN;IACA;IACD,CAAC;WACK,OAAO;AACd,SAAM,IAAI,MACR,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACrF;;AAIH,OAAK,MAAM,UAAU,SAAS;GAG5B,MAAM,kBACJ,UAAU,QAAQ,WAAW,IACzB,QAAQ,QAAQ,KAAK,EAAE,OAAO,GAC9B,SACE,QAAQ,QAAQ,KAAK,EAAE,QAAQ,OAAO,KAAK,GAC3C,QAAQ,QAAQ,KAAK,EAAE,OAAO,K
AAK;GAG3C,MAAM,YAAY,QAAQ,gBAAgB;AAC1C,OAAI;AACF,UAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;YACpC,OAAO;AACd,UAAM,IAAI,MACR,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACtF;;AAIH,OAAI;AAEF,UAAM,UAAU,iBADA,SAAS,GAAG,OAAO,IAAI,OAAO,WAAW,OAAO,QACtB,EAAE,UAAU,SAAS,CAAC;YACzD,OAAO;AACd,UAAM,IAAI,MACR,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACvF;;AAGH,WAAQ,IAAI,gBAAgB,kBAAkB;;AAGhD,UAAQ,IAAI,qCAAqC;AACjD,UAAQ,IAAI,sBAAsB,QAAQ,SAAS;AACnD,UAAQ,IAAI,eAAe;AAC3B,OAAK,MAAM,MAAM,mBACf,SAAQ,IAAI,SAAS,GAAG,UAAU,YAAY,GAAG,OAAO,QAAQ,GAAG;;CAGxE,CAAC;;;;AC3GF,MAAa,iBAAiB,OAAO;CACnC,MAAM;CACN,aAAa;CACb,MAAM,EAAE;CACR,KAAK,OAAO,QAAQ;EAClB,MAAM,UAAU,IAAI;AAEpB,MAAI,QAAQ,WAAW,EACrB,OAAM,IAAI,MAAM,4CAA4C;EAO9D,MAAM,EAAE,WAAW,uBAAuB,MAAM,oBAH5B,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGK;AAEhF,UAAQ,IAAI,0DAA0D;EAEtE,IAAIC;AACJ,MAAI;AACF,aAAU,MAAM,kBAAkB,mBAAmB;WAC9C,OAAO;AACd,SAAM,IAAI,MACR,qBAAqB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC5E;;AAIH,OAAK,MAAM,UAAU,SAAS;AAC5B,WAAQ,IAAI,aAAa,OAAO,YAAY;AAC5C,WAAQ,IAAI,sBAAsB,OAAO,cAAc;AACvD,WAAQ,IAAI,qBAAqB,OAAO,YAAY;AAEpD,OAAI,OAAO,WACT,SAAQ,IAAI,6BAA6B,OAAO,YAAY,MAAM,OAAO,UAAU,IAAI;OAEvF,SAAQ,IAAI,wDAAwD;;AAKxE,UAAQ,IAAI,0CAA0C;AACtD,UAAQ,IAAI,oBAAoB;AAChC,UAAQ,IAAI,0CAA0C;EAEtD,MAAM,WAAW,QAAQ,QAAQ,MAAM,EAAE,WAAW;EACpD,MAAM,UAAU,QAAQ,QAAQ,MAAM,CAAC,EAAE,WAAW;AAEpD,MAAI,SAAS,SAAS,GAAG;AACvB,WAAQ,IAAI,gBAAgB,SAAS,OAAO,eAAe;AAC3D,QAAK,MAAM,KAAK,SACd,SAAQ,IAAI,OAAO,EAAE,UAAU,KAAK,EAAE,YAAY,MAAM,EAAE,YAAY;;AAI1E,MAAI,QAAQ,SAAS,GAAG;AACtB,WAAQ,IAAI,eAAe,QAAQ,OAAO,oCAAoC;AAC9E,QAAK,MAAM,KAAK,QACd,SAAQ,IAAI,OAAO,EAAE,UAAU,KAAK,EAAE,YAAY;;AAItD,OAAK,MAAM,MAAM,mBACf,OAAM,GAAG,QAAQ,OAAO;AAG1B,UAAQ,IAAI,4CAA4C;;CAE3D,CAAC;;;;ACtEF,MAAa,cAAc,OAAO;CAChC,MAAM;CACN,aAAa;CACb,MAAM,EAAE;CACR,KAAK,OAAO,QAAQ;EAClB,MAAM,UAAU,IAAI;AAEpB,MAAI,QAAQ,WAAW,EACrB,OAAM,IAAI,MAAM,4CAA4C;EAO9D,MAAM,EAAE,WAAW,uBAAuB,MAAM,oBAH5B,QAAQ,KAAK,WAAW,QAAQ,QAAQ,KAAK,EAAE,OAAO,CAAC,CAGK;EAGhF,MAAM,UAAU,MAAM,QAAQ,IAC5B,mBAAmB,IAAI,OAAO,aAAa;GACzC,MAAMC,OAQF;IACF,WAAW,SAAS;IACpB,eAAe,SAAS,OAAO;IAC/B,kBAAkB,CAAC,CAAC,SAAS,QAAQ;IACtC;AAGD,OAAI,SAAS,QAAQ,sBACnB,KAAI;IAKF,MAAM,iBAAiB,MAJN,SAAS,QAAQ,sBAChC,SAAS,QACT,SAAS,UACV,CACqC,YAAY;AAClD,SAAK,iBAAiB;AACtB,SAAK,kBAAkB,SAAS,OAAO,UAAU;AAEjD,QAAI,KAAK,kBAAkB,EACzB,MAAK,SAAS,YAAY,KAAK,gBAAgB;aACtC,KAAK,oBAAoB,EAClC,MAAK,SAAS;YAET,OAAO;AACd,SAAK,QAAQ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACnE,SAAK,SAAS;;OAGhB,MAAK,SAAS;AAGhB,UAAO;IACP,CACH;EAGD,MAAM,sBAAsB,QAAQ,MAAM,SAAS,KAAK,iBAAiB;AAGzE,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,wBAAwB;AACpC,UAAQ,IAAI,GAAG;EAGf,MAAM,kBAAkB;EACxB,MAAM,gBAAgB;EACtB,MAAM,gBAAgB;EACtB,MAAM,eAAe;EAErB,MAAM,kBAAkB,KAAK,IAC3B,GACA,GAAG,QAAQ,KAAK,SAAS,KAAK,UAAU,OAAO,CAChD;EACD,MAAM,iBAAiB,KAAK,IAAI,kBAAkB,GAAG,GAAG;EACxD,MAAM,eAAe;EACrB,MAAM,eAAe;EACrB,MAAM,cAAc;AAGpB,UAAQ,IACN,gBAAgB,OAAO,eAAe,GACpC,cAAc,OAAO,aAAa,IACjC,sBAAsB,cAAc,OAAO,aAAa,GAAG,MAC5D,aACH;AACD,UAAQ,IACN,IAAI,OAAO,eAAe,GACxB,IAAI,OAAO,aAAa,IACvB,sBAAsB,IAAI,OAAO,aAAa,GAAG,MAClD,IAAI,OAAO,YAAY,CAC1B;AAED,OAAK,MAAM,QAAQ,SAAS;GAC1B,MAAM,oBACJ,KAAK,mBAAmB,SAAY,OAAO,KAAK,eAAe,GAAG;AACpE,WAAQ,IACN,KAAK,UAAU,OAAO,eAAe,GACnC,OAAO,KAAK,cAAc,CAAC,OAAO,aAAa,IAC9C,sBAAsB,kBAAkB,OAAO,aAAa,GAAG,OAC/D,KAAK,UAAU,KACnB;;AAIH,UAAQ,IAAI,GAAG;AACf,MAAI,CAAC,qBAAqB;AACxB,WAAQ,IAAI,kDAAkD;AAC9D,WAAQ,IAAI,yDAAyD;aAExC,QAAQ,MAClC,SAAS,KAAK,mBAAmB,KAAK,kBAAkB,EAC1D,CAEC,SAAQ,IAAI,oEAAoE;;CAIvF,CAAC;;;;;;;AClGF,SAAgB,kBAAkB,SAAyB,SAAiC;CAC1F,MAAM,4BAAY,IAAI,KAA2B;AAEjD,MAAK,MAAM,UAAU,SAAS;EAE5B,MAAM,qBAAqB,OAAO,IAAI,MAAM,IAAI,CAAC;EACjD,MAAM,WAAW,UAAU,IAAI,mBAAmB;AAElD,MAAI,SAEF,UAAS,SAAS,KAAK;GACrB,SAAS,OAAO;GAChB,MAAM,OAAO;GA
Cd,CAAC;OACG;GAEL,MAAM,YAAY,GAAG,mBAAmB;GAExC,MAAM,UAAU,WAAW,UAAU;GACrC,MAAM,gBAAgB,WAAW,UAAU;AAE3C,aAAU,IAAI,oBAAoB;IAChC,KAAK;IACL;IACA;IACA;IACA,OAAO,OAAO,SAAS,SAAS,OAAO,UAAU;IACjD,aAAa,OAAO;IACpB,MAAM,OAAO;IACb,UAAU,CACR;KACE,SAAS,OAAO;KAChB,MAAM,OAAO;KACd,CACF;IACF,CAAC;;;AAIN,QAAO,MAAM,KAAK,UAAU,QAAQ,CAAC;;;;;AAMvC,SAAgB,iBAAiB,eAAuC;CACtE,MAAMC,QAAkB,EAAE;AAE1B,MAAK,MAAM,UAAU,eAAe;EAElC,MAAM,QAAQ,OAAO,SAAS,OAAO,SAAS,IAAI,WAAW;AAC7D,QAAM,KAAK,aAAa,MAAM,GAAG;AAEjC,MAAI,OAAO,eAAe,OAAO,YAAY,SAAS,GAAG;AACvD,SAAM,KAAK,QAAQ,OAAO,YAAY,KAAK,MAAM,CAAC;AAClD,SAAM,KAAK,GAAG;;AAIhB,QAAM,KAAK,QAAQ;AACnB,QAAM,KAAK,OAAO,OAAO,UAAU;AACnC,QAAM,KAAK,OAAO,OAAO,gBAAgB;AACzC,QAAM,KAAK,GAAG;AAGd,MAAI,OAAO,SAAS,SAAS,GAAG;AAC9B,SAAM,KAAK,qBAAqB;AAChC,QAAK,IAAI,IAAI,GAAG,IAAI,OAAO,SAAS,QAAQ,KAAK;IAC/C,MAAM,UAAU,OAAO,SAAS;AAEhC,QAAI,MAAM,KAAK,OAAO,SAAS,UAAU,QAAQ,YAAY,OAAO,MAClE;AAEF,UAAM,KAAK,OAAO,QAAQ,UAAU;;AAEtC,SAAM,KAAK,GAAG;;AAGhB,QAAM,KAAK,MAAM;AACjB,QAAM,KAAK,GAAG;;AAGhB,QAAO,MAAM,KAAK,KAAK;;;;;AAMzB,SAAgB,aAAa,eAAuC;AAClE,QAAO,KAAK,UAAU,eAAe,MAAM,EAAE;;;;;ACnG/C,MAAa,gBAAgB,OAAO;CAClC,MAAM;CACN,aAAa;CACb,MAAM;EACJ,OAAO;GACL,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,MAAM;GACJ,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,UAAU;GACR,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,YAAY;GACV,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACF;CACD,KAAK,OAAO,QAAQ;EAClB,MAAM,QAAQ,IAAI,YAAY,KAAK,IAAI;AAEvC,MAAI,CAAC,SAAS,MAAM,MAAM,CAAC,WAAW,EACpC,OAAM,IAAI,MAAM,gCAAgC;EAIlD,MAAM,WAAW,IAAI,OAAO;EAC5B,MAAM,UAAU,IAAI,OAAO;AAE3B,MAAI,CAAC,SACH,SAAQ,IAAI,mBAAmB,MAAM,KAAK;AAG5C,MAAI;GAEF,MAAM,eAAe,mBAAmB,MAAM;GAC9C,MAAM,WAAW,MAAM,MAAM,WAAW,QAAQ,oBAAoB,eAAe;AAEnF,OAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MAAM,kCAAkC,SAAS,SAAS;GAGtE,MAAM,UAAW,MAAM,SAAS,MAAM;GAGtC,MAAM,QAAQ,IAAI,OAAO;GACzB,MAAM,iBAAiB,QAAQ,MAAM,GAAG,MAAM;AAE9C,OAAI,eAAe,WAAW,GAAG;AAC/B,QAAI,SACF,SAAQ,IAAI,KAAK;QAEjB,SAAQ,IAAI,oBAAoB;AAElC;;GAIF,MAAM,gBAAgB,kBAAkB,gBAAgB,QAAQ;AAGhE,OAAI,SACF,SAAQ,IAAI,aAAa,cAAc,CAAC;QACnC;AAEL,YAAQ,IACN,SAAS,QAAQ,OAAO,SAAS,QAAQ,WAAW,IAAI,KAAK,MAAM,QAAQ,SAAS,QAAQ,aAAa,MAAM,KAAK,GAAG,IACxH;AACD,YAAQ,IAAI,iBAAiB,cAAc,CAAC;;WAEvC,OAAO;AACd,OAAI,iBAAiB,MACnB,OAAM,IAAI,MAAM,kBAAkB,MAAM,UAAU;AAEpD,SAAM,IAAI,MAAM,2CAA2C;;;CAGhE,CAAC;;;;;;;AClGF,SAAS,aAAa,SAAsD;AAC1E,SAAQ,IAAI,KAAK,IAAI,OAAO,GAAG,GAAG;AAClC,SAAQ,IAAI,GAAG,QAAQ,QAAQ;AAC/B,SAAQ,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,IAAI;AAElC,KAAI,QAAQ,aAAa;AACvB,UAAQ,IAAI,QAAQ,YAAY;AAChC,UAAQ,KAAK;;AAIf,KAAI,QAAQ,SAAS;AACnB,UAAQ,IAAI,gBAAgB;AAC5B,UAAQ,IAAI,gBAAgB;AAC5B,UAAQ,IAAI,QAAQ,QAAQ;AAC5B,UAAQ,IAAI,QAAQ;;AAItB,KAAI,QAAQ,MAAM;AAChB,UAAQ,IAAI,uBAAuB;AACnC,UAAQ,IAAI,gBAAgB;AAC5B,UAAQ,IAAI,QAAQ,KAAK;AACzB,UAAQ,IAAI,QAAQ;;AAItB,MAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,SAAS,QAAQ,KAAK;EAChD,MAAM,UAAU,QAAQ,SAAS;AAEjC,UAAQ,IAAI,eAAe,IAAI,EAAE,IAAI;AACrC,UAAQ,IAAI,gBAAgB;AAC5B,UAAQ,IAAI,QAAQ,KAAK;AACzB,UAAQ,IAAI,MAAM;AAElB,MAAI,QAAQ,aAAa;AACvB,WAAQ,KAAK;AACb,WAAQ,IAAI,QAAQ,YAAY;;AAGlC,UAAQ,KAAK;;;;;;AAOjB,SAAS,kBAAwB;AAC/B,SAAQ,IAAI,kDAAkD;AAC9D,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,sCAAsC;AAClD,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,YAAY;AACxB,SAAQ,IAAI,4EAA4E;AACxF,SAAQ,IAAI,iFAAiF;AAC7F,SAAQ,IAAI,uDAAuD;AACnE,SAAQ,IAAI,uEAAuE;AACnF,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,oBAAoB;CAEhC,MAAM,WAAW,aAAa;AAC9B,MAAK,MAAM,WAAW,SACpB,SAAQ,IAAI,KAAK,QAAQ,GAAG,OAAO,GAAG,CAAC,GAAG,QAAQ,QAAQ;;AAI9D,MAAa,gBAAgB,OAAO;CAClC,MAAM;CACN,aAAa;CACb,MAAM,QAAQ;EACZ,MAAM,SAAS,IAAI;AAGnB,MAAI,OAAO,WAAW,GAAG;AACvB,oBAAiB;AACjB;;AAIF,MAAI;GACF,MAAM,WAAW,WAAW,GAAG,OAAO;AAEtC,QAAK,MAAM,WAAW,SACpB,cAAa,QAAQ;AAGvB,WAAQ,IAAI,GAAG,IAAI,OAAO,GAAG,GAAG;AAChC,WAAQ,IAAI,aAAa,SAAS,OAAO,WAAW;AACpD,WAAQ,IAAI,GAAG,IAAI,OAAO,G
AAG,CAAC,IAAI;WAC3B,OAAO;AACd,WAAQ,MAAM,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,MAAM;AACtF,WAAQ,IAAI,qDAAqD;AACjE,WAAQ,KAAK,EAAE;;;CAGpB,CAAC;;;;AC3FF,MAAM,gCAAgB,IAAI,KAAK;AAC/B,cAAc,IAAI,YAAY,gBAAgB;AAC9C,cAAc,IAAI,WAAW,eAAe;AAC5C,cAAc,IAAI,QAAQ,YAAY;AAGtC,SAAS,cAAc;AACrB,SAAQ,IAAI,0CAA0C;AACtD,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,2CAA2C;AACvD,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,YAAY;AACxB,SAAQ,IAAI,sEAAsE;AAClF,SAAQ,IAAI,wCAAwC;AACpD,SAAQ,IAAI,kEAAkE;AAC9E,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,6DAA6D;;AAI3E,MAAa,YAAY,OAAO;CAC9B,MAAM;CACN,aAAa;CACb,KAAK;CACN,CAAC;AAGF,MAAM,kCAAkB,IAAI,KAAK;AACjC,gBAAgB,IAAI,MAAM,UAAU;AACpC,gBAAgB,IAAI,UAAU,cAAc;AAC5C,gBAAgB,IAAI,UAAU,cAAc;AAG5C,MAAa,cAAc,OAAO;CAChC,MAAM;CACN,aAAa;CACb,WAAW;AACT,UAAQ,IAAI,uDAAuD;AACnE,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,wCAAwC;AACpD,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,YAAY;AACxB,UAAQ,IAAI,0CAA0C;AACtD,UAAQ,IAAI,6CAA6C;AACzD,UAAQ,IAAI,kDAAkD;AAC9D,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,0DAA0D;;CAEzE,CAAC;AAEF,IAAI,OAAO,KAAK,KACd,KAAI;CAEF,MAAM,OAAO,QAAQ,KAAK,MAAM,EAAE;AAGlC,KAAI,KAAK,OAAO,SAEd,OAAM,IADa,KAAK,MAAM,EAAE,EACV,cAAc;UAC3B,KAAK,OAAO,SAErB,OAAM,IADa,KAAK,MAAM,EAAE,EACV,cAAc;UAC3B,KAAK,OAAO,QAAQ,KAAK,SAAS,GAAG;EAC9C,MAAM,iBAAiB,KAAK;AAG5B,MAAI,mBAAmB,YAAY,mBAAmB,MAAM;AAC1D,gBAAa;AACb,WAAQ,KAAK,EAAE;;EAGjB,MAAM,aAAa,cAAc,IAAI,eAAe;AAEpD,MAAI,CAAC,YAAY;AACf,WAAQ,MAAM,oBAAoB,iBAAiB;AACnD,WAAQ,IAAI,GAAG;AACf,gBAAa;AACb,WAAQ,KAAK,EAAE;;EAIjB,MAAM,UAAU,KAAK,MAAM,EAAE;EAC7B,MAAM,mBAAmB,QAAQ,SAAS,SAAS,IAAI,QAAQ,SAAS,KAAK;EAG7E,IAAI,qBAAqB;AACzB,MAAI,CAAC,oBAAoB,WAAW,MAAM;GACxC,MAAM,SAAS,UAAU,QAAQ;AAEjC,wBAAqB,CAAC,CADL,YAAY,WAAW,MAAM,OAAO,CACrB;;AAIlC,QAAM,IAAI,SAAS,WAAW;AAG9B,MAAI,mBACF,SAAQ,KAAK,EAAE;YAER,KAAK,OAAO,KAErB,cAAa;KAGb,OAAM,IAAI,MAAM,aAAa,EAC3B,aAAa,iBACd,CAAC;SAEG,OAAO;AACd,SAAQ,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,MAAM;AACvE,SAAQ,KAAK,EAAE"}
|
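The bundled source map above inlines the sources for the new `db generate`, `db migrate`, and `db info` commands together with the `search` and `corpus` commands; readable per-file diffs for the added command files follow below. As a minimal, non-authoritative sketch, assuming the package's root export re-exports the commands the same way `src/cli.ts` does, the generate command can be driven through gunshi's `cli` helper (the fragment path, output directory, and `--to` value are hypothetical):

```typescript
import { cli } from "gunshi";
import { generateCommand } from "@fragno-dev/cli";

// Roughly equivalent to:
//   fragno-cli db generate ./src/fragment.ts --output ./migrations --to 2
// The target file must export a FragnoDatabase instance or an instantiated
// fragment with an embedded database definition.
await cli(
  ["./src/fragment.ts", "--output", "./migrations", "--to", "2"],
  generateCommand,
);
```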
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@fragno-dev/cli",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.14",
|
|
4
4
|
"exports": {
|
|
5
5
|
".": {
|
|
6
6
|
"development": "./src/cli.ts",
|
|
@@ -24,8 +24,9 @@
|
|
|
24
24
|
"@clack/prompts": "^0.11.0",
|
|
25
25
|
"c12": "^3.3.1",
|
|
26
26
|
"gunshi": "^0.26.3",
|
|
27
|
-
"@fragno-dev/core": "0.1.
|
|
28
|
-
"@fragno-dev/db": "0.1.
|
|
27
|
+
"@fragno-dev/core": "0.1.6",
|
|
28
|
+
"@fragno-dev/db": "0.1.12",
|
|
29
|
+
"@fragno-dev/corpus": "0.0.2"
|
|
29
30
|
},
|
|
30
31
|
"main": "./dist/cli.js",
|
|
31
32
|
"module": "./dist/cli.js",
|
package/src/cli.ts
CHANGED
|
@@ -4,6 +4,8 @@ import { cli, define, parseArgs, resolveArgs } from "gunshi";
|
|
|
4
4
|
import { generateCommand } from "./commands/db/generate.js";
|
|
5
5
|
import { migrateCommand } from "./commands/db/migrate.js";
|
|
6
6
|
import { infoCommand } from "./commands/db/info.js";
|
|
7
|
+
import { searchCommand } from "./commands/search.js";
|
|
8
|
+
import { corpusCommand } from "./commands/corpus.js";
|
|
7
9
|
|
|
8
10
|
// Create a Map of db sub-commands
|
|
9
11
|
const dbSubCommands = new Map();
|
|
@@ -35,6 +37,8 @@ export const dbCommand = define({
|
|
|
35
37
|
// Create a Map of root sub-commands
|
|
36
38
|
const rootSubCommands = new Map();
|
|
37
39
|
rootSubCommands.set("db", dbCommand);
|
|
40
|
+
rootSubCommands.set("search", searchCommand);
|
|
41
|
+
rootSubCommands.set("corpus", corpusCommand);
|
|
38
42
|
|
|
39
43
|
// Define the main command with type safety
|
|
40
44
|
export const mainCommand = define({
|
|
@@ -46,7 +50,9 @@ export const mainCommand = define({
|
|
|
46
50
|
console.log("Usage: fragno-cli <command> [options]");
|
|
47
51
|
console.log("");
|
|
48
52
|
console.log("Commands:");
|
|
49
|
-
console.log(" db
|
|
53
|
+
console.log(" db Database management commands");
|
|
54
|
+
console.log(" search Search the Fragno documentation");
|
|
55
|
+
console.log(" corpus View code examples and documentation");
|
|
50
56
|
console.log("");
|
|
51
57
|
console.log("Run 'fragno-cli <command> --help' for more information.");
|
|
52
58
|
},
|
|
@@ -57,8 +63,14 @@ if (import.meta.main) {
|
|
|
57
63
|
// Parse arguments to handle nested subcommands
|
|
58
64
|
const args = process.argv.slice(2);
|
|
59
65
|
|
|
60
|
-
// Check if we're calling
|
|
61
|
-
if (args[0] === "
|
|
66
|
+
// Check if we're calling the search or corpus command directly
|
|
67
|
+
if (args[0] === "search") {
|
|
68
|
+
const searchArgs = args.slice(1);
|
|
69
|
+
await cli(searchArgs, searchCommand);
|
|
70
|
+
} else if (args[0] === "corpus") {
|
|
71
|
+
const corpusArgs = args.slice(1);
|
|
72
|
+
await cli(corpusArgs, corpusCommand);
|
|
73
|
+
} else if (args[0] === "db" && args.length > 1) {
|
|
62
74
|
const subCommandName = args[1];
|
|
63
75
|
|
|
64
76
|
// Check if it's a help request
|
|
@@ -110,4 +122,4 @@ if (import.meta.main) {
|
|
|
110
122
|
}
|
|
111
123
|
}
|
|
112
124
|
|
|
113
|
-
export { generateCommand, migrateCommand, infoCommand };
|
|
125
|
+
export { generateCommand, migrateCommand, infoCommand, searchCommand, corpusCommand };
|
|
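The dispatcher above tokenizes a db sub-command's raw arguments and resolves them against the command's declared args before handing off to gunshi, so it can exit with a non-zero code on validation failures. A stripped-down sketch of that pre-check in isolation (the invalid `--from` value is made up for illustration):

```typescript
import { parseArgs, resolveArgs } from "gunshi";
import { generateCommand } from "./commands/db/generate.js";

// Tokenize argv-style input, then resolve it against the command's arg schema;
// a resolution error marks the input as invalid.
const tokens = parseArgs(["./src/fragment.ts", "--from", "not-a-number"]);
const resolved = resolveArgs(generateCommand.args, tokens);

if (resolved.error) {
  // cli.ts still runs the command so gunshi prints the error/help,
  // then exits with code 1.
  process.exit(1);
}
```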
package/src/commands/corpus.ts
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import { define } from "gunshi";
|
|
2
|
+
import { getSubjects, getSubject } from "@fragno-dev/corpus";
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Print a subject with its examples
|
|
6
|
+
*/
|
|
7
|
+
function printSubject(subject: ReturnType<typeof getSubject>[number]): void {
|
|
8
|
+
console.log(`\n${"=".repeat(60)}`);
|
|
9
|
+
console.log(`${subject.title}`);
|
|
10
|
+
console.log(`${"=".repeat(60)}\n`);
|
|
11
|
+
|
|
12
|
+
if (subject.description) {
|
|
13
|
+
console.log(subject.description);
|
|
14
|
+
console.log();
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
// Print imports block if present
|
|
18
|
+
if (subject.imports) {
|
|
19
|
+
console.log("### Imports\n");
|
|
20
|
+
console.log("```typescript");
|
|
21
|
+
console.log(subject.imports);
|
|
22
|
+
console.log("```\n");
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
// Print init block if present
|
|
26
|
+
if (subject.init) {
|
|
27
|
+
console.log("### Initialization\n");
|
|
28
|
+
console.log("```typescript");
|
|
29
|
+
console.log(subject.init);
|
|
30
|
+
console.log("```\n");
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
// Print examples
|
|
34
|
+
for (let i = 0; i < subject.examples.length; i++) {
|
|
35
|
+
const example = subject.examples[i];
|
|
36
|
+
|
|
37
|
+
console.log(`### Example ${i + 1}\n`);
|
|
38
|
+
console.log("```typescript");
|
|
39
|
+
console.log(example.code);
|
|
40
|
+
console.log("```");
|
|
41
|
+
|
|
42
|
+
if (example.explanation) {
|
|
43
|
+
console.log();
|
|
44
|
+
console.log(example.explanation);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
console.log();
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Print information about the corpus command
|
|
53
|
+
*/
|
|
54
|
+
function printCorpusHelp(): void {
|
|
55
|
+
console.log("Fragno Corpus - Code examples and documentation");
|
|
56
|
+
console.log("");
|
|
57
|
+
console.log("Usage: fragno-cli corpus [topic...]");
|
|
58
|
+
console.log("");
|
|
59
|
+
console.log("Examples:");
|
|
60
|
+
console.log(" fragno-cli corpus # List all available topics");
|
|
61
|
+
console.log(" fragno-cli corpus defining-routes # Show route definition examples");
|
|
62
|
+
console.log(" fragno-cli corpus database-adapters kysely-adapter");
|
|
63
|
+
console.log(" # Show multiple topics");
|
|
64
|
+
console.log("");
|
|
65
|
+
console.log("Available topics:");
|
|
66
|
+
|
|
67
|
+
const subjects = getSubjects();
|
|
68
|
+
for (const subject of subjects) {
|
|
69
|
+
console.log(` ${subject.id.padEnd(30)} ${subject.title}`);
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
export const corpusCommand = define({
|
|
74
|
+
name: "corpus",
|
|
75
|
+
description: "View code examples and documentation for Fragno",
|
|
76
|
+
run: (ctx) => {
|
|
77
|
+
const topics = ctx.positionals;
|
|
78
|
+
|
|
79
|
+
// No topics provided - show help
|
|
80
|
+
if (topics.length === 0) {
|
|
81
|
+
printCorpusHelp();
|
|
82
|
+
return;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
// Load and display requested topics
|
|
86
|
+
try {
|
|
87
|
+
const subjects = getSubject(...topics);
|
|
88
|
+
|
|
89
|
+
for (const subject of subjects) {
|
|
90
|
+
printSubject(subject);
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
console.log(`${"=".repeat(60)}`);
|
|
94
|
+
console.log(`Displayed ${subjects.length} topic(s)`);
|
|
95
|
+
console.log(`${"=".repeat(60)}\n`);
|
|
96
|
+
} catch (error) {
|
|
97
|
+
console.error("Error loading topics:", error instanceof Error ? error.message : error);
|
|
98
|
+
console.log("\nRun 'fragno-cli corpus' to see available topics.");
|
|
99
|
+
process.exit(1);
|
|
100
|
+
}
|
|
101
|
+
},
|
|
102
|
+
});
|
|
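The corpus command above is a thin wrapper around `getSubjects()` and `getSubject()` from `@fragno-dev/corpus`. A small sketch of calling that API directly (`defining-routes` is the topic id used in the command's own help text and may not exist in every corpus version):

```typescript
import { getSubjects, getSubject } from "@fragno-dev/corpus";

// List the available topic ids.
for (const subject of getSubjects()) {
  console.log(subject.id);
}

// Load a single topic and inspect it; substitute any id printed above.
const [routes] = getSubject("defining-routes");
console.log(routes.title);
console.log(`${routes.examples.length} example(s)`);
```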
package/src/commands/search.ts
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
import { define } from "gunshi";
|
|
2
|
+
import {
|
|
3
|
+
mergeResultsByUrl,
|
|
4
|
+
formatAsMarkdown,
|
|
5
|
+
formatAsJson,
|
|
6
|
+
} from "../utils/format-search-results.js";
|
|
7
|
+
|
|
8
|
+
interface SearchResult {
|
|
9
|
+
id: string;
|
|
10
|
+
type: "page" | "heading" | "text";
|
|
11
|
+
content: string;
|
|
12
|
+
breadcrumbs?: string[];
|
|
13
|
+
contentWithHighlights?: Array<{
|
|
14
|
+
type: string;
|
|
15
|
+
content: string;
|
|
16
|
+
styles?: { highlight?: boolean };
|
|
17
|
+
}>;
|
|
18
|
+
url: string;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
export const searchCommand = define({
|
|
22
|
+
name: "search",
|
|
23
|
+
description: "Search the Fragno documentation",
|
|
24
|
+
args: {
|
|
25
|
+
limit: {
|
|
26
|
+
type: "number",
|
|
27
|
+
description: "Maximum number of results to show",
|
|
28
|
+
default: 10,
|
|
29
|
+
},
|
|
30
|
+
json: {
|
|
31
|
+
type: "boolean",
|
|
32
|
+
description: "Output results in JSON format",
|
|
33
|
+
default: false,
|
|
34
|
+
},
|
|
35
|
+
markdown: {
|
|
36
|
+
type: "boolean",
|
|
37
|
+
description: "Output results in Markdown format (default)",
|
|
38
|
+
default: true,
|
|
39
|
+
},
|
|
40
|
+
"base-url": {
|
|
41
|
+
type: "string",
|
|
42
|
+
description: "Base URL for the documentation site",
|
|
43
|
+
default: "fragno.dev",
|
|
44
|
+
},
|
|
45
|
+
},
|
|
46
|
+
run: async (ctx) => {
|
|
47
|
+
const query = ctx.positionals.join(" ");
|
|
48
|
+
|
|
49
|
+
if (!query || query.trim().length === 0) {
|
|
50
|
+
throw new Error("Please provide a search query");
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
// Determine output mode
|
|
54
|
+
const jsonMode = ctx.values.json as boolean;
|
|
55
|
+
const baseUrl = ctx.values["base-url"] as string;
|
|
56
|
+
|
|
57
|
+
if (!jsonMode) {
|
|
58
|
+
console.log(`Searching for: "${query}"\n`);
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
try {
|
|
62
|
+
// Make request to the docs search API
|
|
63
|
+
const encodedQuery = encodeURIComponent(query);
|
|
64
|
+
const response = await fetch(`https://${baseUrl}/api/search?query=${encodedQuery}`);
|
|
65
|
+
|
|
66
|
+
if (!response.ok) {
|
|
67
|
+
throw new Error(`API request failed with status ${response.status}`);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
const results = (await response.json()) as SearchResult[];
|
|
71
|
+
|
|
72
|
+
// Apply limit
|
|
73
|
+
const limit = ctx.values.limit as number;
|
|
74
|
+
const limitedResults = results.slice(0, limit);
|
|
75
|
+
|
|
76
|
+
if (limitedResults.length === 0) {
|
|
77
|
+
if (jsonMode) {
|
|
78
|
+
console.log("[]");
|
|
79
|
+
} else {
|
|
80
|
+
console.log("No results found.");
|
|
81
|
+
}
|
|
82
|
+
return;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
// Merge results by URL
|
|
86
|
+
const mergedResults = mergeResultsByUrl(limitedResults, baseUrl);
|
|
87
|
+
|
|
88
|
+
// Output based on mode
|
|
89
|
+
if (jsonMode) {
|
|
90
|
+
console.log(formatAsJson(mergedResults));
|
|
91
|
+
} else {
|
|
92
|
+
// Markdown mode (default)
|
|
93
|
+
console.log(
|
|
94
|
+
`Found ${results.length} result${results.length === 1 ? "" : "s"}${results.length > limit ? ` (showing ${limit})` : ""}\n`,
|
|
95
|
+
);
|
|
96
|
+
console.log(formatAsMarkdown(mergedResults));
|
|
97
|
+
}
|
|
98
|
+
} catch (error) {
|
|
99
|
+
if (error instanceof Error) {
|
|
100
|
+
throw new Error(`Search failed: ${error.message}`);
|
|
101
|
+
}
|
|
102
|
+
throw new Error("Search failed: An unknown error occurred");
|
|
103
|
+
}
|
|
104
|
+
},
|
|
105
|
+
});
|
|
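Like the other commands, `searchCommand` can be exercised directly through gunshi's `cli` helper, the same call pattern `src/cli.ts` uses for dispatch. A sketch (the query is arbitrary, the request needs network access to the documentation site, and `--json` emits the merged results produced by the formatter shown next):

```typescript
import { cli } from "gunshi";
import { searchCommand } from "./commands/search.js";

// Roughly equivalent to: fragno-cli search "defining routes" --json --limit 5
await cli(["defining", "routes", "--json", "--limit", "5"], searchCommand);
```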
package/src/utils/format-search-results.ts
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
interface SearchResult {
|
|
2
|
+
id: string;
|
|
3
|
+
type: "page" | "heading" | "text";
|
|
4
|
+
content: string;
|
|
5
|
+
breadcrumbs?: string[];
|
|
6
|
+
contentWithHighlights?: Array<{
|
|
7
|
+
type: string;
|
|
8
|
+
content: string;
|
|
9
|
+
styles?: { highlight?: boolean };
|
|
10
|
+
}>;
|
|
11
|
+
url: string;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
interface MergedResult {
|
|
15
|
+
url: string;
|
|
16
|
+
urlWithMd: string;
|
|
17
|
+
fullUrl: string;
|
|
18
|
+
fullUrlWithMd: string;
|
|
19
|
+
title?: string;
|
|
20
|
+
breadcrumbs?: string[];
|
|
21
|
+
type: "page" | "heading" | "text";
|
|
22
|
+
sections: Array<{
|
|
23
|
+
content: string;
|
|
24
|
+
type: "page" | "heading" | "text";
|
|
25
|
+
}>;
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Merge search results by URL, grouping sections and content under each URL (without hash)
|
|
30
|
+
*/
|
|
31
|
+
export function mergeResultsByUrl(results: SearchResult[], baseUrl: string): MergedResult[] {
|
|
32
|
+
const mergedMap = new Map<string, MergedResult>();
|
|
33
|
+
|
|
34
|
+
for (const result of results) {
|
|
35
|
+
// Strip hash to get base URL for merging
|
|
36
|
+
const baseUrlWithoutHash = result.url.split("#")[0];
|
|
37
|
+
const existing = mergedMap.get(baseUrlWithoutHash);
|
|
38
|
+
|
|
39
|
+
if (existing) {
|
|
40
|
+
// Add this result as a section
|
|
41
|
+
existing.sections.push({
|
|
42
|
+
content: result.content,
|
|
43
|
+
type: result.type,
|
|
44
|
+
});
|
|
45
|
+
} else {
|
|
46
|
+
// Create new merged result
|
|
47
|
+
const urlWithMd = `${baseUrlWithoutHash}.md`;
|
|
48
|
+
|
|
49
|
+
const fullUrl = `https://${baseUrl}${baseUrlWithoutHash}`;
|
|
50
|
+
const fullUrlWithMd = `https://${baseUrl}${urlWithMd}`;
|
|
51
|
+
|
|
52
|
+
mergedMap.set(baseUrlWithoutHash, {
|
|
53
|
+
url: baseUrlWithoutHash,
|
|
54
|
+
urlWithMd,
|
|
55
|
+
fullUrl,
|
|
56
|
+
fullUrlWithMd,
|
|
57
|
+
title: result.type === "page" ? result.content : undefined,
|
|
58
|
+
breadcrumbs: result.breadcrumbs,
|
|
59
|
+
type: result.type,
|
|
60
|
+
sections: [
|
|
61
|
+
{
|
|
62
|
+
content: result.content,
|
|
63
|
+
type: result.type,
|
|
64
|
+
},
|
|
65
|
+
],
|
|
66
|
+
});
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
return Array.from(mergedMap.values());
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
/**
|
|
74
|
+
* Format merged results as markdown
|
|
75
|
+
*/
|
|
76
|
+
export function formatAsMarkdown(mergedResults: MergedResult[]): string {
|
|
77
|
+
const lines: string[] = [];
|
|
78
|
+
|
|
79
|
+
for (const result of mergedResults) {
|
|
80
|
+
// Title (use first section content if it's a page, or just use content)
|
|
81
|
+
const title = result.title || result.sections[0]?.content || "Untitled";
|
|
82
|
+
lines.push(`## Page: '${title}'`);
|
|
83
|
+
// Breadcrumbs
|
|
84
|
+
if (result.breadcrumbs && result.breadcrumbs.length > 0) {
|
|
85
|
+
lines.push(" " + result.breadcrumbs.join(" > "));
|
|
86
|
+
lines.push("");
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// Both URLs
|
|
90
|
+
lines.push("URLs:");
|
|
91
|
+
lines.push(` - ${result.fullUrl}`);
|
|
92
|
+
lines.push(` - ${result.fullUrlWithMd}`);
|
|
93
|
+
lines.push("");
|
|
94
|
+
|
|
95
|
+
// Show all sections found on this page
|
|
96
|
+
if (result.sections.length > 1) {
|
|
97
|
+
lines.push("Relevant sections:");
|
|
98
|
+
for (let i = 0; i < result.sections.length; i++) {
|
|
99
|
+
const section = result.sections[i];
|
|
100
|
+
// Skip the first section if it's just the page title repeated
|
|
101
|
+
if (i === 0 && result.type === "page" && section.content === result.title) {
|
|
102
|
+
continue;
|
|
103
|
+
}
|
|
104
|
+
lines.push(` - ${section.content}`);
|
|
105
|
+
}
|
|
106
|
+
lines.push("");
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
lines.push("---");
|
|
110
|
+
lines.push("");
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
return lines.join("\n");
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
/**
|
|
117
|
+
* Format merged results as JSON
|
|
118
|
+
*/
|
|
119
|
+
export function formatAsJson(mergedResults: MergedResult[]): string {
|
|
120
|
+
return JSON.stringify(mergedResults, null, 2);
|
|
121
|
+
}
|
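A short sketch of how these helpers behave on sample input; the two results below are made up, both pointing at the same page (one via a hash fragment), and the relative import is illustrative since the module lives under `src/utils/` in this package:

```typescript
import { mergeResultsByUrl, formatAsMarkdown } from "./format-search-results.js";

// Two hits on the same page collapse into one merged entry with two sections;
// the hash fragment is stripped before grouping.
const sampleResults = [
  { id: "1", type: "page" as const, content: "Defining Routes", url: "/docs/defining-routes" },
  { id: "2", type: "heading" as const, content: "Route handlers", url: "/docs/defining-routes#handlers" },
];

const merged = mergeResultsByUrl(sampleResults, "fragno.dev");
console.log(merged.length); // 1
console.log(merged[0].fullUrl); // https://fragno.dev/docs/defining-routes
console.log(formatAsMarkdown(merged));
```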