@public-ui/mcp 3.0.7-rc.3 → 4.0.0-alpha.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +339 -140
- package/dist/cli.cjs +37 -42
- package/dist/cli.d.cts +1 -0
- package/dist/cli.d.mts +1 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.mjs +36 -42
- package/dist/data.cjs +88 -0
- package/dist/data.d.cts +34 -0
- package/dist/data.d.mts +34 -0
- package/dist/data.d.ts +34 -0
- package/dist/data.mjs +83 -0
- package/dist/mcp.cjs +298 -0
- package/dist/mcp.d.cts +9 -0
- package/dist/mcp.d.mts +9 -0
- package/dist/mcp.d.ts +9 -0
- package/dist/mcp.mjs +291 -0
- package/dist/search.cjs +52 -0
- package/dist/search.d.cts +16 -0
- package/dist/search.d.mts +16 -0
- package/dist/search.d.ts +16 -0
- package/dist/search.mjs +46 -0
- package/package.json +48 -6
- package/shared/.gitkeep +1 -0
- package/shared/sample-index.json +1860 -0
- package/dist/api-handler.cjs +0 -269
- package/dist/api-handler.mjs +0 -265
- package/dist/chunks/sample-index-runtime.cjs +0 -327
- package/dist/chunks/sample-index-runtime.mjs +0 -320
- package/dist/index.cjs +0 -71
- package/dist/index.mjs +0 -66
- package/dist/sample-index.cjs +0 -104
- package/dist/sample-index.mjs +0 -96
- package/dist/samples.json +0 -1410
- package/dist/samples.mjs +0 -1412
package/dist/chunks/sample-index-runtime.cjs
@@ -1,327 +0,0 @@
-'use strict';
-
-const promises = require('node:fs/promises');
-const node_fs = require('node:fs');
-const path = require('node:path');
-const node_url = require('node:url');
-
-var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
-function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e.default : e; }
-
-const path__default = /*#__PURE__*/_interopDefaultCompat(path);
-
-const __dirname$1 = path__default.dirname(node_url.fileURLToPath((typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.tagName.toUpperCase() === 'SCRIPT' && _documentCurrentScript.src || new URL('chunks/sample-index-runtime.cjs', document.baseURI).href))));
-function findRepoRoot() {
-  let repoRoot = path__default.resolve(__dirname$1, "../../../..");
-  let current = __dirname$1;
-  let attempts = 0;
-  while (attempts < 10) {
-    const packagesDir = path__default.join(current, "packages");
-    if (node_fs.existsSync(packagesDir)) {
-      repoRoot = current;
-      break;
-    }
-    current = path__default.dirname(current);
-    attempts++;
-  }
-  return repoRoot;
-}
-const REPO_ROOT = findRepoRoot();
-const SAMPLE_ROOT = path__default.join(REPO_ROOT, "packages/samples/react/src");
-const ROUTE_FILENAMES = ["routes.ts", "routes.tsx"];
-const SAMPLE_EXTENSIONS = [".tsx", ".ts", ".jsx", ".js"];
-const MARKDOWN_EXTENSIONS = [".md", ".mdx"];
-const IGNORED_DIRECTORIES = /* @__PURE__ */ new Set([".git", ".github", ".nx", ".turbo", ".vercel", "dist", "build", "node_modules"]);
-const COMPONENTS_DIR = path__default.join(SAMPLE_ROOT, "components");
-const SCENARIOS_DIR = path__default.join(SAMPLE_ROOT, "scenarios");
-const DOCS_DIR = path__default.join(REPO_ROOT, "docs");
-const MARKDOWN_SOURCES = [
-  { directory: DOCS_DIR, groupPrefix: "docs", recursive: true },
-  { directory: REPO_ROOT, groupPrefix: "docs", recursive: false }
-];
-function computeCounts(entries) {
-  return entries.reduce(
-    (acc, entry) => {
-      const kind = entry.kind ?? "sample";
-      acc.total += 1;
-      acc.byKind.set(kind, (acc.byKind.get(kind) ?? 0) + 1);
-      return acc;
-    },
-    { total: 0, byKind: /* @__PURE__ */ new Map() }
-  );
-}
-function normalizeEntryId(entry) {
-  const kind = entry.kind ?? "sample";
-  const isDoc = kind === "doc";
-  const expectedPrefix = isDoc ? "doc" : "sample";
-  if (typeof entry.id === "string" && entry.id.startsWith(`${expectedPrefix}/`)) {
-    return entry;
-  }
-  const segments = [];
-  if (entry.group) {
-    const groupSegments = entry.group.split("/").filter(Boolean);
-    if (isDoc && groupSegments[0] === "docs") {
-      groupSegments.shift();
-    }
-    segments.push(...groupSegments);
-  }
-  if (entry.name) {
-    segments.push(entry.name);
-  } else if (entry.id) {
-    segments.push(...String(entry.id).split("/").filter(Boolean));
-  }
-  const normalized = {
-    ...entry,
-    id: [expectedPrefix, ...segments.filter(Boolean)].join("/")
-  };
-  return normalized;
-}
-class SampleIndex {
-  constructor(entries) {
-    const normalizedEntries = entries.map((entry) => normalizeEntryId(entry));
-    this.entries = normalizedEntries;
-    this.map = new Map(normalizedEntries.map((entry) => [entry.id, entry]));
-    this.generatedAt = /* @__PURE__ */ new Date();
-    const counts = computeCounts(normalizedEntries);
-    this.counts = {
-      total: counts.total,
-      byKind: counts.byKind,
-      totalSamples: counts.byKind.get("sample") ?? counts.total,
-      totalDocs: counts.byKind.get("doc") ?? 0
-    };
-  }
-  list(query, options = {}) {
-    const kinds = options.kinds ? new Set(options.kinds) : void 0;
-    const normalizeKind = (entry) => entry.kind ?? "sample";
-    let results = kinds ? this.entries.filter((entry) => kinds.has(normalizeKind(entry))) : this.entries;
-    if (!query) {
-      return results;
-    }
-    const normalized = query.trim().toLowerCase();
-    return results.filter(
-      (entry) => entry.id.toLowerCase().includes(normalized) || entry.group.toLowerCase().includes(normalized) || entry.name.toLowerCase().includes(normalized)
-    );
-  }
-  get(id) {
-    return this.map.get(id);
-  }
-}
-async function buildSampleIndex() {
-  console.log("[buildSampleIndex] Starting sample discovery...");
-  console.log("[buildSampleIndex] REPO_ROOT:", REPO_ROOT);
-  console.log("[buildSampleIndex] SAMPLE_ROOT:", SAMPLE_ROOT);
-  console.log("[buildSampleIndex] COMPONENTS_DIR:", COMPONENTS_DIR);
-  console.log("[buildSampleIndex] SCENARIOS_DIR:", SCENARIOS_DIR);
-  try {
-    await promises.access(SAMPLE_ROOT);
-    console.log("[buildSampleIndex] \u2705 SAMPLE_ROOT exists");
-  } catch (error) {
-    console.log("[buildSampleIndex] \u274C SAMPLE_ROOT does not exist:", error.message);
-    return new SampleIndex([]);
-  }
-  try {
-    await promises.access(COMPONENTS_DIR);
-    console.log("[buildSampleIndex] \u2705 COMPONENTS_DIR exists");
-  } catch (error) {
-    console.log("[buildSampleIndex] \u274C COMPONENTS_DIR does not exist:", error.message);
-  }
-  try {
-    await promises.access(SCENARIOS_DIR);
-    console.log("[buildSampleIndex] \u2705 SCENARIOS_DIR exists");
-  } catch (error) {
-    console.log("[buildSampleIndex] \u274C SCENARIOS_DIR does not exist:", error.message);
-  }
-  const routeFiles = await discoverRouteFiles();
-  console.log("[buildSampleIndex] Found route files:", routeFiles.length);
-  const entries = [];
-  for (const routeFile of routeFiles) {
-    const routeDir = path__default.dirname(routeFile);
-    const routeData = await parseRouteFile(routeFile);
-    for (const [group, value] of Object.entries(routeData)) {
-      if (!value || typeof value !== "object" || Array.isArray(value)) {
-        continue;
-      }
-      for (const [name, descriptor] of Object.entries(value)) {
-        const importPath = descriptor?.__path;
-        if (!importPath) {
-          continue;
-        }
-        const absolutePath = await resolveSamplePath(routeDir, importPath);
-        if (!absolutePath) {
-          continue;
-        }
-        const code = await promises.readFile(absolutePath, "utf8");
-        const sampleIdSegments = ["sample", group, name].filter(Boolean);
-        entries.push({
-          id: sampleIdSegments.join("/"),
-          group,
-          name,
-          path: path__default.relative(REPO_ROOT, absolutePath),
-          absolutePath,
-          code,
-          kind: "sample"
-        });
-      }
-    }
-  }
-  const markdownEntries = await collectMarkdownEntries();
-  entries.push(...markdownEntries);
-  entries.sort((a, b) => a.id.localeCompare(b.id));
-  console.log("[buildSampleIndex] Total entries found:", entries.length);
-  if (markdownEntries.length > 0) {
-    console.log("[buildSampleIndex] Markdown entries added:", markdownEntries.length);
-  }
-  return new SampleIndex(entries);
-}
-async function discoverRouteFiles() {
-  const files = [];
-  for (const baseDir of [COMPONENTS_DIR, SCENARIOS_DIR]) {
-    const rootRoute = await findRouteFile(baseDir);
-    if (rootRoute) {
-      files.push(rootRoute);
-    }
-    const directories = await safeReadDir(baseDir);
-    for (const entry of directories) {
-      const routeFile = await findRouteFile(path__default.join(baseDir, entry.name));
-      if (routeFile) {
-        files.push(routeFile);
-      }
-    }
-  }
-  return files;
-}
-async function safeReadDir(dir) {
-  try {
-    const entries = await promises.readdir(dir, { withFileTypes: true });
-    return entries.filter((entry) => entry.isDirectory());
-  } catch {
-    return [];
-  }
-}
-async function readDirEntries(dir) {
-  try {
-    return await promises.readdir(dir, { withFileTypes: true });
-  } catch {
-    return [];
-  }
-}
-async function findRouteFile(dir) {
-  for (const file of ROUTE_FILENAMES) {
-    const candidate = path__default.join(dir, file);
-    if (await pathExists(candidate)) {
-      return candidate;
-    }
-  }
-  return void 0;
-}
-async function pathExists(filePath) {
-  try {
-    await promises.access(filePath);
-    return true;
-  } catch {
-    return false;
-  }
-}
-async function parseRouteFile(filePath) {
-  const raw = await promises.readFile(filePath, "utf8");
-  const importDeclarations = [];
-  let content = raw.replace(/import\s+type[^;]+;\s*/g, "").replace(/import\s+\{\s*Routes\s*\}\s+from\s+['"][^'"]+['"];\s*/g, "");
-  content = content.replace(/import\s+\{\s*([^}]+)\s*\}\s+from\s+['"]([^'"]+)['"];\s*/g, (_, identifiers, source) => {
-    const names = identifiers.split(",").map((part) => part.trim()).filter(Boolean);
-    const declarations = names.map((name) => `const ${name} = { __path: '${source}' };`).join("\n");
-    importDeclarations.push(declarations);
-    return "";
-  });
-  content = content.replace(/export\s+const\s+\w+\s*:\s*\w+\s*=\s*/, "");
-  content = content.replace(/export\s+const\s+\w+\s*=\s*/, "");
-  content = content.trim();
-  if (content.endsWith(";")) {
-    content = content.slice(0, -1);
-  }
-  const fnBody = `${importDeclarations.join("\n")}
-return ${content};`;
-  const routes = new Function(fnBody)();
-  return routes && typeof routes === "object" ? routes : {};
-}
-async function collectMarkdownEntries() {
-  const entries = [];
-  for (const source of MARKDOWN_SOURCES) {
-    if (!await pathExists(source.directory)) {
-      continue;
-    }
-    const sourceEntries = await collectMarkdownFromDirectory(source.directory, {
-      groupPrefix: source.groupPrefix,
-      recursive: source.recursive,
-      relativeRoot: source.directory
-    });
-    entries.push(...sourceEntries);
-  }
-  return entries;
-}
-async function collectMarkdownFromDirectory(directory, { groupPrefix, recursive, relativeRoot }) {
-  const entries = [];
-  const dirents = await readDirEntries(directory);
-  for (const dirent of dirents) {
-    const absolutePath = path__default.join(directory, dirent.name);
-    if (dirent.isDirectory()) {
-      if (!recursive || IGNORED_DIRECTORIES.has(dirent.name)) {
-        continue;
-      }
-      const nestedEntries = await collectMarkdownFromDirectory(absolutePath, {
-        groupPrefix,
-        recursive: true,
-        relativeRoot
-      });
-      entries.push(...nestedEntries);
-      continue;
-    }
-    if (!dirent.isFile()) {
-      continue;
-    }
-    const extension = path__default.extname(dirent.name).toLowerCase();
-    if (!MARKDOWN_EXTENSIONS.includes(extension)) {
-      continue;
-    }
-    const code = await promises.readFile(absolutePath, "utf8");
-    const repoRelativePath = path__default.relative(REPO_ROOT, absolutePath);
-    const normalizedRepoPath = repoRelativePath.split(path__default.sep).join("/");
-    const relativePath = path__default.relative(relativeRoot, absolutePath).split(path__default.sep).join("/");
-    const withoutExtension = relativePath.replace(/\.[^.]+$/, "");
-    const segments = withoutExtension.split("/").filter(Boolean);
-    const name = segments.pop() ?? withoutExtension;
-    const group = segments.length ? `${groupPrefix}/${segments.join("/")}` : groupPrefix;
-    const docIdSegments = ["doc"];
-    if (group.startsWith(`${groupPrefix}/`)) {
-      const relativeGroup = group.slice(groupPrefix.length + 1);
-      if (relativeGroup) {
-        docIdSegments.push(...relativeGroup.split("/"));
-      }
-    } else if (group !== groupPrefix) {
-      docIdSegments.push(...group.split("/"));
-    }
-    docIdSegments.push(name);
-    entries.push({
-      id: docIdSegments.join("/"),
-      group,
-      name,
-      path: normalizedRepoPath,
-      absolutePath,
-      code,
-      kind: "doc"
-    });
-  }
-  return entries;
-}
-async function resolveSamplePath(baseDir, relativeImport) {
-  const normalized = relativeImport.replace(/['"];?$/g, "");
-  const base = normalized.startsWith(".") ? normalized : `./${normalized}`;
-  for (const extension of SAMPLE_EXTENSIONS) {
-    const candidate = path__default.resolve(baseDir, `${base}${extension}`);
-    if (await pathExists(candidate)) {
-      return candidate;
-    }
-  }
-  return void 0;
-}
-
-exports.buildSampleIndex = buildSampleIndex;

package/dist/chunks/sample-index-runtime.mjs
@@ -1,320 +0,0 @@
-import { access, readFile, readdir } from 'node:fs/promises';
-import { existsSync } from 'node:fs';
-import path from 'node:path';
-import { fileURLToPath } from 'node:url';
-
-const __dirname = path.dirname(fileURLToPath(import.meta.url));
-function findRepoRoot() {
-  let repoRoot = path.resolve(__dirname, "../../../..");
-  let current = __dirname;
-  let attempts = 0;
-  while (attempts < 10) {
-    const packagesDir = path.join(current, "packages");
-    if (existsSync(packagesDir)) {
-      repoRoot = current;
-      break;
-    }
-    current = path.dirname(current);
-    attempts++;
-  }
-  return repoRoot;
-}
-const REPO_ROOT = findRepoRoot();
-const SAMPLE_ROOT = path.join(REPO_ROOT, "packages/samples/react/src");
-const ROUTE_FILENAMES = ["routes.ts", "routes.tsx"];
-const SAMPLE_EXTENSIONS = [".tsx", ".ts", ".jsx", ".js"];
-const MARKDOWN_EXTENSIONS = [".md", ".mdx"];
-const IGNORED_DIRECTORIES = /* @__PURE__ */ new Set([".git", ".github", ".nx", ".turbo", ".vercel", "dist", "build", "node_modules"]);
-const COMPONENTS_DIR = path.join(SAMPLE_ROOT, "components");
-const SCENARIOS_DIR = path.join(SAMPLE_ROOT, "scenarios");
-const DOCS_DIR = path.join(REPO_ROOT, "docs");
-const MARKDOWN_SOURCES = [
-  { directory: DOCS_DIR, groupPrefix: "docs", recursive: true },
-  { directory: REPO_ROOT, groupPrefix: "docs", recursive: false }
-];
-function computeCounts(entries) {
-  return entries.reduce(
-    (acc, entry) => {
-      const kind = entry.kind ?? "sample";
-      acc.total += 1;
-      acc.byKind.set(kind, (acc.byKind.get(kind) ?? 0) + 1);
-      return acc;
-    },
-    { total: 0, byKind: /* @__PURE__ */ new Map() }
-  );
-}
-function normalizeEntryId(entry) {
-  const kind = entry.kind ?? "sample";
-  const isDoc = kind === "doc";
-  const expectedPrefix = isDoc ? "doc" : "sample";
-  if (typeof entry.id === "string" && entry.id.startsWith(`${expectedPrefix}/`)) {
-    return entry;
-  }
-  const segments = [];
-  if (entry.group) {
-    const groupSegments = entry.group.split("/").filter(Boolean);
-    if (isDoc && groupSegments[0] === "docs") {
-      groupSegments.shift();
-    }
-    segments.push(...groupSegments);
-  }
-  if (entry.name) {
-    segments.push(entry.name);
-  } else if (entry.id) {
-    segments.push(...String(entry.id).split("/").filter(Boolean));
-  }
-  const normalized = {
-    ...entry,
-    id: [expectedPrefix, ...segments.filter(Boolean)].join("/")
-  };
-  return normalized;
-}
-class SampleIndex {
-  constructor(entries) {
-    const normalizedEntries = entries.map((entry) => normalizeEntryId(entry));
-    this.entries = normalizedEntries;
-    this.map = new Map(normalizedEntries.map((entry) => [entry.id, entry]));
-    this.generatedAt = /* @__PURE__ */ new Date();
-    const counts = computeCounts(normalizedEntries);
-    this.counts = {
-      total: counts.total,
-      byKind: counts.byKind,
-      totalSamples: counts.byKind.get("sample") ?? counts.total,
-      totalDocs: counts.byKind.get("doc") ?? 0
-    };
-  }
-  list(query, options = {}) {
-    const kinds = options.kinds ? new Set(options.kinds) : void 0;
-    const normalizeKind = (entry) => entry.kind ?? "sample";
-    let results = kinds ? this.entries.filter((entry) => kinds.has(normalizeKind(entry))) : this.entries;
-    if (!query) {
-      return results;
-    }
-    const normalized = query.trim().toLowerCase();
-    return results.filter(
-      (entry) => entry.id.toLowerCase().includes(normalized) || entry.group.toLowerCase().includes(normalized) || entry.name.toLowerCase().includes(normalized)
-    );
-  }
-  get(id) {
-    return this.map.get(id);
-  }
-}
-async function buildSampleIndex() {
-  console.log("[buildSampleIndex] Starting sample discovery...");
-  console.log("[buildSampleIndex] REPO_ROOT:", REPO_ROOT);
-  console.log("[buildSampleIndex] SAMPLE_ROOT:", SAMPLE_ROOT);
-  console.log("[buildSampleIndex] COMPONENTS_DIR:", COMPONENTS_DIR);
-  console.log("[buildSampleIndex] SCENARIOS_DIR:", SCENARIOS_DIR);
-  try {
-    await access(SAMPLE_ROOT);
-    console.log("[buildSampleIndex] \u2705 SAMPLE_ROOT exists");
-  } catch (error) {
-    console.log("[buildSampleIndex] \u274C SAMPLE_ROOT does not exist:", error.message);
-    return new SampleIndex([]);
-  }
-  try {
-    await access(COMPONENTS_DIR);
-    console.log("[buildSampleIndex] \u2705 COMPONENTS_DIR exists");
-  } catch (error) {
-    console.log("[buildSampleIndex] \u274C COMPONENTS_DIR does not exist:", error.message);
-  }
-  try {
-    await access(SCENARIOS_DIR);
-    console.log("[buildSampleIndex] \u2705 SCENARIOS_DIR exists");
-  } catch (error) {
-    console.log("[buildSampleIndex] \u274C SCENARIOS_DIR does not exist:", error.message);
-  }
-  const routeFiles = await discoverRouteFiles();
-  console.log("[buildSampleIndex] Found route files:", routeFiles.length);
-  const entries = [];
-  for (const routeFile of routeFiles) {
-    const routeDir = path.dirname(routeFile);
-    const routeData = await parseRouteFile(routeFile);
-    for (const [group, value] of Object.entries(routeData)) {
-      if (!value || typeof value !== "object" || Array.isArray(value)) {
-        continue;
-      }
-      for (const [name, descriptor] of Object.entries(value)) {
-        const importPath = descriptor?.__path;
-        if (!importPath) {
-          continue;
-        }
-        const absolutePath = await resolveSamplePath(routeDir, importPath);
-        if (!absolutePath) {
-          continue;
-        }
-        const code = await readFile(absolutePath, "utf8");
-        const sampleIdSegments = ["sample", group, name].filter(Boolean);
-        entries.push({
-          id: sampleIdSegments.join("/"),
-          group,
-          name,
-          path: path.relative(REPO_ROOT, absolutePath),
-          absolutePath,
-          code,
-          kind: "sample"
-        });
-      }
-    }
-  }
-  const markdownEntries = await collectMarkdownEntries();
-  entries.push(...markdownEntries);
-  entries.sort((a, b) => a.id.localeCompare(b.id));
-  console.log("[buildSampleIndex] Total entries found:", entries.length);
-  if (markdownEntries.length > 0) {
-    console.log("[buildSampleIndex] Markdown entries added:", markdownEntries.length);
-  }
-  return new SampleIndex(entries);
-}
-async function discoverRouteFiles() {
-  const files = [];
-  for (const baseDir of [COMPONENTS_DIR, SCENARIOS_DIR]) {
-    const rootRoute = await findRouteFile(baseDir);
-    if (rootRoute) {
-      files.push(rootRoute);
-    }
-    const directories = await safeReadDir(baseDir);
-    for (const entry of directories) {
-      const routeFile = await findRouteFile(path.join(baseDir, entry.name));
-      if (routeFile) {
-        files.push(routeFile);
-      }
-    }
-  }
-  return files;
-}
-async function safeReadDir(dir) {
-  try {
-    const entries = await readdir(dir, { withFileTypes: true });
-    return entries.filter((entry) => entry.isDirectory());
-  } catch {
-    return [];
-  }
-}
-async function readDirEntries(dir) {
-  try {
-    return await readdir(dir, { withFileTypes: true });
-  } catch {
-    return [];
-  }
-}
-async function findRouteFile(dir) {
-  for (const file of ROUTE_FILENAMES) {
-    const candidate = path.join(dir, file);
-    if (await pathExists(candidate)) {
-      return candidate;
-    }
-  }
-  return void 0;
-}
-async function pathExists(filePath) {
-  try {
-    await access(filePath);
-    return true;
-  } catch {
-    return false;
-  }
-}
-async function parseRouteFile(filePath) {
-  const raw = await readFile(filePath, "utf8");
-  const importDeclarations = [];
-  let content = raw.replace(/import\s+type[^;]+;\s*/g, "").replace(/import\s+\{\s*Routes\s*\}\s+from\s+['"][^'"]+['"];\s*/g, "");
-  content = content.replace(/import\s+\{\s*([^}]+)\s*\}\s+from\s+['"]([^'"]+)['"];\s*/g, (_, identifiers, source) => {
-    const names = identifiers.split(",").map((part) => part.trim()).filter(Boolean);
-    const declarations = names.map((name) => `const ${name} = { __path: '${source}' };`).join("\n");
-    importDeclarations.push(declarations);
-    return "";
-  });
-  content = content.replace(/export\s+const\s+\w+\s*:\s*\w+\s*=\s*/, "");
-  content = content.replace(/export\s+const\s+\w+\s*=\s*/, "");
-  content = content.trim();
-  if (content.endsWith(";")) {
-    content = content.slice(0, -1);
-  }
-  const fnBody = `${importDeclarations.join("\n")}
-return ${content};`;
-  const routes = new Function(fnBody)();
-  return routes && typeof routes === "object" ? routes : {};
-}
-async function collectMarkdownEntries() {
-  const entries = [];
-  for (const source of MARKDOWN_SOURCES) {
-    if (!await pathExists(source.directory)) {
-      continue;
-    }
-    const sourceEntries = await collectMarkdownFromDirectory(source.directory, {
-      groupPrefix: source.groupPrefix,
-      recursive: source.recursive,
-      relativeRoot: source.directory
-    });
-    entries.push(...sourceEntries);
-  }
-  return entries;
-}
-async function collectMarkdownFromDirectory(directory, { groupPrefix, recursive, relativeRoot }) {
-  const entries = [];
-  const dirents = await readDirEntries(directory);
-  for (const dirent of dirents) {
-    const absolutePath = path.join(directory, dirent.name);
-    if (dirent.isDirectory()) {
-      if (!recursive || IGNORED_DIRECTORIES.has(dirent.name)) {
-        continue;
-      }
-      const nestedEntries = await collectMarkdownFromDirectory(absolutePath, {
-        groupPrefix,
-        recursive: true,
-        relativeRoot
-      });
-      entries.push(...nestedEntries);
-      continue;
-    }
-    if (!dirent.isFile()) {
-      continue;
-    }
-    const extension = path.extname(dirent.name).toLowerCase();
-    if (!MARKDOWN_EXTENSIONS.includes(extension)) {
-      continue;
-    }
-    const code = await readFile(absolutePath, "utf8");
-    const repoRelativePath = path.relative(REPO_ROOT, absolutePath);
-    const normalizedRepoPath = repoRelativePath.split(path.sep).join("/");
-    const relativePath = path.relative(relativeRoot, absolutePath).split(path.sep).join("/");
-    const withoutExtension = relativePath.replace(/\.[^.]+$/, "");
-    const segments = withoutExtension.split("/").filter(Boolean);
-    const name = segments.pop() ?? withoutExtension;
-    const group = segments.length ? `${groupPrefix}/${segments.join("/")}` : groupPrefix;
-    const docIdSegments = ["doc"];
-    if (group.startsWith(`${groupPrefix}/`)) {
-      const relativeGroup = group.slice(groupPrefix.length + 1);
-      if (relativeGroup) {
-        docIdSegments.push(...relativeGroup.split("/"));
-      }
-    } else if (group !== groupPrefix) {
-      docIdSegments.push(...group.split("/"));
-    }
-    docIdSegments.push(name);
-    entries.push({
-      id: docIdSegments.join("/"),
-      group,
-      name,
-      path: normalizedRepoPath,
-      absolutePath,
-      code,
-      kind: "doc"
-    });
-  }
-  return entries;
-}
-async function resolveSamplePath(baseDir, relativeImport) {
-  const normalized = relativeImport.replace(/['"];?$/g, "");
-  const base = normalized.startsWith(".") ? normalized : `./${normalized}`;
-  for (const extension of SAMPLE_EXTENSIONS) {
-    const candidate = path.resolve(baseDir, `${base}${extension}`);
-    if (await pathExists(candidate)) {
-      return candidate;
-    }
-  }
-  return void 0;
-}
-
-export { buildSampleIndex };
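
For context, here is a minimal sketch of how the removed sample-index runtime could have been consumed, based only on the `buildSampleIndex`, `SampleIndex.list`, and `SampleIndex.get` members visible in the hunks above. The relative import path and the `'button'` query are hypothetical; the entry points added in 4.0.0-alpha.8 (`dist/data`, `dist/search`, `dist/mcp`) are not shown in these hunks and may expose a different API.

```js
// Hypothetical consumer of the removed ESM chunk (the import path is illustrative only).
import { buildSampleIndex } from './sample-index-runtime.mjs';

// Discovers route-based samples plus markdown docs and returns a SampleIndex.
const index = await buildSampleIndex();

// Free-text filter combined with a kind filter, as implemented by SampleIndex.list(query, { kinds }).
const buttonSamples = index.list('button', { kinds: ['sample'] });

// Direct lookup by normalized id ("sample/<group>/<name>" or "doc/...").
const first = buttonSamples[0] && index.get(buttonSamples[0].id);

console.log(index.counts.total, index.counts.totalDocs, first ? first.path : undefined);
```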