@aigne/afs-fs 1.11.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +26 -0
- package/README.md +337 -0
- package/dist/_virtual/rolldown_runtime.cjs +29 -0
- package/dist/index.cjs +360 -0
- package/dist/index.d.cts +98 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.mts +98 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +359 -0
- package/dist/index.mjs.map +1 -0
- package/dist/utils/ripgrep.cjs +85 -0
- package/dist/utils/ripgrep.mjs +85 -0
- package/dist/utils/ripgrep.mjs.map +1 -0
- package/package.json +58 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,359 @@
|
|
|
1
|
+
import { searchWithRipgrep } from "./utils/ripgrep.mjs";
|
|
2
|
+
import { mkdir, readFile, readdir, rename, rm, stat, symlink, writeFile } from "node:fs/promises";
|
|
3
|
+
import { basename, dirname, isAbsolute, join, relative } from "node:path";
|
|
4
|
+
import { camelize, optionalize, zodParse } from "@aigne/afs/utils/zod";
|
|
5
|
+
import ignore from "ignore";
|
|
6
|
+
import { minimatch } from "minimatch";
|
|
7
|
+
import { z } from "zod";
|
|
8
|
+
|
|
9
|
+
//#region src/index.ts
|
|
10
|
+
// Hard cap on how many entries a single list()/search() call may return.
const LIST_MAX_LIMIT = 1000;

// Validation schema for the options accepted by AFSFS (camelize() also
// tolerates snake_case keys from config files).
const afsFSOptionsSchema = camelize(
	z.object({
		name: optionalize(z.string()),
		localPath: z.string().describe("The path to the local directory to mount"),
		description: optionalize(z.string().describe("A description of the mounted directory")),
		ignore: optionalize(z.array(z.string())),
		accessMode: optionalize(
			z.enum(["readonly", "readwrite"]).describe("Access mode for this module"),
		),
		agentSkills: optionalize(
			z.boolean().describe("Enable automatic agent skill scanning for this module"),
		),
	}),
);
|
|
19
|
+
var AFSFS = class AFSFS {
	/** Returns the zod schema used to validate AFSFS options. */
	static schema() {
		return afsFSOptionsSchema;
	}
	/**
	 * Factory used by the module loader: validates `parsed` against the schema
	 * and constructs an instance whose relative paths resolve against the
	 * directory containing `filepath`.
	 */
	static async load({ filepath, parsed }) {
		return new AFSFS({
			...await AFSFS.schema().parseAsync(parsed),
			cwd: dirname(filepath)
		});
	}
	/**
	 * Validates options, then normalizes `options.localPath`:
	 * - "." resolves to the current working directory;
	 * - "${CWD}" placeholders are substituted with process.cwd();
	 * - a leading "~/" expands against $HOME (empty string if HOME is unset);
	 * - remaining relative paths resolve against options.cwd (or process.cwd()).
	 * The normalized absolute path is written back into this.options.localPath.
	 */
	constructor(options) {
		this.options = options;
		zodParse(afsFSOptionsSchema, options);
		let localPath;
		if (options.localPath === ".") localPath = process.cwd();
		else {
			localPath = options.localPath.replaceAll("${CWD}", process.cwd());
			if (localPath.startsWith("~/")) localPath = join(process.env.HOME || "", localPath.slice(2));
			if (!isAbsolute(localPath)) localPath = join(options.cwd || process.cwd(), localPath);
		}
		// Fall back to the directory's basename, then "fs", for the module name.
		this.name = options.name || basename(localPath) || "fs";
		this.description = options.description;
		this.agentSkills = options.agentSkills;
		// Default to "readwrite", but "readonly" when agentSkills is enabled.
		this.accessMode = options.accessMode ?? (options.agentSkills ? "readonly" : "readwrite");
		this.options.localPath = localPath;
	}
	// Module display name (derived in the constructor).
	name;
	// Optional human-readable description of the mounted directory.
	description;
	// "readonly" or "readwrite"; see constructor for the default rule.
	accessMode;
	// Whether automatic agent-skill scanning is enabled for this module.
	agentSkills;
	// Resolves to true iff the mounted local path currently stats successfully.
	get localPathExists() {
		return stat(this.options.localPath).then(() => true).catch(() => false);
	}
	/**
	 * Detect MIME type based on file extension.
	 * Unknown or missing extensions fall back to "application/octet-stream".
	 */
	getMimeType(filePath) {
		return {
			png: "image/png",
			jpg: "image/jpeg",
			jpeg: "image/jpeg",
			gif: "image/gif",
			bmp: "image/bmp",
			webp: "image/webp",
			svg: "image/svg+xml",
			ico: "image/x-icon",
			pdf: "application/pdf",
			txt: "text/plain",
			md: "text/markdown",
			js: "text/javascript",
			ts: "text/typescript",
			json: "application/json",
			html: "text/html",
			css: "text/css",
			xml: "text/xml"
		}[basename(filePath).split(".").pop()?.toLowerCase() || ""] || "application/octet-stream";
	}
	/**
	 * Check if file is likely binary based on extension (heuristic only —
	 * the file content itself is never inspected).
	 */
	isBinaryFile(filePath) {
		const ext = basename(filePath).split(".").pop()?.toLowerCase();
		return [
			"png",
			"jpg",
			"jpeg",
			"gif",
			"bmp",
			"webp",
			"ico",
			"pdf",
			"zip",
			"tar",
			"gz",
			"exe",
			"dll",
			"so",
			"dylib",
			"wasm"
		].includes(ext || "");
	}
	/** Creates a symlink at `path` pointing to the mounted local directory (no-op if the mount does not exist). */
	async symlinkToPhysical(path) {
		if (await this.localPathExists) await symlink(this.options.localPath, path);
	}
	/**
	 * Lists entries under `path` (relative to the mount) via breadth-first
	 * traversal. Honors limit (capped at LIST_MAX_LIMIT), maxChildren,
	 * maxDepth (default 1), a minimatch `pattern` filter, and .gitignore rules
	 * unless disableGitignore is set. Gitignored directories are reported but
	 * not descended into; truncation is flagged via metadata.childrenTruncated.
	 */
	async list(path, options) {
		path = join("/", path);
		const limit = Math.min(options?.limit || LIST_MAX_LIMIT, LIST_MAX_LIMIT);
		const maxChildren = typeof options?.maxChildren === "number" ? options.maxChildren : Number.MAX_SAFE_INTEGER;
		const maxDepth = options?.maxDepth ?? 1;
		const disableGitignore = options?.disableGitignore ?? false;
		const pattern = options?.pattern;
		const basePath = join(this.options.localPath, path);
		const mountRoot = this.options.localPath;
		if (typeof maxChildren === "number" && maxChildren <= 0) throw new Error(`Invalid maxChildren: ${maxChildren}. Must be positive.`);
		const entries = [];
		// BFS queue seeded with the requested root.
		const queue = [];
		queue.push({
			fullPath: basePath,
			relativePath: path || "/",
			depth: 0,
			gitignored: false
		});
		while (true) {
			const item = queue.shift();
			if (!item) break;
			const { fullPath, relativePath, depth, gitignored } = item;
			const stats = await stat(fullPath);
			const isDirectory = stats.isDirectory();
			let childItemsWithStatus;
			// Children of gitignored directories are never read (they are listed
			// but not recursed into).
			if (isDirectory && !gitignored) {
				const items = (await readdir(fullPath)).sort();
				let ig = null;
				let ignoreBase = mountRoot;
				if (!disableGitignore) {
					const result = await this.loadIgnoreRules(fullPath, mountRoot);
					ig = result?.ig || null;
					ignoreBase = result?.ignoreBase || mountRoot;
				}
				// Tag each child with its gitignored status instead of filtering it out.
				childItemsWithStatus = items.map((childName) => {
					if (!ig) return {
						name: childName,
						gitignored: false
					};
					const itemFullPath = join(fullPath, childName);
					// gitignore patterns match relative to ignoreBase (git root or mount root).
					const itemRelativePath = relative(ignoreBase, itemFullPath);
					return {
						name: childName,
						// Check both file and directory (trailing-slash) forms.
						gitignored: ig.ignores(itemRelativePath) || ig.ignores(`${itemRelativePath}/`)
					};
				});
			}
			const metadata = {
				childrenCount: childItemsWithStatus?.length,
				type: isDirectory ? "directory" : "file",
				size: stats.size,
				gitignored: gitignored || void 0
			};
			if (!isDirectory) metadata.mimeType = this.getMimeType(fullPath);
			const entry = {
				id: relativePath,
				path: relativePath,
				createdAt: stats.birthtime,
				updatedAt: stats.mtime,
				metadata
			};
			// Apply the optional minimatch pattern filter before counting toward the limit.
			if (!pattern || minimatch(relativePath, pattern, { matchBase: true })) entries.push(entry);
			if (entries.length >= limit) {
				// This directory's children can no longer be processed — mark it truncated.
				if (isDirectory && entry.metadata) entry.metadata.childrenTruncated = true;
				break;
			}
			if (isDirectory && depth < maxDepth && childItemsWithStatus) {
				// Enforce maxChildren per directory and flag the cut.
				const itemsToProcess = childItemsWithStatus.length > maxChildren ? childItemsWithStatus.slice(0, maxChildren) : childItemsWithStatus;
				if (itemsToProcess.length < childItemsWithStatus.length && entry.metadata) entry.metadata.childrenTruncated = true;
				for (const child of itemsToProcess) queue.push({
					fullPath: join(fullPath, child.name),
					relativePath: join(relativePath, child.name),
					depth: depth + 1,
					gitignored: child.gitignored
				});
			}
		}
		return { data: entries };
	}
	/**
	 * Reads a single entry. Binary files (by extension) are returned as
	 * base64 with metadata.contentType = "base64"; text files as utf8;
	 * directories have no content. Errors are reported via `message`
	 * with `data: undefined` rather than thrown.
	 */
	async read(path, _options) {
		try {
			const fullPath = join(this.options.localPath, path);
			const stats = await stat(fullPath);
			let content;
			const metadata = {
				type: stats.isDirectory() ? "directory" : "file",
				size: stats.size
			};
			if (stats.isFile()) {
				const mimeType = this.getMimeType(fullPath);
				const isBinary = this.isBinaryFile(fullPath);
				metadata.mimeType = mimeType;
				if (isBinary) {
					content = (await readFile(fullPath)).toString("base64");
					metadata.contentType = "base64";
				} else content = await readFile(fullPath, "utf8");
			}
			return { data: {
				id: path,
				path,
				createdAt: stats.birthtime,
				updatedAt: stats.mtime,
				content,
				metadata
			} };
		} catch (error) {
			return {
				data: void 0,
				message: error.message
			};
		}
	}
	/**
	 * Writes (or appends, with options.append) an entry's content at `path`,
	 * creating parent directories as needed. Non-string content is serialized
	 * as pretty-printed JSON. Returns the written entry with fresh fs stats.
	 */
	async write(path, entry, options) {
		const fullPath = join(this.options.localPath, path);
		const append = options?.append ?? false;
		await mkdir(dirname(fullPath), { recursive: true });
		if (entry.content !== void 0) {
			let contentToWrite;
			if (typeof entry.content === "string") contentToWrite = entry.content;
			else contentToWrite = JSON.stringify(entry.content, null, 2);
			await writeFile(fullPath, contentToWrite, {
				encoding: "utf8",
				flag: append ? "a" : "w"
			});
		}
		// Re-stat after the write so timestamps/size reflect the new state.
		const stats = await stat(fullPath);
		return { data: {
			id: path,
			path,
			createdAt: stats.birthtime,
			updatedAt: stats.mtime,
			content: entry.content,
			summary: entry.summary,
			metadata: {
				...entry.metadata,
				type: stats.isDirectory() ? "directory" : "file",
				size: stats.size
			},
			userId: entry.userId,
			sessionId: entry.sessionId,
			linkTo: entry.linkTo
		} };
	}
	/**
	 * Deletes a file or directory. Directories require options.recursive;
	 * otherwise an Error is thrown before anything is removed.
	 */
	async delete(path, options) {
		const fullPath = join(this.options.localPath, path);
		const recursive = options?.recursive ?? false;
		if ((await stat(fullPath)).isDirectory() && !recursive) throw new Error(`Cannot delete directory '${path}' without recursive option. Set recursive: true to delete directories.`);
		await rm(fullPath, {
			recursive,
			force: true
		});
		return { message: `Successfully deleted: ${path}` };
	}
	/**
	 * Renames/moves `oldPath` to `newPath`. The source must exist (stat throws
	 * otherwise). An existing destination requires options.overwrite; the
	 * "already exists" Error deliberately propagates through the catch because
	 * its code is not ENOENT. Parent directories of the destination are created.
	 */
	async rename(oldPath, newPath, options) {
		const oldFullPath = join(this.options.localPath, oldPath);
		const newFullPath = join(this.options.localPath, newPath);
		const overwrite = options?.overwrite ?? false;
		await stat(oldFullPath);
		try {
			await stat(newFullPath);
			if (!overwrite) throw new Error(`Destination '${newPath}' already exists. Set overwrite: true to replace it.`);
		} catch (error) {
			// ENOENT means the destination is free; anything else (including the
			// overwrite error above) is rethrown.
			if (error.code !== "ENOENT") throw error;
		}
		await mkdir(dirname(newFullPath), { recursive: true });
		await rename(oldFullPath, newFullPath);
		return { message: `Successfully renamed '${oldPath}' to '${newPath}'` };
	}
	/**
	 * Full-text search under `path` using ripgrep. Returns at most `limit`
	 * (capped at LIST_MAX_LIMIT) unique files; the first matching line of each
	 * file becomes the entry's summary. Sets a truncation message when more
	 * matches remained.
	 */
	async search(path, query, options) {
		const limit = Math.min(options?.limit || LIST_MAX_LIMIT, LIST_MAX_LIMIT);
		const basePath = join(this.options.localPath, path);
		const matches = await searchWithRipgrep(basePath, query, options);
		const entries = [];
		// Dedupe: ripgrep emits one event per matching line, we want one entry per file.
		const processedFiles = /* @__PURE__ */ new Set();
		let hasMoreFiles = false;
		for (const match of matches) if (match.type === "match" && match.data.path) {
			const absolutePath = match.data.path.text;
			const itemRelativePath = join(path, relative(basePath, absolutePath));
			if (processedFiles.has(itemRelativePath)) continue;
			processedFiles.add(itemRelativePath);
			const stats = await stat(absolutePath);
			const entry = {
				id: itemRelativePath,
				path: itemRelativePath,
				createdAt: stats.birthtime,
				updatedAt: stats.mtime,
				summary: match.data.lines?.text,
				metadata: {
					type: "file",
					size: stats.size
				}
			};
			entries.push(entry);
			if (entries.length >= limit) {
				hasMoreFiles = true;
				break;
			}
		}
		return {
			data: entries,
			message: hasMoreFiles ? `Results truncated to limit ${limit}` : void 0
		};
	}
	/**
	 * Load gitignore rules from mountRoot down to checkPath.
	 * Accumulates rules from parent to child for proper inheritance.
	 * Stops at .git boundaries (submodules are independent repos).
	 * @param checkPath - The directory whose files we're checking
	 * @param mountRoot - The mounted local filesystem root (stop point for walking up)
	 * @returns An object with ignore instance and the base path for matching
	 */
	async loadIgnoreRules(checkPath, mountRoot) {
		const ig = ignore();
		// Walk up from checkPath to mountRoot collecting directories
		// (unshift keeps them in parent-to-child order) and remember the
		// nearest directory containing a .git entry.
		const dirsToCheck = [];
		let currentPath = checkPath;
		let gitBoundary = null;
		while (true) {
			dirsToCheck.unshift(currentPath);
			if (gitBoundary === null) try {
				await stat(join(currentPath, ".git"));
				gitBoundary = currentPath;
			} catch {}
			if (currentPath === mountRoot) break;
			const parentPath = dirname(currentPath);
			// Safety: stop if we escape the mount or hit the filesystem root.
			if (!currentPath.startsWith(mountRoot) || parentPath === currentPath) break;
			currentPath = parentPath;
		}
		// Only load rules from the git boundary down (submodules don't inherit
		// the parent repo's rules); otherwise from mountRoot down.
		// NOTE(review): `dir >= effectiveStart` is a lexicographic comparison
		// used as a descendant check — all candidates are ancestors/descendants
		// on one path chain, so descendants share the prefix and compare >=.
		const effectiveStart = gitBoundary || mountRoot;
		const filteredDirs = dirsToCheck.filter((dir) => dir >= effectiveStart);
		for (const dirPath of filteredDirs) try {
			const gitignoreContent = await readFile(join(dirPath, ".gitignore"), "utf8");
			// Patterns in a nested .gitignore are relative to that directory, so
			// they must be re-anchored relative to effectiveBase before ig.add().
			const effectiveBase = gitBoundary && dirPath >= gitBoundary ? gitBoundary : mountRoot;
			if (dirPath !== effectiveBase) {
				const prefix = relative(effectiveBase, dirPath);
				const prefixedLines = gitignoreContent.split("\n").map((line) => {
					const trimmed = line.trim();
					// Empty lines and comments pass through unchanged.
					if (!trimmed || trimmed.startsWith("#")) return line;
					// Negation patterns are passed through unprefixed (would need
					// special handling to re-anchor correctly).
					if (trimmed.startsWith("!")) return line;
					// Leading "/" anchors to this .gitignore's directory.
					if (trimmed.startsWith("/")) return `/${prefix}${trimmed}`;
					// Patterns containing "/" are path patterns — prefix directly.
					if (trimmed.includes("/")) return `${prefix}/${trimmed}`;
					// Bare names/globs match at any depth under this directory.
					return `${prefix}/**/${trimmed}`;
				});
				ig.add(prefixedLines.join("\n"));
			} else ig.add(gitignoreContent);
		} catch {}
		// Always ignore .git itself, plus any user-configured patterns.
		ig.add(".git");
		ig.add(this.options.ignore || []);
		return {
			ig,
			ignoreBase: effectiveStart
		};
	}
};
|
|
356
|
+
|
|
357
|
+
//#endregion
|
|
358
|
+
export { AFSFS };
|
|
359
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.mjs","names":[],"sources":["../src/index.ts"],"sourcesContent":["import { mkdir, readdir, readFile, rename, rm, stat, symlink, writeFile } from \"node:fs/promises\";\nimport { basename, dirname, isAbsolute, join, relative } from \"node:path\";\nimport type {\n AFSAccessMode,\n AFSDeleteOptions,\n AFSDeleteResult,\n AFSEntry,\n AFSListOptions,\n AFSListResult,\n AFSModule,\n AFSModuleClass,\n AFSModuleLoadParams,\n AFSReadOptions,\n AFSReadResult,\n AFSRenameOptions,\n AFSSearchOptions,\n AFSSearchResult,\n AFSWriteEntryPayload,\n AFSWriteOptions,\n AFSWriteResult,\n} from \"@aigne/afs\";\nimport { camelize, optionalize, zodParse } from \"@aigne/afs/utils/zod\";\nimport ignore from \"ignore\";\nimport { minimatch } from \"minimatch\";\nimport { z } from \"zod\";\nimport { searchWithRipgrep } from \"./utils/ripgrep.js\";\n\nconst LIST_MAX_LIMIT = 1000;\n\nexport interface AFSFSOptions {\n name?: string;\n localPath: string;\n description?: string;\n ignore?: string[];\n /**\n * Access mode for this module.\n * - \"readonly\": Only read operations are allowed\n * - \"readwrite\": All operations are allowed (default, unless agentSkills is enabled)\n * @default \"readwrite\" (or \"readonly\" when agentSkills is true)\n */\n accessMode?: AFSAccessMode;\n /**\n * Enable automatic agent skill scanning for this module.\n * When enabled, defaults accessMode to \"readonly\" if not explicitly set.\n * @default false\n */\n agentSkills?: boolean;\n}\n\nconst afsFSOptionsSchema = camelize(\n z.object({\n name: optionalize(z.string()),\n localPath: z.string().describe(\"The path to the local directory to mount\"),\n description: optionalize(z.string().describe(\"A description of the mounted directory\")),\n ignore: optionalize(z.array(z.string())),\n accessMode: optionalize(\n z.enum([\"readonly\", \"readwrite\"]).describe(\"Access mode for this module\"),\n ),\n agentSkills: optionalize(\n z.boolean().describe(\"Enable automatic agent skill scanning 
for this module\"),\n ),\n }),\n);\n\nexport class AFSFS implements AFSModule {\n static schema() {\n return afsFSOptionsSchema;\n }\n\n static async load({ filepath, parsed }: AFSModuleLoadParams) {\n const valid = await AFSFS.schema().parseAsync(parsed);\n\n return new AFSFS({ ...valid, cwd: dirname(filepath) });\n }\n\n constructor(public options: AFSFSOptions & { cwd?: string }) {\n zodParse(afsFSOptionsSchema, options);\n\n let localPath: string;\n\n if (options.localPath === \".\") {\n localPath = process.cwd();\n } else {\n localPath = options.localPath.replaceAll(\"${CWD}\", process.cwd());\n if (localPath.startsWith(\"~/\")) {\n localPath = join(process.env.HOME || \"\", localPath.slice(2));\n }\n if (!isAbsolute(localPath)) localPath = join(options.cwd || process.cwd(), localPath);\n }\n\n this.name = options.name || basename(localPath) || \"fs\";\n this.description = options.description;\n this.agentSkills = options.agentSkills;\n // Default to \"readwrite\", but \"readonly\" if agentSkills is enabled\n this.accessMode = options.accessMode ?? (options.agentSkills ? 
\"readonly\" : \"readwrite\");\n this.options.localPath = localPath;\n }\n\n name: string;\n\n description?: string;\n\n accessMode: AFSAccessMode;\n\n agentSkills?: boolean;\n\n private get localPathExists() {\n return stat(this.options.localPath)\n .then(() => true)\n .catch(() => false);\n }\n\n /**\n * Detect MIME type based on file extension\n */\n private getMimeType(filePath: string): string {\n const ext = basename(filePath).split(\".\").pop()?.toLowerCase();\n const mimeTypes: Record<string, string> = {\n // Images\n png: \"image/png\",\n jpg: \"image/jpeg\",\n jpeg: \"image/jpeg\",\n gif: \"image/gif\",\n bmp: \"image/bmp\",\n webp: \"image/webp\",\n svg: \"image/svg+xml\",\n ico: \"image/x-icon\",\n // Documents\n pdf: \"application/pdf\",\n txt: \"text/plain\",\n md: \"text/markdown\",\n // Code\n js: \"text/javascript\",\n ts: \"text/typescript\",\n json: \"application/json\",\n html: \"text/html\",\n css: \"text/css\",\n xml: \"text/xml\",\n };\n return mimeTypes[ext || \"\"] || \"application/octet-stream\";\n }\n\n /**\n * Check if file is likely binary based on extension\n */\n private isBinaryFile(filePath: string): boolean {\n const ext = basename(filePath).split(\".\").pop()?.toLowerCase();\n const binaryExtensions = [\n \"png\",\n \"jpg\",\n \"jpeg\",\n \"gif\",\n \"bmp\",\n \"webp\",\n \"ico\",\n \"pdf\",\n \"zip\",\n \"tar\",\n \"gz\",\n \"exe\",\n \"dll\",\n \"so\",\n \"dylib\",\n \"wasm\",\n ];\n return binaryExtensions.includes(ext || \"\");\n }\n\n async symlinkToPhysical(path: string): Promise<void> {\n if (await this.localPathExists) {\n await symlink(this.options.localPath, path);\n }\n }\n\n async list(path: string, options?: AFSListOptions): Promise<AFSListResult> {\n path = join(\"/\", path); // Ensure leading slash\n\n const limit = Math.min(options?.limit || LIST_MAX_LIMIT, LIST_MAX_LIMIT);\n const maxChildren =\n typeof options?.maxChildren === \"number\" ? 
options.maxChildren : Number.MAX_SAFE_INTEGER;\n const maxDepth = options?.maxDepth ?? 1;\n const disableGitignore = options?.disableGitignore ?? false;\n const pattern = options?.pattern;\n const basePath = join(this.options.localPath, path);\n const mountRoot = this.options.localPath;\n\n // Validate maxChildren\n if (typeof maxChildren === \"number\" && maxChildren <= 0) {\n throw new Error(`Invalid maxChildren: ${maxChildren}. Must be positive.`);\n }\n\n const entries: AFSEntry[] = [];\n\n // Queue for breadth-first traversal\n interface QueueItem {\n fullPath: string;\n relativePath: string;\n depth: number;\n gitignored?: boolean;\n }\n\n const queue: QueueItem[] = [];\n\n // Add root path to queue as starting point\n queue.push({\n fullPath: basePath,\n relativePath: path || \"/\",\n depth: 0,\n gitignored: false,\n });\n\n // Process queue in breadth-first order\n while (true) {\n const item = queue.shift();\n if (!item) break; // Queue is empty\n\n const { fullPath, relativePath, depth, gitignored } = item;\n\n // Stat and readdir once per item\n const stats = await stat(fullPath);\n const isDirectory = stats.isDirectory();\n let childItemsWithStatus: { name: string; gitignored: boolean }[] | undefined;\n\n // Don't read children of gitignored directories (they won't be recursed into)\n if (isDirectory && !gitignored) {\n const items = (await readdir(fullPath)).sort();\n\n // Load .gitignore rules for this directory if not disabled\n let ig: ReturnType<typeof ignore> | null = null;\n let ignoreBase: string = mountRoot;\n if (!disableGitignore) {\n const result = await this.loadIgnoreRules(fullPath, mountRoot);\n ig = result?.ig || null;\n ignoreBase = result?.ignoreBase || mountRoot;\n }\n\n // Mark items with their gitignored status (instead of filtering them out)\n childItemsWithStatus = items.map((childName) => {\n if (!ig) return { name: childName, gitignored: false };\n\n const itemFullPath = join(fullPath, childName);\n // Calculate path relative to 
ignoreBase (git root or mountRoot) for gitignore matching\n const itemRelativePath = relative(ignoreBase, itemFullPath);\n // Check both the file and directory (with trailing slash) patterns\n const isIgnored = ig.ignores(itemRelativePath) || ig.ignores(`${itemRelativePath}/`);\n return { name: childName, gitignored: isIgnored };\n });\n }\n\n const metadata: Record<string, any> = {\n childrenCount: childItemsWithStatus?.length,\n type: isDirectory ? \"directory\" : \"file\",\n size: stats.size,\n gitignored: gitignored || undefined,\n };\n\n // Add mimeType for files\n if (!isDirectory) {\n metadata.mimeType = this.getMimeType(fullPath);\n }\n\n const entry: AFSEntry = {\n id: relativePath,\n path: relativePath,\n createdAt: stats.birthtime,\n updatedAt: stats.mtime,\n metadata,\n };\n\n // Apply pattern filter if specified\n const matchesPattern = !pattern || minimatch(relativePath, pattern, { matchBase: true });\n if (matchesPattern) {\n entries.push(entry);\n }\n\n // Check if we'll hit the limit after adding this entry\n if (entries.length >= limit) {\n // Mark this directory as truncated since we can't process its children\n if (isDirectory && entry.metadata) {\n entry.metadata.childrenTruncated = true;\n }\n break;\n }\n\n // If it's a directory and depth allows, add children to queue\n if (isDirectory && depth < maxDepth && childItemsWithStatus) {\n // Apply maxChildren limit\n const itemsToProcess =\n childItemsWithStatus.length > maxChildren\n ? 
childItemsWithStatus.slice(0, maxChildren)\n : childItemsWithStatus;\n const isTruncated = itemsToProcess.length < childItemsWithStatus.length;\n\n // Mark directory as truncated if children were limited by maxChildren\n if (isTruncated && entry.metadata) {\n entry.metadata.childrenTruncated = true;\n }\n\n for (const child of itemsToProcess) {\n // Add child to queue; gitignored directories won't be recursed into\n // (they're added to show them, but their children won't be processed)\n queue.push({\n fullPath: join(fullPath, child.name),\n relativePath: join(relativePath, child.name),\n depth: depth + 1,\n gitignored: child.gitignored,\n });\n }\n }\n }\n\n return {\n data: entries,\n };\n }\n\n async read(path: string, _options?: AFSReadOptions): Promise<AFSReadResult> {\n try {\n const fullPath = join(this.options.localPath, path);\n\n const stats = await stat(fullPath);\n\n let content: string | undefined;\n const metadata: Record<string, any> = {\n type: stats.isDirectory() ? \"directory\" : \"file\",\n size: stats.size,\n };\n\n if (stats.isFile()) {\n // Determine mimeType based on file extension\n const mimeType = this.getMimeType(fullPath);\n const isBinary = this.isBinaryFile(fullPath);\n metadata.mimeType = mimeType;\n\n if (isBinary) {\n // For binary files, read as buffer and convert to base64\n const buffer = await readFile(fullPath);\n content = buffer.toString(\"base64\");\n // Mark content as base64 in metadata\n metadata.contentType = \"base64\";\n } else {\n // For text files, read as utf8\n content = await readFile(fullPath, \"utf8\");\n }\n }\n\n const entry: AFSEntry = {\n id: path,\n path: path,\n createdAt: stats.birthtime,\n updatedAt: stats.mtime,\n content,\n metadata,\n };\n\n return { data: entry };\n } catch (error) {\n return {\n data: undefined,\n message: error.message,\n };\n }\n }\n\n async write(\n path: string,\n entry: AFSWriteEntryPayload,\n options?: AFSWriteOptions,\n ): Promise<AFSWriteResult> {\n const fullPath = 
join(this.options.localPath, path);\n const append = options?.append ?? false;\n\n // Ensure parent directory exists\n const parentDir = dirname(fullPath);\n await mkdir(parentDir, { recursive: true });\n\n // Write content if provided\n if (entry.content !== undefined) {\n let contentToWrite: string;\n if (typeof entry.content === \"string\") {\n contentToWrite = entry.content;\n } else {\n contentToWrite = JSON.stringify(entry.content, null, 2);\n }\n await writeFile(fullPath, contentToWrite, {\n encoding: \"utf8\",\n flag: append ? \"a\" : \"w\",\n });\n }\n\n // Get file stats after writing\n const stats = await stat(fullPath);\n\n const writtenEntry: AFSEntry = {\n id: path,\n path: path,\n createdAt: stats.birthtime,\n updatedAt: stats.mtime,\n content: entry.content,\n summary: entry.summary,\n metadata: {\n ...entry.metadata,\n type: stats.isDirectory() ? \"directory\" : \"file\",\n size: stats.size,\n },\n userId: entry.userId,\n sessionId: entry.sessionId,\n linkTo: entry.linkTo,\n };\n\n return { data: writtenEntry };\n }\n\n async delete(path: string, options?: AFSDeleteOptions): Promise<AFSDeleteResult> {\n const fullPath = join(this.options.localPath, path);\n const recursive = options?.recursive ?? false;\n\n const stats = await stat(fullPath);\n\n // If it's a directory and recursive is false, throw an error\n if (stats.isDirectory() && !recursive) {\n throw new Error(\n `Cannot delete directory '${path}' without recursive option. Set recursive: true to delete directories.`,\n );\n }\n\n await rm(fullPath, { recursive, force: true });\n return { message: `Successfully deleted: ${path}` };\n }\n\n async rename(\n oldPath: string,\n newPath: string,\n options?: AFSRenameOptions,\n ): Promise<{ message?: string }> {\n const oldFullPath = join(this.options.localPath, oldPath);\n const newFullPath = join(this.options.localPath, newPath);\n const overwrite = options?.overwrite ?? 
false;\n\n // Check if source exists\n await stat(oldFullPath);\n\n // Check if destination exists\n try {\n await stat(newFullPath);\n if (!overwrite) {\n throw new Error(\n `Destination '${newPath}' already exists. Set overwrite: true to replace it.`,\n );\n }\n } catch (error) {\n // Destination doesn't exist, which is fine\n if ((error as NodeJS.ErrnoException).code !== \"ENOENT\") {\n throw error;\n }\n }\n\n // Ensure parent directory of new path exists\n const newParentDir = dirname(newFullPath);\n await mkdir(newParentDir, { recursive: true });\n\n // Perform the rename/move\n await rename(oldFullPath, newFullPath);\n\n return { message: `Successfully renamed '${oldPath}' to '${newPath}'` };\n }\n\n async search(path: string, query: string, options?: AFSSearchOptions): Promise<AFSSearchResult> {\n const limit = Math.min(options?.limit || LIST_MAX_LIMIT, LIST_MAX_LIMIT);\n const basePath = join(this.options.localPath, path);\n const matches = await searchWithRipgrep(basePath, query, options);\n\n const entries: AFSEntry[] = [];\n const processedFiles = new Set<string>();\n let hasMoreFiles = false;\n\n for (const match of matches) {\n if (match.type === \"match\" && match.data.path) {\n const absolutePath = match.data.path.text;\n const itemRelativePath = join(path, relative(basePath, absolutePath));\n\n // Avoid duplicate files\n if (processedFiles.has(itemRelativePath)) continue;\n processedFiles.add(itemRelativePath);\n\n const stats = await stat(absolutePath);\n\n const entry: AFSEntry = {\n id: itemRelativePath,\n path: itemRelativePath,\n createdAt: stats.birthtime,\n updatedAt: stats.mtime,\n summary: match.data.lines?.text,\n metadata: {\n type: \"file\",\n size: stats.size,\n },\n };\n\n entries.push(entry);\n\n if (entries.length >= limit) {\n hasMoreFiles = true;\n break;\n }\n }\n }\n\n return {\n data: entries,\n message: hasMoreFiles ? 
`Results truncated to limit ${limit}` : undefined,\n };\n }\n\n /**\n * Load gitignore rules from mountRoot down to checkPath.\n * Accumulates rules from parent to child for proper inheritance.\n * Stops at .git boundaries (submodules are independent repos).\n * @param checkPath - The directory whose files we're checking\n * @param mountRoot - The mounted local filesystem root (stop point for walking up)\n * @returns An object with ignore instance and the base path for matching\n */\n private async loadIgnoreRules(\n checkPath: string,\n mountRoot: string,\n ): Promise<{ ig: ReturnType<typeof ignore>; ignoreBase: string } | null> {\n const ig = ignore();\n\n // Collect directories from mountRoot down to checkPath\n const dirsToCheck: string[] = [];\n let currentPath = checkPath;\n let gitBoundary: string | null = null;\n\n // Walk up from checkPath to mountRoot, checking for .git boundaries\n while (true) {\n dirsToCheck.unshift(currentPath);\n\n // Check if this directory has a .git (it's a git repo boundary)\n if (gitBoundary === null) {\n try {\n const gitPath = join(currentPath, \".git\");\n await stat(gitPath);\n // Found .git, this is a git boundary\n gitBoundary = currentPath;\n } catch {\n // No .git at this level\n }\n }\n\n // Stop when we reach mountRoot\n if (currentPath === mountRoot) {\n break;\n }\n\n const parentPath = dirname(currentPath);\n // Safety check: stop if we go outside mountRoot or hit filesystem root\n if (!currentPath.startsWith(mountRoot) || parentPath === currentPath) {\n break;\n }\n currentPath = parentPath;\n }\n\n // If we found a git boundary, only load rules from that boundary down\n // Otherwise load from mountRoot down\n const effectiveStart = gitBoundary || mountRoot;\n const filteredDirs = dirsToCheck.filter((dir) => dir >= effectiveStart);\n\n // Load .gitignore files from effectiveStart down to checkPath (parent to child order)\n // This respects git boundaries - submodules don't inherit parent repo's rules\n for (const 
dirPath of filteredDirs) {\n try {\n const gitignorePath = join(dirPath, \".gitignore\");\n const gitignoreContent = await readFile(gitignorePath, \"utf8\");\n\n // Calculate the effective base for this .gitignore file\n // If there's a git boundary, paths are relative to that boundary when within it\n // Otherwise, paths are relative to mountRoot\n const effectiveBase = gitBoundary && dirPath >= gitBoundary ? gitBoundary : mountRoot;\n\n // If this is a subdirectory of the effective base, we need to prefix the rules\n // so they match correctly relative to effectiveBase\n if (dirPath !== effectiveBase) {\n const prefix = relative(effectiveBase, dirPath);\n // Split rules into lines and add prefix to each non-empty, non-comment line\n const lines = gitignoreContent.split(\"\\n\");\n const prefixedLines = lines.map((line) => {\n const trimmed = line.trim();\n // Skip empty lines and comments\n if (!trimmed || trimmed.startsWith(\"#\")) {\n return line;\n }\n // Skip negation patterns for now (would need special handling)\n if (trimmed.startsWith(\"!\")) {\n return line;\n }\n // Add prefix to the pattern\n // If pattern starts with /, it's relative to git root\n if (trimmed.startsWith(\"/\")) {\n return `/${prefix}${trimmed}`;\n }\n // If pattern contains /, it's a path pattern, prefix it directly\n if (trimmed.includes(\"/\")) {\n return `${prefix}/${trimmed}`;\n }\n // If it's a simple wildcard pattern like *.log or foo\n // Use **/ to match at any depth within the prefixed directory\n return `${prefix}/**/${trimmed}`;\n });\n ig.add(prefixedLines.join(\"\\n\"));\n } else {\n // This is effectiveBase's own .gitignore, use as-is\n ig.add(gitignoreContent);\n }\n } catch {\n // .gitignore doesn't exist at this level\n }\n }\n\n ig.add(\".git\");\n ig.add(this.options.ignore || []);\n\n return { ig, ignoreBase: effectiveStart };\n }\n}\n\nconst _typeCheck: AFSModuleClass<AFSFS, AFSFSOptions> = 
AFSFS;\n"],"mappings":";;;;;;;;;AA2BA,MAAM,iBAAiB;AAsBvB,MAAM,qBAAqB,SACzB,EAAE,OAAO;CACP,MAAM,YAAY,EAAE,QAAQ,CAAC;CAC7B,WAAW,EAAE,QAAQ,CAAC,SAAS,2CAA2C;CAC1E,aAAa,YAAY,EAAE,QAAQ,CAAC,SAAS,yCAAyC,CAAC;CACvF,QAAQ,YAAY,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;CACxC,YAAY,YACV,EAAE,KAAK,CAAC,YAAY,YAAY,CAAC,CAAC,SAAS,8BAA8B,CAC1E;CACD,aAAa,YACX,EAAE,SAAS,CAAC,SAAS,wDAAwD,CAC9E;CACF,CAAC,CACH;AAED,IAAa,QAAb,MAAa,MAA2B;CACtC,OAAO,SAAS;AACd,SAAO;;CAGT,aAAa,KAAK,EAAE,UAAU,UAA+B;AAG3D,SAAO,IAAI,MAAM;GAAE,GAFL,MAAM,MAAM,QAAQ,CAAC,WAAW,OAAO;GAExB,KAAK,QAAQ,SAAS;GAAE,CAAC;;CAGxD,YAAY,AAAO,SAA0C;EAA1C;AACjB,WAAS,oBAAoB,QAAQ;EAErC,IAAI;AAEJ,MAAI,QAAQ,cAAc,IACxB,aAAY,QAAQ,KAAK;OACpB;AACL,eAAY,QAAQ,UAAU,WAAW,UAAU,QAAQ,KAAK,CAAC;AACjE,OAAI,UAAU,WAAW,KAAK,CAC5B,aAAY,KAAK,QAAQ,IAAI,QAAQ,IAAI,UAAU,MAAM,EAAE,CAAC;AAE9D,OAAI,CAAC,WAAW,UAAU,CAAE,aAAY,KAAK,QAAQ,OAAO,QAAQ,KAAK,EAAE,UAAU;;AAGvF,OAAK,OAAO,QAAQ,QAAQ,SAAS,UAAU,IAAI;AACnD,OAAK,cAAc,QAAQ;AAC3B,OAAK,cAAc,QAAQ;AAE3B,OAAK,aAAa,QAAQ,eAAe,QAAQ,cAAc,aAAa;AAC5E,OAAK,QAAQ,YAAY;;CAG3B;CAEA;CAEA;CAEA;CAEA,IAAY,kBAAkB;AAC5B,SAAO,KAAK,KAAK,QAAQ,UAAU,CAChC,WAAW,KAAK,CAChB,YAAY,MAAM;;;;;CAMvB,AAAQ,YAAY,UAA0B;AAwB5C,SAtB0C;GAExC,KAAK;GACL,KAAK;GACL,MAAM;GACN,KAAK;GACL,KAAK;GACL,MAAM;GACN,KAAK;GACL,KAAK;GAEL,KAAK;GACL,KAAK;GACL,IAAI;GAEJ,IAAI;GACJ,IAAI;GACJ,MAAM;GACN,MAAM;GACN,KAAK;GACL,KAAK;GACN,CAtBW,SAAS,SAAS,CAAC,MAAM,IAAI,CAAC,KAAK,EAAE,aAAa,IAuBtC,OAAO;;;;;CAMjC,AAAQ,aAAa,UAA2B;EAC9C,MAAM,MAAM,SAAS,SAAS,CAAC,MAAM,IAAI,CAAC,KAAK,EAAE,aAAa;AAmB9D,SAlByB;GACvB;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACD,CACuB,SAAS,OAAO,GAAG;;CAG7C,MAAM,kBAAkB,MAA6B;AACnD,MAAI,MAAM,KAAK,gBACb,OAAM,QAAQ,KAAK,QAAQ,WAAW,KAAK;;CAI/C,MAAM,KAAK,MAAc,SAAkD;AACzE,SAAO,KAAK,KAAK,KAAK;EAEtB,MAAM,QAAQ,KAAK,IAAI,SAAS,SAAS,gBAAgB,eAAe;EACxE,MAAM,cACJ,OAAO,SAAS,gBAAgB,WAAW,QAAQ,cAAc,OAAO;EAC1E,MAAM,WAAW,SAAS,YAAY;EACtC,MAAM,mBAAmB,SAAS,oBAAoB;EACtD,MAAM,UAAU,SAAS;EACzB,MAAM,WAAW,KAAK,KAAK,QAAQ,WAAW,KAAK;EACnD,MAAM,YAAY,KAAK,QAAQ;AAG/B,MAAI,OAAO,gBAAg
B,YAAY,eAAe,EACpD,OAAM,IAAI,MAAM,wBAAwB,YAAY,qBAAqB;EAG3E,MAAM,UAAsB,EAAE;EAU9B,MAAM,QAAqB,EAAE;AAG7B,QAAM,KAAK;GACT,UAAU;GACV,cAAc,QAAQ;GACtB,OAAO;GACP,YAAY;GACb,CAAC;AAGF,SAAO,MAAM;GACX,MAAM,OAAO,MAAM,OAAO;AAC1B,OAAI,CAAC,KAAM;GAEX,MAAM,EAAE,UAAU,cAAc,OAAO,eAAe;GAGtD,MAAM,QAAQ,MAAM,KAAK,SAAS;GAClC,MAAM,cAAc,MAAM,aAAa;GACvC,IAAI;AAGJ,OAAI,eAAe,CAAC,YAAY;IAC9B,MAAM,SAAS,MAAM,QAAQ,SAAS,EAAE,MAAM;IAG9C,IAAI,KAAuC;IAC3C,IAAI,aAAqB;AACzB,QAAI,CAAC,kBAAkB;KACrB,MAAM,SAAS,MAAM,KAAK,gBAAgB,UAAU,UAAU;AAC9D,UAAK,QAAQ,MAAM;AACnB,kBAAa,QAAQ,cAAc;;AAIrC,2BAAuB,MAAM,KAAK,cAAc;AAC9C,SAAI,CAAC,GAAI,QAAO;MAAE,MAAM;MAAW,YAAY;MAAO;KAEtD,MAAM,eAAe,KAAK,UAAU,UAAU;KAE9C,MAAM,mBAAmB,SAAS,YAAY,aAAa;AAG3D,YAAO;MAAE,MAAM;MAAW,YADR,GAAG,QAAQ,iBAAiB,IAAI,GAAG,QAAQ,GAAG,iBAAiB,GAAG;MACnC;MACjD;;GAGJ,MAAM,WAAgC;IACpC,eAAe,sBAAsB;IACrC,MAAM,cAAc,cAAc;IAClC,MAAM,MAAM;IACZ,YAAY,cAAc;IAC3B;AAGD,OAAI,CAAC,YACH,UAAS,WAAW,KAAK,YAAY,SAAS;GAGhD,MAAM,QAAkB;IACtB,IAAI;IACJ,MAAM;IACN,WAAW,MAAM;IACjB,WAAW,MAAM;IACjB;IACD;AAID,OADuB,CAAC,WAAW,UAAU,cAAc,SAAS,EAAE,WAAW,MAAM,CAAC,CAEtF,SAAQ,KAAK,MAAM;AAIrB,OAAI,QAAQ,UAAU,OAAO;AAE3B,QAAI,eAAe,MAAM,SACvB,OAAM,SAAS,oBAAoB;AAErC;;AAIF,OAAI,eAAe,QAAQ,YAAY,sBAAsB;IAE3D,MAAM,iBACJ,qBAAqB,SAAS,cAC1B,qBAAqB,MAAM,GAAG,YAAY,GAC1C;AAIN,QAHoB,eAAe,SAAS,qBAAqB,UAG9C,MAAM,SACvB,OAAM,SAAS,oBAAoB;AAGrC,SAAK,MAAM,SAAS,eAGlB,OAAM,KAAK;KACT,UAAU,KAAK,UAAU,MAAM,KAAK;KACpC,cAAc,KAAK,cAAc,MAAM,KAAK;KAC5C,OAAO,QAAQ;KACf,YAAY,MAAM;KACnB,CAAC;;;AAKR,SAAO,EACL,MAAM,SACP;;CAGH,MAAM,KAAK,MAAc,UAAmD;AAC1E,MAAI;GACF,MAAM,WAAW,KAAK,KAAK,QAAQ,WAAW,KAAK;GAEnD,MAAM,QAAQ,MAAM,KAAK,SAAS;GAElC,IAAI;GACJ,MAAM,WAAgC;IACpC,MAAM,MAAM,aAAa,GAAG,cAAc;IAC1C,MAAM,MAAM;IACb;AAED,OAAI,MAAM,QAAQ,EAAE;IAElB,MAAM,WAAW,KAAK,YAAY,SAAS;IAC3C,MAAM,WAAW,KAAK,aAAa,SAAS;AAC5C,aAAS,WAAW;AAEpB,QAAI,UAAU;AAGZ,gBADe,MAAM,SAAS,SAAS,EACtB,SAAS,SAAS;AAEnC,cAAS,cAAc;UAGvB,WAAU,MAAM,SAAS,UAAU,OAAO;;AAa9C,UAAO,EAAE,MATe;IACtB,IAAI;IACE;IACN,WAAW,MAAM;IACjB,WAAW,MAAM;IACjB;IACA;IACD,EAEqB;WACf,OAAO;AACd,UAAO;IACL,MAAM;IACN,SAAS,
MAAM;IAChB;;;CAIL,MAAM,MACJ,MACA,OACA,SACyB;EACzB,MAAM,WAAW,KAAK,KAAK,QAAQ,WAAW,KAAK;EACnD,MAAM,SAAS,SAAS,UAAU;AAIlC,QAAM,MADY,QAAQ,SAAS,EACZ,EAAE,WAAW,MAAM,CAAC;AAG3C,MAAI,MAAM,YAAY,QAAW;GAC/B,IAAI;AACJ,OAAI,OAAO,MAAM,YAAY,SAC3B,kBAAiB,MAAM;OAEvB,kBAAiB,KAAK,UAAU,MAAM,SAAS,MAAM,EAAE;AAEzD,SAAM,UAAU,UAAU,gBAAgB;IACxC,UAAU;IACV,MAAM,SAAS,MAAM;IACtB,CAAC;;EAIJ,MAAM,QAAQ,MAAM,KAAK,SAAS;AAmBlC,SAAO,EAAE,MAjBsB;GAC7B,IAAI;GACE;GACN,WAAW,MAAM;GACjB,WAAW,MAAM;GACjB,SAAS,MAAM;GACf,SAAS,MAAM;GACf,UAAU;IACR,GAAG,MAAM;IACT,MAAM,MAAM,aAAa,GAAG,cAAc;IAC1C,MAAM,MAAM;IACb;GACD,QAAQ,MAAM;GACd,WAAW,MAAM;GACjB,QAAQ,MAAM;GACf,EAE4B;;CAG/B,MAAM,OAAO,MAAc,SAAsD;EAC/E,MAAM,WAAW,KAAK,KAAK,QAAQ,WAAW,KAAK;EACnD,MAAM,YAAY,SAAS,aAAa;AAKxC,OAHc,MAAM,KAAK,SAAS,EAGxB,aAAa,IAAI,CAAC,UAC1B,OAAM,IAAI,MACR,4BAA4B,KAAK,wEAClC;AAGH,QAAM,GAAG,UAAU;GAAE;GAAW,OAAO;GAAM,CAAC;AAC9C,SAAO,EAAE,SAAS,yBAAyB,QAAQ;;CAGrD,MAAM,OACJ,SACA,SACA,SAC+B;EAC/B,MAAM,cAAc,KAAK,KAAK,QAAQ,WAAW,QAAQ;EACzD,MAAM,cAAc,KAAK,KAAK,QAAQ,WAAW,QAAQ;EACzD,MAAM,YAAY,SAAS,aAAa;AAGxC,QAAM,KAAK,YAAY;AAGvB,MAAI;AACF,SAAM,KAAK,YAAY;AACvB,OAAI,CAAC,UACH,OAAM,IAAI,MACR,gBAAgB,QAAQ,sDACzB;WAEI,OAAO;AAEd,OAAK,MAAgC,SAAS,SAC5C,OAAM;;AAMV,QAAM,MADe,QAAQ,YAAY,EACf,EAAE,WAAW,MAAM,CAAC;AAG9C,QAAM,OAAO,aAAa,YAAY;AAEtC,SAAO,EAAE,SAAS,yBAAyB,QAAQ,QAAQ,QAAQ,IAAI;;CAGzE,MAAM,OAAO,MAAc,OAAe,SAAsD;EAC9F,MAAM,QAAQ,KAAK,IAAI,SAAS,SAAS,gBAAgB,eAAe;EACxE,MAAM,WAAW,KAAK,KAAK,QAAQ,WAAW,KAAK;EACnD,MAAM,UAAU,MAAM,kBAAkB,UAAU,OAAO,QAAQ;EAEjE,MAAM,UAAsB,EAAE;EAC9B,MAAM,iCAAiB,IAAI,KAAa;EACxC,IAAI,eAAe;AAEnB,OAAK,MAAM,SAAS,QAClB,KAAI,MAAM,SAAS,WAAW,MAAM,KAAK,MAAM;GAC7C,MAAM,eAAe,MAAM,KAAK,KAAK;GACrC,MAAM,mBAAmB,KAAK,MAAM,SAAS,UAAU,aAAa,CAAC;AAGrE,OAAI,eAAe,IAAI,iBAAiB,CAAE;AAC1C,kBAAe,IAAI,iBAAiB;GAEpC,MAAM,QAAQ,MAAM,KAAK,aAAa;GAEtC,MAAM,QAAkB;IACtB,IAAI;IACJ,MAAM;IACN,WAAW,MAAM;IACjB,WAAW,MAAM;IACjB,SAAS,MAAM,KAAK,OAAO;IAC3B,UAAU;KACR,MAAM;KACN,MAAM,MAAM;KACb;IACF;AAED,WAAQ,KAAK,MAAM;AAEnB,OAAI,QAAQ,UAAU,OAAO;AAC3B,mBAAe;AACf;;;AAKN,SAAO;GACL,MAAM;GACN,SAAS,eAAe,8BA
A8B,UAAU;GACjE;;;;;;;;;;CAWH,MAAc,gBACZ,WACA,WACuE;EACvE,MAAM,KAAK,QAAQ;EAGnB,MAAM,cAAwB,EAAE;EAChC,IAAI,cAAc;EAClB,IAAI,cAA6B;AAGjC,SAAO,MAAM;AACX,eAAY,QAAQ,YAAY;AAGhC,OAAI,gBAAgB,KAClB,KAAI;AAEF,UAAM,KADU,KAAK,aAAa,OAAO,CACtB;AAEnB,kBAAc;WACR;AAMV,OAAI,gBAAgB,UAClB;GAGF,MAAM,aAAa,QAAQ,YAAY;AAEvC,OAAI,CAAC,YAAY,WAAW,UAAU,IAAI,eAAe,YACvD;AAEF,iBAAc;;EAKhB,MAAM,iBAAiB,eAAe;EACtC,MAAM,eAAe,YAAY,QAAQ,QAAQ,OAAO,eAAe;AAIvE,OAAK,MAAM,WAAW,aACpB,KAAI;GAEF,MAAM,mBAAmB,MAAM,SADT,KAAK,SAAS,aAAa,EACM,OAAO;GAK9D,MAAM,gBAAgB,eAAe,WAAW,cAAc,cAAc;AAI5E,OAAI,YAAY,eAAe;IAC7B,MAAM,SAAS,SAAS,eAAe,QAAQ;IAG/C,MAAM,gBADQ,iBAAiB,MAAM,KAAK,CACd,KAAK,SAAS;KACxC,MAAM,UAAU,KAAK,MAAM;AAE3B,SAAI,CAAC,WAAW,QAAQ,WAAW,IAAI,CACrC,QAAO;AAGT,SAAI,QAAQ,WAAW,IAAI,CACzB,QAAO;AAIT,SAAI,QAAQ,WAAW,IAAI,CACzB,QAAO,IAAI,SAAS;AAGtB,SAAI,QAAQ,SAAS,IAAI,CACvB,QAAO,GAAG,OAAO,GAAG;AAItB,YAAO,GAAG,OAAO,MAAM;MACvB;AACF,OAAG,IAAI,cAAc,KAAK,KAAK,CAAC;SAGhC,IAAG,IAAI,iBAAiB;UAEpB;AAKV,KAAG,IAAI,OAAO;AACd,KAAG,IAAI,KAAK,QAAQ,UAAU,EAAE,CAAC;AAEjC,SAAO;GAAE;GAAI,YAAY;GAAgB"}
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
|
|
2
|
+
let node_fs_promises = require("node:fs/promises");
|
|
3
|
+
let node_path = require("node:path");
|
|
4
|
+
let node_child_process = require("node:child_process");
|
|
5
|
+
let _vscode_ripgrep = require("@vscode/ripgrep");
|
|
6
|
+
|
|
7
|
+
//#region src/utils/ripgrep.ts
|
|
8
|
+
async function searchWithRipgrep(basePath, query, options) {
|
|
9
|
+
await ensureNativeDependenciesInstalled();
|
|
10
|
+
return new Promise((resolve, reject) => {
|
|
11
|
+
const args = [
|
|
12
|
+
"--json",
|
|
13
|
+
"--no-heading",
|
|
14
|
+
"--with-filename",
|
|
15
|
+
query,
|
|
16
|
+
basePath
|
|
17
|
+
];
|
|
18
|
+
if (!options?.caseSensitive) args.unshift("-i");
|
|
19
|
+
const rg = (0, node_child_process.spawn)(_vscode_ripgrep.rgPath, args);
|
|
20
|
+
let output = "";
|
|
21
|
+
let errorOutput = "";
|
|
22
|
+
rg.stdout.on("data", (data) => {
|
|
23
|
+
output += data.toString();
|
|
24
|
+
});
|
|
25
|
+
rg.stderr.on("data", (data) => {
|
|
26
|
+
errorOutput += data.toString();
|
|
27
|
+
});
|
|
28
|
+
rg.on("close", (code) => {
|
|
29
|
+
if (code === 0 || code === 1) try {
|
|
30
|
+
resolve(output.trim().split("\n").filter((line) => line).map((line) => JSON.parse(line)));
|
|
31
|
+
} catch (error) {
|
|
32
|
+
reject(/* @__PURE__ */ new Error(`Failed to parse ripgrep output: ${error}`));
|
|
33
|
+
}
|
|
34
|
+
else reject(/* @__PURE__ */ new Error(`Ripgrep failed with code ${code}: ${errorOutput}`));
|
|
35
|
+
});
|
|
36
|
+
rg.on("error", (error) => {
|
|
37
|
+
reject(/* @__PURE__ */ new Error(`Failed to spawn ripgrep: ${error}`));
|
|
38
|
+
});
|
|
39
|
+
});
|
|
40
|
+
}
|
|
41
|
+
let installPromise = null;
|
|
42
|
+
async function ensureNativeDependenciesInstalled() {
|
|
43
|
+
if (installPromise) return installPromise;
|
|
44
|
+
installPromise = (async () => {
|
|
45
|
+
if (await isFileExists(_vscode_ripgrep.rgPath)) return;
|
|
46
|
+
const binFolder = (0, node_path.dirname)(_vscode_ripgrep.rgPath);
|
|
47
|
+
await (0, node_fs_promises.rm)(binFolder, {
|
|
48
|
+
recursive: true,
|
|
49
|
+
force: true
|
|
50
|
+
});
|
|
51
|
+
await new Promise((resolve, reject) => {
|
|
52
|
+
const child = (0, node_child_process.spawn)("npm", ["run", "postinstall"], {
|
|
53
|
+
cwd: (0, node_path.dirname)(binFolder),
|
|
54
|
+
stdio: "pipe",
|
|
55
|
+
shell: process.platform === "win32"
|
|
56
|
+
});
|
|
57
|
+
let stderr = "";
|
|
58
|
+
child.stderr.on("data", (data) => {
|
|
59
|
+
const str = data.toString();
|
|
60
|
+
stderr += str;
|
|
61
|
+
});
|
|
62
|
+
child.on("error", (error) => reject(error));
|
|
63
|
+
child.on("exit", (code) => {
|
|
64
|
+
if (code === 0) resolve();
|
|
65
|
+
else reject(/* @__PURE__ */ new Error(`npm install failed with code ${code} ${stderr}`));
|
|
66
|
+
});
|
|
67
|
+
});
|
|
68
|
+
})().catch((error) => {
|
|
69
|
+
console.error(`Failed to install native dependencies: ${error}`);
|
|
70
|
+
installPromise = null;
|
|
71
|
+
});
|
|
72
|
+
return installPromise;
|
|
73
|
+
}
|
|
74
|
+
async function isFileExists(path) {
|
|
75
|
+
try {
|
|
76
|
+
await (0, node_fs_promises.stat)(path);
|
|
77
|
+
return true;
|
|
78
|
+
} catch (error) {
|
|
79
|
+
if (error.code === "ENOENT") return false;
|
|
80
|
+
throw error;
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
//#endregion
|
|
85
|
+
exports.searchWithRipgrep = searchWithRipgrep;
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import { rm, stat } from "node:fs/promises";
|
|
2
|
+
import { dirname } from "node:path";
|
|
3
|
+
import { spawn } from "node:child_process";
|
|
4
|
+
import { rgPath } from "@vscode/ripgrep";
|
|
5
|
+
|
|
6
|
+
//#region src/utils/ripgrep.ts
|
|
7
|
+
async function searchWithRipgrep(basePath, query, options) {
|
|
8
|
+
await ensureNativeDependenciesInstalled();
|
|
9
|
+
return new Promise((resolve, reject) => {
|
|
10
|
+
const args = [
|
|
11
|
+
"--json",
|
|
12
|
+
"--no-heading",
|
|
13
|
+
"--with-filename",
|
|
14
|
+
query,
|
|
15
|
+
basePath
|
|
16
|
+
];
|
|
17
|
+
if (!options?.caseSensitive) args.unshift("-i");
|
|
18
|
+
const rg = spawn(rgPath, args);
|
|
19
|
+
let output = "";
|
|
20
|
+
let errorOutput = "";
|
|
21
|
+
rg.stdout.on("data", (data) => {
|
|
22
|
+
output += data.toString();
|
|
23
|
+
});
|
|
24
|
+
rg.stderr.on("data", (data) => {
|
|
25
|
+
errorOutput += data.toString();
|
|
26
|
+
});
|
|
27
|
+
rg.on("close", (code) => {
|
|
28
|
+
if (code === 0 || code === 1) try {
|
|
29
|
+
resolve(output.trim().split("\n").filter((line) => line).map((line) => JSON.parse(line)));
|
|
30
|
+
} catch (error) {
|
|
31
|
+
reject(/* @__PURE__ */ new Error(`Failed to parse ripgrep output: ${error}`));
|
|
32
|
+
}
|
|
33
|
+
else reject(/* @__PURE__ */ new Error(`Ripgrep failed with code ${code}: ${errorOutput}`));
|
|
34
|
+
});
|
|
35
|
+
rg.on("error", (error) => {
|
|
36
|
+
reject(/* @__PURE__ */ new Error(`Failed to spawn ripgrep: ${error}`));
|
|
37
|
+
});
|
|
38
|
+
});
|
|
39
|
+
}
|
|
40
|
+
let installPromise = null;
|
|
41
|
+
async function ensureNativeDependenciesInstalled() {
|
|
42
|
+
if (installPromise) return installPromise;
|
|
43
|
+
installPromise = (async () => {
|
|
44
|
+
if (await isFileExists(rgPath)) return;
|
|
45
|
+
const binFolder = dirname(rgPath);
|
|
46
|
+
await rm(binFolder, {
|
|
47
|
+
recursive: true,
|
|
48
|
+
force: true
|
|
49
|
+
});
|
|
50
|
+
await new Promise((resolve, reject) => {
|
|
51
|
+
const child = spawn("npm", ["run", "postinstall"], {
|
|
52
|
+
cwd: dirname(binFolder),
|
|
53
|
+
stdio: "pipe",
|
|
54
|
+
shell: process.platform === "win32"
|
|
55
|
+
});
|
|
56
|
+
let stderr = "";
|
|
57
|
+
child.stderr.on("data", (data) => {
|
|
58
|
+
const str = data.toString();
|
|
59
|
+
stderr += str;
|
|
60
|
+
});
|
|
61
|
+
child.on("error", (error) => reject(error));
|
|
62
|
+
child.on("exit", (code) => {
|
|
63
|
+
if (code === 0) resolve();
|
|
64
|
+
else reject(/* @__PURE__ */ new Error(`npm install failed with code ${code} ${stderr}`));
|
|
65
|
+
});
|
|
66
|
+
});
|
|
67
|
+
})().catch((error) => {
|
|
68
|
+
console.error(`Failed to install native dependencies: ${error}`);
|
|
69
|
+
installPromise = null;
|
|
70
|
+
});
|
|
71
|
+
return installPromise;
|
|
72
|
+
}
|
|
73
|
+
async function isFileExists(path) {
|
|
74
|
+
try {
|
|
75
|
+
await stat(path);
|
|
76
|
+
return true;
|
|
77
|
+
} catch (error) {
|
|
78
|
+
if (error.code === "ENOENT") return false;
|
|
79
|
+
throw error;
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
//#endregion
|
|
84
|
+
export { searchWithRipgrep };
|
|
85
|
+
//# sourceMappingURL=ripgrep.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ripgrep.mjs","names":[],"sources":["../../src/utils/ripgrep.ts"],"sourcesContent":["import { spawn } from \"node:child_process\";\nimport { rm, stat } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\nimport { rgPath } from \"@vscode/ripgrep\";\n\nexport interface RipgrepMatch {\n type: \"match\" | \"begin\" | \"end\" | \"context\";\n data: {\n path?: { text: string };\n lines?: { text: string };\n line_number?: number;\n absolute_offset?: number;\n submatches?: Array<{\n match: { text: string };\n start: number;\n end: number;\n }>;\n };\n}\n\nexport interface SearchOptions {\n caseSensitive?: boolean;\n}\n\nexport async function searchWithRipgrep(\n basePath: string,\n query: string,\n options?: SearchOptions,\n): Promise<RipgrepMatch[]> {\n await ensureNativeDependenciesInstalled();\n\n return new Promise((resolve, reject) => {\n const args = [\"--json\", \"--no-heading\", \"--with-filename\", query, basePath];\n if (!options?.caseSensitive) {\n args.unshift(\"-i\");\n }\n\n const rg = spawn(rgPath, args);\n let output = \"\";\n let errorOutput = \"\";\n\n rg.stdout.on(\"data\", (data: Buffer) => {\n output += data.toString();\n });\n\n rg.stderr.on(\"data\", (data: Buffer) => {\n errorOutput += data.toString();\n });\n\n rg.on(\"close\", (code) => {\n if (code === 0 || code === 1) {\n // 0 = found, 1 = not found\n try {\n const lines = output\n .trim()\n .split(\"\\n\")\n .filter((line) => line);\n const matches: RipgrepMatch[] = lines.map((line) => JSON.parse(line));\n resolve(matches);\n } catch (error) {\n reject(new Error(`Failed to parse ripgrep output: ${error}`));\n }\n } else {\n reject(new Error(`Ripgrep failed with code ${code}: ${errorOutput}`));\n }\n });\n\n rg.on(\"error\", (error) => {\n reject(new Error(`Failed to spawn ripgrep: ${error}`));\n });\n });\n}\n\nlet installPromise: Promise<void> | null = null;\n\nasync function ensureNativeDependenciesInstalled() {\n if (installPromise) return 
installPromise;\n\n installPromise = (async () => {\n if (await isFileExists(rgPath)) {\n return;\n }\n\n // Remove the existing bin folder to ensure a clean install\n const binFolder = dirname(rgPath);\n await rm(binFolder, { recursive: true, force: true });\n\n await new Promise<void>((resolve, reject) => {\n const child = spawn(\"npm\", [\"run\", \"postinstall\"], {\n cwd: dirname(binFolder),\n stdio: \"pipe\",\n shell: process.platform === \"win32\",\n });\n\n let stderr = \"\";\n child.stderr.on(\"data\", (data) => {\n const str = data.toString();\n stderr += str;\n });\n\n child.on(\"error\", (error) => reject(error));\n\n child.on(\"exit\", (code) => {\n if (code === 0) resolve();\n else reject(new Error(`npm install failed with code ${code} ${stderr}`));\n });\n });\n })().catch((error) => {\n console.error(`Failed to install native dependencies: ${error}`);\n installPromise = null;\n });\n\n return installPromise;\n}\n\nasync function isFileExists(path: string): Promise<boolean> {\n try {\n await stat(path);\n return true;\n } catch (error) {\n if (error.code === \"ENOENT\") {\n return false;\n }\n throw error;\n 
}\n}\n"],"mappings":";;;;;;AAwBA,eAAsB,kBACpB,UACA,OACA,SACyB;AACzB,OAAM,mCAAmC;AAEzC,QAAO,IAAI,SAAS,SAAS,WAAW;EACtC,MAAM,OAAO;GAAC;GAAU;GAAgB;GAAmB;GAAO;GAAS;AAC3E,MAAI,CAAC,SAAS,cACZ,MAAK,QAAQ,KAAK;EAGpB,MAAM,KAAK,MAAM,QAAQ,KAAK;EAC9B,IAAI,SAAS;EACb,IAAI,cAAc;AAElB,KAAG,OAAO,GAAG,SAAS,SAAiB;AACrC,aAAU,KAAK,UAAU;IACzB;AAEF,KAAG,OAAO,GAAG,SAAS,SAAiB;AACrC,kBAAe,KAAK,UAAU;IAC9B;AAEF,KAAG,GAAG,UAAU,SAAS;AACvB,OAAI,SAAS,KAAK,SAAS,EAEzB,KAAI;AAMF,YALc,OACX,MAAM,CACN,MAAM,KAAK,CACX,QAAQ,SAAS,KAAK,CACa,KAAK,SAAS,KAAK,MAAM,KAAK,CAAC,CACrD;YACT,OAAO;AACd,2BAAO,IAAI,MAAM,mCAAmC,QAAQ,CAAC;;OAG/D,wBAAO,IAAI,MAAM,4BAA4B,KAAK,IAAI,cAAc,CAAC;IAEvE;AAEF,KAAG,GAAG,UAAU,UAAU;AACxB,0BAAO,IAAI,MAAM,4BAA4B,QAAQ,CAAC;IACtD;GACF;;AAGJ,IAAI,iBAAuC;AAE3C,eAAe,oCAAoC;AACjD,KAAI,eAAgB,QAAO;AAE3B,mBAAkB,YAAY;AAC5B,MAAI,MAAM,aAAa,OAAO,CAC5B;EAIF,MAAM,YAAY,QAAQ,OAAO;AACjC,QAAM,GAAG,WAAW;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;AAErD,QAAM,IAAI,SAAe,SAAS,WAAW;GAC3C,MAAM,QAAQ,MAAM,OAAO,CAAC,OAAO,cAAc,EAAE;IACjD,KAAK,QAAQ,UAAU;IACvB,OAAO;IACP,OAAO,QAAQ,aAAa;IAC7B,CAAC;GAEF,IAAI,SAAS;AACb,SAAM,OAAO,GAAG,SAAS,SAAS;IAChC,MAAM,MAAM,KAAK,UAAU;AAC3B,cAAU;KACV;AAEF,SAAM,GAAG,UAAU,UAAU,OAAO,MAAM,CAAC;AAE3C,SAAM,GAAG,SAAS,SAAS;AACzB,QAAI,SAAS,EAAG,UAAS;QACpB,wBAAO,IAAI,MAAM,gCAAgC,KAAK,GAAG,SAAS,CAAC;KACxE;IACF;KACA,CAAC,OAAO,UAAU;AACpB,UAAQ,MAAM,0CAA0C,QAAQ;AAChE,mBAAiB;GACjB;AAEF,QAAO;;AAGT,eAAe,aAAa,MAAgC;AAC1D,KAAI;AACF,QAAM,KAAK,KAAK;AAChB,SAAO;UACA,OAAO;AACd,MAAI,MAAM,SAAS,SACjB,QAAO;AAET,QAAM"}
|
package/package.json
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@aigne/afs-fs",
|
|
3
|
+
"version": "1.11.0-beta.2",
|
|
4
|
+
"description": "AIGNE AFS module for local filesystem storage",
|
|
5
|
+
"license": "UNLICENSED",
|
|
6
|
+
"publishConfig": {
|
|
7
|
+
"access": "public"
|
|
8
|
+
},
|
|
9
|
+
"author": "Arcblock <blocklet@arcblock.io> https://github.com/arcblock",
|
|
10
|
+
"homepage": "https://github.com/arcblock/afs",
|
|
11
|
+
"repository": {
|
|
12
|
+
"type": "git",
|
|
13
|
+
"url": "git+https://github.com/arcblock/afs"
|
|
14
|
+
},
|
|
15
|
+
"bugs": {
|
|
16
|
+
"url": "https://github.com/arcblock/afs/issues"
|
|
17
|
+
},
|
|
18
|
+
"type": "module",
|
|
19
|
+
"main": "./dist/index.cjs",
|
|
20
|
+
"module": "./dist/index.mjs",
|
|
21
|
+
"types": "./dist/index.d.cts",
|
|
22
|
+
"exports": {
|
|
23
|
+
".": {
|
|
24
|
+
"require": "./dist/index.cjs",
|
|
25
|
+
"import": "./dist/index.mjs"
|
|
26
|
+
},
|
|
27
|
+
"./*": "./*"
|
|
28
|
+
},
|
|
29
|
+
"files": [
|
|
30
|
+
"dist",
|
|
31
|
+
"LICENSE",
|
|
32
|
+
"README.md",
|
|
33
|
+
"CHANGELOG.md"
|
|
34
|
+
],
|
|
35
|
+
"dependencies": {
|
|
36
|
+
"@vscode/ripgrep": "^1.15.14",
|
|
37
|
+
"ignore": "^7.0.5",
|
|
38
|
+
"minimatch": "^10.1.1",
|
|
39
|
+
"zod": "^3.25.67",
|
|
40
|
+
"@aigne/afs": "^1.11.0-beta.2"
|
|
41
|
+
},
|
|
42
|
+
"devDependencies": {
|
|
43
|
+
"@types/bun": "^1.3.6",
|
|
44
|
+
"npm-run-all": "^4.1.5",
|
|
45
|
+
"rimraf": "^6.1.2",
|
|
46
|
+
"tsdown": "0.20.0-beta.3",
|
|
47
|
+
"typescript": "5.9.2",
|
|
48
|
+
"@aigne/scripts": "0.0.0",
|
|
49
|
+
"@aigne/typescript-config": "0.0.0"
|
|
50
|
+
},
|
|
51
|
+
"scripts": {
|
|
52
|
+
"build": "tsdown",
|
|
53
|
+
"check-types": "tsc --noEmit",
|
|
54
|
+
"clean": "rimraf dist coverage",
|
|
55
|
+
"test": "bun test",
|
|
56
|
+
"test:coverage": "bun test --coverage --coverage-reporter=lcov --coverage-reporter=text"
|
|
57
|
+
}
|
|
58
|
+
}
|