rulesync 7.2.0 → 7.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +50 -25
- package/dist/chunk-NRNUPCXY.js +15081 -0
- package/dist/cli/index.cjs +18933 -0
- package/dist/cli/index.d.cts +1 -0
- package/dist/cli/index.d.ts +1 -0
- package/dist/cli/index.js +3955 -0
- package/dist/index.cjs +2009 -5649
- package/dist/index.d.cts +274 -1
- package/dist/index.d.ts +274 -1
- package/dist/index.js +35 -18690
- package/package.json +17 -5
|
@@ -0,0 +1,3955 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
ALL_FEATURES,
|
|
4
|
+
ALL_FEATURES_WITH_WILDCARD,
|
|
5
|
+
ALL_TOOL_TARGETS,
|
|
6
|
+
CommandsProcessor,
|
|
7
|
+
ConfigResolver,
|
|
8
|
+
FETCH_CONCURRENCY_LIMIT,
|
|
9
|
+
HooksProcessor,
|
|
10
|
+
IgnoreProcessor,
|
|
11
|
+
MAX_FILE_SIZE,
|
|
12
|
+
McpProcessor,
|
|
13
|
+
RULESYNC_AIIGNORE_FILE_NAME,
|
|
14
|
+
RULESYNC_AIIGNORE_RELATIVE_FILE_PATH,
|
|
15
|
+
RULESYNC_COMMANDS_RELATIVE_DIR_PATH,
|
|
16
|
+
RULESYNC_CONFIG_RELATIVE_FILE_PATH,
|
|
17
|
+
RULESYNC_CURATED_SKILLS_RELATIVE_DIR_PATH,
|
|
18
|
+
RULESYNC_HOOKS_FILE_NAME,
|
|
19
|
+
RULESYNC_HOOKS_RELATIVE_FILE_PATH,
|
|
20
|
+
RULESYNC_IGNORE_RELATIVE_FILE_PATH,
|
|
21
|
+
RULESYNC_MCP_FILE_NAME,
|
|
22
|
+
RULESYNC_MCP_RELATIVE_FILE_PATH,
|
|
23
|
+
RULESYNC_OVERVIEW_FILE_NAME,
|
|
24
|
+
RULESYNC_RELATIVE_DIR_PATH,
|
|
25
|
+
RULESYNC_RULES_RELATIVE_DIR_PATH,
|
|
26
|
+
RULESYNC_SKILLS_RELATIVE_DIR_PATH,
|
|
27
|
+
RULESYNC_SOURCES_LOCK_RELATIVE_FILE_PATH,
|
|
28
|
+
RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH,
|
|
29
|
+
RulesProcessor,
|
|
30
|
+
RulesyncCommand,
|
|
31
|
+
RulesyncCommandFrontmatterSchema,
|
|
32
|
+
RulesyncHooks,
|
|
33
|
+
RulesyncIgnore,
|
|
34
|
+
RulesyncMcp,
|
|
35
|
+
RulesyncRule,
|
|
36
|
+
RulesyncRuleFrontmatterSchema,
|
|
37
|
+
RulesyncSkill,
|
|
38
|
+
RulesyncSkillFrontmatterSchema,
|
|
39
|
+
RulesyncSubagent,
|
|
40
|
+
RulesyncSubagentFrontmatterSchema,
|
|
41
|
+
SKILL_FILE_NAME,
|
|
42
|
+
SkillsProcessor,
|
|
43
|
+
SubagentsProcessor,
|
|
44
|
+
checkPathTraversal,
|
|
45
|
+
checkRulesyncDirExists,
|
|
46
|
+
createTempDirectory,
|
|
47
|
+
directoryExists,
|
|
48
|
+
ensureDir,
|
|
49
|
+
fileExists,
|
|
50
|
+
findFilesByGlobs,
|
|
51
|
+
formatError,
|
|
52
|
+
generate,
|
|
53
|
+
getLocalSkillDirNames,
|
|
54
|
+
importFromTool,
|
|
55
|
+
listDirectoryFiles,
|
|
56
|
+
logger,
|
|
57
|
+
readFileContent,
|
|
58
|
+
removeDirectory,
|
|
59
|
+
removeFile,
|
|
60
|
+
removeTempDirectory,
|
|
61
|
+
stringifyFrontmatter,
|
|
62
|
+
writeFileContent
|
|
63
|
+
} from "../chunk-NRNUPCXY.js";
|
|
64
|
+
|
|
65
|
+
// src/cli/index.ts
|
|
66
|
+
import { Command } from "commander";
|
|
67
|
+
|
|
68
|
+
// src/constants/announcements.ts
|
|
69
|
+
// One-off release announcement printed to users; currently empty (no announcement).
var ANNOUNCEMENT = "";
|
|
70
|
+
|
|
71
|
+
// src/lib/fetch.ts
|
|
72
|
+
import { Semaphore } from "es-toolkit/promise";
|
|
73
|
+
import { join } from "path";
|
|
74
|
+
|
|
75
|
+
// src/lib/github-client.ts
|
|
76
|
+
import { RequestError } from "@octokit/request-error";
|
|
77
|
+
import { Octokit } from "@octokit/rest";
|
|
78
|
+
|
|
79
|
+
// src/types/fetch.ts
|
|
80
|
+
import { z as z2 } from "zod/mini";
|
|
81
|
+
|
|
82
|
+
// src/types/fetch-targets.ts
|
|
83
|
+
import { z } from "zod/mini";
|
|
84
|
+
// All valid fetch targets: the generic "rulesync" format plus every supported tool target.
var ALL_FETCH_TARGETS = ["rulesync", ...ALL_TOOL_TARGETS];
// Zod schema validating a fetch target string against ALL_FETCH_TARGETS.
var FetchTargetSchema = z.enum(ALL_FETCH_TARGETS);
|
|
86
|
+
|
|
87
|
+
// src/types/fetch.ts
|
|
88
|
+
// How to handle a local file that already exists: "skip" keeps it, "overwrite" replaces it.
var ConflictStrategySchema = z2.enum(["skip", "overwrite"]);
// Entry types the GitHub contents API can return.
var GitHubFileTypeSchema = z2.enum(["file", "dir", "symlink", "submodule"]);
// One entry from the GitHub contents API. looseObject so extra API fields pass through.
var GitHubFileEntrySchema = z2.looseObject({
  name: z2.string(),
  path: z2.string(),
  sha: z2.string(),
  size: z2.number(),
  type: GitHubFileTypeSchema,
  download_url: z2.nullable(z2.string())
});
// Options accepted by the fetch command; every field is optional.
var FetchOptionsSchema = z2.looseObject({
  target: z2.optional(FetchTargetSchema),
  features: z2.optional(z2.array(z2.enum(ALL_FEATURES_WITH_WILDCARD))),
  ref: z2.optional(z2.string()),
  path: z2.optional(z2.string()),
  output: z2.optional(z2.string()),
  conflict: z2.optional(ConflictStrategySchema),
  token: z2.optional(z2.string()),
  verbose: z2.optional(z2.boolean()),
  silent: z2.optional(z2.boolean())
});
// Per-file outcome recorded in a fetch summary.
var FetchFileStatusSchema = z2.enum(["created", "overwritten", "skipped"]);
// Subset of the GitHub repository payload this tool relies on.
var GitHubRepoInfoSchema = z2.looseObject({
  default_branch: z2.string(),
  private: z2.boolean()
});
// Downloadable asset attached to a GitHub release.
var GitHubReleaseAssetSchema = z2.looseObject({
  name: z2.string(),
  browser_download_url: z2.string(),
  size: z2.number()
});
// Subset of the GitHub release payload this tool relies on.
var GitHubReleaseSchema = z2.looseObject({
  tag_name: z2.string(),
  name: z2.nullable(z2.string()),
  prerelease: z2.boolean(),
  draft: z2.boolean(),
  assets: z2.array(GitHubReleaseAssetSchema)
});
|
|
126
|
+
|
|
127
|
+
// src/lib/github-client.ts
|
|
128
|
+
/**
 * Error type for failures raised by the GitHub API layer.
 * Carries the HTTP status code and the parsed API error payload when known.
 */
var GitHubClientError = class extends Error {
  /**
   * @param {string} message - Human-readable error description.
   * @param {number} [statusCode] - HTTP status code, when available.
   * @param {{message: string}} [apiError] - Parsed API error payload, when available.
   */
  constructor(message, statusCode, apiError) {
    super(message);
    this.name = "GitHubClientError";
    this.statusCode = statusCode;
    this.apiError = apiError;
  }
};
|
|
136
|
+
/**
 * Log a GitHub API error and, for auth-related failures (401/403), print
 * hints about supplying a token.
 *
 * @param {GitHubClientError} error - The error to report.
 */
function logGitHubAuthHints(error) {
  logger.error(`GitHub API Error: ${error.message}`);
  const isAuthFailure = error.statusCode === 401 || error.statusCode === 403;
  if (!isAuthFailure) {
    return;
  }
  logger.info(
    "Tip: Set GITHUB_TOKEN or GH_TOKEN environment variable for private repositories or better rate limits."
  );
  logger.info(
    "Tip: If you use GitHub CLI, you can use `GITHUB_TOKEN=$(gh auth token) rulesync fetch ...`"
  );
}
|
|
147
|
+
/**
 * Thin wrapper around Octokit for the GitHub operations the fetch command
 * needs. All failures are normalized into GitHubClientError via handleError.
 */
var GitHubClient = class {
  // Underlying Octokit REST client.
  octokit;
  // Whether an auth token was supplied; used only to tailor rate-limit messages.
  hasToken;
  constructor(config = {}) {
    // Refuse plain-HTTP endpoints so tokens are never sent unencrypted.
    if (config.baseUrl && !config.baseUrl.startsWith("https://")) {
      throw new GitHubClientError("GitHub API base URL must use HTTPS");
    }
    this.hasToken = !!config.token;
    this.octokit = new Octokit({
      auth: config.token,
      baseUrl: config.baseUrl
    });
  }
  /**
   * Get authentication token from various sources
   * (explicit argument first, then GITHUB_TOKEN, then GH_TOKEN).
   */
  static resolveToken(explicitToken) {
    if (explicitToken) {
      return explicitToken;
    }
    return process.env["GITHUB_TOKEN"] ?? process.env["GH_TOKEN"];
  }
  /**
   * Get the default branch of a repository
   */
  async getDefaultBranch(owner, repo) {
    const repoInfo = await this.getRepoInfo(owner, repo);
    return repoInfo.default_branch;
  }
  /**
   * Get repository information
   * (validated against GitHubRepoInfoSchema; throws GitHubClientError otherwise).
   */
  async getRepoInfo(owner, repo) {
    try {
      const { data } = await this.octokit.repos.get({ owner, repo });
      const parsed = GitHubRepoInfoSchema.safeParse(data);
      if (!parsed.success) {
        throw new GitHubClientError(
          `Invalid repository info response: ${formatError(parsed.error)}`
        );
      }
      return parsed.data;
    } catch (error) {
      throw this.handleError(error);
    }
  }
  /**
   * List contents of a directory in a repository
   * Entries that fail schema validation are silently skipped.
   */
  async listDirectory(owner, repo, path2, ref) {
    try {
      const { data } = await this.octokit.repos.getContent({
        owner,
        repo,
        path: path2,
        ref
      });
      // getContent returns an array only for directories.
      if (!Array.isArray(data)) {
        throw new GitHubClientError(`Path "${path2}" is not a directory`);
      }
      const entries = [];
      for (const item of data) {
        const parsed = GitHubFileEntrySchema.safeParse(item);
        if (parsed.success) {
          entries.push(parsed.data);
        }
      }
      return entries;
    } catch (error) {
      throw this.handleError(error);
    }
  }
  /**
   * Get raw file content from a repository
   * Requests the "raw" media type; falls back to decoding the base64
   * `content` field if the API returns a JSON object instead.
   */
  async getFileContent(owner, repo, path2, ref) {
    try {
      const { data } = await this.octokit.repos.getContent({
        owner,
        repo,
        path: path2,
        ref,
        mediaType: {
          format: "raw"
        }
      });
      if (typeof data === "string") {
        return data;
      }
      if (!Array.isArray(data) && "content" in data && data.content) {
        return Buffer.from(data.content, "base64").toString("utf-8");
      }
      throw new GitHubClientError(`Unexpected response format for file content`);
    } catch (error) {
      throw this.handleError(error);
    }
  }
  /**
   * Check if a file exists and is within size limits
   * Returns null when the path is missing, is a directory, or fails schema
   * validation; throws when the file exceeds MAX_FILE_SIZE.
   */
  async getFileInfo(owner, repo, path2, ref) {
    try {
      const { data } = await this.octokit.repos.getContent({
        owner,
        repo,
        path: path2,
        ref
      });
      if (Array.isArray(data)) {
        return null;
      }
      const parsed = GitHubFileEntrySchema.safeParse(data);
      if (!parsed.success) {
        return null;
      }
      if (parsed.data.size > MAX_FILE_SIZE) {
        throw new GitHubClientError(
          `File "${path2}" exceeds maximum size limit of ${MAX_FILE_SIZE / 1024 / 1024}MB`
        );
      }
      return parsed.data;
    } catch (error) {
      // 404 from either layer means "file does not exist", not a failure.
      if (error instanceof RequestError && error.status === 404) {
        return null;
      }
      if (error instanceof GitHubClientError && error.statusCode === 404) {
        return null;
      }
      throw this.handleError(error);
    }
  }
  /**
   * Validate that a repository exists and is accessible
   * Returns false on 404; rethrows any other failure.
   */
  async validateRepository(owner, repo) {
    try {
      await this.getRepoInfo(owner, repo);
      return true;
    } catch (error) {
      if (error instanceof GitHubClientError && error.statusCode === 404) {
        return false;
      }
      throw error;
    }
  }
  /**
   * Resolve a ref (branch, tag, or SHA) to a full commit SHA.
   */
  async resolveRefToSha(owner, repo, ref) {
    try {
      const { data } = await this.octokit.repos.getCommit({
        owner,
        repo,
        ref
      });
      return data.sha;
    } catch (error) {
      throw this.handleError(error);
    }
  }
  /**
   * Get the latest release from a repository
   */
  async getLatestRelease(owner, repo) {
    try {
      const { data } = await this.octokit.repos.getLatestRelease({ owner, repo });
      const parsed = GitHubReleaseSchema.safeParse(data);
      if (!parsed.success) {
        throw new GitHubClientError(`Invalid release info response: ${formatError(parsed.error)}`);
      }
      return parsed.data;
    } catch (error) {
      throw this.handleError(error);
    }
  }
  /**
   * Handle errors from Octokit and convert to GitHubClientError
   * Returns (does not throw) the normalized error so callers can `throw` it.
   */
  handleError(error) {
    if (error instanceof GitHubClientError) {
      return error;
    }
    if (error instanceof RequestError) {
      const responseData = error.response?.data;
      const message = this.extractErrorMessage(responseData, error.message);
      const apiError = message ? { message } : void 0;
      const errorMessage = this.getErrorMessage(error.status, apiError);
      return new GitHubClientError(errorMessage, error.status, apiError);
    }
    if (error instanceof Error) {
      return new GitHubClientError(error.message);
    }
    return new GitHubClientError("Unknown error occurred");
  }
  /**
   * Extract error message from response data
   * Falls back to `fallback` when the payload has no string `message` field.
   */
  extractErrorMessage(data, fallback) {
    if (typeof data === "object" && data !== null && "message" in data) {
      const record = data;
      const msg = record["message"];
      if (typeof msg === "string") {
        return msg;
      }
    }
    return fallback;
  }
  /**
   * Get human-readable error message for HTTP status codes
   */
  getErrorMessage(statusCode, apiError) {
    const baseMessage = apiError?.message ?? `HTTP ${statusCode}`;
    switch (statusCode) {
      case 401:
        return `Authentication failed: ${baseMessage}. Check your GitHub token.`;
      case 403:
        // 403 is either rate limiting or a permissions problem; distinguish by message text.
        if (baseMessage.toLowerCase().includes("rate limit")) {
          return `GitHub API rate limit exceeded. ${this.hasToken ? "Try again later." : "Consider using a GitHub token."}`;
        }
        return `Access forbidden: ${baseMessage}. Check repository permissions.`;
      case 404:
        return `Not found: ${baseMessage}`;
      case 422:
        return `Invalid request: ${baseMessage}`;
      default:
        return `GitHub API error: ${baseMessage}`;
    }
  }
};
|
|
376
|
+
|
|
377
|
+
// src/lib/github-utils.ts
|
|
378
|
+
// Hard cap on directory-tree depth in listDirectoryRecursive, guarding against
// pathological or cyclic-looking repository layouts.
var MAX_RECURSION_DEPTH = 15;
/**
 * Run `fn` while holding one permit from `semaphore`.
 * The permit is always released, whether `fn` resolves or rejects
 * (the `await` inside `try` ensures rejection happens before `finally`).
 */
async function withSemaphore(semaphore, fn) {
  await semaphore.acquire();
  try {
    return await fn();
  } finally {
    semaphore.release();
  }
}
|
|
387
|
+
/**
 * Recursively list every file under a repository directory.
 *
 * Directory listings are rate-limited through `semaphore`; subdirectories at
 * each level are expanded in parallel. Entries that are neither "file" nor
 * "dir" (symlinks, submodules) are ignored.
 *
 * @throws {Error} when recursion exceeds MAX_RECURSION_DEPTH.
 * @returns {Promise<Array>} flat list of file entries.
 */
async function listDirectoryRecursive(params) {
  const { client, owner, repo, path: path2, ref, depth = 0, semaphore } = params;
  if (depth > MAX_RECURSION_DEPTH) {
    throw new Error(
      `Maximum recursion depth (${MAX_RECURSION_DEPTH}) exceeded while listing directory: ${path2}`
    );
  }
  const entries = await withSemaphore(semaphore, () =>
    client.listDirectory(owner, repo, path2, ref)
  );
  const files = entries.filter((entry) => entry.type === "file");
  const directories = entries.filter((entry) => entry.type === "dir");
  const nested = await Promise.all(
    directories.map((dir) =>
      listDirectoryRecursive({
        client,
        owner,
        repo,
        path: dir.path,
        ref,
        depth: depth + 1,
        semaphore
      })
    )
  );
  return [...files, ...nested.flat()];
}
|
|
422
|
+
|
|
423
|
+
// src/types/git-provider.ts
|
|
424
|
+
import { z as z3 } from "zod/mini";
|
|
425
|
+
// Git hosting providers recognized by the source parser. Note that fetch
// currently rejects "gitlab" at runtime; it is listed for parsing only.
var ALL_GIT_PROVIDERS = ["github", "gitlab"];
// Zod schema validating a provider string.
var GitProviderSchema = z3.enum(ALL_GIT_PROVIDERS);
|
|
427
|
+
|
|
428
|
+
// src/lib/source-parser.ts
|
|
429
|
+
// Hostnames (lowercased before lookup) mapped to each provider by parseUrl.
var GITHUB_HOSTS = /* @__PURE__ */ new Set(["github.com", "www.github.com"]);
var GITLAB_HOSTS = /* @__PURE__ */ new Set(["gitlab.com", "www.gitlab.com"]);
|
|
431
|
+
/**
 * Parse a source specifier into { provider, owner, repo, ref?, path? }.
 *
 * Accepts full http(s) URLs, provider-prefixed shorthand ("gitlab:owner/repo"),
 * and bare shorthand ("owner/repo[@ref][:path]", defaulting to GitHub).
 */
function parseSource(source) {
  const isUrl = source.startsWith("http://") || source.startsWith("https://");
  if (isUrl) {
    return parseUrl(source);
  }
  // A colon without "://" may be a provider prefix (e.g. "gitlab:owner/repo")
  // or a shorthand path separator (e.g. "owner/repo:docs") — only treat it as
  // a prefix when the text before the colon names a known provider.
  const hasColon = source.includes(":") && !source.includes("://");
  if (hasColon) {
    const colonIndex = source.indexOf(":");
    const prefix = source.substring(0, colonIndex);
    const rest = source.substring(colonIndex + 1);
    const provider = ALL_GIT_PROVIDERS.find((p) => p === prefix);
    if (provider) {
      return { provider, ...parseShorthand(rest) };
    }
  }
  return { provider: "github", ...parseShorthand(source) };
}
|
|
447
|
+
/**
 * Parse a full repository URL into { provider, owner, repo, ref?, path? }.
 *
 * Supports optional "/tree/<ref>/<path>" and "/blob/<ref>/<path>" suffixes;
 * a trailing ".git" on the repo segment is stripped.
 *
 * @throws {Error} for unknown hosts or URLs with fewer than two path segments.
 */
function parseUrl(url) {
  const { hostname, pathname } = new URL(url);
  const host = hostname.toLowerCase();
  let provider;
  if (GITHUB_HOSTS.has(host)) {
    provider = "github";
  } else if (GITLAB_HOSTS.has(host)) {
    provider = "gitlab";
  } else {
    throw new Error(
      `Unknown Git provider for host: ${host}. Supported providers: ${ALL_GIT_PROVIDERS.join(", ")}`
    );
  }
  const segments = pathname.split("/").filter(Boolean);
  if (segments.length < 2) {
    throw new Error(`Invalid ${provider} URL: ${url}. Expected format: https://${host}/owner/repo`);
  }
  const [ownerSegment, repoSegment, kindSegment, refSegment, ...pathSegments] = segments;
  const owner = ownerSegment ?? "";
  const repo = repoSegment?.replace(/\.git$/, "") ?? "";
  if (kindSegment === "tree" || kindSegment === "blob") {
    return {
      provider,
      owner,
      repo,
      ref: refSegment,
      path: pathSegments.length > 0 ? pathSegments.join("/") : void 0
    };
  }
  return { provider, owner, repo };
}
|
|
483
|
+
/**
 * Parse shorthand "owner/repo[@ref][:path]" into its parts.
 *
 * The path (after the first ":") is stripped first, then the ref (after "@"),
 * then the remainder is split into owner/repo.
 *
 * @throws {Error} on an empty path/ref, a missing "/", or a missing owner/repo.
 * @returns {{owner: string, repo: string, ref: string|undefined, path: string|undefined}}
 */
function parseShorthand(source) {
  let remaining = source;
  let pathPart;
  let refPart;
  const colonAt = remaining.indexOf(":");
  if (colonAt >= 0) {
    pathPart = remaining.slice(colonAt + 1);
    if (!pathPart) {
      throw new Error(`Invalid source: ${source}. Path cannot be empty after ":".`);
    }
    remaining = remaining.slice(0, colonAt);
  }
  const atAt = remaining.indexOf("@");
  if (atAt >= 0) {
    refPart = remaining.slice(atAt + 1);
    if (!refPart) {
      throw new Error(`Invalid source: ${source}. Ref cannot be empty after "@".`);
    }
    remaining = remaining.slice(0, atAt);
  }
  const slashAt = remaining.indexOf("/");
  if (slashAt < 0) {
    throw new Error(
      `Invalid source: ${source}. Expected format: owner/repo, owner/repo@ref, or owner/repo:path`
    );
  }
  const owner = remaining.slice(0, slashAt);
  const repo = remaining.slice(slashAt + 1);
  if (!owner || !repo) {
    throw new Error(`Invalid source: ${source}. Both owner and repo are required.`);
  }
  return {
    owner,
    repo,
    ref: refPart,
    path: pathPart
  };
}
|
|
521
|
+
|
|
522
|
+
// src/lib/fetch.ts
|
|
523
|
+
// Maps each rulesync feature to the repo-relative locations of its sources.
// Directory features list a directory name; single-file features list a file
// name (distinguished downstream by the presence of a "." in the path —
// see collectFeatureFiles).
var FEATURE_PATHS = {
  rules: ["rules"],
  commands: ["commands"],
  subagents: ["subagents"],
  skills: ["skills"],
  ignore: [RULESYNC_AIIGNORE_FILE_NAME],
  mcp: [RULESYNC_MCP_FILE_NAME],
  hooks: [RULESYNC_HOOKS_FILE_NAME]
};
|
|
532
|
+
/**
 * True when `target` is a concrete tool target rather than the generic
 * "rulesync" format.
 */
function isToolTarget(target) {
  return !(target === "rulesync");
}
|
|
535
|
+
/**
 * Reject files larger than MAX_FILE_SIZE.
 *
 * @param {string} relativePath - Path used in the error message.
 * @param {number} size - File size in bytes.
 * @throws {GitHubClientError} when `size` exceeds MAX_FILE_SIZE.
 */
function validateFileSize(relativePath, size) {
  if (size <= MAX_FILE_SIZE) {
    return;
  }
  throw new GitHubClientError(
    `File "${relativePath}" exceeds maximum size limit (${(size / 1024 / 1024).toFixed(2)}MB > ${MAX_FILE_SIZE / 1024 / 1024}MB)`
  );
}
|
|
542
|
+
/**
 * Convert one feature's fetched tool files to rulesync format and write them
 * under `outputDir`.
 *
 * @returns {Promise<{paths: string[]}>} relative paths of the files written
 *   (empty when the processor found no tool files).
 */
async function processFeatureConversion(params) {
  const { processor, outputDir } = params;
  const toolFiles = await processor.loadToolFiles();
  if (toolFiles.length === 0) {
    return { paths: [] };
  }
  const rulesyncFiles = await processor.convertToolFilesToRulesyncFiles(toolFiles);
  const paths = [];
  for (const rulesyncFile of rulesyncFiles) {
    const relativePath = join(
      rulesyncFile.getRelativeDirPath(),
      rulesyncFile.getRelativeFilePath()
    );
    await writeFileContent(join(outputDir, relativePath), rulesyncFile.getFileContent());
    paths.push(relativePath);
  }
  return { paths };
}
|
|
558
|
+
/**
 * Convert tool files fetched into `tempDir` (in `target` tool's native layout)
 * into rulesync format under `outputDir`, for every enabled feature.
 *
 * Each feature is handled by its own processor; a feature is skipped when it
 * is not in `features` or when its processor does not support `target`.
 * Skills are not convertible here and only produce a debug log.
 *
 * @returns {Promise<{converted: number, convertedPaths: string[]}>}
 */
async function convertFetchedFilesToRulesync(params) {
  const { tempDir, outputDir, target, features } = params;
  const convertedPaths = [];
  // Table of convertible features: how to query supported targets and how to
  // build a processor rooted at the temp fetch directory.
  const featureConfigs = [
    {
      feature: "rules",
      getTargets: () => RulesProcessor.getToolTargets({ global: false }),
      createProcessor: () => new RulesProcessor({ baseDir: tempDir, toolTarget: target, global: false })
    },
    {
      feature: "commands",
      getTargets: () => CommandsProcessor.getToolTargets({ global: false, includeSimulated: false }),
      createProcessor: () => new CommandsProcessor({ baseDir: tempDir, toolTarget: target, global: false })
    },
    {
      feature: "subagents",
      getTargets: () => SubagentsProcessor.getToolTargets({ global: false, includeSimulated: false }),
      createProcessor: () => new SubagentsProcessor({ baseDir: tempDir, toolTarget: target, global: false })
    },
    {
      feature: "ignore",
      getTargets: () => IgnoreProcessor.getToolTargets(),
      createProcessor: () => new IgnoreProcessor({ baseDir: tempDir, toolTarget: target })
    },
    {
      feature: "mcp",
      getTargets: () => McpProcessor.getToolTargets({ global: false }),
      createProcessor: () => new McpProcessor({ baseDir: tempDir, toolTarget: target, global: false })
    },
    {
      feature: "hooks",
      getTargets: () => HooksProcessor.getToolTargets({ global: false }),
      createProcessor: () => new HooksProcessor({ baseDir: tempDir, toolTarget: target, global: false })
    }
  ];
  for (const config of featureConfigs) {
    if (!features.includes(config.feature)) {
      continue;
    }
    const supportedTargets = config.getTargets();
    if (!supportedTargets.includes(target)) {
      continue;
    }
    const processor = config.createProcessor();
    const result = await processFeatureConversion({ processor, outputDir });
    convertedPaths.push(...result.paths);
  }
  if (features.includes("skills")) {
    logger.debug(
      "Skills conversion is not yet supported in fetch command. Use import command instead."
    );
  }
  return { converted: convertedPaths.length, convertedPaths };
}
|
|
612
|
+
/**
 * Normalize a user-supplied feature list.
 *
 * An absent/empty list or one containing "*" expands to every feature;
 * otherwise unknown names are silently dropped.
 *
 * @returns {string[]} the enabled features.
 */
function resolveFeatures(features) {
  const wantsAll = !features || features.length === 0 || features.includes("*");
  if (wantsAll) {
    return [...ALL_FEATURES];
  }
  return features.filter((feature) => ALL_FEATURES.includes(feature));
}
|
|
618
|
+
/**
 * Type guard: does `error` carry a numeric, *own* `statusCode` property?
 * Inherited statusCode properties deliberately do not count (the own-property
 * descriptor lookup rejects them even though the `in` check passes).
 */
function hasStatusCode(error) {
  if (typeof error !== "object" || error === null || !("statusCode" in error)) {
    return false;
  }
  const descriptor = Object.getOwnPropertyDescriptor(error, "statusCode");
  return typeof descriptor?.value === "number";
}
|
|
625
|
+
/**
 * True when `error` represents an HTTP 404, whether it is a wrapped
 * GitHubClientError or any other object exposing a numeric statusCode.
 */
function isNotFoundError(error) {
  const isClient404 = error instanceof GitHubClientError && error.statusCode === 404;
  const isGeneric404 = hasStatusCode(error) && error.statusCode === 404;
  return isClient404 || isGeneric404;
}
|
|
634
|
+
/**
 * Fetch rule files from a remote repository into the local rulesync directory.
 *
 * Resolves the source/options, validates the repository, then either:
 *  - delegates to fetchAndConvertToolFiles when a concrete tool target is
 *    requested (fetch + convert to rulesync format), or
 *  - downloads the matching rulesync-format files directly, honoring the
 *    conflict strategy per file.
 *
 * @param {{source: string, options?: object, baseDir?: string}} params
 * @returns {Promise<{source: string, ref: string, files: Array, created: number, overwritten: number, skipped: number}>}
 * @throws {Error} for GitLab sources (unsupported).
 * @throws {GitHubClientError} when the repository is missing/inaccessible,
 *   a file exceeds MAX_FILE_SIZE, or a download fails.
 */
async function fetchFiles(params) {
  const { source, options = {}, baseDir = process.cwd() } = params;
  const parsed = parseSource(source);
  if (parsed.provider === "gitlab") {
    throw new Error(
      "GitLab is not yet supported. Currently only GitHub repositories are supported."
    );
  }
  // CLI options take precedence over values embedded in the source spec.
  const resolvedRef = options.ref ?? parsed.ref;
  const resolvedPath = options.path ?? parsed.path ?? ".";
  const outputDir = options.output ?? RULESYNC_RELATIVE_DIR_PATH;
  const conflictStrategy = options.conflict ?? "overwrite";
  const enabledFeatures = resolveFeatures(options.features);
  const target = options.target ?? "rulesync";
  // Reject output dirs that would escape baseDir.
  checkPathTraversal({
    relativePath: outputDir,
    intendedRootDir: baseDir
  });
  const token = GitHubClient.resolveToken(options.token);
  const client = new GitHubClient({ token });
  logger.debug(`Validating repository: ${parsed.owner}/${parsed.repo}`);
  const isValid = await client.validateRepository(parsed.owner, parsed.repo);
  if (!isValid) {
    throw new GitHubClientError(
      `Repository not found: ${parsed.owner}/${parsed.repo}. Check the repository name and your access permissions.`,
      404
    );
  }
  // Fall back to the repository's default branch when no ref was given.
  const ref = resolvedRef ?? await client.getDefaultBranch(parsed.owner, parsed.repo);
  logger.debug(`Using ref: ${ref}`);
  if (isToolTarget(target)) {
    return fetchAndConvertToolFiles({
      client,
      parsed,
      ref,
      resolvedPath,
      enabledFeatures,
      target,
      outputDir,
      baseDir,
      conflictStrategy
    });
  }
  // Direct rulesync-format fetch: enumerate candidate files first.
  const semaphore = new Semaphore(FETCH_CONCURRENCY_LIMIT);
  const filesToFetch = await collectFeatureFiles({
    client,
    owner: parsed.owner,
    repo: parsed.repo,
    basePath: resolvedPath,
    ref,
    enabledFeatures,
    semaphore
  });
  if (filesToFetch.length === 0) {
    logger.warn(`No files found matching enabled features: ${enabledFeatures.join(", ")}`);
    return {
      source: `${parsed.owner}/${parsed.repo}`,
      ref,
      files: [],
      created: 0,
      overwritten: 0,
      skipped: 0
    };
  }
  const outputBasePath = join(baseDir, outputDir);
  // Validate every file (traversal + size) before downloading anything.
  for (const { relativePath, size } of filesToFetch) {
    checkPathTraversal({
      relativePath,
      intendedRootDir: outputBasePath
    });
    validateFileSize(relativePath, size);
  }
  // Download in parallel; the semaphore bounds concurrent API calls.
  const results = await Promise.all(
    filesToFetch.map(async ({ remotePath, relativePath }) => {
      const localPath = join(outputBasePath, relativePath);
      const exists = await fileExists(localPath);
      if (exists && conflictStrategy === "skip") {
        logger.debug(`Skipping existing file: ${relativePath}`);
        return { relativePath, status: "skipped" };
      }
      const content = await withSemaphore(
        semaphore,
        () => client.getFileContent(parsed.owner, parsed.repo, remotePath, ref)
      );
      await writeFileContent(localPath, content);
      const status = exists ? "overwritten" : "created";
      logger.debug(`Wrote: ${relativePath} (${status})`);
      return { relativePath, status };
    })
  );
  const summary = {
    source: `${parsed.owner}/${parsed.repo}`,
    ref,
    files: results,
    created: results.filter((r) => r.status === "created").length,
    overwritten: results.filter((r) => r.status === "overwritten").length,
    skipped: results.filter((r) => r.status === "skipped").length
  };
  return summary;
}
|
|
734
|
+
/**
 * Enumerate the remote files to fetch for each enabled feature.
 *
 * A feature path containing "." is treated as a single file and looked up in
 * its parent directory's listing (cached so multiple file features share one
 * API call); any other path is treated as a directory and walked recursively.
 * Missing features/files are logged at debug level and skipped; all other
 * errors propagate.
 *
 * @returns {Promise<Array<{remotePath: string, relativePath: string, size: number}>>}
 */
async function collectFeatureFiles(params) {
  const { client, owner, repo, basePath, ref, enabledFeatures, semaphore } = params;
  // Cache of directory-listing *promises*, keyed by path, so concurrent
  // lookups of the same directory trigger only one API call.
  const dirCache = /* @__PURE__ */ new Map();
  async function getCachedDirectory(path2) {
    let promise = dirCache.get(path2);
    if (promise === void 0) {
      promise = withSemaphore(semaphore, () => client.listDirectory(owner, repo, path2, ref));
      dirCache.set(path2, promise);
    }
    return promise;
  }
  // One task per (feature, candidate path) pair.
  const tasks = enabledFeatures.flatMap(
    (feature) => FEATURE_PATHS[feature].map((featurePath) => ({ feature, featurePath }))
  );
  const results = await Promise.all(
    tasks.map(async ({ featurePath }) => {
      const fullPath = basePath === "." || basePath === "" ? featurePath : join(basePath, featurePath);
      const collected = [];
      try {
        if (featurePath.includes(".")) {
          // Single-file feature: find it in the (cached) base directory listing.
          try {
            const entries = await getCachedDirectory(
              basePath === "." || basePath === "" ? "." : basePath
            );
            const fileEntry = entries.find((e) => e.name === featurePath && e.type === "file");
            if (fileEntry) {
              collected.push({
                remotePath: fileEntry.path,
                relativePath: featurePath,
                size: fileEntry.size
              });
            }
          } catch (error) {
            // A missing parent directory just means the file doesn't exist.
            if (isNotFoundError(error)) {
              logger.debug(`File not found: ${fullPath}`);
            } else {
              throw error;
            }
          }
        } else {
          // Directory feature: walk the tree and record every file.
          const dirFiles = await listDirectoryRecursive({
            client,
            owner,
            repo,
            path: fullPath,
            ref,
            semaphore
          });
          for (const file of dirFiles) {
            // Strip the basePath prefix (plus its "/") to get the local relative path.
            const relativePath = basePath === "." || basePath === "" ? file.path : file.path.substring(basePath.length + 1);
            collected.push({
              remotePath: file.path,
              relativePath,
              size: file.size
            });
          }
        }
      } catch (error) {
        // A feature directory that doesn't exist in the repo is not an error.
        if (isNotFoundError(error)) {
          logger.debug(`Feature not found: ${fullPath}`);
          return collected;
        }
        throw error;
      }
      return collected;
    })
  );
  return results.flat();
}
|
|
803
|
+
// Fetches all feature files for a remote source into a temp directory (laid out
// in the target tool's native paths), then converts them into rulesync format
// under the configured output directory. The temp directory is always removed,
// even on failure. Remote reads are bounded by a shared semaphore.
//
// Returns a summary: { source, ref, files, created, overwritten, skipped }.
async function fetchAndConvertToolFiles(params) {
  const {
    client,
    parsed,
    ref,
    resolvedPath,
    enabledFeatures,
    target,
    outputDir,
    baseDir,
    // Accepted for interface compatibility but currently unused here
    // (conversion always reports files as "created").
    conflictStrategy: _conflictStrategy
  } = params;
  const tempDir = await createTempDirectory();
  logger.debug(`Created temp directory: ${tempDir}`);
  // Limits the number of concurrent remote requests across this whole fetch.
  const semaphore = new Semaphore(FETCH_CONCURRENCY_LIMIT);
  try {
    const filesToFetch = await collectFeatureFiles({
      client,
      owner: parsed.owner,
      repo: parsed.repo,
      basePath: resolvedPath,
      ref,
      enabledFeatures,
      semaphore
    });
    if (filesToFetch.length === 0) {
      logger.warn(`No files found matching enabled features: ${enabledFeatures.join(", ")}`);
      return {
        source: `${parsed.owner}/${parsed.repo}`,
        ref,
        files: [],
        created: 0,
        overwritten: 0,
        skipped: 0
      };
    }
    // Validate all sizes up front so nothing is downloaded when any file is too large.
    for (const { relativePath, size } of filesToFetch) {
      validateFileSize(relativePath, size);
    }
    const toolPaths = getToolPathMapping(target);
    await Promise.all(
      filesToFetch.map(async ({ remotePath, relativePath }) => {
        // Place each file where the target tool expects it, and refuse paths
        // that would escape the temp directory.
        const toolRelativePath = mapToToolPath(relativePath, toolPaths);
        checkPathTraversal({
          relativePath: toolRelativePath,
          intendedRootDir: tempDir
        });
        const localPath = join(tempDir, toolRelativePath);
        // Only the remote read is rate-limited; the local write is not.
        const content = await withSemaphore(
          semaphore,
          () => client.getFileContent(parsed.owner, parsed.repo, remotePath, ref)
        );
        await writeFileContent(localPath, content);
        logger.debug(`Fetched to temp: ${toolRelativePath}`);
      })
    );
    const outputBasePath = join(baseDir, outputDir);
    const { converted, convertedPaths } = await convertFetchedFilesToRulesync({
      tempDir,
      outputDir: outputBasePath,
      target,
      features: enabledFeatures
    });
    // Every converted path is reported as "created"; the overwritten/skipped
    // counters below therefore stay 0 with the current conversion behavior.
    const results = convertedPaths.map((relativePath) => ({
      relativePath,
      status: "created"
    }));
    logger.debug(`Converted ${converted} files from ${target} format to rulesync format`);
    return {
      source: `${parsed.owner}/${parsed.repo}`,
      ref,
      files: results,
      created: results.filter((r) => r.status === "created").length,
      overwritten: results.filter((r) => r.status === "overwritten").length,
      skipped: results.filter((r) => r.status === "skipped").length
    };
  } finally {
    // Guarantee temp-dir cleanup on both success and failure.
    await removeTempDirectory(tempDir);
  }
}
|
|
883
|
+
// Builds the path layout a given tool target uses for each feature.
// Only features whose processor supports the target contribute an entry:
//   rules     -> { root?: file path, nonRoot?: dir path }
//   commands / subagents / skills -> their settable directory path.
function getToolPathMapping(target) {
  const mapping = {};
  // Rules have a distinct shape (root file + non-root directory), so they are
  // handled separately from the purely directory-based features below.
  if (RulesProcessor.getToolTargets({ global: false }).includes(target)) {
    const rulesFactory = RulesProcessor.getFactory(target);
    if (rulesFactory) {
      const rulePaths = rulesFactory.class.getSettablePaths({ global: false });
      mapping.rules = {
        root: rulePaths.root?.relativeFilePath,
        nonRoot: rulePaths.nonRoot?.relativeDirPath
      };
    }
  }
  // Directory-based features share the same lookup pattern; only the
  // getToolTargets options differ per processor.
  const dirFeatures = [
    ["commands", CommandsProcessor, { global: false, includeSimulated: false }],
    ["subagents", SubagentsProcessor, { global: false, includeSimulated: false }],
    ["skills", SkillsProcessor, { global: false }]
  ];
  for (const [feature, processor, targetOptions] of dirFeatures) {
    if (!processor.getToolTargets(targetOptions).includes(target)) {
      continue;
    }
    const factory = processor.getFactory(target);
    if (factory) {
      mapping[feature] = factory.class.getSettablePaths({ global: false }).relativeDirPath;
    }
  }
  return mapping;
}
|
|
928
|
+
// Translates a rulesync-relative path ("rules/…", "commands/…", "subagents/…",
// "skills/…", or a root rules file) into the tool-specific location described
// by `toolPaths`. Paths with no applicable mapping are returned unchanged.
function mapToToolPath(relativePath, toolPaths) {
  // Non-root rule files move under the tool's rules directory.
  if (relativePath.startsWith("rules/") && toolPaths.rules?.nonRoot) {
    return join(toolPaths.rules.nonRoot, relativePath.slice("rules/".length));
  }
  // The root rules file (e.g. "CLAUDE.md") keeps its position as-is.
  if (toolPaths.rules?.root && relativePath === toolPaths.rules.root) {
    return relativePath;
  }
  // Remaining feature directories all map the same way: strip the feature
  // prefix and re-root under the tool's directory for that feature.
  const dirMappings = [
    ["commands/", toolPaths.commands],
    ["subagents/", toolPaths.subagents],
    ["skills/", toolPaths.skills]
  ];
  for (const [prefix, toolDir] of dirMappings) {
    if (relativePath.startsWith(prefix) && toolDir) {
      return join(toolDir, relativePath.slice(prefix.length));
    }
  }
  // No mapping applies: keep the path unchanged.
  return relativePath;
}
|
|
958
|
+
// Renders a fetch summary as multi-line text: a header, one line per file
// with a status marker, a blank separator, and a totals line.
function formatFetchSummary(summary) {
  const fileLines = summary.files.map((file) => {
    const marker = file.status === "skipped" ? "-" : "\u2713";
    let note;
    if (file.status === "created") {
      note = "(created)";
    } else if (file.status === "overwritten") {
      note = "(overwritten)";
    } else {
      note = "(skipped - already exists)";
    }
    return ` ${marker} ${file.relativePath} ${note}`;
  });
  const countParts = [
    [summary.created, "created"],
    [summary.overwritten, "overwritten"],
    [summary.skipped, "skipped"]
  ]
    .filter(([count]) => count > 0)
    .map(([count, label]) => `${count} ${label}`);
  const summaryLine = `Summary: ${countParts.length > 0 ? countParts.join(", ") : "no files"}`;
  return [
    `Fetched from ${summary.source}@${summary.ref}:`,
    ...fileLines,
    "",
    summaryLine
  ].join("\n");
}
|
|
975
|
+
|
|
976
|
+
// src/cli/commands/fetch.ts
|
|
977
|
+
// CLI entry point for `rulesync fetch`: downloads files from a remote source,
// prints a formatted summary, and exits with code 1 on any failure.
async function fetchCommand(options) {
  const { source, ...fetchOptions } = options;
  logger.configure({
    verbose: fetchOptions.verbose ?? false,
    silent: fetchOptions.silent ?? false
  });
  logger.debug(`Fetching files from ${source}...`);
  try {
    const summary = await fetchFiles({ source, options: fetchOptions });
    logger.success(formatFetchSummary(summary));
    // Warn when absolutely nothing was touched (no creates, overwrites, or skips).
    const nothingFetched =
      summary.created === 0 && summary.overwritten === 0 && summary.skipped === 0;
    if (nothingFetched) {
      logger.warn("No files were fetched.");
    }
  } catch (error) {
    // GitHub client failures get targeted auth hints; anything else is logged plainly.
    if (error instanceof GitHubClientError) {
      logGitHubAuthHints(error);
    } else {
      logger.error(formatError(error));
    }
    process.exit(1);
  }
}
|
|
1003
|
+
|
|
1004
|
+
// src/utils/result.ts
|
|
1005
|
+
// Sums the per-feature counters of a generate/import result into one total.
function calculateTotalCount(result) {
  const featureCounts = [
    result.rulesCount,
    result.ignoreCount,
    result.mcpCount,
    result.commandsCount,
    result.subagentsCount,
    result.skillsCount,
    result.hooksCount
  ];
  return featureCounts.reduce((total, count) => total + count, 0);
}
|
|
1008
|
+
|
|
1009
|
+
// src/cli/commands/generate.ts
|
|
1010
|
+
// Logs the outcome for one feature: a headline (preview or success) followed
// by each written path. Features with a zero count produce no output.
function logFeatureResult(params) {
  const { count, paths, featureName, isPreview, modePrefix } = params;
  if (count <= 0) {
    return;
  }
  if (isPreview) {
    logger.info(`${modePrefix} Would write ${count} ${featureName}`);
  } else {
    logger.success(`Written ${count} ${featureName}`);
  }
  for (const writtenPath of paths) {
    logger.info(`  ${writtenPath}`);
  }
}
|
|
1023
|
+
// CLI entry point for `rulesync generate`: resolves config, runs generation
// for every enabled feature, logs per-feature and total results, and — in
// check mode — exits non-zero when generated files are out of date.
async function generateCommand(options) {
  const config = await ConfigResolver.resolve(options);
  logger.configure({
    verbose: config.getVerbose(),
    silent: config.getSilent()
  });
  const check = config.getCheck();
  const isPreview = config.isPreviewMode();
  const modePrefix = isPreview ? "[DRY RUN]" : "";
  logger.debug("Generating files...");
  if (!await checkRulesyncDirExists({ baseDir: process.cwd() })) {
    logger.error("\u274C .rulesync directory not found. Run 'rulesync init' first.");
    process.exit(1);
  }
  logger.debug(`Base directories: ${config.getBaseDirs().join(", ")}`);
  const features = config.getFeatures();
  // Announce each enabled feature before generation starts.
  const featureDebugMessages = [
    ["ignore", "Generating ignore files..."],
    ["mcp", "Generating MCP files..."],
    ["commands", "Generating command files..."],
    ["subagents", "Generating subagent files..."],
    ["skills", "Generating skill files..."],
    ["hooks", "Generating hooks..."],
    ["rules", "Generating rule files..."]
  ];
  for (const [feature, message] of featureDebugMessages) {
    if (features.includes(feature)) {
      logger.debug(message);
    }
  }
  const result = await generate({ config });
  // Per-feature reporting, in the same order the messages above use.
  const featureReports = [
    ["ignore file(s)", result.ignoreCount, result.ignorePaths],
    ["MCP configuration(s)", result.mcpCount, result.mcpPaths],
    ["command(s)", result.commandsCount, result.commandsPaths],
    ["subagent(s)", result.subagentsCount, result.subagentsPaths],
    ["skill(s)", result.skillsCount, result.skillsPaths],
    ["hooks file(s)", result.hooksCount, result.hooksPaths],
    ["rule(s)", result.rulesCount, result.rulesPaths]
  ];
  for (const [featureName, count, paths] of featureReports) {
    logFeatureResult({ count, paths, featureName, isPreview, modePrefix });
  }
  const totalGenerated = calculateTotalCount(result);
  if (totalGenerated === 0) {
    const enabledFeatures = features.join(", ");
    logger.info(`\u2713 All files are up to date (${enabledFeatures})`);
    return;
  }
  // Build the "N rules + M commands + ..." breakdown for the closing line.
  const summaryCounts = [
    [result.rulesCount, "rules"],
    [result.ignoreCount, "ignore files"],
    [result.mcpCount, "MCP files"],
    [result.commandsCount, "commands"],
    [result.subagentsCount, "subagents"],
    [result.skillsCount, "skills"],
    [result.hooksCount, "hooks"]
  ];
  const parts = [];
  for (const [count, label] of summaryCounts) {
    if (count > 0) {
      parts.push(`${count} ${label}`);
    }
  }
  if (isPreview) {
    logger.info(`${modePrefix} Would write ${totalGenerated} file(s) total (${parts.join(" + ")})`);
  } else {
    logger.success(`\u{1F389} All done! Written ${totalGenerated} file(s) total (${parts.join(" + ")})`);
  }
  // Check mode: fail the process when generation produced differences.
  if (check) {
    if (result.hasDiff) {
      logger.error("\u274C Files are not up to date. Run 'rulesync generate' to update.");
      process.exit(1);
    } else {
      logger.success("\u2713 All files are up to date.");
    }
  }
}
|
|
1138
|
+
|
|
1139
|
+
// src/cli/commands/gitignore.ts
|
|
1140
|
+
import { join as join2 } from "path";
|
|
1141
|
+
// Header line that marks the rulesync-managed block inside .gitignore.
var RULESYNC_HEADER = "# Generated by Rulesync";
// Header written by older rulesync versions; still recognized so the block
// can be found and replaced in previously-managed .gitignore files.
var LEGACY_RULESYNC_HEADER = "# Generated by rulesync - AI tool configuration files";
// Every .gitignore pattern rulesync manages: the generated output locations
// of all supported AI tools, grouped by tool. The trailing "!" entry
// re-includes the .rulesync/.aiignore file itself.
var RULESYNC_IGNORE_ENTRIES = [
  // Rulesync curated (fetched) skills
  ".rulesync/skills/.curated/",
  // AGENTS.md
  "**/AGENTS.md",
  "**/.agents/",
  // Augment
  "**/.augmentignore",
  "**/.augment/rules/",
  "**/.augment-guidelines",
  // Claude Code
  "**/CLAUDE.md",
  "**/CLAUDE.local.md",
  "**/.claude/CLAUDE.md",
  "**/.claude/CLAUDE.local.md",
  "**/.claude/memories/",
  "**/.claude/rules/",
  "**/.claude/commands/",
  "**/.claude/agents/",
  "**/.claude/skills/",
  "**/.claude/settings.local.json",
  "**/.mcp.json",
  // Cline
  "**/.clinerules/",
  "**/.clinerules/workflows/",
  "**/.clineignore",
  "**/.cline/mcp.json",
  // Codex
  "**/.codexignore",
  "**/.codex/memories/",
  "**/.codex/skills/",
  "**/.codex/subagents/",
  // Cursor
  "**/.cursor/",
  "**/.cursorignore",
  // Factory Droid
  "**/.factory/rules/",
  "**/.factory/commands/",
  "**/.factory/droids/",
  "**/.factory/skills/",
  "**/.factory/mcp.json",
  "**/.factory/settings.json",
  // Gemini
  "**/GEMINI.md",
  "**/.gemini/memories/",
  "**/.gemini/commands/",
  "**/.gemini/subagents/",
  "**/.gemini/skills/",
  "**/.geminiignore",
  // GitHub Copilot
  "**/.github/copilot-instructions.md",
  "**/.github/instructions/",
  "**/.github/prompts/",
  "**/.github/agents/",
  "**/.github/skills/",
  "**/.vscode/mcp.json",
  // Junie
  "**/.junie/guidelines.md",
  "**/.junie/mcp.json",
  // Kilo Code
  "**/.kilocode/rules/",
  "**/.kilocode/skills/",
  "**/.kilocode/workflows/",
  "**/.kilocode/mcp.json",
  "**/.kilocodeignore",
  // Kiro
  "**/.kiro/steering/",
  "**/.kiro/prompts/",
  "**/.kiro/skills/",
  "**/.kiro/agents/",
  "**/.kiro/settings/mcp.json",
  "**/.aiignore",
  // OpenCode
  "**/.opencode/memories/",
  "**/.opencode/command/",
  "**/.opencode/agent/",
  "**/.opencode/skill/",
  "**/.opencode/plugins/",
  // Qwen
  "**/QWEN.md",
  "**/.qwen/memories/",
  // Replit
  "**/replit.md",
  // Roo
  "**/.roo/rules/",
  "**/.roo/skills/",
  "**/.rooignore",
  "**/.roo/mcp.json",
  "**/.roo/subagents/",
  // Warp
  "**/.warp/",
  "**/WARP.md",
  // Others
  ".rulesync/rules/*.local.md",
  "rulesync.local.jsonc",
  "!.rulesync/.aiignore"
];
|
|
1240
|
+
// True when the (trimmed) line is the rulesync .gitignore block header,
// in either its current or legacy form.
var isRulesyncHeader = (line) => {
  const normalized = line.trim();
  return [RULESYNC_HEADER, LEGACY_RULESYNC_HEADER].includes(normalized);
};
|
|
1244
|
+
// True when the (trimmed) line is one of the rulesync-managed ignore
// patterns. Blank lines and header lines are never entries.
var isRulesyncEntry = (line) => {
  const normalized = line.trim();
  if (normalized === "") {
    return false;
  }
  if (isRulesyncHeader(line)) {
    return false;
  }
  return RULESYNC_IGNORE_ENTRIES.includes(normalized);
};
|
|
1251
|
+
// Strips the previously written rulesync block (header + managed entries)
// from .gitignore content, so a fresh block can be appended without
// duplication. User-authored lines are preserved.
//
// Implemented as a small line-by-line state machine:
//   - seeing a rulesync header enters "inside the rulesync block";
//   - inside the block, entries and single blank lines are dropped;
//   - two consecutive blank lines, or any non-rulesync line, end the block;
//   - rulesync entries found OUTSIDE a headed block are dropped too.
var removeExistingRulesyncEntries = (content) => {
  const lines = content.split("\n");
  const filteredLines = [];
  let inRulesyncBlock = false;
  let consecutiveEmptyLines = 0;
  for (const line of lines) {
    const trimmed = line.trim();
    if (isRulesyncHeader(line)) {
      // Drop the header line and start skipping the managed block.
      inRulesyncBlock = true;
      continue;
    }
    if (inRulesyncBlock) {
      if (trimmed === "") {
        consecutiveEmptyLines++;
        // Two blank lines in a row mean the managed block has ended.
        if (consecutiveEmptyLines >= 2) {
          inRulesyncBlock = false;
          consecutiveEmptyLines = 0;
        }
        // Blank lines inside (or terminating) the block are not kept.
        continue;
      }
      if (isRulesyncEntry(line)) {
        // Managed entry inside the block: drop it and reset the blank counter.
        consecutiveEmptyLines = 0;
        continue;
      }
      // Any other non-blank line ends the block; fall through so the line
      // itself is handled (and kept) below.
      inRulesyncBlock = false;
      consecutiveEmptyLines = 0;
    }
    // Stray managed entries outside a headed block are removed as well.
    if (isRulesyncEntry(line)) {
      continue;
    }
    filteredLines.push(line);
  }
  let result = filteredLines.join("\n");
  // Collapse any trailing run of blank lines down to a single newline.
  while (result.endsWith("\n\n")) {
    result = result.slice(0, -1);
  }
  return result;
};
|
|
1289
|
+
// CLI entry point for `rulesync gitignore`: rewrites .gitignore in the
// current directory so it ends with an up-to-date rulesync-managed block,
// removing any previously written block first. No-op when already current.
var gitignoreCommand = async () => {
  const gitignorePath = join2(process.cwd(), ".gitignore");
  const existingContent = (await fileExists(gitignorePath))
    ? await readFileContent(gitignorePath)
    : "";
  const cleanedContent = removeExistingRulesyncEntries(existingContent);
  const rulesyncBlock = [RULESYNC_HEADER, ...RULESYNC_IGNORE_ENTRIES].join("\n");
  // Append the managed block after the user's content (separated by a blank
  // line), or write it alone when the file was empty/whitespace-only.
  const newContent = cleanedContent.trim()
    ? `${cleanedContent.trimEnd()}\n\n${rulesyncBlock}\n`
    : `${rulesyncBlock}\n`;
  if (existingContent === newContent) {
    logger.success(".gitignore is already up to date");
    return;
  }
  await writeFileContent(gitignorePath, newContent);
  logger.success("Updated .gitignore with rulesync entries:");
  for (const entry of RULESYNC_IGNORE_ENTRIES) {
    logger.info(` ${entry}`);
  }
  logger.info("");
  logger.info(
    "\u{1F4A1} If you're using Google Antigravity, note that rules, workflows, and skills won't load if they're gitignored."
  );
  logger.info(" You can add the following to .git/info/exclude instead:");
  logger.info(" **/.agent/rules/");
  logger.info(" **/.agent/workflows/");
  logger.info(" **/.agent/skills/");
  logger.info(" For more details: https://github.com/dyoshikawa/rulesync/issues/981");
};
|
|
1321
|
+
|
|
1322
|
+
// src/cli/commands/import.ts
|
|
1323
|
+
// CLI entry point for `rulesync import`: imports configuration from exactly
// one tool into the rulesync format and prints a per-feature summary.
async function importCommand(options) {
  if (!options.targets) {
    logger.error("No tools found in --targets");
    process.exit(1);
  }
  if (options.targets.length > 1) {
    logger.error("Only one tool can be imported at a time");
    process.exit(1);
  }
  const config = await ConfigResolver.resolve(options);
  logger.configure({
    verbose: config.getVerbose(),
    silent: config.getSilent()
  });
  const [tool] = config.getTargets();
  logger.debug(`Importing files from ${tool}...`);
  const result = await importFromTool({ config, tool });
  const totalImported = calculateTotalCount(result);
  if (totalImported === 0) {
    const enabledFeatures = config.getFeatures().join(", ");
    logger.warn(`No files imported for enabled features: ${enabledFeatures}`);
    return;
  }
  // Assemble the "N rules + M commands + ..." breakdown, skipping zeros.
  const summaryCounts = [
    [result.rulesCount, "rules"],
    [result.ignoreCount, "ignore files"],
    [result.mcpCount, "MCP files"],
    [result.commandsCount, "commands"],
    [result.subagentsCount, "subagents"],
    [result.skillsCount, "skills"],
    [result.hooksCount, "hooks"]
  ];
  const parts = summaryCounts
    .filter(([count]) => count > 0)
    .map(([count, label]) => `${count} ${label}`);
  logger.success(`Imported ${totalImported} file(s) total (${parts.join(" + ")})`);
}
|
|
1356
|
+
|
|
1357
|
+
// src/lib/init.ts
|
|
1358
|
+
import { join as join3 } from "path";
|
|
1359
|
+
// Initializes a project: creates the sample rulesync files and the config
// file, returning what was (or wasn't) created.
//
// The sample files and the config file live at disjoint paths, so the two
// creation steps are independent and are run concurrently instead of
// serially awaited.
async function init() {
  const [sampleFiles, configFile] = await Promise.all([
    createSampleFiles(),
    createConfigFile()
  ]);
  return {
    configFile,
    sampleFiles
  };
}
|
|
1367
|
+
// Writes the default rulesync config file unless one already exists.
// Returns { created, path } describing the outcome.
async function createConfigFile() {
  const configPath = RULESYNC_CONFIG_RELATIVE_FILE_PATH;
  if (await fileExists(configPath)) {
    return { created: false, path: configPath };
  }
  // Default configuration written by `rulesync init`.
  const defaultConfig = {
    targets: ["copilot", "cursor", "claudecode", "codexcli"],
    features: ["rules", "ignore", "mcp", "commands", "subagents", "skills", "hooks"],
    baseDirs: ["."],
    delete: true,
    verbose: false,
    silent: false,
    global: false,
    simulateCommands: false,
    simulateSubagents: false,
    simulateSkills: false
  };
  await writeFileContent(configPath, JSON.stringify(defaultConfig, null, 2));
  return { created: true, path: configPath };
}
|
|
1393
|
+
// Creates one sample file per feature (rule, MCP config, command, subagent,
// skill, ignore, hooks) under the rulesync directory layout. Existing files
// are left untouched. Returns a list of { created, path } results.
//
// NOTE(review): the internal indentation of the JSON/YAML template literals
// below was reconstructed from a whitespace-mangled source — the literal
// *content* is faithful, but confirm exact indentation against the
// published package before relying on byte-for-byte output.
async function createSampleFiles() {
  const results = [];
  // Sample root rule file (project overview guidelines).
  const sampleRuleFile = {
    filename: RULESYNC_OVERVIEW_FILE_NAME,
    content: `---
root: true
targets: ["*"]
description: "Project overview and general development guidelines"
globs: ["**/*"]
---

# Project Overview

## General Guidelines

- Use TypeScript for all new code
- Follow consistent naming conventions
- Write self-documenting code with clear variable and function names
- Prefer composition over inheritance
- Use meaningful comments for complex business logic

## Code Style

- Use 2 spaces for indentation
- Use semicolons
- Use double quotes for strings
- Use trailing commas in multi-line objects and arrays

## Architecture Principles

- Organize code by feature, not by file type
- Keep related files close together
- Use dependency injection for better testability
- Implement proper error handling
- Follow single responsibility principle
`
  };
  // Sample MCP server configuration (serena + context7).
  const sampleMcpFile = {
    filename: "mcp.json",
    content: `{
  "mcpServers": {
    "serena": {
      "type": "stdio",
      "command": "uvx",
      "args": [
        "--from",
        "git+https://github.com/oraios/serena",
        "serena",
        "start-mcp-server",
        "--context",
        "ide-assistant",
        "--enable-web-dashboard",
        "false",
        "--project",
        "."
      ],
      "env": {}
    },
    "context7": {
      "type": "stdio",
      "command": "npx",
      "args": [
        "-y",
        "@upstash/context7-mcp"
      ],
      "env": {}
    }
  }
}
`
  };
  // Sample slash command for reviewing a pull request.
  const sampleCommandFile = {
    filename: "review-pr.md",
    content: `---
description: 'Review a pull request'
targets: ["*"]
---

target_pr = $ARGUMENTS

If target_pr is not provided, use the PR of the current branch.

Execute the following in parallel:

1. Check code quality and style consistency
2. Review test coverage
3. Verify documentation updates
4. Check for potential bugs or security issues

Then provide a summary of findings and suggestions for improvement.
`
  };
  // Sample subagent definition (a read-only planner).
  const sampleSubagentFile = {
    filename: "planner.md",
    content: `---
name: planner
targets: ["*"]
description: >-
  This is the general-purpose planner. The user asks the agent to plan to
  suggest a specification, implement a new feature, refactor the codebase, or
  fix a bug. This agent can be called by the user explicitly only.
claudecode:
  model: inherit
---

You are the planner for any tasks.

Based on the user's instruction, create a plan while analyzing the related files. Then, report the plan in detail. You can output files to @tmp/ if needed.

Attention, again, you are just the planner, so though you can read any files and run any commands for analysis, please don't write any code.
`
  };
  // Sample skill; lives in its own directory with a SKILL file inside.
  const sampleSkillFile = {
    dirName: "project-context",
    content: `---
name: project-context
description: "Summarize the project context and key constraints"
targets: ["*"]
---

Summarize the project goals, core constraints, and relevant dependencies.
Call out any architecture decisions, shared conventions, and validation steps.
Keep the summary concise and ready to reuse in future tasks.`
  };
  // Sample ignore file contents.
  const sampleIgnoreFile = {
    content: `credentials/
`
  };
  // Sample hooks configuration (format after Write/Edit tool use).
  const sampleHooksFile = {
    content: `{
  "version": 1,
  "hooks": {
    "postToolUse": [
      {
        "matcher": "Write|Edit",
        "command": ".rulesync/hooks/format.sh"
      }
    ]
  }
}
`
  };
  // Resolve the canonical rulesync path layout for every feature.
  const rulePaths = RulesyncRule.getSettablePaths();
  const mcpPaths = RulesyncMcp.getSettablePaths();
  const commandPaths = RulesyncCommand.getSettablePaths();
  const subagentPaths = RulesyncSubagent.getSettablePaths();
  const skillPaths = RulesyncSkill.getSettablePaths();
  const ignorePaths = RulesyncIgnore.getSettablePaths();
  const hooksPaths = RulesyncHooks.getSettablePaths();
  // Make sure every target directory exists before writing into it.
  await ensureDir(rulePaths.recommended.relativeDirPath);
  await ensureDir(mcpPaths.recommended.relativeDirPath);
  await ensureDir(commandPaths.relativeDirPath);
  await ensureDir(subagentPaths.relativeDirPath);
  await ensureDir(skillPaths.relativeDirPath);
  await ensureDir(ignorePaths.recommended.relativeDirPath);
  // Write each sample only if it does not already exist.
  const ruleFilepath = join3(rulePaths.recommended.relativeDirPath, sampleRuleFile.filename);
  results.push(await writeIfNotExists(ruleFilepath, sampleRuleFile.content));
  const mcpFilepath = join3(
    mcpPaths.recommended.relativeDirPath,
    mcpPaths.recommended.relativeFilePath
  );
  results.push(await writeIfNotExists(mcpFilepath, sampleMcpFile.content));
  const commandFilepath = join3(commandPaths.relativeDirPath, sampleCommandFile.filename);
  results.push(await writeIfNotExists(commandFilepath, sampleCommandFile.content));
  const subagentFilepath = join3(subagentPaths.relativeDirPath, sampleSubagentFile.filename);
  results.push(await writeIfNotExists(subagentFilepath, sampleSubagentFile.content));
  // Skills get a per-skill directory containing the SKILL file.
  const skillDirPath = join3(skillPaths.relativeDirPath, sampleSkillFile.dirName);
  await ensureDir(skillDirPath);
  const skillFilepath = join3(skillDirPath, SKILL_FILE_NAME);
  results.push(await writeIfNotExists(skillFilepath, sampleSkillFile.content));
  const ignoreFilepath = join3(
    ignorePaths.recommended.relativeDirPath,
    ignorePaths.recommended.relativeFilePath
  );
  results.push(await writeIfNotExists(ignoreFilepath, sampleIgnoreFile.content));
  const hooksFilepath = join3(hooksPaths.relativeDirPath, hooksPaths.relativeFilePath);
  results.push(await writeIfNotExists(hooksFilepath, sampleHooksFile.content));
  return results;
}
|
|
1572
|
+
// Writes `content` to `filePath` unless the file already exists.
// Returns { created, path } describing what happened.
async function writeIfNotExists(filePath, content) {
  const alreadyExists = await fileExists(filePath);
  if (!alreadyExists) {
    await writeFileContent(filePath, content);
  }
  return { created: !alreadyExists, path: filePath };
}
|
|
1579
|
+
|
|
1580
|
+
// src/cli/commands/init.ts
|
|
1581
|
+
// CLI entry point for `rulesync init`: creates the .rulesync directory and
// the sample/config files, then reports what was created vs. skipped and
// prints next-step guidance.
async function initCommand() {
  logger.debug("Initializing rulesync...");
  await ensureDir(RULESYNC_RELATIVE_DIR_PATH);
  const { sampleFiles, configFile } = await init();
  // Shared reporter: created files succeed loudly, existing ones are skipped.
  const reportFile = (file) => {
    if (file.created) {
      logger.success(`Created ${file.path}`);
    } else {
      logger.info(`Skipped ${file.path} (already exists)`);
    }
  };
  for (const file of sampleFiles) {
    reportFile(file);
  }
  reportFile(configFile);
  logger.success("rulesync initialized successfully!");
  logger.info("Next steps:");
  logger.info(
    `1. Edit ${RULESYNC_RELATIVE_DIR_PATH}/**/*.md, ${RULESYNC_RELATIVE_DIR_PATH}/skills/*/${SKILL_FILE_NAME}, ${RULESYNC_MCP_RELATIVE_FILE_PATH}, ${RULESYNC_HOOKS_RELATIVE_FILE_PATH} and ${RULESYNC_AIIGNORE_RELATIVE_FILE_PATH}`
  );
  logger.info("2. Run 'rulesync generate' to create configuration files");
}
|
|
1604
|
+
|
|
1605
|
+
// src/lib/sources.ts
|
|
1606
|
+
import { Semaphore as Semaphore2 } from "es-toolkit/promise";
|
|
1607
|
+
import { join as join5, resolve, sep } from "path";
|
|
1608
|
+
|
|
1609
|
+
// src/lib/sources-lock.ts
|
|
1610
|
+
import { createHash } from "crypto";
|
|
1611
|
+
import { join as join4 } from "path";
|
|
1612
|
+
import { optional, z as z4 } from "zod/mini";
|
|
1613
|
+
// Current lockfile schema version; bump on incompatible format changes.
var LOCKFILE_VERSION = 1;
// One locked skill: a content hash over its files. Migrated legacy
// entries carry "" until re-fetched (see migrateLegacyLock).
var LockedSkillSchema = z4.object({
  integrity: z4.string()
});
// One locked source repository: the user's requested ref (if any), the
// commit SHA it resolved to, when it was resolved, and its skills by name.
var LockedSourceSchema = z4.object({
  requestedRef: optional(z4.string()),
  resolvedRef: z4.string(),
  resolvedAt: optional(z4.string()),
  skills: z4.record(z4.string(), LockedSkillSchema)
});
// Top-level lockfile: version marker plus sources keyed by source key.
var SourcesLockSchema = z4.object({
  lockfileVersion: z4.number(),
  sources: z4.record(z4.string(), LockedSourceSchema)
});
// Pre-v1 lockfile format: no version field, skills stored as a bare
// name array without integrity hashes.
var LegacyLockedSourceSchema = z4.object({
  resolvedRef: z4.string(),
  skills: z4.array(z4.string())
});
var LegacySourcesLockSchema = z4.object({
  sources: z4.record(z4.string(), LegacyLockedSourceSchema)
});
|
|
1634
|
+
function migrateLegacyLock(legacy) {
  // Upgrade a pre-v1 lockfile (skills as a name array) to the current
  // shape (skills keyed by name with integrity hashes). Legacy entries
  // carry no hashes, so integrity starts empty and is filled in by the
  // next `rulesync install --update`.
  const sources = {};
  for (const [sourceKey, legacyEntry] of Object.entries(legacy.sources)) {
    sources[sourceKey] = {
      resolvedRef: legacyEntry.resolvedRef,
      skills: Object.fromEntries(legacyEntry.skills.map((name) => [name, { integrity: "" }]))
    };
  }
  logger.info(
    "Migrated legacy sources lockfile to version 1. Run 'rulesync install --update' to populate integrity hashes."
  );
  return { lockfileVersion: LOCKFILE_VERSION, sources };
}
|
|
1651
|
+
function createEmptyLock() {
  // A blank lockfile at the current schema version.
  const sources = {};
  return { lockfileVersion: LOCKFILE_VERSION, sources };
}
|
|
1654
|
+
async function readLockFile(params) {
  // Load and validate the sources lockfile from `params.baseDir`. A
  // missing, unreadable, or malformed file yields an empty lock (with a
  // log line), and legacy-format files are migrated in memory.
  const lockPath = join4(params.baseDir, RULESYNC_SOURCES_LOCK_RELATIVE_FILE_PATH);
  const exists = await fileExists(lockPath);
  if (!exists) {
    logger.debug("No sources lockfile found, starting fresh.");
    return createEmptyLock();
  }
  try {
    const raw = JSON.parse(await readFileContent(lockPath));
    const current = SourcesLockSchema.safeParse(raw);
    if (current.success) {
      return current.data;
    }
    // Not the current shape — maybe a pre-v1 lockfile.
    const legacy = LegacySourcesLockSchema.safeParse(raw);
    if (legacy.success) {
      return migrateLegacyLock(legacy.data);
    }
    logger.warn(
      `Invalid sources lockfile format (${RULESYNC_SOURCES_LOCK_RELATIVE_FILE_PATH}). Starting fresh.`
    );
    return createEmptyLock();
  } catch {
    // Read or JSON.parse failure — treat as no lockfile.
    logger.warn(
      `Failed to read sources lockfile (${RULESYNC_SOURCES_LOCK_RELATIVE_FILE_PATH}). Starting fresh.`
    );
    return createEmptyLock();
  }
}
|
|
1682
|
+
async function writeLockFile(params) {
  // Persist the lock as pretty-printed JSON with a trailing newline.
  const lockPath = join4(params.baseDir, RULESYNC_SOURCES_LOCK_RELATIVE_FILE_PATH);
  const serialized = `${JSON.stringify(params.lock, null, 2)}\n`;
  await writeFileContent(lockPath, serialized);
  logger.debug(`Wrote sources lockfile to ${lockPath}`);
}
|
|
1688
|
+
function computeSkillIntegrity(files) {
  // Stable sha256 over a skill's files: entries are sorted by path so the
  // hash is order-independent, and each path/content field is
  // NUL-delimited to avoid ambiguity between adjacent fields.
  const hasher = createHash("sha256");
  const ordered = [...files].sort((a, b) => a.path.localeCompare(b.path));
  for (const { path: filePath, content } of ordered) {
    hasher.update(filePath);
    hasher.update("\0");
    hasher.update(content);
    hasher.update("\0");
  }
  return `sha256-${hasher.digest("hex")}`;
}
|
|
1699
|
+
function normalizeSourceKey(source) {
  // Canonicalize a source identifier to lowercase "owner/repo" form so
  // differently spelled references to the same repository compare equal:
  // strips a github.com URL prefix (first match wins, case-insensitive),
  // a "github:" shorthand prefix, trailing slashes, and a ".git" suffix.
  const urlPrefixes = [
    "https://www.github.com/",
    "https://github.com/",
    "http://www.github.com/",
    "http://github.com/"
  ];
  let normalized = source;
  const matched = urlPrefixes.find((prefix) => normalized.toLowerCase().startsWith(prefix));
  if (matched !== undefined) {
    normalized = normalized.slice(matched.length);
  }
  if (normalized.startsWith("github:")) {
    normalized = normalized.slice("github:".length);
  }
  return normalized.replace(/\/+$/, "").replace(/\.git$/, "").toLowerCase();
}
|
|
1720
|
+
function getLockedSource(lock, sourceKey) {
  // Look up a lock entry by source key, tolerating spelling differences
  // (URL vs shorthand, case, trailing slashes) via key normalization.
  // Returns undefined when no entry matches.
  const wanted = normalizeSourceKey(sourceKey);
  const hit = Object.entries(lock.sources).find(([key]) => normalizeSourceKey(key) === wanted);
  return hit?.[1];
}
|
|
1729
|
+
function setLockedSource(lock, sourceKey, entry) {
  // Return a new lock with `entry` stored under the normalized source
  // key, dropping any existing entries that normalize to the same key.
  // The input lock is not mutated.
  const normalized = normalizeSourceKey(sourceKey);
  const kept = Object.entries(lock.sources).filter(
    ([key]) => normalizeSourceKey(key) !== normalized
  );
  return {
    lockfileVersion: lock.lockfileVersion,
    sources: { ...Object.fromEntries(kept), [normalized]: entry }
  };
}
|
|
1745
|
+
function getLockedSkillNames(entry) {
  // Names of every skill recorded under a locked source entry.
  const { skills } = entry;
  return Object.keys(skills);
}
|
|
1748
|
+
|
|
1749
|
+
// src/lib/sources.ts
|
|
1750
|
+
// Fetch skills from every configured remote source, reconciling against the
// sources lockfile. Per-source failures are logged and do not abort the run;
// the lockfile is rewritten only when it changed and the install is not
// frozen. Returns { fetchedSkillCount, sourcesProcessed }.
async function resolveAndFetchSources(params) {
  const { sources, baseDir, options = {} } = params;
  if (sources.length === 0) {
    return { fetchedSkillCount: 0, sourcesProcessed: 0 };
  }
  if (options.skipSources) {
    logger.info("Skipping source fetching.");
    return { fetchedSkillCount: 0, sourcesProcessed: 0 };
  }
  // --update discards the lock entirely so every ref is re-resolved.
  let lock = options.updateSources ? createEmptyLock() : await readLockFile({ baseDir });
  if (options.frozen) {
    // Frozen mode (CI): verify the lockfile covers every configured source
    // and that every locked skill directory exists on disk — fail otherwise,
    // before any network work.
    const missingKeys = [];
    const missingSkills = [];
    const curatedDir = join5(baseDir, RULESYNC_CURATED_SKILLS_RELATIVE_DIR_PATH);
    for (const source of sources) {
      const locked = getLockedSource(lock, source.source);
      if (!locked) {
        missingKeys.push(source.source);
        continue;
      }
      const skillNames = getLockedSkillNames(locked);
      for (const skillName of skillNames) {
        if (!await directoryExists(join5(curatedDir, skillName))) {
          missingSkills.push(`${source.source}:${skillName}`);
        }
      }
    }
    if (missingKeys.length > 0) {
      throw new Error(
        `Frozen install failed: lockfile is missing entries for: ${missingKeys.join(", ")}. Run 'rulesync install' to update the lockfile.`
      );
    }
    if (missingSkills.length > 0) {
      throw new Error(
        `Frozen install failed: locked skills missing from disk: ${missingSkills.join(", ")}. Run 'rulesync install' to fetch missing skills.`
      );
    }
  }
  // Snapshot for the change-detection check before writing the lock back.
  const originalLockJson = JSON.stringify(lock);
  const token = GitHubClient.resolveToken(options.token);
  const client = new GitHubClient({ token });
  // Local skills always win over remote ones of the same name.
  const localSkillNames = await getLocalSkillDirNames(baseDir);
  let totalSkillCount = 0;
  // Tracks names fetched so far across sources, so later sources cannot
  // overwrite a skill already fetched by an earlier one.
  const allFetchedSkillNames = /* @__PURE__ */ new Set();
  for (const sourceEntry of sources) {
    try {
      const { skillCount, fetchedSkillNames, updatedLock } = await fetchSource({
        sourceEntry,
        client,
        baseDir,
        lock,
        localSkillNames,
        alreadyFetchedSkillNames: allFetchedSkillNames,
        updateSources: options.updateSources ?? false
      });
      lock = updatedLock;
      totalSkillCount += skillCount;
      for (const name of fetchedSkillNames) {
        allFetchedSkillNames.add(name);
      }
    } catch (error) {
      // Best-effort per source: log (with auth hints for GitHub errors)
      // and continue with the remaining sources.
      if (error instanceof GitHubClientError) {
        logGitHubAuthHints(error);
      } else {
        logger.error(`Failed to fetch source "${sourceEntry.source}": ${formatError(error)}`);
      }
    }
  }
  // Prune lock entries for sources no longer present in the configuration.
  const sourceKeys = new Set(sources.map((s) => normalizeSourceKey(s.source)));
  const prunedSources = {};
  for (const [key, value] of Object.entries(lock.sources)) {
    if (sourceKeys.has(normalizeSourceKey(key))) {
      prunedSources[key] = value;
    } else {
      logger.debug(`Pruned stale lockfile entry: ${key}`);
    }
  }
  lock = { lockfileVersion: lock.lockfileVersion, sources: prunedSources };
  // Frozen installs never write; otherwise write only on actual change.
  if (!options.frozen && JSON.stringify(lock) !== originalLockJson) {
    await writeLockFile({ baseDir, lock });
  } else {
    logger.debug("Lockfile unchanged, skipping write.");
  }
  return { fetchedSkillCount: totalSkillCount, sourcesProcessed: sources.length };
}
|
|
1835
|
+
async function checkLockedSkillsExist(curatedDir, skillNames) {
  // True when every named skill has a directory under `curatedDir`.
  // Checks run sequentially and short-circuit on the first miss; an
  // empty name list is trivially satisfied.
  for (const skillName of skillNames) {
    const present = await directoryExists(join5(curatedDir, skillName));
    if (!present) {
      return false;
    }
  }
  return true;
}
|
|
1844
|
+
// Fetch all matching skills for one configured source from GitHub, write
// them under the curated-skills directory, and fold the result into the
// lockfile. Returns { skillCount, fetchedSkillNames, updatedLock }; the
// input lock is replaced, not mutated.
async function fetchSource(params) {
  const { sourceEntry, client, baseDir, localSkillNames, alreadyFetchedSkillNames, updateSources } = params;
  let { lock } = params;
  const parsed = parseSource(sourceEntry.source);
  if (parsed.provider === "gitlab") {
    throw new Error("GitLab sources are not yet supported.");
  }
  const sourceKey = sourceEntry.source;
  const locked = getLockedSource(lock, sourceKey);
  const lockedSkillNames = locked ? getLockedSkillNames(locked) : [];
  let ref;
  let resolvedSha;
  let requestedRef;
  if (locked && !updateSources) {
    // Reproducible install: reuse the pinned SHA without touching the network.
    ref = locked.resolvedRef;
    resolvedSha = locked.resolvedRef;
    requestedRef = locked.requestedRef;
    logger.debug(`Using locked ref for ${sourceKey}: ${resolvedSha}`);
  } else {
    // Fresh resolution: requested ref (or the repo's default branch) -> SHA.
    requestedRef = parsed.ref ?? await client.getDefaultBranch(parsed.owner, parsed.repo);
    resolvedSha = await client.resolveRefToSha(parsed.owner, parsed.repo, requestedRef);
    ref = resolvedSha;
    logger.debug(`Resolved ${sourceKey} ref "${requestedRef}" to SHA: ${resolvedSha}`);
  }
  const curatedDir = join5(baseDir, RULESYNC_CURATED_SKILLS_RELATIVE_DIR_PATH);
  if (locked && resolvedSha === locked.resolvedRef && !updateSources) {
    // Same SHA and every locked skill present on disk -> nothing to do.
    const allExist = await checkLockedSkillsExist(curatedDir, lockedSkillNames);
    if (allExist) {
      logger.debug(`SHA unchanged for ${sourceKey}, skipping re-fetch.`);
      return {
        skillCount: 0,
        fetchedSkillNames: lockedSkillNames,
        updatedLock: lock
      };
    }
  }
  // Skill selection: explicit name list, or ["*"] meaning everything.
  const skillFilter = sourceEntry.skills ?? ["*"];
  const isWildcard = skillFilter.length === 1 && skillFilter[0] === "*";
  const skillsBasePath = parsed.path ?? "skills";
  let remoteSkillDirs;
  try {
    const entries = await client.listDirectory(parsed.owner, parsed.repo, skillsBasePath, ref);
    remoteSkillDirs = entries.filter((e) => e.type === "dir").map((e) => ({ name: e.name, path: e.path }));
  } catch (error) {
    // A repo without a skills/ directory is tolerated; anything else bubbles up.
    if (error instanceof GitHubClientError && error.statusCode === 404) {
      logger.warn(`No skills/ directory found in ${sourceKey}. Skipping.`);
      return { skillCount: 0, fetchedSkillNames: [], updatedLock: lock };
    }
    throw error;
  }
  const filteredDirs = isWildcard ? remoteSkillDirs : remoteSkillDirs.filter((d) => skillFilter.includes(d.name));
  // Bounds concurrent GitHub content requests.
  const semaphore = new Semaphore2(FETCH_CONCURRENCY_LIMIT);
  const fetchedSkills = {};
  if (locked) {
    // Clear previously locked skill directories so removed remote files
    // do not linger; refuse paths that escape the curated directory.
    const resolvedCuratedDir = resolve(curatedDir);
    for (const prevSkill of lockedSkillNames) {
      const prevDir = join5(curatedDir, prevSkill);
      if (!resolve(prevDir).startsWith(resolvedCuratedDir + sep)) {
        logger.warn(
          `Skipping removal of "${prevSkill}": resolved path is outside the curated directory.`
        );
        continue;
      }
      if (await directoryExists(prevDir)) {
        await removeDirectory(prevDir);
      }
    }
  }
  for (const skillDir of filteredDirs) {
    // Reject names with traversal characters before using them as paths.
    if (skillDir.name.includes("..") || skillDir.name.includes("/") || skillDir.name.includes("\\")) {
      logger.warn(
        `Skipping skill with invalid name "${skillDir.name}" from ${sourceKey}: contains path traversal characters.`
      );
      continue;
    }
    // Local skills shadow remote ones of the same name.
    if (localSkillNames.has(skillDir.name)) {
      logger.debug(
        `Skipping remote skill "${skillDir.name}" from ${sourceKey}: local skill takes precedence.`
      );
      continue;
    }
    // First source to provide a name wins across sources.
    if (alreadyFetchedSkillNames.has(skillDir.name)) {
      logger.warn(
        `Skipping duplicate skill "${skillDir.name}" from ${sourceKey}: already fetched from another source.`
      );
      continue;
    }
    const allFiles = await listDirectoryRecursive({
      client,
      owner: parsed.owner,
      repo: parsed.repo,
      path: skillDir.path,
      ref,
      semaphore
    });
    // Drop oversized files rather than failing the whole skill.
    const files = allFiles.filter((file) => {
      if (file.size > MAX_FILE_SIZE) {
        logger.warn(
          `Skipping file "${file.path}" (${(file.size / 1024 / 1024).toFixed(2)}MB exceeds ${MAX_FILE_SIZE / 1024 / 1024}MB limit).`
        );
        return false;
      }
      return true;
    });
    const skillFiles = [];
    for (const file of files) {
      // Path of the file relative to the skill's own directory.
      const relativeToSkill = file.path.substring(skillDir.path.length + 1);
      const localFilePath = join5(curatedDir, skillDir.name, relativeToSkill);
      // Throws if the remote-supplied relative path escapes the skill dir.
      checkPathTraversal({
        relativePath: relativeToSkill,
        intendedRootDir: join5(curatedDir, skillDir.name)
      });
      const content = await withSemaphore(
        semaphore,
        () => client.getFileContent(parsed.owner, parsed.repo, file.path, ref)
      );
      await writeFileContent(localFilePath, content);
      skillFiles.push({ path: relativeToSkill, content });
    }
    const integrity = computeSkillIntegrity(skillFiles);
    const lockedSkillEntry = locked?.skills[skillDir.name];
    // Same pinned SHA but different content hash -> warn (possible tampering
    // or a non-immutable ref). Empty legacy integrity is not compared.
    if (lockedSkillEntry && lockedSkillEntry.integrity && lockedSkillEntry.integrity !== integrity && resolvedSha === locked?.resolvedRef) {
      logger.warn(
        `Integrity mismatch for skill "${skillDir.name}" from ${sourceKey}: expected "${lockedSkillEntry.integrity}", got "${integrity}". Content may have been tampered with.`
      );
    }
    fetchedSkills[skillDir.name] = { integrity };
    logger.debug(`Fetched skill "${skillDir.name}" from ${sourceKey}`);
  }
  const fetchedNames = Object.keys(fetchedSkills);
  // Keep lock entries for skills that were skipped this run (e.g. shadowed
  // by a local skill) so they are not silently dropped from the lockfile.
  const mergedSkills = { ...fetchedSkills };
  if (locked) {
    for (const [skillName, skillEntry] of Object.entries(locked.skills)) {
      if (!(skillName in mergedSkills)) {
        mergedSkills[skillName] = skillEntry;
      }
    }
  }
  lock = setLockedSource(lock, sourceKey, {
    requestedRef,
    resolvedRef: resolvedSha,
    resolvedAt: (/* @__PURE__ */ new Date()).toISOString(),
    skills: mergedSkills
  });
  logger.info(
    `Fetched ${fetchedNames.length} skill(s) from ${sourceKey}: ${fetchedNames.join(", ") || "(none)"}`
  );
  return {
    skillCount: fetchedNames.length,
    fetchedSkillNames: fetchedNames,
    updatedLock: lock
  };
}
|
|
1997
|
+
|
|
1998
|
+
// src/cli/commands/install.ts
|
|
1999
|
+
async function installCommand(options) {
  // CLI entry for `rulesync install`: resolve configuration, fetch skills
  // for every configured source, and summarize the outcome.
  logger.configure({
    verbose: options.verbose ?? false,
    silent: options.silent ?? false
  });
  const config = await ConfigResolver.resolve({
    configPath: options.configPath,
    verbose: options.verbose,
    silent: options.silent
  });
  const sources = config.getSources();
  if (sources.length === 0) {
    logger.warn("No sources defined in configuration. Nothing to install.");
    return;
  }
  logger.debug(`Installing skills from ${sources.length} source(s)...`);
  const { fetchedSkillCount, sourcesProcessed } = await resolveAndFetchSources({
    sources,
    baseDir: process.cwd(),
    options: {
      updateSources: options.update,
      frozen: options.frozen,
      token: options.token
    }
  });
  if (fetchedSkillCount > 0) {
    logger.success(`Installed ${fetchedSkillCount} skill(s) from ${sourcesProcessed} source(s).`);
  } else {
    logger.success(`All skills up to date (${sourcesProcessed} source(s) checked).`);
  }
}
|
|
2032
|
+
|
|
2033
|
+
// src/cli/commands/mcp.ts
|
|
2034
|
+
import { FastMCP } from "fastmcp";
|
|
2035
|
+
|
|
2036
|
+
// src/mcp/tools.ts
|
|
2037
|
+
import { z as z13 } from "zod/mini";
|
|
2038
|
+
|
|
2039
|
+
// src/mcp/commands.ts
|
|
2040
|
+
import { basename, join as join6 } from "path";
|
|
2041
|
+
import { z as z5 } from "zod/mini";
|
|
2042
|
+
// Upper bound on a single command file's size (frontmatter + body), 1MB.
var maxCommandSizeBytes = 1024 * 1024;
// Upper bound on the number of command files that may exist at once.
var maxCommandsCount = 1e3;
|
|
2044
|
+
async function listCommands() {
  // Enumerate every *.md command in the rulesync commands directory,
  // returning each file's cwd-relative path and parsed frontmatter.
  // Unparseable files are logged and omitted; an unreadable directory
  // yields an empty list.
  const commandsDir = join6(process.cwd(), RULESYNC_COMMANDS_RELATIVE_DIR_PATH);
  try {
    const entries = await listDirectoryFiles(commandsDir);
    const loaded = await Promise.all(
      entries
        .filter((entry) => entry.endsWith(".md"))
        .map(async (entry) => {
          try {
            const command = await RulesyncCommand.fromFile({
              relativeFilePath: entry
            });
            return {
              relativePathFromCwd: join6(RULESYNC_COMMANDS_RELATIVE_DIR_PATH, entry),
              frontmatter: command.getFrontmatter()
            };
          } catch (error) {
            logger.error(`Failed to read command file ${entry}: ${formatError(error)}`);
            return null;
          }
        })
    );
    return loaded.filter((command) => command !== null);
  } catch (error) {
    logger.error(
      `Failed to read commands directory (${RULESYNC_COMMANDS_RELATIVE_DIR_PATH}): ${formatError(error)}`
    );
    return [];
  }
}
|
|
2074
|
+
async function getCommand({ relativePathFromCwd }) {
  // Read one command file and return its frontmatter and body. The input
  // path is traversal-checked and reduced to its basename, pinning reads
  // to the commands directory. Read failures are wrapped with context.
  checkPathTraversal({
    relativePath: relativePathFromCwd,
    intendedRootDir: process.cwd()
  });
  const filename = basename(relativePathFromCwd);
  try {
    const command = await RulesyncCommand.fromFile({ relativeFilePath: filename });
    return {
      relativePathFromCwd: join6(RULESYNC_COMMANDS_RELATIVE_DIR_PATH, filename),
      frontmatter: command.getFrontmatter(),
      body: command.getBody()
    };
  } catch (error) {
    throw new Error(`Failed to read command file ${relativePathFromCwd}: ${formatError(error)}`, {
      cause: error
    });
  }
}
|
|
2095
|
+
async function putCommand({
  relativePathFromCwd,
  frontmatter,
  body
}) {
  // Create or update (upsert) a command file in the rulesync commands
  // directory. The path is traversal-checked and reduced to its basename;
  // a per-file byte-size cap and, for new files, a total-count cap are
  // enforced. Write failures are wrapped with context.
  checkPathTraversal({
    relativePath: relativePathFromCwd,
    intendedRootDir: process.cwd()
  });
  const filename = basename(relativePathFromCwd);
  // Measure in actual UTF-8 bytes so the "bytes" in the error message is
  // accurate and the check matches the byte-based cap used for ignore
  // files; String#length counts UTF-16 code units and under-counts
  // multi-byte content.
  const estimatedSize = Buffer.byteLength(JSON.stringify(frontmatter), "utf8") + Buffer.byteLength(body, "utf8");
  if (estimatedSize > maxCommandSizeBytes) {
    throw new Error(
      `Command size ${estimatedSize} bytes exceeds maximum ${maxCommandSizeBytes} bytes (1MB) for ${relativePathFromCwd}`
    );
  }
  try {
    const existingCommands = await listCommands();
    const isUpdate = existingCommands.some(
      (command2) => command2.relativePathFromCwd === join6(RULESYNC_COMMANDS_RELATIVE_DIR_PATH, filename)
    );
    // The count cap applies only to new files; updates are always allowed.
    if (!isUpdate && existingCommands.length >= maxCommandsCount) {
      throw new Error(
        `Maximum number of commands (${maxCommandsCount}) reached in ${RULESYNC_COMMANDS_RELATIVE_DIR_PATH}`
      );
    }
    const fileContent = stringifyFrontmatter(body, frontmatter);
    // validate: true rejects malformed frontmatter before anything is
    // written to disk.
    const command = new RulesyncCommand({
      baseDir: process.cwd(),
      relativeDirPath: RULESYNC_COMMANDS_RELATIVE_DIR_PATH,
      relativeFilePath: filename,
      frontmatter,
      body,
      fileContent,
      validate: true
    });
    const commandsDir = join6(process.cwd(), RULESYNC_COMMANDS_RELATIVE_DIR_PATH);
    await ensureDir(commandsDir);
    await writeFileContent(command.getFilePath(), command.getFileContent());
    return {
      relativePathFromCwd: join6(RULESYNC_COMMANDS_RELATIVE_DIR_PATH, filename),
      frontmatter: command.getFrontmatter(),
      body: command.getBody()
    };
  } catch (error) {
    throw new Error(`Failed to write command file ${relativePathFromCwd}: ${formatError(error)}`, {
      cause: error
    });
  }
}
|
|
2145
|
+
async function deleteCommand({ relativePathFromCwd }) {
  // Remove a command file. The input path is traversal-checked and only
  // its basename is used, pinning deletion to the commands directory.
  checkPathTraversal({
    relativePath: relativePathFromCwd,
    intendedRootDir: process.cwd()
  });
  const filename = basename(relativePathFromCwd);
  const fullPath = join6(process.cwd(), RULESYNC_COMMANDS_RELATIVE_DIR_PATH, filename);
  try {
    await removeFile(fullPath);
  } catch (error) {
    throw new Error(`Failed to delete command file ${relativePathFromCwd}: ${formatError(error)}`, {
      cause: error
    });
  }
  return {
    relativePathFromCwd: join6(RULESYNC_COMMANDS_RELATIVE_DIR_PATH, filename)
  };
}
|
|
2163
|
+
// zod/mini parameter schemas for the command MCP tools; shared between
// tool registration and request validation.
var commandToolSchemas = {
  // No parameters.
  listCommands: z5.object({}),
  getCommand: z5.object({
    relativePathFromCwd: z5.string()
  }),
  putCommand: z5.object({
    relativePathFromCwd: z5.string(),
    // Validated against the rulesync command frontmatter schema.
    frontmatter: RulesyncCommandFrontmatterSchema,
    body: z5.string()
  }),
  deleteCommand: z5.object({
    relativePathFromCwd: z5.string()
  })
};
|
|
2177
|
+
// MCP tool definitions for command CRUD. Each tool validates its arguments
// against commandToolSchemas and returns a pretty-printed JSON string.
var commandTools = {
  listCommands: {
    name: "listCommands",
    description: `List all commands from ${join6(RULESYNC_COMMANDS_RELATIVE_DIR_PATH, "*.md")} with their frontmatter.`,
    parameters: commandToolSchemas.listCommands,
    execute: async () => {
      const commands = await listCommands();
      const output = { commands };
      return JSON.stringify(output, null, 2);
    }
  },
  getCommand: {
    name: "getCommand",
    description: "Get detailed information about a specific command. relativePathFromCwd parameter is required.",
    parameters: commandToolSchemas.getCommand,
    execute: async (args) => {
      const result = await getCommand({ relativePathFromCwd: args.relativePathFromCwd });
      return JSON.stringify(result, null, 2);
    }
  },
  putCommand: {
    name: "putCommand",
    description: "Create or update a command (upsert operation). relativePathFromCwd, frontmatter, and body parameters are required.",
    parameters: commandToolSchemas.putCommand,
    execute: async (args) => {
      const result = await putCommand({
        relativePathFromCwd: args.relativePathFromCwd,
        frontmatter: args.frontmatter,
        body: args.body
      });
      return JSON.stringify(result, null, 2);
    }
  },
  deleteCommand: {
    name: "deleteCommand",
    description: "Delete a command file. relativePathFromCwd parameter is required.",
    parameters: commandToolSchemas.deleteCommand,
    execute: async (args) => {
      const result = await deleteCommand({ relativePathFromCwd: args.relativePathFromCwd });
      return JSON.stringify(result, null, 2);
    }
  }
};
|
|
2220
|
+
|
|
2221
|
+
// src/mcp/generate.ts
|
|
2222
|
+
import { z as z6 } from "zod/mini";
|
|
2223
|
+
// Options accepted by the executeGenerate MCP tool. Every field is
// optional; unset fields fall back to rulesync.jsonc / built-in defaults
// during configuration resolution.
var generateOptionsSchema = z6.object({
  targets: z6.optional(z6.array(z6.string())),
  features: z6.optional(z6.array(z6.string())),
  delete: z6.optional(z6.boolean()),
  global: z6.optional(z6.boolean()),
  simulateCommands: z6.optional(z6.boolean()),
  simulateSubagents: z6.optional(z6.boolean()),
  simulateSkills: z6.optional(z6.boolean())
});
|
|
2232
|
+
// Run `rulesync generate` on behalf of the MCP server. Never throws:
// failures are reported as { success: false, error } so the tool call
// always returns a serializable payload.
async function executeGenerate(options = {}) {
  try {
    // Refuse to generate before the workspace has been initialized.
    const exists = await checkRulesyncDirExists({ baseDir: process.cwd() });
    if (!exists) {
      return {
        success: false,
        error: ".rulesync directory does not exist. Please run 'rulesync init' first or create the directory manually."
      };
    }
    const config = await ConfigResolver.resolve({
      // eslint-disable-next-line no-type-assertion/no-type-assertion
      targets: options.targets,
      // eslint-disable-next-line no-type-assertion/no-type-assertion
      features: options.features,
      delete: options.delete,
      global: options.global,
      simulateCommands: options.simulateCommands,
      simulateSubagents: options.simulateSubagents,
      simulateSkills: options.simulateSkills,
      // Always use default baseDirs (process.cwd()) and configPath;
      // verbose and silent are meaningless in MCP context.
      verbose: false,
      silent: true
    });
    const generateResult = await generate({ config });
    return buildSuccessResponse({ generateResult, config });
  } catch (error) {
    return {
      success: false,
      error: formatError(error)
    };
  }
}
|
|
2265
|
+
function buildSuccessResponse(params) {
  // Shape a successful generate run into the MCP tool response: the
  // per-feature counts (plus a grand total) and the effective
  // configuration that produced them.
  const { generateResult, config } = params;
  const {
    rulesCount,
    ignoreCount,
    mcpCount,
    commandsCount,
    subagentsCount,
    skillsCount,
    hooksCount
  } = generateResult;
  return {
    success: true,
    result: {
      rulesCount,
      ignoreCount,
      mcpCount,
      commandsCount,
      subagentsCount,
      skillsCount,
      hooksCount,
      totalCount: calculateTotalCount(generateResult)
    },
    config: {
      targets: config.getTargets(),
      features: config.getFeatures(),
      global: config.getGlobal(),
      delete: config.getDelete(),
      simulateCommands: config.getSimulateCommands(),
      simulateSubagents: config.getSimulateSubagents(),
      simulateSkills: config.getSimulateSkills()
    }
  };
}
|
|
2291
|
+
// Parameter schema for the executeGenerate MCP tool.
var generateToolSchemas = {
  executeGenerate: generateOptionsSchema
};
|
|
2294
|
+
// MCP tool definition wrapping executeGenerate; returns its result as a
// pretty-printed JSON string.
var generateTools = {
  executeGenerate: {
    name: "executeGenerate",
    description: "Execute the rulesync generate command to create output files for AI tools. Uses rulesync.jsonc settings by default, but options can override them.",
    parameters: generateToolSchemas.executeGenerate,
    execute: async (options = {}) => {
      const result = await executeGenerate(options);
      return JSON.stringify(result, null, 2);
    }
  }
};
|
|
2305
|
+
|
|
2306
|
+
// src/mcp/ignore.ts
|
|
2307
|
+
import { join as join7 } from "path";
|
|
2308
|
+
import { z as z7 } from "zod/mini";
|
|
2309
|
+
// Upper bound on the aiignore file size: 100KB, measured in UTF-8 bytes.
var maxIgnoreFileSizeBytes = 100 * 1024;
|
|
2310
|
+
async function getIgnoreFile() {
  // Read the aiignore file (relative to cwd) and return its content.
  // Read failures are wrapped with the file path for context.
  const ignoreFilePath = join7(process.cwd(), RULESYNC_AIIGNORE_RELATIVE_FILE_PATH);
  try {
    const content = await readFileContent(ignoreFilePath);
    return { relativePathFromCwd: RULESYNC_AIIGNORE_RELATIVE_FILE_PATH, content };
  } catch (error) {
    throw new Error(
      `Failed to read ignore file (${RULESYNC_AIIGNORE_RELATIVE_FILE_PATH}): ${formatError(error)}`,
      { cause: error }
    );
  }
}
|
|
2327
|
+
async function putIgnoreFile({ content }) {
  // Overwrite the aiignore file with `content`, enforcing the 100KB cap
  // measured in UTF-8 bytes. Write failures are wrapped with context.
  const contentSizeBytes = Buffer.byteLength(content, "utf8");
  if (contentSizeBytes > maxIgnoreFileSizeBytes) {
    throw new Error(
      `Ignore file size ${contentSizeBytes} bytes exceeds maximum ${maxIgnoreFileSizeBytes} bytes (100KB) for ${RULESYNC_AIIGNORE_RELATIVE_FILE_PATH}`
    );
  }
  const ignoreFilePath = join7(process.cwd(), RULESYNC_AIIGNORE_RELATIVE_FILE_PATH);
  try {
    // NOTE(review): ensureDir targets cwd itself rather than the ignore
    // file's parent directory — presumably writeFileContent creates
    // intermediate directories, or the target lives directly in cwd; confirm.
    await ensureDir(process.cwd());
    await writeFileContent(ignoreFilePath, content);
    return { relativePathFromCwd: RULESYNC_AIIGNORE_RELATIVE_FILE_PATH, content };
  } catch (error) {
    throw new Error(
      `Failed to write ignore file (${RULESYNC_AIIGNORE_RELATIVE_FILE_PATH}): ${formatError(error)}`,
      { cause: error }
    );
  }
}
|
|
2351
|
+
async function deleteIgnoreFile() {
|
|
2352
|
+
const aiignorePath = join7(process.cwd(), RULESYNC_AIIGNORE_RELATIVE_FILE_PATH);
|
|
2353
|
+
const legacyIgnorePath = join7(process.cwd(), RULESYNC_IGNORE_RELATIVE_FILE_PATH);
|
|
2354
|
+
try {
|
|
2355
|
+
await Promise.all([removeFile(aiignorePath), removeFile(legacyIgnorePath)]);
|
|
2356
|
+
return {
|
|
2357
|
+
// Keep the historical return shape — point to the recommended file path
|
|
2358
|
+
// for backward compatibility.
|
|
2359
|
+
relativePathFromCwd: RULESYNC_AIIGNORE_RELATIVE_FILE_PATH
|
|
2360
|
+
};
|
|
2361
|
+
} catch (error) {
|
|
2362
|
+
throw new Error(
|
|
2363
|
+
`Failed to delete ignore files (${RULESYNC_AIIGNORE_RELATIVE_FILE_PATH}, ${RULESYNC_IGNORE_RELATIVE_FILE_PATH}): ${formatError(error)}`,
|
|
2364
|
+
{
|
|
2365
|
+
cause: error
|
|
2366
|
+
}
|
|
2367
|
+
);
|
|
2368
|
+
}
|
|
2369
|
+
}
|
|
2370
|
+
var ignoreToolSchemas = {
|
|
2371
|
+
getIgnoreFile: z7.object({}),
|
|
2372
|
+
putIgnoreFile: z7.object({
|
|
2373
|
+
content: z7.string()
|
|
2374
|
+
}),
|
|
2375
|
+
deleteIgnoreFile: z7.object({})
|
|
2376
|
+
};
|
|
2377
|
+
var ignoreTools = {
|
|
2378
|
+
getIgnoreFile: {
|
|
2379
|
+
name: "getIgnoreFile",
|
|
2380
|
+
description: "Get the content of the .rulesyncignore file from the project root.",
|
|
2381
|
+
parameters: ignoreToolSchemas.getIgnoreFile,
|
|
2382
|
+
execute: async () => {
|
|
2383
|
+
const result = await getIgnoreFile();
|
|
2384
|
+
return JSON.stringify(result, null, 2);
|
|
2385
|
+
}
|
|
2386
|
+
},
|
|
2387
|
+
putIgnoreFile: {
|
|
2388
|
+
name: "putIgnoreFile",
|
|
2389
|
+
description: "Create or update the .rulesync/.aiignore file (upsert operation). content parameter is required.",
|
|
2390
|
+
parameters: ignoreToolSchemas.putIgnoreFile,
|
|
2391
|
+
execute: async (args) => {
|
|
2392
|
+
const result = await putIgnoreFile({ content: args.content });
|
|
2393
|
+
return JSON.stringify(result, null, 2);
|
|
2394
|
+
}
|
|
2395
|
+
},
|
|
2396
|
+
deleteIgnoreFile: {
|
|
2397
|
+
name: "deleteIgnoreFile",
|
|
2398
|
+
description: "Delete the .rulesyncignore and .rulesync/.aiignore files.",
|
|
2399
|
+
parameters: ignoreToolSchemas.deleteIgnoreFile,
|
|
2400
|
+
execute: async () => {
|
|
2401
|
+
const result = await deleteIgnoreFile();
|
|
2402
|
+
return JSON.stringify(result, null, 2);
|
|
2403
|
+
}
|
|
2404
|
+
}
|
|
2405
|
+
};
|
|
2406
|
+
|
|
2407
|
+
// src/mcp/import.ts
|
|
2408
|
+
import { z as z8 } from "zod/mini";
|
|
2409
|
+
var importOptionsSchema = z8.object({
|
|
2410
|
+
target: z8.string(),
|
|
2411
|
+
features: z8.optional(z8.array(z8.string())),
|
|
2412
|
+
global: z8.optional(z8.boolean())
|
|
2413
|
+
});
|
|
2414
|
+
async function executeImport(options) {
|
|
2415
|
+
try {
|
|
2416
|
+
if (!options.target) {
|
|
2417
|
+
return {
|
|
2418
|
+
success: false,
|
|
2419
|
+
error: "target is required. Please specify a tool to import from."
|
|
2420
|
+
};
|
|
2421
|
+
}
|
|
2422
|
+
const config = await ConfigResolver.resolve({
|
|
2423
|
+
// eslint-disable-next-line no-type-assertion/no-type-assertion
|
|
2424
|
+
targets: [options.target],
|
|
2425
|
+
// eslint-disable-next-line no-type-assertion/no-type-assertion
|
|
2426
|
+
features: options.features,
|
|
2427
|
+
global: options.global,
|
|
2428
|
+
// Always use default baseDirs (process.cwd()) and configPath
|
|
2429
|
+
// verbose and silent are meaningless in MCP context
|
|
2430
|
+
verbose: false,
|
|
2431
|
+
silent: true
|
|
2432
|
+
});
|
|
2433
|
+
const tool = config.getTargets()[0];
|
|
2434
|
+
const importResult = await importFromTool({ config, tool });
|
|
2435
|
+
return buildSuccessResponse2({ importResult, config, tool });
|
|
2436
|
+
} catch (error) {
|
|
2437
|
+
return {
|
|
2438
|
+
success: false,
|
|
2439
|
+
error: formatError(error)
|
|
2440
|
+
};
|
|
2441
|
+
}
|
|
2442
|
+
}
|
|
2443
|
+
function buildSuccessResponse2(params) {
|
|
2444
|
+
const { importResult, config, tool } = params;
|
|
2445
|
+
const totalCount = calculateTotalCount(importResult);
|
|
2446
|
+
return {
|
|
2447
|
+
success: true,
|
|
2448
|
+
result: {
|
|
2449
|
+
rulesCount: importResult.rulesCount,
|
|
2450
|
+
ignoreCount: importResult.ignoreCount,
|
|
2451
|
+
mcpCount: importResult.mcpCount,
|
|
2452
|
+
commandsCount: importResult.commandsCount,
|
|
2453
|
+
subagentsCount: importResult.subagentsCount,
|
|
2454
|
+
skillsCount: importResult.skillsCount,
|
|
2455
|
+
hooksCount: importResult.hooksCount,
|
|
2456
|
+
totalCount
|
|
2457
|
+
},
|
|
2458
|
+
config: {
|
|
2459
|
+
target: tool,
|
|
2460
|
+
features: config.getFeatures(),
|
|
2461
|
+
global: config.getGlobal()
|
|
2462
|
+
}
|
|
2463
|
+
};
|
|
2464
|
+
}
|
|
2465
|
+
var importToolSchemas = {
|
|
2466
|
+
executeImport: importOptionsSchema
|
|
2467
|
+
};
|
|
2468
|
+
var importTools = {
|
|
2469
|
+
executeImport: {
|
|
2470
|
+
name: "executeImport",
|
|
2471
|
+
description: "Execute the rulesync import command to import configuration files from an AI tool into .rulesync directory. Requires exactly one target tool to import from.",
|
|
2472
|
+
parameters: importToolSchemas.executeImport,
|
|
2473
|
+
execute: async (options) => {
|
|
2474
|
+
const result = await executeImport(options);
|
|
2475
|
+
return JSON.stringify(result, null, 2);
|
|
2476
|
+
}
|
|
2477
|
+
}
|
|
2478
|
+
};
|
|
2479
|
+
|
|
2480
|
+
// src/mcp/mcp.ts
|
|
2481
|
+
import { join as join8 } from "path";
|
|
2482
|
+
import { z as z9 } from "zod/mini";
|
|
2483
|
+
var maxMcpSizeBytes = 1024 * 1024;
|
|
2484
|
+
async function getMcpFile() {
|
|
2485
|
+
try {
|
|
2486
|
+
const rulesyncMcp = await RulesyncMcp.fromFile({
|
|
2487
|
+
validate: true
|
|
2488
|
+
});
|
|
2489
|
+
const relativePathFromCwd = join8(
|
|
2490
|
+
rulesyncMcp.getRelativeDirPath(),
|
|
2491
|
+
rulesyncMcp.getRelativeFilePath()
|
|
2492
|
+
);
|
|
2493
|
+
return {
|
|
2494
|
+
relativePathFromCwd,
|
|
2495
|
+
content: rulesyncMcp.getFileContent()
|
|
2496
|
+
};
|
|
2497
|
+
} catch (error) {
|
|
2498
|
+
throw new Error(
|
|
2499
|
+
`Failed to read MCP file (${RULESYNC_MCP_RELATIVE_FILE_PATH}): ${formatError(error)}`,
|
|
2500
|
+
{
|
|
2501
|
+
cause: error
|
|
2502
|
+
}
|
|
2503
|
+
);
|
|
2504
|
+
}
|
|
2505
|
+
}
|
|
2506
|
+
async function putMcpFile({ content }) {
|
|
2507
|
+
if (content.length > maxMcpSizeBytes) {
|
|
2508
|
+
throw new Error(
|
|
2509
|
+
`MCP file size ${content.length} bytes exceeds maximum ${maxMcpSizeBytes} bytes (1MB) for ${RULESYNC_MCP_RELATIVE_FILE_PATH}`
|
|
2510
|
+
);
|
|
2511
|
+
}
|
|
2512
|
+
try {
|
|
2513
|
+
JSON.parse(content);
|
|
2514
|
+
} catch (error) {
|
|
2515
|
+
throw new Error(
|
|
2516
|
+
`Invalid JSON format in MCP file (${RULESYNC_MCP_RELATIVE_FILE_PATH}): ${formatError(error)}`,
|
|
2517
|
+
{
|
|
2518
|
+
cause: error
|
|
2519
|
+
}
|
|
2520
|
+
);
|
|
2521
|
+
}
|
|
2522
|
+
try {
|
|
2523
|
+
const baseDir = process.cwd();
|
|
2524
|
+
const paths = RulesyncMcp.getSettablePaths();
|
|
2525
|
+
const relativeDirPath = paths.recommended.relativeDirPath;
|
|
2526
|
+
const relativeFilePath = paths.recommended.relativeFilePath;
|
|
2527
|
+
const fullPath = join8(baseDir, relativeDirPath, relativeFilePath);
|
|
2528
|
+
const rulesyncMcp = new RulesyncMcp({
|
|
2529
|
+
baseDir,
|
|
2530
|
+
relativeDirPath,
|
|
2531
|
+
relativeFilePath,
|
|
2532
|
+
fileContent: content,
|
|
2533
|
+
validate: true
|
|
2534
|
+
});
|
|
2535
|
+
await ensureDir(join8(baseDir, relativeDirPath));
|
|
2536
|
+
await writeFileContent(fullPath, content);
|
|
2537
|
+
const relativePathFromCwd = join8(relativeDirPath, relativeFilePath);
|
|
2538
|
+
return {
|
|
2539
|
+
relativePathFromCwd,
|
|
2540
|
+
content: rulesyncMcp.getFileContent()
|
|
2541
|
+
};
|
|
2542
|
+
} catch (error) {
|
|
2543
|
+
throw new Error(
|
|
2544
|
+
`Failed to write MCP file (${RULESYNC_MCP_RELATIVE_FILE_PATH}): ${formatError(error)}`,
|
|
2545
|
+
{
|
|
2546
|
+
cause: error
|
|
2547
|
+
}
|
|
2548
|
+
);
|
|
2549
|
+
}
|
|
2550
|
+
}
|
|
2551
|
+
async function deleteMcpFile() {
|
|
2552
|
+
try {
|
|
2553
|
+
const baseDir = process.cwd();
|
|
2554
|
+
const paths = RulesyncMcp.getSettablePaths();
|
|
2555
|
+
const recommendedPath = join8(
|
|
2556
|
+
baseDir,
|
|
2557
|
+
paths.recommended.relativeDirPath,
|
|
2558
|
+
paths.recommended.relativeFilePath
|
|
2559
|
+
);
|
|
2560
|
+
const legacyPath = join8(baseDir, paths.legacy.relativeDirPath, paths.legacy.relativeFilePath);
|
|
2561
|
+
await removeFile(recommendedPath);
|
|
2562
|
+
await removeFile(legacyPath);
|
|
2563
|
+
const relativePathFromCwd = join8(
|
|
2564
|
+
paths.recommended.relativeDirPath,
|
|
2565
|
+
paths.recommended.relativeFilePath
|
|
2566
|
+
);
|
|
2567
|
+
return {
|
|
2568
|
+
relativePathFromCwd
|
|
2569
|
+
};
|
|
2570
|
+
} catch (error) {
|
|
2571
|
+
throw new Error(
|
|
2572
|
+
`Failed to delete MCP file (${RULESYNC_MCP_RELATIVE_FILE_PATH}): ${formatError(error)}`,
|
|
2573
|
+
{
|
|
2574
|
+
cause: error
|
|
2575
|
+
}
|
|
2576
|
+
);
|
|
2577
|
+
}
|
|
2578
|
+
}
|
|
2579
|
+
var mcpToolSchemas = {
|
|
2580
|
+
getMcpFile: z9.object({}),
|
|
2581
|
+
putMcpFile: z9.object({
|
|
2582
|
+
content: z9.string()
|
|
2583
|
+
}),
|
|
2584
|
+
deleteMcpFile: z9.object({})
|
|
2585
|
+
};
|
|
2586
|
+
var mcpTools = {
|
|
2587
|
+
getMcpFile: {
|
|
2588
|
+
name: "getMcpFile",
|
|
2589
|
+
description: `Get the MCP configuration file (${RULESYNC_MCP_RELATIVE_FILE_PATH}).`,
|
|
2590
|
+
parameters: mcpToolSchemas.getMcpFile,
|
|
2591
|
+
execute: async () => {
|
|
2592
|
+
const result = await getMcpFile();
|
|
2593
|
+
return JSON.stringify(result, null, 2);
|
|
2594
|
+
}
|
|
2595
|
+
},
|
|
2596
|
+
putMcpFile: {
|
|
2597
|
+
name: "putMcpFile",
|
|
2598
|
+
description: "Create or update the MCP configuration file (upsert operation). content parameter is required and must be valid JSON.",
|
|
2599
|
+
parameters: mcpToolSchemas.putMcpFile,
|
|
2600
|
+
execute: async (args) => {
|
|
2601
|
+
const result = await putMcpFile({ content: args.content });
|
|
2602
|
+
return JSON.stringify(result, null, 2);
|
|
2603
|
+
}
|
|
2604
|
+
},
|
|
2605
|
+
deleteMcpFile: {
|
|
2606
|
+
name: "deleteMcpFile",
|
|
2607
|
+
description: "Delete the MCP configuration file.",
|
|
2608
|
+
parameters: mcpToolSchemas.deleteMcpFile,
|
|
2609
|
+
execute: async () => {
|
|
2610
|
+
const result = await deleteMcpFile();
|
|
2611
|
+
return JSON.stringify(result, null, 2);
|
|
2612
|
+
}
|
|
2613
|
+
}
|
|
2614
|
+
};
|
|
2615
|
+
|
|
2616
|
+
// src/mcp/rules.ts
|
|
2617
|
+
import { basename as basename2, join as join9 } from "path";
|
|
2618
|
+
import { z as z10 } from "zod/mini";
|
|
2619
|
+
var maxRuleSizeBytes = 1024 * 1024;
|
|
2620
|
+
var maxRulesCount = 1e3;
|
|
2621
|
+
async function listRules() {
|
|
2622
|
+
const rulesDir = join9(process.cwd(), RULESYNC_RULES_RELATIVE_DIR_PATH);
|
|
2623
|
+
try {
|
|
2624
|
+
const files = await listDirectoryFiles(rulesDir);
|
|
2625
|
+
const mdFiles = files.filter((file) => file.endsWith(".md"));
|
|
2626
|
+
const rules = await Promise.all(
|
|
2627
|
+
mdFiles.map(async (file) => {
|
|
2628
|
+
try {
|
|
2629
|
+
const rule = await RulesyncRule.fromFile({
|
|
2630
|
+
relativeFilePath: file,
|
|
2631
|
+
validate: true
|
|
2632
|
+
});
|
|
2633
|
+
const frontmatter = rule.getFrontmatter();
|
|
2634
|
+
return {
|
|
2635
|
+
relativePathFromCwd: join9(RULESYNC_RULES_RELATIVE_DIR_PATH, file),
|
|
2636
|
+
frontmatter
|
|
2637
|
+
};
|
|
2638
|
+
} catch (error) {
|
|
2639
|
+
logger.error(`Failed to read rule file ${file}: ${formatError(error)}`);
|
|
2640
|
+
return null;
|
|
2641
|
+
}
|
|
2642
|
+
})
|
|
2643
|
+
);
|
|
2644
|
+
return rules.filter((rule) => rule !== null);
|
|
2645
|
+
} catch (error) {
|
|
2646
|
+
logger.error(
|
|
2647
|
+
`Failed to read rules directory (${RULESYNC_RULES_RELATIVE_DIR_PATH}): ${formatError(error)}`
|
|
2648
|
+
);
|
|
2649
|
+
return [];
|
|
2650
|
+
}
|
|
2651
|
+
}
|
|
2652
|
+
async function getRule({ relativePathFromCwd }) {
|
|
2653
|
+
checkPathTraversal({
|
|
2654
|
+
relativePath: relativePathFromCwd,
|
|
2655
|
+
intendedRootDir: process.cwd()
|
|
2656
|
+
});
|
|
2657
|
+
const filename = basename2(relativePathFromCwd);
|
|
2658
|
+
try {
|
|
2659
|
+
const rule = await RulesyncRule.fromFile({
|
|
2660
|
+
relativeFilePath: filename,
|
|
2661
|
+
validate: true
|
|
2662
|
+
});
|
|
2663
|
+
return {
|
|
2664
|
+
relativePathFromCwd: join9(RULESYNC_RULES_RELATIVE_DIR_PATH, filename),
|
|
2665
|
+
frontmatter: rule.getFrontmatter(),
|
|
2666
|
+
body: rule.getBody()
|
|
2667
|
+
};
|
|
2668
|
+
} catch (error) {
|
|
2669
|
+
throw new Error(`Failed to read rule file ${relativePathFromCwd}: ${formatError(error)}`, {
|
|
2670
|
+
cause: error
|
|
2671
|
+
});
|
|
2672
|
+
}
|
|
2673
|
+
}
|
|
2674
|
+
async function putRule({
|
|
2675
|
+
relativePathFromCwd,
|
|
2676
|
+
frontmatter,
|
|
2677
|
+
body
|
|
2678
|
+
}) {
|
|
2679
|
+
checkPathTraversal({
|
|
2680
|
+
relativePath: relativePathFromCwd,
|
|
2681
|
+
intendedRootDir: process.cwd()
|
|
2682
|
+
});
|
|
2683
|
+
const filename = basename2(relativePathFromCwd);
|
|
2684
|
+
const estimatedSize = JSON.stringify(frontmatter).length + body.length;
|
|
2685
|
+
if (estimatedSize > maxRuleSizeBytes) {
|
|
2686
|
+
throw new Error(
|
|
2687
|
+
`Rule size ${estimatedSize} bytes exceeds maximum ${maxRuleSizeBytes} bytes (1MB) for ${relativePathFromCwd}`
|
|
2688
|
+
);
|
|
2689
|
+
}
|
|
2690
|
+
try {
|
|
2691
|
+
const existingRules = await listRules();
|
|
2692
|
+
const isUpdate = existingRules.some(
|
|
2693
|
+
(rule2) => rule2.relativePathFromCwd === join9(RULESYNC_RULES_RELATIVE_DIR_PATH, filename)
|
|
2694
|
+
);
|
|
2695
|
+
if (!isUpdate && existingRules.length >= maxRulesCount) {
|
|
2696
|
+
throw new Error(
|
|
2697
|
+
`Maximum number of rules (${maxRulesCount}) reached in ${RULESYNC_RULES_RELATIVE_DIR_PATH}`
|
|
2698
|
+
);
|
|
2699
|
+
}
|
|
2700
|
+
const rule = new RulesyncRule({
|
|
2701
|
+
baseDir: process.cwd(),
|
|
2702
|
+
relativeDirPath: RULESYNC_RULES_RELATIVE_DIR_PATH,
|
|
2703
|
+
relativeFilePath: filename,
|
|
2704
|
+
frontmatter,
|
|
2705
|
+
body,
|
|
2706
|
+
validate: true
|
|
2707
|
+
});
|
|
2708
|
+
const rulesDir = join9(process.cwd(), RULESYNC_RULES_RELATIVE_DIR_PATH);
|
|
2709
|
+
await ensureDir(rulesDir);
|
|
2710
|
+
await writeFileContent(rule.getFilePath(), rule.getFileContent());
|
|
2711
|
+
return {
|
|
2712
|
+
relativePathFromCwd: join9(RULESYNC_RULES_RELATIVE_DIR_PATH, filename),
|
|
2713
|
+
frontmatter: rule.getFrontmatter(),
|
|
2714
|
+
body: rule.getBody()
|
|
2715
|
+
};
|
|
2716
|
+
} catch (error) {
|
|
2717
|
+
throw new Error(`Failed to write rule file ${relativePathFromCwd}: ${formatError(error)}`, {
|
|
2718
|
+
cause: error
|
|
2719
|
+
});
|
|
2720
|
+
}
|
|
2721
|
+
}
|
|
2722
|
+
async function deleteRule({ relativePathFromCwd }) {
|
|
2723
|
+
checkPathTraversal({
|
|
2724
|
+
relativePath: relativePathFromCwd,
|
|
2725
|
+
intendedRootDir: process.cwd()
|
|
2726
|
+
});
|
|
2727
|
+
const filename = basename2(relativePathFromCwd);
|
|
2728
|
+
const fullPath = join9(process.cwd(), RULESYNC_RULES_RELATIVE_DIR_PATH, filename);
|
|
2729
|
+
try {
|
|
2730
|
+
await removeFile(fullPath);
|
|
2731
|
+
return {
|
|
2732
|
+
relativePathFromCwd: join9(RULESYNC_RULES_RELATIVE_DIR_PATH, filename)
|
|
2733
|
+
};
|
|
2734
|
+
} catch (error) {
|
|
2735
|
+
throw new Error(`Failed to delete rule file ${relativePathFromCwd}: ${formatError(error)}`, {
|
|
2736
|
+
cause: error
|
|
2737
|
+
});
|
|
2738
|
+
}
|
|
2739
|
+
}
|
|
2740
|
+
var ruleToolSchemas = {
|
|
2741
|
+
listRules: z10.object({}),
|
|
2742
|
+
getRule: z10.object({
|
|
2743
|
+
relativePathFromCwd: z10.string()
|
|
2744
|
+
}),
|
|
2745
|
+
putRule: z10.object({
|
|
2746
|
+
relativePathFromCwd: z10.string(),
|
|
2747
|
+
frontmatter: RulesyncRuleFrontmatterSchema,
|
|
2748
|
+
body: z10.string()
|
|
2749
|
+
}),
|
|
2750
|
+
deleteRule: z10.object({
|
|
2751
|
+
relativePathFromCwd: z10.string()
|
|
2752
|
+
})
|
|
2753
|
+
};
|
|
2754
|
+
var ruleTools = {
|
|
2755
|
+
listRules: {
|
|
2756
|
+
name: "listRules",
|
|
2757
|
+
description: `List all rules from ${join9(RULESYNC_RULES_RELATIVE_DIR_PATH, "*.md")} with their frontmatter.`,
|
|
2758
|
+
parameters: ruleToolSchemas.listRules,
|
|
2759
|
+
execute: async () => {
|
|
2760
|
+
const rules = await listRules();
|
|
2761
|
+
const output = { rules };
|
|
2762
|
+
return JSON.stringify(output, null, 2);
|
|
2763
|
+
}
|
|
2764
|
+
},
|
|
2765
|
+
getRule: {
|
|
2766
|
+
name: "getRule",
|
|
2767
|
+
description: "Get detailed information about a specific rule. relativePathFromCwd parameter is required.",
|
|
2768
|
+
parameters: ruleToolSchemas.getRule,
|
|
2769
|
+
execute: async (args) => {
|
|
2770
|
+
const result = await getRule({ relativePathFromCwd: args.relativePathFromCwd });
|
|
2771
|
+
return JSON.stringify(result, null, 2);
|
|
2772
|
+
}
|
|
2773
|
+
},
|
|
2774
|
+
putRule: {
|
|
2775
|
+
name: "putRule",
|
|
2776
|
+
description: "Create or update a rule (upsert operation). relativePathFromCwd, frontmatter, and body parameters are required.",
|
|
2777
|
+
parameters: ruleToolSchemas.putRule,
|
|
2778
|
+
execute: async (args) => {
|
|
2779
|
+
const result = await putRule({
|
|
2780
|
+
relativePathFromCwd: args.relativePathFromCwd,
|
|
2781
|
+
frontmatter: args.frontmatter,
|
|
2782
|
+
body: args.body
|
|
2783
|
+
});
|
|
2784
|
+
return JSON.stringify(result, null, 2);
|
|
2785
|
+
}
|
|
2786
|
+
},
|
|
2787
|
+
deleteRule: {
|
|
2788
|
+
name: "deleteRule",
|
|
2789
|
+
description: "Delete a rule file. relativePathFromCwd parameter is required.",
|
|
2790
|
+
parameters: ruleToolSchemas.deleteRule,
|
|
2791
|
+
execute: async (args) => {
|
|
2792
|
+
const result = await deleteRule({ relativePathFromCwd: args.relativePathFromCwd });
|
|
2793
|
+
return JSON.stringify(result, null, 2);
|
|
2794
|
+
}
|
|
2795
|
+
}
|
|
2796
|
+
};
|
|
2797
|
+
|
|
2798
|
+
// src/mcp/skills.ts
|
|
2799
|
+
import { basename as basename3, dirname, join as join10 } from "path";
|
|
2800
|
+
import { z as z11 } from "zod/mini";
|
|
2801
|
+
var maxSkillSizeBytes = 1024 * 1024;
|
|
2802
|
+
var maxSkillsCount = 1e3;
|
|
2803
|
+
function aiDirFileToMcpSkillFile(file) {
|
|
2804
|
+
return {
|
|
2805
|
+
name: file.relativeFilePathToDirPath,
|
|
2806
|
+
body: file.fileBuffer.toString("utf-8")
|
|
2807
|
+
};
|
|
2808
|
+
}
|
|
2809
|
+
function mcpSkillFileToAiDirFile(file) {
|
|
2810
|
+
return {
|
|
2811
|
+
relativeFilePathToDirPath: file.name,
|
|
2812
|
+
fileBuffer: Buffer.from(file.body, "utf-8")
|
|
2813
|
+
};
|
|
2814
|
+
}
|
|
2815
|
+
function extractDirName(relativeDirPathFromCwd) {
|
|
2816
|
+
const dirName = basename3(relativeDirPathFromCwd);
|
|
2817
|
+
if (!dirName) {
|
|
2818
|
+
throw new Error(`Invalid path: ${relativeDirPathFromCwd}`);
|
|
2819
|
+
}
|
|
2820
|
+
return dirName;
|
|
2821
|
+
}
|
|
2822
|
+
async function listSkills() {
|
|
2823
|
+
const skillsDir = join10(process.cwd(), RULESYNC_SKILLS_RELATIVE_DIR_PATH);
|
|
2824
|
+
try {
|
|
2825
|
+
const skillDirPaths = await findFilesByGlobs(join10(skillsDir, "*"), { type: "dir" });
|
|
2826
|
+
const skills = await Promise.all(
|
|
2827
|
+
skillDirPaths.map(async (dirPath) => {
|
|
2828
|
+
const dirName = basename3(dirPath);
|
|
2829
|
+
if (!dirName) return null;
|
|
2830
|
+
try {
|
|
2831
|
+
const skill = await RulesyncSkill.fromDir({
|
|
2832
|
+
dirName
|
|
2833
|
+
});
|
|
2834
|
+
const frontmatter = skill.getFrontmatter();
|
|
2835
|
+
return {
|
|
2836
|
+
relativeDirPathFromCwd: join10(RULESYNC_SKILLS_RELATIVE_DIR_PATH, dirName),
|
|
2837
|
+
frontmatter
|
|
2838
|
+
};
|
|
2839
|
+
} catch (error) {
|
|
2840
|
+
logger.error(`Failed to read skill directory ${dirName}: ${formatError(error)}`);
|
|
2841
|
+
return null;
|
|
2842
|
+
}
|
|
2843
|
+
})
|
|
2844
|
+
);
|
|
2845
|
+
return skills.filter((skill) => skill !== null);
|
|
2846
|
+
} catch (error) {
|
|
2847
|
+
logger.error(
|
|
2848
|
+
`Failed to read skills directory (${RULESYNC_SKILLS_RELATIVE_DIR_PATH}): ${formatError(error)}`
|
|
2849
|
+
);
|
|
2850
|
+
return [];
|
|
2851
|
+
}
|
|
2852
|
+
}
|
|
2853
|
+
async function getSkill({ relativeDirPathFromCwd }) {
|
|
2854
|
+
checkPathTraversal({
|
|
2855
|
+
relativePath: relativeDirPathFromCwd,
|
|
2856
|
+
intendedRootDir: process.cwd()
|
|
2857
|
+
});
|
|
2858
|
+
const dirName = extractDirName(relativeDirPathFromCwd);
|
|
2859
|
+
try {
|
|
2860
|
+
const skill = await RulesyncSkill.fromDir({
|
|
2861
|
+
dirName
|
|
2862
|
+
});
|
|
2863
|
+
return {
|
|
2864
|
+
relativeDirPathFromCwd: join10(RULESYNC_SKILLS_RELATIVE_DIR_PATH, dirName),
|
|
2865
|
+
frontmatter: skill.getFrontmatter(),
|
|
2866
|
+
body: skill.getBody(),
|
|
2867
|
+
otherFiles: skill.getOtherFiles().map(aiDirFileToMcpSkillFile)
|
|
2868
|
+
};
|
|
2869
|
+
} catch (error) {
|
|
2870
|
+
throw new Error(
|
|
2871
|
+
`Failed to read skill directory ${relativeDirPathFromCwd}: ${formatError(error)}`,
|
|
2872
|
+
{
|
|
2873
|
+
cause: error
|
|
2874
|
+
}
|
|
2875
|
+
);
|
|
2876
|
+
}
|
|
2877
|
+
}
|
|
2878
|
+
async function putSkill({
|
|
2879
|
+
relativeDirPathFromCwd,
|
|
2880
|
+
frontmatter,
|
|
2881
|
+
body,
|
|
2882
|
+
otherFiles = []
|
|
2883
|
+
}) {
|
|
2884
|
+
checkPathTraversal({
|
|
2885
|
+
relativePath: relativeDirPathFromCwd,
|
|
2886
|
+
intendedRootDir: process.cwd()
|
|
2887
|
+
});
|
|
2888
|
+
const dirName = extractDirName(relativeDirPathFromCwd);
|
|
2889
|
+
const estimatedSize = JSON.stringify(frontmatter).length + body.length + otherFiles.reduce((acc, file) => acc + file.name.length + file.body.length, 0);
|
|
2890
|
+
if (estimatedSize > maxSkillSizeBytes) {
|
|
2891
|
+
throw new Error(
|
|
2892
|
+
`Skill size ${estimatedSize} bytes exceeds maximum ${maxSkillSizeBytes} bytes (1MB) for ${relativeDirPathFromCwd}`
|
|
2893
|
+
);
|
|
2894
|
+
}
|
|
2895
|
+
try {
|
|
2896
|
+
const existingSkills = await listSkills();
|
|
2897
|
+
const isUpdate = existingSkills.some(
|
|
2898
|
+
(skill2) => skill2.relativeDirPathFromCwd === join10(RULESYNC_SKILLS_RELATIVE_DIR_PATH, dirName)
|
|
2899
|
+
);
|
|
2900
|
+
if (!isUpdate && existingSkills.length >= maxSkillsCount) {
|
|
2901
|
+
throw new Error(
|
|
2902
|
+
`Maximum number of skills (${maxSkillsCount}) reached in ${RULESYNC_SKILLS_RELATIVE_DIR_PATH}`
|
|
2903
|
+
);
|
|
2904
|
+
}
|
|
2905
|
+
const aiDirFiles = otherFiles.map(mcpSkillFileToAiDirFile);
|
|
2906
|
+
const skill = new RulesyncSkill({
|
|
2907
|
+
baseDir: process.cwd(),
|
|
2908
|
+
relativeDirPath: RULESYNC_SKILLS_RELATIVE_DIR_PATH,
|
|
2909
|
+
dirName,
|
|
2910
|
+
frontmatter,
|
|
2911
|
+
body,
|
|
2912
|
+
otherFiles: aiDirFiles,
|
|
2913
|
+
validate: true
|
|
2914
|
+
});
|
|
2915
|
+
const skillDirPath = join10(process.cwd(), RULESYNC_SKILLS_RELATIVE_DIR_PATH, dirName);
|
|
2916
|
+
await ensureDir(skillDirPath);
|
|
2917
|
+
const skillFilePath = join10(skillDirPath, SKILL_FILE_NAME);
|
|
2918
|
+
const skillFileContent = stringifyFrontmatter(body, frontmatter);
|
|
2919
|
+
await writeFileContent(skillFilePath, skillFileContent);
|
|
2920
|
+
for (const file of otherFiles) {
|
|
2921
|
+
checkPathTraversal({
|
|
2922
|
+
relativePath: file.name,
|
|
2923
|
+
intendedRootDir: skillDirPath
|
|
2924
|
+
});
|
|
2925
|
+
const filePath = join10(skillDirPath, file.name);
|
|
2926
|
+
const fileDir = join10(skillDirPath, dirname(file.name));
|
|
2927
|
+
if (fileDir !== skillDirPath) {
|
|
2928
|
+
await ensureDir(fileDir);
|
|
2929
|
+
}
|
|
2930
|
+
await writeFileContent(filePath, file.body);
|
|
2931
|
+
}
|
|
2932
|
+
return {
|
|
2933
|
+
relativeDirPathFromCwd: join10(RULESYNC_SKILLS_RELATIVE_DIR_PATH, dirName),
|
|
2934
|
+
frontmatter: skill.getFrontmatter(),
|
|
2935
|
+
body: skill.getBody(),
|
|
2936
|
+
otherFiles: skill.getOtherFiles().map(aiDirFileToMcpSkillFile)
|
|
2937
|
+
};
|
|
2938
|
+
} catch (error) {
|
|
2939
|
+
throw new Error(
|
|
2940
|
+
`Failed to write skill directory ${relativeDirPathFromCwd}: ${formatError(error)}`,
|
|
2941
|
+
{
|
|
2942
|
+
cause: error
|
|
2943
|
+
}
|
|
2944
|
+
);
|
|
2945
|
+
}
|
|
2946
|
+
}
|
|
2947
|
+
async function deleteSkill({
|
|
2948
|
+
relativeDirPathFromCwd
|
|
2949
|
+
}) {
|
|
2950
|
+
checkPathTraversal({
|
|
2951
|
+
relativePath: relativeDirPathFromCwd,
|
|
2952
|
+
intendedRootDir: process.cwd()
|
|
2953
|
+
});
|
|
2954
|
+
const dirName = extractDirName(relativeDirPathFromCwd);
|
|
2955
|
+
const skillDirPath = join10(process.cwd(), RULESYNC_SKILLS_RELATIVE_DIR_PATH, dirName);
|
|
2956
|
+
try {
|
|
2957
|
+
if (await directoryExists(skillDirPath)) {
|
|
2958
|
+
await removeDirectory(skillDirPath);
|
|
2959
|
+
}
|
|
2960
|
+
return {
|
|
2961
|
+
relativeDirPathFromCwd: join10(RULESYNC_SKILLS_RELATIVE_DIR_PATH, dirName)
|
|
2962
|
+
};
|
|
2963
|
+
} catch (error) {
|
|
2964
|
+
throw new Error(
|
|
2965
|
+
`Failed to delete skill directory ${relativeDirPathFromCwd}: ${formatError(error)}`,
|
|
2966
|
+
{
|
|
2967
|
+
cause: error
|
|
2968
|
+
}
|
|
2969
|
+
);
|
|
2970
|
+
}
|
|
2971
|
+
}
|
|
2972
|
+
var McpSkillFileSchema = z11.object({
|
|
2973
|
+
name: z11.string(),
|
|
2974
|
+
body: z11.string()
|
|
2975
|
+
});
|
|
2976
|
+
var skillToolSchemas = {
|
|
2977
|
+
listSkills: z11.object({}),
|
|
2978
|
+
getSkill: z11.object({
|
|
2979
|
+
relativeDirPathFromCwd: z11.string()
|
|
2980
|
+
}),
|
|
2981
|
+
putSkill: z11.object({
|
|
2982
|
+
relativeDirPathFromCwd: z11.string(),
|
|
2983
|
+
frontmatter: RulesyncSkillFrontmatterSchema,
|
|
2984
|
+
body: z11.string(),
|
|
2985
|
+
otherFiles: z11.optional(z11.array(McpSkillFileSchema))
|
|
2986
|
+
}),
|
|
2987
|
+
deleteSkill: z11.object({
|
|
2988
|
+
relativeDirPathFromCwd: z11.string()
|
|
2989
|
+
})
|
|
2990
|
+
};
|
|
2991
|
+
var skillTools = {
|
|
2992
|
+
listSkills: {
|
|
2993
|
+
name: "listSkills",
|
|
2994
|
+
description: `List all skills from ${join10(RULESYNC_SKILLS_RELATIVE_DIR_PATH, "*", SKILL_FILE_NAME)} with their frontmatter.`,
|
|
2995
|
+
parameters: skillToolSchemas.listSkills,
|
|
2996
|
+
execute: async () => {
|
|
2997
|
+
const skills = await listSkills();
|
|
2998
|
+
const output = { skills };
|
|
2999
|
+
return JSON.stringify(output, null, 2);
|
|
3000
|
+
}
|
|
3001
|
+
},
|
|
3002
|
+
getSkill: {
|
|
3003
|
+
name: "getSkill",
|
|
3004
|
+
description: "Get detailed information about a specific skill including SKILL.md content and other files. relativeDirPathFromCwd parameter is required.",
|
|
3005
|
+
parameters: skillToolSchemas.getSkill,
|
|
3006
|
+
execute: async (args) => {
|
|
3007
|
+
const result = await getSkill({ relativeDirPathFromCwd: args.relativeDirPathFromCwd });
|
|
3008
|
+
return JSON.stringify(result, null, 2);
|
|
3009
|
+
}
|
|
3010
|
+
},
|
|
3011
|
+
putSkill: {
|
|
3012
|
+
name: "putSkill",
|
|
3013
|
+
description: "Create or update a skill (upsert operation). relativeDirPathFromCwd, frontmatter, and body parameters are required. otherFiles is optional.",
|
|
3014
|
+
parameters: skillToolSchemas.putSkill,
|
|
3015
|
+
execute: async (args) => {
|
|
3016
|
+
const result = await putSkill({
|
|
3017
|
+
relativeDirPathFromCwd: args.relativeDirPathFromCwd,
|
|
3018
|
+
frontmatter: args.frontmatter,
|
|
3019
|
+
body: args.body,
|
|
3020
|
+
otherFiles: args.otherFiles
|
|
3021
|
+
});
|
|
3022
|
+
return JSON.stringify(result, null, 2);
|
|
3023
|
+
}
|
|
3024
|
+
},
|
|
3025
|
+
deleteSkill: {
|
|
3026
|
+
name: "deleteSkill",
|
|
3027
|
+
description: "Delete a skill directory and all its contents. relativeDirPathFromCwd parameter is required.",
|
|
3028
|
+
parameters: skillToolSchemas.deleteSkill,
|
|
3029
|
+
execute: async (args) => {
|
|
3030
|
+
const result = await deleteSkill({ relativeDirPathFromCwd: args.relativeDirPathFromCwd });
|
|
3031
|
+
return JSON.stringify(result, null, 2);
|
|
3032
|
+
}
|
|
3033
|
+
}
|
|
3034
|
+
};
|
|
3035
|
+
|
|
3036
|
+
// src/mcp/subagents.ts
|
|
3037
|
+
import { basename as basename4, join as join11 } from "path";
|
|
3038
|
+
import { z as z12 } from "zod/mini";
|
|
3039
|
+
var maxSubagentSizeBytes = 1024 * 1024;
|
|
3040
|
+
var maxSubagentsCount = 1e3;
|
|
3041
|
+
async function listSubagents() {
|
|
3042
|
+
const subagentsDir = join11(process.cwd(), RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH);
|
|
3043
|
+
try {
|
|
3044
|
+
const files = await listDirectoryFiles(subagentsDir);
|
|
3045
|
+
const mdFiles = files.filter((file) => file.endsWith(".md"));
|
|
3046
|
+
const subagents = await Promise.all(
|
|
3047
|
+
mdFiles.map(async (file) => {
|
|
3048
|
+
try {
|
|
3049
|
+
const subagent = await RulesyncSubagent.fromFile({
|
|
3050
|
+
relativeFilePath: file,
|
|
3051
|
+
validate: true
|
|
3052
|
+
});
|
|
3053
|
+
const frontmatter = subagent.getFrontmatter();
|
|
3054
|
+
return {
|
|
3055
|
+
relativePathFromCwd: join11(RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH, file),
|
|
3056
|
+
frontmatter
|
|
3057
|
+
};
|
|
3058
|
+
} catch (error) {
|
|
3059
|
+
logger.error(`Failed to read subagent file ${file}: ${formatError(error)}`);
|
|
3060
|
+
return null;
|
|
3061
|
+
}
|
|
3062
|
+
})
|
|
3063
|
+
);
|
|
3064
|
+
return subagents.filter(
|
|
3065
|
+
(subagent) => subagent !== null
|
|
3066
|
+
);
|
|
3067
|
+
} catch (error) {
|
|
3068
|
+
logger.error(
|
|
3069
|
+
`Failed to read subagents directory (${RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH}): ${formatError(error)}`
|
|
3070
|
+
);
|
|
3071
|
+
return [];
|
|
3072
|
+
}
|
|
3073
|
+
}
|
|
3074
|
+
async function getSubagent({ relativePathFromCwd }) {
|
|
3075
|
+
checkPathTraversal({
|
|
3076
|
+
relativePath: relativePathFromCwd,
|
|
3077
|
+
intendedRootDir: process.cwd()
|
|
3078
|
+
});
|
|
3079
|
+
const filename = basename4(relativePathFromCwd);
|
|
3080
|
+
try {
|
|
3081
|
+
const subagent = await RulesyncSubagent.fromFile({
|
|
3082
|
+
relativeFilePath: filename,
|
|
3083
|
+
validate: true
|
|
3084
|
+
});
|
|
3085
|
+
return {
|
|
3086
|
+
relativePathFromCwd: join11(RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH, filename),
|
|
3087
|
+
frontmatter: subagent.getFrontmatter(),
|
|
3088
|
+
body: subagent.getBody()
|
|
3089
|
+
};
|
|
3090
|
+
} catch (error) {
|
|
3091
|
+
throw new Error(`Failed to read subagent file ${relativePathFromCwd}: ${formatError(error)}`, {
|
|
3092
|
+
cause: error
|
|
3093
|
+
});
|
|
3094
|
+
}
|
|
3095
|
+
}
|
|
3096
|
+
// Creates or updates (upserts) a subagent file under the .rulesync subagents
// directory. Enforces a per-file size cap and, for newly created files, a
// maximum subagent count. Returns the written path, frontmatter, and body.
async function putSubagent({
  relativePathFromCwd,
  frontmatter,
  body
}) {
  // Reject inputs that would escape the working directory.
  checkPathTraversal({
    relativePath: relativePathFromCwd,
    intendedRootDir: process.cwd()
  });
  // Only the basename is honored; the file always lands in the subagents dir.
  const filename = basename4(relativePathFromCwd);
  // Cheap pre-write size estimate: JSON length of the frontmatter plus the
  // body's string length. Not byte-exact for multi-byte characters, but
  // sufficient as a guard against oversized writes.
  const estimatedSize = JSON.stringify(frontmatter).length + body.length;
  if (estimatedSize > maxSubagentSizeBytes) {
    throw new Error(
      `Subagent size ${estimatedSize} bytes exceeds maximum ${maxSubagentSizeBytes} bytes (1MB) for ${relativePathFromCwd}`
    );
  }
  try {
    // NOTE(review): list-then-write is not atomic — a concurrent put could
    // push the count past maxSubagentsCount. Presumably acceptable for a
    // local, single-user CLI; confirm if this ever serves concurrent callers.
    const existingSubagents = await listSubagents();
    const isUpdate = existingSubagents.some(
      (subagent2) => subagent2.relativePathFromCwd === join11(RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH, filename)
    );
    // The count cap only applies to *new* files; updates are always allowed.
    if (!isUpdate && existingSubagents.length >= maxSubagentsCount) {
      throw new Error(
        `Maximum number of subagents (${maxSubagentsCount}) reached in ${RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH}`
      );
    }
    // validate: true — construction validates the frontmatter before any
    // filesystem write happens.
    const subagent = new RulesyncSubagent({
      baseDir: process.cwd(),
      relativeDirPath: RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH,
      relativeFilePath: filename,
      frontmatter,
      body,
      validate: true
    });
    const subagentsDir = join11(process.cwd(), RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH);
    await ensureDir(subagentsDir);
    await writeFileContent(subagent.getFilePath(), subagent.getFileContent());
    return {
      relativePathFromCwd: join11(RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH, filename),
      frontmatter: subagent.getFrontmatter(),
      body: subagent.getBody()
    };
  } catch (error) {
    // Wrap every failure (validation, cap, I/O) with the target path for
    // context, preserving the original error as cause.
    throw new Error(`Failed to write subagent file ${relativePathFromCwd}: ${formatError(error)}`, {
      cause: error
    });
  }
}
|
|
3144
|
+
/**
 * Deletes a subagent file from the .rulesync subagents directory.
 * Only the basename of the supplied path is used, so the target is always
 * resolved inside the subagents directory. Path-traversal attempts are
 * rejected up front. Failures are wrapped with the target path for context,
 * keeping the original error as `cause`.
 */
async function deleteSubagent({ relativePathFromCwd }) {
  checkPathTraversal({
    relativePath: relativePathFromCwd,
    intendedRootDir: process.cwd()
  });
  const filename = basename4(relativePathFromCwd);
  const absoluteTarget = join11(process.cwd(), RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH, filename);
  try {
    await removeFile(absoluteTarget);
    return {
      relativePathFromCwd: join11(RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH, filename)
    };
  } catch (error) {
    throw new Error(
      `Failed to delete subagent file ${relativePathFromCwd}: ${formatError(error)}`,
      {
        cause: error
      }
    );
  }
}
|
|
3165
|
+
// Zod input schemas for the subagent MCP tools. Declared separately from the
// tool definitions below so each tool can reference its schema as `parameters`.
var subagentToolSchemas = {
  // Listing takes no arguments: it always scans the whole subagents directory.
  listSubagents: z12.object({}),
  getSubagent: z12.object({
    relativePathFromCwd: z12.string()
  }),
  // Frontmatter is validated against the subagent frontmatter schema at the
  // tool boundary, before putSubagent runs.
  putSubagent: z12.object({
    relativePathFromCwd: z12.string(),
    frontmatter: RulesyncSubagentFrontmatterSchema,
    body: z12.string()
  }),
  deleteSubagent: z12.object({
    relativePathFromCwd: z12.string()
  })
};
|
|
3179
|
+
// MCP tool definitions for subagent CRUD. Each tool delegates to the
// corresponding async helper above and serializes its result as pretty-printed
// JSON (MCP tool results are strings).
var subagentTools = {
  listSubagents: {
    name: "listSubagents",
    description: `List all subagents from ${join11(RULESYNC_SUBAGENTS_RELATIVE_DIR_PATH, "*.md")} with their frontmatter.`,
    parameters: subagentToolSchemas.listSubagents,
    execute: async () => {
      const subagents = await listSubagents();
      // Wrapped in an object so the JSON output is self-describing.
      const output = { subagents };
      return JSON.stringify(output, null, 2);
    }
  },
  getSubagent: {
    name: "getSubagent",
    description: "Get detailed information about a specific subagent. relativePathFromCwd parameter is required.",
    parameters: subagentToolSchemas.getSubagent,
    execute: async (args) => {
      const result = await getSubagent({ relativePathFromCwd: args.relativePathFromCwd });
      return JSON.stringify(result, null, 2);
    }
  },
  putSubagent: {
    name: "putSubagent",
    description: "Create or update a subagent (upsert operation). relativePathFromCwd, frontmatter, and body parameters are required.",
    parameters: subagentToolSchemas.putSubagent,
    execute: async (args) => {
      const result = await putSubagent({
        relativePathFromCwd: args.relativePathFromCwd,
        frontmatter: args.frontmatter,
        body: args.body
      });
      return JSON.stringify(result, null, 2);
    }
  },
  deleteSubagent: {
    name: "deleteSubagent",
    description: "Delete a subagent file. relativePathFromCwd parameter is required.",
    parameters: subagentToolSchemas.deleteSubagent,
    execute: async (args) => {
      const result = await deleteSubagent({ relativePathFromCwd: args.relativePathFromCwd });
      return JSON.stringify(result, null, 2);
    }
  }
};
|
|
3222
|
+
|
|
3223
|
+
// src/mcp/tools.ts
|
|
3224
|
+
// Feature axis of the unified rulesync MCP tool: which kind of managed file
// (or action) an invocation targets.
var rulesyncFeatureSchema = z13.enum([
  "rule",
  "command",
  "subagent",
  "skill",
  "ignore",
  "mcp",
  "generate",
  "import"
]);
// Operation axis; not every operation is valid for every feature — the
// supported combinations are listed in supportedOperationsByFeature below.
var rulesyncOperationSchema = z13.enum(["list", "get", "put", "delete", "run"]);
// Auxiliary file bundled with a skill (name + raw body).
var skillFileSchema = z13.object({
  name: z13.string(),
  body: z13.string()
});
// Input schema for the single dispatching tool. Most fields are optional
// because their relevance depends on the feature/operation combination;
// the execute handler enforces the per-combination requirements.
var rulesyncToolSchema = z13.object({
  feature: rulesyncFeatureSchema,
  operation: rulesyncOperationSchema,
  targetPathFromCwd: z13.optional(z13.string()),
  // Left as `unknown` here; parsed with the feature-specific frontmatter
  // schema in parseFrontmatter at execution time.
  frontmatter: z13.optional(z13.unknown()),
  body: z13.optional(z13.string()),
  otherFiles: z13.optional(z13.array(skillFileSchema)),
  // Raw file content, used by the ignore/mcp put operations.
  content: z13.optional(z13.string()),
  generateOptions: z13.optional(generateOptionsSchema),
  importOptions: z13.optional(importOptionsSchema)
});
// Valid operation sets per feature; assertSupported consults this table.
var supportedOperationsByFeature = {
  rule: ["list", "get", "put", "delete"],
  command: ["list", "get", "put", "delete"],
  subagent: ["list", "get", "put", "delete"],
  skill: ["list", "get", "put", "delete"],
  // ignore/mcp are single-file features, so there is nothing to "list".
  ignore: ["get", "put", "delete"],
  mcp: ["get", "put", "delete"],
  generate: ["run"],
  import: ["run"]
};
|
|
3260
|
+
/**
 * Validates a feature/operation pairing against supportedOperationsByFeature.
 * Throws with the list of valid operations when the combination is invalid;
 * returns nothing on success.
 */
function assertSupported({
  feature,
  operation
}) {
  const allowed = supportedOperationsByFeature[feature];
  if (allowed.includes(operation)) {
    return;
  }
  throw new Error(
    `Operation ${operation} is not supported for feature ${feature}. Supported operations: ${allowed.join(", ")}`
  );
}
|
|
3273
|
+
/**
 * Guard used by operations that need a target path: returns the path when
 * present, otherwise throws an error naming the feature and operation.
 * Note an empty string also counts as missing (falsy check).
 */
function requireTargetPath({ targetPathFromCwd, feature, operation }) {
  if (targetPathFromCwd) {
    return targetPathFromCwd;
  }
  throw new Error(`targetPathFromCwd is required for ${feature} ${operation} operation`);
}
|
|
3279
|
+
// Validates raw frontmatter with the Zod schema matching the feature and
// returns the parsed value (throws a ZodError on schema violations).
// Only the four frontmatter-bearing features are handled; for any other
// feature value the switch falls through and the function returns undefined —
// callers only invoke this for rule/command/subagent/skill puts.
function parseFrontmatter({
  feature,
  frontmatter
}) {
  switch (feature) {
    case "rule": {
      return RulesyncRuleFrontmatterSchema.parse(frontmatter);
    }
    case "command": {
      return RulesyncCommandFrontmatterSchema.parse(frontmatter);
    }
    case "subagent": {
      return RulesyncSubagentFrontmatterSchema.parse(frontmatter);
    }
    case "skill": {
      return RulesyncSkillFrontmatterSchema.parse(frontmatter);
    }
  }
}
|
|
3298
|
+
/**
 * Guard for put operations that require a body: returns the body when
 * present, otherwise throws an error naming the feature and operation.
 * An empty string is treated as missing (falsy check).
 */
function ensureBody({ body, feature, operation }) {
  if (body) {
    return body;
  }
  throw new Error(`body is required for ${feature} ${operation} operation`);
}
|
|
3304
|
+
// Single MCP tool that fronts all rulesync features. Dispatches on
// (feature, operation) to the per-feature tool implementations defined above.
// Operation handling per case follows the same pattern: list needs no path;
// get/delete need targetPathFromCwd; put additionally needs frontmatter+body
// (or raw `content` for the single-file ignore/mcp features).
var rulesyncTool = {
  name: "rulesyncTool",
  description: "Manage Rulesync files through a single MCP tool. Features: rule/command/subagent/skill support list/get/put/delete; ignore/mcp support get/put/delete only; generate supports run only; import supports run only. Parameters: list requires no targetPathFromCwd (lists all items); get/delete require targetPathFromCwd; put requires targetPathFromCwd, frontmatter, and body (or content for ignore/mcp); generate/run uses generateOptions to configure generation; import/run uses importOptions to configure import.",
  parameters: rulesyncToolSchema,
  execute: async (args) => {
    // Re-parse defensively even though the MCP layer validates `parameters`.
    const parsed = rulesyncToolSchema.parse(args);
    // Fail fast on invalid feature/operation combinations.
    assertSupported({ feature: parsed.feature, operation: parsed.operation });
    switch (parsed.feature) {
      case "rule": {
        if (parsed.operation === "list") {
          return ruleTools.listRules.execute();
        }
        if (parsed.operation === "get") {
          return ruleTools.getRule.execute({ relativePathFromCwd: requireTargetPath(parsed) });
        }
        if (parsed.operation === "put") {
          return ruleTools.putRule.execute({
            relativePathFromCwd: requireTargetPath(parsed),
            // Missing frontmatter defaults to {} so schema defaults can apply.
            frontmatter: parseFrontmatter({
              feature: "rule",
              frontmatter: parsed.frontmatter ?? {}
            }),
            body: ensureBody(parsed)
          });
        }
        // assertSupported guarantees the only remaining operation is delete.
        return ruleTools.deleteRule.execute({ relativePathFromCwd: requireTargetPath(parsed) });
      }
      case "command": {
        if (parsed.operation === "list") {
          return commandTools.listCommands.execute();
        }
        if (parsed.operation === "get") {
          return commandTools.getCommand.execute({
            relativePathFromCwd: requireTargetPath(parsed)
          });
        }
        if (parsed.operation === "put") {
          return commandTools.putCommand.execute({
            relativePathFromCwd: requireTargetPath(parsed),
            frontmatter: parseFrontmatter({
              feature: "command",
              frontmatter: parsed.frontmatter ?? {}
            }),
            body: ensureBody(parsed)
          });
        }
        return commandTools.deleteCommand.execute({
          relativePathFromCwd: requireTargetPath(parsed)
        });
      }
      case "subagent": {
        if (parsed.operation === "list") {
          return subagentTools.listSubagents.execute();
        }
        if (parsed.operation === "get") {
          return subagentTools.getSubagent.execute({
            relativePathFromCwd: requireTargetPath(parsed)
          });
        }
        if (parsed.operation === "put") {
          return subagentTools.putSubagent.execute({
            relativePathFromCwd: requireTargetPath(parsed),
            frontmatter: parseFrontmatter({
              feature: "subagent",
              frontmatter: parsed.frontmatter ?? {}
            }),
            body: ensureBody(parsed)
          });
        }
        return subagentTools.deleteSubagent.execute({
          relativePathFromCwd: requireTargetPath(parsed)
        });
      }
      case "skill": {
        // Skills are directories, so the target parameter is a dir path here.
        if (parsed.operation === "list") {
          return skillTools.listSkills.execute();
        }
        if (parsed.operation === "get") {
          return skillTools.getSkill.execute({ relativeDirPathFromCwd: requireTargetPath(parsed) });
        }
        if (parsed.operation === "put") {
          return skillTools.putSkill.execute({
            relativeDirPathFromCwd: requireTargetPath(parsed),
            frontmatter: parseFrontmatter({
              feature: "skill",
              frontmatter: parsed.frontmatter ?? {}
            }),
            body: ensureBody(parsed),
            otherFiles: parsed.otherFiles ?? []
          });
        }
        return skillTools.deleteSkill.execute({
          relativeDirPathFromCwd: requireTargetPath(parsed)
        });
      }
      case "ignore": {
        // Single-file feature: no path argument; put takes raw `content`.
        if (parsed.operation === "get") {
          return ignoreTools.getIgnoreFile.execute();
        }
        if (parsed.operation === "put") {
          if (!parsed.content) {
            throw new Error("content is required for ignore put operation");
          }
          return ignoreTools.putIgnoreFile.execute({ content: parsed.content });
        }
        return ignoreTools.deleteIgnoreFile.execute();
      }
      case "mcp": {
        // Single-file feature, same shape as ignore.
        if (parsed.operation === "get") {
          return mcpTools.getMcpFile.execute();
        }
        if (parsed.operation === "put") {
          if (!parsed.content) {
            throw new Error("content is required for mcp put operation");
          }
          return mcpTools.putMcpFile.execute({ content: parsed.content });
        }
        return mcpTools.deleteMcpFile.execute();
      }
      case "generate": {
        // generateOptions is optional; {} runs with defaults.
        return generateTools.executeGenerate.execute(parsed.generateOptions ?? {});
      }
      case "import": {
        // Unlike generate, import has no sensible defaults, so options are
        // mandatory.
        if (!parsed.importOptions) {
          throw new Error("importOptions is required for import feature");
        }
        return importTools.executeImport.execute(parsed.importOptions);
      }
      default: {
        // Unreachable given the enum schema; kept as a safety net.
        throw new Error(`Unknown feature: ${parsed.feature}`);
      }
    }
  }
};
|
|
3438
|
+
|
|
3439
|
+
// src/cli/commands/mcp.ts
|
|
3440
|
+
// Starts the rulesync MCP server over stdio, exposing the single unified
// rulesyncTool defined above.
async function mcpCommand({ version }) {
  const server = new FastMCP({
    name: "Rulesync MCP Server",
    // eslint-disable-next-line no-type-assertion/no-type-assertion
    version,
    instructions: "This server handles Rulesync files including rules, commands, MCP, ignore files, subagents and skills for any AI agents. It should be used when you need those files."
  });
  server.addTool(rulesyncTool);
  // Logged before start() so the message reaches the log even if the
  // transport takes over stdio.
  logger.info("Rulesync MCP server started via stdio");
  // NOTE(review): start() is deliberately fire-and-forget (`void`), so a
  // rejection here would be an unhandled rejection — presumably FastMCP
  // reports transport errors itself; confirm against the FastMCP docs.
  void server.start({
    transportType: "stdio"
  });
}
|
|
3453
|
+
|
|
3454
|
+
// src/lib/update.ts
|
|
3455
|
+
import * as crypto from "crypto";
|
|
3456
|
+
import * as fs from "fs";
|
|
3457
|
+
import * as os from "os";
|
|
3458
|
+
import * as path from "path";
|
|
3459
|
+
import { Readable, Transform } from "stream";
|
|
3460
|
+
import { pipeline } from "stream/promises";
|
|
3461
|
+
// GitHub repository that hosts rulesync release binaries.
var RULESYNC_REPO_OWNER = "dyoshikawa";
var RULESYNC_REPO_NAME = "rulesync";
// Human-facing releases page, used in error messages suggesting a manual
// download.
var RELEASES_URL = `https://github.com/${RULESYNC_REPO_OWNER}/${RULESYNC_REPO_NAME}/releases`;
// Hard cap on a single downloaded asset: 500 MB, enforced both via the
// Content-Length header and during streaming in downloadFile.
var MAX_DOWNLOAD_SIZE = 500 * 1024 * 1024;
// Hosts a release download may resolve to. github.com redirects asset
// downloads to the *.githubusercontent.com CDN hosts, so those must be
// allowed for the post-redirect URL check.
var ALLOWED_DOWNLOAD_DOMAINS = [
  "github.com",
  "objects.githubusercontent.com",
  "github-releases.githubusercontent.com",
  "release-assets.githubusercontent.com"
];
|
|
3471
|
+
// Error thrown when a self-update fails due to filesystem permissions
// (EACCES/EPERM). The update command catches this type specifically to print
// a "try running with sudo" hint instead of a generic failure.
var UpdatePermissionError = class extends Error {
  constructor(message) {
    super(message);
    // Distinct name so instanceof-less consumers (logs, serialized errors)
    // can still identify the error kind.
    this.name = "UpdatePermissionError";
  }
};
|
|
3477
|
+
/**
 * Classifies how the running rulesync was installed so the update command can
 * pick the right upgrade path.
 *
 * Heuristics:
 * - executable itself is named like a rulesync binary -> "single-binary",
 *   or "homebrew" when it lives under a Homebrew prefix;
 * - otherwise a Homebrew-located rulesync script -> "homebrew";
 * - anything else (typically node running a JS entry point) -> "npm".
 *
 * @returns {"homebrew"|"single-binary"|"npm"}
 */
function detectExecutionEnvironment() {
  const execPath = process.execPath;
  const scriptPath = process.argv[1] ?? "";
  const rulesyncBinaryPattern = /rulesync(-[a-z0-9]+(-[a-z0-9]+)?)?(\.exe)?$/i;
  const underHomebrew = (p) => p.includes("/homebrew/") || p.includes("/Cellar/");
  if (rulesyncBinaryPattern.test(execPath)) {
    return underHomebrew(execPath) ? "homebrew" : "single-binary";
  }
  if (underHomebrew(scriptPath) && scriptPath.includes("rulesync")) {
    return "homebrew";
  }
  return "npm";
}
|
|
3492
|
+
/**
 * Builds the release-asset filename for the current OS/architecture, e.g.
 * "rulesync-linux-x64" or "rulesync-windows-arm64.exe".
 *
 * @returns {string|null} The asset name, or null when the platform or
 *   architecture has no published binary.
 */
function getPlatformAssetName() {
  const platformNames = {
    darwin: "darwin",
    linux: "linux",
    win32: "windows"
  };
  const archNames = {
    x64: "x64",
    arm64: "arm64"
  };
  const currentPlatform = os.platform();
  const platformName = platformNames[currentPlatform];
  const archName = archNames[os.arch()];
  if (!platformName || !archName) {
    return null;
  }
  // Only Windows binaries carry an extension.
  const suffix = currentPlatform === "win32" ? ".exe" : "";
  return `rulesync-${platformName}-${archName}${suffix}`;
}
|
|
3512
|
+
/**
 * Normalizes a version string for numeric comparison: drops a leading "v"
 * and everything from the first "-" onward (pre-release / build suffix),
 * e.g. "v1.2.3-beta.1" -> "1.2.3".
 */
function normalizeVersion(v) {
  let normalized = v;
  if (normalized.startsWith("v")) {
    normalized = normalized.slice(1);
  }
  const dashIndex = normalized.indexOf("-");
  if (dashIndex !== -1) {
    normalized = normalized.slice(0, dashIndex);
  }
  return normalized;
}
|
|
3515
|
+
/**
 * Numerically compares two (possibly v-prefixed, possibly pre-release)
 * version strings component by component after normalization.
 * Missing components are treated as 0, so "1.2" equals "1.2.0".
 *
 * @returns {number} 1 if a > b, -1 if a < b, 0 when equal.
 * @throws {Error} when either version has a non-numeric component.
 */
function compareVersions(a, b) {
  const left = normalizeVersion(a).split(".").map(Number);
  const right = normalizeVersion(b).split(".").map(Number);
  const width = Math.max(left.length, right.length);
  for (let i = 0; i < width; i++) {
    const x = left[i] ?? 0;
    const y = right[i] ?? 0;
    if (!Number.isFinite(x) || !Number.isFinite(y)) {
      throw new Error(`Invalid version format: cannot compare "${a}" and "${b}"`);
    }
    if (x !== y) {
      return x > y ? 1 : -1;
    }
  }
  return 0;
}
|
|
3529
|
+
/**
 * Validates that a release-download URL is safe to fetch:
 * parseable, HTTPS, on an allow-listed host, and — for github.com URLs —
 * scoped to the rulesync repository path. Throws on any violation; returns
 * nothing on success.
 */
function validateDownloadUrl(url) {
  let parsed;
  try {
    parsed = new URL(url);
  } catch {
    throw new Error(`Invalid download URL: ${url}`);
  }
  if (parsed.protocol !== "https:") {
    throw new Error(`Download URL must use HTTPS: ${url}`);
  }
  // Exact hostname match only — no subdomain wildcards.
  if (!ALLOWED_DOWNLOAD_DOMAINS.includes(parsed.hostname)) {
    throw new Error(
      `Download URL domain "${parsed.hostname}" is not in the allowed list: ${ALLOWED_DOWNLOAD_DOMAINS.join(", ")}`
    );
  }
  // github.com serves many repos; pin the path to ours. CDN hosts carry
  // opaque asset paths and are not path-checked.
  if (parsed.hostname === "github.com") {
    const expectedPrefix = `/${RULESYNC_REPO_OWNER}/${RULESYNC_REPO_NAME}/`;
    if (!parsed.pathname.startsWith(expectedPrefix)) {
      throw new Error(
        `Download URL path must belong to ${RULESYNC_REPO_OWNER}/${RULESYNC_REPO_NAME}: ${url}`
      );
    }
  }
}
|
|
3554
|
+
// Fetches the latest GitHub release and compares it with the running version.
// Returns the normalized versions, the full release object (needed later for
// asset lookup), and whether the release is strictly newer.
// `token` is optional; GitHubClient.resolveToken presumably falls back to
// environment credentials — confirm against its implementation.
async function checkForUpdate(currentVersion, token) {
  const client = new GitHubClient({
    token: GitHubClient.resolveToken(token)
  });
  const release = await client.getLatestRelease(RULESYNC_REPO_OWNER, RULESYNC_REPO_NAME);
  // Tags are typically "vX.Y.Z"; normalize both sides before comparing.
  const latestVersion = normalizeVersion(release.tag_name);
  const normalizedCurrentVersion = normalizeVersion(currentVersion);
  return {
    currentVersion: normalizedCurrentVersion,
    latestVersion,
    hasUpdate: compareVersions(latestVersion, normalizedCurrentVersion) > 0,
    release
  };
}
|
|
3568
|
+
/**
 * Looks up a release asset by exact name.
 * @returns {object|null} The matching asset object, or null when absent.
 */
function findAsset(release, assetName) {
  for (const asset of release.assets) {
    if (asset.name === assetName) {
      return asset;
    }
  }
  return null;
}
|
|
3571
|
+
// Streams a release asset to disk with defense-in-depth checks:
// 1. the request URL is validated against the domain allow-list,
// 2. the final post-redirect URL is re-validated (redirect: "follow" can
//    land on a different host),
// 3. size is enforced both via Content-Length and again byte-by-byte while
//    streaming, since Content-Length can be absent or wrong.
async function downloadFile(url, destPath) {
  validateDownloadUrl(url);
  const response = await fetch(url, {
    redirect: "follow"
  });
  if (!response.ok) {
    throw new Error(`Failed to download ${url}: HTTP ${response.status}`);
  }
  // response.url is the URL after redirects; re-check it against the
  // allow-list so a malicious redirect can't smuggle a download.
  if (response.url) {
    validateDownloadUrl(response.url);
  }
  const contentLength = response.headers.get("content-length");
  if (contentLength && Number(contentLength) > MAX_DOWNLOAD_SIZE) {
    throw new Error(
      `Download too large: ${contentLength} bytes exceeds limit of ${MAX_DOWNLOAD_SIZE} bytes`
    );
  }
  if (!response.body) {
    throw new Error("Response body is empty");
  }
  const fileStream = fs.createWriteStream(destPath);
  let downloadedBytes = 0;
  // Bridge the WHATWG ReadableStream from fetch into a Node stream so it can
  // participate in pipeline().
  const bodyReader = Readable.fromWeb(
    // eslint-disable-next-line no-type-assertion/no-type-assertion
    response.body
  );
  // Pass-through transform that aborts the pipeline once the cumulative
  // byte count exceeds the cap — the header check above is advisory only.
  const sizeChecker = new Transform({
    transform(chunk, _encoding, callback) {
      downloadedBytes += chunk.length;
      if (downloadedBytes > MAX_DOWNLOAD_SIZE) {
        callback(
          new Error(
            `Download too large: exceeded limit of ${MAX_DOWNLOAD_SIZE} bytes during streaming`
          )
        );
        return;
      }
      callback(null, chunk);
    }
  });
  // pipeline() destroys all three streams on error; a partially written
  // destPath may remain on disk (callers use temp dirs, so it is cleaned up
  // with the dir).
  await pipeline(bodyReader, sizeChecker, fileStream);
}
|
|
3613
|
+
// Computes the hex-encoded SHA-256 digest of a file.
// Streams the file through the hash instead of buffering it whole:
// release binaries may be up to MAX_DOWNLOAD_SIZE (500 MB), and the previous
// fs.promises.readFile approach held the entire file in memory at once.
async function calculateSha256(filePath) {
  const hash = crypto.createHash("sha256");
  // crypto.Hash is a duplex stream, so it can terminate a pipeline; any
  // read error propagates as a rejection here.
  await pipeline(fs.createReadStream(filePath), hash);
  return hash.digest("hex");
}
|
|
3617
|
+
/**
 * Parses a SHA256SUMS manifest (sha256sum format: "<64 lowercase hex chars>
 * <whitespace> <filename>" per line) into a Map of filename -> digest.
 * Blank and unparseable lines are skipped silently.
 */
function parseSha256Sums(content) {
  const sums = /* @__PURE__ */ new Map();
  const entryPattern = /^([a-f0-9]{64})\s+(.+)$/;
  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (line === "") {
      continue;
    }
    const match = entryPattern.exec(line);
    if (match?.[1] && match?.[2]) {
      sums.set(match[2].trim(), match[1]);
    }
  }
  return sums;
}
|
|
3629
|
+
// Replaces the running single-binary executable with the latest release.
// Flow: check version -> download binary + SHA256SUMS into a private temp
// dir -> verify checksum -> back up the current executable -> swap it in
// (atomic rename preferred, copy fallback) -> roll back from the backup on
// failure. The temp dir is removed unless rollback itself failed, in which
// case it is preserved so the user can recover the backup manually.
async function performBinaryUpdate(currentVersion, options = {}) {
  const { force = false, token } = options;
  const updateCheck = await checkForUpdate(currentVersion, token);
  if (!updateCheck.hasUpdate && !force) {
    return `Already at the latest version (${currentVersion})`;
  }
  const assetName = getPlatformAssetName();
  if (!assetName) {
    throw new Error(
      `Unsupported platform: ${os.platform()} ${os.arch()}. Please download manually from ${RELEASES_URL}`
    );
  }
  const binaryAsset = findAsset(updateCheck.release, assetName);
  if (!binaryAsset) {
    throw new Error(
      `Binary for ${assetName} not found in release. Please download manually from ${RELEASES_URL}`
    );
  }
  // Refuse to update at all without a checksum manifest — integrity
  // verification is mandatory.
  const checksumAsset = findAsset(updateCheck.release, "SHA256SUMS");
  if (!checksumAsset) {
    throw new Error(
      `SHA256SUMS not found in release. Cannot verify download integrity. Please download manually from ${RELEASES_URL}`
    );
  }
  const tempDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), "rulesync-update-"));
  // Set when rollback fails; suppresses temp-dir cleanup so the backup
  // inside it survives for manual recovery.
  let restoreFailed = false;
  try {
    if (os.platform() !== "win32") {
      // 448 === 0o700: owner-only access to the staging dir.
      await fs.promises.chmod(tempDir, 448);
    }
    const tempBinaryPath = path.join(tempDir, assetName);
    await downloadFile(binaryAsset.browser_download_url, tempBinaryPath);
    const checksumsPath = path.join(tempDir, "SHA256SUMS");
    await downloadFile(checksumAsset.browser_download_url, checksumsPath);
    const checksumsContent = await fs.promises.readFile(checksumsPath, "utf-8");
    const checksums = parseSha256Sums(checksumsContent);
    const expectedChecksum = checksums.get(assetName);
    if (!expectedChecksum) {
      throw new Error(
        `Checksum entry for "${assetName}" not found in SHA256SUMS. Cannot verify download integrity.`
      );
    }
    const actualChecksum = await calculateSha256(tempBinaryPath);
    if (actualChecksum !== expectedChecksum) {
      throw new Error(
        `Checksum verification failed. Expected: ${expectedChecksum}, Got: ${actualChecksum}. The download may be corrupted.`
      );
    }
    // Resolve symlinks so we overwrite the real binary, not a link to it.
    const currentExePath = await fs.promises.realpath(process.execPath);
    const currentDir = path.dirname(currentExePath);
    const backupPath = path.join(tempDir, "rulesync.backup");
    try {
      await fs.promises.copyFile(currentExePath, backupPath);
    } catch (error) {
      if (isPermissionError(error)) {
        throw new UpdatePermissionError(
          `Permission denied: Cannot read ${currentExePath}. Try running with sudo.`
        );
      }
      throw error;
    }
    try {
      // Preferred strategy: copy next to the target, then atomically rename
      // over it. rename() within the same directory avoids a window where
      // the executable is truncated/partial.
      const tempInPlace = path.join(currentDir, `.rulesync-update-${crypto.randomUUID()}`);
      try {
        await fs.promises.copyFile(tempBinaryPath, tempInPlace);
        if (os.platform() !== "win32") {
          // 493 === 0o755: world-executable like a normal installed binary.
          await fs.promises.chmod(tempInPlace, 493);
        }
        await fs.promises.rename(tempInPlace, currentExePath);
      } catch {
        // Fallback: direct (non-atomic) overwrite, e.g. when rename is not
        // possible. Best-effort removal of the staging file first.
        try {
          await fs.promises.unlink(tempInPlace);
        } catch {
        }
        await fs.promises.copyFile(tempBinaryPath, currentExePath);
        if (os.platform() !== "win32") {
          await fs.promises.chmod(currentExePath, 493);
        }
      }
      return `Successfully updated from ${currentVersion} to ${updateCheck.latestVersion}`;
    } catch (error) {
      // Replacement failed after backup was taken: try to restore.
      try {
        await fs.promises.copyFile(backupPath, currentExePath);
      } catch {
        restoreFailed = true;
        throw new Error(
          `Failed to replace binary and restore failed. Backup is preserved at: ${backupPath} (in ${tempDir}). Please manually copy it to ${currentExePath}. Original error: ${error instanceof Error ? error.message : String(error)}`,
          { cause: error }
        );
      }
      if (isPermissionError(error)) {
        throw new UpdatePermissionError(
          `Permission denied: Cannot write to ${path.dirname(currentExePath)}. Try running with sudo.`
        );
      }
      throw error;
    }
  } finally {
    if (!restoreFailed) {
      // Cleanup is best-effort; a stale temp dir is harmless.
      try {
        await fs.promises.rm(tempDir, { recursive: true, force: true });
      } catch {
      }
    }
  }
}
|
|
3735
|
+
/**
 * Checks whether an unknown thrown value is a filesystem permission error,
 * i.e. an object carrying a `code` of "EACCES" or "EPERM" (Node errno shape).
 * Safe to call with anything, including null and non-objects.
 */
function isPermissionError(error) {
  if (typeof error !== "object" || error === null || !("code" in error)) {
    return false;
  }
  const code = error["code"];
  return code === "EACCES" || code === "EPERM";
}
|
|
3742
|
+
// Returns the multi-line help text shown when `rulesync update` detects an
// npm/npx installation — in-place binary self-update does not apply there,
// so the user is pointed at the package-manager upgrade commands instead.
function getNpmUpgradeInstructions() {
  return `This rulesync installation was installed via npm/npx.

To upgrade, run one of the following commands:

Global installation:
npm install -g rulesync@latest

Project dependency:
npm install rulesync@latest

Or use npx to always run the latest version:
npx rulesync@latest --version`;
}
|
|
3756
|
+
// Returns the help text shown when `rulesync update` detects a Homebrew
// installation — upgrading is delegated to brew rather than self-replacing
// a formula-managed binary.
function getHomebrewUpgradeInstructions() {
  return `This rulesync installation was installed via Homebrew.

To upgrade, run:
brew upgrade rulesync`;
}
|
|
3762
|
+
|
|
3763
|
+
// src/cli/commands/update.ts
|
|
3764
|
+
// CLI entry point for `rulesync update`.
// npm/Homebrew installs only print upgrade instructions; single-binary
// installs either report availability (--check) or perform the in-place
// binary update. Any failure logs an appropriate message and exits 1.
async function updateCommand(currentVersion, options) {
  const { check = false, force = false, verbose = false, silent = false, token } = options;
  logger.configure({ verbose, silent });
  try {
    const environment = detectExecutionEnvironment();
    logger.debug(`Detected environment: ${environment}`);
    // Package-manager installs: never self-modify, just instruct.
    if (environment === "npm") {
      logger.info(getNpmUpgradeInstructions());
      return;
    }
    if (environment === "homebrew") {
      logger.info(getHomebrewUpgradeInstructions());
      return;
    }
    // --check: report only, change nothing.
    if (check) {
      logger.info("Checking for updates...");
      const updateCheck = await checkForUpdate(currentVersion, token);
      if (updateCheck.hasUpdate) {
        logger.success(
          `Update available: ${updateCheck.currentVersion} -> ${updateCheck.latestVersion}`
        );
      } else {
        logger.info(`Already at the latest version (${updateCheck.currentVersion})`);
      }
      return;
    }
    logger.info("Checking for updates...");
    const message = await performBinaryUpdate(currentVersion, { force, token });
    logger.success(message);
  } catch (error) {
    // Tailor the failure output: auth problems get token hints, permission
    // problems get a sudo hint, everything else a formatted message.
    if (error instanceof GitHubClientError) {
      logGitHubAuthHints(error);
    } else if (error instanceof UpdatePermissionError) {
      logger.error(error.message);
      logger.info("Tip: Run with elevated privileges (e.g., sudo rulesync update)");
    } else {
      logger.error(formatError(error));
    }
    process.exit(1);
  }
}
|
|
3805
|
+
|
|
3806
|
+
// src/cli/index.ts
|
|
3807
|
+
// CLI version string, baked in at build time (mirrors package.json).
var getVersion = () => {
  return "7.3.0";
};
|
|
3808
|
+
/**
 * Build the Commander program, register every subcommand, and parse argv.
 *
 * Fixes:
 * - `generate`: Commander stores `-b, --base-dir <paths>` as `options.baseDir`
 *   (camelCase of the long flag), but the action read `options.baseDirs`,
 *   which is always undefined — the flag was silently dropped.
 * - `import`: the action forwarded `configPath: options.config`, but the
 *   command never declared `-c, --config <path>` (unlike `generate` and
 *   `install`), so it was always undefined; the option is now declared.
 */
var main = async () => {
  const program = new Command();
  const version = getVersion();
  // Commander option-argument parser shared by all list-valued flags:
  // "a, b,c" -> ["a", "b", "c"].
  const splitCsv = (value) => {
    return value.split(",").map((item) => item.trim());
  };
  // Surface any release announcement after a command finishes.
  program.hook("postAction", () => {
    if (ANNOUNCEMENT.length > 0) {
      logger.info(ANNOUNCEMENT);
    }
  });
  program.name("rulesync").description("Unified AI rules management CLI tool").version(version, "-v, --version", "Show version");
  program.command("init").description("Initialize rulesync in current directory").action(initCommand);
  program.command("gitignore").description("Add generated files to .gitignore").action(gitignoreCommand);
  program.command("fetch <source>").description("Fetch files from a Git repository (GitHub/GitLab)").option(
    "-t, --target <target>",
    "Target format to interpret files as (e.g., 'rulesync', 'claudecode'). Default: rulesync"
  ).option(
    "-f, --features <features>",
    `Comma-separated list of features to fetch (${ALL_FEATURES.join(",")}) or '*' for all`,
    splitCsv
  ).option("-r, --ref <ref>", "Branch, tag, or commit SHA to fetch from").option("-p, --path <path>", "Subdirectory path within the repository").option("-o, --output <dir>", "Output directory (default: .rulesync)").option(
    "-c, --conflict <strategy>",
    "Conflict resolution strategy: skip, overwrite (default: overwrite)"
  ).option("--token <token>", "Git provider token for private repositories").option("-V, --verbose", "Verbose output").option("-s, --silent", "Suppress all output").action(async (source, options) => {
    // try/catch added for consistency with the sibling commands; previously
    // failures fell through to main().catch, which did the same thing.
    try {
      await fetchCommand({
        source,
        target: options.target,
        features: options.features,
        ref: options.ref,
        path: options.path,
        output: options.output,
        conflict: options.conflict,
        token: options.token,
        verbose: options.verbose,
        silent: options.silent
      });
    } catch (error) {
      logger.error(formatError(error));
      process.exit(1);
    }
  });
  program.command("import").description("Import configurations from AI tools to rulesync format").option(
    "-t, --targets <tool>",
    "Tool to import from (e.g., 'copilot', 'cursor', 'cline')",
    splitCsv
  ).option(
    "-f, --features <features>",
    `Comma-separated list of features to import (${ALL_FEATURES.join(",")}) or '*' for all`,
    splitCsv
  ).option("-c, --config <path>", "Path to configuration file").option("-V, --verbose", "Verbose output").option("-s, --silent", "Suppress all output").option("-g, --global", "Import for global(user scope) configuration files").action(async (options) => {
    try {
      await importCommand({
        targets: options.targets,
        features: options.features,
        verbose: options.verbose,
        silent: options.silent,
        configPath: options.config,
        global: options.global
      });
    } catch (error) {
      logger.error(formatError(error));
      process.exit(1);
    }
  });
  program.command("mcp").description("Start MCP server for rulesync").action(async () => {
    try {
      await mcpCommand({ version });
    } catch (error) {
      logger.error(formatError(error));
      process.exit(1);
    }
  });
  program.command("install").description("Install skills from declarative sources in rulesync.jsonc").option("--update", "Force re-resolve all source refs, ignoring lockfile").option("--frozen", "Fail if lockfile is missing or out of sync (for CI)").option("--token <token>", "GitHub token for private repos").option("-c, --config <path>", "Path to configuration file").option("-V, --verbose", "Verbose output").option("-s, --silent", "Suppress all output").action(async (options) => {
    try {
      await installCommand({
        update: options.update,
        frozen: options.frozen,
        token: options.token,
        configPath: options.config,
        verbose: options.verbose,
        silent: options.silent
      });
    } catch (error) {
      logger.error(formatError(error));
      process.exit(1);
    }
  });
  program.command("generate").description("Generate configuration files for AI tools").option(
    "-t, --targets <tools>",
    "Comma-separated list of tools to generate for (e.g., 'copilot,cursor,cline' or '*' for all)",
    splitCsv
  ).option(
    "-f, --features <features>",
    `Comma-separated list of features to generate (${ALL_FEATURES.join(",")}) or '*' for all`,
    splitCsv
  ).option("--delete", "Delete all existing files in output directories before generating").option(
    "-b, --base-dir <paths>",
    "Base directories to generate files (comma-separated for multiple paths)"
  ).option("-V, --verbose", "Verbose output").option("-s, --silent", "Suppress all output").option("-c, --config <path>", "Path to configuration file").option("-g, --global", "Generate for global(user scope) configuration files").option(
    "--simulate-commands",
    "Generate simulated commands. This feature is only available for copilot, cursor and codexcli."
  ).option(
    "--simulate-subagents",
    "Generate simulated subagents. This feature is only available for copilot and codexcli."
  ).option(
    "--simulate-skills",
    "Generate simulated skills. This feature is only available for copilot, cursor and codexcli."
  ).option("--dry-run", "Dry run: show changes without writing files").option("--check", "Check if files are up to date (exits with code 1 if changes needed)").action(async (options) => {
    try {
      await generateCommand({
        targets: options.targets,
        features: options.features,
        verbose: options.verbose,
        silent: options.silent,
        delete: options.delete,
        // FIX: Commander exposes "--base-dir" as options.baseDir; the previous
        // options.baseDirs was always undefined. Value is the raw string;
        // presumably generateCommand splits on commas — TODO confirm.
        baseDirs: options.baseDir,
        configPath: options.config,
        global: options.global,
        simulateCommands: options.simulateCommands,
        simulateSubagents: options.simulateSubagents,
        simulateSkills: options.simulateSkills,
        dryRun: options.dryRun,
        check: options.check
      });
    } catch (error) {
      logger.error(formatError(error));
      process.exit(1);
    }
  });
  program.command("update").description("Update rulesync to the latest version").option("--check", "Check for updates without installing").option("--force", "Force update even if already at latest version").option("--token <token>", "GitHub token for API access").option("-V, --verbose", "Verbose output").option("-s, --silent", "Suppress all output").action(async (options) => {
    // updateCommand handles its own errors and exit codes.
    await updateCommand(version, {
      check: options.check,
      force: options.force,
      token: options.token,
      verbose: options.verbose,
      silent: options.silent
    });
  });
  program.parse();
};
|
|
3952
|
+
// Entry point: run the CLI; any unhandled rejection is reported and the
// process exits non-zero.
main().catch((err) => {
  logger.error(formatError(err));
  process.exit(1);
});
|