rulesync 6.5.0 → 6.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +34 -4
- package/dist/index.cjs +431 -39
- package/dist/index.js +431 -39
- package/package.json +1 -1
package/README.md
CHANGED
@@ -36,8 +36,19 @@ rulesync --help
 
 Download pre-built binaries from the [latest release](https://github.com/dyoshikawa/rulesync/releases/latest). These binaries are built using [Bun's single-file executable bundler](https://bun.sh/docs/bundler/executables).
 
+**Quick Install (Linux/macOS - No sudo required):**
+
+```bash
+curl -fsSL https://github.com/dyoshikawa/rulesync/releases/latest/download/install.sh | bash
+```
+
+Options:
+
+- Install specific version: `curl -fsSL https://github.com/dyoshikawa/rulesync/releases/latest/download/install.sh | bash -s -- v6.4.0`
+- Custom directory: `RULESYNC_HOME=~/.local curl -fsSL https://github.com/dyoshikawa/rulesync/releases/latest/download/install.sh | bash`
+
 <details>
-<summary>
+<summary>Manual installation (requires sudo)</summary>
 
 #### Linux (x64)
 
@@ -228,6 +239,15 @@ npx rulesync generate --check --targets "*" --features "*"
 
 # Add generated files to .gitignore
 npx rulesync gitignore
+
+# Update rulesync to the latest version (single-binary installs)
+npx rulesync update
+
+# Check for updates without installing
+npx rulesync update --check
+
+# Force update even if already at latest version
+npx rulesync update --force
 ```
 
 ## Preview Modes
@@ -255,12 +275,12 @@ echo $? # 0 if up to date, 1 if changes needed
 > [!NOTE]
 > `--dry-run` and `--check` cannot be used together.
 
-## Fetch Command (
+## Fetch Command (In Development)
 
 The `fetch` command allows you to fetch configuration files directly from a Git repository (GitHub/GitLab).
 
-> [!
-> This feature is
+> [!NOTE]
+> This feature is in development and may change in future releases.
 
 **Note:** The fetch command searches for feature directories (`rules/`, `commands/`, `skills/`, `subagents/`, etc.) directly at the specified path, without requiring a `.rulesync/` directory structure. This allows fetching from external repositories like `vercel-labs/agent-skills` or `anthropics/skills`.
 
@@ -875,6 +895,16 @@ So, in this case, approximately 92% reduction in MCP tools consumption!
 
 </details>
 
+## Official Skills
+
+Rulesync provides official skills that you can install using the fetch command:
+
+```bash
+npx rulesync fetch dyoshikawa/rulesync --features skills
+```
+
+This will install the Rulesync documentation skill to your project.
+
 ## Rulesync MCP Server
 
 Rulesync provides an MCP (Model Context Protocol) server that enables AI agents to manage your Rulesync files. This allows AI agents to discover, read, create, update, and delete files dynamically.
package/dist/index.cjs
CHANGED
@@ -2428,7 +2428,7 @@ var CommandsProcessor = class extends FeatureProcessor {
 );
 const rulesyncCommands = await Promise.all(
 rulesyncCommandPaths.map(
-(
+(path4) => RulesyncCommand.fromFile({ relativeFilePath: (0, import_node_path19.basename)(path4) })
 )
 );
 logger.info(`Successfully loaded ${rulesyncCommands.length} rulesync commands`);
@@ -2448,10 +2448,10 @@ var CommandsProcessor = class extends FeatureProcessor {
 );
 if (forDeletion) {
 const toolCommands2 = commandFilePaths.map(
-(
+(path4) => factory.class.forDeletion({
 baseDir: this.baseDir,
 relativeDirPath: paths.relativeDirPath,
-relativeFilePath: (0, import_node_path19.basename)(
+relativeFilePath: (0, import_node_path19.basename)(path4),
 global: this.global
 })
 ).filter((cmd) => cmd.isDeletable());
@@ -2460,9 +2460,9 @@ var CommandsProcessor = class extends FeatureProcessor {
 }
 const toolCommands = await Promise.all(
 commandFilePaths.map(
-(
+(path4) => factory.class.fromFile({
 baseDir: this.baseDir,
-relativeFilePath: (0, import_node_path19.basename)(
+relativeFilePath: (0, import_node_path19.basename)(path4),
 global: this.global
 })
 )
@@ -8648,7 +8648,7 @@ var SkillsProcessor = class extends DirFeatureProcessor {
 const paths = RulesyncSkill.getSettablePaths();
 const rulesyncSkillsDirPath = (0, import_node_path69.join)(this.baseDir, paths.relativeDirPath);
 const dirPaths = await findFilesByGlobs((0, import_node_path69.join)(rulesyncSkillsDirPath, "*"), { type: "dir" });
-const dirNames = dirPaths.map((
+const dirNames = dirPaths.map((path4) => (0, import_node_path69.basename)(path4));
 const rulesyncSkills = await Promise.all(
 dirNames.map(
 (dirName) => RulesyncSkill.fromDir({ baseDir: this.baseDir, dirName, global: this.global })
@@ -8666,7 +8666,7 @@ var SkillsProcessor = class extends DirFeatureProcessor {
 const paths = factory.class.getSettablePaths({ global: this.global });
 const skillsDirPath = (0, import_node_path69.join)(this.baseDir, paths.relativeDirPath);
 const dirPaths = await findFilesByGlobs((0, import_node_path69.join)(skillsDirPath, "*"), { type: "dir" });
-const dirNames = dirPaths.map((
+const dirNames = dirPaths.map((path4) => (0, import_node_path69.basename)(path4));
 const toolSkills = await Promise.all(
 dirNames.map(
 (dirName) => factory.class.fromDir({
@@ -8684,7 +8684,7 @@ var SkillsProcessor = class extends DirFeatureProcessor {
 const paths = factory.class.getSettablePaths({ global: this.global });
 const skillsDirPath = (0, import_node_path69.join)(this.baseDir, paths.relativeDirPath);
 const dirPaths = await findFilesByGlobs((0, import_node_path69.join)(skillsDirPath, "*"), { type: "dir" });
-const dirNames = dirPaths.map((
+const dirNames = dirPaths.map((path4) => (0, import_node_path69.basename)(path4));
 const toolSkills = dirNames.map(
 (dirName) => factory.class.forDeletion({
 baseDir: this.baseDir,
@@ -10092,10 +10092,10 @@ var SubagentsProcessor = class extends FeatureProcessor {
 );
 if (forDeletion) {
 const toolSubagents2 = subagentFilePaths.map(
-(
+(path4) => factory.class.forDeletion({
 baseDir: this.baseDir,
 relativeDirPath: paths.relativeDirPath,
-relativeFilePath: (0, import_node_path82.basename)(
+relativeFilePath: (0, import_node_path82.basename)(path4),
 global: this.global
 })
 ).filter((subagent) => subagent.isDeletable());
@@ -10104,9 +10104,9 @@ var SubagentsProcessor = class extends FeatureProcessor {
 }
 const toolSubagents = await Promise.all(
 subagentFilePaths.map(
-(
+(path4) => factory.class.fromFile({
 baseDir: this.baseDir,
-relativeFilePath: (0, import_node_path82.basename)(
+relativeFilePath: (0, import_node_path82.basename)(path4),
 global: this.global
 })
 )
@@ -13521,6 +13521,18 @@ var GitHubRepoInfoSchema = import_mini51.z.looseObject({
 default_branch: import_mini51.z.string(),
 private: import_mini51.z.boolean()
 });
+var GitHubReleaseAssetSchema = import_mini51.z.looseObject({
+name: import_mini51.z.string(),
+browser_download_url: import_mini51.z.string(),
+size: import_mini51.z.number()
+});
+var GitHubReleaseSchema = import_mini51.z.looseObject({
+tag_name: import_mini51.z.string(),
+name: import_mini51.z.nullable(import_mini51.z.string()),
+prerelease: import_mini51.z.boolean(),
+draft: import_mini51.z.boolean(),
+assets: import_mini51.z.array(GitHubReleaseAssetSchema)
+});
 
 // src/lib/github-client.ts
 var GitHubClientError = class extends Error {
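The two schemas added here describe the GitHub release payload the updater consumes: the tag, draft/prerelease flags, and the downloadable assets. A minimal standalone sketch of the same validation, assuming Zod v4's `zod/mini` entry point (which the bundle's `import_mini51` appears to wrap); the sample payload values are illustrative only:

```ts
// Sketch only: mirrors GitHubReleaseAssetSchema/GitHubReleaseSchema above.
// Assumes Zod v4's "zod/mini" build; sample values are made up for illustration.
import * as z from "zod/mini";

const ReleaseAsset = z.looseObject({
  name: z.string(),
  browser_download_url: z.string(),
  size: z.number(),
});

const Release = z.looseObject({
  tag_name: z.string(),
  name: z.nullable(z.string()),
  prerelease: z.boolean(),
  draft: z.boolean(),
  assets: z.array(ReleaseAsset),
});

// looseObject keeps unrecognized fields, and safeParse reports mismatches without
// throwing, which is how the client builds its "Invalid release info response" error.
const parsed = Release.safeParse({
  tag_name: "v6.6.0",
  name: "v6.6.0",
  prerelease: false,
  draft: false,
  assets: [{ name: "rulesync-linux-x64", browser_download_url: "https://example.invalid/rulesync-linux-x64", size: 1024 }],
});
console.log(parsed.success);
```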
@@ -13580,16 +13592,16 @@ var GitHubClient = class {
 /**
 * List contents of a directory in a repository
 */
-async listDirectory(owner, repo,
+async listDirectory(owner, repo, path4, ref) {
 try {
 const { data } = await this.octokit.repos.getContent({
 owner,
 repo,
-path:
+path: path4,
 ref
 });
 if (!Array.isArray(data)) {
-throw new GitHubClientError(`Path "${
+throw new GitHubClientError(`Path "${path4}" is not a directory`);
 }
 const entries = [];
 for (const item of data) {
@@ -13606,12 +13618,12 @@ var GitHubClient = class {
 /**
 * Get raw file content from a repository
 */
-async getFileContent(owner, repo,
+async getFileContent(owner, repo, path4, ref) {
 try {
 const { data } = await this.octokit.repos.getContent({
 owner,
 repo,
-path:
+path: path4,
 ref,
 mediaType: {
 format: "raw"
@@ -13631,12 +13643,12 @@ var GitHubClient = class {
 /**
 * Check if a file exists and is within size limits
 */
-async getFileInfo(owner, repo,
+async getFileInfo(owner, repo, path4, ref) {
 try {
 const { data } = await this.octokit.repos.getContent({
 owner,
 repo,
-path:
+path: path4,
 ref
 });
 if (Array.isArray(data)) {
@@ -13648,7 +13660,7 @@ var GitHubClient = class {
 }
 if (parsed.data.size > MAX_FILE_SIZE) {
 throw new GitHubClientError(
-`File "${
+`File "${path4}" exceeds maximum size limit of ${MAX_FILE_SIZE / 1024 / 1024}MB`
 );
 }
 return parsed.data;
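All three content helpers wrap the same Octokit `repos.getContent` call, with `mediaType: { format: "raw" }` switching the response from base64 JSON to the raw file body. A rough standalone sketch of that call, assuming `@octokit/rest`; the repository and path values are placeholders:

```ts
// Sketch of the raw-content request the client wraps; not the bundle's own helper.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function readRawFile(owner: string, repo: string, path: string, ref?: string): Promise<string> {
  const { data } = await octokit.repos.getContent({
    owner,
    repo,
    path,
    ref,
    mediaType: { format: "raw" }, // return the file body itself rather than base64 metadata
  });
  if (Array.isArray(data)) {
    throw new Error(`Path "${path}" is a directory, not a file`);
  }
  // With format: "raw" the payload is the file text; the cast reflects Octokit's loose typing here.
  return data as unknown as string;
}
```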
@@ -13676,6 +13688,21 @@ var GitHubClient = class {
 throw error;
 }
 }
+/**
+* Get the latest release from a repository
+*/
+async getLatestRelease(owner, repo) {
+try {
+const { data } = await this.octokit.repos.getLatestRelease({ owner, repo });
+const parsed = GitHubReleaseSchema.safeParse(data);
+if (!parsed.success) {
+throw new GitHubClientError(`Invalid release info response: ${formatError(parsed.error)}`);
+}
+return parsed.data;
+} catch (error) {
+throw this.handleError(error);
+}
+}
 /**
 * Handle errors from Octokit and convert to GitHubClientError
 */
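`getLatestRelease` is the new entry point the self-updater relies on: fetch the latest release, validate it against `GitHubReleaseSchema`, then pick the asset for the current platform. A hedged sketch of the underlying Octokit call together with the same name-based lookup `findAsset()` performs later in this diff (asset names are illustrative):

```ts
// Sketch: latest-release lookup plus a name-based asset match.
// Not the bundle's code; schema validation and error wrapping are omitted here.
import { Octokit } from "@octokit/rest";

async function latestAssetUrl(owner: string, repo: string, assetName: string): Promise<string | null> {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const { data } = await octokit.repos.getLatestRelease({ owner, repo });
  const asset = data.assets.find((a) => a.name === assetName);
  return asset ? asset.browser_download_url : null;
}

// e.g. latestAssetUrl("dyoshikawa", "rulesync", "rulesync-linux-x64")
```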
@@ -13851,13 +13878,13 @@ function parseUrl(url) {
 const repo = segments[1]?.replace(/\.git$/, "");
 if (segments.length > 2 && (segments[2] === "tree" || segments[2] === "blob")) {
 const ref = segments[3];
-const
+const path4 = segments.length > 4 ? segments.slice(4).join("/") : void 0;
 return {
 provider,
 owner: owner ?? "",
 repo: repo ?? "",
 ref,
-path:
+path: path4
 };
 }
 return {
@@ -13868,12 +13895,12 @@ function parseUrl(url) {
 }
 function parseShorthand(source) {
 let remaining = source;
-let
+let path4;
 let ref;
 const colonIndex = remaining.indexOf(":");
 if (colonIndex !== -1) {
-
-if (!
+path4 = remaining.substring(colonIndex + 1);
+if (!path4) {
 throw new Error(`Invalid source: ${source}. Path cannot be empty after ":".`);
 }
 remaining = remaining.substring(0, colonIndex);
@@ -13901,7 +13928,7 @@ function parseShorthand(source) {
 owner,
 repo,
 ref,
-path:
+path: path4
 };
 }
 function resolveFeatures(features) {
@@ -14077,8 +14104,8 @@ async function collectFeatureFiles(params) {
 }
 return filesToFetch;
 }
-async function listDirectoryRecursive(client, owner, repo,
-const entries = await client.listDirectory(owner, repo,
+async function listDirectoryRecursive(client, owner, repo, path4, ref) {
+const entries = await client.listDirectory(owner, repo, path4, ref);
 const files = [];
 for (const entry of entries) {
 if (entry.type === "file") {
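The `parseUrl` change above keeps the existing rule for GitHub/GitLab web URLs: for `/owner/repo/tree/<ref>/<path...>` (or `/blob/...`), segment three is the ref and everything after it is joined back into the path. A small self-contained restatement of that rule:

```ts
// Standalone restatement of the tree/blob URL rule shown in the diff; not the exported parseUrl.
function parseTreeUrl(url: string): { owner: string; repo: string; ref?: string; path?: string } {
  const segments = new URL(url).pathname.split("/").filter(Boolean);
  const owner = segments[0] ?? "";
  const repo = (segments[1] ?? "").replace(/\.git$/, "");
  if (segments.length > 2 && (segments[2] === "tree" || segments[2] === "blob")) {
    const ref = segments[3];
    const path = segments.length > 4 ? segments.slice(4).join("/") : undefined;
    return { owner, repo, ref, path };
  }
  return { owner, repo };
}

// Illustrative URL: parseTreeUrl("https://github.com/anthropics/skills/tree/main/skills")
// -> { owner: "anthropics", repo: "skills", ref: "main", path: "skills" }
```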
@@ -15510,12 +15537,12 @@ async function init() {
 };
 }
 async function createConfigFile() {
-const
-if (await fileExists(
-return { created: false, path:
+const path4 = RULESYNC_CONFIG_RELATIVE_FILE_PATH;
+if (await fileExists(path4)) {
+return { created: false, path: path4 };
 }
 await writeFileContent(
-
+path4,
 JSON.stringify(
 {
 targets: ["copilot", "cursor", "claudecode", "codexcli"],
@@ -15534,7 +15561,7 @@ async function createConfigFile() {
 2
 )
 );
-return { created: true, path:
+return { created: true, path: path4 };
 }
 async function createSampleFiles() {
 const results = [];
@@ -15715,12 +15742,12 @@ Keep the summary concise and ready to reuse in future tasks.`
 results.push(await writeIfNotExists(hooksFilepath, sampleHooksFile.content));
 return results;
 }
-async function writeIfNotExists(
-if (await fileExists(
-return { created: false, path:
+async function writeIfNotExists(path4, content) {
+if (await fileExists(path4)) {
+return { created: false, path: path4 };
 }
-await writeFileContent(
-return { created: true, path:
+await writeFileContent(path4, content);
+return { created: true, path: path4 };
 }
 
 // src/cli/commands/init.ts
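`createConfigFile` and `writeIfNotExists` follow the same idempotent pattern: probe for the file first and only write when it is missing, reporting whether anything was created. A plain `node:fs` sketch of that pattern (the project's own `fileExists`/`writeFileContent` helpers are not used here):

```ts
// Sketch of the write-if-missing pattern used by init; uses node:fs directly.
import * as fs from "node:fs/promises";

async function writeIfMissing(path: string, content: string): Promise<{ created: boolean; path: string }> {
  try {
    await fs.access(path); // file already exists: leave it untouched
    return { created: false, path };
  } catch {
    await fs.writeFile(path, content); // first run: create the config/sample file
    return { created: true, path };
  }
}
```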
@@ -17142,8 +17169,364 @@ async function mcpCommand({ version }) {
 });
 }
 
+// src/lib/update.ts
+var crypto = __toESM(require("crypto"), 1);
+var fs = __toESM(require("fs"), 1);
+var os2 = __toESM(require("os"), 1);
+var path3 = __toESM(require("path"), 1);
+var import_node_stream = require("stream");
+var import_promises2 = require("stream/promises");
+var RULESYNC_REPO_OWNER = "dyoshikawa";
+var RULESYNC_REPO_NAME = "rulesync";
+var RELEASES_URL = `https://github.com/${RULESYNC_REPO_OWNER}/${RULESYNC_REPO_NAME}/releases`;
+var MAX_DOWNLOAD_SIZE = 500 * 1024 * 1024;
+var ALLOWED_DOWNLOAD_DOMAINS = [
+"github.com",
+"objects.githubusercontent.com",
+"github-releases.githubusercontent.com"
+];
+var UpdatePermissionError = class extends Error {
+constructor(message) {
+super(message);
+this.name = "UpdatePermissionError";
+}
+};
+function detectExecutionEnvironment() {
+const execPath = process.execPath;
+const scriptPath = process.argv[1] ?? "";
+const isRulesyncBinary = /rulesync(-[a-z0-9]+(-[a-z0-9]+)?)?(\.exe)?$/i.test(execPath);
+if (isRulesyncBinary) {
+if (execPath.includes("/homebrew/") || execPath.includes("/Cellar/")) {
+return "homebrew";
+}
+return "single-binary";
+}
+if ((scriptPath.includes("/homebrew/") || scriptPath.includes("/Cellar/")) && scriptPath.includes("rulesync")) {
+return "homebrew";
+}
+return "npm";
+}
+function getPlatformAssetName() {
+const platform2 = os2.platform();
+const arch2 = os2.arch();
+const platformMap = {
+darwin: "darwin",
+linux: "linux",
+win32: "windows"
+};
+const archMap = {
+x64: "x64",
+arm64: "arm64"
+};
+const platformName = platformMap[platform2];
+const archName = archMap[arch2];
+if (!platformName || !archName) {
+return null;
+}
+const extension = platform2 === "win32" ? ".exe" : "";
+return `rulesync-${platformName}-${archName}${extension}`;
+}
+function normalizeVersion(v) {
+return v.replace(/^v/, "").replace(/-.*$/, "");
+}
+function compareVersions(a, b) {
+const aParts = normalizeVersion(a).split(".").map(Number);
+const bParts = normalizeVersion(b).split(".").map(Number);
+for (let i = 0; i < Math.max(aParts.length, bParts.length); i++) {
+const aNum = aParts[i] ?? 0;
+const bNum = bParts[i] ?? 0;
+if (!Number.isFinite(aNum) || !Number.isFinite(bNum)) {
+throw new Error(`Invalid version format: cannot compare "${a}" and "${b}"`);
+}
+if (aNum > bNum) return 1;
+if (aNum < bNum) return -1;
+}
+return 0;
+}
+function validateDownloadUrl(url) {
+let parsed;
+try {
+parsed = new URL(url);
+} catch {
+throw new Error(`Invalid download URL: ${url}`);
+}
+if (parsed.protocol !== "https:") {
+throw new Error(`Download URL must use HTTPS: ${url}`);
+}
+const isAllowed = ALLOWED_DOWNLOAD_DOMAINS.some((domain) => parsed.hostname === domain);
+if (!isAllowed) {
+throw new Error(
+`Download URL domain "${parsed.hostname}" is not in the allowed list: ${ALLOWED_DOWNLOAD_DOMAINS.join(", ")}`
+);
+}
+if (parsed.hostname === "github.com") {
+const expectedPrefix = `/${RULESYNC_REPO_OWNER}/${RULESYNC_REPO_NAME}/`;
+if (!parsed.pathname.startsWith(expectedPrefix)) {
+throw new Error(
+`Download URL path must belong to ${RULESYNC_REPO_OWNER}/${RULESYNC_REPO_NAME}: ${url}`
+);
+}
+}
+}
+async function checkForUpdate(currentVersion, token) {
+const client = new GitHubClient({
+token: GitHubClient.resolveToken(token)
+});
+const release = await client.getLatestRelease(RULESYNC_REPO_OWNER, RULESYNC_REPO_NAME);
+const latestVersion = normalizeVersion(release.tag_name);
+const normalizedCurrentVersion = normalizeVersion(currentVersion);
+return {
+currentVersion: normalizedCurrentVersion,
+latestVersion,
+hasUpdate: compareVersions(latestVersion, normalizedCurrentVersion) > 0,
+release
+};
+}
+function findAsset(release, assetName) {
+return release.assets.find((asset) => asset.name === assetName) ?? null;
+}
+async function downloadFile(url, destPath) {
+validateDownloadUrl(url);
+const response = await fetch(url, {
+redirect: "follow"
+});
+if (!response.ok) {
+throw new Error(`Failed to download: HTTP ${response.status}`);
+}
+if (response.url) {
+validateDownloadUrl(response.url);
+}
+const contentLength = response.headers.get("content-length");
+if (contentLength && Number(contentLength) > MAX_DOWNLOAD_SIZE) {
+throw new Error(
+`Download too large: ${contentLength} bytes exceeds limit of ${MAX_DOWNLOAD_SIZE} bytes`
+);
+}
+if (!response.body) {
+throw new Error("Response body is empty");
+}
+const fileStream = fs.createWriteStream(destPath);
+let downloadedBytes = 0;
+const bodyReader = import_node_stream.Readable.fromWeb(
+// eslint-disable-next-line no-type-assertion/no-type-assertion
+response.body
+);
+const sizeChecker = new import_node_stream.Transform({
+transform(chunk, _encoding, callback) {
+downloadedBytes += chunk.length;
+if (downloadedBytes > MAX_DOWNLOAD_SIZE) {
+callback(
+new Error(
+`Download too large: exceeded limit of ${MAX_DOWNLOAD_SIZE} bytes during streaming`
+)
+);
+return;
+}
+callback(null, chunk);
+}
+});
+await (0, import_promises2.pipeline)(bodyReader, sizeChecker, fileStream);
+}
+async function calculateSha256(filePath) {
+const content = await fs.promises.readFile(filePath);
+return crypto.createHash("sha256").update(content).digest("hex");
+}
+function parseSha256Sums(content) {
+const result = /* @__PURE__ */ new Map();
+for (const line of content.split("\n")) {
+const trimmed = line.trim();
+if (!trimmed) continue;
+const match = /^([a-f0-9]{64})\s+(.+)$/.exec(trimmed);
+if (match && match[1] && match[2]) {
+result.set(match[2].trim(), match[1]);
+}
+}
+return result;
+}
+async function performBinaryUpdate(currentVersion, options = {}) {
+const { force = false, token } = options;
+const updateCheck = await checkForUpdate(currentVersion, token);
+if (!updateCheck.hasUpdate && !force) {
+return `Already at the latest version (${currentVersion})`;
+}
+const assetName = getPlatformAssetName();
+if (!assetName) {
+throw new Error(
+`Unsupported platform: ${os2.platform()} ${os2.arch()}. Please download manually from ${RELEASES_URL}`
+);
+}
+const binaryAsset = findAsset(updateCheck.release, assetName);
+if (!binaryAsset) {
+throw new Error(
+`Binary for ${assetName} not found in release. Please download manually from ${RELEASES_URL}`
+);
+}
+const checksumAsset = findAsset(updateCheck.release, "SHA256SUMS");
+if (!checksumAsset) {
+throw new Error(
+`SHA256SUMS not found in release. Cannot verify download integrity. Please download manually from ${RELEASES_URL}`
+);
+}
+const tempDir = await fs.promises.mkdtemp(path3.join(os2.tmpdir(), "rulesync-update-"));
+let restoreFailed = false;
+try {
+if (os2.platform() !== "win32") {
+await fs.promises.chmod(tempDir, 448);
+}
+const tempBinaryPath = path3.join(tempDir, assetName);
+await downloadFile(binaryAsset.browser_download_url, tempBinaryPath);
+const checksumsPath = path3.join(tempDir, "SHA256SUMS");
+await downloadFile(checksumAsset.browser_download_url, checksumsPath);
+const checksumsContent = await fs.promises.readFile(checksumsPath, "utf-8");
+const checksums = parseSha256Sums(checksumsContent);
+const expectedChecksum = checksums.get(assetName);
+if (!expectedChecksum) {
+throw new Error(
+`Checksum entry for "${assetName}" not found in SHA256SUMS. Cannot verify download integrity.`
+);
+}
+const actualChecksum = await calculateSha256(tempBinaryPath);
+if (actualChecksum !== expectedChecksum) {
+throw new Error(
+`Checksum verification failed. Expected: ${expectedChecksum}, Got: ${actualChecksum}. The download may be corrupted.`
+);
+}
+const currentExePath = await fs.promises.realpath(process.execPath);
+const currentDir = path3.dirname(currentExePath);
+const backupPath = path3.join(tempDir, "rulesync.backup");
+try {
+await fs.promises.copyFile(currentExePath, backupPath);
+} catch (error) {
+if (isPermissionError(error)) {
+throw new UpdatePermissionError(
+`Permission denied: Cannot read ${currentExePath}. Try running with sudo.`
+);
+}
+throw error;
+}
+try {
+const tempInPlace = path3.join(currentDir, `.rulesync-update-${crypto.randomUUID()}`);
+try {
+await fs.promises.copyFile(tempBinaryPath, tempInPlace);
+if (os2.platform() !== "win32") {
+await fs.promises.chmod(tempInPlace, 493);
+}
+await fs.promises.rename(tempInPlace, currentExePath);
+} catch {
+try {
+await fs.promises.unlink(tempInPlace);
+} catch {
+}
+await fs.promises.copyFile(tempBinaryPath, currentExePath);
+if (os2.platform() !== "win32") {
+await fs.promises.chmod(currentExePath, 493);
+}
+}
+return `Successfully updated from ${currentVersion} to ${updateCheck.latestVersion}`;
+} catch (error) {
+try {
+await fs.promises.copyFile(backupPath, currentExePath);
+} catch {
+restoreFailed = true;
+throw new Error(
+`Failed to replace binary and restore failed. Backup is preserved at: ${backupPath} (in ${tempDir}). Please manually copy it to ${currentExePath}. Original error: ${error instanceof Error ? error.message : String(error)}`,
+{ cause: error }
+);
+}
+if (isPermissionError(error)) {
+throw new UpdatePermissionError(
+`Permission denied: Cannot write to ${path3.dirname(currentExePath)}. Try running with sudo.`
+);
+}
+throw error;
+}
+} finally {
+if (!restoreFailed) {
+try {
+await fs.promises.rm(tempDir, { recursive: true, force: true });
+} catch {
+}
+}
+}
+}
+function isPermissionError(error) {
+if (typeof error === "object" && error !== null && "code" in error) {
+const record = error;
+return record["code"] === "EACCES" || record["code"] === "EPERM";
+}
+return false;
+}
+function getNpmUpgradeInstructions() {
+return `This rulesync installation was installed via npm/npx.
+
+To upgrade, run one of the following commands:
+
+Global installation:
+npm install -g rulesync@latest
+
+Project dependency:
+npm install rulesync@latest
+
+Or use npx to always run the latest version:
+npx rulesync@latest --version`;
+}
+function getHomebrewUpgradeInstructions() {
+return `This rulesync installation was installed via Homebrew.
+
+To upgrade, run:
+brew upgrade rulesync`;
+}
+
+// src/cli/commands/update.ts
+async function updateCommand(currentVersion, options) {
+const { check = false, force = false, verbose = false, silent = false, token } = options;
+logger.configure({ verbose, silent });
+try {
+const environment = detectExecutionEnvironment();
+logger.debug(`Detected environment: ${environment}`);
+if (environment === "npm") {
+logger.info(getNpmUpgradeInstructions());
+return;
+}
+if (environment === "homebrew") {
+logger.info(getHomebrewUpgradeInstructions());
+return;
+}
+if (check) {
+logger.info("Checking for updates...");
+const updateCheck = await checkForUpdate(currentVersion, token);
+if (updateCheck.hasUpdate) {
+logger.success(
+`Update available: ${updateCheck.currentVersion} -> ${updateCheck.latestVersion}`
+);
+} else {
+logger.info(`Already at the latest version (${updateCheck.currentVersion})`);
+}
+return;
+}
+logger.info("Checking for updates...");
+const message = await performBinaryUpdate(currentVersion, { force, token });
+logger.success(message);
+} catch (error) {
+if (error instanceof GitHubClientError) {
+logger.error(`GitHub API Error: ${error.message}`);
+if (error.statusCode === 401 || error.statusCode === 403) {
+logger.info(
+"Tip: Set GITHUB_TOKEN or GH_TOKEN environment variable for better rate limits."
+);
+}
+} else if (error instanceof UpdatePermissionError) {
+logger.error(error.message);
+logger.info("Tip: Run with elevated privileges (e.g., sudo rulesync update)");
+} else {
+logger.error(formatError(error));
+}
+process.exit(1);
+}
+}
+
 // src/cli/index.ts
-var getVersion = () => "6.
+var getVersion = () => "6.6.0";
 var main = async () => {
 const program = new import_commander.Command();
 const version = getVersion();
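The update flow above hinges on two small helpers: `normalizeVersion` strips a leading `v` and any prerelease suffix, and `compareVersions` compares the remaining dotted segments numerically. A worked restatement (simplified: the bundled code additionally rejects non-numeric segments):

```ts
// Restated version handling; illustration only, not imported from the bundle.
function normalize(v: string): string {
  return v.replace(/^v/, "").replace(/-.*$/, "");
}

function compare(a: string, b: string): number {
  const ap = normalize(a).split(".").map(Number);
  const bp = normalize(b).split(".").map(Number);
  for (let i = 0; i < Math.max(ap.length, bp.length); i++) {
    const x = ap[i] ?? 0;
    const y = bp[i] ?? 0;
    if (x !== y) return x > y ? 1 : -1;
  }
  return 0;
}

// compare("v6.6.0", "6.5.0") === 1, so checkForUpdate reports hasUpdate: true.
// compare("v6.6.0-beta.1", "6.6.0") === 0, because the prerelease suffix is stripped first.
console.log(compare("v6.6.0", "6.5.0"), compare("v6.6.0-beta.1", "6.6.0"));
```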
@@ -17266,6 +17649,15 @@ var main = async () => {
 process.exit(1);
 }
 });
+program.command("update").description("Update rulesync to the latest version").option("--check", "Check for updates without installing").option("--force", "Force update even if already at latest version").option("--token <token>", "GitHub token for API access").option("-V, --verbose", "Verbose output").option("-s, --silent", "Suppress all output").action(async (options) => {
+await updateCommand(version, {
+check: options.check,
+force: options.force,
+token: options.token,
+verbose: options.verbose,
+silent: options.silent
+});
+});
 program.parse();
 };
 main().catch((error) => {
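The registration is emitted as a single chained statement in the bundle. Unchained, the same Commander setup reads roughly as follows; this is a sketch, and the import paths for `updateCommand` and `getVersion` are assumed for illustration:

```ts
// Same options as the bundled registration above, formatted for readability (sketch).
import { Command } from "commander";
import { updateCommand } from "./cli/commands/update"; // assumed path, for illustration
import { getVersion } from "./cli/version";            // assumed path, for illustration

const program = new Command();
program
  .command("update")
  .description("Update rulesync to the latest version")
  .option("--check", "Check for updates without installing")
  .option("--force", "Force update even if already at latest version")
  .option("--token <token>", "GitHub token for API access")
  .option("-V, --verbose", "Verbose output")
  .option("-s, --silent", "Suppress all output")
  .action(async (options) => {
    await updateCommand(getVersion(), {
      check: options.check,
      force: options.force,
      token: options.token,
      verbose: options.verbose,
      silent: options.silent,
    });
  });
```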
package/dist/index.js
CHANGED
@@ -2405,7 +2405,7 @@ var CommandsProcessor = class extends FeatureProcessor {
 );
 const rulesyncCommands = await Promise.all(
 rulesyncCommandPaths.map(
-(
+(path4) => RulesyncCommand.fromFile({ relativeFilePath: basename16(path4) })
 )
 );
 logger.info(`Successfully loaded ${rulesyncCommands.length} rulesync commands`);
@@ -2425,10 +2425,10 @@ var CommandsProcessor = class extends FeatureProcessor {
 );
 if (forDeletion) {
 const toolCommands2 = commandFilePaths.map(
-(
+(path4) => factory.class.forDeletion({
 baseDir: this.baseDir,
 relativeDirPath: paths.relativeDirPath,
-relativeFilePath: basename16(
+relativeFilePath: basename16(path4),
 global: this.global
 })
 ).filter((cmd) => cmd.isDeletable());
@@ -2437,9 +2437,9 @@ var CommandsProcessor = class extends FeatureProcessor {
 }
 const toolCommands = await Promise.all(
 commandFilePaths.map(
-(
+(path4) => factory.class.fromFile({
 baseDir: this.baseDir,
-relativeFilePath: basename16(
+relativeFilePath: basename16(path4),
 global: this.global
 })
 )
@@ -8625,7 +8625,7 @@ var SkillsProcessor = class extends DirFeatureProcessor {
 const paths = RulesyncSkill.getSettablePaths();
 const rulesyncSkillsDirPath = join68(this.baseDir, paths.relativeDirPath);
 const dirPaths = await findFilesByGlobs(join68(rulesyncSkillsDirPath, "*"), { type: "dir" });
-const dirNames = dirPaths.map((
+const dirNames = dirPaths.map((path4) => basename18(path4));
 const rulesyncSkills = await Promise.all(
 dirNames.map(
 (dirName) => RulesyncSkill.fromDir({ baseDir: this.baseDir, dirName, global: this.global })
@@ -8643,7 +8643,7 @@ var SkillsProcessor = class extends DirFeatureProcessor {
 const paths = factory.class.getSettablePaths({ global: this.global });
 const skillsDirPath = join68(this.baseDir, paths.relativeDirPath);
 const dirPaths = await findFilesByGlobs(join68(skillsDirPath, "*"), { type: "dir" });
-const dirNames = dirPaths.map((
+const dirNames = dirPaths.map((path4) => basename18(path4));
 const toolSkills = await Promise.all(
 dirNames.map(
 (dirName) => factory.class.fromDir({
@@ -8661,7 +8661,7 @@ var SkillsProcessor = class extends DirFeatureProcessor {
 const paths = factory.class.getSettablePaths({ global: this.global });
 const skillsDirPath = join68(this.baseDir, paths.relativeDirPath);
 const dirPaths = await findFilesByGlobs(join68(skillsDirPath, "*"), { type: "dir" });
-const dirNames = dirPaths.map((
+const dirNames = dirPaths.map((path4) => basename18(path4));
 const toolSkills = dirNames.map(
 (dirName) => factory.class.forDeletion({
 baseDir: this.baseDir,
@@ -10069,10 +10069,10 @@ var SubagentsProcessor = class extends FeatureProcessor {
 );
 if (forDeletion) {
 const toolSubagents2 = subagentFilePaths.map(
-(
+(path4) => factory.class.forDeletion({
 baseDir: this.baseDir,
 relativeDirPath: paths.relativeDirPath,
-relativeFilePath: basename22(
+relativeFilePath: basename22(path4),
 global: this.global
 })
 ).filter((subagent) => subagent.isDeletable());
@@ -10081,9 +10081,9 @@ var SubagentsProcessor = class extends FeatureProcessor {
 }
 const toolSubagents = await Promise.all(
 subagentFilePaths.map(
-(
+(path4) => factory.class.fromFile({
 baseDir: this.baseDir,
-relativeFilePath: basename22(
+relativeFilePath: basename22(path4),
 global: this.global
 })
 )
@@ -13498,6 +13498,18 @@ var GitHubRepoInfoSchema = z51.looseObject({
 default_branch: z51.string(),
 private: z51.boolean()
 });
+var GitHubReleaseAssetSchema = z51.looseObject({
+name: z51.string(),
+browser_download_url: z51.string(),
+size: z51.number()
+});
+var GitHubReleaseSchema = z51.looseObject({
+tag_name: z51.string(),
+name: z51.nullable(z51.string()),
+prerelease: z51.boolean(),
+draft: z51.boolean(),
+assets: z51.array(GitHubReleaseAssetSchema)
+});
 
 // src/lib/github-client.ts
 var GitHubClientError = class extends Error {
@@ -13557,16 +13569,16 @@ var GitHubClient = class {
 /**
 * List contents of a directory in a repository
 */
-async listDirectory(owner, repo,
+async listDirectory(owner, repo, path4, ref) {
 try {
 const { data } = await this.octokit.repos.getContent({
 owner,
 repo,
-path:
+path: path4,
 ref
 });
 if (!Array.isArray(data)) {
-throw new GitHubClientError(`Path "${
+throw new GitHubClientError(`Path "${path4}" is not a directory`);
 }
 const entries = [];
 for (const item of data) {
@@ -13583,12 +13595,12 @@ var GitHubClient = class {
 /**
 * Get raw file content from a repository
 */
-async getFileContent(owner, repo,
+async getFileContent(owner, repo, path4, ref) {
 try {
 const { data } = await this.octokit.repos.getContent({
 owner,
 repo,
-path:
+path: path4,
 ref,
 mediaType: {
 format: "raw"
@@ -13608,12 +13620,12 @@ var GitHubClient = class {
 /**
 * Check if a file exists and is within size limits
 */
-async getFileInfo(owner, repo,
+async getFileInfo(owner, repo, path4, ref) {
 try {
 const { data } = await this.octokit.repos.getContent({
 owner,
 repo,
-path:
+path: path4,
 ref
 });
 if (Array.isArray(data)) {
@@ -13625,7 +13637,7 @@ var GitHubClient = class {
 }
 if (parsed.data.size > MAX_FILE_SIZE) {
 throw new GitHubClientError(
-`File "${
+`File "${path4}" exceeds maximum size limit of ${MAX_FILE_SIZE / 1024 / 1024}MB`
 );
 }
 return parsed.data;
@@ -13653,6 +13665,21 @@ var GitHubClient = class {
 throw error;
 }
 }
+/**
+* Get the latest release from a repository
+*/
+async getLatestRelease(owner, repo) {
+try {
+const { data } = await this.octokit.repos.getLatestRelease({ owner, repo });
+const parsed = GitHubReleaseSchema.safeParse(data);
+if (!parsed.success) {
+throw new GitHubClientError(`Invalid release info response: ${formatError(parsed.error)}`);
+}
+return parsed.data;
+} catch (error) {
+throw this.handleError(error);
+}
+}
 /**
 * Handle errors from Octokit and convert to GitHubClientError
 */
@@ -13828,13 +13855,13 @@ function parseUrl(url) {
 const repo = segments[1]?.replace(/\.git$/, "");
 if (segments.length > 2 && (segments[2] === "tree" || segments[2] === "blob")) {
 const ref = segments[3];
-const
+const path4 = segments.length > 4 ? segments.slice(4).join("/") : void 0;
 return {
 provider,
 owner: owner ?? "",
 repo: repo ?? "",
 ref,
-path:
+path: path4
 };
 }
 return {
@@ -13845,12 +13872,12 @@ function parseUrl(url) {
 }
 function parseShorthand(source) {
 let remaining = source;
-let
+let path4;
 let ref;
 const colonIndex = remaining.indexOf(":");
 if (colonIndex !== -1) {
-
-if (!
+path4 = remaining.substring(colonIndex + 1);
+if (!path4) {
 throw new Error(`Invalid source: ${source}. Path cannot be empty after ":".`);
 }
 remaining = remaining.substring(0, colonIndex);
@@ -13878,7 +13905,7 @@ function parseShorthand(source) {
 owner,
 repo,
 ref,
-path:
+path: path4
 };
 }
 function resolveFeatures(features) {
@@ -14054,8 +14081,8 @@ async function collectFeatureFiles(params) {
 }
 return filesToFetch;
 }
-async function listDirectoryRecursive(client, owner, repo,
-const entries = await client.listDirectory(owner, repo,
+async function listDirectoryRecursive(client, owner, repo, path4, ref) {
+const entries = await client.listDirectory(owner, repo, path4, ref);
 const files = [];
 for (const entry of entries) {
 if (entry.type === "file") {
@@ -15487,12 +15514,12 @@ async function init() {
 };
 }
 async function createConfigFile() {
-const
-if (await fileExists(
-return { created: false, path:
+const path4 = RULESYNC_CONFIG_RELATIVE_FILE_PATH;
+if (await fileExists(path4)) {
+return { created: false, path: path4 };
 }
 await writeFileContent(
-
+path4,
 JSON.stringify(
 {
 targets: ["copilot", "cursor", "claudecode", "codexcli"],
@@ -15511,7 +15538,7 @@ async function createConfigFile() {
 2
 )
 );
-return { created: true, path:
+return { created: true, path: path4 };
 }
 async function createSampleFiles() {
 const results = [];
@@ -15692,12 +15719,12 @@ Keep the summary concise and ready to reuse in future tasks.`
 results.push(await writeIfNotExists(hooksFilepath, sampleHooksFile.content));
 return results;
 }
-async function writeIfNotExists(
-if (await fileExists(
-return { created: false, path:
+async function writeIfNotExists(path4, content) {
+if (await fileExists(path4)) {
+return { created: false, path: path4 };
 }
-await writeFileContent(
-return { created: true, path:
+await writeFileContent(path4, content);
+return { created: true, path: path4 };
 }
 
 // src/cli/commands/init.ts
@@ -17119,8 +17146,364 @@ async function mcpCommand({ version }) {
 });
 }
 
+// src/lib/update.ts
+import * as crypto from "crypto";
+import * as fs from "fs";
+import * as os2 from "os";
+import * as path3 from "path";
+import { Readable, Transform } from "stream";
+import { pipeline } from "stream/promises";
+var RULESYNC_REPO_OWNER = "dyoshikawa";
+var RULESYNC_REPO_NAME = "rulesync";
+var RELEASES_URL = `https://github.com/${RULESYNC_REPO_OWNER}/${RULESYNC_REPO_NAME}/releases`;
+var MAX_DOWNLOAD_SIZE = 500 * 1024 * 1024;
+var ALLOWED_DOWNLOAD_DOMAINS = [
+"github.com",
+"objects.githubusercontent.com",
+"github-releases.githubusercontent.com"
+];
+var UpdatePermissionError = class extends Error {
+constructor(message) {
+super(message);
+this.name = "UpdatePermissionError";
+}
+};
+function detectExecutionEnvironment() {
+const execPath = process.execPath;
+const scriptPath = process.argv[1] ?? "";
+const isRulesyncBinary = /rulesync(-[a-z0-9]+(-[a-z0-9]+)?)?(\.exe)?$/i.test(execPath);
+if (isRulesyncBinary) {
+if (execPath.includes("/homebrew/") || execPath.includes("/Cellar/")) {
+return "homebrew";
+}
+return "single-binary";
+}
+if ((scriptPath.includes("/homebrew/") || scriptPath.includes("/Cellar/")) && scriptPath.includes("rulesync")) {
+return "homebrew";
+}
+return "npm";
+}
+function getPlatformAssetName() {
+const platform2 = os2.platform();
+const arch2 = os2.arch();
+const platformMap = {
+darwin: "darwin",
+linux: "linux",
+win32: "windows"
+};
+const archMap = {
+x64: "x64",
+arm64: "arm64"
+};
+const platformName = platformMap[platform2];
+const archName = archMap[arch2];
+if (!platformName || !archName) {
+return null;
+}
+const extension = platform2 === "win32" ? ".exe" : "";
+return `rulesync-${platformName}-${archName}${extension}`;
+}
+function normalizeVersion(v) {
+return v.replace(/^v/, "").replace(/-.*$/, "");
+}
+function compareVersions(a, b) {
+const aParts = normalizeVersion(a).split(".").map(Number);
+const bParts = normalizeVersion(b).split(".").map(Number);
+for (let i = 0; i < Math.max(aParts.length, bParts.length); i++) {
+const aNum = aParts[i] ?? 0;
+const bNum = bParts[i] ?? 0;
+if (!Number.isFinite(aNum) || !Number.isFinite(bNum)) {
+throw new Error(`Invalid version format: cannot compare "${a}" and "${b}"`);
+}
+if (aNum > bNum) return 1;
+if (aNum < bNum) return -1;
+}
+return 0;
+}
+function validateDownloadUrl(url) {
+let parsed;
+try {
+parsed = new URL(url);
+} catch {
+throw new Error(`Invalid download URL: ${url}`);
+}
+if (parsed.protocol !== "https:") {
+throw new Error(`Download URL must use HTTPS: ${url}`);
+}
+const isAllowed = ALLOWED_DOWNLOAD_DOMAINS.some((domain) => parsed.hostname === domain);
+if (!isAllowed) {
+throw new Error(
+`Download URL domain "${parsed.hostname}" is not in the allowed list: ${ALLOWED_DOWNLOAD_DOMAINS.join(", ")}`
+);
+}
+if (parsed.hostname === "github.com") {
+const expectedPrefix = `/${RULESYNC_REPO_OWNER}/${RULESYNC_REPO_NAME}/`;
+if (!parsed.pathname.startsWith(expectedPrefix)) {
+throw new Error(
+`Download URL path must belong to ${RULESYNC_REPO_OWNER}/${RULESYNC_REPO_NAME}: ${url}`
+);
+}
+}
+}
+async function checkForUpdate(currentVersion, token) {
+const client = new GitHubClient({
+token: GitHubClient.resolveToken(token)
+});
+const release = await client.getLatestRelease(RULESYNC_REPO_OWNER, RULESYNC_REPO_NAME);
+const latestVersion = normalizeVersion(release.tag_name);
+const normalizedCurrentVersion = normalizeVersion(currentVersion);
+return {
+currentVersion: normalizedCurrentVersion,
+latestVersion,
+hasUpdate: compareVersions(latestVersion, normalizedCurrentVersion) > 0,
+release
+};
+}
+function findAsset(release, assetName) {
+return release.assets.find((asset) => asset.name === assetName) ?? null;
+}
+async function downloadFile(url, destPath) {
+validateDownloadUrl(url);
+const response = await fetch(url, {
+redirect: "follow"
+});
+if (!response.ok) {
+throw new Error(`Failed to download: HTTP ${response.status}`);
+}
+if (response.url) {
+validateDownloadUrl(response.url);
+}
+const contentLength = response.headers.get("content-length");
+if (contentLength && Number(contentLength) > MAX_DOWNLOAD_SIZE) {
+throw new Error(
+`Download too large: ${contentLength} bytes exceeds limit of ${MAX_DOWNLOAD_SIZE} bytes`
+);
+}
+if (!response.body) {
+throw new Error("Response body is empty");
+}
+const fileStream = fs.createWriteStream(destPath);
+let downloadedBytes = 0;
+const bodyReader = Readable.fromWeb(
+// eslint-disable-next-line no-type-assertion/no-type-assertion
+response.body
+);
+const sizeChecker = new Transform({
+transform(chunk, _encoding, callback) {
+downloadedBytes += chunk.length;
+if (downloadedBytes > MAX_DOWNLOAD_SIZE) {
+callback(
+new Error(
+`Download too large: exceeded limit of ${MAX_DOWNLOAD_SIZE} bytes during streaming`
+)
+);
+return;
+}
+callback(null, chunk);
+}
+});
+await pipeline(bodyReader, sizeChecker, fileStream);
+}
+async function calculateSha256(filePath) {
+const content = await fs.promises.readFile(filePath);
+return crypto.createHash("sha256").update(content).digest("hex");
+}
+function parseSha256Sums(content) {
+const result = /* @__PURE__ */ new Map();
+for (const line of content.split("\n")) {
+const trimmed = line.trim();
+if (!trimmed) continue;
+const match = /^([a-f0-9]{64})\s+(.+)$/.exec(trimmed);
+if (match && match[1] && match[2]) {
+result.set(match[2].trim(), match[1]);
+}
+}
+return result;
+}
+async function performBinaryUpdate(currentVersion, options = {}) {
+const { force = false, token } = options;
+const updateCheck = await checkForUpdate(currentVersion, token);
+if (!updateCheck.hasUpdate && !force) {
+return `Already at the latest version (${currentVersion})`;
+}
+const assetName = getPlatformAssetName();
+if (!assetName) {
+throw new Error(
+`Unsupported platform: ${os2.platform()} ${os2.arch()}. Please download manually from ${RELEASES_URL}`
+);
+}
+const binaryAsset = findAsset(updateCheck.release, assetName);
+if (!binaryAsset) {
+throw new Error(
+`Binary for ${assetName} not found in release. Please download manually from ${RELEASES_URL}`
+);
+}
+const checksumAsset = findAsset(updateCheck.release, "SHA256SUMS");
+if (!checksumAsset) {
+throw new Error(
+`SHA256SUMS not found in release. Cannot verify download integrity. Please download manually from ${RELEASES_URL}`
+);
+}
+const tempDir = await fs.promises.mkdtemp(path3.join(os2.tmpdir(), "rulesync-update-"));
+let restoreFailed = false;
+try {
+if (os2.platform() !== "win32") {
+await fs.promises.chmod(tempDir, 448);
+}
+const tempBinaryPath = path3.join(tempDir, assetName);
+await downloadFile(binaryAsset.browser_download_url, tempBinaryPath);
+const checksumsPath = path3.join(tempDir, "SHA256SUMS");
+await downloadFile(checksumAsset.browser_download_url, checksumsPath);
+const checksumsContent = await fs.promises.readFile(checksumsPath, "utf-8");
+const checksums = parseSha256Sums(checksumsContent);
+const expectedChecksum = checksums.get(assetName);
+if (!expectedChecksum) {
+throw new Error(
+`Checksum entry for "${assetName}" not found in SHA256SUMS. Cannot verify download integrity.`
+);
+}
+const actualChecksum = await calculateSha256(tempBinaryPath);
+if (actualChecksum !== expectedChecksum) {
+throw new Error(
+`Checksum verification failed. Expected: ${expectedChecksum}, Got: ${actualChecksum}. The download may be corrupted.`
+);
+}
+const currentExePath = await fs.promises.realpath(process.execPath);
+const currentDir = path3.dirname(currentExePath);
+const backupPath = path3.join(tempDir, "rulesync.backup");
+try {
+await fs.promises.copyFile(currentExePath, backupPath);
+} catch (error) {
+if (isPermissionError(error)) {
+throw new UpdatePermissionError(
+`Permission denied: Cannot read ${currentExePath}. Try running with sudo.`
+);
+}
+throw error;
+}
+try {
+const tempInPlace = path3.join(currentDir, `.rulesync-update-${crypto.randomUUID()}`);
+try {
+await fs.promises.copyFile(tempBinaryPath, tempInPlace);
+if (os2.platform() !== "win32") {
+await fs.promises.chmod(tempInPlace, 493);
+}
+await fs.promises.rename(tempInPlace, currentExePath);
+} catch {
+try {
+await fs.promises.unlink(tempInPlace);
+} catch {
+}
+await fs.promises.copyFile(tempBinaryPath, currentExePath);
+if (os2.platform() !== "win32") {
+await fs.promises.chmod(currentExePath, 493);
+}
+}
+return `Successfully updated from ${currentVersion} to ${updateCheck.latestVersion}`;
+} catch (error) {
+try {
+await fs.promises.copyFile(backupPath, currentExePath);
+} catch {
+restoreFailed = true;
+throw new Error(
+`Failed to replace binary and restore failed. Backup is preserved at: ${backupPath} (in ${tempDir}). Please manually copy it to ${currentExePath}. Original error: ${error instanceof Error ? error.message : String(error)}`,
+{ cause: error }
+);
+}
+if (isPermissionError(error)) {
+throw new UpdatePermissionError(
+`Permission denied: Cannot write to ${path3.dirname(currentExePath)}. Try running with sudo.`
+);
+}
+throw error;
+}
+} finally {
+if (!restoreFailed) {
+try {
+await fs.promises.rm(tempDir, { recursive: true, force: true });
+} catch {
+}
+}
+}
+}
+function isPermissionError(error) {
+if (typeof error === "object" && error !== null && "code" in error) {
+const record = error;
+return record["code"] === "EACCES" || record["code"] === "EPERM";
+}
+return false;
+}
+function getNpmUpgradeInstructions() {
+return `This rulesync installation was installed via npm/npx.
+
+To upgrade, run one of the following commands:
+
+Global installation:
+npm install -g rulesync@latest
+
+Project dependency:
+npm install rulesync@latest
+
+Or use npx to always run the latest version:
+npx rulesync@latest --version`;
+}
+function getHomebrewUpgradeInstructions() {
+return `This rulesync installation was installed via Homebrew.
+
+To upgrade, run:
+brew upgrade rulesync`;
+}
+
+// src/cli/commands/update.ts
+async function updateCommand(currentVersion, options) {
+const { check = false, force = false, verbose = false, silent = false, token } = options;
+logger.configure({ verbose, silent });
+try {
+const environment = detectExecutionEnvironment();
+logger.debug(`Detected environment: ${environment}`);
+if (environment === "npm") {
+logger.info(getNpmUpgradeInstructions());
+return;
+}
+if (environment === "homebrew") {
+logger.info(getHomebrewUpgradeInstructions());
+return;
+}
+if (check) {
+logger.info("Checking for updates...");
+const updateCheck = await checkForUpdate(currentVersion, token);
+if (updateCheck.hasUpdate) {
+logger.success(
+`Update available: ${updateCheck.currentVersion} -> ${updateCheck.latestVersion}`
+);
+} else {
+logger.info(`Already at the latest version (${updateCheck.currentVersion})`);
+}
+return;
+}
+logger.info("Checking for updates...");
+const message = await performBinaryUpdate(currentVersion, { force, token });
+logger.success(message);
+} catch (error) {
+if (error instanceof GitHubClientError) {
+logger.error(`GitHub API Error: ${error.message}`);
+if (error.statusCode === 401 || error.statusCode === 403) {
+logger.info(
+"Tip: Set GITHUB_TOKEN or GH_TOKEN environment variable for better rate limits."
+);
+}
+} else if (error instanceof UpdatePermissionError) {
+logger.error(error.message);
+logger.info("Tip: Run with elevated privileges (e.g., sudo rulesync update)");
+} else {
+logger.error(formatError(error));
+}
+process.exit(1);
+}
+}
+
 // src/cli/index.ts
-var getVersion = () => "6.
+var getVersion = () => "6.6.0";
 var main = async () => {
 const program = new Command();
 const version = getVersion();
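The ESM build streams the release asset to disk through `Readable.fromWeb`, a byte-counting `Transform`, and `pipeline` from `node:stream/promises`, so the download cap is enforced while streaming rather than after the fact. A compact sketch of that guard (the cap mirrors MAX_DOWNLOAD_SIZE above; the destination path is illustrative):

```ts
// Sketch of the streaming size guard used by downloadFile; not the bundle's exact code.
import { createWriteStream } from "node:fs";
import { Readable, Transform } from "node:stream";
import { pipeline } from "node:stream/promises";

const MAX_BYTES = 500 * 1024 * 1024; // mirrors MAX_DOWNLOAD_SIZE above

async function downloadWithCap(url: string, dest: string): Promise<void> {
  const res = await fetch(url, { redirect: "follow" });
  if (!res.ok || !res.body) throw new Error(`Failed to download: HTTP ${res.status}`);
  let seen = 0;
  const guard = new Transform({
    transform(chunk, _enc, cb) {
      seen += chunk.length;
      if (seen > MAX_BYTES) return cb(new Error("Download exceeds size limit"));
      cb(null, chunk);
    },
  });
  // Readable.fromWeb bridges the WHATWG body stream into a Node stream; the cast
  // papers over the web/undici ReadableStream type mismatch, as the bundle also notes.
  await pipeline(
    Readable.fromWeb(res.body as unknown as import("node:stream/web").ReadableStream),
    guard,
    createWriteStream(dest),
  );
}
```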
@@ -17243,6 +17626,15 @@ var main = async () => {
 process.exit(1);
 }
 });
+program.command("update").description("Update rulesync to the latest version").option("--check", "Check for updates without installing").option("--force", "Force update even if already at latest version").option("--token <token>", "GitHub token for API access").option("-V, --verbose", "Verbose output").option("-s, --silent", "Suppress all output").action(async (options) => {
+await updateCommand(version, {
+check: options.check,
+force: options.force,
+token: options.token,
+verbose: options.verbose,
+silent: options.silent
+});
+});
 program.parse();
 };
 main().catch((error) => {