rulesync 6.7.0 → 6.7.1
This diff compares publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- package/README.md +51 -48
- package/dist/index.cjs +17 -10
- package/dist/index.js +17 -10
- package/package.json +1 -1
package/README.md
CHANGED
@@ -118,26 +118,29 @@ Get-FileHash rulesync.exe -Algorithm SHA256 | ForEach-Object {
 ## Getting Started
 
 ```bash
+# Install rulesync globally
+npm install -g rulesync
+
 # Create necessary directories, sample rule files, and configuration file
-
+rulesync init
 
 # Install official skills (recommended)
-
+rulesync fetch dyoshikawa/rulesync --features skills
 ```
 
 On the other hand, if you already have AI tool configurations:
 
 ```bash
 # Import existing files (to .rulesync/**/*)
-
-
-
-
+rulesync import --targets claudecode # From CLAUDE.md
+rulesync import --targets cursor # From .cursorrules
+rulesync import --targets copilot # From .github/copilot-instructions.md
+rulesync import --targets claudecode --features rules,mcp,commands,subagents
 
 # And more tool supports
 
 # Generate unified configurations with all features
-
+rulesync generate --targets "*" --features "*"
 ```
 
 ## Supported Tools and Features
@@ -211,46 +214,46 @@ Rulesync is trusted by leading companies and recognized by the industry:
 
 ```bash
 # Initialize new project (recommended: organized rules structure)
-
+rulesync init
 
 # Import existing configurations (to .rulesync/rules/ by default)
-
+rulesync import --targets claudecode --features rules,ignore,mcp,commands,subagents,skills
 
 # Fetch configurations from a Git repository
-
-
-
+rulesync fetch owner/repo
+rulesync fetch owner/repo@v1.0.0 --features rules,commands
+rulesync fetch https://github.com/owner/repo --conflict skip
 
 # Generate all features for all tools (new preferred syntax)
-
+rulesync generate --targets "*" --features "*"
 
 # Generate specific features for specific tools
-
-
+rulesync generate --targets copilot,cursor,cline --features rules,mcp
+rulesync generate --targets claudecode --features rules,subagents
 
 # Generate only rules (no MCP, ignore files, commands, or subagents)
-
+rulesync generate --targets "*" --features rules
 
 # Generate simulated commands and subagents
-
+rulesync generate --targets copilot,cursor,codexcli --features commands,subagents --simulate-commands --simulate-subagents
 
 # Dry run: show changes without writing files
-
+rulesync generate --dry-run --targets claudecode --features rules
 
 # Check if files are up to date (for CI/CD pipelines)
-
+rulesync generate --check --targets "*" --features "*"
 
 # Add generated files to .gitignore
-
+rulesync gitignore
 
 # Update rulesync to the latest version (single-binary installs)
-
+rulesync update
 
 # Check for updates without installing
-
+rulesync update --check
 
 # Force update even if already at latest version
-
+rulesync update --force
 ```
 
 ## Dry Run
@@ -262,7 +265,7 @@ Rulesync provides two dry run options for the `generate` command that allow you
 Show what would be written or deleted without actually writing any files. Changes are displayed with a `[DRY RUN]` prefix.
 
 ```bash
-
+rulesync generate --dry-run --targets claudecode --features rules
 ```
 
 ### `--check`
@@ -271,7 +274,7 @@ Same as `--dry-run`, but exits with code 1 if files are not up to date. This is
 
 ```bash
 # In your CI pipeline
-
+rulesync generate --check --targets "*" --features "*"
 echo $? # 0 if up to date, 1 if changes needed
 ```
 
@@ -291,20 +294,20 @@ The `fetch` command allows you to fetch configuration files directly from a Git
 
 ```bash
 # Full URL format
-
-
-
-
+rulesync fetch https://github.com/owner/repo
+rulesync fetch https://github.com/owner/repo/tree/branch
+rulesync fetch https://github.com/owner/repo/tree/branch/path/to/subdir
+rulesync fetch https://gitlab.com/owner/repo # GitLab (planned)
 
 # Prefix format
-
-
+rulesync fetch github:owner/repo
+rulesync fetch gitlab:owner/repo # GitLab (planned)
 
 # Shorthand format (defaults to GitHub)
-
-
-
-
+rulesync fetch owner/repo
+rulesync fetch owner/repo@ref # Specify branch/tag/commit
+rulesync fetch owner/repo:path # Specify subdirectory
+rulesync fetch owner/repo@ref:path # Both ref and path
 ```
 
 ### Options
@@ -323,27 +326,27 @@ npx rulesync fetch owner/repo@ref:path # Both ref and path
 
 ```bash
 # Fetch skills from external repositories
-
-
+rulesync fetch vercel-labs/agent-skills --features skills
+rulesync fetch anthropics/skills --features skills
 
 # Fetch all features from a public repository
-
+rulesync fetch dyoshikawa/rulesync --path .rulesync
 
 # Fetch only rules and commands from a specific tag
-
+rulesync fetch owner/repo@v1.0.0 --features rules,commands
 
 # Fetch from a private repository (uses GITHUB_TOKEN env var)
 export GITHUB_TOKEN=ghp_xxxx
-
+rulesync fetch owner/private-repo
 
 # Or use GitHub CLI to get the token
-GITHUB_TOKEN=$(gh auth token)
+GITHUB_TOKEN=$(gh auth token) rulesync fetch owner/private-repo
 
 # Preserve existing files (skip conflicts)
-
+rulesync fetch owner/repo --conflict skip
 
 # Fetch from a monorepo subdirectory
-
+rulesync fetch owner/repo:packages/my-package
 ```
 
 ## Configuration
@@ -695,7 +698,7 @@ Currently, supports rules and commands generation for Claude Code. Import for gl
 2. Initialize files for global files in the directory.
 ```bash
 cd ~/.aiglobal
-
+rulesync init
 ```
 3. Edit `~/.aiglobal/rulesync.jsonc` to enable global mode.
 ```jsonc
@@ -718,7 +721,7 @@ Currently, supports rules and commands generation for Claude Code. Import for gl
 5. Generate rules for global settings.
 ```bash
 # Run in the `~/.aiglobal` directory
-
+rulesync generate
 ```
 
 > [!NOTE]
@@ -735,7 +738,7 @@ Simulated commands, subagents and skills allow you to generate simulated feature
 1. Prepare `.rulesync/commands/*.md`, `.rulesync/subagents/*.md` and `.rulesync/skills/*/SKILL.md` for your purposes.
 2. Generate simulated commands, subagents and skills for specific tools that are included in cursor, codexcli and etc.
 ```bash
-
+rulesync generate \
 --targets copilot,cursor,codexcli \
 --features commands,subagents,skills \
 --simulate-commands \
@@ -762,7 +765,7 @@ Rulesync supports compressing tokens consumed by MCP servers [d-kimuson/modular-
 
 ```bash
 # Enable modular-mcp via CLI
-
+rulesync generate --targets claudecode --features mcp --modular-mcp
 
 # Or via configuration file
 {
@@ -906,7 +909,7 @@ So, in this case, approximately 92% reduction in MCP tools consumption!
 Rulesync provides official skills that you can install using the fetch command:
 
 ```bash
-
+rulesync fetch dyoshikawa/rulesync --features skills
 ```
 
 This will install the Rulesync documentation skill to your project.
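The fetch shorthand documented above composes three optional pieces: `owner/repo`, an `@ref` (branch, tag, or commit), and a `:path` subdirectory. The TypeScript sketch below only illustrates that grammar; it is not rulesync's actual parser, and the `FetchTarget` type and `parseShorthand` name are assumptions.

```ts
// Illustrative only: a minimal parser for the shorthand formats listed above
// (owner/repo, owner/repo@ref, owner/repo:path, owner/repo@ref:path).
// Not rulesync's implementation; names and behavior are assumptions.
type FetchTarget = {
  owner: string;
  repo: string;
  ref?: string;  // branch, tag, or commit
  path?: string; // subdirectory inside the repository
};

function parseShorthand(input: string): FetchTarget {
  // Split off an optional ":path" suffix first, then an optional "@ref".
  const [repoAndRef, ...pathParts] = input.split(":");
  const path = pathParts.length > 0 ? pathParts.join(":") : undefined;
  const [repoPart, ref] = repoAndRef.split("@");
  const [owner, repo] = repoPart.split("/");
  if (!owner || !repo) {
    throw new Error(`Expected "owner/repo" shorthand, got "${input}"`);
  }
  return { owner, repo, ref, path };
}

// parseShorthand("owner/repo@v1.0.0:packages/my-package")
// -> { owner: "owner", repo: "repo", ref: "v1.0.0", path: "packages/my-package" }
```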
package/dist/index.cjs
CHANGED
@@ -5452,7 +5452,7 @@ var OpencodeMcp = class _OpencodeMcp extends ToolMcp {
   static getSettablePaths({ global } = {}) {
     if (global) {
       return {
-        relativeDirPath: ".",
+        relativeDirPath: (0, import_node_path48.join)(".config", "opencode"),
         relativeFilePath: "opencode.json"
       };
     }
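In global mode, the OpenCode MCP settings directory moves from the root (`"."`) to `.config/opencode/`. A minimal sketch of the practical effect, assuming callers resolve these relative paths against the user's home directory (that resolution step is an assumption, not shown in this diff):

```ts
// Illustrative only: how the updated settable path resolves for global mode.
// The homedir-joining step is an assumption about how callers use these paths,
// not code copied from rulesync.
import { homedir } from "node:os";
import { join } from "node:path";

const paths = {
  relativeDirPath: join(".config", "opencode"), // was "." before 6.7.1
  relativeFilePath: "opencode.json",
};

// Under the assumption above, the global config resolves to
// ~/.config/opencode/opencode.json instead of ~/opencode.json.
const globalConfigPath = join(homedir(), paths.relativeDirPath, paths.relativeFilePath);
console.log(globalConfigPath);
```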
@@ -6429,6 +6429,10 @@ var DirFeatureProcessor = class {
   /**
    * Once converted to rulesync/tool dirs, write them to the filesystem.
    * Returns the number of directories written.
+   *
+   * Note: This method uses directory-level change detection. If any file within
+   * a directory has changed, ALL files in that directory are rewritten. This is
+   * an intentional design decision to ensure consistency within directory units.
    */
   async writeAiDirs(aiDirs) {
     let changedCount = 0;
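The new JSDoc note spells out the write granularity: change detection happens per directory, not per file. A minimal sketch of that behavior, under the assumption that a directory unit is just a list of path/content pairs (the helper names below are hypothetical, not rulesync code):

```ts
// Illustrative only: the directory-level change detection described in the
// comment above. If any file in a directory differs from what is on disk,
// every file in that directory unit is rewritten.
import { readFile, writeFile, mkdir } from "node:fs/promises";
import { dirname } from "node:path";

type AiFile = { path: string; content: string };

async function writeDirIfChanged(files: AiFile[]): Promise<boolean> {
  // Detect whether anything in the directory unit changed.
  let changed = false;
  for (const file of files) {
    const existing = await readFile(file.path, "utf8").catch(() => undefined);
    if (existing !== file.content) {
      changed = true;
      break;
    }
  }
  if (!changed) return false;

  // Rewrite the whole directory unit to keep it internally consistent.
  for (const file of files) {
    await mkdir(dirname(file.path), { recursive: true });
    await writeFile(file.path, file.content, "utf8");
  }
  return true;
}
```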
@@ -6478,7 +6482,12 @@ var DirFeatureProcessor = class {
       }
       for (const [i, file] of otherFiles.entries()) {
         const filePath = (0, import_node_path56.join)(dirPath, file.relativeFilePathToDirPath);
-        const content = otherFileContents[i]
+        const content = otherFileContents[i];
+        if (content === void 0) {
+          throw new Error(
+            `Internal error: content for file ${file.relativeFilePathToDirPath} is undefined. This indicates a synchronization issue between otherFiles and otherFileContents arrays.`
+          );
+        }
         await writeFileContent(filePath, content);
       }
     }
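The guard added here makes an implicit invariant explicit: `otherFiles` and `otherFileContents` must stay index-aligned, and a missing entry is treated as an internal error rather than silently written out. A hypothetical helper (not part of rulesync) expressing the same invariant:

```ts
// Illustrative only: the invariant enforced by the new guard. The two arrays
// are expected to be index-aligned; an undefined entry means they drifted
// apart, and failing loudly is preferable to silently writing bad output.
function zipFilesWithContents<F>(files: F[], contents: string[]): Array<[F, string]> {
  return files.map((file, i): [F, string] => {
    const content = contents[i];
    if (content === undefined) {
      throw new Error(`Internal error: missing content at index ${i}`);
    }
    return [file, content];
  });
}
```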
@@ -14996,9 +15005,9 @@ function logFeatureResult(params) {
   const { count, featureName, isPreview, modePrefix } = params;
   if (count > 0) {
     if (isPreview) {
-      logger.info(`${modePrefix} Would
+      logger.info(`${modePrefix} Would write ${count} ${featureName}`);
     } else {
-      logger.success(`
+      logger.success(`Written ${count} ${featureName}`);
     }
   }
 }
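Reconstructed from the added lines above, the per-feature messages now read "Would write N feature" in preview mode and "Written N feature" otherwise. A small usage sketch; the `logger` stub and the example arguments (including the `[DRY RUN]` prefix mentioned in the README) are assumptions for illustration:

```ts
// Illustrative only: messages produced by the updated logFeatureResult.
const logger = {
  info: (msg: string) => console.log(msg),
  success: (msg: string) => console.log(msg),
};

function logFeatureResult(params: {
  count: number;
  featureName: string;
  isPreview: boolean;
  modePrefix: string;
}) {
  const { count, featureName, isPreview, modePrefix } = params;
  if (count > 0) {
    if (isPreview) {
      logger.info(`${modePrefix} Would write ${count} ${featureName}`);
    } else {
      logger.success(`Written ${count} ${featureName}`);
    }
  }
}

logFeatureResult({ count: 3, featureName: "rules", isPreview: true, modePrefix: "[DRY RUN]" });
// -> "[DRY RUN] Would write 3 rules"
logFeatureResult({ count: 3, featureName: "rules", isPreview: false, modePrefix: "" });
// -> "Written 3 rules"
```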
@@ -15093,7 +15102,7 @@ async function generateCommand(options) {
   const totalGenerated = calculateTotalCount(result);
   if (totalGenerated === 0) {
     const enabledFeatures = features.join(", ");
-    logger.
+    logger.info(`\u2713 All files are up to date (${enabledFeatures})`);
     return;
   }
   const parts = [];
@@ -15105,11 +15114,9 @@ async function generateCommand(options) {
   if (result.skillsCount > 0) parts.push(`${result.skillsCount} skills`);
   if (result.hooksCount > 0) parts.push(`${result.hooksCount} hooks`);
   if (isPreview) {
-    logger.info(
-      `${modePrefix} Would generate ${totalGenerated} file(s) total (${parts.join(" + ")})`
-    );
+    logger.info(`${modePrefix} Would write ${totalGenerated} file(s) total (${parts.join(" + ")})`);
   } else {
-    logger.success(`\u{1F389} All done!
+    logger.success(`\u{1F389} All done! Written ${totalGenerated} file(s) total (${parts.join(" + ")})`);
   }
   if (check) {
     if (result.hasDiff) {
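With example counts, the updated single-line summaries render like this; the counts and feature names below are placeholders, and the `[DRY RUN]` prefix is assumed from the README's description of dry-run output:

```ts
// Placeholder values, for illustration of the updated summary format only.
const totalGenerated = 5;
const parts = ["3 rules", "1 skills", "1 hooks"];

// Preview (--dry-run / --check):
console.log(`[DRY RUN] Would write ${totalGenerated} file(s) total (${parts.join(" + ")})`);
// Normal run:
console.log(`\u{1F389} All done! Written ${totalGenerated} file(s) total (${parts.join(" + ")})`);
// -> "🎉 All done! Written 5 file(s) total (3 rules + 1 skills + 1 hooks)"
```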
@@ -17535,7 +17542,7 @@ async function updateCommand(currentVersion, options) {
 }
 
 // src/cli/index.ts
-var getVersion = () => "6.7.
+var getVersion = () => "6.7.1";
 var main = async () => {
   const program = new import_commander.Command();
   const version = getVersion();
package/dist/index.js
CHANGED
@@ -5429,7 +5429,7 @@ var OpencodeMcp = class _OpencodeMcp extends ToolMcp {
   static getSettablePaths({ global } = {}) {
     if (global) {
       return {
-        relativeDirPath: ".",
+        relativeDirPath: join47(".config", "opencode"),
         relativeFilePath: "opencode.json"
       };
     }
@@ -6406,6 +6406,10 @@ var DirFeatureProcessor = class {
   /**
    * Once converted to rulesync/tool dirs, write them to the filesystem.
    * Returns the number of directories written.
+   *
+   * Note: This method uses directory-level change detection. If any file within
+   * a directory has changed, ALL files in that directory are rewritten. This is
+   * an intentional design decision to ensure consistency within directory units.
    */
   async writeAiDirs(aiDirs) {
     let changedCount = 0;
@@ -6455,7 +6459,12 @@ var DirFeatureProcessor = class {
       }
       for (const [i, file] of otherFiles.entries()) {
         const filePath = join55(dirPath, file.relativeFilePathToDirPath);
-        const content = otherFileContents[i]
+        const content = otherFileContents[i];
+        if (content === void 0) {
+          throw new Error(
+            `Internal error: content for file ${file.relativeFilePathToDirPath} is undefined. This indicates a synchronization issue between otherFiles and otherFileContents arrays.`
+          );
+        }
         await writeFileContent(filePath, content);
       }
     }
@@ -14973,9 +14982,9 @@ function logFeatureResult(params) {
   const { count, featureName, isPreview, modePrefix } = params;
   if (count > 0) {
     if (isPreview) {
-      logger.info(`${modePrefix} Would
+      logger.info(`${modePrefix} Would write ${count} ${featureName}`);
     } else {
-      logger.success(`
+      logger.success(`Written ${count} ${featureName}`);
     }
   }
 }
@@ -15070,7 +15079,7 @@ async function generateCommand(options) {
   const totalGenerated = calculateTotalCount(result);
   if (totalGenerated === 0) {
     const enabledFeatures = features.join(", ");
-    logger.
+    logger.info(`\u2713 All files are up to date (${enabledFeatures})`);
     return;
   }
   const parts = [];
@@ -15082,11 +15091,9 @@ async function generateCommand(options) {
   if (result.skillsCount > 0) parts.push(`${result.skillsCount} skills`);
   if (result.hooksCount > 0) parts.push(`${result.hooksCount} hooks`);
   if (isPreview) {
-    logger.info(
-      `${modePrefix} Would generate ${totalGenerated} file(s) total (${parts.join(" + ")})`
-    );
+    logger.info(`${modePrefix} Would write ${totalGenerated} file(s) total (${parts.join(" + ")})`);
   } else {
-    logger.success(`\u{1F389} All done!
+    logger.success(`\u{1F389} All done! Written ${totalGenerated} file(s) total (${parts.join(" + ")})`);
   }
   if (check) {
     if (result.hasDiff) {
@@ -17512,7 +17519,7 @@ async function updateCommand(currentVersion, options) {
 }
 
 // src/cli/index.ts
-var getVersion = () => "6.7.
+var getVersion = () => "6.7.1";
 var main = async () => {
   const program = new Command();
   const version = getVersion();