@any-sync/cli 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/auth.js +12 -0
- package/bin/cli.js +155 -0
- package/bin/pull.js +20 -0
- package/bin/push.js +20 -0
- package/bin/reset.js +20 -0
- package/bin/status.js +20 -0
- package/lib/auth.js +36 -0
- package/lib/config.js +59 -0
- package/lib/gh.js +55 -0
- package/lib/glob.js +59 -0
- package/lib/hooks.js +45 -0
- package/lib/index.d.ts +96 -0
- package/lib/index.js +44 -0
- package/lib/init.js +103 -0
- package/lib/lockfile.js +76 -0
- package/lib/pull.js +118 -0
- package/lib/push.js +192 -0
- package/lib/reset.js +26 -0
- package/lib/status.js +131 -0
- package/package.json +17 -0
- package/test/glob.test.js +64 -0
- package/test/init.test.js +31 -0
- package/test/lockfile.test.js +91 -0
package/lib/init.js
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
|
|
6
|
+
/**
 * Create a config file with the given mappings.
 *
 * Validates `repo` (owner/repo) and `branch` before touching the filesystem.
 * If the config file already exists, returns its path without overwriting.
 *
 * @param {string} configPath - Path where the config JSON is written.
 * @param {string} repo - GitHub repository in "owner/repo" form.
 * @param {string} branch - Branch name to sync against.
 * @param {Array<object>} mappings - Templates ({ name, sourcePath, destPath, include?, exclude? }).
 * @returns {string} The config path (existing or newly created).
 * @throws {Error} If repo or branch fail validation.
 */
function init(configPath, repo, branch, mappings) {
  // Validate repo format before any filesystem work.
  if (!/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/.test(repo)) {
    throw new Error('Invalid repo format. Use owner/repo (e.g., myuser/my-sync-repo)');
  }

  // Validate branch name
  if (!/^[a-zA-Z0-9._\/-]+$/.test(branch)) {
    throw new Error('Invalid branch name. Use alphanumeric characters, hyphens, underscores, dots, or slashes.');
  }

  // If config already exists, return path without overwriting.
  if (fs.existsSync(configPath)) {
    return configPath;
  }

  // Ensure the parent directory exists.
  fs.mkdirSync(path.dirname(configPath), { recursive: true });

  // Build config with repo/branch applied to each mapping; include/exclude
  // are only emitted when present so the file stays minimal.
  const config = {
    mappings: mappings.map(m => ({
      name: m.name,
      repo,
      branch,
      sourcePath: m.sourcePath,
      destPath: m.destPath,
      ...(m.include ? { include: m.include } : {}),
      ...(m.exclude ? { exclude: m.exclude } : {}),
    })),
  };

  // 'wx' fails with EEXIST instead of clobbering — closes the TOCTOU race
  // between the existsSync check above and this write.
  try {
    fs.writeFileSync(configPath, JSON.stringify(config, null, 2) + '\n', { encoding: 'utf8', flag: 'wx' });
  } catch (err) {
    if (err.code === 'EEXIST') return configPath; // another process created it first
    throw err;
  }
  return configPath;
}
|
|
45
|
+
|
|
46
|
+
/**
 * Return the default mapping templates for a known preset name.
 * Supported presets: 'claude', 'openclaw'. Returns null for anything else.
 */
function getPresetMappings(preset) {
  const os = require('os');

  if (preset === 'claude') {
    return [
      { name: 'claude-skills', sourcePath: 'skills', destPath: '~/.claude/skills', include: ['**/*.md'] },
      { name: 'claude-memory', sourcePath: 'memory', destPath: '~/.claude/memory' },
      { name: 'claude-settings', sourcePath: 'settings', destPath: '~/.claude', include: ['settings.json'] },
    ];
  }

  if (preset === 'openclaw') {
    // Workspace root: env override, else default; a non-default profile
    // switches to its own per-profile directory.
    let workspace =
      process.env.OPENCLAW_WORKSPACE || path.join(os.homedir(), '.openclaw', 'workspace');
    const profile = process.env.OPENCLAW_PROFILE;
    if (profile && profile !== 'default') {
      workspace = path.join(os.homedir(), '.openclaw', `workspace-${profile}`);
    }
    return [
      { name: 'workspace-skills', sourcePath: 'skills', destPath: path.join(workspace, 'skills') },
      { name: 'workspace-memory', sourcePath: 'memory', destPath: path.join(workspace, 'memory') },
      {
        name: 'workspace-config',
        sourcePath: 'config',
        destPath: workspace,
        include: ['AGENTS.md', 'SOUL.md', 'USER.md', 'TOOLS.md', 'IDENTITY.md'],
      },
    ];
  }

  return null;
}
|
|
102
|
+
|
|
103
|
+
module.exports = { init, getPresetMappings };
|
package/lib/lockfile.js
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const crypto = require('crypto');
|
|
6
|
+
|
|
7
|
+
// Shape of a fresh lockfile; per-instance copies are made so this template
// object is never mutated.
const EMPTY_DATA = { version: 1, files: {}, lastSync: {} };

/**
 * Persistent record of sync state: per-file remote blob SHA and local content
 * hash (keyed "<mapping>::<relpath>"), plus a last-sync timestamp per mapping.
 * Stored as pretty-printed JSON and written atomically via temp-file + rename.
 */
class Lockfile {
  constructor(filePath) {
    this._path = filePath;
    // Fresh nested objects so instances never share state with EMPTY_DATA.
    this._data = { ...EMPTY_DATA, files: {}, lastSync: {} };
  }

  /**
   * Load a lockfile from disk. Missing, unparsable, or wrong-version files
   * yield an empty lockfile rather than an error.
   */
  static load(filePath) {
    const lf = new Lockfile(filePath);
    if (fs.existsSync(filePath)) {
      try {
        const raw = JSON.parse(fs.readFileSync(filePath, 'utf8'));
        if (raw.version === 1 && raw.files && raw.lastSync) {
          lf._data = raw;
        }
      } catch {
        // Invalid file — use empty data
      }
    }
    return lf;
  }

  /** Atomically persist to disk: write a unique temp file, then rename over. */
  save() {
    const tmp = this._path + '.' + crypto.randomUUID();
    try {
      fs.writeFileSync(tmp, JSON.stringify(this._data, null, 2) + '\n', 'utf8');
      fs.renameSync(tmp, this._path);
    } catch (err) {
      // Don't leave a stray temp file behind if the write or rename fails.
      try { fs.unlinkSync(tmp); } catch { /* best effort */ }
      throw err;
    }
  }

  /** Return the entry for a lock key, or null if untracked. */
  getEntry(key) {
    return this._data.files[key] || null;
  }

  /** Record a file's remote blob SHA and local content hash. */
  setEntry(key, remoteSha, localHash) {
    this._data.files[key] = {
      remoteSha,
      localHash,
      // ISO timestamp with milliseconds zeroed (stable '.000Z' suffix).
      syncedAt: new Date().toISOString().replace(/\.\d{3}Z$/, '.000Z'),
    };
  }

  /** All entries for one mapping, keyed by relpath (mapping prefix stripped). */
  getEntriesForMapping(name) {
    const prefix = name + '::';
    const result = {};
    for (const [key, value] of Object.entries(this._data.files)) {
      if (key.startsWith(prefix)) {
        result[key.slice(prefix.length)] = value;
      }
    }
    return result;
  }

  /** Stamp the mapping's last-sync time (ISO, milliseconds zeroed). */
  setLastSync(name) {
    this._data.lastSync[name] = new Date().toISOString().replace(/\.\d{3}Z$/, '.000Z');
  }

  /** Return the mapping's last-sync timestamp, or null if never synced. */
  getLastSync(name) {
    return this._data.lastSync[name] || null;
  }
}
|
|
67
|
+
|
|
68
|
+
/** Build the lockfile key for a file within a mapping: "<mapping>::<relpath>". */
function makeKey(mapping, relpath) {
  return `${mapping}::${relpath}`;
}
|
|
71
|
+
|
|
72
|
+
/** Return the SHA-256 hex digest of a file's contents. */
function hashFile(filePath) {
  const digest = crypto.createHash('sha256');
  digest.update(fs.readFileSync(filePath));
  return digest.digest('hex');
}
|
|
75
|
+
|
|
76
|
+
module.exports = { Lockfile, makeKey, hashFile };
|
package/lib/pull.js
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const { Lockfile, makeKey, hashFile } = require('./lockfile');
|
|
6
|
+
const { ghApiRetry } = require('./gh');
|
|
7
|
+
const { matchesAny } = require('./glob');
|
|
8
|
+
const { loadConfig, parseMapping } = require('./config');
|
|
9
|
+
const { checkAuth } = require('./auth');
|
|
10
|
+
|
|
11
|
+
/**
 * Pull files from GitHub into local destPaths, per the config's mappings.
 *
 * For each mapping: fetches the branch's recursive git tree, filters entries
 * by sourcePath prefix and include/exclude globs, then downloads each blob
 * that changed remotely. A file that changed both remotely and locally (its
 * current hash differs from the lockfile's recorded hash) is reported as a
 * conflict and left untouched.
 *
 * Side effects: sets process.env.GITHUB_TOKEN, writes files under each
 * mapping's destPath, and saves the lockfile.
 *
 * @param {string} configPath - Path to the sync config JSON.
 * @param {string} lockfilePath - Path to the lockfile JSON.
 * @returns {{pulled: Array, conflicts: Array, skipped: Array}}
 */
function pull(configPath, lockfilePath) {
  const config = loadConfig(configPath);
  const token = checkAuth();
  // Exported so the `gh` CLI invoked by ghApiRetry authenticates with it.
  process.env.GITHUB_TOKEN = token;

  const lf = Lockfile.load(lockfilePath);
  const pulled = [];
  const conflicts = [];
  const skipped = [];

  for (const raw of config.mappings) {
    const m = parseMapping(raw);
    const [owner, repoName] = m.repo.split('/');
    // Trailing slash so prefix matching and stripping align on path segments.
    const prefix = m.sourcePath ? m.sourcePath + '/' : '';

    // Fetch the branch's recursive tree as "<path>\t<sha>" lines (blobs only).
    let treeOutput;
    try {
      treeOutput = ghApiRetry([
        `/repos/${owner}/${repoName}/git/trees/${m.branch}?recursive=1`,
        '--jq', '.tree[] | select(.type == "blob") | [.path, .sha] | @tsv',
      ]);
    } catch (err) {
      // A failed mapping is reported but does not abort the other mappings.
      process.stderr.write(`Error: Failed to fetch tree for ${m.repo} branch ${m.branch}\n`);
      continue;
    }

    // Empty tree: nothing to pull, but still record the sync time.
    if (!treeOutput) {
      lf.setLastSync(m.name);
      continue;
    }

    const entries = treeOutput.split('\n').filter(Boolean).map(line => {
      const [filePath, sha] = line.split('\t');
      return { path: filePath, sha };
    });

    for (const entry of entries) {
      // Only files under the mapping's sourcePath.
      if (prefix && !entry.path.startsWith(prefix)) continue;

      const relPath = prefix ? entry.path.slice(prefix.length) : entry.path;

      // Apply include filter
      if (m.include.length > 0 && !matchesAny(m.include, relPath)) continue;

      // Apply exclude filter
      if (m.exclude.length > 0 && matchesAny(m.exclude, relPath)) continue;

      const lockKey = makeKey(m.name, relPath);
      const existing = lf.getEntry(lockKey);
      const localFile = path.join(m.destPath, relPath);

      if (existing) {
        // Remote unchanged since last sync → skip
        if (existing.remoteSha === entry.sha) {
          skipped.push({ file: relPath, mapping: m.name, reason: 'unchanged' });
          continue;
        }

        // Remote changed — if the local file also diverged from the recorded
        // hash, report a conflict and leave the local file alone.
        if (fs.existsSync(localFile)) {
          const currentHash = hashFile(localFile);
          if (currentHash !== existing.localHash) {
            conflicts.push({ file: relPath, mapping: m.name });
            continue;
          }
        }
      }

      // Download the blob (base64 content, per the GitHub git/blobs API).
      let blobContent;
      try {
        blobContent = ghApiRetry([
          `/repos/${owner}/${repoName}/git/blobs/${entry.sha}`,
          '--jq', '.content',
        ]);
      } catch (err) {
        process.stderr.write(`Error: Failed to download blob for ${relPath}\n`);
        continue;
      }

      // Write atomically: temp file beside the target, then rename over it.
      const dir = path.dirname(localFile);
      fs.mkdirSync(dir, { recursive: true });
      const decoded = Buffer.from(blobContent.trim(), 'base64');
      const tmp = localFile + '.' + require('crypto').randomUUID();
      fs.writeFileSync(tmp, decoded);
      fs.renameSync(tmp, localFile);

      // Record the new remote SHA and local hash so future pulls can
      // distinguish unchanged / changed / conflicting states.
      const newHash = hashFile(localFile);
      lf.setEntry(lockKey, entry.sha, newHash);
      pulled.push({ file: relPath, mapping: m.name });
    }

    lf.setLastSync(m.name);
  }

  lf.save();
  return { pulled, conflicts, skipped };
}
|
|
117
|
+
|
|
118
|
+
module.exports = { pull };
|
package/lib/push.js
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const crypto = require('crypto');
|
|
6
|
+
const { Lockfile, makeKey, hashFile } = require('./lockfile');
|
|
7
|
+
const { ghApiRetry } = require('./gh');
|
|
8
|
+
const { matchesAny } = require('./glob');
|
|
9
|
+
const { loadConfig, parseMapping } = require('./config');
|
|
10
|
+
const { checkAuth } = require('./auth');
|
|
11
|
+
|
|
12
|
+
/**
 * Recursively list files under `dir` as POSIX-style relative paths.
 * Returns an empty array when the directory does not exist.
 */
function walkDir(dir) {
  if (!fs.existsSync(dir)) {
    return [];
  }

  return fs
    .readdirSync(dir, { withFileTypes: true, recursive: true })
    .filter((dirent) => dirent.isFile())
    .map((dirent) => {
      // dirent.parentPath is Node 20+; older versions expose dirent.path.
      const base = dirent.parentPath || dirent.path;
      const abs = path.join(base, dirent.name);
      return path.relative(dir, abs).split(path.sep).join('/');
    });
}
|
|
31
|
+
|
|
32
|
+
/**
 * Push locally changed files to GitHub via the git data API.
 *
 * For each mapping: walks destPath, filters by include/exclude, and for every
 * file that is new or whose content hash differs from the lockfile's record,
 * creates a blob. All changed files for a mapping go into one tree and one
 * commit, and the branch ref is fast-forwarded to the new commit.
 *
 * Side effects: sets process.env.GITHUB_TOKEN, saves the lockfile, and calls
 * process.exit(1) on any API failure.
 * NOTE(review): exiting mid-run skips lf.save(), so lockfile updates for
 * mappings already pushed in this call are lost — confirm this is intended.
 *
 * @param {string} configPath - Path to the sync config JSON.
 * @param {string} lockfilePath - Path to the lockfile JSON.
 * @returns {{pushed: Array<{file: string, mapping: string}>, branch: string}}
 */
function push(configPath, lockfilePath) {
  const config = loadConfig(configPath);
  const token = checkAuth();
  // Exported so the `gh` CLI invoked by ghApiRetry authenticates with it.
  process.env.GITHUB_TOKEN = token;

  const lf = Lockfile.load(lockfilePath);
  const allPushed = [];
  // Reported in the result; ends up as the branch of the last mapping seen.
  let lastBranch = 'main';

  for (const raw of config.mappings) {
    const m = parseMapping(raw);
    const [owner, repoName] = m.repo.split('/');
    lastBranch = m.branch;

    if (!fs.existsSync(m.destPath)) continue;

    const localFiles = walkDir(m.destPath);
    const treeEntries = [];

    for (const relPath of localFiles) {
      // Apply include filter
      if (m.include.length > 0 && !matchesAny(m.include, relPath)) continue;

      // Apply exclude filter
      if (m.exclude.length > 0 && matchesAny(m.exclude, relPath)) continue;

      const localFile = path.join(m.destPath, relPath);
      const lockKey = makeKey(m.name, relPath);
      const existing = lf.getEntry(lockKey);
      const currentHash = hashFile(localFile);

      // A file is pushed when untracked (new) or its content hash moved.
      let isChanged = false;
      if (!existing) {
        isChanged = true; // New file
      } else if (currentHash !== existing.localHash) {
        isChanged = true; // Modified
      }

      if (!isChanged) continue;

      // Path inside the repo: sourcePath prefix + mapping-relative path.
      const repoPath = m.sourcePath ? m.sourcePath + '/' + relPath : relPath;

      // Create blob (base64 encoded, piped via stdin)
      const content = fs.readFileSync(localFile).toString('base64');
      const blobPayload = JSON.stringify({ content, encoding: 'base64' });

      let blobSha;
      try {
        blobSha = ghApiRetry(
          [`/repos/${owner}/${repoName}/git/blobs`, '--input', '-', '--jq', '.sha'],
          { input: blobPayload }
        );
      } catch (err) {
        process.stderr.write(`Error: Failed to create blob for ${relPath}\n`);
        process.exit(1);
      }

      treeEntries.push({ path: repoPath, mode: '100644', type: 'blob', sha: blobSha });
      allPushed.push({ file: relPath, mapping: m.name, blobSha, localHash: currentHash });
    }

    // Nothing changed for this mapping — no tree/commit needed.
    if (treeEntries.length === 0) continue;

    // Get the branch head commit SHA (parent of the new commit).
    let commitSha;
    try {
      commitSha = ghApiRetry([
        `/repos/${owner}/${repoName}/git/ref/heads/${m.branch}`,
        '--jq', '.object.sha',
      ]);
    } catch (err) {
      process.stderr.write(`Error: Failed to get commit SHA for branch ${m.branch}\n`);
      process.exit(1);
    }

    // Get the head commit's tree SHA to use as base_tree.
    let baseTreeSha;
    try {
      baseTreeSha = ghApiRetry([
        `/repos/${owner}/${repoName}/git/commits/${commitSha}`,
        '--jq', '.tree.sha',
      ]);
    } catch (err) {
      process.stderr.write('Error: Failed to get base tree SHA\n');
      process.exit(1);
    }

    // Create a new tree layering the changed blobs over the base tree.
    const treePayload = JSON.stringify({ base_tree: baseTreeSha, tree: treeEntries });
    let newTreeSha;
    try {
      newTreeSha = ghApiRetry(
        [`/repos/${owner}/${repoName}/git/trees`, '--input', '-', '--jq', '.sha'],
        { input: treePayload }
      );
    } catch (err) {
      process.stderr.write('Error: Failed to create tree\n');
      process.exit(1);
    }

    // Commit message: name the single file, or summarize the count.
    let commitMsg;
    if (treeEntries.length === 1) {
      commitMsg = `sync: Update ${treeEntries[0].path} via Any Sync`;
    } else {
      commitMsg = `sync: Update ${treeEntries.length} file(s) in ${m.sourcePath} via Any Sync`;
    }

    const commitPayload = JSON.stringify({
      message: commitMsg,
      tree: newTreeSha,
      parents: [commitSha],
    });

    let newCommitSha;
    try {
      newCommitSha = ghApiRetry(
        [`/repos/${owner}/${repoName}/git/commits`, '--input', '-', '--jq', '.sha'],
        { input: commitPayload }
      );
    } catch (err) {
      process.stderr.write('Error: Failed to create commit\n');
      process.exit(1);
    }

    // Fast-forward the branch ref to the new commit. Fails if the remote
    // head moved since commitSha was read above.
    try {
      ghApiRetry([
        '-X', 'PATCH',
        `/repos/${owner}/${repoName}/git/refs/heads/${m.branch}`,
        '-f', `sha=${newCommitSha}`,
      ]);
    } catch (err) {
      process.stderr.write('Error: Failed to update branch ref. Another push may have occurred — try pulling first.\n');
      process.exit(1);
    }

    // Record pushed files: remote SHA = blob SHA, matching what pull compares
    // against tree entries later.
    for (const p of allPushed) {
      if (p.mapping !== m.name) continue;
      const lockKey = makeKey(m.name, p.file);
      lf.setEntry(lockKey, p.blobSha, p.localHash);
    }

    lf.setLastSync(m.name);
  }

  lf.save();
  return {
    pushed: allPushed.map(p => ({ file: p.file, mapping: p.mapping })),
    branch: lastBranch,
  };
}
|
|
191
|
+
|
|
192
|
+
module.exports = { push };
|
package/lib/reset.js
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
|
|
5
|
+
/**
 * Delete the config file and lockfile, if present.
 * Returns: { deletedConfig, configPath, deletedLockfile, lockfilePath }
 */
function reset(configPath, lockfilePath) {
  // Remove a file if it exists; report whether anything was deleted.
  const removeIfPresent = (target) => {
    if (!fs.existsSync(target)) {
      return false;
    }
    fs.unlinkSync(target);
    return true;
  };

  return {
    deletedConfig: removeIfPresent(configPath),
    configPath,
    deletedLockfile: removeIfPresent(lockfilePath),
    lockfilePath,
  };
}
|
|
25
|
+
|
|
26
|
+
module.exports = { reset };
|
package/lib/status.js
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const { execFileSync } = require('child_process');
|
|
6
|
+
const { Lockfile, makeKey, hashFile } = require('./lockfile');
|
|
7
|
+
const { matchesAny } = require('./glob');
|
|
8
|
+
const { loadConfig, parseMapping } = require('./config');
|
|
9
|
+
|
|
10
|
+
/**
 * Recursively collect files under `dir`, as POSIX-style paths relative to `dir`.
 * Returns an empty array when the directory does not exist.
 */
function walkDir(dir) {
  if (!fs.existsSync(dir)) return [];
  const found = [];
  const dirents = fs.readdirSync(dir, { withFileTypes: true, recursive: true });
  for (const d of dirents) {
    if (!d.isFile()) continue;
    // d.parentPath is Node 20+; fall back to the older `path` property.
    const abs = path.join(d.parentPath || d.path, d.name);
    found.push(path.relative(dir, abs).split(path.sep).join('/'));
  }
  return found;
}
|
|
26
|
+
|
|
27
|
+
/**
 * Get sync status: auth method/user, config validity, and per-mapping drift
 * (tracked files locally modified + new untracked files).
 * Returns: { auth: {...}, config: {...}, mappings: [...] }
 *
 * @param {string} configPath - Path to the sync config JSON.
 * @param {string} lockfilePath - Path to the lockfile JSON.
 */
function status(configPath, lockfilePath) {
  // Fetch the authenticated GitHub login via `gh api /user`; 'unknown' when
  // the lookup fails. (Extracted: previously duplicated in both auth branches.)
  function lookupAuthUser() {
    try {
      return execFileSync('gh', ['api', '/user', '--jq', '.login'], {
        encoding: 'utf8',
      }).trim();
    } catch {
      return 'unknown';
    }
  }

  // Determine auth method: an explicit GITHUB_TOKEN wins; otherwise probe
  // `gh auth token` to see if the gh CLI is logged in.
  let authMethod = 'none';
  let authUser = '';

  if (process.env.GITHUB_TOKEN) {
    authMethod = 'token';
    authUser = lookupAuthUser();
  } else {
    try {
      execFileSync('gh', ['auth', 'token'], { encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] });
      authMethod = 'gh';
      authUser = lookupAuthUser();
    } catch {
      // No auth available; keep method 'none' and empty user.
    }
  }

  // Validate config: must exist, parse as JSON, and carry an array `mappings`.
  let configValid = false;
  let config = null;
  if (fs.existsSync(configPath)) {
    try {
      config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
      if (Array.isArray(config.mappings)) {
        configValid = true;
      }
    } catch {
      // Invalid config
    }
  }

  const lf = Lockfile.load(lockfilePath);
  const mappings = [];

  if (configValid && config) {
    for (const raw of config.mappings) {
      const m = parseMapping(raw);
      const lastSync = lf.getLastSync(m.name);
      const entries = lf.getEntriesForMapping(m.name);
      const trackedFiles = Object.keys(entries).length;
      const changes = [];

      // Tracked files whose current hash differs from the recorded one are
      // reported as modified. (Locally deleted files are not reported here.)
      for (const [relPath, entry] of Object.entries(entries)) {
        const localFile = path.join(m.destPath, relPath);
        if (fs.existsSync(localFile)) {
          const currentHash = hashFile(localFile);
          if (currentHash !== entry.localHash) {
            changes.push({ file: relPath, type: 'modified' });
          }
        }
      }

      // Files on disk that pass include/exclude but have no lockfile entry
      // are reported as new.
      if (fs.existsSync(m.destPath)) {
        const localFiles = walkDir(m.destPath);
        for (const relPath of localFiles) {
          // Apply include filter
          if (m.include.length > 0 && !matchesAny(m.include, relPath)) continue;
          // Apply exclude filter
          if (m.exclude.length > 0 && matchesAny(m.exclude, relPath)) continue;

          const lockKey = makeKey(m.name, relPath);
          if (!lf.getEntry(lockKey)) {
            changes.push({ file: relPath, type: 'new' });
          }
        }
      }

      mappings.push({
        name: m.name,
        repo: m.repo,
        branch: m.branch,
        lastSync,
        trackedFiles,
        changes,
      });
    }
  }

  return {
    auth: { method: authMethod, user: authUser },
    config: { path: configPath, valid: configValid },
    mappings,
  };
}
|
|
130
|
+
|
|
131
|
+
module.exports = { status };
|
package/package.json
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@any-sync/cli",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"description": "CLI and core library for Any Sync — bidirectional GitHub file sync",
|
|
5
|
+
"main": "lib/index.js",
|
|
6
|
+
"exports": {
|
|
7
|
+
".": "./lib/index.js"
|
|
8
|
+
},
|
|
9
|
+
"bin": {
|
|
10
|
+
"any-sync": "bin/cli.js"
|
|
11
|
+
},
|
|
12
|
+
"license": "MIT",
|
|
13
|
+
"repository": "https://github.com/imink/any-sync",
|
|
14
|
+
"scripts": {
|
|
15
|
+
"test": "node --test test/*.test.js"
|
|
16
|
+
}
|
|
17
|
+
}
|