@lifestreamdynamics/vault-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +759 -0
- package/dist/client.d.ts +12 -0
- package/dist/client.js +79 -0
- package/dist/commands/admin.d.ts +2 -0
- package/dist/commands/admin.js +263 -0
- package/dist/commands/audit.d.ts +2 -0
- package/dist/commands/audit.js +119 -0
- package/dist/commands/auth.d.ts +2 -0
- package/dist/commands/auth.js +256 -0
- package/dist/commands/config.d.ts +2 -0
- package/dist/commands/config.js +130 -0
- package/dist/commands/connectors.d.ts +2 -0
- package/dist/commands/connectors.js +224 -0
- package/dist/commands/docs.d.ts +2 -0
- package/dist/commands/docs.js +194 -0
- package/dist/commands/hooks.d.ts +2 -0
- package/dist/commands/hooks.js +159 -0
- package/dist/commands/keys.d.ts +2 -0
- package/dist/commands/keys.js +165 -0
- package/dist/commands/publish.d.ts +2 -0
- package/dist/commands/publish.js +138 -0
- package/dist/commands/search.d.ts +2 -0
- package/dist/commands/search.js +61 -0
- package/dist/commands/shares.d.ts +2 -0
- package/dist/commands/shares.js +121 -0
- package/dist/commands/subscription.d.ts +2 -0
- package/dist/commands/subscription.js +166 -0
- package/dist/commands/sync.d.ts +2 -0
- package/dist/commands/sync.js +565 -0
- package/dist/commands/teams.d.ts +2 -0
- package/dist/commands/teams.js +322 -0
- package/dist/commands/user.d.ts +2 -0
- package/dist/commands/user.js +48 -0
- package/dist/commands/vaults.d.ts +2 -0
- package/dist/commands/vaults.js +157 -0
- package/dist/commands/versions.d.ts +2 -0
- package/dist/commands/versions.js +219 -0
- package/dist/commands/webhooks.d.ts +2 -0
- package/dist/commands/webhooks.js +181 -0
- package/dist/config.d.ts +24 -0
- package/dist/config.js +88 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +63 -0
- package/dist/lib/credential-manager.d.ts +48 -0
- package/dist/lib/credential-manager.js +101 -0
- package/dist/lib/encrypted-config.d.ts +20 -0
- package/dist/lib/encrypted-config.js +102 -0
- package/dist/lib/keychain.d.ts +8 -0
- package/dist/lib/keychain.js +82 -0
- package/dist/lib/migration.d.ts +31 -0
- package/dist/lib/migration.js +92 -0
- package/dist/lib/profiles.d.ts +43 -0
- package/dist/lib/profiles.js +104 -0
- package/dist/sync/config.d.ts +32 -0
- package/dist/sync/config.js +100 -0
- package/dist/sync/conflict.d.ts +30 -0
- package/dist/sync/conflict.js +60 -0
- package/dist/sync/daemon-worker.d.ts +1 -0
- package/dist/sync/daemon-worker.js +128 -0
- package/dist/sync/daemon.d.ts +44 -0
- package/dist/sync/daemon.js +174 -0
- package/dist/sync/diff.d.ts +43 -0
- package/dist/sync/diff.js +166 -0
- package/dist/sync/engine.d.ts +41 -0
- package/dist/sync/engine.js +233 -0
- package/dist/sync/ignore.d.ts +16 -0
- package/dist/sync/ignore.js +72 -0
- package/dist/sync/remote-poller.d.ts +23 -0
- package/dist/sync/remote-poller.js +145 -0
- package/dist/sync/state.d.ts +32 -0
- package/dist/sync/state.js +98 -0
- package/dist/sync/types.d.ts +68 -0
- package/dist/sync/types.js +4 -0
- package/dist/sync/watcher.d.ts +23 -0
- package/dist/sync/watcher.js +207 -0
- package/dist/utils/flags.d.ts +18 -0
- package/dist/utils/flags.js +31 -0
- package/dist/utils/format.d.ts +2 -0
- package/dist/utils/format.js +22 -0
- package/dist/utils/output.d.ts +87 -0
- package/dist/utils/output.js +229 -0
- package/package.json +62 -0
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
/**
 * Compute the diff between local and remote state for a pull operation.
 * Pull = download remote changes to local.
 */
export function computePullDiff(localFiles, remoteFiles, lastState) {
    const downloads = [];
    const deletes = [];
    // Small factory so each decision branch below only states what varies.
    const queueDownload = (docPath, action, sizeBytes, reason) => {
        downloads.push({ path: docPath, action, direction: 'download', sizeBytes, reason });
    };
    // Walk every remote document and decide whether it must be fetched.
    for (const docPath of Object.keys(remoteFiles)) {
        const remote = remoteFiles[docPath];
        const local = localFiles[docPath];
        const lastRemote = lastState.remote[docPath];
        if (!local) {
            // Present remotely, absent locally.
            if (lastState.local[docPath]) {
                // Was previously synced but deleted locally — remote wins on pull.
                queueDownload(docPath, 'create', remote.size, 'Deleted locally, exists remotely (pull restores)');
            }
            else {
                queueDownload(docPath, 'create', remote.size, 'New remote file');
            }
        }
        else if (lastRemote && remote.hash !== lastRemote.hash) {
            // Remote content moved on since the last recorded sync point.
            queueDownload(docPath, 'update', remote.size, 'Remote file updated');
        }
        else if (!lastRemote && remote.hash !== local.hash) {
            // No sync history for this path; pull resolves the difference toward remote.
            queueDownload(docPath, 'update', remote.size, 'Content differs (first sync, pull prefers remote)');
        }
    }
    // Paths we previously saw remotely that have since vanished remotely, but
    // still exist locally, are removed on pull.
    for (const docPath of Object.keys(lastState.remote)) {
        if (!remoteFiles[docPath] && localFiles[docPath]) {
            deletes.push({
                path: docPath,
                action: 'delete',
                direction: 'download',
                sizeBytes: 0,
                reason: 'Deleted from remote',
            });
        }
    }
    // Deletes transfer no bytes; only downloads count toward the total.
    let totalBytes = 0;
    for (const entry of downloads) {
        totalBytes += entry.sizeBytes;
    }
    return { uploads: [], downloads, deletes, totalBytes };
}
|
|
71
|
+
/**
 * Compute the diff between local and remote state for a push operation.
 * Push = upload local changes to remote.
 */
export function computePushDiff(localFiles, remoteFiles, lastState) {
    const uploads = [];
    const deletes = [];
    // Small factory so each decision branch below only states what varies.
    const queueUpload = (docPath, action, sizeBytes, reason) => {
        uploads.push({ path: docPath, action, direction: 'upload', sizeBytes, reason });
    };
    // Walk every local document and decide whether it must be sent.
    for (const docPath of Object.keys(localFiles)) {
        const local = localFiles[docPath];
        const remote = remoteFiles[docPath];
        const lastLocal = lastState.local[docPath];
        if (!remote) {
            // Present locally, absent remotely.
            if (lastState.remote[docPath]) {
                // Was previously synced but deleted remotely — local wins on push.
                queueUpload(docPath, 'create', local.size, 'Deleted remotely, exists locally (push restores)');
            }
            else {
                queueUpload(docPath, 'create', local.size, 'New local file');
            }
        }
        else if (lastLocal && local.hash !== lastLocal.hash) {
            // Local content moved on since the last recorded sync point.
            queueUpload(docPath, 'update', local.size, 'Local file updated');
        }
        else if (!lastLocal && local.hash !== remote.hash) {
            // No sync history for this path; push resolves the difference toward local.
            queueUpload(docPath, 'update', local.size, 'Content differs (first sync, push prefers local)');
        }
    }
    // Paths we previously tracked locally that are now gone locally, but still
    // exist remotely, are deleted from the remote on push.
    for (const docPath of Object.keys(lastState.local)) {
        if (!localFiles[docPath] && remoteFiles[docPath]) {
            deletes.push({
                path: docPath,
                action: 'delete',
                direction: 'upload',
                sizeBytes: 0,
                reason: 'Deleted locally',
            });
        }
    }
    // Deletes transfer no bytes; only uploads count toward the total.
    let totalBytes = 0;
    for (const entry of uploads) {
        totalBytes += entry.sizeBytes;
    }
    return { uploads, downloads: [], deletes, totalBytes };
}
|
|
141
|
+
/**
 * Format a diff for human-readable display.
 */
export function formatDiff(diff) {
    const entryCount = diff.downloads.length + diff.uploads.length + diff.deletes.length;
    if (entryCount === 0) {
        return 'Everything is up to date.';
    }
    // Map an action onto its one-character marker: delete '-', create '+', update '~'.
    const symbolFor = (action) => (action === 'delete' ? '-' : action === 'create' ? '+' : '~');
    const lines = [];
    for (const entry of [...diff.downloads, ...diff.uploads]) {
        lines.push(` ${symbolFor(entry.action)} ${entry.path} (${entry.reason})`);
    }
    // Entries in the deletes bucket always render with the '-' marker.
    for (const entry of diff.deletes) {
        lines.push(` - ${entry.path} (${entry.reason})`);
    }
    // Summary line, preceded by a blank separator; KB value rounds up.
    lines.push('', `${entryCount} file(s), ${Math.ceil(diff.totalBytes / 1024)} KB to transfer`);
    return lines.join('\n');
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import type { LifestreamVaultClient } from '@lifestreamdynamics/vault-sdk';
import type { SyncConfig, FileState } from './types.js';
import { computePullDiff, computePushDiff, type SyncDiff, type SyncDiffEntry } from './diff.js';
/** Snapshot of sync progress passed to a ProgressCallback while a run executes. */
export interface SyncProgress {
    /** Stage of the sync pipeline currently executing. */
    phase: 'scanning' | 'computing' | 'transferring' | 'complete';
    /** Index of the operation currently in flight (starts at 1 during transfers). */
    current: number;
    /** Total number of planned operations (transfers + deletes). */
    total: number;
    /** Path of the file being transferred, when applicable. */
    currentFile?: string;
    /** Bytes moved so far in this run. */
    bytesTransferred: number;
    /** Total bytes expected to move for the whole run. */
    totalBytes: number;
}
/** Invoked repeatedly with progress snapshots while a sync operation runs. */
export type ProgressCallback = (progress: SyncProgress) => void;
/** Aggregate outcome of a pull or push operation. */
export interface SyncResult {
    /** Number of files successfully uploaded (push only). */
    filesUploaded: number;
    /** Number of files successfully downloaded (pull only). */
    filesDownloaded: number;
    /** Number of files successfully deleted. */
    filesDeleted: number;
    /** Total bytes transferred across all successful operations. */
    bytesTransferred: number;
    /** Per-file failures; an empty array means the run was clean. */
    errors: Array<{
        path: string;
        error: string;
    }>;
}
/**
 * Scan local directory recursively for .md files.
 * Returns a map of relative doc paths -> FileState.
 */
export declare function scanLocalFiles(localPath: string, ignorePatterns: string[]): Record<string, FileState>;
/**
 * Scan remote vault for document list.
 * Returns a map of doc paths -> FileState.
 * Note: listing provides no content hash, so remote FileState.hash may be empty.
 */
export declare function scanRemoteFiles(client: LifestreamVaultClient, vaultId: string, ignorePatterns: string[]): Promise<Record<string, FileState>>;
/**
 * Execute a pull operation: download remote changes to local.
 */
export declare function executePull(client: LifestreamVaultClient, config: SyncConfig, diff: SyncDiff, onProgress?: ProgressCallback): Promise<SyncResult>;
/**
 * Execute a push operation: upload local changes to remote.
 */
export declare function executePush(client: LifestreamVaultClient, config: SyncConfig, diff: SyncDiff, onProgress?: ProgressCallback): Promise<SyncResult>;
export { computePullDiff, computePushDiff, type SyncDiff, type SyncDiffEntry };
|
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core sync engine — performs pull and push operations.
|
|
3
|
+
*/
|
|
4
|
+
import fs from 'node:fs';
|
|
5
|
+
import path from 'node:path';
|
|
6
|
+
import { randomBytes } from 'node:crypto';
|
|
7
|
+
import { loadSyncState, saveSyncState, hashFileContent, buildRemoteFileState } from './state.js';
|
|
8
|
+
import { updateLastSync } from './config.js';
|
|
9
|
+
import { shouldIgnore } from './ignore.js';
|
|
10
|
+
import { computePullDiff, computePushDiff } from './diff.js';
|
|
11
|
+
/**
 * Scan local directory recursively for .md files.
 * Returns a map of relative doc paths -> FileState.
 */
export function scanLocalFiles(localPath, ignorePatterns) {
    const result = {};
    // Depth-first traversal; prefix accumulates the forward-slash relative path.
    const visit = (dir, prefix) => {
        if (!fs.existsSync(dir))
            return;
        for (const dirent of fs.readdirSync(dir, { withFileTypes: true })) {
            const relPath = prefix ? `${prefix}/${dirent.name}` : dirent.name;
            if (dirent.isDirectory()) {
                // Trailing slash marks a directory for the ignore matcher;
                // ignored directories are pruned entirely.
                if (!shouldIgnore(relPath + '/', ignorePatterns)) {
                    visit(path.join(dir, dirent.name), relPath);
                }
            }
            else if (dirent.isFile() && dirent.name.endsWith('.md') && !shouldIgnore(relPath, ignorePatterns)) {
                const absPath = path.join(dir, dirent.name);
                // Read content for hashing; stat supplies mtime and size.
                const content = fs.readFileSync(absPath);
                const stat = fs.statSync(absPath);
                result[relPath] = {
                    path: relPath,
                    hash: hashFileContent(content),
                    mtime: stat.mtime.toISOString(),
                    size: stat.size,
                };
            }
        }
    };
    visit(localPath, '');
    return result;
}
|
|
46
|
+
/**
 * Scan remote vault for document list.
 * Returns a map of doc paths -> FileState.
 */
export async function scanRemoteFiles(client, vaultId, ignorePatterns) {
    const files = {};
    const docs = await client.documents.list(vaultId);
    for (const doc of docs) {
        if (shouldIgnore(doc.path, ignorePatterns)) {
            continue;
        }
        files[doc.path] = {
            path: doc.path,
            // We don't have content hash from list; will use mtime for comparison
            hash: '',
            mtime: doc.fileModifiedAt,
            size: doc.sizeBytes,
        };
    }
    return files;
}
|
|
65
|
+
/**
 * Write a file atomically using a temp file + rename.
 * Prevents partial reads if the process is interrupted mid-write.
 *
 * Fix: the temp file is now removed if the write or rename throws
 * (e.g. disk full, permission error), so failed writes no longer leave
 * stray `*.tmp.*` files next to the target.
 *
 * @param targetPath  Final destination path.
 * @param content     Data to write (string or Buffer).
 * @param encoding    Encoding applied when content is a string (default 'utf-8').
 */
function atomicWriteFileSync(targetPath, content, encoding = 'utf-8') {
    // Random suffix avoids collisions between concurrent writers of the same target.
    const tmpFile = targetPath + '.tmp.' + randomBytes(4).toString('hex');
    try {
        fs.writeFileSync(tmpFile, content, encoding);
        // rename on the same filesystem atomically replaces the target.
        fs.renameSync(tmpFile, targetPath);
    }
    catch (err) {
        // Best-effort cleanup so a failed write doesn't leak the temp file.
        try {
            fs.unlinkSync(tmpFile);
        }
        catch {
            // Ignore — the temp file may never have been created.
        }
        throw err;
    }
}
|
|
74
|
+
/**
 * Shared sync operation executor used by both pull and push.
 * Handles result initialization, state loading, progress callbacks,
 * quota error handling, state saving, and lastSync update.
 *
 * handlers supplies the direction-specific behavior:
 *   - transfers / deletes: the diff entries to process
 *   - transferCounterKey: which SyncResult counter to bump ('filesUploaded' or 'filesDownloaded')
 *   - transferFile(entry, config): performs the transfer and resolves with the file content
 *   - deleteFile(entry, config): performs the delete
 *
 * Note: state is saved and lastSync updated even when individual entries
 * failed — failures are reported via result.errors, not by throwing.
 */
async function executeSyncOperation(config, diff, handlers, onProgress) {
    const result = {
        filesUploaded: 0,
        filesDownloaded: 0,
        filesDeleted: 0,
        bytesTransferred: 0,
        errors: [],
    };
    const state = loadSyncState(config.id);
    // Combined list is only used for progress totals.
    const allOps = [...handlers.transfers, ...handlers.deletes];
    let current = 0;
    for (const entry of handlers.transfers) {
        current++;
        onProgress?.({
            phase: 'transferring',
            current,
            total: allOps.length,
            currentFile: entry.path,
            bytesTransferred: result.bytesTransferred,
            totalBytes: diff.totalBytes,
        });
        try {
            // transferFile resolves with the transferred content so local and
            // remote state entries can be rebuilt from the same bytes.
            const content = await handlers.transferFile(entry, config);
            result[handlers.transferCounterKey]++;
            result.bytesTransferred += entry.sizeBytes;
            // Update state
            // NOTE(review): assumes content is a UTF-8 string — confirm transferFile contract.
            state.local[entry.path] = {
                path: entry.path,
                hash: hashFileContent(content),
                mtime: new Date().toISOString(),
                size: Buffer.byteLength(content, 'utf-8'),
            };
            state.remote[entry.path] = buildRemoteFileState(entry.path, content, new Date().toISOString());
        }
        catch (err) {
            const message = err instanceof Error ? err.message : String(err);
            if (isQuotaError(message)) {
                result.errors.push({ path: entry.path, error: message });
                break; // Stop immediately on quota errors
            }
            // Non-quota failures are recorded and the remaining transfers continue.
            result.errors.push({ path: entry.path, error: message });
        }
    }
    // Deletes always run, even after a quota-triggered break above
    // (deleting does not consume quota).
    for (const entry of handlers.deletes) {
        current++;
        onProgress?.({
            phase: 'transferring',
            current,
            total: allOps.length,
            currentFile: entry.path,
            bytesTransferred: result.bytesTransferred,
            totalBytes: diff.totalBytes,
        });
        try {
            await handlers.deleteFile(entry, config);
            result.filesDeleted++;
            // Drop the entry from both sides of the tracked state.
            delete state.local[entry.path];
            delete state.remote[entry.path];
        }
        catch (err) {
            const message = err instanceof Error ? err.message : String(err);
            result.errors.push({ path: entry.path, error: message });
        }
    }
    // Persist whatever progress was made, even on partial failure.
    saveSyncState(state);
    updateLastSync(config.id);
    onProgress?.({
        phase: 'complete',
        current: allOps.length,
        total: allOps.length,
        bytesTransferred: result.bytesTransferred,
        totalBytes: diff.totalBytes,
    });
    return result;
}
|
|
154
|
+
/**
 * Execute a pull operation: download remote changes to local.
 */
export async function executePull(client, config, diff, onProgress) {
    const handlers = {
        transfers: diff.downloads,
        deletes: diff.deletes,
        transferCounterKey: 'filesDownloaded',
        // Fetch the document, ensure its directory exists, then write atomically.
        transferFile: async (entry, cfg) => {
            const { content } = await retryWithBackoff(() => client.documents.get(cfg.vaultId, entry.path));
            const destination = path.join(cfg.localPath, entry.path);
            const destinationDir = path.dirname(destination);
            if (!fs.existsSync(destinationDir)) {
                fs.mkdirSync(destinationDir, { recursive: true });
            }
            atomicWriteFileSync(destination, content, 'utf-8');
            return content;
        },
        // Remove the local copy if it is still present on disk.
        deleteFile: async (entry, cfg) => {
            const target = path.join(cfg.localPath, entry.path);
            if (fs.existsSync(target)) {
                fs.unlinkSync(target);
            }
        },
    };
    return executeSyncOperation(config, diff, handlers, onProgress);
}
|
|
180
|
+
/**
 * Execute a push operation: upload local changes to remote.
 */
export async function executePush(client, config, diff, onProgress) {
    const handlers = {
        transfers: diff.uploads,
        deletes: diff.deletes,
        transferCounterKey: 'filesUploaded',
        // Read the local file and upload it, retrying transient failures.
        transferFile: async (entry, cfg) => {
            const source = path.join(cfg.localPath, entry.path);
            const content = fs.readFileSync(source, 'utf-8');
            await retryWithBackoff(() => client.documents.put(cfg.vaultId, entry.path, content));
            return content;
        },
        // Remove the remote document, retrying transient failures.
        deleteFile: async (entry, cfg) => {
            await retryWithBackoff(() => client.documents.delete(cfg.vaultId, entry.path));
        },
    };
    return executeSyncOperation(config, diff, handlers, onProgress);
}
|
|
199
|
+
/**
 * Retry a function with exponential backoff (max 3 retries).
 */
async function retryWithBackoff(fn, maxRetries = 3) {
    let lastError;
    for (let attempt = 0; ; attempt++) {
        try {
            return await fn();
        }
        catch (err) {
            lastError = err;
            const message = err instanceof Error ? err.message : String(err);
            // Quota and permission failures won't resolve by waiting — fail fast.
            if (isQuotaError(message) || isPermissionError(message)) {
                throw err;
            }
            // Out of attempts: surface the last transient failure.
            if (attempt >= maxRetries) {
                throw lastError;
            }
            await sleep(Math.pow(2, attempt) * 500); // 500ms, 1s, 2s
        }
    }
}
|
|
223
|
+
/** True when an error message indicates a storage-quota failure (not retryable). */
function isQuotaError(message) {
    const quotaPattern = /quota|storage limit|limit exceeded/i;
    return quotaPattern.test(message);
}
|
|
226
|
+
/** True when an error message indicates an authorization failure (not retryable). */
function isPermissionError(message) {
    const permissionPattern = /permission|forbidden|unauthorized|access denied/i;
    return permissionPattern.test(message);
}
|
|
229
|
+
/** Resolve after roughly `ms` milliseconds. */
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
|
232
|
+
// Re-export diff functions for convenience
|
|
233
|
+
export { computePullDiff, computePushDiff };
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/** Default patterns that are always ignored (VCS dirs, OS cruft, the CLI's own files). */
export declare const DEFAULT_IGNORE_PATTERNS: string[];
/**
 * Load ignore patterns from a .lsvault-ignore file.
 * Blank lines and lines starting with '#' are skipped.
 * Returns empty array if file doesn't exist.
 */
export declare function loadIgnoreFile(localPath: string): string[];
/**
 * Combine default patterns, config-level patterns, and .lsvault-ignore patterns.
 * The result is deduplicated.
 */
export declare function resolveIgnorePatterns(configIgnore: string[], localPath: string): string[];
/**
 * Check if a document path should be ignored.
 * The docPath should be a relative path using forward slashes.
 * Patterns ending in '/' match a directory and everything beneath it;
 * other patterns are matched as globs against the full path and the basename.
 */
export declare function shouldIgnore(docPath: string, patterns: string[]): boolean;
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Ignore pattern matching for sync operations.
|
|
3
|
+
* Supports .lsvault-ignore files and built-in default patterns.
|
|
4
|
+
*/
|
|
5
|
+
import fs from 'node:fs';
|
|
6
|
+
import path from 'node:path';
|
|
7
|
+
import { minimatch } from 'minimatch';
|
|
8
|
+
/** Default patterns that are always ignored. */
export const DEFAULT_IGNORE_PATTERNS = [
    // Version-control metadata directories
    '.git/',
    '.svn/',
    '.hg/',
    // Dependency tree — never synced
    'node_modules/',
    // Editor/OS cruft
    '*.tmp',
    '.DS_Store',
    'Thumbs.db',
    // The CLI's own state directory and config/ignore files
    '.lsvault/',
    '.lsvault-*',
];
|
|
20
|
+
/**
 * Load ignore patterns from a .lsvault-ignore file.
 * Returns empty array if file doesn't exist.
 */
export function loadIgnoreFile(localPath) {
    const ignoreFile = path.join(localPath, '.lsvault-ignore');
    if (!fs.existsSync(ignoreFile)) {
        return [];
    }
    try {
        const rawLines = fs.readFileSync(ignoreFile, 'utf-8').split('\n');
        const patterns = [];
        for (const rawLine of rawLines) {
            const trimmed = rawLine.trim();
            // Skip blank lines and '#' comment lines.
            if (trimmed.length > 0 && !trimmed.startsWith('#')) {
                patterns.push(trimmed);
            }
        }
        return patterns;
    }
    catch {
        // An unreadable file is treated the same as a missing one.
        return [];
    }
}
|
|
39
|
+
/**
 * Combine default patterns, config-level patterns, and .lsvault-ignore patterns.
 */
export function resolveIgnorePatterns(configIgnore, localPath) {
    // Set preserves insertion order while deduplicating:
    // defaults first, then config patterns, then file patterns.
    const merged = new Set(DEFAULT_IGNORE_PATTERNS);
    for (const pattern of configIgnore) {
        merged.add(pattern);
    }
    for (const pattern of loadIgnoreFile(localPath)) {
        merged.add(pattern);
    }
    return Array.from(merged);
}
|
|
48
|
+
/**
 * Check if a document path should be ignored.
 * The docPath should be a relative path using forward slashes.
 */
export function shouldIgnore(docPath, patterns) {
    // Basename is pattern-independent, so compute it once up front.
    const basename = path.posix.basename(docPath);
    return patterns.some((pattern) => {
        // Directory patterns (trailing '/') match the directory itself
        // and everything beneath it.
        if (pattern.endsWith('/')) {
            const dirPattern = pattern.slice(0, -1);
            if (docPath === dirPattern || docPath.startsWith(dirPattern + '/')) {
                return true;
            }
        }
        // Glob match against the full relative path.
        if (minimatch(docPath, pattern, { dot: true })) {
            return true;
        }
        // Basename match so file-level patterns apply in subdirectories
        // (e.g. ".DS_Store" matches "sub/.DS_Store").
        return minimatch(basename, pattern, { dot: true });
    });
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import type { LifestreamVaultClient } from '@lifestreamdynamics/vault-sdk';
import type { SyncConfig } from './types.js';
/** Options controlling remote-poller behavior and its callbacks. */
export interface PollerOptions {
    /** Patterns to ignore */
    ignorePatterns: string[];
    /** Poll interval in ms (default: 30000) */
    intervalMs?: number;
    /** Callback for log messages */
    onLog?: (message: string) => void;
    /** Callback for conflict log messages */
    onConflictLog?: (message: string) => void;
    /** Callback for errors */
    onError?: (error: Error) => void;
    /** Callback when a file is written locally (for watcher loop prevention) */
    onLocalWrite?: (docPath: string) => void;
}
/**
 * Creates and starts a remote poller for a sync configuration.
 * Returns a stop function.
 */
export declare function createRemotePoller(client: LifestreamVaultClient, config: SyncConfig, options: PollerOptions): {
    /** Stops the poller. */
    stop: () => void;
};
|