@gobi-ai/cli 0.6.1 → 0.6.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/sense.js +12 -22
- package/dist/commands/sync.js +618 -0
- package/dist/main.js +2 -0
- package/package.json +7 -3
- package/skills/gobi/SKILL.md +8 -2
- package/skills/gobi/references/sense.md +41 -0
- package/skills/gobi/references/sync.md +17 -0
package/dist/commands/sense.js
CHANGED
|
@@ -8,18 +8,13 @@ export function registerSenseCommand(program) {
|
|
|
8
8
|
sense
|
|
9
9
|
.command("activities")
|
|
10
10
|
.description("Fetch activity records within a time range.")
|
|
11
|
-
.requiredOption("--
|
|
12
|
-
.
|
|
13
|
-
.option("--end-time <iso>", "End of time range (ISO 8601, inclusive); requires --start-time")
|
|
11
|
+
.requiredOption("--start-time <iso>", "Start of time range (ISO 8601 UTC, e.g. 2026-03-20T00:00:00Z)")
|
|
12
|
+
.requiredOption("--end-time <iso>", "End of time range (ISO 8601 UTC, e.g. 2026-03-20T23:59:59Z)")
|
|
14
13
|
.action(async (opts) => {
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
if (opts.startTime)
|
|
20
|
-
params.startTime = opts.startTime;
|
|
21
|
-
if (opts.endTime)
|
|
22
|
-
params.endTime = opts.endTime;
|
|
14
|
+
const params = {
|
|
15
|
+
startTime: opts.startTime,
|
|
16
|
+
endTime: opts.endTime,
|
|
17
|
+
};
|
|
23
18
|
const resp = (await apiGet("/app/activities", params));
|
|
24
19
|
const activities = (resp.activities || []);
|
|
25
20
|
const pagination = (resp.pagination || {});
|
|
@@ -46,18 +41,13 @@ export function registerSenseCommand(program) {
|
|
|
46
41
|
sense
|
|
47
42
|
.command("transcriptions")
|
|
48
43
|
.description("Fetch transcription records within a time range.")
|
|
49
|
-
.requiredOption("--
|
|
50
|
-
.
|
|
51
|
-
.option("--end-time <iso>", "End of time range (ISO 8601, inclusive); requires --start-time")
|
|
44
|
+
.requiredOption("--start-time <iso>", "Start of time range (ISO 8601 UTC, e.g. 2026-03-20T00:00:00Z)")
|
|
45
|
+
.requiredOption("--end-time <iso>", "End of time range (ISO 8601 UTC, e.g. 2026-03-20T23:59:59Z)")
|
|
52
46
|
.action(async (opts) => {
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
if (opts.startTime)
|
|
58
|
-
params.startTime = opts.startTime;
|
|
59
|
-
if (opts.endTime)
|
|
60
|
-
params.endTime = opts.endTime;
|
|
47
|
+
const params = {
|
|
48
|
+
startTime: opts.startTime,
|
|
49
|
+
endTime: opts.endTime,
|
|
50
|
+
};
|
|
61
51
|
const resp = (await apiGet("/app/transcriptions", params));
|
|
62
52
|
const transcriptions = (resp.transcriptions || []);
|
|
63
53
|
const pagination = (resp.pagination || {});
|
|
@@ -0,0 +1,618 @@
|
|
|
1
|
+
import { createHash } from "crypto";
|
|
2
|
+
import { existsSync, readFileSync, rmSync, mkdirSync, readdirSync, statSync } from "fs";
|
|
3
|
+
import { writeFile } from "fs/promises";
|
|
4
|
+
import { join, dirname, extname, resolve as pathResolve } from "path";
|
|
5
|
+
import Database from "better-sqlite3";
|
|
6
|
+
import inquirer from "inquirer";
|
|
7
|
+
import ignore from "ignore";
|
|
8
|
+
import trash from "trash";
|
|
9
|
+
import { WEBDRIVE_BASE_URL } from "../constants.js";
|
|
10
|
+
import { getValidToken } from "../auth/manager.js";
|
|
11
|
+
import { GobiError } from "../errors.js";
|
|
12
|
+
import { getVaultSlug } from "./init.js";
|
|
13
|
+
import { isJsonMode, jsonOut } from "./utils.js";
|
|
14
|
+
// ─── Constants ────────────────────────────────────────────────────────────────
// Directory/file names that are never synced, regardless of whitelist patterns.
const SYNC_IGNORE_NAMES = new Set([
  ".gobi",
  ".DS_Store",
  "Thumbs.db",
  "desktop.ini",
  ".git",
  ".gitignore",
  ".gitattributes",
  ".vscode",
  ".idea",
  ".cursor",
  ".claude",
  "node_modules",
  ".npm",
  ".yarn",
  "__pycache__",
  ".env",
  "venv",
  ".venv",
  ".Spotlight-V100",
  ".Trashes",
  ".fseventsd",
]);
// File extensions (lowercased) that are never synced.
const SYNC_IGNORE_EXTENSIONS = new Set([".pyc", ".tmp", ".temp"]);
// ─── Ignore Patterns ──────────────────────────────────────────────────────────
/**
 * Returns true when any slash-separated segment of `relativePath` is a
 * hard-blocked name, carries a blocked extension, or is a macOS resource
 * fork ("._" prefix). Applied before whitelist matching.
 */
export function isSyncIgnored(relativePath) {
  return relativePath.split("/").some((segment) => {
    if (SYNC_IGNORE_NAMES.has(segment))
      return true;
    const ext = extname(segment).toLowerCase();
    if (ext && SYNC_IGNORE_EXTENSIONS.has(ext))
      return true;
    return segment.startsWith("._"); // macOS resource forks
  });
}
|
|
53
|
+
// ─── State (SQLite) ───────────────────────────────────────────────────────────
// Baseline sync state used before anything has been persisted.
// NOTE(review): `patterns` and `hashCache` are shared mutable references;
// code that spreads this object should substitute fresh containers.
const EMPTY_STATE = {
  cursor: null,        // last server cursor seen (null = first sync)
  syncfilesHash: null, // md5 of the syncfiles content at last sync
  patterns: [],        // whitelist patterns registered at last sync
  hashCache: {},       // relPath -> { hash, mtime, size }
};
|
|
60
|
+
/**
 * Opens (creating if needed) the sync-state database at `<gobiDir>/sync.db`
 * and ensures its schema exists. The caller is responsible for closing it.
 */
function openDb(gobiDir) {
  const db = new Database(join(gobiDir, "sync.db"));
  // WAL keeps readers from blocking the writer during a sync run.
  db.pragma("journal_mode = WAL");
  const schema = `
    CREATE TABLE IF NOT EXISTS sync_meta (
      key TEXT PRIMARY KEY,
      value TEXT NOT NULL
    );
    CREATE TABLE IF NOT EXISTS hash_cache (
      path TEXT PRIMARY KEY,
      hash TEXT NOT NULL,
      mtime REAL NOT NULL,
      size INTEGER NOT NULL
    );
  `;
  db.exec(schema);
  return db;
}
|
|
77
|
+
/**
 * Loads persisted sync state from `<gobiDir>/sync.db`.
 *
 * Also performs a one-time migration from the legacy `sync_state.json`:
 * if that file exists its contents are imported into SQLite and the JSON
 * file removed; if it is unreadable it is discarded and empty state is
 * returned.
 *
 * Fix: the empty-state fallbacks previously returned references to the
 * shared mutable `EMPTY_STATE.patterns` array (via `{ ...EMPTY_STATE }`
 * and the `metaMap.patterns` fallback), so in-place mutation of a loaded
 * state's `patterns` could corrupt the module-level default. All
 * fallbacks now produce fresh containers.
 */
export function loadSyncState(gobiDir) {
  const jsonPath = join(gobiDir, "sync_state.json");
  if (existsSync(jsonPath)) {
    try {
      const parsed = JSON.parse(readFileSync(jsonPath, "utf-8"));
      const state = {
        cursor: parsed.cursor ?? null,
        syncfilesHash: parsed.syncfilesHash ?? null,
        patterns: parsed.patterns ?? [],
        hashCache: parsed.hashCache ?? {},
      };
      saveSyncState(gobiDir, state);
      rmSync(jsonPath);
      return state;
    }
    catch {
      // Corrupt legacy file: drop it and start from scratch.
      rmSync(jsonPath, { force: true });
      return { ...EMPTY_STATE, patterns: [], hashCache: {} };
    }
  }
  const db = openDb(gobiDir);
  try {
    const meta = db.prepare("SELECT key, value FROM sync_meta").all();
    const metaMap = Object.fromEntries(meta.map((r) => [r.key, r.value]));
    const rows = db.prepare("SELECT path, hash, mtime, size FROM hash_cache").all();
    const hashCache = {};
    for (const row of rows) {
      hashCache[row.path] = { hash: row.hash, mtime: row.mtime, size: row.size };
    }
    return {
      // cursor is stored as a string ("" when unset); "" is falsy and
      // falls back to null here.
      cursor: metaMap.cursor ? Number(metaMap.cursor) : EMPTY_STATE.cursor,
      syncfilesHash: metaMap.syncfiles_hash || EMPTY_STATE.syncfilesHash,
      patterns: metaMap.patterns ? JSON.parse(metaMap.patterns) : [...EMPTY_STATE.patterns],
      hashCache,
    };
  }
  finally {
    db.close();
  }
}
|
|
118
|
+
/**
 * Persists sync state: all metadata keys are upserted and the hash cache
 * is rewritten wholesale, inside a single transaction so a crash cannot
 * leave the two tables inconsistent.
 */
export function saveSyncState(gobiDir, state) {
  const db = openDb(gobiDir);
  try {
    const writeAll = db.transaction(() => {
      const upsert = db.prepare("INSERT OR REPLACE INTO sync_meta (key, value) VALUES (?, ?)");
      // null cursor is stored as "" (see loadSyncState's falsy check).
      upsert.run("cursor", state.cursor !== null ? String(state.cursor) : "");
      upsert.run("syncfiles_hash", state.syncfilesHash ?? "");
      upsert.run("patterns", JSON.stringify(state.patterns));
      // Replace the cache rather than diffing it.
      db.exec("DELETE FROM hash_cache");
      const insert = db.prepare("INSERT INTO hash_cache (path, hash, mtime, size) VALUES (?, ?, ?, ?)");
      for (const [path, entry] of Object.entries(state.hashCache)) {
        insert.run(path, entry.hash, entry.mtime, entry.size);
      }
    });
    writeAll();
  }
  finally {
    db.close();
  }
}
|
|
137
|
+
// ─── Syncfiles ────────────────────────────────────────────────────────────────
/**
 * Reads the `.gobi/syncfiles` whitelist file.
 *
 * Returns the trimmed, non-empty, non-comment pattern lines plus an md5
 * of the raw content (used to detect edits between runs). A missing file
 * yields no patterns and an empty hash.
 */
export function readSyncfiles(gobiDir) {
  const syncfilesPath = join(gobiDir, "syncfiles");
  if (!existsSync(syncfilesPath)) {
    return { patterns: [], contentHash: "" };
  }
  const content = readFileSync(syncfilesPath, "utf-8");
  const lines = content.split("\n").map((line) => line.trim());
  const patterns = lines.filter((line) => line.length > 0 && !line.startsWith("#"));
  return { patterns, contentHash: createHash("md5").update(content).digest("hex") };
}
|
|
151
|
+
/**
 * Builds a predicate answering "is this path selected for sync?" from
 * gitignore-style whitelist patterns. With no patterns, nothing matches.
 */
export function buildWhitelistMatcher(patterns) {
  if (patterns.length === 0) {
    return () => false;
  }
  const matcher = ignore().add(patterns);
  // The `ignore` library expects POSIX separators.
  return (filePath) => matcher.ignores(filePath.replace(/\\/g, "/"));
}
|
|
157
|
+
/**
 * Diffs two pattern lists into { added, removed }, preserving each
 * list's original ordering.
 */
export function computeSyncfilesChanges(prevPatterns, currPatterns) {
  const prev = new Set(prevPatterns);
  const curr = new Set(currPatterns);
  const added = currPatterns.filter((pattern) => !prev.has(pattern));
  const removed = prevPatterns.filter((pattern) => !curr.has(pattern));
  return { added, removed };
}
|
|
165
|
+
// ─── Hashing ──────────────────────────────────────────────────────────────────
|
|
166
|
+
export function md5Hex(content) {
|
|
167
|
+
return createHash("md5").update(content).digest("hex");
|
|
168
|
+
}
|
|
169
|
+
function hashFile(absPath, relPath, cache) {
|
|
170
|
+
const stat = statSync(absPath);
|
|
171
|
+
const cached = cache[relPath];
|
|
172
|
+
if (cached && cached.mtime === stat.mtimeMs && cached.size === stat.size) {
|
|
173
|
+
return cached;
|
|
174
|
+
}
|
|
175
|
+
const content = readFileSync(absPath);
|
|
176
|
+
return { hash: md5Hex(content), mtime: stat.mtimeMs, size: stat.size };
|
|
177
|
+
}
|
|
178
|
+
// ─── File Walking ─────────────────────────────────────────────────────────────
/**
 * Recursively appends to `results` the slash-separated relative paths of
 * all regular files under `dir`, pruning anything isSyncIgnored matches
 * (ignored directories are never descended into).
 */
function walkDir(dir, base, results) {
  for (const entry of readdirSync(dir, { withFileTypes: true })) {
    const relPath = base ? `${base}/${entry.name}` : entry.name;
    if (!isSyncIgnored(relPath)) {
      if (entry.isDirectory()) {
        walkDir(join(dir, entry.name), relPath, results);
      }
      else if (entry.isFile()) {
        // Symlinks and other special entries are deliberately skipped.
        results.push(relPath);
      }
    }
  }
}
|
|
192
|
+
/**
 * Walks the vault and returns [{ path, hash, mtime }] for every
 * whitelisted, non-ignored file. Refreshes `cache` in place with the
 * hash entries it computes; files that vanish mid-walk are skipped.
 */
function walkLocalFiles(vaultDir, cache, isWhitelisted) {
  const candidates = [];
  walkDir(vaultDir, "", candidates);
  const files = [];
  for (const relPath of candidates) {
    if (!isWhitelisted(relPath)) {
      continue;
    }
    try {
      const entry = hashFile(join(vaultDir, relPath), relPath, cache);
      files.push({ path: relPath, hash: entry.hash, mtime: entry.mtime });
      cache[relPath] = entry; // update cache in-place
    }
    catch {
      // File may have been removed between walk and hash; skip it.
    }
  }
  return files;
}
|
|
212
|
+
// ─── HTTP ─────────────────────────────────────────────────────────────────────
/**
 * Builds the Webdrive file endpoint URL, percent-encoding each path
 * segment individually so "/" separators survive intact.
 */
function fileUrl(baseUrl, vaultSlug, filePath) {
  const encoded = filePath.split("/").map((segment) => encodeURIComponent(segment)).join("/");
  return `${baseUrl}/api/v1/vaults/${vaultSlug}/files/${encoded}`;
}
|
|
220
|
+
/**
 * Downloads a file's content from Webdrive as a Buffer.
 * Throws on any non-2xx response, including the response text.
 */
async function webdriveGet(baseUrl, vaultSlug, filePath, token) {
  const res = await fetch(fileUrl(baseUrl, vaultSlug, filePath), {
    headers: { Authorization: `Bearer ${token}` },
  });
  if (!res.ok) {
    throw new Error(`HTTP ${res.status}: ${await res.text()}`);
  }
  const body = await res.arrayBuffer();
  return Buffer.from(body);
}
|
|
229
|
+
/**
 * Uploads file content to Webdrive, sending its md5 for server-side
 * integrity checking. Returns the mutation cursor from the `x-cursor`
 * response header, or null when the header is absent.
 */
async function webdrivePut(baseUrl, vaultSlug, filePath, content, hash, token) {
  const res = await fetch(fileUrl(baseUrl, vaultSlug, filePath), {
    method: "PUT",
    headers: {
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/octet-stream",
      "X-Content-MD5": hash,
    },
    // fetch accepts typed arrays but not Node Buffers in all runtimes.
    body: new Uint8Array(content),
  });
  if (!res.ok) {
    throw new Error(`HTTP ${res.status}: ${await res.text()}`);
  }
  const cursor = res.headers.get("x-cursor");
  return cursor ? Number(cursor) : null;
}
|
|
245
|
+
/**
 * Deletes a remote file. A 404 is treated as success (already gone).
 * Returns the mutation cursor from the `x-cursor` header, or null.
 */
async function webdriveDelete(baseUrl, vaultSlug, filePath, token) {
  const res = await fetch(fileUrl(baseUrl, vaultSlug, filePath), {
    method: "DELETE",
    headers: { Authorization: `Bearer ${token}` },
  });
  const acceptable = res.ok || res.status === 404;
  if (!acceptable) {
    throw new Error(`HTTP ${res.status}: ${await res.text()}`);
  }
  const cursor = res.headers.get("x-cursor");
  return cursor ? Number(cursor) : null;
}
|
|
256
|
+
/**
 * POSTs the sync manifest and returns the server's action plan (JSON).
 * A 409 means the client cursor is stale; it is surfaced as a GobiError
 * with code SYNC_CURSOR_INVALID and `status` 409 so callers can reset
 * state and retry.
 */
async function webdriveSync(baseUrl, vaultSlug, body, token) {
  const res = await fetch(`${baseUrl}/api/v1/vaults/${vaultSlug}/sync`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(body),
  });
  if (res.ok) {
    return await res.json();
  }
  if (res.status === 409) {
    const err = new GobiError("Sync cursor invalid", "SYNC_CURSOR_INVALID");
    err.status = 409;
    throw err;
  }
  throw new Error(`Sync request failed: HTTP ${res.status}: ${await res.text()}`);
}
|
|
276
|
+
// ─── Conflict Resolution ──────────────────────────────────────────────────────
/** Formats an epoch-milliseconds timestamp for display in the user's locale. */
function formatDate(ms) {
  return new Date(ms).toLocaleString();
}
|
|
280
|
+
/**
 * Decides how to resolve a single file conflict.
 *
 * Non-interactive strategies ("server" | "client" | "skip") are returned
 * as-is. "ask" prompts the user interactively — except in JSON mode,
 * where prompting is impossible, so the conflict is skipped.
 */
async function resolveConflict(filePath, strategy, conflict, jsonMode) {
  if (strategy === "server" || strategy === "client" || strategy === "skip") {
    return strategy;
  }
  // strategy === "ask"
  if (jsonMode) {
    // Can't show interactive prompt in JSON mode
    return "skip";
  }
  const serverTime = conflict ? formatDate(conflict.serverMtime) : "unknown";
  const clientTime = conflict ? formatDate(conflict.clientMtime) : "unknown";
  const answers = await inquirer.prompt([
    {
      type: "list",
      name: "choice",
      message: `Conflict: ${filePath}\n Server modified: ${serverTime}\n Local modified: ${clientTime}`,
      choices: [
        { name: "Keep server version (overwrite local)", value: "server" },
        { name: "Keep local version (skip download)", value: "client" },
        { name: "Skip (resolve later)", value: "skip" },
      ],
    },
  ]);
  return answers.choice;
}
|
|
308
|
+
// ─── Path Filtering ──────────────────────────────────────────────────────────
/**
 * Returns true if filePath is at or under any of the specified paths.
 * An empty paths array matches everything. Backslashes are normalized to
 * forward slashes and trailing slashes on filter paths are ignored.
 */
function matchesPaths(filePath, paths) {
  if (paths.length === 0) {
    return true;
  }
  const normalized = filePath.replace(/\\/g, "/");
  return paths.some((p) => {
    const prefix = p.replace(/\\/g, "/").replace(/\/+$/, "");
    return normalized === prefix || normalized.startsWith(`${prefix}/`);
  });
}
|
|
324
|
+
// ─── Core Sync ────────────────────────────────────────────────────────────────
/**
 * Sends the sync manifest (cursor, syncfiles diff, local file list, and
 * direction flags) to the server and returns its action plan.
 */
async function performSync(baseUrl, vaultSlug, state, syncfilesChanges, localFiles, opts, token) {
  const manifest = {
    cursor: state.cursor,
    syncfilesChanges,
    clientFiles: localFiles,
    uploadOnly: opts.uploadOnly,
    downloadOnly: opts.downloadOnly,
  };
  return webdriveSync(baseUrl, vaultSlug, manifest, token);
}
|
|
335
|
+
/**
 * Runs one full sync cycle against Gobi Webdrive.
 *
 * Steps: load state → (optionally) reset for --full → read whitelist →
 * scan local files → push offline deletions → POST manifest (retrying
 * once on a stale-cursor 409 with fully reset state) → apply each server
 * action (upload/download/delete_local/conflict) → persist state →
 * print or emit a summary.
 *
 * opts: { vaultSlug, dir, uploadOnly, downloadOnly, conflict, dryRun,
 *         full, paths, jsonMode, webdriveUrl?, authToken? }
 * Sets process.exitCode = 1 (non-JSON mode) when any action errored.
 */
export async function runSync(opts) {
  const { vaultSlug, dir: vaultDir, jsonMode } = opts;
  const baseUrl = opts.webdriveUrl ?? WEBDRIVE_BASE_URL;
  const gobiDir = join(vaultDir, ".gobi");
  mkdirSync(gobiDir, { recursive: true });
  const state = loadSyncState(gobiDir);
  // --full: treat this run as a first-time sync (re-check every file against the server)
  if (opts.full) {
    state.cursor = null;
    state.hashCache = {};
    if (!jsonMode)
      console.log("Full sync: ignoring cursor and hash cache.");
  }
  const token = opts.authToken ?? (await getValidToken());
  // Read syncfiles whitelist
  const { patterns: currPatterns, contentHash: currSyncfilesHash } = readSyncfiles(gobiDir);
  if (currPatterns.length === 0 && !jsonMode) {
    console.warn("Warning: No patterns found in .gobi/syncfiles. Nothing will be synced.\n" +
      "Add gitignore-style patterns to .gobi/syncfiles to select files for sync.");
  }
  const isWhitelisted = buildWhitelistMatcher(currPatterns);
  const syncfilesChanges = computeSyncfilesChanges(state.patterns, currPatterns);
  // Walk local files (only whitelisted, non-ignored)
  if (!jsonMode)
    process.stdout.write("Scanning local files...");
  const localFiles = walkLocalFiles(vaultDir, state.hashCache, isWhitelisted);
  if (!jsonMode)
    console.log(` ${localFiles.length} file(s) found.`);
  const localPathSet = new Set(localFiles.map((f) => f.path));
  // Highest cursor observed from individual PUT/DELETE mutations.
  let maxMutationCursor = null;
  const noteCursor = (cursor) => {
    if (cursor !== null && (maxMutationCursor === null || cursor > maxMutationCursor)) {
      maxMutationCursor = cursor;
    }
  };
  // Detect and send offline deletions: cached but no longer on disk.
  for (const cachedPath of Object.keys(state.hashCache)) {
    if (!localPathSet.has(cachedPath) && !existsSync(join(vaultDir, cachedPath))) {
      // In download-only mode, skip the DELETE: the server's client_deleted_paths
      // mechanism will re-download the file instead (server-side download_only path).
      if (opts.downloadOnly)
        continue;
      if (!opts.jsonMode)
        console.log(` Deleting remote (offline deletion): ${cachedPath}`);
      if (!opts.dryRun) {
        try {
          noteCursor(await webdriveDelete(baseUrl, vaultSlug, cachedPath, token));
        }
        catch (err) {
          if (!jsonMode)
            console.error(` Error deleting remote ${cachedPath}: ${err.message}`);
        }
        delete state.hashCache[cachedPath];
      }
    }
  }
  // In dry-run mode, include cached-but-locally-deleted files in clientFiles so the
  // server does not propagate the deletion (the whole point of dry-run is no changes).
  let clientFilesForSync = localFiles;
  if (opts.dryRun) {
    const phantom = Object.entries(state.hashCache)
      .filter(([p]) => !localPathSet.has(p) && !existsSync(join(vaultDir, p)))
      .map(([path, entry]) => ({ path, hash: entry.hash, mtime: entry.mtime }));
    clientFilesForSync = [...localFiles, ...phantom];
  }
  // POST sync request
  if (!jsonMode)
    process.stdout.write("Syncing with server...");
  let syncResp;
  try {
    syncResp = await performSync(baseUrl, vaultSlug, state, syncfilesChanges, clientFilesForSync, opts, token);
  }
  catch (err) {
    if (!(err instanceof GobiError && err.status === 409)) {
      throw err;
    }
    // Cursor is stale (server wiped the vault or syncfiles are missing).
    // Reset all state including patterns so the retry re-registers current patterns
    // as "added" — this allows vault resurrection after the server deleted an empty vault.
    if (!jsonMode)
      console.log("Sync cursor stale, resetting state and retrying...");
    state.cursor = null;
    state.hashCache = {};
    state.patterns = []; // reset so retry sends currPatterns as syncfilesChanges.added
    const retryChanges = computeSyncfilesChanges([], currPatterns);
    syncResp = await performSync(baseUrl, vaultSlug, state, retryChanges, clientFilesForSync, opts, token);
  }
  if (!jsonMode)
    console.log(` ${syncResp.files.length} action(s).`);
  // Process actions
  let uploaded = 0, downloaded = 0, deletedLocally = 0, conflicts = 0, skipped = 0, errors = 0;
  const errorDetails = [];
  const conflictWarnings = [];
  const filterPaths = opts.paths ?? [];
  // Record a fresh hash-cache entry for a file just written/uploaded.
  const recordHash = (relPath, absPath, hash, size) => {
    state.hashCache[relPath] = { hash, mtime: statSync(absPath).mtimeMs, size };
  };
  // Fetch a server file, write it locally, and update the hash cache.
  const downloadToDisk = async (relPath, absPath) => {
    const content = await webdriveGet(baseUrl, vaultSlug, relPath, token);
    mkdirSync(dirname(absPath), { recursive: true });
    await writeFile(absPath, content);
    recordHash(relPath, absPath, md5Hex(content), content.length);
  };
  for (const entry of syncResp.files) {
    // --path: skip actions for files outside the specified scope
    if (!matchesPaths(entry.path, filterPaths))
      continue;
    try {
      const absPath = join(vaultDir, entry.path);
      if (entry.action === "upload") {
        if (opts.downloadOnly)
          continue;
        if (opts.dryRun) {
          if (!jsonMode)
            console.log(` [dry-run] would upload: ${entry.path}`);
          uploaded++;
          continue;
        }
        const content = readFileSync(absPath);
        const hash = md5Hex(content);
        noteCursor(await webdrivePut(baseUrl, vaultSlug, entry.path, content, hash, token));
        recordHash(entry.path, absPath, hash, content.length);
        if (!jsonMode)
          console.log(` Uploaded: ${entry.path}`);
        uploaded++;
      }
      else if (entry.action === "download") {
        if (opts.uploadOnly)
          continue;
        if (opts.dryRun) {
          if (!jsonMode)
            console.log(` [dry-run] would download: ${entry.path}`);
          downloaded++;
          continue;
        }
        await downloadToDisk(entry.path, absPath);
        if (!jsonMode)
          console.log(` Downloaded: ${entry.path}`);
        downloaded++;
      }
      else if (entry.action === "delete_local") {
        if (opts.uploadOnly)
          continue;
        if (!existsSync(absPath))
          continue;
        if (opts.dryRun) {
          if (!jsonMode)
            console.log(` [dry-run] would delete local: ${entry.path}`);
          deletedLocally++;
          continue;
        }
        await trash(absPath);
        delete state.hashCache[entry.path];
        if (!jsonMode)
          console.log(` Deleted local (moved to trash): ${entry.path}`);
        deletedLocally++;
      }
      else if (entry.action === "conflict") {
        conflicts++;
        const choice = await resolveConflict(entry.path, opts.conflict, entry.conflict, jsonMode);
        if (choice === "server") {
          if (opts.dryRun) {
            if (!jsonMode)
              console.log(` [dry-run] would download (conflict→server): ${entry.path}`);
          }
          else {
            await downloadToDisk(entry.path, absPath);
            if (!jsonMode)
              console.log(` Conflict resolved (server): ${entry.path}`);
          }
          downloaded++;
        }
        else if (choice === "client") {
          if (!jsonMode)
            console.log(` Conflict resolved (local kept): ${entry.path}`);
        }
        else {
          skipped++;
          const warning = `Conflict skipped (resolve later): ${entry.path}`;
          conflictWarnings.push(warning);
          if (!jsonMode)
            console.log(` ${warning}`);
        }
      }
    }
    catch (err) {
      errors++;
      const msg = err.message;
      errorDetails.push({ path: entry.path, action: entry.action, error: msg });
      if (!jsonMode)
        console.error(` Error [${entry.action}] ${entry.path}: ${msg}`);
    }
  }
  // Persist state (always, even on partial failures)
  const finalCursor = Math.max(syncResp.cursor, maxMutationCursor !== null ? maxMutationCursor : 0);
  state.cursor = finalCursor;
  state.syncfilesHash = currSyncfilesHash;
  // If the server returned an empty syncfilesHash the vault was deleted server-side
  // (empty patterns path). Reset patterns so the next sync re-registers them as "added",
  // which lets the 409 retry resurrect the vault.
  state.patterns = syncResp.syncfilesHash === "" ? [] : currPatterns;
  saveSyncState(gobiDir, state);
  // Output summary
  const result = {
    uploaded,
    downloaded,
    deletedLocally,
    conflicts,
    skipped,
    errors,
    cursor: finalCursor,
    errorDetails,
  };
  if (jsonMode) {
    if (conflictWarnings.length > 0 && opts.conflict === "ask") {
      result.conflictWarning =
        "Interactive conflict resolution skipped in JSON mode. Conflicts were skipped.";
    }
    jsonOut(result);
  }
  else {
    const conflictSuffix = skipped > 0 ? ` (${skipped} skipped)` : "";
    console.log("\nSync complete.");
    console.log(` Uploaded: ${uploaded}`);
    console.log(` Downloaded: ${downloaded}`);
    console.log(` Deleted local: ${deletedLocally}`);
    console.log(` Conflicts: ${conflicts}${conflictSuffix}`);
    console.log(` Errors: ${errors}`);
    if (errors > 0) {
      process.exitCode = 1;
    }
  }
}
|
|
584
|
+
// ─── Commander Registration ───────────────────────────────────────────────────
/**
 * Registers the `gobi sync` command on the given Commander program.
 * Validates mutually exclusive direction flags and the conflict strategy
 * before delegating to runSync.
 */
export function registerSyncCommand(program) {
  const collect = (value, previous) => previous.concat(value);
  program
    .command("sync")
    .description("Sync local vault files with Gobi Webdrive.")
    .option("--upload-only", "Only upload local changes to server")
    .option("--download-only", "Only download server changes to local")
    .option("--conflict <strategy>", "Conflict resolution strategy: ask|server|client|skip", "ask")
    .option("--dir <path>", "Local vault directory (default: current directory)")
    .option("--dry-run", "Preview changes without making them")
    .option("--full", "Full sync: ignore cursor and hash cache, re-check every file")
    .option("--path <path>", "Restrict sync to a specific file or folder (repeatable)", collect, [])
    // Plain function (not arrow) so Commander binds `this` to the command,
    // which isJsonMode inspects.
    .action(async function (opts) {
      if (opts.uploadOnly && opts.downloadOnly) {
        throw new GobiError("--upload-only and --download-only are mutually exclusive.", "INVALID_OPTION");
      }
      const validStrategies = ["ask", "server", "client", "skip"];
      if (!validStrategies.includes(opts.conflict)) {
        throw new GobiError(`Invalid --conflict value "${opts.conflict}". Use: ask|server|client|skip`, "INVALID_OPTION");
      }
      await runSync({
        vaultSlug: getVaultSlug(),
        dir: opts.dir ? pathResolve(opts.dir) : process.cwd(),
        uploadOnly: !!opts.uploadOnly,
        downloadOnly: !!opts.downloadOnly,
        conflict: opts.conflict,
        dryRun: !!opts.dryRun,
        full: !!opts.full,
        paths: opts.path ?? [],
        jsonMode: isJsonMode(this),
      });
    });
}
|
package/dist/main.js
CHANGED
|
@@ -8,6 +8,7 @@ import { registerSpaceCommand } from "./commands/space.js";
|
|
|
8
8
|
import { registerBrainCommand } from "./commands/brain.js";
|
|
9
9
|
import { registerSessionsCommand } from "./commands/sessions.js";
|
|
10
10
|
import { registerSenseCommand } from "./commands/sense.js";
|
|
11
|
+
import { registerSyncCommand } from "./commands/sync.js";
|
|
11
12
|
const require = createRequire(import.meta.url);
|
|
12
13
|
const { version } = require("../package.json");
|
|
13
14
|
const SKIP_BANNER_COMMANDS = new Set(["auth", "init"]);
|
|
@@ -32,6 +33,7 @@ export async function cli() {
|
|
|
32
33
|
registerBrainCommand(program);
|
|
33
34
|
registerSessionsCommand(program);
|
|
34
35
|
registerSenseCommand(program);
|
|
36
|
+
registerSyncCommand(program);
|
|
35
37
|
// Propagate helpWidth to all subcommands
|
|
36
38
|
const helpWidth = process.stdout.columns || 200;
|
|
37
39
|
for (const cmd of program.commands) {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@gobi-ai/cli",
|
|
3
|
-
"version": "0.6.
|
|
3
|
+
"version": "0.6.3",
|
|
4
4
|
"description": "CLI client for the Gobi collaborative knowledge platform",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"type": "module",
|
|
@@ -37,17 +37,21 @@
|
|
|
37
37
|
"build": "npm run clean && tsc && chmod +x dist/index.js",
|
|
38
38
|
"dev": "tsx src/index.ts",
|
|
39
39
|
"start": "node dist/index.js",
|
|
40
|
-
"test": "node --test dist/*.test.js",
|
|
40
|
+
"test": "node --test dist/*.test.js dist/**/*.test.js",
|
|
41
41
|
"generate-skill-docs": "npm run build && npx tsx skills/gobi/scripts/generate-docs.ts",
|
|
42
42
|
"prepare": "npm run build",
|
|
43
43
|
"prepublishOnly": "npm run build"
|
|
44
44
|
},
|
|
45
45
|
"dependencies": {
|
|
46
|
+
"better-sqlite3": "^12.8.0",
|
|
46
47
|
"commander": "^12.1.0",
|
|
48
|
+
"ignore": "^7.0.5",
|
|
47
49
|
"inquirer": "^12.3.0",
|
|
48
|
-
"js-yaml": "^4.1.0"
|
|
50
|
+
"js-yaml": "^4.1.0",
|
|
51
|
+
"trash": "^10.1.1"
|
|
49
52
|
},
|
|
50
53
|
"devDependencies": {
|
|
54
|
+
"@types/better-sqlite3": "^7.6.13",
|
|
51
55
|
"@types/inquirer": "^9.0.7",
|
|
52
56
|
"@types/js-yaml": "^4.0.9",
|
|
53
57
|
"@types/node": "^22.0.0",
|
package/skills/gobi/SKILL.md
CHANGED
|
@@ -10,12 +10,12 @@ description: >-
|
|
|
10
10
|
allowed-tools: Bash(gobi:*)
|
|
11
11
|
metadata:
|
|
12
12
|
author: gobi-ai
|
|
13
|
-
version: "0.6.
|
|
13
|
+
version: "0.6.3"
|
|
14
14
|
---
|
|
15
15
|
|
|
16
16
|
# gobi-cli
|
|
17
17
|
|
|
18
|
-
A CLI client for the Gobi collaborative knowledge platform (v0.6.1).
|
|
18
|
+
A CLI client for the Gobi collaborative knowledge platform (v0.6.3).
|
|
19
19
|
|
|
20
20
|
## Prerequisites
|
|
21
21
|
|
|
@@ -167,6 +167,10 @@ Note: `--space-slug` is not available on other `brain` subcommands or on `sessio
|
|
|
167
167
|
- `gobi session get` — Get a session and its messages (paginated).
|
|
168
168
|
- `gobi session list` — List all sessions you are part of, sorted by most recent activity.
|
|
169
169
|
- `gobi session reply` — Send a human reply to a session you are a member of.
|
|
170
|
+
- `gobi sense` — Sense commands (activities, transcriptions).
|
|
171
|
+
- `gobi sense activities` — Fetch activity records within a time range.
|
|
172
|
+
- `gobi sense transcriptions` — Fetch transcription records within a time range.
|
|
173
|
+
- `gobi sync` — Sync local vault files with Gobi Webdrive.
|
|
170
174
|
|
|
171
175
|
## Reference Documentation
|
|
172
176
|
|
|
@@ -175,6 +179,8 @@ Note: `--space-slug` is not available on other `brain` subcommands or on `sessio
|
|
|
175
179
|
- [gobi space](references/space.md)
|
|
176
180
|
- [gobi brain](references/brain.md)
|
|
177
181
|
- [gobi session](references/session.md)
|
|
182
|
+
- [gobi sense](references/sense.md)
|
|
183
|
+
- [gobi sync](references/sync.md)
|
|
178
184
|
|
|
179
185
|
## Discovering Options
|
|
180
186
|
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
# gobi sense
|
|
2
|
+
|
|
3
|
+
```
|
|
4
|
+
Usage: gobi sense [options] [command]
|
|
5
|
+
|
|
6
|
+
Sense commands (activities, transcriptions).
|
|
7
|
+
|
|
8
|
+
Options:
|
|
9
|
+
-h, --help display help for command
|
|
10
|
+
|
|
11
|
+
Commands:
|
|
12
|
+
activities [options] Fetch activity records within a time range.
|
|
13
|
+
transcriptions [options] Fetch transcription records within a time range.
|
|
14
|
+
help [command] display help for command
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## activities
|
|
18
|
+
|
|
19
|
+
```
|
|
20
|
+
Usage: gobi sense activities [options]
|
|
21
|
+
|
|
22
|
+
Fetch activity records within a time range.
|
|
23
|
+
|
|
24
|
+
Options:
|
|
25
|
+
--start-time <iso> Start of time range (ISO 8601 UTC, e.g. 2026-03-20T00:00:00Z)
|
|
26
|
+
--end-time <iso> End of time range (ISO 8601 UTC, e.g. 2026-03-20T23:59:59Z)
|
|
27
|
+
-h, --help display help for command
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
## transcriptions
|
|
31
|
+
|
|
32
|
+
```
|
|
33
|
+
Usage: gobi sense transcriptions [options]
|
|
34
|
+
|
|
35
|
+
Fetch transcription records within a time range.
|
|
36
|
+
|
|
37
|
+
Options:
|
|
38
|
+
--start-time <iso> Start of time range (ISO 8601 UTC, e.g. 2026-03-20T00:00:00Z)
|
|
39
|
+
--end-time <iso> End of time range (ISO 8601 UTC, e.g. 2026-03-20T23:59:59Z)
|
|
40
|
+
-h, --help display help for command
|
|
41
|
+
```
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
# gobi sync
|
|
2
|
+
|
|
3
|
+
```
|
|
4
|
+
Usage: gobi sync [options]
|
|
5
|
+
|
|
6
|
+
Sync local vault files with Gobi Webdrive.
|
|
7
|
+
|
|
8
|
+
Options:
|
|
9
|
+
--upload-only Only upload local changes to server
|
|
10
|
+
--download-only Only download server changes to local
|
|
11
|
+
--conflict <strategy> Conflict resolution strategy: ask|server|client|skip (default: "ask")
|
|
12
|
+
--dir <path> Local vault directory (default: current directory)
|
|
13
|
+
--dry-run Preview changes without making them
|
|
14
|
+
--full Full sync: ignore cursor and hash cache, re-check every file
|
|
15
|
+
--path <path> Restrict sync to a specific file or folder (repeatable) (default: [])
|
|
16
|
+
-h, --help display help for command
|
|
17
|
+
```
|