@memrosetta/cli 0.5.1 → 0.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-47SU2YUJ.js +64 -0
- package/dist/chunk-C4ANKSCI.js +151 -0
- package/dist/chunk-CEHRM6IW.js +151 -0
- package/dist/chunk-G2W4YK2T.js +56 -0
- package/dist/chunk-GGXC7TAJ.js +139 -0
- package/dist/chunk-GRNZVSAF.js +56 -0
- package/dist/chunk-GZINXXM4.js +139 -0
- package/dist/chunk-RZFCVYTK.js +71 -0
- package/dist/chunk-US6CEDMU.js +66 -0
- package/dist/chunk-VMGX5FCY.js +64 -0
- package/dist/chunk-WYHEAKPC.js +71 -0
- package/dist/clear-32Y3U2WR.js +39 -0
- package/dist/clear-AFEJPCDA.js +39 -0
- package/dist/compress-CL5D4VVJ.js +33 -0
- package/dist/compress-UUEO7WCU.js +33 -0
- package/dist/count-U2ML5ZON.js +24 -0
- package/dist/count-VVOGYSM7.js +24 -0
- package/dist/duplicates-CEJ7WSGW.js +149 -0
- package/dist/duplicates-IBUS7CJS.js +149 -0
- package/dist/enforce-T7AS4PVD.js +381 -0
- package/dist/enforce-TC5SDPEZ.js +381 -0
- package/dist/feedback-3PJTTEOD.js +51 -0
- package/dist/feedback-IB7BHIRP.js +51 -0
- package/dist/get-TQ2U7HCD.js +30 -0
- package/dist/get-WPZIHQKW.js +30 -0
- package/dist/hooks/on-prompt.js +3 -3
- package/dist/hooks/on-stop.js +3 -3
- package/dist/index.js +30 -20
- package/dist/ingest-37UXPVT5.js +97 -0
- package/dist/ingest-TPQRH34A.js +97 -0
- package/dist/init-6YQL3RCQ.js +210 -0
- package/dist/init-LHXRCCLX.js +210 -0
- package/dist/invalidate-ER2TFFWK.js +40 -0
- package/dist/invalidate-PVHUGAJ6.js +40 -0
- package/dist/maintain-NICAXFK6.js +37 -0
- package/dist/maintain-Q553GBSF.js +37 -0
- package/dist/migrate-CZL3YNQK.js +255 -0
- package/dist/migrate-FI26FSBP.js +255 -0
- package/dist/relate-5TN2WEG3.js +57 -0
- package/dist/relate-KLBMYWB3.js +57 -0
- package/dist/reset-IPOAKTJM.js +132 -0
- package/dist/search-AYZBKRXF.js +48 -0
- package/dist/search-JQ3MLRKS.js +48 -0
- package/dist/status-JF2V7ZBX.js +184 -0
- package/dist/status-UV66PWUD.js +184 -0
- package/dist/store-AAJCT3PX.js +101 -0
- package/dist/store-OVDS57U5.js +101 -0
- package/dist/sync-56KJTKE7.js +542 -0
- package/dist/sync-BCKBYRXY.js +542 -0
- package/dist/working-memory-CJARSGEK.js +53 -0
- package/dist/working-memory-Z3RUGSTQ.js +53 -0
- package/package.json +3 -3
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
import {
|
|
2
|
+
hasFlag,
|
|
3
|
+
optionalOption
|
|
4
|
+
} from "./chunk-US6CEDMU.js";
|
|
5
|
+
import {
|
|
6
|
+
resolveDbPath
|
|
7
|
+
} from "./chunk-VMGX5FCY.js";
|
|
8
|
+
import {
|
|
9
|
+
output,
|
|
10
|
+
outputError
|
|
11
|
+
} from "./chunk-ET6TNQOJ.js";
|
|
12
|
+
import {
|
|
13
|
+
resolveCanonicalUserId
|
|
14
|
+
} from "./chunk-RZFCVYTK.js";
|
|
15
|
+
|
|
16
|
+
// src/commands/migrate.ts
|
|
17
|
+
import { createInterface } from "readline";
|
|
18
|
+
var MIGRATION_NAME = "legacy-user-id-to-canonical-v1";

/**
 * Build a read-only impact report for the legacy-user-id migration.
 *
 * Nothing is modified here — every statement is a SELECT. The report feeds
 * both the --dry-run output and the interactive confirmation preview.
 *
 * @param db               better-sqlite3-style handle (prepare().get()/.all()).
 * @param canonicalUserId  The user id every memory row should end up under.
 * @returns {object} Counts plus a per-legacy-partition breakdown.
 */
function scanLegacyImpact(db, canonicalUserId) {
  // Helper: run a single-row COUNT(*) query and unwrap its `c` column.
  const countOf = (sql, ...params) => db.prepare(sql).get(...params).c;

  const totalRows = countOf("SELECT COUNT(*) AS c FROM memories");
  const legacyRows = countOf(
    "SELECT COUNT(*) AS c FROM memories WHERE user_id != ?",
    canonicalUserId
  );

  // One row per non-canonical user_id, largest partitions first.
  const breakdownRows = db.prepare(
    `SELECT user_id AS legacyUserId, COUNT(*) AS rows, COUNT(DISTINCT namespace) AS distinctNamespaces
    FROM memories
    WHERE user_id != ?
    GROUP BY user_id
    ORDER BY rows DESC`
  ).all(canonicalUserId);

  // sync_outbox may not exist on older schemas; treat "no table" as empty.
  let queuePending = 0;
  if (hasTable(db, "sync_outbox")) {
    queuePending = countOf(
      "SELECT COUNT(*) AS c FROM sync_outbox WHERE pushed_at IS NULL"
    );
  }

  // Number of identical-content groups that span more than one user_id.
  const crossPartitionDuplicateGroups = countOf(
    `WITH x AS (
    SELECT content, COUNT(DISTINCT user_id) AS u
    FROM memories
    GROUP BY content
    )
    SELECT COUNT(*) AS c FROM x WHERE u > 1`
  );

  // The marker row only exists once the migration has been applied.
  let alreadyMigrated = false;
  if (hasTable(db, "migration_version")) {
    const marker = db
      .prepare("SELECT 1 FROM migration_version WHERE name = ?")
      .get(MIGRATION_NAME);
    alreadyMigrated = Boolean(marker);
  }

  return {
    canonicalUserId,
    totalRows,
    legacyRows,
    distinctLegacyUserIds: breakdownRows.length,
    breakdown: breakdownRows,
    queuePending,
    crossPartitionDuplicateGroups,
    alreadyMigrated
  };
}

/**
 * True when a table with the given name exists in the SQLite schema.
 */
function hasTable(db, name) {
  return Boolean(
    db
      .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name = ?")
      .get(name)
  );
}
|
|
56
|
+
/**
 * Apply the legacy-user-id migration. All statements run inside a single
 * better-sqlite3 transaction, so a failure leaves the database untouched.
 *
 * Order matters:
 *   1. snapshot legacy (user_id, namespace) pairs into memory_legacy_scope
 *      BEFORE user_id is rewritten — provenance would be lost otherwise;
 *   2. rewrite memories.user_id onto the canonical user;
 *   3. drop queued sync traffic and cursors, which reference the old
 *      partitions and would replay stale ops against the hub;
 *   4. record the migration marker so reruns become no-ops.
 *
 * @param db               better-sqlite3 database handle.
 * @param canonicalUserId  Target user id for every memory row.
 * @returns Counters describing what was changed.
 */
function runLegacyUserIdMigration(db, canonicalUserId) {
  const run2 = db.transaction(() => {
    // Step 1: non-destructive provenance snapshot. INSERT OR IGNORE keeps
    // this idempotent if a previous attempt partially completed.
    const insert = db.prepare(
      `INSERT OR IGNORE INTO memory_legacy_scope (
      memory_id, legacy_user_id, legacy_namespace, migrated_at
      )
      SELECT memory_id, user_id, namespace, CURRENT_TIMESTAMP
      FROM memories
      WHERE user_id != ?`
    );
    const insertInfo = insert.run(canonicalUserId);
    const legacyScopeRows = insertInfo.changes;
    // Step 2: move every non-canonical row onto the canonical partition.
    const update = db.prepare(
      "UPDATE memories SET user_id = ? WHERE user_id != ?"
    );
    const updateInfo = update.run(canonicalUserId, canonicalUserId);
    const movedRows = updateInfo.changes;
    // Step 3: clear sync queues and cursors. The tables may not exist on
    // older schemas, hence the hasTable guards.
    let outboxCleared = 0;
    let inboxCleared = 0;
    if (hasTable(db, "sync_outbox")) {
      outboxCleared = db.prepare("DELETE FROM sync_outbox").run().changes;
    }
    if (hasTable(db, "sync_inbox")) {
      inboxCleared = db.prepare("DELETE FROM sync_inbox").run().changes;
    }
    let cursorReset = false;
    if (hasTable(db, "sync_state")) {
      const r = db.prepare(
        `DELETE FROM sync_state WHERE key IN (
        'last_cursor',
        'pull_cursor',
        'last_push_attempt_at',
        'last_push_success_at',
        'last_pull_attempt_at',
        'last_pull_success_at'
        )`
      ).run();
      cursorReset = r.changes > 0;
    }
    // Step 4: marker row makes the whole migration idempotent across runs.
    db.prepare(
      `INSERT OR IGNORE INTO migration_version (name, applied_at)
      VALUES (?, CURRENT_TIMESTAMP)`
    ).run(MIGRATION_NAME);
    return {
      movedRows,
      legacyScopeRows,
      outboxCleared,
      inboxCleared,
      cursorReset
    };
  });
  // db.transaction() returns a callable; invoking it executes the body
  // inside BEGIN/COMMIT (ROLLBACK on throw).
  return run2();
}
|
|
109
|
+
/**
 * Ask a yes/no question on the controlling terminal.
 *
 * Non-interactive sessions (no TTY on stdin) auto-decline so scripted runs
 * never hang waiting for input.
 *
 * @param question  Prompt text; " [y/N] " is appended.
 * @returns {Promise<boolean>} true only for "y"/"yes" (case-insensitive).
 */
async function confirmInteractive(question) {
  if (!process.stdin.isTTY) {
    return false;
  }
  const prompt = createInterface({ input: process.stdin, output: process.stdout });
  try {
    const reply = await new Promise((resolve) => {
      prompt.question(`${question} [y/N] `, resolve);
    });
    const normalized = reply.trim();
    return /^y(es)?$/i.test(normalized);
  } finally {
    // Always release the readline interface, even if the promise rejects.
    prompt.close();
  }
}
|
|
121
|
+
/**
 * CLI entry point: `memrosetta migrate legacy-user-ids [--dry-run]
 * [--canonical <user>] [--yes]`.
 *
 * Consolidates memory rows recorded under legacy user ids onto the canonical
 * user id. Flow: scan -> noop/dry-run short-circuits -> interactive
 * confirmation (unless --yes) -> transactional migration -> next-step hints.
 *
 * @param options.args    Args after `migrate` (args[0] is the subcommand).
 * @param options.format  Output format forwarded to output()/outputError().
 * @param options.db      Optional database path override.
 */
async function run(options) {
  const { args, format, db: dbOverride } = options;
  const sub = args[0];
  // Only one subcommand is supported today.
  if (sub !== "legacy-user-ids") {
    outputError(
      "Usage: memrosetta migrate legacy-user-ids [--dry-run] [--canonical <user>] [--yes]",
      format
    );
    process.exitCode = 1;
    return;
  }
  const sliced = args.slice(1);
  const dryRun = hasFlag(sliced, "--dry-run");
  const autoYes = hasFlag(sliced, "--yes") || hasFlag(sliced, "-y");
  // --canonical overrides; otherwise resolveCanonicalUserId picks a default.
  const canonicalOverride = optionalOption(sliced, "--canonical");
  const canonicalUserId = resolveCanonicalUserId(canonicalOverride ?? null);
  const dbPath = resolveDbPath(dbOverride);
  // Native module loaded lazily so unrelated commands avoid the cost.
  const { default: Database } = await import("better-sqlite3");
  const db = new Database(dbPath);
  try {
    const { ensureSchema } = await import("@memrosetta/core");
    // Vector support is not needed for a user-id rewrite.
    ensureSchema(db, { vectorEnabled: false });
    const report = scanLegacyImpact(db, canonicalUserId);
    // Already applied and nothing left to move: report and exit cleanly.
    if (report.alreadyMigrated && report.legacyRows === 0) {
      output(
        {
          status: "noop",
          reason: `migration ${MIGRATION_NAME} already applied and no legacy rows remain`,
          report
        },
        format
      );
      return;
    }
    // Nothing to migrate at all.
    if (report.legacyRows === 0) {
      output(
        {
          status: "noop",
          reason: "no legacy user_id partitions found",
          report
        },
        format
      );
      return;
    }
    // --dry-run: emit the full plan without touching the database.
    if (dryRun) {
      output(
        {
          status: "dry-run",
          canonicalUserId,
          report,
          wouldClear: {
            syncOutbox: true,
            syncInbox: true,
            cursorState: true
          },
          nextSteps: [
            "Run without --dry-run to apply the migration.",
            "After migration: `memrosetta sync backfill` then `memrosetta sync now`."
          ]
        },
        format
      );
      return;
    }
    // Destructive-ish step ahead: show the impact and ask, unless --yes.
    if (!autoYes) {
      printImpactPreview(report, canonicalUserId);
      const ok = await confirmInteractive(
        `Apply migration and move ${report.legacyRows} row(s) onto '${canonicalUserId}'?`
      );
      // confirmInteractive also returns false for non-TTY sessions.
      if (!ok) {
        output(
          {
            status: "aborted",
            reason: "user declined or non-interactive session (pass --yes to skip prompt)",
            report
          },
          format
        );
        return;
      }
    }
    const result = runLegacyUserIdMigration(db, canonicalUserId);
    output(
      {
        status: "applied",
        canonicalUserId,
        migration: MIGRATION_NAME,
        result,
        nextSteps: [
          `Run \`memrosetta sync backfill --user ${canonicalUserId}\` to republish memories onto the canonical partition.`,
          "Then `memrosetta sync now` to push them to the hub.",
          "Run `memrosetta duplicates report` to audit cross-partition duplicates before any future dedupe pass."
        ]
      },
      format
    );
  } finally {
    // Always release the SQLite handle, including on error paths.
    db.close();
  }
}
|
|
222
|
+
/**
 * Render a human-readable preview of what the migration will change.
 * Written to stderr so machine-readable stdout output stays clean.
 *
 * @param report           Result of scanLegacyImpact().
 * @param canonicalUserId  Target user id shown in the header.
 */
function printImpactPreview(report, canonicalUserId) {
  const lines = [];
  lines.push("");
  lines.push("Migration impact preview");
  lines.push("------------------------");
  lines.push(`  canonical user            : ${canonicalUserId}`);
  lines.push(`  total memories            : ${report.totalRows}`);
  lines.push(`  legacy rows to move       : ${report.legacyRows}`);
  lines.push(`  distinct legacy partitions: ${report.distinctLegacyUserIds}`);
  lines.push(`  sync_outbox pending       : ${report.queuePending}`);
  lines.push(`  cross-partition dup groups: ${report.crossPartitionDuplicateGroups}`);
  lines.push("");
  lines.push("Top legacy partitions:");
  // Cap the listing at ten partitions; the breakdown is already sorted
  // by row count descending.
  for (const r of report.breakdown.slice(0, 10)) {
    lines.push(
      `  - ${r.legacyUserId.padEnd(40)} rows=${r.rows} namespaces=${r.distinctNamespaces}`
    );
  }
  lines.push("");
  lines.push("This will:");
  lines.push("  * copy legacy rows into memory_legacy_scope (non-destructive)");
  lines.push("  * rewrite memories.user_id to the canonical user");
  lines.push("  * leave memories.namespace untouched");
  lines.push("  * clear sync_outbox / sync_inbox / sync cursor state");
  lines.push("");
  lines.push("Back up ~/.memrosetta/memories.db before continuing if you have not already.");
  lines.push("");
  process.stderr.write(lines.join("\n"));
}
|
|
251
|
+
export {
|
|
252
|
+
run,
|
|
253
|
+
runLegacyUserIdMigration,
|
|
254
|
+
scanLegacyImpact
|
|
255
|
+
};
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import {
|
|
2
|
+
buildRelationCreatedOp,
|
|
3
|
+
openCliSyncContext
|
|
4
|
+
} from "./chunk-GGXC7TAJ.js";
|
|
5
|
+
import {
|
|
6
|
+
optionalOption,
|
|
7
|
+
requireOption
|
|
8
|
+
} from "./chunk-US6CEDMU.js";
|
|
9
|
+
import {
|
|
10
|
+
getEngine,
|
|
11
|
+
resolveDbPath
|
|
12
|
+
} from "./chunk-47SU2YUJ.js";
|
|
13
|
+
import {
|
|
14
|
+
output,
|
|
15
|
+
outputError
|
|
16
|
+
} from "./chunk-ET6TNQOJ.js";
|
|
17
|
+
import "./chunk-WYHEAKPC.js";
|
|
18
|
+
|
|
19
|
+
// src/commands/relate.ts
|
|
20
|
+
// Relation types accepted by `memrosetta relate --type <t>`.
var VALID_RELATION_TYPES = /* @__PURE__ */ new Set([
  "updates",
  "extends",
  "derives",
  "contradicts",
  "supports"
]);

/**
 * CLI entry point: `memrosetta relate --src <id> --dst <id> --type <t>
 * [--reason <r>]`.
 *
 * Creates a typed relation between two memories and, when sync is enabled,
 * enqueues a relation-created op for the hub.
 *
 * Fix: sync.close() now runs in a finally block, so the sync context is
 * released even when enqueue throws (previously the handle leaked).
 */
async function run(options) {
  const { args, format, db, noEmbeddings } = options;
  const src = requireOption(args, "--src", "source memory ID");
  const dst = requireOption(args, "--dst", "destination memory ID");
  const relationType = requireOption(args, "--type", "relation type");
  const reason = optionalOption(args, "--reason");
  if (!VALID_RELATION_TYPES.has(relationType)) {
    outputError(
      `Invalid relation type: ${relationType}. Must be one of: updates, extends, derives, contradicts, supports`,
      format
    );
    process.exitCode = 1;
    return;
  }
  const engine = await getEngine({ db, noEmbeddings });
  const relation = await engine.relate(
    src,
    dst,
    relationType,
    reason
  );
  const sync = await openCliSyncContext(resolveDbPath(db));
  if (sync.enabled) {
    try {
      sync.enqueue(buildRelationCreatedOp(sync, relation));
    } finally {
      // Close even if enqueue throws; otherwise the sync DB handle leaks.
      sync.close();
    }
  }
  output(relation, format);
}
|
|
55
|
+
export {
|
|
56
|
+
run
|
|
57
|
+
};
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import {
|
|
2
|
+
buildRelationCreatedOp,
|
|
3
|
+
openCliSyncContext
|
|
4
|
+
} from "./chunk-GZINXXM4.js";
|
|
5
|
+
import {
|
|
6
|
+
optionalOption,
|
|
7
|
+
requireOption
|
|
8
|
+
} from "./chunk-US6CEDMU.js";
|
|
9
|
+
import {
|
|
10
|
+
getEngine,
|
|
11
|
+
resolveDbPath
|
|
12
|
+
} from "./chunk-VMGX5FCY.js";
|
|
13
|
+
import {
|
|
14
|
+
output,
|
|
15
|
+
outputError
|
|
16
|
+
} from "./chunk-ET6TNQOJ.js";
|
|
17
|
+
import "./chunk-RZFCVYTK.js";
|
|
18
|
+
|
|
19
|
+
// src/commands/relate.ts
|
|
20
|
+
// Relation types accepted by `memrosetta relate --type <t>`.
var VALID_RELATION_TYPES = /* @__PURE__ */ new Set([
  "updates",
  "extends",
  "derives",
  "contradicts",
  "supports"
]);

/**
 * CLI entry point: `memrosetta relate --src <id> --dst <id> --type <t>
 * [--reason <r>]`.
 *
 * Creates a typed relation between two memories and, when sync is enabled,
 * enqueues a relation-created op for the hub.
 *
 * Fix: sync.close() now runs in a finally block, so the sync context is
 * released even when enqueue throws (previously the handle leaked).
 */
async function run(options) {
  const { args, format, db, noEmbeddings } = options;
  const src = requireOption(args, "--src", "source memory ID");
  const dst = requireOption(args, "--dst", "destination memory ID");
  const relationType = requireOption(args, "--type", "relation type");
  const reason = optionalOption(args, "--reason");
  if (!VALID_RELATION_TYPES.has(relationType)) {
    outputError(
      `Invalid relation type: ${relationType}. Must be one of: updates, extends, derives, contradicts, supports`,
      format
    );
    process.exitCode = 1;
    return;
  }
  const engine = await getEngine({ db, noEmbeddings });
  const relation = await engine.relate(
    src,
    dst,
    relationType,
    reason
  );
  const sync = await openCliSyncContext(resolveDbPath(db));
  if (sync.enabled) {
    try {
      sync.enqueue(buildRelationCreatedOp(sync, relation));
    } finally {
      // Close even if enqueue throws; otherwise the sync DB handle leaks.
      sync.close();
    }
  }
  output(relation, format);
}
|
|
55
|
+
export {
|
|
56
|
+
run
|
|
57
|
+
};
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
import {
|
|
2
|
+
removeAgentsMdSection,
|
|
3
|
+
removeClaudeCodeHooks,
|
|
4
|
+
removeClaudeMdSection,
|
|
5
|
+
removeCodexHooks,
|
|
6
|
+
removeCodexMCP,
|
|
7
|
+
removeCursorMCP,
|
|
8
|
+
removeCursorRulesSection,
|
|
9
|
+
removeGeminiMCP,
|
|
10
|
+
removeGeminiMdSection,
|
|
11
|
+
removeGenericMCP
|
|
12
|
+
} from "./chunk-4LNXT25H.js";
|
|
13
|
+
import {
|
|
14
|
+
hasFlag
|
|
15
|
+
} from "./chunk-US6CEDMU.js";
|
|
16
|
+
import {
|
|
17
|
+
output
|
|
18
|
+
} from "./chunk-ET6TNQOJ.js";
|
|
19
|
+
|
|
20
|
+
// src/commands/reset.ts
|
|
21
|
+
/**
 * CLI entry point: `memrosetta reset [flags]` — removes the editor/agent
 * integrations that `init` installed. The MemRosetta data directory itself
 * is never touched.
 *
 * Fix: `removed.codexStopHook` is now initialized to false, so the result
 * object has a stable shape regardless of which flags were passed
 * (previously the key only appeared after the --codex branch ran).
 *
 * @param options.args    CLI flags after `reset`.
 * @param options.format  "text" for human output, otherwise structured.
 */
async function run(options) {
  const { args, format } = options;
  const wantClaudeCode = hasFlag(args, "--claude-code");
  const wantCursor = hasFlag(args, "--cursor");
  const wantCodex = hasFlag(args, "--codex");
  const wantGemini = hasFlag(args, "--gemini");
  const wantMCP = hasFlag(args, "--mcp");
  const wantAll = hasFlag(args, "--all");
  const noFlags = !wantClaudeCode && !wantCursor && !wantCodex && !wantGemini && !wantMCP && !wantAll;
  // With no flags, print usage instead of silently doing nothing.
  if (noFlags) {
    const msg = "Usage: memrosetta reset [--claude-code] [--cursor] [--codex] [--gemini] [--mcp] [--all]\n\nFlags:\n --claude-code Remove Claude Code hooks, MCP, and CLAUDE.md section\n --cursor Remove Cursor MCP configuration\n --codex Remove Codex MCP configuration and AGENTS.md section\n --gemini Remove Gemini MCP configuration and GEMINI.md section\n --mcp Remove generic MCP configuration (~/.mcp.json)\n --all Remove all integrations\n";
    if (format === "text") {
      process.stdout.write(msg);
    } else {
      output({ error: "No flags specified. Use --claude-code, --cursor, --codex, --gemini, --mcp, or --all." }, format);
    }
    return;
  }
  // Fixed shape: every key exists up front so JSON consumers see a
  // consistent object no matter which branches run.
  const result = {
    removed: {
      claudeCodeHooks: false,
      claudeMd: false,
      mcp: false,
      cursor: false,
      cursorRules: false,
      codex: false,
      codexStopHook: false,
      agentsMd: false,
      gemini: false,
      geminiMd: false
    }
  };
  if (wantClaudeCode || wantAll) {
    const hooksRemoved = removeClaudeCodeHooks();
    const mdRemoved = removeClaudeMdSection();
    const mcpRemoved = removeGenericMCP();
    result.removed.claudeCodeHooks = hooksRemoved;
    result.removed.claudeMd = mdRemoved;
    // When --mcp is also set, defer the mcp flag to the dedicated branch
    // below so its (later) removeGenericMCP() result wins.
    if (!wantMCP) {
      result.removed.mcp = mcpRemoved;
    }
  }
  if (wantCursor || wantAll) {
    const removed = removeCursorMCP();
    result.removed.cursor = removed;
    const rulesRemoved = removeCursorRulesSection();
    result.removed.cursorRules = rulesRemoved;
  }
  if (wantCodex || wantAll) {
    const removed = removeCodexMCP();
    result.removed.codex = removed;
    const mdRemoved = removeAgentsMdSection();
    result.removed.agentsMd = mdRemoved;
    const stopHookRemoved = removeCodexHooks();
    result.removed.codexStopHook = stopHookRemoved;
  }
  if (wantGemini || wantAll) {
    const removed = removeGeminiMCP();
    result.removed.gemini = removed;
    const mdRemoved = removeGeminiMdSection();
    result.removed.geminiMd = mdRemoved;
  }
  if (wantMCP || wantAll) {
    const removed = removeGenericMCP();
    result.removed.mcp = removed;
  }
  if (format === "text") {
    printTextOutput(result);
    return;
  }
  output(result, format);
}
|
|
92
|
+
/**
 * Render the reset result in human-readable form on stdout: one line per
 * removed integration, plus a hint when nothing was configured.
 *
 * Fix: the Codex stop hook removal (`removed.codexStopHook`, set by the
 * --codex branch of run()) is now reported and counted — previously a
 * stop-hook-only removal printed "Nothing to remove".
 *
 * @param result  Object with a `removed` map of boolean outcomes.
 */
function printTextOutput(result) {
  const w = (s) => process.stdout.write(s);
  const removed = result.removed;
  if (removed.claudeCodeHooks) {
    w("Removed Claude Code hooks from ~/.claude/settings.json\n");
  }
  if (removed.claudeMd) {
    w("Removed MemRosetta section from ~/.claude/CLAUDE.md\n");
  }
  if (removed.mcp) {
    w("Removed MCP server from ~/.mcp.json\n");
  }
  if (removed.cursor) {
    w("Removed Cursor MCP from ~/.cursor/mcp.json\n");
  }
  if (removed.cursorRules) {
    w("Removed MemRosetta section from ~/.cursorrules\n");
  }
  if (removed.codex) {
    w("Removed Codex MCP from ~/.codex/config.toml\n");
  }
  if (removed.codexStopHook) {
    // NOTE(review): wording assumes the stop hook lives in the Codex config;
    // confirm against removeCodexHooks() in the install chunk.
    w("Removed Codex stop hook from ~/.codex/config.toml\n");
  }
  if (removed.agentsMd) {
    w("Removed MemRosetta section from AGENTS.md\n");
  }
  if (removed.gemini) {
    w("Removed Gemini MCP from ~/.gemini/settings.json\n");
  }
  if (removed.geminiMd) {
    w("Removed MemRosetta section from GEMINI.md\n");
  }
  const anyRemoved = removed.claudeCodeHooks || removed.claudeMd || removed.mcp || removed.cursor || removed.cursorRules || removed.codex || removed.codexStopHook || removed.agentsMd || removed.gemini || removed.geminiMd;
  if (!anyRemoved) {
    w("Nothing to remove (no integrations were configured).\n");
  }
  w(
    "\nNote: ~/.memrosetta/ directory preserved. Delete manually if needed:\n rm -rf ~/.memrosetta\n"
  );
}
|
|
130
|
+
export {
|
|
131
|
+
run
|
|
132
|
+
};
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import {
|
|
2
|
+
optionalOption,
|
|
3
|
+
requireOption
|
|
4
|
+
} from "./chunk-US6CEDMU.js";
|
|
5
|
+
import {
|
|
6
|
+
getEngine
|
|
7
|
+
} from "./chunk-47SU2YUJ.js";
|
|
8
|
+
import {
|
|
9
|
+
output,
|
|
10
|
+
outputError
|
|
11
|
+
} from "./chunk-ET6TNQOJ.js";
|
|
12
|
+
import {
|
|
13
|
+
getDefaultUserId
|
|
14
|
+
} from "./chunk-WYHEAKPC.js";
|
|
15
|
+
|
|
16
|
+
// src/commands/search.ts
|
|
17
|
+
/**
 * CLI entry point: `memrosetta search --query <q> [--user <u>] [--limit <n>]
 * [--namespace <ns>] [--types <a,b>] [--min-confidence <f>]`.
 *
 * Fixes: limit parsing uses Number.parseInt/Number.isNaN instead of the
 * coercing globals, and a malformed --min-confidence is now rejected with
 * an error instead of silently passing NaN into the search filters.
 */
async function run(options) {
  const { args, format, db, noEmbeddings } = options;
  const userId = optionalOption(args, "--user") ?? getDefaultUserId();
  const query = requireOption(args, "--query", "query");
  const limitRaw = optionalOption(args, "--limit");
  const namespace = optionalOption(args, "--namespace");
  const typesRaw = optionalOption(args, "--types");
  const minConfidenceRaw = optionalOption(args, "--min-confidence");
  // Default page size is 5; reject non-numeric or non-positive limits.
  const limit = limitRaw ? Number.parseInt(limitRaw, 10) : 5;
  if (Number.isNaN(limit) || limit < 1) {
    outputError("Invalid limit value", format);
    process.exitCode = 1;
    return;
  }
  const memoryTypes = typesRaw ? typesRaw.split(",") : void 0;
  const minConfidence = minConfidenceRaw ? Number.parseFloat(minConfidenceRaw) : void 0;
  // Previously garbage input became NaN and broke filtering silently.
  if (minConfidence !== void 0 && Number.isNaN(minConfidence)) {
    outputError("Invalid min-confidence value", format);
    process.exitCode = 1;
    return;
  }
  const engine = await getEngine({ db, noEmbeddings });
  const response = await engine.search({
    userId,
    query,
    namespace,
    limit,
    filters: {
      memoryTypes,
      minConfidence
    }
  });
  output(response, format);
}
|
|
46
|
+
export {
|
|
47
|
+
run
|
|
48
|
+
};
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import {
|
|
2
|
+
optionalOption,
|
|
3
|
+
requireOption
|
|
4
|
+
} from "./chunk-US6CEDMU.js";
|
|
5
|
+
import {
|
|
6
|
+
getEngine
|
|
7
|
+
} from "./chunk-VMGX5FCY.js";
|
|
8
|
+
import {
|
|
9
|
+
output,
|
|
10
|
+
outputError
|
|
11
|
+
} from "./chunk-ET6TNQOJ.js";
|
|
12
|
+
import {
|
|
13
|
+
getDefaultUserId
|
|
14
|
+
} from "./chunk-RZFCVYTK.js";
|
|
15
|
+
|
|
16
|
+
// src/commands/search.ts
|
|
17
|
+
/**
 * CLI entry point: `memrosetta search --query <q> [--user <u>] [--limit <n>]
 * [--namespace <ns>] [--types <a,b>] [--min-confidence <f>]`.
 *
 * Fixes: limit parsing uses Number.parseInt/Number.isNaN instead of the
 * coercing globals, and a malformed --min-confidence is now rejected with
 * an error instead of silently passing NaN into the search filters.
 */
async function run(options) {
  const { args, format, db, noEmbeddings } = options;
  const userId = optionalOption(args, "--user") ?? getDefaultUserId();
  const query = requireOption(args, "--query", "query");
  const limitRaw = optionalOption(args, "--limit");
  const namespace = optionalOption(args, "--namespace");
  const typesRaw = optionalOption(args, "--types");
  const minConfidenceRaw = optionalOption(args, "--min-confidence");
  // Default page size is 5; reject non-numeric or non-positive limits.
  const limit = limitRaw ? Number.parseInt(limitRaw, 10) : 5;
  if (Number.isNaN(limit) || limit < 1) {
    outputError("Invalid limit value", format);
    process.exitCode = 1;
    return;
  }
  const memoryTypes = typesRaw ? typesRaw.split(",") : void 0;
  const minConfidence = minConfidenceRaw ? Number.parseFloat(minConfidenceRaw) : void 0;
  // Previously garbage input became NaN and broke filtering silently.
  if (minConfidence !== void 0 && Number.isNaN(minConfidence)) {
    outputError("Invalid min-confidence value", format);
    process.exitCode = 1;
    return;
  }
  const engine = await getEngine({ db, noEmbeddings });
  const response = await engine.search({
    userId,
    query,
    namespace,
    limit,
    filters: {
      memoryTypes,
      minConfidence
    }
  });
  output(response, format);
}
|
|
46
|
+
export {
|
|
47
|
+
run
|
|
48
|
+
};
|