clawvault 2.1.1 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/bin/command-registration.test.js +6 -1
  2. package/bin/help-contract.test.js +2 -0
  3. package/bin/register-maintenance-commands.js +111 -0
  4. package/bin/register-query-commands.js +32 -1
  5. package/bin/register-session-lifecycle-commands.js +2 -0
  6. package/dist/{chunk-USAY3OIO.js → chunk-2HM7ZI4X.js} +273 -435
  7. package/dist/{chunk-TBVI4N53.js → chunk-6RIHODNR.js} +120 -95
  8. package/dist/chunk-73P7XCQM.js +104 -0
  9. package/dist/{chunk-MIIXBNO3.js → chunk-DHBDH4DN.js} +4 -0
  10. package/dist/chunk-GJEGPO7U.js +49 -0
  11. package/dist/chunk-GQVYQCY5.js +396 -0
  12. package/dist/{chunk-TXO34J3O.js → chunk-H7JW4L7H.js} +1 -1
  13. package/dist/{chunk-QFBKWDYR.js → chunk-IFGDPIFI.js} +3 -3
  14. package/dist/chunk-K6XHCUFL.js +123 -0
  15. package/dist/{chunk-PIJGYMQZ.js → chunk-KNDVXXKC.js} +1 -1
  16. package/dist/chunk-L6NB43WV.js +472 -0
  17. package/dist/{chunk-UPHUI5PD.js → chunk-LB6P4CD5.js} +20 -7
  18. package/dist/chunk-MGDEINGP.js +99 -0
  19. package/dist/chunk-MQUJNOHK.js +58 -0
  20. package/dist/chunk-P5EPF6MB.js +182 -0
  21. package/dist/chunk-VR5NE7PZ.js +45 -0
  22. package/dist/chunk-WZI3OAE5.js +111 -0
  23. package/dist/chunk-Z2XBWN7A.js +247 -0
  24. package/dist/{chunk-O5V7SD5C.js → chunk-ZZA73MFY.js} +1 -1
  25. package/dist/commands/archive.d.ts +11 -0
  26. package/dist/commands/archive.js +11 -0
  27. package/dist/commands/context.d.ts +1 -1
  28. package/dist/commands/context.js +6 -4
  29. package/dist/commands/doctor.js +6 -6
  30. package/dist/commands/graph.js +2 -2
  31. package/dist/commands/link.js +1 -1
  32. package/dist/commands/migrate-observations.d.ts +19 -0
  33. package/dist/commands/migrate-observations.js +13 -0
  34. package/dist/commands/observe.js +5 -2
  35. package/dist/commands/rebuild.d.ts +11 -0
  36. package/dist/commands/rebuild.js +12 -0
  37. package/dist/commands/reflect.d.ts +11 -0
  38. package/dist/commands/reflect.js +13 -0
  39. package/dist/commands/replay.d.ts +16 -0
  40. package/dist/commands/replay.js +14 -0
  41. package/dist/commands/setup.js +2 -2
  42. package/dist/commands/sleep.d.ts +1 -0
  43. package/dist/commands/sleep.js +29 -6
  44. package/dist/commands/status.js +6 -6
  45. package/dist/commands/sync-bd.d.ts +10 -0
  46. package/dist/commands/sync-bd.js +9 -0
  47. package/dist/commands/wake.js +53 -35
  48. package/dist/{context-COo8oq1k.d.ts → context-BUGaWpyL.d.ts} +1 -0
  49. package/dist/index.d.ts +55 -20
  50. package/dist/index.js +67 -16
  51. package/hooks/clawvault/HOOK.md +3 -2
  52. package/hooks/clawvault/handler.js +51 -0
  53. package/hooks/clawvault/handler.test.js +20 -0
  54. package/package.json +2 -2
@@ -0,0 +1,182 @@
1
+ // src/observer/session-parser.ts
2
+ import * as fs from "fs";
3
+ import * as path from "path";
4
// Number of leading non-empty lines sampled when sniffing a session's format.
var JSONL_SAMPLE_LIMIT = 20;
// Line starts that suggest Markdown structure: headings, list bullets, blockquotes.
var MARKDOWN_SIGNAL_RE = /^(#{1,6}\s|[-*+]\s|>\s)/;
// Inline Markdown markers: links, or emphasis/code/strikethrough characters.
var MARKDOWN_INLINE_RE = /(\[[^\]]+\]\([^)]+\)|[*_`~])/;
7
// Collapse all whitespace runs to single spaces and trim the ends.
function normalizeText(value) {
  return value.replace(/\s+/g, " ").trim();
}
/**
 * Extract readable text from a message `content` value.
 * - strings are normalized directly;
 * - arrays are flattened recursively, joining non-empty parts with spaces;
 * - objects contribute their string `text` field, a string `content`
 *   field, or (generalized) an array `content` field, which is flattened
 *   recursively — previously such nested parts were silently dropped;
 * - anything else yields "".
 */
function extractText(value) {
  if (typeof value === "string") {
    return normalizeText(value);
  }
  if (Array.isArray(value)) {
    const parts = [];
    for (const part of value) {
      const extracted = extractText(part);
      if (extracted) {
        parts.push(extracted);
      }
    }
    return normalizeText(parts.join(" "));
  }
  if (!value || typeof value !== "object") {
    return "";
  }
  const record = value;
  if (typeof record.text === "string") {
    return normalizeText(record.text);
  }
  if (typeof record.content === "string") {
    return normalizeText(record.content);
  }
  // Generalization: nested content parts (e.g. [{ text: ... }, ...]).
  if (Array.isArray(record.content)) {
    return extractText(record.content);
  }
  return "";
}
36
// Lower-case and trim a role value; non-strings (and blanks) become "".
function normalizeRole(role) {
  if (typeof role !== "string") {
    return "";
  }
  return role.trim().toLowerCase();
}
46
// Heuristic: does a parsed JSON value look like a chat message entry?
// Accepts either a direct { role, content } record or a wrapper of the
// form { type: "message", message: <object> }.
function isLikelyJsonMessage(value) {
  if (typeof value !== "object" || value === null || Array.isArray(value)) {
    return false;
  }
  const candidate = value;
  if ("role" in candidate && "content" in candidate) {
    return true;
  }
  const nested = candidate.message;
  return candidate.type === "message" && nested !== null && typeof nested === "object";
}
59
/**
 * Parse one JSONL line into a "role: content" string, or "" when the
 * line is not JSON or does not look like a message.
 * Consolidated: the original duplicated the role/content formatting in
 * both the direct-record and wrapped-message branches; both now funnel
 * through one code path (behavior unchanged).
 */
function parseJsonLine(line) {
  let parsed;
  try {
    parsed = JSON.parse(line);
  } catch {
    return "";
  }
  if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
    return "";
  }
  // Pick the record carrying role/content: the entry itself, or its
  // nested message for { type: "message", message: {...} } wrappers.
  let record = null;
  if ("role" in parsed && "content" in parsed) {
    record = parsed;
  } else if (parsed.type === "message" && parsed.message && typeof parsed.message === "object") {
    record = parsed.message;
  }
  if (!record) {
    return "";
  }
  const content = extractText(record.content);
  if (!content) {
    return "";
  }
  const role = normalizeRole(record.role);
  return role ? `${role}: ${content}` : content;
}
85
// Parse a JSONL transcript body into "role: content" strings, skipping
// blank lines and lines that do not decode into messages.
function parseJsonLines(raw) {
  return raw
    .split(/\r?\n/)
    .map((line) => line.trim())
    .filter(Boolean)
    .map((line) => parseJsonLine(line))
    .filter(Boolean);
}
97
// Remove inline Markdown decoration: links collapse to their label text,
// emphasis/code/strike markers and angle-bracket tags are dropped.
function stripMarkdownSyntax(text) {
  const withoutLinks = text.replace(/\[([^\]]+)\]\([^)]+\)/g, "$1");
  const withoutEmphasis = withoutLinks.replace(/[*_`~]/g, "");
  const withoutTags = withoutEmphasis.replace(/<[^>]+>/g, "");
  return normalizeText(withoutTags);
}
102
// Strip a leading blockquote marker, then a bullet, then a heading
// prefix (same order as before), and finally inline Markdown syntax.
function normalizeMarkdownLine(line) {
  const unquoted = line.replace(/^>\s*/, "");
  const unbulleted = unquoted.replace(/^[-*+]\s+/, "");
  const unheaded = unbulleted.replace(/^#{1,6}\s+/, "");
  return stripMarkdownSyntax(unheaded);
}
107
// Convert a Markdown transcript into message strings. Fenced code blocks
// are dropped; each blank-line-separated paragraph becomes one message,
// and a leading "user/assistant/system/tool" label is normalized into a
// "role: content" prefix when present.
function parseMarkdown(raw) {
  const withoutCodeBlocks = raw.replace(/```[\s\S]*?```/g, " ");
  const blocks = withoutCodeBlocks
    .split(/\r?\n\s*\r?\n/)
    .map((block) => block.trim())
    .filter(Boolean);
  const messages = [];
  for (const block of blocks) {
    const cleanedLines = block
      .split(/\r?\n/)
      .map((line) => normalizeMarkdownLine(line))
      .filter(Boolean);
    if (cleanedLines.length === 0) {
      continue;
    }
    const joined = stripMarkdownSyntax(cleanedLines.join(" "));
    if (!joined) {
      continue;
    }
    const roleMatch = /^(user|assistant|system|tool)\s*:?\s*(.+)$/i.exec(joined);
    if (!roleMatch) {
      messages.push(joined);
      continue;
    }
    const role = normalizeRole(roleMatch[1]);
    const content = normalizeText(roleMatch[2]);
    if (content) {
      messages.push(`${role}: ${content}`);
    }
  }
  return messages;
}
131
// Treat every non-empty line of a plain-text transcript as one message.
function parsePlainText(raw) {
  const messages = [];
  for (const line of raw.split(/\r?\n/)) {
    const normalized = normalizeText(line);
    if (normalized) {
      messages.push(normalized);
    }
  }
  return messages;
}
134
// Sniff whether a transcript is "jsonl", "markdown", or "plain".
// A sample of the first JSONL_SAMPLE_LIMIT non-empty lines is inspected:
// JSONL wins when at least 60% (min 1) parse as message-like JSON;
// Markdown wins on a .md/.markdown extension or when at least 40%
// (min 2) of the sample carry Markdown syntax; otherwise plain.
function detectSessionFormat(raw, filePath) {
  const nonEmptyLines = raw
    .split(/\r?\n/)
    .map((line) => line.trim())
    .filter(Boolean);
  if (nonEmptyLines.length === 0) {
    return "plain";
  }
  const sample = nonEmptyLines.slice(0, JSONL_SAMPLE_LIMIT);
  let jsonHits = 0;
  for (const line of sample) {
    try {
      if (isLikelyJsonMessage(JSON.parse(line))) {
        jsonHits += 1;
      }
    } catch {
      // not JSON — contributes nothing to the count
    }
  }
  if (jsonHits >= Math.max(1, Math.ceil(sample.length * 0.6))) {
    return "jsonl";
  }
  const ext = path.extname(filePath).toLowerCase();
  if (ext === ".md" || ext === ".markdown") {
    return "markdown";
  }
  let markdownSignals = 0;
  for (const line of sample) {
    if (MARKDOWN_SIGNAL_RE.test(line) || MARKDOWN_INLINE_RE.test(line)) {
      markdownSignals += 1;
    }
  }
  if (markdownSignals >= Math.max(2, Math.ceil(sample.length * 0.4))) {
    return "markdown";
  }
  return "plain";
}
161
// Read a session transcript from disk, parse it with the detected
// format, and fall back to plain-text parsing when the format-specific
// parser yields nothing.
function parseSessionFile(filePath) {
  const resolved = path.resolve(filePath);
  const raw = fs.readFileSync(resolved, "utf-8");
  const format = detectSessionFormat(raw, resolved);
  let parsed = [];
  if (format === "jsonl") {
    parsed = parseJsonLines(raw);
  } else if (format === "markdown") {
    parsed = parseMarkdown(raw);
  }
  return parsed.length > 0 ? parsed : parsePlainText(raw);
}
179
+
180
+ export {
181
+ parseSessionFile
182
+ };
@@ -0,0 +1,45 @@
1
+ import {
2
+ archiveObservations
3
+ } from "./chunk-MQUJNOHK.js";
4
+ import {
5
+ resolveVaultPath
6
+ } from "./chunk-MXSSG3QU.js";
7
+
8
+ // src/commands/archive.ts
9
/**
 * Parse a CLI option value as a strictly positive integer.
 * Fix: the previous `Number.parseInt` silently truncated inputs such as
 * "5.9" (→ 5) and "12abc" (→ 12); those are now rejected.
 * @param {string} raw - raw option value
 * @param {string} label - option name used in the error message
 * @returns {number} the parsed positive integer
 * @throws {Error} when raw is not a whole number > 0
 */
function parsePositiveInteger(raw, label) {
  const normalized = String(raw).trim();
  if (!/^\d+$/.test(normalized)) {
    throw new Error(`Invalid ${label}: ${raw}`);
  }
  const parsed = Number.parseInt(normalized, 10);
  if (parsed <= 0) {
    throw new Error(`Invalid ${label}: ${raw}`);
  }
  return parsed;
}
16
/**
 * Archive observations older than the configured cutoff.
 * Resolves the vault, delegates to archiveObservations, and reports the
 * outcome (nothing matched / dry-run count / archived count) on stdout.
 */
async function archiveCommand(options) {
  const vaultPath = resolveVaultPath({ explicitPath: options.vaultPath });
  const result = archiveObservations(vaultPath, {
    olderThanDays: options.olderThan,
    dryRun: options.dryRun
  });
  const { archived, dryRun } = result;
  if (archived === 0) {
    console.log("No observations matched archive criteria.");
  } else if (dryRun) {
    console.log(`Dry run: ${archived} observation file(s) would be archived.`);
  } else {
    console.log(`Archived ${archived} observation file(s).`);
  }
}
32
// Wire the `archive` subcommand onto the commander program.
function registerArchiveCommand(program) {
  program
    .command("archive")
    .description("Archive old observations into ledger/archive")
    .option("--older-than <days>", "Archive observations older than this many days", "14")
    .option("--dry-run", "Show archive candidates without moving files")
    .option("-v, --vault <path>", "Vault path")
    .action(async (rawOptions) => {
      await archiveCommand({
        vaultPath: rawOptions.vault,
        olderThan: parsePositiveInteger(rawOptions.olderThan, "older-than"),
        dryRun: rawOptions.dryRun
      });
    });
}
41
+
42
+ export {
43
+ archiveCommand,
44
+ registerArchiveCommand
45
+ };
@@ -0,0 +1,111 @@
1
+ import {
2
+ DATE_HEADING_RE,
3
+ parseObservationLine,
4
+ renderScoredObservationLine
5
+ } from "./chunk-K6XHCUFL.js";
6
+ import {
7
+ listObservationFiles
8
+ } from "./chunk-Z2XBWN7A.js";
9
+ import {
10
+ resolveVaultPath
11
+ } from "./chunk-MXSSG3QU.js";
12
+
13
+ // src/commands/migrate-observations.ts
14
+ import * as fs from "fs";
15
// Derive the emoji-backup path for an observation file, keeping a final
// ".md" extension at the end of the backup name when present.
function toBackupPath(filePath) {
  const hasMarkdownExt = filePath.toLowerCase().endsWith(".md");
  if (!hasMarkdownExt) {
    return `${filePath}.emoji-backup`;
  }
  const base = filePath.slice(0, -3);
  return `${base}.emoji-backup.md`;
}
21
// Rewrite legacy emoji-format observation lines into scored format.
// Tracks the most recent date heading (DATE_HEADING_RE) so each line can
// be parsed with its date context; lines outside a dated section, and
// lines that are not emoji-format observations, are passed through.
// Returns the rewritten markdown plus whether anything changed.
function convertObservationMarkdown(markdown) {
  let currentDate = "";
  let changed = false;
  const rewritten = markdown.split(/\r?\n/).map((line) => {
    const heading = line.match(DATE_HEADING_RE);
    if (heading) {
      currentDate = heading[1];
      return line;
    }
    if (!currentDate) {
      return line;
    }
    const parsed = parseObservationLine(line.trim(), currentDate);
    if (!parsed || parsed.format !== "emoji") {
      return line;
    }
    changed = true;
    const { type, confidence, importance, content } = parsed;
    return renderScoredObservationLine({ type, confidence, importance, content });
  });
  return { converted: rewritten.join("\n"), changed };
}
51
/**
 * Convert emoji-format observation files to the scored format.
 * Scans ledger and legacy observation files (archive excluded, no
 * date-dedupe), writes a one-time .emoji-backup copy before the first
 * rewrite of each file, and performs no writes when dryRun is set.
 * Returns { scanned, migrated, backups, dryRun } counters.
 */
function migrateObservations(vaultPath, options = {}) {
  const dryRun = options.dryRun ?? false;
  const files = listObservationFiles(vaultPath, {
    includeLegacy: true,
    includeArchive: false,
    dedupeByDate: false
  });
  let migrated = 0;
  let backups = 0;
  for (const entry of files) {
    const original = fs.readFileSync(entry.path, "utf-8");
    const { converted, changed } = convertObservationMarkdown(original);
    if (!changed) {
      continue;
    }
    migrated += 1;
    if (dryRun) {
      continue;
    }
    const backupPath = toBackupPath(entry.path);
    if (!fs.existsSync(backupPath)) {
      // Back up only once — never clobber an earlier backup.
      fs.copyFileSync(entry.path, backupPath);
      backups += 1;
    }
    fs.writeFileSync(entry.path, `${converted.trim()}\n`, "utf-8");
  }
  return { scanned: files.length, migrated, backups, dryRun };
}
85
// CLI entry: run the migration and summarize the result on stdout.
async function migrateObservationsCommand(options) {
  const vaultPath = resolveVaultPath({ explicitPath: options.vaultPath });
  const { migrated, backups, dryRun } = migrateObservations(vaultPath, { dryRun: options.dryRun });
  if (migrated === 0) {
    console.log("No emoji observations found for migration.");
  } else if (dryRun) {
    console.log(`Dry run: ${migrated} file(s) would be migrated.`);
  } else {
    console.log(`Migrated ${migrated} file(s); created ${backups} backup(s).`);
  }
}
98
// Wire the `migrate-observations` subcommand onto the commander program.
function registerMigrateObservationsCommand(program) {
  program
    .command("migrate-observations")
    .description("Convert legacy emoji observations to scored format with backups")
    .option("--dry-run", "Preview migration without writing files")
    .option("-v, --vault <path>", "Vault path")
    .action(async (rawOptions) => {
      await migrateObservationsCommand({
        vaultPath: rawOptions.vault,
        dryRun: rawOptions.dryRun
      });
    });
}
106
+
107
+ export {
108
+ migrateObservations,
109
+ migrateObservationsCommand,
110
+ registerMigrateObservationsCommand
111
+ };
@@ -0,0 +1,247 @@
1
+ // src/lib/ledger.ts
2
+ import * as fs from "fs";
3
+ import * as path from "path";
4
// Patterns for the date-shaped path components used across the ledger tree.
var DATE_RE = /^\d{4}-\d{2}-\d{2}$/;
var YEAR_RE = /^\d{4}$/;
var MONTH_RE = /^(0[1-9]|1[0-2])$/;
var DAY_FILE_RE = /^(0[1-9]|[12]\d|3[01])\.md$/;
var RAW_DAY_FILE_RE = /^(0[1-9]|[12]\d|3[01])\.jsonl$/;
// Accept a Date or a "YYYY-MM-DD" string and return the canonical
// "YYYY-MM-DD" key; malformed string keys throw.
function normalizeDateKey(date) {
  if (typeof date !== "string") {
    return date.toISOString().slice(0, 10);
  }
  if (!DATE_RE.test(date)) {
    throw new Error(`Invalid date key: ${date}`);
  }
  return date;
}
18
// Create a directory (and any missing parents); no-op if it already exists.
function ensureDir(dirPath) {
  fs.mkdirSync(dirPath, { recursive: true });
}
21
/**
 * Walk a YYYY/MM/DD.<ext> tree under rootPath and collect every
 * well-formed day file. Missing root yields [].
 * @param rootPath directory containing 4-digit year subdirectories
 * @param extension ".md" or anything else (treated as ".jsonl") — selects
 *        which day-file pattern applies
 * @returns array of { date: "YYYY-MM-DD", absolutePath } (unsorted)
 */
function walkThreeLevelDateTree(rootPath, extension) {
  if (!fs.existsSync(rootPath)) {
    return [];
  }
  const results = [];
  for (const yearEntry of fs.readdirSync(rootPath, { withFileTypes: true })) {
    // Only 4-digit year directories are descended into.
    if (!yearEntry.isDirectory() || !YEAR_RE.test(yearEntry.name)) continue;
    const yearDir = path.join(rootPath, yearEntry.name);
    for (const monthEntry of fs.readdirSync(yearDir, { withFileTypes: true })) {
      if (!monthEntry.isDirectory() || !MONTH_RE.test(monthEntry.name)) continue;
      const monthDir = path.join(yearDir, monthEntry.name);
      for (const dayEntry of fs.readdirSync(monthDir, { withFileTypes: true })) {
        if (!dayEntry.isFile()) continue;
        const matches = extension === ".md" ? DAY_FILE_RE.test(dayEntry.name) : RAW_DAY_FILE_RE.test(dayEntry.name);
        if (!matches) continue;
        // Strip the extension to recover the 2-digit day component.
        const day = dayEntry.name.slice(0, extension.length * -1);
        const date = `${yearEntry.name}-${monthEntry.name}-${day}`;
        // Re-validate the assembled key as a final guard.
        if (!DATE_RE.test(date)) continue;
        results.push({
          date,
          absolutePath: path.join(monthDir, dayEntry.name)
        });
      }
    }
  }
  return results;
}
48
// Inclusive lexicographic range check on "YYYY-MM-DD" keys; an absent
// bound leaves that side of the range open.
function inDateRange(date, fromDate, toDate) {
  const afterStart = !fromDate || date >= fromDate;
  const beforeEnd = !toDate || date <= toDate;
  return afterStart && beforeEnd;
}
57
// Format a Date as its UTC "YYYY-MM-DD" key.
function toDateKey(date) {
  const [key] = date.toISOString().split("T");
  return key;
}
60
// Parse a "YYYY-MM-DD" key into a UTC-midnight Date, or null when the
// key is malformed or does not denote a real instant.
function parseDateKey(date) {
  if (!DATE_RE.test(date)) {
    return null;
  }
  const parsed = new Date(`${date}T00:00:00.000Z`);
  if (Number.isNaN(parsed.getTime())) {
    return null;
  }
  return parsed;
}
67
// Root of the ledger tree: <vault>/ledger.
function getLedgerRoot(vaultPath) {
  return path.join(path.resolve(vaultPath), "ledger");
}
// Raw transcript storage: <vault>/ledger/raw.
function getRawRoot(vaultPath) {
  return path.join(getLedgerRoot(vaultPath), "raw");
}
// Per-source raw transcripts: <vault>/ledger/raw/<source>.
function getRawSourceDir(vaultPath, source) {
  return path.join(getRawRoot(vaultPath), source);
}
// Observations: <vault>/ledger/observations.
function getObservationsRoot(vaultPath) {
  return path.join(getLedgerRoot(vaultPath), "observations");
}
// Reflections: <vault>/ledger/reflections.
function getReflectionsRoot(vaultPath) {
  return path.join(getLedgerRoot(vaultPath), "reflections");
}
// Archived observations: <vault>/ledger/archive/observations.
function getArchiveObservationsRoot(vaultPath) {
  return path.join(getLedgerRoot(vaultPath), "archive", "observations");
}
// Pre-ledger layout: <vault>/observations (flat, date-named files).
function getLegacyObservationsRoot(vaultPath) {
  return path.join(path.resolve(vaultPath), "observations");
}
88
// <vault>/ledger/observations/YYYY/MM/DD.md for the given date.
function getObservationPath(vaultPath, date) {
  const [year, month, day] = normalizeDateKey(date).split("-");
  return path.join(getObservationsRoot(vaultPath), year, month, `${day}.md`);
}
// <vault>/ledger/archive/observations/YYYY/MM/DD.md for the given date.
function getArchiveObservationPath(vaultPath, date) {
  const [year, month, day] = normalizeDateKey(date).split("-");
  return path.join(getArchiveObservationsRoot(vaultPath), year, month, `${day}.md`);
}
// Legacy flat layout: <vault>/observations/YYYY-MM-DD.md.
function getLegacyObservationPath(vaultPath, date) {
  return path.join(getLegacyObservationsRoot(vaultPath), `${normalizeDateKey(date)}.md`);
}
// <vault>/ledger/raw/<source>/YYYY/MM/DD.jsonl for the given date.
function getRawTranscriptPath(vaultPath, source, date) {
  const [year, month, day] = normalizeDateKey(date).split("-");
  return path.join(getRawSourceDir(vaultPath, source), year, month, `${day}.jsonl`);
}
107
// Create the full ledger directory skeleton, including one raw transcript
// directory per known source. Idempotent — safe to call repeatedly.
function ensureLedgerStructure(vaultPath) {
  ensureDir(getLedgerRoot(vaultPath));
  const rawRoot = getRawRoot(vaultPath);
  ensureDir(rawRoot);
  const sources = ["openclaw", "chatgpt", "claude", "opencode"];
  for (const source of sources) {
    ensureDir(path.join(rawRoot, source));
  }
  ensureDir(getObservationsRoot(vaultPath));
  ensureDir(getReflectionsRoot(vaultPath));
  ensureDir(getArchiveObservationsRoot(vaultPath));
}
119
// Shared walk/filter/sort for a dated observation tree. Consolidates the
// previously duplicated bodies of listLedgerObservationFiles and
// listArchiveObservationFiles (behavior unchanged).
function collectObservationFiles(root, location, options) {
  return walkThreeLevelDateTree(root, ".md")
    .filter((entry) => inDateRange(entry.date, options.fromDate, options.toDate))
    .map((entry) => ({
      date: entry.date,
      path: entry.absolutePath,
      location
    }))
    .sort((left, right) => left.date.localeCompare(right.date));
}
// List ledger observation files in the optional date range, sorted by date.
function listLedgerObservationFiles(vaultPath, options = {}) {
  return collectObservationFiles(getObservationsRoot(vaultPath), "ledger", options);
}
// List archived observation files in the optional date range, sorted by date.
function listArchiveObservationFiles(vaultPath, options = {}) {
  return collectObservationFiles(getArchiveObservationsRoot(vaultPath), "archive", options);
}
133
// List legacy flat-layout observation files (<vault>/observations/
// YYYY-MM-DD.md) within the optional date range, sorted by date.
function listLegacyObservationFiles(vaultPath, options = {}) {
  const legacyRoot = getLegacyObservationsRoot(vaultPath);
  if (!fs.existsSync(legacyRoot)) {
    return [];
  }
  const entries = [];
  for (const entry of fs.readdirSync(legacyRoot, { withFileTypes: true })) {
    if (!entry.isFile() || !entry.name.endsWith(".md")) continue;
    const date = entry.name.replace(/\.md$/, "");
    if (!DATE_RE.test(date)) continue;
    if (!inDateRange(date, options.fromDate, options.toDate)) continue;
    entries.push({
      date,
      path: path.join(legacyRoot, entry.name),
      location: "legacy"
    });
  }
  return entries.sort((left, right) => left.date.localeCompare(right.date));
}
147
/**
 * Merge ledger, legacy, and archive observation listings.
 * Options: includeLegacy (default true), includeArchive (default false),
 * dedupeByDate (default true). When deduping, entries sharing a date are
 * resolved by location priority: ledger > legacy > archive.
 * Result is always sorted by date ascending.
 */
function listObservationFiles(vaultPath, options = {}) {
  const includeLegacy = options.includeLegacy ?? true;
  const includeArchive = options.includeArchive ?? false;
  const dedupeByDate = options.dedupeByDate ?? true;
  const files = [...listLedgerObservationFiles(vaultPath, options)];
  if (includeLegacy) {
    files.push(...listLegacyObservationFiles(vaultPath, options));
  }
  if (includeArchive) {
    files.push(...listArchiveObservationFiles(vaultPath, options));
  }
  const byDateAscending = (left, right) => left.date.localeCompare(right.date);
  if (!dedupeByDate) {
    return files.sort(byDateAscending);
  }
  const rank = { ledger: 3, legacy: 2, archive: 1 };
  const byDate = new Map();
  for (const file of files) {
    const current = byDate.get(file.date);
    if (!current || rank[file.location] > rank[current.location]) {
      byDate.set(file.date, file);
    }
  }
  return [...byDate.values()].sort(byDateAscending);
}
173
/**
 * List raw transcript files (<vault>/ledger/raw/<source>/YYYY/MM/DD.jsonl).
 * @param options.source restrict to one source; otherwise every
 *        subdirectory of ledger/raw is scanned
 * @param options.fromDate / options.toDate inclusive "YYYY-MM-DD" bounds
 * @returns { source, date, path } entries sorted by date, then by path
 *          for equal dates
 */
function listRawTranscriptFiles(vaultPath, options = {}) {
  const rawRoot = getRawRoot(vaultPath);
  if (!fs.existsSync(rawRoot)) {
    return [];
  }
  // Either the single requested source, or every source directory present.
  const sources = options.source ? [options.source] : fs.readdirSync(rawRoot, { withFileTypes: true }).filter((entry) => entry.isDirectory()).map((entry) => entry.name);
  const files = [];
  for (const source of sources) {
    const sourceRoot = path.join(rawRoot, source);
    const datedFiles = walkThreeLevelDateTree(sourceRoot, ".jsonl");
    for (const entry of datedFiles) {
      if (!inDateRange(entry.date, options.fromDate, options.toDate)) {
        continue;
      }
      files.push({
        source,
        date: entry.date,
        path: entry.absolutePath
      });
    }
  }
  return files.sort(
    (left, right) => left.date === right.date ? left.path.localeCompare(right.path) : left.date.localeCompare(right.date)
  );
}
198
// Monday (UTC midnight) of the ISO week containing `date`.
function getIsoWeekMonday(date) {
  const monday = new Date(Date.UTC(date.getUTCFullYear(), date.getUTCMonth(), date.getUTCDate()));
  const isoDay = monday.getUTCDay() || 7; // Sunday counts as 7 per ISO-8601
  monday.setUTCDate(monday.getUTCDate() - (isoDay - 1));
  return monday;
}
// ISO-8601 week number for a date. The week's year comes from its
// Thursday; week 1 is the week containing January 4th.
function getIsoWeek(date) {
  const weekMonday = getIsoWeekMonday(date);
  const weekThursday = new Date(weekMonday);
  weekThursday.setUTCDate(weekMonday.getUTCDate() + 3);
  const isoYear = weekThursday.getUTCFullYear();
  const firstWeekMonday = getIsoWeekMonday(new Date(Date.UTC(isoYear, 0, 4)));
  const weekMs = 7 * 24 * 60 * 60 * 1e3;
  const week = Math.floor((weekMonday.getTime() - firstWeekMonday.getTime()) / weekMs) + 1;
  return { year: isoYear, week };
}
// "YYYY-Www" key for a Date or an { year, week } record.
function formatIsoWeekKey(input) {
  const { year, week } = input instanceof Date ? getIsoWeek(input) : input;
  return `${year}-W${String(week).padStart(2, "0")}`;
}
// UTC Monday..Sunday bounds of a given ISO week.
function getIsoWeekRange(year, week) {
  const firstWeekMonday = getIsoWeekMonday(new Date(Date.UTC(year, 0, 4)));
  const start = new Date(firstWeekMonday);
  start.setUTCDate(firstWeekMonday.getUTCDate() + (week - 1) * 7);
  const end = new Date(start);
  end.setUTCDate(start.getUTCDate() + 6);
  return { start, end };
}
228
// Ensure the directory containing filePath exists before writing to it.
function ensureParentDir(filePath) {
  ensureDir(path.dirname(filePath));
}
231
+
232
+ export {
233
+ toDateKey,
234
+ parseDateKey,
235
+ getReflectionsRoot,
236
+ getObservationPath,
237
+ getArchiveObservationPath,
238
+ getLegacyObservationPath,
239
+ getRawTranscriptPath,
240
+ ensureLedgerStructure,
241
+ listObservationFiles,
242
+ listRawTranscriptFiles,
243
+ getIsoWeek,
244
+ formatIsoWeekKey,
245
+ getIsoWeekRange,
246
+ ensureParentDir
247
+ };
@@ -346,7 +346,7 @@ async function buildOrUpdateMemoryGraphIndex(vaultPathInput, options = {}) {
346
346
  const existing = options.forceFull ? null : loadMemoryGraphIndex(vaultPath);
347
347
  const markdownFiles = await glob("**/*.md", {
348
348
  cwd: vaultPath,
349
- ignore: ["**/node_modules/**", "**/.git/**", "**/.obsidian/**", "**/.trash/**"]
349
+ ignore: ["**/node_modules/**", "**/.git/**", "**/.obsidian/**", "**/.trash/**", "**/ledger/archive/**"]
350
350
  });
351
351
  const normalizedFiles = markdownFiles.map(normalizeRelativePath).sort((a, b) => a.localeCompare(b));
352
352
  const registry = buildNoteRegistry(normalizedFiles);
@@ -0,0 +1,11 @@
1
+ import { Command } from 'commander';
2
+
3
/** Options accepted by the `archive` CLI command. */
interface ArchiveCommandOptions {
  /** Explicit vault path; when omitted the vault is resolved automatically. */
  vaultPath?: string;
  /** Archive observations older than this many days (CLI default: 14). */
  olderThan?: number;
  /** When true, report archive candidates without moving files. */
  dryRun?: boolean;
}
/** Archive old observations into ledger/archive and report the outcome. */
declare function archiveCommand(options: ArchiveCommandOptions): Promise<void>;
/** Register the `archive` subcommand on the given commander program. */
declare function registerArchiveCommand(program: Command): void;
10
+
11
+ export { type ArchiveCommandOptions, archiveCommand, registerArchiveCommand };
@@ -0,0 +1,11 @@
1
+ import {
2
+ archiveCommand,
3
+ registerArchiveCommand
4
+ } from "../chunk-VR5NE7PZ.js";
5
+ import "../chunk-MQUJNOHK.js";
6
+ import "../chunk-Z2XBWN7A.js";
7
+ import "../chunk-MXSSG3QU.js";
8
+ export {
9
+ archiveCommand,
10
+ registerArchiveCommand
11
+ };
@@ -1,2 +1,2 @@
1
1
  import 'commander';
2
- export { C as ContextEntry, a as ContextFormat, b as ContextOptions, c as ContextProfile, e as ContextProfileOption, f as ContextResult, g as buildContext, h as contextCommand, i as formatContextMarkdown, r as registerContextCommand } from '../context-COo8oq1k.js';
2
+ export { C as ContextEntry, a as ContextFormat, b as ContextOptions, c as ContextProfile, e as ContextProfileOption, f as ContextResult, g as buildContext, h as contextCommand, i as formatContextMarkdown, r as registerContextCommand } from '../context-BUGaWpyL.js';
@@ -3,10 +3,12 @@ import {
3
3
  contextCommand,
4
4
  formatContextMarkdown,
5
5
  registerContextCommand
6
- } from "../chunk-TBVI4N53.js";
7
- import "../chunk-QFBKWDYR.js";
8
- import "../chunk-MIIXBNO3.js";
9
- import "../chunk-O5V7SD5C.js";
6
+ } from "../chunk-6RIHODNR.js";
7
+ import "../chunk-K6XHCUFL.js";
8
+ import "../chunk-Z2XBWN7A.js";
9
+ import "../chunk-IFGDPIFI.js";
10
+ import "../chunk-DHBDH4DN.js";
11
+ import "../chunk-ZZA73MFY.js";
10
12
  export {
11
13
  buildContext,
12
14
  contextCommand,
@@ -1,3 +1,6 @@
1
+ import {
2
+ scanVaultLinks
3
+ } from "../chunk-4VQTUVH7.js";
1
4
  import {
2
5
  formatAge
3
6
  } from "../chunk-7ZRP733D.js";
@@ -7,17 +10,14 @@ import {
7
10
  import {
8
11
  ClawVault,
9
12
  findVault
10
- } from "../chunk-QFBKWDYR.js";
13
+ } from "../chunk-IFGDPIFI.js";
11
14
  import {
12
15
  hasQmd
13
- } from "../chunk-MIIXBNO3.js";
14
- import {
15
- scanVaultLinks
16
- } from "../chunk-4VQTUVH7.js";
16
+ } from "../chunk-DHBDH4DN.js";
17
17
  import "../chunk-J7ZWCI2C.js";
18
18
  import {
19
19
  loadMemoryGraphIndex
20
- } from "../chunk-O5V7SD5C.js";
20
+ } from "../chunk-ZZA73MFY.js";
21
21
 
22
22
  // src/commands/doctor.ts
23
23
  import * as fs from "fs";
@@ -1,8 +1,8 @@
1
1
  import {
2
2
  graphCommand,
3
3
  graphSummary
4
- } from "../chunk-TXO34J3O.js";
5
- import "../chunk-O5V7SD5C.js";
4
+ } from "../chunk-H7JW4L7H.js";
5
+ import "../chunk-ZZA73MFY.js";
6
6
  import "../chunk-MXSSG3QU.js";
7
7
  export {
8
8
  graphCommand,
@@ -13,7 +13,7 @@ import {
13
13
  } from "../chunk-J7ZWCI2C.js";
14
14
  import {
15
15
  buildOrUpdateMemoryGraphIndex
16
- } from "../chunk-O5V7SD5C.js";
16
+ } from "../chunk-ZZA73MFY.js";
17
17
  import {
18
18
  resolveVaultPath
19
19
  } from "../chunk-MXSSG3QU.js";