@tekmidian/pai 0.7.0 → 0.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.mjs +1 -1
- package/dist/daemon/index.mjs +1 -1
- package/dist/{daemon-D3hYb5_C.mjs → daemon-DuGlDnV7.mjs} +861 -4
- package/dist/daemon-DuGlDnV7.mjs.map +1 -0
- package/dist/hooks/context-compression-hook.mjs +58 -22
- package/dist/hooks/context-compression-hook.mjs.map +2 -2
- package/dist/hooks/load-project-context.mjs +78 -27
- package/dist/hooks/load-project-context.mjs.map +3 -3
- package/dist/hooks/stop-hook.mjs +220 -125
- package/dist/hooks/stop-hook.mjs.map +3 -3
- package/dist/hooks/sync-todo-to-md.mjs.map +1 -1
- package/dist/skills/Reconstruct/SKILL.md +232 -0
- package/package.json +1 -1
- package/plugins/productivity/plugin.json +1 -1
- package/plugins/productivity/skills/Reconstruct/SKILL.md +232 -0
- package/src/hooks/ts/lib/project-utils/index.ts +1 -0
- package/src/hooks/ts/lib/project-utils/session-notes.ts +46 -5
- package/src/hooks/ts/lib/project-utils.ts +1 -0
- package/src/hooks/ts/pre-compact/context-compression-hook.ts +60 -37
- package/src/hooks/ts/session-start/load-project-context.ts +110 -28
- package/src/hooks/ts/stop/stop-hook.ts +259 -199
- package/dist/daemon-D3hYb5_C.mjs.map +0 -1
|
@@ -8,11 +8,15 @@ import { t as createStorageBackend } from "./factory-Ygqe_bVZ.mjs";
|
|
|
8
8
|
import { C as setStorageBackend, E as startTime, O as storageBackend, S as setStartTime, T as shutdownRequested, _ as setLastIndexTime, a as indexSchedulerTimer, b as setRegistryDb, c as lastVaultIndexTime, d as setDaemonConfig, f as setEmbedInProgress, g as setLastEmbedTime, h as setIndexSchedulerTimer, i as indexInProgress, k as vaultIndexInProgress, l as notificationConfig, m as setIndexInProgress, n as embedInProgress, o as lastEmbedTime, p as setEmbedSchedulerTimer, r as embedSchedulerTimer, s as lastIndexTime, t as daemonConfig, u as registryDb, v as setLastVaultIndexTime, w as setVaultIndexInProgress, x as setShutdownRequested, y as setNotificationConfig } from "./state-C6_vqz7w.mjs";
|
|
9
9
|
import { a as toolProjectDetect, c as toolProjectList, d as toolMemorySearch, i as toolSessionRoute, l as toolProjectTodo, n as toolRegistrySearch, o as toolProjectHealth, r as toolSessionList, s as toolProjectInfo, u as toolMemoryGet } from "./tools-DcaJlYDN.mjs";
|
|
10
10
|
import { t as detectTopicShift } from "./detector-jGBuYQJM.mjs";
|
|
11
|
-
import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
|
|
12
|
-
import { setPriority } from "node:os";
|
|
11
|
+
import { existsSync, mkdirSync, readFileSync, renameSync, statSync, unlinkSync, writeFileSync } from "node:fs";
|
|
12
|
+
import { homedir, setPriority } from "node:os";
|
|
13
|
+
import { basename, dirname, join } from "node:path";
|
|
13
14
|
import { randomUUID } from "node:crypto";
|
|
14
15
|
import { connect, createServer } from "node:net";
|
|
15
16
|
import { spawn } from "node:child_process";
|
|
17
|
+
import { existsSync as existsSync$1, mkdirSync as mkdirSync$1, readFileSync as readFileSync$1, readdirSync as readdirSync$1, renameSync as renameSync$1, writeFileSync as writeFileSync$1 } from "fs";
|
|
18
|
+
import { basename as basename$1, join as join$1, resolve as resolve$1 } from "path";
|
|
19
|
+
import { homedir as homedir$1 } from "os";
|
|
16
20
|
|
|
17
21
|
//#region src/notifications/config.ts
|
|
18
22
|
/**
|
|
@@ -734,6 +738,809 @@ async function dispatchTool(method, params) {
|
|
|
734
738
|
}
|
|
735
739
|
}
|
|
736
740
|
|
|
741
|
+
//#endregion
|
|
742
|
+
//#region src/daemon/work-queue.ts
|
|
743
|
+
/**
|
|
744
|
+
* work-queue.ts — Persistent work queue for the PAI Daemon
|
|
745
|
+
*
|
|
746
|
+
* Provides a durable, file-backed queue that survives daemon restarts.
|
|
747
|
+
* Items are processed sequentially to avoid concurrent writes to the same
|
|
748
|
+
* session note. Failed items are retried with exponential backoff.
|
|
749
|
+
*
|
|
750
|
+
* Queue file: ~/.config/pai/work-queue.json
|
|
751
|
+
* Written atomically (write temp → rename) to prevent corruption.
|
|
752
|
+
*/
|
|
753
|
+
/** Absolute path of the persisted queue file. */
const QUEUE_FILE = join(homedir(), ".config", "pai", "work-queue.json");
/** Hard cap on the number of items kept in the queue. */
const MAX_QUEUE_SIZE = 1000;
/** On-disk size threshold (1 MB) that triggers force-cleaning of completed items. */
const MAX_QUEUE_FILE_BYTES = 1024 * 1024;
/** Completed items are pruned after one hour. */
const COMPLETED_TTL_MS = 3600 * 1000;
/** Permanently-failed items are pruned after 24 hours. */
const FAILED_TTL_MS = 1440 * 60 * 1000;
/** Backoff delays in ms by attempt number (0-indexed). */
const BACKOFF_MS = [5000, 30000, 300000];
/** In-memory queue contents. */
let _queue = [];
/** True when _queue has changes not yet written to disk. */
let _dirty = false;
|
|
766
|
+
/**
 * Load the queue from disk into `_queue`. Call once at daemon startup.
 * Any item left in "processing" by a crash is re-queued as "pending".
 * A missing, unparsable, or non-array file results in an empty queue.
 */
function loadQueue() {
	if (!existsSync(QUEUE_FILE)) {
		_queue = [];
		return;
	}
	try {
		const parsed = JSON.parse(readFileSync(QUEUE_FILE, "utf-8"));
		if (!Array.isArray(parsed)) {
			process.stderr.write("[work-queue] Invalid queue file format — starting empty.\n");
			_queue = [];
			return;
		}
		// Recover items interrupted mid-processing by a restart.
		_queue = parsed.map((entry) => entry.status === "processing" ? { ...entry, status: "pending" } : entry);
		const stats = getStats();
		process.stderr.write(`[work-queue] Loaded ${_queue.length} items from disk (pending=${stats.pending}, failed=${stats.failed}).\n`);
	} catch (e) {
		process.stderr.write(`[work-queue] Could not load queue file: ${e}\n`);
		_queue = [];
	}
}
|
|
794
|
+
/**
 * Persist the queue to disk atomically (write temp file, then rename).
 * Clears `_dirty` on success; logs and leaves state unchanged on failure.
 */
function saveQueue() {
	const queueDir = dirname(QUEUE_FILE);
	if (!existsSync(queueDir)) mkdirSync(queueDir, { recursive: true });
	const tmpFile = `${QUEUE_FILE}.tmp`;
	try {
		// Write-then-rename keeps the on-disk file valid even if we crash mid-write.
		writeFileSync(tmpFile, JSON.stringify(_queue, null, 2), "utf-8");
		renameSync(tmpFile, QUEUE_FILE);
		_dirty = false;
	} catch (e) {
		process.stderr.write(`[work-queue] Could not persist queue: ${e}\n`);
	}
}
|
|
807
|
+
/** Persist the queue only when there are unsaved changes. */
function saveIfDirty() {
	if (!_dirty) return;
	saveQueue();
}
|
|
811
|
+
/**
 * Enforce the maximum queue size cap (MAX_QUEUE_SIZE).
 * Strategy: first drop the oldest completed items, then — if still over cap —
 * drop low-priority pending items (priority >= 4), least important first.
 *
 * FIX: the low-priority phase previously sorted ascending by priority, which
 * dropped the MOST important of the low-priority items (priority 4) before
 * the least important (priority 5+). It now sorts descending so the least
 * important (highest priority number), oldest items are sacrificed first.
 */
function enforceMaxSize() {
	if (_queue.length <= MAX_QUEUE_SIZE) return;
	const excess = _queue.length - MAX_QUEUE_SIZE;
	// Phase 1: oldest completed items are free to drop.
	const toDropCompleted = _queue.filter((i) => i.status === "completed").sort((a, b) => a.createdAt.localeCompare(b.createdAt)).slice(0, excess);
	const dropIds = new Set(toDropCompleted.map((i) => i.id));
	_queue = _queue.filter((i) => !dropIds.has(i.id));
	if (_queue.length <= MAX_QUEUE_SIZE) return;
	// Phase 2: drop pending low-priority work, least important first, oldest first.
	const remainingExcess = _queue.length - MAX_QUEUE_SIZE;
	const toDropLow = _queue.filter((i) => i.status === "pending" && i.priority >= 4).sort((a, b) => b.priority - a.priority || a.createdAt.localeCompare(b.createdAt)).slice(0, remainingExcess);
	const dropLowIds = new Set(toDropLow.map((i) => i.id));
	_queue = _queue.filter((i) => !dropLowIds.has(i.id));
	process.stderr.write(`[work-queue] Pruned queue to ${_queue.length} items (cap=${MAX_QUEUE_SIZE}).\n`);
}
|
|
828
|
+
/**
 * Add a new work item to the queue and persist immediately.
 * @param params - { type, payload, priority? (default 3), maxAttempts? (default 3) }
 * @returns the created WorkItem.
 */
function enqueue(params) {
	const createdItem = {
		id: randomUUID(),
		type: params.type,
		priority: params.priority ?? 3,
		payload: params.payload,
		status: "pending",
		createdAt: new Date().toISOString(),
		attempts: 0,
		maxAttempts: params.maxAttempts ?? 3
	};
	_queue.push(createdItem);
	enforceMaxSize();
	_dirty = true;
	saveIfDirty();
	process.stderr.write(`[work-queue] Enqueued ${createdItem.type} (id=${createdItem.id}, priority=${createdItem.priority}).\n`);
	return createdItem;
}
|
|
850
|
+
/**
 * Pick the next pending item that is ready to process (respects nextRetryAt).
 * Highest priority (lowest number) wins; ties broken by oldest createdAt.
 * Marks the chosen item "processing", bumps its attempt count, persists,
 * and returns it. Returns null when nothing is eligible.
 */
function dequeue() {
	const nowIso = new Date().toISOString();
	const isReady = (entry) => entry.status === "pending" && (!entry.nextRetryAt || entry.nextRetryAt <= nowIso);
	// Single O(n) scan instead of filter+sort; same winner as the sorted order.
	let best = null;
	for (const candidate of _queue) {
		if (!isReady(candidate)) continue;
		if (best === null
			|| candidate.priority < best.priority
			|| (candidate.priority === best.priority && candidate.createdAt.localeCompare(best.createdAt) < 0)) {
			best = candidate;
		}
	}
	if (best === null) return null;
	best.status = "processing";
	best.attempts += 1;
	_dirty = true;
	saveIfDirty();
	return best;
}
|
|
873
|
+
/**
 * Mark the item with the given id as completed and persist.
 * Records a completion timestamp and clears any previous error.
 * Unknown ids are ignored.
 */
function markCompleted(id) {
	const target = _queue.find((entry) => entry.id === id);
	if (!target) return;
	target.status = "completed";
	target.completedAt = new Date().toISOString();
	target.error = undefined;
	_dirty = true;
	saveIfDirty();
}
|
|
885
|
+
/**
 * Mark the item with the given id as failed and persist.
 * While attempts remain, the item is re-queued as "pending" with an
 * exponential-backoff nextRetryAt; otherwise it becomes "failed" for good.
 * Unknown ids are ignored.
 */
function markFailed(id, errorMsg) {
	const target = _queue.find((entry) => entry.id === id);
	if (!target) return;
	target.error = errorMsg;
	if (target.attempts < target.maxAttempts) {
		// attempts was already incremented by dequeue(), so attempt 1 → index 0.
		const delayMs = BACKOFF_MS[target.attempts - 1] ?? BACKOFF_MS[BACKOFF_MS.length - 1];
		target.status = "pending";
		target.nextRetryAt = new Date(Date.now() + delayMs).toISOString();
		process.stderr.write(`[work-queue] Item ${id} failed (attempt ${target.attempts}/${target.maxAttempts}), retry in ${delayMs / 1e3}s: ${errorMsg}\n`);
	} else {
		target.status = "failed";
		process.stderr.write(`[work-queue] Item ${id} exhausted retries (${target.maxAttempts} attempts): ${errorMsg}\n`);
	}
	_dirty = true;
	saveIfDirty();
}
|
|
906
|
+
/** Count queue items by status; `total` is the overall queue length. */
function getStats() {
	const counters = {
		pending: 0,
		processing: 0,
		completed: 0,
		failed: 0,
		total: _queue.length
	};
	_queue.forEach((entry) => {
		counters[entry.status] += 1;
	});
	return counters;
}
|
|
917
|
+
/**
 * Remove completed and permanently-failed items older than their TTL.
 * Also force-cleans ALL completed items if the on-disk queue file exceeds
 * MAX_QUEUE_FILE_BYTES (1 MB). Persists only when something changed.
 *
 * FIX: previously ended with `_dirty = removed > 0;`, which RESET the dirty
 * flag to false when nothing was removed — silently discarding any pending
 * unsaved changes made elsewhere. The flag is now only ever set, never cleared.
 */
function cleanup() {
	const now = Date.now();
	const before = _queue.length;
	let forceCleanCompleted = false;
	try {
		if (existsSync(QUEUE_FILE)) {
			const { size } = statSync(QUEUE_FILE);
			if (size > MAX_QUEUE_FILE_BYTES) {
				forceCleanCompleted = true;
				process.stderr.write(`[work-queue] Queue file exceeds 1 MB (${size} bytes) — force-cleaning completed items.\n`);
			}
		}
	} catch {}
	_queue = _queue.filter((item) => {
		if (item.status === "completed") {
			if (forceCleanCompleted) return false;
			// Items missing completedAt count as infinitely old and are pruned.
			return now - (item.completedAt ? new Date(item.completedAt).getTime() : 0) < COMPLETED_TTL_MS;
		}
		if (item.status === "failed") return now - new Date(item.createdAt).getTime() < FAILED_TTL_MS;
		return true;
	});
	const removed = before - _queue.length;
	const stats = getStats();
	if (removed > 0 || before === 0) process.stderr.write(`[work-queue] Cleanup: removed ${removed} items. Queue stats: pending=${stats.pending}, processing=${stats.processing}, completed=${stats.completed}, failed=${stats.failed}.\n`);
	if (removed > 0) _dirty = true;
	saveIfDirty();
}
|
|
948
|
+
|
|
949
|
+
//#endregion
|
|
950
|
+
//#region src/hooks/ts/lib/pai-paths.ts
|
|
951
|
+
/**
|
|
952
|
+
* PAI Path Resolution - Single Source of Truth
|
|
953
|
+
*
|
|
954
|
+
* This module provides consistent path resolution across all PAI hooks.
|
|
955
|
+
* It handles PAI_DIR detection whether set explicitly or defaulting to ~/.claude
|
|
956
|
+
*
|
|
957
|
+
* ALSO loads .env file from PAI_DIR so all hooks get environment variables
|
|
958
|
+
* without relying on Claude Code's settings.json injection.
|
|
959
|
+
*
|
|
960
|
+
* Usage in hooks:
|
|
961
|
+
* import { PAI_DIR, HOOKS_DIR, SKILLS_DIR } from './lib/pai-paths';
|
|
962
|
+
*/
|
|
963
|
+
/**
 * Load a .env file and inject its variables into process.env.
 * Must run BEFORE PAI_DIR resolution so .env can set PAI_DIR if needed.
 * Checks $PAI_DIR/.env first (only when PAI_DIR is set), then ~/.claude/.env;
 * only the first file found is loaded. Existing process.env keys win.
 *
 * FIX: when PAI_DIR was unset, `resolve("", ".env")` resolved to
 * "<cwd>/.env", silently injecting an unrelated .env from whatever directory
 * the daemon happened to start in. That candidate is now skipped entirely.
 */
function loadEnvFile() {
	const possiblePaths = [];
	if (process.env.PAI_DIR) possiblePaths.push(resolve$1(process.env.PAI_DIR, ".env"));
	possiblePaths.push(resolve$1(homedir$1(), ".claude", ".env"));
	for (const envPath of possiblePaths) if (existsSync$1(envPath)) try {
		const content = readFileSync$1(envPath, "utf-8");
		for (const line of content.split("\n")) {
			const trimmed = line.trim();
			if (!trimmed || trimmed.startsWith("#")) continue;
			const eqIndex = trimmed.indexOf("=");
			if (eqIndex > 0) {
				const key = trimmed.substring(0, eqIndex).trim();
				let value = trimmed.substring(eqIndex + 1).trim();
				// Strip one layer of matching single or double quotes.
				if (value.startsWith("\"") && value.endsWith("\"") || value.startsWith("'") && value.endsWith("'")) value = value.slice(1, -1);
				value = value.replace(/\$HOME/g, homedir$1());
				value = value.replace(/^~(?=\/|$)/, homedir$1());
				// Never override variables already present in the environment.
				if (process.env[key] === void 0) process.env[key] = value;
			}
		}
		break;
	} catch {}
}
loadEnvFile();
|
|
988
|
+
/**
 * Resolved PAI root directory.
 * Priority: the PAI_DIR environment variable when set, otherwise ~/.claude.
 */
const PAI_DIR = process.env.PAI_DIR ? resolve$1(process.env.PAI_DIR) : resolve$1(homedir$1(), ".claude");
/** Well-known subdirectories under the PAI root. */
const HOOKS_DIR = join$1(PAI_DIR, "Hooks");
const SKILLS_DIR = join$1(PAI_DIR, "Skills");
const AGENTS_DIR = join$1(PAI_DIR, "Agents");
const HISTORY_DIR = join$1(PAI_DIR, "History");
const COMMANDS_DIR = join$1(PAI_DIR, "Commands");
|
1003
|
+
/**
 * Validate the PAI directory structure on first import.
 * Fails fast (process.exit(1)) with a clear error if PAI is misconfigured:
 * either PAI_DIR itself or its Hooks subdirectory is missing.
 */
function validatePAIStructure() {
	const fail = (messages) => {
		for (const message of messages) console.error(message);
		process.exit(1);
	};
	if (!existsSync$1(PAI_DIR)) {
		fail([
			`PAI_DIR does not exist: ${PAI_DIR}`,
			` Expected ~/.claude or set PAI_DIR environment variable`
		]);
	}
	if (!existsSync$1(HOOKS_DIR)) {
		fail([
			`PAI hooks directory not found: ${HOOKS_DIR}`,
			` Your PAI_DIR may be misconfigured`,
			` Current PAI_DIR: ${PAI_DIR}`
		]);
	}
}
validatePAIStructure();
|
|
1021
|
+
|
|
1022
|
+
//#endregion
|
|
1023
|
+
//#region src/hooks/ts/lib/project-utils/paths.ts
|
|
1024
|
+
/**
|
|
1025
|
+
* Path utilities — encoding, Notes/Sessions directory discovery and creation.
|
|
1026
|
+
*/
|
|
1027
|
+
/** Central per-project data root under PAI_DIR (each project keyed by its encoded cwd). */
const PROJECTS_DIR = join$1(PAI_DIR, "projects");
|
|
1028
|
+
/**
 * Encode a path the same way Claude Code does:
 * '/', '.', and ' ' all become '-'.
 */
function encodePath(path) {
	// Single character class is equivalent to the three sequential replaces.
	return path.replace(/[/. ]/g, "-");
}
|
|
1037
|
+
/** Resolve the central project directory for a given working directory. */
function getProjectDir(cwd) {
	const encoded = encodePath(cwd);
	return join$1(PROJECTS_DIR, encoded);
}
|
|
1041
|
+
/** Resolve the central Notes directory for a project. */
function getNotesDir(cwd) {
	const projectDir = getProjectDir(cwd);
	return join$1(projectDir, "Notes");
}
|
|
1045
|
+
/**
 * Find the Notes directory for `cwd` — local candidates first, central fallback.
 * Does NOT create anything.
 * @returns { path, isLocal } where isLocal is false only for the central fallback.
 */
function findNotesDir(cwd) {
	// Running inside a directory literally named "notes"/"Notes" counts as local.
	if (basename$1(cwd).toLowerCase() === "notes" && existsSync$1(cwd)) return {
		path: cwd,
		isLocal: true
	};
	const candidates = [
		join$1(cwd, "Notes"),
		join$1(cwd, "notes"),
		join$1(cwd, ".claude", "Notes")
	];
	for (const candidate of candidates) {
		if (existsSync$1(candidate)) return {
			path: candidate,
			isLocal: true
		};
	}
	// Nothing local — fall back to the central per-project Notes directory.
	return {
		path: getNotesDir(cwd),
		isLocal: false
	};
}
|
|
1068
|
+
/** Resolve the sessions/ directory inside a project directory (does not create it). */
function getSessionsDirFromProjectDir(projectDir) {
	const SESSIONS_SUBDIR = "sessions";
	return join$1(projectDir, SESSIONS_SUBDIR);
}
|
|
1072
|
+
/** Ensure the sessions/ directory exists under the project dir; returns its path. */
function ensureSessionsDirFromProjectDir(projectDir) {
	const sessionsDir = getSessionsDirFromProjectDir(projectDir);
	if (existsSync$1(sessionsDir)) return sessionsDir;
	mkdirSync$1(sessionsDir, { recursive: true });
	console.error(`Created sessions directory: ${sessionsDir}`);
	return sessionsDir;
}
|
|
1081
|
+
/**
 * Move all .jsonl session files from the project root into sessions/.
 * @param excludeFile - filename to leave in place (e.g. the active session)
 * @param silent - suppress per-file logging when true
 * @returns the number of files actually moved; failures are logged and skipped.
 */
function moveSessionFilesToSessionsDir(projectDir, excludeFile, silent = false) {
	const sessionsDir = ensureSessionsDirFromProjectDir(projectDir);
	if (!existsSync$1(projectDir)) return 0;
	let moved = 0;
	for (const file of readdirSync$1(projectDir)) {
		if (!file.endsWith(".jsonl") || file === excludeFile) continue;
		try {
			renameSync$1(join$1(projectDir, file), join$1(sessionsDir, file));
			if (!silent) console.error(`Moved ${file} → sessions/`);
			moved++;
		} catch (error) {
			if (!silent) console.error(`Could not move ${file}: ${error}`);
		}
	}
	return moved;
}
|
|
1103
|
+
/** Find TODO.md — first existing local candidate wins, else the central path. */
function findTodoPath(cwd) {
	const candidates = [
		join$1(cwd, "TODO.md"),
		join$1(cwd, "notes", "TODO.md"),
		join$1(cwd, "Notes", "TODO.md"),
		join$1(cwd, ".claude", "TODO.md")
	];
	const local = candidates.find((candidate) => existsSync$1(candidate));
	return local ?? join$1(getNotesDir(cwd), "TODO.md");
}
|
|
1114
|
+
|
|
1115
|
+
//#endregion
|
|
1116
|
+
//#region src/hooks/ts/lib/project-utils/session-notes.ts
|
|
1117
|
+
/**
|
|
1118
|
+
* Session note creation, editing, checkpointing, renaming, and finalization.
|
|
1119
|
+
*/
|
|
1120
|
+
/**
 * Get the current (latest) note file path, or null if none exists.
 * Searches current month dir → previous month dir → flat notesDir (legacy).
 * "Latest" is the highest numeric prefix among files named "NNN[sep]….md".
 */
function getCurrentNotePath(notesDir) {
	if (!existsSync$1(notesDir)) return null;
	const noteNumber = (name) => parseInt(name.match(/^(\d+)/)?.[1] || "0", 10);
	const latestNoteIn = (dir) => {
		if (!existsSync$1(dir)) return null;
		const notes = readdirSync$1(dir)
			.filter((f) => f.match(/^\d{3,4}[\s_-].*\.md$/))
			.sort((a, b) => noteNumber(a) - noteNumber(b));
		return notes.length === 0 ? null : join$1(dir, notes[notes.length - 1]);
	};
	const today = new Date();
	const monthDir = (d) => join$1(notesDir, String(d.getFullYear()), String(d.getMonth() + 1).padStart(2, "0"));
	const currentMonth = latestNoteIn(monthDir(today));
	if (currentMonth) return currentMonth;
	const previousMonth = latestNoteIn(monthDir(new Date(today.getFullYear(), today.getMonth() - 1, 1)));
	if (previousMonth) return previousMonth;
	return latestNoteIn(notesDir);
}
|
|
1142
|
+
/**
 * Add work items to the "## Work Done" section of a session note.
 * @param notePath - absolute path to the note file
 * @param workItems - array of { title, details?, completed? }; completed defaults to true
 * @param sectionTitle - optional "### …" subheading placed above the items
 * Insertion order: after the "## Work Done" header (and its optional HTML
 * comment), else before "## Next Steps".
 * FIX: if neither anchor exists, the items are now appended at the end of the
 * file — previously they were silently dropped while the success message was
 * still logged.
 */
function addWorkToSessionNote(notePath, workItems, sectionTitle) {
	if (!existsSync$1(notePath)) {
		console.error(`Note file not found: ${notePath}`);
		return;
	}
	let content = readFileSync$1(notePath, "utf-8");
	let workText = "";
	if (sectionTitle) workText += `\n### ${sectionTitle}\n\n`;
	for (const item of workItems) {
		const checkbox = item.completed !== false ? "[x]" : "[ ]";
		workText += `- ${checkbox} **${item.title}**\n`;
		if (item.details && item.details.length > 0) for (const detail of item.details) workText += ` - ${detail}\n`;
	}
	const workDoneMatch = content.match(/## Work Done\n\n(<!-- .*? -->)?/);
	if (workDoneMatch) {
		const insertPoint = content.indexOf(workDoneMatch[0]) + workDoneMatch[0].length;
		content = content.substring(0, insertPoint) + workText + content.substring(insertPoint);
	} else {
		const nextStepsIndex = content.indexOf("## Next Steps");
		if (nextStepsIndex !== -1) content = content.substring(0, nextStepsIndex) + workText + "\n" + content.substring(nextStepsIndex);
		else content += workText;
	}
	writeFileSync$1(notePath, content);
	console.error(`Added ${workItems.length} work item(s) to: ${basename$1(notePath)}`);
}
|
|
1167
|
+
/** Sanitize a string for use in a filename: lowercase, dashes for spaces, max 50 chars. */
function sanitizeForFilename(str) {
	const lowered = str.toLowerCase();
	const dashed = lowered
		.replace(/[^a-z0-9\s-]/g, "")
		.replace(/\s+/g, "-")
		.replace(/-+/g, "-")
		.replace(/^-|-$/g, "");
	return dashed.substring(0, 50);
}
|
|
1171
|
+
/**
 * Return true when the candidate string should be rejected as a session name.
 * Rejects short/empty strings, file paths, shebangs, timestamps, XML tags,
 * hex hashes, and a fixed set of system-generated noise phrases.
 */
function isMeaninglessCandidate(text) {
	const candidate = text.trim();
	// Empty strings are also covered by the length check.
	if (candidate.length < 5) return true;
	if (candidate.startsWith("/") || candidate.startsWith("~")) return true;
	if (candidate.startsWith("#!")) return true;
	if (candidate.includes("[object Object]")) return true;
	const rejectPatterns = [
		/^\d{4}-\d{2}-\d{2}(T[\d:.Z+-]+)?$/,
		/^\d{1,2}:\d{2}(:\d{2})?(\s*(AM|PM))?$/i,
		/^<[a-z-]+[\s/>]/i,
		/^[0-9a-f]{10,}$/i,
		/^Exit code \d+/i,
		/^Error:/i,
		/^This session is being continued/i,
		/^\(Bash completed/i,
		/^Task Notification$/i,
		/^New Session$/i,
		/^Recovered Session$/i,
		/^Continued Session$/i,
		/^Untitled Session$/i,
		/^Context Compression$/i,
		/^[A-Fa-f0-9]{8,}\s+Output$/i
	];
	return rejectPatterns.some((pattern) => pattern.test(candidate));
}
|
|
1199
|
+
/**
 * Extract a meaningful, filename-safe name from session note content + summary.
 * Preference order: first "### " heading in Work Done → first bold phrase →
 * first numbered bold item → condensed summary. Returns "" when nothing fits.
 */
function extractMeaningfulName(noteContent, summary) {
	const sectionMatch = noteContent.match(/## Work Done\n\n([\s\S]*?)(?=\n---|\n## Next)/);
	if (sectionMatch) {
		const section = sectionMatch[1];
		const headings = section.match(/### ([^\n]+)/g);
		if (headings && headings.length > 0) {
			const heading = headings[0].replace("### ", "").trim();
			if (!isMeaninglessCandidate(heading) && heading.length > 5 && heading.length < 60) return sanitizeForFilename(heading);
		}
		const bolds = section.match(/\*\*([^*]+)\*\*/g);
		if (bolds && bolds.length > 0) {
			const bold = bolds[0].replace(/\*\*/g, "").trim();
			if (!isMeaninglessCandidate(bold) && bold.length > 3 && bold.length < 50) return sanitizeForFilename(bold);
		}
		const numbered = section.match(/^\d+\.\s+\*\*([^*]+)\*\*/m);
		if (numbered && !isMeaninglessCandidate(numbered[1])) return sanitizeForFilename(numbered[1]);
	}
	if (summary && summary.length > 5 && summary !== "Session completed." && !isMeaninglessCandidate(summary)) {
		// Keep at most the first five words of the cleaned summary.
		const condensed = summary.replace(/[^\w\s-]/g, " ").trim().split(/\s+/).slice(0, 5).join(" ");
		if (condensed.length > 3 && !isMeaninglessCandidate(condensed)) return sanitizeForFilename(condensed);
	}
	return "";
}
|
|
1226
|
+
/**
 * Rename a session note to "NNNN - YYYY-MM-DD - Title Case Name.md".
 * Accepts both the canonical ("NNNN - date - …") and legacy ("NNNN_date_…")
 * layouts. Returns the new path, or the original path when the name is empty,
 * the file is missing, the layout is unrecognized, or the rename fails.
 */
function renameSessionNote(notePath, meaningfulName) {
	if (!meaningfulName || !existsSync$1(notePath)) return notePath;
	const parentDir = join$1(notePath, "..");
	const currentName = basename$1(notePath);
	const match = currentName.match(/^(\d{3,4}) - (\d{4}-\d{2}-\d{2}) - .*\.md$/)
		|| currentName.match(/^(\d{3,4})_(\d{4}-\d{2}-\d{2})_.*\.md$/);
	if (!match) return notePath;
	const [, noteNumber, date] = match;
	const titled = meaningfulName
		.split(/[\s_-]+/)
		.map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
		.join(" ")
		.trim();
	const nextName = `${noteNumber.padStart(4, "0")} - ${date} - ${titled}.md`;
	if (nextName === currentName) return notePath;
	const nextPath = join$1(parentDir, nextName);
	try {
		renameSync$1(notePath, nextPath);
		console.error(`Renamed note: ${currentName} → ${nextName}`);
		return nextPath;
	} catch (error) {
		console.error(`Could not rename note: ${error}`);
		return notePath;
	}
}
|
|
1253
|
+
/**
 * Finalize a session note: flip Status to Completed, stamp a completion time,
 * fill the Next Steps placeholder with the summary, and rename the file with
 * a meaningful name when one can be extracted.
 * IDEMPOTENT: returns immediately if the note is already marked Completed.
 * @returns the final path (possibly renamed).
 */
function finalizeSessionNote(notePath, summary) {
	if (!existsSync$1(notePath)) {
		console.error(`Note file not found: ${notePath}`);
		return notePath;
	}
	let content = readFileSync$1(notePath, "utf-8");
	if (content.includes("**Status:** Completed")) {
		console.error(`Note already finalized: ${basename$1(notePath)}`);
		return notePath;
	}
	content = content.replace("**Status:** In Progress", "**Status:** Completed");
	if (!content.includes("**Completed:**")) {
		const completionTime = new Date().toISOString();
		content = content.replace("---\n\n## Work Done", `**Completed:** ${completionTime}\n\n---\n\n## Work Done`);
	}
	// Only a placeholder HTML comment gets replaced; real content is kept.
	const placeholder = content.match(/## Next Steps\n\n(<!-- .*? -->)/);
	if (placeholder) content = content.replace(placeholder[0], `## Next Steps\n\n${summary || "Session completed."}`);
	writeFileSync$1(notePath, content);
	console.error(`Session note finalized: ${basename$1(notePath)}`);
	const meaningfulName = extractMeaningfulName(content, summary);
	return meaningfulName ? renameSessionNote(notePath, meaningfulName) : notePath;
}
|
|
1281
|
+
|
|
1282
|
+
//#endregion
|
|
1283
|
+
//#region src/hooks/ts/lib/project-utils/todo.ts
|
|
1284
|
+
/**
|
|
1285
|
+
* TODO.md management — creation, task updates, checkpoints, and Continue section.
|
|
1286
|
+
*/
|
|
1287
|
+
/**
 * Ensure TODO.md exists for the project, creating it (and its parent
 * directory) with the default structure when missing.
 * @returns the path to the TODO.md file.
 */
function ensureTodoMd(cwd) {
	const todoPath = findTodoPath(cwd);
	if (existsSync$1(todoPath)) return todoPath;
	const parentDir = join$1(todoPath, "..");
	if (!existsSync$1(parentDir)) mkdirSync$1(parentDir, { recursive: true });
	const template = `# TODO

## Current Session

- [ ] (Tasks will be tracked here)

## Backlog

- [ ] (Future tasks)

---

*Last updated: ${new Date().toISOString()}*
`;
	writeFileSync$1(todoPath, template);
	console.error(`Created TODO.md: ${todoPath}`);
	return todoPath;
}
|
|
1314
|
+
/**
 * Rewrite the ## Continue section at the top of TODO.md.
 * Mirrors "pause session" behavior — gives the next session a starting point.
 * Any existing ## Continue section is removed first; the trailing
 * "*Last updated:*" footer is normalized to a single fresh stamp.
 * (tokenDisplay is currently unused but kept for caller compatibility.)
 */
function updateTodoContinue(cwd, noteFilename, state, tokenDisplay) {
	const todoPath = ensureTodoMd(cwd);
	let content = readFileSync$1(todoPath, "utf-8");
	// Drop any previous Continue block (up to and including its --- rule).
	content = content.replace(/## Continue\n[\s\S]*?\n---\n+/, "");
	const now = new Date().toISOString();
	const stateLines = state
		? state.split("\n").filter((line) => line.trim()).slice(0, 10).map((line) => `> ${line}`).join("\n")
		: `> Working directory: ${cwd}. Check the latest session note for details.`;
	const continueSection = `## Continue

> **Last session:** ${noteFilename.replace(".md", "")}
> **Paused at:** ${now}
>
${stateLines}

---

`;
	content = content.replace(/^\s+/, "");
	const titleMatch = content.match(/^(# [^\n]+\n+)/);
	content = titleMatch
		? titleMatch[1] + continueSection + content.substring(titleMatch[0].length)
		: continueSection + content;
	// Collapse any stacked footers into one fresh "Last updated" stamp.
	content = content.replace(/(\n---\s*)*(\n\*Last updated:.*\*\s*)+$/g, "");
	content = content.trimEnd() + `\n\n---\n\n*Last updated: ${now}*\n`;
	writeFileSync$1(todoPath, content);
	console.error("TODO.md ## Continue section updated");
}
|
|
1344
|
+
|
|
1345
|
+
//#endregion
|
|
1346
|
+
//#region src/daemon/work-queue-worker.ts
|
|
1347
|
+
/**
|
|
1348
|
+
* work-queue-worker.ts — Daemon worker loop for the persistent work queue
|
|
1349
|
+
*
|
|
1350
|
+
* Runs every 5 seconds to drain the queue.
|
|
1351
|
+
* Handles 'session-end' work items by reading the transcript, extracting
|
|
1352
|
+
* work summaries, updating the session note, and updating TODO.md.
|
|
1353
|
+
*
|
|
1354
|
+
* Other item types (session-summary, note-update, todo-update, topic-detect)
|
|
1355
|
+
* are stubs — they log and complete immediately, ready for future expansion.
|
|
1356
|
+
*/
|
|
1357
|
+
// Worker cadence: attempt to drain one queue item every 5 seconds.
const WORKER_INTERVAL_MS = 5e3;
// Housekeeping (queue cleanup) runs every 10 minutes.
const HOUSEKEEPING_INTERVAL_MS = 600 * 1e3;
// Interval handles; null while stopped (see startWorker/stopWorker).
let workerTimer = null;
let housekeepingTimer = null;
|
|
1361
|
+
/**
 * Start the background worker and housekeeping timers.
 * The worker tick drains one queue item; housekeeping runs cleanup().
 */
function startWorker() {
  process.stderr.write("[work-queue-worker] Starting worker loop.\n");
  workerTimer = setInterval(() => {
    // processNextItem is async; route any rejection to stderr so the
    // interval keeps firing regardless of individual item failures.
    processNextItem().catch((e) => {
      process.stderr.write(`[work-queue-worker] Uncaught error in worker loop: ${e}\n`);
    });
  }, WORKER_INTERVAL_MS);
  housekeepingTimer = setInterval(() => {
    try {
      cleanup();
    } catch (e) {
      process.stderr.write(`[work-queue-worker] Housekeeping error: ${e}\n`);
    }
  }, HOUSEKEEPING_INTERVAL_MS);
  process.stderr.write("[work-queue-worker] Worker started (interval=5s, housekeeping=10min).\n");
}
|
|
1380
|
+
/**
 * Stop the worker timers gracefully.
 * Clears both intervals and resets the handles to null so a later
 * startWorker() call can restart cleanly.
 */
function stopWorker() {
  if (workerTimer !== null) {
    clearInterval(workerTimer);
    workerTimer = null;
  }
  if (housekeepingTimer !== null) {
    clearInterval(housekeepingTimer);
    housekeepingTimer = null;
  }
  process.stderr.write("[work-queue-worker] Worker stopped.\n");
}
|
|
1392
|
+
/**
 * Signal that new work has been enqueued.
 * Currently a deliberate no-op: the worker polls on its next tick anyway,
 * and 5 s is fast enough that the timer does not need resetting. The hook
 * exists so callers are already wired for future wake-up optimisations.
 */
function notifyNewWork() {}
|
|
1398
|
+
/**
 * Dequeue and process a single work item, if any is pending.
 * Completed items are marked done; any error marks the item failed
 * (the queue's retry policy decides what happens next).
 */
async function processNextItem() {
  const item = dequeue();
  if (!item) return;
  process.stderr.write(`[work-queue-worker] Processing ${item.type} (id=${item.id}, attempt=${item.attempts}).\n`);
  const stubTypes = ["session-summary", "note-update", "todo-update", "topic-detect"];
  try {
    if (item.type === "session-end") {
      await handleSessionEnd(item);
    } else if (stubTypes.includes(item.type)) {
      // Recognized but unimplemented types complete as no-ops.
      process.stderr.write(`[work-queue-worker] Item type '${item.type}' is not yet implemented — completing as no-op.\n`);
    } else {
      throw new Error(`Unknown work item type: ${item.type}`);
    }
    markCompleted(item.id);
    process.stderr.write(`[work-queue-worker] Completed ${item.type} (id=${item.id}).\n`);
  } catch (e) {
    const msg = e instanceof Error ? e.message : String(e);
    markFailed(item.id, msg);
  }
}
|
|
1422
|
+
/**
 * Process a 'session-end' work item: read the transcript, extract work
 * summaries, update the current session note, update TODO.md's ## Continue
 * section, and move session files into sessions/.
 *
 * Expected payload:
 *   transcriptPath: string — absolute path to the .jsonl transcript
 *   cwd: string — working directory of the session
 *   message?: string — COMPLETED: line extracted by the hook (optional)
 *
 * @throws {Error} when the payload is missing required fields or the
 *   transcript cannot be read.
 */
async function handleSessionEnd(item) {
  const { transcriptPath, cwd, message: hookMessage } = item.payload;
  if (!transcriptPath) throw new Error("session-end payload missing transcriptPath");
  if (!cwd) throw new Error("session-end payload missing cwd");
  let transcript;
  try {
    transcript = readFileSync(transcriptPath, "utf-8");
  } catch (e) {
    throw new Error(`Could not read transcript at ${transcriptPath}: ${e}`);
  }
  const lines = transcript.trim().split("\n");
  const workItems = extractWorkFromTranscript(lines);
  let message = hookMessage ?? "";
  if (!message) {
    // Fall back to scanning the final assistant entry for a COMPLETED: marker.
    const lastEntry = tryParseJson(lines[lines.length - 1]);
    if (lastEntry?.type === "assistant" && lastEntry.message?.content) {
      const m = contentToText(lastEntry.message.content).match(/COMPLETED:\s*(.+?)(?:\n|$)/i);
      if (m) message = m[1].trim().replace(/\*+/g, "").replace(/\[.*?\]/g, "").trim();
    }
  }
  const currentNotePath = getCurrentNotePath(findNotesDir(cwd).path);
  if (currentNotePath) {
    if (workItems.length > 0) {
      addWorkToSessionNote(currentNotePath, workItems);
      process.stderr.write(`[work-queue-worker] Added ${workItems.length} work item(s) to note.\n`);
    } else if (message) {
      addWorkToSessionNote(currentNotePath, [{
        title: message,
        completed: true
      }]);
      process.stderr.write("[work-queue-worker] Added completion message to note.\n");
    }
    // finalizeSessionNote may rename the note to a meaningful name and
    // returns the (possibly new) path. The previous code discarded the
    // return value and kept logging/recording the pre-rename path, so
    // TODO.md's ## Continue could reference a filename that no longer
    // exists. Capture the final path and use it from here on.
    const finalNotePath = finalizeSessionNote(currentNotePath, message || "Session completed.") ?? currentNotePath;
    process.stderr.write(`[work-queue-worker] Finalized session note: ${basename(finalNotePath)}.\n`);
    try {
      const stateLines = [];
      stateLines.push(`Working directory: ${cwd}`);
      if (workItems.length > 0) {
        stateLines.push("", "Work completed:");
        for (const wi of workItems.slice(0, 5)) stateLines.push(`- ${wi.title}`);
      }
      if (message) stateLines.push("", `Last completed: ${message}`);
      updateTodoContinue(cwd, basename(finalNotePath), stateLines.join("\n"), "session-end");
    } catch (todoError) {
      // TODO.md update is best-effort; the note itself is already finalized.
      process.stderr.write(`[work-queue-worker] Could not update TODO.md: ${todoError}\n`);
    }
  } else process.stderr.write("[work-queue-worker] No current session note found — skipping note update.\n");
  try {
    const movedCount = moveSessionFilesToSessionsDir(dirname(transcriptPath));
    if (movedCount > 0) process.stderr.write(`[work-queue-worker] Moved ${movedCount} session file(s) to sessions/.\n`);
  } catch (moveError) {
    // Moving session files is best-effort housekeeping.
    process.stderr.write(`[work-queue-worker] Could not move session files: ${moveError}\n`);
  }
}
|
|
1484
|
+
/**
 * Parse a line of JSON, returning null instead of throwing on bad input.
 *
 * @param {string} line - Raw transcript line.
 * @returns {*} The parsed value, or null when parsing fails.
 */
function tryParseJson(line) {
  let parsed = null;
  try {
    parsed = JSON.parse(line);
  } catch {
    // Malformed JSON (or a non-string) yields null rather than throwing.
  }
  return parsed;
}
|
|
1491
|
+
/**
 * Flatten an assistant message's content into plain text.
 * Accepts a string, or an array of strings / blocks with a truthy
 * `text` or `content` field; anything else contributes an empty string.
 *
 * @param {*} content - Message content in either string or array form.
 * @returns {string} Space-joined, trimmed text ("" for unknown shapes).
 */
function contentToText(content) {
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return "";
  const pieces = [];
  for (const part of content) {
    if (typeof part === "string") {
      pieces.push(part);
    } else if (part?.text) {
      pieces.push(String(part.text));
    } else if (part?.content) {
      pieces.push(String(part.content));
    } else {
      pieces.push("");
    }
  }
  return pieces.join(" ").trim();
}
|
|
1502
|
+
/**
 * Scan transcript lines for assistant messages carrying SUMMARY:/ACTIONS:
 * markers and turn them into completed work items. A COMPLETED: marker is
 * used only as a fallback while no SUMMARY: item has been collected yet.
 *
 * @param {string[]} lines - Raw .jsonl transcript lines.
 * @returns {Array<{title: string, details?: string[], completed: boolean}>}
 */
function extractWorkFromTranscript(lines) {
  const collected = [];
  const seenTitles = new Set();
  for (const raw of lines) {
    const entry = tryParseJson(raw);
    if (entry?.type !== "assistant") continue;
    const body = entry.message?.content;
    if (!body) continue;
    const text = contentToText(body);
    const summaryMatch = text.match(/SUMMARY:\s*(.+?)(?:\n|$)/i);
    if (summaryMatch) {
      const title = summaryMatch[1].trim();
      // Dedupe and skip trivially short summaries.
      if (title.length > 5 && !seenTitles.has(title)) {
        seenTitles.add(title);
        const details = [];
        const actionsMatch = text.match(/ACTIONS:\s*(.+?)(?=\n[A-Z]+:|$)/is);
        if (actionsMatch) {
          // Strip bullet/number prefixes and keep up to 3 plausible lines.
          const bullets = actionsMatch[1]
            .split("\n")
            .map((l) => l.replace(/^[-*•]\s*/, "").replace(/^\d+\.\s*/, "").trim())
            .filter((l) => l.length > 3 && l.length < 100);
          details.push(...bullets.slice(0, 3));
        }
        collected.push({
          title,
          details: details.length > 0 ? details : void 0,
          completed: true
        });
      }
    }
    const completedMatch = text.match(/COMPLETED:\s*(.+?)(?:\n|$)/i);
    if (completedMatch && collected.length === 0) {
      const done = completedMatch[1].trim().replace(/\*+/g, "").replace(/\[.*?\]/g, "").trim();
      if (done.length > 5 && !seenTitles.has(done)) {
        seenTitles.add(done);
        collected.push({
          title: done,
          completed: true
        });
      }
    }
  }
  return collected;
}
|
|
1543
|
+
|
|
737
1544
|
//#endregion
|
|
738
1545
|
//#region src/daemon/daemon/handler.ts
|
|
739
1546
|
function sendResponse(socket, response) {
|
|
@@ -776,7 +1583,8 @@ async function handleRequest(request, socket) {
|
|
|
776
1583
|
db: dbStats,
|
|
777
1584
|
vaultIndexInProgress,
|
|
778
1585
|
lastVaultIndexTime: lastVaultIndexTime ? new Date(lastVaultIndexTime).toISOString() : null,
|
|
779
|
-
vaultPath: daemonConfig.vaultPath ?? null
|
|
1586
|
+
vaultPath: daemonConfig.vaultPath ?? null,
|
|
1587
|
+
workQueue: getStats()
|
|
780
1588
|
}
|
|
781
1589
|
});
|
|
782
1590
|
socket.end();
|
|
@@ -1134,6 +1942,52 @@ async function handleRequest(request, socket) {
|
|
|
1134
1942
|
socket.end();
|
|
1135
1943
|
return;
|
|
1136
1944
|
}
|
|
1945
|
+
if (method === "work_queue_enqueue") {
|
|
1946
|
+
try {
|
|
1947
|
+
const p = params;
|
|
1948
|
+
if (!p.type) {
|
|
1949
|
+
sendResponse(socket, {
|
|
1950
|
+
id,
|
|
1951
|
+
ok: false,
|
|
1952
|
+
error: "work_queue_enqueue: type is required"
|
|
1953
|
+
});
|
|
1954
|
+
socket.end();
|
|
1955
|
+
return;
|
|
1956
|
+
}
|
|
1957
|
+
const item = enqueue({
|
|
1958
|
+
type: p.type,
|
|
1959
|
+
priority: p.priority,
|
|
1960
|
+
payload: p.payload ?? {},
|
|
1961
|
+
maxAttempts: p.maxAttempts
|
|
1962
|
+
});
|
|
1963
|
+
/* @__PURE__ */ notifyNewWork();
|
|
1964
|
+
sendResponse(socket, {
|
|
1965
|
+
id,
|
|
1966
|
+
ok: true,
|
|
1967
|
+
result: {
|
|
1968
|
+
id: item.id,
|
|
1969
|
+
status: item.status
|
|
1970
|
+
}
|
|
1971
|
+
});
|
|
1972
|
+
} catch (e) {
|
|
1973
|
+
sendResponse(socket, {
|
|
1974
|
+
id,
|
|
1975
|
+
ok: false,
|
|
1976
|
+
error: e instanceof Error ? e.message : String(e)
|
|
1977
|
+
});
|
|
1978
|
+
}
|
|
1979
|
+
socket.end();
|
|
1980
|
+
return;
|
|
1981
|
+
}
|
|
1982
|
+
if (method === "work_queue_stats") {
|
|
1983
|
+
sendResponse(socket, {
|
|
1984
|
+
id,
|
|
1985
|
+
ok: true,
|
|
1986
|
+
result: getStats()
|
|
1987
|
+
});
|
|
1988
|
+
socket.end();
|
|
1989
|
+
return;
|
|
1990
|
+
}
|
|
1137
1991
|
try {
|
|
1138
1992
|
sendResponse(socket, {
|
|
1139
1993
|
id,
|
|
@@ -1263,12 +2117,15 @@ async function serve(config) {
|
|
|
1263
2117
|
startIndexScheduler();
|
|
1264
2118
|
if (storageBackend.backendType === "postgres") startEmbedScheduler();
|
|
1265
2119
|
else process.stderr.write("[pai-daemon] Embed scheduler: disabled (SQLite backend)\n");
|
|
2120
|
+
loadQueue();
|
|
2121
|
+
startWorker();
|
|
1266
2122
|
const server = await startIpcServer(config.socketPath);
|
|
1267
2123
|
const shutdown = async (signal) => {
|
|
1268
2124
|
process.stderr.write(`\n[pai-daemon] ${signal} received. Stopping.\n`);
|
|
1269
2125
|
setShutdownRequested(true);
|
|
1270
2126
|
if (indexSchedulerTimer) clearInterval(indexSchedulerTimer);
|
|
1271
2127
|
if (embedSchedulerTimer) clearInterval(embedSchedulerTimer);
|
|
2128
|
+
stopWorker();
|
|
1272
2129
|
server.close();
|
|
1273
2130
|
const SHUTDOWN_TIMEOUT_MS = 1e4;
|
|
1274
2131
|
const POLL_INTERVAL_MS = 100;
|
|
@@ -1302,4 +2159,4 @@ var daemon_exports = /* @__PURE__ */ __exportAll({ serve: () => serve });
|
|
|
1302
2159
|
|
|
1303
2160
|
//#endregion
|
|
1304
2161
|
export { serve as n, daemon_exports as t };
|
|
1305
|
-
//# sourceMappingURL=daemon-
|
|
2162
|
+
//# sourceMappingURL=daemon-DuGlDnV7.mjs.map
|