panopticon-cli 0.4.33 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71) hide show
  1. package/README.md +96 -210
  2. package/dist/{agents-VLK4BMVA.js → agents-5OPQKM5K.js} +6 -5
  3. package/dist/{chunk-OMNXYPXC.js → chunk-2V4NF7J2.js} +14 -1
  4. package/dist/chunk-2V4NF7J2.js.map +1 -0
  5. package/dist/{chunk-XKT5MHPT.js → chunk-4YSYJ4HM.js} +2 -2
  6. package/dist/{chunk-XFR2DLMR.js → chunk-76F6DSVS.js} +49 -10
  7. package/dist/chunk-76F6DSVS.js.map +1 -0
  8. package/dist/{chunk-PI7Y3PSN.js → chunk-F5555J3A.js} +42 -6
  9. package/dist/chunk-F5555J3A.js.map +1 -0
  10. package/dist/{chunk-KJ2TRXNK.js → chunk-FTCPTHIJ.js} +47 -420
  11. package/dist/chunk-FTCPTHIJ.js.map +1 -0
  12. package/dist/chunk-HJSM6E6U.js +1038 -0
  13. package/dist/chunk-HJSM6E6U.js.map +1 -0
  14. package/dist/{chunk-RBUO57TC.js → chunk-NLQRED36.js} +3 -3
  15. package/dist/chunk-NLQRED36.js.map +1 -0
  16. package/dist/{chunk-ASY7T35E.js → chunk-OWHXCGVO.js} +245 -90
  17. package/dist/chunk-OWHXCGVO.js.map +1 -0
  18. package/dist/{chunk-BKCWRMUX.js → chunk-VHKSS7QX.js} +106 -11
  19. package/dist/chunk-VHKSS7QX.js.map +1 -0
  20. package/dist/{chunk-GFP3PIPB.js → chunk-YGJ54GW2.js} +1 -1
  21. package/dist/chunk-YGJ54GW2.js.map +1 -0
  22. package/dist/cli/index.js +1521 -935
  23. package/dist/cli/index.js.map +1 -1
  24. package/dist/dashboard/prompts/work-agent.md +2 -0
  25. package/dist/dashboard/public/assets/index-Ce6q21Fm.js +743 -0
  26. package/dist/dashboard/public/assets/{index-UjZq6ykz.css → index-NzpI0ItZ.css} +1 -1
  27. package/dist/dashboard/public/index.html +2 -2
  28. package/dist/dashboard/server.js +4274 -2320
  29. package/dist/{feedback-writer-LVZ5TFYZ.js → feedback-writer-VRMMWWTW.js} +2 -2
  30. package/dist/git-utils-I2UDKNZH.js +131 -0
  31. package/dist/git-utils-I2UDKNZH.js.map +1 -0
  32. package/dist/index.d.ts +12 -1
  33. package/dist/index.js +5 -3
  34. package/dist/index.js.map +1 -1
  35. package/dist/{projects-JEIVIYC6.js → projects-CFX3RTDL.js} +4 -2
  36. package/dist/{remote-workspace-AHVHQEES.js → remote-workspace-7FPGF2RM.js} +2 -2
  37. package/dist/{review-status-EPFG4XM7.js → review-status-TDPSOU5J.js} +2 -2
  38. package/dist/{specialist-context-T3NBMCIE.js → specialist-context-WGUUYDWY.js} +5 -5
  39. package/dist/{specialist-logs-CVKD3YJ3.js → specialist-logs-XJB5TCKJ.js} +5 -5
  40. package/dist/{specialists-TKAP6T6Z.js → specialists-5LBRHYFA.js} +5 -5
  41. package/dist/{traefik-QX4ZV4YG.js → traefik-WFMQX2LY.js} +3 -3
  42. package/dist/{workspace-manager-KLHUCIZV.js → workspace-manager-E434Z45T.js} +2 -2
  43. package/package.json +1 -1
  44. package/scripts/record-cost-event.js +5 -5
  45. package/scripts/stop-hook +7 -0
  46. package/scripts/work-agent-stop-hook +137 -0
  47. package/skills/myn-standards/SKILL.md +351 -0
  48. package/skills/pan-new-project/SKILL.md +304 -0
  49. package/skills/write-spec/SKILL.md +138 -0
  50. package/dist/chunk-7XNJJBH6.js +0 -538
  51. package/dist/chunk-7XNJJBH6.js.map +0 -1
  52. package/dist/chunk-ASY7T35E.js.map +0 -1
  53. package/dist/chunk-BKCWRMUX.js.map +0 -1
  54. package/dist/chunk-GFP3PIPB.js.map +0 -1
  55. package/dist/chunk-KJ2TRXNK.js.map +0 -1
  56. package/dist/chunk-OMNXYPXC.js.map +0 -1
  57. package/dist/chunk-PI7Y3PSN.js.map +0 -1
  58. package/dist/chunk-RBUO57TC.js.map +0 -1
  59. package/dist/chunk-XFR2DLMR.js.map +0 -1
  60. package/dist/dashboard/public/assets/index-kAJqtLDO.js +0 -708
  61. /package/dist/{agents-VLK4BMVA.js.map → agents-5OPQKM5K.js.map} +0 -0
  62. /package/dist/{chunk-XKT5MHPT.js.map → chunk-4YSYJ4HM.js.map} +0 -0
  63. /package/dist/{feedback-writer-LVZ5TFYZ.js.map → feedback-writer-VRMMWWTW.js.map} +0 -0
  64. /package/dist/{projects-JEIVIYC6.js.map → projects-CFX3RTDL.js.map} +0 -0
  65. /package/dist/{remote-workspace-AHVHQEES.js.map → remote-workspace-7FPGF2RM.js.map} +0 -0
  66. /package/dist/{review-status-EPFG4XM7.js.map → review-status-TDPSOU5J.js.map} +0 -0
  67. /package/dist/{specialist-context-T3NBMCIE.js.map → specialist-context-WGUUYDWY.js.map} +0 -0
  68. /package/dist/{specialist-logs-CVKD3YJ3.js.map → specialist-logs-XJB5TCKJ.js.map} +0 -0
  69. /package/dist/{specialists-TKAP6T6Z.js.map → specialists-5LBRHYFA.js.map} +0 -0
  70. /package/dist/{traefik-QX4ZV4YG.js.map → traefik-WFMQX2LY.js.map} +0 -0
  71. /package/dist/{workspace-manager-KLHUCIZV.js.map → workspace-manager-E434Z45T.js.map} +0 -0
@@ -1,7 +1,7 @@
1
1
  import {
2
2
  init_projects,
3
3
  resolveProjectFromIssue
4
- } from "./chunk-OMNXYPXC.js";
4
+ } from "./chunk-2V4NF7J2.js";
5
5
  import "./chunk-ZTFNYOC7.js";
6
6
  import {
7
7
  __esm,
@@ -112,4 +112,4 @@ init_feedback_writer();
112
112
  export {
113
113
  writeFeedbackFile
114
114
  };
115
- //# sourceMappingURL=feedback-writer-LVZ5TFYZ.js.map
115
+ //# sourceMappingURL=feedback-writer-VRMMWWTW.js.map
@@ -0,0 +1,131 @@
1
+ import {
2
+ init_esm_shims
3
+ } from "./chunk-ZHC57RCV.js";
4
+
5
+ // src/lib/git-utils.ts
6
+ init_esm_shims();
7
+ import { existsSync, unlinkSync, readdirSync } from "fs";
8
+ import { join } from "path";
9
+ import { exec } from "child_process";
10
+ import { promisify } from "util";
11
+ var execAsync = promisify(exec);
12
+ async function hasRunningGitProcesses(repoPath) {
13
+ try {
14
+ try {
15
+ const gitDir = join(repoPath, ".git");
16
+ const { stdout } = await execAsync(`fuser "${gitDir}" 2>/dev/null`, {
17
+ encoding: "utf-8"
18
+ });
19
+ return stdout.trim().length > 0;
20
+ } catch {
21
+ try {
22
+ const { stdout } = await execAsync(
23
+ `ps aux | grep -E "git.*${repoPath.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}" | grep -v grep`,
24
+ { encoding: "utf-8" }
25
+ );
26
+ return stdout.trim().length > 0;
27
+ } catch {
28
+ return false;
29
+ }
30
+ }
31
+ } catch {
32
+ return false;
33
+ }
34
+ }
35
+ function findGitLockFiles(repoPath) {
36
+ const lockFiles = [];
37
+ const indexLock = join(repoPath, ".git", "index.lock");
38
+ if (existsSync(indexLock)) {
39
+ lockFiles.push(indexLock);
40
+ }
41
+ const refsDir = join(repoPath, ".git", "refs");
42
+ if (existsSync(refsDir)) {
43
+ const findLocksRecursive = (dir) => {
44
+ const entries = readdirSync(dir, { withFileTypes: true });
45
+ for (const entry of entries) {
46
+ const fullPath = join(dir, entry.name);
47
+ if (entry.isDirectory()) {
48
+ findLocksRecursive(fullPath);
49
+ } else if (entry.name.endsWith(".lock")) {
50
+ lockFiles.push(fullPath);
51
+ }
52
+ }
53
+ };
54
+ try {
55
+ findLocksRecursive(refsDir);
56
+ } catch {
57
+ }
58
+ }
59
+ return lockFiles;
60
+ }
61
+ async function cleanupStaleLocks(repoPath) {
62
+ const result = {
63
+ found: [],
64
+ removed: [],
65
+ errors: []
66
+ };
67
+ const lockFiles = findGitLockFiles(repoPath);
68
+ result.found = lockFiles;
69
+ if (lockFiles.length === 0) {
70
+ return result;
71
+ }
72
+ const hasGitProcesses = await hasRunningGitProcesses(repoPath);
73
+ if (hasGitProcesses) {
74
+ result.errors.push({
75
+ file: "N/A",
76
+ error: "Git processes are running - not safe to remove locks"
77
+ });
78
+ return result;
79
+ }
80
+ for (const lockFile of lockFiles) {
81
+ try {
82
+ unlinkSync(lockFile);
83
+ result.removed.push(lockFile);
84
+ } catch (error) {
85
+ const msg = error instanceof Error ? error.message : String(error);
86
+ result.errors.push({ file: lockFile, error: msg });
87
+ }
88
+ }
89
+ return result;
90
+ }
91
+ async function getWorkspaceCommitHashes(workspacePath) {
92
+ const result = {};
93
+ const hasTopLevelGit = existsSync(join(workspacePath, ".git"));
94
+ if (hasTopLevelGit) {
95
+ try {
96
+ const { stdout } = await execAsync("git rev-parse HEAD", { cwd: workspacePath, encoding: "utf-8" });
97
+ result["."] = stdout.trim();
98
+ } catch {
99
+ }
100
+ } else {
101
+ try {
102
+ const entries = readdirSync(workspacePath, { withFileTypes: true });
103
+ for (const entry of entries) {
104
+ if (!entry.isDirectory() || entry.name.startsWith(".")) continue;
105
+ const subPath = join(workspacePath, entry.name);
106
+ if (!existsSync(join(subPath, ".git"))) continue;
107
+ try {
108
+ const { stdout } = await execAsync("git rev-parse HEAD", { cwd: subPath, encoding: "utf-8" });
109
+ result[entry.name] = stdout.trim();
110
+ } catch {
111
+ }
112
+ }
113
+ } catch {
114
+ }
115
+ }
116
+ return result;
117
+ }
118
+ async function hasStaleLocks(repoPath) {
119
+ const lockFiles = findGitLockFiles(repoPath);
120
+ if (lockFiles.length === 0) {
121
+ return false;
122
+ }
123
+ const hasGitProcesses = await hasRunningGitProcesses(repoPath);
124
+ return !hasGitProcesses;
125
+ }
126
+ export {
127
+ cleanupStaleLocks,
128
+ getWorkspaceCommitHashes,
129
+ hasStaleLocks
130
+ };
131
+ //# sourceMappingURL=git-utils-I2UDKNZH.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/lib/git-utils.ts"],"sourcesContent":["/**\n * Git utilities for handling common git operations and recovery\n */\n\nimport { existsSync, unlinkSync, readdirSync } from 'fs';\nimport { join } from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\n\nconst execAsync = promisify(exec);\n\n/**\n * Check if any git processes are currently running in a specific repository\n *\n * This checks if there are git processes with the repository path in their command line.\n * If we can't determine repository-specific processes, we conservatively return false\n * (no processes detected) to allow cleanup to proceed.\n */\nasync function hasRunningGitProcesses(repoPath: string): Promise<boolean> {\n try {\n // Try to find git processes that reference this specific repository\n // Use fuser to check if any process has the .git directory open (more reliable)\n try {\n const gitDir = join(repoPath, '.git');\n const { stdout } = await execAsync(`fuser \"${gitDir}\" 2>/dev/null`, {\n encoding: 'utf-8',\n });\n // fuser returns PIDs if any process has the directory open\n return stdout.trim().length > 0;\n } catch {\n // fuser not available or no processes found\n // Fall back to checking ps for git processes in this directory\n try {\n const { stdout } = await execAsync(\n `ps aux | grep -E \"git.*${repoPath.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')}\" | grep -v grep`,\n { encoding: 'utf-8' }\n );\n return stdout.trim().length > 0;\n } catch {\n // No git processes found for this repo\n return false;\n }\n }\n } catch {\n // Error checking - conservatively assume no processes\n return false;\n }\n}\n\n/**\n * Find all git lock files in a repository\n */\nfunction findGitLockFiles(repoPath: string): string[] {\n const lockFiles: string[] = [];\n\n // Check for index.lock in .git directory\n const indexLock = join(repoPath, '.git', 'index.lock');\n if (existsSync(indexLock)) {\n lockFiles.push(indexLock);\n }\n\n // 
Check for ref locks in .git/refs\n const refsDir = join(repoPath, '.git', 'refs');\n if (existsSync(refsDir)) {\n const findLocksRecursive = (dir: string) => {\n const entries = readdirSync(dir, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n findLocksRecursive(fullPath);\n } else if (entry.name.endsWith('.lock')) {\n lockFiles.push(fullPath);\n }\n }\n };\n try {\n findLocksRecursive(refsDir);\n } catch {\n // Ignore errors reading refs directory\n }\n }\n\n return lockFiles;\n}\n\n/**\n * Check for and clean up stale git lock files\n *\n * A lock file is considered stale if:\n * 1. It exists\n * 2. No git processes are currently running\n *\n * @param repoPath - Path to the git repository\n * @returns Object with cleanup results\n */\nexport async function cleanupStaleLocks(repoPath: string): Promise<{\n found: string[];\n removed: string[];\n errors: Array<{ file: string; error: string }>;\n}> {\n const result = {\n found: [] as string[],\n removed: [] as string[],\n errors: [] as Array<{ file: string; error: string }>,\n };\n\n // Find all lock files\n const lockFiles = findGitLockFiles(repoPath);\n result.found = lockFiles;\n\n if (lockFiles.length === 0) {\n return result;\n }\n\n // Check if git processes are running for this repository\n const hasGitProcesses = await hasRunningGitProcesses(repoPath);\n\n if (hasGitProcesses) {\n // Don't remove locks if git is actively running\n result.errors.push({\n file: 'N/A',\n error: 'Git processes are running - not safe to remove locks',\n });\n return result;\n }\n\n // Remove stale lock files\n for (const lockFile of lockFiles) {\n try {\n unlinkSync(lockFile);\n result.removed.push(lockFile);\n } catch (error: unknown) {\n const msg = error instanceof Error ? 
error.message : String(error);\n result.errors.push({ file: lockFile, error: msg });\n }\n }\n\n return result;\n}\n\n/**\n * Get the HEAD commit SHA for each git repository in a workspace.\n *\n * Handles both monorepo (single .git at top level) and polyrepo (subdirs with .git).\n * Returns a map of repo identifier → HEAD SHA:\n * - Monorepo: { '.': 'abc123...' }\n * - Polyrepo: { 'frontend': 'abc123...', 'api': 'def456...' }\n */\nexport async function getWorkspaceCommitHashes(workspacePath: string): Promise<Record<string, string>> {\n const result: Record<string, string> = {};\n\n const hasTopLevelGit = existsSync(join(workspacePath, '.git'));\n\n if (hasTopLevelGit) {\n try {\n const { stdout } = await execAsync('git rev-parse HEAD', { cwd: workspacePath, encoding: 'utf-8' });\n result['.'] = stdout.trim();\n } catch { /* skip */ }\n } else {\n try {\n const entries = readdirSync(workspacePath, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isDirectory() || entry.name.startsWith('.')) continue;\n const subPath = join(workspacePath, entry.name);\n if (!existsSync(join(subPath, '.git'))) continue;\n try {\n const { stdout } = await execAsync('git rev-parse HEAD', { cwd: subPath, encoding: 'utf-8' });\n result[entry.name] = stdout.trim();\n } catch { /* skip this sub-repo */ }\n }\n } catch { /* skip */ }\n }\n\n return result;\n}\n\n/**\n * Check if a repository has stale lock files\n *\n * @param repoPath - Path to the git repository\n * @returns True if stale locks exist\n */\nexport async function hasStaleLocks(repoPath: string): Promise<boolean> {\n const lockFiles = findGitLockFiles(repoPath);\n if (lockFiles.length === 0) {\n return false;\n }\n\n const hasGitProcesses = await hasRunningGitProcesses(repoPath);\n return 
!hasGitProcesses;\n}\n"],"mappings":";;;;;AAAA;AAIA,SAAS,YAAY,YAAY,mBAAmB;AACpD,SAAS,YAAY;AACrB,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAE1B,IAAM,YAAY,UAAU,IAAI;AAShC,eAAe,uBAAuB,UAAoC;AACxE,MAAI;AAGF,QAAI;AACF,YAAM,SAAS,KAAK,UAAU,MAAM;AACpC,YAAM,EAAE,OAAO,IAAI,MAAM,UAAU,UAAU,MAAM,iBAAiB;AAAA,QAClE,UAAU;AAAA,MACZ,CAAC;AAED,aAAO,OAAO,KAAK,EAAE,SAAS;AAAA,IAChC,QAAQ;AAGN,UAAI;AACF,cAAM,EAAE,OAAO,IAAI,MAAM;AAAA,UACvB,0BAA0B,SAAS,QAAQ,uBAAuB,MAAM,CAAC;AAAA,UACzE,EAAE,UAAU,QAAQ;AAAA,QACtB;AACA,eAAO,OAAO,KAAK,EAAE,SAAS;AAAA,MAChC,QAAQ;AAEN,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,iBAAiB,UAA4B;AACpD,QAAM,YAAsB,CAAC;AAG7B,QAAM,YAAY,KAAK,UAAU,QAAQ,YAAY;AACrD,MAAI,WAAW,SAAS,GAAG;AACzB,cAAU,KAAK,SAAS;AAAA,EAC1B;AAGA,QAAM,UAAU,KAAK,UAAU,QAAQ,MAAM;AAC7C,MAAI,WAAW,OAAO,GAAG;AACvB,UAAM,qBAAqB,CAAC,QAAgB;AAC1C,YAAM,UAAU,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC;AACxD,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAW,KAAK,KAAK,MAAM,IAAI;AACrC,YAAI,MAAM,YAAY,GAAG;AACvB,6BAAmB,QAAQ;AAAA,QAC7B,WAAW,MAAM,KAAK,SAAS,OAAO,GAAG;AACvC,oBAAU,KAAK,QAAQ;AAAA,QACzB;AAAA,MACF;AAAA,IACF;AACA,QAAI;AACF,yBAAmB,OAAO;AAAA,IAC5B,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AACT;AAYA,eAAsB,kBAAkB,UAIrC;AACD,QAAM,SAAS;AAAA,IACb,OAAO,CAAC;AAAA,IACR,SAAS,CAAC;AAAA,IACV,QAAQ,CAAC;AAAA,EACX;AAGA,QAAM,YAAY,iBAAiB,QAAQ;AAC3C,SAAO,QAAQ;AAEf,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAO;AAAA,EACT;AAGA,QAAM,kBAAkB,MAAM,uBAAuB,QAAQ;AAE7D,MAAI,iBAAiB;AAEnB,WAAO,OAAO,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,OAAO;AAAA,IACT,CAAC;AACD,WAAO;AAAA,EACT;AAGA,aAAW,YAAY,WAAW;AAChC,QAAI;AACF,iBAAW,QAAQ;AACnB,aAAO,QAAQ,KAAK,QAAQ;AAAA,IAC9B,SAAS,OAAgB;AACvB,YAAM,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACjE,aAAO,OAAO,KAAK,EAAE,MAAM,UAAU,OAAO,IAAI,CAAC;AAAA,IACnD;AAAA,EACF;AAEA,SAAO;AACT;AAUA,eAAsB,yBAAyB,eAAwD;AACrG,QAAM,SAAiC,CAAC;AAExC,QAAM,iBAAiB,WAAW,KAAK,eAAe,MAAM,CAAC;AAE7D,MAAI,gBAAgB;AAClB,QAAI;AACF,YAAM,EAAE,OAAO,IAAI,MAAM,UAAU,sBAAsB,EAAE,KAAK,eAAe,UAAU,QAAQ,CAAC;AAClG,aAAO,GAAG,IAAI,OAAO,KAAK;AAAA,IAC5B,QAAQ;AAAA,IAAa;AAAA,EACvB,OAAO;AACL,QAAI;AACF,YAAM,UAAU,YAA
Y,eAAe,EAAE,eAAe,KAAK,CAAC;AAClE,iBAAW,SAAS,SAAS;AAC3B,YAAI,CAAC,MAAM,YAAY,KAAK,MAAM,KAAK,WAAW,GAAG,EAAG;AACxD,cAAM,UAAU,KAAK,eAAe,MAAM,IAAI;AAC9C,YAAI,CAAC,WAAW,KAAK,SAAS,MAAM,CAAC,EAAG;AACxC,YAAI;AACF,gBAAM,EAAE,OAAO,IAAI,MAAM,UAAU,sBAAsB,EAAE,KAAK,SAAS,UAAU,QAAQ,CAAC;AAC5F,iBAAO,MAAM,IAAI,IAAI,OAAO,KAAK;AAAA,QACnC,QAAQ;AAAA,QAA2B;AAAA,MACrC;AAAA,IACF,QAAQ;AAAA,IAAa;AAAA,EACvB;AAEA,SAAO;AACT;AAQA,eAAsB,cAAc,UAAoC;AACtE,QAAM,YAAY,iBAAiB,QAAQ;AAC3C,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,MAAM,uBAAuB,QAAQ;AAC7D,SAAO,CAAC;AACV;","names":[]}
package/dist/index.d.ts CHANGED
@@ -523,6 +523,8 @@ declare class GitHubTracker implements IssueTracker {
523
523
  getComments(issueId: string): Promise<Comment[]>;
524
524
  addComment(issueId: string, body: string): Promise<Comment>;
525
525
  transitionIssue(id: string, state: IssueState): Promise<void>;
526
+ /** Ensure a label exists in the repo, creating it if needed. */
527
+ private ensureLabelExists;
526
528
  linkPR(issueId: string, prUrl: string): Promise<void>;
527
529
  private normalizeIssue;
528
530
  private mapStateFromGitHub;
@@ -829,5 +831,14 @@ declare function getProviderEnv(provider: ProviderConfig, apiKey: string): Recor
829
831
  * Must be called before spawning the agent.
830
832
  */
831
833
  declare function setupCredentialFileAuth(provider: ProviderConfig, workspacePath: string): void;
834
+ /**
835
+ * Clear credential-file auth from workspace settings.
836
+ *
837
+ * When switching from a credential-file provider (e.g. Kimi) to a static/plan-based
838
+ * provider (e.g. Anthropic), the apiKeyHelper must be removed from
839
+ * .claude/settings.local.json. Otherwise Claude Code will keep using the stale
840
+ * token helper and fail with "Invalid API key".
841
+ */
842
+ declare function clearCredentialFileAuth(workspacePath: string): void;
832
843
 
833
- export { AGENTS_DIR, ARCHIVES_DIR, type AnthropicModel, type ApiKeysConfig, BACKUPS_DIR, BIN_DIR, type BackupInfo, CACHE_AGENTS_DIR, CACHE_MANIFEST, CACHE_RULES_DIR, CACHE_SKILLS_DIR, CERTS_DIR, CLAUDE_DIR, CLAUDE_MD_TEMPLATES, COMMANDS_DIR, CONFIG_DIR, CONFIG_FILE, COSTS_DIR, type Comment, type ComplexityLevel, type ComplexityModels, DOCS_DIR, type DevrootSyncItem, type GitHubConfig, GitHubTracker, type GitLabConfig, GitLabTracker, type GoogleModel, HEARTBEATS_DIR, type HookItem, INIT_DIRS, type Issue, type IssueFilters, IssueNotFoundError, type IssueState, type IssueTracker, type IssueUpdate, type KimiModel, LEGACY_RUNTIME_DIRS, type LinearConfig, LinearTracker, type LinkDirection, LinkManager, type MigrationResult, type ModelId, type ModelsConfig, type NewIssue, NotImplementedError, type OpenAIModel, PANOPTICON_HOME, PRDS_DIR, PRD_DRAFTS_DIR, PRD_PUBLISHED_DIR, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_ACTIVE_SUBDIR, PROJECT_PRDS_COMPLETED_SUBDIR, PROJECT_PRDS_PLANNED_SUBDIR, PROJECT_PRDS_SUBDIR, PROVIDERS, type PanopticonConfig, type ProviderAuthType, type ProviderCompatibility, type ProviderConfig, type ProviderName, type RallyConfig, type RefreshCacheResult, type RemoteConfig, type RemoteExeConfig, SETTINGS_FILE, SKILLS_DIR, SOURCE_AGENTS_DIR, SOURCE_DEV_SKILLS_DIR, SOURCE_RULES_DIR, SOURCE_SCRIPTS_DIR, SOURCE_SKILLS_DIR, SOURCE_TEMPLATES_DIR, SOURCE_TRAEFIK_TEMPLATES, SYNC_TARGET, type SettingsConfig, type ShadowConfig, type Shell, type SpecialistModels, type SyncItem, type SyncOptions, type SyncPlan, type SyncResult, TEMPLATES_DIR, TRAEFIK_CERTS_DIR, TRAEFIK_DIR, TRAEFIK_DYNAMIC_DIR, TrackerAuthError, type TrackerConfig, type TrackerConfigItem, type TrackerLink, type TrackerType, type TrackersConfig, type ZAIModel, addAlias, cleanOldBackups, createBackup, createBackupTimestamp, createTracker, createTrackerFromConfig, detectShell, executeSync, findDevrootForProject, formatIssueRef, getAgentCommand, getAliasInstructions, getAllTrackers, getAvailableModels, 
getClaudeModelFlag, getDashboardApiUrl, getDefaultConfig, getDefaultSettings, getDevrootPath, getDirectProviders, getLinkManager, getPanopticonHome, getPrimaryTracker, getProviderEnv, getProviderForModel, getRouterProviders, getSecondaryTracker, getShellRcFile, hasAlias, isAnthropicModel, isDevMode, isPanopticonSymlink, listBackups, loadConfig, loadSettings, migrateFromPersonalSymlinks, needsRouter, parseIssueRef, planHooksSync, planSync, refreshCache, requiresRouter, restoreBackup, saveConfig, saveSettings, setupCredentialFileAuth, syncHooks, syncStatusline, validateSettings };
844
+ export { AGENTS_DIR, ARCHIVES_DIR, type AnthropicModel, type ApiKeysConfig, BACKUPS_DIR, BIN_DIR, type BackupInfo, CACHE_AGENTS_DIR, CACHE_MANIFEST, CACHE_RULES_DIR, CACHE_SKILLS_DIR, CERTS_DIR, CLAUDE_DIR, CLAUDE_MD_TEMPLATES, COMMANDS_DIR, CONFIG_DIR, CONFIG_FILE, COSTS_DIR, type Comment, type ComplexityLevel, type ComplexityModels, DOCS_DIR, type DevrootSyncItem, type GitHubConfig, GitHubTracker, type GitLabConfig, GitLabTracker, type GoogleModel, HEARTBEATS_DIR, type HookItem, INIT_DIRS, type Issue, type IssueFilters, IssueNotFoundError, type IssueState, type IssueTracker, type IssueUpdate, type KimiModel, LEGACY_RUNTIME_DIRS, type LinearConfig, LinearTracker, type LinkDirection, LinkManager, type MigrationResult, type ModelId, type ModelsConfig, type NewIssue, NotImplementedError, type OpenAIModel, PANOPTICON_HOME, PRDS_DIR, PRD_DRAFTS_DIR, PRD_PUBLISHED_DIR, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_ACTIVE_SUBDIR, PROJECT_PRDS_COMPLETED_SUBDIR, PROJECT_PRDS_PLANNED_SUBDIR, PROJECT_PRDS_SUBDIR, PROVIDERS, type PanopticonConfig, type ProviderAuthType, type ProviderCompatibility, type ProviderConfig, type ProviderName, type RallyConfig, type RefreshCacheResult, type RemoteConfig, type RemoteExeConfig, SETTINGS_FILE, SKILLS_DIR, SOURCE_AGENTS_DIR, SOURCE_DEV_SKILLS_DIR, SOURCE_RULES_DIR, SOURCE_SCRIPTS_DIR, SOURCE_SKILLS_DIR, SOURCE_TEMPLATES_DIR, SOURCE_TRAEFIK_TEMPLATES, SYNC_TARGET, type SettingsConfig, type ShadowConfig, type Shell, type SpecialistModels, type SyncItem, type SyncOptions, type SyncPlan, type SyncResult, TEMPLATES_DIR, TRAEFIK_CERTS_DIR, TRAEFIK_DIR, TRAEFIK_DYNAMIC_DIR, TrackerAuthError, type TrackerConfig, type TrackerConfigItem, type TrackerLink, type TrackerType, type TrackersConfig, type ZAIModel, addAlias, cleanOldBackups, clearCredentialFileAuth, createBackup, createBackupTimestamp, createTracker, createTrackerFromConfig, detectShell, executeSync, findDevrootForProject, formatIssueRef, getAgentCommand, getAliasInstructions, getAllTrackers, 
getAvailableModels, getClaudeModelFlag, getDashboardApiUrl, getDefaultConfig, getDefaultSettings, getDevrootPath, getDirectProviders, getLinkManager, getPanopticonHome, getPrimaryTracker, getProviderEnv, getProviderForModel, getRouterProviders, getSecondaryTracker, getShellRcFile, hasAlias, isAnthropicModel, isDevMode, isPanopticonSymlink, listBackups, loadConfig, loadSettings, migrateFromPersonalSymlinks, needsRouter, parseIssueRef, planHooksSync, planSync, refreshCache, requiresRouter, restoreBackup, saveConfig, saveSettings, setupCredentialFileAuth, syncHooks, syncStatusline, validateSettings };
package/dist/index.js CHANGED
@@ -21,7 +21,7 @@ import {
21
21
  restoreBackup,
22
22
  syncHooks,
23
23
  syncStatusline
24
- } from "./chunk-XKT5MHPT.js";
24
+ } from "./chunk-4YSYJ4HM.js";
25
25
  import "./chunk-AQXETQHW.js";
26
26
  import {
27
27
  GitHubTracker,
@@ -32,9 +32,10 @@ import {
32
32
  getAllTrackers,
33
33
  getPrimaryTracker,
34
34
  getSecondaryTracker
35
- } from "./chunk-XFR2DLMR.js";
35
+ } from "./chunk-76F6DSVS.js";
36
36
  import {
37
37
  PROVIDERS,
38
+ clearCredentialFileAuth,
38
39
  getAgentCommand,
39
40
  getAvailableModels,
40
41
  getClaudeModelFlag,
@@ -52,7 +53,7 @@ import {
52
53
  saveSettings,
53
54
  setupCredentialFileAuth,
54
55
  validateSettings
55
- } from "./chunk-7XNJJBH6.js";
56
+ } from "./chunk-HJSM6E6U.js";
56
57
  import {
57
58
  findDevrootForProject,
58
59
  getDashboardApiUrl,
@@ -177,6 +178,7 @@ export {
177
178
  TrackerAuthError,
178
179
  addAlias,
179
180
  cleanOldBackups,
181
+ clearCredentialFileAuth,
180
182
  createBackup,
181
183
  createBackupTimestamp,
182
184
  createTracker,
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/index.ts"],"sourcesContent":["// Panopticon CLI - Main exports for library usage\nexport * from './lib/paths.js';\nexport * from './lib/config.js';\nexport * from './lib/shell.js';\nexport * from './lib/backup.js';\nexport * from './lib/sync.js';\nexport * from './lib/tracker/index.js';\nexport * from './lib/providers.js';\nexport * from './lib/settings.js';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AACA;AACA;AAKA;AACA;","names":[]}
1
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["// Panopticon CLI - Main exports for library usage\nexport * from './lib/paths.js';\nexport * from './lib/config.js';\nexport * from './lib/shell.js';\nexport * from './lib/backup.js';\nexport * from './lib/sync.js';\nexport * from './lib/tracker/index.js';\nexport * from './lib/providers.js';\nexport * from './lib/settings.js';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AACA;AACA;AAKA;AACA;","names":[]}
@@ -2,6 +2,7 @@ import {
2
2
  PROJECTS_CONFIG_FILE,
3
3
  createDefaultProjectsConfig,
4
4
  extractTeamPrefix,
5
+ findProjectByPath,
5
6
  findProjectByTeam,
6
7
  findProjectsByRallyProject,
7
8
  getProject,
@@ -18,7 +19,7 @@ import {
18
19
  resolveProjectPath,
19
20
  saveProjectsConfig,
20
21
  unregisterProject
21
- } from "./chunk-OMNXYPXC.js";
22
+ } from "./chunk-2V4NF7J2.js";
22
23
  import "./chunk-ZTFNYOC7.js";
23
24
  import "./chunk-ZHC57RCV.js";
24
25
  init_projects();
@@ -26,6 +27,7 @@ export {
26
27
  PROJECTS_CONFIG_FILE,
27
28
  createDefaultProjectsConfig,
28
29
  extractTeamPrefix,
30
+ findProjectByPath,
29
31
  findProjectByTeam,
30
32
  findProjectsByRallyProject,
31
33
  getProject,
@@ -42,4 +44,4 @@ export {
42
44
  saveProjectsConfig,
43
45
  unregisterProject
44
46
  };
45
- //# sourceMappingURL=projects-JEIVIYC6.js.map
47
+ //# sourceMappingURL=projects-CFX3RTDL.js.map
@@ -13,7 +13,7 @@ import {
13
13
  extractTeamPrefix,
14
14
  findProjectByTeam,
15
15
  init_projects
16
- } from "./chunk-OMNXYPXC.js";
16
+ } from "./chunk-2V4NF7J2.js";
17
17
  import "./chunk-ZTFNYOC7.js";
18
18
  import {
19
19
  init_esm_shims
@@ -178,4 +178,4 @@ EOF`);
178
178
  export {
179
179
  createRemoteWorkspace
180
180
  };
181
- //# sourceMappingURL=remote-workspace-AHVHQEES.js.map
181
+ //# sourceMappingURL=remote-workspace-7FPGF2RM.js.map
@@ -5,7 +5,7 @@ import {
5
5
  loadReviewStatuses,
6
6
  saveReviewStatuses,
7
7
  setReviewStatus
8
- } from "./chunk-GFP3PIPB.js";
8
+ } from "./chunk-YGJ54GW2.js";
9
9
  import "./chunk-JQBV3Q2W.js";
10
10
  import "./chunk-ZHC57RCV.js";
11
11
  init_review_status();
@@ -16,4 +16,4 @@ export {
16
16
  saveReviewStatuses,
17
17
  setReviewStatus
18
18
  };
19
- //# sourceMappingURL=review-status-EPFG4XM7.js.map
19
+ //# sourceMappingURL=review-status-TDPSOU5J.js.map
@@ -1,17 +1,17 @@
1
1
  import {
2
2
  getRecentRunLogs,
3
3
  init_specialist_logs
4
- } from "./chunk-ASY7T35E.js";
4
+ } from "./chunk-OWHXCGVO.js";
5
5
  import {
6
6
  getModelId,
7
7
  init_work_type_router
8
- } from "./chunk-KJ2TRXNK.js";
8
+ } from "./chunk-FTCPTHIJ.js";
9
9
  import "./chunk-JQBV3Q2W.js";
10
- import "./chunk-7XNJJBH6.js";
10
+ import "./chunk-HJSM6E6U.js";
11
11
  import {
12
12
  getProject,
13
13
  init_projects
14
- } from "./chunk-OMNXYPXC.js";
14
+ } from "./chunk-2V4NF7J2.js";
15
15
  import {
16
16
  getPanopticonHome,
17
17
  init_paths
@@ -254,4 +254,4 @@ export {
254
254
  regenerateContextDigest,
255
255
  scheduleDigestGeneration
256
256
  };
257
- //# sourceMappingURL=specialist-context-T3NBMCIE.js.map
257
+ //# sourceMappingURL=specialist-context-WGUUYDWY.js.map
@@ -16,11 +16,11 @@ import {
16
16
  isRunLogActive,
17
17
  listRunLogs,
18
18
  parseLogMetadata
19
- } from "./chunk-ASY7T35E.js";
20
- import "./chunk-KJ2TRXNK.js";
19
+ } from "./chunk-OWHXCGVO.js";
20
+ import "./chunk-FTCPTHIJ.js";
21
21
  import "./chunk-JQBV3Q2W.js";
22
- import "./chunk-7XNJJBH6.js";
23
- import "./chunk-OMNXYPXC.js";
22
+ import "./chunk-HJSM6E6U.js";
23
+ import "./chunk-2V4NF7J2.js";
24
24
  import "./chunk-ZTFNYOC7.js";
25
25
  import "./chunk-ZHC57RCV.js";
26
26
  init_specialist_logs();
@@ -42,4 +42,4 @@ export {
42
42
  listRunLogs,
43
43
  parseLogMetadata
44
44
  };
45
- //# sourceMappingURL=specialist-logs-CVKD3YJ3.js.map
45
+ //# sourceMappingURL=specialist-logs-XJB5TCKJ.js.map
@@ -55,11 +55,11 @@ import {
55
55
  wakeSpecialist,
56
56
  wakeSpecialistOrQueue,
57
57
  wakeSpecialistWithTask
58
- } from "./chunk-ASY7T35E.js";
59
- import "./chunk-KJ2TRXNK.js";
58
+ } from "./chunk-OWHXCGVO.js";
59
+ import "./chunk-FTCPTHIJ.js";
60
60
  import "./chunk-JQBV3Q2W.js";
61
- import "./chunk-7XNJJBH6.js";
62
- import "./chunk-OMNXYPXC.js";
61
+ import "./chunk-HJSM6E6U.js";
62
+ import "./chunk-2V4NF7J2.js";
63
63
  import "./chunk-ZTFNYOC7.js";
64
64
  import "./chunk-ZHC57RCV.js";
65
65
  init_specialists();
@@ -120,4 +120,4 @@ export {
120
120
  wakeSpecialistOrQueue,
121
121
  wakeSpecialistWithTask
122
122
  };
123
- //# sourceMappingURL=specialists-TKAP6T6Z.js.map
123
+ //# sourceMappingURL=specialists-5LBRHYFA.js.map
@@ -4,9 +4,9 @@ import {
4
4
  ensureProjectCerts,
5
5
  generatePanopticonTraefikConfig,
6
6
  generateTlsConfig
7
- } from "./chunk-RBUO57TC.js";
7
+ } from "./chunk-NLQRED36.js";
8
8
  import "./chunk-FQ66DECN.js";
9
- import "./chunk-OMNXYPXC.js";
9
+ import "./chunk-2V4NF7J2.js";
10
10
  import "./chunk-ZTFNYOC7.js";
11
11
  import "./chunk-ZHC57RCV.js";
12
12
  export {
@@ -16,4 +16,4 @@ export {
16
16
  generatePanopticonTraefikConfig,
17
17
  generateTlsConfig
18
18
  };
19
- //# sourceMappingURL=traefik-QX4ZV4YG.js.map
19
+ //# sourceMappingURL=traefik-WFMQX2LY.js.map
@@ -4,7 +4,7 @@ import {
4
4
  preTrustDirectory,
5
5
  removeWorkspace,
6
6
  stopWorkspaceDocker
7
- } from "./chunk-PI7Y3PSN.js";
7
+ } from "./chunk-F5555J3A.js";
8
8
  import "./chunk-7SN4L4PH.js";
9
9
  import "./chunk-AQXETQHW.js";
10
10
  import "./chunk-ZTFNYOC7.js";
@@ -19,4 +19,4 @@ export {
19
19
  removeWorkspace,
20
20
  stopWorkspaceDocker
21
21
  };
22
- //# sourceMappingURL=workspace-manager-KLHUCIZV.js.map
22
+ //# sourceMappingURL=workspace-manager-E434Z45T.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "panopticon-cli",
3
- "version": "0.4.33",
3
+ "version": "0.5.1",
4
4
  "description": "Multi-agent orchestration for AI coding assistants (Claude Code, Codex, Cursor, Gemini CLI)",
5
5
  "keywords": [
6
6
  "ai-agents",
@@ -69,10 +69,10 @@ var PRD_PUBLISHED_DIR = join(PRDS_DIR, "published");
69
69
 
70
70
  // src/lib/cost.ts
71
71
  var DEFAULT_PRICING = [
72
- // Anthropic - 4.6 series
73
- { provider: "anthropic", model: "claude-opus-4.6", inputPer1k: 5e-3, outputPer1k: 0.025, cacheReadPer1k: 5e-4, cacheWrite5mPer1k: 625e-5, cacheWrite1hPer1k: 0.01, currency: "USD" },
74
- { provider: "anthropic", model: "claude-sonnet-4.5", inputPer1k: 3e-3, outputPer1k: 0.015, cacheReadPer1k: 3e-4, cacheWrite5mPer1k: 375e-5, cacheWrite1hPer1k: 6e-3, currency: "USD" },
75
- { provider: "anthropic", model: "claude-haiku-4.5", inputPer1k: 1e-3, outputPer1k: 5e-3, cacheReadPer1k: 1e-4, cacheWrite5mPer1k: 125e-5, cacheWrite1hPer1k: 2e-3, currency: "USD" },
72
+ // Anthropic - 4.6 series (API IDs use dashes: claude-opus-4-6, claude-sonnet-4-6, claude-haiku-4-5)
73
+ { provider: "anthropic", model: "claude-opus-4-6", inputPer1k: 5e-3, outputPer1k: 0.025, cacheReadPer1k: 5e-4, cacheWrite5mPer1k: 625e-5, cacheWrite1hPer1k: 0.01, currency: "USD" },
74
+ { provider: "anthropic", model: "claude-sonnet-4-6", inputPer1k: 3e-3, outputPer1k: 0.015, cacheReadPer1k: 3e-4, cacheWrite5mPer1k: 375e-5, cacheWrite1hPer1k: 6e-3, currency: "USD" },
75
+ { provider: "anthropic", model: "claude-haiku-4-5", inputPer1k: 1e-3, outputPer1k: 5e-3, cacheReadPer1k: 1e-4, cacheWrite5mPer1k: 125e-5, cacheWrite1hPer1k: 2e-3, currency: "USD" },
76
76
  // Anthropic - 4.x series
77
77
  { provider: "anthropic", model: "claude-opus-4-1", inputPer1k: 0.015, outputPer1k: 0.075, cacheReadPer1k: 15e-4, cacheWrite5mPer1k: 0.01875, cacheWrite1hPer1k: 0.03, currency: "USD" },
78
78
  { provider: "anthropic", model: "claude-opus-4", inputPer1k: 0.015, outputPer1k: 0.075, cacheReadPer1k: 15e-4, cacheWrite5mPer1k: 0.01875, cacheWrite1hPer1k: 0.03, currency: "USD" },
@@ -95,7 +95,7 @@ function calculateCost(usage, pricing) {
95
95
  let inputMultiplier = 1;
96
96
  let outputMultiplier = 1;
97
97
  const totalInputTokens = usage.inputTokens + (usage.cacheReadTokens || 0) + (usage.cacheWriteTokens || 0);
98
- if ((pricing.model === "claude-sonnet-4" || pricing.model === "claude-sonnet-4.5") && totalInputTokens > 2e5) {
98
+ if ((pricing.model === "claude-sonnet-4" || pricing.model === "claude-sonnet-4-6") && totalInputTokens > 2e5) {
99
99
  inputMultiplier = 2;
100
100
  outputMultiplier = 1.5;
101
101
  }
package/scripts/stop-hook CHANGED
@@ -57,5 +57,12 @@ if [ -x "$SPECIALIST_HOOK" ]; then
57
57
  "$SPECIALIST_HOOK" &
58
58
  fi
59
59
 
60
+ # Chain to work-agent completion detection hook
61
+ # Detects when a work agent forgot to call "pan work done" and nudges it
62
+ WORK_AGENT_HOOK="$HOME/.panopticon/bin/work-agent-stop-hook"
63
+ if [ -x "$WORK_AGENT_HOOK" ]; then
64
+ "$WORK_AGENT_HOOK" &
65
+ fi
66
+
60
67
  # Always exit successfully
61
68
  exit 0
@@ -0,0 +1,137 @@
1
#!/bin/bash
# ~/.panopticon/bin/work-agent-stop-hook
# Called when any agent goes idle — detects work agents that forgot "pan work done"
#
# Uses a lightweight AI model to analyze the last N lines of terminal output
# and determine if the agent completed its work but failed to signal completion.
# If so, sends a nudge message to the agent via tmux.
#
# Model selection priority:
#   1. PANOPTICON_COMPLETION_CHECK_MODEL env var
#   2. models.overrides.completion-check-hook setting in config.yaml
#   3. claude-haiku-4-5 (default)

# Don't use set -e - resilient to failures, never break Claude Code execution

# Resolve the agent ID: explicit env var wins, else the tmux session name.
if [ -n "$PANOPTICON_AGENT_ID" ]; then
  AGENT_ID="$PANOPTICON_AGENT_ID"
elif [ -n "$TMUX" ]; then
  AGENT_ID=$(tmux display-message -p '#S' 2>/dev/null)
else
  exit 0
fi

# Only run for work agents (agent-min-XXX, agent-pan-XXX, etc.)
case "$AGENT_ID" in
  agent-*)
    # Check it's not a specialist
    # NOTE(review): this inner pattern can never match — AGENT_ID already
    # matched "agent-*" so it cannot start with "specialist-". Kept as-is in
    # case other ID schemes are introduced; confirm the intended pattern
    # (perhaps "agent-specialist-*"?).
    case "$AGENT_ID" in
      specialist-*) exit 0 ;;
    esac
    ;;
  *)
    exit 0 # Not a work agent
    ;;
esac

# Extract issue ID from agent ID (e.g., "agent-min-725" -> "MIN-725")
ISSUE_ID=$(echo "$AGENT_ID" | sed 's/^agent-//' | tr '[:lower:]' '[:upper:]')

# Skip if completion marker already exists (agent already called pan work done)
COMPLETED_FILE="$HOME/.panopticon/agents/$AGENT_ID/completed"
if [ -f "$COMPLETED_FILE" ]; then
  exit 0
fi

# Cooldown: don't nudge more than once per 10 minutes
NUDGE_FILE="$HOME/.panopticon/agents/$AGENT_ID/.last-completion-nudge"
if [ -f "$NUDGE_FILE" ]; then
  LAST_NUDGE=$(cat "$NUDGE_FILE" 2>/dev/null || echo "0")
  NOW=$(date +%s)
  ELAPSED=$(( NOW - LAST_NUDGE ))
  if [ "$ELAPSED" -lt 600 ]; then
    exit 0 # Too soon since last nudge
  fi
fi

# Capture the last 80 lines of terminal output
OUTPUT=$(tmux capture-pane -t "$AGENT_ID" -p -S -80 2>/dev/null || echo "")
if [ -z "$OUTPUT" ]; then
  exit 0
fi

# Quick heuristic pre-check: skip the expensive AI call if the tail of the
# output clearly shows the agent is mid-work (e.g., running tests, editing
# files, reading, or an active tool-use marker).
# BUGFIX: the original piped `grep -q` (which prints nothing in quiet mode)
# into `tail -5 | grep -q '●'`, so the downstream grep always saw empty input
# and this guard could never trigger. Filter the last 5 lines directly.
if echo "$OUTPUT" | tail -5 | grep -qE '(Reading|Editing|Searching|Running|Compiling|Building|Installing|●.*(Bash|Read|Edit|Grep|Write|Glob))'; then
  exit 0
fi

# Check if the agent appears to be at an idle prompt (not mid-response)
if ! echo "$OUTPUT" | tail -10 | grep -qE '(^❯|Worked for)'; then
  exit 0 # Agent doesn't appear to be at an idle prompt
fi

# Determine model to use for completion check
# Priority: env var > config.yaml override > default
COMPLETION_MODEL="${PANOPTICON_COMPLETION_CHECK_MODEL:-}"
if [ -z "$COMPLETION_MODEL" ]; then
  CONFIG_FILE="$HOME/.panopticon/config.yaml"
  if [ -f "$CONFIG_FILE" ] && command -v grep &> /dev/null; then
    COMPLETION_MODEL=$(grep 'completion-check-hook:' "$CONFIG_FILE" 2>/dev/null | awk '{print $2}' | tr -d '"' || echo "")
  fi
fi
COMPLETION_MODEL="${COMPLETION_MODEL:-claude-haiku-4-5}"

# Build the analysis prompt
ANALYSIS_PROMPT="You are analyzing a work agent's terminal output to determine if it finished its work but forgot to call 'pan work done'.

The agent was working on issue $ISSUE_ID. Here is the last 80 lines of its terminal output:

<terminal_output>
$OUTPUT
</terminal_output>

Respond with EXACTLY one of these words (nothing else):
- FORGOT_COMPLETION — if the agent clearly finished its implementation work (closed beads, committed code, ran tests) but stopped without calling 'pan work done'
- STILL_WORKING — if the agent appears to have more work to do (mentioned next steps, was mid-task)
- STOPPED_FOR_INPUT — if the agent stopped because it needs human input or hit a blocker
- UNCLEAR — if you cannot determine the state"

# Run the analysis using claude CLI (headless, no interactive session)
# BUGFIX(review): dropped `--max-tokens 20` — it is not a documented claude
# CLI flag; an unrecognized flag makes the CLI exit non-zero, so the
# `|| echo "UNCLEAR"` fallback fired on every run and the nudge could never
# be sent. Verify against the installed CLI version.
RESULT=$(echo "$ANALYSIS_PROMPT" | claude -p --model "$COMPLETION_MODEL" 2>/dev/null || echo "UNCLEAR")

# Extract just the verdict (strip all whitespace, cap at 30 chars)
VERDICT=$(echo "$RESULT" | tr -d '[:space:]' | head -c 30)

# Log the check (best-effort; never fail the hook on logging errors)
LOG_DIR="$HOME/.panopticon/logs"
mkdir -p "$LOG_DIR"
echo "[$(date -Iseconds)] work-agent-stop-hook: $AGENT_ID ($ISSUE_ID) -> $VERDICT (model: $COMPLETION_MODEL)" \
  >> "$LOG_DIR/hooks.log" 2>/dev/null || true

if [ "$VERDICT" = "FORGOT_COMPLETION" ]; then
  # Record nudge timestamp for cooldown
  mkdir -p "$(dirname "$NUDGE_FILE")"
  date +%s > "$NUDGE_FILE" 2>/dev/null || true

  # Write the nudge message to a temp file and use load-buffer + paste-buffer
  # (the reliable tmux message delivery pattern from CLAUDE.md)
  NUDGE_MSG="You stopped without calling pan work done. If your implementation is complete, you MUST run this command now:

pan work done $ISSUE_ID -c \"Implementation complete\"

If you still have remaining tasks, continue working on them. Do NOT stop until all work is done AND you have called pan work done."

  TMPFILE=$(mktemp)
  echo "$NUDGE_MSG" > "$TMPFILE"
  tmux load-buffer "$TMPFILE" 2>/dev/null
  tmux paste-buffer -t "$AGENT_ID" 2>/dev/null
  sleep 0.3
  tmux send-keys -t "$AGENT_ID" C-m 2>/dev/null
  rm -f "$TMPFILE"

  echo "[$(date -Iseconds)] work-agent-stop-hook: Sent completion nudge to $AGENT_ID" \
    >> "$LOG_DIR/hooks.log" 2>/dev/null || true
fi

exit 0