panopticon-cli 0.4.32 → 0.4.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138)
  1. package/dist/{agents-BDFHF4T3.js → agents-VLK4BMVA.js} +10 -7
  2. package/dist/chunk-7SN4L4PH.js +150 -0
  3. package/dist/chunk-7SN4L4PH.js.map +1 -0
  4. package/dist/chunk-7XNJJBH6.js +538 -0
  5. package/dist/chunk-7XNJJBH6.js.map +1 -0
  6. package/dist/chunk-AQXETQHW.js +113 -0
  7. package/dist/chunk-AQXETQHW.js.map +1 -0
  8. package/dist/{chunk-2NIAOCIC.js → chunk-ASY7T35E.js} +170 -64
  9. package/dist/chunk-ASY7T35E.js.map +1 -0
  10. package/dist/chunk-B3PF6JPQ.js +212 -0
  11. package/dist/chunk-B3PF6JPQ.js.map +1 -0
  12. package/dist/{chunk-XP2DXWYP.js → chunk-BKCWRMUX.js} +88 -35
  13. package/dist/chunk-BKCWRMUX.js.map +1 -0
  14. package/dist/chunk-CFCUOV3Q.js +669 -0
  15. package/dist/chunk-CFCUOV3Q.js.map +1 -0
  16. package/dist/chunk-CWELWPWQ.js +32 -0
  17. package/dist/chunk-CWELWPWQ.js.map +1 -0
  18. package/dist/chunk-DI7ABPNQ.js +352 -0
  19. package/dist/chunk-DI7ABPNQ.js.map +1 -0
  20. package/dist/{chunk-VU4FLXV5.js → chunk-FQ66DECN.js} +31 -4
  21. package/dist/chunk-FQ66DECN.js.map +1 -0
  22. package/dist/{review-status-GWQYY77L.js → chunk-GFP3PIPB.js} +14 -7
  23. package/dist/chunk-GFP3PIPB.js.map +1 -0
  24. package/dist/chunk-JQBV3Q2W.js +29 -0
  25. package/dist/chunk-JQBV3Q2W.js.map +1 -0
  26. package/dist/{chunk-BWGFN44T.js → chunk-JT4O4YVM.js} +28 -16
  27. package/dist/chunk-JT4O4YVM.js.map +1 -0
  28. package/dist/{chunk-VIWUCJ4V.js → chunk-KJ2TRXNK.js} +34 -36
  29. package/dist/chunk-KJ2TRXNK.js.map +1 -0
  30. package/dist/{chunk-JY7R7V4G.js → chunk-OMNXYPXC.js} +2 -2
  31. package/dist/chunk-OMNXYPXC.js.map +1 -0
  32. package/dist/chunk-PELXV435.js +215 -0
  33. package/dist/chunk-PELXV435.js.map +1 -0
  34. package/dist/chunk-PI7Y3PSN.js +797 -0
  35. package/dist/chunk-PI7Y3PSN.js.map +1 -0
  36. package/dist/chunk-RBUO57TC.js +154 -0
  37. package/dist/chunk-RBUO57TC.js.map +1 -0
  38. package/dist/chunk-XFR2DLMR.js +600 -0
  39. package/dist/chunk-XFR2DLMR.js.map +1 -0
  40. package/dist/chunk-XKT5MHPT.js +677 -0
  41. package/dist/chunk-XKT5MHPT.js.map +1 -0
  42. package/dist/{chunk-HCTJFIJJ.js → chunk-YLPSQAM2.js} +2 -2
  43. package/dist/{chunk-HCTJFIJJ.js.map → chunk-YLPSQAM2.js.map} +1 -1
  44. package/dist/{chunk-6HXKTOD7.js → chunk-ZTFNYOC7.js} +53 -38
  45. package/dist/chunk-ZTFNYOC7.js.map +1 -0
  46. package/dist/cli/index.js +4362 -2927
  47. package/dist/cli/index.js.map +1 -1
  48. package/dist/{config-BOAMSKTF.js → config-4CJNUE3O.js} +7 -3
  49. package/dist/dashboard/prompts/merge-agent.md +217 -0
  50. package/dist/dashboard/prompts/review-agent.md +409 -0
  51. package/dist/dashboard/prompts/sync-main.md +84 -0
  52. package/dist/dashboard/prompts/test-agent.md +283 -0
  53. package/dist/dashboard/prompts/work-agent.md +247 -0
  54. package/dist/dashboard/public/assets/index-UjZq6ykz.css +32 -0
  55. package/dist/dashboard/public/assets/index-kAJqtLDO.js +708 -0
  56. package/dist/dashboard/public/index.html +2 -2
  57. package/dist/dashboard/server.js +15194 -3160
  58. package/dist/{dns-L3L2BB27.js → dns-7BDJSD3E.js} +4 -2
  59. package/dist/{feedback-writer-AAKF5BTK.js → feedback-writer-LVZ5TFYZ.js} +8 -4
  60. package/dist/feedback-writer-LVZ5TFYZ.js.map +1 -0
  61. package/dist/hume-WMAUBBV2.js +13 -0
  62. package/dist/index.d.ts +153 -40
  63. package/dist/index.js +65 -23
  64. package/dist/index.js.map +1 -1
  65. package/dist/{projects-VXRUCMLM.js → projects-JEIVIYC6.js} +3 -3
  66. package/dist/rally-RKFSWC7E.js +10 -0
  67. package/dist/{remote-agents-Z3R2A5BN.js → remote-agents-TFSMW7GN.js} +2 -2
  68. package/dist/{remote-workspace-2G6V2KNP.js → remote-workspace-AHVHQEES.js} +8 -8
  69. package/dist/review-status-EPFG4XM7.js +19 -0
  70. package/dist/shadow-state-5MDP6YXH.js +30 -0
  71. package/dist/shadow-state-5MDP6YXH.js.map +1 -0
  72. package/dist/{specialist-context-N32QBNNQ.js → specialist-context-T3NBMCIE.js} +8 -7
  73. package/dist/{specialist-context-N32QBNNQ.js.map → specialist-context-T3NBMCIE.js.map} +1 -1
  74. package/dist/{specialist-logs-GF3YV4KL.js → specialist-logs-CVKD3YJ3.js} +7 -6
  75. package/dist/specialist-logs-CVKD3YJ3.js.map +1 -0
  76. package/dist/{specialists-JBIW6MP4.js → specialists-TKAP6T6Z.js} +7 -6
  77. package/dist/specialists-TKAP6T6Z.js.map +1 -0
  78. package/dist/tldr-daemon-T3THOUGT.js +21 -0
  79. package/dist/tldr-daemon-T3THOUGT.js.map +1 -0
  80. package/dist/traefik-QX4ZV4YG.js +19 -0
  81. package/dist/traefik-QX4ZV4YG.js.map +1 -0
  82. package/dist/tunnel-W2GZBLEV.js +13 -0
  83. package/dist/tunnel-W2GZBLEV.js.map +1 -0
  84. package/dist/workspace-manager-KLHUCIZV.js +22 -0
  85. package/dist/workspace-manager-KLHUCIZV.js.map +1 -0
  86. package/package.json +2 -2
  87. package/scripts/heartbeat-hook +37 -10
  88. package/scripts/patches/llm-tldr-tsx-support.py +109 -0
  89. package/scripts/pre-tool-hook +26 -15
  90. package/scripts/record-cost-event.js +177 -43
  91. package/scripts/record-cost-event.ts +87 -3
  92. package/scripts/statusline.sh +169 -0
  93. package/scripts/stop-hook +14 -11
  94. package/scripts/tldr-post-edit +72 -0
  95. package/scripts/tldr-read-enforcer +275 -0
  96. package/skills/check-merged/SKILL.md +143 -0
  97. package/skills/crash-investigation/SKILL.md +301 -0
  98. package/skills/github-cli/SKILL.md +185 -0
  99. package/skills/pan-reopen/SKILL.md +65 -0
  100. package/skills/pan-sync-main/SKILL.md +87 -0
  101. package/skills/pan-tldr/SKILL.md +149 -0
  102. package/skills/react-best-practices/SKILL.md +125 -0
  103. package/skills/spec-readiness/REPORT-TEMPLATE.md +158 -0
  104. package/skills/spec-readiness/SCORING-REFERENCE.md +369 -0
  105. package/skills/spec-readiness/SKILL.md +400 -0
  106. package/skills/spec-readiness-setup/SKILL.md +361 -0
  107. package/skills/workspace-status/SKILL.md +56 -0
  108. package/templates/traefik/dynamic/panopticon.yml.template +0 -5
  109. package/templates/traefik/traefik.yml +0 -8
  110. package/dist/chunk-2NIAOCIC.js.map +0 -1
  111. package/dist/chunk-3XAB4IXF.js +0 -51
  112. package/dist/chunk-3XAB4IXF.js.map +0 -1
  113. package/dist/chunk-6HXKTOD7.js.map +0 -1
  114. package/dist/chunk-BBCUK6N2.js +0 -241
  115. package/dist/chunk-BBCUK6N2.js.map +0 -1
  116. package/dist/chunk-BWGFN44T.js.map +0 -1
  117. package/dist/chunk-ELK6Q7QI.js +0 -545
  118. package/dist/chunk-ELK6Q7QI.js.map +0 -1
  119. package/dist/chunk-JY7R7V4G.js.map +0 -1
  120. package/dist/chunk-LYSBSZYV.js +0 -1523
  121. package/dist/chunk-LYSBSZYV.js.map +0 -1
  122. package/dist/chunk-VIWUCJ4V.js.map +0 -1
  123. package/dist/chunk-VU4FLXV5.js.map +0 -1
  124. package/dist/chunk-XP2DXWYP.js.map +0 -1
  125. package/dist/dashboard/public/assets/index-C7X6LP5Z.css +0 -32
  126. package/dist/dashboard/public/assets/index-ClYqpcAJ.js +0 -645
  127. package/dist/feedback-writer-AAKF5BTK.js.map +0 -1
  128. package/dist/review-status-GWQYY77L.js.map +0 -1
  129. package/dist/traefik-CUJM6K5Z.js +0 -12
  130. /package/dist/{agents-BDFHF4T3.js.map → agents-VLK4BMVA.js.map} +0 -0
  131. /package/dist/{config-BOAMSKTF.js.map → config-4CJNUE3O.js.map} +0 -0
  132. /package/dist/{dns-L3L2BB27.js.map → dns-7BDJSD3E.js.map} +0 -0
  133. /package/dist/{projects-VXRUCMLM.js.map → hume-WMAUBBV2.js.map} +0 -0
  134. /package/dist/{remote-agents-Z3R2A5BN.js.map → projects-JEIVIYC6.js.map} +0 -0
  135. /package/dist/{specialist-logs-GF3YV4KL.js.map → rally-RKFSWC7E.js.map} +0 -0
  136. /package/dist/{specialists-JBIW6MP4.js.map → remote-agents-TFSMW7GN.js.map} +0 -0
  137. /package/dist/{remote-workspace-2G6V2KNP.js.map → remote-workspace-AHVHQEES.js.map} +0 -0
  138. /package/dist/{traefik-CUJM6K5Z.js.map → review-status-EPFG4XM7.js.map} +0 -0
@@ -0,0 +1,113 @@
1
+ import {
2
+ __esm,
3
+ init_esm_shims
4
+ } from "./chunk-ZHC57RCV.js";
5
+
6
+ // src/lib/manifest.ts
7
+ import { createHash } from "crypto";
8
+ import { existsSync, mkdirSync, readFileSync, readdirSync, writeFileSync } from "fs";
9
+ import { join, relative } from "path";
10
/**
 * Compute the SHA-256 digest of a file's contents.
 * @param {string} filePath - Path of the file to hash.
 * @returns {string} Digest in the form "sha256:<hex>".
 */
function hashFile(filePath) {
  const digest = createHash("sha256")
    .update(readFileSync(filePath))
    .digest("hex");
  return "sha256:" + digest;
}
15
/**
 * Build a fresh manifest with no installed entries.
 * @returns {{version: 1, managed_by: "panopticon", installed: {}}}
 */
function createEmptyManifest() {
  const manifest = {
    version: 1,
    managed_by: "panopticon",
    installed: {}
  };
  return manifest;
}
22
/**
 * Read a manifest from disk.
 * Returns an empty manifest when the file is missing, unparseable, or does
 * not match the expected schema (version 1, managed by panopticon, with an
 * `installed` object).
 * @param {string} manifestPath - Path to the manifest JSON file.
 * @returns {object} A valid manifest, never null/undefined.
 */
function readManifest(manifestPath) {
  if (!existsSync(manifestPath)) {
    return createEmptyManifest();
  }
  try {
    const raw = JSON.parse(readFileSync(manifestPath, "utf-8"));
    // `typeof null === "object"`, so explicitly reject a null `installed`
    // field — otherwise callers indexing `manifest.installed[...]` would throw.
    if (raw.version === 1 && raw.managed_by === "panopticon" && typeof raw.installed === "object" && raw.installed !== null) {
      return raw;
    }
    return createEmptyManifest();
  } catch {
    // Unreadable or malformed JSON: treat as if no manifest exists.
    return createEmptyManifest();
  }
}
36
/**
 * Persist a manifest to disk as pretty-printed JSON with a trailing newline,
 * creating the parent directory first if needed.
 * @param {string} manifestPath - Destination file path.
 * @param {object} manifest - Manifest object to serialize.
 */
function writeManifest(manifestPath, manifest) {
  const parentDir = join(manifestPath, "..");
  mkdirSync(parentDir, { recursive: true });
  const serialized = `${JSON.stringify(manifest, null, 2)}\n`;
  writeFileSync(manifestPath, serialized, "utf-8");
}
40
/**
 * Record (or overwrite) the manifest entry for one distributed file.
 * @param {object} manifest - Manifest to mutate.
 * @param {string} relativePath - Key under `installed` (e.g. "skills/x/SKILL.md").
 * @param {string} hash - Content hash ("sha256:<hex>").
 * @param {string} source - Label describing who placed the file.
 */
function setManifestEntry(manifest, relativePath, hash, source) {
  const entry = {
    hash,
    source,
    // Timestamp of this install/update, ISO 8601.
    installed_at: new Date().toISOString()
  };
  manifest.installed[relativePath] = entry;
}
47
/**
 * Decide what to do with a target file by comparing it to the manifest.
 * @param {string} targetFile - Absolute path at the install location.
 * @param {string} relativePath - Manifest key for this file.
 * @param {object} manifest - Manifest to consult.
 * @returns {object} One of:
 *   {action:"new"}        - nothing on disk, safe to copy
 *   {action:"user-owned"} - on disk but not in manifest, never touch
 *   {action:"update"}     - on disk and unmodified since we placed it
 *   {action:"modified"}   - on disk but changed since we placed it
 */
function compareFileToManifest(targetFile, relativePath, manifest) {
  if (!existsSync(targetFile)) {
    return { action: "new" };
  }
  const entry = manifest.installed[relativePath];
  if (!entry) {
    // Present on disk but unknown to us: the user put it there.
    return { action: "user-owned" };
  }
  const currentHash = hashFile(targetFile);
  return currentHash === entry.hash
    ? { action: "update", currentHash }
    : { action: "modified", currentHash, manifestHash: entry.hash };
}
61
/**
 * Recursively list every regular file under `sourceDir`.
 * @param {string} sourceDir - Root directory to walk; missing dir yields [].
 * @param {string} prefix - Prepended to each relative path (e.g. "skills/").
 * @returns {Array<{absolutePath: string, relativePath: string}>}
 */
function collectSourceFiles(sourceDir, prefix) {
  const found = [];
  if (!existsSync(sourceDir)) {
    return found;
  }
  // Depth-first walk; entries that are neither files nor directories
  // (sockets, fifos, symlinks) are skipped.
  const visit = (dir) => {
    for (const dirent of readdirSync(dir, { withFileTypes: true })) {
      const absolutePath = join(dir, dirent.name);
      if (dirent.isDirectory()) {
        visit(absolutePath);
        continue;
      }
      if (!dirent.isFile()) {
        continue;
      }
      found.push({
        absolutePath,
        relativePath: prefix + relative(sourceDir, absolutePath)
      });
    }
  };
  visit(sourceDir);
  return found;
}
84
/**
 * Build a manifest by hashing every file under the given category
 * subdirectories of `baseDir`.
 * @param {string} baseDir - Root to scan (e.g. ~/.panopticon/).
 * @param {string[]} categories - Subdirectory names to include.
 * @param {string} source - Source label stamped on every entry.
 * @returns {object} The populated manifest.
 */
function buildManifestFromDirectory(baseDir, categories, source) {
  const manifest = createEmptyManifest();
  for (const category of categories) {
    const files = collectSourceFiles(join(baseDir, category), `${category}/`);
    for (const { absolutePath, relativePath } of files) {
      setManifestEntry(manifest, relativePath, hashFile(absolutePath), source);
    }
  }
  return manifest;
}
96
// esbuild lazy-init wrapper: calling init_manifest() runs the module body of
// src/lib/manifest.ts exactly once (per the __esm contract from the shared
// chunk), initializing the ESM shims first.
var init_manifest = __esm({
  "src/lib/manifest.ts"() {
    "use strict";
    init_esm_shims();
  }
});
102
+
103
+ export {
104
+ hashFile,
105
+ readManifest,
106
+ writeManifest,
107
+ setManifestEntry,
108
+ compareFileToManifest,
109
+ collectSourceFiles,
110
+ buildManifestFromDirectory,
111
+ init_manifest
112
+ };
113
+ //# sourceMappingURL=chunk-AQXETQHW.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/lib/manifest.ts"],"sourcesContent":["import { createHash } from 'crypto';\nimport { existsSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync } from 'fs';\nimport { join, relative } from 'path';\n\n/**\n * Manifest entry for a single distributed file.\n */\nexport interface ManifestEntry {\n hash: string; // sha256:<hex>\n source: string; // \"panopticon\" | \"project-template\" | custom\n installed_at: string; // ISO 8601 timestamp\n}\n\n/**\n * The manifest schema: tracks what Panopticon placed at a target location.\n */\nexport interface Manifest {\n version: 1;\n managed_by: 'panopticon';\n installed: Record<string, ManifestEntry>;\n}\n\n/**\n * Result of comparing a file against the manifest.\n */\nexport type FileStatus =\n | { action: 'new' } // File doesn't exist at target — safe to copy\n | { action: 'update'; currentHash: string } // File exists, hash matches manifest — we placed it, user didn't modify\n | { action: 'modified'; currentHash: string; manifestHash: string } // File exists, hash differs from manifest — user modified\n | { action: 'user-owned' } // File exists but NOT in manifest — user placed it, never touch\n ;\n\n/**\n * Compute SHA-256 hash of a file, prefixed with \"sha256:\".\n */\nexport function hashFile(filePath: string): string {\n const content = readFileSync(filePath);\n const hex = createHash('sha256').update(content).digest('hex');\n return `sha256:${hex}`;\n}\n\n/**\n * Create an empty manifest.\n */\nexport function createEmptyManifest(): Manifest {\n return {\n version: 1,\n managed_by: 'panopticon',\n installed: {},\n };\n}\n\n/**\n * Read a manifest from disk. 
Returns empty manifest if file doesn't exist or is invalid.\n */\nexport function readManifest(manifestPath: string): Manifest {\n if (!existsSync(manifestPath)) {\n return createEmptyManifest();\n }\n\n try {\n const raw = JSON.parse(readFileSync(manifestPath, 'utf-8'));\n if (raw.version === 1 && raw.managed_by === 'panopticon' && typeof raw.installed === 'object') {\n return raw as Manifest;\n }\n return createEmptyManifest();\n } catch {\n return createEmptyManifest();\n }\n}\n\n/**\n * Write a manifest to disk (creates parent directories if needed).\n */\nexport function writeManifest(manifestPath: string, manifest: Manifest): void {\n mkdirSync(join(manifestPath, '..'), { recursive: true });\n writeFileSync(manifestPath, JSON.stringify(manifest, null, 2) + '\\n', 'utf-8');\n}\n\n/**\n * Add or update an entry in a manifest.\n */\nexport function setManifestEntry(\n manifest: Manifest,\n relativePath: string,\n hash: string,\n source: string,\n): void {\n manifest.installed[relativePath] = {\n hash,\n source,\n installed_at: new Date().toISOString(),\n };\n}\n\n/**\n * Remove an entry from a manifest.\n */\nexport function removeManifestEntry(manifest: Manifest, relativePath: string): void {\n delete manifest.installed[relativePath];\n}\n\n/**\n * Compare a file on disk against the manifest to determine what action to take.\n *\n * @param targetFile - Absolute path to the file at the target location\n * @param relativePath - Relative path used as key in the manifest (e.g., \"skills/beads/SKILL.md\")\n * @param manifest - The manifest to compare against\n */\nexport function compareFileToManifest(\n targetFile: string,\n relativePath: string,\n manifest: Manifest,\n): FileStatus {\n if (!existsSync(targetFile)) {\n return { action: 'new' };\n }\n\n const entry = manifest.installed[relativePath];\n if (!entry) {\n return { action: 'user-owned' };\n }\n\n const currentHash = hashFile(targetFile);\n if (currentHash === entry.hash) {\n return { action: 'update', 
currentHash };\n }\n\n return { action: 'modified', currentHash, manifestHash: entry.hash };\n}\n\n/**\n * Walk a source directory and collect all files with their relative paths.\n * Used to build the list of files to distribute.\n *\n * @param sourceDir - Root directory to walk\n * @param prefix - Prefix for relative paths (e.g., \"skills/\" or \"agents/\")\n * @returns Array of { absolutePath, relativePath } for each file found\n */\nexport function collectSourceFiles(\n sourceDir: string,\n prefix: string,\n): Array<{ absolutePath: string; relativePath: string }> {\n const results: Array<{ absolutePath: string; relativePath: string }> = [];\n\n if (!existsSync(sourceDir)) {\n return results;\n }\n\n function walk(dir: string): void {\n const entries = readdirSync(dir, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n walk(fullPath);\n } else if (entry.isFile()) {\n const rel = relative(sourceDir, fullPath);\n results.push({\n absolutePath: fullPath,\n relativePath: `${prefix}${rel}`,\n });\n }\n }\n }\n\n walk(sourceDir);\n return results;\n}\n\n/**\n * Build a manifest from a directory by hashing all files.\n * Useful for generating the initial cache manifest.\n *\n * @param baseDir - The directory to scan (e.g., ~/.panopticon/)\n * @param categories - Which subdirectories to include (e.g., [\"skills\", \"agents\", \"rules\"])\n * @param source - The source label for all entries (e.g., \"panopticon\")\n */\nexport function buildManifestFromDirectory(\n baseDir: string,\n categories: string[],\n source: string,\n): Manifest {\n const manifest = createEmptyManifest();\n\n for (const category of categories) {\n const categoryDir = join(baseDir, category);\n const files = collectSourceFiles(categoryDir, `${category}/`);\n for (const file of files) {\n const hash = hashFile(file.absolutePath);\n setManifestEntry(manifest, file.relativePath, hash, source);\n }\n }\n\n return 
manifest;\n}\n"],"mappings":";;;;;;AAAA,SAAS,kBAAkB;AAC3B,SAAS,YAAY,WAAW,cAAc,aAAuB,qBAAqB;AAC1F,SAAS,MAAM,gBAAgB;AAiCxB,SAAS,SAAS,UAA0B;AACjD,QAAM,UAAU,aAAa,QAAQ;AACrC,QAAM,MAAM,WAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAC7D,SAAO,UAAU,GAAG;AACtB;AAKO,SAAS,sBAAgC;AAC9C,SAAO;AAAA,IACL,SAAS;AAAA,IACT,YAAY;AAAA,IACZ,WAAW,CAAC;AAAA,EACd;AACF;AAKO,SAAS,aAAa,cAAgC;AAC3D,MAAI,CAAC,WAAW,YAAY,GAAG;AAC7B,WAAO,oBAAoB;AAAA,EAC7B;AAEA,MAAI;AACF,UAAM,MAAM,KAAK,MAAM,aAAa,cAAc,OAAO,CAAC;AAC1D,QAAI,IAAI,YAAY,KAAK,IAAI,eAAe,gBAAgB,OAAO,IAAI,cAAc,UAAU;AAC7F,aAAO;AAAA,IACT;AACA,WAAO,oBAAoB;AAAA,EAC7B,QAAQ;AACN,WAAO,oBAAoB;AAAA,EAC7B;AACF;AAKO,SAAS,cAAc,cAAsB,UAA0B;AAC5E,YAAU,KAAK,cAAc,IAAI,GAAG,EAAE,WAAW,KAAK,CAAC;AACvD,gBAAc,cAAc,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI,MAAM,OAAO;AAC/E;AAKO,SAAS,iBACd,UACA,cACA,MACA,QACM;AACN,WAAS,UAAU,YAAY,IAAI;AAAA,IACjC;AAAA,IACA;AAAA,IACA,eAAc,oBAAI,KAAK,GAAE,YAAY;AAAA,EACvC;AACF;AAgBO,SAAS,sBACd,YACA,cACA,UACY;AACZ,MAAI,CAAC,WAAW,UAAU,GAAG;AAC3B,WAAO,EAAE,QAAQ,MAAM;AAAA,EACzB;AAEA,QAAM,QAAQ,SAAS,UAAU,YAAY;AAC7C,MAAI,CAAC,OAAO;AACV,WAAO,EAAE,QAAQ,aAAa;AAAA,EAChC;AAEA,QAAM,cAAc,SAAS,UAAU;AACvC,MAAI,gBAAgB,MAAM,MAAM;AAC9B,WAAO,EAAE,QAAQ,UAAU,YAAY;AAAA,EACzC;AAEA,SAAO,EAAE,QAAQ,YAAY,aAAa,cAAc,MAAM,KAAK;AACrE;AAUO,SAAS,mBACd,WACA,QACuD;AACvD,QAAM,UAAiE,CAAC;AAExE,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,WAAO;AAAA,EACT;AAEA,WAAS,KAAK,KAAmB;AAC/B,UAAM,UAAU,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC;AACxD,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAW,KAAK,KAAK,MAAM,IAAI;AACrC,UAAI,MAAM,YAAY,GAAG;AACvB,aAAK,QAAQ;AAAA,MACf,WAAW,MAAM,OAAO,GAAG;AACzB,cAAM,MAAM,SAAS,WAAW,QAAQ;AACxC,gBAAQ,KAAK;AAAA,UACX,cAAc;AAAA,UACd,cAAc,GAAG,MAAM,GAAG,GAAG;AAAA,QAC/B,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,OAAK,SAAS;AACd,SAAO;AACT;AAUO,SAAS,2BACd,SACA,YACA,QACU;AACV,QAAM,WAAW,oBAAoB;AAErC,aAAW,YAAY,YAAY;AACjC,UAAM,cAAc,KAAK,SAAS,QAAQ;AAC1C,UAAM,QAAQ,mBAAmB,aAAa,GAAG,QAAQ,GAAG;AAC5D,eAAW,QAAQ,OAAO;AACxB,YAAM,OAAO,SAAS,KAAK,YAAY;AACvC,uBAAiB,UAAU,KAAK,cAAc,MAAM,MAAM;AAAA,IAC5D;AAAA,EACF;AAEA,SAAO;AACT;AAlMA;AAAA;AAAA;AAAA;AA
AA;AAAA;","names":[]}
@@ -7,17 +7,29 @@ import {
7
7
  popFromHook,
8
8
  pushToHook,
9
9
  sendKeysAsync
10
- } from "./chunk-VIWUCJ4V.js";
10
+ } from "./chunk-KJ2TRXNK.js";
11
+ import {
12
+ init_pipeline_notifier,
13
+ notifyPipeline
14
+ } from "./chunk-JQBV3Q2W.js";
15
+ import {
16
+ getProviderEnv,
17
+ getProviderForModel,
18
+ init_providers,
19
+ init_settings,
20
+ loadSettings,
21
+ setupCredentialFileAuth
22
+ } from "./chunk-7XNJJBH6.js";
11
23
  import {
12
24
  init_projects,
13
25
  projects_exports
14
- } from "./chunk-JY7R7V4G.js";
26
+ } from "./chunk-OMNXYPXC.js";
15
27
  import {
16
28
  COSTS_DIR,
17
29
  PANOPTICON_HOME,
18
30
  getPanopticonHome,
19
31
  init_paths
20
- } from "./chunk-6HXKTOD7.js";
32
+ } from "./chunk-ZTFNYOC7.js";
21
33
  import {
22
34
  __esm,
23
35
  __export,
@@ -380,6 +392,25 @@ import { join as join4, basename as basename2 } from "path";
380
392
  import { homedir as homedir2 } from "os";
381
393
  import { exec } from "child_process";
382
394
  import { promisify } from "util";
395
+ import { randomUUID } from "crypto";
396
/**
 * Resolve the extra environment variables needed to run `model` with its
 * configured provider. Anthropic needs none; any other provider needs an API
 * key from settings. When no key is stored, warns and falls back to the
 * Anthropic default (empty env).
 * @param {string} model - Model identifier.
 * @returns {Record<string, string>} Env vars to inject, possibly empty.
 */
function getProviderEnvForModel(model) {
  const provider = getProviderForModel(model);
  if (provider.name === "anthropic") {
    return {};
  }
  const apiKey = loadSettings().api_keys?.[provider.name];
  if (!apiKey) {
    console.warn(`[specialist] No API key for ${provider.displayName}, falling back to Anthropic`);
    return {};
  }
  return getProviderEnv(provider, apiKey);
}
407
/**
 * Serialize an env-var map into tmux `-e KEY="value"` CLI flags.
 * The result starts with a leading space (or is empty) so it can be spliced
 * directly into a `tmux new-session ...` command string.
 *
 * Values are escaped for shell double-quote context: the previous version
 * escaped only `"`, so values containing `$`, backtick or backslash were
 * expanded/mangled by the shell when the command ran.
 * @param {Record<string, string>} env - Variables to pass to the session.
 * @returns {string} Concatenated `-e` flags, or "" for an empty map.
 */
function buildTmuxEnvFlags(env) {
  let flags = "";
  for (const [key, value] of Object.entries(env)) {
    // Inside double quotes the shell still interprets \ ` $ " — escape all four.
    const escaped = value.replace(/[\\`"$]/g, "\\$&");
    flags += ` -e ${key}="${escaped}"`;
  }
  return flags;
}
383
414
  function initSpecialistsDirectory() {
384
415
  if (!existsSync3(SPECIALISTS_DIR)) {
385
416
  mkdirSync2(SPECIALISTS_DIR, { recursive: true });
@@ -553,9 +584,9 @@ function recordWake(name, sessionId) {
553
584
  }
554
585
  async function spawnEphemeralSpecialist(projectKey, specialistType, task) {
555
586
  ensureProjectSpecialistDir(projectKey, specialistType);
556
- const { loadContextDigest } = await import("./specialist-context-N32QBNNQ.js");
587
+ const { loadContextDigest } = await import("./specialist-context-T3NBMCIE.js");
557
588
  const contextDigest = loadContextDigest(projectKey, specialistType);
558
- const { createRunLog: createRunLog2 } = await import("./specialist-logs-GF3YV4KL.js");
589
+ const { createRunLog: createRunLog2 } = await import("./specialist-logs-CVKD3YJ3.js");
559
590
  const { runId, filePath: logFilePath } = createRunLog2(
560
591
  projectKey,
561
592
  specialistType,
@@ -568,13 +599,19 @@ async function spawnEphemeralSpecialist(projectKey, specialistType, task) {
568
599
  const tmuxSession = getTmuxSessionName(specialistType, projectKey);
569
600
  const cwd = process.env.HOME || "/home/exedev";
570
601
  try {
571
- let model = "claude-sonnet-4-5";
602
+ let model = "claude-sonnet-4-6";
572
603
  try {
573
604
  const workTypeId = `specialist-${specialistType}`;
574
605
  model = getModelId(workTypeId);
575
606
  } catch (error) {
576
607
  console.warn(`Warning: Could not resolve model for ${specialistType}, using default`);
577
608
  }
609
+ const providerEnv = getProviderEnvForModel(model);
610
+ const envFlags = buildTmuxEnvFlags(providerEnv);
611
+ const providerConfig = getProviderForModel(model);
612
+ if (providerConfig.authType === "credential-file") {
613
+ setupCredentialFileAuth(providerConfig, cwd);
614
+ }
578
615
  const permissionFlags = specialistType === "merge-agent" ? "--dangerously-skip-permissions --permission-mode bypassPermissions" : "--dangerously-skip-permissions";
579
616
  const agentDir = join4(homedir2(), ".panopticon", "agents", tmuxSession);
580
617
  await execAsync(`mkdir -p "${agentDir}"`, { encoding: "utf-8" });
@@ -593,10 +630,10 @@ echo ""
593
630
  echo "## Specialist completed task"
594
631
  `, { mode: 493 });
595
632
  await execAsync(
596
- `tmux new-session -d -s "${tmuxSession}" "bash '${launcherScript}'"`,
633
+ `tmux new-session -d -s "${tmuxSession}"${envFlags} "bash '${launcherScript}'"`,
597
634
  { encoding: "utf-8" }
598
635
  );
599
- const { saveAgentRuntimeState } = await import("./agents-BDFHF4T3.js");
636
+ const { saveAgentRuntimeState } = await import("./agents-VLK4BMVA.js");
600
637
  saveAgentRuntimeState(tmuxSession, {
601
638
  state: "active",
602
639
  lastActivity: (/* @__PURE__ */ new Date()).toISOString(),
@@ -620,7 +657,7 @@ echo "## Specialist completed task"
620
657
  }
621
658
  }
622
659
  async function buildTaskPrompt(projectKey, specialistType, task, contextDigest) {
623
- const { getSpecialistPromptOverride } = await import("./projects-VXRUCMLM.js");
660
+ const { getSpecialistPromptOverride } = await import("./projects-JEIVIYC6.js");
624
661
  const customPrompt = getSpecialistPromptOverride(projectKey, specialistType);
625
662
  let prompt = `# ${specialistType} Task - ${task.issueId}
626
663
 
@@ -781,7 +818,7 @@ async function terminateSpecialist(projectKey, specialistType) {
781
818
  console.error(`[specialist] Failed to kill tmux session ${tmuxSession}:`, error);
782
819
  }
783
820
  if (metadata.currentRun) {
784
- const { finalizeRunLog: finalizeRunLog2 } = await import("./specialist-logs-GF3YV4KL.js");
821
+ const { finalizeRunLog: finalizeRunLog2 } = await import("./specialist-logs-CVKD3YJ3.js");
785
822
  try {
786
823
  finalizeRunLog2(projectKey, specialistType, metadata.currentRun, {
787
824
  status: metadata.lastRunStatus || "incomplete",
@@ -794,20 +831,20 @@ async function terminateSpecialist(projectKey, specialistType) {
794
831
  }
795
832
  const key = `${projectKey}-${specialistType}`;
796
833
  gracePeriodStates.delete(key);
797
- const { saveAgentRuntimeState } = await import("./agents-BDFHF4T3.js");
834
+ const { saveAgentRuntimeState } = await import("./agents-VLK4BMVA.js");
798
835
  saveAgentRuntimeState(tmuxSession, {
799
836
  state: "suspended",
800
837
  lastActivity: (/* @__PURE__ */ new Date()).toISOString()
801
838
  });
802
- const { scheduleDigestGeneration } = await import("./specialist-context-N32QBNNQ.js");
839
+ const { scheduleDigestGeneration } = await import("./specialist-context-T3NBMCIE.js");
803
840
  scheduleDigestGeneration(projectKey, specialistType);
804
841
  scheduleLogCleanup(projectKey, specialistType);
805
842
  }
806
843
  function scheduleLogCleanup(projectKey, specialistType) {
807
844
  Promise.resolve().then(async () => {
808
845
  try {
809
- const { cleanupOldLogs: cleanupOldLogs2 } = await import("./specialist-logs-GF3YV4KL.js");
810
- const { getSpecialistRetention } = await import("./projects-VXRUCMLM.js");
846
+ const { cleanupOldLogs: cleanupOldLogs2 } = await import("./specialist-logs-CVKD3YJ3.js");
847
+ const { getSpecialistRetention } = await import("./projects-JEIVIYC6.js");
811
848
  const retention = getSpecialistRetention(projectKey);
812
849
  const deleted = cleanupOldLogs2(projectKey, specialistType, { maxDays: retention.max_days, maxRuns: retention.max_runs });
813
850
  if (deleted > 0) {
@@ -985,7 +1022,7 @@ async function getSpecialistStatus(name, projectKey) {
985
1022
  const sessionId = getSessionId(name);
986
1023
  const running = await isRunning(name, projectKey);
987
1024
  const contextTokens = countContextTokens(name);
988
- const { getAgentRuntimeState } = await import("./agents-BDFHF4T3.js");
1025
+ const { getAgentRuntimeState } = await import("./agents-VLK4BMVA.js");
989
1026
  const tmuxSession = getTmuxSessionName(name, projectKey);
990
1027
  const runtimeState = getAgentRuntimeState(tmuxSession);
991
1028
  let state;
@@ -1045,7 +1082,7 @@ async function initializeSpecialist(name) {
1045
1082
  }
1046
1083
  const tmuxSession = getTmuxSessionName(name);
1047
1084
  const cwd = process.env.HOME || "/home/eltmon";
1048
- let model = "claude-sonnet-4-5";
1085
+ let model = "claude-sonnet-4-6";
1049
1086
  try {
1050
1087
  const workTypeId = `specialist-${name}`;
1051
1088
  model = getModelId(workTypeId);
@@ -1058,18 +1095,26 @@ Your role: ${name === "merge-agent" ? "Resolve merge conflicts and ensure clean
1058
1095
  You will be woken up when your services are needed. For now, acknowledge your initialization and wait.
1059
1096
  Say: "I am the ${name} specialist, ready and waiting for tasks."`;
1060
1097
  try {
1098
+ const providerEnv = getProviderEnvForModel(model);
1099
+ const envFlags = buildTmuxEnvFlags(providerEnv);
1100
+ const providerCfg = getProviderForModel(model);
1101
+ if (providerCfg.authType === "credential-file") {
1102
+ setupCredentialFileAuth(providerCfg, cwd);
1103
+ }
1061
1104
  const agentDir = join4(homedir2(), ".panopticon", "agents", tmuxSession);
1062
1105
  await execAsync(`mkdir -p "${agentDir}"`, { encoding: "utf-8" });
1063
1106
  const promptFile = join4(agentDir, "identity-prompt.md");
1064
1107
  const launcherScript = join4(agentDir, "launcher.sh");
1065
1108
  writeFileSync(promptFile, identityPrompt);
1109
+ const newSessionId = randomUUID();
1066
1110
  writeFileSync(launcherScript, `#!/bin/bash
1067
1111
  cd "${cwd}"
1068
1112
  prompt=$(cat "${promptFile}")
1069
- exec claude --dangerously-skip-permissions --model ${model} "$prompt"
1113
+ exec claude --dangerously-skip-permissions --session-id "${newSessionId}" --model ${model} "$prompt"
1070
1114
  `, { mode: 493 });
1115
+ setSessionId(name, newSessionId);
1071
1116
  await execAsync(
1072
- `tmux new-session -d -s "${tmuxSession}" "bash '${launcherScript}'"`,
1117
+ `tmux new-session -d -s "${tmuxSession}"${envFlags} "bash '${launcherScript}'"`,
1073
1118
  { encoding: "utf-8" }
1074
1119
  );
1075
1120
  recordWake(name);
@@ -1142,11 +1187,31 @@ async function wakeSpecialist(name, taskPrompt, options = {}) {
1142
1187
  }
1143
1188
  const cwd = process.env.HOME || "/home/eltmon";
1144
1189
  try {
1145
- const modelFlag = name === "merge-agent" ? "--model opus" : "";
1190
+ let model = "claude-sonnet-4-6";
1191
+ try {
1192
+ const workTypeId = `specialist-${name}`;
1193
+ model = getModelId(workTypeId);
1194
+ } catch (error) {
1195
+ console.warn(`[specialist] Could not resolve model for ${name}, using default`);
1196
+ }
1197
+ const modelFlag = `--model ${model}`;
1198
+ const providerEnv = getProviderEnvForModel(model);
1199
+ const envFlags = buildTmuxEnvFlags(providerEnv);
1200
+ const provCfg = getProviderForModel(model);
1201
+ if (provCfg.authType === "credential-file") {
1202
+ setupCredentialFileAuth(provCfg, cwd);
1203
+ }
1146
1204
  const permissionFlags = name === "merge-agent" ? "--dangerously-skip-permissions --permission-mode bypassPermissions" : "--dangerously-skip-permissions";
1147
- const claudeCmd = sessionId ? `claude --resume "${sessionId}" ${modelFlag} ${permissionFlags}` : `claude ${modelFlag} ${permissionFlags}`;
1205
+ let claudeCmd;
1206
+ if (sessionId) {
1207
+ claudeCmd = `claude --resume "${sessionId}" ${modelFlag} ${permissionFlags}`;
1208
+ } else {
1209
+ const newSessionId = randomUUID();
1210
+ claudeCmd = `claude --session-id "${newSessionId}" ${modelFlag} ${permissionFlags}`;
1211
+ setSessionId(name, newSessionId);
1212
+ }
1148
1213
  await execAsync(
1149
- `tmux new-session -d -s "${tmuxSession}" -c "${cwd}" "${claudeCmd}"`,
1214
+ `tmux new-session -d -s "${tmuxSession}" -c "${cwd}"${envFlags} "${claudeCmd}"`,
1150
1215
  { encoding: "utf-8" }
1151
1216
  );
1152
1217
  if (waitForReady) {
@@ -1177,7 +1242,7 @@ async function wakeSpecialist(name, taskPrompt, options = {}) {
1177
1242
  await sendKeysAsync(tmuxSession, taskPrompt);
1178
1243
  }
1179
1244
  recordWake(name, sessionId || void 0);
1180
- const { saveAgentRuntimeState } = await import("./agents-BDFHF4T3.js");
1245
+ const { saveAgentRuntimeState } = await import("./agents-VLK4BMVA.js");
1181
1246
  saveAgentRuntimeState(tmuxSession, {
1182
1247
  state: "active",
1183
1248
  lastActivity: (/* @__PURE__ */ new Date()).toISOString(),
@@ -1229,25 +1294,45 @@ Use the send-feedback-to-agent skill to report findings back to the issue agent.
1229
1294
  break;
1230
1295
  case "review-agent": {
1231
1296
  const workspace = task.workspace || "unknown";
1232
- let staleBranch = false;
1297
+ let gitDirs = [];
1233
1298
  if (workspace !== "unknown") {
1299
+ if (existsSync3(join4(workspace, ".git"))) {
1300
+ gitDirs = [workspace];
1301
+ } else {
1302
+ try {
1303
+ const entries = readdirSync2(workspace, { withFileTypes: true });
1304
+ for (const entry of entries) {
1305
+ if (entry.isDirectory() && existsSync3(join4(workspace, entry.name, ".git"))) {
1306
+ gitDirs.push(join4(workspace, entry.name));
1307
+ }
1308
+ }
1309
+ } catch {
1310
+ }
1311
+ }
1312
+ }
1313
+ const gitDir = gitDirs[0] || workspace;
1314
+ let staleBranch = false;
1315
+ if (workspace !== "unknown" && gitDirs.length > 0) {
1234
1316
  try {
1235
- const { stdout: diffOutput } = await execAsync(
1236
- `cd "${workspace}" && git fetch origin main 2>/dev/null; git diff --name-only main...HEAD 2>/dev/null`,
1237
- { encoding: "utf-8", timeout: 15e3 }
1238
- );
1239
- const changedFiles = diffOutput.trim().split("\n").filter((f) => f.length > 0);
1240
- if (changedFiles.length === 0) {
1317
+ let totalChangedFiles = 0;
1318
+ for (const dir of gitDirs) {
1319
+ const { stdout: dirDiff } = await execAsync(
1320
+ `cd "${dir}" && git fetch origin main 2>/dev/null; git diff --name-only main...HEAD 2>/dev/null`,
1321
+ { encoding: "utf-8", timeout: 15e3 }
1322
+ );
1323
+ totalChangedFiles += dirDiff.trim().split("\n").filter((f) => f.length > 0).length;
1324
+ }
1325
+ if (totalChangedFiles === 0) {
1241
1326
  staleBranch = true;
1242
1327
  console.log(`[specialist] review-agent: stale branch detected for ${task.issueId} \u2014 0 files changed vs main`);
1243
- const { setReviewStatus } = await import("./review-status-GWQYY77L.js");
1328
+ const { setReviewStatus } = await import("./review-status-EPFG4XM7.js");
1244
1329
  setReviewStatus(task.issueId.toUpperCase(), {
1245
1330
  reviewStatus: "passed",
1246
1331
  reviewNotes: "No changes to review \u2014 branch identical to main (already merged or stale)"
1247
1332
  });
1248
1333
  console.log(`[specialist] review-agent: auto-passed ${task.issueId} (stale branch)`);
1249
1334
  const tmuxSession = getTmuxSessionName("review-agent");
1250
- const { saveAgentRuntimeState } = await import("./agents-BDFHF4T3.js");
1335
+ const { saveAgentRuntimeState } = await import("./agents-VLK4BMVA.js");
1251
1336
  saveAgentRuntimeState(tmuxSession, {
1252
1337
  state: "idle",
1253
1338
  lastActivity: (/* @__PURE__ */ new Date()).toISOString()
@@ -1258,10 +1343,14 @@ Use the send-feedback-to-agent skill to report findings back to the issue agent.
1258
1343
  console.warn(`[specialist] review-agent: stale branch pre-check failed for ${task.issueId}:`, err);
1259
1344
  }
1260
1345
  }
1346
+ const isPolyrepo = gitDirs.length > 1;
1347
+ const gitDiffCommands = gitDirs.length > 0 ? gitDirs.map((d) => `cd "${d}" && git diff --name-only main...HEAD`).join("\n") : `cd "${workspace}" && git diff --name-only main...HEAD`;
1348
+ const gitDiffFileCmd = gitDirs.length > 0 ? `cd "${gitDir}" && git diff main...HEAD -- <file>` : `cd "${workspace}" && git diff main...HEAD -- <file>`;
1261
1349
  prompt = `New review task for ${task.issueId}:
1262
1350
 
1263
1351
  Branch: ${task.branch || "unknown"}
1264
1352
  Workspace: ${workspace}
1353
+ ${isPolyrepo ? `Polyrepo: git repos in subdirectories: ${gitDirs.map((d) => basename2(d)).join(", ")}` : ""}
1265
1354
  ${task.prUrl ? `PR URL: ${task.prUrl}` : ""}
1266
1355
 
1267
1356
  Your task:
@@ -1276,11 +1365,12 @@ The TEST agent will run tests in the next step.
1276
1365
  ## How to Review Changes
1277
1366
 
1278
1367
  **Step 0 (CRITICAL):** First check if there are ANY changes to review:
1368
+ ${isPolyrepo ? `This is a polyrepo \u2014 run git diff in each repo subdirectory:` : ""}
1279
1369
  \`\`\`bash
1280
- cd ${workspace} && git diff --name-only main...HEAD
1370
+ ${gitDiffCommands}
1281
1371
  \`\`\`
1282
1372
 
1283
- **If the diff is EMPTY (0 files changed):** The branch is stale or already merged into main. In this case:
1373
+ **If the diff is EMPTY (0 files changed across all repos):** The branch is stale or already merged into main. In this case:
1284
1374
  1. Do NOT attempt a full review
1285
1375
  2. Update status as passed immediately:
1286
1376
  \`\`\`bash
@@ -1294,7 +1384,7 @@ pan work tell ${task.issueId} "Review complete: branch has 0 diff from main \u20
1294
1384
 
1295
1385
  **Step 1:** Get the list of changed files:
1296
1386
  \`\`\`bash
1297
- cd ${workspace} && git diff --name-only main...HEAD
1387
+ ${gitDiffCommands}
1298
1388
  \`\`\`
1299
1389
 
1300
1390
  **Step 2:** Read the CURRENT version of each changed file using the Read tool.
@@ -1302,7 +1392,7 @@ Review the actual file contents \u2014 do NOT rely solely on diff output.
1302
1392
 
1303
1393
  **Step 3:** If you need to see what specifically changed, use:
1304
1394
  \`\`\`bash
1305
- cd ${workspace} && git diff main...HEAD -- <file>
1395
+ ${gitDiffFileCmd}
1306
1396
  \`\`\`
1307
1397
 
1308
1398
  ## Avoiding False Positives
@@ -1350,40 +1440,52 @@ Branch: ${task.branch || "unknown"}
1350
1440
  Workspace: ${task.workspace || "unknown"}
1351
1441
 
1352
1442
  Your task:
1353
- 1. Run the full test suite on the feature branch
1354
- 2. Run the same test suite on the main branch (baseline)
1355
- 3. Compare results: identify which failures are NEW vs pre-existing
1356
- 4. Only fail the feature branch for NEW regressions
1443
+ 1. Run the full test suite \u2014 redirect output to file, read only summaries
1444
+ 2. If ALL pass, skip baseline and report PASS
1445
+ 3. If failures, run baseline on main and compare
1446
+ 4. Only fail for NEW regressions (not pre-existing)
1357
1447
  5. Update status via API when done
1358
1448
 
1449
+ ## CRITICAL: Context Management \u2014 Output Redirection
1450
+
1451
+ **NEVER let full test output flow into your context.** Always redirect to file and read only summaries.
1452
+ Raw test output from large suites (1000+ tests) WILL fill your context and cause compaction, losing your task.
1453
+
1359
1454
  ## CRITICAL: Bash Timeout for Test Commands
1360
1455
 
1361
1456
  **ALWAYS use timeout: 300000 (5 minutes) when running test commands.**
1362
- Test suites commonly take 2-5 minutes. The default bash timeout is only 2 minutes and WILL cause premature failures.
1363
- Do NOT run test commands in background mode \u2014 run them directly with a 5-minute timeout.
1364
1457
 
1365
- Example:
1458
+ ## Step 1: Run Feature Branch Tests
1459
+
1366
1460
  \`\`\`bash
1367
- cd ${task.workspace || "unknown"} && npm test 2>&1 | tail -30
1461
+ cd ${task.workspace || "unknown"} && npm test 2>&1 > /tmp/test-feature.txt; echo "EXIT_CODE: $?"
1368
1462
  # Use timeout: 300000 for this command
1463
+ tail -20 /tmp/test-feature.txt
1369
1464
  \`\`\`
1370
1465
 
1371
- ## CRITICAL: Baseline Comparison
1466
+ ## Step 2: Check Results
1467
+
1468
+ - If ALL tests pass (exit code 0) \u2192 skip baseline, go to "Update Status"
1469
+ - If failures exist \u2192 continue to Step 3
1372
1470
 
1373
- **You MUST compare test results against the main branch baseline.**
1471
+ ## Step 3: Baseline Comparison (ONLY if failures found)
1374
1472
 
1375
- Pre-existing failures that also occur on main branch should NOT block the feature branch.
1473
+ \`\`\`bash
1474
+ cd ${task.context?.workspace ? task.context.workspace.replace(/workspaces\/feature-[^/]+/, "") : "unknown"} && npm test 2>&1 > /tmp/test-main.txt; echo "EXIT_CODE: $?"
1475
+ # Use timeout: 300000 for this command
1476
+ tail -20 /tmp/test-main.txt
1477
+ \`\`\`
1376
1478
 
1377
- Steps:
1378
- 1. Run \`npm test\` (or detected command) on the feature branch - record results (timeout: 300000)
1379
- 2. Run tests on main branch baseline (timeout: 300000): \`cd ${task.context?.workspace ? task.context.workspace.replace(/workspaces\/feature-[^/]+/, "") : "unknown"} && npm test 2>&1 | tail -30\`
1380
- 3. Compare: any test that fails on BOTH branches is pre-existing
1381
- 4. Only NEW failures (pass on main, fail on feature) should block
1479
+ Then compare failures (targeted, NOT full output):
1480
+ \`\`\`bash
1481
+ grep -E "FAIL|\u2717|Error|failed" /tmp/test-feature.txt | head -30
1482
+ grep -E "FAIL|\u2717|Error|failed" /tmp/test-main.txt | head -30
1483
+ \`\`\`
1382
1484
 
1383
- **Pass criteria:** The feature branch introduces ZERO new test failures compared to main.
1384
- **Fail criteria:** The feature branch introduces one or more NEW test failures not present on main.
1485
+ Tests that fail on BOTH = pre-existing (don't block). Tests that fail ONLY on feature = NEW regression (block).
1385
1486
 
1386
- Report pre-existing failures as informational notes, but do NOT block the feature for them.
1487
+ **Pass criteria:** Feature branch introduces ZERO new test failures vs main.
1488
+ **Fail criteria:** Feature branch introduces NEW failures not present on main.
1387
1489
 
1388
1490
  ## REQUIRED: Update Status via API
1389
1491
 
@@ -1391,19 +1493,19 @@ You MUST execute the appropriate curl command and verify it succeeds. Do NOT jus
1391
1493
 
1392
1494
  If NO new regressions (tests PASS):
1393
1495
  \`\`\`bash
1394
- # EXECUTE THIS - verify you see JSON response with testStatus:"passed"
1395
1496
  curl -s -X POST ${apiUrl}/api/workspaces/${task.issueId}/review-status -H "Content-Type: application/json" -d '{"testStatus":"passed","testNotes":"[summary including pre-existing failures if any]"}' | jq .
1396
1497
  \`\`\`
1397
1498
 
1398
1499
  If NEW regressions found (tests FAIL):
1399
1500
  \`\`\`bash
1400
- # EXECUTE THIS - verify you see JSON response with testStatus:"failed"
1401
1501
  curl -s -X POST ${apiUrl}/api/workspaces/${task.issueId}/review-status -H "Content-Type: application/json" -d '{"testStatus":"failed","testNotes":"[describe NEW failures only]"}' | jq .
1402
1502
  \`\`\`
1403
1503
  Then use send-feedback-to-agent skill to notify issue agent of NEW failures only.
1404
1504
 
1405
1505
  \u26A0\uFE0F VERIFICATION: After running curl, confirm you see valid JSON output with the updated status. If you get an error or empty response, the update FAILED - report this.
1406
1506
 
1507
+ **NEVER run test commands without redirecting to a file.** This is not optional.
1508
+
1407
1509
  IMPORTANT: Do NOT hand off to merge-agent. Human clicks Merge button when ready.`;
1408
1510
  break;
1409
1511
  default:
@@ -1414,7 +1516,7 @@ IMPORTANT: Do NOT hand off to merge-agent. Human clicks Merge button when ready.
1414
1516
  async function wakeSpecialistOrQueue(name, task, options = {}) {
1415
1517
  const { priority = "normal", source = "handoff" } = options;
1416
1518
  const running = await isRunning(name);
1417
- const { getAgentRuntimeState } = await import("./agents-BDFHF4T3.js");
1519
+ const { getAgentRuntimeState } = await import("./agents-VLK4BMVA.js");
1418
1520
  const tmuxSession = getTmuxSessionName(name);
1419
1521
  const runtimeState = getAgentRuntimeState(tmuxSession);
1420
1522
  const idle = runtimeState?.state === "idle" || runtimeState?.state === "suspended";
@@ -1445,7 +1547,7 @@ async function wakeSpecialistOrQueue(name, task, options = {}) {
1445
1547
  };
1446
1548
  }
1447
1549
  }
1448
- const { saveAgentRuntimeState } = await import("./agents-BDFHF4T3.js");
1550
+ const { saveAgentRuntimeState } = await import("./agents-VLK4BMVA.js");
1449
1551
  saveAgentRuntimeState(tmuxSession, {
1450
1552
  state: "active",
1451
1553
  lastActivity: (/* @__PURE__ */ new Date()).toISOString(),
@@ -1499,6 +1601,7 @@ function submitToSpecialistQueue(specialistName, task) {
1499
1601
  }
1500
1602
  };
1501
1603
  const queueItem = pushToHook(specialistName, item);
1604
+ notifyPipeline({ type: "task_queued", specialist: specialistName, issueId: task.issueId });
1502
1605
  const handoffEvent = createSpecialistHandoff(
1503
1606
  task.source,
1504
1607
  // From (e.g., 'review-agent' or 'issue-agent')
@@ -1544,7 +1647,7 @@ async function sendFeedbackToAgent(feedback) {
1544
1647
  }
1545
1648
  const agentSession = `agent-${toIssueId.toLowerCase()}`;
1546
1649
  const feedbackMessage = formatFeedbackForAgent(fullFeedback);
1547
- const { writeFeedbackFile } = await import("./feedback-writer-AAKF5BTK.js");
1650
+ const { writeFeedbackFile } = await import("./feedback-writer-LVZ5TFYZ.js");
1548
1651
  const specialistMap = {
1549
1652
  "review-agent": "review-agent",
1550
1653
  "test-agent": "test-agent",
@@ -1564,7 +1667,7 @@ async function sendFeedbackToAgent(feedback) {
1564
1667
  return false;
1565
1668
  }
1566
1669
  try {
1567
- const { messageAgent } = await import("./agents-BDFHF4T3.js");
1670
+ const { messageAgent } = await import("./agents-VLK4BMVA.js");
1568
1671
  const msg = `SPECIALIST FEEDBACK: ${fromSpecialist} reported ${feedback.feedbackType.toUpperCase()} for ${toIssueId}.
1569
1672
  Read and address: ${fileResult.relativePath}`;
1570
1673
  await messageAgent(agentSession, msg);
@@ -1670,8 +1773,11 @@ var init_specialists = __esm({
1670
1773
  init_paths();
1671
1774
  init_jsonl_parser();
1672
1775
  init_specialist_handoff_logger();
1776
+ init_settings();
1673
1777
  init_work_type_router();
1778
+ init_providers();
1674
1779
  init_tmux();
1780
+ init_pipeline_notifier();
1675
1781
  init_hooks();
1676
1782
  execAsync = promisify(exec);
1677
1783
  SPECIALISTS_DIR = join4(PANOPTICON_HOME, "specialists");
@@ -1887,18 +1993,18 @@ function getRecentRunLogs(projectKey, specialistType, count) {
1887
1993
  }
1888
1994
  function cleanupOldLogs(projectKey, specialistType, retention) {
1889
1995
  const { maxDays, maxRuns } = retention;
1996
+ const now = /* @__PURE__ */ new Date();
1997
+ const cutoffDate = new Date(now.getTime() - maxDays * 24 * 60 * 60 * 1e3);
1890
1998
  const allLogs = listRunLogs(projectKey, specialistType);
1891
1999
  if (allLogs.length === 0) {
1892
2000
  return 0;
1893
2001
  }
1894
- const now = /* @__PURE__ */ new Date();
1895
- const cutoffDate = new Date(now.getTime() - maxDays * 24 * 60 * 60 * 1e3);
1896
2002
  let deletedCount = 0;
1897
2003
  allLogs.forEach((log, index) => {
1898
2004
  if (index < maxRuns) {
1899
2005
  return;
1900
2006
  }
1901
- if (log.createdAt >= cutoffDate) {
2007
+ if (maxDays > 0 && log.createdAt >= cutoffDate) {
1902
2008
  return;
1903
2009
  }
1904
2010
  try {
@@ -2055,4 +2161,4 @@ export {
2055
2161
  getFeedbackStats,
2056
2162
  init_specialists
2057
2163
  };
2058
- //# sourceMappingURL=chunk-2NIAOCIC.js.map
2164
+ //# sourceMappingURL=chunk-ASY7T35E.js.map