@automagik/genie 4.260420.17 → 4.260421.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/genie.js CHANGED
@@ -1820,7 +1820,7 @@ ${body}`;writeFileSync12(filePath,output,"utf-8")}function serializeSdkConfig(sd
1820
1820
  `);if(nlPos===-1)return{newOffset:fileSize,contentRowsInserted:0,toolEventsInserted:0};return{newOffset:effectiveOffset+Buffer.byteLength(skipStr.slice(0,nlPos+1),"utf-8"),contentRowsInserted:0,toolEventsInserted:0}}safeEnd=lastNewline+1}let safeData=raw.slice(0,safeEnd),newOffset=effectiveOffset+Buffer.byteLength(safeData,"utf-8"),{contentRows,toolEvents,turnCount}=parseJsonlChunk(safeData,sessionId,session.totalTurns,{agentId:session.agentId,team:session.team,wishSlug:session.wishSlug,taskId:session.taskId});return await sql.begin(async(tx)=>{await batchInsertContent(tx,contentRows),await batchInsertToolEvents(tx,toolEvents),await tx`
1821
1821
  UPDATE sessions SET last_ingested_offset = ${newOffset}, total_turns = ${session.totalTurns+turnCount}, updated_at = now()
1822
1822
  WHERE id = ${sessionId}
1823
- `}),{newOffset,contentRowsInserted:contentRows.length,toolEventsInserted:toolEvents.length}}finally{await fh.close()}}async function ingestFileFull(sql,sessionId,jsonlPath,projectPath,fromOffset,opts){return ingestFile(sql,sessionId,jsonlPath,projectPath,fromOffset,{...opts,chunkSize:Number.MAX_SAFE_INTEGER})}var liveWorkPending=!1,workerMapCache=null,WORKER_MAP_TTL_MS=300000,DEFAULT_CHUNK_SIZE=65536;var init_session_capture=()=>{};var exports_session_filewatch={};__export(exports_session_filewatch,{stopFilewatch:()=>stopFilewatch,startFilewatch:()=>startFilewatch,resetUnrecoverableSessions:()=>resetUnrecoverableSessions,isForeignKeyViolation:()=>isForeignKeyViolation,handleFileChange:()=>handleFileChange});import{watch}from"fs";import{homedir as homedir25}from"os";import{basename as basename7,join as join39}from"path";function resetUnrecoverableSessions(){unrecoverableSessions.clear(),offsetCache.clear()}function isForeignKeyViolation(err){if(!err||typeof err!=="object")return!1;let code=err.code;if(typeof code==="string"&&code==="23503")return!0;return(err instanceof Error?err.message:String(err)).toLowerCase().includes("foreign key constraint")}async function loadOffsets(sql){try{let rows=await sql`SELECT id, last_ingested_offset FROM sessions WHERE last_ingested_offset > 0`;for(let row of rows)offsetCache.set(row.id,row.last_ingested_offset)}catch{}}function extractSessionInfo(filePath){if(!filePath.endsWith(".jsonl"))return null;let sessionId=basename7(filePath,".jsonl"),parts=filePath.split("/"),sessionsIdx=parts.lastIndexOf("sessions"),subagentsIdx=parts.lastIndexOf("subagents");if(subagentsIdx>0&&parts[subagentsIdx-1]){let parentSessionId=parts[subagentsIdx-1],projectIdx=parts.indexOf("projects"),projectPath=projectIdx>=0?parts.slice(0,projectIdx+2).join("/"):"";return{sessionId,projectPath,parentSessionId,isSubagent:!0}}if(sessionsIdx>0){let 
projectPath=parts.slice(0,sessionsIdx).join("/");return{sessionId,projectPath,parentSessionId:null,isSubagent:!1}}return null}async function handleFileChange(filePath,sql,deps=defaultDeps3){let info=extractSessionInfo(filePath);if(!info)return;if(unrecoverableSessions.has(info.sessionId))return;let storedOffset=offsetCache.get(info.sessionId)??0;try{deps.setLiveWorkPending(!0);let workerMap=await deps.buildWorkerMap(sql),result2=await deps.ingestFileFull(sql,info.sessionId,filePath,info.projectPath,storedOffset,{parentSessionId:info.parentSessionId,isSubagent:info.isSubagent,workerMap});offsetCache.set(info.sessionId,result2.newOffset)}catch(err){let message=err instanceof Error?err.message:String(err);if(isForeignKeyViolation(err))unrecoverableSessions.add(info.sessionId),offsetCache.set(info.sessionId,Number.POSITIVE_INFINITY),deps.logError(`[filewatch] skipping ${filePath} \u2014 FK constraint violation (orphan session, parent not registered): ${message}`);else deps.logError(`[filewatch] error ingesting ${filePath} at offset ${storedOffset}: ${message}`)}finally{deps.setLiveWorkPending(!1)}}async function startFilewatch(sql){if(watcher)return!0;let claudeDir=join39(process.env.CLAUDE_CONFIG_DIR??join39(homedir25(),".claude"),"projects");await loadOffsets(sql);try{return watcher=watch(claudeDir,{recursive:!0},(_eventType,filename)=>{if(!filename||!filename.endsWith(".jsonl"))return;let fullPath=join39(claudeDir,filename),existing=debounceTimers.get(fullPath);if(existing)clearTimeout(existing);debounceTimers.set(fullPath,setTimeout(()=>{debounceTimers.delete(fullPath),handleFileChange(fullPath,sql).catch((err)=>{let message=err instanceof Error?err.message:String(err);console.error(`[filewatch] unhandled error for ${fullPath}: ${message}`)})},DEBOUNCE_MS))}),watcher.on("error",(err)=>{console.error("[filewatch] watcher error:",err.message)}),console.log(`[filewatch] watching ${claudeDir} (${offsetCache.size} sessions cached)`),!0}catch(err){let message=err 
instanceof Error?err.message:String(err);return console.error(`[filewatch] failed to start: ${message}`),!1}}function stopFilewatch(){if(watcher)watcher.close(),watcher=null;for(let timer2 of debounceTimers.values())clearTimeout(timer2);debounceTimers.clear()}var watcher=null,offsetCache,debounceTimers,DEBOUNCE_MS=500,unrecoverableSessions,defaultDeps3;var init_session_filewatch=__esm(()=>{init_session_capture();offsetCache=new Map,debounceTimers=new Map,unrecoverableSessions=new Set;defaultDeps3={buildWorkerMap,ingestFileFull,setLiveWorkPending,logError:(msg)=>console.error(msg)}});var exports_session_backfill={};__export(exports_session_backfill,{stopBackfill:()=>stopBackfill,startBackfill:()=>startBackfill,getBackfillStatus:()=>getBackfillStatus});function sleep2(ms){return new Promise((resolve5)=>setTimeout(resolve5,ms))}async function updateSyncState(sql,progress){await sql`
1823
+ `}),{newOffset,contentRowsInserted:contentRows.length,toolEventsInserted:toolEvents.length}}finally{await fh.close()}}async function ingestFileFull(sql,sessionId,jsonlPath,projectPath,fromOffset,opts){return ingestFile(sql,sessionId,jsonlPath,projectPath,fromOffset,{...opts,chunkSize:Number.MAX_SAFE_INTEGER})}var liveWorkPending=!1,workerMapCache=null,WORKER_MAP_TTL_MS=300000,DEFAULT_CHUNK_SIZE=65536;var init_session_capture=()=>{};var exports_session_filewatch={};__export(exports_session_filewatch,{stopFilewatch:()=>stopFilewatch,startFilewatch:()=>startFilewatch,resetUnrecoverableSessions:()=>resetUnrecoverableSessions,isForeignKeyViolation:()=>isForeignKeyViolation,handleFileChange:()=>handleFileChange});import{watch}from"fs";import{homedir as homedir25}from"os";import{basename as basename7,join as join39}from"path";function resetUnrecoverableSessions(){unrecoverableSessions.clear(),offsetCache.clear()}function isForeignKeyViolation(err){if(!err||typeof err!=="object")return!1;let code=err.code;if(typeof code==="string"&&code==="23503")return!0;return(err instanceof Error?err.message:String(err)).toLowerCase().includes("foreign key constraint")}async function loadOffsets(sql){try{let rows=await sql`SELECT id, last_ingested_offset FROM sessions WHERE last_ingested_offset > 0`;for(let row of rows)offsetCache.set(row.id,row.last_ingested_offset)}catch{}}function extractSessionInfo(filePath){if(!filePath.endsWith(".jsonl"))return null;let sessionId=basename7(filePath,".jsonl"),parts=filePath.split("/"),sessionsIdx=parts.lastIndexOf("sessions"),subagentsIdx=parts.lastIndexOf("subagents");if(subagentsIdx>0&&parts[subagentsIdx-1]){let parentSessionId=parts[subagentsIdx-1],projectIdx=parts.indexOf("projects"),projectPath=projectIdx>=0?parts.slice(0,projectIdx+2).join("/"):"";return{sessionId,projectPath,parentSessionId,isSubagent:!0}}if(sessionsIdx>0){let 
projectPath=parts.slice(0,sessionsIdx).join("/");return{sessionId,projectPath,parentSessionId:null,isSubagent:!1}}return null}async function handleFileChange(filePath,sql,deps=defaultDeps3){let info=extractSessionInfo(filePath);if(!info)return;if(unrecoverableSessions.has(info.sessionId))return;let storedOffset=offsetCache.get(info.sessionId)??0;try{deps.setLiveWorkPending(!0);let workerMap=await deps.buildWorkerMap(sql),result2=await deps.ingestFileFull(sql,info.sessionId,filePath,info.projectPath,storedOffset,{parentSessionId:info.parentSessionId,isSubagent:info.isSubagent,workerMap});offsetCache.set(info.sessionId,result2.newOffset)}catch(err){let message=err instanceof Error?err.message:String(err);if(isForeignKeyViolation(err))unrecoverableSessions.add(info.sessionId),offsetCache.set(info.sessionId,Number.POSITIVE_INFINITY),deps.logError(`[filewatch] skipping ${filePath} \u2014 FK constraint violation (orphan session, parent not registered): ${message}`);else deps.logError(`[filewatch] error ingesting ${filePath} at offset ${storedOffset}: ${message}`)}finally{deps.setLiveWorkPending(!1)}}async function startFilewatch(sql){if(watcher)return!0;let claudeDir=join39(process.env.CLAUDE_CONFIG_DIR??join39(homedir25(),".claude"),"projects");await loadOffsets(sql);try{return watcher=watch(claudeDir,{recursive:!0},(_eventType,filename)=>{if(!filename||!filename.endsWith(".jsonl"))return;let fullPath=join39(claudeDir,filename),existing=debounceTimers.get(fullPath);if(existing)clearTimeout(existing);debounceTimers.set(fullPath,setTimeout(()=>{debounceTimers.delete(fullPath),handleFileChange(fullPath,sql).catch((err)=>{let message=err instanceof Error?err.message:String(err);console.error(`[filewatch] unhandled error for ${fullPath}: ${message}`)})},DEBOUNCE_MS))}),watcher.on("error",(err)=>{console.error("[filewatch] watcher error:",err.message)}),console.log(`[filewatch] watching ${claudeDir} (${offsetCache.size} sessions cached)`),!0}catch(err){let message=err 
instanceof Error?err.message:String(err);return console.error(`[filewatch] failed to start: ${message}`),!1}}function stopFilewatch(){if(watcher)watcher.close(),watcher=null;for(let timer2 of debounceTimers.values())clearTimeout(timer2);debounceTimers.clear()}var watcher=null,offsetCache,debounceTimers,DEBOUNCE_MS=500,unrecoverableSessions,defaultDeps3;var init_session_filewatch=__esm(()=>{init_session_capture();offsetCache=new Map,debounceTimers=new Map,unrecoverableSessions=new Set;defaultDeps3={buildWorkerMap,ingestFileFull,setLiveWorkPending,logError:(msg)=>console.error(msg)}});var exports_session_backfill={};__export(exports_session_backfill,{stopBackfill:()=>stopBackfill,startBackfill:()=>startBackfill,getBackfillStatus:()=>getBackfillStatus,compareBackfillFiles:()=>compareBackfillFiles});function sleep2(ms){return new Promise((resolve5)=>setTimeout(resolve5,ms))}function compareBackfillFiles(a,b2){if(a.isSubagent!==b2.isSubagent)return a.isSubagent?1:-1;return b2.mtime-a.mtime}async function updateSyncState(sql,progress){await sql`
1824
1824
  INSERT INTO session_sync (id, status, total_files, processed_files, total_bytes, processed_bytes, errors, updated_at)
1825
1825
  VALUES ('backfill', ${progress.status}, ${progress.totalFiles}, ${progress.processedFiles}, ${progress.totalBytes}, ${progress.processedBytes}, ${progress.errors}, now())
1826
1826
  ON CONFLICT (id) DO UPDATE SET
@@ -1831,7 +1831,7 @@ ${body}`;writeFileSync12(filePath,output,"utf-8")}function serializeSdkConfig(sd
1831
1831
  processed_bytes = ${progress.processedBytes},
1832
1832
  errors = ${progress.errors},
1833
1833
  updated_at = now()
1834
- `}async function shouldSkipBackfill(sql){try{let existing=await sql`SELECT status FROM session_sync WHERE id = 'backfill'`;if(existing.length>0&&existing[0].status==="complete")return!0}catch{}try{let[{count}]=await sql`SELECT count(*)::int as count FROM sessions`;if(count>0){let existing=await sql`SELECT status FROM session_sync WHERE id = 'backfill'`;if(existing.length===0||existing[0].status==="complete")return!0}}catch{return!0}return!1}async function yieldToLiveWork(){while(liveWorkPending)await sleep2(LIVE_YIELD_POLL_MS)}async function getFileStartOffset(sql,file){let existing=await sql`SELECT last_ingested_offset FROM sessions WHERE id = ${file.sessionId}`;if(existing.length>0)return existing[0].last_ingested_offset??0;return 0}async function processBackfillFile(sql,file,progress,workerMap){let offset=await getFileStartOffset(sql,file);if(offset>=file.fileSize){progress.processedFiles++,progress.processedBytes+=file.fileSize;return}let currentOffset=offset;while(currentOffset<file.fileSize){await yieldToLiveWork();let result2=await ingestFile(sql,file.sessionId,file.jsonlPath,file.projectPath,currentOffset,{chunkSize:CHUNK_SIZE,parentSessionId:file.parentSessionId,isSubagent:file.isSubagent,fileSize:file.fileSize,mtime:file.mtime,workerMap});if(result2.newOffset<=currentOffset)break;progress.processedBytes+=result2.newOffset-currentOffset,currentOffset=result2.newOffset}progress.processedFiles++}async function processAllFiles(sql,allFiles,progress,workerMap){for(let file of allFiles){if(!running)break;await yieldToLiveWork();try{await processBackfillFile(sql,file,progress,workerMap)}catch(err){progress.errors++;let message=err instanceof Error?err.message:String(err);console.error(`[backfill] error on ${file.jsonlPath}: ${message}`)}if(progress.processedFiles%50===0)await updateSyncState(sql,progress);await sleep2(SLEEP_BETWEEN_FILES_MS)}}function resolveBackfillStatus(progress){if(!running)progress.status="paused",console.log(`[backfill] paused: 
${progress.processedFiles}/${progress.totalFiles} files (will resume on next daemon start)`);else if(progress.errors>0&&progress.errors>=progress.totalFiles)progress.status="failed",console.error(`[backfill] failed: ${progress.errors}/${progress.totalFiles} files errored \u2014 will retry on next daemon start`);else progress.status="complete",console.log(`[backfill] complete: ${progress.processedFiles}/${progress.totalFiles} files, ${progress.errors} errors`)}async function startBackfill(sql){if(running)return;if(await shouldSkipBackfill(sql))return;running=!0,console.log("[backfill] starting session backfill...");try{let allFiles=await discoverAllJsonlFiles();allFiles.sort((a,b2)=>b2.mtime-a.mtime);let totalBytes=allFiles.reduce((sum,f)=>sum+f.fileSize,0),progress={totalFiles:allFiles.length,processedFiles:0,totalBytes,processedBytes:0,errors:0,status:"running"};await updateSyncState(sql,progress),console.log(`[backfill] discovered ${allFiles.length} files (${(totalBytes/1024/1024).toFixed(1)} MB)`);let workerMap=await buildWorkerMap(sql);await processAllFiles(sql,allFiles,progress,workerMap),resolveBackfillStatus(progress),await updateSyncState(sql,progress)}catch(err){let message=err instanceof Error?err.message:String(err);console.error(`[backfill] fatal error: ${message}`)}finally{running=!1}}function stopBackfill(){running=!1}async function getBackfillStatus(sql){try{let rows=await sql`SELECT * FROM session_sync WHERE id = 'backfill'`;if(rows.length===0)return null;let row=rows[0];return{totalFiles:row.total_files,processedFiles:row.processed_files,totalBytes:row.total_bytes,processedBytes:row.processed_bytes,errors:row.errors,status:row.status}}catch{return null}}var CHUNK_SIZE=65536,SLEEP_BETWEEN_FILES_MS=100,LIVE_YIELD_POLL_MS=200,running=!1;var init_session_backfill=__esm(()=>{init_session_capture()});var 
exports_scheduler_daemon={};__export(exports_scheduler_daemon,{terminalizeCleanExitUnverified:()=>terminalizeCleanExitUnverified,startDaemon:()=>startDaemon,runAgentRecoveryPass:()=>runAgentRecoveryPass,recoverOnStartup:()=>recoverOnStartup,reconcileUnresumable:()=>reconcileUnresumable,reconcileOrphans:()=>reconcileOrphans,reconcileOrphanedRuns:()=>reconcileOrphanedRuns,reclaimExpiredLeases:()=>reclaimExpiredLeases,processMailboxRetryMessage:()=>processMailboxRetryMessage,logToFile:()=>logToFile,logReconcilerMode:()=>logReconcilerMode,isTurnAwareReconcilerEnabled:()=>isTurnAwareReconcilerEnabled,fireTrigger:()=>fireTrigger,emitWorkerEvents:()=>emitWorkerEvents,collectMachineSnapshot:()=>collectMachineSnapshot,collectHeartbeats:()=>collectHeartbeats,claimDueTriggers:()=>claimDueTriggers,attemptAgentResume:()=>attemptAgentResume,_resetWorkerStatesForTesting:()=>_resetWorkerStatesForTesting,TURN_AWARE_RECONCILER_FLAG:()=>TURN_AWARE_RECONCILER_FLAG,MAX_DELIVERY_ATTEMPTS:()=>MAX_DELIVERY_ATTEMPTS,ESCALATION_RECIPIENT:()=>ESCALATION_RECIPIENT});import{randomUUID as randomUUID9}from"crypto";import{appendFileSync as appendFileSync3,mkdirSync as mkdirSync12}from"fs";import{homedir as homedir26}from"os";import{join as join40}from"path";function isTurnAwareReconcilerEnabled(env=process.env){let raw=env[TURN_AWARE_RECONCILER_FLAG];if(raw===void 0)return!0;let v=raw.trim().toLowerCase();if(v==="")return!0;if(v==="0"||v==="false"||v==="no")return!1;if(v==="1"||v==="true"||v==="yes")return!0;return!0}function logReconcilerMode(deps,daemonId){let enabled=isTurnAwareReconcilerEnabled();deps.log({timestamp:deps.now().toISOString(),level:"info",event:enabled?"reconciler_mode_turn_aware":"reconciler_mode_legacy",daemon_id:daemonId,flag:TURN_AWARE_RECONCILER_FLAG,enabled,message:enabled?"turn-aware reconciler enabled":"flag off, using legacy reconciler"})}function getLogDir2(){return join40(process.env.GENIE_HOME??join40(homedir26(),".genie"),"logs")}function getLogFile(){return 
join40(getLogDir2(),"scheduler.log")}function logToFile(entry2){let logDir=getLogDir2();mkdirSync12(logDir,{recursive:!0});let enriched=entry2.trace_id?entry2:withAmbientTraceId(entry2);appendFileSync3(getLogFile(),`${JSON.stringify(enriched)}
1834
+ `}async function shouldSkipBackfill(sql){try{let existing=await sql`SELECT status FROM session_sync WHERE id = 'backfill'`;if(existing.length>0&&existing[0].status==="complete")return!0}catch{}try{let[{count}]=await sql`SELECT count(*)::int as count FROM sessions`;if(count>0){let existing=await sql`SELECT status FROM session_sync WHERE id = 'backfill'`;if(existing.length===0||existing[0].status==="complete")return!0}}catch{return!0}return!1}async function yieldToLiveWork(){while(liveWorkPending)await sleep2(LIVE_YIELD_POLL_MS)}async function getFileStartOffset(sql,file){let existing=await sql`SELECT last_ingested_offset FROM sessions WHERE id = ${file.sessionId}`;if(existing.length>0)return existing[0].last_ingested_offset??0;return 0}async function processBackfillFile(sql,file,progress,workerMap){let offset=await getFileStartOffset(sql,file);if(offset>=file.fileSize){progress.processedFiles++,progress.processedBytes+=file.fileSize;return}let currentOffset=offset;while(currentOffset<file.fileSize){await yieldToLiveWork();let result2=await ingestFile(sql,file.sessionId,file.jsonlPath,file.projectPath,currentOffset,{chunkSize:CHUNK_SIZE,parentSessionId:file.parentSessionId,isSubagent:file.isSubagent,fileSize:file.fileSize,mtime:file.mtime,workerMap});if(result2.newOffset<=currentOffset)break;progress.processedBytes+=result2.newOffset-currentOffset,currentOffset=result2.newOffset}progress.processedFiles++}async function processAllFiles(sql,allFiles,progress,workerMap){for(let file of allFiles){if(!running)break;await yieldToLiveWork();try{await processBackfillFile(sql,file,progress,workerMap)}catch(err){progress.errors++;let message=err instanceof Error?err.message:String(err);console.error(`[backfill] error on ${file.jsonlPath}: ${message}`)}if(progress.processedFiles%50===0)await updateSyncState(sql,progress);await sleep2(SLEEP_BETWEEN_FILES_MS)}}function resolveBackfillStatus(progress){if(!running)progress.status="paused",console.log(`[backfill] paused: 
${progress.processedFiles}/${progress.totalFiles} files (will resume on next daemon start)`);else if(progress.errors>0&&progress.errors>=progress.totalFiles)progress.status="failed",console.error(`[backfill] failed: ${progress.errors}/${progress.totalFiles} files errored \u2014 will retry on next daemon start`);else progress.status="complete",console.log(`[backfill] complete: ${progress.processedFiles}/${progress.totalFiles} files, ${progress.errors} errors`)}async function startBackfill(sql){if(running)return;if(await shouldSkipBackfill(sql))return;running=!0,console.log("[backfill] starting session backfill...");try{let allFiles=await discoverAllJsonlFiles();allFiles.sort(compareBackfillFiles);let totalBytes=allFiles.reduce((sum,f)=>sum+f.fileSize,0),progress={totalFiles:allFiles.length,processedFiles:0,totalBytes,processedBytes:0,errors:0,status:"running"};await updateSyncState(sql,progress),console.log(`[backfill] discovered ${allFiles.length} files (${(totalBytes/1024/1024).toFixed(1)} MB)`);let workerMap=await buildWorkerMap(sql);await processAllFiles(sql,allFiles,progress,workerMap),resolveBackfillStatus(progress),await updateSyncState(sql,progress)}catch(err){let message=err instanceof Error?err.message:String(err);console.error(`[backfill] fatal error: ${message}`)}finally{running=!1}}function stopBackfill(){running=!1}async function getBackfillStatus(sql){try{let rows=await sql`SELECT * FROM session_sync WHERE id = 'backfill'`;if(rows.length===0)return null;let row=rows[0];return{totalFiles:row.total_files,processedFiles:row.processed_files,totalBytes:row.total_bytes,processedBytes:row.processed_bytes,errors:row.errors,status:row.status}}catch{return null}}var CHUNK_SIZE=65536,SLEEP_BETWEEN_FILES_MS=100,LIVE_YIELD_POLL_MS=200,running=!1;var init_session_backfill=__esm(()=>{init_session_capture()});var 
exports_scheduler_daemon={};__export(exports_scheduler_daemon,{terminalizeCleanExitUnverified:()=>terminalizeCleanExitUnverified,startDaemon:()=>startDaemon,runAgentRecoveryPass:()=>runAgentRecoveryPass,recoverOnStartup:()=>recoverOnStartup,reconcileUnresumable:()=>reconcileUnresumable,reconcileOrphans:()=>reconcileOrphans,reconcileOrphanedRuns:()=>reconcileOrphanedRuns,reclaimExpiredLeases:()=>reclaimExpiredLeases,processMailboxRetryMessage:()=>processMailboxRetryMessage,logToFile:()=>logToFile,logReconcilerMode:()=>logReconcilerMode,isTurnAwareReconcilerEnabled:()=>isTurnAwareReconcilerEnabled,fireTrigger:()=>fireTrigger,emitWorkerEvents:()=>emitWorkerEvents,collectMachineSnapshot:()=>collectMachineSnapshot,collectHeartbeats:()=>collectHeartbeats,claimDueTriggers:()=>claimDueTriggers,attemptAgentResume:()=>attemptAgentResume,_resetWorkerStatesForTesting:()=>_resetWorkerStatesForTesting,TURN_AWARE_RECONCILER_FLAG:()=>TURN_AWARE_RECONCILER_FLAG,MAX_DELIVERY_ATTEMPTS:()=>MAX_DELIVERY_ATTEMPTS,ESCALATION_RECIPIENT:()=>ESCALATION_RECIPIENT});import{randomUUID as randomUUID9}from"crypto";import{appendFileSync as appendFileSync3,mkdirSync as mkdirSync12}from"fs";import{homedir as homedir26}from"os";import{join as join40}from"path";function isTurnAwareReconcilerEnabled(env=process.env){let raw=env[TURN_AWARE_RECONCILER_FLAG];if(raw===void 0)return!0;let v=raw.trim().toLowerCase();if(v==="")return!0;if(v==="0"||v==="false"||v==="no")return!1;if(v==="1"||v==="true"||v==="yes")return!0;return!0}function logReconcilerMode(deps,daemonId){let enabled=isTurnAwareReconcilerEnabled();deps.log({timestamp:deps.now().toISOString(),level:"info",event:enabled?"reconciler_mode_turn_aware":"reconciler_mode_legacy",daemon_id:daemonId,flag:TURN_AWARE_RECONCILER_FLAG,enabled,message:enabled?"turn-aware reconciler enabled":"flag off, using legacy reconciler"})}function getLogDir2(){return join40(process.env.GENIE_HOME??join40(homedir26(),".genie"),"logs")}function getLogFile(){return 
join40(getLogDir2(),"scheduler.log")}function logToFile(entry2){let logDir=getLogDir2();mkdirSync12(logDir,{recursive:!0});let enriched=entry2.trace_id?entry2:withAmbientTraceId(entry2);appendFileSync3(getLogFile(),`${JSON.stringify(enriched)}
1835
1835
  `)}function withAmbientTraceId(entry2){let ctx=getAmbient();if(!ctx)return entry2;return{...entry2,trace_id:ctx.trace_id}}async function defaultSpawnCommand(command,env){return{pid:Bun.spawn(["sh","-c",command],{env:{...process.env,...env},stdio:["ignore","ignore","ignore"]}).pid}}function defaultJitter(maxMs){return Math.floor(Math.random()*maxMs)}function defaultSleep(ms){return new Promise((resolve5)=>setTimeout(resolve5,ms))}async function defaultIsPaneAlive(paneId){let{isPaneAlive:isPaneAlive2}=await Promise.resolve().then(() => (init_tmux(),exports_tmux));return isPaneAlive2(paneId)}async function defaultListWorkers(){let{list:list2}=await Promise.resolve().then(() => (init_agent_registry(),exports_agent_registry));return(await list2()).map((a)=>({id:a.id,paneId:a.paneId,repoPath:a.repoPath,state:a.state,team:a.team,wishSlug:a.wishSlug,groupNumber:a.groupNumber,autoResume:a.autoResume,resumeAttempts:a.resumeAttempts,maxResumeAttempts:a.maxResumeAttempts,lastResumeAttempt:a.lastResumeAttempt,claudeSessionId:a.claudeSessionId}))}async function defaultPublishEvent(subject,data,repoPath){let payload=data,{publishSubjectEvent:publishSubjectEvent2}=await Promise.resolve().then(() => (init_runtime_events(),exports_runtime_events));await publishSubjectEvent2(repoPath,subject,{timestamp:payload.timestamp,kind:payload.kind??"system",agent:payload.agent??"scheduler",team:payload.team,direction:payload.direction,peer:payload.peer,text:payload.text??subject,data:payload.data,source:payload.source??"registry"})}async function defaultCountTmuxSessions(){try{let{execSync:execSync10}=await import("child_process"),{genieTmuxCmd:genieTmuxCmd2}=await Promise.resolve().then(() => (init_tmux_wrapper(),exports_tmux_wrapper));return execSync10(`${genieTmuxCmd2("list-sessions")} 2>/dev/null`,{encoding:"utf-8"}).trim().split(`
1836
1836
  `).filter(Boolean).length}catch{return 0}}async function defaultResumeAgent(agentId){try{let{execSync:execSync10}=await import("child_process");return execSync10(`genie agent resume ${agentId} --no-reset-attempts`,{encoding:"utf-8",stdio:["pipe","pipe","pipe"]}),!0}catch{return!1}}async function defaultUpdateAgent(agentId,updates){let{update:update2}=await Promise.resolve().then(() => (init_agent_registry(),exports_agent_registry));await update2(agentId,updates)}function createDefaultDeps(){return{getConnection:async()=>{let{getConnection:getConnection2}=await Promise.resolve().then(() => (init_db(),exports_db));return getConnection2()},spawnCommand:defaultSpawnCommand,log:logToFile,generateId:randomUUID9,now:()=>new Date,sleep:defaultSleep,jitter:defaultJitter,isPaneAlive:defaultIsPaneAlive,listWorkers:defaultListWorkers,countTmuxSessions:defaultCountTmuxSessions,publishEvent:defaultPublishEvent,resumeAgent:defaultResumeAgent,updateAgent:defaultUpdateAgent}}function resolveConfig(overrides){let envMax=process.env.GENIE_MAX_CONCURRENT,maxConcurrent=envMax?Number.parseInt(envMax,10):5;return{maxConcurrent:overrides?.maxConcurrent??(Number.isNaN(maxConcurrent)?5:maxConcurrent),pollIntervalMs:overrides?.pollIntervalMs??30000,maxJitterMs:overrides?.maxJitterMs??30000,jitterThreshold:overrides?.jitterThreshold??3,heartbeatIntervalMs:overrides?.heartbeatIntervalMs??60000,orphanCheckIntervalMs:overrides?.orphanCheckIntervalMs??300000,deadHeartbeatThreshold:overrides?.deadHeartbeatThreshold??2,leaseRecoveryIntervalMs:overrides?.leaseRecoveryIntervalMs??60000}}async function claimDueTriggers(deps,config,daemonId){let sql=await deps.getConnection(),now=deps.now(),leaseUntil=new Date(now.getTime()+300000),runningCount=(await sql`
1837
1837
  SELECT count(*)::int AS cnt FROM runs
@@ -1940,7 +1940,49 @@ ${body}`;writeFileSync12(filePath,output,"utf-8")}function serializeSdkConfig(sd
1940
1940
  `,failedCount++,deps.log({timestamp:now.toISOString(),level:"warn",event:"orphan_run_failed",run_id:run.id,worker_id:run.worker_id,dead_heartbeats:threshold})}if(failedCount>0)deps.log({timestamp:now.toISOString(),level:"info",event:"orphan_reconciliation_completed",failed_count:failedCount});return failedCount}async function collectMachineSnapshot(deps){let sql=await deps.getConnection(),now=deps.now(),snapshotId=deps.generateId(),workers=await deps.listWorkers(),activeWorkers=workers.filter((w)=>!["done","error","suspended"].includes(w.state)).length,teams=new Set(workers.filter((w)=>w.team).map((w)=>w.team)),tmuxSessions=await deps.countTmuxSessions(),cpuPercent=null,memoryMb=null;try{let mem=process.memoryUsage();memoryMb=Math.round(mem.rss/1024/1024)}catch{}try{let cpus=(await import("os")).cpus();if(cpus.length>0){let total=cpus.reduce((acc,cpu)=>{let t=Object.values(cpu.times).reduce((a,b2)=>a+b2,0);return acc+t-cpu.times.idle},0),totalAll=cpus.reduce((acc,cpu)=>acc+Object.values(cpu.times).reduce((a,b2)=>a+b2,0),0);cpuPercent=totalAll>0?Math.round(total/totalAll*100):null}}catch{}await sql`
1941
1941
  INSERT INTO machine_snapshots (id, active_workers, active_teams, tmux_sessions, cpu_percent, memory_mb, created_at)
1942
1942
  VALUES (${snapshotId}, ${activeWorkers}, ${teams.size}, ${tmuxSessions}, ${cpuPercent}, ${memoryMb}, ${now})
1943
- `,deps.log({timestamp:now.toISOString(),level:"debug",event:"machine_snapshot",active_workers:activeWorkers,active_teams:teams.size,tmux_sessions:tmuxSessions,cpu_percent:cpuPercent,memory_mb:memoryMb})}async function emitWorkerEvents(deps){let workers=await deps.listWorkers(),now=deps.now().toISOString(),currentIds=new Set;for(let worker of workers){currentIds.add(worker.id);let prev=previousWorkerStates.get(worker.id),repoPath=worker.repoPath??process.cwd();if(!prev)await deps.publishEvent(`genie.agent.${worker.id}.spawned`,{timestamp:now,kind:"state",agent:worker.id,team:worker.team,text:`Agent ${worker.id} spawned`,data:{state:worker.state},source:"registry"},repoPath);else if(prev.state!==worker.state){if(await deps.publishEvent(`genie.agent.${worker.id}.state`,{timestamp:now,kind:"state",agent:worker.id,team:worker.team,text:`Agent ${worker.id} state: ${prev.state} \u2192 ${worker.state}`,data:{previousState:prev.state,state:worker.state},source:"registry"},repoPath),worker.state==="done"&&worker.wishSlug&&worker.groupNumber!=null)await deps.publishEvent(`genie.wish.${worker.wishSlug}.group.${worker.groupNumber}.done`,{timestamp:now,kind:"system",agent:worker.id,team:worker.team,text:`Wish ${worker.wishSlug} group ${worker.groupNumber} completed by ${worker.id}`,data:{wishSlug:worker.wishSlug,groupNumber:worker.groupNumber},source:"registry"},repoPath)}previousWorkerStates.set(worker.id,{...worker})}for(let[id,prev]of previousWorkerStates)if(!currentIds.has(id))await deps.publishEvent(`genie.agent.${id}.killed`,{timestamp:now,kind:"state",agent:id,team:prev.team,text:`Agent ${id} killed`,data:{lastState:prev.state},source:"registry"},prev.repoPath??process.cwd()),previousWorkerStates.delete(id)}function _resetWorkerStatesForTesting(){previousWorkerStates.clear()}function startInboxWatcherIfEnabled(deps){let pollMs=getInboxPollIntervalMs();if(pollMs===0)return 
deps.log({timestamp:deps.now().toISOString(),level:"info",event:"inbox_watcher_disabled"}),null;return deps.log({timestamp:deps.now().toISOString(),level:"info",event:"inbox_watcher_started",poll_interval_ms:pollMs}),startInboxWatcher()}async function processMailboxRetryMessage(deps,msg,overrides={}){let deliverFn=overrides.deliverFn??(async(toWorker,messageId)=>{let{deliverToPane:deliverToPane2}=await Promise.resolve().then(() => (init_protocol_router(),exports_protocol_router));return deliverToPane2(toWorker,messageId)}),sendFn=overrides.sendFn??(async(repoPath2,from,to,body)=>{let{send:send2}=await Promise.resolve().then(() => (init_mailbox(),exports_mailbox));return send2(repoPath2,from,to,body)});if(await deliverFn(msg.to,msg.id)){deps.log({timestamp:deps.now().toISOString(),level:"info",event:"mailbox_delivery_retried",messageId:msg.id,to:msg.to});return}let rows=await(await deps.getConnection())`SELECT delivery_attempts, repo_path FROM mailbox WHERE id = ${msg.id} LIMIT 1`,attempts=rows[0]?.delivery_attempts??0;if(attempts<MAX_DELIVERY_ATTEMPTS)return;if(await markEscalated(msg.id),msg.from==="scheduler"){deps.log({timestamp:deps.now().toISOString(),level:"warn",event:"mailbox_delivery_escalation_dropped",reason:"already_escalated_by_scheduler",messageId:msg.id,to:msg.to,attempts});return}if(msg.body.startsWith("[escalation] ")){deps.log({timestamp:deps.now().toISOString(),level:"warn",event:"mailbox_delivery_escalation_dropped",reason:"body_prefix",messageId:msg.id,to:msg.to,attempts});return}if(msg.to===ESCALATION_RECIPIENT){deps.log({timestamp:deps.now().toISOString(),level:"warn",event:"mailbox_delivery_escalation_dropped",reason:"same_recipient",messageId:msg.id,to:msg.to,attempts});return}let repoPath=rows[0]?.repo_path;if(repoPath)await sendFn(repoPath,"scheduler",ESCALATION_RECIPIENT,`[escalation] Message ${msg.id} from "${msg.from}" to "${msg.to}" failed delivery after ${MAX_DELIVERY_ATTEMPTS} attempts. 
Body: "${msg.body.slice(0,200)}"`);deps.log({timestamp:deps.now().toISOString(),level:"warn",event:"mailbox_delivery_escalated",messageId:msg.id,to:msg.to,attempts})}function startDaemon(configOverrides,depsOverrides){let config=resolveConfig(configOverrides),deps={...createDefaultDeps(),...depsOverrides},daemonId=deps.generateId(),running2=!0,pollTimeout=null,pollResolve=null,listenConnection=null,heartbeatTimer=null,orphanTimer=null,leaseRecoveryTimer=null,agentResumeTimer=null,inboxWatcherHandle=null,captureFallbackTimer=null,eventRouterHandle=null,deliveryUnsub=null,deliveryRetryTimer=null,stop=()=>{if(running2=!1,pollTimeout)clearTimeout(pollTimeout),pollTimeout=null;if(pollResolve)pollResolve(),pollResolve=null;if(heartbeatTimer)clearInterval(heartbeatTimer),heartbeatTimer=null;if(orphanTimer)clearInterval(orphanTimer),orphanTimer=null;if(leaseRecoveryTimer)clearInterval(leaseRecoveryTimer),leaseRecoveryTimer=null;if(agentResumeTimer)clearInterval(agentResumeTimer),agentResumeTimer=null;if(inboxWatcherHandle)stopInboxWatcher(inboxWatcherHandle),inboxWatcherHandle=null;if(listenConnection)listenConnection.end().catch(()=>{}),listenConnection=null;if(captureFallbackTimer)clearInterval(captureFallbackTimer),captureFallbackTimer=null;if(eventRouterHandle?.stop().catch(()=>{}),eventRouterHandle=null,deliveryRetryTimer)clearInterval(deliveryRetryTimer),deliveryRetryTimer=null;if(deliveryUnsub)deliveryUnsub().catch(()=>{}),deliveryUnsub=null;Promise.resolve().then(() => (init_session_filewatch(),exports_session_filewatch)).then((m)=>m.stopFilewatch()).catch(()=>{}),Promise.resolve().then(() => (init_session_backfill(),exports_session_backfill)).then((m)=>m.stopBackfill()).catch(()=>{}),Promise.resolve().then(() => (init_db(),exports_db)).then(({getLockfilePath:getLockfilePath2})=>{try{__require("fs").unlinkSync(getLockfilePath2())}catch{}}).catch(()=>{})},processTriggers=async()=>{try{let claimed=await 
claimDueTriggers(deps,config,daemonId);if(claimed.length===0)return;if(claimed.length>config.jitterThreshold){let jitterMs=deps.jitter(config.maxJitterMs);deps.log({timestamp:deps.now().toISOString(),level:"info",event:"jitter_applied",count:claimed.length,jitter_ms:jitterMs}),await deps.sleep(jitterMs)}for(let trigger of claimed){if(!running2)break;await fireTrigger(deps,trigger,daemonId)}}catch(err){let message=err instanceof Error?err.message:String(err);deps.log({timestamp:deps.now().toISOString(),level:"error",event:"process_cycle_error",error:message})}};async function setupListenNotify(d,onTrigger){try{let sql=await d.getConnection();return await sql.listen("genie_trigger_due",async()=>{if(!running2)return;await onTrigger()}),d.log({timestamp:d.now().toISOString(),level:"info",event:"listen_started",channel:"genie_trigger_due"}),sql}catch(err){let message=err instanceof Error?err.message:String(err);return d.log({timestamp:d.now().toISOString(),level:"warn",event:"listen_failed",error:message}),null}}function startLeaseRecoveryTimer(d,cfg,dId){return setInterval(async()=>{if(!running2)return;try{await reclaimExpiredLeases(d,dId)}catch(err){let message=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"lease_recovery_error",error:message})}},cfg.leaseRecoveryIntervalMs)}function startOrphanTimer(d,cfg){return setInterval(async()=>{if(!running2)return;try{await reconcileOrphans(d,cfg)}catch(err){let message=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"orphan_reconciliation_error",error:message})}},cfg.orphanCheckIntervalMs)}async function reconcileDeadPaneZombies(d){try{let{reconcileStaleSpawns:reconcileStaleSpawns2}=await Promise.resolve().then(() => (init_agent_registry(),exports_agent_registry));await reconcileStaleSpawns2()}catch(err){let message=err instanceof 
Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"warn",event:"reconcile_stale_spawns_error",error:message})}}function startAgentResumeTimer(d,cfg,dId){return setInterval(async()=>{if(!running2)return;try{await reconcileDeadPaneZombies(d),await reconcileUnresumable(d),await runAgentRecoveryPass(d,dId,cfg)}catch(err){let message=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"agent_resume_timer_error",error:message})}},cfg.leaseRecoveryIntervalMs)}async function startEventRouterSafe(d){try{let handle=await startEventRouter();return d.log({timestamp:d.now().toISOString(),level:"info",event:"event_router_started"}),handle}catch(err){let message=err instanceof Error?err.message:String(err);return d.log({timestamp:d.now().toISOString(),level:"warn",event:"event_router_start_failed",error:message}),null}}async function initSessionCapture(d,cfg){try{let captureSql=await d.getConnection(),{startFilewatch:startFilewatch2}=await Promise.resolve().then(() => (init_session_filewatch(),exports_session_filewatch)),{startBackfill:startBackfill2}=await Promise.resolve().then(() => (init_session_backfill(),exports_session_backfill));if(!await startFilewatch2(captureSql)){let{ingestFileFull:ingestFileFull2,discoverAllJsonlFiles:discoverAllJsonlFiles2,buildWorkerMap:buildWorkerMap2}=await Promise.resolve().then(() => (init_session_capture(),exports_session_capture));d.log({timestamp:d.now().toISOString(),level:"warn",event:"filewatch_failed_fallback_polling"});let timer2=setInterval(async()=>{if(!running2)return;try{let files=await discoverAllJsonlFiles2(),workerMap=await buildWorkerMap2(captureSql);for(let f of files)await ingestFileFull2(captureSql,f.sessionId,f.jsonlPath,f.projectPath,0,{parentSessionId:f.parentSessionId,isSubagent:f.isSubagent,workerMap})}catch{}},cfg.heartbeatIntervalMs);return startBackfill2(captureSql).catch((err)=>{let msg=err instanceof 
Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"backfill_error",error:msg})}),timer2}return startBackfill2(captureSql).catch((err)=>{let msg=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"backfill_error",error:msg})}),null}catch(err){let message=err instanceof Error?err.message:String(err);return d.log({timestamp:d.now().toISOString(),level:"warn",event:"session_capture_init_failed",error:message}),null}}async function runHeartbeat(d){if(!running2)return;try{await collectHeartbeats(d),await collectMachineSnapshot(d),await emitWorkerEvents(d);try{let retSql=await d.getConnection();await retSql`DELETE FROM heartbeats WHERE created_at < now() - interval '7 days'`,await retSql`DELETE FROM machine_snapshots WHERE created_at < now() - interval '30 days'`,await retSql`DELETE FROM audit_events WHERE entity_type LIKE 'otel_%' AND created_at < now() - interval '30 days'`}catch{}}catch(err){let message=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"heartbeat_error",error:message})}}let done=(async()=>{deps.log({timestamp:deps.now().toISOString(),level:"info",event:"daemon_started",daemon_id:daemonId,max_concurrent:config.maxConcurrent,poll_interval_ms:config.pollIntervalMs}),logReconcilerMode(deps,daemonId);try{await recoverOnStartup(deps,daemonId,config)}catch(err){let message=err instanceof Error?err.message:String(err);deps.log({timestamp:deps.now().toISOString(),level:"error",event:"recovery_error",error:message})}listenConnection=await setupListenNotify(deps,processTriggers),heartbeatTimer=setInterval(()=>runHeartbeat(deps),config.heartbeatIntervalMs),orphanTimer=startOrphanTimer(deps,config),leaseRecoveryTimer=startLeaseRecoveryTimer(deps,config,daemonId),agentResumeTimer=startAgentResumeTimer(deps,config,daemonId),inboxWatcherHandle=startInboxWatcherIfEnabled(deps),eventRouterHandle=await 
startEventRouterSafe(deps);try{deliveryUnsub=await subscribeDelivery(async(toWorker,messageId)=>{try{let{deliverToPane:deliverToPane2}=await Promise.resolve().then(() => (init_protocol_router(),exports_protocol_router));await deliverToPane2(toWorker,messageId)}catch{}}),deps.log({timestamp:deps.now().toISOString(),level:"info",event:"mailbox_delivery_listen_started"})}catch{}deliveryRetryTimer=setInterval(async()=>{try{let retryable=await getRetryable(MAX_DELIVERY_ATTEMPTS);for(let msg of retryable)try{await processMailboxRetryMessage(deps,msg)}catch{}}catch(err){let message=err instanceof Error?err.message:String(err);deps.log({timestamp:deps.now().toISOString(),level:"error",event:"mailbox_retry_error",error:message})}},60000),captureFallbackTimer=await initSessionCapture(deps,config),await processTriggers();while(running2){if(await new Promise((resolve5)=>{pollResolve=resolve5,pollTimeout=setTimeout(resolve5,config.pollIntervalMs)}),pollResolve=null,!running2)break;await processTriggers()}deps.log({timestamp:deps.now().toISOString(),level:"info",event:"daemon_stopped",daemon_id:daemonId})})();return{stop,done,daemonId}}var ESCALATION_RECIPIENT="team-lead",MAX_DELIVERY_ATTEMPTS=3,TURN_AWARE_RECONCILER_FLAG="GENIE_RECONCILER_TURN_AWARE",RECOVERY_RETRY_DELAY_MS=60000,TURN_AWARE_RESUMABLE_STATES,RESUME_COOLDOWN_MS=60000,DEFAULT_MAX_RESUME_ATTEMPTS=3,INACTIVE_WORKER_STATES,previousWorkerStates;var init_scheduler_daemon=__esm(()=>{init_cron();init_event_router();init_inbox_watcher();init_mailbox();init_run_spec();init_trace_context();TURN_AWARE_RESUMABLE_STATES=new Set(["working","permission","question"]);INACTIVE_WORKER_STATES=new Set(["done","error","suspended","spawning"]);previousWorkerStates=new Map});import{existsSync as existsSync32}from"fs";async function listTeamsFromPg(){return(await listTeams2(!1)).map((row)=>({name:row.name,status:row.status,worktreePath:row.worktreePath}))}async function listNativeTeamDirs(){return listTeams()}function 
pgWorktreeExistsOnDisk(worktreePath){return existsSync32(worktreePath)}var init_team_drift_sources=__esm(()=>{init_claude_native_teams();init_team_manager()});function registerDetector(module){validateModule(module),registry2.set(module.id,module)}function listDetectors(){return Array.from(registry2.values())}function validateModule(module){if(!module||typeof module!=="object")throw Error("registerDetector: module must be an object");if(typeof module.id!=="string"||!ID_RE.test(module.id))throw Error(`registerDetector: invalid id '${String(module.id)}' \u2014 must match ${ID_RE.source}`);if(typeof module.version!=="string"||!SEMVER_RE.test(module.version))throw Error(`registerDetector: invalid version '${String(module.version)}' for '${module.id}' \u2014 must be semver`);if(module.riskClass!=="low"&&module.riskClass!=="medium"&&module.riskClass!=="high")throw Error(`registerDetector: invalid riskClass for '${module.id}'`);if(typeof module.query!=="function")throw Error(`registerDetector: query must be a function on '${module.id}'`);if(typeof module.shouldFire!=="function")throw Error(`registerDetector: shouldFire must be a function on '${module.id}'`);if(typeof module.render!=="function")throw Error(`registerDetector: render must be a function on '${module.id}'`)}var registry2,SEMVER_RE,ID_RE;var init_detectors=__esm(()=>{registry2=new Map;SEMVER_RE=/^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/,ID_RE=/^[a-z0-9][a-z0-9._-]{0,127}$/});async function buildState(sources){let[lsRows,disbandDirs]=await Promise.all([sources.listTeamsFromPg(),sources.listNativeTeamDirs()]),disbandSet=new Set(disbandDirs),lsSanitizedMap=new Map;for(let row of lsRows)lsSanitizedMap.set(sanitizeTeamName(row.name),row);let divergent=[];for(let row of lsRows){let san=sanitizeTeamName(row.name);if(!disbandSet.has(san)){divergent.push({team_id:row.name,kind:"missing_in_disband",reason:`PG row visible in ls but no ~/.claude/teams/${san}/ 
dir`});continue}if(!sources.pgWorktreeExistsOnDisk(row.worktreePath))divergent.push({team_id:row.name,kind:"status_mismatch",reason:`PG row status='${row.status}' but worktree path missing on disk \u2014 pruneStaleWorktrees will silently delete on next disband`})}for(let dir of disbandDirs)if(!lsSanitizedMap.has(dir))divergent.push({team_id:dir,kind:"missing_in_ls",reason:`~/.claude/teams/${dir}/ exists but no PG row (status!='archived')`});return{ls_snapshot:lsRows,disband_snapshot:disbandDirs,divergent}}function primaryDivergenceKind(divergent){let order=["missing_in_ls","missing_in_disband","status_mismatch"];for(let kind of order)if(divergent.some((d)=>d.kind===kind))return kind;return"status_mismatch"}function renderPayload(state){let primary=primaryDivergenceKind(state.divergent),lsTrimmed=state.ls_snapshot.slice(0,MAX_SNAPSHOT_IN_EVENT).map((r)=>({name:r.name,status:r.status})),disbandTrimmed=state.disband_snapshot.slice(0,MAX_SNAPSHOT_IN_EVENT),divergentTrimmed=state.divergent.slice(0,MAX_DIVERGENT_IN_EVENT),observed={ls_snapshot:lsTrimmed,disband_snapshot:disbandTrimmed,divergent_ids:divergentTrimmed.map((d)=>d.team_id),divergence_kind:primary,divergent_detail:divergentTrimmed,ls_total:state.ls_snapshot.length,disband_total:state.disband_snapshot.length,divergent_total:state.divergent.length};return{divergence_kind:primary,divergent_count:state.divergent.length,observed_state_json:JSON.stringify(observed)}}function makeTeamLsDriftDetector(overrides){let sources={listTeamsFromPg:overrides?.listTeamsFromPg??listTeamsFromPg,listNativeTeamDirs:overrides?.listNativeTeamDirs??listNativeTeamDirs,pgWorktreeExistsOnDisk:overrides?.pgWorktreeExistsOnDisk??pgWorktreeExistsOnDisk};return{id:DETECTOR_ID,version:DETECTOR_VERSION,riskClass:"medium",query(){return buildState(sources)},shouldFire(state){return state.divergent.length>0},render(state){return{type:"rot.team-ls-drift.detected",subject:DETECTOR_ID,payload:renderPayload(state)}}}}var 
DETECTOR_ID="rot.team-ls-drift",DETECTOR_VERSION="0.1.0",MAX_DIVERGENT_IN_EVENT=100,MAX_SNAPSHOT_IN_EVENT=200;var init_pattern_2_team_ls_drift=__esm(()=>{init_claude_native_teams();init_team_drift_sources();init_detectors();registerDetector(makeTeamLsDriftDetector())});import{existsSync as existsSync33}from"fs";function isProbeableExecutorState(state){return state==="running"||state==="spawning"}async function resolveLastSeen(sql,executorId,fallback){let lastSeen=(await sql`
1943
+ `,deps.log({timestamp:now.toISOString(),level:"debug",event:"machine_snapshot",active_workers:activeWorkers,active_teams:teams.size,tmux_sessions:tmuxSessions,cpu_percent:cpuPercent,memory_mb:memoryMb})}async function emitWorkerEvents(deps){let workers=await deps.listWorkers(),now=deps.now().toISOString(),currentIds=new Set;for(let worker of workers){currentIds.add(worker.id);let prev=previousWorkerStates.get(worker.id),repoPath=worker.repoPath??process.cwd();if(!prev)await deps.publishEvent(`genie.agent.${worker.id}.spawned`,{timestamp:now,kind:"state",agent:worker.id,team:worker.team,text:`Agent ${worker.id} spawned`,data:{state:worker.state},source:"registry"},repoPath);else if(prev.state!==worker.state){if(await deps.publishEvent(`genie.agent.${worker.id}.state`,{timestamp:now,kind:"state",agent:worker.id,team:worker.team,text:`Agent ${worker.id} state: ${prev.state} \u2192 ${worker.state}`,data:{previousState:prev.state,state:worker.state},source:"registry"},repoPath),worker.state==="done"&&worker.wishSlug&&worker.groupNumber!=null)await deps.publishEvent(`genie.wish.${worker.wishSlug}.group.${worker.groupNumber}.done`,{timestamp:now,kind:"system",agent:worker.id,team:worker.team,text:`Wish ${worker.wishSlug} group ${worker.groupNumber} completed by ${worker.id}`,data:{wishSlug:worker.wishSlug,groupNumber:worker.groupNumber},source:"registry"},repoPath)}previousWorkerStates.set(worker.id,{...worker})}for(let[id,prev]of previousWorkerStates)if(!currentIds.has(id))await deps.publishEvent(`genie.agent.${id}.killed`,{timestamp:now,kind:"state",agent:id,team:prev.team,text:`Agent ${id} killed`,data:{lastState:prev.state},source:"registry"},prev.repoPath??process.cwd()),previousWorkerStates.delete(id)}function _resetWorkerStatesForTesting(){previousWorkerStates.clear()}function startInboxWatcherIfEnabled(deps){let pollMs=getInboxPollIntervalMs();if(pollMs===0)return 
deps.log({timestamp:deps.now().toISOString(),level:"info",event:"inbox_watcher_disabled"}),null;return deps.log({timestamp:deps.now().toISOString(),level:"info",event:"inbox_watcher_started",poll_interval_ms:pollMs}),startInboxWatcher()}async function processMailboxRetryMessage(deps,msg,overrides={}){let deliverFn=overrides.deliverFn??(async(toWorker,messageId)=>{let{deliverToPane:deliverToPane2}=await Promise.resolve().then(() => (init_protocol_router(),exports_protocol_router));return deliverToPane2(toWorker,messageId)}),sendFn=overrides.sendFn??(async(repoPath2,from,to,body)=>{let{send:send2}=await Promise.resolve().then(() => (init_mailbox(),exports_mailbox));return send2(repoPath2,from,to,body)});if(await deliverFn(msg.to,msg.id)){deps.log({timestamp:deps.now().toISOString(),level:"info",event:"mailbox_delivery_retried",messageId:msg.id,to:msg.to});return}let rows=await(await deps.getConnection())`SELECT delivery_attempts, repo_path FROM mailbox WHERE id = ${msg.id} LIMIT 1`,attempts=rows[0]?.delivery_attempts??0;if(attempts<MAX_DELIVERY_ATTEMPTS)return;if(await markEscalated(msg.id),msg.from==="scheduler"){deps.log({timestamp:deps.now().toISOString(),level:"warn",event:"mailbox_delivery_escalation_dropped",reason:"already_escalated_by_scheduler",messageId:msg.id,to:msg.to,attempts});return}if(msg.body.startsWith("[escalation] ")){deps.log({timestamp:deps.now().toISOString(),level:"warn",event:"mailbox_delivery_escalation_dropped",reason:"body_prefix",messageId:msg.id,to:msg.to,attempts});return}if(msg.to===ESCALATION_RECIPIENT){deps.log({timestamp:deps.now().toISOString(),level:"warn",event:"mailbox_delivery_escalation_dropped",reason:"same_recipient",messageId:msg.id,to:msg.to,attempts});return}let repoPath=rows[0]?.repo_path;if(repoPath)await sendFn(repoPath,"scheduler",ESCALATION_RECIPIENT,`[escalation] Message ${msg.id} from "${msg.from}" to "${msg.to}" failed delivery after ${MAX_DELIVERY_ATTEMPTS} attempts. 
Body: "${msg.body.slice(0,200)}"`);deps.log({timestamp:deps.now().toISOString(),level:"warn",event:"mailbox_delivery_escalated",messageId:msg.id,to:msg.to,attempts})}function startDaemon(configOverrides,depsOverrides){let config=resolveConfig(configOverrides),deps={...createDefaultDeps(),...depsOverrides},daemonId=deps.generateId(),running2=!0,pollTimeout=null,pollResolve=null,listenConnection=null,heartbeatTimer=null,orphanTimer=null,leaseRecoveryTimer=null,agentResumeTimer=null,inboxWatcherHandle=null,captureFallbackTimer=null,eventRouterHandle=null,deliveryUnsub=null,deliveryRetryTimer=null,stop=()=>{if(running2=!1,pollTimeout)clearTimeout(pollTimeout),pollTimeout=null;if(pollResolve)pollResolve(),pollResolve=null;if(heartbeatTimer)clearInterval(heartbeatTimer),heartbeatTimer=null;if(orphanTimer)clearInterval(orphanTimer),orphanTimer=null;if(leaseRecoveryTimer)clearInterval(leaseRecoveryTimer),leaseRecoveryTimer=null;if(agentResumeTimer)clearInterval(agentResumeTimer),agentResumeTimer=null;if(inboxWatcherHandle)stopInboxWatcher(inboxWatcherHandle),inboxWatcherHandle=null;if(listenConnection)listenConnection.end().catch(()=>{}),listenConnection=null;if(captureFallbackTimer)clearInterval(captureFallbackTimer),captureFallbackTimer=null;if(eventRouterHandle?.stop().catch(()=>{}),eventRouterHandle=null,deliveryRetryTimer)clearInterval(deliveryRetryTimer),deliveryRetryTimer=null;if(deliveryUnsub)deliveryUnsub().catch(()=>{}),deliveryUnsub=null;Promise.resolve().then(() => (init_session_filewatch(),exports_session_filewatch)).then((m)=>m.stopFilewatch()).catch(()=>{}),Promise.resolve().then(() => (init_session_backfill(),exports_session_backfill)).then((m)=>m.stopBackfill()).catch(()=>{}),Promise.resolve().then(() => (init_db(),exports_db)).then(({getLockfilePath:getLockfilePath2})=>{try{__require("fs").unlinkSync(getLockfilePath2())}catch{}}).catch(()=>{})},processTriggers=async()=>{try{let claimed=await 
claimDueTriggers(deps,config,daemonId);if(claimed.length===0)return;if(claimed.length>config.jitterThreshold){let jitterMs=deps.jitter(config.maxJitterMs);deps.log({timestamp:deps.now().toISOString(),level:"info",event:"jitter_applied",count:claimed.length,jitter_ms:jitterMs}),await deps.sleep(jitterMs)}for(let trigger of claimed){if(!running2)break;await fireTrigger(deps,trigger,daemonId)}}catch(err){let message=err instanceof Error?err.message:String(err);deps.log({timestamp:deps.now().toISOString(),level:"error",event:"process_cycle_error",error:message})}};async function setupListenNotify(d,onTrigger){try{let sql=await d.getConnection();return await sql.listen("genie_trigger_due",async()=>{if(!running2)return;await onTrigger()}),d.log({timestamp:d.now().toISOString(),level:"info",event:"listen_started",channel:"genie_trigger_due"}),sql}catch(err){let message=err instanceof Error?err.message:String(err);return d.log({timestamp:d.now().toISOString(),level:"warn",event:"listen_failed",error:message}),null}}function startLeaseRecoveryTimer(d,cfg,dId){return setInterval(async()=>{if(!running2)return;try{await reclaimExpiredLeases(d,dId)}catch(err){let message=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"lease_recovery_error",error:message})}},cfg.leaseRecoveryIntervalMs)}function startOrphanTimer(d,cfg){return setInterval(async()=>{if(!running2)return;try{await reconcileOrphans(d,cfg)}catch(err){let message=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"orphan_reconciliation_error",error:message})}},cfg.orphanCheckIntervalMs)}async function reconcileDeadPaneZombies(d){try{let{reconcileStaleSpawns:reconcileStaleSpawns2}=await Promise.resolve().then(() => (init_agent_registry(),exports_agent_registry));await reconcileStaleSpawns2()}catch(err){let message=err instanceof 
Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"warn",event:"reconcile_stale_spawns_error",error:message})}}function startAgentResumeTimer(d,cfg,dId){return setInterval(async()=>{if(!running2)return;try{await reconcileDeadPaneZombies(d),await reconcileUnresumable(d),await runAgentRecoveryPass(d,dId,cfg)}catch(err){let message=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"agent_resume_timer_error",error:message})}},cfg.leaseRecoveryIntervalMs)}async function startEventRouterSafe(d){try{let handle=await startEventRouter();return d.log({timestamp:d.now().toISOString(),level:"info",event:"event_router_started"}),handle}catch(err){let message=err instanceof Error?err.message:String(err);return d.log({timestamp:d.now().toISOString(),level:"warn",event:"event_router_start_failed",error:message}),null}}async function initSessionCapture(d,cfg){try{let captureSql=await d.getConnection(),{startFilewatch:startFilewatch2}=await Promise.resolve().then(() => (init_session_filewatch(),exports_session_filewatch)),{startBackfill:startBackfill2}=await Promise.resolve().then(() => (init_session_backfill(),exports_session_backfill));if(!await startFilewatch2(captureSql)){let{ingestFileFull:ingestFileFull2,discoverAllJsonlFiles:discoverAllJsonlFiles2,buildWorkerMap:buildWorkerMap2}=await Promise.resolve().then(() => (init_session_capture(),exports_session_capture));d.log({timestamp:d.now().toISOString(),level:"warn",event:"filewatch_failed_fallback_polling"});let timer2=setInterval(async()=>{if(!running2)return;try{let files=await discoverAllJsonlFiles2(),workerMap=await buildWorkerMap2(captureSql);for(let f of files)await ingestFileFull2(captureSql,f.sessionId,f.jsonlPath,f.projectPath,0,{parentSessionId:f.parentSessionId,isSubagent:f.isSubagent,workerMap})}catch{}},cfg.heartbeatIntervalMs);return startBackfill2(captureSql).catch((err)=>{let msg=err instanceof 
Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"backfill_error",error:msg})}),timer2}return startBackfill2(captureSql).catch((err)=>{let msg=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"backfill_error",error:msg})}),null}catch(err){let message=err instanceof Error?err.message:String(err);return d.log({timestamp:d.now().toISOString(),level:"warn",event:"session_capture_init_failed",error:message}),null}}async function runHeartbeat(d){if(!running2)return;try{await collectHeartbeats(d),await collectMachineSnapshot(d),await emitWorkerEvents(d);try{let retSql=await d.getConnection();await retSql`DELETE FROM heartbeats WHERE created_at < now() - interval '7 days'`,await retSql`DELETE FROM machine_snapshots WHERE created_at < now() - interval '30 days'`,await retSql`DELETE FROM audit_events WHERE entity_type LIKE 'otel_%' AND created_at < now() - interval '30 days'`}catch{}}catch(err){let message=err instanceof Error?err.message:String(err);d.log({timestamp:d.now().toISOString(),level:"error",event:"heartbeat_error",error:message})}}let done=(async()=>{deps.log({timestamp:deps.now().toISOString(),level:"info",event:"daemon_started",daemon_id:daemonId,max_concurrent:config.maxConcurrent,poll_interval_ms:config.pollIntervalMs}),logReconcilerMode(deps,daemonId);try{await recoverOnStartup(deps,daemonId,config)}catch(err){let message=err instanceof Error?err.message:String(err);deps.log({timestamp:deps.now().toISOString(),level:"error",event:"recovery_error",error:message})}listenConnection=await setupListenNotify(deps,processTriggers),heartbeatTimer=setInterval(()=>runHeartbeat(deps),config.heartbeatIntervalMs),orphanTimer=startOrphanTimer(deps,config),leaseRecoveryTimer=startLeaseRecoveryTimer(deps,config,daemonId),agentResumeTimer=startAgentResumeTimer(deps,config,daemonId),inboxWatcherHandle=startInboxWatcherIfEnabled(deps),eventRouterHandle=await 
startEventRouterSafe(deps);try{deliveryUnsub=await subscribeDelivery(async(toWorker,messageId)=>{try{let{deliverToPane:deliverToPane2}=await Promise.resolve().then(() => (init_protocol_router(),exports_protocol_router));await deliverToPane2(toWorker,messageId)}catch{}}),deps.log({timestamp:deps.now().toISOString(),level:"info",event:"mailbox_delivery_listen_started"})}catch{}deliveryRetryTimer=setInterval(async()=>{try{let retryable=await getRetryable(MAX_DELIVERY_ATTEMPTS);for(let msg of retryable)try{await processMailboxRetryMessage(deps,msg)}catch{}}catch(err){let message=err instanceof Error?err.message:String(err);deps.log({timestamp:deps.now().toISOString(),level:"error",event:"mailbox_retry_error",error:message})}},60000),captureFallbackTimer=await initSessionCapture(deps,config),await processTriggers();while(running2){if(await new Promise((resolve5)=>{pollResolve=resolve5,pollTimeout=setTimeout(resolve5,config.pollIntervalMs)}),pollResolve=null,!running2)break;await processTriggers()}deps.log({timestamp:deps.now().toISOString(),level:"info",event:"daemon_stopped",daemon_id:daemonId})})();return{stop,done,daemonId}}var ESCALATION_RECIPIENT="team-lead",MAX_DELIVERY_ATTEMPTS=3,TURN_AWARE_RECONCILER_FLAG="GENIE_RECONCILER_TURN_AWARE",RECOVERY_RETRY_DELAY_MS=60000,TURN_AWARE_RESUMABLE_STATES,RESUME_COOLDOWN_MS=60000,DEFAULT_MAX_RESUME_ATTEMPTS=3,INACTIVE_WORKER_STATES,previousWorkerStates;var init_scheduler_daemon=__esm(()=>{init_cron();init_event_router();init_inbox_watcher();init_mailbox();init_run_spec();init_trace_context();TURN_AWARE_RESUMABLE_STATES=new Set(["working","permission","question"]);INACTIVE_WORKER_STATES=new Set(["done","error","suspended","spawning"]);previousWorkerStates=new Map});var exports_detectors={};__export(exports_detectors,{unregisterDetector:()=>unregisterDetector,registerDetector:()=>registerDetector,listDetectors:()=>listDetectors,__clearDetectorsForTests:()=>__clearDetectorsForTests});function 
registerDetector(module){validateModule(module),registry2.set(module.id,module)}function listDetectors(){return Array.from(registry2.values())}function unregisterDetector(id){return registry2.delete(id)}function __clearDetectorsForTests(){registry2.clear()}function validateModule(module){if(!module||typeof module!=="object")throw Error("registerDetector: module must be an object");if(typeof module.id!=="string"||!ID_RE.test(module.id))throw Error(`registerDetector: invalid id '${String(module.id)}' \u2014 must match ${ID_RE.source}`);if(typeof module.version!=="string"||!SEMVER_RE.test(module.version))throw Error(`registerDetector: invalid version '${String(module.version)}' for '${module.id}' \u2014 must be semver`);if(module.riskClass!=="low"&&module.riskClass!=="medium"&&module.riskClass!=="high")throw Error(`registerDetector: invalid riskClass for '${module.id}'`);if(typeof module.query!=="function")throw Error(`registerDetector: query must be a function on '${module.id}'`);if(typeof module.shouldFire!=="function")throw Error(`registerDetector: shouldFire must be a function on '${module.id}'`);if(typeof module.render!=="function")throw Error(`registerDetector: render must be a function on '${module.id}'`)}var registry2,SEMVER_RE,ID_RE;var init_detectors=__esm(()=>{registry2=new Map;SEMVER_RE=/^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/,ID_RE=/^[a-z0-9][a-z0-9._-]{0,127}$/});import{statSync as statSync4}from"fs";function createBackfillNoWorktreeDetector(opts){let exists=opts?.exists??defaultWorktreeExistsCheck,version=opts?.version??"0.1.0",defaultQuery=async()=>{return await(await getConnection())`
      SELECT name, status, worktree_path
      FROM teams
      WHERE status = 'in_progress'
      ORDER BY updated_at DESC
      LIMIT 1000
`},queryFn=opts?.query??defaultQuery;return{id:"rot.backfill-no-worktree",version,riskClass:"low",async query(){return{missing:(await queryFn()).filter((r)=>!exists(r.worktree_path))}},shouldFire(state){return state.missing.length>0},render(state){let row=state.missing[0];return{type:"rot.detected",subject:row.name,payload:{pattern_id:"pattern-1-backfill-no-worktree",entity_id:row.name,observed_state_json:{team_name:row.name,status:row.status,expected_worktree_path:row.worktree_path,fs_exists:!1,total_missing:state.missing.length}}}}}}var defaultWorktreeExistsCheck=(path3)=>{try{return statSync4(path3).isDirectory()}catch{return!1}};var init_pattern_1_backfill_no_worktree=__esm(()=>{init_db();init_detectors();registerDetector(createBackfillNoWorktreeDetector())});function createDuplicateAgentsDetector(opts){let version=opts?.version??"0.1.0",defaultQuery=async()=>{return await(await getConnection())`
      SELECT custom_name,
             team,
             COUNT(*)::int AS dup_count,
             array_agg(id ORDER BY created_at) AS agent_ids
      FROM agents
      WHERE custom_name IS NOT NULL
        AND team IS NOT NULL
      GROUP BY custom_name, team
      HAVING COUNT(*) > 1
      ORDER BY COUNT(*) DESC, custom_name ASC
      LIMIT 200
`},queryFn=opts?.query??defaultQuery;return{id:"rot.duplicate-agents",version,riskClass:"low",async query(){return{duplicates:await queryFn()}},shouldFire(state){return state.duplicates.length>0},render(state){let row=state.duplicates[0],subject=`${row.team}/${row.custom_name}`;return{type:"rot.detected",subject,payload:{pattern_id:"pattern-4-duplicate-agents",entity_id:subject,observed_state_json:{team:row.team,custom_name:row.custom_name,dup_count:row.dup_count,agent_ids:row.agent_ids,total_offending_pairs:state.duplicates.length}}}}}}var init_pattern_4_duplicate_agents=__esm(()=>{init_db();init_detectors();registerDetector(createDuplicateAgentsDetector())});function createZombieTeamLeadDetector(opts){let thresholdMs=(opts?.idleMinutes??DEFAULT_IDLE_MINUTES)*60*1000,version=opts?.version??"0.1.0",defaultQuery=async()=>{let sql=await getConnection();return(await sql`
      WITH active_leads AS (
        SELECT id, team, state
        FROM agents
        WHERE role = 'team-lead'
          AND team IS NOT NULL
          AND state = ANY(${sql.array([...ALIVE_STATES])})
      ),
      last_activity AS (
        SELECT team, MAX(created_at) AS last_at
        FROM genie_runtime_events
        WHERE team IS NOT NULL
          AND subject = ANY(${sql.array([...ACTIVITY_SUBJECTS])})
        GROUP BY team
      )
      SELECT al.team AS team,
             al.id AS lead_agent_id,
             al.state AS lead_state,
             EXTRACT(EPOCH FROM la.last_at) * 1000 AS last_activity_ms,
             EXTRACT(EPOCH FROM now()) * 1000 AS now_ms
      FROM active_leads al
      LEFT JOIN last_activity la ON la.team = al.team
      ORDER BY al.team
      LIMIT 500
      `).map((r)=>({team:r.team,lead_agent_id:r.lead_agent_id,lead_state:r.lead_state,last_activity_ms:r.last_activity_ms===null?null:Number(r.last_activity_ms),now_ms:Number(r.now_ms)}))},queryFn=opts?.query??defaultQuery;return{id:"rot.zombie-team-lead",version,riskClass:"low",async query(){return{zombies:(await queryFn()).filter((r)=>{if(r.last_activity_ms===null)return!0;return r.now_ms-r.last_activity_ms>thresholdMs}),thresholdMs}},shouldFire(state){return state.zombies.length>0},render(state){let row=state.zombies[0],lastAtIso=row.last_activity_ms===null?null:new Date(row.last_activity_ms).toISOString(),minutesIdle=row.last_activity_ms===null?null:Math.floor((row.now_ms-row.last_activity_ms)/60000);return{type:"rot.detected",subject:row.team,payload:{pattern_id:"pattern-5-zombie-team-lead",entity_id:row.team,observed_state_json:{team_name:row.team,lead_agent_id:row.lead_agent_id,lead_state:row.lead_state,last_activity_at:lastAtIso,minutes_idle:minutesIdle,threshold_minutes:Math.floor(state.thresholdMs/60000),total_zombie_teams:state.zombies.length}}}}}}var ALIVE_STATES,ACTIVITY_SUBJECTS,DEFAULT_IDLE_MINUTES=5;var init_pattern_5_zombie_team_lead=__esm(()=>{init_db();init_detectors();ALIVE_STATES=["spawning","working","idle","permission","question"],ACTIVITY_SUBJECTS=["wish.dispatch","mailbox.delivery","agent.lifecycle"];registerDetector(createZombieTeamLeadDetector())});import{existsSync as existsSync32}from"fs";async function listTeamsFromPg(){return(await listTeams2(!1)).map((row)=>({name:row.name,status:row.status,worktreePath:row.worktreePath}))}async function listNativeTeamDirs(){return listTeams()}function pgWorktreeExistsOnDisk(worktreePath){return existsSync32(worktreePath)}var init_team_drift_sources=__esm(()=>{init_claude_native_teams();init_team_manager()});async function buildState(sources){let[lsRows,disbandDirs]=await Promise.all([sources.listTeamsFromPg(),sources.listNativeTeamDirs()]),disbandSet=new Set(disbandDirs),lsSanitizedMap=new Map;for(let
row of lsRows)lsSanitizedMap.set(sanitizeTeamName(row.name),row);let divergent=[];for(let row of lsRows){let san=sanitizeTeamName(row.name);if(!disbandSet.has(san)){divergent.push({team_id:row.name,kind:"missing_in_disband",reason:`PG row visible in ls but no ~/.claude/teams/${san}/ dir`});continue}if(!sources.pgWorktreeExistsOnDisk(row.worktreePath))divergent.push({team_id:row.name,kind:"status_mismatch",reason:`PG row status='${row.status}' but worktree path missing on disk \u2014 pruneStaleWorktrees will silently delete on next disband`})}for(let dir of disbandDirs)if(!lsSanitizedMap.has(dir))divergent.push({team_id:dir,kind:"missing_in_ls",reason:`~/.claude/teams/${dir}/ exists but no PG row (status!='archived')`});return{ls_snapshot:lsRows,disband_snapshot:disbandDirs,divergent}}function primaryDivergenceKind(divergent){let order=["missing_in_ls","missing_in_disband","status_mismatch"];for(let kind of order)if(divergent.some((d)=>d.kind===kind))return kind;return"status_mismatch"}function renderPayload(state){let primary=primaryDivergenceKind(state.divergent),lsTrimmed=state.ls_snapshot.slice(0,MAX_SNAPSHOT_IN_EVENT).map((r)=>({name:r.name,status:r.status})),disbandTrimmed=state.disband_snapshot.slice(0,MAX_SNAPSHOT_IN_EVENT),divergentTrimmed=state.divergent.slice(0,MAX_DIVERGENT_IN_EVENT),observed={ls_snapshot:lsTrimmed,disband_snapshot:disbandTrimmed,divergent_ids:divergentTrimmed.map((d)=>d.team_id),divergence_kind:primary,divergent_detail:divergentTrimmed,ls_total:state.ls_snapshot.length,disband_total:state.disband_snapshot.length,divergent_total:state.divergent.length};return{divergence_kind:primary,divergent_count:state.divergent.length,observed_state_json:JSON.stringify(observed)}}function makeTeamLsDriftDetector(overrides){let 
sources={listTeamsFromPg:overrides?.listTeamsFromPg??listTeamsFromPg,listNativeTeamDirs:overrides?.listNativeTeamDirs??listNativeTeamDirs,pgWorktreeExistsOnDisk:overrides?.pgWorktreeExistsOnDisk??pgWorktreeExistsOnDisk};return{id:DETECTOR_ID,version:DETECTOR_VERSION,riskClass:"medium",query(){return buildState(sources)},shouldFire(state){return state.divergent.length>0},render(state){return{type:"rot.team-ls-drift.detected",subject:DETECTOR_ID,payload:renderPayload(state)}}}}var DETECTOR_ID="rot.team-ls-drift",DETECTOR_VERSION="0.1.0",MAX_DIVERGENT_IN_EVENT=100,MAX_SNAPSHOT_IN_EVENT=200;var init_pattern_2_team_ls_drift=__esm(()=>{init_claude_native_teams();init_team_drift_sources();init_detectors();registerDetector(makeTeamLsDriftDetector())});import{existsSync as existsSync33}from"fs";function isProbeableExecutorState(state){return state==="running"||state==="spawning"}async function resolveLastSeen(sql,executorId,fallback){let lastSeen=(await sql`
  SELECT updated_at FROM executors WHERE id = ${executorId}
  `)[0]?.updated_at??fallback;return typeof lastSeen==="string"?lastSeen:new Date(lastSeen).toISOString()}async function probeAgent(sql,agent){let executor=await getCurrentExecutor(agent.id);if(executor===null)return null;if(!isProbeableExecutorState(executor.state))return null;let paneId=executor.tmuxPaneId??"";if(await isPaneAliveSafe(paneId))return null;if(executor.worktree?existsSync33(executor.worktree):!1)return null;let lastSeenAt=await resolveLastSeen(sql,executor.id,executor.startedAt);return{agent_id:agent.id,custom_name:agent.customName??agent.role??agent.id,team:agent.team??"unknown",last_seen_at:lastSeenAt,expected_session_id:executor.tmuxSession??"",expected_pane_id:paneId,tmux_present:!1,transcript_present:!1}}async function defaultLoadState(){let agents=await listAgents(),sql=await getConnection(),orphans=[];for(let agent of agents){let orphan=await probeAgent(sql,agent);if(orphan!==null)orphans.push(orphan)}return{orphans}}async function isPaneAliveSafe(paneId){try{return await isPaneAlive(paneId)}catch{return!0}}function makeAnchorOrphanDetector(deps){return{id:"rot.anchor-orphan",version:"1.0.0",riskClass:"high",async query(){return deps.loadState()},shouldFire(state){return state.orphans.length>0},render(state){let 
first=state.orphans[0],agentIds=state.orphans.map((o)=>o.agent_id).slice(0,32),customNames=state.orphans.map((o)=>o.custom_name).slice(0,32),lastSeen=state.orphans.map((o)=>o.last_seen_at).slice(0,32);return{type:"rot.detected",subject:first?.agent_id??"unknown",payload:{pattern_id:"pattern-3-anchor-orphan",entity_id:first?.agent_id??"unknown",observed_state_json:{agent_id:first?.agent_id??"unknown",custom_name:first?.custom_name??"unknown",team:first?.team??"unknown",last_seen_at:first?.last_seen_at??"",expected_session_id:first?.expected_session_id??"",expected_pane_id:first?.expected_pane_id??"",tmux_present:!1,transcript_present:!1,orphan_count:state.orphans.length,all_agent_ids:agentIds,all_custom_names:customNames,all_last_seen_at:lastSeen}}}}}}var anchorOrphanDetector;var init_pattern_3_anchor_orphan=__esm(()=>{init_agent_registry();init_db();init_executor_registry();init_tmux();init_detectors();anchorOrphanDetector=makeAnchorOrphanDetector({loadState:defaultLoadState});registerDetector(anchorOrphanDetector)});function indexChildrenByParent(all){let out=new Map;for(let a of all){if(!a.reportsTo)continue;let bucket=out.get(a.reportsTo)??[];bucket.push(a),out.set(a.reportsTo,bucket)}return out}async function collectErroredChildren(children){let out=[];for(let child of children){let childExec=await getCurrentExecutor(child.id);if(childExec?.state!=="error")continue;out.push({id:child.id,erroredAt:childExec.endedAt??childExec.updatedAt})}return out}async function loadLastParentRecovery(sql,parentId,parentErroredAt){let raw=(await sql`
1946
1988
  SELECT created_at FROM genie_runtime_events
@@ -1984,7 +2026,7 @@ ${body}`;writeFileSync12(filePath,output,"utf-8")}function serializeSdkConfig(sd
1984
2026
  AND kind IN ('user', 'assistant', 'message')
1985
2027
  ORDER BY id ASC
1986
2028
  LIMIT 1
1987
- `)[0]?.text?.slice(0,TRANSCRIPT_PREVIEW_CAP)??""}async function matchFreshAgainstPeers(sql,fresh,peers,freshSeed,freshTokens){for(let peer of peers){let peerPreview=await loadArchivedPreview(sql,peer.team),peerTokens=topicTokens(peerPreview),similarity=jaccard(freshTokens,peerTokens);if(similarity>=TOPIC_MISMATCH_THRESHOLD)continue;return{new_agent_id:fresh.id,new_team:fresh.team,new_topic_seed:freshSeed.slice(0,256),conflicting_archived_agent_id:peer.agent_id,conflicting_archived_team:peer.team,conflicting_archived_last_transcript_preview:peerPreview.slice(0,256),jaccard_similarity:similarity}}return null}async function defaultLoadState4(){let sql=await getConnection(),cutoff=new Date(Date.now()-SPAWN_LOOKBACK_MS).toISOString(),recent=(await listAgents()).map((a)=>toFreshCandidate(a,cutoff)).filter((c)=>c!==null);if(recent.length===0)return{ghosts:[]};let ghosts=[];for(let fresh of recent){let peers=await findArchivedPeers(sql,fresh);if(peers.length===0)continue;let freshSeed=await loadFreshSeed(sql,fresh);if(freshSeed.length===0)continue;let freshTokens=topicTokens(freshSeed),match=await matchFreshAgainstPeers(sql,fresh,peers,freshSeed,freshTokens);if(match!==null)ghosts.push(match)}return{ghosts}}function makeSessionReuseGhostDetector(deps){return{id:"rot.session-reuse-ghost",version:"1.0.0",riskClass:"high",async query(){return deps.loadState()},shouldFire(state){return state.ghosts.length>0},render(state){let 
first=state.ghosts[0];return{type:"rot.detected",subject:first?.new_agent_id??"unknown",payload:{pattern_id:"pattern-8-session-reuse-ghost",entity_id:first?.new_agent_id??"unknown",observed_state_json:{new_agent_id:first?.new_agent_id??"unknown",new_team:first?.new_team??"unknown",new_topic_seed:first?.new_topic_seed??"",conflicting_archived_agent_id:first?.conflicting_archived_agent_id??"unknown",conflicting_archived_team:first?.conflicting_archived_team??"unknown",conflicting_archived_last_transcript_preview:first?.conflicting_archived_last_transcript_preview??"",jaccard_similarity:first?.jaccard_similarity??0,ghost_count:state.ghosts.length}}}}}}var SPAWN_LOOKBACK_MS=600000,TOPIC_SEED_TOKEN_CAP=8,TOPIC_MISMATCH_THRESHOLD=0.25,TRANSCRIPT_PREVIEW_CAP=2048,sessionReuseGhostDetector;var init_pattern_8_session_reuse_ghost=__esm(()=>{init_agent_registry();init_db();init_detectors();sessionReuseGhostDetector=makeSessionReuseGhostDetector({loadState:defaultLoadState4});registerDetector(sessionReuseGhostDetector)});var exports_built_in={};var init_built_in=__esm(()=>{init_pattern_2_team_ls_drift();init_pattern_3_anchor_orphan();init_pattern_6_subagent_cascade();init_pattern_7_dispatch_silent_drop();init_pattern_8_session_reuse_ghost()});var exports_detector_scheduler={};__export(exports_detector_scheduler,{start:()=>start,DEFAULT_TICK_INTERVAL_MS:()=>DEFAULT_TICK_INTERVAL_MS,DEFAULT_JITTER_MS:()=>DEFAULT_JITTER_MS,DEFAULT_FIRE_BUDGET:()=>DEFAULT_FIRE_BUDGET});function start(options={}){let 
tickIntervalMs=options.tickIntervalMs??DEFAULT_TICK_INTERVAL_MS,jitterMs=options.jitterMs??DEFAULT_JITTER_MS,defaultBudget=options.defaultFireBudget??DEFAULT_FIRE_BUDGET,budgets=options.fireBudgets??{},now=options.now??(()=>Date.now()),setTimeoutFn=options.setTimeoutFn??((fn,ms)=>setTimeout(fn,ms)),clearTimeoutFn=options.clearTimeoutFn??((handle)=>{clearTimeout(handle)}),resolveDetectors=options.detectorSource??listDetectors,emit2=options.emitFn??emitEvent,state={ticks:0,fires:0,disables:0,budgetBuckets:{}},disabledBuckets=new Set,stopped=!1,currentTimer=null,tickInFlight=null;async function runTick(){if(stopped)return;state.ticks++;let detectors=resolveDetectors();for(let detector of detectors)await runOneDetector(detector)}async function safeCall(fn){try{return await fn()}catch{return null}}function emitFire(detector,event){emit2(event.type,event.payload,{detector_version:detector.version,source_subsystem:"detector-scheduler",entity_id:event.subject??detector.id,agent:process.env.GENIE_AGENT_NAME??"detector-scheduler"}),state.fires++}function emitDisable(detector,budget,current,bucketStart){state.disables++,emit2("detector.disabled",{detector_id:detector.id,cause:"fire_budget_exceeded",budget,fire_count:current,bucket_end_ts:new Date(bucketStart+HOUR_MS).toISOString()},{detector_version:detector.version,source_subsystem:"detector-scheduler",entity_id:detector.id,severity:"warn",agent:process.env.GENIE_AGENT_NAME??"detector-scheduler"})}async function runOneDetector(detector){let bucketStart=Math.floor(now()/HOUR_MS)*HOUR_MS,bucketKey=`${detector.id}:${bucketStart}`,budget=budgets[detector.id]??defaultBudget;if(disabledBuckets.has(bucketKey))return;let result2=await safeCall(()=>detector.query());if(result2===null)return;if(!await safeCall(()=>detector.shouldFire(result2)))return;let current=(state.budgetBuckets[bucketKey]??0)+1;state.budgetBuckets[bucketKey]=current;let event=await 
safeCall(()=>detector.render(result2));if(event===null)return;if(emitFire(detector,event),current>=budget&&!disabledBuckets.has(bucketKey))disabledBuckets.add(bucketKey),emitDisable(detector,budget,current,bucketStart)}function scheduleNext(){if(stopped)return;let jitter=jitterMs>0?Math.floor((Math.random()*2-1)*jitterMs):0,delay=Math.max(0,tickIntervalMs+jitter);currentTimer=setTimeoutFn(()=>{tickInFlight=runTick().finally(()=>{tickInFlight=null,scheduleNext()})},delay)}return scheduleNext(),{stop(){if(stopped)return;if(stopped=!0,currentTimer)clearTimeoutFn(currentTimer),currentTimer=null},async tickNow(){if(tickInFlight)await tickInFlight;await runTick()},stats(){return{...state,budgetBuckets:{...state.budgetBuckets}}}}}var DEFAULT_TICK_INTERVAL_MS=60000,DEFAULT_JITTER_MS=5000,DEFAULT_FIRE_BUDGET=10,HOUR_MS=3600000;var init_detector_scheduler=__esm(()=>{init_detectors();init_emit();init_pattern_2_team_ls_drift()});var exports_executor_read={};__export(exports_executor_read,{stopExecutorReadEndpoint:()=>stopExecutorReadEndpoint,startExecutorReadEndpoint:()=>startExecutorReadEndpoint,readExecutorState:()=>readExecutorState,isExecutorReadEndpointRunning:()=>isExecutorReadEndpointRunning,getExecutorReadPort:()=>getExecutorReadPort});async function readExecutorState(id,sql){let rows=await(sql??await getConnection())`SELECT state, outcome, closed_at FROM executors WHERE id = ${id} LIMIT 1`;if(rows.length===0)return null;let row=rows[0];return{state:row.state,outcome:row.outcome??null,closed_at:row.closed_at==null?null:row.closed_at instanceof Date?row.closed_at.toISOString():row.closed_at}}function getExecutorReadPort(){let envPort=process.env.GENIE_EXECUTOR_READ_PORT;if(envPort){let parsed=Number.parseInt(envPort,10);if(!Number.isNaN(parsed)&&parsed>0&&parsed<65536)return parsed}return getActivePort()+2}async function handleStateRoute(id){if(!UUID_RE.test(id))return Response.json({error:"invalid executor id"},{status:400});try{let reply=await 
readExecutorState(id);if(!reply)return Response.json({error:"not found"},{status:404});return Response.json(reply,{headers:{"Cache-Control":"no-store"}})}catch(err){let msg=err instanceof Error?err.message:String(err);return Response.json({error:msg},{status:500})}}async function routeRequest(req,port){let url=new URL(req.url);if(req.method==="GET"&&url.pathname==="/health")return Response.json({status:"ok",port});if(req.method!=="GET")return new Response("Method Not Allowed",{status:405});let match=ROUTE_RE.exec(url.pathname);if(!match)return new Response("Not Found",{status:404});return handleStateRoute(match[1])}async function startExecutorReadEndpoint(){if(server2)return!0;let port=getExecutorReadPort();try{return server2=Bun.serve({port,hostname:"127.0.0.1",fetch:(req)=>routeRequest(req,port)}),!0}catch(err){let message=err instanceof Error?err.message:String(err);if(message.includes("EADDRINUSE")||message.includes("address already in use"))console.warn(`Executor read endpoint: port ${port} already in use \u2014 skipping`);else console.warn(`Executor read endpoint: failed to start on port ${port}: ${message}`);return!1}}async function stopExecutorReadEndpoint(){if(server2)await server2.stop(!0),server2=null}function isExecutorReadEndpointRunning(){return server2!==null}var server2=null,UUID_RE,ROUTE_RE;var init_executor_read=__esm(()=>{init_db();UUID_RE=/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i,ROUTE_RE=/^\/executors\/([^/]+)\/state\/?$/});var exports_omni_approval_handler={};__export(exports_omni_approval_handler,{startOmniApprovalHandler:()=>startOmniApprovalHandler});class 
OmniApprovalHandler{nc=null;subs=[];sc=import_nats2.StringCodec();permissions;natsUrl;approveTokens;denyTokens;constructor(config){this.permissions=config.permissions,this.natsUrl=config.natsUrl??"localhost:4222",this.approveTokens=(config.permissions.approveTokens??DEFAULT_APPROVE_TOKENS).map((t)=>t.toLowerCase()),this.denyTokens=(config.permissions.denyTokens??DEFAULT_DENY_TOKENS).map((t)=>t.toLowerCase())}async start(){let{omniChat,omniInstance}=this.permissions;if(!omniChat||!omniInstance)return;this.nc=await import_nats2.connect({servers:this.natsUrl});let messageTopic=`omni.message.${omniInstance}.>`,msgSub=this.nc.subscribe(messageTopic);this.subs.push(msgSub),this.processMessages(msgSub);let eventSub=this.nc.subscribe("omni.event.>");this.subs.push(eventSub),this.processEvents(eventSub),handlerInstance=this,console.log(`[omni-approval] Listening for approval replies on ${messageTopic}`)}async stop(){for(let sub of this.subs)sub.unsubscribe();if(this.subs=[],this.nc)await this.nc.close(),this.nc=null;if(handlerInstance===this)handlerInstance=null}async processMessages(sub){for await(let msg of sub)try{let data=JSON.parse(this.sc.decode(msg.data));if((data.chatId??this.extractChatIdFromSubject(msg.subject))!==this.permissions.omniChat)continue;if(data.content)await this.handleTextReply(data.content,data.sender??"whatsapp-user")}catch{}}async processEvents(sub){for await(let msg of sub)try{let data=JSON.parse(this.sc.decode(msg.data));if(data.type!=="reaction")continue;if(data.chatId!==this.permissions.omniChat||data.instanceId!==this.permissions.omniInstance)continue;if(data.emoji&&data.messageId)await this.handleReaction(data.emoji,data.messageId,data.sender??"whatsapp-user")}catch{}}extractChatIdFromSubject(subject){let parts=subject.split(".");if(parts.length>=4)return parts.slice(3).join(".");return}async handleTextReply(content,sender){let normalized=content.trim().toLowerCase();if(!normalized)return!1;let 
decision=null;if(this.approveTokens.includes(normalized))decision="allow";else if(this.denyTokens.includes(normalized))decision="deny";if(!decision)return!1;let pending=await listPendingApprovals();if(pending.length===0)return!1;let oldest=pending[0],resolved=await resolveApproval(oldest.id,decision,sender);if(resolved)console.log(`[omni-approval] Resolved ${oldest.id} as ${decision} by ${sender} (text: "${normalized}")`);return resolved}async handleReaction(emoji,messageId,sender){let decision=null;if(DEFAULT_APPROVE_REACTIONS.includes(emoji))decision="allow";else if(DEFAULT_DENY_REACTIONS.includes(emoji))decision="deny";if(!decision)return!1;try{let sql=await getConnection(),[approval]=await sql`
2029
+ `)[0]?.text?.slice(0,TRANSCRIPT_PREVIEW_CAP)??""}async function matchFreshAgainstPeers(sql,fresh,peers,freshSeed,freshTokens){for(let peer of peers){let peerPreview=await loadArchivedPreview(sql,peer.team),peerTokens=topicTokens(peerPreview),similarity=jaccard(freshTokens,peerTokens);if(similarity>=TOPIC_MISMATCH_THRESHOLD)continue;return{new_agent_id:fresh.id,new_team:fresh.team,new_topic_seed:freshSeed.slice(0,256),conflicting_archived_agent_id:peer.agent_id,conflicting_archived_team:peer.team,conflicting_archived_last_transcript_preview:peerPreview.slice(0,256),jaccard_similarity:similarity}}return null}async function defaultLoadState4(){let sql=await getConnection(),cutoff=new Date(Date.now()-SPAWN_LOOKBACK_MS).toISOString(),recent=(await listAgents()).map((a)=>toFreshCandidate(a,cutoff)).filter((c)=>c!==null);if(recent.length===0)return{ghosts:[]};let ghosts=[];for(let fresh of recent){let peers=await findArchivedPeers(sql,fresh);if(peers.length===0)continue;let freshSeed=await loadFreshSeed(sql,fresh);if(freshSeed.length===0)continue;let freshTokens=topicTokens(freshSeed),match=await matchFreshAgainstPeers(sql,fresh,peers,freshSeed,freshTokens);if(match!==null)ghosts.push(match)}return{ghosts}}function makeSessionReuseGhostDetector(deps){return{id:"rot.session-reuse-ghost",version:"1.0.0",riskClass:"high",async query(){return deps.loadState()},shouldFire(state){return state.ghosts.length>0},render(state){let 
first=state.ghosts[0];return{type:"rot.detected",subject:first?.new_agent_id??"unknown",payload:{pattern_id:"pattern-8-session-reuse-ghost",entity_id:first?.new_agent_id??"unknown",observed_state_json:{new_agent_id:first?.new_agent_id??"unknown",new_team:first?.new_team??"unknown",new_topic_seed:first?.new_topic_seed??"",conflicting_archived_agent_id:first?.conflicting_archived_agent_id??"unknown",conflicting_archived_team:first?.conflicting_archived_team??"unknown",conflicting_archived_last_transcript_preview:first?.conflicting_archived_last_transcript_preview??"",jaccard_similarity:first?.jaccard_similarity??0,ghost_count:state.ghosts.length}}}}}}var SPAWN_LOOKBACK_MS=600000,TOPIC_SEED_TOKEN_CAP=8,TOPIC_MISMATCH_THRESHOLD=0.25,TRANSCRIPT_PREVIEW_CAP=2048,sessionReuseGhostDetector;var init_pattern_8_session_reuse_ghost=__esm(()=>{init_agent_registry();init_db();init_detectors();sessionReuseGhostDetector=makeSessionReuseGhostDetector({loadState:defaultLoadState4});registerDetector(sessionReuseGhostDetector)});var exports_built_in={};var init_built_in=__esm(()=>{init_pattern_1_backfill_no_worktree();init_pattern_4_duplicate_agents();init_pattern_5_zombie_team_lead();init_pattern_2_team_ls_drift();init_pattern_3_anchor_orphan();init_pattern_6_subagent_cascade();init_pattern_7_dispatch_silent_drop();init_pattern_8_session_reuse_ghost()});var exports_detector_scheduler={};__export(exports_detector_scheduler,{start:()=>start,DEFAULT_TICK_INTERVAL_MS:()=>DEFAULT_TICK_INTERVAL_MS,DEFAULT_JITTER_MS:()=>DEFAULT_JITTER_MS,DEFAULT_FIRE_BUDGET:()=>DEFAULT_FIRE_BUDGET});function start(options={}){let 
// Detector scheduler interior (continues `function start(options={}){let` from the previous line).
// Resolves all options with ?? defaults, then defines the tick machinery: runTick walks every
// registered detector; runOneDetector enforces a per-detector, per-hour fire budget keyed by
// `${detector.id}:${bucketStart}`; emitFire/emitDisable publish via the injected emit function.
// safeCall swallows detector errors and maps them to null so one bad detector cannot break a tick.
tickIntervalMs=options.tickIntervalMs??DEFAULT_TICK_INTERVAL_MS,jitterMs=options.jitterMs??DEFAULT_JITTER_MS,defaultBudget=options.defaultFireBudget??DEFAULT_FIRE_BUDGET,budgets=options.fireBudgets??{},now=options.now??(()=>Date.now()),setTimeoutFn=options.setTimeoutFn??((fn,ms)=>setTimeout(fn,ms)),clearTimeoutFn=options.clearTimeoutFn??((handle)=>{clearTimeout(handle)}),resolveDetectors=options.detectorSource??listDetectors,emit2=options.emitFn??emitEvent,state={ticks:0,fires:0,disables:0,budgetBuckets:{}},disabledBuckets=new Set,stopped=!1,currentTimer=null,tickInFlight=null;async function runTick(){if(stopped)return;state.ticks++;let detectors=resolveDetectors();for(let detector of detectors)await runOneDetector(detector)}async function safeCall(fn){try{return await fn()}catch{return null}}function emitFire(detector,event){emit2(event.type,event.payload,{detector_version:detector.version,source_subsystem:"detector-scheduler",entity_id:event.subject??detector.id,agent:process.env.GENIE_AGENT_NAME??"detector-scheduler"}),state.fires++}function emitDisable(detector,budget,current,bucketStart){state.disables++,emit2("detector.disabled",{detector_id:detector.id,cause:"fire_budget_exceeded",budget,fire_count:current,bucket_end_ts:new Date(bucketStart+HOUR_MS).toISOString()},{detector_version:detector.version,source_subsystem:"detector-scheduler",entity_id:detector.id,severity:"warn",agent:process.env.GENIE_AGENT_NAME??"detector-scheduler"})}async function runOneDetector(detector){let bucketStart=Math.floor(now()/HOUR_MS)*HOUR_MS,bucketKey=`${detector.id}:${bucketStart}`,budget=budgets[detector.id]??defaultBudget;if(disabledBuckets.has(bucketKey))return;let result2=await safeCall(()=>detector.query());if(result2===null)return;if(!await safeCall(()=>detector.shouldFire(result2)))return;let current=(state.budgetBuckets[bucketKey]??0)+1;state.budgetBuckets[bucketKey]=current;let event=await
// scheduleNext re-arms a one-shot timer with symmetric jitter in [-jitterMs, +jitterMs];
// stop() cancels the pending timer; tickNow() awaits any in-flight tick before running one.
// Below: executor read endpoint — readExecutorState normalizes closed_at to an ISO string,
// getExecutorReadPort prefers GENIE_EXECUTOR_READ_PORT when it is a valid port, else active port + 2.
safeCall(()=>detector.render(result2));if(event===null)return;if(emitFire(detector,event),current>=budget&&!disabledBuckets.has(bucketKey))disabledBuckets.add(bucketKey),emitDisable(detector,budget,current,bucketStart)}function scheduleNext(){if(stopped)return;let jitter=jitterMs>0?Math.floor((Math.random()*2-1)*jitterMs):0,delay=Math.max(0,tickIntervalMs+jitter);currentTimer=setTimeoutFn(()=>{tickInFlight=runTick().finally(()=>{tickInFlight=null,scheduleNext()})},delay)}return scheduleNext(),{stop(){if(stopped)return;if(stopped=!0,currentTimer)clearTimeoutFn(currentTimer),currentTimer=null},async tickNow(){if(tickInFlight)await tickInFlight;await runTick()},stats(){return{...state,budgetBuckets:{...state.budgetBuckets}}}}}var DEFAULT_TICK_INTERVAL_MS=60000,DEFAULT_JITTER_MS=5000,DEFAULT_FIRE_BUDGET=10,HOUR_MS=3600000;var init_detector_scheduler=__esm(()=>{init_detectors();init_emit();init_pattern_2_team_ls_drift()});var exports_executor_read={};__export(exports_executor_read,{stopExecutorReadEndpoint:()=>stopExecutorReadEndpoint,startExecutorReadEndpoint:()=>startExecutorReadEndpoint,readExecutorState:()=>readExecutorState,isExecutorReadEndpointRunning:()=>isExecutorReadEndpointRunning,getExecutorReadPort:()=>getExecutorReadPort});async function readExecutorState(id,sql){let rows=await(sql??await getConnection())`SELECT state, outcome, closed_at FROM executors WHERE id = ${id} LIMIT 1`;if(rows.length===0)return null;let row=rows[0];return{state:row.state,outcome:row.outcome??null,closed_at:row.closed_at==null?null:row.closed_at instanceof Date?row.closed_at.toISOString():row.closed_at}}function getExecutorReadPort(){let envPort=process.env.GENIE_EXECUTOR_READ_PORT;if(envPort){let parsed=Number.parseInt(envPort,10);if(!Number.isNaN(parsed)&&parsed>0&&parsed<65536)return parsed}return getActivePort()+2}async function handleStateRoute(id){if(!UUID_RE.test(id))return Response.json({error:"invalid executor id"},{status:400});try{let reply=await
readExecutorState(id);if(!reply)return Response.json({error:"not found"},{status:404});return Response.json(reply,{headers:{"Cache-Control":"no-store"}})}catch(err){let msg=err instanceof Error?err.message:String(err);return Response.json({error:msg},{status:500})}}async function routeRequest(req,port){let url=new URL(req.url);if(req.method==="GET"&&url.pathname==="/health")return Response.json({status:"ok",port});if(req.method!=="GET")return new Response("Method Not Allowed",{status:405});let match=ROUTE_RE.exec(url.pathname);if(!match)return new Response("Not Found",{status:404});return handleStateRoute(match[1])}async function startExecutorReadEndpoint(){if(server2)return!0;let port=getExecutorReadPort();try{return server2=Bun.serve({port,hostname:"127.0.0.1",fetch:(req)=>routeRequest(req,port)}),!0}catch(err){let message=err instanceof Error?err.message:String(err);if(message.includes("EADDRINUSE")||message.includes("address already in use"))console.warn(`Executor read endpoint: port ${port} already in use \u2014 skipping`);else console.warn(`Executor read endpoint: failed to start on port ${port}: ${message}`);return!1}}async function stopExecutorReadEndpoint(){if(server2)await server2.stop(!0),server2=null}function isExecutorReadEndpointRunning(){return server2!==null}var server2=null,UUID_RE,ROUTE_RE;var init_executor_read=__esm(()=>{init_db();UUID_RE=/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i,ROUTE_RE=/^\/executors\/([^/]+)\/state\/?$/});var exports_omni_approval_handler={};__export(exports_omni_approval_handler,{startOmniApprovalHandler:()=>startOmniApprovalHandler});class 
OmniApprovalHandler{nc=null;subs=[];sc=import_nats2.StringCodec();permissions;natsUrl;approveTokens;denyTokens;constructor(config){this.permissions=config.permissions,this.natsUrl=config.natsUrl??"localhost:4222",this.approveTokens=(config.permissions.approveTokens??DEFAULT_APPROVE_TOKENS).map((t)=>t.toLowerCase()),this.denyTokens=(config.permissions.denyTokens??DEFAULT_DENY_TOKENS).map((t)=>t.toLowerCase())}async start(){let{omniChat,omniInstance}=this.permissions;if(!omniChat||!omniInstance)return;this.nc=await import_nats2.connect({servers:this.natsUrl});let messageTopic=`omni.message.${omniInstance}.>`,msgSub=this.nc.subscribe(messageTopic);this.subs.push(msgSub),this.processMessages(msgSub);let eventSub=this.nc.subscribe("omni.event.>");this.subs.push(eventSub),this.processEvents(eventSub),handlerInstance=this,console.log(`[omni-approval] Listening for approval replies on ${messageTopic}`)}async stop(){for(let sub of this.subs)sub.unsubscribe();if(this.subs=[],this.nc)await this.nc.close(),this.nc=null;if(handlerInstance===this)handlerInstance=null}async processMessages(sub){for await(let msg of sub)try{let data=JSON.parse(this.sc.decode(msg.data));if((data.chatId??this.extractChatIdFromSubject(msg.subject))!==this.permissions.omniChat)continue;if(data.content)await this.handleTextReply(data.content,data.sender??"whatsapp-user")}catch{}}async processEvents(sub){for await(let msg of sub)try{let data=JSON.parse(this.sc.decode(msg.data));if(data.type!=="reaction")continue;if(data.chatId!==this.permissions.omniChat||data.instanceId!==this.permissions.omniInstance)continue;if(data.emoji&&data.messageId)await this.handleReaction(data.emoji,data.messageId,data.sender??"whatsapp-user")}catch{}}extractChatIdFromSubject(subject){let parts=subject.split(".");if(parts.length>=4)return parts.slice(3).join(".");return}async handleTextReply(content,sender){let normalized=content.trim().toLowerCase();if(!normalized)return!1;let 
decision=null;if(this.approveTokens.includes(normalized))decision="allow";else if(this.denyTokens.includes(normalized))decision="deny";if(!decision)return!1;let pending=await listPendingApprovals();if(pending.length===0)return!1;let oldest=pending[0],resolved=await resolveApproval(oldest.id,decision,sender);if(resolved)console.log(`[omni-approval] Resolved ${oldest.id} as ${decision} by ${sender} (text: "${normalized}")`);return resolved}async handleReaction(emoji,messageId,sender){let decision=null;if(DEFAULT_APPROVE_REACTIONS.includes(emoji))decision="allow";else if(DEFAULT_DENY_REACTIONS.includes(emoji))decision="deny";if(!decision)return!1;try{let sql=await getConnection(),[approval]=await sql`
1988
2030
  SELECT id FROM approvals
1989
2031
  WHERE omni_message_id = ${messageId} AND decision = 'pending'
1990
2032
  `;if(approval){let resolved2=await resolveApproval(approval.id,decision,sender);if(resolved2)console.log(`[omni-approval] Resolved ${approval.id} via reaction ${emoji} by ${sender}`);return resolved2}}catch{}let pending=await listPendingApprovals();if(pending.length===0)return!1;let resolved=await resolveApproval(pending[0].id,decision,sender);if(resolved)console.log(`[omni-approval] Resolved ${pending[0].id} via reaction ${emoji} by ${sender} (fallback)`);return resolved}}async function startOmniApprovalHandler(natsUrl){let ws=findWorkspace();if(!ws)return null;let config=getWorkspaceConfig(ws.root);if(!config.permissions?.omniChat||!config.permissions?.omniInstance)return null;let handler=new OmniApprovalHandler({natsUrl,permissions:config.permissions});return await handler.start(),handler}var import_nats2,DEFAULT_APPROVE_TOKENS,DEFAULT_DENY_TOKENS,DEFAULT_APPROVE_REACTIONS,DEFAULT_DENY_REACTIONS,handlerInstance=null;var init_omni_approval_handler=__esm(()=>{init_db();init_claude_sdk_remote_approval();init_workspace();import_nats2=__toESM(require_mod4(),1),DEFAULT_APPROVE_TOKENS=["y","yes","approve","sim"],DEFAULT_DENY_TOKENS=["n","no","deny","nao"],DEFAULT_APPROVE_REACTIONS=["\uD83D\uDC4D","\u2705","\uD83D\uDC4C"],DEFAULT_DENY_REACTIONS=["\uD83D\uDC4E","\u274C","\uD83D\uDEAB"]});function isValid2(value){return typeof value==="string"&&VALID.has(value)}function resolveExecutorType(override){if(isValid2(override))return override;let env=process.env.GENIE_EXECUTOR;if(isValid2(env))return env;try{let persisted=loadGenieConfigSync().omni?.executor;if(isValid2(persisted))return persisted}catch{}return"tmux"}var VALID;var init_executor_config=__esm(()=>{init_genie_config2();VALID=new Set(["tmux","sdk"])});class BridgeSessionStore{sql;constructor(sql){this.sql=sql}async create(opts){let[row]=await this.sql`
@@ -2131,7 +2173,7 @@ export ${envVars.join(`
2131
2173
  export `)}
2132
2174
  exec ${bunPath} ${genieBin}
2133
2175
  `;writeFileSync14(scriptPath,content,{mode:493});try{execSync10(tuiTmux(`send-keys -t '${leftPane}' '${scriptPath}' Enter`),{stdio:"ignore"})}catch{}}function killTuiSession(){try{execSync10(tuiTmux("kill-server"),{stdio:"ignore"})}catch{}}function listAgentSessions(){try{return execSync10(genieTmuxCmd("list-sessions -F '#{session_name}'"),{encoding:"utf-8"}).trim().split(`
2134
- `).filter(Boolean)}catch{return[]}}function isServeRunning(){let entry2=readServePid();return entry2!==null&&isProcessAlive(entry2.pid)}async function autoStartServe(){if(isServeRunning())return;let bunPath=process.execPath??"bun",genieBin=process.argv[1]??"genie",{spawn:spawnChild}=await import("child_process");spawnChild(bunPath,[genieBin,"serve","--foreground"],{detached:!0,stdio:"ignore",env:{...process.env,GENIE_IS_DAEMON:"1"}}).unref();let deadline=Date.now()+15000;while(Date.now()<deadline)if(await new Promise((resolve5)=>setTimeout(resolve5,500)),isServeRunning()&&isTuiSessionReady())return;if(!isServeRunning())throw Error("genie serve failed to start within 15s. Run `genie serve` manually.")}function isTuiSessionReady(){try{return execSync10(tuiTmux(`has-session -t ${TUI_SESSION}`),{stdio:"ignore"}),!0}catch{return!1}}function ensureTuiSession(workspaceRoot){if(isTuiSessionReady())return;let{leftPane,rightPane}=startTuiTmuxServer();sendTuiLaunchScript(leftPane,rightPane,workspaceRoot)}async function startAgentSync(){try{let{findWorkspace:findWorkspace2,genieHome:genieHome4}=(init_workspace(),__toCommonJS(exports_workspace)),ws=findWorkspace2();if(!ws){let{join:join43}=__require("path"),configPath2=join43(genieHome4(),"config.json");return console.warn(` Agent sync: DISABLED \u2014 no workspace found from cwd or ${configPath2}`),console.warn(" Fix: `cd <workspace> && genie serve restart`, or run `genie init` to bootstrap one"),null}let{syncAgentDirectory:syncAgentDirectory2,watchAgentDirectory:watchAgentDirectory2}=await Promise.resolve().then(() => (init_agent_sync(),exports_agent_sync)),syncResult=await syncAgentDirectory2(ws.root);if(syncResult.registered.length+syncResult.updated.length>0)console.log(` Agent sync: ${syncResult.registered.length} registered, ${syncResult.updated.length} updated (workspace: ${ws.root})`);else console.log(` Agent sync: up to date (workspace: ${ws.root})`);if(syncResult.errors.length>0){console.warn(` Agent sync: 
${syncResult.errors.length} error(s) \u2014 these agents were NOT registered:`);for(let e of syncResult.errors)console.warn(` ${e.name}: ${e.error}`)}let watcher2=watchAgentDirectory2(ws.root,{onSync:(name,action)=>{console.log(` [agent-watcher] ${name}: ${action}`)}});if(watcher2)console.log(" Agent watcher started (watching agents/ directory)");else console.warn(" Agent watcher: FAILED to start \u2014 new agents will not be auto-registered");return watcher2}catch(err){let msg=err instanceof Error?err.message:String(err);return console.error(` Agent sync failed: ${msg}`),null}}async function startPgserve(){console.log(" Starting pgserve...");try{let{ensurePgserve:ensurePgserve2}=await Promise.resolve().then(() => (init_db(),exports_db)),port=await ensurePgserve2();console.log(` pgserve ready on port ${port}`);try{let{registerService:registerService2}=await Promise.resolve().then(() => (init_service_registry(),exports_service_registry));registerService2("pgserve-owner",process.pid)}catch{}}catch(err){let msg=err instanceof Error?err.message:String(err);console.error(` pgserve failed: ${msg}`)}}async function startScheduler(){console.log(" Starting scheduler daemon...");try{let{startDaemon:startDaemon2}=await Promise.resolve().then(() => (init_scheduler_daemon(),exports_scheduler_daemon));handles.schedulerHandle=startDaemon2(),console.log(" Scheduler started (includes event-router + inbox-watcher)");try{let{registerService:registerService2}=await Promise.resolve().then(() => (init_service_registry(),exports_service_registry));registerService2("scheduler",process.pid)}catch{}}catch(err){let msg=err instanceof Error?err.message:String(err);console.error(` Scheduler failed: ${msg}`)}}async function startForeground(headless){let existingEntry=readServePid();if(existingEntry&&isProcessAlive(existingEntry.pid))console.log(`genie serve already running (PID 
${existingEntry.pid})`),process.exit(0);if(existingEntry)forceRemoveServePid();process.env.GENIE_IS_DAEMON="1",writeServePid(process.pid);let mode=headless?"headless":"full";if(console.log(`genie serve starting (PID ${process.pid}, mode: ${mode})`),!headless)await ensureTmux();await startPgserve();let{loadGenieConfigSync:loadGenieConfigSync2}=await Promise.resolve().then(() => (init_genie_config2(),exports_genie_config));if(!loadGenieConfigSync2().brain.embedded)console.log(" Brain server: skipped (brain.embedded=false \u2014 managed externally)");else try{let brain=await import("@khal-os/brain");if(brain.startEmbeddedBrainServer){let{getActivePort:getActivePort2}=await Promise.resolve().then(() => (init_db(),exports_db)),pgPort=getActivePort2();if(pgPort){console.log(" Starting brain server...");let brainPath;try{let{findWorkspace:findWorkspace2}=(init_workspace(),__toCommonJS(exports_workspace)),ws=findWorkspace2();if(ws?.root){let bp=join42(ws.root,"brain");if(existsSync35(bp)&&existsSync35(join42(bp,"brain.json")))brainPath=bp}}catch{}if(brainPath){let handle=await brain.startEmbeddedBrainServer({brainPath,geniePgPort:pgPort});handles.brainHandle={stop:handle.stop,port:handle.port},console.log(` Brain server ready on port ${handle.port}`)}else console.log(" Brain server: no brain/ found in workspace (skipped)")}else console.log(" Brain server: pgserve not available (skipped)")}}catch{}if(!headless){let sessions=listAgentSessions();if(sessions.length>0)console.log(` Agent server (-L genie): ${sessions.length} sessions`);else console.log(" Agent server (-L genie): no sessions yet (created on first spawn)")}if(handles.agentWatcher=await startAgentSync(),!headless){console.log(" Setting up TUI session...");let{leftPane,rightPane}=startTuiTmuxServer(),ws=(()=>{try{let{findWorkspace:findWorkspace2}=(init_workspace(),__toCommonJS(exports_workspace));return findWorkspace2()}catch{return null}})();sendTuiLaunchScript(leftPane,rightPane,ws?.root),console.log(" TUI server 
ready (session: genie-tui)")}await startScheduler();try{await Promise.resolve().then(() => (init_built_in(),exports_built_in));let{start:startDetectorScheduler}=await Promise.resolve().then(() => (init_detector_scheduler(),exports_detector_scheduler));handles.detectorScheduler=startDetectorScheduler(),console.log(" Detector scheduler started (measurement only, 60s \xB1 5s cadence)")}catch(err){let msg=err instanceof Error?err.message:String(err);console.warn(` Detector scheduler: failed \u2014 ${msg}`)}try{let{startExecutorReadEndpoint:startExecutorReadEndpoint2,getExecutorReadPort:getExecutorReadPort2}=await Promise.resolve().then(() => (init_executor_read(),exports_executor_read));if(await startExecutorReadEndpoint2())console.log(` Executor read endpoint ready on port ${getExecutorReadPort2()}`)}catch(err){let msg=err instanceof Error?err.message:String(err);console.warn(` Executor read endpoint: failed \u2014 ${msg}`)}try{let{startOmniApprovalHandler:startOmniApprovalHandler2}=await Promise.resolve().then(() => (init_omni_approval_handler(),exports_omni_approval_handler)),handler=await startOmniApprovalHandler2();if(handler)handles.omniApprovalHandler=handler,console.log(" Omni approval handler started")}catch{}{let{OmniBridge:OmniBridge2}=await Promise.resolve().then(() => (init_omni_bridge(),exports_omni_bridge)),bridge=new OmniBridge2({natsUrl:process.env.GENIE_NATS_URL??"localhost:4222",maxConcurrent:Number(process.env.GENIE_MAX_CONCURRENT??"20"),idleTimeoutMs:Number(process.env.GENIE_IDLE_TIMEOUT_MS??"900000")});try{await bridge.start(),handles.omniBridge=bridge,console.log(" Omni bridge started")}catch(err){let msg=err instanceof Error?err.message:String(err);if(process.env.GENIE_OMNI_REQUIRED==="1")console.error(` Omni bridge: FAILED \u2014 ${msg}`),process.exit(1);console.warn(` Omni bridge: degraded \u2014 ${msg}; set GENIE_OMNI_REQUIRED=1 to make this fatal`)}}console.log(`
2176
+ `).filter(Boolean)}catch{return[]}}function isServeRunning(){let entry2=readServePid();return entry2!==null&&isProcessAlive(entry2.pid)}async function autoStartServe(){if(isServeRunning())return;let bunPath=process.execPath??"bun",genieBin=process.argv[1]??"genie",{spawn:spawnChild}=await import("child_process");spawnChild(bunPath,[genieBin,"serve","--foreground"],{detached:!0,stdio:"ignore",env:{...process.env,GENIE_IS_DAEMON:"1"}}).unref();let deadline=Date.now()+15000;while(Date.now()<deadline)if(await new Promise((resolve5)=>setTimeout(resolve5,500)),isServeRunning()&&isTuiSessionReady())return;if(!isServeRunning())throw Error("genie serve failed to start within 15s. Run `genie serve` manually.")}function isTuiSessionReady(){try{return execSync10(tuiTmux(`has-session -t ${TUI_SESSION}`),{stdio:"ignore"}),!0}catch{return!1}}function ensureTuiSession(workspaceRoot){if(isTuiSessionReady())return;let{leftPane,rightPane}=startTuiTmuxServer();sendTuiLaunchScript(leftPane,rightPane,workspaceRoot)}async function startAgentSync(){try{let{findWorkspace:findWorkspace2,genieHome:genieHome4}=(init_workspace(),__toCommonJS(exports_workspace)),ws=findWorkspace2();if(!ws){let{join:join43}=__require("path"),configPath2=join43(genieHome4(),"config.json");return console.warn(` Agent sync: DISABLED \u2014 no workspace found from cwd or ${configPath2}`),console.warn(" Fix: `cd <workspace> && genie serve restart`, or run `genie init` to bootstrap one"),null}let{syncAgentDirectory:syncAgentDirectory2,watchAgentDirectory:watchAgentDirectory2}=await Promise.resolve().then(() => (init_agent_sync(),exports_agent_sync)),syncResult=await syncAgentDirectory2(ws.root);if(syncResult.registered.length+syncResult.updated.length>0)console.log(` Agent sync: ${syncResult.registered.length} registered, ${syncResult.updated.length} updated (workspace: ${ws.root})`);else console.log(` Agent sync: up to date (workspace: ${ws.root})`);if(syncResult.errors.length>0){console.warn(` Agent sync: 
${syncResult.errors.length} error(s) \u2014 these agents were NOT registered:`);for(let e of syncResult.errors)console.warn(` ${e.name}: ${e.error}`)}let watcher2=watchAgentDirectory2(ws.root,{onSync:(name,action)=>{console.log(` [agent-watcher] ${name}: ${action}`)}});if(watcher2)console.log(" Agent watcher started (watching agents/ directory)");else console.warn(" Agent watcher: FAILED to start \u2014 new agents will not be auto-registered");return watcher2}catch(err){let msg=err instanceof Error?err.message:String(err);return console.error(` Agent sync failed: ${msg}`),null}}async function startPgserve(){console.log(" Starting pgserve...");try{let{ensurePgserve:ensurePgserve2}=await Promise.resolve().then(() => (init_db(),exports_db)),port=await ensurePgserve2();console.log(` pgserve ready on port ${port}`);try{let{registerService:registerService2}=await Promise.resolve().then(() => (init_service_registry(),exports_service_registry));registerService2("pgserve-owner",process.pid)}catch{}}catch(err){let msg=err instanceof Error?err.message:String(err);console.error(` pgserve failed: ${msg}`)}}async function startScheduler(){console.log(" Starting scheduler daemon...");try{let{startDaemon:startDaemon2}=await Promise.resolve().then(() => (init_scheduler_daemon(),exports_scheduler_daemon));handles.schedulerHandle=startDaemon2(),console.log(" Scheduler started (includes event-router + inbox-watcher)");try{let{registerService:registerService2}=await Promise.resolve().then(() => (init_service_registry(),exports_service_registry));registerService2("scheduler",process.pid)}catch{}}catch(err){let msg=err instanceof Error?err.message:String(err);console.error(` Scheduler failed: ${msg}`)}}async function startForeground(headless){let existingEntry=readServePid();if(existingEntry&&isProcessAlive(existingEntry.pid))console.log(`genie serve already running (PID 
${existingEntry.pid})`),process.exit(0);if(existingEntry)forceRemoveServePid();process.env.GENIE_IS_DAEMON="1",writeServePid(process.pid);let mode=headless?"headless":"full";if(console.log(`genie serve starting (PID ${process.pid}, mode: ${mode})`),!headless)await ensureTmux();await startPgserve();let{loadGenieConfigSync:loadGenieConfigSync2}=await Promise.resolve().then(() => (init_genie_config2(),exports_genie_config));if(!loadGenieConfigSync2().brain.embedded)console.log(" Brain server: skipped (brain.embedded=false \u2014 managed externally)");else try{let brain=await import("@khal-os/brain");if(brain.startEmbeddedBrainServer){let{getActivePort:getActivePort2}=await Promise.resolve().then(() => (init_db(),exports_db)),pgPort=getActivePort2();if(pgPort){console.log(" Starting brain server...");let brainPath;try{let{findWorkspace:findWorkspace2}=(init_workspace(),__toCommonJS(exports_workspace)),ws=findWorkspace2();if(ws?.root){let bp=join42(ws.root,"brain");if(existsSync35(bp)&&existsSync35(join42(bp,"brain.json")))brainPath=bp}}catch{}if(brainPath){let handle=await brain.startEmbeddedBrainServer({brainPath,geniePgPort:pgPort});handles.brainHandle={stop:handle.stop,port:handle.port},console.log(` Brain server ready on port ${handle.port}`)}else console.log(" Brain server: no brain/ found in workspace (skipped)")}else console.log(" Brain server: pgserve not available (skipped)")}}catch{}if(!headless){let sessions=listAgentSessions();if(sessions.length>0)console.log(` Agent server (-L genie): ${sessions.length} sessions`);else console.log(" Agent server (-L genie): no sessions yet (created on first spawn)")}if(handles.agentWatcher=await startAgentSync(),!headless){console.log(" Setting up TUI session...");let{leftPane,rightPane}=startTuiTmuxServer(),ws=(()=>{try{let{findWorkspace:findWorkspace2}=(init_workspace(),__toCommonJS(exports_workspace));return findWorkspace2()}catch{return null}})();sendTuiLaunchScript(leftPane,rightPane,ws?.root),console.log(" TUI server 
ready (session: genie-tui)")}await startScheduler();try{await Promise.resolve().then(() => (init_built_in(),exports_built_in));let{start:startDetectorScheduler}=await Promise.resolve().then(() => (init_detector_scheduler(),exports_detector_scheduler)),{listDetectors:listDetectors2}=await Promise.resolve().then(() => (init_detectors(),exports_detectors));handles.detectorScheduler=startDetectorScheduler();let registered=listDetectors2().map((d)=>d.id);console.log(` Detector scheduler started (measurement only, 60s \xB1 5s cadence) \u2014 registered: [${registered.join(", ")}]`)}catch(err){let msg=err instanceof Error?err.message:String(err);console.warn(` Detector scheduler: failed \u2014 ${msg}`)}try{let{startExecutorReadEndpoint:startExecutorReadEndpoint2,getExecutorReadPort:getExecutorReadPort2}=await Promise.resolve().then(() => (init_executor_read(),exports_executor_read));if(await startExecutorReadEndpoint2())console.log(` Executor read endpoint ready on port ${getExecutorReadPort2()}`)}catch(err){let msg=err instanceof Error?err.message:String(err);console.warn(` Executor read endpoint: failed \u2014 ${msg}`)}try{let{startOmniApprovalHandler:startOmniApprovalHandler2}=await Promise.resolve().then(() => (init_omni_approval_handler(),exports_omni_approval_handler)),handler=await startOmniApprovalHandler2();if(handler)handles.omniApprovalHandler=handler,console.log(" Omni approval handler started")}catch{}{let{OmniBridge:OmniBridge2}=await Promise.resolve().then(() => (init_omni_bridge(),exports_omni_bridge)),bridge=new OmniBridge2({natsUrl:process.env.GENIE_NATS_URL??"localhost:4222",maxConcurrent:Number(process.env.GENIE_MAX_CONCURRENT??"20"),idleTimeoutMs:Number(process.env.GENIE_IDLE_TIMEOUT_MS??"900000")});try{await bridge.start(),handles.omniBridge=bridge,console.log(" Omni bridge started")}catch(err){let msg=err instanceof Error?err.message:String(err);if(process.env.GENIE_OMNI_REQUIRED==="1")console.error(` Omni bridge: FAILED \u2014 
${msg}`),process.exit(1);console.warn(` Omni bridge: degraded \u2014 ${msg}; set GENIE_OMNI_REQUIRED=1 to make this fatal`)}}console.log(`
2135
2177
  genie serve is running (${mode}). ${headless?"Send SIGTERM to stop.":"Press Ctrl+C to stop."}`);let shutdownStarted=!1,shutdown2=async()=>{if(shutdownStarted)return;shutdownStarted=!0,console.log(`
2136
2178
  Shutting down genie serve...`),handles.agentWatcher?.close();let schedulerHandle=handles.schedulerHandle;if(schedulerHandle){schedulerHandle.stop();try{await schedulerHandle.done}catch{}handles.schedulerHandle=null}if(handles.detectorScheduler)handles.detectorScheduler.stop(),handles.detectorScheduler=null;if(handles.omniApprovalHandler)await handles.omniApprovalHandler.stop().catch(()=>{}),handles.omniApprovalHandler=null;if(handles.omniBridge)await handles.omniBridge.stop().catch(()=>{}),handles.omniBridge=null;if(Promise.resolve().then(() => (init_executor_read(),exports_executor_read)).then((m)=>m.stopExecutorReadEndpoint().catch(()=>{})),handles.brainHandle)await handles.brainHandle.stop().catch(()=>{}),handles.brainHandle=null;try{let{killAllServices:killAllServices2}=(init_service_registry(),__toCommonJS(exports_service_registry));killAllServices2()}catch{}if(!headless)killTuiSession();try{let lockfilePath=join42(genieHome3(),"pgserve.port");if(existsSync35(lockfilePath))unlinkSync10(lockfilePath)}catch{}removeServePid(),console.log("genie serve stopped.")},gracefulExit=(exitCode)=>{if(shutdownStarted)return;let forceTimer=setTimeout(()=>{console.error("Graceful shutdown timeout (10s). 
Force-killing remaining processes.");try{let{getRegisteredServices:getRegisteredServices2}=(init_service_registry(),__toCommonJS(exports_service_registry));for(let svc of getRegisteredServices2())try{process.kill(svc.pid,"SIGKILL")}catch{}}catch{}removeServePid(),process.exit(1)},1e4);forceTimer.unref(),shutdown2().catch(()=>{}).finally(()=>{clearTimeout(forceTimer),removeServePid(),process.exit(exitCode)})};if(process.on("SIGTERM",()=>gracefulExit(143)),process.on("SIGINT",()=>gracefulExit(130)),process.on("SIGHUP",()=>gracefulExit(129)),process.on("exit",()=>{removeServePid()}),process.on("uncaughtException",(err)=>{console.error("Uncaught exception in genie serve:",err),gracefulExit(1)}),handles.schedulerHandle)await handles.schedulerHandle.done;else await new Promise(()=>{});removeServePid()}async function startBackground(headless){let existingEntry=readServePid();if(existingEntry&&isProcessAlive(existingEntry.pid))console.log(`genie serve already running (PID ${existingEntry.pid})`),process.exit(0);if(existingEntry)forceRemoveServePid();let bunPath=process.execPath??"bun",args=[process.argv[1]??"genie","serve","--foreground"];if(headless)args.push("--headless");let child=spawn4(bunPath,args,{detached:!0,stdio:"ignore",env:{...process.env,GENIE_IS_DAEMON:"1"}});if(child.unref(),child.pid)if(await new Promise((resolve5)=>setTimeout(resolve5,1000)),isProcessAlive(child.pid))console.log(`genie serve started (PID ${child.pid})`);else console.error("Error: genie serve exited immediately."),process.exit(1);else console.error("Error: failed to spawn genie serve"),process.exit(1)}function forceRemoveServePid(){try{unlinkSync10(servePidPath())}catch{}}async function stopServe(){let entry2=readServePid();if(!entry2){console.log("genie serve is not running (no PID file).");return}let pid=entry2.pid;if(!isProcessAlive(pid)){console.log(`Stale PID file (PID ${pid} not running). 
Cleaning up.`),forceRemoveServePid(),killTuiSession();return}console.log(`Stopping genie serve (PID ${pid})...`);try{process.kill(-pid,"SIGTERM")}catch{try{process.kill(pid,"SIGTERM")}catch{}}let deadline=Date.now()+1e4;while(Date.now()<deadline&&isProcessAlive(pid))await new Promise((resolve5)=>setTimeout(resolve5,250));if(isProcessAlive(pid)){console.log("Did not stop within 10s. Sending SIGKILL.");try{process.kill(-pid,"SIGKILL")}catch{try{process.kill(pid,"SIGKILL")}catch{}}}killTuiSession(),forceRemoveServePid(),console.log("genie serve stopped.")}async function printPgserveStatus(){try{let{isAvailable:isAvailable2,getActivePort:getActivePort2}=await Promise.resolve().then(() => (init_db(),exports_db)),dbOk=await isAvailable2();console.log(` pgserve: ${dbOk?`healthy (port ${getActivePort2()})`:"unreachable"}`)}catch{console.log(" pgserve: unavailable")}try{let brain=await import("@khal-os/brain"),brainPort=null;try{let{findWorkspace:findWorkspace2}=(init_workspace(),__toCommonJS(exports_workspace)),ws=findWorkspace2();if(ws?.root&&brain.readServerInfo){let info=brain.readServerInfo(join42(ws.root,"brain"));if(info?.port)brainPort=info.port}}catch{}if(!brainPort&&handles.brainHandle)brainPort=handles.brainHandle.port;if(brainPort)try{let resp=await fetch(`http://127.0.0.1:${brainPort}/healthz`);if(resp.ok)console.log(` brain: running (port ${brainPort})`);else console.log(` brain: unhealthy (port ${brainPort}, status ${resp.status})`)}catch{console.log(` brain: stopped (port ${brainPort} unreachable)`)}else console.log(" brain: stopped")}catch{console.log(" brain: not installed")}}function printTmuxStatus(){let agentRunning=isGenieTmuxRunning(),sessions=agentRunning?listAgentSessions():[];if(console.log(` tmux -L genie: ${agentRunning?`running (${sessions.length} sessions)`:"stopped"}`),sessions.length>0)console.log(` ${sessions.join(", ")}`);let tuiReady=isTuiSessionReady();console.log(` tmux -L genie-tui: ${tuiReady?"running":"stopped"}`)}async function 
printDaemonStatus(serveRunning){try{let schedulerPidPath=join42(genieHome3(),"scheduler.pid");if(existsSync35(schedulerPidPath)){let sPid=Number.parseInt(readFileSync22(schedulerPidPath,"utf-8").trim(),10),sAlive=!Number.isNaN(sPid)&&isProcessAlive(sPid);console.log(` scheduler: ${sAlive?`running (PID ${sPid})`:"stopped"}`)}else if(serveRunning)console.log(" scheduler: integrated (in-process)");else console.log(" scheduler: stopped")}catch{console.log(" scheduler: unknown")}try{let{getInboxPollIntervalMs:getInboxPollIntervalMs2}=await Promise.resolve().then(() => (init_inbox_watcher(),exports_inbox_watcher)),pollMs=getInboxPollIntervalMs2();if(pollMs===0)console.log(" inbox: disabled");else console.log(` inbox: ${serveRunning?"watching":"stopped"} (poll ${pollMs/1000}s)`)}catch{console.log(" inbox: unavailable")}}async function printBridgeStatus(){try{let{getBridgeStatus:getBridgeStatus2}=await Promise.resolve().then(() => (init_bridge_status(),exports_bridge_status)),res=await getBridgeStatus2();if(res.state==="running"&&res.pong){let uptimeSec=Math.round(res.pong.uptimeMs/1000),latency=res.latencyMs??0;console.log(` omni-bridge: running (pid ${res.pong.pid}, uptime ${uptimeSec}s, ping ${latency}ms)`)}else if(res.state==="stale")console.log(` omni-bridge: stale \u2014 ${res.detail}`);else console.log(" omni-bridge: stopped")}catch{console.log(" omni-bridge: unavailable")}}async function statusServe(){let entry2=readServePid(),running2=entry2!==null&&isProcessAlive(entry2.pid);if(console.log(`
2137
2179
  Genie Serve`),console.log("\u2500".repeat(50)),console.log(` Status: ${running2?"running":"stopped"}`),running2&&entry2)console.log(` PID: ${entry2.pid}`);await printPgserveStatus(),printTmuxStatus(),await printDaemonStatus(running2),await printBridgeStatus(),console.log(` PID file: ${servePidPath()}`),console.log("")}function registerServeCommands(program2){let serve=program2.command("serve").description("Start all genie infrastructure (pgserve, tmux, scheduler)");serve.command("start",{isDefault:!0}).description("Start genie serve").option("--daemon","Run in background").option("--foreground","Run in foreground (default)").option("--headless","Run without TUI (services only: pgserve, scheduler, inbox-watcher)").action(async(options)=>{if(options.daemon)await startBackground(options.headless);else await startForeground(options.headless)}),serve.command("stop").description("Stop genie serve and all services").action(async()=>{await stopServe()}),serve.command("status").description("Show service health").action(async()=>{await statusServe()})}var TUI_SESSION="genie-tui",NAV_WIDTH=30,TUI_STYLE,handles;var init_serve=__esm(()=>{init_ensure_tmux();init_process_identity();init_tmux_wrapper();TUI_STYLE={activeBorder:"#7c3aed",inactiveBorder:"#414868"};handles={schedulerHandle:null,agentWatcher:null,brainHandle:null,omniApprovalHandler:null,omniBridge:null,detectorScheduler:null}});var exports_tmux2={};__export(exports_tmux2,{newAgentWindow:()=>newAgentWindow,hasProjectSession:()=>hasProjectSession,attachTuiSession:()=>attachTuiSession,attachProjectWindow:()=>attachProjectWindow});import{spawnSync as spawnSync4}from"child_process";function runTuiTmux(args,stdio="ignore"){return spawnSync4(TMUX_BIN,["-L",TMUX_SOCKET,"-f",TUI_TMUX_CONF,...args],{stdio})}function runTuiTmuxOutput(args){let result2=spawnSync4(TMUX_BIN,["-L",TMUX_SOCKET,"-f",TUI_TMUX_CONF,...args],{encoding:"utf-8"});return result2.status===0?result2.stdout.trim():null}function 
runAgentTmux(args,stdio="ignore"){return spawnSync4(TMUX_BIN,["-L",GENIE_AGENT_SOCKET,...args],{stdio})}function shellQuote3(value){return`'${value.replace(/'/g,"'\\''")}'`}function buildAttachLoop(targetSession){return`while true; do TMUX='' ${[TMUX_BIN,"-L",GENIE_AGENT_SOCKET,"attach-session","-t",targetSession].map(shellQuote3).join(" ")} 2>/dev/null; sleep 0.3; done`}function resolveRightPane(rightPane){if(runTuiTmux(["display-message","-t",rightPane,"-p",""]).status===0)return rightPane;let panes=runTuiTmuxOutput(["list-panes","-t",`${SESSION_NAME}:0`,"-F","#{pane_id}"])?.split(`
@@ -3753,7 +3795,7 @@ Genie Scheduler Daemon`),console.log("\u2500".repeat(50)),console.log(` Status:
3753
3795
  (showing last ${lines} of ${allLines.length} entries)`)}async function tailFollow(filePath,initialLines){let{watch:watch2}=await import("fs");tailStatic(filePath,initialLines),console.log(`
3754
3796
  --- following (Ctrl+C to exit) ---
3755
3797
  `);let lastSize=existsSync40(filePath)?readFileSync27(filePath).length:0,watcher2=watch2(filePath,()=>{try{let content=readFileSync27(filePath,"utf-8");if(content.length>lastSize){let newLines=content.slice(lastSize).trim().split(`
3756
- `).filter(Boolean);for(let line of newLines)printLogLine(line);lastSize=content.length}}catch{}});process.on("SIGINT",()=>{watcher2.close(),process.exit(0)}),await new Promise(()=>{})}function printLogLine(raw){try{let entry2=JSON.parse(raw),ts3=entry2.timestamp?new Date(entry2.timestamp).toLocaleTimeString("en-US",{hour12:!1}):"??:??:??",level=(entry2.level??"info").toUpperCase().padEnd(5),event=entry2.event??"unknown",extras=Object.entries(entry2).filter(([k])=>!["timestamp","level","event"].includes(k)).map(([k,v])=>`${k}=${typeof v==="object"?JSON.stringify(v):v}`).join(" ");console.log(`${ts3} ${level} ${event}${extras?` ${extras}`:""}`)}catch{console.log(raw)}}function formatUptime(ms){let seconds=Math.floor(ms/1000),minutes=Math.floor(seconds/60),hours=Math.floor(minutes/60),days=Math.floor(hours/24);if(days>0)return`${days}d ${hours%24}h ${minutes%60}m`;if(hours>0)return`${hours}h ${minutes%60}m`;if(minutes>0)return`${minutes}m ${seconds%60}s`;return`${seconds}s`}function registerDaemonCommands(program2){let daemon=program2.command("daemon").description("Manage scheduler daemon lifecycle (redirects to genie serve --headless)");daemon.command("install").description("Generate systemd service unit and enable it").action(async()=>{await daemonInstallCommand()}),daemon.command("start").description("Start the scheduler daemon (alias for genie serve --headless)").option("--foreground","Run in foreground (for systemd ExecStart)").action(async(options)=>{await daemonStartCommand(options)}),daemon.command("stop").description("Stop genie serve gracefully").action(async()=>{await daemonStopCommand()}),daemon.command("status").description("Show daemon state, PID, uptime, and trigger stats").action(async()=>{await daemonStatusCommand()}),daemon.command("logs").description("Tail structured JSON scheduler log").option("--follow, -f","Follow log output").option("--lines <n>","Number of lines to show (default: 20)",Number.parseInt).action(async(options)=>{await 
daemonLogsCommand(options)})}init_cron();import{createInterface as createInterface3}from"readline";init_db();init_wish_state();import{spawnSync as spawnSync5}from"child_process";import{existsSync as existsSync41,mkdirSync as mkdirSync19,readFileSync as readFileSync28,renameSync as renameSync4,statSync as statSync4,writeFileSync as writeFileSync21}from"fs";import{homedir as homedir33}from"os";import{basename as basename8,join as join48,relative,resolve as resolve7}from"path";import{gunzipSync,gzipSync}from"zlib";var DB_NAME2="genie",DB_USER="postgres",DB_HOST="127.0.0.1",SNAPSHOT_FILE="snapshot.sql.gz";function getSnapshotPath(cwd){let repoRoot=resolveRepoPath(cwd),genieHome6=process.env.GENIE_HOME??join48(homedir33(),".genie");return join48(genieHome6,"backups",basename8(repoRoot),SNAPSHOT_FILE)}function assertOutsideRepo(snapshotPath,cwd){let repoRoot=resolveRepoPath(cwd),rel=relative(repoRoot,resolve7(snapshotPath));if(!rel.startsWith("..")&&rel!==""&&!rel.startsWith("/"))throw Error(`Refusing to write snapshot inside repo tree: ${snapshotPath}. 
Snapshots must live outside the repo (default: ~/.genie/backups/<repo>/).`)}function pgEnv(port){return{...process.env,PGHOST:DB_HOST,PGPORT:String(port),PGUSER:DB_USER,PGPASSWORD:DB_USER,PGDATABASE:DB_NAME2}}function backup(cwd){let port=getActivePort(),snapshotPath=getSnapshotPath(cwd);assertOutsideRepo(snapshotPath,cwd);let snapshotDir=snapshotPath.slice(0,snapshotPath.lastIndexOf("/")),tmpPath=`${snapshotPath}.tmp`;mkdirSync19(snapshotDir,{recursive:!0});let result2=spawnSync5("pg_dump",["--no-owner","--no-acl"],{env:pgEnv(port),stdio:["pipe","pipe","pipe"],timeout:120000,maxBuffer:1073741824});if(result2.status!==0){let stderr=result2.stderr?.toString().trim()||"unknown error";throw Error(`pg_dump failed (exit ${result2.status}): ${stderr}`)}let compressed=gzipSync(result2.stdout);writeFileSync21(tmpPath,compressed),renameSync4(tmpPath,snapshotPath);let compressedBytes=statSync4(snapshotPath).size,uncompressedBytes=0;try{let sizeResult=spawnSync5("psql",["-t","-A","-c","SELECT pg_database_size(current_database())"],{env:pgEnv(port),encoding:"utf-8",timeout:1e4});if(sizeResult.status===0)uncompressedBytes=Number.parseInt(sizeResult.stdout.trim(),10)||0}catch{}return{path:snapshotPath,compressedBytes,uncompressedBytes}}function restore(snapshotFile,cwd){let port=getActivePort(),filePath=snapshotFile??getSnapshotPath(cwd);if(!existsSync41(filePath))throw Error(`Snapshot not found: ${filePath}`);let env=pgEnv(port),adminEnv={...env,PGDATABASE:"postgres"};spawnSync5("psql",["-v",`target_db=${DB_NAME2}`,"-c","SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = :'target_db' AND pid <> pg_backend_pid()"],{env:adminEnv,stdio:["pipe","pipe","pipe"],timeout:1e4});let dropResult=spawnSync5("psql",["-v",`target_db=${DB_NAME2}`,"-c",'DROP DATABASE IF EXISTS :"target_db"'],{env:adminEnv,stdio:["pipe","pipe","pipe"],timeout:1e4});if(dropResult.status!==0)throw Error(`Failed to drop database: ${dropResult.stderr?.toString().trim()}`);let 
createResult=spawnSync5("psql",["-v",`target_db=${DB_NAME2}`,"-c",'CREATE DATABASE :"target_db"'],{env:adminEnv,stdio:["pipe","pipe","pipe"],timeout:1e4});if(createResult.status!==0)throw Error(`Failed to create database: ${createResult.stderr?.toString().trim()}`);let compressed=readFileSync28(filePath),sql=gunzipSync(compressed),restoreResult=spawnSync5("psql",[],{env:{...env,PGDATABASE:DB_NAME2},input:sql,stdio:["pipe","pipe","pipe"],timeout:300000,maxBuffer:1073741824});if(restoreResult.status!==0)throw Error(`psql restore failed (exit ${restoreResult.status}): ${restoreResult.stderr?.toString().trim()}`)}init_db_migrations();init_db();init_term_format();function printTable(rows){if(rows.length===0){console.log("(0 rows)");return}let columns=Object.keys(rows[0]),widths=columns.map((col)=>{let values2=rows.map((r)=>String(r[col]??"NULL"));return Math.max(col.length,...values2.map((v)=>v.length))}),header=columns.map((col,i2)=>padRight(col,widths[i2])).join(" | ");console.log(header),console.log(widths.map((w)=>"-".repeat(w)).join("-+-"));for(let row of rows){let line=columns.map((col,i2)=>padRight(String(row[col]??"NULL"),widths[i2])).join(" | ");console.log(line)}console.log(`(${rows.length} row${rows.length===1?"":"s"})`)}async function dbStatusCommand(){let port=getActivePort(),dataDir=getDataDir();if(console.log(`
3798
+ `).filter(Boolean);for(let line of newLines)printLogLine(line);lastSize=content.length}}catch{}});process.on("SIGINT",()=>{watcher2.close(),process.exit(0)}),await new Promise(()=>{})}function printLogLine(raw){try{let entry2=JSON.parse(raw),ts3=entry2.timestamp?new Date(entry2.timestamp).toLocaleTimeString("en-US",{hour12:!1}):"??:??:??",level=(entry2.level??"info").toUpperCase().padEnd(5),event=entry2.event??"unknown",extras=Object.entries(entry2).filter(([k])=>!["timestamp","level","event"].includes(k)).map(([k,v])=>`${k}=${typeof v==="object"?JSON.stringify(v):v}`).join(" ");console.log(`${ts3} ${level} ${event}${extras?` ${extras}`:""}`)}catch{console.log(raw)}}function formatUptime(ms){let seconds=Math.floor(ms/1000),minutes=Math.floor(seconds/60),hours=Math.floor(minutes/60),days=Math.floor(hours/24);if(days>0)return`${days}d ${hours%24}h ${minutes%60}m`;if(hours>0)return`${hours}h ${minutes%60}m`;if(minutes>0)return`${minutes}m ${seconds%60}s`;return`${seconds}s`}function registerDaemonCommands(program2){let daemon=program2.command("daemon").description("Manage scheduler daemon lifecycle (redirects to genie serve --headless)");daemon.command("install").description("Generate systemd service unit and enable it").action(async()=>{await daemonInstallCommand()}),daemon.command("start").description("Start the scheduler daemon (alias for genie serve --headless)").option("--foreground","Run in foreground (for systemd ExecStart)").action(async(options)=>{await daemonStartCommand(options)}),daemon.command("stop").description("Stop genie serve gracefully").action(async()=>{await daemonStopCommand()}),daemon.command("status").description("Show daemon state, PID, uptime, and trigger stats").action(async()=>{await daemonStatusCommand()}),daemon.command("logs").description("Tail structured JSON scheduler log").option("--follow, -f","Follow log output").option("--lines <n>","Number of lines to show (default: 20)",Number.parseInt).action(async(options)=>{await 
daemonLogsCommand(options)})}init_cron();import{createInterface as createInterface3}from"readline";init_db();init_wish_state();import{spawnSync as spawnSync5}from"child_process";import{existsSync as existsSync41,mkdirSync as mkdirSync19,readFileSync as readFileSync28,renameSync as renameSync4,statSync as statSync5,writeFileSync as writeFileSync21}from"fs";import{homedir as homedir33}from"os";import{basename as basename8,join as join48,relative,resolve as resolve7}from"path";import{gunzipSync,gzipSync}from"zlib";var DB_NAME2="genie",DB_USER="postgres",DB_HOST="127.0.0.1",SNAPSHOT_FILE="snapshot.sql.gz";function getSnapshotPath(cwd){let repoRoot=resolveRepoPath(cwd),genieHome6=process.env.GENIE_HOME??join48(homedir33(),".genie");return join48(genieHome6,"backups",basename8(repoRoot),SNAPSHOT_FILE)}function assertOutsideRepo(snapshotPath,cwd){let repoRoot=resolveRepoPath(cwd),rel=relative(repoRoot,resolve7(snapshotPath));if(!rel.startsWith("..")&&rel!==""&&!rel.startsWith("/"))throw Error(`Refusing to write snapshot inside repo tree: ${snapshotPath}. 
Snapshots must live outside the repo (default: ~/.genie/backups/<repo>/).`)}function pgEnv(port){return{...process.env,PGHOST:DB_HOST,PGPORT:String(port),PGUSER:DB_USER,PGPASSWORD:DB_USER,PGDATABASE:DB_NAME2}}function backup(cwd){let port=getActivePort(),snapshotPath=getSnapshotPath(cwd);assertOutsideRepo(snapshotPath,cwd);let snapshotDir=snapshotPath.slice(0,snapshotPath.lastIndexOf("/")),tmpPath=`${snapshotPath}.tmp`;mkdirSync19(snapshotDir,{recursive:!0});let result2=spawnSync5("pg_dump",["--no-owner","--no-acl"],{env:pgEnv(port),stdio:["pipe","pipe","pipe"],timeout:120000,maxBuffer:1073741824});if(result2.status!==0){let stderr=result2.stderr?.toString().trim()||"unknown error";throw Error(`pg_dump failed (exit ${result2.status}): ${stderr}`)}let compressed=gzipSync(result2.stdout);writeFileSync21(tmpPath,compressed),renameSync4(tmpPath,snapshotPath);let compressedBytes=statSync5(snapshotPath).size,uncompressedBytes=0;try{let sizeResult=spawnSync5("psql",["-t","-A","-c","SELECT pg_database_size(current_database())"],{env:pgEnv(port),encoding:"utf-8",timeout:1e4});if(sizeResult.status===0)uncompressedBytes=Number.parseInt(sizeResult.stdout.trim(),10)||0}catch{}return{path:snapshotPath,compressedBytes,uncompressedBytes}}function restore(snapshotFile,cwd){let port=getActivePort(),filePath=snapshotFile??getSnapshotPath(cwd);if(!existsSync41(filePath))throw Error(`Snapshot not found: ${filePath}`);let env=pgEnv(port),adminEnv={...env,PGDATABASE:"postgres"};spawnSync5("psql",["-v",`target_db=${DB_NAME2}`,"-c","SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = :'target_db' AND pid <> pg_backend_pid()"],{env:adminEnv,stdio:["pipe","pipe","pipe"],timeout:1e4});let dropResult=spawnSync5("psql",["-v",`target_db=${DB_NAME2}`,"-c",'DROP DATABASE IF EXISTS :"target_db"'],{env:adminEnv,stdio:["pipe","pipe","pipe"],timeout:1e4});if(dropResult.status!==0)throw Error(`Failed to drop database: ${dropResult.stderr?.toString().trim()}`);let 
createResult=spawnSync5("psql",["-v",`target_db=${DB_NAME2}`,"-c",'CREATE DATABASE :"target_db"'],{env:adminEnv,stdio:["pipe","pipe","pipe"],timeout:1e4});if(createResult.status!==0)throw Error(`Failed to create database: ${createResult.stderr?.toString().trim()}`);let compressed=readFileSync28(filePath),sql=gunzipSync(compressed),restoreResult=spawnSync5("psql",[],{env:{...env,PGDATABASE:DB_NAME2},input:sql,stdio:["pipe","pipe","pipe"],timeout:300000,maxBuffer:1073741824});if(restoreResult.status!==0)throw Error(`psql restore failed (exit ${restoreResult.status}): ${restoreResult.stderr?.toString().trim()}`)}init_db_migrations();init_db();init_term_format();function printTable(rows){if(rows.length===0){console.log("(0 rows)");return}let columns=Object.keys(rows[0]),widths=columns.map((col)=>{let values2=rows.map((r)=>String(r[col]??"NULL"));return Math.max(col.length,...values2.map((v)=>v.length))}),header=columns.map((col,i2)=>padRight(col,widths[i2])).join(" | ");console.log(header),console.log(widths.map((w)=>"-".repeat(w)).join("-+-"));for(let row of rows){let line=columns.map((col,i2)=>padRight(String(row[col]??"NULL"),widths[i2])).join(" | ");console.log(line)}console.log(`(${rows.length} row${rows.length===1?"":"s"})`)}async function dbStatusCommand(){let port=getActivePort(),dataDir=getDataDir();if(console.log(`
3757
3799
  Genie Database Status`),console.log("\u2500".repeat(50)),console.log(` Port: ${port}`),console.log(" Host: 127.0.0.1"),console.log(` Data dir: ${dataDir}`),!await isAvailable()){console.log(" Status: stopped"),console.log(`
3758
3800
  pgserve is not running. It will auto-start on first use.`),console.log("");return}console.log(" Status: running");try{let sql=await getConnection(),sizeResult=await sql`SELECT pg_size_pretty(pg_database_size(current_database())) AS size`;console.log(` DB size: ${sizeResult[0].size}`);let migrations=await getMigrationStatus(sql);console.log(`
3759
3801
  Migrations: ${migrations.applied.length} applied, ${migrations.pending.length} pending`);let tables=await sql`
@@ -3771,7 +3813,7 @@ Done. ${results.length} migration${results.length===1?"":"s"} applied.`),await s
3771
3813
  DELETE FROM genie_runtime_events
3772
3814
  WHERE created_at < now() - make_interval(secs => ${intervalSec})
3773
3815
  `,count=Number(result2.count);console.log(`Deleted ${count} event${count===1?"":"s"} older than ${options.olderThan}.`)}await shutdown()}catch(err){let message=err instanceof Error?err.message:String(err);console.error(`Prune failed: ${message}`),process.exit(1)}}function registerDbCommands(program2){let db=program2.command("db").description("Database management (pgserve)");db.command("status").description("Show pgserve health, port, data dir, and table counts").action(dbStatusCommand),db.command("migrate").description("Run pending database migrations").action(dbMigrateCommand),db.command("query <sql>").description("Execute arbitrary SQL and print results").action(dbQueryCommand),db.command("url").description("Print postgres connection URL for direct access").option("--quiet","Print URL only, no trailing newline (for scripts)").action((options)=>{let url=`postgres://postgres:postgres@127.0.0.1:${getActivePort()}/genie`;if(options.quiet)process.stdout.write(url);else console.log(url)}),db.command("prune-events").description("Prune old runtime events beyond retention period").option("--older-than <duration>","Delete events older than (e.g., 30d, 7d)","14d").option("--dry-run","Show count without deleting").action(dbPruneEventsCommand),db.command("backup").description("Dump database to .genie/snapshot.sql.gz").action(dbBackupCommand),db.command("restore [file]").description("Restore database from snapshot (default: .genie/snapshot.sql.gz)").option("-y, --yes","Skip confirmation prompt").action(dbRestoreCommand)}init_dispatch();function registerDispatchGroupCommands(program2){let dispatch2=program2.command("dispatch").description("Framework skill dispatch primitives (brainstorm/wish/review)");dispatch2.command("brainstorm <agent> <slug>").description("Spawn agent with brainstorm DRAFT.md context").action(async(agent,slug)=>{await brainstormCommand(agent,slug)}),dispatch2.command("wish <agent> <slug>").description("Spawn agent with wish DESIGN.md 
context").action(async(agent,slug)=>{await wishCommand(agent,slug)}),dispatch2.command("review <agent> <ref>").description("Spawn agent with review scope for a wish group (format: <slug>#<group>)").action(async(agent,ref)=>{await reviewCommand(agent,ref)})}init_dispatch();init_export_format();import{existsSync as existsSync48,mkdirSync as mkdirSync20,writeFileSync as writeFileSync22}from"fs";import{dirname as dirname11}from"path";async function getSql(){let{getConnection:getConnection2}=await Promise.resolve().then(() => (init_db(),exports_db));return getConnection2()}async function getVersion(){let{VERSION:VERSION2}=await Promise.resolve().then(() => (init_version(),exports_version));return VERSION2}async function getActorName(){let{getActor:getActor2}=await Promise.resolve().then(() => (init_audit(),exports_audit));return getActor2()}async function detectTables(sql,tables){let{filterAvailableTables:filterAvailableTables2}=await Promise.resolve().then(() => exports_table_detect);return filterAvailableTables2(sql,tables)}function outputDocument(doc,options){let json2=options.pretty?JSON.stringify(doc,null,2):JSON.stringify(doc);if(options.output){let dir=dirname11(options.output);if(!existsSync48(dir))mkdirSync20(dir,{recursive:!0});writeFileSync22(options.output,`${json2}
3774
- `);let tables=Object.keys(doc.data),rows=Object.values(doc.data).reduce((sum,arr)=>sum+arr.length,0);if(console.log(`Exported ${tables.length} tables (${rows} rows) to ${options.output}`),doc.skippedTables.length>0)console.log(`Skipped tables (not found): ${doc.skippedTables.join(", ")}`)}else console.log(json2)}function autoOutputName(){let d=new Date;return`genie-backup-${`${d.getFullYear()}${String(d.getMonth()+1).padStart(2,"0")}${String(d.getDate()).padStart(2,"0")}`}.json`}async function exportGroup(sql,group,filter){let tables=GROUP_TABLES[group],{available,skipped}=await detectTables(sql,tables),data={};for(let table of available)if(filter)data[table]=[...await sql.unsafe(`SELECT * FROM ${table} WHERE ${filter.column} = $1`,[filter.value])];else data[table]=[...await sql.unsafe(`SELECT * FROM ${table}`)];return{data,skipped}}async function exportBoards(sql,name){let tables=GROUP_TABLES.boards,{available,skipped}=await detectTables(sql,tables),data={};for(let table of available)if(name&&table==="boards")data[table]=[...await sql`SELECT * FROM boards WHERE name = ${name}`];else if(table==="task_types")data[table]=[...await sql`SELECT * FROM task_types WHERE is_builtin = false`];else data[table]=[...await sql.unsafe(`SELECT * FROM ${table}`)];return{data,skipped}}var TASK_JOIN_ALIASES={task_tags:"tt",task_actors:"ta",task_dependencies:"td",task_stage_log:"tsl"};async function resolveProjectId2(sql,projectName){let projects=await sql`SELECT id FROM projects WHERE name = ${projectName}`;if(projects.length===0)throw Error(`Project not found: ${projectName}`);return projects[0].id}function stripEphemeralFields(rows){return rows.map((r)=>{let{checkout_run_id,execution_locked_at,session_id,pane_id,...rest}=r;return rest})}async function exportTaskTable(sql,table,projectId){let alias=TASK_JOIN_ALIASES[table];if(table==="tasks"){let rows=projectId?[...await sql.unsafe("SELECT * FROM tasks WHERE project_id = $1",[projectId])]:[...await sql`SELECT * FROM 
tasks`];return stripEphemeralFields(rows)}if(alias&&projectId)return[...await sql.unsafe(`SELECT ${alias}.* FROM ${table} ${alias} JOIN tasks t ON ${alias}.task_id = t.id WHERE t.project_id = $1`,[projectId])];return[...await sql.unsafe(`SELECT * FROM ${table}`)]}async function exportTasks(sql,projectName){let tables=GROUP_TABLES.tasks,{available,skipped}=await detectTables(sql,tables),data={},projectId=projectName?await resolveProjectId2(sql,projectName):null;for(let table of available)data[table]=await exportTaskTable(sql,table,projectId);return{data,skipped}}async function exportSchedules(sql,name){let{available,skipped}=await detectTables(sql,["schedules"]),data={};if(available.includes("schedules"))if(name)data.schedules=[...await sql`SELECT * FROM schedules WHERE name = ${name}`];else data.schedules=[...await sql`SELECT * FROM schedules`];return{data,skipped}}async function exportTags(sql){let{available,skipped}=await detectTables(sql,["tags"]),data={};if(available.includes("tags"))data.tags=[...await sql`SELECT * FROM tags WHERE name NOT LIKE 'test-%'`];return{data,skipped}}async function exportAll(sql){let allSkipped=[],allData={};for(let group of ALL_GROUPS){let result2;switch(group){case"boards":result2=await exportBoards(sql);break;case"tasks":result2=await exportTasks(sql);break;case"tags":result2=await exportTags(sql);break;case"schedules":result2=await exportSchedules(sql);break;default:result2=await exportGroup(sql,group);break}Object.assign(allData,result2.data),allSkipped.push(...result2.skipped)}return{data:allData,skipped:allSkipped}}async function runExport(groups,type2,exportFn,options){let sql=await getSql(),[version,actor]=await Promise.all([getVersion(),getActorName()]),doc=createExportDocument(type2,groups,version,actor),{data,skipped}=await exportFn(sql);doc.data=data,doc.skippedTables=skipped,outputDocument(doc,options)}function registerExportCommands(program2){let exp=program2.command("export").description("Export genie data as 
JSON").option("--output <file>","Write to file instead of stdout").option("-o <file>","Alias for --output").option("--pretty","Pretty-print JSON").action(async(options)=>{try{if(!options.output)options.output=autoOutputName();await runExport([...ALL_GROUPS],"full",(sql)=>exportAll(sql),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts=(cmd)=>cmd.option("--output <file>","Write to file instead of stdout").option("--pretty","Pretty-print JSON");sharedOpts(exp.command("all").description("Full backup (all present tables)")).action(async(options)=>{try{if(!options.output)options.output=autoOutputName();await runExport([...ALL_GROUPS],"full",(sql)=>exportAll(sql),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("boards [name]").description("Export boards, templates, and task types")).action(async(name,options)=>{try{await runExport(["boards"],"partial",(sql)=>exportBoards(sql,name),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("tasks").description("Export tasks with deps, actors, and stage log").option("--project <name>","Filter by project name")).action(async(options)=>{try{await runExport(["tasks"],"partial",(sql)=>exportTasks(sql,options.project),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("tags").description("Export tags")).action(async(options)=>{try{await runExport(["tags"],"partial",(sql)=>exportTags(sql),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("projects").description("Export projects")).action(async(options)=>{try{await 
runExport(["projects"],"partial",(sql)=>exportGroup(sql,"projects"),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("schedules [name]").description("Export schedules with run_spec")).action(async(name,options)=>{try{await runExport(["schedules"],"partial",(sql)=>exportSchedules(sql,name),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("agents").description("Export agents, templates, and checkpoints")).action(async(options)=>{try{await runExport(["agents"],"partial",(sql)=>exportGroup(sql,"agents"),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("comms").description("Export conversations, messages, mailbox")).action(async(options)=>{try{await runExport(["comms"],"partial",(sql)=>exportGroup(sql,"comms"),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("config").description("Export OS config (graceful skip if missing)")).action(async(options)=>{try{await runExport(["config"],"partial",(sql)=>exportGroup(sql,"config"),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}})}init_history();init_export_format();import{readFileSync as readFileSync30}from"fs";var 
IMPORT_LEVELS=[["schedules","sessions","projects","agent_templates","agent_checkpoints","tags","task_types","notification_preferences","os_config","golden_images","warm_pool","instances"],["triggers","boards","board_templates","agents","conversations"],["tasks","runs","messages","conversation_members","mailbox","team_chat"],["task_tags","task_actors","task_dependencies","task_stage_log","heartbeats","machine_snapshots"]],SELF_REFERENTIAL_COLUMNS={tasks:"parent_id",messages:"reply_to_id",conversations:"parent_message_id"};function getTableLevel(table){for(let i2=0;i2<IMPORT_LEVELS.length;i2++)if(IMPORT_LEVELS[i2].includes(table))return i2;return-1}function sortByImportOrder(tables){return[...tables].sort((a,b2)=>{let la=getTableLevel(a),lb=getTableLevel(b2);return(la===-1?999:la)-(lb===-1?999:lb)})}function getPrimaryKey(table){return{task_tags:["task_id","tag_id"],task_actors:["task_id","actor_type","actor_id","role"],task_dependencies:["task_id","depends_on_id"],conversation_members:["conversation_id","actor_type","actor_id"],notification_preferences:["actor_type","actor_id","channel"]}[table]??["id"]}var VALID_TABLES=new Set(Object.values(GROUP_TABLES).flat());function assertValidTable(name){if(!VALID_TABLES.has(name))throw Error(`Invalid table name: "${name}" is not in the schema whitelist`)}var VALID_COLUMN_RE=/^[a-zA-Z_][a-zA-Z0-9_]*$/;function assertValidColumnName(name){if(!VALID_COLUMN_RE.test(name))throw Error(`Invalid column name: "${name.slice(0,60)}" contains disallowed characters. 
Column names must match /^[a-zA-Z_][a-zA-Z0-9_]*$/.`)}async function getSql2(){let{getConnection:getConnection2}=await Promise.resolve().then(() => (init_db(),exports_db));return getConnection2()}async function getActorName2(){let{getActor:getActor2}=await Promise.resolve().then(() => (init_audit(),exports_audit));return getActor2()}async function detectTables2(sql,tables){let{filterAvailableTables:filterAvailableTables2}=await Promise.resolve().then(() => exports_table_detect);return filterAvailableTables2(sql,tables)}async function detectConflicts(sql,table,rows){if(rows.length===0)return[];assertValidTable(table);let pk=getPrimaryKey(table);if(pk.length===1){let key=pk[0],ids=rows.map((r)=>r[key]),existing=await sql.unsafe(`SELECT ${key} FROM ${table} WHERE ${key} = ANY($1)`,[ids]),existingSet=new Set(existing.map((r)=>String(r[key])));return rows.filter((r)=>existingSet.has(String(r[key])))}let conflicts=[];for(let row of rows){let conditions=pk.map((col,i2)=>`${col} = $${i2+1}`).join(" AND "),values2=pk.map((col)=>row[col]);if((await sql.unsafe(`SELECT 1 FROM ${table} WHERE ${conditions} LIMIT 1`,values2)).length>0)conflicts.push(row)}return conflicts}function prepareRow(row,table,selfRefUpdates){let selfRefCol=SELF_REFERENTIAL_COLUMNS[table],entries=Object.entries(row),columns=entries.map(([k])=>k),values2=entries.map(([,v])=>v);for(let col of columns)assertValidColumnName(col);if(selfRefCol&&row[selfRefCol]!=null){let idx=columns.indexOf(selfRefCol);if(idx!==-1){let originalSelfRef=values2[idx];values2[idx]=null;let pk=getPrimaryKey(table);selfRefUpdates.push({pk:pk.length===1?row[pk[0]]:pk.map((k)=>row[k]),value:originalSelfRef})}}return{columns,values:values2,quotedCols:columns.map((c)=>`"${c}"`).join(", "),placeholders:values2.map((_,i2)=>`$${i2+1}`).join(", ")}}async function insertOneRow(tx,table,row,prepared,mode){assertValidTable(table);let{quotedCols,placeholders,values:values2}=prepared,pk=getPrimaryKey(table);if(mode==="overwrite"){let 
pkCondition=pk.map((col,i2)=>`"${col}" = $${values2.length+i2+1}`).join(" AND "),pkValues=pk.map((col)=>row[col]);await tx.unsafe(`DELETE FROM ${table} WHERE ${pkCondition}`,pkValues),await tx.unsafe(`INSERT INTO ${table} (${quotedCols}) VALUES (${placeholders})`,values2)}else if(mode==="merge"){let onConflict=pk.map((c)=>`"${c}"`).join(", ");await tx.unsafe(`INSERT INTO ${table} (${quotedCols}) VALUES (${placeholders}) ON CONFLICT (${onConflict}) DO NOTHING`,values2)}else await tx.unsafe(`INSERT INTO ${table} (${quotedCols}) VALUES (${placeholders})`,values2)}async function updateSelfRefs(tx,table,updates){assertValidTable(table);let selfRefCol=SELF_REFERENTIAL_COLUMNS[table],pk=getPrimaryKey(table);if(pk.length!==1)return;for(let{pk:pkVal,value}of updates)await tx.unsafe(`UPDATE ${table} SET "${selfRefCol}" = $1 WHERE "${pk[0]}" = $2`,[value,pkVal])}async function insertRows(tx,table,rows,mode){if(rows.length===0)return 0;let selfRefUpdates=[];for(let row of rows){let prepared=prepareRow(row,table,selfRefUpdates);await insertOneRow(tx,table,row,prepared,mode)}if(selfRefUpdates.length>0)await updateSelfRefs(tx,table,selfRefUpdates);return rows.length}function parseExportFile(filePath){let raw=readFileSync30(filePath,"utf-8"),parsed;try{parsed=JSON.parse(raw)}catch{throw Error(`Invalid JSON in ${filePath}`)}let validation=validateExportDocument(parsed);if(!validation.valid)throw Error(`Invalid export document: ${validation.error}`);return validation.doc}async function filterTablesByGroup(allTables,groupFilter){if(!groupFilter||groupFilter.length===0)return allTables;let{GROUP_TABLES:GROUP_TABLES2}=await Promise.resolve().then(() => (init_export_format(),exports_export_format)),allowedTables=new Set;for(let group of groupFilter){let tables=GROUP_TABLES2[group];if(tables)for(let t of tables)allowedTables.add(t);else console.warn(`Warning: Unknown group "${group}", skipping`)}return allTables.filter((t)=>allowedTables.has(t))}async function 
checkConflicts(sql,tables,data){for(let table of tables){let rows=data[table];if(!rows||rows.length===0)continue;let conflicts=await detectConflicts(sql,table,rows);if(conflicts.length>0){let pk=getPrimaryKey(table),ids=conflicts.slice(0,5).map((r)=>pk.map((k)=>r[k]).join(",")).join("; ");throw Error(`Conflict in table "${table}": ${conflicts.length} existing row(s) (e.g., ${ids}). Use --merge or --overwrite to resolve.`)}}}async function runImport(filePath,mode,groupFilter){let doc=parseExportFile(filePath),tablesToImport=await filterTablesByGroup(Object.keys(doc.data),groupFilter);if(tablesToImport.length===0){console.log("No tables to import.");return}tablesToImport=sortByImportOrder(tablesToImport);let sql=await getSql2(),{available}=await detectTables2(sql,tablesToImport),skippedTables=tablesToImport.filter((t)=>!available.includes(t));if(tablesToImport=available,skippedTables.length>0)console.log(`Skipping tables not in database: ${skippedTables.join(", ")}`);if(mode==="fail")await checkConflicts(sql,tablesToImport,doc.data);let totalInserted=0,tableStats={};await sql.begin(async(tx)=>{for(let table of tablesToImport){let rows=doc.data[table];if(!rows||rows.length===0)continue;let count=await insertRows(tx,table,rows,mode);tableStats[table]=count,totalInserted+=count}});let actor=await getActorName2(),{recordAuditEvent:recordAuditEvent3}=await Promise.resolve().then(() => (init_audit(),exports_audit));await recordAuditEvent3("import",filePath,"import_complete",actor,{mode,tables:tableStats,totalRows:totalInserted,skippedTables,sourceVersion:doc.version,sourceDate:doc.exportedAt}),console.log(`Import complete: ${totalInserted} rows across ${Object.keys(tableStats).length} tables`);for(let[table,count]of Object.entries(tableStats))if(count>0)console.log(` ${table}: ${count} rows`);if(skippedTables.length>0)console.log(`Skipped (not in DB): ${skippedTables.join(", ")}`)}function registerImportCommands(program2){program2.command("import 
<file>").description("Import genie data from JSON export").option("--fail","Abort on any conflict (default)").option("--merge","Skip existing rows, import new ones").option("--overwrite","Replace existing rows with imported data").option("--groups <list>","Comma-separated groups to import (e.g., boards,tags)").action(async(file,options)=>{try{let mode="fail";if(options.overwrite)mode="overwrite";else if(options.merge)mode="merge";let groupFilter=options.groups?.split(",").map((g)=>g.trim());await runImport(file,mode,groupFilter)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}})}init_esm14();import{existsSync as existsSync52,mkdirSync as mkdirSync23,symlinkSync,writeFileSync as writeFileSync25}from"fs";import{basename as basename10,join as join59,relative as relative4,resolve as resolve9,sep as sep2}from"path";import{cpSync,existsSync as existsSync50,mkdirSync as mkdirSync21,renameSync as renameSync5,rmSync as rmSync4}from"fs";import{join as join56,relative as relative3}from"path";var import_ignore=__toESM(require_ignore(),1);import{existsSync as existsSync49,readFileSync as readFileSync31,readdirSync as readdirSync10,statSync as statSync5}from"fs";import{join as join55,relative as relative2}from"path";var GENIEIGNORE_DEFAULTS=`node_modules
3816
+ `);let tables=Object.keys(doc.data),rows=Object.values(doc.data).reduce((sum,arr)=>sum+arr.length,0);if(console.log(`Exported ${tables.length} tables (${rows} rows) to ${options.output}`),doc.skippedTables.length>0)console.log(`Skipped tables (not found): ${doc.skippedTables.join(", ")}`)}else console.log(json2)}function autoOutputName(){let d=new Date;return`genie-backup-${`${d.getFullYear()}${String(d.getMonth()+1).padStart(2,"0")}${String(d.getDate()).padStart(2,"0")}`}.json`}async function exportGroup(sql,group,filter){let tables=GROUP_TABLES[group],{available,skipped}=await detectTables(sql,tables),data={};for(let table of available)if(filter)data[table]=[...await sql.unsafe(`SELECT * FROM ${table} WHERE ${filter.column} = $1`,[filter.value])];else data[table]=[...await sql.unsafe(`SELECT * FROM ${table}`)];return{data,skipped}}async function exportBoards(sql,name){let tables=GROUP_TABLES.boards,{available,skipped}=await detectTables(sql,tables),data={};for(let table of available)if(name&&table==="boards")data[table]=[...await sql`SELECT * FROM boards WHERE name = ${name}`];else if(table==="task_types")data[table]=[...await sql`SELECT * FROM task_types WHERE is_builtin = false`];else data[table]=[...await sql.unsafe(`SELECT * FROM ${table}`)];return{data,skipped}}var TASK_JOIN_ALIASES={task_tags:"tt",task_actors:"ta",task_dependencies:"td",task_stage_log:"tsl"};async function resolveProjectId2(sql,projectName){let projects=await sql`SELECT id FROM projects WHERE name = ${projectName}`;if(projects.length===0)throw Error(`Project not found: ${projectName}`);return projects[0].id}function stripEphemeralFields(rows){return rows.map((r)=>{let{checkout_run_id,execution_locked_at,session_id,pane_id,...rest}=r;return rest})}async function exportTaskTable(sql,table,projectId){let alias=TASK_JOIN_ALIASES[table];if(table==="tasks"){let rows=projectId?[...await sql.unsafe("SELECT * FROM tasks WHERE project_id = $1",[projectId])]:[...await sql`SELECT * FROM 
tasks`];return stripEphemeralFields(rows)}if(alias&&projectId)return[...await sql.unsafe(`SELECT ${alias}.* FROM ${table} ${alias} JOIN tasks t ON ${alias}.task_id = t.id WHERE t.project_id = $1`,[projectId])];return[...await sql.unsafe(`SELECT * FROM ${table}`)]}async function exportTasks(sql,projectName){let tables=GROUP_TABLES.tasks,{available,skipped}=await detectTables(sql,tables),data={},projectId=projectName?await resolveProjectId2(sql,projectName):null;for(let table of available)data[table]=await exportTaskTable(sql,table,projectId);return{data,skipped}}async function exportSchedules(sql,name){let{available,skipped}=await detectTables(sql,["schedules"]),data={};if(available.includes("schedules"))if(name)data.schedules=[...await sql`SELECT * FROM schedules WHERE name = ${name}`];else data.schedules=[...await sql`SELECT * FROM schedules`];return{data,skipped}}async function exportTags(sql){let{available,skipped}=await detectTables(sql,["tags"]),data={};if(available.includes("tags"))data.tags=[...await sql`SELECT * FROM tags WHERE name NOT LIKE 'test-%'`];return{data,skipped}}async function exportAll(sql){let allSkipped=[],allData={};for(let group of ALL_GROUPS){let result2;switch(group){case"boards":result2=await exportBoards(sql);break;case"tasks":result2=await exportTasks(sql);break;case"tags":result2=await exportTags(sql);break;case"schedules":result2=await exportSchedules(sql);break;default:result2=await exportGroup(sql,group);break}Object.assign(allData,result2.data),allSkipped.push(...result2.skipped)}return{data:allData,skipped:allSkipped}}async function runExport(groups,type2,exportFn,options){let sql=await getSql(),[version,actor]=await Promise.all([getVersion(),getActorName()]),doc=createExportDocument(type2,groups,version,actor),{data,skipped}=await exportFn(sql);doc.data=data,doc.skippedTables=skipped,outputDocument(doc,options)}function registerExportCommands(program2){let exp=program2.command("export").description("Export genie data as 
JSON").option("--output <file>","Write to file instead of stdout").option("-o <file>","Alias for --output").option("--pretty","Pretty-print JSON").action(async(options)=>{try{if(!options.output)options.output=autoOutputName();await runExport([...ALL_GROUPS],"full",(sql)=>exportAll(sql),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts=(cmd)=>cmd.option("--output <file>","Write to file instead of stdout").option("--pretty","Pretty-print JSON");sharedOpts(exp.command("all").description("Full backup (all present tables)")).action(async(options)=>{try{if(!options.output)options.output=autoOutputName();await runExport([...ALL_GROUPS],"full",(sql)=>exportAll(sql),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("boards [name]").description("Export boards, templates, and task types")).action(async(name,options)=>{try{await runExport(["boards"],"partial",(sql)=>exportBoards(sql,name),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("tasks").description("Export tasks with deps, actors, and stage log").option("--project <name>","Filter by project name")).action(async(options)=>{try{await runExport(["tasks"],"partial",(sql)=>exportTasks(sql,options.project),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("tags").description("Export tags")).action(async(options)=>{try{await runExport(["tags"],"partial",(sql)=>exportTags(sql),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("projects").description("Export projects")).action(async(options)=>{try{await 
runExport(["projects"],"partial",(sql)=>exportGroup(sql,"projects"),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("schedules [name]").description("Export schedules with run_spec")).action(async(name,options)=>{try{await runExport(["schedules"],"partial",(sql)=>exportSchedules(sql,name),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("agents").description("Export agents, templates, and checkpoints")).action(async(options)=>{try{await runExport(["agents"],"partial",(sql)=>exportGroup(sql,"agents"),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("comms").description("Export conversations, messages, mailbox")).action(async(options)=>{try{await runExport(["comms"],"partial",(sql)=>exportGroup(sql,"comms"),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}}),sharedOpts(exp.command("config").description("Export OS config (graceful skip if missing)")).action(async(options)=>{try{await runExport(["config"],"partial",(sql)=>exportGroup(sql,"config"),options)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}})}init_history();init_export_format();import{readFileSync as readFileSync30}from"fs";var 
IMPORT_LEVELS=[["schedules","sessions","projects","agent_templates","agent_checkpoints","tags","task_types","notification_preferences","os_config","golden_images","warm_pool","instances"],["triggers","boards","board_templates","agents","conversations"],["tasks","runs","messages","conversation_members","mailbox","team_chat"],["task_tags","task_actors","task_dependencies","task_stage_log","heartbeats","machine_snapshots"]],SELF_REFERENTIAL_COLUMNS={tasks:"parent_id",messages:"reply_to_id",conversations:"parent_message_id"};function getTableLevel(table){for(let i2=0;i2<IMPORT_LEVELS.length;i2++)if(IMPORT_LEVELS[i2].includes(table))return i2;return-1}function sortByImportOrder(tables){return[...tables].sort((a,b2)=>{let la=getTableLevel(a),lb=getTableLevel(b2);return(la===-1?999:la)-(lb===-1?999:lb)})}function getPrimaryKey(table){return{task_tags:["task_id","tag_id"],task_actors:["task_id","actor_type","actor_id","role"],task_dependencies:["task_id","depends_on_id"],conversation_members:["conversation_id","actor_type","actor_id"],notification_preferences:["actor_type","actor_id","channel"]}[table]??["id"]}var VALID_TABLES=new Set(Object.values(GROUP_TABLES).flat());function assertValidTable(name){if(!VALID_TABLES.has(name))throw Error(`Invalid table name: "${name}" is not in the schema whitelist`)}var VALID_COLUMN_RE=/^[a-zA-Z_][a-zA-Z0-9_]*$/;function assertValidColumnName(name){if(!VALID_COLUMN_RE.test(name))throw Error(`Invalid column name: "${name.slice(0,60)}" contains disallowed characters. 
Column names must match /^[a-zA-Z_][a-zA-Z0-9_]*$/.`)}async function getSql2(){let{getConnection:getConnection2}=await Promise.resolve().then(() => (init_db(),exports_db));return getConnection2()}async function getActorName2(){let{getActor:getActor2}=await Promise.resolve().then(() => (init_audit(),exports_audit));return getActor2()}async function detectTables2(sql,tables){let{filterAvailableTables:filterAvailableTables2}=await Promise.resolve().then(() => exports_table_detect);return filterAvailableTables2(sql,tables)}async function detectConflicts(sql,table,rows){if(rows.length===0)return[];assertValidTable(table);let pk=getPrimaryKey(table);if(pk.length===1){let key=pk[0],ids=rows.map((r)=>r[key]),existing=await sql.unsafe(`SELECT ${key} FROM ${table} WHERE ${key} = ANY($1)`,[ids]),existingSet=new Set(existing.map((r)=>String(r[key])));return rows.filter((r)=>existingSet.has(String(r[key])))}let conflicts=[];for(let row of rows){let conditions=pk.map((col,i2)=>`${col} = $${i2+1}`).join(" AND "),values2=pk.map((col)=>row[col]);if((await sql.unsafe(`SELECT 1 FROM ${table} WHERE ${conditions} LIMIT 1`,values2)).length>0)conflicts.push(row)}return conflicts}function prepareRow(row,table,selfRefUpdates){let selfRefCol=SELF_REFERENTIAL_COLUMNS[table],entries=Object.entries(row),columns=entries.map(([k])=>k),values2=entries.map(([,v])=>v);for(let col of columns)assertValidColumnName(col);if(selfRefCol&&row[selfRefCol]!=null){let idx=columns.indexOf(selfRefCol);if(idx!==-1){let originalSelfRef=values2[idx];values2[idx]=null;let pk=getPrimaryKey(table);selfRefUpdates.push({pk:pk.length===1?row[pk[0]]:pk.map((k)=>row[k]),value:originalSelfRef})}}return{columns,values:values2,quotedCols:columns.map((c)=>`"${c}"`).join(", "),placeholders:values2.map((_,i2)=>`$${i2+1}`).join(", ")}}async function insertOneRow(tx,table,row,prepared,mode){assertValidTable(table);let{quotedCols,placeholders,values:values2}=prepared,pk=getPrimaryKey(table);if(mode==="overwrite"){let 
pkCondition=pk.map((col,i2)=>`"${col}" = $${values2.length+i2+1}`).join(" AND "),pkValues=pk.map((col)=>row[col]);await tx.unsafe(`DELETE FROM ${table} WHERE ${pkCondition}`,pkValues),await tx.unsafe(`INSERT INTO ${table} (${quotedCols}) VALUES (${placeholders})`,values2)}else if(mode==="merge"){let onConflict=pk.map((c)=>`"${c}"`).join(", ");await tx.unsafe(`INSERT INTO ${table} (${quotedCols}) VALUES (${placeholders}) ON CONFLICT (${onConflict}) DO NOTHING`,values2)}else await tx.unsafe(`INSERT INTO ${table} (${quotedCols}) VALUES (${placeholders})`,values2)}async function updateSelfRefs(tx,table,updates){assertValidTable(table);let selfRefCol=SELF_REFERENTIAL_COLUMNS[table],pk=getPrimaryKey(table);if(pk.length!==1)return;for(let{pk:pkVal,value}of updates)await tx.unsafe(`UPDATE ${table} SET "${selfRefCol}" = $1 WHERE "${pk[0]}" = $2`,[value,pkVal])}async function insertRows(tx,table,rows,mode){if(rows.length===0)return 0;let selfRefUpdates=[];for(let row of rows){let prepared=prepareRow(row,table,selfRefUpdates);await insertOneRow(tx,table,row,prepared,mode)}if(selfRefUpdates.length>0)await updateSelfRefs(tx,table,selfRefUpdates);return rows.length}function parseExportFile(filePath){let raw=readFileSync30(filePath,"utf-8"),parsed;try{parsed=JSON.parse(raw)}catch{throw Error(`Invalid JSON in ${filePath}`)}let validation=validateExportDocument(parsed);if(!validation.valid)throw Error(`Invalid export document: ${validation.error}`);return validation.doc}async function filterTablesByGroup(allTables,groupFilter){if(!groupFilter||groupFilter.length===0)return allTables;let{GROUP_TABLES:GROUP_TABLES2}=await Promise.resolve().then(() => (init_export_format(),exports_export_format)),allowedTables=new Set;for(let group of groupFilter){let tables=GROUP_TABLES2[group];if(tables)for(let t of tables)allowedTables.add(t);else console.warn(`Warning: Unknown group "${group}", skipping`)}return allTables.filter((t)=>allowedTables.has(t))}async function 
checkConflicts(sql,tables,data){for(let table of tables){let rows=data[table];if(!rows||rows.length===0)continue;let conflicts=await detectConflicts(sql,table,rows);if(conflicts.length>0){let pk=getPrimaryKey(table),ids=conflicts.slice(0,5).map((r)=>pk.map((k)=>r[k]).join(",")).join("; ");throw Error(`Conflict in table "${table}": ${conflicts.length} existing row(s) (e.g., ${ids}). Use --merge or --overwrite to resolve.`)}}}async function runImport(filePath,mode,groupFilter){let doc=parseExportFile(filePath),tablesToImport=await filterTablesByGroup(Object.keys(doc.data),groupFilter);if(tablesToImport.length===0){console.log("No tables to import.");return}tablesToImport=sortByImportOrder(tablesToImport);let sql=await getSql2(),{available}=await detectTables2(sql,tablesToImport),skippedTables=tablesToImport.filter((t)=>!available.includes(t));if(tablesToImport=available,skippedTables.length>0)console.log(`Skipping tables not in database: ${skippedTables.join(", ")}`);if(mode==="fail")await checkConflicts(sql,tablesToImport,doc.data);let totalInserted=0,tableStats={};await sql.begin(async(tx)=>{for(let table of tablesToImport){let rows=doc.data[table];if(!rows||rows.length===0)continue;let count=await insertRows(tx,table,rows,mode);tableStats[table]=count,totalInserted+=count}});let actor=await getActorName2(),{recordAuditEvent:recordAuditEvent3}=await Promise.resolve().then(() => (init_audit(),exports_audit));await recordAuditEvent3("import",filePath,"import_complete",actor,{mode,tables:tableStats,totalRows:totalInserted,skippedTables,sourceVersion:doc.version,sourceDate:doc.exportedAt}),console.log(`Import complete: ${totalInserted} rows across ${Object.keys(tableStats).length} tables`);for(let[table,count]of Object.entries(tableStats))if(count>0)console.log(` ${table}: ${count} rows`);if(skippedTables.length>0)console.log(`Skipped (not in DB): ${skippedTables.join(", ")}`)}function registerImportCommands(program2){program2.command("import 
<file>").description("Import genie data from JSON export").option("--fail","Abort on any conflict (default)").option("--merge","Skip existing rows, import new ones").option("--overwrite","Replace existing rows with imported data").option("--groups <list>","Comma-separated groups to import (e.g., boards,tags)").action(async(file,options)=>{try{let mode="fail";if(options.overwrite)mode="overwrite";else if(options.merge)mode="merge";let groupFilter=options.groups?.split(",").map((g)=>g.trim());await runImport(file,mode,groupFilter)}catch(error2){console.error(`Error: ${error2 instanceof Error?error2.message:String(error2)}`),process.exit(1)}})}init_esm14();import{existsSync as existsSync52,mkdirSync as mkdirSync23,symlinkSync,writeFileSync as writeFileSync25}from"fs";import{basename as basename10,join as join59,relative as relative4,resolve as resolve9,sep as sep2}from"path";import{cpSync,existsSync as existsSync50,mkdirSync as mkdirSync21,renameSync as renameSync5,rmSync as rmSync4}from"fs";import{join as join56,relative as relative3}from"path";var import_ignore=__toESM(require_ignore(),1);import{existsSync as existsSync49,readFileSync as readFileSync31,readdirSync as readdirSync10,statSync as statSync6}from"fs";import{join as join55,relative as relative2}from"path";var GENIEIGNORE_DEFAULTS=`node_modules
3775
3817
  .git
3776
3818
  .genie/worktrees
3777
3819
  dist
@@ -3784,7 +3826,7 @@ __pycache__
3784
3826
  target
3785
3827
  coverage
3786
3828
  .cache
3787
- `;function loadIgnoreRules(ignoreFilePath){let ig=import_ignore.default();if(existsSync49(ignoreFilePath)){let content=readFileSync31(ignoreFilePath,"utf-8");ig.add(content)}return ig}async function*scanForAgents(root,ignoreFilePath){let ig=loadIgnoreRules(ignoreFilePath??join55(root,".genieignore"));ig.add("agents"),yield*walkDir(root,root,ig)}function*walkDir(dir,root,ig){let names;try{names=readdirSync10(dir)}catch{return}for(let name of names){let fullPath=join55(dir,name);try{if(!statSync5(fullPath).isDirectory())continue}catch{continue}let relPath=`${relative2(root,fullPath)}/`;if(ig.ignores(relPath))continue;let agentsMdPath=join55(fullPath,"AGENTS.md");if(existsSync49(agentsMdPath)){let hasSubAgents=hasSubAgentDirs(fullPath);if(yield{path:fullPath,dirName:name,hasSubAgents,isSubAgent:!1},hasSubAgents)yield*scanSubAgents2(fullPath,name)}yield*walkDir(fullPath,root,ig)}}function hasSubAgentDirs(agentDir){let subAgentsDir=join55(agentDir,".genie","agents");if(!existsSync49(subAgentsDir))return!1;try{return readdirSync10(subAgentsDir).some((name)=>{let subPath=join55(subAgentsDir,name);try{return statSync5(subPath).isDirectory()&&existsSync49(join55(subPath,"AGENTS.md"))}catch{return!1}})}catch{return!1}}function*scanSubAgents2(parentDir,parentName){let subAgentsDir=join55(parentDir,".genie","agents");if(!existsSync49(subAgentsDir))return;let names;try{names=readdirSync10(subAgentsDir)}catch{return}for(let name of names){let subDir=join55(subAgentsDir,name);try{if(!statSync5(subDir).isDirectory())continue}catch{continue}if(!existsSync49(join55(subDir,"AGENTS.md")))continue;yield{path:subDir,dirName:name,hasSubAgents:!1,isSubAgent:!0,parentName}}}async function scanForAgentsAll(root,ignoreFilePath){let results=[];for await(let agent of scanForAgents(root,ignoreFilePath))results.push(agent);return results}init_workspace();async function discoverExternalAgents(workspaceRoot){let allScanned=await scanForAgentsAll(workspaceRoot),canonicalNames=new 
Set(scanAgents(workspaceRoot)),agentsDir=join56(workspaceRoot,"agents"),external2=[];for(let scanned of allScanned){if(scanned.path.startsWith(agentsDir))continue;if(canonicalNames.has(scanned.dirName))continue;external2.push({name:scanned.dirName,path:scanned.path,relativePath:relative3(workspaceRoot,scanned.path),isSubAgent:scanned.isSubAgent,parentName:scanned.parentName})}return external2}function importAgents(workspaceRoot,agents){let agentsDir=join56(workspaceRoot,"agents");mkdirSync21(agentsDir,{recursive:!0});let result2={imported:[],skipped:[],errors:[]};for(let agent of agents){let destName=resolveUniqueName(agentsDir,agent.name),destPath=join56(agentsDir,destName);if(existsSync50(destPath)){result2.skipped.push(agent.name);continue}try{moveDirectory(agent.path,destPath),result2.imported.push(destName)}catch(err){result2.errors.push({name:agent.name,error:err instanceof Error?err.message:String(err)})}}return result2}function moveDirectory(src,dest){try{renameSync5(src,dest)}catch{cpSync(src,dest,{recursive:!0}),rmSync4(src,{recursive:!0,force:!0})}}function resolveUniqueName(agentsDir,name){if(!existsSync50(join56(agentsDir,name)))return name;let suffix=2;while(existsSync50(join56(agentsDir,`${name}-${suffix}`)))suffix++;return`${name}-${suffix}`}init_interactivity();init_defaults();import{readFileSync as readFileSync32,writeFileSync as writeFileSync23}from"fs";import{join as join57}from"path";function formatDefaults(workspaceDefaults){let effective=computeEffectiveDefaults(workspaceDefaults),lines=[];for(let key of Object.keys(BUILTIN_DEFAULTS)){let value=effective[key],source=workspaceDefaults?.[key]!==void 0?"workspace":"built-in";lines.push(` ${key}: ${value} (${source})`)}return lines.join(`
3829
+ `;function loadIgnoreRules(ignoreFilePath){let ig=import_ignore.default();if(existsSync49(ignoreFilePath)){let content=readFileSync31(ignoreFilePath,"utf-8");ig.add(content)}return ig}async function*scanForAgents(root,ignoreFilePath){let ig=loadIgnoreRules(ignoreFilePath??join55(root,".genieignore"));ig.add("agents"),yield*walkDir(root,root,ig)}function*walkDir(dir,root,ig){let names;try{names=readdirSync10(dir)}catch{return}for(let name of names){let fullPath=join55(dir,name);try{if(!statSync6(fullPath).isDirectory())continue}catch{continue}let relPath=`${relative2(root,fullPath)}/`;if(ig.ignores(relPath))continue;let agentsMdPath=join55(fullPath,"AGENTS.md");if(existsSync49(agentsMdPath)){let hasSubAgents=hasSubAgentDirs(fullPath);if(yield{path:fullPath,dirName:name,hasSubAgents,isSubAgent:!1},hasSubAgents)yield*scanSubAgents2(fullPath,name)}yield*walkDir(fullPath,root,ig)}}function hasSubAgentDirs(agentDir){let subAgentsDir=join55(agentDir,".genie","agents");if(!existsSync49(subAgentsDir))return!1;try{return readdirSync10(subAgentsDir).some((name)=>{let subPath=join55(subAgentsDir,name);try{return statSync6(subPath).isDirectory()&&existsSync49(join55(subPath,"AGENTS.md"))}catch{return!1}})}catch{return!1}}function*scanSubAgents2(parentDir,parentName){let subAgentsDir=join55(parentDir,".genie","agents");if(!existsSync49(subAgentsDir))return;let names;try{names=readdirSync10(subAgentsDir)}catch{return}for(let name of names){let subDir=join55(subAgentsDir,name);try{if(!statSync6(subDir).isDirectory())continue}catch{continue}if(!existsSync49(join55(subDir,"AGENTS.md")))continue;yield{path:subDir,dirName:name,hasSubAgents:!1,isSubAgent:!0,parentName}}}async function scanForAgentsAll(root,ignoreFilePath){let results=[];for await(let agent of scanForAgents(root,ignoreFilePath))results.push(agent);return results}init_workspace();async function discoverExternalAgents(workspaceRoot){let allScanned=await scanForAgentsAll(workspaceRoot),canonicalNames=new 
Set(scanAgents(workspaceRoot)),agentsDir=join56(workspaceRoot,"agents"),external2=[];for(let scanned of allScanned){if(scanned.path.startsWith(agentsDir))continue;if(canonicalNames.has(scanned.dirName))continue;external2.push({name:scanned.dirName,path:scanned.path,relativePath:relative3(workspaceRoot,scanned.path),isSubAgent:scanned.isSubAgent,parentName:scanned.parentName})}return external2}function importAgents(workspaceRoot,agents){let agentsDir=join56(workspaceRoot,"agents");mkdirSync21(agentsDir,{recursive:!0});let result2={imported:[],skipped:[],errors:[]};for(let agent of agents){let destName=resolveUniqueName(agentsDir,agent.name),destPath=join56(agentsDir,destName);if(existsSync50(destPath)){result2.skipped.push(agent.name);continue}try{moveDirectory(agent.path,destPath),result2.imported.push(destName)}catch(err){result2.errors.push({name:agent.name,error:err instanceof Error?err.message:String(err)})}}return result2}function moveDirectory(src,dest){try{renameSync5(src,dest)}catch{cpSync(src,dest,{recursive:!0}),rmSync4(src,{recursive:!0,force:!0})}}function resolveUniqueName(agentsDir,name){if(!existsSync50(join56(agentsDir,name)))return name;let suffix=2;while(existsSync50(join56(agentsDir,`${name}-${suffix}`)))suffix++;return`${name}-${suffix}`}init_interactivity();init_defaults();import{readFileSync as readFileSync32,writeFileSync as writeFileSync23}from"fs";import{join as join57}from"path";function formatDefaults(workspaceDefaults){let effective=computeEffectiveDefaults(workspaceDefaults),lines=[];for(let key of Object.keys(BUILTIN_DEFAULTS)){let value=effective[key],source=workspaceDefaults?.[key]!==void 0?"workspace":"built-in";lines.push(` ${key}: ${value} (${source})`)}return lines.join(`
3788
3830
  `)}function formatWelcome(ctx){let lines=["",` Workspace: ${ctx.workspaceName}`,` Agents: ${ctx.canonicalAgentCount} registered`];if(ctx.discovered.length>0)lines.push(` Discovered: ${ctx.discovered.length} external agent(s) found`);return lines.push(""),lines.push(" Effective defaults:"),lines.push(formatDefaults(ctx.config.agents?.defaults)),lines.push(""),lines.join(`
3789
3831
  `)}function formatNextSteps(ctx){let lines=[""," Next steps:"];if(ctx.canonicalAgentCount===0)lines.push(" genie init agent <name> Scaffold your first agent");return lines.push(" genie spawn <agent> Launch an agent"),lines.push(" genie team create <name> Create a multi-agent team"),lines.push(" /wizard Full guided onboarding"),lines.push(""),lines.join(`
3790
3832
  `)}var MODEL_CHOICES=[{name:"opus (most capable)",value:"opus"},{name:"sonnet (balanced)",value:"sonnet"},{name:"haiku (fastest)",value:"haiku"}];async function runMiniWizard(ctx){let{confirm:confirm2}=await Promise.resolve().then(() => (init_esm14(),exports_esm));console.log(formatWelcome(ctx));let wantCustomize=await confirm2({message:"Customize workspace defaults?",default:!1}),result2={customized:!1,importedAgents:[],completed:!0};if(wantCustomize){let newDefaults=await customizeDefaults(ctx.config.agents?.defaults);if(newDefaults)result2.customized=!0,result2.defaults=newDefaults,persistDefaults(ctx.workspaceRoot,newDefaults)}if(ctx.pending.length>0){console.log(`
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@automagik/genie",
3
- "version": "4.260420.17",
3
+ "version": "4.260421.1",
4
4
  "description": "Collaborative terminal toolkit for human + AI workflows",
5
5
  "type": "module",
6
6
  "bin": {
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "genie",
3
- "version": "4.260420.17",
3
+ "version": "4.260421.1",
4
4
  "description": "Human-AI partnership for Claude Code. Share a terminal, orchestrate workers, evolve together. Brainstorm ideas, turn them into wishes, execute with /work, validate with /review, and ship as one team.",
5
5
  "author": {
6
6
  "name": "Namastex Labs"
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "genie-plugin",
3
- "version": "4.260420.17",
3
+ "version": "4.260421.1",
4
4
  "private": true,
5
5
  "description": "Runtime dependencies for genie bundled CLIs",
6
6
  "type": "module",