@automagik/genie 4.260421.13 → 4.260421.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/genie.js
CHANGED
@@ -1819,9 +1819,10 @@ ${body}`;writeFileSync12(filePath,output,"utf-8")}function serializeSdkConfig(sd
INSERT INTO sessions (id, agent_id, executor_id, team, wish_slug, task_id, role, project_path, jsonl_path, status, last_ingested_offset, total_turns, parent_session_id, is_subagent, file_size, file_mtime)
VALUES (${sessionId}, ${worker?.agentId??null}, ${worker?.executorId??null}, ${worker?.team??null}, ${worker?.wishSlug??null}, ${worker?.taskId??null}, ${worker?.role??null}, ${projectPath}, ${jsonlPath}, ${worker?"active":"orphaned"}, 0, 0, ${parentSessionId}, ${opts?.isSubagent??!1}, ${opts?.fileSize??0}, ${opts?.mtime??0})
ON CONFLICT (id) DO NOTHING
- `,workerToContext(worker)}async function reconcileSubagentParents(sql){
+ `,workerToContext(worker)}async function reconcileSubagentParents(sql){let linkResult=await sql`
UPDATE sessions s
- SET parent_session_id = p.id
+ SET parent_session_id = p.id,
+ updated_at = now()
FROM sessions p
WHERE s.is_subagent = true
AND s.parent_session_id IS NULL
@@ -1831,7 +1832,35 @@ ${body}`;writeFileSync12(filePath,output,"utf-8")}function serializeSdkConfig(sd
'.*/',
''
)
-
+ `,metaResult=await sql`
+ UPDATE sessions s
+ SET
+ agent_id = COALESCE(s.agent_id, p.agent_id),
+ executor_id = COALESCE(
+ s.executor_id,
+ CASE
+ WHEN s.agent_id IS NULL OR s.agent_id = p.agent_id THEN p.executor_id
+ ELSE NULL
+ END
+ ),
+ team = COALESCE(s.team, p.team),
+ wish_slug = COALESCE(s.wish_slug, p.wish_slug),
+ task_id = COALESCE(s.task_id, p.task_id),
+ role = COALESCE(s.role, p.role),
+ updated_at = now()
+ FROM sessions p
+ WHERE s.is_subagent = true
+ AND s.parent_session_id = p.id
+ AND (
+ (s.agent_id IS NULL AND p.agent_id IS NOT NULL) OR
+ (s.executor_id IS NULL AND p.executor_id IS NOT NULL
+ AND (s.agent_id IS NULL OR s.agent_id = p.agent_id)) OR
+ (s.team IS NULL AND p.team IS NOT NULL) OR
+ (s.wish_slug IS NULL AND p.wish_slug IS NOT NULL) OR
+ (s.task_id IS NULL AND p.task_id IS NOT NULL) OR
+ (s.role IS NULL AND p.role IS NOT NULL)
+ )
+
`;return{linked:linkResult.count??0,metadataFilled:metaResult.count??0}}function stringifyInput(input){if(!input)return null;return typeof input==="string"?input:JSON.stringify(input)}function extractBlockOutput(block){if(typeof block.content==="string")return block.content;return block.content?extractTextContent(block.content):null}function extractContentBlocks(entry2){if(!entry2.message?.content||!Array.isArray(entry2.message.content))return[];return entry2.message.content.filter((b2)=>typeof b2==="object"&&b2!==null)}function buildToolEventRow(pending,toolUseId,output,isError,ctx){return{session_id:ctx.sessionId,turn_index:pending.turnIndex,timestamp:pending.timestamp,tool_name:pending.name,sub_tool:extractSubTool(pending.name,pending.input),tool_use_id:toolUseId,input_raw:stringifyInput(pending.input),output_raw:output,is_error:isError,error_message:isError?output?.slice(0,1000)??null:null,duration_ms:null,agent_id:ctx.agentId,team:ctx.team,wish_slug:ctx.wishSlug,task_id:ctx.taskId}}function processToolUseBlocks(blocks,sessionId,turnIndex,timestamp2,contentRows,pendingToolUses){let idx=turnIndex;for(let block of blocks){if(block.type!=="tool_use"||!block.name||!block.id)continue;contentRows.push({session_id:sessionId,turn_index:idx,role:"tool_input",content:stringifyInput(block.input)??"",tool_name:block.name,timestamp:timestamp2}),idx++,pendingToolUses.set(block.id,{name:block.name,input:block.input,turnIndex:idx-1,timestamp:timestamp2})}return idx}function processToolResultBlocks(blocks,sessionId,turnIndex,timestamp2,contentRows,toolEvents,pendingToolUses,ctx){let idx=turnIndex;for(let block of blocks){if(block.type!=="tool_result"||!block.tool_use_id)continue;let output=extractBlockOutput(block);if(output&&output.length>0)contentRows.push({session_id:sessionId,turn_index:idx,role:"tool_output",content:output,tool_name:null,timestamp:timestamp2}),idx++;let pending=pendingToolUses.get(block.tool_use_id);if(pending){let isError=block.is_error===!0||typeof output==="string"&&output.includes("<tool_use_error>");toolEvents.push(buildToolEventRow(pending,block.tool_use_id,output,isError,ctx)),pendingToolUses.delete(block.tool_use_id)}}return idx}function parseJsonlChunk(data,sessionId,startTurnIndex,context){let contentRows=[],toolEvents=[],turnIndex=startTurnIndex,pendingToolUses=new Map,ctx={sessionId,...context};for(let line of data.split(`
`)){if(!line.trim())continue;let entry2;try{entry2=JSON.parse(line)}catch{continue}if(entry2.type!=="assistant")continue;let timestamp2=entry2.timestamp??new Date().toISOString(),blocks=extractContentBlocks(entry2),text=extractTextContent(entry2.message?.content);if(text&&text.length>0)contentRows.push({session_id:sessionId,turn_index:turnIndex,role:"assistant",content:text,tool_name:null,timestamp:timestamp2}),turnIndex++;turnIndex=processToolUseBlocks(blocks,sessionId,turnIndex,timestamp2,contentRows,pendingToolUses),turnIndex=processToolResultBlocks(blocks,sessionId,turnIndex,timestamp2,contentRows,toolEvents,pendingToolUses,ctx)}for(let[toolUseId,pending]of pendingToolUses)toolEvents.push(buildToolEventRow(pending,toolUseId,null,!1,ctx));return{contentRows,toolEvents,turnCount:turnIndex-startTurnIndex}}async function batchInsertContent(sql,rows){if(rows.length===0)return;await sql`
INSERT INTO session_content (session_id, turn_index, role, content, tool_name, timestamp)
SELECT * FROM unnest(
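Taken together, the two hunks above turn reconcileSubagentParents from a single fire-and-forget UPDATE into a two-step pass: the linking query now also touches updated_at and its row count is captured as linkResult, and a new second query backfills missing metadata onto subagent sessions from their parent before the function returns { linked, metadataFilled }. Below is a hand de-minified sketch of the new metadata query only; the name inheritParentMetadata and the Sql typings are illustrative assumptions (a postgres-style tagged-template client whose results expose a count of affected rows), and the linking query is omitted because part of its WHERE clause falls outside the visible context.

// Sketch only, not the shipped source. Assumed client shape:
type SqlResult = Record<string, unknown>[] & { count?: number };
type Sql = (strings: TemplateStringsArray, ...values: unknown[]) => Promise<SqlResult>;

// Copy missing metadata from a parent session down to its already-linked subagents.
// executor_id is only inherited when the child has no agent_id of its own or shares
// the parent's agent_id, so a different agent never inherits the parent's executor.
async function inheritParentMetadata(sql: Sql): Promise<number> {
  const metaResult = await sql`
    UPDATE sessions s
    SET agent_id = COALESCE(s.agent_id, p.agent_id),
        executor_id = COALESCE(
          s.executor_id,
          CASE WHEN s.agent_id IS NULL OR s.agent_id = p.agent_id
               THEN p.executor_id ELSE NULL END
        ),
        team = COALESCE(s.team, p.team),
        wish_slug = COALESCE(s.wish_slug, p.wish_slug),
        task_id = COALESCE(s.task_id, p.task_id),
        role = COALESCE(s.role, p.role),
        updated_at = now()
    FROM sessions p
    WHERE s.is_subagent = true
      AND s.parent_session_id = p.id
      AND (
        (s.agent_id IS NULL AND p.agent_id IS NOT NULL) OR
        (s.executor_id IS NULL AND p.executor_id IS NOT NULL
          AND (s.agent_id IS NULL OR s.agent_id = p.agent_id)) OR
        (s.team IS NULL AND p.team IS NOT NULL) OR
        (s.wish_slug IS NULL AND p.wish_slug IS NOT NULL) OR
        (s.task_id IS NULL AND p.task_id IS NOT NULL) OR
        (s.role IS NULL AND p.role IS NOT NULL)
      )
  `;
  return metaResult.count ?? 0; // reported as metadataFilled by reconcileSubagentParents
}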
@@ -1879,7 +1908,7 @@ ${body}`;writeFileSync12(filePath,output,"utf-8")}function serializeSdkConfig(sd
processed_bytes = ${progress.processedBytes},
errors = ${progress.errors},
updated_at = now()
-
`}async function shouldSkipBackfill(sql){try{let existing=await sql`SELECT status FROM session_sync WHERE id = 'backfill'`;if(existing.length>0&&existing[0].status==="complete")return!0}catch{}try{let[{count}]=await sql`SELECT count(*)::int as count FROM sessions`;if(count>0){let existing=await sql`SELECT status FROM session_sync WHERE id = 'backfill'`;if(existing.length===0||existing[0].status==="complete")return!0}}catch{return!0}return!1}async function yieldToLiveWork(){while(liveWorkPending)await sleep2(LIVE_YIELD_POLL_MS)}async function getFileStartOffset(sql,file){let existing=await sql`SELECT last_ingested_offset FROM sessions WHERE id = ${file.sessionId}`;if(existing.length>0)return existing[0].last_ingested_offset??0;return 0}async function processBackfillFile(sql,file,progress,workerMap){let offset=await getFileStartOffset(sql,file);if(offset>=file.fileSize){progress.processedFiles++,progress.processedBytes+=file.fileSize;return}let currentOffset=offset;while(currentOffset<file.fileSize){await yieldToLiveWork();let result2=await ingestFile(sql,file.sessionId,file.jsonlPath,file.projectPath,currentOffset,{chunkSize:CHUNK_SIZE,parentSessionId:file.parentSessionId,isSubagent:file.isSubagent,fileSize:file.fileSize,mtime:file.mtime,workerMap});if(result2.newOffset<=currentOffset)break;progress.processedBytes+=result2.newOffset-currentOffset,currentOffset=result2.newOffset}progress.processedFiles++}async function processAllFiles(sql,allFiles,progress,workerMap){for(let file of allFiles){if(!running)break;await yieldToLiveWork();try{await processBackfillFile(sql,file,progress,workerMap)}catch(err){progress.errors++;let message=err instanceof Error?err.message:String(err);console.error(`[backfill] error on ${file.jsonlPath}: ${message}`)}if(progress.processedFiles%50===0)await updateSyncState(sql,progress);await sleep2(SLEEP_BETWEEN_FILES_MS)}}function resolveBackfillStatus(progress){if(!running)progress.status="paused",console.log(`[backfill] paused: ${progress.processedFiles}/${progress.totalFiles} files (will resume on next daemon start)`);else if(progress.errors>0&&progress.errors>=progress.totalFiles)progress.status="failed",console.error(`[backfill] failed: ${progress.errors}/${progress.totalFiles} files errored \u2014 will retry on next daemon start`);else progress.status="complete",console.log(`[backfill] complete: ${progress.processedFiles}/${progress.totalFiles} files, ${progress.errors} errors`)}async function startBackfill(sql){if(running)return;if(await shouldSkipBackfill(sql))return;running=!0,console.log("[backfill] starting session backfill...");try{let allFiles=await discoverAllJsonlFiles();allFiles.sort(compareBackfillFiles);let totalBytes=allFiles.reduce((sum,f)=>sum+f.fileSize,0),progress={totalFiles:allFiles.length,processedFiles:0,totalBytes,processedBytes:0,errors:0,status:"running"};await updateSyncState(sql,progress),console.log(`[backfill] discovered ${allFiles.length} files (${(totalBytes/1024/1024).toFixed(1)} MB)`);let workerMap=await buildWorkerMap(sql);await processAllFiles(sql,allFiles,progress,workerMap);try{let
+
`}async function shouldSkipBackfill(sql){try{let existing=await sql`SELECT status FROM session_sync WHERE id = 'backfill'`;if(existing.length>0&&existing[0].status==="complete")return!0}catch{}try{let[{count}]=await sql`SELECT count(*)::int as count FROM sessions`;if(count>0){let existing=await sql`SELECT status FROM session_sync WHERE id = 'backfill'`;if(existing.length===0||existing[0].status==="complete")return!0}}catch{return!0}return!1}async function yieldToLiveWork(){while(liveWorkPending)await sleep2(LIVE_YIELD_POLL_MS)}async function getFileStartOffset(sql,file){let existing=await sql`SELECT last_ingested_offset FROM sessions WHERE id = ${file.sessionId}`;if(existing.length>0)return existing[0].last_ingested_offset??0;return 0}async function processBackfillFile(sql,file,progress,workerMap){let offset=await getFileStartOffset(sql,file);if(offset>=file.fileSize){progress.processedFiles++,progress.processedBytes+=file.fileSize;return}let currentOffset=offset;while(currentOffset<file.fileSize){await yieldToLiveWork();let result2=await ingestFile(sql,file.sessionId,file.jsonlPath,file.projectPath,currentOffset,{chunkSize:CHUNK_SIZE,parentSessionId:file.parentSessionId,isSubagent:file.isSubagent,fileSize:file.fileSize,mtime:file.mtime,workerMap});if(result2.newOffset<=currentOffset)break;progress.processedBytes+=result2.newOffset-currentOffset,currentOffset=result2.newOffset}progress.processedFiles++}async function processAllFiles(sql,allFiles,progress,workerMap){for(let file of allFiles){if(!running)break;await yieldToLiveWork();try{await processBackfillFile(sql,file,progress,workerMap)}catch(err){progress.errors++;let message=err instanceof Error?err.message:String(err);console.error(`[backfill] error on ${file.jsonlPath}: ${message}`)}if(progress.processedFiles%50===0)await updateSyncState(sql,progress);await sleep2(SLEEP_BETWEEN_FILES_MS)}}function resolveBackfillStatus(progress){if(!running)progress.status="paused",console.log(`[backfill] paused: ${progress.processedFiles}/${progress.totalFiles} files (will resume on next daemon start)`);else if(progress.errors>0&&progress.errors>=progress.totalFiles)progress.status="failed",console.error(`[backfill] failed: ${progress.errors}/${progress.totalFiles} files errored \u2014 will retry on next daemon start`);else progress.status="complete",console.log(`[backfill] complete: ${progress.processedFiles}/${progress.totalFiles} files, ${progress.errors} errors`)}async function startBackfill(sql){if(running)return;if(await shouldSkipBackfill(sql))return;running=!0,console.log("[backfill] starting session backfill...");try{let allFiles=await discoverAllJsonlFiles();allFiles.sort(compareBackfillFiles);let totalBytes=allFiles.reduce((sum,f)=>sum+f.fileSize,0),progress={totalFiles:allFiles.length,processedFiles:0,totalBytes,processedBytes:0,errors:0,status:"running"};await updateSyncState(sql,progress),console.log(`[backfill] discovered ${allFiles.length} files (${(totalBytes/1024/1024).toFixed(1)} MB)`);let workerMap=await buildWorkerMap(sql);await processAllFiles(sql,allFiles,progress,workerMap);try{let{linked,metadataFilled}=await reconcileSubagentParents(sql);if(linked>0)console.log(`[backfill] reconciled parent_session_id for ${linked} subagent(s)`);if(metadataFilled>0)console.log(`[backfill] inherited parent metadata for ${metadataFilled} subagent(s)`)}catch(err){let message=err instanceof Error?err.message:String(err);console.warn(`[backfill] parent reconcile skipped: ${message}`)}resolveBackfillStatus(progress),await 
updateSyncState(sql,progress)}catch(err){let message=err instanceof Error?err.message:String(err);console.error(`[backfill] fatal error: ${message}`)}finally{running=!1}}function stopBackfill(){running=!1}async function getBackfillStatus(sql){try{let rows=await sql`SELECT * FROM session_sync WHERE id = 'backfill'`;if(rows.length===0)return null;let row=rows[0];return{totalFiles:row.total_files,processedFiles:row.processed_files,totalBytes:row.total_bytes,processedBytes:row.processed_bytes,errors:row.errors,status:row.status}}catch{return null}}var CHUNK_SIZE=65536,SLEEP_BETWEEN_FILES_MS=100,LIVE_YIELD_POLL_MS=200,running=!1;var init_session_backfill=__esm(()=>{init_session_capture()});var exports_scheduler_daemon={};__export(exports_scheduler_daemon,{terminalizeCleanExitUnverified:()=>terminalizeCleanExitUnverified,startDaemon:()=>startDaemon,runAgentRecoveryPass:()=>runAgentRecoveryPass,recoverOnStartup:()=>recoverOnStartup,reconcileUnresumable:()=>reconcileUnresumable,reconcileOrphans:()=>reconcileOrphans,reconcileOrphanedRuns:()=>reconcileOrphanedRuns,reclaimExpiredLeases:()=>reclaimExpiredLeases,processMailboxRetryMessage:()=>processMailboxRetryMessage,logToFile:()=>logToFile,logReconcilerMode:()=>logReconcilerMode,isTurnAwareReconcilerEnabled:()=>isTurnAwareReconcilerEnabled,fireTrigger:()=>fireTrigger,emitWorkerEvents:()=>emitWorkerEvents,collectMachineSnapshot:()=>collectMachineSnapshot,collectHeartbeats:()=>collectHeartbeats,claimDueTriggers:()=>claimDueTriggers,attemptAgentResume:()=>attemptAgentResume,_resetWorkerStatesForTesting:()=>_resetWorkerStatesForTesting,TURN_AWARE_RECONCILER_FLAG:()=>TURN_AWARE_RECONCILER_FLAG,MAX_DELIVERY_ATTEMPTS:()=>MAX_DELIVERY_ATTEMPTS,ESCALATION_RECIPIENT:()=>ESCALATION_RECIPIENT});import{randomUUID as randomUUID9}from"crypto";import{appendFileSync as appendFileSync3,mkdirSync as mkdirSync12}from"fs";import{homedir as homedir26}from"os";import{join as join40}from"path";function isTurnAwareReconcilerEnabled(env=process.env){let raw=env[TURN_AWARE_RECONCILER_FLAG];if(raw===void 0)return!0;let v=raw.trim().toLowerCase();if(v==="")return!0;if(v==="0"||v==="false"||v==="no")return!1;if(v==="1"||v==="true"||v==="yes")return!0;return!0}function logReconcilerMode(deps,daemonId){let enabled=isTurnAwareReconcilerEnabled();deps.log({timestamp:deps.now().toISOString(),level:"info",event:enabled?"reconciler_mode_turn_aware":"reconciler_mode_legacy",daemon_id:daemonId,flag:TURN_AWARE_RECONCILER_FLAG,enabled,message:enabled?"turn-aware reconciler enabled":"flag off, using legacy reconciler"})}function getLogDir2(){return join40(process.env.GENIE_HOME??join40(homedir26(),".genie"),"logs")}function getLogFile(){return join40(getLogDir2(),"scheduler.log")}function logToFile(entry2){let logDir=getLogDir2();mkdirSync12(logDir,{recursive:!0});let enriched=entry2.trace_id?entry2:withAmbientTraceId(entry2);appendFileSync3(getLogFile(),`${JSON.stringify(enriched)}
`)}function withAmbientTraceId(entry2){let ctx=getAmbient();if(!ctx)return entry2;return{...entry2,trace_id:ctx.trace_id}}async function defaultSpawnCommand(command,env){return{pid:Bun.spawn(["sh","-c",command],{env:{...process.env,...env},stdio:["ignore","ignore","ignore"]}).pid}}function defaultJitter(maxMs){return Math.floor(Math.random()*maxMs)}function defaultSleep(ms){return new Promise((resolve5)=>setTimeout(resolve5,ms))}async function defaultIsPaneAlive(paneId){let{isPaneAlive:isPaneAlive2}=await Promise.resolve().then(() => (init_tmux(),exports_tmux));return isPaneAlive2(paneId)}async function defaultListWorkers(){let{list:list2}=await Promise.resolve().then(() => (init_agent_registry(),exports_agent_registry));return(await list2()).map((a)=>({id:a.id,paneId:a.paneId,repoPath:a.repoPath,state:a.state,team:a.team,wishSlug:a.wishSlug,groupNumber:a.groupNumber,autoResume:a.autoResume,resumeAttempts:a.resumeAttempts,maxResumeAttempts:a.maxResumeAttempts,lastResumeAttempt:a.lastResumeAttempt,claudeSessionId:a.claudeSessionId}))}async function defaultPublishEvent(subject,data,repoPath){let payload=data,{publishSubjectEvent:publishSubjectEvent2}=await Promise.resolve().then(() => (init_runtime_events(),exports_runtime_events));await publishSubjectEvent2(repoPath,subject,{timestamp:payload.timestamp,kind:payload.kind??"system",agent:payload.agent??"scheduler",team:payload.team,direction:payload.direction,peer:payload.peer,text:payload.text??subject,data:payload.data,source:payload.source??"registry"})}async function defaultCountTmuxSessions(){try{let{execSync:execSync10}=await import("child_process"),{genieTmuxCmd:genieTmuxCmd2}=await Promise.resolve().then(() => (init_tmux_wrapper(),exports_tmux_wrapper));return execSync10(`${genieTmuxCmd2("list-sessions")} 2>/dev/null`,{encoding:"utf-8"}).trim().split(`
`).filter(Boolean).length}catch{return 0}}async function defaultResumeAgent(agentId){try{let{execSync:execSync10}=await import("child_process");return execSync10(`genie agent resume ${agentId} --no-reset-attempts`,{encoding:"utf-8",stdio:["pipe","pipe","pipe"]}),!0}catch{return!1}}async function defaultUpdateAgent(agentId,updates){let{update:update2}=await Promise.resolve().then(() => (init_agent_registry(),exports_agent_registry));await update2(agentId,updates)}function createDefaultDeps(){return{getConnection:async()=>{let{getConnection:getConnection2}=await Promise.resolve().then(() => (init_db(),exports_db));return getConnection2()},spawnCommand:defaultSpawnCommand,log:logToFile,generateId:randomUUID9,now:()=>new Date,sleep:defaultSleep,jitter:defaultJitter,isPaneAlive:defaultIsPaneAlive,listWorkers:defaultListWorkers,countTmuxSessions:defaultCountTmuxSessions,publishEvent:defaultPublishEvent,resumeAgent:defaultResumeAgent,updateAgent:defaultUpdateAgent}}function resolveConfig(overrides){let envMax=process.env.GENIE_MAX_CONCURRENT,maxConcurrent=envMax?Number.parseInt(envMax,10):5;return{maxConcurrent:overrides?.maxConcurrent??(Number.isNaN(maxConcurrent)?5:maxConcurrent),pollIntervalMs:overrides?.pollIntervalMs??30000,maxJitterMs:overrides?.maxJitterMs??30000,jitterThreshold:overrides?.jitterThreshold??3,heartbeatIntervalMs:overrides?.heartbeatIntervalMs??60000,orphanCheckIntervalMs:overrides?.orphanCheckIntervalMs??300000,deadHeartbeatThreshold:overrides?.deadHeartbeatThreshold??2,leaseRecoveryIntervalMs:overrides?.leaseRecoveryIntervalMs??60000}}async function claimDueTriggers(deps,config,daemonId){let sql=await deps.getConnection(),now=deps.now(),leaseUntil=new Date(now.getTime()+300000),runningCount=(await sql`
SELECT count(*)::int AS cnt FROM runs
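The third hunk wires the reconcile pass into the backfill daemon: after processAllFiles finishes, startBackfill now calls reconcileSubagentParents, logs the linked and metadataFilled counts, and downgrades any failure to a warning so the backfill still resolves its status. A hand de-minified sketch of that fragment follows; the wrapper name runParentReconcile is illustrative, and reconcileSubagentParents is declared here only so the sketch stands alone.

// Sketch only, not the shipped source.
declare function reconcileSubagentParents(
  sql: unknown
): Promise<{ linked: number; metadataFilled: number }>;

// Post-backfill reconcile step as added in 4.260421.14: failures are non-fatal.
async function runParentReconcile(sql: unknown): Promise<void> {
  try {
    const { linked, metadataFilled } = await reconcileSubagentParents(sql);
    if (linked > 0)
      console.log(`[backfill] reconciled parent_session_id for ${linked} subagent(s)`);
    if (metadataFilled > 0)
      console.log(`[backfill] inherited parent metadata for ${metadataFilled} subagent(s)`);
  } catch (err) {
    // Only warn; the backfill status is still resolved and persisted afterwards.
    const message = err instanceof Error ? err.message : String(err);
    console.warn(`[backfill] parent reconcile skipped: ${message}`);
  }
}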
package/package.json
CHANGED
@@ -1,6 +1,6 @@
{
"name": "genie",
- "version": "4.260421.13",
+ "version": "4.260421.14",
"description": "Human-AI partnership for Claude Code. Share a terminal, orchestrate workers, evolve together. Brainstorm ideas, turn them into wishes, execute with /work, validate with /review, and ship as one team.",
"author": {
"name": "Namastex Labs"