@stackmemoryai/stackmemory 0.5.64 → 0.5.67
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +69 -346
- package/bin/claude-sm +1 -1
- package/bin/claude-smd +1 -1
- package/bin/codex-sm +6 -0
- package/bin/codex-smd +1 -1
- package/bin/opencode-sm +1 -1
- package/dist/src/cli/claude-sm.js +162 -25
- package/dist/src/cli/claude-sm.js.map +2 -2
- package/dist/src/cli/commands/ping.js +14 -0
- package/dist/src/cli/commands/ping.js.map +7 -0
- package/dist/src/cli/commands/ralph.js +103 -1
- package/dist/src/cli/commands/ralph.js.map +2 -2
- package/dist/src/cli/commands/retrieval.js +1 -1
- package/dist/src/cli/commands/retrieval.js.map +2 -2
- package/dist/src/cli/commands/skills.js +300 -1
- package/dist/src/cli/commands/skills.js.map +2 -2
- package/dist/src/cli/index.js +362 -20
- package/dist/src/cli/index.js.map +2 -2
- package/dist/src/core/digest/types.js +1 -1
- package/dist/src/core/digest/types.js.map +1 -1
- package/dist/src/core/extensions/provider-adapter.js +2 -5
- package/dist/src/core/extensions/provider-adapter.js.map +2 -2
- package/dist/src/core/retrieval/llm-provider.js +2 -2
- package/dist/src/core/retrieval/llm-provider.js.map +1 -1
- package/dist/src/core/retrieval/types.js +1 -1
- package/dist/src/core/retrieval/types.js.map +1 -1
- package/dist/src/features/sweep/pty-wrapper.js +15 -5
- package/dist/src/features/sweep/pty-wrapper.js.map +2 -2
- package/dist/src/features/workers/tmux-manager.js +71 -0
- package/dist/src/features/workers/tmux-manager.js.map +7 -0
- package/dist/src/features/workers/worker-registry.js +52 -0
- package/dist/src/features/workers/worker-registry.js.map +7 -0
- package/dist/src/integrations/linear/webhook-handler.js +82 -0
- package/dist/src/integrations/linear/webhook-handler.js.map +2 -2
- package/dist/src/integrations/mcp/pending-utils.js +33 -0
- package/dist/src/integrations/mcp/pending-utils.js.map +7 -0
- package/dist/src/integrations/mcp/server.js +571 -1
- package/dist/src/integrations/mcp/server.js.map +2 -2
- package/dist/src/integrations/ralph/patterns/oracle-worker-pattern.js +2 -2
- package/dist/src/integrations/ralph/patterns/oracle-worker-pattern.js.map +2 -2
- package/dist/src/orchestrators/multimodal/constants.js +17 -0
- package/dist/src/orchestrators/multimodal/constants.js.map +7 -0
- package/dist/src/orchestrators/multimodal/harness.js +292 -0
- package/dist/src/orchestrators/multimodal/harness.js.map +7 -0
- package/dist/src/orchestrators/multimodal/providers.js +98 -0
- package/dist/src/orchestrators/multimodal/providers.js.map +7 -0
- package/dist/src/orchestrators/multimodal/types.js +5 -0
- package/dist/src/orchestrators/multimodal/types.js.map +7 -0
- package/dist/src/orchestrators/multimodal/utils.js +25 -0
- package/dist/src/orchestrators/multimodal/utils.js.map +7 -0
- package/dist/src/skills/claude-skills.js +116 -1
- package/dist/src/skills/claude-skills.js.map +2 -2
- package/dist/src/skills/linear-task-runner.js +262 -0
- package/dist/src/skills/linear-task-runner.js.map +7 -0
- package/dist/src/skills/recursive-agent-orchestrator.js +114 -85
- package/dist/src/skills/recursive-agent-orchestrator.js.map +2 -2
- package/dist/src/skills/spec-generator-skill.js +441 -0
- package/dist/src/skills/spec-generator-skill.js.map +7 -0
- package/package.json +14 -9
- package/scripts/claude-code-wrapper.sh +18 -30
- package/scripts/demos/ralph-integration-demo.ts +14 -13
- package/scripts/demos/trace-demo.ts +7 -21
- package/scripts/demos/trace-test.ts +20 -8
- package/scripts/install-claude-hooks.sh +2 -2
- package/scripts/verify-dist.cjs +83 -0
- package/templates/claude-hooks/post-edit-sweep.js +7 -10
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../../../src/core/retrieval/llm-provider.ts"],
|
|
4
|
-
"sourcesContent": ["/**\n * LLM Provider Implementation for Context Retrieval\n * Provides real Anthropic API integration for intelligent context analysis\n */\n\nimport Anthropic from '@anthropic-ai/sdk';\nimport { logger } from '../monitoring/logger.js';\n\n/**\n * LLM provider interface for context analysis\n */\nexport interface LLMProvider {\n analyze(prompt: string, maxTokens: number): Promise<string>;\n}\n\n/**\n * Configuration for Anthropic LLM provider\n */\nexport interface AnthropicProviderConfig {\n apiKey: string;\n model?: string;\n temperature?: number;\n maxRetries?: number;\n timeout?: number;\n}\n\n/**\n * Real Anthropic LLM provider using the official SDK\n */\nexport class AnthropicLLMProvider implements LLMProvider {\n private client: Anthropic;\n private model: string;\n private temperature: number;\n private maxRetries: number;\n private timeout: number;\n\n constructor(config: AnthropicProviderConfig) {\n this.client = new Anthropic({\n apiKey: config.apiKey,\n });\n this.model = config.model || 'claude-3-haiku-
|
|
4
|
+
"sourcesContent": ["/**\n * LLM Provider Implementation for Context Retrieval\n * Provides real Anthropic API integration for intelligent context analysis\n */\n\nimport Anthropic from '@anthropic-ai/sdk';\nimport { logger } from '../monitoring/logger.js';\n\n/**\n * LLM provider interface for context analysis\n */\nexport interface LLMProvider {\n analyze(prompt: string, maxTokens: number): Promise<string>;\n}\n\n/**\n * Configuration for Anthropic LLM provider\n */\nexport interface AnthropicProviderConfig {\n apiKey: string;\n model?: string;\n temperature?: number;\n maxRetries?: number;\n timeout?: number;\n}\n\n/**\n * Real Anthropic LLM provider using the official SDK\n */\nexport class AnthropicLLMProvider implements LLMProvider {\n private client: Anthropic;\n private model: string;\n private temperature: number;\n private maxRetries: number;\n private timeout: number;\n\n constructor(config: AnthropicProviderConfig) {\n this.client = new Anthropic({\n apiKey: config.apiKey,\n });\n this.model = config.model || 'claude-3-5-haiku-20241022';\n this.temperature = config.temperature ?? 0.3;\n this.maxRetries = config.maxRetries ?? 2;\n this.timeout = config.timeout ?? 30000;\n\n logger.info('AnthropicLLMProvider initialized', {\n model: this.model,\n temperature: this.temperature,\n });\n }\n\n /**\n * Analyze a prompt using the Anthropic API\n */\n async analyze(prompt: string, maxTokens: number): Promise<string> {\n const startTime = Date.now();\n let lastError: Error | null = null;\n\n for (let attempt = 0; attempt <= this.maxRetries; attempt++) {\n try {\n const response = await this.makeRequest(prompt, maxTokens);\n\n logger.debug('LLM analysis completed', {\n model: this.model,\n promptLength: prompt.length,\n responseLength: response.length,\n durationMs: Date.now() - startTime,\n attempt,\n });\n\n return response;\n } catch (error) {\n lastError = error instanceof Error ? 
error : new Error(String(error));\n\n // Check if retryable\n if (this.isRetryableError(error) && attempt < this.maxRetries) {\n const backoffMs = Math.pow(2, attempt) * 1000;\n logger.warn('LLM request failed, retrying', {\n attempt,\n backoffMs,\n error: lastError.message,\n });\n await this.sleep(backoffMs);\n continue;\n }\n\n break;\n }\n }\n\n logger.error('LLM analysis failed after retries', lastError!);\n throw lastError;\n }\n\n /**\n * Make the actual API request\n */\n private async makeRequest(\n prompt: string,\n maxTokens: number\n ): Promise<string> {\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), this.timeout);\n\n try {\n const response = await this.client.messages.create({\n model: this.model,\n max_tokens: maxTokens,\n temperature: this.temperature,\n messages: [\n {\n role: 'user',\n content: prompt,\n },\n ],\n });\n\n // Extract text from response\n const textContent = response.content.find((c) => c.type === 'text');\n if (!textContent || textContent.type !== 'text') {\n throw new Error('No text content in response');\n }\n\n return textContent.text;\n } finally {\n clearTimeout(timeoutId);\n }\n }\n\n /**\n * Check if an error is retryable\n */\n private isRetryableError(error: unknown): boolean {\n if (error instanceof Anthropic.RateLimitError) {\n return true;\n }\n if (error instanceof Anthropic.APIConnectionError) {\n return true;\n }\n if (error instanceof Anthropic.InternalServerError) {\n return true;\n }\n // Timeout errors are retryable\n if (error instanceof Error && error.name === 'AbortError') {\n return true;\n }\n return false;\n }\n\n private sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n }\n}\n\n/**\n * Local fallback LLM provider - uses heuristic summarization without external APIs\n * This ensures StackMemory works in LOCAL_ONLY mode\n */\nexport class LocalFallbackProvider implements LLMProvider {\n async analyze(prompt: 
string, maxTokens: number): Promise<string> {\n // Extract content from prompt and create a heuristic summary\n const lines = prompt.split('\\n').filter((l) => l.trim());\n const contentStart = lines.findIndex((l) => l.includes('Content:'));\n\n if (contentStart === -1 || lines.length < 3) {\n return 'Context summary not available (local mode)';\n }\n\n // Extract key information heuristically\n const content = lines.slice(contentStart + 1).join('\\n');\n const sentences = content\n .split(/[.!?]+/)\n .filter((s) => s.trim().length > 10);\n\n // Take first few sentences up to maxTokens (rough approximation: 4 chars = 1 token)\n const maxChars = maxTokens * 4;\n let summary = '';\n for (const sentence of sentences.slice(0, 5)) {\n if (summary.length + sentence.length > maxChars) break;\n summary += sentence.trim() + '. ';\n }\n\n return (\n summary.trim() || 'Context available (use LLM API for detailed analysis)'\n );\n }\n}\n\n/**\n * Factory function to create an LLM provider based on environment\n */\nexport function createLLMProvider(): LLMProvider | undefined {\n // Check for local-only mode\n if (\n process.env['STACKMEMORY_LOCAL'] === 'true' ||\n process.env['LOCAL_ONLY'] === 'true'\n ) {\n logger.info('LOCAL mode - using heuristic summarization');\n return new LocalFallbackProvider();\n }\n\n const apiKey = process.env['ANTHROPIC_API_KEY'];\n\n if (!apiKey) {\n logger.info(\n 'No ANTHROPIC_API_KEY found, LLM retrieval will use heuristics'\n );\n return new LocalFallbackProvider();\n }\n\n return new AnthropicLLMProvider({\n apiKey,\n model: process.env['ANTHROPIC_MODEL'] || 'claude-3-5-haiku-20241022',\n temperature: parseFloat(process.env['ANTHROPIC_TEMPERATURE'] || '0.3'),\n });\n}\n"],
|
|
5
5
|
"mappings": ";;;;AAKA,OAAO,eAAe;AACtB,SAAS,cAAc;AAuBhB,MAAM,qBAA4C;AAAA,EAC/C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAAiC;AAC3C,SAAK,SAAS,IAAI,UAAU;AAAA,MAC1B,QAAQ,OAAO;AAAA,IACjB,CAAC;AACD,SAAK,QAAQ,OAAO,SAAS;AAC7B,SAAK,cAAc,OAAO,eAAe;AACzC,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,UAAU,OAAO,WAAW;AAEjC,WAAO,KAAK,oCAAoC;AAAA,MAC9C,OAAO,KAAK;AAAA,MACZ,aAAa,KAAK;AAAA,IACpB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ,QAAgB,WAAoC;AAChE,UAAM,YAAY,KAAK,IAAI;AAC3B,QAAI,YAA0B;AAE9B,aAAS,UAAU,GAAG,WAAW,KAAK,YAAY,WAAW;AAC3D,UAAI;AACF,cAAM,WAAW,MAAM,KAAK,YAAY,QAAQ,SAAS;AAEzD,eAAO,MAAM,0BAA0B;AAAA,UACrC,OAAO,KAAK;AAAA,UACZ,cAAc,OAAO;AAAA,UACrB,gBAAgB,SAAS;AAAA,UACzB,YAAY,KAAK,IAAI,IAAI;AAAA,UACzB;AAAA,QACF,CAAC;AAED,eAAO;AAAA,MACT,SAAS,OAAO;AACd,oBAAY,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAGpE,YAAI,KAAK,iBAAiB,KAAK,KAAK,UAAU,KAAK,YAAY;AAC7D,gBAAM,YAAY,KAAK,IAAI,GAAG,OAAO,IAAI;AACzC,iBAAO,KAAK,gCAAgC;AAAA,YAC1C;AAAA,YACA;AAAA,YACA,OAAO,UAAU;AAAA,UACnB,CAAC;AACD,gBAAM,KAAK,MAAM,SAAS;AAC1B;AAAA,QACF;AAEA;AAAA,MACF;AAAA,IACF;AAEA,WAAO,MAAM,qCAAqC,SAAU;AAC5D,UAAM;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,QACA,WACiB;AACjB,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,KAAK,OAAO;AAEnE,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,OAAO,SAAS,OAAO;AAAA,QACjD,OAAO,KAAK;AAAA,QACZ,YAAY;AAAA,QACZ,aAAa,KAAK;AAAA,QAClB,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,QACF;AAAA,MACF,CAAC;AAGD,YAAM,cAAc,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAClE,UAAI,CAAC,eAAe,YAAY,SAAS,QAAQ;AAC/C,cAAM,IAAI,MAAM,6BAA6B;AAAA,MAC/C;AAEA,aAAO,YAAY;AAAA,IACrB,UAAE;AACA,mBAAa,SAAS;AAAA,IACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,OAAyB;AAChD,QAAI,iBAAiB,UAAU,gBAAgB;AAC7C,aAAO;AAAA,IACT;AACA,QAAI,iBAAiB,UAAU,oBAAoB;AACjD,aAAO;AAAA,IACT;AACA,QAAI,iBAAiB,UAAU,qBAAqB;AAClD,aAAO;AAAA,IACT;AAEA,QAAI,iBAAiB,SAAS,MAAM,SAAS,cAAc;AACzD,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,MAAM,IAA2B;AACvC,WAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AAAA,EACzD;AACF;AAMO,MAAM,sBAA6C;AAAA,EACxD,MAAM,Q
AAQ,QAAgB,WAAoC;AAEhE,UAAM,QAAQ,OAAO,MAAM,IAAI,EAAE,OAAO,CAAC,MAAM,EAAE,KAAK,CAAC;AACvD,UAAM,eAAe,MAAM,UAAU,CAAC,MAAM,EAAE,SAAS,UAAU,CAAC;AAElE,QAAI,iBAAiB,MAAM,MAAM,SAAS,GAAG;AAC3C,aAAO;AAAA,IACT;AAGA,UAAM,UAAU,MAAM,MAAM,eAAe,CAAC,EAAE,KAAK,IAAI;AACvD,UAAM,YAAY,QACf,MAAM,QAAQ,EACd,OAAO,CAAC,MAAM,EAAE,KAAK,EAAE,SAAS,EAAE;AAGrC,UAAM,WAAW,YAAY;AAC7B,QAAI,UAAU;AACd,eAAW,YAAY,UAAU,MAAM,GAAG,CAAC,GAAG;AAC5C,UAAI,QAAQ,SAAS,SAAS,SAAS,SAAU;AACjD,iBAAW,SAAS,KAAK,IAAI;AAAA,IAC/B;AAEA,WACE,QAAQ,KAAK,KAAK;AAAA,EAEtB;AACF;AAKO,SAAS,oBAA6C;AAE3D,MACE,QAAQ,IAAI,mBAAmB,MAAM,UACrC,QAAQ,IAAI,YAAY,MAAM,QAC9B;AACA,WAAO,KAAK,4CAA4C;AACxD,WAAO,IAAI,sBAAsB;AAAA,EACnC;AAEA,QAAM,SAAS,QAAQ,IAAI,mBAAmB;AAE9C,MAAI,CAAC,QAAQ;AACX,WAAO;AAAA,MACL;AAAA,IACF;AACA,WAAO,IAAI,sBAAsB;AAAA,EACnC;AAEA,SAAO,IAAI,qBAAqB;AAAA,IAC9B;AAAA,IACA,OAAO,QAAQ,IAAI,iBAAiB,KAAK;AAAA,IACzC,aAAa,WAAW,QAAQ,IAAI,uBAAuB,KAAK,KAAK;AAAA,EACvE,CAAC;AACH;",
|
|
6
6
|
"names": []
|
|
7
7
|
}
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../../../src/core/retrieval/types.ts"],
|
|
4
|
-
"sourcesContent": ["/**\n * Types for LLM-Driven Context Retrieval System\n * Implements intelligent context selection based on compressed summaries\n */\n\nimport { Frame, Anchor, Event } from '../context/index.js';\nimport { StackMemoryQuery } from '../query/query-parser.js';\n\n/**\n * Compressed summary of recent session activity\n */\nexport interface RecentSessionSummary {\n /** Recent frames with their key attributes */\n frames: FrameSummary[];\n /** Dominant operations performed */\n dominantOperations: OperationSummary[];\n /** Files that were touched */\n filesTouched: FileSummary[];\n /** Errors encountered */\n errorsEncountered: ErrorSummary[];\n /** Time range covered */\n timeRange: {\n start: number;\n end: number;\n durationMs: number;\n };\n}\n\nexport interface FrameSummary {\n frameId: string;\n name: string;\n type: string;\n depth: number;\n eventCount: number;\n anchorCount: number;\n score: number;\n createdAt: number;\n closedAt?: number;\n digestPreview?: string;\n}\n\nexport interface OperationSummary {\n operation: string;\n count: number;\n lastOccurrence: number;\n successRate: number;\n}\n\nexport interface FileSummary {\n path: string;\n operationCount: number;\n lastModified: number;\n operations: string[];\n}\n\nexport interface ErrorSummary {\n errorType: string;\n message: string;\n count: number;\n lastOccurrence: number;\n resolved: boolean;\n}\n\n/**\n * Historical patterns extracted from memory\n */\nexport interface HistoricalPatterns {\n /** Frame counts by topic */\n topicFrameCounts: Record<string, number>;\n /** Key decisions made */\n keyDecisions: DecisionSummary[];\n /** Recurring issues */\n recurringIssues: IssueSummary[];\n /** Common tool sequences */\n commonToolSequences: ToolSequence[];\n /** Time-based activity patterns */\n activityPatterns: ActivityPattern[];\n}\n\nexport interface DecisionSummary {\n id: string;\n text: string;\n frameId: string;\n timestamp: number;\n impact: 'low' | 'medium' | 'high';\n 
relatedFiles?: string[];\n}\n\nexport interface IssueSummary {\n issueType: string;\n occurrenceCount: number;\n lastSeen: number;\n resolutionRate: number;\n commonFixes?: string[];\n}\n\nexport interface ToolSequence {\n pattern: string;\n frequency: number;\n avgDuration: number;\n successRate: number;\n}\n\nexport interface ActivityPattern {\n periodType: 'hourly' | 'daily' | 'weekly';\n peakPeriods: string[];\n avgEventsPerPeriod: number;\n}\n\n/**\n * Queryable indices for fast retrieval\n */\nexport interface QueryableIndices {\n /** Index by error type */\n byErrorType: Record<string, string[]>; // errorType -> frameIds\n /** Index by timeframe */\n byTimeframe: Record<string, string[]>; // timeKey -> frameIds\n /** Index by contributor */\n byContributor: Record<string, string[]>; // userId -> frameIds\n /** Index by topic */\n byTopic: Record<string, string[]>; // topic -> frameIds\n /** Index by file */\n byFile: Record<string, string[]>; // filePath -> frameIds\n}\n\n/**\n * Complete compressed summary for LLM analysis\n */\nexport interface CompressedSummary {\n /** Project identifier */\n projectId: string;\n /** Generation timestamp */\n generatedAt: number;\n /** Recent session summary */\n recentSession: RecentSessionSummary;\n /** Historical patterns */\n historicalPatterns: HistoricalPatterns;\n /** Queryable indices */\n queryableIndices: QueryableIndices;\n /** Summary statistics */\n stats: SummaryStats;\n}\n\nexport interface SummaryStats {\n totalFrames: number;\n totalEvents: number;\n totalAnchors: number;\n totalDecisions: number;\n oldestFrame: number;\n newestFrame: number;\n avgFrameDepth: number;\n avgEventsPerFrame: number;\n}\n\n/**\n * LLM analysis request\n */\nexport interface LLMAnalysisRequest {\n /** Current user query */\n currentQuery: string;\n /** Parsed structured query */\n parsedQuery?: StackMemoryQuery;\n /** Compressed summary */\n compressedSummary: CompressedSummary;\n /** Token budget for context */\n tokenBudget: 
number;\n /** Optional hints for retrieval */\n hints?: RetrievalHints;\n}\n\nexport interface RetrievalHints {\n /** Prefer recent frames */\n preferRecent?: boolean;\n /** Focus on specific topics */\n focusTopics?: string[];\n /** Include error context */\n includeErrors?: boolean;\n /** Include decision history */\n includeDecisions?: boolean;\n /** Minimum relevance score */\n minRelevance?: number;\n}\n\n/**\n * LLM analysis response\n */\nexport interface LLMAnalysisResponse {\n /** Reasoning for the retrieval decision (auditable) */\n reasoning: string;\n /** Frames to retrieve with priority order */\n framesToRetrieve: FrameRetrievalPlan[];\n /** Confidence score (0.0 - 1.0) */\n confidenceScore: number;\n /** Additional context recommendations */\n recommendations: ContextRecommendation[];\n /** Analysis metadata */\n metadata: AnalysisMetadata;\n}\n\nexport interface FrameRetrievalPlan {\n frameId: string;\n priority: number; // 1-10, higher = more important\n reason: string;\n includeEvents: boolean;\n includeAnchors: boolean;\n includeDigest: boolean;\n estimatedTokens: number;\n}\n\nexport interface ContextRecommendation {\n type: 'include' | 'exclude' | 'summarize';\n target: string; // frameId, anchorId, or description\n reason: string;\n impact: 'low' | 'medium' | 'high';\n}\n\nexport interface AnalysisMetadata {\n analysisTimeMs: number;\n summaryTokens: number;\n queryComplexity: 'simple' | 'moderate' | 'complex';\n matchedPatterns: string[];\n fallbackUsed: boolean;\n}\n\n/**\n * Retrieved context result\n */\nexport interface RetrievedContext {\n /** Assembled context string */\n context: string;\n /** Frames included */\n frames: Frame[];\n /** Anchors included */\n anchors: Anchor[];\n /** Events included */\n events: Event[];\n /** LLM analysis that drove retrieval */\n analysis: LLMAnalysisResponse;\n /** Token usage */\n tokenUsage: {\n budget: number;\n used: number;\n remaining: number;\n };\n /** Retrieval metadata */\n metadata: 
RetrievalMetadata;\n}\n\nexport interface RetrievalMetadata {\n retrievalTimeMs: number;\n cacheHit: boolean;\n framesScanned: number;\n framesIncluded: number;\n compressionRatio: number;\n}\n\n/**\n * Configuration for the retrieval system\n */\nexport interface RetrievalConfig {\n /** Maximum frames to include in summary */\n maxSummaryFrames: number;\n /** Default token budget */\n defaultTokenBudget: number;\n /** Cache TTL in seconds */\n cacheTtlSeconds: number;\n /** Minimum confidence to use LLM suggestions */\n minConfidenceThreshold: number;\n /** Enable fallback to heuristic retrieval */\n enableFallback: boolean;\n /** LLM provider configuration */\n llmConfig: {\n provider: 'anthropic' | 'openai' | 'local';\n model: string;\n maxTokens: number;\n temperature: number;\n };\n}\n\nexport const DEFAULT_RETRIEVAL_CONFIG: RetrievalConfig = {\n maxSummaryFrames: 15,\n defaultTokenBudget: 8000,\n cacheTtlSeconds: 300,\n minConfidenceThreshold: 0.6,\n enableFallback: true,\n llmConfig: {\n provider: 'anthropic',\n model: 'claude-3-haiku-
|
|
4
|
+
"sourcesContent": ["/**\n * Types for LLM-Driven Context Retrieval System\n * Implements intelligent context selection based on compressed summaries\n */\n\nimport { Frame, Anchor, Event } from '../context/index.js';\nimport { StackMemoryQuery } from '../query/query-parser.js';\n\n/**\n * Compressed summary of recent session activity\n */\nexport interface RecentSessionSummary {\n /** Recent frames with their key attributes */\n frames: FrameSummary[];\n /** Dominant operations performed */\n dominantOperations: OperationSummary[];\n /** Files that were touched */\n filesTouched: FileSummary[];\n /** Errors encountered */\n errorsEncountered: ErrorSummary[];\n /** Time range covered */\n timeRange: {\n start: number;\n end: number;\n durationMs: number;\n };\n}\n\nexport interface FrameSummary {\n frameId: string;\n name: string;\n type: string;\n depth: number;\n eventCount: number;\n anchorCount: number;\n score: number;\n createdAt: number;\n closedAt?: number;\n digestPreview?: string;\n}\n\nexport interface OperationSummary {\n operation: string;\n count: number;\n lastOccurrence: number;\n successRate: number;\n}\n\nexport interface FileSummary {\n path: string;\n operationCount: number;\n lastModified: number;\n operations: string[];\n}\n\nexport interface ErrorSummary {\n errorType: string;\n message: string;\n count: number;\n lastOccurrence: number;\n resolved: boolean;\n}\n\n/**\n * Historical patterns extracted from memory\n */\nexport interface HistoricalPatterns {\n /** Frame counts by topic */\n topicFrameCounts: Record<string, number>;\n /** Key decisions made */\n keyDecisions: DecisionSummary[];\n /** Recurring issues */\n recurringIssues: IssueSummary[];\n /** Common tool sequences */\n commonToolSequences: ToolSequence[];\n /** Time-based activity patterns */\n activityPatterns: ActivityPattern[];\n}\n\nexport interface DecisionSummary {\n id: string;\n text: string;\n frameId: string;\n timestamp: number;\n impact: 'low' | 'medium' | 'high';\n 
relatedFiles?: string[];\n}\n\nexport interface IssueSummary {\n issueType: string;\n occurrenceCount: number;\n lastSeen: number;\n resolutionRate: number;\n commonFixes?: string[];\n}\n\nexport interface ToolSequence {\n pattern: string;\n frequency: number;\n avgDuration: number;\n successRate: number;\n}\n\nexport interface ActivityPattern {\n periodType: 'hourly' | 'daily' | 'weekly';\n peakPeriods: string[];\n avgEventsPerPeriod: number;\n}\n\n/**\n * Queryable indices for fast retrieval\n */\nexport interface QueryableIndices {\n /** Index by error type */\n byErrorType: Record<string, string[]>; // errorType -> frameIds\n /** Index by timeframe */\n byTimeframe: Record<string, string[]>; // timeKey -> frameIds\n /** Index by contributor */\n byContributor: Record<string, string[]>; // userId -> frameIds\n /** Index by topic */\n byTopic: Record<string, string[]>; // topic -> frameIds\n /** Index by file */\n byFile: Record<string, string[]>; // filePath -> frameIds\n}\n\n/**\n * Complete compressed summary for LLM analysis\n */\nexport interface CompressedSummary {\n /** Project identifier */\n projectId: string;\n /** Generation timestamp */\n generatedAt: number;\n /** Recent session summary */\n recentSession: RecentSessionSummary;\n /** Historical patterns */\n historicalPatterns: HistoricalPatterns;\n /** Queryable indices */\n queryableIndices: QueryableIndices;\n /** Summary statistics */\n stats: SummaryStats;\n}\n\nexport interface SummaryStats {\n totalFrames: number;\n totalEvents: number;\n totalAnchors: number;\n totalDecisions: number;\n oldestFrame: number;\n newestFrame: number;\n avgFrameDepth: number;\n avgEventsPerFrame: number;\n}\n\n/**\n * LLM analysis request\n */\nexport interface LLMAnalysisRequest {\n /** Current user query */\n currentQuery: string;\n /** Parsed structured query */\n parsedQuery?: StackMemoryQuery;\n /** Compressed summary */\n compressedSummary: CompressedSummary;\n /** Token budget for context */\n tokenBudget: 
number;\n /** Optional hints for retrieval */\n hints?: RetrievalHints;\n}\n\nexport interface RetrievalHints {\n /** Prefer recent frames */\n preferRecent?: boolean;\n /** Focus on specific topics */\n focusTopics?: string[];\n /** Include error context */\n includeErrors?: boolean;\n /** Include decision history */\n includeDecisions?: boolean;\n /** Minimum relevance score */\n minRelevance?: number;\n}\n\n/**\n * LLM analysis response\n */\nexport interface LLMAnalysisResponse {\n /** Reasoning for the retrieval decision (auditable) */\n reasoning: string;\n /** Frames to retrieve with priority order */\n framesToRetrieve: FrameRetrievalPlan[];\n /** Confidence score (0.0 - 1.0) */\n confidenceScore: number;\n /** Additional context recommendations */\n recommendations: ContextRecommendation[];\n /** Analysis metadata */\n metadata: AnalysisMetadata;\n}\n\nexport interface FrameRetrievalPlan {\n frameId: string;\n priority: number; // 1-10, higher = more important\n reason: string;\n includeEvents: boolean;\n includeAnchors: boolean;\n includeDigest: boolean;\n estimatedTokens: number;\n}\n\nexport interface ContextRecommendation {\n type: 'include' | 'exclude' | 'summarize';\n target: string; // frameId, anchorId, or description\n reason: string;\n impact: 'low' | 'medium' | 'high';\n}\n\nexport interface AnalysisMetadata {\n analysisTimeMs: number;\n summaryTokens: number;\n queryComplexity: 'simple' | 'moderate' | 'complex';\n matchedPatterns: string[];\n fallbackUsed: boolean;\n}\n\n/**\n * Retrieved context result\n */\nexport interface RetrievedContext {\n /** Assembled context string */\n context: string;\n /** Frames included */\n frames: Frame[];\n /** Anchors included */\n anchors: Anchor[];\n /** Events included */\n events: Event[];\n /** LLM analysis that drove retrieval */\n analysis: LLMAnalysisResponse;\n /** Token usage */\n tokenUsage: {\n budget: number;\n used: number;\n remaining: number;\n };\n /** Retrieval metadata */\n metadata: 
RetrievalMetadata;\n}\n\nexport interface RetrievalMetadata {\n retrievalTimeMs: number;\n cacheHit: boolean;\n framesScanned: number;\n framesIncluded: number;\n compressionRatio: number;\n}\n\n/**\n * Configuration for the retrieval system\n */\nexport interface RetrievalConfig {\n /** Maximum frames to include in summary */\n maxSummaryFrames: number;\n /** Default token budget */\n defaultTokenBudget: number;\n /** Cache TTL in seconds */\n cacheTtlSeconds: number;\n /** Minimum confidence to use LLM suggestions */\n minConfidenceThreshold: number;\n /** Enable fallback to heuristic retrieval */\n enableFallback: boolean;\n /** LLM provider configuration */\n llmConfig: {\n provider: 'anthropic' | 'openai' | 'local';\n model: string;\n maxTokens: number;\n temperature: number;\n };\n}\n\nexport const DEFAULT_RETRIEVAL_CONFIG: RetrievalConfig = {\n maxSummaryFrames: 15,\n defaultTokenBudget: 8000,\n cacheTtlSeconds: 300,\n minConfidenceThreshold: 0.6,\n enableFallback: true,\n llmConfig: {\n provider: 'anthropic',\n model: 'claude-3-5-haiku-20241022',\n maxTokens: 1024,\n temperature: 0.3,\n },\n};\n"],
|
|
5
5
|
"mappings": ";;;;AAuRO,MAAM,2BAA4C;AAAA,EACvD,kBAAkB;AAAA,EAClB,oBAAoB;AAAA,EACpB,iBAAiB;AAAA,EACjB,wBAAwB;AAAA,EACxB,gBAAgB;AAAA,EAChB,WAAW;AAAA,IACT,UAAU;AAAA,IACV,OAAO;AAAA,IACP,WAAW;AAAA,IACX,aAAa;AAAA,EACf;AACF;",
|
|
6
6
|
"names": []
|
|
7
7
|
}
|
|
@@ -9,7 +9,12 @@ import { SweepStateWatcher } from "./state-watcher.js";
|
|
|
9
9
|
import { StatusBar } from "./status-bar.js";
|
|
10
10
|
import { TabInterceptor } from "./tab-interceptor.js";
|
|
11
11
|
const HOME = process.env["HOME"] || "/tmp";
|
|
12
|
-
|
|
12
|
+
function getSweepDir() {
|
|
13
|
+
return process.env["SWEEP_STATE_DIR"] || join(HOME, ".stackmemory");
|
|
14
|
+
}
|
|
15
|
+
function getSweepPath(filename) {
|
|
16
|
+
return join(getSweepDir(), filename);
|
|
17
|
+
}
|
|
13
18
|
const ALT_SCREEN_ENTER = "\x1B[?1049h";
|
|
14
19
|
const ALT_SCREEN_EXIT = "\x1B[?1049l";
|
|
15
20
|
class PtyWrapper {
|
|
@@ -24,7 +29,7 @@ class PtyWrapper {
|
|
|
24
29
|
this.config = {
|
|
25
30
|
claudeBin: config.claudeBin || this.findClaude(),
|
|
26
31
|
claudeArgs: config.claudeArgs || [],
|
|
27
|
-
stateFile: config.stateFile ||
|
|
32
|
+
stateFile: config.stateFile || getSweepPath("sweep-state.json")
|
|
28
33
|
};
|
|
29
34
|
this.stateWatcher = new SweepStateWatcher(this.config.stateFile);
|
|
30
35
|
this.statusBar = new StatusBar();
|
|
@@ -35,6 +40,10 @@ class PtyWrapper {
|
|
|
35
40
|
});
|
|
36
41
|
}
|
|
37
42
|
async start() {
|
|
43
|
+
const sweepDir = getSweepDir();
|
|
44
|
+
if (!existsSync(sweepDir)) {
|
|
45
|
+
mkdirSync(sweepDir, { recursive: true });
|
|
46
|
+
}
|
|
38
47
|
let pty;
|
|
39
48
|
try {
|
|
40
49
|
pty = await import("node-pty");
|
|
@@ -109,12 +118,13 @@ class PtyWrapper {
|
|
|
109
118
|
}
|
|
110
119
|
acceptPrediction() {
|
|
111
120
|
if (!this.currentPrediction || !this.ptyProcess) return;
|
|
112
|
-
const dir =
|
|
121
|
+
const dir = getSweepDir();
|
|
113
122
|
if (!existsSync(dir)) {
|
|
114
123
|
mkdirSync(dir, { recursive: true });
|
|
115
124
|
}
|
|
125
|
+
const pendingFile = getSweepPath("sweep-pending.json");
|
|
116
126
|
writeFileSync(
|
|
117
|
-
|
|
127
|
+
pendingFile,
|
|
118
128
|
JSON.stringify(
|
|
119
129
|
{
|
|
120
130
|
file_path: this.currentPrediction.file_path,
|
|
@@ -125,7 +135,7 @@ class PtyWrapper {
|
|
|
125
135
|
2
|
|
126
136
|
)
|
|
127
137
|
);
|
|
128
|
-
const prompt = `Apply the Sweep prediction from ${
|
|
138
|
+
const prompt = `Apply the Sweep prediction from ${pendingFile}
|
|
129
139
|
`;
|
|
130
140
|
this.ptyProcess.write(prompt);
|
|
131
141
|
this.dismissPrediction();
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../../../src/features/sweep/pty-wrapper.ts"],
|
|
4
|
-
"sourcesContent": ["/**\n * Sweep PTY Wrapper\n *\n * Wraps Claude Code in a pseudo-terminal to add a Sweep prediction\n * status bar at the bottom of the terminal. Predictions from the\n * PostToolUse hook are displayed via the status bar. Tab to accept,\n * Esc to dismiss.\n */\n\nimport { join } from 'path';\nimport { writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { execSync } from 'child_process';\nimport { SweepStateWatcher, type PredictionEvent } from './state-watcher.js';\nimport { StatusBar } from './status-bar.js';\nimport { TabInterceptor } from './tab-interceptor.js';\n\nconst HOME = process.env['HOME'] || '/tmp';\
|
|
5
|
-
"mappings": ";;;;AASA,SAAS,YAAY;AACrB,SAAS,eAAe,YAAY,iBAAiB;AACrD,SAAS,gBAAgB;AACzB,SAAS,yBAA+C;AACxD,SAAS,iBAAiB;AAC1B,SAAS,sBAAsB;AAE/B,MAAM,OAAO,QAAQ,IAAI,MAAM,KAAK;
|
|
4
|
+
"sourcesContent": ["/**\n * Sweep PTY Wrapper\n *\n * Wraps Claude Code in a pseudo-terminal to add a Sweep prediction\n * status bar at the bottom of the terminal. Predictions from the\n * PostToolUse hook are displayed via the status bar. Tab to accept,\n * Esc to dismiss.\n */\n\nimport { join } from 'path';\nimport { writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { execSync } from 'child_process';\nimport { SweepStateWatcher, type PredictionEvent } from './state-watcher.js';\nimport { StatusBar } from './status-bar.js';\nimport { TabInterceptor } from './tab-interceptor.js';\n\nconst HOME = process.env['HOME'] || '/tmp';\n\nfunction getSweepDir(): string {\n return process.env['SWEEP_STATE_DIR'] || join(HOME, '.stackmemory');\n}\n\nfunction getSweepPath(filename: string): string {\n return join(getSweepDir(), filename);\n}\n\n// Alt screen buffer detection\nconst ALT_SCREEN_ENTER = '\\x1b[?1049h';\nconst ALT_SCREEN_EXIT = '\\x1b[?1049l';\n\nexport interface PtyWrapperConfig {\n claudeBin?: string;\n claudeArgs?: string[];\n stateFile?: string;\n}\n\n// Minimal interface for node-pty process to avoid compile-time dep\ninterface PtyProcess {\n write(data: string): void;\n resize(cols: number, rows: number): void;\n onData(cb: (data: string) => void): void;\n onExit(cb: (e: { exitCode: number }) => void): void;\n kill(): void;\n}\n\nexport class PtyWrapper {\n private config: Required<PtyWrapperConfig>;\n private stateWatcher: SweepStateWatcher;\n private statusBar: StatusBar;\n private tabInterceptor: TabInterceptor;\n private currentPrediction: PredictionEvent | null = null;\n private inAltScreen = false;\n private ptyProcess: PtyProcess | null = null;\n\n constructor(config: PtyWrapperConfig = {}) {\n this.config = {\n claudeBin: config.claudeBin || this.findClaude(),\n claudeArgs: config.claudeArgs || [],\n stateFile: config.stateFile || getSweepPath('sweep-state.json'),\n };\n\n this.stateWatcher = new SweepStateWatcher(this.config.stateFile);\n 
this.statusBar = new StatusBar();\n this.tabInterceptor = new TabInterceptor({\n onAccept: () => this.acceptPrediction(),\n onDismiss: () => this.dismissPrediction(),\n onPassthrough: (data) => this.ptyProcess?.write(data.toString('utf-8')),\n });\n }\n\n async start(): Promise<void> {\n // Ensure the sweep state directory exists\n const sweepDir = getSweepDir();\n if (!existsSync(sweepDir)) {\n mkdirSync(sweepDir, { recursive: true });\n }\n\n // Dynamic import for optional dependency\n let pty: typeof import('node-pty');\n try {\n pty = await import('node-pty');\n } catch {\n throw new Error(\n 'node-pty is required for the PTY wrapper.\\n' +\n 'Install with: npm install node-pty'\n );\n }\n\n const cols = process.stdout.columns || 80;\n const rows = process.stdout.rows || 24;\n\n // Filter undefined values from env\n const env: Record<string, string> = {};\n for (const [k, v] of Object.entries(process.env)) {\n if (v !== undefined) env[k] = v;\n }\n\n // Spawn Claude Code in a PTY with 1 row reserved for status bar\n this.ptyProcess = pty.spawn(this.config.claudeBin, this.config.claudeArgs, {\n name: process.env['TERM'] || 'xterm-256color',\n cols,\n rows: rows - 1,\n cwd: process.cwd(),\n env,\n }) as PtyProcess;\n\n // Set raw mode on stdin\n if (process.stdin.isTTY) {\n process.stdin.setRawMode(true);\n }\n process.stdin.resume();\n\n // PTY stdout -> parent stdout (transparent passthrough)\n this.ptyProcess.onData((data: string) => {\n // Detect alt screen buffer transitions\n if (data.includes(ALT_SCREEN_ENTER)) {\n this.inAltScreen = true;\n this.statusBar.hide();\n }\n if (data.includes(ALT_SCREEN_EXIT)) {\n this.inAltScreen = false;\n }\n\n process.stdout.write(data);\n });\n\n // Parent stdin -> tab interceptor -> PTY\n process.stdin.on('data', (data: Buffer) => {\n this.tabInterceptor.process(data);\n });\n\n // State watcher -> status bar\n this.stateWatcher.on('loading', () => {\n if (!this.inAltScreen) {\n this.statusBar.showLoading();\n }\n });\n\n 
this.stateWatcher.on('prediction', (event: PredictionEvent) => {\n this.currentPrediction = event;\n this.tabInterceptor.setPredictionActive(true);\n if (!this.inAltScreen) {\n this.statusBar.show(\n event.prediction,\n event.file_path,\n event.latency_ms\n );\n }\n });\n\n this.stateWatcher.start();\n\n // Handle terminal resize\n process.stdout.on('resize', () => {\n const newCols = process.stdout.columns || 80;\n const newRows = process.stdout.rows || 24;\n this.ptyProcess?.resize(newCols, newRows - 1);\n this.statusBar.resize(newRows, newCols);\n });\n\n // Handle PTY exit\n this.ptyProcess.onExit(({ exitCode }) => {\n this.cleanup();\n process.exit(exitCode);\n });\n\n // Handle signals\n const onSignal = () => {\n this.cleanup();\n process.exit(0);\n };\n process.on('SIGINT', onSignal);\n process.on('SIGTERM', onSignal);\n }\n\n private acceptPrediction(): void {\n if (!this.currentPrediction || !this.ptyProcess) return;\n\n // Write prediction to pending file for Claude to read\n const dir = getSweepDir();\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n const pendingFile = getSweepPath('sweep-pending.json');\n writeFileSync(\n pendingFile,\n JSON.stringify(\n {\n file_path: this.currentPrediction.file_path,\n predicted_content: this.currentPrediction.prediction,\n timestamp: Date.now(),\n },\n null,\n 2\n )\n );\n\n // Inject acceptance prompt into PTY stdin.\n // SAFETY: pendingFile is derived from env or a constant path, not\n // arbitrary user input. 
The prompt is written to Claude Code's input,\n // which interprets it as a user message, not as a shell command.\n const prompt = `Apply the Sweep prediction from ${pendingFile}\\n`;\n this.ptyProcess.write(prompt);\n\n this.dismissPrediction();\n }\n\n private dismissPrediction(): void {\n this.currentPrediction = null;\n this.tabInterceptor.setPredictionActive(false);\n this.statusBar.hide();\n }\n\n private cleanup(): void {\n this.stateWatcher.stop();\n this.statusBar.hide();\n\n if (process.stdin.isTTY) {\n process.stdin.setRawMode(false);\n }\n process.stdin.pause();\n }\n\n private findClaude(): string {\n // Check PATH first via which\n try {\n const resolved = execSync('which claude', { encoding: 'utf-8' }).trim();\n if (resolved) return resolved;\n } catch {\n // Not on PATH\n }\n\n // Check known locations\n const candidates = [\n join(HOME, '.bun', 'bin', 'claude'),\n '/usr/local/bin/claude',\n '/opt/homebrew/bin/claude',\n ];\n\n for (const c of candidates) {\n if (existsSync(c)) return c;\n }\n\n return 'claude';\n }\n}\n\n/**\n * Launch the PTY wrapper\n */\nexport async function launchWrapper(config?: PtyWrapperConfig): Promise<void> {\n const wrapper = new PtyWrapper(config);\n await wrapper.start();\n}\n"],
|
|
5
|
+
"mappings": ";;;;AASA,SAAS,YAAY;AACrB,SAAS,eAAe,YAAY,iBAAiB;AACrD,SAAS,gBAAgB;AACzB,SAAS,yBAA+C;AACxD,SAAS,iBAAiB;AAC1B,SAAS,sBAAsB;AAE/B,MAAM,OAAO,QAAQ,IAAI,MAAM,KAAK;AAEpC,SAAS,cAAsB;AAC7B,SAAO,QAAQ,IAAI,iBAAiB,KAAK,KAAK,MAAM,cAAc;AACpE;AAEA,SAAS,aAAa,UAA0B;AAC9C,SAAO,KAAK,YAAY,GAAG,QAAQ;AACrC;AAGA,MAAM,mBAAmB;AACzB,MAAM,kBAAkB;AAiBjB,MAAM,WAAW;AAAA,EACd;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,oBAA4C;AAAA,EAC5C,cAAc;AAAA,EACd,aAAgC;AAAA,EAExC,YAAY,SAA2B,CAAC,GAAG;AACzC,SAAK,SAAS;AAAA,MACZ,WAAW,OAAO,aAAa,KAAK,WAAW;AAAA,MAC/C,YAAY,OAAO,cAAc,CAAC;AAAA,MAClC,WAAW,OAAO,aAAa,aAAa,kBAAkB;AAAA,IAChE;AAEA,SAAK,eAAe,IAAI,kBAAkB,KAAK,OAAO,SAAS;AAC/D,SAAK,YAAY,IAAI,UAAU;AAC/B,SAAK,iBAAiB,IAAI,eAAe;AAAA,MACvC,UAAU,MAAM,KAAK,iBAAiB;AAAA,MACtC,WAAW,MAAM,KAAK,kBAAkB;AAAA,MACxC,eAAe,CAAC,SAAS,KAAK,YAAY,MAAM,KAAK,SAAS,OAAO,CAAC;AAAA,IACxE,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,QAAuB;AAE3B,UAAM,WAAW,YAAY;AAC7B,QAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,gBAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAAA,IACzC;AAGA,QAAI;AACJ,QAAI;AACF,YAAM,MAAM,OAAO,UAAU;AAAA,IAC/B,QAAQ;AACN,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,UAAM,OAAO,QAAQ,OAAO,WAAW;AACvC,UAAM,OAAO,QAAQ,OAAO,QAAQ;AAGpC,UAAM,MAA8B,CAAC;AACrC,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,QAAQ,GAAG,GAAG;AAChD,UAAI,MAAM,OAAW,KAAI,CAAC,IAAI;AAAA,IAChC;AAGA,SAAK,aAAa,IAAI,MAAM,KAAK,OAAO,WAAW,KAAK,OAAO,YAAY;AAAA,MACzE,MAAM,QAAQ,IAAI,MAAM,KAAK;AAAA,MAC7B;AAAA,MACA,MAAM,OAAO;AAAA,MACb,KAAK,QAAQ,IAAI;AAAA,MACjB;AAAA,IACF,CAAC;AAGD,QAAI,QAAQ,MAAM,OAAO;AACvB,cAAQ,MAAM,WAAW,IAAI;AAAA,IAC/B;AACA,YAAQ,MAAM,OAAO;AAGrB,SAAK,WAAW,OAAO,CAAC,SAAiB;AAEvC,UAAI,KAAK,SAAS,gBAAgB,GAAG;AACnC,aAAK,cAAc;AACnB,aAAK,UAAU,KAAK;AAAA,MACtB;AACA,UAAI,KAAK,SAAS,eAAe,GAAG;AAClC,aAAK,cAAc;AAAA,MACrB;AAEA,cAAQ,OAAO,MAAM,IAAI;AAAA,IAC3B,CAAC;AAGD,YAAQ,MAAM,GAAG,QAAQ,CAAC,SAAiB;AACzC,WAAK,eAAe,QAAQ,IAAI;AAAA,IAClC,CAAC;AAGD,SAAK,aAAa,GAAG,WAAW,MAAM;AACpC,UAAI,CAAC,KAAK,aAAa;AACrB,aAAK,UAAU,YAAY;AAAA,MAC7B;AAAA,IACF,CAAC;AAED,SAAK,aAAa,GAAG,cAAc,CAAC,UAA2B;AAC7D,WAAK,oBAAoB;AACzB,WAAK,eAAe,oBAAoB,IAAI;AAC5C,UA
AI,CAAC,KAAK,aAAa;AACrB,aAAK,UAAU;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF,CAAC;AAED,SAAK,aAAa,MAAM;AAGxB,YAAQ,OAAO,GAAG,UAAU,MAAM;AAChC,YAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,YAAM,UAAU,QAAQ,OAAO,QAAQ;AACvC,WAAK,YAAY,OAAO,SAAS,UAAU,CAAC;AAC5C,WAAK,UAAU,OAAO,SAAS,OAAO;AAAA,IACxC,CAAC;AAGD,SAAK,WAAW,OAAO,CAAC,EAAE,SAAS,MAAM;AACvC,WAAK,QAAQ;AACb,cAAQ,KAAK,QAAQ;AAAA,IACvB,CAAC;AAGD,UAAM,WAAW,MAAM;AACrB,WAAK,QAAQ;AACb,cAAQ,KAAK,CAAC;AAAA,IAChB;AACA,YAAQ,GAAG,UAAU,QAAQ;AAC7B,YAAQ,GAAG,WAAW,QAAQ;AAAA,EAChC;AAAA,EAEQ,mBAAyB;AAC/B,QAAI,CAAC,KAAK,qBAAqB,CAAC,KAAK,WAAY;AAGjD,UAAM,MAAM,YAAY;AACxB,QAAI,CAAC,WAAW,GAAG,GAAG;AACpB,gBAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,IACpC;AACA,UAAM,cAAc,aAAa,oBAAoB;AACrD;AAAA,MACE;AAAA,MACA,KAAK;AAAA,QACH;AAAA,UACE,WAAW,KAAK,kBAAkB;AAAA,UAClC,mBAAmB,KAAK,kBAAkB;AAAA,UAC1C,WAAW,KAAK,IAAI;AAAA,QACtB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAMA,UAAM,SAAS,mCAAmC,WAAW;AAAA;AAC7D,SAAK,WAAW,MAAM,MAAM;AAE5B,SAAK,kBAAkB;AAAA,EACzB;AAAA,EAEQ,oBAA0B;AAChC,SAAK,oBAAoB;AACzB,SAAK,eAAe,oBAAoB,KAAK;AAC7C,SAAK,UAAU,KAAK;AAAA,EACtB;AAAA,EAEQ,UAAgB;AACtB,SAAK,aAAa,KAAK;AACvB,SAAK,UAAU,KAAK;AAEpB,QAAI,QAAQ,MAAM,OAAO;AACvB,cAAQ,MAAM,WAAW,KAAK;AAAA,IAChC;AACA,YAAQ,MAAM,MAAM;AAAA,EACtB;AAAA,EAEQ,aAAqB;AAE3B,QAAI;AACF,YAAM,WAAW,SAAS,gBAAgB,EAAE,UAAU,QAAQ,CAAC,EAAE,KAAK;AACtE,UAAI,SAAU,QAAO;AAAA,IACvB,QAAQ;AAAA,IAER;AAGA,UAAM,aAAa;AAAA,MACjB,KAAK,MAAM,QAAQ,OAAO,QAAQ;AAAA,MAClC;AAAA,MACA;AAAA,IACF;AAEA,eAAW,KAAK,YAAY;AAC1B,UAAI,WAAW,CAAC,EAAG,QAAO;AAAA,IAC5B;AAEA,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,cAAc,QAA0C;AAC5E,QAAM,UAAU,IAAI,WAAW,MAAM;AACrC,QAAM,QAAQ,MAAM;AACtB;",
|
|
6
6
|
"names": []
|
|
7
7
|
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { fileURLToPath as __fileURLToPath } from 'url';
|
|
2
|
+
import { dirname as __pathDirname } from 'path';
|
|
3
|
+
const __filename = __fileURLToPath(import.meta.url);
|
|
4
|
+
const __dirname = __pathDirname(__filename);
|
|
5
|
+
import { execSync } from "child_process";
|
|
6
|
+
function isTmuxAvailable() {
  // Probe the PATH for a tmux binary; a non-zero exit from `which`
  // (thrown by execSync) means tmux is not installed.
  try {
    execSync("which tmux", { stdio: "ignore" });
  } catch {
    return false;
  }
  return true;
}
|
|
14
|
+
function createTmuxSession(name, paneCount) {
  // Start a detached session; the first pane comes with it for free.
  // NOTE(review): `name` is interpolated into the shell unescaped — fine for
  // the internal session names this package generates, but escape it if this
  // ever accepts user-supplied input.
  execSync(`tmux new-session -d -s ${name}`, { stdio: "ignore" });

  // Each additional pane: split, then immediately re-tile so tmux never
  // refuses the next split with a "no space" error on small terminals.
  for (let pane = 1; pane < paneCount; pane++) {
    execSync(`tmux split-window -t ${name}`, { stdio: "ignore" });
    execSync(`tmux select-layout -t ${name} tiled`, { stdio: "ignore" });
  }

  // Settle on the final tiled layout.
  execSync(`tmux select-layout -t ${name} tiled`, { stdio: "ignore" });
}
|
|
22
|
+
/**
 * Send a shell command (followed by Enter) to one pane of a session.
 *
 * The original escaped only `command` while interpolating the
 * `session:pane` target raw; a session name containing spaces or shell
 * metacharacters could break the invocation or inject extra shell words.
 * Both pieces are now passed through shellEscape.
 */
function sendToPane(session, pane, command) {
  const target = shellEscape(`${session}:${pane}`);
  execSync(`tmux send-keys -t ${target} ${shellEscape(command)} Enter`, {
    stdio: "ignore"
  });
}
|
|
30
|
+
function killTmuxSession(name) {
  // Tear down the entire session (all windows and panes).
  // NOTE(review): `name` is interpolated unescaped — acceptable for the
  // internally generated session names; verify before accepting external input.
  const cmd = `tmux kill-session -t ${name}`;
  execSync(cmd, { stdio: "ignore" });
}
|
|
33
|
+
function attachToSession(name) {
  // Attach the current terminal to the session. stdio must be inherited so
  // tmux can take over the parent TTY; this call blocks until detach.
  const cmd = `tmux attach-session -t ${name}`;
  execSync(cmd, { stdio: "inherit" });
}
|
|
36
|
+
function listPanes(session) {
  // Ask tmux for the session's pane indexes, one per line. An unknown
  // session — or no tmux at all — yields an empty list instead of throwing.
  let raw;
  try {
    raw = execSync(`tmux list-panes -t ${session} -F "#{pane_index}"`, {
      encoding: "utf-8"
    });
  } catch {
    return [];
  }
  return raw.trim().split("\n").filter(Boolean);
}
|
|
47
|
+
function sendCtrlC(session, pane) {
  // Deliver an interrupt (Ctrl-C keypress) to whatever is running in the pane.
  const target = `${session}:${pane}`;
  execSync(`tmux send-keys -t ${target} C-c`, { stdio: "ignore" });
}
|
|
50
|
+
function sessionExists(name) {
  // `tmux has-session` exits non-zero (throwing here) when the session is
  // missing or the tmux server is not running at all.
  try {
    execSync(`tmux has-session -t ${name}`, { stdio: "ignore" });
  } catch {
    return false;
  }
  return true;
}
|
|
58
|
+
function shellEscape(cmd) {
  // POSIX single-quote escaping: close the quote, emit a literal escaped
  // quote, and reopen. E.g.  don't  ->  'don'\''t'
  const escaped = cmd.split("'").join("'\\''");
  return `'${escaped}'`;
}
|
|
61
|
+
// Public tmux helpers; shellEscape intentionally stays module-private.
export {
  attachToSession,
  createTmuxSession,
  isTmuxAvailable,
  killTmuxSession,
  listPanes,
  sendCtrlC,
  sendToPane,
  sessionExists
};
|
|
71
|
+
//# sourceMappingURL=tmux-manager.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../../../src/features/workers/tmux-manager.ts"],
|
|
4
|
+
"sourcesContent": ["/**\n * Tmux Manager\n *\n * Creates and manages tmux sessions for parallel Claude workers.\n * Each pane runs an isolated claude-sm instance.\n */\n\nimport { execSync } from 'child_process';\n\nexport function isTmuxAvailable(): boolean {\n try {\n execSync('which tmux', { stdio: 'ignore' });\n return true;\n } catch {\n return false;\n }\n}\n\nexport function createTmuxSession(name: string, paneCount: number): void {\n // Create a detached session with the first pane\n execSync(`tmux new-session -d -s ${name}`, { stdio: 'ignore' });\n\n // Add remaining panes\n for (let i = 1; i < paneCount; i++) {\n execSync(`tmux split-window -t ${name}`, { stdio: 'ignore' });\n // Rebalance after each split to prevent \"no space\" errors\n execSync(`tmux select-layout -t ${name} tiled`, { stdio: 'ignore' });\n }\n\n // Final tiled layout\n execSync(`tmux select-layout -t ${name} tiled`, { stdio: 'ignore' });\n}\n\nexport function sendToPane(\n session: string,\n pane: string,\n command: string\n): void {\n execSync(\n `tmux send-keys -t ${session}:${pane} ${shellEscape(command)} Enter`,\n {\n stdio: 'ignore',\n }\n );\n}\n\nexport function killTmuxSession(name: string): void {\n execSync(`tmux kill-session -t ${name}`, { stdio: 'ignore' });\n}\n\nexport function attachToSession(name: string): void {\n execSync(`tmux attach-session -t ${name}`, { stdio: 'inherit' });\n}\n\nexport function listPanes(session: string): string[] {\n try {\n const output = execSync(\n `tmux list-panes -t ${session} -F \"#{pane_index}\"`,\n { encoding: 'utf-8' }\n );\n return output.trim().split('\\n').filter(Boolean);\n } catch {\n return [];\n }\n}\n\nexport function sendCtrlC(session: string, pane: string): void {\n execSync(`tmux send-keys -t ${session}:${pane} C-c`, { stdio: 'ignore' });\n}\n\nexport function sessionExists(name: string): boolean {\n try {\n execSync(`tmux has-session -t ${name}`, { stdio: 'ignore' });\n return true;\n } catch {\n return false;\n 
}\n}\n\nfunction shellEscape(cmd: string): string {\n // Wrap in single quotes, escaping existing single quotes\n return \"'\" + cmd.replace(/'/g, \"'\\\\''\") + \"'\";\n}\n"],
|
|
5
|
+
"mappings": ";;;;AAOA,SAAS,gBAAgB;AAElB,SAAS,kBAA2B;AACzC,MAAI;AACF,aAAS,cAAc,EAAE,OAAO,SAAS,CAAC;AAC1C,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,SAAS,kBAAkB,MAAc,WAAyB;AAEvE,WAAS,0BAA0B,IAAI,IAAI,EAAE,OAAO,SAAS,CAAC;AAG9D,WAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,aAAS,wBAAwB,IAAI,IAAI,EAAE,OAAO,SAAS,CAAC;AAE5D,aAAS,yBAAyB,IAAI,UAAU,EAAE,OAAO,SAAS,CAAC;AAAA,EACrE;AAGA,WAAS,yBAAyB,IAAI,UAAU,EAAE,OAAO,SAAS,CAAC;AACrE;AAEO,SAAS,WACd,SACA,MACA,SACM;AACN;AAAA,IACE,qBAAqB,OAAO,IAAI,IAAI,IAAI,YAAY,OAAO,CAAC;AAAA,IAC5D;AAAA,MACE,OAAO;AAAA,IACT;AAAA,EACF;AACF;AAEO,SAAS,gBAAgB,MAAoB;AAClD,WAAS,wBAAwB,IAAI,IAAI,EAAE,OAAO,SAAS,CAAC;AAC9D;AAEO,SAAS,gBAAgB,MAAoB;AAClD,WAAS,0BAA0B,IAAI,IAAI,EAAE,OAAO,UAAU,CAAC;AACjE;AAEO,SAAS,UAAU,SAA2B;AACnD,MAAI;AACF,UAAM,SAAS;AAAA,MACb,sBAAsB,OAAO;AAAA,MAC7B,EAAE,UAAU,QAAQ;AAAA,IACtB;AACA,WAAO,OAAO,KAAK,EAAE,MAAM,IAAI,EAAE,OAAO,OAAO;AAAA,EACjD,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAEO,SAAS,UAAU,SAAiB,MAAoB;AAC7D,WAAS,qBAAqB,OAAO,IAAI,IAAI,QAAQ,EAAE,OAAO,SAAS,CAAC;AAC1E;AAEO,SAAS,cAAc,MAAuB;AACnD,MAAI;AACF,aAAS,uBAAuB,IAAI,IAAI,EAAE,OAAO,SAAS,CAAC;AAC3D,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,SAAS,YAAY,KAAqB;AAExC,SAAO,MAAM,IAAI,QAAQ,MAAM,OAAO,IAAI;AAC5C;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { fileURLToPath as __fileURLToPath } from 'url';
|
|
2
|
+
import { dirname as __pathDirname } from 'path';
|
|
3
|
+
const __filename = __fileURLToPath(import.meta.url);
|
|
4
|
+
const __dirname = __pathDirname(__filename);
|
|
5
|
+
import { join } from "path";
|
|
6
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, rmSync } from "fs";
|
|
7
|
+
import { homedir } from "os";
|
|
8
|
+
function workersDir() {
  // Root directory for all worker state: ~/.stackmemory/workers
  const home = homedir();
  return join(home, ".stackmemory", "workers");
}
|
|
11
|
+
function registryFile() {
  // The single JSON file that tracks the active worker session.
  const root = workersDir();
  return join(root, "registry.json");
}
|
|
14
|
+
/**
 * Create (if needed) and return the per-worker state directory.
 *
 * mkdirSync with recursive:true is a no-op when the directory already
 * exists, so the previous existsSync pre-check was redundant and left a
 * small TOCTOU window; it has been removed.
 */
function ensureWorkerStateDir(workerId) {
  const dir = join(workersDir(), workerId);
  mkdirSync(dir, { recursive: true });
  return dir;
}
|
|
21
|
+
/**
 * Persist the worker session registry as pretty-printed JSON.
 *
 * mkdirSync with recursive:true is idempotent, so the previous existsSync
 * pre-check was redundant (and racy); it has been removed.
 */
function saveRegistry(session) {
  mkdirSync(workersDir(), { recursive: true });
  writeFileSync(registryFile(), JSON.stringify(session, null, 2));
}
|
|
28
|
+
function loadRegistry() {
  // Return the persisted worker session, or null when the registry file is
  // absent or contains unparseable JSON.
  const file = registryFile();
  if (!existsSync(file)) return null;
  try {
    return JSON.parse(readFileSync(file, "utf-8"));
  } catch {
    return null;
  }
}
|
|
36
|
+
/**
 * Delete the registry file if it exists.
 *
 * rmSync with force:true already ignores a missing path, so the previous
 * existsSync guard was redundant; it has been removed.
 */
function clearRegistry() {
  rmSync(registryFile(), { force: true });
}
|
|
42
|
+
// Public accessor for the module-private workers root directory
// (~/.stackmemory/workers).
function getWorkersDir() {
  return workersDir();
}
|
|
45
|
+
// Public registry API; workersDir/registryFile stay module-private.
export {
  clearRegistry,
  ensureWorkerStateDir,
  getWorkersDir,
  loadRegistry,
  saveRegistry
};
|
|
52
|
+
//# sourceMappingURL=worker-registry.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../../../src/features/workers/worker-registry.ts"],
|
|
4
|
+
"sourcesContent": ["/**\n * Worker Registry\n *\n * Tracks parallel Claude worker sessions spawned via tmux.\n * Each worker gets an isolated state directory for Sweep predictions.\n */\n\nimport { join } from 'path';\nimport { existsSync, mkdirSync, readFileSync, writeFileSync, rmSync } from 'fs';\nimport { homedir } from 'os';\n\nfunction workersDir(): string {\n return join(homedir(), '.stackmemory', 'workers');\n}\n\nfunction registryFile(): string {\n return join(workersDir(), 'registry.json');\n}\n\nexport interface WorkerEntry {\n id: string;\n pane: string;\n pid?: number;\n task?: string;\n cwd: string;\n startedAt: string;\n stateDir: string;\n}\n\nexport interface WorkerSession {\n sessionName: string;\n workers: WorkerEntry[];\n createdAt: string;\n}\n\nexport function ensureWorkerStateDir(workerId: string): string {\n const dir = join(workersDir(), workerId);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n return dir;\n}\n\nexport function saveRegistry(session: WorkerSession): void {\n const dir = workersDir();\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(registryFile(), JSON.stringify(session, null, 2));\n}\n\nexport function loadRegistry(): WorkerSession | null {\n if (!existsSync(registryFile())) return null;\n try {\n return JSON.parse(readFileSync(registryFile(), 'utf-8')) as WorkerSession;\n } catch {\n return null;\n }\n}\n\nexport function clearRegistry(): void {\n const file = registryFile();\n if (existsSync(file)) {\n rmSync(file, { force: true });\n }\n}\n\nexport function getWorkersDir(): string {\n return workersDir();\n}\n"],
|
|
5
|
+
"mappings": ";;;;AAOA,SAAS,YAAY;AACrB,SAAS,YAAY,WAAW,cAAc,eAAe,cAAc;AAC3E,SAAS,eAAe;AAExB,SAAS,aAAqB;AAC5B,SAAO,KAAK,QAAQ,GAAG,gBAAgB,SAAS;AAClD;AAEA,SAAS,eAAuB;AAC9B,SAAO,KAAK,WAAW,GAAG,eAAe;AAC3C;AAkBO,SAAS,qBAAqB,UAA0B;AAC7D,QAAM,MAAM,KAAK,WAAW,GAAG,QAAQ;AACvC,MAAI,CAAC,WAAW,GAAG,GAAG;AACpB,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AACA,SAAO;AACT;AAEO,SAAS,aAAa,SAA8B;AACzD,QAAM,MAAM,WAAW;AACvB,MAAI,CAAC,WAAW,GAAG,GAAG;AACpB,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AACA,gBAAc,aAAa,GAAG,KAAK,UAAU,SAAS,MAAM,CAAC,CAAC;AAChE;AAEO,SAAS,eAAqC;AACnD,MAAI,CAAC,WAAW,aAAa,CAAC,EAAG,QAAO;AACxC,MAAI;AACF,WAAO,KAAK,MAAM,aAAa,aAAa,GAAG,OAAO,CAAC;AAAA,EACzD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,SAAS,gBAAsB;AACpC,QAAM,OAAO,aAAa;AAC1B,MAAI,WAAW,IAAI,GAAG;AACpB,WAAO,MAAM,EAAE,OAAO,KAAK,CAAC;AAAA,EAC9B;AACF;AAEO,SAAS,gBAAwB;AACtC,SAAO,WAAW;AACpB;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|
|
@@ -7,6 +7,8 @@ import { LinearSyncEngine } from "./sync.js";
|
|
|
7
7
|
import { LinearAuthManager } from "./auth.js";
|
|
8
8
|
import { IntegrationError, ErrorCode } from "../../core/errors/index.js";
|
|
9
9
|
import { logger } from "../../core/monitoring/logger.js";
|
|
10
|
+
import { ClaudeCodeSubagentClient } from "../claude-code/subagent-client.js";
|
|
11
|
+
// Issue labels that trigger spawning an automated Claude Code subagent.
const AUTOMATION_LABELS = ["automated", "claude-code", "stackmemory"];
|
|
10
12
|
function getEnv(key, defaultValue) {
|
|
11
13
|
const value = process.env[key];
|
|
12
14
|
if (value === void 0) {
|
|
@@ -115,6 +117,86 @@ class LinearWebhookHandler {
|
|
|
115
117
|
}
|
|
116
118
|
await this.storeLinearMapping(taskId, issue.id, issue.identifier);
|
|
117
119
|
logger.info(`Created task ${taskId} from Linear issue ${issue.identifier}`);
|
|
120
|
+
if (this.shouldSpawnSubagent(issue)) {
|
|
121
|
+
await this.spawnSubagentForIssue(issue);
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
/**
|
|
125
|
+
* Check if issue should trigger subagent spawn
|
|
126
|
+
*/
|
|
127
|
+
shouldSpawnSubagent(issue) {
|
|
128
|
+
if (!issue.labels) return false;
|
|
129
|
+
const labelNames = issue.labels.map((l) => l.name.toLowerCase());
|
|
130
|
+
return AUTOMATION_LABELS.some((label) => labelNames.includes(label));
|
|
131
|
+
}
|
|
132
|
+
/**
|
|
133
|
+
* Spawn a Claude Code subagent for the issue
|
|
134
|
+
*/
|
|
135
|
+
async spawnSubagentForIssue(issue) {
|
|
136
|
+
logger.info(`Spawning subagent for ${issue.identifier}`);
|
|
137
|
+
try {
|
|
138
|
+
const client = new ClaudeCodeSubagentClient();
|
|
139
|
+
const agentType = this.determineAgentType(issue.labels || []);
|
|
140
|
+
const task = this.buildTaskPrompt(issue);
|
|
141
|
+
const sourceUrl = this.extractSourceUrl(issue.description);
|
|
142
|
+
const result = await client.executeSubagent({
|
|
143
|
+
type: agentType,
|
|
144
|
+
task,
|
|
145
|
+
context: {
|
|
146
|
+
linearIssueId: issue.id,
|
|
147
|
+
linearIdentifier: issue.identifier,
|
|
148
|
+
linearUrl: issue.url,
|
|
149
|
+
sourceUrl: sourceUrl || issue.url
|
|
150
|
+
},
|
|
151
|
+
timeout: 5 * 60 * 1e3
|
|
152
|
+
// 5 min
|
|
153
|
+
});
|
|
154
|
+
logger.info(`Subagent completed for ${issue.identifier}`, {
|
|
155
|
+
sessionId: result.sessionId,
|
|
156
|
+
status: result.status
|
|
157
|
+
});
|
|
158
|
+
} catch (error) {
|
|
159
|
+
logger.error(`Failed to spawn subagent for ${issue.identifier}`, {
|
|
160
|
+
error: error instanceof Error ? error.message : "Unknown error"
|
|
161
|
+
});
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
/**
|
|
165
|
+
* Determine agent type from issue labels
|
|
166
|
+
*/
|
|
167
|
+
determineAgentType(labels) {
|
|
168
|
+
const lowerLabels = labels.map((l) => l.name.toLowerCase());
|
|
169
|
+
if (lowerLabels.some((l) => l.includes("review") || l.includes("pr"))) {
|
|
170
|
+
return "review";
|
|
171
|
+
}
|
|
172
|
+
if (lowerLabels.some((l) => l.includes("explore") || l.includes("research"))) {
|
|
173
|
+
return "context";
|
|
174
|
+
}
|
|
175
|
+
return "code";
|
|
176
|
+
}
|
|
177
|
+
/**
|
|
178
|
+
* Build task prompt from Linear issue
|
|
179
|
+
*/
|
|
180
|
+
buildTaskPrompt(issue) {
|
|
181
|
+
const parts = [`Linear Issue: ${issue.identifier} - ${issue.title}`];
|
|
182
|
+
if (issue.description) {
|
|
183
|
+
const quoteMatch = issue.description.match(/^>\s*(.+?)(?:\n\n|$)/s);
|
|
184
|
+
if (quoteMatch) {
|
|
185
|
+
parts.push("", "Context:", quoteMatch[1].replace(/^>\s*/gm, ""));
|
|
186
|
+
} else {
|
|
187
|
+
parts.push("", "Description:", issue.description);
|
|
188
|
+
}
|
|
189
|
+
}
|
|
190
|
+
parts.push("", `URL: ${issue.url}`);
|
|
191
|
+
return parts.join("\n");
|
|
192
|
+
}
|
|
193
|
+
/**
|
|
194
|
+
* Extract source URL from description
|
|
195
|
+
*/
|
|
196
|
+
extractSourceUrl(description) {
|
|
197
|
+
if (!description) return void 0;
|
|
198
|
+
const urlMatch = description.match(/\*\*Source:\*\*\s*\[.+?\]\((.+?)\)/);
|
|
199
|
+
return urlMatch?.[1];
|
|
118
200
|
}
|
|
119
201
|
/**
|
|
120
202
|
* Handle issue update
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../../../src/integrations/linear/webhook-handler.ts"],
|
|
4
|
-
"sourcesContent": ["/**\n * Linear Webhook Handler\n * Processes incoming webhooks from Linear to update local task store\n */\n\nimport { createHmac } from 'crypto';\nimport { LinearTaskManager } from '../../features/tasks/linear-task-manager.js';\nimport { LinearSyncEngine } from './sync.js';\nimport { LinearAuthManager } from './auth.js';\nimport { LinearClient } from './client.js';\nimport { IntegrationError, ErrorCode } from '../../core/errors/index.js';\nimport { logger } from '../../core/monitoring/logger.js';\nimport type { Request, Response } from 'express';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new IntegrationError(\n `Environment variable ${key} is required`,\n ErrorCode.LINEAR_WEBHOOK_FAILED\n );\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\nexport interface LinearWebhookPayload {\n action: 'create' | 'update' | 'remove';\n createdAt: string;\n data: {\n id: string;\n identifier: string;\n title: string;\n description?: string;\n state: {\n id: string;\n name: string;\n type: 'backlog' | 'unstarted' | 'started' | 'completed' | 'cancelled';\n };\n priority?: number;\n estimate?: number;\n assignee?: {\n id: string;\n name: string;\n email: string;\n };\n labels?: Array<{ id: string; name: string }>;\n updatedAt: string;\n url: string;\n };\n type: 'Issue';\n organizationId: string;\n webhookId: string;\n}\n\nexport class LinearWebhookHandler {\n private taskStore: LinearTaskManager;\n private syncEngine: LinearSyncEngine | null = null;\n private webhookSecret: string;\n\n constructor(taskStore: LinearTaskManager, webhookSecret: string) {\n this.taskStore = taskStore;\n this.webhookSecret = webhookSecret;\n\n // Initialize sync engine if API key is available\n if (process.env['LINEAR_API_KEY']) 
{\n const authManager = new LinearAuthManager();\n this.syncEngine = new LinearSyncEngine(taskStore, authManager, {\n enabled: true,\n direction: 'from_linear',\n autoSync: false,\n conflictResolution: 'linear_wins',\n });\n }\n }\n\n /**\n * Verify webhook signature\n */\n private verifySignature(payload: string, signature: string): boolean {\n const hmac = createHmac('sha256', this.webhookSecret);\n hmac.update(payload);\n const expectedSignature = hmac.digest('hex');\n return signature === expectedSignature;\n }\n\n /**\n * Handle incoming webhook from Linear\n */\n async handleWebhook(req: Request, res: Response): Promise<void> {\n try {\n // Get raw body for signature verification\n const rawBody = JSON.stringify(req.body);\n const signature = req.headers['linear-signature'] as string;\n\n // Verify signature\n if (!this.verifySignature(rawBody, signature)) {\n logger.error('Invalid webhook signature');\n res.status(401).json({ error: 'Invalid signature' });\n return;\n }\n\n const payload = req.body as LinearWebhookPayload;\n\n // Only process Issue webhooks\n if (payload.type !== 'Issue') {\n res.status(200).json({ message: 'Ignored non-issue webhook' });\n return;\n }\n\n // Process based on action\n switch (payload.action) {\n case 'create':\n await this.handleIssueCreate(payload);\n break;\n case 'update':\n await this.handleIssueUpdate(payload);\n break;\n case 'remove':\n await this.handleIssueRemove(payload);\n break;\n default:\n logger.warn(`Unknown webhook action: ${payload.action}`);\n }\n\n res.status(200).json({ message: 'Webhook processed successfully' });\n } catch (error: unknown) {\n logger.error('Failed to process webhook:', error as Error);\n res.status(500).json({ error: 'Failed to process webhook' });\n }\n }\n\n /**\n * Handle issue creation\n */\n private async handleIssueCreate(\n payload: LinearWebhookPayload\n ): Promise<void> {\n const issue = payload.data;\n\n // Check if task already exists locally\n const existingTasks = 
this.taskStore.getActiveTasks();\n const exists = existingTasks.some(\n (t) =>\n t.title.includes(issue.identifier) ||\n t.external_refs?.linear === issue.id\n );\n\n if (exists) {\n logger.info(`Task ${issue.identifier} already exists locally`);\n return;\n }\n\n // Create local task\n const taskId = this.taskStore.createTask({\n title: `[${issue.identifier}] ${issue.title}`,\n description: issue.description || '',\n priority: this.mapLinearPriorityToLocal(issue.priority),\n frameId: 'linear-webhook',\n tags: issue.labels?.map((l) => l.name) || ['linear'],\n estimatedEffort: issue.estimate ? issue.estimate * 60 : undefined,\n assignee: issue.assignee?.name,\n });\n\n // Update task status if not pending\n const status = this.mapLinearStateToLocalStatus(issue.state.type);\n if (status !== 'pending') {\n this.taskStore.updateTaskStatus(\n taskId,\n status,\n `Synced from Linear (${issue.state.name})`\n );\n }\n\n // Store Linear mapping\n await this.storeLinearMapping(taskId, issue.id, issue.identifier);\n\n logger.info(`Created task ${taskId} from Linear issue ${issue.identifier}`);\n }\n\n /**\n * Handle issue update\n */\n private async handleIssueUpdate(\n payload: LinearWebhookPayload\n ): Promise<void> {\n const issue = payload.data;\n\n // Find local task by Linear ID or identifier\n const tasks = this.taskStore.getActiveTasks();\n const localTask = tasks.find(\n (t) =>\n t.title.includes(issue.identifier) ||\n t.external_refs?.linear === issue.id\n );\n\n if (!localTask) {\n // Task doesn't exist locally, create it\n await this.handleIssueCreate(payload);\n return;\n }\n\n // Update task status\n const newStatus = this.mapLinearStateToLocalStatus(issue.state.type);\n if (newStatus !== localTask.status) {\n this.taskStore.updateTaskStatus(\n localTask.id,\n newStatus,\n `Updated from Linear (${issue.state.name})`\n );\n }\n\n // Update priority if changed\n const newPriority = this.mapLinearPriorityToLocal(issue.priority);\n if (newPriority !== 
localTask.priority) {\n // Note: Would need to add updateTaskPriority method to taskStore\n logger.info(\n `Priority changed for ${issue.identifier}: ${localTask.priority} -> ${newPriority}`\n );\n }\n\n logger.info(\n `Updated task ${localTask.id} from Linear issue ${issue.identifier}`\n );\n }\n\n /**\n * Handle issue removal\n */\n private async handleIssueRemove(\n payload: LinearWebhookPayload\n ): Promise<void> {\n const issue = payload.data;\n\n // Find and cancel local task\n const tasks = this.taskStore.getActiveTasks();\n const localTask = tasks.find(\n (t) =>\n t.title.includes(issue.identifier) ||\n t.external_refs?.linear === issue.id\n );\n\n if (localTask) {\n this.taskStore.updateTaskStatus(\n localTask.id,\n 'cancelled',\n `Removed in Linear`\n );\n logger.info(\n `Cancelled task ${localTask.id} (Linear issue ${issue.identifier} was removed)`\n );\n }\n }\n\n /**\n * Store Linear mapping for a task\n */\n private async storeLinearMapping(\n taskId: string,\n linearId: string,\n linearIdentifier: string\n ): Promise<void> {\n // This would update the linear-mappings.json file\n // For now, just log it\n logger.info(\n `Mapped task ${taskId} to Linear ${linearIdentifier} (${linearId})`\n );\n }\n\n /**\n * Map Linear priority to local priority\n */\n private mapLinearPriorityToLocal(\n priority?: number\n ): 'urgent' | 'high' | 'medium' | 'low' {\n if (!priority) return 'medium';\n switch (priority) {\n case 0:\n return 'urgent';\n case 1:\n return 'high';\n case 2:\n return 'medium';\n case 3:\n case 4:\n return 'low';\n default:\n return 'medium';\n }\n }\n\n /**\n * Map Linear state to local status\n */\n private mapLinearStateToLocalStatus(\n state: string\n ): 'pending' | 'in_progress' | 'completed' | 'cancelled' | 'blocked' {\n switch (state) {\n case 'backlog':\n case 'unstarted':\n return 'pending';\n case 'started':\n return 'in_progress';\n case 'completed':\n return 'completed';\n case 'cancelled':\n return 'cancelled';\n default:\n return 
'pending';\n }\n }\n}\n"],
|
|
5
|
-
"mappings": ";;;;AAKA,SAAS,kBAAkB;AAE3B,SAAS,wBAAwB;AACjC,SAAS,yBAAyB;AAElC,SAAS,kBAAkB,iBAAiB;AAC5C,SAAS,cAAc;
|
|
4
|
+
"sourcesContent": ["/**\n * Linear Webhook Handler\n * Processes incoming webhooks from Linear to update local task store\n */\n\nimport { createHmac } from 'crypto';\nimport { LinearTaskManager } from '../../features/tasks/linear-task-manager.js';\nimport { LinearSyncEngine } from './sync.js';\nimport { LinearAuthManager } from './auth.js';\nimport { LinearClient } from './client.js';\nimport { IntegrationError, ErrorCode } from '../../core/errors/index.js';\nimport { logger } from '../../core/monitoring/logger.js';\nimport { ClaudeCodeSubagentClient } from '../claude-code/subagent-client.js';\nimport type { Request, Response } from 'express';\n\n/** Labels that trigger automated Claude Code subagent */\nconst AUTOMATION_LABELS = ['automated', 'claude-code', 'stackmemory'];\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new IntegrationError(\n `Environment variable ${key} is required`,\n ErrorCode.LINEAR_WEBHOOK_FAILED\n );\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\nexport interface LinearWebhookPayload {\n action: 'create' | 'update' | 'remove';\n createdAt: string;\n data: {\n id: string;\n identifier: string;\n title: string;\n description?: string;\n state: {\n id: string;\n name: string;\n type: 'backlog' | 'unstarted' | 'started' | 'completed' | 'cancelled';\n };\n priority?: number;\n estimate?: number;\n assignee?: {\n id: string;\n name: string;\n email: string;\n };\n labels?: Array<{ id: string; name: string }>;\n updatedAt: string;\n url: string;\n };\n type: 'Issue';\n organizationId: string;\n webhookId: string;\n}\n\nexport class LinearWebhookHandler {\n private taskStore: LinearTaskManager;\n private syncEngine: LinearSyncEngine | null = null;\n private webhookSecret: string;\n\n 
constructor(taskStore: LinearTaskManager, webhookSecret: string) {\n this.taskStore = taskStore;\n this.webhookSecret = webhookSecret;\n\n // Initialize sync engine if API key is available\n if (process.env['LINEAR_API_KEY']) {\n const authManager = new LinearAuthManager();\n this.syncEngine = new LinearSyncEngine(taskStore, authManager, {\n enabled: true,\n direction: 'from_linear',\n autoSync: false,\n conflictResolution: 'linear_wins',\n });\n }\n }\n\n /**\n * Verify webhook signature\n */\n private verifySignature(payload: string, signature: string): boolean {\n const hmac = createHmac('sha256', this.webhookSecret);\n hmac.update(payload);\n const expectedSignature = hmac.digest('hex');\n return signature === expectedSignature;\n }\n\n /**\n * Handle incoming webhook from Linear\n */\n async handleWebhook(req: Request, res: Response): Promise<void> {\n try {\n // Get raw body for signature verification\n const rawBody = JSON.stringify(req.body);\n const signature = req.headers['linear-signature'] as string;\n\n // Verify signature\n if (!this.verifySignature(rawBody, signature)) {\n logger.error('Invalid webhook signature');\n res.status(401).json({ error: 'Invalid signature' });\n return;\n }\n\n const payload = req.body as LinearWebhookPayload;\n\n // Only process Issue webhooks\n if (payload.type !== 'Issue') {\n res.status(200).json({ message: 'Ignored non-issue webhook' });\n return;\n }\n\n // Process based on action\n switch (payload.action) {\n case 'create':\n await this.handleIssueCreate(payload);\n break;\n case 'update':\n await this.handleIssueUpdate(payload);\n break;\n case 'remove':\n await this.handleIssueRemove(payload);\n break;\n default:\n logger.warn(`Unknown webhook action: ${payload.action}`);\n }\n\n res.status(200).json({ message: 'Webhook processed successfully' });\n } catch (error: unknown) {\n logger.error('Failed to process webhook:', error as Error);\n res.status(500).json({ error: 'Failed to process webhook' });\n }\n }\n\n 
/**\n * Handle issue creation\n */\n private async handleIssueCreate(\n payload: LinearWebhookPayload\n ): Promise<void> {\n const issue = payload.data;\n\n // Check if task already exists locally\n const existingTasks = this.taskStore.getActiveTasks();\n const exists = existingTasks.some(\n (t) =>\n t.title.includes(issue.identifier) ||\n t.external_refs?.linear === issue.id\n );\n\n if (exists) {\n logger.info(`Task ${issue.identifier} already exists locally`);\n return;\n }\n\n // Create local task\n const taskId = this.taskStore.createTask({\n title: `[${issue.identifier}] ${issue.title}`,\n description: issue.description || '',\n priority: this.mapLinearPriorityToLocal(issue.priority),\n frameId: 'linear-webhook',\n tags: issue.labels?.map((l) => l.name) || ['linear'],\n estimatedEffort: issue.estimate ? issue.estimate * 60 : undefined,\n assignee: issue.assignee?.name,\n });\n\n // Update task status if not pending\n const status = this.mapLinearStateToLocalStatus(issue.state.type);\n if (status !== 'pending') {\n this.taskStore.updateTaskStatus(\n taskId,\n status,\n `Synced from Linear (${issue.state.name})`\n );\n }\n\n // Store Linear mapping\n await this.storeLinearMapping(taskId, issue.id, issue.identifier);\n\n logger.info(`Created task ${taskId} from Linear issue ${issue.identifier}`);\n\n // Check if we should spawn a Claude Code subagent\n if (this.shouldSpawnSubagent(issue)) {\n await this.spawnSubagentForIssue(issue);\n }\n }\n\n /**\n * Check if issue should trigger subagent spawn\n */\n private shouldSpawnSubagent(issue: LinearWebhookPayload['data']): boolean {\n if (!issue.labels) return false;\n const labelNames = issue.labels.map((l) => l.name.toLowerCase());\n return AUTOMATION_LABELS.some((label) => labelNames.includes(label));\n }\n\n /**\n * Spawn a Claude Code subagent for the issue\n */\n private async spawnSubagentForIssue(\n issue: LinearWebhookPayload['data']\n ): Promise<void> {\n logger.info(`Spawning subagent for 
${issue.identifier}`);\n\n try {\n const client = new ClaudeCodeSubagentClient();\n const agentType = this.determineAgentType(issue.labels || []);\n const task = this.buildTaskPrompt(issue);\n const sourceUrl = this.extractSourceUrl(issue.description);\n\n const result = await client.executeSubagent({\n type: agentType,\n task,\n context: {\n linearIssueId: issue.id,\n linearIdentifier: issue.identifier,\n linearUrl: issue.url,\n sourceUrl: sourceUrl || issue.url,\n },\n timeout: 5 * 60 * 1000, // 5 min\n });\n\n logger.info(`Subagent completed for ${issue.identifier}`, {\n sessionId: result.sessionId,\n status: result.status,\n });\n } catch (error) {\n logger.error(`Failed to spawn subagent for ${issue.identifier}`, {\n error: error instanceof Error ? error.message : 'Unknown error',\n });\n }\n }\n\n /**\n * Determine agent type from issue labels\n */\n private determineAgentType(\n labels: Array<{ name: string }>\n ): 'code' | 'review' | 'context' {\n const lowerLabels = labels.map((l) => l.name.toLowerCase());\n\n if (lowerLabels.some((l) => l.includes('review') || l.includes('pr'))) {\n return 'review';\n }\n\n if (\n lowerLabels.some((l) => l.includes('explore') || l.includes('research'))\n ) {\n return 'context';\n }\n\n return 'code';\n }\n\n /**\n * Build task prompt from Linear issue\n */\n private buildTaskPrompt(issue: LinearWebhookPayload['data']): string {\n const parts = [`Linear Issue: ${issue.identifier} - ${issue.title}`];\n\n if (issue.description) {\n const quoteMatch = issue.description.match(/^>\\s*(.+?)(?:\\n\\n|$)/s);\n if (quoteMatch) {\n parts.push('', 'Context:', quoteMatch[1].replace(/^>\\s*/gm, ''));\n } else {\n parts.push('', 'Description:', issue.description);\n }\n }\n\n parts.push('', `URL: ${issue.url}`);\n return parts.join('\\n');\n }\n\n /**\n * Extract source URL from description\n */\n private extractSourceUrl(description?: string): string | undefined {\n if (!description) return undefined;\n const urlMatch = 
description.match(/\\*\\*Source:\\*\\*\\s*\\[.+?\\]\\((.+?)\\)/);\n return urlMatch?.[1];\n }\n\n /**\n * Handle issue update\n */\n private async handleIssueUpdate(\n payload: LinearWebhookPayload\n ): Promise<void> {\n const issue = payload.data;\n\n // Find local task by Linear ID or identifier\n const tasks = this.taskStore.getActiveTasks();\n const localTask = tasks.find(\n (t) =>\n t.title.includes(issue.identifier) ||\n t.external_refs?.linear === issue.id\n );\n\n if (!localTask) {\n // Task doesn't exist locally, create it\n await this.handleIssueCreate(payload);\n return;\n }\n\n // Update task status\n const newStatus = this.mapLinearStateToLocalStatus(issue.state.type);\n if (newStatus !== localTask.status) {\n this.taskStore.updateTaskStatus(\n localTask.id,\n newStatus,\n `Updated from Linear (${issue.state.name})`\n );\n }\n\n // Update priority if changed\n const newPriority = this.mapLinearPriorityToLocal(issue.priority);\n if (newPriority !== localTask.priority) {\n // Note: Would need to add updateTaskPriority method to taskStore\n logger.info(\n `Priority changed for ${issue.identifier}: ${localTask.priority} -> ${newPriority}`\n );\n }\n\n logger.info(\n `Updated task ${localTask.id} from Linear issue ${issue.identifier}`\n );\n }\n\n /**\n * Handle issue removal\n */\n private async handleIssueRemove(\n payload: LinearWebhookPayload\n ): Promise<void> {\n const issue = payload.data;\n\n // Find and cancel local task\n const tasks = this.taskStore.getActiveTasks();\n const localTask = tasks.find(\n (t) =>\n t.title.includes(issue.identifier) ||\n t.external_refs?.linear === issue.id\n );\n\n if (localTask) {\n this.taskStore.updateTaskStatus(\n localTask.id,\n 'cancelled',\n `Removed in Linear`\n );\n logger.info(\n `Cancelled task ${localTask.id} (Linear issue ${issue.identifier} was removed)`\n );\n }\n }\n\n /**\n * Store Linear mapping for a task\n */\n private async storeLinearMapping(\n taskId: string,\n linearId: string,\n 
linearIdentifier: string\n ): Promise<void> {\n // This would update the linear-mappings.json file\n // For now, just log it\n logger.info(\n `Mapped task ${taskId} to Linear ${linearIdentifier} (${linearId})`\n );\n }\n\n /**\n * Map Linear priority to local priority\n */\n private mapLinearPriorityToLocal(\n priority?: number\n ): 'urgent' | 'high' | 'medium' | 'low' {\n if (!priority) return 'medium';\n switch (priority) {\n case 0:\n return 'urgent';\n case 1:\n return 'high';\n case 2:\n return 'medium';\n case 3:\n case 4:\n return 'low';\n default:\n return 'medium';\n }\n }\n\n /**\n * Map Linear state to local status\n */\n private mapLinearStateToLocalStatus(\n state: string\n ): 'pending' | 'in_progress' | 'completed' | 'cancelled' | 'blocked' {\n switch (state) {\n case 'backlog':\n case 'unstarted':\n return 'pending';\n case 'started':\n return 'in_progress';\n case 'completed':\n return 'completed';\n case 'cancelled':\n return 'cancelled';\n default:\n return 'pending';\n }\n }\n}\n"],
|
|
5
|
+
"mappings": ";;;;AAKA,SAAS,kBAAkB;AAE3B,SAAS,wBAAwB;AACjC,SAAS,yBAAyB;AAElC,SAAS,kBAAkB,iBAAiB;AAC5C,SAAS,cAAc;AACvB,SAAS,gCAAgC;AAIzC,MAAM,oBAAoB,CAAC,aAAa,eAAe,aAAa;AAEpE,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI;AAAA,MACR,wBAAwB,GAAG;AAAA,MAC3B,UAAU;AAAA,IACZ;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AA+BO,MAAM,qBAAqB;AAAA,EACxB;AAAA,EACA,aAAsC;AAAA,EACtC;AAAA,EAER,YAAY,WAA8B,eAAuB;AAC/D,SAAK,YAAY;AACjB,SAAK,gBAAgB;AAGrB,QAAI,QAAQ,IAAI,gBAAgB,GAAG;AACjC,YAAM,cAAc,IAAI,kBAAkB;AAC1C,WAAK,aAAa,IAAI,iBAAiB,WAAW,aAAa;AAAA,QAC7D,SAAS;AAAA,QACT,WAAW;AAAA,QACX,UAAU;AAAA,QACV,oBAAoB;AAAA,MACtB,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,SAAiB,WAA4B;AACnE,UAAM,OAAO,WAAW,UAAU,KAAK,aAAa;AACpD,SAAK,OAAO,OAAO;AACnB,UAAM,oBAAoB,KAAK,OAAO,KAAK;AAC3C,WAAO,cAAc;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,KAAc,KAA8B;AAC9D,QAAI;AAEF,YAAM,UAAU,KAAK,UAAU,IAAI,IAAI;AACvC,YAAM,YAAY,IAAI,QAAQ,kBAAkB;AAGhD,UAAI,CAAC,KAAK,gBAAgB,SAAS,SAAS,GAAG;AAC7C,eAAO,MAAM,2BAA2B;AACxC,YAAI,OAAO,GAAG,EAAE,KAAK,EAAE,OAAO,oBAAoB,CAAC;AACnD;AAAA,MACF;AAEA,YAAM,UAAU,IAAI;AAGpB,UAAI,QAAQ,SAAS,SAAS;AAC5B,YAAI,OAAO,GAAG,EAAE,KAAK,EAAE,SAAS,4BAA4B,CAAC;AAC7D;AAAA,MACF;AAGA,cAAQ,QAAQ,QAAQ;AAAA,QACtB,KAAK;AACH,gBAAM,KAAK,kBAAkB,OAAO;AACpC;AAAA,QACF,KAAK;AACH,gBAAM,KAAK,kBAAkB,OAAO;AACpC;AAAA,QACF,KAAK;AACH,gBAAM,KAAK,kBAAkB,OAAO;AACpC;AAAA,QACF;AACE,iBAAO,KAAK,2BAA2B,QAAQ,MAAM,EAAE;AAAA,MAC3D;AAEA,UAAI,OAAO,GAAG,EAAE,KAAK,EAAE,SAAS,iCAAiC,CAAC;AAAA,IACpE,SAAS,OAAgB;AACvB,aAAO,MAAM,8BAA8B,KAAc;AACzD,UAAI,OAAO,GAAG,EAAE,KAAK,EAAE,OAAO,4BAA4B,CAAC;AAAA,IAC7D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBACZ,SACe;AACf,UAAM,QAAQ,QAAQ;AAGtB,UAAM,gBAAgB,KAAK,UAAU,eAAe;AACpD,UAAM,SAAS,cAAc;AAAA,MAC3B,CAAC,MACC,EAAE,MAAM,SAAS,MAAM,UAAU,KACjC,EAAE,eAAe,WAAW,MAAM;AAAA,IACtC;AAEA,QAAI,QAAQ;AACV,aAAO,KAAK,QAAQ,MAAM,UAAU,yBAAyB;AAC7D;AAAA,IACF;AAGA,UAAM,SAAS,KAAK,UAAU,WAAW;AAAA,MACvC,OAAO,IAAI,MAAM,UAAU,KAA
K,MAAM,KAAK;AAAA,MAC3C,aAAa,MAAM,eAAe;AAAA,MAClC,UAAU,KAAK,yBAAyB,MAAM,QAAQ;AAAA,MACtD,SAAS;AAAA,MACT,MAAM,MAAM,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI,KAAK,CAAC,QAAQ;AAAA,MACnD,iBAAiB,MAAM,WAAW,MAAM,WAAW,KAAK;AAAA,MACxD,UAAU,MAAM,UAAU;AAAA,IAC5B,CAAC;AAGD,UAAM,SAAS,KAAK,4BAA4B,MAAM,MAAM,IAAI;AAChE,QAAI,WAAW,WAAW;AACxB,WAAK,UAAU;AAAA,QACb;AAAA,QACA;AAAA,QACA,uBAAuB,MAAM,MAAM,IAAI;AAAA,MACzC;AAAA,IACF;AAGA,UAAM,KAAK,mBAAmB,QAAQ,MAAM,IAAI,MAAM,UAAU;AAEhE,WAAO,KAAK,gBAAgB,MAAM,sBAAsB,MAAM,UAAU,EAAE;AAG1E,QAAI,KAAK,oBAAoB,KAAK,GAAG;AACnC,YAAM,KAAK,sBAAsB,KAAK;AAAA,IACxC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,OAA8C;AACxE,QAAI,CAAC,MAAM,OAAQ,QAAO;AAC1B,UAAM,aAAa,MAAM,OAAO,IAAI,CAAC,MAAM,EAAE,KAAK,YAAY,CAAC;AAC/D,WAAO,kBAAkB,KAAK,CAAC,UAAU,WAAW,SAAS,KAAK,CAAC;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBACZ,OACe;AACf,WAAO,KAAK,yBAAyB,MAAM,UAAU,EAAE;AAEvD,QAAI;AACF,YAAM,SAAS,IAAI,yBAAyB;AAC5C,YAAM,YAAY,KAAK,mBAAmB,MAAM,UAAU,CAAC,CAAC;AAC5D,YAAM,OAAO,KAAK,gBAAgB,KAAK;AACvC,YAAM,YAAY,KAAK,iBAAiB,MAAM,WAAW;AAEzD,YAAM,SAAS,MAAM,OAAO,gBAAgB;AAAA,QAC1C,MAAM;AAAA,QACN;AAAA,QACA,SAAS;AAAA,UACP,eAAe,MAAM;AAAA,UACrB,kBAAkB,MAAM;AAAA,UACxB,WAAW,MAAM;AAAA,UACjB,WAAW,aAAa,MAAM;AAAA,QAChC;AAAA,QACA,SAAS,IAAI,KAAK;AAAA;AAAA,MACpB,CAAC;AAED,aAAO,KAAK,0BAA0B,MAAM,UAAU,IAAI;AAAA,QACxD,WAAW,OAAO;AAAA,QAClB,QAAQ,OAAO;AAAA,MACjB,CAAC;AAAA,IACH,SAAS,OAAO;AACd,aAAO,MAAM,gCAAgC,MAAM,UAAU,IAAI;AAAA,QAC/D,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,MAClD,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBACN,QAC+B;AAC/B,UAAM,cAAc,OAAO,IAAI,CAAC,MAAM,EAAE,KAAK,YAAY,CAAC;AAE1D,QAAI,YAAY,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ,KAAK,EAAE,SAAS,IAAI,CAAC,GAAG;AACrE,aAAO;AAAA,IACT;AAEA,QACE,YAAY,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS,KAAK,EAAE,SAAS,UAAU,CAAC,GACvE;AACA,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,OAA6C;AACnE,UAAM,QAAQ,CAAC,iBAAiB,MAAM,UAAU,MAAM,MAAM,KAAK,EAAE;AAEnE,QAAI,MAAM,aAAa;AACrB,YAAM,aAAa,MAAM,YAAY,MAAM,uBAAuB;AAClE,UAAI,YAAY;AACd,cAAM,KAAK,IAAI,YAAY,WAAW,CAAC,EAAE,QAAQ,WAAW,EAAE,CAAC;AAAA,MACjE,OAA
O;AACL,cAAM,KAAK,IAAI,gBAAgB,MAAM,WAAW;AAAA,MAClD;AAAA,IACF;AAEA,UAAM,KAAK,IAAI,QAAQ,MAAM,GAAG,EAAE;AAClC,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,aAA0C;AACjE,QAAI,CAAC,YAAa,QAAO;AACzB,UAAM,WAAW,YAAY,MAAM,oCAAoC;AACvE,WAAO,WAAW,CAAC;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBACZ,SACe;AACf,UAAM,QAAQ,QAAQ;AAGtB,UAAM,QAAQ,KAAK,UAAU,eAAe;AAC5C,UAAM,YAAY,MAAM;AAAA,MACtB,CAAC,MACC,EAAE,MAAM,SAAS,MAAM,UAAU,KACjC,EAAE,eAAe,WAAW,MAAM;AAAA,IACtC;AAEA,QAAI,CAAC,WAAW;AAEd,YAAM,KAAK,kBAAkB,OAAO;AACpC;AAAA,IACF;AAGA,UAAM,YAAY,KAAK,4BAA4B,MAAM,MAAM,IAAI;AACnE,QAAI,cAAc,UAAU,QAAQ;AAClC,WAAK,UAAU;AAAA,QACb,UAAU;AAAA,QACV;AAAA,QACA,wBAAwB,MAAM,MAAM,IAAI;AAAA,MAC1C;AAAA,IACF;AAGA,UAAM,cAAc,KAAK,yBAAyB,MAAM,QAAQ;AAChE,QAAI,gBAAgB,UAAU,UAAU;AAEtC,aAAO;AAAA,QACL,wBAAwB,MAAM,UAAU,KAAK,UAAU,QAAQ,OAAO,WAAW;AAAA,MACnF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,gBAAgB,UAAU,EAAE,sBAAsB,MAAM,UAAU;AAAA,IACpE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBACZ,SACe;AACf,UAAM,QAAQ,QAAQ;AAGtB,UAAM,QAAQ,KAAK,UAAU,eAAe;AAC5C,UAAM,YAAY,MAAM;AAAA,MACtB,CAAC,MACC,EAAE,MAAM,SAAS,MAAM,UAAU,KACjC,EAAE,eAAe,WAAW,MAAM;AAAA,IACtC;AAEA,QAAI,WAAW;AACb,WAAK,UAAU;AAAA,QACb,UAAU;AAAA,QACV;AAAA,QACA;AAAA,MACF;AACA,aAAO;AAAA,QACL,kBAAkB,UAAU,EAAE,kBAAkB,MAAM,UAAU;AAAA,MAClE;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBACZ,QACA,UACA,kBACe;AAGf,WAAO;AAAA,MACL,eAAe,MAAM,cAAc,gBAAgB,KAAK,QAAQ;AAAA,IAClE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,yBACN,UACsC;AACtC,QAAI,CAAC,SAAU,QAAO;AACtB,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,4BACN,OACmE;AACnE,YAAQ,OAAO;AAAA,MACb,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT;AACE,eAAO;AAAA,IACX;AAAA,EACF;AACF;",
|
|
6
6
|
"names": []
|
|
7
7
|
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { fileURLToPath as __fileURLToPath } from 'url';
|
|
2
|
+
import { dirname as __pathDirname } from 'path';
|
|
3
|
+
const __filename = __fileURLToPath(import.meta.url);
|
|
4
|
+
const __dirname = __pathDirname(__filename);
|
|
5
|
+
/**
 * Filter, sort, and limit a list of pending approval items.
 *
 * @param {Array<{approvalId: string, task: string, createdAt: number|null}>} items
 *   Items to filter; the input array is never mutated.
 * @param {object} [filters] Optional filters:
 *   - taskContains: case-insensitive substring match on `task`
 *   - olderThanMs:  keep items whose age (now - createdAt) exceeds this;
 *                   items with a null createdAt are dropped
 *   - newerThanMs:  keep items whose age is below this; null createdAt dropped
 *   - sort:         "asc" | "desc" by createdAt (null sorts last in asc,
 *                   first in desc)
 *   - limit:        cap the number of returned items
 * @param {number} [now] Reference timestamp (defaults to Date.now()).
 * @returns {Array} New filtered/sorted array.
 */
function filterPending(items, filters = {}, now = Date.now()) {
  let out = [...items];
  const { taskContains, olderThanMs, newerThanMs, sort, limit } = filters;
  if (taskContains) {
    const q = taskContains.toLowerCase();
    out = out.filter((it) => (it.task || "").toLowerCase().includes(q));
  }
  if (typeof olderThanMs === "number") {
    out = out.filter(
      (it) => typeof it.createdAt === "number" && now - it.createdAt > olderThanMs
    );
  }
  if (typeof newerThanMs === "number") {
    out = out.filter(
      (it) => typeof it.createdAt === "number" && now - it.createdAt < newerThanMs
    );
  }
  // Treat null timestamps as Infinity (sort last in asc, first in desc).
  const getTs = (t) => (t === null ? Infinity : t);
  // Compare explicitly rather than subtracting: with two null timestamps the
  // original `getTs(a) - getTs(b)` yields Infinity - Infinity = NaN, and a
  // NaN-returning comparator makes Array.prototype.sort's order unspecified.
  const cmp = (x, y) => (x < y ? -1 : x > y ? 1 : 0);
  if (sort === "asc")
    out.sort((a, b) => cmp(getTs(a.createdAt), getTs(b.createdAt)));
  if (sort === "desc")
    out.sort((a, b) => cmp(getTs(b.createdAt), getTs(a.createdAt)));
  if (typeof limit === "number") out = out.slice(0, limit);
  return out;
}
|
|
30
|
+
export {
|
|
31
|
+
filterPending
|
|
32
|
+
};
|
|
33
|
+
//# sourceMappingURL=pending-utils.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../../../src/integrations/mcp/pending-utils.ts"],
|
|
4
|
+
"sourcesContent": ["export interface PendingItem {\n approvalId: string;\n task: string;\n createdAt: number | null;\n}\n\nexport interface PendingFilters {\n taskContains?: string;\n olderThanMs?: number;\n newerThanMs?: number;\n sort?: 'asc' | 'desc';\n limit?: number;\n}\n\nexport function filterPending(\n items: PendingItem[],\n filters: PendingFilters = {},\n now: number = Date.now()\n): PendingItem[] {\n let out = [...items];\n const { taskContains, olderThanMs, newerThanMs, sort, limit } = filters;\n\n if (taskContains) {\n const q = taskContains.toLowerCase();\n out = out.filter((it) => (it.task || '').toLowerCase().includes(q));\n }\n if (typeof olderThanMs === 'number') {\n out = out.filter(\n (it) =>\n typeof it.createdAt === 'number' &&\n now - (it.createdAt as number) > olderThanMs\n );\n }\n if (typeof newerThanMs === 'number') {\n out = out.filter(\n (it) =>\n typeof it.createdAt === 'number' &&\n now - (it.createdAt as number) < newerThanMs\n );\n }\n // Treat null timestamps as Infinity (sort last in asc, first in desc)\n const getTs = (t: number | null) => (t === null ? Infinity : t);\n if (sort === 'asc')\n out.sort((a, b) => getTs(a.createdAt) - getTs(b.createdAt));\n if (sort === 'desc')\n out.sort((a, b) => getTs(b.createdAt) - getTs(a.createdAt));\n if (typeof limit === 'number') out = out.slice(0, limit);\n return out;\n}\n"],
|
|
5
|
+
"mappings": ";;;;AAcO,SAAS,cACd,OACA,UAA0B,CAAC,GAC3B,MAAc,KAAK,IAAI,GACR;AACf,MAAI,MAAM,CAAC,GAAG,KAAK;AACnB,QAAM,EAAE,cAAc,aAAa,aAAa,MAAM,MAAM,IAAI;AAEhE,MAAI,cAAc;AAChB,UAAM,IAAI,aAAa,YAAY;AACnC,UAAM,IAAI,OAAO,CAAC,QAAQ,GAAG,QAAQ,IAAI,YAAY,EAAE,SAAS,CAAC,CAAC;AAAA,EACpE;AACA,MAAI,OAAO,gBAAgB,UAAU;AACnC,UAAM,IAAI;AAAA,MACR,CAAC,OACC,OAAO,GAAG,cAAc,YACxB,MAAO,GAAG,YAAuB;AAAA,IACrC;AAAA,EACF;AACA,MAAI,OAAO,gBAAgB,UAAU;AACnC,UAAM,IAAI;AAAA,MACR,CAAC,OACC,OAAO,GAAG,cAAc,YACxB,MAAO,GAAG,YAAuB;AAAA,IACrC;AAAA,EACF;AAEA,QAAM,QAAQ,CAAC,MAAsB,MAAM,OAAO,WAAW;AAC7D,MAAI,SAAS;AACX,QAAI,KAAK,CAAC,GAAG,MAAM,MAAM,EAAE,SAAS,IAAI,MAAM,EAAE,SAAS,CAAC;AAC5D,MAAI,SAAS;AACX,QAAI,KAAK,CAAC,GAAG,MAAM,MAAM,EAAE,SAAS,IAAI,MAAM,EAAE,SAAS,CAAC;AAC5D,MAAI,OAAO,UAAU,SAAU,OAAM,IAAI,MAAM,GAAG,KAAK;AACvD,SAAO;AACT;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|