@stackmemoryai/stackmemory 0.3.7 → 0.3.9

This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (202)
  1. package/dist/agents/core/agent-task-manager.js +5 -5
  2. package/dist/agents/core/agent-task-manager.js.map +2 -2
  3. package/dist/agents/verifiers/base-verifier.js +2 -2
  4. package/dist/agents/verifiers/base-verifier.js.map +2 -2
  5. package/dist/cli/claude-sm.js +0 -11
  6. package/dist/cli/claude-sm.js.map +2 -2
  7. package/dist/cli/codex-sm.js +0 -11
  8. package/dist/cli/codex-sm.js.map +2 -2
  9. package/dist/cli/commands/chromadb.js +64 -34
  10. package/dist/cli/commands/chromadb.js.map +2 -2
  11. package/dist/cli/commands/clear.js +9 -13
  12. package/dist/cli/commands/clear.js.map +2 -2
  13. package/dist/cli/commands/config.js +43 -33
  14. package/dist/cli/commands/config.js.map +2 -2
  15. package/dist/cli/commands/context.js.map +2 -2
  16. package/dist/cli/commands/dashboard.js +41 -13
  17. package/dist/cli/commands/dashboard.js.map +2 -2
  18. package/dist/cli/commands/gc.js +69 -20
  19. package/dist/cli/commands/gc.js.map +2 -2
  20. package/dist/cli/commands/handoff.js.map +2 -2
  21. package/dist/cli/commands/infinite-storage.js +60 -19
  22. package/dist/cli/commands/infinite-storage.js.map +2 -2
  23. package/dist/cli/commands/linear-create.js +36 -8
  24. package/dist/cli/commands/linear-create.js.map +2 -2
  25. package/dist/cli/commands/linear-list.js +33 -10
  26. package/dist/cli/commands/linear-list.js.map +2 -2
  27. package/dist/cli/commands/linear-migrate.js +17 -4
  28. package/dist/cli/commands/linear-migrate.js.map +2 -2
  29. package/dist/cli/commands/linear-test.js +14 -6
  30. package/dist/cli/commands/linear-test.js.map +2 -2
  31. package/dist/cli/commands/linear-unified.js +123 -35
  32. package/dist/cli/commands/linear-unified.js.map +2 -2
  33. package/dist/cli/commands/linear.js.map +2 -2
  34. package/dist/cli/commands/monitor.js.map +2 -2
  35. package/dist/cli/commands/onboard.js +35 -8
  36. package/dist/cli/commands/onboard.js.map +2 -2
  37. package/dist/cli/commands/quality.js +2 -7
  38. package/dist/cli/commands/quality.js.map +2 -2
  39. package/dist/cli/commands/session.js +23 -6
  40. package/dist/cli/commands/session.js.map +2 -2
  41. package/dist/cli/commands/skills.js +72 -27
  42. package/dist/cli/commands/skills.js.map +2 -2
  43. package/dist/cli/commands/storage.js +108 -38
  44. package/dist/cli/commands/storage.js.map +2 -2
  45. package/dist/cli/commands/tui.js.map +2 -2
  46. package/dist/cli/commands/webhook.js +57 -18
  47. package/dist/cli/commands/webhook.js.map +2 -2
  48. package/dist/cli/commands/workflow.js +8 -15
  49. package/dist/cli/commands/workflow.js.map +2 -2
  50. package/dist/cli/commands/worktree.js +34 -13
  51. package/dist/cli/commands/worktree.js.map +2 -2
  52. package/dist/cli/index.js +0 -11
  53. package/dist/cli/index.js.map +2 -2
  54. package/dist/core/config/types.js.map +1 -1
  55. package/dist/core/context/auto-context.js +10 -6
  56. package/dist/core/context/auto-context.js.map +2 -2
  57. package/dist/core/context/context-bridge.js.map +2 -2
  58. package/dist/core/context/frame-database.js +13 -3
  59. package/dist/core/context/frame-database.js.map +2 -2
  60. package/dist/core/context/frame-digest.js +7 -5
  61. package/dist/core/context/frame-digest.js.map +2 -2
  62. package/dist/core/context/frame-manager.js.map +2 -2
  63. package/dist/core/context/frame-stack.js +16 -5
  64. package/dist/core/context/frame-stack.js.map +2 -2
  65. package/dist/core/context/incremental-gc.js +10 -3
  66. package/dist/core/context/incremental-gc.js.map +2 -2
  67. package/dist/core/context/index.js.map +1 -1
  68. package/dist/core/context/permission-manager.js.map +2 -2
  69. package/dist/core/context/recursive-context-manager.js +582 -0
  70. package/dist/core/context/recursive-context-manager.js.map +7 -0
  71. package/dist/core/context/refactored-frame-manager.js +12 -3
  72. package/dist/core/context/refactored-frame-manager.js.map +2 -2
  73. package/dist/core/context/shared-context-layer.js +4 -2
  74. package/dist/core/context/shared-context-layer.js.map +2 -2
  75. package/dist/core/database/batch-operations.js +112 -86
  76. package/dist/core/database/batch-operations.js.map +2 -2
  77. package/dist/core/database/query-cache.js +19 -9
  78. package/dist/core/database/query-cache.js.map +2 -2
  79. package/dist/core/database/sqlite-adapter.js +1 -1
  80. package/dist/core/database/sqlite-adapter.js.map +2 -2
  81. package/dist/core/digest/enhanced-hybrid-digest.js +8 -2
  82. package/dist/core/digest/enhanced-hybrid-digest.js.map +2 -2
  83. package/dist/core/errors/recovery.js +9 -2
  84. package/dist/core/errors/recovery.js.map +2 -2
  85. package/dist/core/execution/parallel-executor.js +254 -0
  86. package/dist/core/execution/parallel-executor.js.map +7 -0
  87. package/dist/core/frame/workflow-templates-stub.js.map +1 -1
  88. package/dist/core/frame/workflow-templates.js +40 -1
  89. package/dist/core/frame/workflow-templates.js.map +2 -2
  90. package/dist/core/monitoring/logger.js +6 -1
  91. package/dist/core/monitoring/logger.js.map +2 -2
  92. package/dist/core/monitoring/metrics.js.map +2 -2
  93. package/dist/core/monitoring/progress-tracker.js.map +2 -2
  94. package/dist/core/performance/context-cache.js.map +2 -2
  95. package/dist/core/performance/lazy-context-loader.js +24 -20
  96. package/dist/core/performance/lazy-context-loader.js.map +2 -2
  97. package/dist/core/performance/optimized-frame-context.js +27 -12
  98. package/dist/core/performance/optimized-frame-context.js.map +2 -2
  99. package/dist/core/performance/performance-benchmark.js +10 -6
  100. package/dist/core/performance/performance-benchmark.js.map +2 -2
  101. package/dist/core/performance/performance-profiler.js +51 -14
  102. package/dist/core/performance/performance-profiler.js.map +2 -2
  103. package/dist/core/performance/streaming-jsonl-parser.js +5 -1
  104. package/dist/core/performance/streaming-jsonl-parser.js.map +2 -2
  105. package/dist/core/projects/project-manager.js +14 -20
  106. package/dist/core/projects/project-manager.js.map +2 -2
  107. package/dist/core/retrieval/context-retriever.js.map +1 -1
  108. package/dist/core/retrieval/llm-context-retrieval.js.map +2 -2
  109. package/dist/core/session/clear-survival-stub.js +5 -1
  110. package/dist/core/session/clear-survival-stub.js.map +2 -2
  111. package/dist/core/session/clear-survival.js +35 -0
  112. package/dist/core/session/clear-survival.js.map +2 -2
  113. package/dist/core/session/index.js.map +1 -1
  114. package/dist/core/session/session-manager.js.map +2 -2
  115. package/dist/core/storage/chromadb-adapter.js +6 -2
  116. package/dist/core/storage/chromadb-adapter.js.map +2 -2
  117. package/dist/core/storage/chromadb-simple.js +17 -5
  118. package/dist/core/storage/chromadb-simple.js.map +2 -2
  119. package/dist/core/storage/infinite-storage.js +109 -46
  120. package/dist/core/storage/infinite-storage.js.map +2 -2
  121. package/dist/core/storage/railway-optimized-storage.js +48 -22
  122. package/dist/core/storage/railway-optimized-storage.js.map +2 -2
  123. package/dist/core/storage/remote-storage.js +41 -23
  124. package/dist/core/storage/remote-storage.js.map +2 -2
  125. package/dist/core/trace/cli-trace-wrapper.js +9 -2
  126. package/dist/core/trace/cli-trace-wrapper.js.map +2 -2
  127. package/dist/core/trace/db-trace-wrapper.js +96 -68
  128. package/dist/core/trace/db-trace-wrapper.js.map +2 -2
  129. package/dist/core/trace/debug-trace.js +25 -8
  130. package/dist/core/trace/debug-trace.js.map +2 -2
  131. package/dist/core/trace/index.js +6 -2
  132. package/dist/core/trace/index.js.map +2 -2
  133. package/dist/core/trace/linear-api-wrapper.js +10 -5
  134. package/dist/core/trace/linear-api-wrapper.js.map +2 -2
  135. package/dist/core/trace/trace-demo.js +14 -10
  136. package/dist/core/trace/trace-demo.js.map +2 -2
  137. package/dist/core/trace/trace-detector.js +9 -2
  138. package/dist/core/trace/trace-detector.js.map +2 -2
  139. package/dist/core/trace/types.js.map +1 -1
  140. package/dist/core/utils/compression.js.map +1 -1
  141. package/dist/core/utils/update-checker.js.map +1 -1
  142. package/dist/core/worktree/worktree-manager.js +18 -7
  143. package/dist/core/worktree/worktree-manager.js.map +2 -2
  144. package/dist/features/analytics/core/analytics-service.js.map +2 -2
  145. package/dist/features/analytics/queries/metrics-queries.js +1 -1
  146. package/dist/features/analytics/queries/metrics-queries.js.map +2 -2
  147. package/dist/features/tasks/pebbles-task-store.js.map +1 -1
  148. package/dist/features/tui/components/analytics-panel.js +36 -15
  149. package/dist/features/tui/components/analytics-panel.js.map +2 -2
  150. package/dist/features/tui/components/pr-tracker.js +19 -7
  151. package/dist/features/tui/components/pr-tracker.js.map +2 -2
  152. package/dist/features/tui/components/session-monitor.js +22 -9
  153. package/dist/features/tui/components/session-monitor.js.map +2 -2
  154. package/dist/features/tui/components/subagent-fleet.js +20 -13
  155. package/dist/features/tui/components/subagent-fleet.js.map +2 -2
  156. package/dist/features/tui/components/task-board.js +26 -10
  157. package/dist/features/tui/components/task-board.js.map +2 -2
  158. package/dist/features/tui/index.js.map +2 -2
  159. package/dist/features/tui/services/data-service.js +6 -2
  160. package/dist/features/tui/services/data-service.js.map +2 -2
  161. package/dist/features/tui/services/linear-task-reader.js +3 -1
  162. package/dist/features/tui/services/linear-task-reader.js.map +2 -2
  163. package/dist/features/tui/services/websocket-client.js +3 -1
  164. package/dist/features/tui/services/websocket-client.js.map +2 -2
  165. package/dist/features/tui/terminal-compat.js +6 -2
  166. package/dist/features/tui/terminal-compat.js.map +2 -2
  167. package/dist/features/web/client/stores/task-store.js.map +2 -2
  168. package/dist/features/web/server/index.js +18 -10
  169. package/dist/features/web/server/index.js.map +2 -2
  170. package/dist/integrations/anthropic/client.js +259 -0
  171. package/dist/integrations/anthropic/client.js.map +7 -0
  172. package/dist/integrations/claude-code/subagent-client.js +404 -0
  173. package/dist/integrations/claude-code/subagent-client.js.map +7 -0
  174. package/dist/integrations/linear/sync-service.js +12 -13
  175. package/dist/integrations/linear/sync-service.js.map +2 -2
  176. package/dist/integrations/linear/sync.js +174 -12
  177. package/dist/integrations/linear/sync.js.map +2 -2
  178. package/dist/integrations/linear/unified-sync.js +1 -1
  179. package/dist/integrations/linear/unified-sync.js.map +1 -1
  180. package/dist/integrations/linear/webhook-server.js +15 -16
  181. package/dist/integrations/linear/webhook-server.js.map +2 -2
  182. package/dist/mcp/stackmemory-mcp-server.js +0 -11
  183. package/dist/mcp/stackmemory-mcp-server.js.map +2 -2
  184. package/dist/servers/production/auth-middleware.js.map +2 -2
  185. package/dist/servers/railway/index.js.map +2 -2
  186. package/dist/services/config-service.js +6 -7
  187. package/dist/services/config-service.js.map +2 -2
  188. package/dist/services/context-service.js +11 -12
  189. package/dist/services/context-service.js.map +2 -2
  190. package/dist/skills/claude-skills.js +101 -2
  191. package/dist/skills/claude-skills.js.map +2 -2
  192. package/dist/skills/dashboard-launcher.js.map +2 -2
  193. package/dist/skills/recursive-agent-orchestrator.js +559 -0
  194. package/dist/skills/recursive-agent-orchestrator.js.map +7 -0
  195. package/dist/skills/repo-ingestion-skill.js.map +2 -2
  196. package/dist/skills/security-secrets-scanner.js +265 -0
  197. package/dist/skills/security-secrets-scanner.js.map +7 -0
  198. package/dist/utils/env.js +46 -0
  199. package/dist/utils/env.js.map +7 -0
  200. package/dist/utils/logger.js +0 -11
  201. package/dist/utils/logger.js.map +2 -2
  202. package/package.json +1 -1
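The 202 files above are compiled artifacts under dist/; the new modules in this release (for example recursive-context-manager.js, parallel-executor.js, integrations/anthropic/client.js, and utils/env.js) appear as +N -0 additions. The single hunk shown below is the sourcemap for railway-optimized-storage.js, whose embedded source defines two type-safe environment-variable helpers, getEnv and getOptionalEnv. The TypeScript sketch below reproduces those helpers; it is only an assumption that the new dist/utils/env.js (entry 198) exports the same names.

// Sketch of the env helpers visible in the embedded sourcesContent below.
// Assumption: dist/utils/env.js exposes equivalent getEnv/getOptionalEnv exports.
export function getEnv(key: string, defaultValue?: string): string {
  const value = process.env[key];
  if (value === undefined) {
    if (defaultValue !== undefined) return defaultValue;
    throw new Error(`Environment variable ${key} is required`);
  }
  return value;
}

export function getOptionalEnv(key: string): string | undefined {
  return process.env[key];
}

// Example usage, with defaults matching DEFAULT_RAILWAY_CONFIG in the source below:
// const redisUrl = getEnv('REDIS_URL', 'redis://localhost:6379');
// const keyFile = getOptionalEnv('GCP_KEY_FILE');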
package/dist/core/storage/railway-optimized-storage.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/storage/railway-optimized-storage.ts"],
- "sourcesContent": ["/**\n * Railway-Optimized 3-Tier Storage System\n * Tier 1: Redis (Hot) - Last 24 hours, instant access\n * Tier 2: Railway Buckets (Warm) - 1-30 days, S3-compatible\n * Tier 3: GCS (Cold) - 30+ days, cost-effective archive\n */\n\nimport { createClient, RedisClientType } from 'redis';\nimport { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand, HeadObjectCommand } from '@aws-sdk/client-s3';\nimport { Storage } from '@google-cloud/storage';\nimport Database from 'better-sqlite3';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace, ToolCall } from '../trace/types.js';\nimport { ConfigManager } from '../config/config-manager.js';\nimport * as zlib from 'zlib';\nimport { promisify } from 'util';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\n\nconst gzipAsync = promisify(zlib.gzip);\nconst gunzipAsync = promisify(zlib.gunzip);\n\nexport enum StorageTier {\n HOT = 'hot', // Redis: < 24 hours\n WARM = 'warm', // Railway Buckets: 1-30 days \n COLD = 'cold' // GCS: 30+ days\n}\n\nexport interface RailwayStorageConfig {\n redis: {\n url: string;\n ttlSeconds: number;\n maxMemoryMb: number;\n };\n railwayBuckets: {\n endpoint: string;\n bucket: string;\n accessKeyId: string;\n secretAccessKey: string;\n region: string;\n };\n gcs: {\n bucketName: string;\n projectId: string;\n keyFilename?: string;\n };\n tiers: {\n hotHours: number; // Hours to keep in Redis\n warmDays: number; // Days to keep in Railway Buckets\n compressionScore: number; // Score threshold for early compression\n };\n}\n\nexport const DEFAULT_RAILWAY_CONFIG: RailwayStorageConfig = {\n redis: {\n url: process.env['REDIS_URL'] || 'redis://localhost:6379',\n ttlSeconds: 86400, // 24 hours\n maxMemoryMb: 100, // 100MB Redis limit\n },\n railwayBuckets: {\n endpoint: process.env['RAILWAY_BUCKET_ENDPOINT'] || 'https://buckets.railway.app',\n bucket: process.env['RAILWAY_BUCKET_NAME'] || 'stackmemory-warm',\n accessKeyId: process.env['RAILWAY_BUCKET_ACCESS_KEY'] || '',\n secretAccessKey: process.env['RAILWAY_BUCKET_SECRET_KEY'] || '',\n region: 'us-east-1',\n },\n gcs: {\n bucketName: process.env['GCS_BUCKET'] || 'stackmemory-cold',\n projectId: process.env['GCP_PROJECT_ID'] || 'stackmemory',\n keyFilename: process.env['GCP_KEY_FILE'],\n },\n tiers: {\n hotHours: 24,\n warmDays: 30,\n compressionScore: 0.4,\n }\n};\n\ninterface StorageMetrics {\n tier: StorageTier;\n originalSize: number;\n compressedSize: number;\n compressionRatio: number;\n accessCount: number;\n lastAccessed: number;\n migrationTime?: number;\n}\n\n/**\n * Railway-optimized storage manager with 3-tier architecture\n */\nexport class RailwayOptimizedStorage {\n private redisClient?: RedisClientType;\n private railwayS3?: S3Client;\n private gcsStorage?: Storage;\n private localDb: Database.Database;\n private config: RailwayStorageConfig;\n private configManager: ConfigManager;\n private metricsCache: Map<string, StorageMetrics> = new Map();\n \n private initialized: Promise<void>;\n \n constructor(\n localDb: Database.Database,\n configManager: ConfigManager,\n config?: Partial<RailwayStorageConfig>\n ) {\n this.localDb = localDb;\n 
this.configManager = configManager;\n this.config = { ...DEFAULT_RAILWAY_CONFIG, ...config };\n \n this.initializeSchema();\n this.initialized = this.initializeClients();\n }\n \n /**\n * Initialize storage clients\n */\n private async initializeClients(): Promise<void> {\n // Initialize Redis\n if (this.config.redis.url) {\n try {\n this.redisClient = createClient({ url: this.config.redis.url });\n \n this.redisClient.on('error', (err) => {\n logger.error('Redis client error', err);\n });\n \n await this.redisClient.connect();\n \n // Configure Redis memory policy\n await this.redisClient.configSet('maxmemory-policy', 'allkeys-lru');\n \n logger.info('Redis connected for hot tier storage');\n } catch (error: unknown) {\n logger.warn('Redis connection failed, falling back to SQLite only', error);\n }\n }\n \n // Initialize Railway S3-compatible buckets\n if (this.config.railwayBuckets.accessKeyId) {\n this.railwayS3 = new S3Client({\n endpoint: this.config.railwayBuckets.endpoint,\n region: this.config.railwayBuckets.region,\n credentials: {\n accessKeyId: this.config.railwayBuckets.accessKeyId,\n secretAccessKey: this.config.railwayBuckets.secretAccessKey,\n },\n forcePathStyle: true, // Required for Railway buckets\n });\n \n logger.info('Railway Buckets configured for warm tier');\n }\n \n // Initialize GCS for cold storage\n if (this.config.gcs.projectId) {\n try {\n this.gcsStorage = new Storage({\n projectId: this.config.gcs.projectId,\n keyFilename: this.config.gcs.keyFilename,\n });\n \n logger.info('GCS configured for cold tier storage');\n } catch (error: unknown) {\n logger.warn('GCS setup failed, will use Railway buckets only', error);\n }\n }\n }\n \n /**\n * Initialize database schema for tracking\n */\n private initializeSchema(): void {\n this.localDb.exec(`\n CREATE TABLE IF NOT EXISTS storage_tiers (\n trace_id TEXT PRIMARY KEY,\n tier TEXT NOT NULL,\n location TEXT NOT NULL,\n original_size INTEGER,\n compressed_size INTEGER,\n compression_ratio REAL,\n access_count INTEGER DEFAULT 0,\n last_accessed INTEGER,\n created_at INTEGER,\n migrated_at INTEGER,\n score REAL,\n FOREIGN KEY (trace_id) REFERENCES traces(id) ON DELETE CASCADE\n )\n `);\n \n this.localDb.exec(`\n CREATE INDEX IF NOT EXISTS idx_storage_tier ON storage_tiers(tier);\n CREATE INDEX IF NOT EXISTS idx_storage_created ON storage_tiers(created_at);\n CREATE INDEX IF NOT EXISTS idx_storage_accessed ON storage_tiers(last_accessed);\n `);\n }\n \n /**\n * Store a trace in the appropriate tier\n */\n async storeTrace(trace: Trace): Promise<StorageTier> {\n // Ensure clients are initialized\n await this.initialized;\n \n const score = trace.score;\n const age = Date.now() - trace.metadata.startTime;\n const ageHours = age / (1000 * 60 * 60);\n \n // Determine tier based on age and score\n let tier: StorageTier;\n if (ageHours < this.config.tiers.hotHours && score > this.config.tiers.compressionScore) {\n tier = StorageTier.HOT;\n } else if (ageHours < this.config.tiers.warmDays * 24) {\n tier = StorageTier.WARM;\n } else {\n tier = StorageTier.COLD;\n }\n \n // Store in appropriate tier\n switch (tier) {\n case StorageTier.HOT:\n await this.storeInRedis(trace);\n break;\n case StorageTier.WARM:\n await this.storeInRailwayBuckets(trace);\n break;\n case StorageTier.COLD:\n await this.storeInGCS(trace);\n break;\n }\n \n // Track in database\n this.trackStorage(trace.id, tier, trace);\n \n return tier;\n }\n \n /**\n * Store trace in Redis (hot tier)\n */\n private async storeInRedis(trace: Trace): Promise<void> 
{\n if (!this.redisClient) {\n // Fallback to local SQLite if Redis unavailable\n return;\n }\n \n try {\n const key = `trace:${trace.id}`;\n const data = JSON.stringify(trace);\n \n // Compress if large\n let storedData: string;\n if (data.length > 10000) {\n const compressed = await gzipAsync(data);\n storedData = compressed.toString('base64');\n await this.redisClient.hSet(key, {\n data: storedData,\n compressed: 'true',\n score: trace.score.toString(),\n type: trace.type,\n timestamp: trace.metadata.startTime.toString(),\n });\n } else {\n storedData = data;\n await this.redisClient.hSet(key, {\n data: storedData,\n compressed: 'false',\n score: trace.score.toString(),\n type: trace.type,\n timestamp: trace.metadata.startTime.toString(),\n });\n }\n \n // Set TTL\n await this.redisClient.expire(key, this.config.redis.ttlSeconds);\n \n // Add to sorted set for efficient retrieval\n await this.redisClient.zAdd('traces:by_score', {\n score: trace.score,\n value: trace.id,\n });\n \n await this.redisClient.zAdd('traces:by_time', {\n score: trace.metadata.startTime,\n value: trace.id,\n });\n \n logger.debug('Stored trace in Redis', { \n traceId: trace.id, \n size: data.length,\n compressed: data.length > 10000,\n });\n \n } catch (error: unknown) {\n logger.error('Failed to store in Redis', error);\n throw error;\n }\n }\n \n /**\n * Store trace in Railway Buckets (warm tier)\n */\n private async storeInRailwayBuckets(trace: Trace): Promise<void> {\n if (!this.railwayS3) {\n throw new Error('Railway Buckets not configured');\n }\n \n try {\n // Compress trace\n const data = JSON.stringify(trace);\n const compressed = await gzipAsync(data);\n \n // Generate key with date partitioning\n const date = new Date(trace.metadata.startTime);\n const key = `traces/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${String(date.getDate()).padStart(2, '0')}/${trace.id}.json.gz`;\n \n // Upload to Railway Bucket\n const command = new PutObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: key,\n Body: compressed,\n ContentType: 'application/gzip',\n Metadata: {\n 'trace-id': trace.id,\n 'trace-type': trace.type,\n 'trace-score': trace.score.toString(),\n 'original-size': data.length.toString(),\n 'compressed-size': compressed.length.toString(),\n },\n });\n \n await this.railwayS3.send(command);\n \n // Remove from Redis if exists\n if (this.redisClient) {\n await this.redisClient.del(`trace:${trace.id}`);\n }\n \n logger.info('Stored trace in Railway Buckets', {\n traceId: trace.id,\n key,\n originalSize: data.length,\n compressedSize: compressed.length,\n compressionRatio: (1 - compressed.length / data.length).toFixed(2),\n });\n \n } catch (error: unknown) {\n logger.error('Failed to store in Railway Buckets', error);\n throw error;\n }\n }\n \n /**\n * Store trace in GCS (cold tier)\n */\n private async storeInGCS(trace: Trace): Promise<void> {\n if (!this.gcsStorage) {\n // Fallback to Railway Buckets if GCS not available\n return this.storeInRailwayBuckets(trace);\n }\n \n try {\n // Heavy compression for cold storage\n const minimal = this.createMinimalTrace(trace);\n const data = JSON.stringify(minimal);\n const compressed = await gzipAsync(data);\n \n // Generate key with year/month partitioning\n const date = new Date(trace.metadata.startTime);\n const key = `archive/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${trace.id}.json.gz`;\n \n // Upload to GCS with Coldline storage class\n const bucket = 
this.gcsStorage.bucket(this.config.gcs.bucketName);\n const file = bucket.file(key);\n \n await file.save(compressed, {\n metadata: {\n contentType: 'application/gzip',\n metadata: {\n traceId: trace.id,\n traceType: trace.type,\n score: trace.score.toString(),\n originalTools: trace.tools.length.toString(),\n },\n },\n storageClass: 'COLDLINE', // Use Coldline for cost optimization\n });\n \n // Remove from warm tier if exists\n if (this.railwayS3) {\n try {\n const warmKey = this.getWarmTierKey(trace);\n await this.railwayS3.send(new DeleteObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: warmKey,\n }));\n } catch (error: unknown) {\n // Ignore deletion errors\n }\n }\n \n logger.info('Archived trace to GCS', {\n traceId: trace.id,\n key,\n originalSize: JSON.stringify(trace).length,\n compressedSize: compressed.length,\n });\n \n } catch (error: unknown) {\n logger.error('Failed to store in GCS', error);\n throw error;\n }\n }\n \n /**\n * Create minimal trace for cold storage\n */\n private createMinimalTrace(trace: Trace): any {\n // Keep only essential information\n return {\n id: trace.id,\n type: trace.type,\n score: trace.score,\n summary: trace.summary,\n metadata: {\n startTime: trace.metadata.startTime,\n endTime: trace.metadata.endTime,\n filesModified: trace.metadata.filesModified.length,\n errorsCount: trace.metadata.errorsEncountered.length,\n decisionsCount: trace.metadata.decisionsRecorded.length,\n causalChain: trace.metadata.causalChain,\n },\n toolSummary: {\n count: trace.tools.length,\n types: [...new Set(trace.tools.map((t: any) => t.tool))],\n firstTool: trace.tools[0]?.tool,\n lastTool: trace.tools[trace.tools.length - 1]?.tool,\n },\n compressed: trace.compressed,\n };\n }\n \n /**\n * Retrieve a trace from any tier\n */\n async retrieveTrace(traceId: string): Promise<Trace | null> {\n // Ensure clients are initialized\n await this.initialized;\n \n // Check tier location\n const location = this.localDb.prepare(\n 'SELECT tier, location FROM storage_tiers WHERE trace_id = ?'\n ).get(traceId) as any;\n \n if (!location) {\n return null;\n }\n \n // Update access metrics\n this.localDb.prepare(\n 'UPDATE storage_tiers SET access_count = access_count + 1, last_accessed = ? 
WHERE trace_id = ?'\n ).run(Date.now(), traceId);\n \n // Retrieve based on tier\n switch (location.tier) {\n case StorageTier.HOT:\n return this.retrieveFromRedis(traceId);\n case StorageTier.WARM:\n return this.retrieveFromRailwayBuckets(traceId, location.location);\n case StorageTier.COLD:\n return this.retrieveFromGCS(traceId, location.location);\n default:\n return null;\n }\n }\n \n /**\n * Retrieve from Redis\n */\n private async retrieveFromRedis(traceId: string): Promise<Trace | null> {\n if (!this.redisClient) return null;\n \n try {\n const key = `trace:${traceId}`;\n const data = await this.redisClient.hGetAll(key);\n \n if (!data || !data.data) return null;\n \n let traceData: string;\n if (data.compressed === 'true') {\n const compressed = Buffer.from(data.data, 'base64');\n const decompressed = await gunzipAsync(compressed);\n traceData = decompressed.toString();\n } else {\n traceData = data.data;\n }\n \n return JSON.parse(traceData);\n \n } catch (error: unknown) {\n logger.error('Failed to retrieve from Redis', error);\n return null;\n }\n }\n \n /**\n * Retrieve from Railway Buckets\n */\n private async retrieveFromRailwayBuckets(traceId: string, key: string): Promise<Trace | null> {\n if (!this.railwayS3) return null;\n \n try {\n const command = new GetObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: key,\n });\n \n const response = await this.railwayS3.send(command);\n const compressed = await response.Body?.transformToByteArray();\n \n if (!compressed) return null;\n \n const decompressed = await gunzipAsync(Buffer.from(compressed));\n return JSON.parse(decompressed.toString());\n \n } catch (error: unknown) {\n logger.error('Failed to retrieve from Railway Buckets', error);\n return null;\n }\n }\n \n /**\n * Retrieve from GCS\n */\n private async retrieveFromGCS(traceId: string, key: string): Promise<Trace | null> {\n if (!this.gcsStorage) return null;\n \n try {\n const bucket = this.gcsStorage.bucket(this.config.gcs.bucketName);\n const file = bucket.file(key);\n \n const [compressed] = await file.download();\n const decompressed = await gunzipAsync(compressed);\n \n // Note: Returns minimal trace from cold storage\n return JSON.parse(decompressed.toString());\n \n } catch (error: unknown) {\n logger.error('Failed to retrieve from GCS', error);\n return null;\n }\n }\n \n /**\n * Track storage in database\n */\n private trackStorage(traceId: string, tier: StorageTier, trace: Trace): void {\n const originalSize = JSON.stringify(trace).length;\n const compressedSize = Math.floor(originalSize * 0.3); // Estimate\n \n this.localDb.prepare(`\n INSERT OR REPLACE INTO storage_tiers (\n trace_id, tier, location, original_size, compressed_size,\n compression_ratio, access_count, last_accessed, created_at,\n migrated_at, score\n ) VALUES (?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?)\n `).run(\n traceId,\n tier,\n this.getStorageLocation(trace, tier),\n originalSize,\n compressedSize,\n 1 - compressedSize / originalSize,\n Date.now(),\n trace.metadata.startTime,\n Date.now(),\n trace.score\n );\n }\n \n /**\n * Get storage location key\n */\n private getStorageLocation(trace: Trace, tier: StorageTier): string {\n const date = new Date(trace.metadata.startTime);\n const year = date.getFullYear();\n const month = String(date.getMonth() + 1).padStart(2, '0');\n const day = String(date.getDate()).padStart(2, '0');\n \n switch (tier) {\n case StorageTier.HOT:\n return `redis:trace:${trace.id}`;\n case StorageTier.WARM:\n return 
`traces/${year}/${month}/${day}/${trace.id}.json.gz`;\n case StorageTier.COLD:\n return `archive/${year}/${month}/${trace.id}.json.gz`;\n }\n }\n \n /**\n * Get warm tier key for a trace\n */\n private getWarmTierKey(trace: Trace): string {\n const date = new Date(trace.metadata.startTime);\n return `traces/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${String(date.getDate()).padStart(2, '0')}/${trace.id}.json.gz`;\n }\n \n /**\n * Migrate traces between tiers based on age\n */\n async migrateTiers(): Promise<{\n hotToWarm: number;\n warmToCold: number;\n errors: string[];\n }> {\n const results = {\n hotToWarm: 0,\n warmToCold: 0,\n errors: [] as string[],\n };\n \n const now = Date.now();\n \n // Find traces to migrate\n const candidates = this.localDb.prepare(`\n SELECT trace_id, tier, created_at, score\n FROM storage_tiers\n WHERE tier != 'cold'\n ORDER BY created_at ASC\n `).all() as any[];\n \n for (const candidate of candidates) {\n const ageHours = (now - candidate.created_at) / (1000 * 60 * 60);\n \n try {\n if (candidate.tier === StorageTier.HOT && ageHours > this.config.tiers.hotHours) {\n // Migrate hot \u2192 warm\n const trace = await this.retrieveTrace(candidate.trace_id);\n if (trace) {\n await this.storeInRailwayBuckets(trace);\n this.trackStorage(candidate.trace_id, StorageTier.WARM, trace);\n results.hotToWarm++;\n }\n } else if (candidate.tier === StorageTier.WARM && ageHours > this.config.tiers.warmDays * 24) {\n // Migrate warm \u2192 cold\n const trace = await this.retrieveTrace(candidate.trace_id);\n if (trace) {\n await this.storeInGCS(trace);\n this.trackStorage(candidate.trace_id, StorageTier.COLD, trace);\n results.warmToCold++;\n }\n }\n } catch (error: unknown) {\n results.errors.push(`Failed to migrate ${candidate.trace_id}: ${error}`);\n }\n }\n \n logger.info('Tier migration completed', results);\n return results;\n }\n \n /**\n * Get storage statistics\n */\n getStorageStats(): any {\n const tierStats = this.localDb.prepare(`\n SELECT \n tier,\n COUNT(*) as count,\n SUM(original_size) as total_original,\n SUM(compressed_size) as total_compressed,\n AVG(compression_ratio) as avg_compression,\n AVG(access_count) as avg_access\n FROM storage_tiers\n GROUP BY tier\n `).all();\n \n const ageDistribution = this.localDb.prepare(`\n SELECT \n CASE \n WHEN (? - created_at) / 3600000 < 24 THEN '< 24h'\n WHEN (? - created_at) / 86400000 < 7 THEN '1-7d'\n WHEN (? - created_at) / 86400000 < 30 THEN '7-30d'\n ELSE '30d+'\n END as age_group,\n COUNT(*) as count\n FROM storage_tiers\n GROUP BY age_group\n `).all(Date.now(), Date.now(), Date.now());\n \n return {\n byTier: tierStats,\n byAge: ageDistribution,\n totalTraces: tierStats.reduce((sum: number, t: any) => sum + t.count, 0),\n totalSize: tierStats.reduce((sum: number, t: any) => sum + t.total_original, 0),\n compressedSize: tierStats.reduce((sum: number, t: any) => sum + t.total_compressed, 0),\n };\n }\n \n /**\n * Clean up expired data\n */\n async cleanup(): Promise<number> {\n let cleaned = 0;\n \n // Remove old entries from storage_tiers table\n const cutoff = Date.now() - (90 * 24 * 60 * 60 * 1000); // 90 days\n \n const result = this.localDb.prepare(`\n DELETE FROM storage_tiers\n WHERE tier = 'cold' AND created_at < ? AND access_count = 0\n `).run(cutoff);\n \n cleaned = result.changes;\n \n logger.info('Cleanup completed', { removed: cleaned });\n return cleaned;\n }\n}"],
- "mappings": "AAOA,SAAS,oBAAqC;AAC9C,SAAS,UAAU,kBAAkB,kBAAkB,2BAA8C;AACrG,SAAS,eAAe;AAExB,SAAS,cAAc;AAGvB,YAAY,UAAU;AACtB,SAAS,iBAAiB;AAE1B,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AAGA,MAAM,YAAY,UAAU,KAAK,IAAI;AACrC,MAAM,cAAc,UAAU,KAAK,MAAM;AAElC,IAAK,cAAL,kBAAKA,iBAAL;AACL,EAAAA,aAAA,SAAM;AACN,EAAAA,aAAA,UAAO;AACP,EAAAA,aAAA,UAAO;AAHG,SAAAA;AAAA,GAAA;AA+BL,MAAM,yBAA+C;AAAA,EAC1D,OAAO;AAAA,IACL,KAAK,QAAQ,IAAI,WAAW,KAAK;AAAA,IACjC,YAAY;AAAA;AAAA,IACZ,aAAa;AAAA;AAAA,EACf;AAAA,EACA,gBAAgB;AAAA,IACd,UAAU,QAAQ,IAAI,yBAAyB,KAAK;AAAA,IACpD,QAAQ,QAAQ,IAAI,qBAAqB,KAAK;AAAA,IAC9C,aAAa,QAAQ,IAAI,2BAA2B,KAAK;AAAA,IACzD,iBAAiB,QAAQ,IAAI,2BAA2B,KAAK;AAAA,IAC7D,QAAQ;AAAA,EACV;AAAA,EACA,KAAK;AAAA,IACH,YAAY,QAAQ,IAAI,YAAY,KAAK;AAAA,IACzC,WAAW,QAAQ,IAAI,gBAAgB,KAAK;AAAA,IAC5C,aAAa,QAAQ,IAAI,cAAc;AAAA,EACzC;AAAA,EACA,OAAO;AAAA,IACL,UAAU;AAAA,IACV,UAAU;AAAA,IACV,kBAAkB;AAAA,EACpB;AACF;AAeO,MAAM,wBAAwB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,eAA4C,oBAAI,IAAI;AAAA,EAEpD;AAAA,EAER,YACE,SACA,eACA,QACA;AACA,SAAK,UAAU;AACf,SAAK,gBAAgB;AACrB,SAAK,SAAS,EAAE,GAAG,wBAAwB,GAAG,OAAO;AAErD,SAAK,iBAAiB;AACtB,SAAK,cAAc,KAAK,kBAAkB;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAmC;AAE/C,QAAI,KAAK,OAAO,MAAM,KAAK;AACzB,UAAI;AACF,aAAK,cAAc,aAAa,EAAE,KAAK,KAAK,OAAO,MAAM,IAAI,CAAC;AAE9D,aAAK,YAAY,GAAG,SAAS,CAAC,QAAQ;AACpC,iBAAO,MAAM,sBAAsB,GAAG;AAAA,QACxC,CAAC;AAED,cAAM,KAAK,YAAY,QAAQ;AAG/B,cAAM,KAAK,YAAY,UAAU,oBAAoB,aAAa;AAElE,eAAO,KAAK,sCAAsC;AAAA,MACpD,SAAS,OAAgB;AACvB,eAAO,KAAK,wDAAwD,KAAK;AAAA,MAC3E;AAAA,IACF;AAGA,QAAI,KAAK,OAAO,eAAe,aAAa;AAC1C,WAAK,YAAY,IAAI,SAAS;AAAA,QAC5B,UAAU,KAAK,OAAO,eAAe;AAAA,QACrC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,aAAa;AAAA,UACX,aAAa,KAAK,OAAO,eAAe;AAAA,UACxC,iBAAiB,KAAK,OAAO,eAAe;AAAA,QAC9C;AAAA,QACA,gBAAgB;AAAA;AAAA,MAClB,CAAC;AAED,aAAO,KAAK,0CAA0C;AAAA,IACxD;AAGA,QAAI,KAAK,OAAO,IAAI,WAAW;AAC7B,UAAI;AACF,aAAK,aAAa,IAAI,QAAQ;AAAA,UAC5B,WAAW,KAAK,OAAO,IAAI;AAAA,UAC3B,aAAa,KAAK,OAAO,IAAI;AAAA,QAC/B,CAAC;AAED,eAAO,KAAK,sCAAsC;AAAA,MACpD,SAAS,OAAgB;AACvB,eAAO,KAAK,mDAAmD,KAAK;AAAA,MACtE;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAejB;AAED,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA,KAIjB;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAoC;AAEnD,UAAM,KAAK;AAEX,UAAM,QAAQ,MAAM;AACpB,UAAM,MAAM,KAAK,IAAI,IAAI,MAAM,SAAS;AACxC,UAAM,WAAW,OAAO,MAAO,KAAK;AAGpC,QAAI;AACJ,QAAI,WAAW,KAAK,OAAO,MAAM,YAAY,QAAQ,KAAK,OAAO,MAAM,kBAAkB;AACvF,aAAO;AAAA,IACT,WAAW,WAAW,KAAK,OAAO,MAAM,WAAW,IAAI;AACrD,aAAO;AAAA,IACT,OAAO;AACL,aAAO;AAAA,IACT;AAGA,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,cAAM,KAAK,aAAa,KAAK;AAC7B;AAAA,MACF,KAAK;AACH,cAAM,KAAK,sBAAsB,KAAK;AACtC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,WAAW,KAAK;AAC3B;AAAA,IACJ;AAGA,SAAK,aAAa,MAAM,IAAI,MAAM,KAAK;AAEvC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,OAA6B;AACtD,QAAI,CAAC,KAAK,aAAa;AAErB;AAAA,IACF;AAEA,QAAI;AACF,YAAM,MAAM,SAAS,MAAM,EAAE;AAC7B,YAAM,OAAO,KAAK,UAAU,KAAK;AAGjC,UAAI;AACJ,UAAI,KAAK,SAAS,KAAO;AACvB,cAAM,aAAa,MAAM,UAAU,IAAI;AACvC,qBAAa,WAAW,SAAS,QAAQ;AACzC,cAAM,KAAK,YAAY,KAAK,KAAK;AAAA,UAC/B,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,OAAO,MAAM,MAAM,SAAS;AAAA,UAC5B,MAAM,MAAM;AAAA,UACZ,WAAW,MAAM,SAAS,UAAU,SAAS;AAAA,QAC/C,CAAC;AAAA,MACH,OAAO;AACL,qBAAa;AACb,cAAM,KAAK,YAAY,KAAK,KAAK;AAAA,UAC/B,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,OAAO,MAAM,MAAM,SAAS;AAAA,UAC5B,MAAM,MAAM;AAAA,UACZ,WAAW,MAAM,SAAS,UAAU,SAAS;AA
AA,QAC/C,CAAC;AAAA,MACH;AAGA,YAAM,KAAK,YAAY,OAAO,KAAK,KAAK,OAAO,MAAM,UAAU;AAG/D,YAAM,KAAK,YAAY,KAAK,mBAAmB;AAAA,QAC7C,OAAO,MAAM;AAAA,QACb,OAAO,MAAM;AAAA,MACf,CAAC;AAED,YAAM,KAAK,YAAY,KAAK,kBAAkB;AAAA,QAC5C,OAAO,MAAM,SAAS;AAAA,QACtB,OAAO,MAAM;AAAA,MACf,CAAC;AAED,aAAO,MAAM,yBAAyB;AAAA,QACpC,SAAS,MAAM;AAAA,QACf,MAAM,KAAK;AAAA,QACX,YAAY,KAAK,SAAS;AAAA,MAC5B,CAAC;AAAA,IAEH,SAAS,OAAgB;AACvB,aAAO,MAAM,4BAA4B,KAAK;AAC9C,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBAAsB,OAA6B;AAC/D,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,gCAAgC;AAAA,IAClD;AAEA,QAAI;AAEF,YAAM,OAAO,KAAK,UAAU,KAAK;AACjC,YAAM,aAAa,MAAM,UAAU,IAAI;AAGvC,YAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,YAAM,MAAM,UAAU,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAG/I,YAAM,UAAU,IAAI,iBAAiB;AAAA,QACnC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,KAAK;AAAA,QACL,MAAM;AAAA,QACN,aAAa;AAAA,QACb,UAAU;AAAA,UACR,YAAY,MAAM;AAAA,UAClB,cAAc,MAAM;AAAA,UACpB,eAAe,MAAM,MAAM,SAAS;AAAA,UACpC,iBAAiB,KAAK,OAAO,SAAS;AAAA,UACtC,mBAAmB,WAAW,OAAO,SAAS;AAAA,QAChD;AAAA,MACF,CAAC;AAED,YAAM,KAAK,UAAU,KAAK,OAAO;AAGjC,UAAI,KAAK,aAAa;AACpB,cAAM,KAAK,YAAY,IAAI,SAAS,MAAM,EAAE,EAAE;AAAA,MAChD;AAEA,aAAO,KAAK,mCAAmC;AAAA,QAC7C,SAAS,MAAM;AAAA,QACf;AAAA,QACA,cAAc,KAAK;AAAA,QACnB,gBAAgB,WAAW;AAAA,QAC3B,mBAAmB,IAAI,WAAW,SAAS,KAAK,QAAQ,QAAQ,CAAC;AAAA,MACnE,CAAC;AAAA,IAEH,SAAS,OAAgB;AACvB,aAAO,MAAM,sCAAsC,KAAK;AACxD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,OAA6B;AACpD,QAAI,CAAC,KAAK,YAAY;AAEpB,aAAO,KAAK,sBAAsB,KAAK;AAAA,IACzC;AAEA,QAAI;AAEF,YAAM,UAAU,KAAK,mBAAmB,KAAK;AAC7C,YAAM,OAAO,KAAK,UAAU,OAAO;AACnC,YAAM,aAAa,MAAM,UAAU,IAAI;AAGvC,YAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,YAAM,MAAM,WAAW,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAGrG,YAAM,SAAS,KAAK,WAAW,OAAO,KAAK,OAAO,IAAI,UAAU;AAChE,YAAM,OAAO,OAAO,KAAK,GAAG;AAE5B,YAAM,KAAK,KAAK,YAAY;AAAA,QAC1B,UAAU;AAAA,UACR,aAAa;AAAA,UACb,UAAU;AAAA,YACR,SAAS,MAAM;AAAA,YACf,WAAW,MAAM;AAAA,YACjB,OAAO,MAAM,MAAM,SAAS;AAAA,YAC5B,eAAe,MAAM,MAAM,OAAO,SAAS;AAAA,UAC7C;AAAA,QACF;AAAA,QACA,cAAc;AAAA;AAAA,MAChB,CAAC;AAGD,UAAI,KAAK,WAAW;AAClB,YAAI;AACF,gBAAM,UAAU,KAAK,eAAe,KAAK;AACzC,gBAAM,KAAK,UAAU,KAAK,IAAI,oBAAoB;AAAA,YAChD,QAAQ,KAAK,OAAO,eAAe;AAAA,YACnC,KAAK;AAAA,UACP,CAAC,CAAC;AAAA,QACJ,SAAS,OAAgB;AAAA,QAEzB;AAAA,MACF;AAEA,aAAO,KAAK,yBAAyB;AAAA,QACnC,SAAS,MAAM;AAAA,QACf;AAAA,QACA,cAAc,KAAK,UAAU,KAAK,EAAE;AAAA,QACpC,gBAAgB,WAAW;AAAA,MAC7B,CAAC;AAAA,IAEH,SAAS,OAAgB;AACvB,aAAO,MAAM,0BAA0B,KAAK;AAC5C,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAmB;AAE5C,WAAO;AAAA,MACL,IAAI,MAAM;AAAA,MACV,MAAM,MAAM;AAAA,MACZ,OAAO,MAAM;AAAA,MACb,SAAS,MAAM;AAAA,MACf,UAAU;AAAA,QACR,WAAW,MAAM,SAAS;AAAA,QAC1B,SAAS,MAAM,SAAS;AAAA,QACxB,eAAe,MAAM,SAAS,cAAc;AAAA,QAC5C,aAAa,MAAM,SAAS,kBAAkB;AAAA,QAC9C,gBAAgB,MAAM,SAAS,kBAAkB;AAAA,QACjD,aAAa,MAAM,SAAS;AAAA,MAC9B;AAAA,MACA,aAAa;AAAA,QACX,OAAO,MAAM,MAAM;AAAA,QACnB,OAAO,CAAC,GAAG,IAAI,IAAI,MAAM,MAAM,IAAI,CAAC,MAAW,EAAE,IAAI,CAAC,CAAC;AAAA,QACvD,WAAW,MAAM,MAAM,CAAC,GAAG;AAAA,QAC3B,UAAU,MAAM,MAAM,MAAM,MAAM,SAAS,CAAC,GAAG;AAAA,MACjD;AAAA,MACA,YAAY,MAAM;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,SAAwC;AAE1D,UAAM,KAAK;AAGX,UAAM,WAAW,KAAK,QAAQ;AAAA,MAC5B;AAAA,IACF,EAAE,IAAI,OAAO;AAEb,QAAI,CAAC,UAAU;AACb,aAAO;AAAA,IACT;AAGA,SAAK,QAAQ;AAAA,MACX;AAAA,IACF,EAAE,IAAI,KAAK,IAAI,GAAG,OAAO;AAGzB,YAAQ,SAAS,MAAM;AAAA,MACrB,KAAK;AACH,eAAO,KAAK,kBAAkB,OAAO;AAAA,MACvC,KAAK;AACH,eAAO,KAAK,2BAA2B,SAAS,SAAS,QAAQ;AAAA,MACnE,KAAK;AACH,eAAO,KAAK,gBAAgB,SAAS,SAAS,QAAQ;AAAA,MACxD;AACE,eAAO;AAAA
,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBAAkB,SAAwC;AACtE,QAAI,CAAC,KAAK,YAAa,QAAO;AAE9B,QAAI;AACF,YAAM,MAAM,SAAS,OAAO;AAC5B,YAAM,OAAO,MAAM,KAAK,YAAY,QAAQ,GAAG;AAE/C,UAAI,CAAC,QAAQ,CAAC,KAAK,KAAM,QAAO;AAEhC,UAAI;AACJ,UAAI,KAAK,eAAe,QAAQ;AAC9B,cAAM,aAAa,OAAO,KAAK,KAAK,MAAM,QAAQ;AAClD,cAAM,eAAe,MAAM,YAAY,UAAU;AACjD,oBAAY,aAAa,SAAS;AAAA,MACpC,OAAO;AACL,oBAAY,KAAK;AAAA,MACnB;AAEA,aAAO,KAAK,MAAM,SAAS;AAAA,IAE7B,SAAS,OAAgB;AACvB,aAAO,MAAM,iCAAiC,KAAK;AACnD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,2BAA2B,SAAiB,KAAoC;AAC5F,QAAI,CAAC,KAAK,UAAW,QAAO;AAE5B,QAAI;AACF,YAAM,UAAU,IAAI,iBAAiB;AAAA,QACnC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,KAAK;AAAA,MACP,CAAC;AAED,YAAM,WAAW,MAAM,KAAK,UAAU,KAAK,OAAO;AAClD,YAAM,aAAa,MAAM,SAAS,MAAM,qBAAqB;AAE7D,UAAI,CAAC,WAAY,QAAO;AAExB,YAAM,eAAe,MAAM,YAAY,OAAO,KAAK,UAAU,CAAC;AAC9D,aAAO,KAAK,MAAM,aAAa,SAAS,CAAC;AAAA,IAE3C,SAAS,OAAgB;AACvB,aAAO,MAAM,2CAA2C,KAAK;AAC7D,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,SAAiB,KAAoC;AACjF,QAAI,CAAC,KAAK,WAAY,QAAO;AAE7B,QAAI;AACF,YAAM,SAAS,KAAK,WAAW,OAAO,KAAK,OAAO,IAAI,UAAU;AAChE,YAAM,OAAO,OAAO,KAAK,GAAG;AAE5B,YAAM,CAAC,UAAU,IAAI,MAAM,KAAK,SAAS;AACzC,YAAM,eAAe,MAAM,YAAY,UAAU;AAGjD,aAAO,KAAK,MAAM,aAAa,SAAS,CAAC;AAAA,IAE3C,SAAS,OAAgB;AACvB,aAAO,MAAM,+BAA+B,KAAK;AACjD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAiB,MAAmB,OAAoB;AAC3E,UAAM,eAAe,KAAK,UAAU,KAAK,EAAE;AAC3C,UAAM,iBAAiB,KAAK,MAAM,eAAe,GAAG;AAEpD,SAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAMpB,EAAE;AAAA,MACD;AAAA,MACA;AAAA,MACA,KAAK,mBAAmB,OAAO,IAAI;AAAA,MACnC;AAAA,MACA;AAAA,MACA,IAAI,iBAAiB;AAAA,MACrB,KAAK,IAAI;AAAA,MACT,MAAM,SAAS;AAAA,MACf,KAAK,IAAI;AAAA,MACT,MAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAc,MAA2B;AAClE,UAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,UAAM,OAAO,KAAK,YAAY;AAC9B,UAAM,QAAQ,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AACzD,UAAM,MAAM,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAElD,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,eAAO,eAAe,MAAM,EAAE;AAAA,MAChC,KAAK;AACH,eAAO,UAAU,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,MAAM,EAAE;AAAA,MACnD,KAAK;AACH,eAAO,WAAW,IAAI,IAAI,KAAK,IAAI,MAAM,EAAE;AAAA,IAC/C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,OAAsB;AAC3C,UAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,WAAO,UAAU,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAAA,EAC5I;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAIH;AACD,UAAM,UAAU;AAAA,MACd,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,QAAQ,CAAC;AAAA,IACX;AAEA,UAAM,MAAM,KAAK,IAAI;AAGrB,UAAM,aAAa,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,KAKvC,EAAE,IAAI;AAEP,eAAW,aAAa,YAAY;AAClC,YAAM,YAAY,MAAM,UAAU,eAAe,MAAO,KAAK;AAE7D,UAAI;AACF,YAAI,UAAU,SAAS,mBAAmB,WAAW,KAAK,OAAO,MAAM,UAAU;AAE/E,gBAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,QAAQ;AACzD,cAAI,OAAO;AACT,kBAAM,KAAK,sBAAsB,KAAK;AACtC,iBAAK,aAAa,UAAU,UAAU,mBAAkB,KAAK;AAC7D,oBAAQ;AAAA,UACV;AAAA,QACF,WAAW,UAAU,SAAS,qBAAoB,WAAW,KAAK,OAAO,MAAM,WAAW,IAAI;AAE5F,gBAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,QAAQ;AACzD,cAAI,OAAO;AACT,kBAAM,KAAK,WAAW,KAAK;AAC3B,iBAAK,aAAa,UAAU,UAAU,mBAAkB,KAAK;AAC7D,oBAAQ;AAAA,UACV;AAAA,QACF;AAAA,MACF,SAAS,OAAgB;AACvB,gBAAQ,OAAO,KAAK,qBAAqB,UAAU,QAAQ,KAAK,KAAK,EAAE;AAAA,MACzE;AAAA,IACF;AAEA,WAAO,KAAK,4BAA4B,OAAO;AAC/C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAuB;AACrB,UAAM,YAAY,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAUtC,EAAE,IAAI;AAEP,UAAM,kBAAkB,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAW5C,EAAE,IAAI,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,IAAI,CAAC;AAEzC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,aAAa,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,OAA
O,CAAC;AAAA,MACvE,WAAW,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,gBAAgB,CAAC;AAAA,MAC9E,gBAAgB,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,kBAAkB,CAAC;AAAA,IACvF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAA2B;AAC/B,QAAI,UAAU;AAGd,UAAM,SAAS,KAAK,IAAI,IAAK,KAAK,KAAK,KAAK,KAAK;AAEjD,UAAM,SAAS,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA,KAGnC,EAAE,IAAI,MAAM;AAEb,cAAU,OAAO;AAEjB,WAAO,KAAK,qBAAqB,EAAE,SAAS,QAAQ,CAAC;AACrD,WAAO;AAAA,EACT;AACF;",
+ "sourcesContent": ["/**\n * Railway-Optimized 3-Tier Storage System\n * Tier 1: Redis (Hot) - Last 24 hours, instant access\n * Tier 2: Railway Buckets (Warm) - 1-30 days, S3-compatible\n * Tier 3: GCS (Cold) - 30+ days, cost-effective archive\n */\n\nimport { createClient, RedisClientType } from 'redis';\nimport {\n S3Client,\n PutObjectCommand,\n GetObjectCommand,\n DeleteObjectCommand,\n HeadObjectCommand,\n} from '@aws-sdk/client-s3';\nimport { Storage } from '@google-cloud/storage';\nimport Database from 'better-sqlite3';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace, ToolCall } from '../trace/types.js';\nimport { ConfigManager } from '../config/config-manager.js';\nimport * as zlib from 'zlib';\nimport { promisify } from 'util';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\nconst gzipAsync = promisify(zlib.gzip);\nconst gunzipAsync = promisify(zlib.gunzip);\n\nexport enum StorageTier {\n HOT = 'hot', // Redis: < 24 hours\n WARM = 'warm', // Railway Buckets: 1-30 days\n COLD = 'cold', // GCS: 30+ days\n}\n\nexport interface RailwayStorageConfig {\n redis: {\n url: string;\n ttlSeconds: number;\n maxMemoryMb: number;\n };\n railwayBuckets: {\n endpoint: string;\n bucket: string;\n accessKeyId: string;\n secretAccessKey: string;\n region: string;\n };\n gcs: {\n bucketName: string;\n projectId: string;\n keyFilename?: string;\n };\n tiers: {\n hotHours: number; // Hours to keep in Redis\n warmDays: number; // Days to keep in Railway Buckets\n compressionScore: number; // Score threshold for early compression\n };\n}\n\nexport const DEFAULT_RAILWAY_CONFIG: RailwayStorageConfig = {\n redis: {\n url: process.env['REDIS_URL'] || 'redis://localhost:6379',\n ttlSeconds: 86400, // 24 hours\n maxMemoryMb: 100, // 100MB Redis limit\n },\n railwayBuckets: {\n endpoint:\n process.env['RAILWAY_BUCKET_ENDPOINT'] || 'https://buckets.railway.app',\n bucket: process.env['RAILWAY_BUCKET_NAME'] || 'stackmemory-warm',\n accessKeyId: process.env['RAILWAY_BUCKET_ACCESS_KEY'] || '',\n secretAccessKey: process.env['RAILWAY_BUCKET_SECRET_KEY'] || '',\n region: 'us-east-1',\n },\n gcs: {\n bucketName: process.env['GCS_BUCKET'] || 'stackmemory-cold',\n projectId: process.env['GCP_PROJECT_ID'] || 'stackmemory',\n keyFilename: process.env['GCP_KEY_FILE'],\n },\n tiers: {\n hotHours: 24,\n warmDays: 30,\n compressionScore: 0.4,\n },\n};\n\ninterface StorageMetrics {\n tier: StorageTier;\n originalSize: number;\n compressedSize: number;\n compressionRatio: number;\n accessCount: number;\n lastAccessed: number;\n migrationTime?: number;\n}\n\n/**\n * Railway-optimized storage manager with 3-tier architecture\n */\nexport class RailwayOptimizedStorage {\n private redisClient?: RedisClientType;\n private railwayS3?: S3Client;\n private gcsStorage?: Storage;\n private localDb: Database.Database;\n private config: RailwayStorageConfig;\n private configManager: ConfigManager;\n private metricsCache: Map<string, StorageMetrics> = new Map();\n\n private initialized: Promise<void>;\n\n constructor(\n localDb: Database.Database,\n configManager: ConfigManager,\n config?: Partial<RailwayStorageConfig>\n ) {\n this.localDb = localDb;\n 
this.configManager = configManager;\n this.config = { ...DEFAULT_RAILWAY_CONFIG, ...config };\n\n this.initializeSchema();\n this.initialized = this.initializeClients();\n }\n\n /**\n * Initialize storage clients\n */\n private async initializeClients(): Promise<void> {\n // Initialize Redis\n if (this.config.redis.url) {\n try {\n this.redisClient = createClient({ url: this.config.redis.url });\n\n this.redisClient.on('error', (err) => {\n logger.error('Redis client error', err);\n });\n\n await this.redisClient.connect();\n\n // Configure Redis memory policy\n await this.redisClient.configSet('maxmemory-policy', 'allkeys-lru');\n\n logger.info('Redis connected for hot tier storage');\n } catch (error: unknown) {\n logger.warn(\n 'Redis connection failed, falling back to SQLite only',\n error\n );\n }\n }\n\n // Initialize Railway S3-compatible buckets\n if (this.config.railwayBuckets.accessKeyId) {\n this.railwayS3 = new S3Client({\n endpoint: this.config.railwayBuckets.endpoint,\n region: this.config.railwayBuckets.region,\n credentials: {\n accessKeyId: this.config.railwayBuckets.accessKeyId,\n secretAccessKey: this.config.railwayBuckets.secretAccessKey,\n },\n forcePathStyle: true, // Required for Railway buckets\n });\n\n logger.info('Railway Buckets configured for warm tier');\n }\n\n // Initialize GCS for cold storage\n if (this.config.gcs.projectId) {\n try {\n this.gcsStorage = new Storage({\n projectId: this.config.gcs.projectId,\n keyFilename: this.config.gcs.keyFilename,\n });\n\n logger.info('GCS configured for cold tier storage');\n } catch (error: unknown) {\n logger.warn('GCS setup failed, will use Railway buckets only', error);\n }\n }\n }\n\n /**\n * Initialize database schema for tracking\n */\n private initializeSchema(): void {\n this.localDb.exec(`\n CREATE TABLE IF NOT EXISTS storage_tiers (\n trace_id TEXT PRIMARY KEY,\n tier TEXT NOT NULL,\n location TEXT NOT NULL,\n original_size INTEGER,\n compressed_size INTEGER,\n compression_ratio REAL,\n access_count INTEGER DEFAULT 0,\n last_accessed INTEGER,\n created_at INTEGER,\n migrated_at INTEGER,\n score REAL,\n FOREIGN KEY (trace_id) REFERENCES traces(id) ON DELETE CASCADE\n )\n `);\n\n this.localDb.exec(`\n CREATE INDEX IF NOT EXISTS idx_storage_tier ON storage_tiers(tier);\n CREATE INDEX IF NOT EXISTS idx_storage_created ON storage_tiers(created_at);\n CREATE INDEX IF NOT EXISTS idx_storage_accessed ON storage_tiers(last_accessed);\n `);\n }\n\n /**\n * Store a trace in the appropriate tier\n */\n async storeTrace(trace: Trace): Promise<StorageTier> {\n // Ensure clients are initialized\n await this.initialized;\n\n const score = trace.score;\n const age = Date.now() - trace.metadata.startTime;\n const ageHours = age / (1000 * 60 * 60);\n\n // Determine tier based on age and score\n let tier: StorageTier;\n if (\n ageHours < this.config.tiers.hotHours &&\n score > this.config.tiers.compressionScore\n ) {\n tier = StorageTier.HOT;\n } else if (ageHours < this.config.tiers.warmDays * 24) {\n tier = StorageTier.WARM;\n } else {\n tier = StorageTier.COLD;\n }\n\n // Store in appropriate tier\n switch (tier) {\n case StorageTier.HOT:\n await this.storeInRedis(trace);\n break;\n case StorageTier.WARM:\n await this.storeInRailwayBuckets(trace);\n break;\n case StorageTier.COLD:\n await this.storeInGCS(trace);\n break;\n }\n\n // Track in database\n this.trackStorage(trace.id, tier, trace);\n\n return tier;\n }\n\n /**\n * Store trace in Redis (hot tier)\n */\n private async storeInRedis(trace: Trace): Promise<void> {\n 
if (!this.redisClient) {\n // Fallback to local SQLite if Redis unavailable\n return;\n }\n\n try {\n const key = `trace:${trace.id}`;\n const data = JSON.stringify(trace);\n\n // Compress if large\n let storedData: string;\n if (data.length > 10000) {\n const compressed = await gzipAsync(data);\n storedData = compressed.toString('base64');\n await this.redisClient.hSet(key, {\n data: storedData,\n compressed: 'true',\n score: trace.score.toString(),\n type: trace.type,\n timestamp: trace.metadata.startTime.toString(),\n });\n } else {\n storedData = data;\n await this.redisClient.hSet(key, {\n data: storedData,\n compressed: 'false',\n score: trace.score.toString(),\n type: trace.type,\n timestamp: trace.metadata.startTime.toString(),\n });\n }\n\n // Set TTL\n await this.redisClient.expire(key, this.config.redis.ttlSeconds);\n\n // Add to sorted set for efficient retrieval\n await this.redisClient.zAdd('traces:by_score', {\n score: trace.score,\n value: trace.id,\n });\n\n await this.redisClient.zAdd('traces:by_time', {\n score: trace.metadata.startTime,\n value: trace.id,\n });\n\n logger.debug('Stored trace in Redis', {\n traceId: trace.id,\n size: data.length,\n compressed: data.length > 10000,\n });\n } catch (error: unknown) {\n logger.error('Failed to store in Redis', error);\n throw error;\n }\n }\n\n /**\n * Store trace in Railway Buckets (warm tier)\n */\n private async storeInRailwayBuckets(trace: Trace): Promise<void> {\n if (!this.railwayS3) {\n throw new Error('Railway Buckets not configured');\n }\n\n try {\n // Compress trace\n const data = JSON.stringify(trace);\n const compressed = await gzipAsync(data);\n\n // Generate key with date partitioning\n const date = new Date(trace.metadata.startTime);\n const key = `traces/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${String(date.getDate()).padStart(2, '0')}/${trace.id}.json.gz`;\n\n // Upload to Railway Bucket\n const command = new PutObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: key,\n Body: compressed,\n ContentType: 'application/gzip',\n Metadata: {\n 'trace-id': trace.id,\n 'trace-type': trace.type,\n 'trace-score': trace.score.toString(),\n 'original-size': data.length.toString(),\n 'compressed-size': compressed.length.toString(),\n },\n });\n\n await this.railwayS3.send(command);\n\n // Remove from Redis if exists\n if (this.redisClient) {\n await this.redisClient.del(`trace:${trace.id}`);\n }\n\n logger.info('Stored trace in Railway Buckets', {\n traceId: trace.id,\n key,\n originalSize: data.length,\n compressedSize: compressed.length,\n compressionRatio: (1 - compressed.length / data.length).toFixed(2),\n });\n } catch (error: unknown) {\n logger.error('Failed to store in Railway Buckets', error);\n throw error;\n }\n }\n\n /**\n * Store trace in GCS (cold tier)\n */\n private async storeInGCS(trace: Trace): Promise<void> {\n if (!this.gcsStorage) {\n // Fallback to Railway Buckets if GCS not available\n return this.storeInRailwayBuckets(trace);\n }\n\n try {\n // Heavy compression for cold storage\n const minimal = this.createMinimalTrace(trace);\n const data = JSON.stringify(minimal);\n const compressed = await gzipAsync(data);\n\n // Generate key with year/month partitioning\n const date = new Date(trace.metadata.startTime);\n const key = `archive/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${trace.id}.json.gz`;\n\n // Upload to GCS with Coldline storage class\n const bucket = this.gcsStorage.bucket(this.config.gcs.bucketName);\n const file 
= bucket.file(key);\n\n await file.save(compressed, {\n metadata: {\n contentType: 'application/gzip',\n metadata: {\n traceId: trace.id,\n traceType: trace.type,\n score: trace.score.toString(),\n originalTools: trace.tools.length.toString(),\n },\n },\n storageClass: 'COLDLINE', // Use Coldline for cost optimization\n });\n\n // Remove from warm tier if exists\n if (this.railwayS3) {\n try {\n const warmKey = this.getWarmTierKey(trace);\n await this.railwayS3.send(\n new DeleteObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: warmKey,\n })\n );\n } catch (error: unknown) {\n // Ignore deletion errors\n }\n }\n\n logger.info('Archived trace to GCS', {\n traceId: trace.id,\n key,\n originalSize: JSON.stringify(trace).length,\n compressedSize: compressed.length,\n });\n } catch (error: unknown) {\n logger.error('Failed to store in GCS', error);\n throw error;\n }\n }\n\n /**\n * Create minimal trace for cold storage\n */\n private createMinimalTrace(trace: Trace): any {\n // Keep only essential information\n return {\n id: trace.id,\n type: trace.type,\n score: trace.score,\n summary: trace.summary,\n metadata: {\n startTime: trace.metadata.startTime,\n endTime: trace.metadata.endTime,\n filesModified: trace.metadata.filesModified.length,\n errorsCount: trace.metadata.errorsEncountered.length,\n decisionsCount: trace.metadata.decisionsRecorded.length,\n causalChain: trace.metadata.causalChain,\n },\n toolSummary: {\n count: trace.tools.length,\n types: [...new Set(trace.tools.map((t: any) => t.tool))],\n firstTool: trace.tools[0]?.tool,\n lastTool: trace.tools[trace.tools.length - 1]?.tool,\n },\n compressed: trace.compressed,\n };\n }\n\n /**\n * Retrieve a trace from any tier\n */\n async retrieveTrace(traceId: string): Promise<Trace | null> {\n // Ensure clients are initialized\n await this.initialized;\n\n // Check tier location\n const location = this.localDb\n .prepare('SELECT tier, location FROM storage_tiers WHERE trace_id = ?')\n .get(traceId) as any;\n\n if (!location) {\n return null;\n }\n\n // Update access metrics\n this.localDb\n .prepare(\n 'UPDATE storage_tiers SET access_count = access_count + 1, last_accessed = ? 
WHERE trace_id = ?'\n )\n .run(Date.now(), traceId);\n\n // Retrieve based on tier\n switch (location.tier) {\n case StorageTier.HOT:\n return this.retrieveFromRedis(traceId);\n case StorageTier.WARM:\n return this.retrieveFromRailwayBuckets(traceId, location.location);\n case StorageTier.COLD:\n return this.retrieveFromGCS(traceId, location.location);\n default:\n return null;\n }\n }\n\n /**\n * Retrieve from Redis\n */\n private async retrieveFromRedis(traceId: string): Promise<Trace | null> {\n if (!this.redisClient) return null;\n\n try {\n const key = `trace:${traceId}`;\n const data = await this.redisClient.hGetAll(key);\n\n if (!data || !data.data) return null;\n\n let traceData: string;\n if (data.compressed === 'true') {\n const compressed = Buffer.from(data.data, 'base64');\n const decompressed = await gunzipAsync(compressed);\n traceData = decompressed.toString();\n } else {\n traceData = data.data;\n }\n\n return JSON.parse(traceData);\n } catch (error: unknown) {\n logger.error('Failed to retrieve from Redis', error);\n return null;\n }\n }\n\n /**\n * Retrieve from Railway Buckets\n */\n private async retrieveFromRailwayBuckets(\n traceId: string,\n key: string\n ): Promise<Trace | null> {\n if (!this.railwayS3) return null;\n\n try {\n const command = new GetObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: key,\n });\n\n const response = await this.railwayS3.send(command);\n const compressed = await response.Body?.transformToByteArray();\n\n if (!compressed) return null;\n\n const decompressed = await gunzipAsync(Buffer.from(compressed));\n return JSON.parse(decompressed.toString());\n } catch (error: unknown) {\n logger.error('Failed to retrieve from Railway Buckets', error);\n return null;\n }\n }\n\n /**\n * Retrieve from GCS\n */\n private async retrieveFromGCS(\n traceId: string,\n key: string\n ): Promise<Trace | null> {\n if (!this.gcsStorage) return null;\n\n try {\n const bucket = this.gcsStorage.bucket(this.config.gcs.bucketName);\n const file = bucket.file(key);\n\n const [compressed] = await file.download();\n const decompressed = await gunzipAsync(compressed);\n\n // Note: Returns minimal trace from cold storage\n return JSON.parse(decompressed.toString());\n } catch (error: unknown) {\n logger.error('Failed to retrieve from GCS', error);\n return null;\n }\n }\n\n /**\n * Track storage in database\n */\n private trackStorage(traceId: string, tier: StorageTier, trace: Trace): void {\n const originalSize = JSON.stringify(trace).length;\n const compressedSize = Math.floor(originalSize * 0.3); // Estimate\n\n this.localDb\n .prepare(\n `\n INSERT OR REPLACE INTO storage_tiers (\n trace_id, tier, location, original_size, compressed_size,\n compression_ratio, access_count, last_accessed, created_at,\n migrated_at, score\n ) VALUES (?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?)\n `\n )\n .run(\n traceId,\n tier,\n this.getStorageLocation(trace, tier),\n originalSize,\n compressedSize,\n 1 - compressedSize / originalSize,\n Date.now(),\n trace.metadata.startTime,\n Date.now(),\n trace.score\n );\n }\n\n /**\n * Get storage location key\n */\n private getStorageLocation(trace: Trace, tier: StorageTier): string {\n const date = new Date(trace.metadata.startTime);\n const year = date.getFullYear();\n const month = String(date.getMonth() + 1).padStart(2, '0');\n const day = String(date.getDate()).padStart(2, '0');\n\n switch (tier) {\n case StorageTier.HOT:\n return `redis:trace:${trace.id}`;\n case StorageTier.WARM:\n return 
`traces/${year}/${month}/${day}/${trace.id}.json.gz`;\n case StorageTier.COLD:\n return `archive/${year}/${month}/${trace.id}.json.gz`;\n }\n }\n\n /**\n * Get warm tier key for a trace\n */\n private getWarmTierKey(trace: Trace): string {\n const date = new Date(trace.metadata.startTime);\n return `traces/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${String(date.getDate()).padStart(2, '0')}/${trace.id}.json.gz`;\n }\n\n /**\n * Migrate traces between tiers based on age\n */\n async migrateTiers(): Promise<{\n hotToWarm: number;\n warmToCold: number;\n errors: string[];\n }> {\n const results = {\n hotToWarm: 0,\n warmToCold: 0,\n errors: [] as string[],\n };\n\n const now = Date.now();\n\n // Find traces to migrate\n const candidates = this.localDb\n .prepare(\n `\n SELECT trace_id, tier, created_at, score\n FROM storage_tiers\n WHERE tier != 'cold'\n ORDER BY created_at ASC\n `\n )\n .all() as any[];\n\n for (const candidate of candidates) {\n const ageHours = (now - candidate.created_at) / (1000 * 60 * 60);\n\n try {\n if (\n candidate.tier === StorageTier.HOT &&\n ageHours > this.config.tiers.hotHours\n ) {\n // Migrate hot \u2192 warm\n const trace = await this.retrieveTrace(candidate.trace_id);\n if (trace) {\n await this.storeInRailwayBuckets(trace);\n this.trackStorage(candidate.trace_id, StorageTier.WARM, trace);\n results.hotToWarm++;\n }\n } else if (\n candidate.tier === StorageTier.WARM &&\n ageHours > this.config.tiers.warmDays * 24\n ) {\n // Migrate warm \u2192 cold\n const trace = await this.retrieveTrace(candidate.trace_id);\n if (trace) {\n await this.storeInGCS(trace);\n this.trackStorage(candidate.trace_id, StorageTier.COLD, trace);\n results.warmToCold++;\n }\n }\n } catch (error: unknown) {\n results.errors.push(\n `Failed to migrate ${candidate.trace_id}: ${error}`\n );\n }\n }\n\n logger.info('Tier migration completed', results);\n return results;\n }\n\n /**\n * Get storage statistics\n */\n getStorageStats(): any {\n const tierStats = this.localDb\n .prepare(\n `\n SELECT \n tier,\n COUNT(*) as count,\n SUM(original_size) as total_original,\n SUM(compressed_size) as total_compressed,\n AVG(compression_ratio) as avg_compression,\n AVG(access_count) as avg_access\n FROM storage_tiers\n GROUP BY tier\n `\n )\n .all();\n\n const ageDistribution = this.localDb\n .prepare(\n `\n SELECT \n CASE \n WHEN (? - created_at) / 3600000 < 24 THEN '< 24h'\n WHEN (? - created_at) / 86400000 < 7 THEN '1-7d'\n WHEN (? - created_at) / 86400000 < 30 THEN '7-30d'\n ELSE '30d+'\n END as age_group,\n COUNT(*) as count\n FROM storage_tiers\n GROUP BY age_group\n `\n )\n .all(Date.now(), Date.now(), Date.now());\n\n return {\n byTier: tierStats,\n byAge: ageDistribution,\n totalTraces: tierStats.reduce((sum: number, t: any) => sum + t.count, 0),\n totalSize: tierStats.reduce(\n (sum: number, t: any) => sum + t.total_original,\n 0\n ),\n compressedSize: tierStats.reduce(\n (sum: number, t: any) => sum + t.total_compressed,\n 0\n ),\n };\n }\n\n /**\n * Clean up expired data\n */\n async cleanup(): Promise<number> {\n let cleaned = 0;\n\n // Remove old entries from storage_tiers table\n const cutoff = Date.now() - 90 * 24 * 60 * 60 * 1000; // 90 days\n\n const result = this.localDb\n .prepare(\n `\n DELETE FROM storage_tiers\n WHERE tier = 'cold' AND created_at < ? AND access_count = 0\n `\n )\n .run(cutoff);\n\n cleaned = result.changes;\n\n logger.info('Cleanup completed', { removed: cleaned });\n return cleaned;\n }\n}\n"],
+ "mappings": "AAOA,SAAS,oBAAqC;AAC9C;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,eAAe;AAExB,SAAS,cAAc;AAGvB,YAAY,UAAU;AACtB,SAAS,iBAAiB;AAE1B,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AAEA,MAAM,YAAY,UAAU,KAAK,IAAI;AACrC,MAAM,cAAc,UAAU,KAAK,MAAM;AAElC,IAAK,cAAL,kBAAKA,iBAAL;AACL,EAAAA,aAAA,SAAM;AACN,EAAAA,aAAA,UAAO;AACP,EAAAA,aAAA,UAAO;AAHG,SAAAA;AAAA,GAAA;AA+BL,MAAM,yBAA+C;AAAA,EAC1D,OAAO;AAAA,IACL,KAAK,QAAQ,IAAI,WAAW,KAAK;AAAA,IACjC,YAAY;AAAA;AAAA,IACZ,aAAa;AAAA;AAAA,EACf;AAAA,EACA,gBAAgB;AAAA,IACd,UACE,QAAQ,IAAI,yBAAyB,KAAK;AAAA,IAC5C,QAAQ,QAAQ,IAAI,qBAAqB,KAAK;AAAA,IAC9C,aAAa,QAAQ,IAAI,2BAA2B,KAAK;AAAA,IACzD,iBAAiB,QAAQ,IAAI,2BAA2B,KAAK;AAAA,IAC7D,QAAQ;AAAA,EACV;AAAA,EACA,KAAK;AAAA,IACH,YAAY,QAAQ,IAAI,YAAY,KAAK;AAAA,IACzC,WAAW,QAAQ,IAAI,gBAAgB,KAAK;AAAA,IAC5C,aAAa,QAAQ,IAAI,cAAc;AAAA,EACzC;AAAA,EACA,OAAO;AAAA,IACL,UAAU;AAAA,IACV,UAAU;AAAA,IACV,kBAAkB;AAAA,EACpB;AACF;AAeO,MAAM,wBAAwB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,eAA4C,oBAAI,IAAI;AAAA,EAEpD;AAAA,EAER,YACE,SACA,eACA,QACA;AACA,SAAK,UAAU;AACf,SAAK,gBAAgB;AACrB,SAAK,SAAS,EAAE,GAAG,wBAAwB,GAAG,OAAO;AAErD,SAAK,iBAAiB;AACtB,SAAK,cAAc,KAAK,kBAAkB;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAmC;AAE/C,QAAI,KAAK,OAAO,MAAM,KAAK;AACzB,UAAI;AACF,aAAK,cAAc,aAAa,EAAE,KAAK,KAAK,OAAO,MAAM,IAAI,CAAC;AAE9D,aAAK,YAAY,GAAG,SAAS,CAAC,QAAQ;AACpC,iBAAO,MAAM,sBAAsB,GAAG;AAAA,QACxC,CAAC;AAED,cAAM,KAAK,YAAY,QAAQ;AAG/B,cAAM,KAAK,YAAY,UAAU,oBAAoB,aAAa;AAElE,eAAO,KAAK,sCAAsC;AAAA,MACpD,SAAS,OAAgB;AACvB,eAAO;AAAA,UACL;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,QAAI,KAAK,OAAO,eAAe,aAAa;AAC1C,WAAK,YAAY,IAAI,SAAS;AAAA,QAC5B,UAAU,KAAK,OAAO,eAAe;AAAA,QACrC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,aAAa;AAAA,UACX,aAAa,KAAK,OAAO,eAAe;AAAA,UACxC,iBAAiB,KAAK,OAAO,eAAe;AAAA,QAC9C;AAAA,QACA,gBAAgB;AAAA;AAAA,MAClB,CAAC;AAED,aAAO,KAAK,0CAA0C;AAAA,IACxD;AAGA,QAAI,KAAK,OAAO,IAAI,WAAW;AAC7B,UAAI;AACF,aAAK,aAAa,IAAI,QAAQ;AAAA,UAC5B,WAAW,KAAK,OAAO,IAAI;AAAA,UAC3B,aAAa,KAAK,OAAO,IAAI;AAAA,QAC/B,CAAC;AAED,eAAO,KAAK,sCAAsC;AAAA,MACpD,SAAS,OAAgB;AACvB,eAAO,KAAK,mDAAmD,KAAK;AAAA,MACtE;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAejB;AAED,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA,KAIjB;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAoC;AAEnD,UAAM,KAAK;AAEX,UAAM,QAAQ,MAAM;AACpB,UAAM,MAAM,KAAK,IAAI,IAAI,MAAM,SAAS;AACxC,UAAM,WAAW,OAAO,MAAO,KAAK;AAGpC,QAAI;AACJ,QACE,WAAW,KAAK,OAAO,MAAM,YAC7B,QAAQ,KAAK,OAAO,MAAM,kBAC1B;AACA,aAAO;AAAA,IACT,WAAW,WAAW,KAAK,OAAO,MAAM,WAAW,IAAI;AACrD,aAAO;AAAA,IACT,OAAO;AACL,aAAO;AAAA,IACT;AAGA,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,cAAM,KAAK,aAAa,KAAK;AAC7B;AAAA,MACF,KAAK;AACH,cAAM,KAAK,sBAAsB,KAAK;AACtC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,WAAW,KAAK;AAC3B;AAAA,IACJ;AAGA,SAAK,aAAa,MAAM,IAAI,MAAM,KAAK;AAEvC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,OAA6B;AACtD,QAAI,CAAC,KAAK,aAAa;AAErB;AAAA,IACF;AAEA,QAAI;AACF,YAAM,MAAM,SAAS,MAAM,EAAE;AAC7B,YAAM,OAAO,KAAK,UAAU,KAAK;AAGjC,UAAI;AACJ,UAAI,KAAK,SAAS,KAAO;AACvB,cAAM,aAAa,MAAM,UAAU,IAAI;AACvC,qBAAa,WAAW,SAAS,QAAQ;AACzC,cAAM,KAAK,YAAY,KAAK,KAAK;AAAA,UAC/B,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,OAAO,MAAM,MAAM,SAAS;AAAA,UAC5B,MAAM,MAAM;AAAA,UACZ,WAAW,MAAM,SAAS,UAAU,SAAS;AAAA,QAC/C,CAAC;AAAA,MACH,OAAO;AACL,qBAAa;AACb,cAAM,KAAK,YAAY,KAAK,KAAK;AAAA,UAC/B,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,OAAO,MAAM,MAAM,SAAS;AAAA,UAC5B,MAAM,MAAM;AAAA,UA
CZ,WAAW,MAAM,SAAS,UAAU,SAAS;AAAA,QAC/C,CAAC;AAAA,MACH;AAGA,YAAM,KAAK,YAAY,OAAO,KAAK,KAAK,OAAO,MAAM,UAAU;AAG/D,YAAM,KAAK,YAAY,KAAK,mBAAmB;AAAA,QAC7C,OAAO,MAAM;AAAA,QACb,OAAO,MAAM;AAAA,MACf,CAAC;AAED,YAAM,KAAK,YAAY,KAAK,kBAAkB;AAAA,QAC5C,OAAO,MAAM,SAAS;AAAA,QACtB,OAAO,MAAM;AAAA,MACf,CAAC;AAED,aAAO,MAAM,yBAAyB;AAAA,QACpC,SAAS,MAAM;AAAA,QACf,MAAM,KAAK;AAAA,QACX,YAAY,KAAK,SAAS;AAAA,MAC5B,CAAC;AAAA,IACH,SAAS,OAAgB;AACvB,aAAO,MAAM,4BAA4B,KAAK;AAC9C,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBAAsB,OAA6B;AAC/D,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,gCAAgC;AAAA,IAClD;AAEA,QAAI;AAEF,YAAM,OAAO,KAAK,UAAU,KAAK;AACjC,YAAM,aAAa,MAAM,UAAU,IAAI;AAGvC,YAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,YAAM,MAAM,UAAU,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAG/I,YAAM,UAAU,IAAI,iBAAiB;AAAA,QACnC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,KAAK;AAAA,QACL,MAAM;AAAA,QACN,aAAa;AAAA,QACb,UAAU;AAAA,UACR,YAAY,MAAM;AAAA,UAClB,cAAc,MAAM;AAAA,UACpB,eAAe,MAAM,MAAM,SAAS;AAAA,UACpC,iBAAiB,KAAK,OAAO,SAAS;AAAA,UACtC,mBAAmB,WAAW,OAAO,SAAS;AAAA,QAChD;AAAA,MACF,CAAC;AAED,YAAM,KAAK,UAAU,KAAK,OAAO;AAGjC,UAAI,KAAK,aAAa;AACpB,cAAM,KAAK,YAAY,IAAI,SAAS,MAAM,EAAE,EAAE;AAAA,MAChD;AAEA,aAAO,KAAK,mCAAmC;AAAA,QAC7C,SAAS,MAAM;AAAA,QACf;AAAA,QACA,cAAc,KAAK;AAAA,QACnB,gBAAgB,WAAW;AAAA,QAC3B,mBAAmB,IAAI,WAAW,SAAS,KAAK,QAAQ,QAAQ,CAAC;AAAA,MACnE,CAAC;AAAA,IACH,SAAS,OAAgB;AACvB,aAAO,MAAM,sCAAsC,KAAK;AACxD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,OAA6B;AACpD,QAAI,CAAC,KAAK,YAAY;AAEpB,aAAO,KAAK,sBAAsB,KAAK;AAAA,IACzC;AAEA,QAAI;AAEF,YAAM,UAAU,KAAK,mBAAmB,KAAK;AAC7C,YAAM,OAAO,KAAK,UAAU,OAAO;AACnC,YAAM,aAAa,MAAM,UAAU,IAAI;AAGvC,YAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,YAAM,MAAM,WAAW,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAGrG,YAAM,SAAS,KAAK,WAAW,OAAO,KAAK,OAAO,IAAI,UAAU;AAChE,YAAM,OAAO,OAAO,KAAK,GAAG;AAE5B,YAAM,KAAK,KAAK,YAAY;AAAA,QAC1B,UAAU;AAAA,UACR,aAAa;AAAA,UACb,UAAU;AAAA,YACR,SAAS,MAAM;AAAA,YACf,WAAW,MAAM;AAAA,YACjB,OAAO,MAAM,MAAM,SAAS;AAAA,YAC5B,eAAe,MAAM,MAAM,OAAO,SAAS;AAAA,UAC7C;AAAA,QACF;AAAA,QACA,cAAc;AAAA;AAAA,MAChB,CAAC;AAGD,UAAI,KAAK,WAAW;AAClB,YAAI;AACF,gBAAM,UAAU,KAAK,eAAe,KAAK;AACzC,gBAAM,KAAK,UAAU;AAAA,YACnB,IAAI,oBAAoB;AAAA,cACtB,QAAQ,KAAK,OAAO,eAAe;AAAA,cACnC,KAAK;AAAA,YACP,CAAC;AAAA,UACH;AAAA,QACF,SAAS,OAAgB;AAAA,QAEzB;AAAA,MACF;AAEA,aAAO,KAAK,yBAAyB;AAAA,QACnC,SAAS,MAAM;AAAA,QACf;AAAA,QACA,cAAc,KAAK,UAAU,KAAK,EAAE;AAAA,QACpC,gBAAgB,WAAW;AAAA,MAC7B,CAAC;AAAA,IACH,SAAS,OAAgB;AACvB,aAAO,MAAM,0BAA0B,KAAK;AAC5C,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAmB;AAE5C,WAAO;AAAA,MACL,IAAI,MAAM;AAAA,MACV,MAAM,MAAM;AAAA,MACZ,OAAO,MAAM;AAAA,MACb,SAAS,MAAM;AAAA,MACf,UAAU;AAAA,QACR,WAAW,MAAM,SAAS;AAAA,QAC1B,SAAS,MAAM,SAAS;AAAA,QACxB,eAAe,MAAM,SAAS,cAAc;AAAA,QAC5C,aAAa,MAAM,SAAS,kBAAkB;AAAA,QAC9C,gBAAgB,MAAM,SAAS,kBAAkB;AAAA,QACjD,aAAa,MAAM,SAAS;AAAA,MAC9B;AAAA,MACA,aAAa;AAAA,QACX,OAAO,MAAM,MAAM;AAAA,QACnB,OAAO,CAAC,GAAG,IAAI,IAAI,MAAM,MAAM,IAAI,CAAC,MAAW,EAAE,IAAI,CAAC,CAAC;AAAA,QACvD,WAAW,MAAM,MAAM,CAAC,GAAG;AAAA,QAC3B,UAAU,MAAM,MAAM,MAAM,MAAM,SAAS,CAAC,GAAG;AAAA,MACjD;AAAA,MACA,YAAY,MAAM;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,SAAwC;AAE1D,UAAM,KAAK;AAGX,UAAM,WAAW,KAAK,QACnB,QAAQ,6DAA6D,EACrE,IAAI,OAAO;AAEd,QAAI,CAAC,UAAU;AACb,aAAO;AAAA,IACT;AAGA,SAAK,QACF;AAAA,MACC;AAAA,IACF,EACC,IAAI,KAAK,IAAI,GAAG,OAAO;AAG1B,YAAQ,SAAS,MAAM;AAAA,MACrB,KAAK;AACH,eAAO,KAAK,kBAAkB,OAAO;AAAA,MACvC,KAAK;AACH,eAAO,KAAK,2BAA2B,SAAS,SAAS,QAAQ;AAAA,MACnE,KAAK;AACH,eAAO,KAAK,gBAAgB,SAAS,S
AAS,QAAQ;AAAA,MACxD;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBAAkB,SAAwC;AACtE,QAAI,CAAC,KAAK,YAAa,QAAO;AAE9B,QAAI;AACF,YAAM,MAAM,SAAS,OAAO;AAC5B,YAAM,OAAO,MAAM,KAAK,YAAY,QAAQ,GAAG;AAE/C,UAAI,CAAC,QAAQ,CAAC,KAAK,KAAM,QAAO;AAEhC,UAAI;AACJ,UAAI,KAAK,eAAe,QAAQ;AAC9B,cAAM,aAAa,OAAO,KAAK,KAAK,MAAM,QAAQ;AAClD,cAAM,eAAe,MAAM,YAAY,UAAU;AACjD,oBAAY,aAAa,SAAS;AAAA,MACpC,OAAO;AACL,oBAAY,KAAK;AAAA,MACnB;AAEA,aAAO,KAAK,MAAM,SAAS;AAAA,IAC7B,SAAS,OAAgB;AACvB,aAAO,MAAM,iCAAiC,KAAK;AACnD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,2BACZ,SACA,KACuB;AACvB,QAAI,CAAC,KAAK,UAAW,QAAO;AAE5B,QAAI;AACF,YAAM,UAAU,IAAI,iBAAiB;AAAA,QACnC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,KAAK;AAAA,MACP,CAAC;AAED,YAAM,WAAW,MAAM,KAAK,UAAU,KAAK,OAAO;AAClD,YAAM,aAAa,MAAM,SAAS,MAAM,qBAAqB;AAE7D,UAAI,CAAC,WAAY,QAAO;AAExB,YAAM,eAAe,MAAM,YAAY,OAAO,KAAK,UAAU,CAAC;AAC9D,aAAO,KAAK,MAAM,aAAa,SAAS,CAAC;AAAA,IAC3C,SAAS,OAAgB;AACvB,aAAO,MAAM,2CAA2C,KAAK;AAC7D,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,SACA,KACuB;AACvB,QAAI,CAAC,KAAK,WAAY,QAAO;AAE7B,QAAI;AACF,YAAM,SAAS,KAAK,WAAW,OAAO,KAAK,OAAO,IAAI,UAAU;AAChE,YAAM,OAAO,OAAO,KAAK,GAAG;AAE5B,YAAM,CAAC,UAAU,IAAI,MAAM,KAAK,SAAS;AACzC,YAAM,eAAe,MAAM,YAAY,UAAU;AAGjD,aAAO,KAAK,MAAM,aAAa,SAAS,CAAC;AAAA,IAC3C,SAAS,OAAgB;AACvB,aAAO,MAAM,+BAA+B,KAAK;AACjD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAiB,MAAmB,OAAoB;AAC3E,UAAM,eAAe,KAAK,UAAU,KAAK,EAAE;AAC3C,UAAM,iBAAiB,KAAK,MAAM,eAAe,GAAG;AAEpD,SAAK,QACF;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOF,EACC;AAAA,MACC;AAAA,MACA;AAAA,MACA,KAAK,mBAAmB,OAAO,IAAI;AAAA,MACnC;AAAA,MACA;AAAA,MACA,IAAI,iBAAiB;AAAA,MACrB,KAAK,IAAI;AAAA,MACT,MAAM,SAAS;AAAA,MACf,KAAK,IAAI;AAAA,MACT,MAAM;AAAA,IACR;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAc,MAA2B;AAClE,UAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,UAAM,OAAO,KAAK,YAAY;AAC9B,UAAM,QAAQ,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AACzD,UAAM,MAAM,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAElD,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,eAAO,eAAe,MAAM,EAAE;AAAA,MAChC,KAAK;AACH,eAAO,UAAU,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,MAAM,EAAE;AAAA,MACnD,KAAK;AACH,eAAO,WAAW,IAAI,IAAI,KAAK,IAAI,MAAM,EAAE;AAAA,IAC/C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,OAAsB;AAC3C,UAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,WAAO,UAAU,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAAA,EAC5I;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAIH;AACD,UAAM,UAAU;AAAA,MACd,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,QAAQ,CAAC;AAAA,IACX;AAEA,UAAM,MAAM,KAAK,IAAI;AAGrB,UAAM,aAAa,KAAK,QACrB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMF,EACC,IAAI;AAEP,eAAW,aAAa,YAAY;AAClC,YAAM,YAAY,MAAM,UAAU,eAAe,MAAO,KAAK;AAE7D,UAAI;AACF,YACE,UAAU,SAAS,mBACnB,WAAW,KAAK,OAAO,MAAM,UAC7B;AAEA,gBAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,QAAQ;AACzD,cAAI,OAAO;AACT,kBAAM,KAAK,sBAAsB,KAAK;AACtC,iBAAK,aAAa,UAAU,UAAU,mBAAkB,KAAK;AAC7D,oBAAQ;AAAA,UACV;AAAA,QACF,WACE,UAAU,SAAS,qBACnB,WAAW,KAAK,OAAO,MAAM,WAAW,IACxC;AAEA,gBAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,QAAQ;AACzD,cAAI,OAAO;AACT,kBAAM,KAAK,WAAW,KAAK;AAC3B,iBAAK,aAAa,UAAU,UAAU,mBAAkB,KAAK;AAC7D,oBAAQ;AAAA,UACV;AAAA,QACF;AAAA,MACF,SAAS,OAAgB;AACvB,gBAAQ,OAAO;AAAA,UACb,qBAAqB,UAAU,QAAQ,KAAK,KAAK;AAAA,QACnD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,KAAK,4BAA4B,OAAO;AAC/C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAuB;AACrB,UAAM,YAAY,KAAK,QACpB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAWF,EACC,IAAI;AAEP,UAAM,kBAAkB,KAAK,QAC1B;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAYF,EACC,IAAI,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,IAAI,CAAC;AAEzC,WAAO;AAAA
,MACL,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,aAAa,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,OAAO,CAAC;AAAA,MACvE,WAAW,UAAU;AAAA,QACnB,CAAC,KAAa,MAAW,MAAM,EAAE;AAAA,QACjC;AAAA,MACF;AAAA,MACA,gBAAgB,UAAU;AAAA,QACxB,CAAC,KAAa,MAAW,MAAM,EAAE;AAAA,QACjC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAA2B;AAC/B,QAAI,UAAU;AAGd,UAAM,SAAS,KAAK,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK;AAEhD,UAAM,SAAS,KAAK,QACjB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA,IAIF,EACC,IAAI,MAAM;AAEb,cAAU,OAAO;AAEjB,WAAO,KAAK,qBAAqB,EAAE,SAAS,QAAQ,CAAC;AACrD,WAAO;AAAA,EACT;AACF;",
  "names": ["StorageTier"]
  }
@@ -1,4 +1,8 @@
- import { S3Client, PutObjectCommand, GetObjectCommand } from "@aws-sdk/client-s3";
+ import {
+ S3Client,
+ PutObjectCommand,
+ GetObjectCommand
+ } from "@aws-sdk/client-s3";
  import { Storage } from "@google-cloud/storage";
  import { logger } from "../monitoring/logger.js";
  function getEnv(key, defaultValue) {
@@ -115,7 +119,8 @@ class RemoteStorageManager {
  */
  async identifyMigrationCandidates() {
  const now = Date.now();
- const traces = this.localDb.prepare(`
+ const traces = this.localDb.prepare(
+ `
  SELECT
  t.id,
  t.score,
@@ -128,7 +133,8 @@ class RemoteStorageManager {
  LEFT JOIN remote_migrations rm ON t.id = rm.trace_id
  WHERE rm.trace_id IS NULL -- Not already migrated
  ORDER BY t.start_time ASC
- `).all();
+ `
+ ).all();
  const candidates = [];
  for (const trace of traces) {
  const ageHours = (now - trace.start_time) / (1e3 * 60 * 60);
@@ -207,7 +213,10 @@ class RemoteStorageManager {
  };
  try {
  const toMigrate = candidates.filter((c) => c.shouldMigrate);
- const batches = this.createBatches(toMigrate, this.config.migration.batchSize);
+ const batches = this.createBatches(
+ toMigrate,
+ this.config.migration.batchSize
+ );
  for (const batch of batches) {
  if (dryRun) {
  logger.info("Dry run - would migrate batch", {
@@ -246,7 +255,10 @@ class RemoteStorageManager {
  if (!trace) {
  throw new Error(`Trace ${candidate.traceId} not found`);
  }
- const compressed = this.compressTrace(trace, candidate.compressionLevel);
+ const compressed = this.compressTrace(
+ trace,
+ candidate.compressionLevel
+ );
  if (this.s3Client) {
  const s3Key = this.generateS3Key(candidate);
  await this.uploadToS3(s3Key, compressed);
@@ -261,9 +273,7 @@ class RemoteStorageManager {
  results.totalSize += candidate.size;
  } catch (error) {
  results.failed++;
- results.errors.push(
- `Failed to migrate ${candidate.traceId}: ${error}`
- );
+ results.errors.push(`Failed to migrate ${candidate.traceId}: ${error}`);
  logger.error("Migration failed for trace", {
  traceId: candidate.traceId,
  error
@@ -276,9 +286,7 @@ class RemoteStorageManager {
  * Get full trace data for migration
  */
  getTraceData(traceId) {
- const traceRow = this.localDb.prepare(
- "SELECT * FROM traces WHERE id = ?"
- ).get(traceId);
+ const traceRow = this.localDb.prepare("SELECT * FROM traces WHERE id = ?").get(traceId);
  if (!traceRow) return null;
  const toolCalls = this.localDb.prepare(
  "SELECT * FROM tool_calls WHERE trace_id = ? ORDER BY sequence_number"
@@ -292,7 +300,7 @@ class RemoteStorageManager {
  * Compress trace based on compression level
  */
  compressTrace(data, level) {
- let jsonData = JSON.stringify(data);
+ const jsonData = JSON.stringify(data);
  switch (level) {
  case "none":
  return Buffer.from(jsonData);
@@ -346,7 +354,7 @@ class RemoteStorageManager {
  ContentType: "application/json",
  Metadata: {
  "trace-version": "1.0",
- "compression": "true"
+ compression: "true"
  }
  });
  await this.s3Client.send(command);
@@ -382,13 +390,15 @@ class RemoteStorageManager {
  * Get current local storage size
  */
  getLocalStorageSize() {
- const result = this.localDb.prepare(`
+ const result = this.localDb.prepare(
+ `
  SELECT
  SUM(LENGTH(compressed_data)) +
  COALESCE((SELECT SUM(LENGTH(arguments) + LENGTH(result))
  FROM tool_calls), 0) as total_size
  FROM traces
- `).get();
+ `
+ ).get();
  return result?.total_size || 0;
  }
  /**
@@ -405,17 +415,21 @@ class RemoteStorageManager {
  * Retrieve trace from remote storage
  */
  async retrieveTrace(traceId) {
- const migration = this.localDb.prepare(`
+ const migration = this.localDb.prepare(
+ `
  SELECT * FROM remote_migrations WHERE trace_id = ?
- `).get(traceId);
+ `
+ ).get(traceId);
  if (!migration) {
  throw new Error(`Trace ${traceId} not found in remote storage`);
  }
- this.localDb.prepare(`
+ this.localDb.prepare(
+ `
  UPDATE remote_migrations
  SET retrieval_count = retrieval_count + 1, last_retrieved = ?
  WHERE trace_id = ?
- `).run(Date.now(), traceId);
+ `
+ ).run(Date.now(), traceId);
  if (!this.s3Client) {
  throw new Error("S3 client not configured");
  }
@@ -434,7 +448,8 @@ class RemoteStorageManager {
  * Get migration statistics
  */
  getMigrationStats() {
- const stats = this.localDb.prepare(`
+ const stats = this.localDb.prepare(
+ `
  SELECT
  storage_tier,
  COUNT(*) as count,
@@ -443,14 +458,17 @@ class RemoteStorageManager {
  AVG(retrieval_count) as avg_retrievals
  FROM remote_migrations
  GROUP BY storage_tier
- `).all();
- const total = this.localDb.prepare(`
+ `
+ ).all();
+ const total = this.localDb.prepare(
+ `
  SELECT
  COUNT(*) as total_migrated,
  SUM(original_size) as total_original,
  SUM(compressed_size) as total_compressed
  FROM remote_migrations
- `).get();
+ `
+ ).get();
  return {
  byTier: stats,
  total,
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/storage/remote-storage.ts"],
- "sourcesContent": ["/**\n * Remote Storage Interface for Two-Tier Storage System\n * Implements infinite retention with TimeSeries DB + S3\n */\n\nimport { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3';\nimport { Storage } from '@google-cloud/storage';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace } from '../trace/types.js';\nimport Database from 'better-sqlite3';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\n\nexport enum StorageTier {\n HOT = 'hot', // < 7 days - Railway Buckets or GCS Standard\n NEARLINE = 'nearline', // 7-30 days - GCS Nearline ($0.01/GB)\n COLDLINE = 'coldline', // 30-90 days - GCS Coldline ($0.004/GB)\n ARCHIVE = 'archive' // > 90 days - GCS Archive ($0.0012/GB)\n}\n\nexport interface RemoteStorageConfig {\n provider: 'gcs' | 's3' | 'railway';\n gcs?: {\n bucketName: string;\n projectId: string;\n keyFilename?: string; // Path to service account key\n };\n s3?: {\n bucket: string;\n region: string;\n accessKeyId?: string;\n secretAccessKey?: string;\n endpoint?: string; // For Railway buckets or MinIO\n };\n timeseries: {\n type: 'clickhouse' | 'timescale' | 'influxdb' | 'sqlite'; // SQLite for dev\n host: string;\n port: number;\n database: string;\n username?: string;\n password?: string;\n };\n migration: {\n batchSize: number;\n hotAgeHours: number; // < 7 days\n nearlineAgeHours: number; // 7-30 days \n coldlineAgeHours: number; // 30-90 days\n archiveAgeHours: number; // > 90 days\n scoreThreshold: number; // Score threshold for early migration\n };\n}\n\nexport const DEFAULT_REMOTE_CONFIG: RemoteStorageConfig = {\n provider: 'gcs', // Default to GCS for better pricing\n gcs: {\n bucketName: 'stackmemory-traces',\n projectId: process.env['GCP_PROJECT_ID'] || 'stackmemory',\n },\n timeseries: {\n type: 'sqlite', // Use SQLite for development\n host: 'localhost',\n port: 0,\n database: 'stackmemory_timeseries',\n },\n migration: {\n batchSize: 100,\n hotAgeHours: 168, // 7 days\n nearlineAgeHours: 720, // 30 days\n coldlineAgeHours: 2160, // 90 days\n archiveAgeHours: 8760, // 365 days\n scoreThreshold: 0.4,\n }\n};\n\nexport interface MigrationCandidate {\n traceId: string;\n age: number;\n score: number;\n size: number;\n tier: StorageTier;\n shouldMigrate: boolean;\n compressionLevel: 'none' | 'light' | 'medium' | 'heavy';\n}\n\n/**\n * Remote storage manager for infinite trace retention\n */\nexport class RemoteStorageManager {\n private storageClient?: S3Client | Storage;\n private config: RemoteStorageConfig;\n private localDb: Database.Database;\n private migrationInProgress = false;\n \n constructor(\n localDb: Database.Database,\n config?: Partial<RemoteStorageConfig>\n ) {\n this.localDb = localDb;\n this.config = { ...DEFAULT_REMOTE_CONFIG, ...config };\n \n this.initializeStorageClient();\n this.initializeSchema();\n }\n \n /**\n * Initialize storage client based on provider\n */\n private initializeStorageClient(): void {\n switch (this.config.provider) {\n case 'gcs':\n if (this.config.gcs) {\n this.storageClient = new Storage({\n projectId: this.config.gcs.projectId,\n keyFilename: this.config.gcs.keyFilename,\n 
});\n }\n break;\n \n case 's3':\n case 'railway':\n if (this.config.s3?.accessKeyId && this.config.s3?.secretAccessKey) {\n this.storageClient = new S3Client({\n region: this.config.s3.region,\n credentials: {\n accessKeyId: this.config.s3.accessKeyId,\n secretAccessKey: this.config.s3.secretAccessKey,\n },\n endpoint: this.config.s3.endpoint, // Railway buckets endpoint\n });\n }\n break;\n }\n }\n \n /**\n * Initialize migration tracking schema\n */\n private initializeSchema(): void {\n this.localDb.exec(`\n CREATE TABLE IF NOT EXISTS remote_migrations (\n trace_id TEXT PRIMARY KEY,\n migrated_at INTEGER NOT NULL,\n storage_tier TEXT NOT NULL,\n s3_key TEXT,\n timeseries_id TEXT,\n compression_level TEXT,\n original_size INTEGER,\n compressed_size INTEGER,\n retrieval_count INTEGER DEFAULT 0,\n last_retrieved INTEGER,\n FOREIGN KEY (trace_id) REFERENCES traces(id) ON DELETE CASCADE\n )\n `);\n \n this.localDb.exec(`\n CREATE INDEX IF NOT EXISTS idx_migrations_tier ON remote_migrations(storage_tier);\n CREATE INDEX IF NOT EXISTS idx_migrations_migrated ON remote_migrations(migrated_at);\n `);\n }\n \n /**\n * Identify traces for migration based on age and importance\n */\n async identifyMigrationCandidates(): Promise<MigrationCandidate[]> {\n const now = Date.now();\n \n // Query all traces with their metadata\n const traces = this.localDb.prepare(`\n SELECT \n t.id,\n t.score,\n t.start_time,\n LENGTH(t.compressed_data) + \n COALESCE((SELECT SUM(LENGTH(tc.arguments) + LENGTH(tc.result)) \n FROM tool_calls tc WHERE tc.trace_id = t.id), 0) as size,\n rm.trace_id as already_migrated\n FROM traces t\n LEFT JOIN remote_migrations rm ON t.id = rm.trace_id\n WHERE rm.trace_id IS NULL -- Not already migrated\n ORDER BY t.start_time ASC\n `).all() as any[];\n \n const candidates: MigrationCandidate[] = [];\n \n for (const trace of traces) {\n const ageHours = (now - trace.start_time) / (1000 * 60 * 60);\n const candidate = this.evaluateTrace(\n trace.id,\n ageHours,\n trace.score,\n trace.size || 0\n );\n \n candidates.push(candidate);\n }\n \n return candidates;\n }\n \n /**\n * Evaluate a trace for migration based on GCS storage classes\n */\n private evaluateTrace(\n traceId: string,\n ageHours: number,\n score: number,\n size: number\n ): MigrationCandidate {\n let tier = StorageTier.HOT;\n let shouldMigrate = false;\n let compressionLevel: 'none' | 'light' | 'medium' | 'heavy' = 'none';\n \n // Determine storage tier based on age and GCS storage classes\n if (ageHours > this.config.migration.archiveAgeHours) {\n // GCS Archive: $0.0012/GB - accessed < once per year\n tier = StorageTier.ARCHIVE;\n shouldMigrate = true;\n compressionLevel = 'heavy';\n } else if (ageHours > this.config.migration.coldlineAgeHours) {\n // GCS Coldline: $0.004/GB - accessed < once per quarter \n tier = StorageTier.COLDLINE;\n shouldMigrate = true;\n compressionLevel = 'heavy';\n } else if (ageHours > this.config.migration.nearlineAgeHours) {\n // GCS Nearline: $0.01/GB - accessed < once per month\n tier = StorageTier.NEARLINE;\n shouldMigrate = true;\n compressionLevel = 'medium';\n } else if (ageHours > this.config.migration.hotAgeHours) {\n // Still hot but consider migration if low importance\n tier = StorageTier.HOT;\n if (score < this.config.migration.scoreThreshold) {\n shouldMigrate = true;\n compressionLevel = 'light';\n }\n }\n \n // Force migration for size pressure\n const localSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB\n const currentLocalSize = this.getLocalStorageSize();\n \n if (currentLocalSize > 
localSizeLimit * 0.75) {\n // Start migrating when 75% full\n shouldMigrate = true;\n if (compressionLevel === 'none') {\n compressionLevel = 'light';\n }\n }\n \n return {\n traceId,\n age: ageHours,\n score,\n size,\n tier,\n shouldMigrate,\n compressionLevel,\n };\n }\n \n /**\n * Migrate traces to remote storage\n */\n async migrateTraces(\n candidates: MigrationCandidate[],\n dryRun: boolean = false\n ): Promise<{\n migrated: number;\n failed: number;\n totalSize: number;\n errors: string[];\n }> {\n if (this.migrationInProgress) {\n return {\n migrated: 0,\n failed: 0,\n totalSize: 0,\n errors: ['Migration already in progress'],\n };\n }\n \n this.migrationInProgress = true;\n const results = {\n migrated: 0,\n failed: 0,\n totalSize: 0,\n errors: [] as string[],\n };\n \n try {\n // Process in batches\n const toMigrate = candidates.filter((c: any) => c.shouldMigrate);\n const batches = this.createBatches(toMigrate, this.config.migration.batchSize);\n \n for (const batch of batches) {\n if (dryRun) {\n logger.info('Dry run - would migrate batch', {\n count: batch.length,\n totalSize: batch.reduce((sum, c) => sum + c.size, 0),\n });\n results.migrated += batch.length;\n continue;\n }\n \n const batchResults = await this.migrateBatch(batch);\n results.migrated += batchResults.success;\n results.failed += batchResults.failed;\n results.totalSize += batchResults.totalSize;\n results.errors.push(...batchResults.errors);\n \n // Small delay between batches\n await new Promise(resolve => setTimeout(resolve, 100));\n }\n \n } finally {\n this.migrationInProgress = false;\n }\n \n logger.info('Migration completed', results);\n return results;\n }\n \n /**\n * Migrate a batch of traces\n */\n private async migrateBatch(\n batch: MigrationCandidate[]\n ): Promise<{\n success: number;\n failed: number;\n totalSize: number;\n errors: string[];\n }> {\n const results = {\n success: 0,\n failed: 0,\n totalSize: 0,\n errors: [] as string[],\n };\n \n for (const candidate of batch) {\n try {\n // Get full trace data\n const trace = this.getTraceData(candidate.traceId);\n if (!trace) {\n throw new Error(`Trace ${candidate.traceId} not found`);\n }\n \n // Compress based on level\n const compressed = this.compressTrace(trace, candidate.compressionLevel);\n \n // Upload to S3\n if (this.s3Client) {\n const s3Key = this.generateS3Key(candidate);\n await this.uploadToS3(s3Key, compressed);\n \n // Record migration\n this.recordMigration(candidate, s3Key, trace, compressed);\n } else {\n // Local simulation for testing\n this.recordMigration(candidate, 'simulated', trace, compressed);\n }\n \n // Optionally remove from local after successful migration\n if (candidate.tier === StorageTier.COLD || \n candidate.tier === StorageTier.ARCHIVE) {\n this.removeLocalTrace(candidate.traceId);\n }\n \n results.success++;\n results.totalSize += candidate.size;\n \n } catch (error: unknown) {\n results.failed++;\n results.errors.push(\n `Failed to migrate ${candidate.traceId}: ${error}`\n );\n logger.error('Migration failed for trace', { \n traceId: candidate.traceId, \n error \n });\n }\n }\n \n return results;\n }\n \n /**\n * Get full trace data for migration\n */\n private getTraceData(traceId: string): any {\n const traceRow = this.localDb.prepare(\n 'SELECT * FROM traces WHERE id = ?'\n ).get(traceId);\n \n if (!traceRow) return null;\n \n const toolCalls = this.localDb.prepare(\n 'SELECT * FROM tool_calls WHERE trace_id = ? 
ORDER BY sequence_number'\n ).all(traceId);\n \n return {\n trace: traceRow,\n toolCalls,\n };\n }\n \n /**\n * Compress trace based on compression level\n */\n private compressTrace(\n data: any,\n level: 'none' | 'light' | 'medium' | 'heavy'\n ): Buffer {\n let jsonData = JSON.stringify(data);\n \n // Apply different compression based on level\n switch (level) {\n case 'none':\n return Buffer.from(jsonData);\n \n case 'light':\n // Remove formatting, keep all data\n return Buffer.from(JSON.stringify(JSON.parse(jsonData)));\n \n case 'medium':\n // Remove null fields and compress\n const cleaned = JSON.parse(jsonData, (key, value) => \n value === null || value === undefined ? undefined : value\n );\n return Buffer.from(JSON.stringify(cleaned));\n \n case 'heavy':\n // Remove tool results and arguments, keep only essential\n const minimal = {\n id: data.trace.id,\n type: data.trace.type,\n score: data.trace.score,\n summary: data.trace.summary,\n timestamps: {\n start: data.trace.start_time,\n end: data.trace.end_time,\n },\n toolCount: data.toolCalls.length,\n toolTypes: [...new Set(data.toolCalls.map((t: any) => t.tool))],\n };\n return Buffer.from(JSON.stringify(minimal));\n \n default:\n return Buffer.from(jsonData);\n }\n }\n \n /**\n * Generate S3 key for trace\n */\n private generateS3Key(candidate: MigrationCandidate): string {\n const date = new Date(Date.now() - candidate.age * 60 * 60 * 1000);\n const year = date.getFullYear();\n const month = String(date.getMonth() + 1).padStart(2, '0');\n const day = String(date.getDate()).padStart(2, '0');\n \n return `traces/${year}/${month}/${day}/${candidate.tier}/${candidate.traceId}.json`;\n }\n \n /**\n * Upload to S3\n */\n private async uploadToS3(key: string, data: Buffer): Promise<void> {\n if (!this.s3Client) {\n throw new Error('S3 client not configured');\n }\n \n const command = new PutObjectCommand({\n Bucket: this.config.s3.bucket,\n Key: key,\n Body: data,\n ContentType: 'application/json',\n Metadata: {\n 'trace-version': '1.0',\n 'compression': 'true',\n },\n });\n \n await this.s3Client.send(command);\n }\n \n /**\n * Record migration in local database\n */\n private recordMigration(\n candidate: MigrationCandidate,\n s3Key: string,\n originalData: any,\n compressedData: Buffer\n ): void {\n const stmt = this.localDb.prepare(`\n INSERT INTO remote_migrations (\n trace_id, migrated_at, storage_tier, s3_key,\n compression_level, original_size, compressed_size\n ) VALUES (?, ?, ?, ?, ?, ?, ?)\n `);\n \n stmt.run(\n candidate.traceId,\n Date.now(),\n candidate.tier,\n s3Key,\n candidate.compressionLevel,\n JSON.stringify(originalData).length,\n compressedData.length\n );\n }\n \n /**\n * Remove local trace after migration\n */\n private removeLocalTrace(traceId: string): void {\n this.localDb.prepare('DELETE FROM tool_calls WHERE trace_id = ?').run(traceId);\n this.localDb.prepare('DELETE FROM traces WHERE id = ?').run(traceId);\n }\n \n /**\n * Get current local storage size\n */\n private getLocalStorageSize(): number {\n const result = this.localDb.prepare(`\n SELECT \n SUM(LENGTH(compressed_data)) +\n COALESCE((SELECT SUM(LENGTH(arguments) + LENGTH(result)) \n FROM tool_calls), 0) as total_size\n FROM traces\n `).get() as any;\n \n return result?.total_size || 0;\n }\n \n /**\n * Create batches from candidates\n */\n private createBatches<T>(items: T[], batchSize: number): T[][] {\n const batches: T[][] = [];\n for (let i = 0; i < items.length; i += batchSize) {\n batches.push(items.slice(i, i + batchSize));\n }\n return 
batches;\n }\n \n /**\n * Retrieve trace from remote storage\n */\n async retrieveTrace(traceId: string): Promise<any> {\n const migration = this.localDb.prepare(`\n SELECT * FROM remote_migrations WHERE trace_id = ?\n `).get(traceId) as any;\n \n if (!migration) {\n throw new Error(`Trace ${traceId} not found in remote storage`);\n }\n \n // Update retrieval count\n this.localDb.prepare(`\n UPDATE remote_migrations \n SET retrieval_count = retrieval_count + 1, last_retrieved = ?\n WHERE trace_id = ?\n `).run(Date.now(), traceId);\n \n if (!this.s3Client) {\n throw new Error('S3 client not configured');\n }\n \n // Retrieve from S3\n const command = new GetObjectCommand({\n Bucket: this.config.s3.bucket,\n Key: migration.s3_key,\n });\n \n const response = await this.s3Client.send(command);\n const data = await response.Body?.transformToString();\n \n if (!data) {\n throw new Error('No data retrieved from S3');\n }\n \n return JSON.parse(data);\n }\n \n /**\n * Get migration statistics\n */\n getMigrationStats(): any {\n const stats = this.localDb.prepare(`\n SELECT \n storage_tier,\n COUNT(*) as count,\n SUM(original_size) as original_size,\n SUM(compressed_size) as compressed_size,\n AVG(retrieval_count) as avg_retrievals\n FROM remote_migrations\n GROUP BY storage_tier\n `).all();\n \n const total = this.localDb.prepare(`\n SELECT \n COUNT(*) as total_migrated,\n SUM(original_size) as total_original,\n SUM(compressed_size) as total_compressed\n FROM remote_migrations\n `).get();\n \n return {\n byTier: stats,\n total,\n compressionRatio: total \n ? (1 - (total as any).total_compressed / (total as any).total_original).toFixed(2)\n : 0,\n localSize: this.getLocalStorageSize(),\n };\n }\n}"],
- "mappings": "AAKA,SAAS,UAAU,kBAAkB,wBAA6C;AAClF,SAAS,eAAe;AACxB,SAAS,cAAc;AAIvB,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AAGO,IAAK,cAAL,kBAAKA,iBAAL;AACL,EAAAA,aAAA,SAAM;AACN,EAAAA,aAAA,cAAW;AACX,EAAAA,aAAA,cAAW;AACX,EAAAA,aAAA,aAAU;AAJA,SAAAA;AAAA,GAAA;AAuCL,MAAM,wBAA6C;AAAA,EACxD,UAAU;AAAA;AAAA,EACV,KAAK;AAAA,IACH,YAAY;AAAA,IACZ,WAAW,QAAQ,IAAI,gBAAgB,KAAK;AAAA,EAC9C;AAAA,EACA,YAAY;AAAA,IACV,MAAM;AAAA;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,UAAU;AAAA,EACZ;AAAA,EACA,WAAW;AAAA,IACT,WAAW;AAAA,IACX,aAAa;AAAA;AAAA,IACb,kBAAkB;AAAA;AAAA,IAClB,kBAAkB;AAAA;AAAA,IAClB,iBAAiB;AAAA;AAAA,IACjB,gBAAgB;AAAA,EAClB;AACF;AAeO,MAAM,qBAAqB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA,sBAAsB;AAAA,EAE9B,YACE,SACA,QACA;AACA,SAAK,UAAU;AACf,SAAK,SAAS,EAAE,GAAG,uBAAuB,GAAG,OAAO;AAEpD,SAAK,wBAAwB;AAC7B,SAAK,iBAAiB;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKQ,0BAAgC;AACtC,YAAQ,KAAK,OAAO,UAAU;AAAA,MAC5B,KAAK;AACH,YAAI,KAAK,OAAO,KAAK;AACnB,eAAK,gBAAgB,IAAI,QAAQ;AAAA,YAC/B,WAAW,KAAK,OAAO,IAAI;AAAA,YAC3B,aAAa,KAAK,OAAO,IAAI;AAAA,UAC/B,CAAC;AAAA,QACH;AACA;AAAA,MAEF,KAAK;AAAA,MACL,KAAK;AACH,YAAI,KAAK,OAAO,IAAI,eAAe,KAAK,OAAO,IAAI,iBAAiB;AAClE,eAAK,gBAAgB,IAAI,SAAS;AAAA,YAChC,QAAQ,KAAK,OAAO,GAAG;AAAA,YACvB,aAAa;AAAA,cACX,aAAa,KAAK,OAAO,GAAG;AAAA,cAC5B,iBAAiB,KAAK,OAAO,GAAG;AAAA,YAClC;AAAA,YACA,UAAU,KAAK,OAAO,GAAG;AAAA;AAAA,UAC3B,CAAC;AAAA,QACH;AACA;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAcjB;AAED,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA,KAGjB;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,8BAA6D;AACjE,UAAM,MAAM,KAAK,IAAI;AAGrB,UAAM,SAAS,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAanC,EAAE,IAAI;AAEP,UAAM,aAAmC,CAAC;AAE1C,eAAW,SAAS,QAAQ;AAC1B,YAAM,YAAY,MAAM,MAAM,eAAe,MAAO,KAAK;AACzD,YAAM,YAAY,KAAK;AAAA,QACrB,MAAM;AAAA,QACN;AAAA,QACA,MAAM;AAAA,QACN,MAAM,QAAQ;AAAA,MAChB;AAEA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,SACA,UACA,OACA,MACoB;AACpB,QAAI,OAAO;AACX,QAAI,gBAAgB;AACpB,QAAI,mBAA0D;AAG9D,QAAI,WAAW,KAAK,OAAO,UAAU,iBAAiB;AAEpD,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,kBAAkB;AAE5D,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,kBAAkB;AAE5D,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,aAAa;AAEvD,aAAO;AACP,UAAI,QAAQ,KAAK,OAAO,UAAU,gBAAgB;AAChD,wBAAgB;AAChB,2BAAmB;AAAA,MACrB;AAAA,IACF;AAGA,UAAM,iBAAiB,IAAI,OAAO,OAAO;AACzC,UAAM,mBAAmB,KAAK,oBAAoB;AAElD,QAAI,mBAAmB,iBAAiB,MAAM;AAE5C,sBAAgB;AAChB,UAAI,qBAAqB,QAAQ;AAC/B,2BAAmB;AAAA,MACrB;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cACJ,YACA,SAAkB,OAMjB;AACD,QAAI,KAAK,qBAAqB;AAC5B,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,WAAW;AAAA,QACX,QAAQ,CAAC,+BAA+B;AAAA,MAC1C;AAAA,IACF;AAEA,SAAK,sBAAsB;AAC3B,UAAM,UAAU;AAAA,MACd,UAAU;AAAA,MACV,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AAEF,YAAM,YAAY,WAAW,OAAO,CAAC,MAAW,EAAE,aAAa;AAC/D,YAAM,UAAU,KAAK,cAAc,WAAW,KAAK,OAAO,UAAU,SAAS;AAE7E,iBAAW,SAAS,SAAS;AAC3B,YAAI,QAAQ;AACV,iBAAO,KAAK,iCAAiC;AAAA,YAC3C,OAAO,MAAM;AAAA,YACb,WAAW,MAAM,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,MAAM,CAAC;AAAA,UACrD,CAAC;AACD,kBAAQ,YAAY,MAAM;AAC1B;AAAA,QACF;AAEA,cAAM,eAAe,MAAM,KAAK,aAAa,KAAK;AAClD,gBAAQ,YAAY,aAAa;AACjC,gBAAQ,UAAU,aAAa;AAC/B,gBAAQ,aAAa,aAAa;AAClC,gBAAQ,OAAO,KAAK,G
AAG,aAAa,MAAM;AAG1C,cAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,GAAG,CAAC;AAAA,MACvD;AAAA,IAEF,UAAE;AACA,WAAK,sBAAsB;AAAA,IAC7B;AAEA,WAAO,KAAK,uBAAuB,OAAO;AAC1C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,OAMC;AACD,UAAM,UAAU;AAAA,MACd,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,QAAQ,CAAC;AAAA,IACX;AAEA,eAAW,aAAa,OAAO;AAC7B,UAAI;AAEF,cAAM,QAAQ,KAAK,aAAa,UAAU,OAAO;AACjD,YAAI,CAAC,OAAO;AACV,gBAAM,IAAI,MAAM,SAAS,UAAU,OAAO,YAAY;AAAA,QACxD;AAGA,cAAM,aAAa,KAAK,cAAc,OAAO,UAAU,gBAAgB;AAGvE,YAAI,KAAK,UAAU;AACjB,gBAAM,QAAQ,KAAK,cAAc,SAAS;AAC1C,gBAAM,KAAK,WAAW,OAAO,UAAU;AAGvC,eAAK,gBAAgB,WAAW,OAAO,OAAO,UAAU;AAAA,QAC1D,OAAO;AAEL,eAAK,gBAAgB,WAAW,aAAa,OAAO,UAAU;AAAA,QAChE;AAGA,YAAI,UAAU,SAAS,YAAY,QAC/B,UAAU,SAAS,yBAAqB;AAC1C,eAAK,iBAAiB,UAAU,OAAO;AAAA,QACzC;AAEA,gBAAQ;AACR,gBAAQ,aAAa,UAAU;AAAA,MAEjC,SAAS,OAAgB;AACvB,gBAAQ;AACR,gBAAQ,OAAO;AAAA,UACb,qBAAqB,UAAU,OAAO,KAAK,KAAK;AAAA,QAClD;AACA,eAAO,MAAM,8BAA8B;AAAA,UACzC,SAAS,UAAU;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAsB;AACzC,UAAM,WAAW,KAAK,QAAQ;AAAA,MAC5B;AAAA,IACF,EAAE,IAAI,OAAO;AAEb,QAAI,CAAC,SAAU,QAAO;AAEtB,UAAM,YAAY,KAAK,QAAQ;AAAA,MAC7B;AAAA,IACF,EAAE,IAAI,OAAO;AAEb,WAAO;AAAA,MACL,OAAO;AAAA,MACP;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,MACA,OACQ;AACR,QAAI,WAAW,KAAK,UAAU,IAAI;AAGlC,YAAQ,OAAO;AAAA,MACb,KAAK;AACH,eAAO,OAAO,KAAK,QAAQ;AAAA,MAE7B,KAAK;AAEH,eAAO,OAAO,KAAK,KAAK,UAAU,KAAK,MAAM,QAAQ,CAAC,CAAC;AAAA,MAEzD,KAAK;AAEH,cAAM,UAAU,KAAK;AAAA,UAAM;AAAA,UAAU,CAAC,KAAK,UACzC,UAAU,QAAQ,UAAU,SAAY,SAAY;AAAA,QACtD;AACA,eAAO,OAAO,KAAK,KAAK,UAAU,OAAO,CAAC;AAAA,MAE5C,KAAK;AAEH,cAAM,UAAU;AAAA,UACd,IAAI,KAAK,MAAM;AAAA,UACf,MAAM,KAAK,MAAM;AAAA,UACjB,OAAO,KAAK,MAAM;AAAA,UAClB,SAAS,KAAK,MAAM;AAAA,UACpB,YAAY;AAAA,YACV,OAAO,KAAK,MAAM;AAAA,YAClB,KAAK,KAAK,MAAM;AAAA,UAClB;AAAA,UACA,WAAW,KAAK,UAAU;AAAA,UAC1B,WAAW,CAAC,GAAG,IAAI,IAAI,KAAK,UAAU,IAAI,CAAC,MAAW,EAAE,IAAI,CAAC,CAAC;AAAA,QAChE;AACA,eAAO,OAAO,KAAK,KAAK,UAAU,OAAO,CAAC;AAAA,MAE5C;AACE,eAAO,OAAO,KAAK,QAAQ;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,WAAuC;AAC3D,UAAM,OAAO,IAAI,KAAK,KAAK,IAAI,IAAI,UAAU,MAAM,KAAK,KAAK,GAAI;AACjE,UAAM,OAAO,KAAK,YAAY;AAC9B,UAAM,QAAQ,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AACzD,UAAM,MAAM,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAElD,WAAO,UAAU,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,UAAU,IAAI,IAAI,UAAU,OAAO;AAAA,EAC9E;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,KAAa,MAA6B;AACjE,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,UAAM,UAAU,IAAI,iBAAiB;AAAA,MACnC,QAAQ,KAAK,OAAO,GAAG;AAAA,MACvB,KAAK;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,QACR,iBAAiB;AAAA,QACjB,eAAe;AAAA,MACjB;AAAA,IACF,CAAC;AAED,UAAM,KAAK,SAAS,KAAK,OAAO;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,WACA,OACA,cACA,gBACM;AACN,UAAM,OAAO,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,KAKjC;AAED,SAAK;AAAA,MACH,UAAU;AAAA,MACV,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,MACV;AAAA,MACA,UAAU;AAAA,MACV,KAAK,UAAU,YAAY,EAAE;AAAA,MAC7B,eAAe;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,SAAuB;AAC9C,SAAK,QAAQ,QAAQ,2CAA2C,EAAE,IAAI,OAAO;AAC7E,SAAK,QAAQ,QAAQ,iCAAiC,EAAE,IAAI,OAAO;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAA8B;AACpC,UAAM,SAAS,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAMnC,EAAE,IAAI;AAEP,WAAO,QAAQ,cAAc;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAiB,OAAY,WAA0B;AAC7D,UAAM,UAAiB,CAAC;AACxB,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,cAAQ,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,SAA+B;AACjD,UAAM,YAAY,KAAK,QAAQ,QAAQ;AAAA;AAAA,KAEtC,EAAE,IAAI,OAAO;AAEd,QAAI,CAAC,WAAW;AACd,YAAM,IAAI,MAAM,SAAS,OAAO,8BAA8B;AAAA,IAChE;AAGA
,SAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA,KAIpB,EAAE,IAAI,KAAK,IAAI,GAAG,OAAO;AAE1B,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAGA,UAAM,UAAU,IAAI,iBAAiB;AAAA,MACnC,QAAQ,KAAK,OAAO,GAAG;AAAA,MACvB,KAAK,UAAU;AAAA,IACjB,CAAC;AAED,UAAM,WAAW,MAAM,KAAK,SAAS,KAAK,OAAO;AACjD,UAAM,OAAO,MAAM,SAAS,MAAM,kBAAkB;AAEpD,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAEA,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAyB;AACvB,UAAM,QAAQ,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KASlC,EAAE,IAAI;AAEP,UAAM,QAAQ,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAMlC,EAAE,IAAI;AAEP,WAAO;AAAA,MACL,QAAQ;AAAA,MACR;AAAA,MACA,kBAAkB,SACb,IAAK,MAAc,mBAAoB,MAAc,gBAAgB,QAAQ,CAAC,IAC/E;AAAA,MACJ,WAAW,KAAK,oBAAoB;AAAA,IACtC;AAAA,EACF;AACF;",
+ "sourcesContent": ["/**\n * Remote Storage Interface for Two-Tier Storage System\n * Implements infinite retention with TimeSeries DB + S3\n */\n\nimport {\n S3Client,\n PutObjectCommand,\n GetObjectCommand,\n DeleteObjectCommand,\n} from '@aws-sdk/client-s3';\nimport { Storage } from '@google-cloud/storage';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace } from '../trace/types.js';\nimport Database from 'better-sqlite3';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\nexport enum StorageTier {\n HOT = 'hot', // < 7 days - Railway Buckets or GCS Standard\n NEARLINE = 'nearline', // 7-30 days - GCS Nearline ($0.01/GB)\n COLDLINE = 'coldline', // 30-90 days - GCS Coldline ($0.004/GB)\n ARCHIVE = 'archive', // > 90 days - GCS Archive ($0.0012/GB)\n}\n\nexport interface RemoteStorageConfig {\n provider: 'gcs' | 's3' | 'railway';\n gcs?: {\n bucketName: string;\n projectId: string;\n keyFilename?: string; // Path to service account key\n };\n s3?: {\n bucket: string;\n region: string;\n accessKeyId?: string;\n secretAccessKey?: string;\n endpoint?: string; // For Railway buckets or MinIO\n };\n timeseries: {\n type: 'clickhouse' | 'timescale' | 'influxdb' | 'sqlite'; // SQLite for dev\n host: string;\n port: number;\n database: string;\n username?: string;\n password?: string;\n };\n migration: {\n batchSize: number;\n hotAgeHours: number; // < 7 days\n nearlineAgeHours: number; // 7-30 days\n coldlineAgeHours: number; // 30-90 days\n archiveAgeHours: number; // > 90 days\n scoreThreshold: number; // Score threshold for early migration\n };\n}\n\nexport const DEFAULT_REMOTE_CONFIG: RemoteStorageConfig = {\n provider: 'gcs', // Default to GCS for better pricing\n gcs: {\n bucketName: 'stackmemory-traces',\n projectId: process.env['GCP_PROJECT_ID'] || 'stackmemory',\n },\n timeseries: {\n type: 'sqlite', // Use SQLite for development\n host: 'localhost',\n port: 0,\n database: 'stackmemory_timeseries',\n },\n migration: {\n batchSize: 100,\n hotAgeHours: 168, // 7 days\n nearlineAgeHours: 720, // 30 days\n coldlineAgeHours: 2160, // 90 days\n archiveAgeHours: 8760, // 365 days\n scoreThreshold: 0.4,\n },\n};\n\nexport interface MigrationCandidate {\n traceId: string;\n age: number;\n score: number;\n size: number;\n tier: StorageTier;\n shouldMigrate: boolean;\n compressionLevel: 'none' | 'light' | 'medium' | 'heavy';\n}\n\n/**\n * Remote storage manager for infinite trace retention\n */\nexport class RemoteStorageManager {\n private storageClient?: S3Client | Storage;\n private config: RemoteStorageConfig;\n private localDb: Database.Database;\n private migrationInProgress = false;\n\n constructor(\n localDb: Database.Database,\n config?: Partial<RemoteStorageConfig>\n ) {\n this.localDb = localDb;\n this.config = { ...DEFAULT_REMOTE_CONFIG, ...config };\n\n this.initializeStorageClient();\n this.initializeSchema();\n }\n\n /**\n * Initialize storage client based on provider\n */\n private initializeStorageClient(): void {\n switch (this.config.provider) {\n case 'gcs':\n if (this.config.gcs) {\n this.storageClient = new Storage({\n projectId: this.config.gcs.projectId,\n keyFilename: 
this.config.gcs.keyFilename,\n });\n }\n break;\n\n case 's3':\n case 'railway':\n if (this.config.s3?.accessKeyId && this.config.s3?.secretAccessKey) {\n this.storageClient = new S3Client({\n region: this.config.s3.region,\n credentials: {\n accessKeyId: this.config.s3.accessKeyId,\n secretAccessKey: this.config.s3.secretAccessKey,\n },\n endpoint: this.config.s3.endpoint, // Railway buckets endpoint\n });\n }\n break;\n }\n }\n\n /**\n * Initialize migration tracking schema\n */\n private initializeSchema(): void {\n this.localDb.exec(`\n CREATE TABLE IF NOT EXISTS remote_migrations (\n trace_id TEXT PRIMARY KEY,\n migrated_at INTEGER NOT NULL,\n storage_tier TEXT NOT NULL,\n s3_key TEXT,\n timeseries_id TEXT,\n compression_level TEXT,\n original_size INTEGER,\n compressed_size INTEGER,\n retrieval_count INTEGER DEFAULT 0,\n last_retrieved INTEGER,\n FOREIGN KEY (trace_id) REFERENCES traces(id) ON DELETE CASCADE\n )\n `);\n\n this.localDb.exec(`\n CREATE INDEX IF NOT EXISTS idx_migrations_tier ON remote_migrations(storage_tier);\n CREATE INDEX IF NOT EXISTS idx_migrations_migrated ON remote_migrations(migrated_at);\n `);\n }\n\n /**\n * Identify traces for migration based on age and importance\n */\n async identifyMigrationCandidates(): Promise<MigrationCandidate[]> {\n const now = Date.now();\n\n // Query all traces with their metadata\n const traces = this.localDb\n .prepare(\n `\n SELECT \n t.id,\n t.score,\n t.start_time,\n LENGTH(t.compressed_data) + \n COALESCE((SELECT SUM(LENGTH(tc.arguments) + LENGTH(tc.result)) \n FROM tool_calls tc WHERE tc.trace_id = t.id), 0) as size,\n rm.trace_id as already_migrated\n FROM traces t\n LEFT JOIN remote_migrations rm ON t.id = rm.trace_id\n WHERE rm.trace_id IS NULL -- Not already migrated\n ORDER BY t.start_time ASC\n `\n )\n .all() as any[];\n\n const candidates: MigrationCandidate[] = [];\n\n for (const trace of traces) {\n const ageHours = (now - trace.start_time) / (1000 * 60 * 60);\n const candidate = this.evaluateTrace(\n trace.id,\n ageHours,\n trace.score,\n trace.size || 0\n );\n\n candidates.push(candidate);\n }\n\n return candidates;\n }\n\n /**\n * Evaluate a trace for migration based on GCS storage classes\n */\n private evaluateTrace(\n traceId: string,\n ageHours: number,\n score: number,\n size: number\n ): MigrationCandidate {\n let tier = StorageTier.HOT;\n let shouldMigrate = false;\n let compressionLevel: 'none' | 'light' | 'medium' | 'heavy' = 'none';\n\n // Determine storage tier based on age and GCS storage classes\n if (ageHours > this.config.migration.archiveAgeHours) {\n // GCS Archive: $0.0012/GB - accessed < once per year\n tier = StorageTier.ARCHIVE;\n shouldMigrate = true;\n compressionLevel = 'heavy';\n } else if (ageHours > this.config.migration.coldlineAgeHours) {\n // GCS Coldline: $0.004/GB - accessed < once per quarter\n tier = StorageTier.COLDLINE;\n shouldMigrate = true;\n compressionLevel = 'heavy';\n } else if (ageHours > this.config.migration.nearlineAgeHours) {\n // GCS Nearline: $0.01/GB - accessed < once per month\n tier = StorageTier.NEARLINE;\n shouldMigrate = true;\n compressionLevel = 'medium';\n } else if (ageHours > this.config.migration.hotAgeHours) {\n // Still hot but consider migration if low importance\n tier = StorageTier.HOT;\n if (score < this.config.migration.scoreThreshold) {\n shouldMigrate = true;\n compressionLevel = 'light';\n }\n }\n\n // Force migration for size pressure\n const localSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB\n const currentLocalSize = 
this.getLocalStorageSize();\n\n if (currentLocalSize > localSizeLimit * 0.75) {\n // Start migrating when 75% full\n shouldMigrate = true;\n if (compressionLevel === 'none') {\n compressionLevel = 'light';\n }\n }\n\n return {\n traceId,\n age: ageHours,\n score,\n size,\n tier,\n shouldMigrate,\n compressionLevel,\n };\n }\n\n /**\n * Migrate traces to remote storage\n */\n async migrateTraces(\n candidates: MigrationCandidate[],\n dryRun: boolean = false\n ): Promise<{\n migrated: number;\n failed: number;\n totalSize: number;\n errors: string[];\n }> {\n if (this.migrationInProgress) {\n return {\n migrated: 0,\n failed: 0,\n totalSize: 0,\n errors: ['Migration already in progress'],\n };\n }\n\n this.migrationInProgress = true;\n const results = {\n migrated: 0,\n failed: 0,\n totalSize: 0,\n errors: [] as string[],\n };\n\n try {\n // Process in batches\n const toMigrate = candidates.filter((c: any) => c.shouldMigrate);\n const batches = this.createBatches(\n toMigrate,\n this.config.migration.batchSize\n );\n\n for (const batch of batches) {\n if (dryRun) {\n logger.info('Dry run - would migrate batch', {\n count: batch.length,\n totalSize: batch.reduce((sum, c) => sum + c.size, 0),\n });\n results.migrated += batch.length;\n continue;\n }\n\n const batchResults = await this.migrateBatch(batch);\n results.migrated += batchResults.success;\n results.failed += batchResults.failed;\n results.totalSize += batchResults.totalSize;\n results.errors.push(...batchResults.errors);\n\n // Small delay between batches\n await new Promise((resolve) => setTimeout(resolve, 100));\n }\n } finally {\n this.migrationInProgress = false;\n }\n\n logger.info('Migration completed', results);\n return results;\n }\n\n /**\n * Migrate a batch of traces\n */\n private async migrateBatch(batch: MigrationCandidate[]): Promise<{\n success: number;\n failed: number;\n totalSize: number;\n errors: string[];\n }> {\n const results = {\n success: 0,\n failed: 0,\n totalSize: 0,\n errors: [] as string[],\n };\n\n for (const candidate of batch) {\n try {\n // Get full trace data\n const trace = this.getTraceData(candidate.traceId);\n if (!trace) {\n throw new Error(`Trace ${candidate.traceId} not found`);\n }\n\n // Compress based on level\n const compressed = this.compressTrace(\n trace,\n candidate.compressionLevel\n );\n\n // Upload to S3\n if (this.s3Client) {\n const s3Key = this.generateS3Key(candidate);\n await this.uploadToS3(s3Key, compressed);\n\n // Record migration\n this.recordMigration(candidate, s3Key, trace, compressed);\n } else {\n // Local simulation for testing\n this.recordMigration(candidate, 'simulated', trace, compressed);\n }\n\n // Optionally remove from local after successful migration\n if (\n candidate.tier === StorageTier.COLD ||\n candidate.tier === StorageTier.ARCHIVE\n ) {\n this.removeLocalTrace(candidate.traceId);\n }\n\n results.success++;\n results.totalSize += candidate.size;\n } catch (error: unknown) {\n results.failed++;\n results.errors.push(`Failed to migrate ${candidate.traceId}: ${error}`);\n logger.error('Migration failed for trace', {\n traceId: candidate.traceId,\n error,\n });\n }\n }\n\n return results;\n }\n\n /**\n * Get full trace data for migration\n */\n private getTraceData(traceId: string): any {\n const traceRow = this.localDb\n .prepare('SELECT * FROM traces WHERE id = ?')\n .get(traceId);\n\n if (!traceRow) return null;\n\n const toolCalls = this.localDb\n .prepare(\n 'SELECT * FROM tool_calls WHERE trace_id = ? 
ORDER BY sequence_number'\n )\n .all(traceId);\n\n return {\n trace: traceRow,\n toolCalls,\n };\n }\n\n /**\n * Compress trace based on compression level\n */\n private compressTrace(\n data: any,\n level: 'none' | 'light' | 'medium' | 'heavy'\n ): Buffer {\n const jsonData = JSON.stringify(data);\n\n // Apply different compression based on level\n switch (level) {\n case 'none':\n return Buffer.from(jsonData);\n\n case 'light':\n // Remove formatting, keep all data\n return Buffer.from(JSON.stringify(JSON.parse(jsonData)));\n\n case 'medium':\n // Remove null fields and compress\n const cleaned = JSON.parse(jsonData, (key, value) =>\n value === null || value === undefined ? undefined : value\n );\n return Buffer.from(JSON.stringify(cleaned));\n\n case 'heavy':\n // Remove tool results and arguments, keep only essential\n const minimal = {\n id: data.trace.id,\n type: data.trace.type,\n score: data.trace.score,\n summary: data.trace.summary,\n timestamps: {\n start: data.trace.start_time,\n end: data.trace.end_time,\n },\n toolCount: data.toolCalls.length,\n toolTypes: [...new Set(data.toolCalls.map((t: any) => t.tool))],\n };\n return Buffer.from(JSON.stringify(minimal));\n\n default:\n return Buffer.from(jsonData);\n }\n }\n\n /**\n * Generate S3 key for trace\n */\n private generateS3Key(candidate: MigrationCandidate): string {\n const date = new Date(Date.now() - candidate.age * 60 * 60 * 1000);\n const year = date.getFullYear();\n const month = String(date.getMonth() + 1).padStart(2, '0');\n const day = String(date.getDate()).padStart(2, '0');\n\n return `traces/${year}/${month}/${day}/${candidate.tier}/${candidate.traceId}.json`;\n }\n\n /**\n * Upload to S3\n */\n private async uploadToS3(key: string, data: Buffer): Promise<void> {\n if (!this.s3Client) {\n throw new Error('S3 client not configured');\n }\n\n const command = new PutObjectCommand({\n Bucket: this.config.s3.bucket,\n Key: key,\n Body: data,\n ContentType: 'application/json',\n Metadata: {\n 'trace-version': '1.0',\n compression: 'true',\n },\n });\n\n await this.s3Client.send(command);\n }\n\n /**\n * Record migration in local database\n */\n private recordMigration(\n candidate: MigrationCandidate,\n s3Key: string,\n originalData: any,\n compressedData: Buffer\n ): void {\n const stmt = this.localDb.prepare(`\n INSERT INTO remote_migrations (\n trace_id, migrated_at, storage_tier, s3_key,\n compression_level, original_size, compressed_size\n ) VALUES (?, ?, ?, ?, ?, ?, ?)\n `);\n\n stmt.run(\n candidate.traceId,\n Date.now(),\n candidate.tier,\n s3Key,\n candidate.compressionLevel,\n JSON.stringify(originalData).length,\n compressedData.length\n );\n }\n\n /**\n * Remove local trace after migration\n */\n private removeLocalTrace(traceId: string): void {\n this.localDb\n .prepare('DELETE FROM tool_calls WHERE trace_id = ?')\n .run(traceId);\n this.localDb.prepare('DELETE FROM traces WHERE id = ?').run(traceId);\n }\n\n /**\n * Get current local storage size\n */\n private getLocalStorageSize(): number {\n const result = this.localDb\n .prepare(\n `\n SELECT \n SUM(LENGTH(compressed_data)) +\n COALESCE((SELECT SUM(LENGTH(arguments) + LENGTH(result)) \n FROM tool_calls), 0) as total_size\n FROM traces\n `\n )\n .get() as any;\n\n return result?.total_size || 0;\n }\n\n /**\n * Create batches from candidates\n */\n private createBatches<T>(items: T[], batchSize: number): T[][] {\n const batches: T[][] = [];\n for (let i = 0; i < items.length; i += batchSize) {\n batches.push(items.slice(i, i + batchSize));\n }\n return 
batches;\n }\n\n /**\n * Retrieve trace from remote storage\n */\n async retrieveTrace(traceId: string): Promise<any> {\n const migration = this.localDb\n .prepare(\n `\n SELECT * FROM remote_migrations WHERE trace_id = ?\n `\n )\n .get(traceId) as any;\n\n if (!migration) {\n throw new Error(`Trace ${traceId} not found in remote storage`);\n }\n\n // Update retrieval count\n this.localDb\n .prepare(\n `\n UPDATE remote_migrations \n SET retrieval_count = retrieval_count + 1, last_retrieved = ?\n WHERE trace_id = ?\n `\n )\n .run(Date.now(), traceId);\n\n if (!this.s3Client) {\n throw new Error('S3 client not configured');\n }\n\n // Retrieve from S3\n const command = new GetObjectCommand({\n Bucket: this.config.s3.bucket,\n Key: migration.s3_key,\n });\n\n const response = await this.s3Client.send(command);\n const data = await response.Body?.transformToString();\n\n if (!data) {\n throw new Error('No data retrieved from S3');\n }\n\n return JSON.parse(data);\n }\n\n /**\n * Get migration statistics\n */\n getMigrationStats(): any {\n const stats = this.localDb\n .prepare(\n `\n SELECT \n storage_tier,\n COUNT(*) as count,\n SUM(original_size) as original_size,\n SUM(compressed_size) as compressed_size,\n AVG(retrieval_count) as avg_retrievals\n FROM remote_migrations\n GROUP BY storage_tier\n `\n )\n .all();\n\n const total = this.localDb\n .prepare(\n `\n SELECT \n COUNT(*) as total_migrated,\n SUM(original_size) as total_original,\n SUM(compressed_size) as total_compressed\n FROM remote_migrations\n `\n )\n .get();\n\n return {\n byTier: stats,\n total,\n compressionRatio: total\n ? (\n 1 -\n (total as any).total_compressed / (total as any).total_original\n ).toFixed(2)\n : 0,\n localSize: this.getLocalStorageSize(),\n };\n }\n}\n"],
+ "mappings": "AAKA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,eAAe;AACxB,SAAS,cAAc;AAIvB,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AAEO,IAAK,cAAL,kBAAKA,iBAAL;AACL,EAAAA,aAAA,SAAM;AACN,EAAAA,aAAA,cAAW;AACX,EAAAA,aAAA,cAAW;AACX,EAAAA,aAAA,aAAU;AAJA,SAAAA;AAAA,GAAA;AAuCL,MAAM,wBAA6C;AAAA,EACxD,UAAU;AAAA;AAAA,EACV,KAAK;AAAA,IACH,YAAY;AAAA,IACZ,WAAW,QAAQ,IAAI,gBAAgB,KAAK;AAAA,EAC9C;AAAA,EACA,YAAY;AAAA,IACV,MAAM;AAAA;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,UAAU;AAAA,EACZ;AAAA,EACA,WAAW;AAAA,IACT,WAAW;AAAA,IACX,aAAa;AAAA;AAAA,IACb,kBAAkB;AAAA;AAAA,IAClB,kBAAkB;AAAA;AAAA,IAClB,iBAAiB;AAAA;AAAA,IACjB,gBAAgB;AAAA,EAClB;AACF;AAeO,MAAM,qBAAqB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA,sBAAsB;AAAA,EAE9B,YACE,SACA,QACA;AACA,SAAK,UAAU;AACf,SAAK,SAAS,EAAE,GAAG,uBAAuB,GAAG,OAAO;AAEpD,SAAK,wBAAwB;AAC7B,SAAK,iBAAiB;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKQ,0BAAgC;AACtC,YAAQ,KAAK,OAAO,UAAU;AAAA,MAC5B,KAAK;AACH,YAAI,KAAK,OAAO,KAAK;AACnB,eAAK,gBAAgB,IAAI,QAAQ;AAAA,YAC/B,WAAW,KAAK,OAAO,IAAI;AAAA,YAC3B,aAAa,KAAK,OAAO,IAAI;AAAA,UAC/B,CAAC;AAAA,QACH;AACA;AAAA,MAEF,KAAK;AAAA,MACL,KAAK;AACH,YAAI,KAAK,OAAO,IAAI,eAAe,KAAK,OAAO,IAAI,iBAAiB;AAClE,eAAK,gBAAgB,IAAI,SAAS;AAAA,YAChC,QAAQ,KAAK,OAAO,GAAG;AAAA,YACvB,aAAa;AAAA,cACX,aAAa,KAAK,OAAO,GAAG;AAAA,cAC5B,iBAAiB,KAAK,OAAO,GAAG;AAAA,YAClC;AAAA,YACA,UAAU,KAAK,OAAO,GAAG;AAAA;AAAA,UAC3B,CAAC;AAAA,QACH;AACA;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAcjB;AAED,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA,KAGjB;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,8BAA6D;AACjE,UAAM,MAAM,KAAK,IAAI;AAGrB,UAAM,SAAS,KAAK,QACjB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAcF,EACC,IAAI;AAEP,UAAM,aAAmC,CAAC;AAE1C,eAAW,SAAS,QAAQ;AAC1B,YAAM,YAAY,MAAM,MAAM,eAAe,MAAO,KAAK;AACzD,YAAM,YAAY,KAAK;AAAA,QACrB,MAAM;AAAA,QACN;AAAA,QACA,MAAM;AAAA,QACN,MAAM,QAAQ;AAAA,MAChB;AAEA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,SACA,UACA,OACA,MACoB;AACpB,QAAI,OAAO;AACX,QAAI,gBAAgB;AACpB,QAAI,mBAA0D;AAG9D,QAAI,WAAW,KAAK,OAAO,UAAU,iBAAiB;AAEpD,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,kBAAkB;AAE5D,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,kBAAkB;AAE5D,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,aAAa;AAEvD,aAAO;AACP,UAAI,QAAQ,KAAK,OAAO,UAAU,gBAAgB;AAChD,wBAAgB;AAChB,2BAAmB;AAAA,MACrB;AAAA,IACF;AAGA,UAAM,iBAAiB,IAAI,OAAO,OAAO;AACzC,UAAM,mBAAmB,KAAK,oBAAoB;AAElD,QAAI,mBAAmB,iBAAiB,MAAM;AAE5C,sBAAgB;AAChB,UAAI,qBAAqB,QAAQ;AAC/B,2BAAmB;AAAA,MACrB;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cACJ,YACA,SAAkB,OAMjB;AACD,QAAI,KAAK,qBAAqB;AAC5B,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,WAAW;AAAA,QACX,QAAQ,CAAC,+BAA+B;AAAA,MAC1C;AAAA,IACF;AAEA,SAAK,sBAAsB;AAC3B,UAAM,UAAU;AAAA,MACd,UAAU;AAAA,MACV,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AAEF,YAAM,YAAY,WAAW,OAAO,CAAC,MAAW,EAAE,aAAa;AAC/D,YAAM,UAAU,KAAK;AAAA,QACnB;AAAA,QACA,KAAK,OAAO,UAAU;AAAA,MACxB;AAEA,iBAAW,SAAS,SAAS;AAC3B,YAAI,QAAQ;AACV,iBAAO,KAAK,iCAAiC;AAAA,YAC3C,OAAO,MAAM;AAAA,YACb,WAAW,MAAM,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,MAAM,CAAC;AAAA,UACrD,CAAC;AACD,kBAAQ,YAAY,MAAM;AAC1B;AAAA,QACF;AAEA,cAAM,eAAe,MAAM,KAAK,aAAa,KAAK;AAClD,gBAAQ,YAAY,aAAa;AACjC,gBAAQ,UAAU,aAAa;AAC/
B,gBAAQ,aAAa,aAAa;AAClC,gBAAQ,OAAO,KAAK,GAAG,aAAa,MAAM;AAG1C,cAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAG,CAAC;AAAA,MACzD;AAAA,IACF,UAAE;AACA,WAAK,sBAAsB;AAAA,IAC7B;AAEA,WAAO,KAAK,uBAAuB,OAAO;AAC1C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,OAKxB;AACD,UAAM,UAAU;AAAA,MACd,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,QAAQ,CAAC;AAAA,IACX;AAEA,eAAW,aAAa,OAAO;AAC7B,UAAI;AAEF,cAAM,QAAQ,KAAK,aAAa,UAAU,OAAO;AACjD,YAAI,CAAC,OAAO;AACV,gBAAM,IAAI,MAAM,SAAS,UAAU,OAAO,YAAY;AAAA,QACxD;AAGA,cAAM,aAAa,KAAK;AAAA,UACtB;AAAA,UACA,UAAU;AAAA,QACZ;AAGA,YAAI,KAAK,UAAU;AACjB,gBAAM,QAAQ,KAAK,cAAc,SAAS;AAC1C,gBAAM,KAAK,WAAW,OAAO,UAAU;AAGvC,eAAK,gBAAgB,WAAW,OAAO,OAAO,UAAU;AAAA,QAC1D,OAAO;AAEL,eAAK,gBAAgB,WAAW,aAAa,OAAO,UAAU;AAAA,QAChE;AAGA,YACE,UAAU,SAAS,YAAY,QAC/B,UAAU,SAAS,yBACnB;AACA,eAAK,iBAAiB,UAAU,OAAO;AAAA,QACzC;AAEA,gBAAQ;AACR,gBAAQ,aAAa,UAAU;AAAA,MACjC,SAAS,OAAgB;AACvB,gBAAQ;AACR,gBAAQ,OAAO,KAAK,qBAAqB,UAAU,OAAO,KAAK,KAAK,EAAE;AACtE,eAAO,MAAM,8BAA8B;AAAA,UACzC,SAAS,UAAU;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAsB;AACzC,UAAM,WAAW,KAAK,QACnB,QAAQ,mCAAmC,EAC3C,IAAI,OAAO;AAEd,QAAI,CAAC,SAAU,QAAO;AAEtB,UAAM,YAAY,KAAK,QACpB;AAAA,MACC;AAAA,IACF,EACC,IAAI,OAAO;AAEd,WAAO;AAAA,MACL,OAAO;AAAA,MACP;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,MACA,OACQ;AACR,UAAM,WAAW,KAAK,UAAU,IAAI;AAGpC,YAAQ,OAAO;AAAA,MACb,KAAK;AACH,eAAO,OAAO,KAAK,QAAQ;AAAA,MAE7B,KAAK;AAEH,eAAO,OAAO,KAAK,KAAK,UAAU,KAAK,MAAM,QAAQ,CAAC,CAAC;AAAA,MAEzD,KAAK;AAEH,cAAM,UAAU,KAAK;AAAA,UAAM;AAAA,UAAU,CAAC,KAAK,UACzC,UAAU,QAAQ,UAAU,SAAY,SAAY;AAAA,QACtD;AACA,eAAO,OAAO,KAAK,KAAK,UAAU,OAAO,CAAC;AAAA,MAE5C,KAAK;AAEH,cAAM,UAAU;AAAA,UACd,IAAI,KAAK,MAAM;AAAA,UACf,MAAM,KAAK,MAAM;AAAA,UACjB,OAAO,KAAK,MAAM;AAAA,UAClB,SAAS,KAAK,MAAM;AAAA,UACpB,YAAY;AAAA,YACV,OAAO,KAAK,MAAM;AAAA,YAClB,KAAK,KAAK,MAAM;AAAA,UAClB;AAAA,UACA,WAAW,KAAK,UAAU;AAAA,UAC1B,WAAW,CAAC,GAAG,IAAI,IAAI,KAAK,UAAU,IAAI,CAAC,MAAW,EAAE,IAAI,CAAC,CAAC;AAAA,QAChE;AACA,eAAO,OAAO,KAAK,KAAK,UAAU,OAAO,CAAC;AAAA,MAE5C;AACE,eAAO,OAAO,KAAK,QAAQ;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,WAAuC;AAC3D,UAAM,OAAO,IAAI,KAAK,KAAK,IAAI,IAAI,UAAU,MAAM,KAAK,KAAK,GAAI;AACjE,UAAM,OAAO,KAAK,YAAY;AAC9B,UAAM,QAAQ,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AACzD,UAAM,MAAM,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAElD,WAAO,UAAU,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,UAAU,IAAI,IAAI,UAAU,OAAO;AAAA,EAC9E;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,KAAa,MAA6B;AACjE,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,UAAM,UAAU,IAAI,iBAAiB;AAAA,MACnC,QAAQ,KAAK,OAAO,GAAG;AAAA,MACvB,KAAK;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,QACR,iBAAiB;AAAA,QACjB,aAAa;AAAA,MACf;AAAA,IACF,CAAC;AAED,UAAM,KAAK,SAAS,KAAK,OAAO;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,WACA,OACA,cACA,gBACM;AACN,UAAM,OAAO,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,KAKjC;AAED,SAAK;AAAA,MACH,UAAU;AAAA,MACV,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,MACV;AAAA,MACA,UAAU;AAAA,MACV,KAAK,UAAU,YAAY,EAAE;AAAA,MAC7B,eAAe;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,SAAuB;AAC9C,SAAK,QACF,QAAQ,2CAA2C,EACnD,IAAI,OAAO;AACd,SAAK,QAAQ,QAAQ,iCAAiC,EAAE,IAAI,OAAO;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAA8B;AACpC,UAAM,SAAS,KAAK,QACjB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOF,EACC,IAAI;AAEP,WAAO,QAAQ,cAAc;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAiB,OAAY,WAA0B;AAC7D,UAAM,UAAiB,CAAC;AACxB,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,cAAQ,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,SAA+B;AACjD,UAAM,YAAY,KAAK,QACpB;AAAA,MACC;AAAA;AAAA;AAAA,IAGF,EACC,IAAI,OAAO;AAEd,QAAI,C
AAC,WAAW;AACd,YAAM,IAAI,MAAM,SAAS,OAAO,8BAA8B;AAAA,IAChE;AAGA,SAAK,QACF;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA,IAKF,EACC,IAAI,KAAK,IAAI,GAAG,OAAO;AAE1B,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAGA,UAAM,UAAU,IAAI,iBAAiB;AAAA,MACnC,QAAQ,KAAK,OAAO,GAAG;AAAA,MACvB,KAAK,UAAU;AAAA,IACjB,CAAC;AAED,UAAM,WAAW,MAAM,KAAK,SAAS,KAAK,OAAO;AACjD,UAAM,OAAO,MAAM,SAAS,MAAM,kBAAkB;AAEpD,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAEA,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAyB;AACvB,UAAM,QAAQ,KAAK,QAChB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAUF,EACC,IAAI;AAEP,UAAM,QAAQ,KAAK,QAChB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOF,EACC,IAAI;AAEP,WAAO;AAAA,MACL,QAAQ;AAAA,MACR;AAAA,MACA,kBAAkB,SAEZ,IACC,MAAc,mBAAoB,MAAc,gBACjD,QAAQ,CAAC,IACX;AAAA,MACJ,WAAW,KAAK,oBAAoB;AAAA,IACtC;AAAA,EACF;AACF;",
  "names": ["StorageTier"]
  }
@@ -40,13 +40,20 @@ function wrapCommand(command) {
  console.log(trace.getExecutionSummary());
  }
  } catch (error) {
- logger.error(`CLI Command Failed: ${commandPath}`, error, context);
+ logger.error(
+ `CLI Command Failed: ${commandPath}`,
+ error,
+ context
+ );
  const lastError = trace.getLastError();
  if (lastError) {
  console.error("\n\u{1F4CD} Error occurred at:");
  console.error(` ${lastError.name}`);
  if (lastError.params) {
- console.error(" With params:", JSON.stringify(lastError.params, null, 2));
+ console.error(
+ " With params:",
+ JSON.stringify(lastError.params, null, 2)
+ );
  }
  console.error(" Error details:", lastError.error);
  }
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/trace/cli-trace-wrapper.ts"],
- "sourcesContent": ["/**\n * CLI Command Trace Wrapper\n * Automatically wraps Commander.js commands with comprehensive tracing\n */\n\nimport { Command } from 'commander';\nimport { trace } from './debug-trace.js';\nimport { logger } from '../monitoring/logger.js';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\n\nexport function wrapCommand(command: Command): Command {\n const originalAction = command.action.bind(command);\n \n command.action(async function(...args: any[]): Promise<void> {\n // Extract command path and options\n const commandPath = getCommandPath(command);\n const options = args[args.length - 1];\n const commandArgs = args.slice(0, -1);\n \n // Build comprehensive context\n const context = {\n command: commandPath,\n args: commandArgs,\n options: typeof options === 'object' ? options : {},\n cwd: process.cwd(),\n env: {\n NODE_ENV: process.env['NODE_ENV'],\n DEBUG_TRACE: process.env['DEBUG_TRACE'],\n LINEAR_API_KEY: process.env['LINEAR_API_KEY'] ? '[SET]' : '[NOT SET]',\n },\n timestamp: new Date().toISOString(),\n };\n \n // Log command start\n logger.info(`CLI Command: ${commandPath}`, context);\n \n // Wrap the actual action with tracing\n await trace.command(commandPath, context, async () => {\n try {\n // Call the original action with wrapped handler\n const result = await originalAction.apply(null, args as any);\n \n // Log successful completion\n logger.info(`CLI Command Completed: ${commandPath}`, {\n duration: trace.exportTraces().find((t: any) => t.name === commandPath)?.duration,\n });\n \n // Show execution summary if verbose\n if (process.env['DEBUG_TRACE'] === 'true') {\n console.log(trace.getExecutionSummary());\n }\n } catch (error: unknown) {\n // Enhanced error logging for CLI commands\n logger.error(`CLI Command Failed: ${commandPath}`, error as Error, context);\n \n // Get the last error trace for debugging\n const lastError = trace.getLastError();\n if (lastError) {\n console.error('\\n\uD83D\uDCCD Error occurred at:');\n console.error(` ${lastError.name}`);\n if (lastError.params) {\n console.error(' With params:', JSON.stringify(lastError.params, null, 2));\n }\n console.error(' Error details:', lastError.error);\n }\n \n // Re-throw to maintain original error handling\n throw error;\n }\n });\n });\n \n // Recursively wrap subcommands\n command.commands.forEach(subcommand => {\n wrapCommand(subcommand);\n });\n \n return command;\n}\n\nfunction getCommandPath(command: Command): string {\n const parts: string[] = [];\n let current: Command | null = command;\n \n while (current) {\n if (current.name()) {\n parts.unshift(current.name());\n }\n current = current.parent as Command | null;\n }\n \n return parts.join(' ');\n}\n\n/**\n * Wrap the main program with comprehensive tracing\n */\nexport function wrapProgram(program: Command): Command {\n // Add global error handler with tracing\n program.exitOverride((err) => {\n if (err.code === 'commander.help' || err.code === 'commander.version') {\n // Normal help/version display, not an error\n process.exit(0);\n }\n \n // Log the error with full context\n logger.error('CLI Error', err, {\n code: err.code,\n exitCode: err.exitCode,\n command: 
process.argv.slice(2).join(' '),\n });\n \n // Show trace summary on error\n if (process.env['DEBUG_TRACE'] === 'true') {\n console.error('\\n' + trace.getExecutionSummary());\n }\n \n process.exit(err.exitCode || 1);\n });\n \n // Add pre-action hook for setup\n program.hook('preAction', (thisCommand) => {\n // Initialize trace context for this command\n trace.reset();\n \n // Log command invocation\n const commandPath = getCommandPath(thisCommand);\n logger.debug(`Preparing to execute: ${commandPath}`, {\n args: thisCommand.args,\n opts: thisCommand.opts(),\n });\n });\n \n // Add post-action hook for cleanup\n program.hook('postAction', (thisCommand) => {\n // Log completion\n const commandPath = getCommandPath(thisCommand);\n logger.debug(`Completed execution: ${commandPath}`);\n });\n \n // Wrap all existing commands\n program.commands.forEach(command => {\n wrapCommand(command);\n });\n \n return program;\n}\n\n/**\n * Helper to wrap async functions with step tracing\n */\nexport function traceStep<T>(name: string, fn: () => Promise<T>): Promise<T> {\n return trace.step(name, fn);\n}\n\n/**\n * Helper to wrap database queries\n */\nexport function traceQuery<T>(sql: string, params: any, fn: () => T): T {\n return trace.traceSync('query', sql.substring(0, 100), params, fn);\n}\n\n/**\n * Helper to wrap API calls\n */\nexport function traceAPI<T>(\n method: string,\n url: string,\n body: any,\n fn: () => Promise<T>\n): Promise<T> {\n return trace.api(method, url, body, fn);\n}"],
- "mappings": "AAMA,SAAS,aAAa;AACtB,SAAS,cAAc;AAEvB,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AAGO,SAAS,YAAY,SAA2B;AACrD,QAAM,iBAAiB,QAAQ,OAAO,KAAK,OAAO;AAElD,UAAQ,OAAO,kBAAkB,MAA4B;AAE3D,UAAM,cAAc,eAAe,OAAO;AAC1C,UAAM,UAAU,KAAK,KAAK,SAAS,CAAC;AACpC,UAAM,cAAc,KAAK,MAAM,GAAG,EAAE;AAGpC,UAAM,UAAU;AAAA,MACd,SAAS;AAAA,MACT,MAAM;AAAA,MACN,SAAS,OAAO,YAAY,WAAW,UAAU,CAAC;AAAA,MAClD,KAAK,QAAQ,IAAI;AAAA,MACjB,KAAK;AAAA,QACH,UAAU,QAAQ,IAAI,UAAU;AAAA,QAChC,aAAa,QAAQ,IAAI,aAAa;AAAA,QACtC,gBAAgB,QAAQ,IAAI,gBAAgB,IAAI,UAAU;AAAA,MAC5D;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAGA,WAAO,KAAK,gBAAgB,WAAW,IAAI,OAAO;AAGlD,UAAM,MAAM,QAAQ,aAAa,SAAS,YAAY;AACpD,UAAI;AAEF,cAAM,SAAS,MAAM,eAAe,MAAM,MAAM,IAAW;AAG3D,eAAO,KAAK,0BAA0B,WAAW,IAAI;AAAA,UACnD,UAAU,MAAM,aAAa,EAAE,KAAK,CAAC,MAAW,EAAE,SAAS,WAAW,GAAG;AAAA,QAC3E,CAAC;AAGD,YAAI,QAAQ,IAAI,aAAa,MAAM,QAAQ;AACzC,kBAAQ,IAAI,MAAM,oBAAoB,CAAC;AAAA,QACzC;AAAA,MACF,SAAS,OAAgB;AAEvB,eAAO,MAAM,uBAAuB,WAAW,IAAI,OAAgB,OAAO;AAG1E,cAAM,YAAY,MAAM,aAAa;AACrC,YAAI,WAAW;AACb,kBAAQ,MAAM,gCAAyB;AACvC,kBAAQ,MAAM,MAAM,UAAU,IAAI,EAAE;AACpC,cAAI,UAAU,QAAQ;AACpB,oBAAQ,MAAM,mBAAmB,KAAK,UAAU,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA,UAC5E;AACA,kBAAQ,MAAM,qBAAqB,UAAU,KAAK;AAAA,QACpD;AAGA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAGD,UAAQ,SAAS,QAAQ,gBAAc;AACrC,gBAAY,UAAU;AAAA,EACxB,CAAC;AAED,SAAO;AACT;AAEA,SAAS,eAAe,SAA0B;AAChD,QAAM,QAAkB,CAAC;AACzB,MAAI,UAA0B;AAE9B,SAAO,SAAS;AACd,QAAI,QAAQ,KAAK,GAAG;AAClB,YAAM,QAAQ,QAAQ,KAAK,CAAC;AAAA,IAC9B;AACA,cAAU,QAAQ;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,GAAG;AACvB;AAKO,SAAS,YAAY,SAA2B;AAErD,UAAQ,aAAa,CAAC,QAAQ;AAC5B,QAAI,IAAI,SAAS,oBAAoB,IAAI,SAAS,qBAAqB;AAErE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAGA,WAAO,MAAM,aAAa,KAAK;AAAA,MAC7B,MAAM,IAAI;AAAA,MACV,UAAU,IAAI;AAAA,MACd,SAAS,QAAQ,KAAK,MAAM,CAAC,EAAE,KAAK,GAAG;AAAA,IACzC,CAAC;AAGD,QAAI,QAAQ,IAAI,aAAa,MAAM,QAAQ;AACzC,cAAQ,MAAM,OAAO,MAAM,oBAAoB,CAAC;AAAA,IAClD;AAEA,YAAQ,KAAK,IAAI,YAAY,CAAC;AAAA,EAChC,CAAC;AAGD,UAAQ,KAAK,aAAa,CAAC,gBAAgB;AAEzC,UAAM,MAAM;AAGZ,UAAM,cAAc,eAAe,WAAW;AAC9C,WAAO,MAAM,yBAAyB,WAAW,IAAI;AAAA,MACnD,MAAM,YAAY;AAAA,MAClB,MAAM,YAAY,KAAK;AAAA,IACzB,CAAC;AAAA,EACH,CAAC;AAGD,UAAQ,KAAK,cAAc,CAAC,gBAAgB;AAE1C,UAAM,cAAc,eAAe,WAAW;AAC9C,WAAO,MAAM,wBAAwB,WAAW,EAAE;AAAA,EACpD,CAAC;AAGD,UAAQ,SAAS,QAAQ,aAAW;AAClC,gBAAY,OAAO;AAAA,EACrB,CAAC;AAED,SAAO;AACT;AAKO,SAAS,UAAa,MAAc,IAAkC;AAC3E,SAAO,MAAM,KAAK,MAAM,EAAE;AAC5B;AAKO,SAAS,WAAc,KAAa,QAAa,IAAgB;AACtE,SAAO,MAAM,UAAU,SAAS,IAAI,UAAU,GAAG,GAAG,GAAG,QAAQ,EAAE;AACnE;AAKO,SAAS,SACd,QACA,KACA,MACA,IACY;AACZ,SAAO,MAAM,IAAI,QAAQ,KAAK,MAAM,EAAE;AACxC;",
+ "sourcesContent": ["/**\n * CLI Command Trace Wrapper\n * Automatically wraps Commander.js commands with comprehensive tracing\n */\n\nimport { Command } from 'commander';\nimport { trace } from './debug-trace.js';\nimport { logger } from '../monitoring/logger.js';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\nexport function wrapCommand(command: Command): Command {\n const originalAction = command.action.bind(command);\n\n command.action(async function (...args: any[]): Promise<void> {\n // Extract command path and options\n const commandPath = getCommandPath(command);\n const options = args[args.length - 1];\n const commandArgs = args.slice(0, -1);\n\n // Build comprehensive context\n const context = {\n command: commandPath,\n args: commandArgs,\n options: typeof options === 'object' ? options : {},\n cwd: process.cwd(),\n env: {\n NODE_ENV: process.env['NODE_ENV'],\n DEBUG_TRACE: process.env['DEBUG_TRACE'],\n LINEAR_API_KEY: process.env['LINEAR_API_KEY'] ? '[SET]' : '[NOT SET]',\n },\n timestamp: new Date().toISOString(),\n };\n\n // Log command start\n logger.info(`CLI Command: ${commandPath}`, context);\n\n // Wrap the actual action with tracing\n await trace.command(commandPath, context, async () => {\n try {\n // Call the original action with wrapped handler\n const result = await originalAction.apply(null, args as any);\n\n // Log successful completion\n logger.info(`CLI Command Completed: ${commandPath}`, {\n duration: trace\n .exportTraces()\n .find((t: any) => t.name === commandPath)?.duration,\n });\n\n // Show execution summary if verbose\n if (process.env['DEBUG_TRACE'] === 'true') {\n console.log(trace.getExecutionSummary());\n }\n } catch (error: unknown) {\n // Enhanced error logging for CLI commands\n logger.error(\n `CLI Command Failed: ${commandPath}`,\n error as Error,\n context\n );\n\n // Get the last error trace for debugging\n const lastError = trace.getLastError();\n if (lastError) {\n console.error('\\n\uD83D\uDCCD Error occurred at:');\n console.error(` ${lastError.name}`);\n if (lastError.params) {\n console.error(\n ' With params:',\n JSON.stringify(lastError.params, null, 2)\n );\n }\n console.error(' Error details:', lastError.error);\n }\n\n // Re-throw to maintain original error handling\n throw error;\n }\n });\n });\n\n // Recursively wrap subcommands\n command.commands.forEach((subcommand) => {\n wrapCommand(subcommand);\n });\n\n return command;\n}\n\nfunction getCommandPath(command: Command): string {\n const parts: string[] = [];\n let current: Command | null = command;\n\n while (current) {\n if (current.name()) {\n parts.unshift(current.name());\n }\n current = current.parent as Command | null;\n }\n\n return parts.join(' ');\n}\n\n/**\n * Wrap the main program with comprehensive tracing\n */\nexport function wrapProgram(program: Command): Command {\n // Add global error handler with tracing\n program.exitOverride((err) => {\n if (err.code === 'commander.help' || err.code === 'commander.version') {\n // Normal help/version display, not an error\n process.exit(0);\n }\n\n // Log the error with full context\n logger.error('CLI Error', err, {\n code: err.code,\n exitCode: err.exitCode,\n command: 
process.argv.slice(2).join(' '),\n });\n\n // Show trace summary on error\n if (process.env['DEBUG_TRACE'] === 'true') {\n console.error('\\n' + trace.getExecutionSummary());\n }\n\n process.exit(err.exitCode || 1);\n });\n\n // Add pre-action hook for setup\n program.hook('preAction', (thisCommand) => {\n // Initialize trace context for this command\n trace.reset();\n\n // Log command invocation\n const commandPath = getCommandPath(thisCommand);\n logger.debug(`Preparing to execute: ${commandPath}`, {\n args: thisCommand.args,\n opts: thisCommand.opts(),\n });\n });\n\n // Add post-action hook for cleanup\n program.hook('postAction', (thisCommand) => {\n // Log completion\n const commandPath = getCommandPath(thisCommand);\n logger.debug(`Completed execution: ${commandPath}`);\n });\n\n // Wrap all existing commands\n program.commands.forEach((command) => {\n wrapCommand(command);\n });\n\n return program;\n}\n\n/**\n * Helper to wrap async functions with step tracing\n */\nexport function traceStep<T>(name: string, fn: () => Promise<T>): Promise<T> {\n return trace.step(name, fn);\n}\n\n/**\n * Helper to wrap database queries\n */\nexport function traceQuery<T>(sql: string, params: any, fn: () => T): T {\n return trace.traceSync('query', sql.substring(0, 100), params, fn);\n}\n\n/**\n * Helper to wrap API calls\n */\nexport function traceAPI<T>(\n method: string,\n url: string,\n body: any,\n fn: () => Promise<T>\n): Promise<T> {\n return trace.api(method, url, body, fn);\n}\n"],
+ "mappings": "AAMA,SAAS,aAAa;AACtB,SAAS,cAAc;AAEvB,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AAEO,SAAS,YAAY,SAA2B;AACrD,QAAM,iBAAiB,QAAQ,OAAO,KAAK,OAAO;AAElD,UAAQ,OAAO,kBAAmB,MAA4B;AAE5D,UAAM,cAAc,eAAe,OAAO;AAC1C,UAAM,UAAU,KAAK,KAAK,SAAS,CAAC;AACpC,UAAM,cAAc,KAAK,MAAM,GAAG,EAAE;AAGpC,UAAM,UAAU;AAAA,MACd,SAAS;AAAA,MACT,MAAM;AAAA,MACN,SAAS,OAAO,YAAY,WAAW,UAAU,CAAC;AAAA,MAClD,KAAK,QAAQ,IAAI;AAAA,MACjB,KAAK;AAAA,QACH,UAAU,QAAQ,IAAI,UAAU;AAAA,QAChC,aAAa,QAAQ,IAAI,aAAa;AAAA,QACtC,gBAAgB,QAAQ,IAAI,gBAAgB,IAAI,UAAU;AAAA,MAC5D;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAGA,WAAO,KAAK,gBAAgB,WAAW,IAAI,OAAO;AAGlD,UAAM,MAAM,QAAQ,aAAa,SAAS,YAAY;AACpD,UAAI;AAEF,cAAM,SAAS,MAAM,eAAe,MAAM,MAAM,IAAW;AAG3D,eAAO,KAAK,0BAA0B,WAAW,IAAI;AAAA,UACnD,UAAU,MACP,aAAa,EACb,KAAK,CAAC,MAAW,EAAE,SAAS,WAAW,GAAG;AAAA,QAC/C,CAAC;AAGD,YAAI,QAAQ,IAAI,aAAa,MAAM,QAAQ;AACzC,kBAAQ,IAAI,MAAM,oBAAoB,CAAC;AAAA,QACzC;AAAA,MACF,SAAS,OAAgB;AAEvB,eAAO;AAAA,UACL,uBAAuB,WAAW;AAAA,UAClC;AAAA,UACA;AAAA,QACF;AAGA,cAAM,YAAY,MAAM,aAAa;AACrC,YAAI,WAAW;AACb,kBAAQ,MAAM,gCAAyB;AACvC,kBAAQ,MAAM,MAAM,UAAU,IAAI,EAAE;AACpC,cAAI,UAAU,QAAQ;AACpB,oBAAQ;AAAA,cACN;AAAA,cACA,KAAK,UAAU,UAAU,QAAQ,MAAM,CAAC;AAAA,YAC1C;AAAA,UACF;AACA,kBAAQ,MAAM,qBAAqB,UAAU,KAAK;AAAA,QACpD;AAGA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAGD,UAAQ,SAAS,QAAQ,CAAC,eAAe;AACvC,gBAAY,UAAU;AAAA,EACxB,CAAC;AAED,SAAO;AACT;AAEA,SAAS,eAAe,SAA0B;AAChD,QAAM,QAAkB,CAAC;AACzB,MAAI,UAA0B;AAE9B,SAAO,SAAS;AACd,QAAI,QAAQ,KAAK,GAAG;AAClB,YAAM,QAAQ,QAAQ,KAAK,CAAC;AAAA,IAC9B;AACA,cAAU,QAAQ;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,GAAG;AACvB;AAKO,SAAS,YAAY,SAA2B;AAErD,UAAQ,aAAa,CAAC,QAAQ;AAC5B,QAAI,IAAI,SAAS,oBAAoB,IAAI,SAAS,qBAAqB;AAErE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAGA,WAAO,MAAM,aAAa,KAAK;AAAA,MAC7B,MAAM,IAAI;AAAA,MACV,UAAU,IAAI;AAAA,MACd,SAAS,QAAQ,KAAK,MAAM,CAAC,EAAE,KAAK,GAAG;AAAA,IACzC,CAAC;AAGD,QAAI,QAAQ,IAAI,aAAa,MAAM,QAAQ;AACzC,cAAQ,MAAM,OAAO,MAAM,oBAAoB,CAAC;AAAA,IAClD;AAEA,YAAQ,KAAK,IAAI,YAAY,CAAC;AAAA,EAChC,CAAC;AAGD,UAAQ,KAAK,aAAa,CAAC,gBAAgB;AAEzC,UAAM,MAAM;AAGZ,UAAM,cAAc,eAAe,WAAW;AAC9C,WAAO,MAAM,yBAAyB,WAAW,IAAI;AAAA,MACnD,MAAM,YAAY;AAAA,MAClB,MAAM,YAAY,KAAK;AAAA,IACzB,CAAC;AAAA,EACH,CAAC;AAGD,UAAQ,KAAK,cAAc,CAAC,gBAAgB;AAE1C,UAAM,cAAc,eAAe,WAAW;AAC9C,WAAO,MAAM,wBAAwB,WAAW,EAAE;AAAA,EACpD,CAAC;AAGD,UAAQ,SAAS,QAAQ,CAAC,YAAY;AACpC,gBAAY,OAAO;AAAA,EACrB,CAAC;AAED,SAAO;AACT;AAKO,SAAS,UAAa,MAAc,IAAkC;AAC3E,SAAO,MAAM,KAAK,MAAM,EAAE;AAC5B;AAKO,SAAS,WAAc,KAAa,QAAa,IAAgB;AACtE,SAAO,MAAM,UAAU,SAAS,IAAI,UAAU,GAAG,GAAG,GAAG,QAAQ,EAAE;AACnE;AAKO,SAAS,SACd,QACA,KACA,MACA,IACY;AACZ,SAAO,MAAM,IAAI,QAAQ,KAAK,MAAM,EAAE;AACxC;",
  "names": []
  }