@stackmemoryai/stackmemory 0.3.6 → 0.3.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (213)
  1. package/dist/agents/verifiers/base-verifier.js.map +2 -2
  2. package/dist/agents/verifiers/formatter-verifier.js.map +2 -2
  3. package/dist/agents/verifiers/llm-judge.js.map +2 -2
  4. package/dist/cli/claude-sm.js +24 -13
  5. package/dist/cli/claude-sm.js.map +2 -2
  6. package/dist/cli/codex-sm.js +24 -13
  7. package/dist/cli/codex-sm.js.map +2 -2
  8. package/dist/cli/commands/agent.js.map +2 -2
  9. package/dist/cli/commands/chromadb.js +217 -32
  10. package/dist/cli/commands/chromadb.js.map +2 -2
  11. package/dist/cli/commands/clear.js +12 -1
  12. package/dist/cli/commands/clear.js.map +2 -2
  13. package/dist/cli/commands/context.js +13 -2
  14. package/dist/cli/commands/context.js.map +2 -2
  15. package/dist/cli/commands/dashboard.js.map +2 -2
  16. package/dist/cli/commands/gc.js +202 -0
  17. package/dist/cli/commands/gc.js.map +7 -0
  18. package/dist/cli/commands/handoff.js +12 -1
  19. package/dist/cli/commands/handoff.js.map +2 -2
  20. package/dist/cli/commands/infinite-storage.js +32 -21
  21. package/dist/cli/commands/infinite-storage.js.map +2 -2
  22. package/dist/cli/commands/linear-create.js +13 -2
  23. package/dist/cli/commands/linear-create.js.map +2 -2
  24. package/dist/cli/commands/linear-list.js +12 -1
  25. package/dist/cli/commands/linear-list.js.map +2 -2
  26. package/dist/cli/commands/linear-migrate.js +12 -1
  27. package/dist/cli/commands/linear-migrate.js.map +2 -2
  28. package/dist/cli/commands/linear-test.js +12 -1
  29. package/dist/cli/commands/linear-test.js.map +2 -2
  30. package/dist/cli/commands/linear-unified.js +262 -0
  31. package/dist/cli/commands/linear-unified.js.map +7 -0
  32. package/dist/cli/commands/linear.js +17 -6
  33. package/dist/cli/commands/linear.js.map +2 -2
  34. package/dist/cli/commands/monitor.js.map +2 -2
  35. package/dist/cli/commands/onboard.js.map +2 -2
  36. package/dist/cli/commands/quality.js.map +2 -2
  37. package/dist/cli/commands/search.js.map +2 -2
  38. package/dist/cli/commands/session.js.map +2 -2
  39. package/dist/cli/commands/skills.js +12 -1
  40. package/dist/cli/commands/skills.js.map +2 -2
  41. package/dist/cli/commands/storage.js +18 -7
  42. package/dist/cli/commands/storage.js.map +2 -2
  43. package/dist/cli/commands/tasks.js.map +2 -2
  44. package/dist/cli/commands/tui.js +13 -2
  45. package/dist/cli/commands/tui.js.map +2 -2
  46. package/dist/cli/commands/webhook.js +14 -3
  47. package/dist/cli/commands/webhook.js.map +2 -2
  48. package/dist/cli/commands/workflow.js +14 -3
  49. package/dist/cli/commands/workflow.js.map +2 -2
  50. package/dist/cli/commands/worktree.js.map +2 -2
  51. package/dist/cli/index.js +18 -5
  52. package/dist/cli/index.js.map +2 -2
  53. package/dist/core/config/config-manager.js.map +2 -2
  54. package/dist/core/context/auto-context.js.map +2 -2
  55. package/dist/core/context/compaction-handler.js.map +2 -2
  56. package/dist/core/context/context-bridge.js.map +2 -2
  57. package/dist/core/context/dual-stack-manager.js.map +2 -2
  58. package/dist/core/context/frame-database.js.map +2 -2
  59. package/dist/core/context/frame-digest.js.map +2 -2
  60. package/dist/core/context/frame-handoff-manager.js.map +2 -2
  61. package/dist/core/context/frame-manager.js +12 -1
  62. package/dist/core/context/frame-manager.js.map +2 -2
  63. package/dist/core/context/frame-stack.js.map +2 -2
  64. package/dist/core/context/incremental-gc.js +279 -0
  65. package/dist/core/context/incremental-gc.js.map +7 -0
  66. package/dist/core/context/permission-manager.js +12 -1
  67. package/dist/core/context/permission-manager.js.map +2 -2
  68. package/dist/core/context/refactored-frame-manager.js.map +2 -2
  69. package/dist/core/context/shared-context-layer.js +12 -1
  70. package/dist/core/context/shared-context-layer.js.map +2 -2
  71. package/dist/core/context/stack-merge-resolver.js.map +2 -2
  72. package/dist/core/context/validation.js.map +2 -2
  73. package/dist/core/database/batch-operations.js.map +2 -2
  74. package/dist/core/database/connection-pool.js.map +2 -2
  75. package/dist/core/database/migration-manager.js.map +2 -2
  76. package/dist/core/database/paradedb-adapter.js.map +2 -2
  77. package/dist/core/database/query-cache.js.map +2 -2
  78. package/dist/core/database/query-router.js.map +2 -2
  79. package/dist/core/database/sqlite-adapter.js.map +2 -2
  80. package/dist/core/digest/enhanced-hybrid-digest.js.map +2 -2
  81. package/dist/core/errors/recovery.js.map +2 -2
  82. package/dist/core/merge/resolution-engine.js.map +2 -2
  83. package/dist/core/monitoring/error-handler.js.map +2 -2
  84. package/dist/core/monitoring/logger.js +14 -3
  85. package/dist/core/monitoring/logger.js.map +2 -2
  86. package/dist/core/monitoring/metrics.js +13 -2
  87. package/dist/core/monitoring/metrics.js.map +2 -2
  88. package/dist/core/monitoring/progress-tracker.js +12 -1
  89. package/dist/core/monitoring/progress-tracker.js.map +2 -2
  90. package/dist/core/monitoring/session-monitor.js.map +2 -2
  91. package/dist/core/performance/context-cache.js.map +2 -2
  92. package/dist/core/performance/lazy-context-loader.js.map +2 -2
  93. package/dist/core/performance/monitor.js.map +2 -2
  94. package/dist/core/performance/optimized-frame-context.js.map +2 -2
  95. package/dist/core/performance/performance-benchmark.js.map +2 -2
  96. package/dist/core/performance/performance-profiler.js +12 -1
  97. package/dist/core/performance/performance-profiler.js.map +2 -2
  98. package/dist/core/performance/streaming-jsonl-parser.js.map +2 -2
  99. package/dist/core/persistence/postgres-adapter.js.map +2 -2
  100. package/dist/core/projects/project-manager.js.map +2 -2
  101. package/dist/core/retrieval/context-retriever.js.map +2 -2
  102. package/dist/core/retrieval/graph-retrieval.js.map +2 -2
  103. package/dist/core/retrieval/llm-context-retrieval.js.map +2 -2
  104. package/dist/core/retrieval/retrieval-benchmarks.js.map +2 -2
  105. package/dist/core/retrieval/summary-generator.js.map +2 -2
  106. package/dist/core/session/clear-survival.js.map +2 -2
  107. package/dist/core/session/handoff-generator.js.map +2 -2
  108. package/dist/core/session/session-manager.js +16 -5
  109. package/dist/core/session/session-manager.js.map +2 -2
  110. package/dist/core/skills/skill-storage.js +13 -2
  111. package/dist/core/skills/skill-storage.js.map +2 -2
  112. package/dist/core/storage/chromadb-adapter.js.map +2 -2
  113. package/dist/core/storage/chromadb-simple.js.map +2 -2
  114. package/dist/core/storage/infinite-storage.js.map +2 -2
  115. package/dist/core/storage/railway-optimized-storage.js +19 -8
  116. package/dist/core/storage/railway-optimized-storage.js.map +2 -2
  117. package/dist/core/storage/remote-storage.js +12 -1
  118. package/dist/core/storage/remote-storage.js.map +2 -2
  119. package/dist/core/trace/cli-trace-wrapper.js +16 -5
  120. package/dist/core/trace/cli-trace-wrapper.js.map +2 -2
  121. package/dist/core/trace/db-trace-wrapper.js.map +2 -2
  122. package/dist/core/trace/debug-trace.js +21 -10
  123. package/dist/core/trace/debug-trace.js.map +2 -2
  124. package/dist/core/trace/index.js +46 -35
  125. package/dist/core/trace/index.js.map +2 -2
  126. package/dist/core/trace/trace-demo.js +12 -1
  127. package/dist/core/trace/trace-demo.js.map +2 -2
  128. package/dist/core/trace/trace-detector.js.map +2 -2
  129. package/dist/core/trace/trace-store.js.map +2 -2
  130. package/dist/core/utils/update-checker.js.map +2 -2
  131. package/dist/core/worktree/worktree-manager.js.map +2 -2
  132. package/dist/features/analytics/api/analytics-api.js.map +2 -2
  133. package/dist/features/analytics/core/analytics-service.js +12 -1
  134. package/dist/features/analytics/core/analytics-service.js.map +2 -2
  135. package/dist/features/analytics/queries/metrics-queries.js.map +2 -2
  136. package/dist/features/tasks/pebbles-task-store.js.map +2 -2
  137. package/dist/features/tui/components/analytics-panel.js.map +2 -2
  138. package/dist/features/tui/components/pr-tracker.js.map +2 -2
  139. package/dist/features/tui/components/session-monitor.js.map +2 -2
  140. package/dist/features/tui/components/subagent-fleet.js.map +2 -2
  141. package/dist/features/tui/components/task-board.js +650 -2
  142. package/dist/features/tui/components/task-board.js.map +2 -2
  143. package/dist/features/tui/index.js +16 -5
  144. package/dist/features/tui/index.js.map +2 -2
  145. package/dist/features/tui/services/data-service.js +25 -14
  146. package/dist/features/tui/services/data-service.js.map +2 -2
  147. package/dist/features/tui/services/linear-task-reader.js.map +2 -2
  148. package/dist/features/tui/services/websocket-client.js +13 -2
  149. package/dist/features/tui/services/websocket-client.js.map +2 -2
  150. package/dist/features/tui/terminal-compat.js +27 -16
  151. package/dist/features/tui/terminal-compat.js.map +2 -2
  152. package/dist/features/web/client/stores/task-store.js.map +2 -2
  153. package/dist/features/web/server/index.js +13 -2
  154. package/dist/features/web/server/index.js.map +2 -2
  155. package/dist/integrations/claude-code/enhanced-pre-clear-hooks.js.map +2 -2
  156. package/dist/integrations/claude-code/lifecycle-hooks.js.map +2 -2
  157. package/dist/integrations/claude-code/post-task-hooks.js.map +2 -2
  158. package/dist/integrations/linear/auth.js +17 -6
  159. package/dist/integrations/linear/auth.js.map +2 -2
  160. package/dist/integrations/linear/auto-sync.js.map +2 -2
  161. package/dist/integrations/linear/client.js.map +2 -2
  162. package/dist/integrations/linear/config.js.map +2 -2
  163. package/dist/integrations/linear/migration.js.map +2 -2
  164. package/dist/integrations/linear/oauth-server.js +13 -2
  165. package/dist/integrations/linear/oauth-server.js.map +2 -2
  166. package/dist/integrations/linear/rest-client.js.map +2 -2
  167. package/dist/integrations/linear/sync-enhanced.js +202 -0
  168. package/dist/integrations/linear/sync-enhanced.js.map +7 -0
  169. package/dist/integrations/linear/sync-manager.js.map +2 -2
  170. package/dist/integrations/linear/sync-service.js +12 -1
  171. package/dist/integrations/linear/sync-service.js.map +2 -2
  172. package/dist/integrations/linear/sync.js +34 -3
  173. package/dist/integrations/linear/sync.js.map +2 -2
  174. package/dist/integrations/linear/unified-sync.js +560 -0
  175. package/dist/integrations/linear/unified-sync.js.map +7 -0
  176. package/dist/integrations/linear/webhook-handler.js +12 -1
  177. package/dist/integrations/linear/webhook-handler.js.map +2 -2
  178. package/dist/integrations/linear/webhook-server.js +14 -3
  179. package/dist/integrations/linear/webhook-server.js.map +2 -2
  180. package/dist/integrations/linear/webhook.js +12 -1
  181. package/dist/integrations/linear/webhook.js.map +2 -2
  182. package/dist/integrations/mcp/handlers/context-handlers.js.map +2 -2
  183. package/dist/integrations/mcp/handlers/linear-handlers.js.map +2 -2
  184. package/dist/integrations/mcp/handlers/skill-handlers.js +13 -2
  185. package/dist/integrations/mcp/handlers/skill-handlers.js.map +2 -2
  186. package/dist/integrations/mcp/handlers/task-handlers.js.map +2 -2
  187. package/dist/integrations/mcp/handlers/trace-handlers.js.map +2 -2
  188. package/dist/integrations/mcp/middleware/tool-scoring.js.map +2 -2
  189. package/dist/integrations/mcp/refactored-server.js +15 -4
  190. package/dist/integrations/mcp/refactored-server.js.map +2 -2
  191. package/dist/integrations/mcp/server.js +12 -1
  192. package/dist/integrations/mcp/server.js.map +2 -2
  193. package/dist/integrations/mcp/tool-definitions.js.map +2 -2
  194. package/dist/integrations/pg-aiguide/embedding-provider.js +13 -2
  195. package/dist/integrations/pg-aiguide/embedding-provider.js.map +2 -2
  196. package/dist/integrations/pg-aiguide/semantic-search.js.map +2 -2
  197. package/dist/mcp/stackmemory-mcp-server.js +12 -1
  198. package/dist/mcp/stackmemory-mcp-server.js.map +2 -2
  199. package/dist/middleware/exponential-rate-limiter.js.map +2 -2
  200. package/dist/servers/production/auth-middleware.js +13 -2
  201. package/dist/servers/production/auth-middleware.js.map +2 -2
  202. package/dist/servers/railway/index.js +22 -11
  203. package/dist/servers/railway/index.js.map +2 -2
  204. package/dist/services/config-service.js.map +2 -2
  205. package/dist/services/context-service.js.map +2 -2
  206. package/dist/skills/claude-skills.js +105 -2
  207. package/dist/skills/claude-skills.js.map +2 -2
  208. package/dist/skills/dashboard-launcher.js.map +2 -2
  209. package/dist/skills/repo-ingestion-skill.js +561 -0
  210. package/dist/skills/repo-ingestion-skill.js.map +7 -0
  211. package/dist/utils/logger.js +12 -1
  212. package/dist/utils/logger.js.map +2 -2
  213. package/package.json +5 -1
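
The only hunk rendered below is the source map for package/dist/core/storage/railway-optimized-storage.js.map (item 116 above, +2 -2). Its embedded sourcesContent carries the substantive source change for 0.3.7: process.env values are now read with bracket notation, two small helpers (getEnv and getOptionalEnv) are introduced for type-safe environment access, and catch clauses are typed as error: unknown. A minimal sketch of that helper pattern, copied from the new sourcesContent on the + side of the hunk:

function getEnv(key: string, defaultValue?: string): string {
  // Return the variable, fall back to the default, or fail loudly.
  const value = process.env[key];
  if (value === undefined) {
    if (defaultValue !== undefined) return defaultValue;
    throw new Error(`Environment variable ${key} is required`);
  }
  return value;
}

function getOptionalEnv(key: string): string | undefined {
  // Optional variant: undefined when the variable is not set.
  return process.env[key];
}

// Config defaults follow the same bracket-notation style, e.g.:
// url: process.env['REDIS_URL'] || 'redis://localhost:6379'
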
package/dist/core/storage/railway-optimized-storage.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/storage/railway-optimized-storage.ts"],
- "sourcesContent": ["/**\n * Railway-Optimized 3-Tier Storage System\n * Tier 1: Redis (Hot) - Last 24 hours, instant access\n * Tier 2: Railway Buckets (Warm) - 1-30 days, S3-compatible\n * Tier 3: GCS (Cold) - 30+ days, cost-effective archive\n */\n\nimport { createClient, RedisClientType } from 'redis';\nimport { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand, HeadObjectCommand } from '@aws-sdk/client-s3';\nimport { Storage } from '@google-cloud/storage';\nimport Database from 'better-sqlite3';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace, ToolCall } from '../trace/types.js';\nimport { ConfigManager } from '../config/config-manager.js';\nimport * as zlib from 'zlib';\nimport { promisify } from 'util';\n\nconst gzipAsync = promisify(zlib.gzip);\nconst gunzipAsync = promisify(zlib.gunzip);\n\nexport enum StorageTier {\n HOT = 'hot', // Redis: < 24 hours\n WARM = 'warm', // Railway Buckets: 1-30 days \n COLD = 'cold' // GCS: 30+ days\n}\n\nexport interface RailwayStorageConfig {\n redis: {\n url: string;\n ttlSeconds: number;\n maxMemoryMb: number;\n };\n railwayBuckets: {\n endpoint: string;\n bucket: string;\n accessKeyId: string;\n secretAccessKey: string;\n region: string;\n };\n gcs: {\n bucketName: string;\n projectId: string;\n keyFilename?: string;\n };\n tiers: {\n hotHours: number; // Hours to keep in Redis\n warmDays: number; // Days to keep in Railway Buckets\n compressionScore: number; // Score threshold for early compression\n };\n}\n\nexport const DEFAULT_RAILWAY_CONFIG: RailwayStorageConfig = {\n redis: {\n url: process.env.REDIS_URL || 'redis://localhost:6379',\n ttlSeconds: 86400, // 24 hours\n maxMemoryMb: 100, // 100MB Redis limit\n },\n railwayBuckets: {\n endpoint: process.env.RAILWAY_BUCKET_ENDPOINT || 'https://buckets.railway.app',\n bucket: process.env.RAILWAY_BUCKET_NAME || 'stackmemory-warm',\n accessKeyId: process.env.RAILWAY_BUCKET_ACCESS_KEY || '',\n secretAccessKey: process.env.RAILWAY_BUCKET_SECRET_KEY || '',\n region: 'us-east-1',\n },\n gcs: {\n bucketName: process.env.GCS_BUCKET || 'stackmemory-cold',\n projectId: process.env.GCP_PROJECT_ID || 'stackmemory',\n keyFilename: process.env.GCP_KEY_FILE,\n },\n tiers: {\n hotHours: 24,\n warmDays: 30,\n compressionScore: 0.4,\n }\n};\n\ninterface StorageMetrics {\n tier: StorageTier;\n originalSize: number;\n compressedSize: number;\n compressionRatio: number;\n accessCount: number;\n lastAccessed: number;\n migrationTime?: number;\n}\n\n/**\n * Railway-optimized storage manager with 3-tier architecture\n */\nexport class RailwayOptimizedStorage {\n private redisClient?: RedisClientType;\n private railwayS3?: S3Client;\n private gcsStorage?: Storage;\n private localDb: Database.Database;\n private config: RailwayStorageConfig;\n private configManager: ConfigManager;\n private metricsCache: Map<string, StorageMetrics> = new Map();\n \n private initialized: Promise<void>;\n \n constructor(\n localDb: Database.Database,\n configManager: ConfigManager,\n config?: Partial<RailwayStorageConfig>\n ) {\n this.localDb = localDb;\n this.configManager = configManager;\n this.config = { ...DEFAULT_RAILWAY_CONFIG, ...config };\n \n this.initializeSchema();\n this.initialized = this.initializeClients();\n }\n \n /**\n * Initialize storage clients\n */\n private async initializeClients(): Promise<void> {\n // Initialize Redis\n if (this.config.redis.url) {\n try {\n this.redisClient = createClient({ url: this.config.redis.url });\n \n this.redisClient.on('error', 
(err) => {\n logger.error('Redis client error', err);\n });\n \n await this.redisClient.connect();\n \n // Configure Redis memory policy\n await this.redisClient.configSet('maxmemory-policy', 'allkeys-lru');\n \n logger.info('Redis connected for hot tier storage');\n } catch (error) {\n logger.warn('Redis connection failed, falling back to SQLite only', error);\n }\n }\n \n // Initialize Railway S3-compatible buckets\n if (this.config.railwayBuckets.accessKeyId) {\n this.railwayS3 = new S3Client({\n endpoint: this.config.railwayBuckets.endpoint,\n region: this.config.railwayBuckets.region,\n credentials: {\n accessKeyId: this.config.railwayBuckets.accessKeyId,\n secretAccessKey: this.config.railwayBuckets.secretAccessKey,\n },\n forcePathStyle: true, // Required for Railway buckets\n });\n \n logger.info('Railway Buckets configured for warm tier');\n }\n \n // Initialize GCS for cold storage\n if (this.config.gcs.projectId) {\n try {\n this.gcsStorage = new Storage({\n projectId: this.config.gcs.projectId,\n keyFilename: this.config.gcs.keyFilename,\n });\n \n logger.info('GCS configured for cold tier storage');\n } catch (error) {\n logger.warn('GCS setup failed, will use Railway buckets only', error);\n }\n }\n }\n \n /**\n * Initialize database schema for tracking\n */\n private initializeSchema(): void {\n this.localDb.exec(`\n CREATE TABLE IF NOT EXISTS storage_tiers (\n trace_id TEXT PRIMARY KEY,\n tier TEXT NOT NULL,\n location TEXT NOT NULL,\n original_size INTEGER,\n compressed_size INTEGER,\n compression_ratio REAL,\n access_count INTEGER DEFAULT 0,\n last_accessed INTEGER,\n created_at INTEGER,\n migrated_at INTEGER,\n score REAL,\n FOREIGN KEY (trace_id) REFERENCES traces(id) ON DELETE CASCADE\n )\n `);\n \n this.localDb.exec(`\n CREATE INDEX IF NOT EXISTS idx_storage_tier ON storage_tiers(tier);\n CREATE INDEX IF NOT EXISTS idx_storage_created ON storage_tiers(created_at);\n CREATE INDEX IF NOT EXISTS idx_storage_accessed ON storage_tiers(last_accessed);\n `);\n }\n \n /**\n * Store a trace in the appropriate tier\n */\n async storeTrace(trace: Trace): Promise<StorageTier> {\n // Ensure clients are initialized\n await this.initialized;\n \n const score = trace.score;\n const age = Date.now() - trace.metadata.startTime;\n const ageHours = age / (1000 * 60 * 60);\n \n // Determine tier based on age and score\n let tier: StorageTier;\n if (ageHours < this.config.tiers.hotHours && score > this.config.tiers.compressionScore) {\n tier = StorageTier.HOT;\n } else if (ageHours < this.config.tiers.warmDays * 24) {\n tier = StorageTier.WARM;\n } else {\n tier = StorageTier.COLD;\n }\n \n // Store in appropriate tier\n switch (tier) {\n case StorageTier.HOT:\n await this.storeInRedis(trace);\n break;\n case StorageTier.WARM:\n await this.storeInRailwayBuckets(trace);\n break;\n case StorageTier.COLD:\n await this.storeInGCS(trace);\n break;\n }\n \n // Track in database\n this.trackStorage(trace.id, tier, trace);\n \n return tier;\n }\n \n /**\n * Store trace in Redis (hot tier)\n */\n private async storeInRedis(trace: Trace): Promise<void> {\n if (!this.redisClient) {\n // Fallback to local SQLite if Redis unavailable\n return;\n }\n \n try {\n const key = `trace:${trace.id}`;\n const data = JSON.stringify(trace);\n \n // Compress if large\n let storedData: string;\n if (data.length > 10000) {\n const compressed = await gzipAsync(data);\n storedData = compressed.toString('base64');\n await this.redisClient.hSet(key, {\n data: storedData,\n compressed: 'true',\n score: 
trace.score.toString(),\n type: trace.type,\n timestamp: trace.metadata.startTime.toString(),\n });\n } else {\n storedData = data;\n await this.redisClient.hSet(key, {\n data: storedData,\n compressed: 'false',\n score: trace.score.toString(),\n type: trace.type,\n timestamp: trace.metadata.startTime.toString(),\n });\n }\n \n // Set TTL\n await this.redisClient.expire(key, this.config.redis.ttlSeconds);\n \n // Add to sorted set for efficient retrieval\n await this.redisClient.zAdd('traces:by_score', {\n score: trace.score,\n value: trace.id,\n });\n \n await this.redisClient.zAdd('traces:by_time', {\n score: trace.metadata.startTime,\n value: trace.id,\n });\n \n logger.debug('Stored trace in Redis', { \n traceId: trace.id, \n size: data.length,\n compressed: data.length > 10000,\n });\n \n } catch (error) {\n logger.error('Failed to store in Redis', error);\n throw error;\n }\n }\n \n /**\n * Store trace in Railway Buckets (warm tier)\n */\n private async storeInRailwayBuckets(trace: Trace): Promise<void> {\n if (!this.railwayS3) {\n throw new Error('Railway Buckets not configured');\n }\n \n try {\n // Compress trace\n const data = JSON.stringify(trace);\n const compressed = await gzipAsync(data);\n \n // Generate key with date partitioning\n const date = new Date(trace.metadata.startTime);\n const key = `traces/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${String(date.getDate()).padStart(2, '0')}/${trace.id}.json.gz`;\n \n // Upload to Railway Bucket\n const command = new PutObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: key,\n Body: compressed,\n ContentType: 'application/gzip',\n Metadata: {\n 'trace-id': trace.id,\n 'trace-type': trace.type,\n 'trace-score': trace.score.toString(),\n 'original-size': data.length.toString(),\n 'compressed-size': compressed.length.toString(),\n },\n });\n \n await this.railwayS3.send(command);\n \n // Remove from Redis if exists\n if (this.redisClient) {\n await this.redisClient.del(`trace:${trace.id}`);\n }\n \n logger.info('Stored trace in Railway Buckets', {\n traceId: trace.id,\n key,\n originalSize: data.length,\n compressedSize: compressed.length,\n compressionRatio: (1 - compressed.length / data.length).toFixed(2),\n });\n \n } catch (error) {\n logger.error('Failed to store in Railway Buckets', error);\n throw error;\n }\n }\n \n /**\n * Store trace in GCS (cold tier)\n */\n private async storeInGCS(trace: Trace): Promise<void> {\n if (!this.gcsStorage) {\n // Fallback to Railway Buckets if GCS not available\n return this.storeInRailwayBuckets(trace);\n }\n \n try {\n // Heavy compression for cold storage\n const minimal = this.createMinimalTrace(trace);\n const data = JSON.stringify(minimal);\n const compressed = await gzipAsync(data);\n \n // Generate key with year/month partitioning\n const date = new Date(trace.metadata.startTime);\n const key = `archive/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${trace.id}.json.gz`;\n \n // Upload to GCS with Coldline storage class\n const bucket = this.gcsStorage.bucket(this.config.gcs.bucketName);\n const file = bucket.file(key);\n \n await file.save(compressed, {\n metadata: {\n contentType: 'application/gzip',\n metadata: {\n traceId: trace.id,\n traceType: trace.type,\n score: trace.score.toString(),\n originalTools: trace.tools.length.toString(),\n },\n },\n storageClass: 'COLDLINE', // Use Coldline for cost optimization\n });\n \n // Remove from warm tier if exists\n if (this.railwayS3) {\n try {\n const warmKey = 
this.getWarmTierKey(trace);\n await this.railwayS3.send(new DeleteObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: warmKey,\n }));\n } catch (error) {\n // Ignore deletion errors\n }\n }\n \n logger.info('Archived trace to GCS', {\n traceId: trace.id,\n key,\n originalSize: JSON.stringify(trace).length,\n compressedSize: compressed.length,\n });\n \n } catch (error) {\n logger.error('Failed to store in GCS', error);\n throw error;\n }\n }\n \n /**\n * Create minimal trace for cold storage\n */\n private createMinimalTrace(trace: Trace): any {\n // Keep only essential information\n return {\n id: trace.id,\n type: trace.type,\n score: trace.score,\n summary: trace.summary,\n metadata: {\n startTime: trace.metadata.startTime,\n endTime: trace.metadata.endTime,\n filesModified: trace.metadata.filesModified.length,\n errorsCount: trace.metadata.errorsEncountered.length,\n decisionsCount: trace.metadata.decisionsRecorded.length,\n causalChain: trace.metadata.causalChain,\n },\n toolSummary: {\n count: trace.tools.length,\n types: [...new Set(trace.tools.map(t => t.tool))],\n firstTool: trace.tools[0]?.tool,\n lastTool: trace.tools[trace.tools.length - 1]?.tool,\n },\n compressed: trace.compressed,\n };\n }\n \n /**\n * Retrieve a trace from any tier\n */\n async retrieveTrace(traceId: string): Promise<Trace | null> {\n // Ensure clients are initialized\n await this.initialized;\n \n // Check tier location\n const location = this.localDb.prepare(\n 'SELECT tier, location FROM storage_tiers WHERE trace_id = ?'\n ).get(traceId) as any;\n \n if (!location) {\n return null;\n }\n \n // Update access metrics\n this.localDb.prepare(\n 'UPDATE storage_tiers SET access_count = access_count + 1, last_accessed = ? WHERE trace_id = ?'\n ).run(Date.now(), traceId);\n \n // Retrieve based on tier\n switch (location.tier) {\n case StorageTier.HOT:\n return this.retrieveFromRedis(traceId);\n case StorageTier.WARM:\n return this.retrieveFromRailwayBuckets(traceId, location.location);\n case StorageTier.COLD:\n return this.retrieveFromGCS(traceId, location.location);\n default:\n return null;\n }\n }\n \n /**\n * Retrieve from Redis\n */\n private async retrieveFromRedis(traceId: string): Promise<Trace | null> {\n if (!this.redisClient) return null;\n \n try {\n const key = `trace:${traceId}`;\n const data = await this.redisClient.hGetAll(key);\n \n if (!data || !data.data) return null;\n \n let traceData: string;\n if (data.compressed === 'true') {\n const compressed = Buffer.from(data.data, 'base64');\n const decompressed = await gunzipAsync(compressed);\n traceData = decompressed.toString();\n } else {\n traceData = data.data;\n }\n \n return JSON.parse(traceData);\n \n } catch (error) {\n logger.error('Failed to retrieve from Redis', error);\n return null;\n }\n }\n \n /**\n * Retrieve from Railway Buckets\n */\n private async retrieveFromRailwayBuckets(traceId: string, key: string): Promise<Trace | null> {\n if (!this.railwayS3) return null;\n \n try {\n const command = new GetObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: key,\n });\n \n const response = await this.railwayS3.send(command);\n const compressed = await response.Body?.transformToByteArray();\n \n if (!compressed) return null;\n \n const decompressed = await gunzipAsync(Buffer.from(compressed));\n return JSON.parse(decompressed.toString());\n \n } catch (error) {\n logger.error('Failed to retrieve from Railway Buckets', error);\n return null;\n }\n }\n \n /**\n * Retrieve from GCS\n */\n private async 
retrieveFromGCS(traceId: string, key: string): Promise<Trace | null> {\n if (!this.gcsStorage) return null;\n \n try {\n const bucket = this.gcsStorage.bucket(this.config.gcs.bucketName);\n const file = bucket.file(key);\n \n const [compressed] = await file.download();\n const decompressed = await gunzipAsync(compressed);\n \n // Note: Returns minimal trace from cold storage\n return JSON.parse(decompressed.toString());\n \n } catch (error) {\n logger.error('Failed to retrieve from GCS', error);\n return null;\n }\n }\n \n /**\n * Track storage in database\n */\n private trackStorage(traceId: string, tier: StorageTier, trace: Trace): void {\n const originalSize = JSON.stringify(trace).length;\n const compressedSize = Math.floor(originalSize * 0.3); // Estimate\n \n this.localDb.prepare(`\n INSERT OR REPLACE INTO storage_tiers (\n trace_id, tier, location, original_size, compressed_size,\n compression_ratio, access_count, last_accessed, created_at,\n migrated_at, score\n ) VALUES (?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?)\n `).run(\n traceId,\n tier,\n this.getStorageLocation(trace, tier),\n originalSize,\n compressedSize,\n 1 - compressedSize / originalSize,\n Date.now(),\n trace.metadata.startTime,\n Date.now(),\n trace.score\n );\n }\n \n /**\n * Get storage location key\n */\n private getStorageLocation(trace: Trace, tier: StorageTier): string {\n const date = new Date(trace.metadata.startTime);\n const year = date.getFullYear();\n const month = String(date.getMonth() + 1).padStart(2, '0');\n const day = String(date.getDate()).padStart(2, '0');\n \n switch (tier) {\n case StorageTier.HOT:\n return `redis:trace:${trace.id}`;\n case StorageTier.WARM:\n return `traces/${year}/${month}/${day}/${trace.id}.json.gz`;\n case StorageTier.COLD:\n return `archive/${year}/${month}/${trace.id}.json.gz`;\n }\n }\n \n /**\n * Get warm tier key for a trace\n */\n private getWarmTierKey(trace: Trace): string {\n const date = new Date(trace.metadata.startTime);\n return `traces/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${String(date.getDate()).padStart(2, '0')}/${trace.id}.json.gz`;\n }\n \n /**\n * Migrate traces between tiers based on age\n */\n async migrateTiers(): Promise<{\n hotToWarm: number;\n warmToCold: number;\n errors: string[];\n }> {\n const results = {\n hotToWarm: 0,\n warmToCold: 0,\n errors: [] as string[],\n };\n \n const now = Date.now();\n \n // Find traces to migrate\n const candidates = this.localDb.prepare(`\n SELECT trace_id, tier, created_at, score\n FROM storage_tiers\n WHERE tier != 'cold'\n ORDER BY created_at ASC\n `).all() as any[];\n \n for (const candidate of candidates) {\n const ageHours = (now - candidate.created_at) / (1000 * 60 * 60);\n \n try {\n if (candidate.tier === StorageTier.HOT && ageHours > this.config.tiers.hotHours) {\n // Migrate hot \u2192 warm\n const trace = await this.retrieveTrace(candidate.trace_id);\n if (trace) {\n await this.storeInRailwayBuckets(trace);\n this.trackStorage(candidate.trace_id, StorageTier.WARM, trace);\n results.hotToWarm++;\n }\n } else if (candidate.tier === StorageTier.WARM && ageHours > this.config.tiers.warmDays * 24) {\n // Migrate warm \u2192 cold\n const trace = await this.retrieveTrace(candidate.trace_id);\n if (trace) {\n await this.storeInGCS(trace);\n this.trackStorage(candidate.trace_id, StorageTier.COLD, trace);\n results.warmToCold++;\n }\n }\n } catch (error) {\n results.errors.push(`Failed to migrate ${candidate.trace_id}: ${error}`);\n }\n }\n \n logger.info('Tier migration completed', 
results);\n return results;\n }\n \n /**\n * Get storage statistics\n */\n getStorageStats(): any {\n const tierStats = this.localDb.prepare(`\n SELECT \n tier,\n COUNT(*) as count,\n SUM(original_size) as total_original,\n SUM(compressed_size) as total_compressed,\n AVG(compression_ratio) as avg_compression,\n AVG(access_count) as avg_access\n FROM storage_tiers\n GROUP BY tier\n `).all();\n \n const ageDistribution = this.localDb.prepare(`\n SELECT \n CASE \n WHEN (? - created_at) / 3600000 < 24 THEN '< 24h'\n WHEN (? - created_at) / 86400000 < 7 THEN '1-7d'\n WHEN (? - created_at) / 86400000 < 30 THEN '7-30d'\n ELSE '30d+'\n END as age_group,\n COUNT(*) as count\n FROM storage_tiers\n GROUP BY age_group\n `).all(Date.now(), Date.now(), Date.now());\n \n return {\n byTier: tierStats,\n byAge: ageDistribution,\n totalTraces: tierStats.reduce((sum: number, t: any) => sum + t.count, 0),\n totalSize: tierStats.reduce((sum: number, t: any) => sum + t.total_original, 0),\n compressedSize: tierStats.reduce((sum: number, t: any) => sum + t.total_compressed, 0),\n };\n }\n \n /**\n * Clean up expired data\n */\n async cleanup(): Promise<number> {\n let cleaned = 0;\n \n // Remove old entries from storage_tiers table\n const cutoff = Date.now() - (90 * 24 * 60 * 60 * 1000); // 90 days\n \n const result = this.localDb.prepare(`\n DELETE FROM storage_tiers\n WHERE tier = 'cold' AND created_at < ? AND access_count = 0\n `).run(cutoff);\n \n cleaned = result.changes;\n \n logger.info('Cleanup completed', { removed: cleaned });\n return cleaned;\n }\n}"],
- "mappings": "AAOA,SAAS,oBAAqC;AAC9C,SAAS,UAAU,kBAAkB,kBAAkB,2BAA8C;AACrG,SAAS,eAAe;AAExB,SAAS,cAAc;AAGvB,YAAY,UAAU;AACtB,SAAS,iBAAiB;AAE1B,MAAM,YAAY,UAAU,KAAK,IAAI;AACrC,MAAM,cAAc,UAAU,KAAK,MAAM;AAElC,IAAK,cAAL,kBAAKA,iBAAL;AACL,EAAAA,aAAA,SAAM;AACN,EAAAA,aAAA,UAAO;AACP,EAAAA,aAAA,UAAO;AAHG,SAAAA;AAAA,GAAA;AA+BL,MAAM,yBAA+C;AAAA,EAC1D,OAAO;AAAA,IACL,KAAK,QAAQ,IAAI,aAAa;AAAA,IAC9B,YAAY;AAAA;AAAA,IACZ,aAAa;AAAA;AAAA,EACf;AAAA,EACA,gBAAgB;AAAA,IACd,UAAU,QAAQ,IAAI,2BAA2B;AAAA,IACjD,QAAQ,QAAQ,IAAI,uBAAuB;AAAA,IAC3C,aAAa,QAAQ,IAAI,6BAA6B;AAAA,IACtD,iBAAiB,QAAQ,IAAI,6BAA6B;AAAA,IAC1D,QAAQ;AAAA,EACV;AAAA,EACA,KAAK;AAAA,IACH,YAAY,QAAQ,IAAI,cAAc;AAAA,IACtC,WAAW,QAAQ,IAAI,kBAAkB;AAAA,IACzC,aAAa,QAAQ,IAAI;AAAA,EAC3B;AAAA,EACA,OAAO;AAAA,IACL,UAAU;AAAA,IACV,UAAU;AAAA,IACV,kBAAkB;AAAA,EACpB;AACF;AAeO,MAAM,wBAAwB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,eAA4C,oBAAI,IAAI;AAAA,EAEpD;AAAA,EAER,YACE,SACA,eACA,QACA;AACA,SAAK,UAAU;AACf,SAAK,gBAAgB;AACrB,SAAK,SAAS,EAAE,GAAG,wBAAwB,GAAG,OAAO;AAErD,SAAK,iBAAiB;AACtB,SAAK,cAAc,KAAK,kBAAkB;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAmC;AAE/C,QAAI,KAAK,OAAO,MAAM,KAAK;AACzB,UAAI;AACF,aAAK,cAAc,aAAa,EAAE,KAAK,KAAK,OAAO,MAAM,IAAI,CAAC;AAE9D,aAAK,YAAY,GAAG,SAAS,CAAC,QAAQ;AACpC,iBAAO,MAAM,sBAAsB,GAAG;AAAA,QACxC,CAAC;AAED,cAAM,KAAK,YAAY,QAAQ;AAG/B,cAAM,KAAK,YAAY,UAAU,oBAAoB,aAAa;AAElE,eAAO,KAAK,sCAAsC;AAAA,MACpD,SAAS,OAAO;AACd,eAAO,KAAK,wDAAwD,KAAK;AAAA,MAC3E;AAAA,IACF;AAGA,QAAI,KAAK,OAAO,eAAe,aAAa;AAC1C,WAAK,YAAY,IAAI,SAAS;AAAA,QAC5B,UAAU,KAAK,OAAO,eAAe;AAAA,QACrC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,aAAa;AAAA,UACX,aAAa,KAAK,OAAO,eAAe;AAAA,UACxC,iBAAiB,KAAK,OAAO,eAAe;AAAA,QAC9C;AAAA,QACA,gBAAgB;AAAA;AAAA,MAClB,CAAC;AAED,aAAO,KAAK,0CAA0C;AAAA,IACxD;AAGA,QAAI,KAAK,OAAO,IAAI,WAAW;AAC7B,UAAI;AACF,aAAK,aAAa,IAAI,QAAQ;AAAA,UAC5B,WAAW,KAAK,OAAO,IAAI;AAAA,UAC3B,aAAa,KAAK,OAAO,IAAI;AAAA,QAC/B,CAAC;AAED,eAAO,KAAK,sCAAsC;AAAA,MACpD,SAAS,OAAO;AACd,eAAO,KAAK,mDAAmD,KAAK;AAAA,MACtE;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAejB;AAED,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA,KAIjB;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAoC;AAEnD,UAAM,KAAK;AAEX,UAAM,QAAQ,MAAM;AACpB,UAAM,MAAM,KAAK,IAAI,IAAI,MAAM,SAAS;AACxC,UAAM,WAAW,OAAO,MAAO,KAAK;AAGpC,QAAI;AACJ,QAAI,WAAW,KAAK,OAAO,MAAM,YAAY,QAAQ,KAAK,OAAO,MAAM,kBAAkB;AACvF,aAAO;AAAA,IACT,WAAW,WAAW,KAAK,OAAO,MAAM,WAAW,IAAI;AACrD,aAAO;AAAA,IACT,OAAO;AACL,aAAO;AAAA,IACT;AAGA,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,cAAM,KAAK,aAAa,KAAK;AAC7B;AAAA,MACF,KAAK;AACH,cAAM,KAAK,sBAAsB,KAAK;AACtC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,WAAW,KAAK;AAC3B;AAAA,IACJ;AAGA,SAAK,aAAa,MAAM,IAAI,MAAM,KAAK;AAEvC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,OAA6B;AACtD,QAAI,CAAC,KAAK,aAAa;AAErB;AAAA,IACF;AAEA,QAAI;AACF,YAAM,MAAM,SAAS,MAAM,EAAE;AAC7B,YAAM,OAAO,KAAK,UAAU,KAAK;AAGjC,UAAI;AACJ,UAAI,KAAK,SAAS,KAAO;AACvB,cAAM,aAAa,MAAM,UAAU,IAAI;AACvC,qBAAa,WAAW,SAAS,QAAQ;AACzC,cAAM,KAAK,YAAY,KAAK,KAAK;AAAA,UAC/B,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,OAAO,MAAM,MAAM,SAAS;AAAA,UAC5B,MAAM,MAAM;AAAA,UACZ,WAAW,MAAM,SAAS,UAAU,SAAS;AAAA,QAC/C,CAAC;AAAA,MACH,OAAO;AACL,qBAAa;AACb,cAAM,KAAK,YAAY,KAAK,KAAK;AAAA,UAC/B,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,OAAO,MAAM,MAAM,SAAS;AAAA,UAC5B,MAAM,MAAM;AAAA,UACZ,WAAW,MAAM,SAAS,UAAU,SAAS;AAAA,QAC/C,CAAC;AAAA,MACH;AAGA,YAAM,KAAK,YAAY,OAAO,KAAK,KAAK,OAAO,MAAM,UAAU;AAG/D,YAAM,KAAK,YAAY,KAAK,mBAAmB;AAAA,QAC7C,OAAO,MAAM;AAAA,QACb,OAAO,MAAM;AAAA,MACf,CAAC;AAED,YAAM,KAAK,YAAY,KAAK,kBAAkB;AAAA,QAC5C,OAAO,MAAM,SAAS;AAAA,QACtB,OAAO,MAAM;AAAA,MACf,CAAC;AAED,aAAO,
MAAM,yBAAyB;AAAA,QACpC,SAAS,MAAM;AAAA,QACf,MAAM,KAAK;AAAA,QACX,YAAY,KAAK,SAAS;AAAA,MAC5B,CAAC;AAAA,IAEH,SAAS,OAAO;AACd,aAAO,MAAM,4BAA4B,KAAK;AAC9C,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBAAsB,OAA6B;AAC/D,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,gCAAgC;AAAA,IAClD;AAEA,QAAI;AAEF,YAAM,OAAO,KAAK,UAAU,KAAK;AACjC,YAAM,aAAa,MAAM,UAAU,IAAI;AAGvC,YAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,YAAM,MAAM,UAAU,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAG/I,YAAM,UAAU,IAAI,iBAAiB;AAAA,QACnC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,KAAK;AAAA,QACL,MAAM;AAAA,QACN,aAAa;AAAA,QACb,UAAU;AAAA,UACR,YAAY,MAAM;AAAA,UAClB,cAAc,MAAM;AAAA,UACpB,eAAe,MAAM,MAAM,SAAS;AAAA,UACpC,iBAAiB,KAAK,OAAO,SAAS;AAAA,UACtC,mBAAmB,WAAW,OAAO,SAAS;AAAA,QAChD;AAAA,MACF,CAAC;AAED,YAAM,KAAK,UAAU,KAAK,OAAO;AAGjC,UAAI,KAAK,aAAa;AACpB,cAAM,KAAK,YAAY,IAAI,SAAS,MAAM,EAAE,EAAE;AAAA,MAChD;AAEA,aAAO,KAAK,mCAAmC;AAAA,QAC7C,SAAS,MAAM;AAAA,QACf;AAAA,QACA,cAAc,KAAK;AAAA,QACnB,gBAAgB,WAAW;AAAA,QAC3B,mBAAmB,IAAI,WAAW,SAAS,KAAK,QAAQ,QAAQ,CAAC;AAAA,MACnE,CAAC;AAAA,IAEH,SAAS,OAAO;AACd,aAAO,MAAM,sCAAsC,KAAK;AACxD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,OAA6B;AACpD,QAAI,CAAC,KAAK,YAAY;AAEpB,aAAO,KAAK,sBAAsB,KAAK;AAAA,IACzC;AAEA,QAAI;AAEF,YAAM,UAAU,KAAK,mBAAmB,KAAK;AAC7C,YAAM,OAAO,KAAK,UAAU,OAAO;AACnC,YAAM,aAAa,MAAM,UAAU,IAAI;AAGvC,YAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,YAAM,MAAM,WAAW,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAGrG,YAAM,SAAS,KAAK,WAAW,OAAO,KAAK,OAAO,IAAI,UAAU;AAChE,YAAM,OAAO,OAAO,KAAK,GAAG;AAE5B,YAAM,KAAK,KAAK,YAAY;AAAA,QAC1B,UAAU;AAAA,UACR,aAAa;AAAA,UACb,UAAU;AAAA,YACR,SAAS,MAAM;AAAA,YACf,WAAW,MAAM;AAAA,YACjB,OAAO,MAAM,MAAM,SAAS;AAAA,YAC5B,eAAe,MAAM,MAAM,OAAO,SAAS;AAAA,UAC7C;AAAA,QACF;AAAA,QACA,cAAc;AAAA;AAAA,MAChB,CAAC;AAGD,UAAI,KAAK,WAAW;AAClB,YAAI;AACF,gBAAM,UAAU,KAAK,eAAe,KAAK;AACzC,gBAAM,KAAK,UAAU,KAAK,IAAI,oBAAoB;AAAA,YAChD,QAAQ,KAAK,OAAO,eAAe;AAAA,YACnC,KAAK;AAAA,UACP,CAAC,CAAC;AAAA,QACJ,SAAS,OAAO;AAAA,QAEhB;AAAA,MACF;AAEA,aAAO,KAAK,yBAAyB;AAAA,QACnC,SAAS,MAAM;AAAA,QACf;AAAA,QACA,cAAc,KAAK,UAAU,KAAK,EAAE;AAAA,QACpC,gBAAgB,WAAW;AAAA,MAC7B,CAAC;AAAA,IAEH,SAAS,OAAO;AACd,aAAO,MAAM,0BAA0B,KAAK;AAC5C,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAmB;AAE5C,WAAO;AAAA,MACL,IAAI,MAAM;AAAA,MACV,MAAM,MAAM;AAAA,MACZ,OAAO,MAAM;AAAA,MACb,SAAS,MAAM;AAAA,MACf,UAAU;AAAA,QACR,WAAW,MAAM,SAAS;AAAA,QAC1B,SAAS,MAAM,SAAS;AAAA,QACxB,eAAe,MAAM,SAAS,cAAc;AAAA,QAC5C,aAAa,MAAM,SAAS,kBAAkB;AAAA,QAC9C,gBAAgB,MAAM,SAAS,kBAAkB;AAAA,QACjD,aAAa,MAAM,SAAS;AAAA,MAC9B;AAAA,MACA,aAAa;AAAA,QACX,OAAO,MAAM,MAAM;AAAA,QACnB,OAAO,CAAC,GAAG,IAAI,IAAI,MAAM,MAAM,IAAI,OAAK,EAAE,IAAI,CAAC,CAAC;AAAA,QAChD,WAAW,MAAM,MAAM,CAAC,GAAG;AAAA,QAC3B,UAAU,MAAM,MAAM,MAAM,MAAM,SAAS,CAAC,GAAG;AAAA,MACjD;AAAA,MACA,YAAY,MAAM;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,SAAwC;AAE1D,UAAM,KAAK;AAGX,UAAM,WAAW,KAAK,QAAQ;AAAA,MAC5B;AAAA,IACF,EAAE,IAAI,OAAO;AAEb,QAAI,CAAC,UAAU;AACb,aAAO;AAAA,IACT;AAGA,SAAK,QAAQ;AAAA,MACX;AAAA,IACF,EAAE,IAAI,KAAK,IAAI,GAAG,OAAO;AAGzB,YAAQ,SAAS,MAAM;AAAA,MACrB,KAAK;AACH,eAAO,KAAK,kBAAkB,OAAO;AAAA,MACvC,KAAK;AACH,eAAO,KAAK,2BAA2B,SAAS,SAAS,QAAQ;AAAA,MACnE,KAAK;AACH,eAAO,KAAK,gBAAgB,SAAS,SAAS,QAAQ;AAAA,MACxD;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBAAkB,SAAwC;AACtE,QAAI,CAAC,KAAK,YAAa,QAAO;AAE9B,QAAI;AACF,YAAM,MAAM,SAAS,OAAO;AAC5B,YAAM,OAAO,MAAM,KAAK,YAAY,QAAQ,GAAG;AAE/C,UAAI,CAAC,QAAQ,CAAC,KAAK,KAAM,QAAO;AAEhC,UAAI;AACJ,UAAI,KAAK,eAAe,QAAQ;AAC9B,cAAM,aAAa,OAAO,KAAK,KAAK,MAAM
,QAAQ;AAClD,cAAM,eAAe,MAAM,YAAY,UAAU;AACjD,oBAAY,aAAa,SAAS;AAAA,MACpC,OAAO;AACL,oBAAY,KAAK;AAAA,MACnB;AAEA,aAAO,KAAK,MAAM,SAAS;AAAA,IAE7B,SAAS,OAAO;AACd,aAAO,MAAM,iCAAiC,KAAK;AACnD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,2BAA2B,SAAiB,KAAoC;AAC5F,QAAI,CAAC,KAAK,UAAW,QAAO;AAE5B,QAAI;AACF,YAAM,UAAU,IAAI,iBAAiB;AAAA,QACnC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,KAAK;AAAA,MACP,CAAC;AAED,YAAM,WAAW,MAAM,KAAK,UAAU,KAAK,OAAO;AAClD,YAAM,aAAa,MAAM,SAAS,MAAM,qBAAqB;AAE7D,UAAI,CAAC,WAAY,QAAO;AAExB,YAAM,eAAe,MAAM,YAAY,OAAO,KAAK,UAAU,CAAC;AAC9D,aAAO,KAAK,MAAM,aAAa,SAAS,CAAC;AAAA,IAE3C,SAAS,OAAO;AACd,aAAO,MAAM,2CAA2C,KAAK;AAC7D,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,SAAiB,KAAoC;AACjF,QAAI,CAAC,KAAK,WAAY,QAAO;AAE7B,QAAI;AACF,YAAM,SAAS,KAAK,WAAW,OAAO,KAAK,OAAO,IAAI,UAAU;AAChE,YAAM,OAAO,OAAO,KAAK,GAAG;AAE5B,YAAM,CAAC,UAAU,IAAI,MAAM,KAAK,SAAS;AACzC,YAAM,eAAe,MAAM,YAAY,UAAU;AAGjD,aAAO,KAAK,MAAM,aAAa,SAAS,CAAC;AAAA,IAE3C,SAAS,OAAO;AACd,aAAO,MAAM,+BAA+B,KAAK;AACjD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAiB,MAAmB,OAAoB;AAC3E,UAAM,eAAe,KAAK,UAAU,KAAK,EAAE;AAC3C,UAAM,iBAAiB,KAAK,MAAM,eAAe,GAAG;AAEpD,SAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAMpB,EAAE;AAAA,MACD;AAAA,MACA;AAAA,MACA,KAAK,mBAAmB,OAAO,IAAI;AAAA,MACnC;AAAA,MACA;AAAA,MACA,IAAI,iBAAiB;AAAA,MACrB,KAAK,IAAI;AAAA,MACT,MAAM,SAAS;AAAA,MACf,KAAK,IAAI;AAAA,MACT,MAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAc,MAA2B;AAClE,UAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,UAAM,OAAO,KAAK,YAAY;AAC9B,UAAM,QAAQ,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AACzD,UAAM,MAAM,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAElD,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,eAAO,eAAe,MAAM,EAAE;AAAA,MAChC,KAAK;AACH,eAAO,UAAU,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,MAAM,EAAE;AAAA,MACnD,KAAK;AACH,eAAO,WAAW,IAAI,IAAI,KAAK,IAAI,MAAM,EAAE;AAAA,IAC/C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,OAAsB;AAC3C,UAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,WAAO,UAAU,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAAA,EAC5I;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAIH;AACD,UAAM,UAAU;AAAA,MACd,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,QAAQ,CAAC;AAAA,IACX;AAEA,UAAM,MAAM,KAAK,IAAI;AAGrB,UAAM,aAAa,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,KAKvC,EAAE,IAAI;AAEP,eAAW,aAAa,YAAY;AAClC,YAAM,YAAY,MAAM,UAAU,eAAe,MAAO,KAAK;AAE7D,UAAI;AACF,YAAI,UAAU,SAAS,mBAAmB,WAAW,KAAK,OAAO,MAAM,UAAU;AAE/E,gBAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,QAAQ;AACzD,cAAI,OAAO;AACT,kBAAM,KAAK,sBAAsB,KAAK;AACtC,iBAAK,aAAa,UAAU,UAAU,mBAAkB,KAAK;AAC7D,oBAAQ;AAAA,UACV;AAAA,QACF,WAAW,UAAU,SAAS,qBAAoB,WAAW,KAAK,OAAO,MAAM,WAAW,IAAI;AAE5F,gBAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,QAAQ;AACzD,cAAI,OAAO;AACT,kBAAM,KAAK,WAAW,KAAK;AAC3B,iBAAK,aAAa,UAAU,UAAU,mBAAkB,KAAK;AAC7D,oBAAQ;AAAA,UACV;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd,gBAAQ,OAAO,KAAK,qBAAqB,UAAU,QAAQ,KAAK,KAAK,EAAE;AAAA,MACzE;AAAA,IACF;AAEA,WAAO,KAAK,4BAA4B,OAAO;AAC/C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAuB;AACrB,UAAM,YAAY,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAUtC,EAAE,IAAI;AAEP,UAAM,kBAAkB,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAW5C,EAAE,IAAI,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,IAAI,CAAC;AAEzC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,aAAa,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,OAAO,CAAC;AAAA,MACvE,WAAW,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,gBAAgB,CAAC;AAAA,MAC9E,gBAAgB,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,kBAAkB,CAAC;AAAA,IACvF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAA2B;AAC/B,QAAI,UAAU;AAGd,UAAM,SAAS,KAAK,IAAI,IAAK,KAAK,KAAK,KAAK,KAAK;AAEjD,UAAM,SAAS,KAAK,QAAQ,QAA
Q;AAAA;AAAA;AAAA,KAGnC,EAAE,IAAI,MAAM;AAEb,cAAU,OAAO;AAEjB,WAAO,KAAK,qBAAqB,EAAE,SAAS,QAAQ,CAAC;AACrD,WAAO;AAAA,EACT;AACF;",
+ "sourcesContent": ["/**\n * Railway-Optimized 3-Tier Storage System\n * Tier 1: Redis (Hot) - Last 24 hours, instant access\n * Tier 2: Railway Buckets (Warm) - 1-30 days, S3-compatible\n * Tier 3: GCS (Cold) - 30+ days, cost-effective archive\n */\n\nimport { createClient, RedisClientType } from 'redis';\nimport { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand, HeadObjectCommand } from '@aws-sdk/client-s3';\nimport { Storage } from '@google-cloud/storage';\nimport Database from 'better-sqlite3';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace, ToolCall } from '../trace/types.js';\nimport { ConfigManager } from '../config/config-manager.js';\nimport * as zlib from 'zlib';\nimport { promisify } from 'util';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\n\nconst gzipAsync = promisify(zlib.gzip);\nconst gunzipAsync = promisify(zlib.gunzip);\n\nexport enum StorageTier {\n HOT = 'hot', // Redis: < 24 hours\n WARM = 'warm', // Railway Buckets: 1-30 days \n COLD = 'cold' // GCS: 30+ days\n}\n\nexport interface RailwayStorageConfig {\n redis: {\n url: string;\n ttlSeconds: number;\n maxMemoryMb: number;\n };\n railwayBuckets: {\n endpoint: string;\n bucket: string;\n accessKeyId: string;\n secretAccessKey: string;\n region: string;\n };\n gcs: {\n bucketName: string;\n projectId: string;\n keyFilename?: string;\n };\n tiers: {\n hotHours: number; // Hours to keep in Redis\n warmDays: number; // Days to keep in Railway Buckets\n compressionScore: number; // Score threshold for early compression\n };\n}\n\nexport const DEFAULT_RAILWAY_CONFIG: RailwayStorageConfig = {\n redis: {\n url: process.env['REDIS_URL'] || 'redis://localhost:6379',\n ttlSeconds: 86400, // 24 hours\n maxMemoryMb: 100, // 100MB Redis limit\n },\n railwayBuckets: {\n endpoint: process.env['RAILWAY_BUCKET_ENDPOINT'] || 'https://buckets.railway.app',\n bucket: process.env['RAILWAY_BUCKET_NAME'] || 'stackmemory-warm',\n accessKeyId: process.env['RAILWAY_BUCKET_ACCESS_KEY'] || '',\n secretAccessKey: process.env['RAILWAY_BUCKET_SECRET_KEY'] || '',\n region: 'us-east-1',\n },\n gcs: {\n bucketName: process.env['GCS_BUCKET'] || 'stackmemory-cold',\n projectId: process.env['GCP_PROJECT_ID'] || 'stackmemory',\n keyFilename: process.env['GCP_KEY_FILE'],\n },\n tiers: {\n hotHours: 24,\n warmDays: 30,\n compressionScore: 0.4,\n }\n};\n\ninterface StorageMetrics {\n tier: StorageTier;\n originalSize: number;\n compressedSize: number;\n compressionRatio: number;\n accessCount: number;\n lastAccessed: number;\n migrationTime?: number;\n}\n\n/**\n * Railway-optimized storage manager with 3-tier architecture\n */\nexport class RailwayOptimizedStorage {\n private redisClient?: RedisClientType;\n private railwayS3?: S3Client;\n private gcsStorage?: Storage;\n private localDb: Database.Database;\n private config: RailwayStorageConfig;\n private configManager: ConfigManager;\n private metricsCache: Map<string, StorageMetrics> = new Map();\n \n private initialized: Promise<void>;\n \n constructor(\n localDb: Database.Database,\n configManager: ConfigManager,\n config?: Partial<RailwayStorageConfig>\n ) {\n this.localDb = localDb;\n 
this.configManager = configManager;\n this.config = { ...DEFAULT_RAILWAY_CONFIG, ...config };\n \n this.initializeSchema();\n this.initialized = this.initializeClients();\n }\n \n /**\n * Initialize storage clients\n */\n private async initializeClients(): Promise<void> {\n // Initialize Redis\n if (this.config.redis.url) {\n try {\n this.redisClient = createClient({ url: this.config.redis.url });\n \n this.redisClient.on('error', (err) => {\n logger.error('Redis client error', err);\n });\n \n await this.redisClient.connect();\n \n // Configure Redis memory policy\n await this.redisClient.configSet('maxmemory-policy', 'allkeys-lru');\n \n logger.info('Redis connected for hot tier storage');\n } catch (error: unknown) {\n logger.warn('Redis connection failed, falling back to SQLite only', error);\n }\n }\n \n // Initialize Railway S3-compatible buckets\n if (this.config.railwayBuckets.accessKeyId) {\n this.railwayS3 = new S3Client({\n endpoint: this.config.railwayBuckets.endpoint,\n region: this.config.railwayBuckets.region,\n credentials: {\n accessKeyId: this.config.railwayBuckets.accessKeyId,\n secretAccessKey: this.config.railwayBuckets.secretAccessKey,\n },\n forcePathStyle: true, // Required for Railway buckets\n });\n \n logger.info('Railway Buckets configured for warm tier');\n }\n \n // Initialize GCS for cold storage\n if (this.config.gcs.projectId) {\n try {\n this.gcsStorage = new Storage({\n projectId: this.config.gcs.projectId,\n keyFilename: this.config.gcs.keyFilename,\n });\n \n logger.info('GCS configured for cold tier storage');\n } catch (error: unknown) {\n logger.warn('GCS setup failed, will use Railway buckets only', error);\n }\n }\n }\n \n /**\n * Initialize database schema for tracking\n */\n private initializeSchema(): void {\n this.localDb.exec(`\n CREATE TABLE IF NOT EXISTS storage_tiers (\n trace_id TEXT PRIMARY KEY,\n tier TEXT NOT NULL,\n location TEXT NOT NULL,\n original_size INTEGER,\n compressed_size INTEGER,\n compression_ratio REAL,\n access_count INTEGER DEFAULT 0,\n last_accessed INTEGER,\n created_at INTEGER,\n migrated_at INTEGER,\n score REAL,\n FOREIGN KEY (trace_id) REFERENCES traces(id) ON DELETE CASCADE\n )\n `);\n \n this.localDb.exec(`\n CREATE INDEX IF NOT EXISTS idx_storage_tier ON storage_tiers(tier);\n CREATE INDEX IF NOT EXISTS idx_storage_created ON storage_tiers(created_at);\n CREATE INDEX IF NOT EXISTS idx_storage_accessed ON storage_tiers(last_accessed);\n `);\n }\n \n /**\n * Store a trace in the appropriate tier\n */\n async storeTrace(trace: Trace): Promise<StorageTier> {\n // Ensure clients are initialized\n await this.initialized;\n \n const score = trace.score;\n const age = Date.now() - trace.metadata.startTime;\n const ageHours = age / (1000 * 60 * 60);\n \n // Determine tier based on age and score\n let tier: StorageTier;\n if (ageHours < this.config.tiers.hotHours && score > this.config.tiers.compressionScore) {\n tier = StorageTier.HOT;\n } else if (ageHours < this.config.tiers.warmDays * 24) {\n tier = StorageTier.WARM;\n } else {\n tier = StorageTier.COLD;\n }\n \n // Store in appropriate tier\n switch (tier) {\n case StorageTier.HOT:\n await this.storeInRedis(trace);\n break;\n case StorageTier.WARM:\n await this.storeInRailwayBuckets(trace);\n break;\n case StorageTier.COLD:\n await this.storeInGCS(trace);\n break;\n }\n \n // Track in database\n this.trackStorage(trace.id, tier, trace);\n \n return tier;\n }\n \n /**\n * Store trace in Redis (hot tier)\n */\n private async storeInRedis(trace: Trace): Promise<void> 
{\n if (!this.redisClient) {\n // Fallback to local SQLite if Redis unavailable\n return;\n }\n \n try {\n const key = `trace:${trace.id}`;\n const data = JSON.stringify(trace);\n \n // Compress if large\n let storedData: string;\n if (data.length > 10000) {\n const compressed = await gzipAsync(data);\n storedData = compressed.toString('base64');\n await this.redisClient.hSet(key, {\n data: storedData,\n compressed: 'true',\n score: trace.score.toString(),\n type: trace.type,\n timestamp: trace.metadata.startTime.toString(),\n });\n } else {\n storedData = data;\n await this.redisClient.hSet(key, {\n data: storedData,\n compressed: 'false',\n score: trace.score.toString(),\n type: trace.type,\n timestamp: trace.metadata.startTime.toString(),\n });\n }\n \n // Set TTL\n await this.redisClient.expire(key, this.config.redis.ttlSeconds);\n \n // Add to sorted set for efficient retrieval\n await this.redisClient.zAdd('traces:by_score', {\n score: trace.score,\n value: trace.id,\n });\n \n await this.redisClient.zAdd('traces:by_time', {\n score: trace.metadata.startTime,\n value: trace.id,\n });\n \n logger.debug('Stored trace in Redis', { \n traceId: trace.id, \n size: data.length,\n compressed: data.length > 10000,\n });\n \n } catch (error: unknown) {\n logger.error('Failed to store in Redis', error);\n throw error;\n }\n }\n \n /**\n * Store trace in Railway Buckets (warm tier)\n */\n private async storeInRailwayBuckets(trace: Trace): Promise<void> {\n if (!this.railwayS3) {\n throw new Error('Railway Buckets not configured');\n }\n \n try {\n // Compress trace\n const data = JSON.stringify(trace);\n const compressed = await gzipAsync(data);\n \n // Generate key with date partitioning\n const date = new Date(trace.metadata.startTime);\n const key = `traces/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${String(date.getDate()).padStart(2, '0')}/${trace.id}.json.gz`;\n \n // Upload to Railway Bucket\n const command = new PutObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: key,\n Body: compressed,\n ContentType: 'application/gzip',\n Metadata: {\n 'trace-id': trace.id,\n 'trace-type': trace.type,\n 'trace-score': trace.score.toString(),\n 'original-size': data.length.toString(),\n 'compressed-size': compressed.length.toString(),\n },\n });\n \n await this.railwayS3.send(command);\n \n // Remove from Redis if exists\n if (this.redisClient) {\n await this.redisClient.del(`trace:${trace.id}`);\n }\n \n logger.info('Stored trace in Railway Buckets', {\n traceId: trace.id,\n key,\n originalSize: data.length,\n compressedSize: compressed.length,\n compressionRatio: (1 - compressed.length / data.length).toFixed(2),\n });\n \n } catch (error: unknown) {\n logger.error('Failed to store in Railway Buckets', error);\n throw error;\n }\n }\n \n /**\n * Store trace in GCS (cold tier)\n */\n private async storeInGCS(trace: Trace): Promise<void> {\n if (!this.gcsStorage) {\n // Fallback to Railway Buckets if GCS not available\n return this.storeInRailwayBuckets(trace);\n }\n \n try {\n // Heavy compression for cold storage\n const minimal = this.createMinimalTrace(trace);\n const data = JSON.stringify(minimal);\n const compressed = await gzipAsync(data);\n \n // Generate key with year/month partitioning\n const date = new Date(trace.metadata.startTime);\n const key = `archive/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${trace.id}.json.gz`;\n \n // Upload to GCS with Coldline storage class\n const bucket = 
this.gcsStorage.bucket(this.config.gcs.bucketName);\n const file = bucket.file(key);\n \n await file.save(compressed, {\n metadata: {\n contentType: 'application/gzip',\n metadata: {\n traceId: trace.id,\n traceType: trace.type,\n score: trace.score.toString(),\n originalTools: trace.tools.length.toString(),\n },\n },\n storageClass: 'COLDLINE', // Use Coldline for cost optimization\n });\n \n // Remove from warm tier if exists\n if (this.railwayS3) {\n try {\n const warmKey = this.getWarmTierKey(trace);\n await this.railwayS3.send(new DeleteObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: warmKey,\n }));\n } catch (error: unknown) {\n // Ignore deletion errors\n }\n }\n \n logger.info('Archived trace to GCS', {\n traceId: trace.id,\n key,\n originalSize: JSON.stringify(trace).length,\n compressedSize: compressed.length,\n });\n \n } catch (error: unknown) {\n logger.error('Failed to store in GCS', error);\n throw error;\n }\n }\n \n /**\n * Create minimal trace for cold storage\n */\n private createMinimalTrace(trace: Trace): any {\n // Keep only essential information\n return {\n id: trace.id,\n type: trace.type,\n score: trace.score,\n summary: trace.summary,\n metadata: {\n startTime: trace.metadata.startTime,\n endTime: trace.metadata.endTime,\n filesModified: trace.metadata.filesModified.length,\n errorsCount: trace.metadata.errorsEncountered.length,\n decisionsCount: trace.metadata.decisionsRecorded.length,\n causalChain: trace.metadata.causalChain,\n },\n toolSummary: {\n count: trace.tools.length,\n types: [...new Set(trace.tools.map((t: any) => t.tool))],\n firstTool: trace.tools[0]?.tool,\n lastTool: trace.tools[trace.tools.length - 1]?.tool,\n },\n compressed: trace.compressed,\n };\n }\n \n /**\n * Retrieve a trace from any tier\n */\n async retrieveTrace(traceId: string): Promise<Trace | null> {\n // Ensure clients are initialized\n await this.initialized;\n \n // Check tier location\n const location = this.localDb.prepare(\n 'SELECT tier, location FROM storage_tiers WHERE trace_id = ?'\n ).get(traceId) as any;\n \n if (!location) {\n return null;\n }\n \n // Update access metrics\n this.localDb.prepare(\n 'UPDATE storage_tiers SET access_count = access_count + 1, last_accessed = ? 
WHERE trace_id = ?'\n ).run(Date.now(), traceId);\n \n // Retrieve based on tier\n switch (location.tier) {\n case StorageTier.HOT:\n return this.retrieveFromRedis(traceId);\n case StorageTier.WARM:\n return this.retrieveFromRailwayBuckets(traceId, location.location);\n case StorageTier.COLD:\n return this.retrieveFromGCS(traceId, location.location);\n default:\n return null;\n }\n }\n \n /**\n * Retrieve from Redis\n */\n private async retrieveFromRedis(traceId: string): Promise<Trace | null> {\n if (!this.redisClient) return null;\n \n try {\n const key = `trace:${traceId}`;\n const data = await this.redisClient.hGetAll(key);\n \n if (!data || !data.data) return null;\n \n let traceData: string;\n if (data.compressed === 'true') {\n const compressed = Buffer.from(data.data, 'base64');\n const decompressed = await gunzipAsync(compressed);\n traceData = decompressed.toString();\n } else {\n traceData = data.data;\n }\n \n return JSON.parse(traceData);\n \n } catch (error: unknown) {\n logger.error('Failed to retrieve from Redis', error);\n return null;\n }\n }\n \n /**\n * Retrieve from Railway Buckets\n */\n private async retrieveFromRailwayBuckets(traceId: string, key: string): Promise<Trace | null> {\n if (!this.railwayS3) return null;\n \n try {\n const command = new GetObjectCommand({\n Bucket: this.config.railwayBuckets.bucket,\n Key: key,\n });\n \n const response = await this.railwayS3.send(command);\n const compressed = await response.Body?.transformToByteArray();\n \n if (!compressed) return null;\n \n const decompressed = await gunzipAsync(Buffer.from(compressed));\n return JSON.parse(decompressed.toString());\n \n } catch (error: unknown) {\n logger.error('Failed to retrieve from Railway Buckets', error);\n return null;\n }\n }\n \n /**\n * Retrieve from GCS\n */\n private async retrieveFromGCS(traceId: string, key: string): Promise<Trace | null> {\n if (!this.gcsStorage) return null;\n \n try {\n const bucket = this.gcsStorage.bucket(this.config.gcs.bucketName);\n const file = bucket.file(key);\n \n const [compressed] = await file.download();\n const decompressed = await gunzipAsync(compressed);\n \n // Note: Returns minimal trace from cold storage\n return JSON.parse(decompressed.toString());\n \n } catch (error: unknown) {\n logger.error('Failed to retrieve from GCS', error);\n return null;\n }\n }\n \n /**\n * Track storage in database\n */\n private trackStorage(traceId: string, tier: StorageTier, trace: Trace): void {\n const originalSize = JSON.stringify(trace).length;\n const compressedSize = Math.floor(originalSize * 0.3); // Estimate\n \n this.localDb.prepare(`\n INSERT OR REPLACE INTO storage_tiers (\n trace_id, tier, location, original_size, compressed_size,\n compression_ratio, access_count, last_accessed, created_at,\n migrated_at, score\n ) VALUES (?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?)\n `).run(\n traceId,\n tier,\n this.getStorageLocation(trace, tier),\n originalSize,\n compressedSize,\n 1 - compressedSize / originalSize,\n Date.now(),\n trace.metadata.startTime,\n Date.now(),\n trace.score\n );\n }\n \n /**\n * Get storage location key\n */\n private getStorageLocation(trace: Trace, tier: StorageTier): string {\n const date = new Date(trace.metadata.startTime);\n const year = date.getFullYear();\n const month = String(date.getMonth() + 1).padStart(2, '0');\n const day = String(date.getDate()).padStart(2, '0');\n \n switch (tier) {\n case StorageTier.HOT:\n return `redis:trace:${trace.id}`;\n case StorageTier.WARM:\n return 
`traces/${year}/${month}/${day}/${trace.id}.json.gz`;\n case StorageTier.COLD:\n return `archive/${year}/${month}/${trace.id}.json.gz`;\n }\n }\n \n /**\n * Get warm tier key for a trace\n */\n private getWarmTierKey(trace: Trace): string {\n const date = new Date(trace.metadata.startTime);\n return `traces/${date.getFullYear()}/${String(date.getMonth() + 1).padStart(2, '0')}/${String(date.getDate()).padStart(2, '0')}/${trace.id}.json.gz`;\n }\n \n /**\n * Migrate traces between tiers based on age\n */\n async migrateTiers(): Promise<{\n hotToWarm: number;\n warmToCold: number;\n errors: string[];\n }> {\n const results = {\n hotToWarm: 0,\n warmToCold: 0,\n errors: [] as string[],\n };\n \n const now = Date.now();\n \n // Find traces to migrate\n const candidates = this.localDb.prepare(`\n SELECT trace_id, tier, created_at, score\n FROM storage_tiers\n WHERE tier != 'cold'\n ORDER BY created_at ASC\n `).all() as any[];\n \n for (const candidate of candidates) {\n const ageHours = (now - candidate.created_at) / (1000 * 60 * 60);\n \n try {\n if (candidate.tier === StorageTier.HOT && ageHours > this.config.tiers.hotHours) {\n // Migrate hot \u2192 warm\n const trace = await this.retrieveTrace(candidate.trace_id);\n if (trace) {\n await this.storeInRailwayBuckets(trace);\n this.trackStorage(candidate.trace_id, StorageTier.WARM, trace);\n results.hotToWarm++;\n }\n } else if (candidate.tier === StorageTier.WARM && ageHours > this.config.tiers.warmDays * 24) {\n // Migrate warm \u2192 cold\n const trace = await this.retrieveTrace(candidate.trace_id);\n if (trace) {\n await this.storeInGCS(trace);\n this.trackStorage(candidate.trace_id, StorageTier.COLD, trace);\n results.warmToCold++;\n }\n }\n } catch (error: unknown) {\n results.errors.push(`Failed to migrate ${candidate.trace_id}: ${error}`);\n }\n }\n \n logger.info('Tier migration completed', results);\n return results;\n }\n \n /**\n * Get storage statistics\n */\n getStorageStats(): any {\n const tierStats = this.localDb.prepare(`\n SELECT \n tier,\n COUNT(*) as count,\n SUM(original_size) as total_original,\n SUM(compressed_size) as total_compressed,\n AVG(compression_ratio) as avg_compression,\n AVG(access_count) as avg_access\n FROM storage_tiers\n GROUP BY tier\n `).all();\n \n const ageDistribution = this.localDb.prepare(`\n SELECT \n CASE \n WHEN (? - created_at) / 3600000 < 24 THEN '< 24h'\n WHEN (? - created_at) / 86400000 < 7 THEN '1-7d'\n WHEN (? - created_at) / 86400000 < 30 THEN '7-30d'\n ELSE '30d+'\n END as age_group,\n COUNT(*) as count\n FROM storage_tiers\n GROUP BY age_group\n `).all(Date.now(), Date.now(), Date.now());\n \n return {\n byTier: tierStats,\n byAge: ageDistribution,\n totalTraces: tierStats.reduce((sum: number, t: any) => sum + t.count, 0),\n totalSize: tierStats.reduce((sum: number, t: any) => sum + t.total_original, 0),\n compressedSize: tierStats.reduce((sum: number, t: any) => sum + t.total_compressed, 0),\n };\n }\n \n /**\n * Clean up expired data\n */\n async cleanup(): Promise<number> {\n let cleaned = 0;\n \n // Remove old entries from storage_tiers table\n const cutoff = Date.now() - (90 * 24 * 60 * 60 * 1000); // 90 days\n \n const result = this.localDb.prepare(`\n DELETE FROM storage_tiers\n WHERE tier = 'cold' AND created_at < ? AND access_count = 0\n `).run(cutoff);\n \n cleaned = result.changes;\n \n logger.info('Cleanup completed', { removed: cleaned });\n return cleaned;\n }\n}"],
+ "mappings": "AAOA,SAAS,oBAAqC;AAC9C,SAAS,UAAU,kBAAkB,kBAAkB,2BAA8C;AACrG,SAAS,eAAe;AAExB,SAAS,cAAc;AAGvB,YAAY,UAAU;AACtB,SAAS,iBAAiB;AAE1B,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AAGA,MAAM,YAAY,UAAU,KAAK,IAAI;AACrC,MAAM,cAAc,UAAU,KAAK,MAAM;AAElC,IAAK,cAAL,kBAAKA,iBAAL;AACL,EAAAA,aAAA,SAAM;AACN,EAAAA,aAAA,UAAO;AACP,EAAAA,aAAA,UAAO;AAHG,SAAAA;AAAA,GAAA;AA+BL,MAAM,yBAA+C;AAAA,EAC1D,OAAO;AAAA,IACL,KAAK,QAAQ,IAAI,WAAW,KAAK;AAAA,IACjC,YAAY;AAAA;AAAA,IACZ,aAAa;AAAA;AAAA,EACf;AAAA,EACA,gBAAgB;AAAA,IACd,UAAU,QAAQ,IAAI,yBAAyB,KAAK;AAAA,IACpD,QAAQ,QAAQ,IAAI,qBAAqB,KAAK;AAAA,IAC9C,aAAa,QAAQ,IAAI,2BAA2B,KAAK;AAAA,IACzD,iBAAiB,QAAQ,IAAI,2BAA2B,KAAK;AAAA,IAC7D,QAAQ;AAAA,EACV;AAAA,EACA,KAAK;AAAA,IACH,YAAY,QAAQ,IAAI,YAAY,KAAK;AAAA,IACzC,WAAW,QAAQ,IAAI,gBAAgB,KAAK;AAAA,IAC5C,aAAa,QAAQ,IAAI,cAAc;AAAA,EACzC;AAAA,EACA,OAAO;AAAA,IACL,UAAU;AAAA,IACV,UAAU;AAAA,IACV,kBAAkB;AAAA,EACpB;AACF;AAeO,MAAM,wBAAwB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,eAA4C,oBAAI,IAAI;AAAA,EAEpD;AAAA,EAER,YACE,SACA,eACA,QACA;AACA,SAAK,UAAU;AACf,SAAK,gBAAgB;AACrB,SAAK,SAAS,EAAE,GAAG,wBAAwB,GAAG,OAAO;AAErD,SAAK,iBAAiB;AACtB,SAAK,cAAc,KAAK,kBAAkB;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAmC;AAE/C,QAAI,KAAK,OAAO,MAAM,KAAK;AACzB,UAAI;AACF,aAAK,cAAc,aAAa,EAAE,KAAK,KAAK,OAAO,MAAM,IAAI,CAAC;AAE9D,aAAK,YAAY,GAAG,SAAS,CAAC,QAAQ;AACpC,iBAAO,MAAM,sBAAsB,GAAG;AAAA,QACxC,CAAC;AAED,cAAM,KAAK,YAAY,QAAQ;AAG/B,cAAM,KAAK,YAAY,UAAU,oBAAoB,aAAa;AAElE,eAAO,KAAK,sCAAsC;AAAA,MACpD,SAAS,OAAgB;AACvB,eAAO,KAAK,wDAAwD,KAAK;AAAA,MAC3E;AAAA,IACF;AAGA,QAAI,KAAK,OAAO,eAAe,aAAa;AAC1C,WAAK,YAAY,IAAI,SAAS;AAAA,QAC5B,UAAU,KAAK,OAAO,eAAe;AAAA,QACrC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,aAAa;AAAA,UACX,aAAa,KAAK,OAAO,eAAe;AAAA,UACxC,iBAAiB,KAAK,OAAO,eAAe;AAAA,QAC9C;AAAA,QACA,gBAAgB;AAAA;AAAA,MAClB,CAAC;AAED,aAAO,KAAK,0CAA0C;AAAA,IACxD;AAGA,QAAI,KAAK,OAAO,IAAI,WAAW;AAC7B,UAAI;AACF,aAAK,aAAa,IAAI,QAAQ;AAAA,UAC5B,WAAW,KAAK,OAAO,IAAI;AAAA,UAC3B,aAAa,KAAK,OAAO,IAAI;AAAA,QAC/B,CAAC;AAED,eAAO,KAAK,sCAAsC;AAAA,MACpD,SAAS,OAAgB;AACvB,eAAO,KAAK,mDAAmD,KAAK;AAAA,MACtE;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAejB;AAED,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA,KAIjB;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAoC;AAEnD,UAAM,KAAK;AAEX,UAAM,QAAQ,MAAM;AACpB,UAAM,MAAM,KAAK,IAAI,IAAI,MAAM,SAAS;AACxC,UAAM,WAAW,OAAO,MAAO,KAAK;AAGpC,QAAI;AACJ,QAAI,WAAW,KAAK,OAAO,MAAM,YAAY,QAAQ,KAAK,OAAO,MAAM,kBAAkB;AACvF,aAAO;AAAA,IACT,WAAW,WAAW,KAAK,OAAO,MAAM,WAAW,IAAI;AACrD,aAAO;AAAA,IACT,OAAO;AACL,aAAO;AAAA,IACT;AAGA,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,cAAM,KAAK,aAAa,KAAK;AAC7B;AAAA,MACF,KAAK;AACH,cAAM,KAAK,sBAAsB,KAAK;AACtC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,WAAW,KAAK;AAC3B;AAAA,IACJ;AAGA,SAAK,aAAa,MAAM,IAAI,MAAM,KAAK;AAEvC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,OAA6B;AACtD,QAAI,CAAC,KAAK,aAAa;AAErB;AAAA,IACF;AAEA,QAAI;AACF,YAAM,MAAM,SAAS,MAAM,EAAE;AAC7B,YAAM,OAAO,KAAK,UAAU,KAAK;AAGjC,UAAI;AACJ,UAAI,KAAK,SAAS,KAAO;AACvB,cAAM,aAAa,MAAM,UAAU,IAAI;AACvC,qBAAa,WAAW,SAAS,QAAQ;AACzC,cAAM,KAAK,YAAY,KAAK,KAAK;AAAA,UAC/B,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,OAAO,MAAM,MAAM,SAAS;AAAA,UAC5B,MAAM,MAAM;AAAA,UACZ,WAAW,MAAM,SAAS,UAAU,SAAS;AAAA,QAC/C,CAAC;AAAA,MACH,OAAO;AACL,qBAAa;AACb,cAAM,KAAK,YAAY,KAAK,KAAK;AAAA,UAC/B,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,OAAO,MAAM,MAAM,SAAS;AAAA,UAC5B,MAAM,MAAM;AAAA,UACZ,WAAW,MAAM,SAAS,UAAU,SAAS;AA
AA,QAC/C,CAAC;AAAA,MACH;AAGA,YAAM,KAAK,YAAY,OAAO,KAAK,KAAK,OAAO,MAAM,UAAU;AAG/D,YAAM,KAAK,YAAY,KAAK,mBAAmB;AAAA,QAC7C,OAAO,MAAM;AAAA,QACb,OAAO,MAAM;AAAA,MACf,CAAC;AAED,YAAM,KAAK,YAAY,KAAK,kBAAkB;AAAA,QAC5C,OAAO,MAAM,SAAS;AAAA,QACtB,OAAO,MAAM;AAAA,MACf,CAAC;AAED,aAAO,MAAM,yBAAyB;AAAA,QACpC,SAAS,MAAM;AAAA,QACf,MAAM,KAAK;AAAA,QACX,YAAY,KAAK,SAAS;AAAA,MAC5B,CAAC;AAAA,IAEH,SAAS,OAAgB;AACvB,aAAO,MAAM,4BAA4B,KAAK;AAC9C,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBAAsB,OAA6B;AAC/D,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,gCAAgC;AAAA,IAClD;AAEA,QAAI;AAEF,YAAM,OAAO,KAAK,UAAU,KAAK;AACjC,YAAM,aAAa,MAAM,UAAU,IAAI;AAGvC,YAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,YAAM,MAAM,UAAU,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAG/I,YAAM,UAAU,IAAI,iBAAiB;AAAA,QACnC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,KAAK;AAAA,QACL,MAAM;AAAA,QACN,aAAa;AAAA,QACb,UAAU;AAAA,UACR,YAAY,MAAM;AAAA,UAClB,cAAc,MAAM;AAAA,UACpB,eAAe,MAAM,MAAM,SAAS;AAAA,UACpC,iBAAiB,KAAK,OAAO,SAAS;AAAA,UACtC,mBAAmB,WAAW,OAAO,SAAS;AAAA,QAChD;AAAA,MACF,CAAC;AAED,YAAM,KAAK,UAAU,KAAK,OAAO;AAGjC,UAAI,KAAK,aAAa;AACpB,cAAM,KAAK,YAAY,IAAI,SAAS,MAAM,EAAE,EAAE;AAAA,MAChD;AAEA,aAAO,KAAK,mCAAmC;AAAA,QAC7C,SAAS,MAAM;AAAA,QACf;AAAA,QACA,cAAc,KAAK;AAAA,QACnB,gBAAgB,WAAW;AAAA,QAC3B,mBAAmB,IAAI,WAAW,SAAS,KAAK,QAAQ,QAAQ,CAAC;AAAA,MACnE,CAAC;AAAA,IAEH,SAAS,OAAgB;AACvB,aAAO,MAAM,sCAAsC,KAAK;AACxD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,OAA6B;AACpD,QAAI,CAAC,KAAK,YAAY;AAEpB,aAAO,KAAK,sBAAsB,KAAK;AAAA,IACzC;AAEA,QAAI;AAEF,YAAM,UAAU,KAAK,mBAAmB,KAAK;AAC7C,YAAM,OAAO,KAAK,UAAU,OAAO;AACnC,YAAM,aAAa,MAAM,UAAU,IAAI;AAGvC,YAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,YAAM,MAAM,WAAW,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAGrG,YAAM,SAAS,KAAK,WAAW,OAAO,KAAK,OAAO,IAAI,UAAU;AAChE,YAAM,OAAO,OAAO,KAAK,GAAG;AAE5B,YAAM,KAAK,KAAK,YAAY;AAAA,QAC1B,UAAU;AAAA,UACR,aAAa;AAAA,UACb,UAAU;AAAA,YACR,SAAS,MAAM;AAAA,YACf,WAAW,MAAM;AAAA,YACjB,OAAO,MAAM,MAAM,SAAS;AAAA,YAC5B,eAAe,MAAM,MAAM,OAAO,SAAS;AAAA,UAC7C;AAAA,QACF;AAAA,QACA,cAAc;AAAA;AAAA,MAChB,CAAC;AAGD,UAAI,KAAK,WAAW;AAClB,YAAI;AACF,gBAAM,UAAU,KAAK,eAAe,KAAK;AACzC,gBAAM,KAAK,UAAU,KAAK,IAAI,oBAAoB;AAAA,YAChD,QAAQ,KAAK,OAAO,eAAe;AAAA,YACnC,KAAK;AAAA,UACP,CAAC,CAAC;AAAA,QACJ,SAAS,OAAgB;AAAA,QAEzB;AAAA,MACF;AAEA,aAAO,KAAK,yBAAyB;AAAA,QACnC,SAAS,MAAM;AAAA,QACf;AAAA,QACA,cAAc,KAAK,UAAU,KAAK,EAAE;AAAA,QACpC,gBAAgB,WAAW;AAAA,MAC7B,CAAC;AAAA,IAEH,SAAS,OAAgB;AACvB,aAAO,MAAM,0BAA0B,KAAK;AAC5C,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAmB;AAE5C,WAAO;AAAA,MACL,IAAI,MAAM;AAAA,MACV,MAAM,MAAM;AAAA,MACZ,OAAO,MAAM;AAAA,MACb,SAAS,MAAM;AAAA,MACf,UAAU;AAAA,QACR,WAAW,MAAM,SAAS;AAAA,QAC1B,SAAS,MAAM,SAAS;AAAA,QACxB,eAAe,MAAM,SAAS,cAAc;AAAA,QAC5C,aAAa,MAAM,SAAS,kBAAkB;AAAA,QAC9C,gBAAgB,MAAM,SAAS,kBAAkB;AAAA,QACjD,aAAa,MAAM,SAAS;AAAA,MAC9B;AAAA,MACA,aAAa;AAAA,QACX,OAAO,MAAM,MAAM;AAAA,QACnB,OAAO,CAAC,GAAG,IAAI,IAAI,MAAM,MAAM,IAAI,CAAC,MAAW,EAAE,IAAI,CAAC,CAAC;AAAA,QACvD,WAAW,MAAM,MAAM,CAAC,GAAG;AAAA,QAC3B,UAAU,MAAM,MAAM,MAAM,MAAM,SAAS,CAAC,GAAG;AAAA,MACjD;AAAA,MACA,YAAY,MAAM;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,SAAwC;AAE1D,UAAM,KAAK;AAGX,UAAM,WAAW,KAAK,QAAQ;AAAA,MAC5B;AAAA,IACF,EAAE,IAAI,OAAO;AAEb,QAAI,CAAC,UAAU;AACb,aAAO;AAAA,IACT;AAGA,SAAK,QAAQ;AAAA,MACX;AAAA,IACF,EAAE,IAAI,KAAK,IAAI,GAAG,OAAO;AAGzB,YAAQ,SAAS,MAAM;AAAA,MACrB,KAAK;AACH,eAAO,KAAK,kBAAkB,OAAO;AAAA,MACvC,KAAK;AACH,eAAO,KAAK,2BAA2B,SAAS,SAAS,QAAQ;AAAA,MACnE,KAAK;AACH,eAAO,KAAK,gBAAgB,SAAS,SAAS,QAAQ;AAAA,MACxD;AACE,eAAO;AAAA
,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBAAkB,SAAwC;AACtE,QAAI,CAAC,KAAK,YAAa,QAAO;AAE9B,QAAI;AACF,YAAM,MAAM,SAAS,OAAO;AAC5B,YAAM,OAAO,MAAM,KAAK,YAAY,QAAQ,GAAG;AAE/C,UAAI,CAAC,QAAQ,CAAC,KAAK,KAAM,QAAO;AAEhC,UAAI;AACJ,UAAI,KAAK,eAAe,QAAQ;AAC9B,cAAM,aAAa,OAAO,KAAK,KAAK,MAAM,QAAQ;AAClD,cAAM,eAAe,MAAM,YAAY,UAAU;AACjD,oBAAY,aAAa,SAAS;AAAA,MACpC,OAAO;AACL,oBAAY,KAAK;AAAA,MACnB;AAEA,aAAO,KAAK,MAAM,SAAS;AAAA,IAE7B,SAAS,OAAgB;AACvB,aAAO,MAAM,iCAAiC,KAAK;AACnD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,2BAA2B,SAAiB,KAAoC;AAC5F,QAAI,CAAC,KAAK,UAAW,QAAO;AAE5B,QAAI;AACF,YAAM,UAAU,IAAI,iBAAiB;AAAA,QACnC,QAAQ,KAAK,OAAO,eAAe;AAAA,QACnC,KAAK;AAAA,MACP,CAAC;AAED,YAAM,WAAW,MAAM,KAAK,UAAU,KAAK,OAAO;AAClD,YAAM,aAAa,MAAM,SAAS,MAAM,qBAAqB;AAE7D,UAAI,CAAC,WAAY,QAAO;AAExB,YAAM,eAAe,MAAM,YAAY,OAAO,KAAK,UAAU,CAAC;AAC9D,aAAO,KAAK,MAAM,aAAa,SAAS,CAAC;AAAA,IAE3C,SAAS,OAAgB;AACvB,aAAO,MAAM,2CAA2C,KAAK;AAC7D,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,SAAiB,KAAoC;AACjF,QAAI,CAAC,KAAK,WAAY,QAAO;AAE7B,QAAI;AACF,YAAM,SAAS,KAAK,WAAW,OAAO,KAAK,OAAO,IAAI,UAAU;AAChE,YAAM,OAAO,OAAO,KAAK,GAAG;AAE5B,YAAM,CAAC,UAAU,IAAI,MAAM,KAAK,SAAS;AACzC,YAAM,eAAe,MAAM,YAAY,UAAU;AAGjD,aAAO,KAAK,MAAM,aAAa,SAAS,CAAC;AAAA,IAE3C,SAAS,OAAgB;AACvB,aAAO,MAAM,+BAA+B,KAAK;AACjD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAiB,MAAmB,OAAoB;AAC3E,UAAM,eAAe,KAAK,UAAU,KAAK,EAAE;AAC3C,UAAM,iBAAiB,KAAK,MAAM,eAAe,GAAG;AAEpD,SAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAMpB,EAAE;AAAA,MACD;AAAA,MACA;AAAA,MACA,KAAK,mBAAmB,OAAO,IAAI;AAAA,MACnC;AAAA,MACA;AAAA,MACA,IAAI,iBAAiB;AAAA,MACrB,KAAK,IAAI;AAAA,MACT,MAAM,SAAS;AAAA,MACf,KAAK,IAAI;AAAA,MACT,MAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAc,MAA2B;AAClE,UAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,UAAM,OAAO,KAAK,YAAY;AAC9B,UAAM,QAAQ,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AACzD,UAAM,MAAM,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAElD,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,eAAO,eAAe,MAAM,EAAE;AAAA,MAChC,KAAK;AACH,eAAO,UAAU,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,MAAM,EAAE;AAAA,MACnD,KAAK;AACH,eAAO,WAAW,IAAI,IAAI,KAAK,IAAI,MAAM,EAAE;AAAA,IAC/C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,OAAsB;AAC3C,UAAM,OAAO,IAAI,KAAK,MAAM,SAAS,SAAS;AAC9C,WAAO,UAAU,KAAK,YAAY,CAAC,IAAI,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,IAAI,MAAM,EAAE;AAAA,EAC5I;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAIH;AACD,UAAM,UAAU;AAAA,MACd,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,QAAQ,CAAC;AAAA,IACX;AAEA,UAAM,MAAM,KAAK,IAAI;AAGrB,UAAM,aAAa,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,KAKvC,EAAE,IAAI;AAEP,eAAW,aAAa,YAAY;AAClC,YAAM,YAAY,MAAM,UAAU,eAAe,MAAO,KAAK;AAE7D,UAAI;AACF,YAAI,UAAU,SAAS,mBAAmB,WAAW,KAAK,OAAO,MAAM,UAAU;AAE/E,gBAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,QAAQ;AACzD,cAAI,OAAO;AACT,kBAAM,KAAK,sBAAsB,KAAK;AACtC,iBAAK,aAAa,UAAU,UAAU,mBAAkB,KAAK;AAC7D,oBAAQ;AAAA,UACV;AAAA,QACF,WAAW,UAAU,SAAS,qBAAoB,WAAW,KAAK,OAAO,MAAM,WAAW,IAAI;AAE5F,gBAAM,QAAQ,MAAM,KAAK,cAAc,UAAU,QAAQ;AACzD,cAAI,OAAO;AACT,kBAAM,KAAK,WAAW,KAAK;AAC3B,iBAAK,aAAa,UAAU,UAAU,mBAAkB,KAAK;AAC7D,oBAAQ;AAAA,UACV;AAAA,QACF;AAAA,MACF,SAAS,OAAgB;AACvB,gBAAQ,OAAO,KAAK,qBAAqB,UAAU,QAAQ,KAAK,KAAK,EAAE;AAAA,MACzE;AAAA,IACF;AAEA,WAAO,KAAK,4BAA4B,OAAO;AAC/C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAuB;AACrB,UAAM,YAAY,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAUtC,EAAE,IAAI;AAEP,UAAM,kBAAkB,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAW5C,EAAE,IAAI,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,IAAI,CAAC;AAEzC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,aAAa,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,OAA
O,CAAC;AAAA,MACvE,WAAW,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,gBAAgB,CAAC;AAAA,MAC9E,gBAAgB,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,kBAAkB,CAAC;AAAA,IACvF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAA2B;AAC/B,QAAI,UAAU;AAGd,UAAM,SAAS,KAAK,IAAI,IAAK,KAAK,KAAK,KAAK,KAAK;AAEjD,UAAM,SAAS,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA,KAGnC,EAAE,IAAI,MAAM;AAEb,cAAU,OAAO;AAEjB,WAAO,KAAK,qBAAqB,EAAE,SAAS,QAAQ,CAAC;AACrD,WAAO;AAAA,EACT;AACF;",
  "names": ["StorageTier"]
  }
@@ -1,6 +1,17 @@
  import { S3Client, PutObjectCommand, GetObjectCommand } from "@aws-sdk/client-s3";
  import { Storage } from "@google-cloud/storage";
  import { logger } from "../monitoring/logger.js";
+ function getEnv(key, defaultValue) {
+ const value = process.env[key];
+ if (value === void 0) {
+ if (defaultValue !== void 0) return defaultValue;
+ throw new Error(`Environment variable ${key} is required`);
+ }
+ return value;
+ }
+ function getOptionalEnv(key) {
+ return process.env[key];
+ }
  var StorageTier = /* @__PURE__ */ ((StorageTier2) => {
  StorageTier2["HOT"] = "hot";
  StorageTier2["NEARLINE"] = "nearline";
@@ -13,7 +24,7 @@ const DEFAULT_REMOTE_CONFIG = {
  // Default to GCS for better pricing
  gcs: {
  bucketName: "stackmemory-traces",
- projectId: process.env.GCP_PROJECT_ID || "stackmemory"
+ projectId: process.env["GCP_PROJECT_ID"] || "stackmemory"
  },
  timeseries: {
  type: "sqlite",
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/storage/remote-storage.ts"],
- "sourcesContent": ["/**\n * Remote Storage Interface for Two-Tier Storage System\n * Implements infinite retention with TimeSeries DB + S3\n */\n\nimport { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3';\nimport { Storage } from '@google-cloud/storage';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace } from '../trace/types.js';\nimport Database from 'better-sqlite3';\n\nexport enum StorageTier {\n HOT = 'hot', // < 7 days - Railway Buckets or GCS Standard\n NEARLINE = 'nearline', // 7-30 days - GCS Nearline ($0.01/GB)\n COLDLINE = 'coldline', // 30-90 days - GCS Coldline ($0.004/GB)\n ARCHIVE = 'archive' // > 90 days - GCS Archive ($0.0012/GB)\n}\n\nexport interface RemoteStorageConfig {\n provider: 'gcs' | 's3' | 'railway';\n gcs?: {\n bucketName: string;\n projectId: string;\n keyFilename?: string; // Path to service account key\n };\n s3?: {\n bucket: string;\n region: string;\n accessKeyId?: string;\n secretAccessKey?: string;\n endpoint?: string; // For Railway buckets or MinIO\n };\n timeseries: {\n type: 'clickhouse' | 'timescale' | 'influxdb' | 'sqlite'; // SQLite for dev\n host: string;\n port: number;\n database: string;\n username?: string;\n password?: string;\n };\n migration: {\n batchSize: number;\n hotAgeHours: number; // < 7 days\n nearlineAgeHours: number; // 7-30 days \n coldlineAgeHours: number; // 30-90 days\n archiveAgeHours: number; // > 90 days\n scoreThreshold: number; // Score threshold for early migration\n };\n}\n\nexport const DEFAULT_REMOTE_CONFIG: RemoteStorageConfig = {\n provider: 'gcs', // Default to GCS for better pricing\n gcs: {\n bucketName: 'stackmemory-traces',\n projectId: process.env.GCP_PROJECT_ID || 'stackmemory',\n },\n timeseries: {\n type: 'sqlite', // Use SQLite for development\n host: 'localhost',\n port: 0,\n database: 'stackmemory_timeseries',\n },\n migration: {\n batchSize: 100,\n hotAgeHours: 168, // 7 days\n nearlineAgeHours: 720, // 30 days\n coldlineAgeHours: 2160, // 90 days\n archiveAgeHours: 8760, // 365 days\n scoreThreshold: 0.4,\n }\n};\n\nexport interface MigrationCandidate {\n traceId: string;\n age: number;\n score: number;\n size: number;\n tier: StorageTier;\n shouldMigrate: boolean;\n compressionLevel: 'none' | 'light' | 'medium' | 'heavy';\n}\n\n/**\n * Remote storage manager for infinite trace retention\n */\nexport class RemoteStorageManager {\n private storageClient?: S3Client | Storage;\n private config: RemoteStorageConfig;\n private localDb: Database.Database;\n private migrationInProgress = false;\n \n constructor(\n localDb: Database.Database,\n config?: Partial<RemoteStorageConfig>\n ) {\n this.localDb = localDb;\n this.config = { ...DEFAULT_REMOTE_CONFIG, ...config };\n \n this.initializeStorageClient();\n this.initializeSchema();\n }\n \n /**\n * Initialize storage client based on provider\n */\n private initializeStorageClient(): void {\n switch (this.config.provider) {\n case 'gcs':\n if (this.config.gcs) {\n this.storageClient = new Storage({\n projectId: this.config.gcs.projectId,\n keyFilename: this.config.gcs.keyFilename,\n });\n }\n break;\n \n case 's3':\n case 'railway':\n if (this.config.s3?.accessKeyId && this.config.s3?.secretAccessKey) {\n this.storageClient = new S3Client({\n region: this.config.s3.region,\n credentials: {\n accessKeyId: this.config.s3.accessKeyId,\n secretAccessKey: this.config.s3.secretAccessKey,\n },\n endpoint: this.config.s3.endpoint, // Railway buckets endpoint\n });\n }\n break;\n }\n }\n 
\n /**\n * Initialize migration tracking schema\n */\n private initializeSchema(): void {\n this.localDb.exec(`\n CREATE TABLE IF NOT EXISTS remote_migrations (\n trace_id TEXT PRIMARY KEY,\n migrated_at INTEGER NOT NULL,\n storage_tier TEXT NOT NULL,\n s3_key TEXT,\n timeseries_id TEXT,\n compression_level TEXT,\n original_size INTEGER,\n compressed_size INTEGER,\n retrieval_count INTEGER DEFAULT 0,\n last_retrieved INTEGER,\n FOREIGN KEY (trace_id) REFERENCES traces(id) ON DELETE CASCADE\n )\n `);\n \n this.localDb.exec(`\n CREATE INDEX IF NOT EXISTS idx_migrations_tier ON remote_migrations(storage_tier);\n CREATE INDEX IF NOT EXISTS idx_migrations_migrated ON remote_migrations(migrated_at);\n `);\n }\n \n /**\n * Identify traces for migration based on age and importance\n */\n async identifyMigrationCandidates(): Promise<MigrationCandidate[]> {\n const now = Date.now();\n \n // Query all traces with their metadata\n const traces = this.localDb.prepare(`\n SELECT \n t.id,\n t.score,\n t.start_time,\n LENGTH(t.compressed_data) + \n COALESCE((SELECT SUM(LENGTH(tc.arguments) + LENGTH(tc.result)) \n FROM tool_calls tc WHERE tc.trace_id = t.id), 0) as size,\n rm.trace_id as already_migrated\n FROM traces t\n LEFT JOIN remote_migrations rm ON t.id = rm.trace_id\n WHERE rm.trace_id IS NULL -- Not already migrated\n ORDER BY t.start_time ASC\n `).all() as any[];\n \n const candidates: MigrationCandidate[] = [];\n \n for (const trace of traces) {\n const ageHours = (now - trace.start_time) / (1000 * 60 * 60);\n const candidate = this.evaluateTrace(\n trace.id,\n ageHours,\n trace.score,\n trace.size || 0\n );\n \n candidates.push(candidate);\n }\n \n return candidates;\n }\n \n /**\n * Evaluate a trace for migration based on GCS storage classes\n */\n private evaluateTrace(\n traceId: string,\n ageHours: number,\n score: number,\n size: number\n ): MigrationCandidate {\n let tier = StorageTier.HOT;\n let shouldMigrate = false;\n let compressionLevel: 'none' | 'light' | 'medium' | 'heavy' = 'none';\n \n // Determine storage tier based on age and GCS storage classes\n if (ageHours > this.config.migration.archiveAgeHours) {\n // GCS Archive: $0.0012/GB - accessed < once per year\n tier = StorageTier.ARCHIVE;\n shouldMigrate = true;\n compressionLevel = 'heavy';\n } else if (ageHours > this.config.migration.coldlineAgeHours) {\n // GCS Coldline: $0.004/GB - accessed < once per quarter \n tier = StorageTier.COLDLINE;\n shouldMigrate = true;\n compressionLevel = 'heavy';\n } else if (ageHours > this.config.migration.nearlineAgeHours) {\n // GCS Nearline: $0.01/GB - accessed < once per month\n tier = StorageTier.NEARLINE;\n shouldMigrate = true;\n compressionLevel = 'medium';\n } else if (ageHours > this.config.migration.hotAgeHours) {\n // Still hot but consider migration if low importance\n tier = StorageTier.HOT;\n if (score < this.config.migration.scoreThreshold) {\n shouldMigrate = true;\n compressionLevel = 'light';\n }\n }\n \n // Force migration for size pressure\n const localSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB\n const currentLocalSize = this.getLocalStorageSize();\n \n if (currentLocalSize > localSizeLimit * 0.75) {\n // Start migrating when 75% full\n shouldMigrate = true;\n if (compressionLevel === 'none') {\n compressionLevel = 'light';\n }\n }\n \n return {\n traceId,\n age: ageHours,\n score,\n size,\n tier,\n shouldMigrate,\n compressionLevel,\n };\n }\n \n /**\n * Migrate traces to remote storage\n */\n async migrateTraces(\n candidates: MigrationCandidate[],\n dryRun: boolean = 
false\n ): Promise<{\n migrated: number;\n failed: number;\n totalSize: number;\n errors: string[];\n }> {\n if (this.migrationInProgress) {\n return {\n migrated: 0,\n failed: 0,\n totalSize: 0,\n errors: ['Migration already in progress'],\n };\n }\n \n this.migrationInProgress = true;\n const results = {\n migrated: 0,\n failed: 0,\n totalSize: 0,\n errors: [] as string[],\n };\n \n try {\n // Process in batches\n const toMigrate = candidates.filter(c => c.shouldMigrate);\n const batches = this.createBatches(toMigrate, this.config.migration.batchSize);\n \n for (const batch of batches) {\n if (dryRun) {\n logger.info('Dry run - would migrate batch', {\n count: batch.length,\n totalSize: batch.reduce((sum, c) => sum + c.size, 0),\n });\n results.migrated += batch.length;\n continue;\n }\n \n const batchResults = await this.migrateBatch(batch);\n results.migrated += batchResults.success;\n results.failed += batchResults.failed;\n results.totalSize += batchResults.totalSize;\n results.errors.push(...batchResults.errors);\n \n // Small delay between batches\n await new Promise(resolve => setTimeout(resolve, 100));\n }\n \n } finally {\n this.migrationInProgress = false;\n }\n \n logger.info('Migration completed', results);\n return results;\n }\n \n /**\n * Migrate a batch of traces\n */\n private async migrateBatch(\n batch: MigrationCandidate[]\n ): Promise<{\n success: number;\n failed: number;\n totalSize: number;\n errors: string[];\n }> {\n const results = {\n success: 0,\n failed: 0,\n totalSize: 0,\n errors: [] as string[],\n };\n \n for (const candidate of batch) {\n try {\n // Get full trace data\n const trace = this.getTraceData(candidate.traceId);\n if (!trace) {\n throw new Error(`Trace ${candidate.traceId} not found`);\n }\n \n // Compress based on level\n const compressed = this.compressTrace(trace, candidate.compressionLevel);\n \n // Upload to S3\n if (this.s3Client) {\n const s3Key = this.generateS3Key(candidate);\n await this.uploadToS3(s3Key, compressed);\n \n // Record migration\n this.recordMigration(candidate, s3Key, trace, compressed);\n } else {\n // Local simulation for testing\n this.recordMigration(candidate, 'simulated', trace, compressed);\n }\n \n // Optionally remove from local after successful migration\n if (candidate.tier === StorageTier.COLD || \n candidate.tier === StorageTier.ARCHIVE) {\n this.removeLocalTrace(candidate.traceId);\n }\n \n results.success++;\n results.totalSize += candidate.size;\n \n } catch (error) {\n results.failed++;\n results.errors.push(\n `Failed to migrate ${candidate.traceId}: ${error}`\n );\n logger.error('Migration failed for trace', { \n traceId: candidate.traceId, \n error \n });\n }\n }\n \n return results;\n }\n \n /**\n * Get full trace data for migration\n */\n private getTraceData(traceId: string): any {\n const traceRow = this.localDb.prepare(\n 'SELECT * FROM traces WHERE id = ?'\n ).get(traceId);\n \n if (!traceRow) return null;\n \n const toolCalls = this.localDb.prepare(\n 'SELECT * FROM tool_calls WHERE trace_id = ? 
ORDER BY sequence_number'\n ).all(traceId);\n \n return {\n trace: traceRow,\n toolCalls,\n };\n }\n \n /**\n * Compress trace based on compression level\n */\n private compressTrace(\n data: any,\n level: 'none' | 'light' | 'medium' | 'heavy'\n ): Buffer {\n let jsonData = JSON.stringify(data);\n \n // Apply different compression based on level\n switch (level) {\n case 'none':\n return Buffer.from(jsonData);\n \n case 'light':\n // Remove formatting, keep all data\n return Buffer.from(JSON.stringify(JSON.parse(jsonData)));\n \n case 'medium':\n // Remove null fields and compress\n const cleaned = JSON.parse(jsonData, (key, value) => \n value === null || value === undefined ? undefined : value\n );\n return Buffer.from(JSON.stringify(cleaned));\n \n case 'heavy':\n // Remove tool results and arguments, keep only essential\n const minimal = {\n id: data.trace.id,\n type: data.trace.type,\n score: data.trace.score,\n summary: data.trace.summary,\n timestamps: {\n start: data.trace.start_time,\n end: data.trace.end_time,\n },\n toolCount: data.toolCalls.length,\n toolTypes: [...new Set(data.toolCalls.map((t: any) => t.tool))],\n };\n return Buffer.from(JSON.stringify(minimal));\n \n default:\n return Buffer.from(jsonData);\n }\n }\n \n /**\n * Generate S3 key for trace\n */\n private generateS3Key(candidate: MigrationCandidate): string {\n const date = new Date(Date.now() - candidate.age * 60 * 60 * 1000);\n const year = date.getFullYear();\n const month = String(date.getMonth() + 1).padStart(2, '0');\n const day = String(date.getDate()).padStart(2, '0');\n \n return `traces/${year}/${month}/${day}/${candidate.tier}/${candidate.traceId}.json`;\n }\n \n /**\n * Upload to S3\n */\n private async uploadToS3(key: string, data: Buffer): Promise<void> {\n if (!this.s3Client) {\n throw new Error('S3 client not configured');\n }\n \n const command = new PutObjectCommand({\n Bucket: this.config.s3.bucket,\n Key: key,\n Body: data,\n ContentType: 'application/json',\n Metadata: {\n 'trace-version': '1.0',\n 'compression': 'true',\n },\n });\n \n await this.s3Client.send(command);\n }\n \n /**\n * Record migration in local database\n */\n private recordMigration(\n candidate: MigrationCandidate,\n s3Key: string,\n originalData: any,\n compressedData: Buffer\n ): void {\n const stmt = this.localDb.prepare(`\n INSERT INTO remote_migrations (\n trace_id, migrated_at, storage_tier, s3_key,\n compression_level, original_size, compressed_size\n ) VALUES (?, ?, ?, ?, ?, ?, ?)\n `);\n \n stmt.run(\n candidate.traceId,\n Date.now(),\n candidate.tier,\n s3Key,\n candidate.compressionLevel,\n JSON.stringify(originalData).length,\n compressedData.length\n );\n }\n \n /**\n * Remove local trace after migration\n */\n private removeLocalTrace(traceId: string): void {\n this.localDb.prepare('DELETE FROM tool_calls WHERE trace_id = ?').run(traceId);\n this.localDb.prepare('DELETE FROM traces WHERE id = ?').run(traceId);\n }\n \n /**\n * Get current local storage size\n */\n private getLocalStorageSize(): number {\n const result = this.localDb.prepare(`\n SELECT \n SUM(LENGTH(compressed_data)) +\n COALESCE((SELECT SUM(LENGTH(arguments) + LENGTH(result)) \n FROM tool_calls), 0) as total_size\n FROM traces\n `).get() as any;\n \n return result?.total_size || 0;\n }\n \n /**\n * Create batches from candidates\n */\n private createBatches<T>(items: T[], batchSize: number): T[][] {\n const batches: T[][] = [];\n for (let i = 0; i < items.length; i += batchSize) {\n batches.push(items.slice(i, i + batchSize));\n }\n return 
batches;\n }\n \n /**\n * Retrieve trace from remote storage\n */\n async retrieveTrace(traceId: string): Promise<any> {\n const migration = this.localDb.prepare(`\n SELECT * FROM remote_migrations WHERE trace_id = ?\n `).get(traceId) as any;\n \n if (!migration) {\n throw new Error(`Trace ${traceId} not found in remote storage`);\n }\n \n // Update retrieval count\n this.localDb.prepare(`\n UPDATE remote_migrations \n SET retrieval_count = retrieval_count + 1, last_retrieved = ?\n WHERE trace_id = ?\n `).run(Date.now(), traceId);\n \n if (!this.s3Client) {\n throw new Error('S3 client not configured');\n }\n \n // Retrieve from S3\n const command = new GetObjectCommand({\n Bucket: this.config.s3.bucket,\n Key: migration.s3_key,\n });\n \n const response = await this.s3Client.send(command);\n const data = await response.Body?.transformToString();\n \n if (!data) {\n throw new Error('No data retrieved from S3');\n }\n \n return JSON.parse(data);\n }\n \n /**\n * Get migration statistics\n */\n getMigrationStats(): any {\n const stats = this.localDb.prepare(`\n SELECT \n storage_tier,\n COUNT(*) as count,\n SUM(original_size) as original_size,\n SUM(compressed_size) as compressed_size,\n AVG(retrieval_count) as avg_retrievals\n FROM remote_migrations\n GROUP BY storage_tier\n `).all();\n \n const total = this.localDb.prepare(`\n SELECT \n COUNT(*) as total_migrated,\n SUM(original_size) as total_original,\n SUM(compressed_size) as total_compressed\n FROM remote_migrations\n `).get();\n \n return {\n byTier: stats,\n total,\n compressionRatio: total \n ? (1 - (total as any).total_compressed / (total as any).total_original).toFixed(2)\n : 0,\n localSize: this.getLocalStorageSize(),\n };\n }\n}"],
- "mappings": "AAKA,SAAS,UAAU,kBAAkB,wBAA6C;AAClF,SAAS,eAAe;AACxB,SAAS,cAAc;AAIhB,IAAK,cAAL,kBAAKA,iBAAL;AACL,EAAAA,aAAA,SAAM;AACN,EAAAA,aAAA,cAAW;AACX,EAAAA,aAAA,cAAW;AACX,EAAAA,aAAA,aAAU;AAJA,SAAAA;AAAA,GAAA;AAuCL,MAAM,wBAA6C;AAAA,EACxD,UAAU;AAAA;AAAA,EACV,KAAK;AAAA,IACH,YAAY;AAAA,IACZ,WAAW,QAAQ,IAAI,kBAAkB;AAAA,EAC3C;AAAA,EACA,YAAY;AAAA,IACV,MAAM;AAAA;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,UAAU;AAAA,EACZ;AAAA,EACA,WAAW;AAAA,IACT,WAAW;AAAA,IACX,aAAa;AAAA;AAAA,IACb,kBAAkB;AAAA;AAAA,IAClB,kBAAkB;AAAA;AAAA,IAClB,iBAAiB;AAAA;AAAA,IACjB,gBAAgB;AAAA,EAClB;AACF;AAeO,MAAM,qBAAqB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA,sBAAsB;AAAA,EAE9B,YACE,SACA,QACA;AACA,SAAK,UAAU;AACf,SAAK,SAAS,EAAE,GAAG,uBAAuB,GAAG,OAAO;AAEpD,SAAK,wBAAwB;AAC7B,SAAK,iBAAiB;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKQ,0BAAgC;AACtC,YAAQ,KAAK,OAAO,UAAU;AAAA,MAC5B,KAAK;AACH,YAAI,KAAK,OAAO,KAAK;AACnB,eAAK,gBAAgB,IAAI,QAAQ;AAAA,YAC/B,WAAW,KAAK,OAAO,IAAI;AAAA,YAC3B,aAAa,KAAK,OAAO,IAAI;AAAA,UAC/B,CAAC;AAAA,QACH;AACA;AAAA,MAEF,KAAK;AAAA,MACL,KAAK;AACH,YAAI,KAAK,OAAO,IAAI,eAAe,KAAK,OAAO,IAAI,iBAAiB;AAClE,eAAK,gBAAgB,IAAI,SAAS;AAAA,YAChC,QAAQ,KAAK,OAAO,GAAG;AAAA,YACvB,aAAa;AAAA,cACX,aAAa,KAAK,OAAO,GAAG;AAAA,cAC5B,iBAAiB,KAAK,OAAO,GAAG;AAAA,YAClC;AAAA,YACA,UAAU,KAAK,OAAO,GAAG;AAAA;AAAA,UAC3B,CAAC;AAAA,QACH;AACA;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAcjB;AAED,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA,KAGjB;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,8BAA6D;AACjE,UAAM,MAAM,KAAK,IAAI;AAGrB,UAAM,SAAS,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAanC,EAAE,IAAI;AAEP,UAAM,aAAmC,CAAC;AAE1C,eAAW,SAAS,QAAQ;AAC1B,YAAM,YAAY,MAAM,MAAM,eAAe,MAAO,KAAK;AACzD,YAAM,YAAY,KAAK;AAAA,QACrB,MAAM;AAAA,QACN;AAAA,QACA,MAAM;AAAA,QACN,MAAM,QAAQ;AAAA,MAChB;AAEA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,SACA,UACA,OACA,MACoB;AACpB,QAAI,OAAO;AACX,QAAI,gBAAgB;AACpB,QAAI,mBAA0D;AAG9D,QAAI,WAAW,KAAK,OAAO,UAAU,iBAAiB;AAEpD,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,kBAAkB;AAE5D,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,kBAAkB;AAE5D,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,aAAa;AAEvD,aAAO;AACP,UAAI,QAAQ,KAAK,OAAO,UAAU,gBAAgB;AAChD,wBAAgB;AAChB,2BAAmB;AAAA,MACrB;AAAA,IACF;AAGA,UAAM,iBAAiB,IAAI,OAAO,OAAO;AACzC,UAAM,mBAAmB,KAAK,oBAAoB;AAElD,QAAI,mBAAmB,iBAAiB,MAAM;AAE5C,sBAAgB;AAChB,UAAI,qBAAqB,QAAQ;AAC/B,2BAAmB;AAAA,MACrB;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cACJ,YACA,SAAkB,OAMjB;AACD,QAAI,KAAK,qBAAqB;AAC5B,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,WAAW;AAAA,QACX,QAAQ,CAAC,+BAA+B;AAAA,MAC1C;AAAA,IACF;AAEA,SAAK,sBAAsB;AAC3B,UAAM,UAAU;AAAA,MACd,UAAU;AAAA,MACV,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AAEF,YAAM,YAAY,WAAW,OAAO,OAAK,EAAE,aAAa;AACxD,YAAM,UAAU,KAAK,cAAc,WAAW,KAAK,OAAO,UAAU,SAAS;AAE7E,iBAAW,SAAS,SAAS;AAC3B,YAAI,QAAQ;AACV,iBAAO,KAAK,iCAAiC;AAAA,YAC3C,OAAO,MAAM;AAAA,YACb,WAAW,MAAM,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,MAAM,CAAC;AAAA,UACrD,CAAC;AACD,kBAAQ,YAAY,MAAM;AAC1B;AAAA,QACF;AAEA,cAAM,eAAe,MAAM,KAAK,aAAa,KAAK;AAClD,gBAAQ,YAAY,aAAa;AACjC,gBAAQ,UAAU,aAAa;AAC/B,gBAAQ,aAAa,aAAa;AAClC,gBAAQ,OAAO,KAAK,GAAG,aAAa,MAAM;AAG1C,cAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,GAAG,CAAC;AAAA,MACvD;AAAA,IAEF,UAAE;AACA,WAAK,sBAAsB;AAAA,IAC7B;AAEA,WAAO,KAAK,uBAAuB,OAAO;AAC1C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,OAMC;AACD,UAAM,UAAU;AAAA,MACd,SAAS;A
AAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,QAAQ,CAAC;AAAA,IACX;AAEA,eAAW,aAAa,OAAO;AAC7B,UAAI;AAEF,cAAM,QAAQ,KAAK,aAAa,UAAU,OAAO;AACjD,YAAI,CAAC,OAAO;AACV,gBAAM,IAAI,MAAM,SAAS,UAAU,OAAO,YAAY;AAAA,QACxD;AAGA,cAAM,aAAa,KAAK,cAAc,OAAO,UAAU,gBAAgB;AAGvE,YAAI,KAAK,UAAU;AACjB,gBAAM,QAAQ,KAAK,cAAc,SAAS;AAC1C,gBAAM,KAAK,WAAW,OAAO,UAAU;AAGvC,eAAK,gBAAgB,WAAW,OAAO,OAAO,UAAU;AAAA,QAC1D,OAAO;AAEL,eAAK,gBAAgB,WAAW,aAAa,OAAO,UAAU;AAAA,QAChE;AAGA,YAAI,UAAU,SAAS,YAAY,QAC/B,UAAU,SAAS,yBAAqB;AAC1C,eAAK,iBAAiB,UAAU,OAAO;AAAA,QACzC;AAEA,gBAAQ;AACR,gBAAQ,aAAa,UAAU;AAAA,MAEjC,SAAS,OAAO;AACd,gBAAQ;AACR,gBAAQ,OAAO;AAAA,UACb,qBAAqB,UAAU,OAAO,KAAK,KAAK;AAAA,QAClD;AACA,eAAO,MAAM,8BAA8B;AAAA,UACzC,SAAS,UAAU;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAsB;AACzC,UAAM,WAAW,KAAK,QAAQ;AAAA,MAC5B;AAAA,IACF,EAAE,IAAI,OAAO;AAEb,QAAI,CAAC,SAAU,QAAO;AAEtB,UAAM,YAAY,KAAK,QAAQ;AAAA,MAC7B;AAAA,IACF,EAAE,IAAI,OAAO;AAEb,WAAO;AAAA,MACL,OAAO;AAAA,MACP;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,MACA,OACQ;AACR,QAAI,WAAW,KAAK,UAAU,IAAI;AAGlC,YAAQ,OAAO;AAAA,MACb,KAAK;AACH,eAAO,OAAO,KAAK,QAAQ;AAAA,MAE7B,KAAK;AAEH,eAAO,OAAO,KAAK,KAAK,UAAU,KAAK,MAAM,QAAQ,CAAC,CAAC;AAAA,MAEzD,KAAK;AAEH,cAAM,UAAU,KAAK;AAAA,UAAM;AAAA,UAAU,CAAC,KAAK,UACzC,UAAU,QAAQ,UAAU,SAAY,SAAY;AAAA,QACtD;AACA,eAAO,OAAO,KAAK,KAAK,UAAU,OAAO,CAAC;AAAA,MAE5C,KAAK;AAEH,cAAM,UAAU;AAAA,UACd,IAAI,KAAK,MAAM;AAAA,UACf,MAAM,KAAK,MAAM;AAAA,UACjB,OAAO,KAAK,MAAM;AAAA,UAClB,SAAS,KAAK,MAAM;AAAA,UACpB,YAAY;AAAA,YACV,OAAO,KAAK,MAAM;AAAA,YAClB,KAAK,KAAK,MAAM;AAAA,UAClB;AAAA,UACA,WAAW,KAAK,UAAU;AAAA,UAC1B,WAAW,CAAC,GAAG,IAAI,IAAI,KAAK,UAAU,IAAI,CAAC,MAAW,EAAE,IAAI,CAAC,CAAC;AAAA,QAChE;AACA,eAAO,OAAO,KAAK,KAAK,UAAU,OAAO,CAAC;AAAA,MAE5C;AACE,eAAO,OAAO,KAAK,QAAQ;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,WAAuC;AAC3D,UAAM,OAAO,IAAI,KAAK,KAAK,IAAI,IAAI,UAAU,MAAM,KAAK,KAAK,GAAI;AACjE,UAAM,OAAO,KAAK,YAAY;AAC9B,UAAM,QAAQ,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AACzD,UAAM,MAAM,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAElD,WAAO,UAAU,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,UAAU,IAAI,IAAI,UAAU,OAAO;AAAA,EAC9E;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,KAAa,MAA6B;AACjE,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,UAAM,UAAU,IAAI,iBAAiB;AAAA,MACnC,QAAQ,KAAK,OAAO,GAAG;AAAA,MACvB,KAAK;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,QACR,iBAAiB;AAAA,QACjB,eAAe;AAAA,MACjB;AAAA,IACF,CAAC;AAED,UAAM,KAAK,SAAS,KAAK,OAAO;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,WACA,OACA,cACA,gBACM;AACN,UAAM,OAAO,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,KAKjC;AAED,SAAK;AAAA,MACH,UAAU;AAAA,MACV,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,MACV;AAAA,MACA,UAAU;AAAA,MACV,KAAK,UAAU,YAAY,EAAE;AAAA,MAC7B,eAAe;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,SAAuB;AAC9C,SAAK,QAAQ,QAAQ,2CAA2C,EAAE,IAAI,OAAO;AAC7E,SAAK,QAAQ,QAAQ,iCAAiC,EAAE,IAAI,OAAO;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAA8B;AACpC,UAAM,SAAS,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAMnC,EAAE,IAAI;AAEP,WAAO,QAAQ,cAAc;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAiB,OAAY,WAA0B;AAC7D,UAAM,UAAiB,CAAC;AACxB,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,cAAQ,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,SAA+B;AACjD,UAAM,YAAY,KAAK,QAAQ,QAAQ;AAAA;AAAA,KAEtC,EAAE,IAAI,OAAO;AAEd,QAAI,CAAC,WAAW;AACd,YAAM,IAAI,MAAM,SAAS,OAAO,8BAA8B;AAAA,IAChE;AAGA,SAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA,KAIpB,EAAE,IAAI,KAAK,IAAI,GAAG,OAAO;AAE1B,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAGA,UAAM,UAAU,IAAI,iBAAiB;AAAA,MACnC,QAAQ,KAAK,OAAO,GAAG;AAAA,MACvB,KAAK,UAAU;AAAA,IACjB,CAAC;AAE
D,UAAM,WAAW,MAAM,KAAK,SAAS,KAAK,OAAO;AACjD,UAAM,OAAO,MAAM,SAAS,MAAM,kBAAkB;AAEpD,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAEA,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAyB;AACvB,UAAM,QAAQ,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KASlC,EAAE,IAAI;AAEP,UAAM,QAAQ,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAMlC,EAAE,IAAI;AAEP,WAAO;AAAA,MACL,QAAQ;AAAA,MACR;AAAA,MACA,kBAAkB,SACb,IAAK,MAAc,mBAAoB,MAAc,gBAAgB,QAAQ,CAAC,IAC/E;AAAA,MACJ,WAAW,KAAK,oBAAoB;AAAA,IACtC;AAAA,EACF;AACF;",
+ "sourcesContent": ["/**\n * Remote Storage Interface for Two-Tier Storage System\n * Implements infinite retention with TimeSeries DB + S3\n */\n\nimport { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3';\nimport { Storage } from '@google-cloud/storage';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace } from '../trace/types.js';\nimport Database from 'better-sqlite3';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\n\nexport enum StorageTier {\n HOT = 'hot', // < 7 days - Railway Buckets or GCS Standard\n NEARLINE = 'nearline', // 7-30 days - GCS Nearline ($0.01/GB)\n COLDLINE = 'coldline', // 30-90 days - GCS Coldline ($0.004/GB)\n ARCHIVE = 'archive' // > 90 days - GCS Archive ($0.0012/GB)\n}\n\nexport interface RemoteStorageConfig {\n provider: 'gcs' | 's3' | 'railway';\n gcs?: {\n bucketName: string;\n projectId: string;\n keyFilename?: string; // Path to service account key\n };\n s3?: {\n bucket: string;\n region: string;\n accessKeyId?: string;\n secretAccessKey?: string;\n endpoint?: string; // For Railway buckets or MinIO\n };\n timeseries: {\n type: 'clickhouse' | 'timescale' | 'influxdb' | 'sqlite'; // SQLite for dev\n host: string;\n port: number;\n database: string;\n username?: string;\n password?: string;\n };\n migration: {\n batchSize: number;\n hotAgeHours: number; // < 7 days\n nearlineAgeHours: number; // 7-30 days \n coldlineAgeHours: number; // 30-90 days\n archiveAgeHours: number; // > 90 days\n scoreThreshold: number; // Score threshold for early migration\n };\n}\n\nexport const DEFAULT_REMOTE_CONFIG: RemoteStorageConfig = {\n provider: 'gcs', // Default to GCS for better pricing\n gcs: {\n bucketName: 'stackmemory-traces',\n projectId: process.env['GCP_PROJECT_ID'] || 'stackmemory',\n },\n timeseries: {\n type: 'sqlite', // Use SQLite for development\n host: 'localhost',\n port: 0,\n database: 'stackmemory_timeseries',\n },\n migration: {\n batchSize: 100,\n hotAgeHours: 168, // 7 days\n nearlineAgeHours: 720, // 30 days\n coldlineAgeHours: 2160, // 90 days\n archiveAgeHours: 8760, // 365 days\n scoreThreshold: 0.4,\n }\n};\n\nexport interface MigrationCandidate {\n traceId: string;\n age: number;\n score: number;\n size: number;\n tier: StorageTier;\n shouldMigrate: boolean;\n compressionLevel: 'none' | 'light' | 'medium' | 'heavy';\n}\n\n/**\n * Remote storage manager for infinite trace retention\n */\nexport class RemoteStorageManager {\n private storageClient?: S3Client | Storage;\n private config: RemoteStorageConfig;\n private localDb: Database.Database;\n private migrationInProgress = false;\n \n constructor(\n localDb: Database.Database,\n config?: Partial<RemoteStorageConfig>\n ) {\n this.localDb = localDb;\n this.config = { ...DEFAULT_REMOTE_CONFIG, ...config };\n \n this.initializeStorageClient();\n this.initializeSchema();\n }\n \n /**\n * Initialize storage client based on provider\n */\n private initializeStorageClient(): void {\n switch (this.config.provider) {\n case 'gcs':\n if (this.config.gcs) {\n this.storageClient = new Storage({\n projectId: this.config.gcs.projectId,\n keyFilename: this.config.gcs.keyFilename,\n 
});\n }\n break;\n \n case 's3':\n case 'railway':\n if (this.config.s3?.accessKeyId && this.config.s3?.secretAccessKey) {\n this.storageClient = new S3Client({\n region: this.config.s3.region,\n credentials: {\n accessKeyId: this.config.s3.accessKeyId,\n secretAccessKey: this.config.s3.secretAccessKey,\n },\n endpoint: this.config.s3.endpoint, // Railway buckets endpoint\n });\n }\n break;\n }\n }\n \n /**\n * Initialize migration tracking schema\n */\n private initializeSchema(): void {\n this.localDb.exec(`\n CREATE TABLE IF NOT EXISTS remote_migrations (\n trace_id TEXT PRIMARY KEY,\n migrated_at INTEGER NOT NULL,\n storage_tier TEXT NOT NULL,\n s3_key TEXT,\n timeseries_id TEXT,\n compression_level TEXT,\n original_size INTEGER,\n compressed_size INTEGER,\n retrieval_count INTEGER DEFAULT 0,\n last_retrieved INTEGER,\n FOREIGN KEY (trace_id) REFERENCES traces(id) ON DELETE CASCADE\n )\n `);\n \n this.localDb.exec(`\n CREATE INDEX IF NOT EXISTS idx_migrations_tier ON remote_migrations(storage_tier);\n CREATE INDEX IF NOT EXISTS idx_migrations_migrated ON remote_migrations(migrated_at);\n `);\n }\n \n /**\n * Identify traces for migration based on age and importance\n */\n async identifyMigrationCandidates(): Promise<MigrationCandidate[]> {\n const now = Date.now();\n \n // Query all traces with their metadata\n const traces = this.localDb.prepare(`\n SELECT \n t.id,\n t.score,\n t.start_time,\n LENGTH(t.compressed_data) + \n COALESCE((SELECT SUM(LENGTH(tc.arguments) + LENGTH(tc.result)) \n FROM tool_calls tc WHERE tc.trace_id = t.id), 0) as size,\n rm.trace_id as already_migrated\n FROM traces t\n LEFT JOIN remote_migrations rm ON t.id = rm.trace_id\n WHERE rm.trace_id IS NULL -- Not already migrated\n ORDER BY t.start_time ASC\n `).all() as any[];\n \n const candidates: MigrationCandidate[] = [];\n \n for (const trace of traces) {\n const ageHours = (now - trace.start_time) / (1000 * 60 * 60);\n const candidate = this.evaluateTrace(\n trace.id,\n ageHours,\n trace.score,\n trace.size || 0\n );\n \n candidates.push(candidate);\n }\n \n return candidates;\n }\n \n /**\n * Evaluate a trace for migration based on GCS storage classes\n */\n private evaluateTrace(\n traceId: string,\n ageHours: number,\n score: number,\n size: number\n ): MigrationCandidate {\n let tier = StorageTier.HOT;\n let shouldMigrate = false;\n let compressionLevel: 'none' | 'light' | 'medium' | 'heavy' = 'none';\n \n // Determine storage tier based on age and GCS storage classes\n if (ageHours > this.config.migration.archiveAgeHours) {\n // GCS Archive: $0.0012/GB - accessed < once per year\n tier = StorageTier.ARCHIVE;\n shouldMigrate = true;\n compressionLevel = 'heavy';\n } else if (ageHours > this.config.migration.coldlineAgeHours) {\n // GCS Coldline: $0.004/GB - accessed < once per quarter \n tier = StorageTier.COLDLINE;\n shouldMigrate = true;\n compressionLevel = 'heavy';\n } else if (ageHours > this.config.migration.nearlineAgeHours) {\n // GCS Nearline: $0.01/GB - accessed < once per month\n tier = StorageTier.NEARLINE;\n shouldMigrate = true;\n compressionLevel = 'medium';\n } else if (ageHours > this.config.migration.hotAgeHours) {\n // Still hot but consider migration if low importance\n tier = StorageTier.HOT;\n if (score < this.config.migration.scoreThreshold) {\n shouldMigrate = true;\n compressionLevel = 'light';\n }\n }\n \n // Force migration for size pressure\n const localSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB\n const currentLocalSize = this.getLocalStorageSize();\n \n if (currentLocalSize > 
localSizeLimit * 0.75) {\n // Start migrating when 75% full\n shouldMigrate = true;\n if (compressionLevel === 'none') {\n compressionLevel = 'light';\n }\n }\n \n return {\n traceId,\n age: ageHours,\n score,\n size,\n tier,\n shouldMigrate,\n compressionLevel,\n };\n }\n \n /**\n * Migrate traces to remote storage\n */\n async migrateTraces(\n candidates: MigrationCandidate[],\n dryRun: boolean = false\n ): Promise<{\n migrated: number;\n failed: number;\n totalSize: number;\n errors: string[];\n }> {\n if (this.migrationInProgress) {\n return {\n migrated: 0,\n failed: 0,\n totalSize: 0,\n errors: ['Migration already in progress'],\n };\n }\n \n this.migrationInProgress = true;\n const results = {\n migrated: 0,\n failed: 0,\n totalSize: 0,\n errors: [] as string[],\n };\n \n try {\n // Process in batches\n const toMigrate = candidates.filter((c: any) => c.shouldMigrate);\n const batches = this.createBatches(toMigrate, this.config.migration.batchSize);\n \n for (const batch of batches) {\n if (dryRun) {\n logger.info('Dry run - would migrate batch', {\n count: batch.length,\n totalSize: batch.reduce((sum, c) => sum + c.size, 0),\n });\n results.migrated += batch.length;\n continue;\n }\n \n const batchResults = await this.migrateBatch(batch);\n results.migrated += batchResults.success;\n results.failed += batchResults.failed;\n results.totalSize += batchResults.totalSize;\n results.errors.push(...batchResults.errors);\n \n // Small delay between batches\n await new Promise(resolve => setTimeout(resolve, 100));\n }\n \n } finally {\n this.migrationInProgress = false;\n }\n \n logger.info('Migration completed', results);\n return results;\n }\n \n /**\n * Migrate a batch of traces\n */\n private async migrateBatch(\n batch: MigrationCandidate[]\n ): Promise<{\n success: number;\n failed: number;\n totalSize: number;\n errors: string[];\n }> {\n const results = {\n success: 0,\n failed: 0,\n totalSize: 0,\n errors: [] as string[],\n };\n \n for (const candidate of batch) {\n try {\n // Get full trace data\n const trace = this.getTraceData(candidate.traceId);\n if (!trace) {\n throw new Error(`Trace ${candidate.traceId} not found`);\n }\n \n // Compress based on level\n const compressed = this.compressTrace(trace, candidate.compressionLevel);\n \n // Upload to S3\n if (this.s3Client) {\n const s3Key = this.generateS3Key(candidate);\n await this.uploadToS3(s3Key, compressed);\n \n // Record migration\n this.recordMigration(candidate, s3Key, trace, compressed);\n } else {\n // Local simulation for testing\n this.recordMigration(candidate, 'simulated', trace, compressed);\n }\n \n // Optionally remove from local after successful migration\n if (candidate.tier === StorageTier.COLD || \n candidate.tier === StorageTier.ARCHIVE) {\n this.removeLocalTrace(candidate.traceId);\n }\n \n results.success++;\n results.totalSize += candidate.size;\n \n } catch (error: unknown) {\n results.failed++;\n results.errors.push(\n `Failed to migrate ${candidate.traceId}: ${error}`\n );\n logger.error('Migration failed for trace', { \n traceId: candidate.traceId, \n error \n });\n }\n }\n \n return results;\n }\n \n /**\n * Get full trace data for migration\n */\n private getTraceData(traceId: string): any {\n const traceRow = this.localDb.prepare(\n 'SELECT * FROM traces WHERE id = ?'\n ).get(traceId);\n \n if (!traceRow) return null;\n \n const toolCalls = this.localDb.prepare(\n 'SELECT * FROM tool_calls WHERE trace_id = ? 
ORDER BY sequence_number'\n ).all(traceId);\n \n return {\n trace: traceRow,\n toolCalls,\n };\n }\n \n /**\n * Compress trace based on compression level\n */\n private compressTrace(\n data: any,\n level: 'none' | 'light' | 'medium' | 'heavy'\n ): Buffer {\n let jsonData = JSON.stringify(data);\n \n // Apply different compression based on level\n switch (level) {\n case 'none':\n return Buffer.from(jsonData);\n \n case 'light':\n // Remove formatting, keep all data\n return Buffer.from(JSON.stringify(JSON.parse(jsonData)));\n \n case 'medium':\n // Remove null fields and compress\n const cleaned = JSON.parse(jsonData, (key, value) => \n value === null || value === undefined ? undefined : value\n );\n return Buffer.from(JSON.stringify(cleaned));\n \n case 'heavy':\n // Remove tool results and arguments, keep only essential\n const minimal = {\n id: data.trace.id,\n type: data.trace.type,\n score: data.trace.score,\n summary: data.trace.summary,\n timestamps: {\n start: data.trace.start_time,\n end: data.trace.end_time,\n },\n toolCount: data.toolCalls.length,\n toolTypes: [...new Set(data.toolCalls.map((t: any) => t.tool))],\n };\n return Buffer.from(JSON.stringify(minimal));\n \n default:\n return Buffer.from(jsonData);\n }\n }\n \n /**\n * Generate S3 key for trace\n */\n private generateS3Key(candidate: MigrationCandidate): string {\n const date = new Date(Date.now() - candidate.age * 60 * 60 * 1000);\n const year = date.getFullYear();\n const month = String(date.getMonth() + 1).padStart(2, '0');\n const day = String(date.getDate()).padStart(2, '0');\n \n return `traces/${year}/${month}/${day}/${candidate.tier}/${candidate.traceId}.json`;\n }\n \n /**\n * Upload to S3\n */\n private async uploadToS3(key: string, data: Buffer): Promise<void> {\n if (!this.s3Client) {\n throw new Error('S3 client not configured');\n }\n \n const command = new PutObjectCommand({\n Bucket: this.config.s3.bucket,\n Key: key,\n Body: data,\n ContentType: 'application/json',\n Metadata: {\n 'trace-version': '1.0',\n 'compression': 'true',\n },\n });\n \n await this.s3Client.send(command);\n }\n \n /**\n * Record migration in local database\n */\n private recordMigration(\n candidate: MigrationCandidate,\n s3Key: string,\n originalData: any,\n compressedData: Buffer\n ): void {\n const stmt = this.localDb.prepare(`\n INSERT INTO remote_migrations (\n trace_id, migrated_at, storage_tier, s3_key,\n compression_level, original_size, compressed_size\n ) VALUES (?, ?, ?, ?, ?, ?, ?)\n `);\n \n stmt.run(\n candidate.traceId,\n Date.now(),\n candidate.tier,\n s3Key,\n candidate.compressionLevel,\n JSON.stringify(originalData).length,\n compressedData.length\n );\n }\n \n /**\n * Remove local trace after migration\n */\n private removeLocalTrace(traceId: string): void {\n this.localDb.prepare('DELETE FROM tool_calls WHERE trace_id = ?').run(traceId);\n this.localDb.prepare('DELETE FROM traces WHERE id = ?').run(traceId);\n }\n \n /**\n * Get current local storage size\n */\n private getLocalStorageSize(): number {\n const result = this.localDb.prepare(`\n SELECT \n SUM(LENGTH(compressed_data)) +\n COALESCE((SELECT SUM(LENGTH(arguments) + LENGTH(result)) \n FROM tool_calls), 0) as total_size\n FROM traces\n `).get() as any;\n \n return result?.total_size || 0;\n }\n \n /**\n * Create batches from candidates\n */\n private createBatches<T>(items: T[], batchSize: number): T[][] {\n const batches: T[][] = [];\n for (let i = 0; i < items.length; i += batchSize) {\n batches.push(items.slice(i, i + batchSize));\n }\n return 
batches;\n }\n \n /**\n * Retrieve trace from remote storage\n */\n async retrieveTrace(traceId: string): Promise<any> {\n const migration = this.localDb.prepare(`\n SELECT * FROM remote_migrations WHERE trace_id = ?\n `).get(traceId) as any;\n \n if (!migration) {\n throw new Error(`Trace ${traceId} not found in remote storage`);\n }\n \n // Update retrieval count\n this.localDb.prepare(`\n UPDATE remote_migrations \n SET retrieval_count = retrieval_count + 1, last_retrieved = ?\n WHERE trace_id = ?\n `).run(Date.now(), traceId);\n \n if (!this.s3Client) {\n throw new Error('S3 client not configured');\n }\n \n // Retrieve from S3\n const command = new GetObjectCommand({\n Bucket: this.config.s3.bucket,\n Key: migration.s3_key,\n });\n \n const response = await this.s3Client.send(command);\n const data = await response.Body?.transformToString();\n \n if (!data) {\n throw new Error('No data retrieved from S3');\n }\n \n return JSON.parse(data);\n }\n \n /**\n * Get migration statistics\n */\n getMigrationStats(): any {\n const stats = this.localDb.prepare(`\n SELECT \n storage_tier,\n COUNT(*) as count,\n SUM(original_size) as original_size,\n SUM(compressed_size) as compressed_size,\n AVG(retrieval_count) as avg_retrievals\n FROM remote_migrations\n GROUP BY storage_tier\n `).all();\n \n const total = this.localDb.prepare(`\n SELECT \n COUNT(*) as total_migrated,\n SUM(original_size) as total_original,\n SUM(compressed_size) as total_compressed\n FROM remote_migrations\n `).get();\n \n return {\n byTier: stats,\n total,\n compressionRatio: total \n ? (1 - (total as any).total_compressed / (total as any).total_original).toFixed(2)\n : 0,\n localSize: this.getLocalStorageSize(),\n };\n }\n}"],
+ "mappings": "AAKA,SAAS,UAAU,kBAAkB,wBAA6C;AAClF,SAAS,eAAe;AACxB,SAAS,cAAc;AAIvB,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AAGO,IAAK,cAAL,kBAAKA,iBAAL;AACL,EAAAA,aAAA,SAAM;AACN,EAAAA,aAAA,cAAW;AACX,EAAAA,aAAA,cAAW;AACX,EAAAA,aAAA,aAAU;AAJA,SAAAA;AAAA,GAAA;AAuCL,MAAM,wBAA6C;AAAA,EACxD,UAAU;AAAA;AAAA,EACV,KAAK;AAAA,IACH,YAAY;AAAA,IACZ,WAAW,QAAQ,IAAI,gBAAgB,KAAK;AAAA,EAC9C;AAAA,EACA,YAAY;AAAA,IACV,MAAM;AAAA;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,UAAU;AAAA,EACZ;AAAA,EACA,WAAW;AAAA,IACT,WAAW;AAAA,IACX,aAAa;AAAA;AAAA,IACb,kBAAkB;AAAA;AAAA,IAClB,kBAAkB;AAAA;AAAA,IAClB,iBAAiB;AAAA;AAAA,IACjB,gBAAgB;AAAA,EAClB;AACF;AAeO,MAAM,qBAAqB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA,sBAAsB;AAAA,EAE9B,YACE,SACA,QACA;AACA,SAAK,UAAU;AACf,SAAK,SAAS,EAAE,GAAG,uBAAuB,GAAG,OAAO;AAEpD,SAAK,wBAAwB;AAC7B,SAAK,iBAAiB;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKQ,0BAAgC;AACtC,YAAQ,KAAK,OAAO,UAAU;AAAA,MAC5B,KAAK;AACH,YAAI,KAAK,OAAO,KAAK;AACnB,eAAK,gBAAgB,IAAI,QAAQ;AAAA,YAC/B,WAAW,KAAK,OAAO,IAAI;AAAA,YAC3B,aAAa,KAAK,OAAO,IAAI;AAAA,UAC/B,CAAC;AAAA,QACH;AACA;AAAA,MAEF,KAAK;AAAA,MACL,KAAK;AACH,YAAI,KAAK,OAAO,IAAI,eAAe,KAAK,OAAO,IAAI,iBAAiB;AAClE,eAAK,gBAAgB,IAAI,SAAS;AAAA,YAChC,QAAQ,KAAK,OAAO,GAAG;AAAA,YACvB,aAAa;AAAA,cACX,aAAa,KAAK,OAAO,GAAG;AAAA,cAC5B,iBAAiB,KAAK,OAAO,GAAG;AAAA,YAClC;AAAA,YACA,UAAU,KAAK,OAAO,GAAG;AAAA;AAAA,UAC3B,CAAC;AAAA,QACH;AACA;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAcjB;AAED,SAAK,QAAQ,KAAK;AAAA;AAAA;AAAA,KAGjB;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,8BAA6D;AACjE,UAAM,MAAM,KAAK,IAAI;AAGrB,UAAM,SAAS,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAanC,EAAE,IAAI;AAEP,UAAM,aAAmC,CAAC;AAE1C,eAAW,SAAS,QAAQ;AAC1B,YAAM,YAAY,MAAM,MAAM,eAAe,MAAO,KAAK;AACzD,YAAM,YAAY,KAAK;AAAA,QACrB,MAAM;AAAA,QACN;AAAA,QACA,MAAM;AAAA,QACN,MAAM,QAAQ;AAAA,MAChB;AAEA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,SACA,UACA,OACA,MACoB;AACpB,QAAI,OAAO;AACX,QAAI,gBAAgB;AACpB,QAAI,mBAA0D;AAG9D,QAAI,WAAW,KAAK,OAAO,UAAU,iBAAiB;AAEpD,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,kBAAkB;AAE5D,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,kBAAkB;AAE5D,aAAO;AACP,sBAAgB;AAChB,yBAAmB;AAAA,IACrB,WAAW,WAAW,KAAK,OAAO,UAAU,aAAa;AAEvD,aAAO;AACP,UAAI,QAAQ,KAAK,OAAO,UAAU,gBAAgB;AAChD,wBAAgB;AAChB,2BAAmB;AAAA,MACrB;AAAA,IACF;AAGA,UAAM,iBAAiB,IAAI,OAAO,OAAO;AACzC,UAAM,mBAAmB,KAAK,oBAAoB;AAElD,QAAI,mBAAmB,iBAAiB,MAAM;AAE5C,sBAAgB;AAChB,UAAI,qBAAqB,QAAQ;AAC/B,2BAAmB;AAAA,MACrB;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cACJ,YACA,SAAkB,OAMjB;AACD,QAAI,KAAK,qBAAqB;AAC5B,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,WAAW;AAAA,QACX,QAAQ,CAAC,+BAA+B;AAAA,MAC1C;AAAA,IACF;AAEA,SAAK,sBAAsB;AAC3B,UAAM,UAAU;AAAA,MACd,UAAU;AAAA,MACV,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AAEF,YAAM,YAAY,WAAW,OAAO,CAAC,MAAW,EAAE,aAAa;AAC/D,YAAM,UAAU,KAAK,cAAc,WAAW,KAAK,OAAO,UAAU,SAAS;AAE7E,iBAAW,SAAS,SAAS;AAC3B,YAAI,QAAQ;AACV,iBAAO,KAAK,iCAAiC;AAAA,YAC3C,OAAO,MAAM;AAAA,YACb,WAAW,MAAM,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,MAAM,CAAC;AAAA,UACrD,CAAC;AACD,kBAAQ,YAAY,MAAM;AAC1B;AAAA,QACF;AAEA,cAAM,eAAe,MAAM,KAAK,aAAa,KAAK;AAClD,gBAAQ,YAAY,aAAa;AACjC,gBAAQ,UAAU,aAAa;AAC/B,gBAAQ,aAAa,aAAa;AAClC,gBAAQ,OAAO,KAAK,G
AAG,aAAa,MAAM;AAG1C,cAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,GAAG,CAAC;AAAA,MACvD;AAAA,IAEF,UAAE;AACA,WAAK,sBAAsB;AAAA,IAC7B;AAEA,WAAO,KAAK,uBAAuB,OAAO;AAC1C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,OAMC;AACD,UAAM,UAAU;AAAA,MACd,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,QAAQ,CAAC;AAAA,IACX;AAEA,eAAW,aAAa,OAAO;AAC7B,UAAI;AAEF,cAAM,QAAQ,KAAK,aAAa,UAAU,OAAO;AACjD,YAAI,CAAC,OAAO;AACV,gBAAM,IAAI,MAAM,SAAS,UAAU,OAAO,YAAY;AAAA,QACxD;AAGA,cAAM,aAAa,KAAK,cAAc,OAAO,UAAU,gBAAgB;AAGvE,YAAI,KAAK,UAAU;AACjB,gBAAM,QAAQ,KAAK,cAAc,SAAS;AAC1C,gBAAM,KAAK,WAAW,OAAO,UAAU;AAGvC,eAAK,gBAAgB,WAAW,OAAO,OAAO,UAAU;AAAA,QAC1D,OAAO;AAEL,eAAK,gBAAgB,WAAW,aAAa,OAAO,UAAU;AAAA,QAChE;AAGA,YAAI,UAAU,SAAS,YAAY,QAC/B,UAAU,SAAS,yBAAqB;AAC1C,eAAK,iBAAiB,UAAU,OAAO;AAAA,QACzC;AAEA,gBAAQ;AACR,gBAAQ,aAAa,UAAU;AAAA,MAEjC,SAAS,OAAgB;AACvB,gBAAQ;AACR,gBAAQ,OAAO;AAAA,UACb,qBAAqB,UAAU,OAAO,KAAK,KAAK;AAAA,QAClD;AACA,eAAO,MAAM,8BAA8B;AAAA,UACzC,SAAS,UAAU;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAAsB;AACzC,UAAM,WAAW,KAAK,QAAQ;AAAA,MAC5B;AAAA,IACF,EAAE,IAAI,OAAO;AAEb,QAAI,CAAC,SAAU,QAAO;AAEtB,UAAM,YAAY,KAAK,QAAQ;AAAA,MAC7B;AAAA,IACF,EAAE,IAAI,OAAO;AAEb,WAAO;AAAA,MACL,OAAO;AAAA,MACP;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,MACA,OACQ;AACR,QAAI,WAAW,KAAK,UAAU,IAAI;AAGlC,YAAQ,OAAO;AAAA,MACb,KAAK;AACH,eAAO,OAAO,KAAK,QAAQ;AAAA,MAE7B,KAAK;AAEH,eAAO,OAAO,KAAK,KAAK,UAAU,KAAK,MAAM,QAAQ,CAAC,CAAC;AAAA,MAEzD,KAAK;AAEH,cAAM,UAAU,KAAK;AAAA,UAAM;AAAA,UAAU,CAAC,KAAK,UACzC,UAAU,QAAQ,UAAU,SAAY,SAAY;AAAA,QACtD;AACA,eAAO,OAAO,KAAK,KAAK,UAAU,OAAO,CAAC;AAAA,MAE5C,KAAK;AAEH,cAAM,UAAU;AAAA,UACd,IAAI,KAAK,MAAM;AAAA,UACf,MAAM,KAAK,MAAM;AAAA,UACjB,OAAO,KAAK,MAAM;AAAA,UAClB,SAAS,KAAK,MAAM;AAAA,UACpB,YAAY;AAAA,YACV,OAAO,KAAK,MAAM;AAAA,YAClB,KAAK,KAAK,MAAM;AAAA,UAClB;AAAA,UACA,WAAW,KAAK,UAAU;AAAA,UAC1B,WAAW,CAAC,GAAG,IAAI,IAAI,KAAK,UAAU,IAAI,CAAC,MAAW,EAAE,IAAI,CAAC,CAAC;AAAA,QAChE;AACA,eAAO,OAAO,KAAK,KAAK,UAAU,OAAO,CAAC;AAAA,MAE5C;AACE,eAAO,OAAO,KAAK,QAAQ;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,WAAuC;AAC3D,UAAM,OAAO,IAAI,KAAK,KAAK,IAAI,IAAI,UAAU,MAAM,KAAK,KAAK,GAAI;AACjE,UAAM,OAAO,KAAK,YAAY;AAC9B,UAAM,QAAQ,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AACzD,UAAM,MAAM,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAElD,WAAO,UAAU,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,UAAU,IAAI,IAAI,UAAU,OAAO;AAAA,EAC9E;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,KAAa,MAA6B;AACjE,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,UAAM,UAAU,IAAI,iBAAiB;AAAA,MACnC,QAAQ,KAAK,OAAO,GAAG;AAAA,MACvB,KAAK;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,QACR,iBAAiB;AAAA,QACjB,eAAe;AAAA,MACjB;AAAA,IACF,CAAC;AAED,UAAM,KAAK,SAAS,KAAK,OAAO;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,WACA,OACA,cACA,gBACM;AACN,UAAM,OAAO,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,KAKjC;AAED,SAAK;AAAA,MACH,UAAU;AAAA,MACV,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,MACV;AAAA,MACA,UAAU;AAAA,MACV,KAAK,UAAU,YAAY,EAAE;AAAA,MAC7B,eAAe;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,SAAuB;AAC9C,SAAK,QAAQ,QAAQ,2CAA2C,EAAE,IAAI,OAAO;AAC7E,SAAK,QAAQ,QAAQ,iCAAiC,EAAE,IAAI,OAAO;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAA8B;AACpC,UAAM,SAAS,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAMnC,EAAE,IAAI;AAEP,WAAO,QAAQ,cAAc;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAiB,OAAY,WAA0B;AAC7D,UAAM,UAAiB,CAAC;AACxB,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,cAAQ,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,SAA+B;AACjD,UAAM,YAAY,KAAK,QAAQ,QAAQ;AAAA;AAAA,KAEtC,EAAE,IAAI,OAAO;AAEd,QAAI,CAAC,WAAW;AACd,YAAM,IAAI,MAAM,SAAS,OAAO,8BAA8B;AAAA,IAChE;AAGA
,SAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA,KAIpB,EAAE,IAAI,KAAK,IAAI,GAAG,OAAO;AAE1B,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAGA,UAAM,UAAU,IAAI,iBAAiB;AAAA,MACnC,QAAQ,KAAK,OAAO,GAAG;AAAA,MACvB,KAAK,UAAU;AAAA,IACjB,CAAC;AAED,UAAM,WAAW,MAAM,KAAK,SAAS,KAAK,OAAO;AACjD,UAAM,OAAO,MAAM,SAAS,MAAM,kBAAkB;AAEpD,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAEA,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAyB;AACvB,UAAM,QAAQ,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KASlC,EAAE,IAAI;AAEP,UAAM,QAAQ,KAAK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAMlC,EAAE,IAAI;AAEP,WAAO;AAAA,MACL,QAAQ;AAAA,MACR;AAAA,MACA,kBAAkB,SACb,IAAK,MAAc,mBAAoB,MAAc,gBAAgB,QAAQ,CAAC,IAC/E;AAAA,MACJ,WAAW,KAAK,oBAAoB;AAAA,IACtC;AAAA,EACF;AACF;",
  "names": ["StorageTier"]
  }
@@ -1,5 +1,16 @@
  import { trace } from "./debug-trace.js";
  import { logger } from "../monitoring/logger.js";
+ function getEnv(key, defaultValue) {
+ const value = process.env[key];
+ if (value === void 0) {
+ if (defaultValue !== void 0) return defaultValue;
+ throw new Error(`Environment variable ${key} is required`);
+ }
+ return value;
+ }
+ function getOptionalEnv(key) {
+ return process.env[key];
+ }
  function wrapCommand(command) {
  const originalAction = command.action.bind(command);
  command.action(async function(...args) {
@@ -12,9 +23,9 @@ function wrapCommand(command) {
  options: typeof options === "object" ? options : {},
  cwd: process.cwd(),
  env: {
- NODE_ENV: process.env.NODE_ENV,
- DEBUG_TRACE: process.env.DEBUG_TRACE,
- LINEAR_API_KEY: process.env.LINEAR_API_KEY ? "[SET]" : "[NOT SET]"
+ NODE_ENV: process.env["NODE_ENV"],
+ DEBUG_TRACE: process.env["DEBUG_TRACE"],
+ LINEAR_API_KEY: process.env["LINEAR_API_KEY"] ? "[SET]" : "[NOT SET]"
  },
  timestamp: (/* @__PURE__ */ new Date()).toISOString()
  };
@@ -25,7 +36,7 @@ function wrapCommand(command) {
  logger.info(`CLI Command Completed: ${commandPath}`, {
  duration: trace.exportTraces().find((t) => t.name === commandPath)?.duration
  });
- if (process.env.DEBUG_TRACE === "true") {
+ if (process.env["DEBUG_TRACE"] === "true") {
  console.log(trace.getExecutionSummary());
  }
  } catch (error) {
@@ -69,7 +80,7 @@ function wrapProgram(program) {
  exitCode: err.exitCode,
  command: process.argv.slice(2).join(" ")
  });
- if (process.env.DEBUG_TRACE === "true") {
+ if (process.env["DEBUG_TRACE"] === "true") {
  console.error("\n" + trace.getExecutionSummary());
  }
  process.exit(err.exitCode || 1);
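
The hunks above bundle two small environment helpers into the compiled CLI wrapper and switch direct property reads to bracket-notation lookups. A TypeScript restatement of those helpers, with purely illustrative usage lines (the specific variables read here are examples, not part of this diff):

// Helpers as they appear in the updated source (see the sourcesContent below);
// the usage lines are hypothetical and only show the intended calling pattern.
function getEnv(key: string, defaultValue?: string): string {
  const value = process.env[key];
  if (value === undefined) {
    if (defaultValue !== undefined) return defaultValue;
    throw new Error(`Environment variable ${key} is required`);
  }
  return value;
}

function getOptionalEnv(key: string): string | undefined {
  return process.env[key];
}

const traceOutput = getEnv("TRACE_OUTPUT", "console"); // required, with fallback
const homeDir = getOptionalEnv("HOME");                // optional, may be undefined
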
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/trace/cli-trace-wrapper.ts"],
- "sourcesContent": ["/**\n * CLI Command Trace Wrapper\n * Automatically wraps Commander.js commands with comprehensive tracing\n */\n\nimport { Command } from 'commander';\nimport { trace } from './debug-trace.js';\nimport { logger } from '../monitoring/logger.js';\n\nexport function wrapCommand(command: Command): Command {\n const originalAction = command.action.bind(command);\n \n command.action(async function(...args: any[]): Promise<void> {\n // Extract command path and options\n const commandPath = getCommandPath(command);\n const options = args[args.length - 1];\n const commandArgs = args.slice(0, -1);\n \n // Build comprehensive context\n const context = {\n command: commandPath,\n args: commandArgs,\n options: typeof options === 'object' ? options : {},\n cwd: process.cwd(),\n env: {\n NODE_ENV: process.env.NODE_ENV,\n DEBUG_TRACE: process.env.DEBUG_TRACE,\n LINEAR_API_KEY: process.env.LINEAR_API_KEY ? '[SET]' : '[NOT SET]',\n },\n timestamp: new Date().toISOString(),\n };\n \n // Log command start\n logger.info(`CLI Command: ${commandPath}`, context);\n \n // Wrap the actual action with tracing\n await trace.command(commandPath, context, async () => {\n try {\n // Call the original action with wrapped handler\n const result = await originalAction.apply(null, args as any);\n \n // Log successful completion\n logger.info(`CLI Command Completed: ${commandPath}`, {\n duration: trace.exportTraces().find(t => t.name === commandPath)?.duration,\n });\n \n // Show execution summary if verbose\n if (process.env.DEBUG_TRACE === 'true') {\n console.log(trace.getExecutionSummary());\n }\n } catch (error) {\n // Enhanced error logging for CLI commands\n logger.error(`CLI Command Failed: ${commandPath}`, error as Error, context);\n \n // Get the last error trace for debugging\n const lastError = trace.getLastError();\n if (lastError) {\n console.error('\\n\uD83D\uDCCD Error occurred at:');\n console.error(` ${lastError.name}`);\n if (lastError.params) {\n console.error(' With params:', JSON.stringify(lastError.params, null, 2));\n }\n console.error(' Error details:', lastError.error);\n }\n \n // Re-throw to maintain original error handling\n throw error;\n }\n });\n });\n \n // Recursively wrap subcommands\n command.commands.forEach(subcommand => {\n wrapCommand(subcommand);\n });\n \n return command;\n}\n\nfunction getCommandPath(command: Command): string {\n const parts: string[] = [];\n let current: Command | null = command;\n \n while (current) {\n if (current.name()) {\n parts.unshift(current.name());\n }\n current = current.parent as Command | null;\n }\n \n return parts.join(' ');\n}\n\n/**\n * Wrap the main program with comprehensive tracing\n */\nexport function wrapProgram(program: Command): Command {\n // Add global error handler with tracing\n program.exitOverride((err) => {\n if (err.code === 'commander.help' || err.code === 'commander.version') {\n // Normal help/version display, not an error\n process.exit(0);\n }\n \n // Log the error with full context\n logger.error('CLI Error', err, {\n code: err.code,\n exitCode: err.exitCode,\n command: process.argv.slice(2).join(' '),\n });\n \n // Show trace summary on error\n if (process.env.DEBUG_TRACE === 'true') {\n console.error('\\n' + trace.getExecutionSummary());\n }\n \n process.exit(err.exitCode || 1);\n });\n \n // Add pre-action hook for setup\n program.hook('preAction', (thisCommand) => {\n // Initialize trace context for this command\n trace.reset();\n \n // Log command invocation\n const commandPath = 
getCommandPath(thisCommand);\n logger.debug(`Preparing to execute: ${commandPath}`, {\n args: thisCommand.args,\n opts: thisCommand.opts(),\n });\n });\n \n // Add post-action hook for cleanup\n program.hook('postAction', (thisCommand) => {\n // Log completion\n const commandPath = getCommandPath(thisCommand);\n logger.debug(`Completed execution: ${commandPath}`);\n });\n \n // Wrap all existing commands\n program.commands.forEach(command => {\n wrapCommand(command);\n });\n \n return program;\n}\n\n/**\n * Helper to wrap async functions with step tracing\n */\nexport function traceStep<T>(name: string, fn: () => Promise<T>): Promise<T> {\n return trace.step(name, fn);\n}\n\n/**\n * Helper to wrap database queries\n */\nexport function traceQuery<T>(sql: string, params: any, fn: () => T): T {\n return trace.traceSync('query', sql.substring(0, 100), params, fn);\n}\n\n/**\n * Helper to wrap API calls\n */\nexport function traceAPI<T>(\n method: string,\n url: string,\n body: any,\n fn: () => Promise<T>\n): Promise<T> {\n return trace.api(method, url, body, fn);\n}"],
- "mappings": "AAMA,SAAS,aAAa;AACtB,SAAS,cAAc;AAEhB,SAAS,YAAY,SAA2B;AACrD,QAAM,iBAAiB,QAAQ,OAAO,KAAK,OAAO;AAElD,UAAQ,OAAO,kBAAkB,MAA4B;AAE3D,UAAM,cAAc,eAAe,OAAO;AAC1C,UAAM,UAAU,KAAK,KAAK,SAAS,CAAC;AACpC,UAAM,cAAc,KAAK,MAAM,GAAG,EAAE;AAGpC,UAAM,UAAU;AAAA,MACd,SAAS;AAAA,MACT,MAAM;AAAA,MACN,SAAS,OAAO,YAAY,WAAW,UAAU,CAAC;AAAA,MAClD,KAAK,QAAQ,IAAI;AAAA,MACjB,KAAK;AAAA,QACH,UAAU,QAAQ,IAAI;AAAA,QACtB,aAAa,QAAQ,IAAI;AAAA,QACzB,gBAAgB,QAAQ,IAAI,iBAAiB,UAAU;AAAA,MACzD;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAGA,WAAO,KAAK,gBAAgB,WAAW,IAAI,OAAO;AAGlD,UAAM,MAAM,QAAQ,aAAa,SAAS,YAAY;AACpD,UAAI;AAEF,cAAM,SAAS,MAAM,eAAe,MAAM,MAAM,IAAW;AAG3D,eAAO,KAAK,0BAA0B,WAAW,IAAI;AAAA,UACnD,UAAU,MAAM,aAAa,EAAE,KAAK,OAAK,EAAE,SAAS,WAAW,GAAG;AAAA,QACpE,CAAC;AAGD,YAAI,QAAQ,IAAI,gBAAgB,QAAQ;AACtC,kBAAQ,IAAI,MAAM,oBAAoB,CAAC;AAAA,QACzC;AAAA,MACF,SAAS,OAAO;AAEd,eAAO,MAAM,uBAAuB,WAAW,IAAI,OAAgB,OAAO;AAG1E,cAAM,YAAY,MAAM,aAAa;AACrC,YAAI,WAAW;AACb,kBAAQ,MAAM,gCAAyB;AACvC,kBAAQ,MAAM,MAAM,UAAU,IAAI,EAAE;AACpC,cAAI,UAAU,QAAQ;AACpB,oBAAQ,MAAM,mBAAmB,KAAK,UAAU,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA,UAC5E;AACA,kBAAQ,MAAM,qBAAqB,UAAU,KAAK;AAAA,QACpD;AAGA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAGD,UAAQ,SAAS,QAAQ,gBAAc;AACrC,gBAAY,UAAU;AAAA,EACxB,CAAC;AAED,SAAO;AACT;AAEA,SAAS,eAAe,SAA0B;AAChD,QAAM,QAAkB,CAAC;AACzB,MAAI,UAA0B;AAE9B,SAAO,SAAS;AACd,QAAI,QAAQ,KAAK,GAAG;AAClB,YAAM,QAAQ,QAAQ,KAAK,CAAC;AAAA,IAC9B;AACA,cAAU,QAAQ;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,GAAG;AACvB;AAKO,SAAS,YAAY,SAA2B;AAErD,UAAQ,aAAa,CAAC,QAAQ;AAC5B,QAAI,IAAI,SAAS,oBAAoB,IAAI,SAAS,qBAAqB;AAErE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAGA,WAAO,MAAM,aAAa,KAAK;AAAA,MAC7B,MAAM,IAAI;AAAA,MACV,UAAU,IAAI;AAAA,MACd,SAAS,QAAQ,KAAK,MAAM,CAAC,EAAE,KAAK,GAAG;AAAA,IACzC,CAAC;AAGD,QAAI,QAAQ,IAAI,gBAAgB,QAAQ;AACtC,cAAQ,MAAM,OAAO,MAAM,oBAAoB,CAAC;AAAA,IAClD;AAEA,YAAQ,KAAK,IAAI,YAAY,CAAC;AAAA,EAChC,CAAC;AAGD,UAAQ,KAAK,aAAa,CAAC,gBAAgB;AAEzC,UAAM,MAAM;AAGZ,UAAM,cAAc,eAAe,WAAW;AAC9C,WAAO,MAAM,yBAAyB,WAAW,IAAI;AAAA,MACnD,MAAM,YAAY;AAAA,MAClB,MAAM,YAAY,KAAK;AAAA,IACzB,CAAC;AAAA,EACH,CAAC;AAGD,UAAQ,KAAK,cAAc,CAAC,gBAAgB;AAE1C,UAAM,cAAc,eAAe,WAAW;AAC9C,WAAO,MAAM,wBAAwB,WAAW,EAAE;AAAA,EACpD,CAAC;AAGD,UAAQ,SAAS,QAAQ,aAAW;AAClC,gBAAY,OAAO;AAAA,EACrB,CAAC;AAED,SAAO;AACT;AAKO,SAAS,UAAa,MAAc,IAAkC;AAC3E,SAAO,MAAM,KAAK,MAAM,EAAE;AAC5B;AAKO,SAAS,WAAc,KAAa,QAAa,IAAgB;AACtE,SAAO,MAAM,UAAU,SAAS,IAAI,UAAU,GAAG,GAAG,GAAG,QAAQ,EAAE;AACnE;AAKO,SAAS,SACd,QACA,KACA,MACA,IACY;AACZ,SAAO,MAAM,IAAI,QAAQ,KAAK,MAAM,EAAE;AACxC;",
+ "sourcesContent": ["/**\n * CLI Command Trace Wrapper\n * Automatically wraps Commander.js commands with comprehensive tracing\n */\n\nimport { Command } from 'commander';\nimport { trace } from './debug-trace.js';\nimport { logger } from '../monitoring/logger.js';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\n\nexport function wrapCommand(command: Command): Command {\n const originalAction = command.action.bind(command);\n \n command.action(async function(...args: any[]): Promise<void> {\n // Extract command path and options\n const commandPath = getCommandPath(command);\n const options = args[args.length - 1];\n const commandArgs = args.slice(0, -1);\n \n // Build comprehensive context\n const context = {\n command: commandPath,\n args: commandArgs,\n options: typeof options === 'object' ? options : {},\n cwd: process.cwd(),\n env: {\n NODE_ENV: process.env['NODE_ENV'],\n DEBUG_TRACE: process.env['DEBUG_TRACE'],\n LINEAR_API_KEY: process.env['LINEAR_API_KEY'] ? '[SET]' : '[NOT SET]',\n },\n timestamp: new Date().toISOString(),\n };\n \n // Log command start\n logger.info(`CLI Command: ${commandPath}`, context);\n \n // Wrap the actual action with tracing\n await trace.command(commandPath, context, async () => {\n try {\n // Call the original action with wrapped handler\n const result = await originalAction.apply(null, args as any);\n \n // Log successful completion\n logger.info(`CLI Command Completed: ${commandPath}`, {\n duration: trace.exportTraces().find((t: any) => t.name === commandPath)?.duration,\n });\n \n // Show execution summary if verbose\n if (process.env['DEBUG_TRACE'] === 'true') {\n console.log(trace.getExecutionSummary());\n }\n } catch (error: unknown) {\n // Enhanced error logging for CLI commands\n logger.error(`CLI Command Failed: ${commandPath}`, error as Error, context);\n \n // Get the last error trace for debugging\n const lastError = trace.getLastError();\n if (lastError) {\n console.error('\\n\uD83D\uDCCD Error occurred at:');\n console.error(` ${lastError.name}`);\n if (lastError.params) {\n console.error(' With params:', JSON.stringify(lastError.params, null, 2));\n }\n console.error(' Error details:', lastError.error);\n }\n \n // Re-throw to maintain original error handling\n throw error;\n }\n });\n });\n \n // Recursively wrap subcommands\n command.commands.forEach(subcommand => {\n wrapCommand(subcommand);\n });\n \n return command;\n}\n\nfunction getCommandPath(command: Command): string {\n const parts: string[] = [];\n let current: Command | null = command;\n \n while (current) {\n if (current.name()) {\n parts.unshift(current.name());\n }\n current = current.parent as Command | null;\n }\n \n return parts.join(' ');\n}\n\n/**\n * Wrap the main program with comprehensive tracing\n */\nexport function wrapProgram(program: Command): Command {\n // Add global error handler with tracing\n program.exitOverride((err) => {\n if (err.code === 'commander.help' || err.code === 'commander.version') {\n // Normal help/version display, not an error\n process.exit(0);\n }\n \n // Log the error with full context\n logger.error('CLI Error', err, {\n code: err.code,\n exitCode: err.exitCode,\n command: 
process.argv.slice(2).join(' '),\n });\n \n // Show trace summary on error\n if (process.env['DEBUG_TRACE'] === 'true') {\n console.error('\\n' + trace.getExecutionSummary());\n }\n \n process.exit(err.exitCode || 1);\n });\n \n // Add pre-action hook for setup\n program.hook('preAction', (thisCommand) => {\n // Initialize trace context for this command\n trace.reset();\n \n // Log command invocation\n const commandPath = getCommandPath(thisCommand);\n logger.debug(`Preparing to execute: ${commandPath}`, {\n args: thisCommand.args,\n opts: thisCommand.opts(),\n });\n });\n \n // Add post-action hook for cleanup\n program.hook('postAction', (thisCommand) => {\n // Log completion\n const commandPath = getCommandPath(thisCommand);\n logger.debug(`Completed execution: ${commandPath}`);\n });\n \n // Wrap all existing commands\n program.commands.forEach(command => {\n wrapCommand(command);\n });\n \n return program;\n}\n\n/**\n * Helper to wrap async functions with step tracing\n */\nexport function traceStep<T>(name: string, fn: () => Promise<T>): Promise<T> {\n return trace.step(name, fn);\n}\n\n/**\n * Helper to wrap database queries\n */\nexport function traceQuery<T>(sql: string, params: any, fn: () => T): T {\n return trace.traceSync('query', sql.substring(0, 100), params, fn);\n}\n\n/**\n * Helper to wrap API calls\n */\nexport function traceAPI<T>(\n method: string,\n url: string,\n body: any,\n fn: () => Promise<T>\n): Promise<T> {\n return trace.api(method, url, body, fn);\n}"],
+ "mappings": "AAMA,SAAS,aAAa;AACtB,SAAS,cAAc;AAEvB,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AAGO,SAAS,YAAY,SAA2B;AACrD,QAAM,iBAAiB,QAAQ,OAAO,KAAK,OAAO;AAElD,UAAQ,OAAO,kBAAkB,MAA4B;AAE3D,UAAM,cAAc,eAAe,OAAO;AAC1C,UAAM,UAAU,KAAK,KAAK,SAAS,CAAC;AACpC,UAAM,cAAc,KAAK,MAAM,GAAG,EAAE;AAGpC,UAAM,UAAU;AAAA,MACd,SAAS;AAAA,MACT,MAAM;AAAA,MACN,SAAS,OAAO,YAAY,WAAW,UAAU,CAAC;AAAA,MAClD,KAAK,QAAQ,IAAI;AAAA,MACjB,KAAK;AAAA,QACH,UAAU,QAAQ,IAAI,UAAU;AAAA,QAChC,aAAa,QAAQ,IAAI,aAAa;AAAA,QACtC,gBAAgB,QAAQ,IAAI,gBAAgB,IAAI,UAAU;AAAA,MAC5D;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAGA,WAAO,KAAK,gBAAgB,WAAW,IAAI,OAAO;AAGlD,UAAM,MAAM,QAAQ,aAAa,SAAS,YAAY;AACpD,UAAI;AAEF,cAAM,SAAS,MAAM,eAAe,MAAM,MAAM,IAAW;AAG3D,eAAO,KAAK,0BAA0B,WAAW,IAAI;AAAA,UACnD,UAAU,MAAM,aAAa,EAAE,KAAK,CAAC,MAAW,EAAE,SAAS,WAAW,GAAG;AAAA,QAC3E,CAAC;AAGD,YAAI,QAAQ,IAAI,aAAa,MAAM,QAAQ;AACzC,kBAAQ,IAAI,MAAM,oBAAoB,CAAC;AAAA,QACzC;AAAA,MACF,SAAS,OAAgB;AAEvB,eAAO,MAAM,uBAAuB,WAAW,IAAI,OAAgB,OAAO;AAG1E,cAAM,YAAY,MAAM,aAAa;AACrC,YAAI,WAAW;AACb,kBAAQ,MAAM,gCAAyB;AACvC,kBAAQ,MAAM,MAAM,UAAU,IAAI,EAAE;AACpC,cAAI,UAAU,QAAQ;AACpB,oBAAQ,MAAM,mBAAmB,KAAK,UAAU,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA,UAC5E;AACA,kBAAQ,MAAM,qBAAqB,UAAU,KAAK;AAAA,QACpD;AAGA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAGD,UAAQ,SAAS,QAAQ,gBAAc;AACrC,gBAAY,UAAU;AAAA,EACxB,CAAC;AAED,SAAO;AACT;AAEA,SAAS,eAAe,SAA0B;AAChD,QAAM,QAAkB,CAAC;AACzB,MAAI,UAA0B;AAE9B,SAAO,SAAS;AACd,QAAI,QAAQ,KAAK,GAAG;AAClB,YAAM,QAAQ,QAAQ,KAAK,CAAC;AAAA,IAC9B;AACA,cAAU,QAAQ;AAAA,EACpB;AAEA,SAAO,MAAM,KAAK,GAAG;AACvB;AAKO,SAAS,YAAY,SAA2B;AAErD,UAAQ,aAAa,CAAC,QAAQ;AAC5B,QAAI,IAAI,SAAS,oBAAoB,IAAI,SAAS,qBAAqB;AAErE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAGA,WAAO,MAAM,aAAa,KAAK;AAAA,MAC7B,MAAM,IAAI;AAAA,MACV,UAAU,IAAI;AAAA,MACd,SAAS,QAAQ,KAAK,MAAM,CAAC,EAAE,KAAK,GAAG;AAAA,IACzC,CAAC;AAGD,QAAI,QAAQ,IAAI,aAAa,MAAM,QAAQ;AACzC,cAAQ,MAAM,OAAO,MAAM,oBAAoB,CAAC;AAAA,IAClD;AAEA,YAAQ,KAAK,IAAI,YAAY,CAAC;AAAA,EAChC,CAAC;AAGD,UAAQ,KAAK,aAAa,CAAC,gBAAgB;AAEzC,UAAM,MAAM;AAGZ,UAAM,cAAc,eAAe,WAAW;AAC9C,WAAO,MAAM,yBAAyB,WAAW,IAAI;AAAA,MACnD,MAAM,YAAY;AAAA,MAClB,MAAM,YAAY,KAAK;AAAA,IACzB,CAAC;AAAA,EACH,CAAC;AAGD,UAAQ,KAAK,cAAc,CAAC,gBAAgB;AAE1C,UAAM,cAAc,eAAe,WAAW;AAC9C,WAAO,MAAM,wBAAwB,WAAW,EAAE;AAAA,EACpD,CAAC;AAGD,UAAQ,SAAS,QAAQ,aAAW;AAClC,gBAAY,OAAO;AAAA,EACrB,CAAC;AAED,SAAO;AACT;AAKO,SAAS,UAAa,MAAc,IAAkC;AAC3E,SAAO,MAAM,KAAK,MAAM,EAAE;AAC5B;AAKO,SAAS,WAAc,KAAa,QAAa,IAAgB;AACtE,SAAO,MAAM,UAAU,SAAS,IAAI,UAAU,GAAG,GAAG,GAAG,QAAQ,EAAE;AACnE;AAKO,SAAS,SACd,QACA,KACA,MACA,IACY;AACZ,SAAO,MAAM,IAAI,QAAQ,KAAK,MAAM,EAAE;AACxC;",
  "names": []
  }
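
The embedded source above exports wrapProgram, wrapCommand, and several trace helpers. A minimal, hypothetical usage sketch (the command name and step label are invented for illustration; only the imported functions come from the module, and the import path is assumed relative to src/core/trace):

import { Command } from "commander";
import { wrapProgram, traceStep } from "./cli-trace-wrapper.js";

// Hypothetical CLI: wrapProgram installs tracing hooks and wraps each command action.
const program = new Command("stackmemory");

program.command("status").action(async () => {
  // traceStep wraps one async unit of work as a traced step.
  await traceStep("load-status", async () => {
    console.log("status ok");
  });
});

wrapProgram(program).parse(process.argv);
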
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/trace/db-trace-wrapper.ts"],
- "sourcesContent": ["/**\n * Database Operations Trace Wrapper\n * Wraps SQLite operations with comprehensive tracing for debugging\n */\n\nimport Database from 'better-sqlite3';\nimport { trace } from './debug-trace.js';\nimport { logger } from '../monitoring/logger.js';\n\nexport interface TracedDatabaseOptions extends Database.Options {\n traceEnabled?: boolean;\n slowQueryThreshold?: number;\n}\n\n/**\n * Create a traced database instance\n */\nexport function createTracedDatabase(\n filename: string,\n options?: TracedDatabaseOptions\n): Database.Database {\n const db = new Database(filename, options);\n \n if (options?.traceEnabled !== false) {\n return wrapDatabase(db, options?.slowQueryThreshold);\n }\n \n return db;\n}\n\n/**\n * Wrap an existing database with tracing\n */\nexport function wrapDatabase(\n db: Database.Database,\n slowQueryThreshold = 100\n): Database.Database {\n // Wrap prepare method to trace all queries\n const originalPrepare = db.prepare.bind(db);\n \n db.prepare = function(source: string) {\n const statement = originalPrepare(source);\n return wrapStatement(statement, source, slowQueryThreshold);\n } as typeof db.prepare;\n \n // Wrap exec for direct SQL execution\n const originalExec = db.exec.bind(db);\n \n db.exec = function(source: string): Database.Database {\n return trace.traceSync('query', `EXEC: ${source.substring(0, 50)}...`, {}, () => {\n const startTime = performance.now();\n const result = originalExec(source);\n const duration = performance.now() - startTime;\n \n if (duration > slowQueryThreshold) {\n logger.warn(`Slow query detected: ${duration.toFixed(0)}ms`, {\n query: source.substring(0, 200),\n duration,\n });\n }\n \n return result;\n });\n };\n \n // Wrap transaction for transaction tracking\n const originalTransaction = db.transaction.bind(db);\n \n db.transaction = function(fn: any) {\n return originalTransaction(function(this: any, ...args: any[]) {\n return trace.traceSync('query', 'TRANSACTION', { args: args.length }, () => {\n return fn.apply(this, args);\n });\n });\n } as typeof db.transaction;\n \n // Add query statistics tracking\n (db as any).__queryStats = {\n totalQueries: 0,\n slowQueries: 0,\n totalDuration: 0,\n queryTypes: {} as Record<string, number>,\n };\n \n return db;\n}\n\n/**\n * Wrap a statement with tracing\n */\nfunction wrapStatement<T extends any[] = any[]>(\n statement: Database.Statement<T>,\n source: string,\n slowQueryThreshold: number\n): Database.Statement<T> {\n const queryType = source.trim().split(/\\s+/)[0].toUpperCase();\n const shortQuery = source.substring(0, 100).replace(/\\s+/g, ' ');\n \n // Wrap run method\n const originalRun = statement.run.bind(statement);\n statement.run = function(...params: T): Database.RunResult {\n return trace.traceSync('query', `${queryType}: ${shortQuery}`, params, () => {\n const startTime = performance.now();\n const result = originalRun(...params);\n const duration = performance.now() - startTime;\n \n // Track statistics\n updateQueryStats(statement, queryType, duration, slowQueryThreshold);\n \n // Log slow queries\n if (duration > slowQueryThreshold) {\n logger.warn(`Slow ${queryType} query: ${duration.toFixed(0)}ms`, {\n query: shortQuery,\n params,\n duration,\n changes: result.changes,\n });\n }\n \n return result;\n });\n };\n \n // Wrap get method\n const originalGet = statement.get.bind(statement);\n statement.get = function(...params: T): any {\n return trace.traceSync('query', `${queryType} (get): ${shortQuery}`, params, () => {\n const startTime = 
performance.now();\n const result = originalGet(...params);\n const duration = performance.now() - startTime;\n \n updateQueryStats(statement, queryType, duration, slowQueryThreshold);\n \n if (duration > slowQueryThreshold) {\n logger.warn(`Slow ${queryType} query: ${duration.toFixed(0)}ms`, {\n query: shortQuery,\n params,\n duration,\n found: result != null,\n });\n }\n \n return result;\n });\n };\n \n // Wrap all method\n const originalAll = statement.all.bind(statement);\n statement.all = function(...params: T): any[] {\n return trace.traceSync('query', `${queryType} (all): ${shortQuery}`, params, () => {\n const startTime = performance.now();\n const result = originalAll(...params);\n const duration = performance.now() - startTime;\n \n updateQueryStats(statement, queryType, duration, slowQueryThreshold);\n \n if (duration > slowQueryThreshold) {\n logger.warn(`Slow ${queryType} query: ${duration.toFixed(0)}ms`, {\n query: shortQuery,\n params,\n duration,\n rows: result.length,\n });\n }\n \n // Warn about potential N+1 queries\n if (result.length > 100 && queryType === 'SELECT') {\n logger.warn(`Large result set: ${result.length} rows`, {\n query: shortQuery,\n suggestion: 'Consider pagination or more specific queries',\n });\n }\n \n return result;\n });\n };\n \n // Wrap iterate method for cursor operations\n const originalIterate = statement.iterate.bind(statement);\n statement.iterate = function(...params: T): IterableIterator<any> {\n const startTime = performance.now();\n let rowCount = 0;\n \n const iterator = originalIterate(...params);\n const wrappedIterator: IterableIterator<any> = {\n [Symbol.iterator]() {\n return this;\n },\n next() {\n const result = iterator.next();\n if (!result.done) {\n rowCount++;\n } else {\n const duration = performance.now() - startTime;\n updateQueryStats(statement, queryType, duration, slowQueryThreshold);\n \n if (duration > slowQueryThreshold) {\n logger.warn(`Slow ${queryType} iteration: ${duration.toFixed(0)}ms`, {\n query: shortQuery,\n params,\n duration,\n rows: rowCount,\n });\n }\n }\n return result;\n },\n };\n \n return wrappedIterator;\n };\n \n return statement;\n}\n\n/**\n * Update query statistics\n */\nfunction updateQueryStats(\n statement: Database.Statement,\n queryType: string,\n duration: number,\n slowQueryThreshold: number\n): void {\n const db = statement.database as any;\n if (db.__queryStats) {\n db.__queryStats.totalQueries++;\n db.__queryStats.totalDuration += duration;\n \n if (duration > slowQueryThreshold) {\n db.__queryStats.slowQueries++;\n }\n \n if (!db.__queryStats.queryTypes[queryType]) {\n db.__queryStats.queryTypes[queryType] = 0;\n }\n db.__queryStats.queryTypes[queryType]++;\n }\n}\n\n/**\n * Get query statistics from a traced database\n */\nexport function getQueryStatistics(db: Database.Database): {\n totalQueries: number;\n slowQueries: number;\n averageDuration: number;\n totalDuration: number;\n queryTypes: Record<string, number>;\n} | null {\n const stats = (db as any).__queryStats;\n if (!stats) return null;\n \n return {\n ...stats,\n averageDuration: stats.totalQueries > 0 \n ? 
stats.totalDuration / stats.totalQueries \n : 0,\n };\n}\n\n/**\n * Helper to trace a specific query with context\n */\nexport async function traceQuery<T>(\n db: Database.Database,\n queryName: string,\n query: string,\n params: any[],\n fn: () => T\n): Promise<T> {\n return trace.traceAsync('query', queryName, { query, params }, async () => {\n try {\n const result = fn();\n \n // Log successful complex queries for debugging\n if (query.includes('JOIN') || query.includes('GROUP BY')) {\n logger.debug(`Complex query executed: ${queryName}`, {\n query: query.substring(0, 200),\n params,\n });\n }\n \n return result;\n } catch (error) {\n // Enhanced error logging for database errors\n logger.error(`Database query failed: ${queryName}`, error as Error, {\n query,\n params,\n errorCode: (error as any).code,\n });\n throw error;\n }\n });\n}\n\n/**\n * Create a traced transaction with automatic rollback on error\n */\nexport function createTracedTransaction<T>(\n db: Database.Database,\n name: string,\n fn: (tx: Database.Transaction<(args: any) => T>) => T\n): T {\n return trace.traceSync('query', `TRANSACTION: ${name}`, {}, () => {\n const startTime = performance.now();\n \n try {\n const tx = db.transaction(fn);\n const result = (tx as any).deferred();\n \n const duration = performance.now() - startTime;\n logger.info(`Transaction completed: ${name}`, {\n duration,\n success: true,\n });\n \n return result;\n } catch (error) {\n const duration = performance.now() - startTime;\n logger.error(`Transaction failed: ${name}`, error as Error, {\n duration,\n success: false,\n });\n throw error;\n }\n });\n}"],
- "mappings": "AAKA,OAAO,cAAc;AACrB,SAAS,aAAa;AACtB,SAAS,cAAc;AAUhB,SAAS,qBACd,UACA,SACmB;AACnB,QAAM,KAAK,IAAI,SAAS,UAAU,OAAO;AAEzC,MAAI,SAAS,iBAAiB,OAAO;AACnC,WAAO,aAAa,IAAI,SAAS,kBAAkB;AAAA,EACrD;AAEA,SAAO;AACT;AAKO,SAAS,aACd,IACA,qBAAqB,KACF;AAEnB,QAAM,kBAAkB,GAAG,QAAQ,KAAK,EAAE;AAE1C,KAAG,UAAU,SAAS,QAAgB;AACpC,UAAM,YAAY,gBAAgB,MAAM;AACxC,WAAO,cAAc,WAAW,QAAQ,kBAAkB;AAAA,EAC5D;AAGA,QAAM,eAAe,GAAG,KAAK,KAAK,EAAE;AAEpC,KAAG,OAAO,SAAS,QAAmC;AACpD,WAAO,MAAM,UAAU,SAAS,SAAS,OAAO,UAAU,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,MAAM;AAC/E,YAAM,YAAY,YAAY,IAAI;AAClC,YAAM,SAAS,aAAa,MAAM;AAClC,YAAM,WAAW,YAAY,IAAI,IAAI;AAErC,UAAI,WAAW,oBAAoB;AACjC,eAAO,KAAK,wBAAwB,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,UAC3D,OAAO,OAAO,UAAU,GAAG,GAAG;AAAA,UAC9B;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAM,sBAAsB,GAAG,YAAY,KAAK,EAAE;AAElD,KAAG,cAAc,SAAS,IAAS;AACjC,WAAO,oBAAoB,YAAuB,MAAa;AAC7D,aAAO,MAAM,UAAU,SAAS,eAAe,EAAE,MAAM,KAAK,OAAO,GAAG,MAAM;AAC1E,eAAO,GAAG,MAAM,MAAM,IAAI;AAAA,MAC5B,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAGA,EAAC,GAAW,eAAe;AAAA,IACzB,cAAc;AAAA,IACd,aAAa;AAAA,IACb,eAAe;AAAA,IACf,YAAY,CAAC;AAAA,EACf;AAEA,SAAO;AACT;AAKA,SAAS,cACP,WACA,QACA,oBACuB;AACvB,QAAM,YAAY,OAAO,KAAK,EAAE,MAAM,KAAK,EAAE,CAAC,EAAE,YAAY;AAC5D,QAAM,aAAa,OAAO,UAAU,GAAG,GAAG,EAAE,QAAQ,QAAQ,GAAG;AAG/D,QAAM,cAAc,UAAU,IAAI,KAAK,SAAS;AAChD,YAAU,MAAM,YAAY,QAA+B;AACzD,WAAO,MAAM,UAAU,SAAS,GAAG,SAAS,KAAK,UAAU,IAAI,QAAQ,MAAM;AAC3E,YAAM,YAAY,YAAY,IAAI;AAClC,YAAM,SAAS,YAAY,GAAG,MAAM;AACpC,YAAM,WAAW,YAAY,IAAI,IAAI;AAGrC,uBAAiB,WAAW,WAAW,UAAU,kBAAkB;AAGnE,UAAI,WAAW,oBAAoB;AACjC,eAAO,KAAK,QAAQ,SAAS,WAAW,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,UAC/D,OAAO;AAAA,UACP;AAAA,UACA;AAAA,UACA,SAAS,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAM,cAAc,UAAU,IAAI,KAAK,SAAS;AAChD,YAAU,MAAM,YAAY,QAAgB;AAC1C,WAAO,MAAM,UAAU,SAAS,GAAG,SAAS,WAAW,UAAU,IAAI,QAAQ,MAAM;AACjF,YAAM,YAAY,YAAY,IAAI;AAClC,YAAM,SAAS,YAAY,GAAG,MAAM;AACpC,YAAM,WAAW,YAAY,IAAI,IAAI;AAErC,uBAAiB,WAAW,WAAW,UAAU,kBAAkB;AAEnE,UAAI,WAAW,oBAAoB;AACjC,eAAO,KAAK,QAAQ,SAAS,WAAW,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,UAC/D,OAAO;AAAA,UACP;AAAA,UACA;AAAA,UACA,OAAO,UAAU;AAAA,QACnB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAM,cAAc,UAAU,IAAI,KAAK,SAAS;AAChD,YAAU,MAAM,YAAY,QAAkB;AAC5C,WAAO,MAAM,UAAU,SAAS,GAAG,SAAS,WAAW,UAAU,IAAI,QAAQ,MAAM;AACjF,YAAM,YAAY,YAAY,IAAI;AAClC,YAAM,SAAS,YAAY,GAAG,MAAM;AACpC,YAAM,WAAW,YAAY,IAAI,IAAI;AAErC,uBAAiB,WAAW,WAAW,UAAU,kBAAkB;AAEnE,UAAI,WAAW,oBAAoB;AACjC,eAAO,KAAK,QAAQ,SAAS,WAAW,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,UAC/D,OAAO;AAAA,UACP;AAAA,UACA;AAAA,UACA,MAAM,OAAO;AAAA,QACf,CAAC;AAAA,MACH;AAGA,UAAI,OAAO,SAAS,OAAO,cAAc,UAAU;AACjD,eAAO,KAAK,qBAAqB,OAAO,MAAM,SAAS;AAAA,UACrD,OAAO;AAAA,UACP,YAAY;AAAA,QACd,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAM,kBAAkB,UAAU,QAAQ,KAAK,SAAS;AACxD,YAAU,UAAU,YAAY,QAAkC;AAChE,UAAM,YAAY,YAAY,IAAI;AAClC,QAAI,WAAW;AAEf,UAAM,WAAW,gBAAgB,GAAG,MAAM;AAC1C,UAAM,kBAAyC;AAAA,MAC7C,CAAC,OAAO,QAAQ,IAAI;AAClB,eAAO;AAAA,MACT;AAAA,MACA,OAAO;AACL,cAAM,SAAS,SAAS,KAAK;AAC7B,YAAI,CAAC,OAAO,MAAM;AAChB;AAAA,QACF,OAAO;AACL,gBAAM,WAAW,YAAY,IAAI,IAAI;AACrC,2BAAiB,WAAW,WAAW,UAAU,kBAAkB;AAEnE,cAAI,WAAW,oBAAoB;AACjC,mBAAO,KAAK,QAAQ,SAAS,eAAe,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,cACnE,OAAO;AAAA,cACP;AAAA,cACA;AAAA,cACA,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKA,SAAS,iBACP,WACA,WACA,UACA,oBACM;AACN,QAAM,KAAK,UAAU;AACrB,MAAI,GAAG,cAAc;AACnB,OAAG,aAAa;AAChB,OAAG,aAAa,iBAAiB;AAEjC,QAAI,WAAW,oBAAoB;AACjC,SAAG,aAAa;AAAA,IAClB;AAEA,QAAI,CAAC,GAAG,aAAa,WAAW,SAAS,GAAG;AAC1C,SAAG,aAAa,WAAW,SAAS,IAAI;AAAA
,IAC1C;AACA,OAAG,aAAa,WAAW,SAAS;AAAA,EACtC;AACF;AAKO,SAAS,mBAAmB,IAM1B;AACP,QAAM,QAAS,GAAW;AAC1B,MAAI,CAAC,MAAO,QAAO;AAEnB,SAAO;AAAA,IACL,GAAG;AAAA,IACH,iBAAiB,MAAM,eAAe,IAClC,MAAM,gBAAgB,MAAM,eAC5B;AAAA,EACN;AACF;AAKA,eAAsB,WACpB,IACA,WACA,OACA,QACA,IACY;AACZ,SAAO,MAAM,WAAW,SAAS,WAAW,EAAE,OAAO,OAAO,GAAG,YAAY;AACzE,QAAI;AACF,YAAM,SAAS,GAAG;AAGlB,UAAI,MAAM,SAAS,MAAM,KAAK,MAAM,SAAS,UAAU,GAAG;AACxD,eAAO,MAAM,2BAA2B,SAAS,IAAI;AAAA,UACnD,OAAO,MAAM,UAAU,GAAG,GAAG;AAAA,UAC7B;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,SAAS,OAAO;AAEd,aAAO,MAAM,0BAA0B,SAAS,IAAI,OAAgB;AAAA,QAClE;AAAA,QACA;AAAA,QACA,WAAY,MAAc;AAAA,MAC5B,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF,CAAC;AACH;AAKO,SAAS,wBACd,IACA,MACA,IACG;AACH,SAAO,MAAM,UAAU,SAAS,gBAAgB,IAAI,IAAI,CAAC,GAAG,MAAM;AAChE,UAAM,YAAY,YAAY,IAAI;AAElC,QAAI;AACF,YAAM,KAAK,GAAG,YAAY,EAAE;AAC5B,YAAM,SAAU,GAAW,SAAS;AAEpC,YAAM,WAAW,YAAY,IAAI,IAAI;AACrC,aAAO,KAAK,0BAA0B,IAAI,IAAI;AAAA,QAC5C;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAO;AACd,YAAM,WAAW,YAAY,IAAI,IAAI;AACrC,aAAO,MAAM,uBAAuB,IAAI,IAAI,OAAgB;AAAA,QAC1D;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF,CAAC;AACH;",
+ "sourcesContent": ["/**\n * Database Operations Trace Wrapper\n * Wraps SQLite operations with comprehensive tracing for debugging\n */\n\nimport Database from 'better-sqlite3';\nimport { trace } from './debug-trace.js';\nimport { logger } from '../monitoring/logger.js';\n\nexport interface TracedDatabaseOptions extends Database.Options {\n traceEnabled?: boolean;\n slowQueryThreshold?: number;\n}\n\n/**\n * Create a traced database instance\n */\nexport function createTracedDatabase(\n filename: string,\n options?: TracedDatabaseOptions\n): Database.Database {\n const db = new Database(filename, options);\n \n if (options?.traceEnabled !== false) {\n return wrapDatabase(db, options?.slowQueryThreshold);\n }\n \n return db;\n}\n\n/**\n * Wrap an existing database with tracing\n */\nexport function wrapDatabase(\n db: Database.Database,\n slowQueryThreshold = 100\n): Database.Database {\n // Wrap prepare method to trace all queries\n const originalPrepare = db.prepare.bind(db);\n \n db.prepare = function(source: string) {\n const statement = originalPrepare(source);\n return wrapStatement(statement, source, slowQueryThreshold);\n } as typeof db.prepare;\n \n // Wrap exec for direct SQL execution\n const originalExec = db.exec.bind(db);\n \n db.exec = function(source: string): Database.Database {\n return trace.traceSync('query', `EXEC: ${source.substring(0, 50)}...`, {}, () => {\n const startTime = performance.now();\n const result = originalExec(source);\n const duration = performance.now() - startTime;\n \n if (duration > slowQueryThreshold) {\n logger.warn(`Slow query detected: ${duration.toFixed(0)}ms`, {\n query: source.substring(0, 200),\n duration,\n });\n }\n \n return result;\n });\n };\n \n // Wrap transaction for transaction tracking\n const originalTransaction = db.transaction.bind(db);\n \n db.transaction = function(fn: any) {\n return originalTransaction(function(this: any, ...args: any[]) {\n return trace.traceSync('query', 'TRANSACTION', { args: args.length }, () => {\n return fn.apply(this, args);\n });\n });\n } as typeof db.transaction;\n \n // Add query statistics tracking\n (db as any).__queryStats = {\n totalQueries: 0,\n slowQueries: 0,\n totalDuration: 0,\n queryTypes: {} as Record<string, number>,\n };\n \n return db;\n}\n\n/**\n * Wrap a statement with tracing\n */\nfunction wrapStatement<T extends any[] = any[]>(\n statement: Database.Statement<T>,\n source: string,\n slowQueryThreshold: number\n): Database.Statement<T> {\n const queryType = source.trim().split(/\\s+/)[0].toUpperCase();\n const shortQuery = source.substring(0, 100).replace(/\\s+/g, ' ');\n \n // Wrap run method\n const originalRun = statement.run.bind(statement);\n statement.run = function(...params: T): Database.RunResult {\n return trace.traceSync('query', `${queryType}: ${shortQuery}`, params, () => {\n const startTime = performance.now();\n const result = originalRun(...params);\n const duration = performance.now() - startTime;\n \n // Track statistics\n updateQueryStats(statement, queryType, duration, slowQueryThreshold);\n \n // Log slow queries\n if (duration > slowQueryThreshold) {\n logger.warn(`Slow ${queryType} query: ${duration.toFixed(0)}ms`, {\n query: shortQuery,\n params,\n duration,\n changes: result.changes,\n });\n }\n \n return result;\n });\n };\n \n // Wrap get method\n const originalGet = statement.get.bind(statement);\n statement.get = function(...params: T): any {\n return trace.traceSync('query', `${queryType} (get): ${shortQuery}`, params, () => {\n const startTime = 
performance.now();\n const result = originalGet(...params);\n const duration = performance.now() - startTime;\n \n updateQueryStats(statement, queryType, duration, slowQueryThreshold);\n \n if (duration > slowQueryThreshold) {\n logger.warn(`Slow ${queryType} query: ${duration.toFixed(0)}ms`, {\n query: shortQuery,\n params,\n duration,\n found: result != null,\n });\n }\n \n return result;\n });\n };\n \n // Wrap all method\n const originalAll = statement.all.bind(statement);\n statement.all = function(...params: T): any[] {\n return trace.traceSync('query', `${queryType} (all): ${shortQuery}`, params, () => {\n const startTime = performance.now();\n const result = originalAll(...params);\n const duration = performance.now() - startTime;\n \n updateQueryStats(statement, queryType, duration, slowQueryThreshold);\n \n if (duration > slowQueryThreshold) {\n logger.warn(`Slow ${queryType} query: ${duration.toFixed(0)}ms`, {\n query: shortQuery,\n params,\n duration,\n rows: result.length,\n });\n }\n \n // Warn about potential N+1 queries\n if (result.length > 100 && queryType === 'SELECT') {\n logger.warn(`Large result set: ${result.length} rows`, {\n query: shortQuery,\n suggestion: 'Consider pagination or more specific queries',\n });\n }\n \n return result;\n });\n };\n \n // Wrap iterate method for cursor operations\n const originalIterate = statement.iterate.bind(statement);\n statement.iterate = function(...params: T): IterableIterator<any> {\n const startTime = performance.now();\n let rowCount = 0;\n \n const iterator = originalIterate(...params);\n const wrappedIterator: IterableIterator<any> = {\n [Symbol.iterator]() {\n return this;\n },\n next() {\n const result = iterator.next();\n if (!result.done) {\n rowCount++;\n } else {\n const duration = performance.now() - startTime;\n updateQueryStats(statement, queryType, duration, slowQueryThreshold);\n \n if (duration > slowQueryThreshold) {\n logger.warn(`Slow ${queryType} iteration: ${duration.toFixed(0)}ms`, {\n query: shortQuery,\n params,\n duration,\n rows: rowCount,\n });\n }\n }\n return result;\n },\n };\n \n return wrappedIterator;\n };\n \n return statement;\n}\n\n/**\n * Update query statistics\n */\nfunction updateQueryStats(\n statement: Database.Statement,\n queryType: string,\n duration: number,\n slowQueryThreshold: number\n): void {\n const db = statement.database as any;\n if (db.__queryStats) {\n db.__queryStats.totalQueries++;\n db.__queryStats.totalDuration += duration;\n \n if (duration > slowQueryThreshold) {\n db.__queryStats.slowQueries++;\n }\n \n if (!db.__queryStats.queryTypes[queryType]) {\n db.__queryStats.queryTypes[queryType] = 0;\n }\n db.__queryStats.queryTypes[queryType]++;\n }\n}\n\n/**\n * Get query statistics from a traced database\n */\nexport function getQueryStatistics(db: Database.Database): {\n totalQueries: number;\n slowQueries: number;\n averageDuration: number;\n totalDuration: number;\n queryTypes: Record<string, number>;\n} | null {\n const stats = (db as any).__queryStats;\n if (!stats) return null;\n \n return {\n ...stats,\n averageDuration: stats.totalQueries > 0 \n ? 
stats.totalDuration / stats.totalQueries \n : 0,\n };\n}\n\n/**\n * Helper to trace a specific query with context\n */\nexport async function traceQuery<T>(\n db: Database.Database,\n queryName: string,\n query: string,\n params: any[],\n fn: () => T\n): Promise<T> {\n return trace.traceAsync('query', queryName, { query, params }, async () => {\n try {\n const result = fn();\n \n // Log successful complex queries for debugging\n if (query.includes('JOIN') || query.includes('GROUP BY')) {\n logger.debug(`Complex query executed: ${queryName}`, {\n query: query.substring(0, 200),\n params,\n });\n }\n \n return result;\n } catch (error: unknown) {\n // Enhanced error logging for database errors\n logger.error(`Database query failed: ${queryName}`, error as Error, {\n query,\n params,\n errorCode: (error as any).code,\n });\n throw error;\n }\n });\n}\n\n/**\n * Create a traced transaction with automatic rollback on error\n */\nexport function createTracedTransaction<T>(\n db: Database.Database,\n name: string,\n fn: (tx: Database.Transaction<(args: any) => T>) => T\n): T {\n return trace.traceSync('query', `TRANSACTION: ${name}`, {}, () => {\n const startTime = performance.now();\n \n try {\n const tx = db.transaction(fn);\n const result = (tx as any).deferred();\n \n const duration = performance.now() - startTime;\n logger.info(`Transaction completed: ${name}`, {\n duration,\n success: true,\n });\n \n return result;\n } catch (error: unknown) {\n const duration = performance.now() - startTime;\n logger.error(`Transaction failed: ${name}`, error as Error, {\n duration,\n success: false,\n });\n throw error;\n }\n });\n}"],
+ "mappings": "AAKA,OAAO,cAAc;AACrB,SAAS,aAAa;AACtB,SAAS,cAAc;AAUhB,SAAS,qBACd,UACA,SACmB;AACnB,QAAM,KAAK,IAAI,SAAS,UAAU,OAAO;AAEzC,MAAI,SAAS,iBAAiB,OAAO;AACnC,WAAO,aAAa,IAAI,SAAS,kBAAkB;AAAA,EACrD;AAEA,SAAO;AACT;AAKO,SAAS,aACd,IACA,qBAAqB,KACF;AAEnB,QAAM,kBAAkB,GAAG,QAAQ,KAAK,EAAE;AAE1C,KAAG,UAAU,SAAS,QAAgB;AACpC,UAAM,YAAY,gBAAgB,MAAM;AACxC,WAAO,cAAc,WAAW,QAAQ,kBAAkB;AAAA,EAC5D;AAGA,QAAM,eAAe,GAAG,KAAK,KAAK,EAAE;AAEpC,KAAG,OAAO,SAAS,QAAmC;AACpD,WAAO,MAAM,UAAU,SAAS,SAAS,OAAO,UAAU,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,MAAM;AAC/E,YAAM,YAAY,YAAY,IAAI;AAClC,YAAM,SAAS,aAAa,MAAM;AAClC,YAAM,WAAW,YAAY,IAAI,IAAI;AAErC,UAAI,WAAW,oBAAoB;AACjC,eAAO,KAAK,wBAAwB,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,UAC3D,OAAO,OAAO,UAAU,GAAG,GAAG;AAAA,UAC9B;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAM,sBAAsB,GAAG,YAAY,KAAK,EAAE;AAElD,KAAG,cAAc,SAAS,IAAS;AACjC,WAAO,oBAAoB,YAAuB,MAAa;AAC7D,aAAO,MAAM,UAAU,SAAS,eAAe,EAAE,MAAM,KAAK,OAAO,GAAG,MAAM;AAC1E,eAAO,GAAG,MAAM,MAAM,IAAI;AAAA,MAC5B,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAGA,EAAC,GAAW,eAAe;AAAA,IACzB,cAAc;AAAA,IACd,aAAa;AAAA,IACb,eAAe;AAAA,IACf,YAAY,CAAC;AAAA,EACf;AAEA,SAAO;AACT;AAKA,SAAS,cACP,WACA,QACA,oBACuB;AACvB,QAAM,YAAY,OAAO,KAAK,EAAE,MAAM,KAAK,EAAE,CAAC,EAAE,YAAY;AAC5D,QAAM,aAAa,OAAO,UAAU,GAAG,GAAG,EAAE,QAAQ,QAAQ,GAAG;AAG/D,QAAM,cAAc,UAAU,IAAI,KAAK,SAAS;AAChD,YAAU,MAAM,YAAY,QAA+B;AACzD,WAAO,MAAM,UAAU,SAAS,GAAG,SAAS,KAAK,UAAU,IAAI,QAAQ,MAAM;AAC3E,YAAM,YAAY,YAAY,IAAI;AAClC,YAAM,SAAS,YAAY,GAAG,MAAM;AACpC,YAAM,WAAW,YAAY,IAAI,IAAI;AAGrC,uBAAiB,WAAW,WAAW,UAAU,kBAAkB;AAGnE,UAAI,WAAW,oBAAoB;AACjC,eAAO,KAAK,QAAQ,SAAS,WAAW,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,UAC/D,OAAO;AAAA,UACP;AAAA,UACA;AAAA,UACA,SAAS,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAM,cAAc,UAAU,IAAI,KAAK,SAAS;AAChD,YAAU,MAAM,YAAY,QAAgB;AAC1C,WAAO,MAAM,UAAU,SAAS,GAAG,SAAS,WAAW,UAAU,IAAI,QAAQ,MAAM;AACjF,YAAM,YAAY,YAAY,IAAI;AAClC,YAAM,SAAS,YAAY,GAAG,MAAM;AACpC,YAAM,WAAW,YAAY,IAAI,IAAI;AAErC,uBAAiB,WAAW,WAAW,UAAU,kBAAkB;AAEnE,UAAI,WAAW,oBAAoB;AACjC,eAAO,KAAK,QAAQ,SAAS,WAAW,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,UAC/D,OAAO;AAAA,UACP;AAAA,UACA;AAAA,UACA,OAAO,UAAU;AAAA,QACnB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAM,cAAc,UAAU,IAAI,KAAK,SAAS;AAChD,YAAU,MAAM,YAAY,QAAkB;AAC5C,WAAO,MAAM,UAAU,SAAS,GAAG,SAAS,WAAW,UAAU,IAAI,QAAQ,MAAM;AACjF,YAAM,YAAY,YAAY,IAAI;AAClC,YAAM,SAAS,YAAY,GAAG,MAAM;AACpC,YAAM,WAAW,YAAY,IAAI,IAAI;AAErC,uBAAiB,WAAW,WAAW,UAAU,kBAAkB;AAEnE,UAAI,WAAW,oBAAoB;AACjC,eAAO,KAAK,QAAQ,SAAS,WAAW,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,UAC/D,OAAO;AAAA,UACP;AAAA,UACA;AAAA,UACA,MAAM,OAAO;AAAA,QACf,CAAC;AAAA,MACH;AAGA,UAAI,OAAO,SAAS,OAAO,cAAc,UAAU;AACjD,eAAO,KAAK,qBAAqB,OAAO,MAAM,SAAS;AAAA,UACrD,OAAO;AAAA,UACP,YAAY;AAAA,QACd,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAM,kBAAkB,UAAU,QAAQ,KAAK,SAAS;AACxD,YAAU,UAAU,YAAY,QAAkC;AAChE,UAAM,YAAY,YAAY,IAAI;AAClC,QAAI,WAAW;AAEf,UAAM,WAAW,gBAAgB,GAAG,MAAM;AAC1C,UAAM,kBAAyC;AAAA,MAC7C,CAAC,OAAO,QAAQ,IAAI;AAClB,eAAO;AAAA,MACT;AAAA,MACA,OAAO;AACL,cAAM,SAAS,SAAS,KAAK;AAC7B,YAAI,CAAC,OAAO,MAAM;AAChB;AAAA,QACF,OAAO;AACL,gBAAM,WAAW,YAAY,IAAI,IAAI;AACrC,2BAAiB,WAAW,WAAW,UAAU,kBAAkB;AAEnE,cAAI,WAAW,oBAAoB;AACjC,mBAAO,KAAK,QAAQ,SAAS,eAAe,SAAS,QAAQ,CAAC,CAAC,MAAM;AAAA,cACnE,OAAO;AAAA,cACP;AAAA,cACA;AAAA,cACA,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKA,SAAS,iBACP,WACA,WACA,UACA,oBACM;AACN,QAAM,KAAK,UAAU;AACrB,MAAI,GAAG,cAAc;AACnB,OAAG,aAAa;AAChB,OAAG,aAAa,iBAAiB;AAEjC,QAAI,WAAW,oBAAoB;AACjC,SAAG,aAAa;AAAA,IAClB;AAEA,QAAI,CAAC,GAAG,aAAa,WAAW,SAAS,GAAG;AAC1C,SAAG,aAAa,WAAW,SAAS,IAAI;AAAA
,IAC1C;AACA,OAAG,aAAa,WAAW,SAAS;AAAA,EACtC;AACF;AAKO,SAAS,mBAAmB,IAM1B;AACP,QAAM,QAAS,GAAW;AAC1B,MAAI,CAAC,MAAO,QAAO;AAEnB,SAAO;AAAA,IACL,GAAG;AAAA,IACH,iBAAiB,MAAM,eAAe,IAClC,MAAM,gBAAgB,MAAM,eAC5B;AAAA,EACN;AACF;AAKA,eAAsB,WACpB,IACA,WACA,OACA,QACA,IACY;AACZ,SAAO,MAAM,WAAW,SAAS,WAAW,EAAE,OAAO,OAAO,GAAG,YAAY;AACzE,QAAI;AACF,YAAM,SAAS,GAAG;AAGlB,UAAI,MAAM,SAAS,MAAM,KAAK,MAAM,SAAS,UAAU,GAAG;AACxD,eAAO,MAAM,2BAA2B,SAAS,IAAI;AAAA,UACnD,OAAO,MAAM,UAAU,GAAG,GAAG;AAAA,UAC7B;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,SAAS,OAAgB;AAEvB,aAAO,MAAM,0BAA0B,SAAS,IAAI,OAAgB;AAAA,QAClE;AAAA,QACA;AAAA,QACA,WAAY,MAAc;AAAA,MAC5B,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF,CAAC;AACH;AAKO,SAAS,wBACd,IACA,MACA,IACG;AACH,SAAO,MAAM,UAAU,SAAS,gBAAgB,IAAI,IAAI,CAAC,GAAG,MAAM;AAChE,UAAM,YAAY,YAAY,IAAI;AAElC,QAAI;AACF,YAAM,KAAK,GAAG,YAAY,EAAE;AAC5B,YAAM,SAAU,GAAW,SAAS;AAEpC,YAAM,WAAW,YAAY,IAAI,IAAI;AACrC,aAAO,KAAK,0BAA0B,IAAI,IAAI;AAAA,QAC5C;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,YAAM,WAAW,YAAY,IAAI,IAAI;AACrC,aAAO,MAAM,uBAAuB,IAAI,IAAI,OAAgB;AAAA,QAC1D;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF,CAAC;AACH;",
  "names": []
  }
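
The db-trace-wrapper source embedded above exposes createTracedDatabase and getQueryStatistics on top of better-sqlite3. A short usage sketch under assumed inputs (the database path, table, and rows are hypothetical; the import path is assumed relative to src/core/trace):

import { createTracedDatabase, getQueryStatistics } from "./db-trace-wrapper.js";

// Hypothetical database file; slowQueryThreshold comes from TracedDatabaseOptions.
const db = createTracedDatabase("/tmp/example.db", { slowQueryThreshold: 50 });
db.exec("CREATE TABLE IF NOT EXISTS frames (id TEXT PRIMARY KEY, payload TEXT)");

// Prepared statements returned by the wrapped db are timed and logged when slow.
const insert = db.prepare("INSERT INTO frames (id, payload) VALUES (?, ?)");
insert.run("frame-1", JSON.stringify({ kind: "demo" }));

// Aggregate statistics collected by the wrapper (null if tracing was disabled).
console.log(getQueryStatistics(db));
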
@@ -3,6 +3,17 @@ import { logger } from "../monitoring/logger.js";
  import * as fs from "fs";
  import * as path from "path";
  import { v4 as uuidv4 } from "uuid";
+ function getEnv(key, defaultValue) {
+ const value = process.env[key];
+ if (value === void 0) {
+ if (defaultValue !== void 0) return defaultValue;
+ throw new Error(`Environment variable ${key} is required`);
+ }
+ return value;
+ }
+ function getOptionalEnv(key) {
+ return process.env[key];
+ }
  class TraceContext {
  static instance;
  config;
@@ -35,19 +46,19 @@ class TraceContext {
  }
  loadConfig() {
  return {
- enabled: process.env.DEBUG_TRACE === "true" || process.env.STACKMEMORY_DEBUG === "true",
- verbosity: process.env.TRACE_VERBOSITY || "full",
- output: process.env.TRACE_OUTPUT || "console",
- includeParams: process.env.TRACE_PARAMS !== "false",
- includeResults: process.env.TRACE_RESULTS !== "false",
- maskSensitive: process.env.TRACE_MASK_SENSITIVE !== "false",
- performanceThreshold: parseInt(process.env.TRACE_PERF_THRESHOLD || "100"),
- maxDepth: parseInt(process.env.TRACE_MAX_DEPTH || "20"),
- captureMemory: process.env.TRACE_MEMORY === "true"
+ enabled: process.env["DEBUG_TRACE"] === "true" || process.env["STACKMEMORY_DEBUG"] === "true",
+ verbosity: process.env["TRACE_VERBOSITY"] || "full",
+ output: process.env["TRACE_OUTPUT"] || "console",
+ includeParams: process.env["TRACE_PARAMS"] !== "false",
+ includeResults: process.env["TRACE_RESULTS"] !== "false",
+ maskSensitive: process.env["TRACE_MASK_SENSITIVE"] !== "false",
+ performanceThreshold: parseInt(process.env["TRACE_PERF_THRESHOLD"] || "100"),
+ maxDepth: parseInt(process.env["TRACE_MAX_DEPTH"] || "20"),
+ captureMemory: process.env["TRACE_MEMORY"] === "true"
  };
  }
  initializeOutputFile() {
- const traceDir = path.join(process.env.HOME || ".", ".stackmemory", "traces");
+ const traceDir = path.join(process.env["HOME"] || ".", ".stackmemory", "traces");
  if (!fs.existsSync(traceDir)) {
  fs.mkdirSync(traceDir, { recursive: true });
  }