@stackmemoryai/stackmemory 0.5.22 → 0.5.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/claude-sm.js +2 -0
- package/dist/cli/claude-sm.js.map +2 -2
- package/dist/cli/commands/discovery.js +279 -0
- package/dist/cli/commands/discovery.js.map +7 -0
- package/dist/cli/commands/retrieval.js +248 -0
- package/dist/cli/commands/retrieval.js.map +7 -0
- package/dist/cli/index.js +4 -0
- package/dist/cli/index.js.map +2 -2
- package/dist/core/retrieval/llm-context-retrieval.js +33 -1
- package/dist/core/retrieval/llm-context-retrieval.js.map +2 -2
- package/dist/core/retrieval/llm-provider.js +128 -0
- package/dist/core/retrieval/llm-provider.js.map +7 -0
- package/dist/core/retrieval/retrieval-audit.js +236 -0
- package/dist/core/retrieval/retrieval-audit.js.map +7 -0
- package/dist/integrations/linear/client.js +125 -0
- package/dist/integrations/linear/client.js.map +2 -2
- package/dist/integrations/mcp/handlers/discovery-handlers.js +497 -0
- package/dist/integrations/mcp/handlers/discovery-handlers.js.map +7 -0
- package/dist/integrations/mcp/handlers/index.js +40 -12
- package/dist/integrations/mcp/handlers/index.js.map +2 -2
- package/dist/integrations/mcp/server.js +270 -0
- package/dist/integrations/mcp/server.js.map +2 -2
- package/dist/integrations/mcp/tool-definitions.js +141 -5
- package/dist/integrations/mcp/tool-definitions.js.map +2 -2
- package/package.json +1 -1
- package/dist/cli/commands/agent.js +0 -286
- package/dist/cli/commands/agent.js.map +0 -7
- package/dist/cli/commands/chromadb.js +0 -482
- package/dist/cli/commands/chromadb.js.map +0 -7
- package/dist/cli/commands/gc.js +0 -251
- package/dist/cli/commands/gc.js.map +0 -7
- package/dist/cli/commands/infinite-storage.js +0 -292
- package/dist/cli/commands/infinite-storage.js.map +0 -7
- package/dist/cli/commands/linear-create.js +0 -171
- package/dist/cli/commands/linear-create.js.map +0 -7
- package/dist/cli/commands/linear-list.js +0 -103
- package/dist/cli/commands/linear-list.js.map +0 -7
- package/dist/cli/commands/linear-migrate.js +0 -64
- package/dist/cli/commands/linear-migrate.js.map +0 -7
- package/dist/cli/commands/linear-test.js +0 -134
- package/dist/cli/commands/linear-test.js.map +0 -7
- package/dist/cli/commands/tui.js +0 -77
- package/dist/cli/commands/tui.js.map +0 -7
- package/dist/cli/commands/webhook.js +0 -181
- package/dist/cli/commands/webhook.js.map +0 -7
- package/dist/cli/streamlined-cli.js +0 -144
- package/dist/cli/streamlined-cli.js.map +0 -7
- package/dist/core/events/event-bus.js +0 -110
- package/dist/core/events/event-bus.js.map +0 -7
- package/dist/core/frame/workflow-templates-stub.js +0 -42
- package/dist/core/frame/workflow-templates-stub.js.map +0 -7
- package/dist/core/plugins/plugin-interface.js +0 -87
- package/dist/core/plugins/plugin-interface.js.map +0 -7
- package/dist/core/session/clear-survival-stub.js +0 -53
- package/dist/core/session/clear-survival-stub.js.map +0 -7
- package/dist/core/storage/chromadb-simple.js +0 -172
- package/dist/core/storage/chromadb-simple.js.map +0 -7
- package/dist/core/storage/simplified-storage.js +0 -328
- package/dist/core/storage/simplified-storage.js.map +0 -7
- package/dist/features/tasks/pebbles-task-store.js +0 -647
- package/dist/features/tasks/pebbles-task-store.js.map +0 -7
- package/dist/integrations/linear/sync-enhanced.js +0 -202
- package/dist/integrations/linear/sync-enhanced.js.map +0 -7
- package/dist/plugins/linear/index.js +0 -166
- package/dist/plugins/linear/index.js.map +0 -7
- package/dist/plugins/loader.js +0 -57
- package/dist/plugins/loader.js.map +0 -7
- package/dist/plugins/plugin-interface.js +0 -67
- package/dist/plugins/plugin-interface.js.map +0 -7
- package/dist/plugins/ralph/simple-ralph-plugin.js +0 -305
- package/dist/plugins/ralph/simple-ralph-plugin.js.map +0 -7
- package/dist/plugins/ralph/use-cases/code-generator.js +0 -151
- package/dist/plugins/ralph/use-cases/code-generator.js.map +0 -7
- package/dist/plugins/ralph/use-cases/test-generator.js +0 -201
- package/dist/plugins/ralph/use-cases/test-generator.js.map +0 -7
- package/dist/utils/logger.js +0 -52
- package/dist/utils/logger.js.map +0 -7
package/dist/core/plugins/plugin-interface.js.map
@@ -1,7 +0,0 @@
{
"version": 3,
"sources": ["../../../src/core/plugins/plugin-interface.ts"],
"sourcesContent": ["/**\n * Plugin Interface - Defines contract for all StackMemory plugins\n * Enables modular architecture with clear boundaries\n */\n\nimport { EventBus } from '../events/event-bus.js';\n\nexport interface PluginConfig {\n name: string;\n version: string;\n enabled: boolean;\n options?: Record<string, any>;\n}\n\nexport interface PluginContext {\n eventBus: EventBus;\n config: PluginConfig;\n dataDir: string;\n getRepository<T>(name: string): T;\n registerRepository(name: string, repository: any): void;\n}\n\nexport interface Plugin {\n readonly name: string;\n readonly version: string;\n readonly description: string;\n readonly dependencies?: string[];\n\n initialize(context: PluginContext): Promise<void>;\n start(): Promise<void>;\n stop(): Promise<void>;\n shutdown(): Promise<void>;\n \n getStatus(): PluginStatus;\n getMetrics(): PluginMetrics;\n validateConfig(config: PluginConfig): ValidationResult;\n}\n\nexport interface PluginStatus {\n state: 'uninitialized' | 'initialized' | 'starting' | 'running' | 'stopping' | 'stopped' | 'error';\n message?: string;\n lastActivity?: number;\n error?: Error;\n}\n\nexport interface PluginMetrics {\n startTime?: number;\n uptime?: number;\n eventsEmitted: number;\n eventsReceived: number;\n errors: number;\n custom?: Record<string, number>;\n}\n\nexport interface ValidationResult {\n valid: boolean;\n errors?: string[];\n warnings?: string[];\n}\n\nexport abstract class BasePlugin implements Plugin {\n abstract readonly name: string;\n abstract readonly version: string;\n abstract readonly description: string;\n readonly dependencies?: string[];\n\n protected context?: PluginContext;\n protected status: PluginStatus = { state: 'uninitialized' };\n protected metrics: PluginMetrics = {\n eventsEmitted: 0,\n eventsReceived: 0,\n errors: 0\n };\n protected startTime?: number;\n\n async initialize(context: PluginContext): Promise<void> {\n this.context = context;\n this.status = { state: 'initialized' };\n await this.onInitialize();\n }\n\n async start(): Promise<void> {\n this.status = { state: 'starting' };\n this.startTime = Date.now();\n await this.onStart();\n this.status = { state: 'running', lastActivity: Date.now() };\n }\n\n async stop(): Promise<void> {\n this.status = { state: 'stopping' };\n await this.onStop();\n this.status = { state: 'stopped' };\n }\n\n async shutdown(): Promise<void> {\n if (this.status.state === 'running') {\n await this.stop();\n }\n await this.onShutdown();\n this.status = { state: 'uninitialized' };\n }\n\n getStatus(): PluginStatus {\n return { ...this.status };\n }\n\n getMetrics(): PluginMetrics {\n return {\n ...this.metrics,\n startTime: this.startTime,\n uptime: this.startTime ? Date.now() - this.startTime : 0\n };\n }\n\n validateConfig(config: PluginConfig): ValidationResult {\n const errors: string[] = [];\n const warnings: string[] = [];\n\n if (config.name !== this.name) {\n errors.push(`Config name '${config.name}' doesn't match plugin name '${this.name}'`);\n }\n\n const customValidation = this.onValidateConfig(config);\n if (customValidation.errors) errors.push(...customValidation.errors);\n if (customValidation.warnings) warnings.push(...customValidation.warnings);\n\n return {\n valid: errors.length === 0,\n errors: errors.length > 0 ? errors : undefined,\n warnings: warnings.length > 0 ? 
warnings : undefined\n };\n }\n\n protected emit(eventType: string, data: Record<string, any>): void {\n if (!this.context) throw new Error('Plugin not initialized');\n \n this.context.eventBus.emit({\n type: eventType,\n source: this.name,\n data\n });\n this.metrics.eventsEmitted++;\n this.status.lastActivity = Date.now();\n }\n\n protected on(eventType: string, handler: (event: any) => void | Promise<void>): void {\n if (!this.context) throw new Error('Plugin not initialized');\n \n this.context.eventBus.on(eventType, async (event) => {\n this.metrics.eventsReceived++;\n this.status.lastActivity = Date.now();\n try {\n await handler(event);\n } catch (error) {\n this.metrics.errors++;\n this.status.error = error as Error;\n console.error(`Plugin ${this.name} error handling ${eventType}:`, error);\n }\n });\n }\n\n // Hooks for subclasses\n protected abstract onInitialize(): Promise<void>;\n protected abstract onStart(): Promise<void>;\n protected abstract onStop(): Promise<void>;\n protected abstract onShutdown(): Promise<void>;\n protected abstract onValidateConfig(config: PluginConfig): ValidationResult;\n}"],
"mappings": "AA4DO,MAAe,WAA6B;AAAA,EAIxC;AAAA,EAEC;AAAA,EACA,SAAuB,EAAE,OAAO,gBAAgB;AAAA,EAChD,UAAyB;AAAA,IACjC,eAAe;AAAA,IACf,gBAAgB;AAAA,IAChB,QAAQ;AAAA,EACV;AAAA,EACU;AAAA,EAEV,MAAM,WAAW,SAAuC;AACtD,SAAK,UAAU;AACf,SAAK,SAAS,EAAE,OAAO,cAAc;AACrC,UAAM,KAAK,aAAa;AAAA,EAC1B;AAAA,EAEA,MAAM,QAAuB;AAC3B,SAAK,SAAS,EAAE,OAAO,WAAW;AAClC,SAAK,YAAY,KAAK,IAAI;AAC1B,UAAM,KAAK,QAAQ;AACnB,SAAK,SAAS,EAAE,OAAO,WAAW,cAAc,KAAK,IAAI,EAAE;AAAA,EAC7D;AAAA,EAEA,MAAM,OAAsB;AAC1B,SAAK,SAAS,EAAE,OAAO,WAAW;AAClC,UAAM,KAAK,OAAO;AAClB,SAAK,SAAS,EAAE,OAAO,UAAU;AAAA,EACnC;AAAA,EAEA,MAAM,WAA0B;AAC9B,QAAI,KAAK,OAAO,UAAU,WAAW;AACnC,YAAM,KAAK,KAAK;AAAA,IAClB;AACA,UAAM,KAAK,WAAW;AACtB,SAAK,SAAS,EAAE,OAAO,gBAAgB;AAAA,EACzC;AAAA,EAEA,YAA0B;AACxB,WAAO,EAAE,GAAG,KAAK,OAAO;AAAA,EAC1B;AAAA,EAEA,aAA4B;AAC1B,WAAO;AAAA,MACL,GAAG,KAAK;AAAA,MACR,WAAW,KAAK;AAAA,MAChB,QAAQ,KAAK,YAAY,KAAK,IAAI,IAAI,KAAK,YAAY;AAAA,IACzD;AAAA,EACF;AAAA,EAEA,eAAe,QAAwC;AACrD,UAAM,SAAmB,CAAC;AAC1B,UAAM,WAAqB,CAAC;AAE5B,QAAI,OAAO,SAAS,KAAK,MAAM;AAC7B,aAAO,KAAK,gBAAgB,OAAO,IAAI,gCAAgC,KAAK,IAAI,GAAG;AAAA,IACrF;AAEA,UAAM,mBAAmB,KAAK,iBAAiB,MAAM;AACrD,QAAI,iBAAiB,OAAQ,QAAO,KAAK,GAAG,iBAAiB,MAAM;AACnE,QAAI,iBAAiB,SAAU,UAAS,KAAK,GAAG,iBAAiB,QAAQ;AAEzE,WAAO;AAAA,MACL,OAAO,OAAO,WAAW;AAAA,MACzB,QAAQ,OAAO,SAAS,IAAI,SAAS;AAAA,MACrC,UAAU,SAAS,SAAS,IAAI,WAAW;AAAA,IAC7C;AAAA,EACF;AAAA,EAEU,KAAK,WAAmB,MAAiC;AACjE,QAAI,CAAC,KAAK,QAAS,OAAM,IAAI,MAAM,wBAAwB;AAE3D,SAAK,QAAQ,SAAS,KAAK;AAAA,MACzB,MAAM;AAAA,MACN,QAAQ,KAAK;AAAA,MACb;AAAA,IACF,CAAC;AACD,SAAK,QAAQ;AACb,SAAK,OAAO,eAAe,KAAK,IAAI;AAAA,EACtC;AAAA,EAEU,GAAG,WAAmB,SAAqD;AACnF,QAAI,CAAC,KAAK,QAAS,OAAM,IAAI,MAAM,wBAAwB;AAE3D,SAAK,QAAQ,SAAS,GAAG,WAAW,OAAO,UAAU;AACnD,WAAK,QAAQ;AACb,WAAK,OAAO,eAAe,KAAK,IAAI;AACpC,UAAI;AACF,cAAM,QAAQ,KAAK;AAAA,MACrB,SAAS,OAAO;AACd,aAAK,QAAQ;AACb,aAAK,OAAO,QAAQ;AACpB,gBAAQ,MAAM,UAAU,KAAK,IAAI,mBAAmB,SAAS,KAAK,KAAK;AAAA,MACzE;AAAA,IACF,CAAC;AAAA,EACH;AAQF;",
"names": []
}
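The source embedded above shows the removed plugin contract: concrete plugins extend BasePlugin, implement the onInitialize/onStart/onStop/onShutdown/onValidateConfig hooks, and talk to the event bus through the protected emit/on helpers. A minimal sketch of such a subclass follows, assuming the pre-0.5.24 layout; the HeartbeatPlugin name, event names, and import path are illustrative only.

import {
  BasePlugin,
  PluginConfig,
  ValidationResult,
} from './core/plugins/plugin-interface.js';

// Hypothetical plugin used only to illustrate the BasePlugin lifecycle.
class HeartbeatPlugin extends BasePlugin {
  readonly name = 'heartbeat';
  readonly version = '0.1.0';
  readonly description = 'Emits a periodic heartbeat event';

  private timer?: ReturnType<typeof setInterval>;

  protected async onInitialize(): Promise<void> {
    // BasePlugin.on() wraps the handler with metrics and error tracking.
    this.on('system:ping', () => this.emit('heartbeat:pong', { at: Date.now() }));
  }

  protected async onStart(): Promise<void> {
    // BasePlugin.emit() stamps each event with this plugin's name as its source.
    this.timer = setInterval(() => this.emit('heartbeat:tick', { at: Date.now() }), 60_000);
  }

  protected async onStop(): Promise<void> {
    if (this.timer) clearInterval(this.timer);
  }

  protected async onShutdown(): Promise<void> {}

  protected onValidateConfig(config: PluginConfig): ValidationResult {
    // The base validateConfig() already checks that config.name matches this.name.
    return { valid: true };
  }
}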
package/dist/core/session/clear-survival-stub.js
@@ -1,53 +0,0 @@
import * as path from "path";
import * as fs from "fs/promises";
class ClearSurvival {
  constructor(frameManager, handoffGenerator, projectRoot) {
    this.frameManager = frameManager;
    this.handoffGenerator = handoffGenerator;
    this.projectRoot = projectRoot;
  }
  async getContextUsage() {
    return {
      totalFrames: 50,
      activeFrames: 3,
      sessionCount: 2,
      percentageUsed: 25
    };
  }
  assessContextStatus(usage) {
    if (usage.percentageUsed < 50) return "healthy";
    if (usage.percentageUsed < 70) return "moderate";
    if (usage.percentageUsed < 85) return "critical";
    return "saved";
  }
  async saveContinuityLedger() {
    const ledgerPath = path.join(
      this.projectRoot,
      ".stackmemory",
      "continuity.json"
    );
    const ledger = {
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      activeFrames: [],
      decisions: [],
      context: {
        importantTasks: []
      }
    };
    await fs.mkdir(path.dirname(ledgerPath), { recursive: true });
    await fs.writeFile(ledgerPath, JSON.stringify(ledger, null, 2));
    return ledgerPath;
  }
  async restoreFromLedger() {
    return {
      success: true,
      message: "Restored from ledger",
      restoredFrames: 2,
      restoredDecisions: 1
    };
  }
}
export {
  ClearSurvival
};
//# sourceMappingURL=clear-survival-stub.js.map
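A short usage sketch against the removed stub above; the import path reflects the old dist layout, and the null collaborators are acceptable here only because the stub never dereferences frameManager or handoffGenerator.

import { ClearSurvival } from './core/session/clear-survival-stub.js';

// Checkpoint the session once context pressure crosses the stub's thresholds.
async function checkpointIfNeeded(projectRoot: string): Promise<void> {
  const survival = new ClearSurvival(null, null, projectRoot);

  const usage = await survival.getContextUsage();     // stub always reports 25% used
  const status = survival.assessContextStatus(usage); // 'healthy' | 'moderate' | 'critical' | 'saved'

  if (status === 'critical' || status === 'saved') {
    // Writes <projectRoot>/.stackmemory/continuity.json and returns its path.
    const ledgerPath = await survival.saveContinuityLedger();
    console.log(`continuity ledger written to ${ledgerPath}`);
  }
}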
package/dist/core/session/clear-survival-stub.js.map
@@ -1,7 +0,0 @@
{
"version": 3,
"sources": ["../../../src/core/session/clear-survival-stub.ts"],
"sourcesContent": ["/**\n * Stub implementation of ClearSurvival for testing\n */\nimport * as path from 'path';\nimport * as fs from 'fs/promises';\n\nexport interface ContextUsage {\n totalFrames: number;\n activeFrames: number;\n sessionCount: number;\n percentageUsed: number;\n}\n\nexport class ClearSurvival {\n constructor(\n private frameManager: any,\n private handoffGenerator: any,\n private projectRoot: string\n ) {}\n\n async getContextUsage(): Promise<ContextUsage> {\n // Return mock usage data\n return {\n totalFrames: 50,\n activeFrames: 3,\n sessionCount: 2,\n percentageUsed: 25,\n };\n }\n\n assessContextStatus(usage: ContextUsage): string {\n if (usage.percentageUsed < 50) return 'healthy';\n if (usage.percentageUsed < 70) return 'moderate';\n if (usage.percentageUsed < 85) return 'critical';\n return 'saved';\n }\n\n async saveContinuityLedger(): Promise<string> {\n const ledgerPath = path.join(\n this.projectRoot,\n '.stackmemory',\n 'continuity.json'\n );\n const ledger = {\n timestamp: new Date().toISOString(),\n activeFrames: [],\n decisions: [],\n context: {\n importantTasks: [],\n },\n };\n\n await fs.mkdir(path.dirname(ledgerPath), { recursive: true });\n await fs.writeFile(ledgerPath, JSON.stringify(ledger, null, 2));\n return ledgerPath;\n }\n\n async restoreFromLedger(): Promise<{\n success: boolean;\n message: string;\n restoredFrames: number;\n restoredDecisions: number;\n }> {\n return {\n success: true,\n message: 'Restored from ledger',\n restoredFrames: 2,\n restoredDecisions: 1,\n };\n }\n}\n"],
"mappings": "AAGA,YAAY,UAAU;AACtB,YAAY,QAAQ;AASb,MAAM,cAAc;AAAA,EACzB,YACU,cACA,kBACA,aACR;AAHQ;AACA;AACA;AAAA,EACP;AAAA,EAEH,MAAM,kBAAyC;AAE7C,WAAO;AAAA,MACL,aAAa;AAAA,MACb,cAAc;AAAA,MACd,cAAc;AAAA,MACd,gBAAgB;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,oBAAoB,OAA6B;AAC/C,QAAI,MAAM,iBAAiB,GAAI,QAAO;AACtC,QAAI,MAAM,iBAAiB,GAAI,QAAO;AACtC,QAAI,MAAM,iBAAiB,GAAI,QAAO;AACtC,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,uBAAwC;AAC5C,UAAM,aAAa,KAAK;AAAA,MACtB,KAAK;AAAA,MACL;AAAA,MACA;AAAA,IACF;AACA,UAAM,SAAS;AAAA,MACb,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,cAAc,CAAC;AAAA,MACf,WAAW,CAAC;AAAA,MACZ,SAAS;AAAA,QACP,gBAAgB,CAAC;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,GAAG,MAAM,KAAK,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAC5D,UAAM,GAAG,UAAU,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAC9D,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,oBAKH;AACD,WAAO;AAAA,MACL,SAAS;AAAA,MACT,SAAS;AAAA,MACT,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,IACrB;AAAA,EACF;AACF;",
"names": []
}
package/dist/core/storage/chromadb-simple.js
@@ -1,172 +0,0 @@
import { ChromaClient } from "chromadb";
import { v4 as uuidv4 } from "uuid";
class ChromaDBAdapter {
  client;
  collection = null;
  config;
  fallbackStorage = /* @__PURE__ */ new Map();
  constructor(config) {
    this.config = config;
    try {
      if (config.apiKey && (config.apiUrl?.includes("trychroma.com") || config.apiKey.startsWith("ck-"))) {
        const tenant = process.env.CHROMADB_TENANT;
        const database = process.env.CHROMADB_DATABASE || "stackmemory";
        this.client = new ChromaClient({
          ssl: true,
          host: "api.trychroma.com",
          port: 443,
          headers: {
            "X-Chroma-Token": config.apiKey
          },
          tenant,
          database
        });
      } else {
        this.client = new ChromaClient();
      }
    } catch (error) {
      console.log("Using in-memory ChromaDB client");
      this.client = new ChromaClient();
    }
  }
  async initialize() {
    try {
      const collectionName = this.config.collectionName || "stackmemory_context";
      this.collection = await this.client.getOrCreateCollection({
        name: collectionName,
        metadata: {
          description: "StackMemory Claude context",
          version: "2.0.0"
        }
      });
      console.log(
        `[${(/* @__PURE__ */ new Date()).toISOString()}] INFO: ChromaDB collection '${collectionName}' initialized`
      );
    } catch (error) {
      console.log(
        `ChromaDB service not available, using JSON fallback storage`
      );
      this.collection = null;
    }
  }
  async store(context) {
    const id = context.id || `ctx_${uuidv4()}`;
    const metadata = {
      timestamp: context.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
      type: context.type || "context",
      user_id: context.user_id || this.config.userId || "default",
      project: context.project || "stackmemory"
    };
    if (context.session_id) metadata.session_id = context.session_id;
    if (context.metadata) {
      Object.entries(context.metadata).forEach(([key, value]) => {
        if (value !== void 0 && value !== null) {
          metadata[key] = value;
        }
      });
    }
    if (!this.collection) {
      await this.initialize();
    }
    try {
      if (this.collection) {
        await this.collection.upsert({
          ids: [id],
          documents: [context.content || JSON.stringify(context)],
          metadatas: [metadata]
        });
      } else {
        await this.storeToJsonFallback(id, context, metadata);
      }
      return {
        success: true,
        id,
        stored_at: (/* @__PURE__ */ new Date()).toISOString()
      };
    } catch (error) {
      console.error("Failed to store context:", error.message);
      try {
        await this.storeToJsonFallback(id, context, metadata);
        return { success: true, id, stored_at: (/* @__PURE__ */ new Date()).toISOString() };
      } catch (fallbackError) {
        return { success: false, error: fallbackError.message };
      }
    }
  }
  async storeToJsonFallback(id, context, metadata) {
    const fs = await import("fs");
    const path = await import("path");
    const os = await import("os");
    const storageDir = path.join(
      os.homedir(),
      ".stackmemory",
      "context-storage"
    );
    const storageFile = path.join(storageDir, "contexts.jsonl");
    if (!fs.existsSync(storageDir)) {
      fs.mkdirSync(storageDir, { recursive: true });
    }
    const entry = {
      id,
      content: context.content || JSON.stringify(context),
      metadata,
      stored_at: (/* @__PURE__ */ new Date()).toISOString()
    };
    fs.appendFileSync(storageFile, JSON.stringify(entry) + "\n");
  }
  async search(params) {
    if (!this.collection) {
      await this.initialize();
    }
    try {
      const query = params.query || "";
      const limit = params.limit || 10;
      const where = {};
      if (params.filter) {
        Object.entries(params.filter).forEach(([key, value]) => {
          if (value !== void 0 && value !== null) {
            where[key] = value;
          }
        });
      }
      const results = await this.collection.query({
        queryTexts: [query],
        nResults: limit,
        where: Object.keys(where).length > 0 ? where : void 0
      });
      const contexts = [];
      if (results.documents && results.documents[0]) {
        for (let i = 0; i < results.documents[0].length; i++) {
          contexts.push({
            id: results.ids[0][i],
            content: results.documents[0][i],
            metadata: results.metadatas?.[0]?.[i] || {},
            distance: results.distances?.[0]?.[i] || 0
          });
        }
      }
      return contexts;
    } catch (error) {
      console.error("Failed to search contexts:", error.message);
      return [];
    }
  }
  async deleteCollection() {
    if (this.collection) {
      await this.client.deleteCollection({
        name: this.config.collectionName || "stackmemory_context"
      });
      console.log("Collection deleted");
    }
  }
  async listCollections() {
    const collections = await this.client.listCollections();
    return collections;
  }
}
var chromadb_simple_default = ChromaDBAdapter;
export {
  ChromaDBAdapter,
  chromadb_simple_default as default
};
//# sourceMappingURL=chromadb-simple.js.map
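A usage sketch for the removed adapter above; the config values are placeholders. An apiKey starting with 'ck-' (plus CHROMADB_TENANT/CHROMADB_DATABASE in the environment) selects the trychroma.com cloud client, anything else falls back to a local ChromaClient, and when no collection can be reached store() appends to ~/.stackmemory/context-storage/contexts.jsonl while search() simply returns an empty list.

import { ChromaDBAdapter } from './core/storage/chromadb-simple.js';

async function rememberDecision(): Promise<void> {
  const adapter = new ChromaDBAdapter({
    apiKey: process.env.CHROMADB_API_KEY,   // a 'ck-...' key switches to ChromaDB Cloud
    collectionName: 'stackmemory_context',
    userId: 'demo-user',
  });

  await adapter.initialize();

  // Fields not supplied (timestamp, project, ...) get the defaults shown above.
  await adapter.store({
    content: 'Chose SQLite over PostgreSQL for local installs',
    type: 'decision',
    session_id: 'session-42',
    metadata: { component: 'storage' },
  });

  const hits = await adapter.search({ query: 'SQLite decision', limit: 5 });
  for (const hit of hits) {
    console.log(hit.id, hit.distance, hit.content);
  }
}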
package/dist/core/storage/chromadb-simple.js.map
@@ -1,7 +0,0 @@
{
"version": 3,
"sources": ["../../../src/core/storage/chromadb-simple.ts"],
"sourcesContent": ["/**\n * Simplified ChromaDB adapter for Claude hooks\n */\n\nimport { ChromaClient } from 'chromadb';\nimport { v4 as uuidv4 } from 'uuid';\n\nexport class ChromaDBAdapter {\n private client: ChromaClient;\n private collection: any = null;\n private config: any;\n private fallbackStorage: Map<string, any> = new Map();\n\n constructor(config: any) {\n this.config = config;\n\n try {\n // Try to initialize ChromaDB client\n if (config.apiKey && (config.apiUrl?.includes('trychroma.com') || config.apiKey.startsWith('ck-'))) {\n // Cloud configuration - ChromaDB Cloud uses API key starting with 'ck-'\n const tenant = process.env.CHROMADB_TENANT;\n const database = process.env.CHROMADB_DATABASE || 'stackmemory';\n \n this.client = new ChromaClient({\n ssl: true,\n host: 'api.trychroma.com',\n port: 443,\n headers: {\n 'X-Chroma-Token': config.apiKey,\n },\n tenant: tenant,\n database: database\n } as any);\n } else {\n // In-memory/local configuration (no external service needed)\n this.client = new ChromaClient();\n }\n } catch (error: unknown) {\n // Fallback to in-memory client\n console.log('Using in-memory ChromaDB client');\n this.client = new ChromaClient();\n }\n }\n\n async initialize(): Promise<void> {\n try {\n // Use a single collection for all context\n const collectionName =\n this.config.collectionName || 'stackmemory_context';\n\n // Get or create collection\n this.collection = await this.client.getOrCreateCollection({\n name: collectionName,\n metadata: {\n description: 'StackMemory Claude context',\n version: '2.0.0',\n },\n });\n\n console.log(\n `[${new Date().toISOString()}] INFO: ChromaDB collection '${collectionName}' initialized`\n );\n } catch (error: any) {\n console.log(\n `ChromaDB service not available, using JSON fallback storage`\n );\n this.collection = null; // Use fallback\n }\n }\n\n async store(context: any): Promise<any> {\n const id = context.id || `ctx_${uuidv4()}`;\n\n // Prepare metadata - only include non-undefined values\n const metadata: any = {\n timestamp: context.timestamp || new Date().toISOString(),\n type: context.type || 'context',\n user_id: context.user_id || this.config.userId || 'default',\n project: context.project || 'stackmemory',\n };\n\n // Add optional metadata if defined\n if (context.session_id) metadata.session_id = context.session_id;\n if (context.metadata) {\n Object.entries(context.metadata).forEach(([key, value]) => {\n if (value !== undefined && value !== null) {\n metadata[key] = value;\n }\n });\n }\n\n if (!this.collection) {\n await this.initialize();\n }\n\n try {\n if (this.collection) {\n // Store in ChromaDB if available\n await this.collection.upsert({\n ids: [id],\n documents: [context.content || JSON.stringify(context)],\n metadatas: [metadata],\n });\n } else {\n // Fallback to JSON file storage\n await this.storeToJsonFallback(id, context, metadata);\n }\n\n return {\n success: true,\n id,\n stored_at: new Date().toISOString(),\n };\n } catch (error: any) {\n console.error('Failed to store context:', error.message);\n // Try fallback storage\n try {\n await this.storeToJsonFallback(id, context, metadata);\n return { success: true, id, stored_at: new Date().toISOString() };\n } catch (fallbackError: any) {\n return { success: false, error: fallbackError.message };\n }\n }\n }\n\n private async storeToJsonFallback(\n id: string,\n context: any,\n metadata: any\n ): Promise<void> {\n const fs = await import('fs');\n const path = await import('path');\n const os = await import('os');\n\n const 
storageDir = path.join(\n os.homedir(),\n '.stackmemory',\n 'context-storage'\n );\n const storageFile = path.join(storageDir, 'contexts.jsonl');\n\n // Ensure directory exists\n if (!fs.existsSync(storageDir)) {\n fs.mkdirSync(storageDir, { recursive: true });\n }\n\n const entry = {\n id,\n content: context.content || JSON.stringify(context),\n metadata,\n stored_at: new Date().toISOString(),\n };\n\n // Append to JSONL file\n fs.appendFileSync(storageFile, JSON.stringify(entry) + '\\n');\n }\n\n async search(params: any): Promise<any[]> {\n if (!this.collection) {\n await this.initialize();\n }\n\n try {\n const query = params.query || '';\n const limit = params.limit || 10;\n\n // Build where clause\n const where: any = {};\n if (params.filter) {\n Object.entries(params.filter).forEach(([key, value]) => {\n if (value !== undefined && value !== null) {\n where[key] = value;\n }\n });\n }\n\n // Query collection\n const results = await this.collection.query({\n queryTexts: [query],\n nResults: limit,\n where: Object.keys(where).length > 0 ? where : undefined,\n });\n\n // Format results\n const contexts: any[] = [];\n if (results.documents && results.documents[0]) {\n for (let i = 0; i < results.documents[0].length; i++) {\n contexts.push({\n id: results.ids[0][i],\n content: results.documents[0][i],\n metadata: results.metadatas?.[0]?.[i] || {},\n distance: results.distances?.[0]?.[i] || 0,\n });\n }\n }\n\n return contexts;\n } catch (error: any) {\n console.error('Failed to search contexts:', error.message);\n return [];\n }\n }\n\n async deleteCollection(): Promise<void> {\n if (this.collection) {\n await this.client.deleteCollection({\n name: this.config.collectionName || 'stackmemory_context',\n });\n console.log('Collection deleted');\n }\n }\n\n async listCollections(): Promise<any[]> {\n const collections = await this.client.listCollections();\n return collections;\n }\n}\n\nexport default ChromaDBAdapter;\n"],
"mappings": "AAIA,SAAS,oBAAoB;AAC7B,SAAS,MAAM,cAAc;AAEtB,MAAM,gBAAgB;AAAA,EACnB;AAAA,EACA,aAAkB;AAAA,EAClB;AAAA,EACA,kBAAoC,oBAAI,IAAI;AAAA,EAEpD,YAAY,QAAa;AACvB,SAAK,SAAS;AAEd,QAAI;AAEF,UAAI,OAAO,WAAW,OAAO,QAAQ,SAAS,eAAe,KAAK,OAAO,OAAO,WAAW,KAAK,IAAI;AAElG,cAAM,SAAS,QAAQ,IAAI;AAC3B,cAAM,WAAW,QAAQ,IAAI,qBAAqB;AAElD,aAAK,SAAS,IAAI,aAAa;AAAA,UAC7B,KAAK;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,YACP,kBAAkB,OAAO;AAAA,UAC3B;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAQ;AAAA,MACV,OAAO;AAEL,aAAK,SAAS,IAAI,aAAa;AAAA,MACjC;AAAA,IACF,SAAS,OAAgB;AAEvB,cAAQ,IAAI,iCAAiC;AAC7C,WAAK,SAAS,IAAI,aAAa;AAAA,IACjC;AAAA,EACF;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI;AAEF,YAAM,iBACJ,KAAK,OAAO,kBAAkB;AAGhC,WAAK,aAAa,MAAM,KAAK,OAAO,sBAAsB;AAAA,QACxD,MAAM;AAAA,QACN,UAAU;AAAA,UACR,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,MACF,CAAC;AAED,cAAQ;AAAA,QACN,KAAI,oBAAI,KAAK,GAAE,YAAY,CAAC,gCAAgC,cAAc;AAAA,MAC5E;AAAA,IACF,SAAS,OAAY;AACnB,cAAQ;AAAA,QACN;AAAA,MACF;AACA,WAAK,aAAa;AAAA,IACpB;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,SAA4B;AACtC,UAAM,KAAK,QAAQ,MAAM,OAAO,OAAO,CAAC;AAGxC,UAAM,WAAgB;AAAA,MACpB,WAAW,QAAQ,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACvD,MAAM,QAAQ,QAAQ;AAAA,MACtB,SAAS,QAAQ,WAAW,KAAK,OAAO,UAAU;AAAA,MAClD,SAAS,QAAQ,WAAW;AAAA,IAC9B;AAGA,QAAI,QAAQ,WAAY,UAAS,aAAa,QAAQ;AACtD,QAAI,QAAQ,UAAU;AACpB,aAAO,QAAQ,QAAQ,QAAQ,EAAE,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM;AACzD,YAAI,UAAU,UAAa,UAAU,MAAM;AACzC,mBAAS,GAAG,IAAI;AAAA,QAClB;AAAA,MACF,CAAC;AAAA,IACH;AAEA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,KAAK,WAAW;AAAA,IACxB;AAEA,QAAI;AACF,UAAI,KAAK,YAAY;AAEnB,cAAM,KAAK,WAAW,OAAO;AAAA,UAC3B,KAAK,CAAC,EAAE;AAAA,UACR,WAAW,CAAC,QAAQ,WAAW,KAAK,UAAU,OAAO,CAAC;AAAA,UACtD,WAAW,CAAC,QAAQ;AAAA,QACtB,CAAC;AAAA,MACH,OAAO;AAEL,cAAM,KAAK,oBAAoB,IAAI,SAAS,QAAQ;AAAA,MACtD;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MACpC;AAAA,IACF,SAAS,OAAY;AACnB,cAAQ,MAAM,4BAA4B,MAAM,OAAO;AAEvD,UAAI;AACF,cAAM,KAAK,oBAAoB,IAAI,SAAS,QAAQ;AACpD,eAAO,EAAE,SAAS,MAAM,IAAI,YAAW,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,MAClE,SAAS,eAAoB;AAC3B,eAAO,EAAE,SAAS,OAAO,OAAO,cAAc,QAAQ;AAAA,MACxD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,oBACZ,IACA,SACA,UACe;AACf,UAAM,KAAK,MAAM,OAAO,IAAI;AAC5B,UAAM,OAAO,MAAM,OAAO,MAAM;AAChC,UAAM,KAAK,MAAM,OAAO,IAAI;AAE5B,UAAM,aAAa,KAAK;AAAA,MACtB,GAAG,QAAQ;AAAA,MACX;AAAA,MACA;AAAA,IACF;AACA,UAAM,cAAc,KAAK,KAAK,YAAY,gBAAgB;AAG1D,QAAI,CAAC,GAAG,WAAW,UAAU,GAAG;AAC9B,SAAG,UAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,IAC9C;AAEA,UAAM,QAAQ;AAAA,MACZ;AAAA,MACA,SAAS,QAAQ,WAAW,KAAK,UAAU,OAAO;AAAA,MAClD;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAGA,OAAG,eAAe,aAAa,KAAK,UAAU,KAAK,IAAI,IAAI;AAAA,EAC7D;AAAA,EAEA,MAAM,OAAO,QAA6B;AACxC,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,KAAK,WAAW;AAAA,IACxB;AAEA,QAAI;AACF,YAAM,QAAQ,OAAO,SAAS;AAC9B,YAAM,QAAQ,OAAO,SAAS;AAG9B,YAAM,QAAa,CAAC;AACpB,UAAI,OAAO,QAAQ;AACjB,eAAO,QAAQ,OAAO,MAAM,EAAE,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM;AACtD,cAAI,UAAU,UAAa,UAAU,MAAM;AACzC,kBAAM,GAAG,IAAI;AAAA,UACf;AAAA,QACF,CAAC;AAAA,MACH;AAGA,YAAM,UAAU,MAAM,KAAK,WAAW,MAAM;AAAA,QAC1C,YAAY,CAAC,KAAK;AAAA,QAClB,UAAU;AAAA,QACV,OAAO,OAAO,KAAK,KAAK,EAAE,SAAS,IAAI,QAAQ;AAAA,MACjD,CAAC;AAGD,YAAM,WAAkB,CAAC;AACzB,UAAI,QAAQ,aAAa,QAAQ,UAAU,CAAC,GAAG;AAC7C,iBAAS,IAAI,GAAG,IAAI,QAAQ,UAAU,CAAC,EAAE,QAAQ,KAAK;AACpD,mBAAS,KAAK;AAAA,YACZ,IAAI,QAAQ,IAAI,CAAC,EAAE,CAAC;AAAA,YACpB,SAAS,QAAQ,UAAU,CAAC,EAAE,CAAC;AAAA,YAC/B,UAAU,QAAQ,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC;AAAA,YAC1C,UAAU,QAAQ,YAAY,CAAC,IAAI,CAAC,KAAK;AAAA,UAC3C,CAAC;AAAA,QACH;AAAA,MACF;AAEA,aAAO;AAAA,IACT,SAAS,OAAY;AACnB,cAAQ,MAAM,8BAA8B,MAAM,OAAO;AACzD,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA,EAEA,MAAM,mBAAkC;AACtC,QAAI,KAAK,YAAY;AACnB,YAAM,KAAK,OAAO,iBAAiB;AAAA,QACjC,MAAM,KAAK
,OAAO,kBAAkB;AAAA,MACtC,CAAC;AACD,cAAQ,IAAI,oBAAoB;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,MAAM,kBAAkC;AACtC,UAAM,cAAc,MAAM,KAAK,OAAO,gBAAgB;AACtD,WAAO;AAAA,EACT;AACF;AAEA,IAAO,0BAAQ;",
"names": []
}
package/dist/core/storage/simplified-storage.js
@@ -1,328 +0,0 @@
import Database from "better-sqlite3";
import { Pool } from "pg";
import { S3Client, PutObjectCommand, GetObjectCommand } from "@aws-sdk/client-s3";
import { logger } from "../monitoring/logger.js";
import * as zlib from "zlib";
import { promisify } from "util";
const gzipAsync = promisify(zlib.gzip);
const gunzipAsync = promisify(zlib.gunzip);
var StorageTier = /* @__PURE__ */ ((StorageTier2) => {
  StorageTier2["HOT"] = "hot";
  StorageTier2["COLD"] = "cold";
  return StorageTier2;
})(StorageTier || {});
const DEFAULT_SIMPLIFIED_CONFIG = {
  database: {
    type: process.env["DATABASE_URL"]?.startsWith("postgres") ? "postgresql" : "sqlite",
    url: process.env["DATABASE_URL"] || "./storage/stackmemory.db",
    maxConnections: 10
  },
  objectStorage: {
    endpoint: process.env["S3_ENDPOINT"] || "https://s3.amazonaws.com",
    bucket: process.env["S3_BUCKET"] || "stackmemory-archive",
    accessKeyId: process.env["S3_ACCESS_KEY_ID"] || "",
    secretAccessKey: process.env["S3_SECRET_ACCESS_KEY"] || "",
    region: process.env["S3_REGION"] || "us-east-1"
  },
  tiers: {
    archiveAfterDays: 30,
    // Archive after 30 days instead of complex 3-tier
    compressionThreshold: 1024
    // Compress items > 1KB
  }
};
class SimplifiedStorage {
  config;
  db;
  pgPool;
  s3Client;
  isInitialized = false;
  constructor(config = DEFAULT_SIMPLIFIED_CONFIG) {
    this.config = config;
    if (this.config.objectStorage.accessKeyId && this.config.objectStorage.secretAccessKey) {
      this.s3Client = new S3Client({
        endpoint: this.config.objectStorage.endpoint,
        region: this.config.objectStorage.region,
        credentials: {
          accessKeyId: this.config.objectStorage.accessKeyId,
          secretAccessKey: this.config.objectStorage.secretAccessKey
        }
      });
    }
  }
  async initialize() {
    if (this.isInitialized) return;
    try {
      if (this.config.database.type === "postgresql") {
        await this.initializePostgreSQL();
      } else {
        await this.initializeSQLite();
      }
      await this.createTables();
      this.isInitialized = true;
      this.startArchivalProcess();
      logger.info("Simplified storage initialized", {
        databaseType: this.config.database.type,
        objectStorageEnabled: !!this.s3Client
      });
    } catch (error) {
      logger.error("Failed to initialize simplified storage", { error });
      throw error;
    }
  }
  async initializePostgreSQL() {
    this.pgPool = new Pool({
      connectionString: this.config.database.url,
      max: this.config.database.maxConnections,
      idleTimeoutMillis: 3e4
    });
  }
  async initializeSQLite() {
    this.db = new Database(this.config.database.url);
    this.db.pragma("foreign_keys = ON");
    this.db.pragma("journal_mode = WAL");
  }
  async createTables() {
    const schema = `
      CREATE TABLE IF NOT EXISTS storage_items (
        id TEXT PRIMARY KEY,
        data BLOB,
        metadata TEXT,
        tier TEXT NOT NULL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        last_accessed TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        compressed BOOLEAN DEFAULT FALSE,
        object_key TEXT -- For cold storage reference
      );
      CREATE INDEX IF NOT EXISTS idx_storage_tier ON storage_items(tier);
      CREATE INDEX IF NOT EXISTS idx_storage_created ON storage_items(created_at);
    `;
    if (this.pgPool) {
      await this.pgPool.query(schema.replace(/BLOB/g, "BYTEA").replace(/TIMESTAMP DEFAULT CURRENT_TIMESTAMP/g, "TIMESTAMP DEFAULT NOW()"));
    } else if (this.db) {
      this.db.exec(schema);
    }
  }
  /**
   * Store item in hot tier (database)
   */
  async store(id, data, metadata = {}) {
    if (!this.isInitialized) await this.initialize();
    let processedData = data;
    let compressed = false;
    if (data.length > this.config.tiers.compressionThreshold) {
      processedData = await gzipAsync(data);
      compressed = true;
      logger.debug("Compressed storage item", { id, originalSize: data.length, compressedSize: processedData.length });
    }
    const item = {
      id,
      data: processedData,
      metadata,
      tier: "hot" /* HOT */,
      createdAt: /* @__PURE__ */ new Date(),
      lastAccessed: /* @__PURE__ */ new Date(),
      compressed
    };
    if (this.pgPool) {
      await this.pgPool.query(
        `INSERT INTO storage_items (id, data, metadata, tier, created_at, last_accessed, compressed)
         VALUES ($1, $2, $3, $4, $5, $6, $7)
         ON CONFLICT (id) DO UPDATE SET
           data = EXCLUDED.data,
           metadata = EXCLUDED.metadata,
           last_accessed = EXCLUDED.last_accessed,
           compressed = EXCLUDED.compressed`,
        [id, processedData, JSON.stringify(metadata), item.tier, item.createdAt, item.lastAccessed, compressed]
      );
    } else if (this.db) {
      const stmt = this.db.prepare(`
        INSERT OR REPLACE INTO storage_items (id, data, metadata, tier, created_at, last_accessed, compressed)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `);
      stmt.run(id, processedData, JSON.stringify(metadata), item.tier, item.createdAt?.toISOString(), item.lastAccessed?.toISOString(), compressed ? 1 : 0);
    }
    logger.debug("Stored item in hot tier", { id, size: processedData.length, compressed });
  }
  /**
   * Retrieve item from appropriate tier
   */
  async retrieve(id) {
    if (!this.isInitialized) await this.initialize();
    await this.updateLastAccessed(id);
    let row;
    if (this.pgPool) {
      const { rows } = await this.pgPool.query("SELECT * FROM storage_items WHERE id = $1", [id]);
      row = rows[0];
    } else if (this.db) {
      row = this.db.prepare("SELECT * FROM storage_items WHERE id = ?").get(id);
    }
    if (!row) return null;
    let data;
    if (row.tier === "cold" /* COLD */ && row.object_key && this.s3Client) {
      data = await this.retrieveFromColdStorage(row.object_key);
    } else {
      data = row.data;
    }
    if (row.compressed) {
      data = await gunzipAsync(data);
    }
    logger.debug("Retrieved item", { id, tier: row.tier, compressed: row.compressed });
    return data;
  }
  /**
   * Archive old items to cold storage
   */
  async archiveOldItems() {
    if (!this.s3Client) return;
    const cutoffDate = /* @__PURE__ */ new Date();
    cutoffDate.setDate(cutoffDate.getDate() - this.config.tiers.archiveAfterDays);
    let rows;
    if (this.pgPool) {
      const { rows: pgRows } = await this.pgPool.query(
        "SELECT * FROM storage_items WHERE tier = $1 AND created_at < $2 LIMIT 100",
        ["hot" /* HOT */, cutoffDate]
      );
      rows = pgRows;
    } else if (this.db) {
      rows = this.db.prepare(
        "SELECT * FROM storage_items WHERE tier = ? AND created_at < ? LIMIT 100"
      ).all("hot" /* HOT */, cutoffDate.toISOString());
    } else {
      return;
    }
    for (const row of rows) {
      try {
        await this.archiveItem(row);
        logger.debug("Archived item to cold storage", { id: row.id });
      } catch (error) {
        logger.warn("Failed to archive item", { id: row.id, error });
      }
    }
    if (rows.length > 0) {
      logger.info("Archived old items", { count: rows.length, cutoffDate });
    }
  }
  async archiveItem(row) {
    if (!this.s3Client) return;
    const objectKey = `archived/${row.id}`;
    await this.s3Client.send(
      new PutObjectCommand({
        Bucket: this.config.objectStorage.bucket,
        Key: objectKey,
        Body: row.data,
        Metadata: {
          originalId: row.id,
          compressed: row.compressed.toString(),
          archivedAt: (/* @__PURE__ */ new Date()).toISOString()
        }
      })
    );
    if (this.pgPool) {
      await this.pgPool.query(
        "UPDATE storage_items SET tier = $1, data = NULL, object_key = $2 WHERE id = $3",
        ["cold" /* COLD */, objectKey, row.id]
      );
    } else if (this.db) {
      this.db.prepare(
        "UPDATE storage_items SET tier = ?, data = NULL, object_key = ? WHERE id = ?"
      ).run("cold" /* COLD */, objectKey, row.id);
    }
  }
  async retrieveFromColdStorage(objectKey) {
    if (!this.s3Client) throw new Error("Object storage not configured");
    const response = await this.s3Client.send(
      new GetObjectCommand({
        Bucket: this.config.objectStorage.bucket,
        Key: objectKey
      })
    );
    if (!response.Body) throw new Error("Empty response from cold storage");
    const chunks = [];
    const reader = response.Body;
    return new Promise((resolve, reject) => {
      reader.on("data", (chunk) => chunks.push(chunk));
      reader.on("end", () => resolve(Buffer.concat(chunks)));
      reader.on("error", reject);
    });
  }
  async updateLastAccessed(id) {
    const now = /* @__PURE__ */ new Date();
    if (this.pgPool) {
      await this.pgPool.query(
        "UPDATE storage_items SET last_accessed = $1 WHERE id = $2",
        [now, id]
      );
    } else if (this.db) {
      this.db.prepare("UPDATE storage_items SET last_accessed = ? WHERE id = ?").run(now.toISOString(), id);
    }
  }
  startArchivalProcess() {
    setInterval(async () => {
      try {
        await this.archiveOldItems();
      } catch (error) {
        logger.error("Archival process failed", { error });
      }
    }, 6 * 60 * 60 * 1e3);
    logger.info("Started archival process", { intervalHours: 6 });
  }
  /**
   * Get storage statistics
   */
  async getStats() {
    if (!this.isInitialized) await this.initialize();
    let hotItems = 0;
    let coldItems = 0;
    let totalSize = 0;
    if (this.pgPool) {
      const { rows } = await this.pgPool.query(`
        SELECT tier, COUNT(*) as count, SUM(length(data)) as size
        FROM storage_items
        GROUP BY tier
      `);
      for (const row of rows) {
        if (row.tier === "hot" /* HOT */) {
          hotItems = parseInt(row.count);
          totalSize += parseInt(row.size || 0);
        } else {
          coldItems = parseInt(row.count);
        }
      }
    } else if (this.db) {
      const stmt = this.db.prepare(`
        SELECT tier, COUNT(*) as count, SUM(length(data)) as size
        FROM storage_items
        GROUP BY tier
      `);
      const rows = stmt.all();
      for (const row of rows) {
        if (row.tier === "hot" /* HOT */) {
          hotItems = row.count;
          totalSize += row.size || 0;
        } else {
          coldItems = row.count;
        }
      }
    }
    return { hotItems, coldItems, totalSize };
  }
  async close() {
    if (this.pgPool) {
      await this.pgPool.end();
    }
    if (this.db) {
      this.db.close();
    }
    this.isInitialized = false;
    logger.info("Simplified storage closed");
  }
}
var simplified_storage_default = SimplifiedStorage;
export {
  DEFAULT_SIMPLIFIED_CONFIG,
  SimplifiedStorage,
  StorageTier,
  simplified_storage_default as default
};
//# sourceMappingURL=simplified-storage.js.map
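A round-trip sketch for the removed two-tier store above; the id and metadata are placeholders. With DEFAULT_SIMPLIFIED_CONFIG and neither DATABASE_URL nor S3 credentials set, data lands in SQLite at ./storage/stackmemory.db (the directory must already exist) and the cold-tier archival pass is skipped.

import { SimplifiedStorage } from './core/storage/simplified-storage.js';

async function roundTrip(): Promise<void> {
  const storage = new SimplifiedStorage(); // falls back to DEFAULT_SIMPLIFIED_CONFIG

  // Anything over the 1 KB compressionThreshold is gzip-compressed before insert.
  const payload = Buffer.from(JSON.stringify({ notes: 'x'.repeat(4096) }));
  await storage.store('frame:123', payload, { kind: 'frame' });

  // retrieve() picks the tier from the stored row and gunzips transparently.
  const restored = await storage.retrieve('frame:123');
  console.log(restored?.equals(payload)); // true

  console.log(await storage.getStats()); // { hotItems, coldItems, totalSize }
  await storage.close();
}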
package/dist/core/storage/simplified-storage.js.map
@@ -1,7 +0,0 @@
{
"version": 3,
"sources": ["../../../src/core/storage/simplified-storage.ts"],
"sourcesContent": ["/**\n * Simplified 2-Tier Storage System\n * Tier 1: SQLite/PostgreSQL (Hot) - Active data, immediate access\n * Tier 2: Object Storage (Cold) - Archive data, cost-effective long-term storage\n * \n * ARCHITECT RECOMMENDATION: Removed Redis complexity, GCS overkill\n */\n\nimport Database from 'better-sqlite3';\nimport { Pool } from 'pg';\nimport { S3Client, PutObjectCommand, GetObjectCommand } from '@aws-sdk/client-s3';\nimport { logger } from '../monitoring/logger.js';\nimport * as zlib from 'zlib';\nimport { promisify } from 'util';\n\nconst gzipAsync = promisify(zlib.gzip);\nconst gunzipAsync = promisify(zlib.gunzip);\n\nexport enum StorageTier {\n HOT = 'hot', // SQLite/PostgreSQL: Active data\n COLD = 'cold', // Object Storage: Archive data\n}\n\nexport interface SimplifiedStorageConfig {\n database: {\n type: 'sqlite' | 'postgresql';\n url: string;\n maxConnections?: number;\n };\n objectStorage: {\n endpoint: string;\n bucket: string;\n accessKeyId: string;\n secretAccessKey: string;\n region: string;\n };\n tiers: {\n archiveAfterDays: number; // Move to cold storage after N days\n compressionThreshold: number; // Compress items larger than N bytes\n };\n}\n\nexport const DEFAULT_SIMPLIFIED_CONFIG: SimplifiedStorageConfig = {\n database: {\n type: process.env['DATABASE_URL']?.startsWith('postgres') ? 'postgresql' : 'sqlite',\n url: process.env['DATABASE_URL'] || './storage/stackmemory.db',\n maxConnections: 10,\n },\n objectStorage: {\n endpoint: process.env['S3_ENDPOINT'] || 'https://s3.amazonaws.com',\n bucket: process.env['S3_BUCKET'] || 'stackmemory-archive',\n accessKeyId: process.env['S3_ACCESS_KEY_ID'] || '',\n secretAccessKey: process.env['S3_SECRET_ACCESS_KEY'] || '',\n region: process.env['S3_REGION'] || 'us-east-1',\n },\n tiers: {\n archiveAfterDays: 30, // Archive after 30 days instead of complex 3-tier\n compressionThreshold: 1024, // Compress items > 1KB\n },\n};\n\nexport interface StoredItem {\n id: string;\n data: Buffer;\n metadata: Record<string, unknown>;\n tier: StorageTier;\n createdAt: Date;\n lastAccessed: Date;\n compressed: boolean;\n}\n\ninterface DatabaseRow {\n id: string;\n data: Buffer;\n metadata: string;\n tier: string;\n created_at: string;\n last_accessed: string;\n compressed: number | boolean;\n object_key?: string;\n}\n\n/**\n * Simplified Storage System (Architect-approved)\n * Removes Redis complexity, GCS overkill, premature optimization\n */\nexport class SimplifiedStorage {\n private config: SimplifiedStorageConfig;\n private db?: Database.Database;\n private pgPool?: Pool;\n private s3Client?: S3Client;\n private isInitialized = false;\n\n constructor(config: SimplifiedStorageConfig = DEFAULT_SIMPLIFIED_CONFIG) {\n this.config = config;\n \n // Only initialize S3 if properly configured\n if (this.config.objectStorage.accessKeyId && this.config.objectStorage.secretAccessKey) {\n this.s3Client = new S3Client({\n endpoint: this.config.objectStorage.endpoint,\n region: this.config.objectStorage.region,\n credentials: {\n accessKeyId: this.config.objectStorage.accessKeyId,\n secretAccessKey: this.config.objectStorage.secretAccessKey,\n },\n });\n }\n }\n\n async initialize(): Promise<void> {\n if (this.isInitialized) return;\n\n try {\n if (this.config.database.type === 'postgresql') {\n await this.initializePostgreSQL();\n } else {\n await this.initializeSQLite();\n }\n\n await this.createTables();\n this.isInitialized = true;\n \n // Start background archival process (simple cron-like)\n this.startArchivalProcess();\n 
\n logger.info('Simplified storage initialized', {\n databaseType: this.config.database.type,\n objectStorageEnabled: !!this.s3Client,\n });\n } catch (error) {\n logger.error('Failed to initialize simplified storage', { error });\n throw error;\n }\n }\n\n private async initializePostgreSQL(): Promise<void> {\n this.pgPool = new Pool({\n connectionString: this.config.database.url,\n max: this.config.database.maxConnections,\n idleTimeoutMillis: 30000,\n });\n }\n\n private async initializeSQLite(): Promise<void> {\n this.db = new Database(this.config.database.url);\n this.db.pragma('foreign_keys = ON');\n this.db.pragma('journal_mode = WAL');\n }\n\n private async createTables(): Promise<void> {\n const schema = `\n CREATE TABLE IF NOT EXISTS storage_items (\n id TEXT PRIMARY KEY,\n data BLOB,\n metadata TEXT,\n tier TEXT NOT NULL,\n created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n last_accessed TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n compressed BOOLEAN DEFAULT FALSE,\n object_key TEXT -- For cold storage reference\n );\n CREATE INDEX IF NOT EXISTS idx_storage_tier ON storage_items(tier);\n CREATE INDEX IF NOT EXISTS idx_storage_created ON storage_items(created_at);\n `;\n\n if (this.pgPool) {\n await this.pgPool.query(schema.replace(/BLOB/g, 'BYTEA').replace(/TIMESTAMP DEFAULT CURRENT_TIMESTAMP/g, 'TIMESTAMP DEFAULT NOW()'));\n } else if (this.db) {\n this.db.exec(schema);\n }\n }\n\n /**\n * Store item in hot tier (database)\n */\n async store(id: string, data: Buffer, metadata: Record<string, unknown> = {}): Promise<void> {\n if (!this.isInitialized) await this.initialize();\n\n let processedData = data;\n let compressed = false;\n\n // Compress if over threshold\n if (data.length > this.config.tiers.compressionThreshold) {\n processedData = await gzipAsync(data);\n compressed = true;\n logger.debug('Compressed storage item', { id, originalSize: data.length, compressedSize: processedData.length });\n }\n\n const item: Partial<StoredItem> = {\n id,\n data: processedData,\n metadata,\n tier: StorageTier.HOT,\n createdAt: new Date(),\n lastAccessed: new Date(),\n compressed,\n };\n\n if (this.pgPool) {\n await this.pgPool.query(\n `INSERT INTO storage_items (id, data, metadata, tier, created_at, last_accessed, compressed)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ON CONFLICT (id) DO UPDATE SET\n data = EXCLUDED.data,\n metadata = EXCLUDED.metadata,\n last_accessed = EXCLUDED.last_accessed,\n compressed = EXCLUDED.compressed`,\n [id, processedData, JSON.stringify(metadata), item.tier, item.createdAt, item.lastAccessed, compressed]\n );\n } else if (this.db) {\n const stmt = this.db.prepare(`\n INSERT OR REPLACE INTO storage_items (id, data, metadata, tier, created_at, last_accessed, compressed)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n `);\n stmt.run(id, processedData, JSON.stringify(metadata), item.tier, item.createdAt?.toISOString(), item.lastAccessed?.toISOString(), compressed ? 
1 : 0);\n }\n\n logger.debug('Stored item in hot tier', { id, size: processedData.length, compressed });\n }\n\n /**\n * Retrieve item from appropriate tier\n */\n async retrieve(id: string): Promise<Buffer | null> {\n if (!this.isInitialized) await this.initialize();\n\n // Update last accessed timestamp\n await this.updateLastAccessed(id);\n\n let row: DatabaseRow | undefined;\n if (this.pgPool) {\n const { rows } = await this.pgPool.query('SELECT * FROM storage_items WHERE id = $1', [id]);\n row = rows[0];\n } else if (this.db) {\n row = this.db.prepare('SELECT * FROM storage_items WHERE id = ?').get(id);\n }\n\n if (!row) return null;\n\n let data: Buffer;\n \n if (row.tier === StorageTier.COLD && row.object_key && this.s3Client) {\n // Retrieve from cold storage\n data = await this.retrieveFromColdStorage(row.object_key);\n } else {\n // Retrieve from hot storage\n data = row.data;\n }\n\n // Decompress if needed\n if (row.compressed) {\n data = await gunzipAsync(data);\n }\n\n logger.debug('Retrieved item', { id, tier: row.tier, compressed: row.compressed });\n return data;\n }\n\n /**\n * Archive old items to cold storage\n */\n private async archiveOldItems(): Promise<void> {\n if (!this.s3Client) return; // Skip if object storage not configured\n\n const cutoffDate = new Date();\n cutoffDate.setDate(cutoffDate.getDate() - this.config.tiers.archiveAfterDays);\n\n let rows: DatabaseRow[];\n if (this.pgPool) {\n const { rows: pgRows } = await this.pgPool.query(\n 'SELECT * FROM storage_items WHERE tier = $1 AND created_at < $2 LIMIT 100',\n [StorageTier.HOT, cutoffDate]\n );\n rows = pgRows;\n } else if (this.db) {\n rows = this.db.prepare(\n 'SELECT * FROM storage_items WHERE tier = ? AND created_at < ? LIMIT 100'\n ).all(StorageTier.HOT, cutoffDate.toISOString());\n } else {\n return;\n }\n\n for (const row of rows) {\n try {\n await this.archiveItem(row);\n logger.debug('Archived item to cold storage', { id: row.id });\n } catch (error) {\n logger.warn('Failed to archive item', { id: row.id, error });\n }\n }\n\n if (rows.length > 0) {\n logger.info('Archived old items', { count: rows.length, cutoffDate });\n }\n }\n\n private async archiveItem(row: DatabaseRow): Promise<void> {\n if (!this.s3Client) return;\n\n const objectKey = `archived/${row.id}`;\n \n // Upload to S3\n await this.s3Client.send(\n new PutObjectCommand({\n Bucket: this.config.objectStorage.bucket,\n Key: objectKey,\n Body: row.data,\n Metadata: {\n originalId: row.id,\n compressed: row.compressed.toString(),\n archivedAt: new Date().toISOString(),\n },\n })\n );\n\n // Update database record\n if (this.pgPool) {\n await this.pgPool.query(\n 'UPDATE storage_items SET tier = $1, data = NULL, object_key = $2 WHERE id = $3',\n [StorageTier.COLD, objectKey, row.id]\n );\n } else if (this.db) {\n this.db.prepare(\n 'UPDATE storage_items SET tier = ?, data = NULL, object_key = ? 
WHERE id = ?'\n ).run(StorageTier.COLD, objectKey, row.id);\n }\n }\n\n private async retrieveFromColdStorage(objectKey: string): Promise<Buffer> {\n if (!this.s3Client) throw new Error('Object storage not configured');\n\n const response = await this.s3Client.send(\n new GetObjectCommand({\n Bucket: this.config.objectStorage.bucket,\n Key: objectKey,\n })\n );\n\n if (!response.Body) throw new Error('Empty response from cold storage');\n\n // Convert stream to buffer\n const chunks: Buffer[] = [];\n const reader = response.Body as NodeJS.ReadableStream;\n \n return new Promise((resolve, reject) => {\n reader.on('data', (chunk: Buffer) => chunks.push(chunk));\n reader.on('end', () => resolve(Buffer.concat(chunks)));\n reader.on('error', reject);\n });\n }\n\n private async updateLastAccessed(id: string): Promise<void> {\n const now = new Date();\n \n if (this.pgPool) {\n await this.pgPool.query(\n 'UPDATE storage_items SET last_accessed = $1 WHERE id = $2',\n [now, id]\n );\n } else if (this.db) {\n this.db.prepare('UPDATE storage_items SET last_accessed = ? WHERE id = ?')\n .run(now.toISOString(), id);\n }\n }\n\n private startArchivalProcess(): void {\n // Run archival every 6 hours instead of complex real-time monitoring\n setInterval(async () => {\n try {\n await this.archiveOldItems();\n } catch (error) {\n logger.error('Archival process failed', { error });\n }\n }, 6 * 60 * 60 * 1000); // 6 hours\n\n logger.info('Started archival process', { intervalHours: 6 });\n }\n\n /**\n * Get storage statistics\n */\n async getStats(): Promise<{ hotItems: number; coldItems: number; totalSize: number }> {\n if (!this.isInitialized) await this.initialize();\n\n let hotItems = 0;\n let coldItems = 0;\n let totalSize = 0;\n\n if (this.pgPool) {\n const { rows } = await this.pgPool.query(`\n SELECT tier, COUNT(*) as count, SUM(length(data)) as size\n FROM storage_items\n GROUP BY tier\n `);\n \n for (const row of rows) {\n if (row.tier === StorageTier.HOT) {\n hotItems = parseInt(row.count);\n totalSize += parseInt(row.size || 0);\n } else {\n coldItems = parseInt(row.count);\n }\n }\n } else if (this.db) {\n const stmt = this.db.prepare(`\n SELECT tier, COUNT(*) as count, SUM(length(data)) as size\n FROM storage_items\n GROUP BY tier\n `);\n const rows = stmt.all();\n \n for (const row of rows) {\n if (row.tier === StorageTier.HOT) {\n hotItems = row.count;\n totalSize += row.size || 0;\n } else {\n coldItems = row.count;\n }\n }\n }\n\n return { hotItems, coldItems, totalSize };\n }\n\n async close(): Promise<void> {\n if (this.pgPool) {\n await this.pgPool.end();\n }\n if (this.db) {\n this.db.close();\n }\n this.isInitialized = false;\n logger.info('Simplified storage closed');\n }\n}\n\nexport default SimplifiedStorage;"],
"mappings": "AAQA,OAAO,cAAc;AACrB,SAAS,YAAY;AACrB,SAAS,UAAU,kBAAkB,wBAAwB;AAC7D,SAAS,cAAc;AACvB,YAAY,UAAU;AACtB,SAAS,iBAAiB;AAE1B,MAAM,YAAY,UAAU,KAAK,IAAI;AACrC,MAAM,cAAc,UAAU,KAAK,MAAM;AAElC,IAAK,cAAL,kBAAKA,iBAAL;AACL,EAAAA,aAAA,SAAM;AACN,EAAAA,aAAA,UAAO;AAFG,SAAAA;AAAA,GAAA;AAwBL,MAAM,4BAAqD;AAAA,EAChE,UAAU;AAAA,IACR,MAAM,QAAQ,IAAI,cAAc,GAAG,WAAW,UAAU,IAAI,eAAe;AAAA,IAC3E,KAAK,QAAQ,IAAI,cAAc,KAAK;AAAA,IACpC,gBAAgB;AAAA,EAClB;AAAA,EACA,eAAe;AAAA,IACb,UAAU,QAAQ,IAAI,aAAa,KAAK;AAAA,IACxC,QAAQ,QAAQ,IAAI,WAAW,KAAK;AAAA,IACpC,aAAa,QAAQ,IAAI,kBAAkB,KAAK;AAAA,IAChD,iBAAiB,QAAQ,IAAI,sBAAsB,KAAK;AAAA,IACxD,QAAQ,QAAQ,IAAI,WAAW,KAAK;AAAA,EACtC;AAAA,EACA,OAAO;AAAA,IACL,kBAAkB;AAAA;AAAA,IAClB,sBAAsB;AAAA;AAAA,EACxB;AACF;AA2BO,MAAM,kBAAkB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,gBAAgB;AAAA,EAExB,YAAY,SAAkC,2BAA2B;AACvE,SAAK,SAAS;AAGd,QAAI,KAAK,OAAO,cAAc,eAAe,KAAK,OAAO,cAAc,iBAAiB;AACtF,WAAK,WAAW,IAAI,SAAS;AAAA,QAC3B,UAAU,KAAK,OAAO,cAAc;AAAA,QACpC,QAAQ,KAAK,OAAO,cAAc;AAAA,QAClC,aAAa;AAAA,UACX,aAAa,KAAK,OAAO,cAAc;AAAA,UACvC,iBAAiB,KAAK,OAAO,cAAc;AAAA,QAC7C;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI,KAAK,cAAe;AAExB,QAAI;AACF,UAAI,KAAK,OAAO,SAAS,SAAS,cAAc;AAC9C,cAAM,KAAK,qBAAqB;AAAA,MAClC,OAAO;AACL,cAAM,KAAK,iBAAiB;AAAA,MAC9B;AAEA,YAAM,KAAK,aAAa;AACxB,WAAK,gBAAgB;AAGrB,WAAK,qBAAqB;AAE1B,aAAO,KAAK,kCAAkC;AAAA,QAC5C,cAAc,KAAK,OAAO,SAAS;AAAA,QACnC,sBAAsB,CAAC,CAAC,KAAK;AAAA,MAC/B,CAAC;AAAA,IACH,SAAS,OAAO;AACd,aAAO,MAAM,2CAA2C,EAAE,MAAM,CAAC;AACjE,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAc,uBAAsC;AAClD,SAAK,SAAS,IAAI,KAAK;AAAA,MACrB,kBAAkB,KAAK,OAAO,SAAS;AAAA,MACvC,KAAK,KAAK,OAAO,SAAS;AAAA,MAC1B,mBAAmB;AAAA,IACrB,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,mBAAkC;AAC9C,SAAK,KAAK,IAAI,SAAS,KAAK,OAAO,SAAS,GAAG;AAC/C,SAAK,GAAG,OAAO,mBAAmB;AAClC,SAAK,GAAG,OAAO,oBAAoB;AAAA,EACrC;AAAA,EAEA,MAAc,eAA8B;AAC1C,UAAM,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAef,QAAI,KAAK,QAAQ;AACf,YAAM,KAAK,OAAO,MAAM,OAAO,QAAQ,SAAS,OAAO,EAAE,QAAQ,wCAAwC,yBAAyB,CAAC;AAAA,IACrI,WAAW,KAAK,IAAI;AAClB,WAAK,GAAG,KAAK,MAAM;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,MAAM,IAAY,MAAc,WAAoC,CAAC,GAAkB;AAC3F,QAAI,CAAC,KAAK,cAAe,OAAM,KAAK,WAAW;AAE/C,QAAI,gBAAgB;AACpB,QAAI,aAAa;AAGjB,QAAI,KAAK,SAAS,KAAK,OAAO,MAAM,sBAAsB;AACxD,sBAAgB,MAAM,UAAU,IAAI;AACpC,mBAAa;AACb,aAAO,MAAM,2BAA2B,EAAE,IAAI,cAAc,KAAK,QAAQ,gBAAgB,cAAc,OAAO,CAAC;AAAA,IACjH;AAEA,UAAM,OAA4B;AAAA,MAChC;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,MAAM;AAAA,MACN,WAAW,oBAAI,KAAK;AAAA,MACpB,cAAc,oBAAI,KAAK;AAAA,MACvB;AAAA,IACF;AAEA,QAAI,KAAK,QAAQ;AACf,YAAM,KAAK,OAAO;AAAA,QAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOA,CAAC,IAAI,eAAe,KAAK,UAAU,QAAQ,GAAG,KAAK,MAAM,KAAK,WAAW,KAAK,cAAc,UAAU;AAAA,MACxG;AAAA,IACF,WAAW,KAAK,IAAI;AAClB,YAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA,OAG5B;AACD,WAAK,IAAI,IAAI,eAAe,KAAK,UAAU,QAAQ,GAAG,KAAK,MAAM,KAAK,WAAW,YAAY,GAAG,KAAK,cAAc,YAAY,GAAG,aAAa,IAAI,CAAC;AAAA,IACtJ;AAEA,WAAO,MAAM,2BAA2B,EAAE,IAAI,MAAM,cAAc,QAAQ,WAAW,CAAC;AAAA,EACxF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAS,IAAoC;AACjD,QAAI,CAAC,KAAK,cAAe,OAAM,KAAK,WAAW;AAG/C,UAAM,KAAK,mBAAmB,EAAE;AAEhC,QAAI;AACJ,QAAI,KAAK,QAAQ;AACf,YAAM,EAAE,KAAK,IAAI,MAAM,KAAK,OAAO,MAAM,6CAA6C,CAAC,EAAE,CAAC;AAC1F,YAAM,KAAK,CAAC;AAAA,IACd,WAAW,KAAK,IAAI;AAClB,YAAM,KAAK,GAAG,QAAQ,0CAA0C,EAAE,IAAI,EAAE;AAAA,IAC1E;AAEA,QAAI,CAAC,IAAK,QAAO;AAEjB,QAAI;AAEJ,QAAI,IAAI,SAAS,qBAAoB,IAAI,cAAc,KAAK,UAAU;AAEpE,aAAO,MAAM,KAAK,wBAAwB,IAAI,UAAU;AAAA,IAC1D,OAAO;AAEL,aAAO,IAAI;AAAA,IACb;AAGA,QAAI,IAAI,YAAY;AAClB,aAAO,MAAM,YAAY,IAAI;AAAA,IAC/B;AAEA,WAAO,MAAM,kBAAkB,EAAE,IAAI,MAAM,IAAI,MAAM,YAAY,IAAI,WAAW,CAAC;AACjF
,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBAAiC;AAC7C,QAAI,CAAC,KAAK,SAAU;AAEpB,UAAM,aAAa,oBAAI,KAAK;AAC5B,eAAW,QAAQ,WAAW,QAAQ,IAAI,KAAK,OAAO,MAAM,gBAAgB;AAE5E,QAAI;AACJ,QAAI,KAAK,QAAQ;AACf,YAAM,EAAE,MAAM,OAAO,IAAI,MAAM,KAAK,OAAO;AAAA,QACzC;AAAA,QACA,CAAC,iBAAiB,UAAU;AAAA,MAC9B;AACA,aAAO;AAAA,IACT,WAAW,KAAK,IAAI;AAClB,aAAO,KAAK,GAAG;AAAA,QACb;AAAA,MACF,EAAE,IAAI,iBAAiB,WAAW,YAAY,CAAC;AAAA,IACjD,OAAO;AACL;AAAA,IACF;AAEA,eAAW,OAAO,MAAM;AACtB,UAAI;AACF,cAAM,KAAK,YAAY,GAAG;AAC1B,eAAO,MAAM,iCAAiC,EAAE,IAAI,IAAI,GAAG,CAAC;AAAA,MAC9D,SAAS,OAAO;AACd,eAAO,KAAK,0BAA0B,EAAE,IAAI,IAAI,IAAI,MAAM,CAAC;AAAA,MAC7D;AAAA,IACF;AAEA,QAAI,KAAK,SAAS,GAAG;AACnB,aAAO,KAAK,sBAAsB,EAAE,OAAO,KAAK,QAAQ,WAAW,CAAC;AAAA,IACtE;AAAA,EACF;AAAA,EAEA,MAAc,YAAY,KAAiC;AACzD,QAAI,CAAC,KAAK,SAAU;AAEpB,UAAM,YAAY,YAAY,IAAI,EAAE;AAGpC,UAAM,KAAK,SAAS;AAAA,MAClB,IAAI,iBAAiB;AAAA,QACnB,QAAQ,KAAK,OAAO,cAAc;AAAA,QAClC,KAAK;AAAA,QACL,MAAM,IAAI;AAAA,QACV,UAAU;AAAA,UACR,YAAY,IAAI;AAAA,UAChB,YAAY,IAAI,WAAW,SAAS;AAAA,UACpC,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,QACrC;AAAA,MACF,CAAC;AAAA,IACH;AAGA,QAAI,KAAK,QAAQ;AACf,YAAM,KAAK,OAAO;AAAA,QAChB;AAAA,QACA,CAAC,mBAAkB,WAAW,IAAI,EAAE;AAAA,MACtC;AAAA,IACF,WAAW,KAAK,IAAI;AAClB,WAAK,GAAG;AAAA,QACN;AAAA,MACF,EAAE,IAAI,mBAAkB,WAAW,IAAI,EAAE;AAAA,IAC3C;AAAA,EACF;AAAA,EAEA,MAAc,wBAAwB,WAAoC;AACxE,QAAI,CAAC,KAAK,SAAU,OAAM,IAAI,MAAM,+BAA+B;AAEnE,UAAM,WAAW,MAAM,KAAK,SAAS;AAAA,MACnC,IAAI,iBAAiB;AAAA,QACnB,QAAQ,KAAK,OAAO,cAAc;AAAA,QAClC,KAAK;AAAA,MACP,CAAC;AAAA,IACH;AAEA,QAAI,CAAC,SAAS,KAAM,OAAM,IAAI,MAAM,kCAAkC;AAGtE,UAAM,SAAmB,CAAC;AAC1B,UAAM,SAAS,SAAS;AAExB,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,aAAO,GAAG,QAAQ,CAAC,UAAkB,OAAO,KAAK,KAAK,CAAC;AACvD,aAAO,GAAG,OAAO,MAAM,QAAQ,OAAO,OAAO,MAAM,CAAC,CAAC;AACrD,aAAO,GAAG,SAAS,MAAM;AAAA,IAC3B,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,mBAAmB,IAA2B;AAC1D,UAAM,MAAM,oBAAI,KAAK;AAErB,QAAI,KAAK,QAAQ;AACf,YAAM,KAAK,OAAO;AAAA,QAChB;AAAA,QACA,CAAC,KAAK,EAAE;AAAA,MACV;AAAA,IACF,WAAW,KAAK,IAAI;AAClB,WAAK,GAAG,QAAQ,yDAAyD,EACtE,IAAI,IAAI,YAAY,GAAG,EAAE;AAAA,IAC9B;AAAA,EACF;AAAA,EAEQ,uBAA6B;AAEnC,gBAAY,YAAY;AACtB,UAAI;AACF,cAAM,KAAK,gBAAgB;AAAA,MAC7B,SAAS,OAAO;AACd,eAAO,MAAM,2BAA2B,EAAE,MAAM,CAAC;AAAA,MACnD;AAAA,IACF,GAAG,IAAI,KAAK,KAAK,GAAI;AAErB,WAAO,KAAK,4BAA4B,EAAE,eAAe,EAAE,CAAC;AAAA,EAC9D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAgF;AACpF,QAAI,CAAC,KAAK,cAAe,OAAM,KAAK,WAAW;AAE/C,QAAI,WAAW;AACf,QAAI,YAAY;AAChB,QAAI,YAAY;AAEhB,QAAI,KAAK,QAAQ;AACf,YAAM,EAAE,KAAK,IAAI,MAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA,OAIxC;AAED,iBAAW,OAAO,MAAM;AACtB,YAAI,IAAI,SAAS,iBAAiB;AAChC,qBAAW,SAAS,IAAI,KAAK;AAC7B,uBAAa,SAAS,IAAI,QAAQ,CAAC;AAAA,QACrC,OAAO;AACL,sBAAY,SAAS,IAAI,KAAK;AAAA,QAChC;AAAA,MACF;AAAA,IACF,WAAW,KAAK,IAAI;AAClB,YAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA,OAI5B;AACD,YAAM,OAAO,KAAK,IAAI;AAEtB,iBAAW,OAAO,MAAM;AACtB,YAAI,IAAI,SAAS,iBAAiB;AAChC,qBAAW,IAAI;AACf,uBAAa,IAAI,QAAQ;AAAA,QAC3B,OAAO;AACL,sBAAY,IAAI;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,UAAU,WAAW,UAAU;AAAA,EAC1C;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,KAAK,QAAQ;AACf,YAAM,KAAK,OAAO,IAAI;AAAA,IACxB;AACA,QAAI,KAAK,IAAI;AACX,WAAK,GAAG,MAAM;AAAA,IAChB;AACA,SAAK,gBAAgB;AACrB,WAAO,KAAK,2BAA2B;AAAA,EACzC;AACF;AAEA,IAAO,6BAAQ;",
"names": ["StorageTier"]
}