@mxtommy/kip 4.5.1 → 4.6.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/CHANGELOG.md +4 -0
  2. package/package.json +13 -15
  3. package/plugin/history-series.service.js +14 -24
  4. package/plugin/index.js +198 -146
  5. package/plugin/{duckdb-parquet-storage.service.js → sqlite-history-storage.service.js} +327 -381
  6. package/public/{chunk-D7VDX7ZF.js → chunk-67V4XHCY.js} +1 -1
  7. package/public/{chunk-EQ2N7KDA.js → chunk-BEQKBGLG.js} +2 -2
  8. package/public/{chunk-JGGMFMY5.js → chunk-BJEHRCYP.js} +1 -1
  9. package/public/chunk-BTFZS2TW.js +16 -0
  10. package/public/chunk-FZFDGAQO.js +1 -0
  11. package/public/{chunk-VCY32MWT.js → chunk-FZSLNGBK.js} +8 -8
  12. package/public/{chunk-RONXIZ2U.js → chunk-J6EEFXKZ.js} +3 -3
  13. package/public/{chunk-IYRLINL7.js → chunk-KWTS7JF7.js} +1 -1
  14. package/public/chunk-NFJ4RQSE.js +4 -0
  15. package/public/{chunk-DEM56G4S.js → chunk-OPTBDYBL.js} +1 -1
  16. package/public/{chunk-YCEXTKGG.js → chunk-P4CRTB7N.js} +1 -1
  17. package/public/{chunk-IHURI4IH.js → chunk-P7JKENHI.js} +3 -3
  18. package/public/chunk-Q2ANAJAD.js +1 -0
  19. package/public/{chunk-B75MT7ND.js → chunk-R36UY4Q4.js} +1 -1
  20. package/public/{chunk-CHGXAEKT.js → chunk-RCYOZLZB.js} +1 -1
  21. package/public/chunk-RFNZ4AQG.js +50 -0
  22. package/public/{chunk-KPHICV76.js → chunk-SJFJEOSG.js} +1 -1
  23. package/public/{chunk-MGPPVLZ7.js → chunk-TBNKOU7M.js} +1 -1
  24. package/public/chunk-TVNXBPFF.js +6 -0
  25. package/public/{chunk-S72JTJPN.js → chunk-VPF5756E.js} +1 -1
  26. package/public/chunk-VXCYPAWR.js +1 -0
  27. package/public/{chunk-R7RQHWKJ.js → chunk-WH5CIUSB.js} +1 -1
  28. package/public/{chunk-LQDSU4WS.js → chunk-WQSJFJLW.js} +1 -1
  29. package/public/{chunk-KZ5DUKAX.js → chunk-XBSU7OGT.js} +1 -1
  30. package/public/{chunk-CEB42O2C.js → chunk-YI3MZWRZ.js} +1 -1
  31. package/public/index.html +1 -1
  32. package/public/main-TZOV3JCT.js +1 -0
  33. package/plugin/plugin-auth.service.js +0 -75
  34. package/public/chunk-A6DQJFP4.js +0 -16
  35. package/public/chunk-DEGYRCMI.js +0 -1
  36. package/public/chunk-DYTBBUMI.js +0 -4
  37. package/public/chunk-FNF7M3AE.js +0 -1
  38. package/public/chunk-JB4YVVNW.js +0 -1
  39. package/public/chunk-YKJKIWXO.js +0 -6
  40. package/public/chunk-ZV7IYYEQ.js +0 -50
  41. package/public/main-FQESQQV6.js +0 -1
@@ -1,43 +1,71 @@
1
1
  "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
2
35
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.DuckDbParquetStorageService = void 0;
36
+ exports.SqliteHistoryStorageService = void 0;
4
37
  const fs_1 = require("fs");
5
38
  const path_1 = require("path");
6
- const node_api_1 = require("@duckdb/node-api");
7
- const parquetjs_1 = require("@dsnp/parquetjs");
39
+ const DEFAULT_STORAGE_CONFIG = {
40
+ engine: 'node:sqlite',
41
+ databaseFile: 'plugin-config-data/kip/historicalData/kip-history.sqlite',
42
+ flushIntervalMs: 30_000
43
+ };
8
44
  /**
9
- * Provides DuckDB storage and Parquet flush support for captured history samples.
45
+ * Provides node:sqlite storage for captured history samples.
10
46
  */
11
- class DuckDbParquetStorageService {
12
- config = {
13
- engine: 'duckdb-parquet',
14
- databaseFile: 'plugin-config-data/kip/historicalData/kip-history.duckdb',
15
- parquetDirectory: 'plugin-config-data/kip/historicalData/parquet',
16
- flushIntervalMs: 30_000
17
- };
18
- // 8 hour job interval (8 hours)
47
+ class SqliteHistoryStorageService {
19
48
  static EIGHT_HOURS_INTERVAL = 8 * 60 * 60 * 1000;
20
- vacuumJob = null;
21
- // 4 hour job interval (4 hours)
22
49
  static FOUR_HOURS_INTERVAL = 4 * 60 * 60 * 1000;
23
- pruneJob = null;
24
- // Stale series cleanup interval (6 months)
25
- static STALE_SERIES_AGE_MS = 180 * 24 * 60 * 60 * 1000; // 6 months
26
- staleSeriesCleanupJob = null;
50
+ static STALE_SERIES_AGE_MS = 180 * 24 * 60 * 60 * 1000;
27
51
  static PRUNE_BATCH_SIZE = 10_000;
52
+ config = { ...DEFAULT_STORAGE_CONFIG };
53
+ dataDirPath = null;
28
54
  logger = {
29
55
  debug: () => undefined,
30
56
  error: () => undefined
31
57
  };
32
58
  db = null;
33
- connection = null;
34
59
  pendingRows = [];
35
- pendingRangesBySeriesId = new Map();
36
60
  lastInitError = null;
37
61
  lifecycleToken = 0;
38
62
  initialized = false;
63
+ runtimeAvailable = true;
39
64
  maintenanceInProgress = false;
40
65
  flushInProgress = false;
66
+ vacuumJob = null;
67
+ pruneJob = null;
68
+ staleSeriesCleanupJob = null;
41
69
  /**
42
70
  * Sets logger callbacks used by the storage service.
43
71
  *
@@ -53,7 +81,7 @@ class DuckDbParquetStorageService {
53
81
  /**
54
82
  * Applies the fixed storage backend configuration.
55
83
  *
56
- * @returns {IDuckDbParquetStorageConfig} Fixed storage configuration.
84
+ * @returns {ISqliteHistoryStorageConfig} Fixed storage configuration.
57
85
  *
58
86
  * @example
59
87
  * const cfg = storage.configure();
@@ -61,230 +89,100 @@ class DuckDbParquetStorageService {
61
89
  */
62
90
  configure() {
63
91
  this.initialized = false;
92
+ const databaseFile = this.dataDirPath
93
+ ? (0, path_1.join)(this.dataDirPath, 'historicalData', 'kip-history.sqlite')
94
+ : DEFAULT_STORAGE_CONFIG.databaseFile;
64
95
  this.config = {
65
- engine: 'duckdb-parquet',
66
- databaseFile: 'plugin-config-data/kip/historicalData/kip-history.duckdb',
67
- parquetDirectory: 'plugin-config-data/kip/historicalData/parquet',
68
- flushIntervalMs: 30_000
96
+ ...DEFAULT_STORAGE_CONFIG,
97
+ databaseFile
69
98
  };
70
99
  return this.config;
71
100
  }
72
101
  /**
73
- * Initializes DuckDB storage if DuckDB engine is selected.
102
+ * Sets the base directory for persisted history data.
74
103
  *
75
- * @returns {Promise<boolean>} True when DuckDB is initialized and ready.
104
+ * @param {string | null} baseDir Absolute directory path for plugin data.
105
+ * @returns {void}
106
+ *
107
+ * @example
108
+ * storage.setDataDirPath('/var/lib/signalk');
109
+ */
110
+ setDataDirPath(baseDir) {
111
+ this.dataDirPath = typeof baseDir === 'string' && baseDir.trim() ? baseDir.trim() : null;
112
+ }
113
+ /**
114
+ * Updates runtime availability of node:sqlite, clearing stored errors when enabled.
115
+ *
116
+ * @param {boolean} available Whether node:sqlite is available at runtime.
117
+ * @param {string | undefined} errorMessage Optional runtime error message.
118
+ * @returns {void}
119
+ *
120
+ * @example
121
+ * storage.setRuntimeAvailability(false, 'node:sqlite unavailable');
122
+ */
123
+ setRuntimeAvailability(available, errorMessage) {
124
+ this.runtimeAvailable = available;
125
+ this.lastInitError = available ? null : (errorMessage ?? 'node:sqlite unavailable');
126
+ if (!available) {
127
+ this.initialized = false;
128
+ this.db = null;
129
+ }
130
+ }
131
+ /**
132
+ * Initializes node:sqlite storage.
133
+ *
134
+ * @returns {Promise<boolean>} True when node:sqlite is initialized and ready.
76
135
  *
77
136
  * @example
78
137
  * const ready = await storage.initialize();
79
138
  */
80
139
  async initialize() {
81
- if (!this.isDuckDbParquetEnabled()) {
140
+ if (!this.isSqliteEnabled() || !this.runtimeAvailable) {
82
141
  return false;
83
142
  }
84
143
  this.initialized = false;
85
144
  this.lifecycleToken += 1;
86
145
  try {
146
+ const sqlite = await this.loadSqliteModule();
147
+ if (!sqlite?.DatabaseSync) {
148
+ throw new Error('node:sqlite DatabaseSync is unavailable');
149
+ }
87
150
  const dbPath = (0, path_1.resolve)(this.config.databaseFile);
88
151
  (0, fs_1.mkdirSync)((0, path_1.dirname)(dbPath), { recursive: true });
89
- (0, fs_1.mkdirSync)((0, path_1.resolve)(this.config.parquetDirectory), { recursive: true });
90
- this.db = await node_api_1.DuckDBInstance.create(dbPath);
91
- this.connection = await this.db.connect();
152
+ this.db = new sqlite.DatabaseSync(dbPath, { timeout: 5000 });
153
+ this.db.exec('PRAGMA journal_mode=WAL;');
154
+ this.db.exec('PRAGMA synchronous=NORMAL;');
155
+ this.db.exec('PRAGMA temp_store=MEMORY;');
156
+ this.db.exec('PRAGMA foreign_keys=ON;');
92
157
  await this.createCoreTables();
93
158
  await this.runSql('CREATE INDEX IF NOT EXISTS idx_history_series_scope_ts ON history_samples(series_id, ts_ms)');
94
159
  await this.runSql('CREATE INDEX IF NOT EXISTS idx_history_series_scope_id ON history_series(series_id)');
95
160
  await this.runSql('CREATE INDEX IF NOT EXISTS idx_history_samples_scope_context_path_ts ON history_samples(context, path, ts_ms)');
96
161
  await this.runSql('CREATE INDEX IF NOT EXISTS idx_history_samples_scope_ts_path ON history_samples(ts_ms, path)');
97
162
  await this.runSql('CREATE INDEX IF NOT EXISTS idx_history_samples_scope_ts_context ON history_samples(ts_ms, context)');
98
- this.logger.debug(`[SERIES STORAGE] DuckDB initialized at ${dbPath}`);
163
+ this.logger.debug(`[SERIES STORAGE] node:sqlite initialized at ${dbPath}`);
99
164
  this.lastInitError = null;
100
165
  this.initialized = true;
101
- // Start VACUUM job
102
166
  this.startVacuumJob();
103
- // Start prune job
104
167
  this.startPruneJob();
105
- // Start stale series cleanup job
106
168
  this.startStaleSeriesCleanupJob();
107
169
  return true;
108
170
  }
109
171
  catch (error) {
110
172
  const message = error?.message ?? String(error);
111
173
  this.lastInitError = message;
112
- this.logger.error(`[SERIES STORAGE] DuckDB initialization failed: ${message}`);
113
- this.logger.error('[SERIES STORAGE] DuckDB Node API is required. Install runtime dependency with: npm i @duckdb/node-api in the installed plugin directory, then restart Signal K.');
114
- this.connection = null;
174
+ this.logger.error(`[SERIES STORAGE] node:sqlite initialization failed: ${message}`);
115
175
  this.db = null;
116
176
  this.pendingRows = [];
117
- this.pendingRangesBySeriesId.clear();
118
177
  this.initialized = false;
119
178
  this.stopVacuumJob();
179
+ this.stopPruneJob();
180
+ this.stopStaleSeriesCleanupJob();
120
181
  return false;
121
182
  }
122
183
  }
123
184
  /**
124
- * Starts VACUUM job for DuckDB.
125
- */
126
- startVacuumJob() {
127
- this.stopVacuumJob();
128
- if (!this.isDuckDbParquetReady() || !this.connection)
129
- return;
130
- this.vacuumJob = setInterval(() => {
131
- if (this.shouldSkipMaintenance()) {
132
- return;
133
- }
134
- void this.runWithMaintenanceLock('vacuum', async () => {
135
- this.logger.debug('[SERIES STORAGE] Running scheduled DuckDB VACUUM');
136
- await this.runSql('VACUUM;');
137
- }).catch(err => {
138
- this.logger.error(`[SERIES STORAGE] VACUUM failed: ${err?.message ?? err}`);
139
- });
140
- }, DuckDbParquetStorageService.EIGHT_HOURS_INTERVAL);
141
- this.vacuumJob.unref?.();
142
- }
143
- /**
144
- * Stops the scheduled VACUUM job if running.
145
- */
146
- stopVacuumJob() {
147
- if (this.vacuumJob) {
148
- clearInterval(this.vacuumJob);
149
- this.vacuumJob = null;
150
- }
151
- }
152
- /**
153
- * Starts the prune job for expired and orphaned samples.
154
- */
155
- startPruneJob() {
156
- this.stopPruneJob();
157
- if (!this.isDuckDbParquetReady() || !this.connection)
158
- return;
159
- this.pruneJob = setInterval(async () => {
160
- if (this.shouldSkipMaintenance()) {
161
- return;
162
- }
163
- try {
164
- await this.runWithMaintenanceLock('prune', async () => {
165
- this.logger.debug('[SERIES STORAGE] Running scheduled prune of expired and orphaned samples');
166
- const expired = await this.pruneExpiredSamples(Date.now(), this.lifecycleToken);
167
- const orphaned = await this.pruneOrphanedSamples(this.lifecycleToken);
168
- this.logger.debug(`[SERIES STORAGE] Pruned ${expired} expired and ${orphaned} orphaned samples`);
169
- });
170
- }
171
- catch (err) {
172
- this.logger.error(`[SERIES STORAGE] Prune failed: ${err?.message ?? err}`);
173
- }
174
- }, DuckDbParquetStorageService.FOUR_HOURS_INTERVAL);
175
- this.pruneJob.unref?.();
176
- }
177
- /**
178
- * Stops the scheduled prune job if running.
179
- */
180
- stopPruneJob() {
181
- if (this.pruneJob) {
182
- clearInterval(this.pruneJob);
183
- this.pruneJob = null;
184
- }
185
- }
186
- /**
187
- * Starts the scheduled job to delete series not reconciled in the last 6 months.
188
- */
189
- startStaleSeriesCleanupJob() {
190
- this.stopStaleSeriesCleanupJob();
191
- if (!this.isDuckDbParquetReady() || !this.connection)
192
- return;
193
- this.staleSeriesCleanupJob = setInterval(async () => {
194
- if (this.shouldSkipMaintenance()) {
195
- return;
196
- }
197
- try {
198
- await this.runWithMaintenanceLock('stale-cleanup', async () => {
199
- const cutoff = Date.now() - DuckDbParquetStorageService.STALE_SERIES_AGE_MS;
200
- this.logger.debug(`[SERIES STORAGE] Running scheduled stale series cleanup (cutoff: ${new Date(cutoff).toISOString()})`);
201
- const deleted = await this.deleteStaleSeries(cutoff);
202
- if (deleted > 0) {
203
- this.logger.debug(`[SERIES STORAGE] Deleted ${deleted} series not reconciled in the last 6 months`);
204
- }
205
- });
206
- }
207
- catch (err) {
208
- this.logger.error(`[SERIES STORAGE] Stale series cleanup failed: ${err?.message ?? err}`);
209
- }
210
- }, DuckDbParquetStorageService.EIGHT_HOURS_INTERVAL);
211
- this.staleSeriesCleanupJob.unref?.();
212
- }
213
- /**
214
- * Deletes series not reconciled since the given cutoff timestamp.
215
- * @param {number} cutoffMs - Milliseconds since epoch; series with reconcile_ts < cutoffMs will be deleted.
216
- * @returns {Promise<number>} Number of deleted series.
217
- *
218
- * @example
219
- * const deleted = await storage.deleteStaleSeries(Date.now() - 180 * 24 * 60 * 60 * 1000);
220
- */
221
- async deleteStaleSeries(cutoffMs) {
222
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
223
- return 0;
224
- }
225
- // Find series to delete
226
- const rows = await this.querySql(`
227
- SELECT series_id FROM history_series
228
- WHERE reconcile_ts IS NULL OR reconcile_ts < ${Math.trunc(cutoffMs)}
229
- `);
230
- const ids = rows.map(r => r.series_id);
231
- if (ids.length === 0)
232
- return 0;
233
- for (const id of ids) {
234
- await this.deleteSeriesDefinition(id);
235
- }
236
- return ids.length;
237
- }
238
- /**
239
- * Stops the scheduled stale series cleanup job if running.
240
- */
241
- stopStaleSeriesCleanupJob() {
242
- if (this.staleSeriesCleanupJob) {
243
- clearInterval(this.staleSeriesCleanupJob);
244
- this.staleSeriesCleanupJob = null;
245
- }
246
- }
247
- shouldSkipMaintenance() {
248
- if (!this.isDuckDbParquetReady() || !this.connection) {
249
- return true;
250
- }
251
- if (this.maintenanceInProgress || this.flushInProgress) {
252
- return true;
253
- }
254
- if (this.pendingRows.length > 0) {
255
- return true;
256
- }
257
- return false;
258
- }
259
- async runWithMaintenanceLock(label, task) {
260
- if (this.maintenanceInProgress) {
261
- this.logger.debug(`[SERIES STORAGE] Skipping ${label} (maintenance already running)`);
262
- return;
263
- }
264
- this.maintenanceInProgress = true;
265
- const startedAt = Date.now();
266
- try {
267
- await task();
268
- const elapsedMs = Date.now() - startedAt;
269
- this.logger.debug(`[SERIES STORAGE] ${label} completed in ${elapsedMs}ms`);
270
- }
271
- finally {
272
- this.maintenanceInProgress = false;
273
- }
274
- }
275
- /**
276
- * Returns the active storage configuration.
277
- *
278
- * @returns {IDuckDbParquetStorageConfig} Current storage configuration.
279
- *
280
- * @example
281
- * const cfg = storage.getConfig();
282
- */
283
- getConfig() {
284
- return this.config;
285
- }
286
- /**
287
- * Returns last DuckDB initialization error when initialization failed.
185
+ * Returns last node:sqlite initialization error when initialization failed.
288
186
  *
289
187
  * @returns {string | null} Initialization error text or null.
290
188
  *
@@ -295,38 +193,34 @@ class DuckDbParquetStorageService {
295
193
  return this.lastInitError;
296
194
  }
297
195
  /**
298
- * Indicates whether DuckDB/Parquet mode is selected.
196
+ * Indicates whether node:sqlite mode is selected.
299
197
  *
300
- * @returns {boolean} True when the selected engine is `duckdb-parquet`.
198
+ * @returns {boolean} True when the selected engine is `node:sqlite`.
301
199
  *
302
200
  * @example
303
- * if (storage.isDuckDbParquetEnabled()) {
304
- * console.log('DuckDB mode enabled');
201
+ * if (storage.isSqliteEnabled()) {
202
+ * console.log('node:sqlite mode enabled');
305
203
  * }
306
204
  */
307
- isDuckDbParquetEnabled() {
308
- return this.config.engine === 'duckdb-parquet';
205
+ isSqliteEnabled() {
206
+ return this.config.engine === 'node:sqlite';
309
207
  }
310
208
  /**
311
- * Indicates whether DuckDB/Parquet mode is initialized and ready.
209
+ * Indicates whether node:sqlite mode is initialized and ready.
312
210
  *
313
- * @returns {boolean} True when DuckDB mode is selected and an active connection exists.
211
+ * @returns {boolean} True when node:sqlite mode is selected and an active connection exists.
314
212
  *
315
213
  * @example
316
- * if (storage.isDuckDbParquetReady()) {
317
- * console.log('DuckDB ready');
214
+ * if (storage.isSqliteReady()) {
215
+ * console.log('node:sqlite ready');
318
216
  * }
319
217
  */
320
- isDuckDbParquetReady() {
321
- return this.isDuckDbParquetEnabled() && this.initialized && this.connection !== null;
218
+ isSqliteReady() {
219
+ return this.isSqliteEnabled() && this.initialized && this.db !== null && this.runtimeAvailable;
322
220
  }
323
221
  /**
324
222
  * Returns the current storage lifecycle token.
325
223
  *
326
- * The token changes whenever a new initialization attempt starts and can be
327
- * used by callers to scope async stop operations (flush/close) so stale work
328
- * does not affect a newer startup session.
329
- *
330
224
  * @returns {number} Current lifecycle token.
331
225
  *
332
226
  * @example
@@ -345,30 +239,16 @@ class DuckDbParquetStorageService {
345
239
  * storage.enqueueSample(sample);
346
240
  */
347
241
  enqueueSample(sample) {
348
- if (!this.isDuckDbParquetReady()) {
242
+ if (!this.isSqliteReady()) {
349
243
  return;
350
244
  }
351
245
  this.pendingRows.push(sample);
352
- const rangeKey = sample.seriesId;
353
- const existing = this.pendingRangesBySeriesId.get(rangeKey);
354
- if (!existing) {
355
- this.pendingRangesBySeriesId.set(rangeKey, {
356
- seriesId: sample.seriesId,
357
- minTs: sample.timestamp,
358
- maxTs: sample.timestamp
359
- });
360
- return;
361
- }
362
- this.pendingRangesBySeriesId.set(rangeKey, {
363
- seriesId: existing.seriesId,
364
- minTs: Math.min(existing.minTs, sample.timestamp),
365
- maxTs: Math.max(existing.maxTs, sample.timestamp)
366
- });
367
246
  }
368
247
  /**
369
- * Flushes queued samples into DuckDB and exports changed ranges to Parquet chunks.
248
+ * Flushes queued samples into node:sqlite.
370
249
  *
371
- * @returns {Promise<{inserted: number; exported: number}>} Number of inserted rows and exported parquet files.
250
+ * @param {number} [expectedLifecycleToken] Optional lifecycle token guard to skip stale flushes.
251
+ * @returns {Promise<{ inserted: number; exported: number }>} Number of inserted rows (exported is always 0).
372
252
  *
373
253
  * @example
374
254
  * const result = await storage.flush();
@@ -377,7 +257,7 @@ class DuckDbParquetStorageService {
377
257
  if (expectedLifecycleToken !== undefined && expectedLifecycleToken !== this.lifecycleToken) {
378
258
  return { inserted: 0, exported: 0 };
379
259
  }
380
- if (!this.isDuckDbParquetEnabled() || !this.connection || this.pendingRows.length === 0) {
260
+ if (!this.isSqliteEnabled() || !this.db || this.pendingRows.length === 0) {
381
261
  return { inserted: 0, exported: 0 };
382
262
  }
383
263
  if (this.flushInProgress) {
@@ -385,35 +265,16 @@ class DuckDbParquetStorageService {
385
265
  }
386
266
  this.flushInProgress = true;
387
267
  const rows = this.pendingRows;
388
- const ranges = new Map(this.pendingRangesBySeriesId);
389
268
  this.pendingRows = [];
390
- this.pendingRangesBySeriesId.clear();
391
269
  const startedAt = Date.now();
392
270
  try {
393
271
  await this.insertRows(rows);
394
- let exported = 0;
395
- for (const range of ranges.values()) {
396
- await this.exportSeriesRange(range.seriesId, range.minTs, range.maxTs);
397
- exported += 1;
398
- }
399
272
  const elapsedMs = Date.now() - startedAt;
400
- this.logger.debug(`[SERIES STORAGE] flush inserted=${rows.length} exported=${exported} durationMs=${elapsedMs}`);
401
- return { inserted: rows.length, exported };
273
+ this.logger.debug(`[SERIES STORAGE] flush inserted=${rows.length} durationMs=${elapsedMs}`);
274
+ return { inserted: rows.length, exported: 0 };
402
275
  }
403
276
  catch (error) {
404
277
  this.pendingRows = [...rows, ...this.pendingRows];
405
- ranges.forEach((range, rangeKey) => {
406
- const current = this.pendingRangesBySeriesId.get(rangeKey);
407
- if (!current) {
408
- this.pendingRangesBySeriesId.set(rangeKey, range);
409
- return;
410
- }
411
- this.pendingRangesBySeriesId.set(rangeKey, {
412
- seriesId: current.seriesId,
413
- minTs: Math.min(current.minTs, range.minTs),
414
- maxTs: Math.max(current.maxTs, range.maxTs)
415
- });
416
- });
417
278
  throw error;
418
279
  }
419
280
  finally {
@@ -421,7 +282,7 @@ class DuckDbParquetStorageService {
421
282
  }
422
283
  }
423
284
  /**
424
- * Returns persisted series definitions from DuckDB.
285
+ * Returns persisted series definitions from node:sqlite.
425
286
  *
426
287
  * @returns {Promise<ISeriesDefinition[]>} Stored series definitions.
427
288
  *
@@ -429,7 +290,7 @@ class DuckDbParquetStorageService {
429
290
  * const series = await storage.getSeriesDefinitions();
430
291
  */
431
292
  async getSeriesDefinitions() {
432
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
293
+ if (!this.isSqliteEnabled() || !this.db) {
433
294
  return [];
434
295
  }
435
296
  const rows = await this.querySql(`
@@ -446,7 +307,8 @@ class DuckDbParquetStorageService {
446
307
  retention_duration_ms,
447
308
  sample_time,
448
309
  enabled,
449
- methods_json
310
+ methods_json,
311
+ reconcile_ts
450
312
  FROM history_series
451
313
  ORDER BY series_id ASC
452
314
  `);
@@ -463,11 +325,12 @@ class DuckDbParquetStorageService {
463
325
  retentionDurationMs: this.toNumberOrUndefined(row.retention_duration_ms),
464
326
  sampleTime: this.toNumberOrUndefined(row.sample_time),
465
327
  enabled: this.toBoolean(row.enabled),
466
- methods: this.parseMethods(row.methods_json)
328
+ methods: this.parseMethods(row.methods_json),
329
+ reconcileTs: this.toNumberOrUndefined(row.reconcile_ts)
467
330
  }));
468
331
  }
469
332
  /**
470
- * Persists one series definition in DuckDB.
333
+ * Persists one series definition in node:sqlite.
471
334
  *
472
335
  * @param {ISeriesDefinition} series Series definition to persist.
473
336
  * @returns {Promise<void>}
@@ -476,7 +339,7 @@ class DuckDbParquetStorageService {
476
339
  * await storage.upsertSeriesDefinition(series);
477
340
  */
478
341
  async upsertSeriesDefinition(series) {
479
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
342
+ if (!this.isSqliteEnabled() || !this.db) {
480
343
  return;
481
344
  }
482
345
  await this.runSql(`DELETE FROM history_series WHERE series_id = ${this.escape(series.seriesId)}`);
@@ -494,7 +357,8 @@ class DuckDbParquetStorageService {
494
357
  retention_duration_ms,
495
358
  sample_time,
496
359
  enabled,
497
- methods_json
360
+ methods_json,
361
+ reconcile_ts
498
362
  ) VALUES (
499
363
  ${this.escape(series.seriesId)},
500
364
  ${this.escape(series.datasetUuid)},
@@ -507,13 +371,14 @@ class DuckDbParquetStorageService {
507
371
  ${this.nullableNumber(series.period)},
508
372
  ${this.nullableNumber(series.retentionDurationMs)},
509
373
  ${this.nullableNumber(series.sampleTime)},
510
- ${series.enabled === false ? 'FALSE' : 'TRUE'},
511
- ${this.nullableString(series.methods ? JSON.stringify(series.methods) : null)}
374
+ ${series.enabled === false ? '0' : '1'},
375
+ ${this.nullableString(series.methods ? JSON.stringify(series.methods) : null)},
376
+ ${this.nullableNumber(series.reconcileTs)}
512
377
  )
513
378
  `);
514
379
  }
515
380
  /**
516
- * Deletes one persisted series definition in DuckDB.
381
+ * Deletes one persisted series definition in node:sqlite.
517
382
  *
518
383
  * @param {string} seriesId Series identifier.
519
384
  * @returns {Promise<void>}
@@ -522,7 +387,7 @@ class DuckDbParquetStorageService {
522
387
  * await storage.deleteSeriesDefinition('series-1');
523
388
  */
524
389
  async deleteSeriesDefinition(seriesId) {
525
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
390
+ if (!this.isSqliteEnabled() || !this.db) {
526
391
  return;
527
392
  }
528
393
  await this.runSql(`DELETE FROM history_series WHERE series_id = ${this.escape(seriesId)}`);
@@ -537,7 +402,7 @@ class DuckDbParquetStorageService {
537
402
  * await storage.replaceSeriesDefinitions(series);
538
403
  */
539
404
  async replaceSeriesDefinitions(series) {
540
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
405
+ if (!this.isSqliteEnabled() || !this.db) {
541
406
  return;
542
407
  }
543
408
  await this.runSql('DELETE FROM history_series');
@@ -545,6 +410,29 @@ class DuckDbParquetStorageService {
545
410
  await this.upsertSeriesDefinition(item);
546
411
  }
547
412
  }
413
+ /**
414
+ * Deletes series not reconciled since the given cutoff timestamp.
415
+ *
416
+ * @param {number} cutoffMs Milliseconds since epoch; series with reconcile_ts < cutoffMs will be deleted.
417
+ * @returns {Promise<number>} Number of deleted series.
418
+ *
419
+ * @example
420
+ * const deleted = await storage.deleteStaleSeries(Date.now() - 180 * 24 * 60 * 60 * 1000);
421
+ */
422
+ async deleteStaleSeries(cutoffMs) {
423
+ if (!this.isSqliteEnabled() || !this.db) {
424
+ return 0;
425
+ }
426
+ const rows = await this.querySql(`
427
+ SELECT series_id FROM history_series
428
+ WHERE reconcile_ts IS NULL OR reconcile_ts < ${Math.trunc(cutoffMs)}
429
+ `);
430
+ const ids = rows.map(row => row.series_id).filter(Boolean);
431
+ for (const id of ids) {
432
+ await this.deleteSeriesDefinition(id);
433
+ }
434
+ return ids.length;
435
+ }
548
436
  /**
549
437
  * Removes persisted samples that are older than each series retention window.
550
438
  *
@@ -559,7 +447,7 @@ class DuckDbParquetStorageService {
559
447
  if (expectedLifecycleToken !== undefined && expectedLifecycleToken !== this.lifecycleToken) {
560
448
  return 0;
561
449
  }
562
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
450
+ if (!this.isSqliteEnabled() || !this.db) {
563
451
  return 0;
564
452
  }
565
453
  const anchorMs = Math.trunc(Number.isFinite(nowMs) ? nowMs : Date.now());
@@ -579,7 +467,7 @@ class DuckDbParquetStorageService {
579
467
  SELECT rowid
580
468
  FROM history_samples
581
469
  WHERE ${whereClause}
582
- LIMIT ${DuckDbParquetStorageService.PRUNE_BATCH_SIZE}
470
+ LIMIT ${SqliteHistoryStorageService.PRUNE_BATCH_SIZE}
583
471
  `);
584
472
  if (batch.length === 0) {
585
473
  break;
@@ -593,7 +481,7 @@ class DuckDbParquetStorageService {
593
481
  WHERE rowid IN (${rowIds.join(', ')})
594
482
  `);
595
483
  removedRows += rowIds.length;
596
- if (rowIds.length < DuckDbParquetStorageService.PRUNE_BATCH_SIZE) {
484
+ if (rowIds.length < SqliteHistoryStorageService.PRUNE_BATCH_SIZE) {
597
485
  break;
598
486
  }
599
487
  }
@@ -612,7 +500,7 @@ class DuckDbParquetStorageService {
612
500
  if (expectedLifecycleToken !== undefined && expectedLifecycleToken !== this.lifecycleToken) {
613
501
  return 0;
614
502
  }
615
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
503
+ if (!this.isSqliteEnabled() || !this.db) {
616
504
  return 0;
617
505
  }
618
506
  const whereClause = `
@@ -628,7 +516,7 @@ class DuckDbParquetStorageService {
628
516
  SELECT rowid
629
517
  FROM history_samples
630
518
  WHERE ${whereClause}
631
- LIMIT ${DuckDbParquetStorageService.PRUNE_BATCH_SIZE}
519
+ LIMIT ${SqliteHistoryStorageService.PRUNE_BATCH_SIZE}
632
520
  `);
633
521
  if (batch.length === 0) {
634
522
  break;
@@ -642,7 +530,7 @@ class DuckDbParquetStorageService {
642
530
  WHERE rowid IN (${rowIds.join(', ')})
643
531
  `);
644
532
  removedRows += rowIds.length;
645
- if (rowIds.length < DuckDbParquetStorageService.PRUNE_BATCH_SIZE) {
533
+ if (rowIds.length < SqliteHistoryStorageService.PRUNE_BATCH_SIZE) {
646
534
  break;
647
535
  }
648
536
  }
@@ -651,13 +539,14 @@ class DuckDbParquetStorageService {
651
539
  /**
652
540
  * Lists known history paths from persisted samples.
653
541
  *
542
+ * @param {IHistoryRangeQuery} [query] Optional range filter.
654
543
  * @returns {Promise<string[]>} Ordered path names.
655
544
  *
656
545
  * @example
657
546
  * const paths = await storage.getStoredPaths();
658
547
  */
659
548
  async getStoredPaths(query) {
660
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
549
+ if (!this.isSqliteEnabled() || !this.db) {
661
550
  return [];
662
551
  }
663
552
  const nowMs = Date.now();
@@ -675,13 +564,14 @@ class DuckDbParquetStorageService {
675
564
  /**
676
565
  * Lists known history contexts from persisted samples.
677
566
  *
567
+ * @param {IHistoryRangeQuery} [query] Optional range filter.
678
568
  * @returns {Promise<string[]>} Ordered context names.
679
569
  *
680
570
  * @example
681
571
  * const contexts = await storage.getStoredContexts();
682
572
  */
683
573
  async getStoredContexts(query) {
684
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
574
+ if (!this.isSqliteEnabled() || !this.db) {
685
575
  return [];
686
576
  }
687
577
  const nowMs = Date.now();
@@ -697,16 +587,16 @@ class DuckDbParquetStorageService {
697
587
  return rows.map(row => row.value).filter(Boolean);
698
588
  }
699
589
  /**
700
- * Queries history values directly from DuckDB in History API-compatible shape.
590
+ * Queries history values directly from node:sqlite in History API-compatible shape.
701
591
  *
702
592
  * @param {IHistoryQueryParams} query Incoming history values query parameters.
703
- * @returns {Promise<IHistoryValuesResponse | null>} History payload when DuckDB is ready, otherwise null.
593
+ * @returns {Promise<IHistoryValuesResponse | null>} History payload when node:sqlite is ready, otherwise null.
704
594
  *
705
595
  * @example
706
596
  * const result = await storage.getValues({ paths: 'navigation.speedOverGround:avg', duration: 'PT1H' });
707
597
  */
708
598
  async getValues(query) {
709
- if (!this.isDuckDbParquetEnabled() || !this.connection) {
599
+ if (!this.isSqliteEnabled() || !this.db) {
710
600
  return null;
711
601
  }
712
602
  const nowMs = Date.now();
@@ -750,6 +640,7 @@ class DuckDbParquetStorageService {
750
640
  /**
751
641
  * Closes open storage resources.
752
642
  *
643
+ * @param {number} [expectedLifecycleToken] Optional lifecycle token guard to skip stale closes.
753
644
  * @returns {Promise<void>}
754
645
  *
755
646
  * @example
@@ -760,64 +651,174 @@ class DuckDbParquetStorageService {
760
651
  return;
761
652
  }
762
653
  this.initialized = false;
654
+ this.stopVacuumJob();
763
655
  this.stopPruneJob();
764
656
  this.stopStaleSeriesCleanupJob();
765
- if (!this.connection) {
766
- this.db = null;
657
+ if (!this.db) {
767
658
  return;
768
659
  }
769
- const connection = this.connection;
770
660
  const db = this.db;
771
- this.connection = null;
661
+ this.db = null;
772
662
  try {
773
- connection.disconnectSync();
663
+ db.close();
774
664
  }
775
665
  catch {
776
- // ignore disconnect failures during shutdown
666
+ // ignore close failures during shutdown
777
667
  }
668
+ }
669
+ async loadSqliteModule() {
778
670
  try {
779
- db?.closeSync();
671
+ return await Promise.resolve().then(() => __importStar(require('node:sqlite')));
780
672
  }
781
673
  catch {
782
- // ignore close failures during shutdown
674
+ return null;
675
+ }
676
+ }
677
+ startVacuumJob() {
678
+ this.stopVacuumJob();
679
+ if (!this.isSqliteReady())
680
+ return;
681
+ this.vacuumJob = setInterval(() => {
682
+ if (this.shouldSkipMaintenance()) {
683
+ return;
684
+ }
685
+ void this.runWithMaintenanceLock('vacuum', async () => {
686
+ this.logger.debug('[SERIES STORAGE] Running scheduled node:sqlite VACUUM');
687
+ await this.runSql('VACUUM;');
688
+ await this.runSql('PRAGMA optimize;');
689
+ }).catch(err => {
690
+ this.logger.error(`[SERIES STORAGE] VACUUM failed: ${err?.message ?? err}`);
691
+ });
692
+ }, SqliteHistoryStorageService.EIGHT_HOURS_INTERVAL);
693
+ this.vacuumJob.unref?.();
694
+ }
695
+ stopVacuumJob() {
696
+ if (this.vacuumJob) {
697
+ clearInterval(this.vacuumJob);
698
+ this.vacuumJob = null;
699
+ }
700
+ }
701
+ startPruneJob() {
702
+ this.stopPruneJob();
703
+ if (!this.isSqliteReady())
704
+ return;
705
+ this.pruneJob = setInterval(async () => {
706
+ if (this.shouldSkipMaintenance()) {
707
+ return;
708
+ }
709
+ try {
710
+ await this.runWithMaintenanceLock('prune', async () => {
711
+ this.logger.debug('[SERIES STORAGE] Running scheduled prune of expired and orphaned samples');
712
+ const expired = await this.pruneExpiredSamples(Date.now(), this.lifecycleToken);
713
+ const orphaned = await this.pruneOrphanedSamples(this.lifecycleToken);
714
+ this.logger.debug(`[SERIES STORAGE] Pruned ${expired} expired and ${orphaned} orphaned samples`);
715
+ });
716
+ }
717
+ catch (err) {
718
+ this.logger.error(`[SERIES STORAGE] Prune failed: ${err?.message ?? err}`);
719
+ }
720
+ }, SqliteHistoryStorageService.FOUR_HOURS_INTERVAL);
721
+ this.pruneJob.unref?.();
722
+ }
723
+ stopPruneJob() {
724
+ if (this.pruneJob) {
725
+ clearInterval(this.pruneJob);
726
+ this.pruneJob = null;
727
+ }
728
+ }
729
+ startStaleSeriesCleanupJob() {
730
+ this.stopStaleSeriesCleanupJob();
731
+ if (!this.isSqliteReady())
732
+ return;
733
+ this.staleSeriesCleanupJob = setInterval(async () => {
734
+ if (this.shouldSkipMaintenance()) {
735
+ return;
736
+ }
737
+ try {
738
+ await this.runWithMaintenanceLock('stale-cleanup', async () => {
739
+ const cutoff = Date.now() - SqliteHistoryStorageService.STALE_SERIES_AGE_MS;
740
+ this.logger.debug(`[SERIES STORAGE] Running scheduled stale series cleanup (cutoff: ${new Date(cutoff).toISOString()})`);
741
+ const deleted = await this.deleteStaleSeries(cutoff);
742
+ if (deleted > 0) {
743
+ this.logger.debug(`[SERIES STORAGE] Deleted ${deleted} series not reconciled in the last 6 months`);
744
+ }
745
+ });
746
+ }
747
+ catch (err) {
748
+ this.logger.error(`[SERIES STORAGE] Stale series cleanup failed: ${err?.message ?? err}`);
749
+ }
750
+ }, SqliteHistoryStorageService.EIGHT_HOURS_INTERVAL);
751
+ this.staleSeriesCleanupJob.unref?.();
752
+ }
753
+ stopStaleSeriesCleanupJob() {
754
+ if (this.staleSeriesCleanupJob) {
755
+ clearInterval(this.staleSeriesCleanupJob);
756
+ this.staleSeriesCleanupJob = null;
757
+ }
758
+ }
759
+ shouldSkipMaintenance() {
760
+ if (!this.isSqliteReady() || !this.db) {
761
+ return true;
762
+ }
763
+ if (this.maintenanceInProgress || this.flushInProgress) {
764
+ return true;
765
+ }
766
+ if (this.pendingRows.length > 0) {
767
+ return true;
768
+ }
769
+ return false;
770
+ }
771
+ async runWithMaintenanceLock(label, task) {
772
+ if (this.maintenanceInProgress) {
773
+ this.logger.debug(`[SERIES STORAGE] Skipping ${label} (maintenance already running)`);
774
+ return;
775
+ }
776
+ this.maintenanceInProgress = true;
777
+ const startedAt = Date.now();
778
+ try {
779
+ await task();
780
+ const elapsedMs = Date.now() - startedAt;
781
+ this.logger.debug(`[SERIES STORAGE] ${label} completed in ${elapsedMs}ms`);
782
+ }
783
+ finally {
784
+ this.maintenanceInProgress = false;
783
785
  }
784
- this.db = null;
785
786
  }
786
787
  async createCoreTables() {
787
788
  await this.runSql(`
788
789
  CREATE TABLE IF NOT EXISTS history_samples (
789
- series_id VARCHAR,
790
- dataset_uuid VARCHAR,
791
- owner_widget_uuid VARCHAR,
792
- path VARCHAR,
793
- context VARCHAR,
794
- source VARCHAR,
795
- ts_ms BIGINT,
796
- value DOUBLE
790
+ series_id TEXT,
791
+ dataset_uuid TEXT,
792
+ owner_widget_uuid TEXT,
793
+ path TEXT,
794
+ context TEXT,
795
+ source TEXT,
796
+ ts_ms INTEGER,
797
+ value REAL
797
798
  )
798
799
  `);
799
800
  await this.runSql(`
800
801
  CREATE TABLE IF NOT EXISTS history_series (
801
- series_id VARCHAR NOT NULL,
802
- dataset_uuid VARCHAR NOT NULL,
803
- owner_widget_uuid VARCHAR NOT NULL,
804
- owner_widget_selector VARCHAR,
805
- path VARCHAR NOT NULL,
806
- source VARCHAR,
807
- context VARCHAR,
808
- time_scale VARCHAR,
802
+ series_id TEXT NOT NULL,
803
+ dataset_uuid TEXT NOT NULL,
804
+ owner_widget_uuid TEXT NOT NULL,
805
+ owner_widget_selector TEXT,
806
+ path TEXT NOT NULL,
807
+ source TEXT,
808
+ context TEXT,
809
+ time_scale TEXT,
809
810
  period INTEGER,
810
- retention_duration_ms BIGINT,
811
+ retention_duration_ms INTEGER,
811
812
  sample_time INTEGER,
812
- enabled BOOLEAN,
813
- methods_json VARCHAR,
814
- reconcile_ts BIGINT,
813
+ enabled INTEGER,
814
+ methods_json TEXT,
815
+ reconcile_ts INTEGER,
815
816
  PRIMARY KEY (series_id)
816
817
  )
817
818
  `);
818
819
  }
819
820
  async insertRows(rows) {
820
- if (rows.length === 0) {
821
+ if (!this.db || rows.length === 0) {
821
822
  return;
822
823
  }
823
824
  const valuesSql = rows
@@ -835,80 +836,28 @@ class DuckDbParquetStorageService {
835
836
  value
836
837
  ) VALUES ${valuesSql}
837
838
  `;
838
- await this.runSql(sql);
839
- }
840
- async exportSeriesRange(seriesId, fromMs, toMs) {
841
- const baseDir = (0, path_1.resolve)(this.config.parquetDirectory);
842
- const seriesDir = (0, path_1.join)(baseDir, this.safePath(seriesId));
843
- (0, fs_1.mkdirSync)(seriesDir, { recursive: true });
844
- const filePath = (0, path_1.join)(seriesDir, `${fromMs}-${toMs}.parquet`);
845
- const rows = await this.querySql(`
846
- SELECT
847
- series_id,
848
- dataset_uuid,
849
- owner_widget_uuid,
850
- path,
851
- context,
852
- source,
853
- ts_ms,
854
- value
855
- FROM history_samples
856
- WHERE series_id = ${this.escape(seriesId)}
857
- AND ts_ms >= ${Math.trunc(fromMs)}
858
- AND ts_ms <= ${Math.trunc(toMs)}
859
- ORDER BY ts_ms
860
- `);
861
- if (rows.length === 0) {
862
- return;
863
- }
864
- const schema = new parquetjs_1.ParquetSchema({
865
- series_id: { type: 'UTF8' },
866
- dataset_uuid: { type: 'UTF8' },
867
- owner_widget_uuid: { type: 'UTF8' },
868
- path: { type: 'UTF8' },
869
- context: { type: 'UTF8' },
870
- source: { type: 'UTF8', optional: true },
871
- ts_ms: { type: 'INT64' },
872
- ts: { type: 'TIMESTAMP_MILLIS' },
873
- value: { type: 'DOUBLE' }
874
- });
875
- const writer = await parquetjs_1.ParquetWriter.openFile(schema, filePath);
839
+ this.db.exec('BEGIN');
876
840
  try {
877
- for (const row of rows) {
878
- const timestampMs = this.toNumberOrUndefined(row.ts_ms);
879
- const numericValue = this.toNumberOrUndefined(row.value);
880
- if (timestampMs === undefined || numericValue === undefined) {
881
- continue;
882
- }
883
- await writer.appendRow({
884
- series_id: row.series_id,
885
- dataset_uuid: row.dataset_uuid,
886
- owner_widget_uuid: row.owner_widget_uuid,
887
- path: row.path,
888
- context: row.context,
889
- source: row.source ?? undefined,
890
- ts_ms: Math.trunc(timestampMs),
891
- ts: new Date(timestampMs),
892
- value: numericValue
893
- });
894
- }
841
+ await this.runSql(sql);
842
+ this.db.exec('COMMIT');
895
843
  }
896
- finally {
897
- await writer.close();
844
+ catch (error) {
845
+ this.db.exec('ROLLBACK');
846
+ throw error;
898
847
  }
899
848
  }
900
849
  async runSql(sql) {
901
- if (!this.connection) {
902
- throw new Error('DuckDB connection is not initialized');
850
+ if (!this.db) {
851
+ throw new Error('node:sqlite database is not initialized');
903
852
  }
904
- await this.connection.run(sql);
853
+ this.db.exec(sql);
905
854
  }
906
855
  async querySql(sql) {
907
- if (!this.connection) {
908
- throw new Error('DuckDB connection is not initialized');
856
+ if (!this.db) {
857
+ throw new Error('node:sqlite database is not initialized');
909
858
  }
910
- const result = await this.connection.runAndReadAll(sql);
911
- return result.getRowObjectsJson();
859
+ const statement = this.db.prepare(sql);
860
+ return statement.all();
912
861
  }
913
862
  async selectRowsForPaths(paths, context, fromMs, toMs) {
914
863
  const rowsByPath = new Map();
@@ -1115,9 +1064,6 @@ class DuckDbParquetStorageService {
1115
1064
  const sum = values.reduce((acc, value) => acc + value, 0);
1116
1065
  return sum / values.length;
1117
1066
  }
1118
- safePath(value) {
1119
- return value.replace(/[^a-zA-Z0-9._-]/g, '_');
1120
- }
1121
1067
  escape(value) {
1122
1068
  return `'${String(value).replace(/'/g, "''")}'`;
1123
1069
  }
@@ -1179,4 +1125,4 @@ class DuckDbParquetStorageService {
1179
1125
  return Boolean(value);
1180
1126
  }
1181
1127
  }
1182
- exports.DuckDbParquetStorageService = DuckDbParquetStorageService;
1128
+ exports.SqliteHistoryStorageService = SqliteHistoryStorageService;