signalk-questdb 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. package/.prettierignore +2 -0
  2. package/LICENSE +21 -0
  3. package/README.md +140 -0
  4. package/dist/config/schema.d.ts +20 -0
  5. package/dist/config/schema.js +70 -0
  6. package/dist/config/schema.js.map +1 -0
  7. package/dist/history-v1.d.ts +25 -0
  8. package/dist/history-v1.js +116 -0
  9. package/dist/history-v1.js.map +1 -0
  10. package/dist/history-v2.d.ts +50 -0
  11. package/dist/history-v2.js +192 -0
  12. package/dist/history-v2.js.map +1 -0
  13. package/dist/ilp-writer.d.ts +25 -0
  14. package/dist/ilp-writer.js +179 -0
  15. package/dist/ilp-writer.js.map +1 -0
  16. package/dist/index.d.ts +1 -0
  17. package/dist/index.js +610 -0
  18. package/dist/index.js.map +1 -0
  19. package/dist/query-client.d.ts +23 -0
  20. package/dist/query-client.js +119 -0
  21. package/dist/query-client.js.map +1 -0
  22. package/dist/retention.d.ts +2 -0
  23. package/dist/retention.js +23 -0
  24. package/dist/retention.js.map +1 -0
  25. package/dist/test/ilp-writer.test.d.ts +1 -0
  26. package/dist/test/ilp-writer.test.js +124 -0
  27. package/dist/test/ilp-writer.test.js.map +1 -0
  28. package/dist/test/query-client.test.d.ts +1 -0
  29. package/dist/test/query-client.test.js +57 -0
  30. package/dist/test/query-client.test.js.map +1 -0
  31. package/dist/test/time-range.test.d.ts +1 -0
  32. package/dist/test/time-range.test.js +58 -0
  33. package/dist/test/time-range.test.js.map +1 -0
  34. package/dist/time-range.d.ts +13 -0
  35. package/dist/time-range.js +45 -0
  36. package/dist/time-range.js.map +1 -0
  37. package/package.json +65 -0
  38. package/public/540.js +2 -0
  39. package/public/540.js.LICENSE.txt +9 -0
  40. package/public/805.js +1 -0
  41. package/public/main.js +1 -0
  42. package/public/remoteEntry.js +1 -0
  43. package/webpack.config.js +43 -0
@@ -0,0 +1,2 @@
1
+ dist/
2
+ node_modules/
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Dirk Wahrheit
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,140 @@
1
+ # signalk-questdb
2
+
3
+ QuestDB history provider for Signal K -- a drop-in replacement for signalk-to-influxdb and signalk-to-influxdb2.
4
+
5
+ Stores all vessel data in QuestDB running as a managed container (via [signalk-container](https://github.com/dirkwa/signalk-container)). Implements both the modern v2 History API and the legacy v1 playback API.
6
+
7
+ ## Features
8
+
9
+ - **Automatic container management** -- QuestDB runs in Podman/Docker, managed by signalk-container
10
+ - **ILP ingestion** -- writes via InfluxDB Line Protocol over raw TCP (no client library needed)
11
+ - **v2 History API** -- `getValues`, `getPaths`, `getContexts` with all aggregate methods
12
+ - **v1 Legacy API** -- `hasAnyData`, `streamHistory`, `getHistory` for WebSocket playback
13
+ - **Path filtering** -- include/exclude paths with glob patterns
14
+ - **Sampling rates** -- per-path throttling to control write volume
15
+ - **Retention policy** -- automatic partition drop after N days
16
+ - **AIS recording** -- optionally record other vessels
17
+ - **Position tracking** -- separate optimized table for lat/lon
18
+ - **On-disk compression** -- LZ4 (fast) or ZSTD (smaller) via QuestDB WAL segment compression
19
+ - **Parquet export** -- native QuestDB Parquet export with configurable compression
20
+ - **CSV export** -- download historical data via REST endpoint
21
+ - **InfluxDB migration** -- auto-detect InfluxDB 1.x/2.x on localhost or remote URL
22
+ - **One-click updates** -- check for new QuestDB releases and update from the config panel
23
+ - **Config panel** -- status dashboard with row counts, version picker, update check, collapsible compression/migration/export sections
24
+ - **SQL injection protection** -- strict input validation on all query endpoints
25
+ - **Container lifecycle** -- container stops when plugin is disabled, starts on enable
26
+
27
+ ## Config Panel
28
+
29
+ The plugin embeds a React config panel in the Signal K Admin UI showing:
30
+
31
+ - **QuestDB Status** -- running/not running indicator, total rows, active paths today
32
+ - **Update check** -- compares running version against latest GitHub release, one-click update
33
+ - **Image Version** -- dropdown with latest, pre-releases, and last 3 stable releases
34
+ - **Connection** -- managed container toggle, host/ports, PostgreSQL port for Grafana
35
+ - **Recording** -- record self, record AIS targets, retention days
36
+ - **Compression** (collapsible) -- LZ4/ZSTD codec selection for on-disk storage
37
+ - **InfluxDB Migration** (collapsible) -- auto-detect with manual URL for remote instances
38
+ - **Data Export** (collapsible) -- date range picker, Parquet/CSV format, download button
39
+
40
+ ## QuestDB Schema
41
+
42
+ Three tables, all with WAL mode, daily partitioning, and deduplication:
43
+
44
+ | Table | Purpose | Columns |
45
+ | ------------------ | -------------- | ---------------------------------------------------------------- |
46
+ | `signalk` | Numeric values | `ts`, `path` (SYMBOL), `context` (SYMBOL), `value` (DOUBLE) |
47
+ | `signalk_str` | String values | `ts`, `path` (SYMBOL), `context` (SYMBOL), `value_str` (VARCHAR) |
48
+ | `signalk_position` | Positions | `ts`, `context` (SYMBOL), `lat` (DOUBLE), `lon` (DOUBLE) |
49
+
50
+ ## History API
51
+
52
+ ### v2 (REST -- `/signalk/v2/api/history/`)
53
+
54
+ Registered via `app.registerHistoryApiProvider()`. Supports all aggregate methods:
55
+
56
+ | Method | QuestDB mapping |
57
+ | --------- | -------------------------------------- |
58
+ | `average` | `avg(value)` |
59
+ | `min` | `min(value)` |
60
+ | `max` | `max(value)` |
61
+ | `first` | `first(value)` |
62
+ | `last` | `last(value)` |
63
+ | `mid` | `(min + max) / 2` |
64
+ | `sma` | Client-side N-sample moving average |
65
+ | `ema` | Client-side exponential moving average |
66
+
67
+ Query example:
68
+
69
+ ```
70
+ GET /signalk/v2/api/history/values?paths=navigation.speedOverGround&duration=PT1H&resolution=60
71
+ ```
72
+
73
+ ### v1 (WebSocket playback)
74
+
75
+ Registered via `app.registerHistoryProvider()`. Supports playback at configurable speed multipliers using chunked reads from QuestDB.
76
+
77
+ ## REST Endpoints
78
+
79
+ All mounted at `/plugins/signalk-questdb/api/`:
80
+
81
+ | Method | Path | Description |
82
+ | ------ | ---------------------------------------- | -------------------------------------------------- |
83
+ | GET | `/status` | QuestDB health, row counts, active paths |
84
+ | GET | `/query?sql=...` | Read-only SQL proxy (DDL/DML blocked) |
85
+ | GET | `/paths` | All recorded paths with row counts and time range |
86
+ | GET | `/versions` | QuestDB releases from GitHub (for version picker) |
87
+ | GET | `/update/check` | Compare running version against latest release |
88
+ | POST | `/update/apply` | Pull latest image, recreate container, reconnect |
89
+ | GET | `/migration/detect` | Auto-detect InfluxDB (supports `?url=` for remote) |
90
+ | GET | `/export?from=...&to=...&format=parquet` | Parquet or CSV export |
91
+
92
+ ## Configuration
93
+
94
+ | Setting | Default | Description |
95
+ | ------------------ | ----------- | ------------------------------------------------------------ |
96
+ | QuestDB version | `latest` | Docker image tag (dropdown shows stable + pre-releases) |
97
+ | Managed container | `true` | Let signalk-container manage QuestDB, or connect to external |
98
+ | QuestDB host | `127.0.0.1` | Host (only used when managed=false) |
99
+ | HTTP port | `9000` | QuestDB REST API port |
100
+ | ILP port | `9009` | InfluxDB Line Protocol write port |
101
+ | PostgreSQL port | `8812` | For Grafana connections |
102
+ | Record own vessel | `true` | Record self context |
103
+ | Record AIS targets | `false` | Record other vessels |
104
+ | Retention (days) | `0` | Auto-delete old partitions (0 = keep forever) |
105
+ | Compression codec | `lz4` | On-disk WAL compression: `none`, `lz4`, or `zstd` |
106
+ | Compression level | `3` | ZSTD level 1-22 (only when codec is zstd) |
107
+
108
+ ## Data Storage
109
+
110
+ QuestDB data is stored at `~/.signalk/plugin-config-data/signalk-questdb/` on the host, mounted into the container at `/var/lib/questdb`. Data survives container restarts, image upgrades, and plugin disable/enable cycles.
111
+
112
+ ## Grafana Integration
113
+
114
+ Connect Grafana to QuestDB via the PostgreSQL data source:
115
+
116
+ - Host: `localhost:8812`
117
+ - User: `admin`
118
+ - Password: `quest`
119
+ - Database: `qdb`
120
+
121
+ Example query:
122
+
123
+ ```sql
124
+ SELECT ts AS time, avg(value) AS sog
125
+ FROM signalk
126
+ WHERE path = 'navigation.speedOverGround'
127
+ AND context = 'self'
128
+ AND ts BETWEEN $__timeFrom() AND $__timeTo()
129
+ SAMPLE BY $__interval
130
+ ```
131
+
132
+ ## Requirements
133
+
134
+ - Node.js >= 22
135
+ - [signalk-container](https://github.com/dirkwa/signalk-container) plugin (for managed mode)
136
+ - Signal K server
137
+
138
+ ## License
139
+
140
+ MIT
@@ -0,0 +1,20 @@
1
+ import { Static } from "@sinclair/typebox";
2
+ export declare const ConfigSchema: import("@sinclair/typebox").TObject<{
3
+ questdbHost: import("@sinclair/typebox").TString;
4
+ questdbIlpPort: import("@sinclair/typebox").TNumber;
5
+ questdbHttpPort: import("@sinclair/typebox").TNumber;
6
+ questdbPgPort: import("@sinclair/typebox").TNumber;
7
+ questdbVersion: import("@sinclair/typebox").TString;
8
+ managedContainer: import("@sinclair/typebox").TBoolean;
9
+ pathFilter: import("@sinclair/typebox").TObject<{
10
+ mode: import("@sinclair/typebox").TUnion<[import("@sinclair/typebox").TLiteral<"exclude">, import("@sinclair/typebox").TLiteral<"include">]>;
11
+ paths: import("@sinclair/typebox").TArray<import("@sinclair/typebox").TString>;
12
+ }>;
13
+ samplingRates: import("@sinclair/typebox").TRecord<import("@sinclair/typebox").TString, import("@sinclair/typebox").TNumber>;
14
+ recordSelf: import("@sinclair/typebox").TBoolean;
15
+ recordOthers: import("@sinclair/typebox").TBoolean;
16
+ retentionDays: import("@sinclair/typebox").TNumber;
17
+ compression: import("@sinclair/typebox").TUnion<[import("@sinclair/typebox").TLiteral<"none">, import("@sinclair/typebox").TLiteral<"lz4">, import("@sinclair/typebox").TLiteral<"zstd">]>;
18
+ compressionLevel: import("@sinclair/typebox").TNumber;
19
+ }>;
20
+ export type Config = Static<typeof ConfigSchema>;
@@ -0,0 +1,70 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ConfigSchema = void 0;
4
+ const typebox_1 = require("@sinclair/typebox");
5
+ exports.ConfigSchema = typebox_1.Type.Object({
6
+ questdbHost: typebox_1.Type.String({
7
+ default: "127.0.0.1",
8
+ title: "QuestDB host",
9
+ }),
10
+ questdbIlpPort: typebox_1.Type.Number({
11
+ default: 9009,
12
+ title: "ILP port (writes)",
13
+ }),
14
+ questdbHttpPort: typebox_1.Type.Number({
15
+ default: 9000,
16
+ title: "HTTP port (queries)",
17
+ }),
18
+ questdbPgPort: typebox_1.Type.Number({
19
+ default: 8812,
20
+ title: "PostgreSQL wire port",
21
+ }),
22
+ questdbVersion: typebox_1.Type.String({
23
+ default: "9.2.0",
24
+ title: "QuestDB image version",
25
+ }),
26
+ managedContainer: typebox_1.Type.Boolean({
27
+ default: true,
28
+ title: "Manage QuestDB container via signalk-container",
29
+ description: "Disable to connect to an external QuestDB instance",
30
+ }),
31
+ pathFilter: typebox_1.Type.Object({
32
+ mode: typebox_1.Type.Union([typebox_1.Type.Literal("exclude"), typebox_1.Type.Literal("include")], {
33
+ default: "exclude",
34
+ title: "Filter mode",
35
+ }),
36
+ paths: typebox_1.Type.Array(typebox_1.Type.String(), {
37
+ default: [],
38
+ title: "Path patterns (glob supported)",
39
+ description: 'e.g. "notifications.*", "environment.wind.*"',
40
+ }),
41
+ }),
42
+ samplingRates: typebox_1.Type.Record(typebox_1.Type.String(), typebox_1.Type.Number(), {
43
+ default: {},
44
+ title: "Per-path sampling rates (ms)",
45
+ description: 'Minimum ms between writes per path. e.g. { "environment.wind.*": 1000 }',
46
+ }),
47
+ recordSelf: typebox_1.Type.Boolean({
48
+ default: true,
49
+ title: "Record own vessel",
50
+ }),
51
+ recordOthers: typebox_1.Type.Boolean({
52
+ default: false,
53
+ title: "Record AIS targets",
54
+ }),
55
+ retentionDays: typebox_1.Type.Number({
56
+ default: 0,
57
+ title: "Retention (days, 0 = keep forever)",
58
+ }),
59
+ compression: typebox_1.Type.Union([typebox_1.Type.Literal("none"), typebox_1.Type.Literal("lz4"), typebox_1.Type.Literal("zstd")], {
60
+ default: "lz4",
61
+ title: "Compression codec",
62
+ description: "Used for QuestDB on-disk WAL segments and Parquet exports (lz4 = fast, zstd = smaller)",
63
+ }),
64
+ compressionLevel: typebox_1.Type.Number({
65
+ default: 3,
66
+ title: "Compression level",
67
+ description: "zstd: 1-22 (default 3), lz4: ignored",
68
+ }),
69
+ });
70
+ //# sourceMappingURL=schema.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schema.js","sourceRoot":"","sources":["../../src/config/schema.ts"],"names":[],"mappings":";;;AAAA,+CAAiD;AAEpC,QAAA,YAAY,GAAG,cAAI,CAAC,MAAM,CAAC;IACtC,WAAW,EAAE,cAAI,CAAC,MAAM,CAAC;QACvB,OAAO,EAAE,WAAW;QACpB,KAAK,EAAE,cAAc;KACtB,CAAC;IACF,cAAc,EAAE,cAAI,CAAC,MAAM,CAAC;QAC1B,OAAO,EAAE,IAAI;QACb,KAAK,EAAE,mBAAmB;KAC3B,CAAC;IACF,eAAe,EAAE,cAAI,CAAC,MAAM,CAAC;QAC3B,OAAO,EAAE,IAAI;QACb,KAAK,EAAE,qBAAqB;KAC7B,CAAC;IACF,aAAa,EAAE,cAAI,CAAC,MAAM,CAAC;QACzB,OAAO,EAAE,IAAI;QACb,KAAK,EAAE,sBAAsB;KAC9B,CAAC;IACF,cAAc,EAAE,cAAI,CAAC,MAAM,CAAC;QAC1B,OAAO,EAAE,OAAO;QAChB,KAAK,EAAE,uBAAuB;KAC/B,CAAC;IAEF,gBAAgB,EAAE,cAAI,CAAC,OAAO,CAAC;QAC7B,OAAO,EAAE,IAAI;QACb,KAAK,EAAE,gDAAgD;QACvD,WAAW,EAAE,oDAAoD;KAClE,CAAC;IAEF,UAAU,EAAE,cAAI,CAAC,MAAM,CAAC;QACtB,IAAI,EAAE,cAAI,CAAC,KAAK,CAAC,CAAC,cAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,cAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE;YACnE,OAAO,EAAE,SAAS;YAClB,KAAK,EAAE,aAAa;SACrB,CAAC;QACF,KAAK,EAAE,cAAI,CAAC,KAAK,CAAC,cAAI,CAAC,MAAM,EAAE,EAAE;YAC/B,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,gCAAgC;YACvC,WAAW,EAAE,8CAA8C;SAC5D,CAAC;KACH,CAAC;IAEF,aAAa,EAAE,cAAI,CAAC,MAAM,CAAC,cAAI,CAAC,MAAM,EAAE,EAAE,cAAI,CAAC,MAAM,EAAE,EAAE;QACvD,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,8BAA8B;QACrC,WAAW,EACT,yEAAyE;KAC5E,CAAC;IAEF,UAAU,EAAE,cAAI,CAAC,OAAO,CAAC;QACvB,OAAO,EAAE,IAAI;QACb,KAAK,EAAE,mBAAmB;KAC3B,CAAC;IACF,YAAY,EAAE,cAAI,CAAC,OAAO,CAAC;QACzB,OAAO,EAAE,KAAK;QACd,KAAK,EAAE,oBAAoB;KAC5B,CAAC;IAEF,aAAa,EAAE,cAAI,CAAC,MAAM,CAAC;QACzB,OAAO,EAAE,CAAC;QACV,KAAK,EAAE,oCAAoC;KAC5C,CAAC;IAEF,WAAW,EAAE,cAAI,CAAC,KAAK,CACrB,CAAC,cAAI,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,cAAI,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,cAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,EACjE;QACE,OAAO,EAAE,KAAK;QACd,KAAK,EAAE,mBAAmB;QAC1B,WAAW,EACT,wFAAwF;KAC3F,CACF;IACD,gBAAgB,EAAE,cAAI,CAAC,MAAM,CAAC;QAC5B,OAAO,EAAE,CAAC;QACV,KAAK,EAAE,mBAAmB;QAC1B,WAAW,EAAE,sCAAsC;KACpD,CAAC;CACH,CAAC,CAAC"}
@@ -0,0 +1,25 @@
1
+ import { QueryClient } from "./query-client";
2
+ interface HistoryOptions {
3
+ startTime: Date;
4
+ playbackRate: number;
5
+ subscribe?: string;
6
+ }
7
+ interface Delta {
8
+ context: string;
9
+ updates: {
10
+ timestamp: string;
11
+ values: {
12
+ path: string;
13
+ value: unknown;
14
+ }[];
15
+ }[];
16
+ }
17
+ export declare function createHistoryProviderV1(queryClient: QueryClient, selfContext: string, debug: (msg: string) => void): {
18
+ hasAnyData: (options: HistoryOptions, callback: (hasResults: boolean) => void) => void;
19
+ streamHistory: (spark: {
20
+ write: (data: unknown) => void;
21
+ on: (event: string, cb: (...args: unknown[]) => void) => void;
22
+ }, options: HistoryOptions, onChange: () => void) => () => void;
23
+ getHistory: (date: Date, path: string, callback: (deltas: Delta[]) => void) => void;
24
+ };
25
+ export {};
@@ -0,0 +1,116 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.createHistoryProviderV1 = createHistoryProviderV1;
4
+ const query_client_1 = require("./query-client");
5
+ function groupRowsIntoDeltas(rows) {
6
+ const byTimestamp = new Map();
7
+ for (const row of rows) {
8
+ const ts = row.ts;
9
+ const context = row.context || "self";
10
+ const path = row.path;
11
+ const value = row.value;
12
+ if (!byTimestamp.has(ts)) {
13
+ byTimestamp.set(ts, new Map());
14
+ }
15
+ const byContext = byTimestamp.get(ts);
16
+ if (!byContext.has(context)) {
17
+ byContext.set(context, []);
18
+ }
19
+ byContext.get(context).push({ path, value });
20
+ }
21
+ const deltas = [];
22
+ for (const [ts, byContext] of byTimestamp) {
23
+ for (const [context, values] of byContext) {
24
+ deltas.push({
25
+ context,
26
+ updates: [{ timestamp: ts, values }],
27
+ });
28
+ }
29
+ }
30
+ return deltas;
31
+ }
32
// Legacy v1 history provider (WebSocket playback) backed by the `signalk`
// table. `queryClient` issues SQL; `selfContext` is the server's own vessel
// context; `debug` is the plugin's debug logger.
function createHistoryProviderV1(queryClient, selfContext, debug) {
    // Report (via callback) whether any rows exist at/after options.startTime.
    // Errors are treated as "no data" — best-effort probe, never throws.
    function hasAnyData(options, callback) {
        const since = (0, query_client_1.validateTimestamp)(options.startTime.toISOString());
        queryClient
            .exec(`SELECT count() as cnt FROM signalk WHERE ts >= '${since}' LIMIT 1`)
            .then((result) => {
                const total = result.dataset.length > 0 ? result.dataset[0][0] : 0;
                callback(total > 0);
            })
            .catch(() => {
                callback(false);
            });
    }
    // Replay history onto `spark` in 60-second chunks, pacing wall-clock time
    // by options.playbackRate (clamped to >= 1). Returns a stop function; the
    // stream also halts when the spark emits "end".
    function streamHistory(spark, options, onChange) {
        void onChange; // part of the v1 provider signature; unused here
        let halted = false;
        const begin = (0, query_client_1.validateTimestamp)(options.startTime.toISOString());
        const rate = Math.max(1, options.playbackRate);
        const CHUNK_SECONDS = 60;
        let cursor = new Date(begin);
        async function pump() {
            if (halted)
                return;
            const from = (0, query_client_1.validateTimestamp)(cursor.toISOString());
            const windowEnd = new Date(cursor.getTime() + CHUNK_SECONDS * 1000);
            const to = (0, query_client_1.validateTimestamp)(windowEnd.toISOString());
            try {
                const result = await queryClient.exec(`SELECT ts, path, context, value FROM signalk WHERE ts >= '${from}' AND ts < '${to}' ORDER BY ts LIMIT 10000`);
                if (result.dataset.length === 0) {
                    // Empty window: skip ahead quickly instead of waiting a full chunk.
                    cursor = windowEnd;
                    if (!halted) {
                        setTimeout(pump, 100);
                    }
                    return;
                }
                const deltas = groupRowsIntoDeltas(queryClient.toObjects(result));
                for (const delta of deltas) {
                    if (halted)
                        return;
                    spark.write({
                        ...delta,
                        // Stored contexts use the literal "self"; resolve to the real one.
                        context: delta.context === "self" ? selfContext : delta.context,
                    });
                }
                cursor = windowEnd;
                // One chunk of history time maps to (chunk / rate) of wall time.
                if (!halted) {
                    setTimeout(pump, (CHUNK_SECONDS * 1000) / rate);
                }
            }
            catch (err) {
                debug(`streamHistory error: ${err instanceof Error ? err.message : String(err)}`);
                // Back off and retry the same window.
                if (!halted) {
                    setTimeout(pump, 1000);
                }
            }
        }
        pump();
        spark.on("end", () => {
            halted = true;
        });
        return () => {
            halted = true;
        };
    }
    // Snapshot: latest value per path at/before `date`, delivered as deltas.
    // NOTE(review): the `path` argument is accepted but not used in the query.
    function getHistory(date, path, callback) {
        const upTo = (0, query_client_1.validateTimestamp)(date.toISOString());
        queryClient
            .exec(`SELECT path, value, ts, context FROM signalk WHERE ts <= '${upTo}' LATEST ON ts PARTITION BY path`)
            .then((result) => {
                callback(groupRowsIntoDeltas(queryClient.toObjects(result)));
            })
            .catch((err) => {
                debug(`getHistory error: ${err instanceof Error ? err.message : String(err)}`);
                callback([]);
            });
    }
    return { hasAnyData, streamHistory, getHistory };
}
116
+ //# sourceMappingURL=history-v1.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"history-v1.js","sourceRoot":"","sources":["../src/history-v1.ts"],"names":[],"mappings":";;AAmDA,0DA6HC;AAhLD,iDAAgE;AAgBhE,SAAS,mBAAmB,CAAC,IAA+B;IAC1D,MAAM,WAAW,GAAG,IAAI,GAAG,EAGxB,CAAC;IAEJ,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE,CAAC;QACvB,MAAM,EAAE,GAAG,GAAG,CAAC,EAAY,CAAC;QAC5B,MAAM,OAAO,GAAI,GAAG,CAAC,OAAkB,IAAI,MAAM,CAAC;QAClD,MAAM,IAAI,GAAG,GAAG,CAAC,IAAc,CAAC;QAChC,MAAM,KAAK,GAAG,GAAG,CAAC,KAAgB,CAAC;QAEnC,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC;YACzB,WAAW,CAAC,GAAG,CAAC,EAAE,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC;QACjC,CAAC;QACD,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,CAAC,EAAE,CAAE,CAAC;QACvC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;YAC5B,SAAS,CAAC,GAAG,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;QAC7B,CAAC;QACD,SAAS,CAAC,GAAG,CAAC,OAAO,CAAE,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC;IAChD,CAAC;IAED,MAAM,MAAM,GAAY,EAAE,CAAC;IAC3B,KAAK,MAAM,CAAC,EAAE,EAAE,SAAS,CAAC,IAAI,WAAW,EAAE,CAAC;QAC1C,KAAK,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,IAAI,SAAS,EAAE,CAAC;YAC1C,MAAM,CAAC,IAAI,CAAC;gBACV,OAAO;gBACP,OAAO,EAAE,CAAC,EAAE,SAAS,EAAE,EAAE,EAAE,MAAM,EAAE,CAAC;aACrC,CAAC,CAAC;QACL,CAAC;IACH,CAAC;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAgB,uBAAuB,CACrC,WAAwB,EACxB,WAAmB,EACnB,KAA4B;IAE5B,SAAS,UAAU,CACjB,OAAuB,EACvB,QAAuC;QAEvC,MAAM,SAAS,GAAG,IAAA,gCAAiB,EAAC,OAAO,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,CAAC;QACrE,WAAW;aACR,IAAI,CACH,mDAAmD,SAAS,WAAW,CACxE;aACA,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE;YACf,MAAM,KAAK,GACT,MAAM,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAE,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAY,CAAC,CAAC,CAAC,CAAC,CAAC;YACnE,QAAQ,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QACtB,CAAC,CAAC;aACD,KAAK,CAAC,GAAG,EAAE;YACV,QAAQ,CAAC,KAAK,CAAC,CAAC;QAClB,CAAC,CAAC,CAAC;IACP,CAAC;IAED,SAAS,aAAa,CACpB,KAGC,EACD,OAAuB;IACvB,6DAA6D;IAC7D,QAAoB;QAEpB,IAAI,OAAO,GAAG,KAAK,CAAC;QACpB,MAAM,SAAS,GAAG,IAAA,gCAAiB,EAAC,OAAO,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,CAAC;QACrE,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,YAAY,CAAC,CAAC;QAEvD,MAAM,aAAa,GAAG,EAAE,CAAC;QACzB,IAAI,WAAW,GAAG,IAAI,IAAI,CAAC,SAAS,
CAAC,CAAC;QAEtC,KAAK,UAAU,WAAW;YACxB,IAAI,OAAO;gBAAE,OAAO;YAEpB,MAAM,IAAI,GAAG,IAAA,gCAAiB,EAAC,WAAW,CAAC,WAAW,EAAE,CAAC,CAAC;YAC1D,MAAM,QAAQ,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,GAAG,aAAa,GAAG,IAAI,CAAC,CAAC;YACxE,MAAM,EAAE,GAAG,IAAA,gCAAiB,EAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC;YAErD,IAAI,CAAC;gBACH,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,IAAI,CACnC,6DAA6D,IAAI,eAAe,EAAE,2BAA2B,CAC9G,CAAC;gBAEF,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;oBAChC,WAAW,GAAG,QAAQ,CAAC;oBACvB,IAAI,CAAC,OAAO,EAAE,CAAC;wBACb,UAAU,CAAC,WAAW,EAAE,GAAG,CAAC,CAAC;oBAC/B,CAAC;oBACD,OAAO;gBACT,CAAC;gBAED,MAAM,IAAI,GAAG,WAAW,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;gBAC3C,MAAM,MAAM,GAAG,mBAAmB,CAAC,IAAI,CAAC,CAAC;gBAEzC,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;oBAC3B,IAAI,OAAO;wBAAE,OAAO;oBAEpB,MAAM,eAAe,GACnB,KAAK,CAAC,OAAO,KAAK,MAAM,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;oBACzD,KAAK,CAAC,KAAK,CAAC;wBACV,GAAG,KAAK;wBACR,OAAO,EAAE,eAAe;qBACzB,CAAC,CAAC;gBACL,CAAC;gBAED,WAAW,GAAG,QAAQ,CAAC;gBACvB,MAAM,SAAS,GAAG,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,YAAY,CAAC;gBACxD,IAAI,CAAC,OAAO,EAAE,CAAC;oBACb,UAAU,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;gBACrC,CAAC;YACH,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,KAAK,CACH,wBAAwB,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAC3E,CAAC;gBACF,IAAI,CAAC,OAAO,EAAE,CAAC;oBACb,UAAU,CAAC,WAAW,EAAE,IAAI,CAAC,CAAC;gBAChC,CAAC;YACH,CAAC;QACH,CAAC;QAED,WAAW,EAAE,CAAC;QAEd,KAAK,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YACnB,OAAO,GAAG,IAAI,CAAC;QACjB,CAAC,CAAC,CAAC;QAEH,OAAO,GAAG,EAAE;YACV,OAAO,GAAG,IAAI,CAAC;QACjB,CAAC,CAAC;IACJ,CAAC;IAED,SAAS,UAAU,CACjB,IAAU,EACV,IAAY,EACZ,QAAmC;QAEnC,MAAM,EAAE,GAAG,IAAA,gCAAiB,EAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAEjD,WAAW;aACR,IAAI,CACH,6DAA6D,EAAE,kCAAkC,CAClG;aACA,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE;YACf,MAAM,IAAI,GAAG,WAAW,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;YAC3C,MAAM,MAAM,GAAG,mBAAmB,CAAC,IAAI,CAAC,CAAC;YACzC,QAAQ,CAAC,MAAM,CAAC,CAAC;QACnB,CAAC,CAAC;aACD,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;YACb,KAAK,CACH,qBAAqB,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,
CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CACxE,CAAC;YACF,QAAQ,CAAC,EAAE,CAAC,CAAC;QACf,CAAC,CAAC,CAAC;IACP,CAAC;IAED,OAAO,EAAE,UAAU,EAAE,aAAa,EAAE,UAAU,EAAE,CAAC;AACnD,CAAC"}
@@ -0,0 +1,50 @@
1
+ import { QueryClient } from "./query-client";
2
+ interface PathSpec {
3
+ path: string;
4
+ aggregate: string;
5
+ parameter: string[];
6
+ }
7
+ interface ValuesRequest {
8
+ from?: {
9
+ toString(): string;
10
+ add(d: unknown): unknown;
11
+ subtract?(d: unknown): unknown;
12
+ };
13
+ to?: {
14
+ toString(): string;
15
+ subtract?(d: unknown): unknown;
16
+ };
17
+ duration?: unknown;
18
+ context?: string;
19
+ resolution?: number;
20
+ pathSpecs: PathSpec[];
21
+ }
22
+ interface ValuesResponse {
23
+ context: string;
24
+ range: {
25
+ from: string;
26
+ to: string;
27
+ };
28
+ values: {
29
+ path: string;
30
+ method: string;
31
+ }[];
32
+ data: [string, ...unknown[]][];
33
+ }
34
+ type PathsRequest = {
35
+ from?: {
36
+ toString(): string;
37
+ add(d: unknown): unknown;
38
+ };
39
+ to?: {
40
+ toString(): string;
41
+ };
42
+ duration?: unknown;
43
+ };
44
+ type ContextsRequest = PathsRequest;
45
+ export declare function createHistoryProviderV2(queryClient: QueryClient): {
46
+ getValues: (query: ValuesRequest) => Promise<ValuesResponse>;
47
+ getPaths: (query: PathsRequest) => Promise<string[]>;
48
+ getContexts: (query: ContextsRequest) => Promise<string[]>;
49
+ };
50
+ export {};
@@ -0,0 +1,192 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.createHistoryProviderV2 = createHistoryProviderV2;
4
+ const query_client_1 = require("./query-client");
5
+ const time_range_1 = require("./time-range");
6
+ function aggregateToSql(method) {
7
+ switch (method) {
8
+ case "average":
9
+ return "avg(value)";
10
+ case "min":
11
+ return "min(value)";
12
+ case "max":
13
+ return "max(value)";
14
+ case "first":
15
+ return "first(value)";
16
+ case "last":
17
+ return "last(value)";
18
+ case "mid":
19
+ return "(min(value) + max(value)) / 2";
20
+ default:
21
+ return "avg(value)";
22
+ }
23
+ }
24
// True for aggregate methods QuestDB cannot compute server-side; these are
// evaluated in-process over the raw samples instead.
function needsClientSideAggregation(method) {
    return ["middle_index", "sma", "ema"].includes(method);
}
27
// Simple moving average over the last `n` non-null samples. A null input
// yields a null output and is excluded from the window; early outputs use
// however many samples have been seen so far.
function computeSMA(values, n) {
    const out = [];
    const recent = [];
    for (const sample of values) {
        if (sample === null) {
            out.push(null);
        }
        else {
            recent.push(sample);
            if (recent.length > n) {
                recent.shift();
            }
            let sum = 0;
            for (const kept of recent) {
                sum += kept;
            }
            out.push(sum / recent.length);
        }
    }
    return out;
}
42
// Exponential moving average with smoothing factor `alpha`. The first
// non-null sample seeds the average; null inputs carry the previous EMA
// forward (null until a sample has been seen).
function computeEMA(values, alpha) {
    const out = [];
    let ema = null;
    for (const sample of values) {
        if (sample === null) {
            out.push(ema);
        }
        else {
            ema = ema === null ? sample : alpha * sample + (1 - alpha) * ema;
            out.push(ema);
        }
    }
    return out;
}
60
// Build the WHERE clause for a validated time range, optionally narrowed to
// one context. Inputs pass through validateTimestamp/validateIdentifier
// before interpolation to block SQL injection.
function buildRangeWhere(range, context) {
    const from = (0, query_client_1.validateTimestamp)(range.from);
    const to = (0, query_client_1.validateTimestamp)(range.to);
    const clauses = [`ts >= '${from}' AND ts <= '${to}'`];
    if (context) {
        clauses.push(`context = '${(0, query_client_1.validateIdentifier)(context)}'`);
    }
    return clauses.join(" AND ");
}
69
// v2 History API provider (REST). Returns { getValues, getPaths, getContexts }
// operating over the `signalk` / `signalk_str` / `signalk_position` tables
// via `queryClient`.
function createHistoryProviderV2(queryClient) {
    // Clamp a requested resolution (seconds) to a valid SAMPLE BY interval.
    // Fix: the previous code interpolated Math.floor(query.resolution)
    // directly, which produced the invalid "SAMPLE BY 0s" for fractional
    // resolutions between 0 and 1 — QuestDB rejects a zero interval.
    function sampleInterval(resolution) {
        return Math.max(1, Math.floor(resolution));
    }
    // navigation.position series from signalk_position.
    // Returns [ts, { latitude, longitude } | null][]; a bucket missing either
    // coordinate yields null.
    async function fetchPositionSeries(range, safeContext, resolution) {
        const where = buildRangeWhere(range, safeContext);
        let sql;
        if (resolution && resolution > 0) {
            sql = `SELECT ts, first(lat) as lat, first(lon) as lon FROM signalk_position WHERE ${where} SAMPLE BY ${sampleInterval(resolution)}s FILL(NULL) ORDER BY ts`;
        }
        else {
            sql = `SELECT ts, lat, lon FROM signalk_position WHERE ${where} ORDER BY ts LIMIT 10000`;
        }
        const result = await queryClient.exec(sql);
        return result.dataset.map((row) => [
            row[0],
            row[1] !== null && row[2] !== null
                ? { latitude: row[1], longitude: row[2] }
                : null,
        ]);
    }
    // Aggregates QuestDB cannot compute (sma / ema / middle_index): fetch raw
    // samples and compute in-process. Returns [ts, value | null][].
    async function fetchClientAggregated(spec, where) {
        const result = await queryClient.exec(`SELECT ts, value FROM signalk WHERE ${where} ORDER BY ts LIMIT 50000`);
        const timestamps = result.dataset.map((r) => r[0]);
        const rawValues = result.dataset.map((r) => r[1]);
        let computed;
        if (spec.aggregate === "sma") {
            const n = parseInt(spec.parameter[0] ?? "5", 10);
            computed = computeSMA(rawValues, n);
        }
        else if (spec.aggregate === "ema") {
            const alpha = parseFloat(spec.parameter[0] ?? "0.2");
            computed = computeEMA(rawValues, alpha);
        }
        else {
            // middle_index: keep only the middle sample, null elsewhere.
            const mid = Math.floor(rawValues.length / 2);
            computed = rawValues.map((_, i) => (i === mid ? rawValues[i] : null));
        }
        return timestamps.map((ts, i) => [ts, computed[i]]);
    }
    // Server-side aggregation via SAMPLE BY when a resolution is given,
    // otherwise raw samples. Returns [ts, value | null][].
    async function fetchServerAggregated(spec, where, resolution) {
        let sql;
        if (resolution && resolution > 0) {
            sql = `SELECT ts, ${aggregateToSql(spec.aggregate)} as agg_value FROM signalk WHERE ${where} SAMPLE BY ${sampleInterval(resolution)}s FILL(NULL) ORDER BY ts`;
        }
        else {
            sql = `SELECT ts, value FROM signalk WHERE ${where} ORDER BY ts LIMIT 10000`;
        }
        const result = await queryClient.exec(sql);
        return result.dataset.map((row) => [row[0], row[1]]);
    }
    // GET .../values — one column per requested pathSpec, rows joined on
    // timestamp with nulls where a series has no sample.
    async function getValues(query) {
        const range = (0, time_range_1.resolveTimeRange)(query);
        const context = query.context ?? "self";
        const safeContext = (0, query_client_1.validateIdentifier)(context);
        const valuesList = [];
        const columnData = new Map();
        for (const spec of query.pathSpecs) {
            const safePath = (0, query_client_1.validateIdentifier)(spec.path);
            valuesList.push({ path: spec.path, method: spec.aggregate });
            if (spec.path === "navigation.position") {
                columnData.set(spec.path, await fetchPositionSeries(range, safeContext, query.resolution));
                continue;
            }
            const where = `${buildRangeWhere(range, safeContext)} AND path = '${safePath}'`;
            if (needsClientSideAggregation(spec.aggregate)) {
                columnData.set(spec.path, await fetchClientAggregated(spec, where));
            }
            else {
                columnData.set(spec.path, await fetchServerAggregated(spec, where, query.resolution));
            }
        }
        // Merge all per-path series onto a single sorted timestamp axis.
        const allTimestamps = new Set();
        for (const rows of columnData.values()) {
            for (const [ts] of rows) {
                allTimestamps.add(ts);
            }
        }
        // ISO-8601 timestamps sort chronologically as plain strings.
        const sortedTimestamps = Array.from(allTimestamps).sort();
        const pathOrder = query.pathSpecs.map((s) => s.path);
        const indexMaps = new Map();
        for (const [path, rows] of columnData) {
            indexMaps.set(path, new Map(rows));
        }
        const data = sortedTimestamps.map((ts) => {
            const row = [ts];
            for (const path of pathOrder) {
                row.push(indexMaps.get(path)?.get(ts) ?? null);
            }
            return row;
        });
        return {
            context,
            range: { from: range.from, to: range.to },
            values: valuesList,
            data,
        };
    }
    // GET .../paths — distinct paths recorded in the range, across both the
    // numeric and string tables.
    async function getPaths(query) {
        const range = (0, time_range_1.resolveTimeRange)(query);
        const where = buildRangeWhere(range);
        const result = await queryClient.exec(`SELECT DISTINCT path FROM signalk WHERE ${where}
       UNION
       SELECT DISTINCT path FROM signalk_str WHERE ${where}
       ORDER BY path`);
        return result.dataset.map((row) => row[0]);
    }
    // GET .../contexts — distinct contexts recorded in the range.
    async function getContexts(query) {
        const range = (0, time_range_1.resolveTimeRange)(query);
        const where = buildRangeWhere(range);
        const result = await queryClient.exec(`SELECT DISTINCT context FROM signalk WHERE ${where}
       UNION
       SELECT DISTINCT context FROM signalk_str WHERE ${where}
       ORDER BY context`);
        return result.dataset.map((row) => row[0]);
    }
    return { getValues, getPaths, getContexts };
}
192
+ //# sourceMappingURL=history-v2.js.map