@synnaxlabs/client 0.49.2 → 0.49.3
This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- package/.turbo/turbo-build.log +5 -5
- package/dist/client.cjs +27 -27
- package/dist/client.js +3439 -3444
- package/dist/src/framer/external.d.ts +1 -0
- package/dist/src/framer/external.d.ts.map +1 -1
- package/dist/src/framer/iterator.d.ts +5 -0
- package/dist/src/framer/iterator.d.ts.map +1 -1
- package/dist/src/framer/reader.d.ts +3 -1
- package/dist/src/framer/reader.d.ts.map +1 -1
- package/package.json +5 -5
- package/src/framer/external.ts +1 -0
- package/src/framer/iterator.spec.ts +67 -0
- package/src/framer/iterator.ts +7 -0
- package/src/framer/reader.spec.ts +212 -83
- package/src/framer/reader.ts +13 -6
package/dist/src/framer/external.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"external.d.ts","sourceRoot":"","sources":["../../../src/framer/external.ts"],"names":[],"mappings":"AASA,cAAc,iBAAiB,CAAC;AAChC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,mBAAmB,CAAC;AAClC,cAAc,kBAAkB,CAAC;AACjC,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC"}
+{"version":3,"file":"external.d.ts","sourceRoot":"","sources":["../../../src/framer/external.ts"],"names":[],"mappings":"AASA,cAAc,iBAAiB,CAAC;AAChC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,mBAAmB,CAAC;AAClC,cAAc,kBAAkB,CAAC;AACjC,cAAc,iBAAiB,CAAC;AAChC,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC"}
package/dist/src/framer/iterator.d.ts
CHANGED
@@ -8,6 +8,11 @@ export interface IteratorConfig {
      * resulting from a call to next with {@link AUTO_SPAN}.
      */
     chunkSize?: number;
+    /**
+     * downsampleFactor is the factor to downsample the data by. If downsampleFactor is
+     * less than or equal to 1, no downsampling will be performed.
+     */
+    downsampleFactor?: number;
 }
 /**
  * Used to iterate over a clusters telemetry in time-order. It should not be
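The new downsampleFactor option is consumed through client.openIterator. A minimal usage sketch, mirroring the spec tests added in this release (assumes an already-connected client and an existing channels list are in scope; AUTO_SPAN is declared in src/framer/iterator.ts):

    const iter = await client.openIterator(TimeRange.MAX, channels, {
      chunkSize: 1e5, // max samples per frame returned by next(AUTO_SPAN)
      downsampleFactor: 10, // keep every 10th sample; <= 1 disables downsampling
    });
    try {
      await iter.seekFirst();
      while (await iter.next(AUTO_SPAN)) {
        // iter.value holds the (downsampled) frame for this chunk
        console.log(iter.value);
      }
    } finally {
      await iter.close();
    }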
package/dist/src/framer/iterator.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"iterator.d.ts","sourceRoot":"","sources":["../../../src/framer/iterator.ts"],"names":[],"mappings":"AASA,OAAO,EAAe,KAAK,YAAY,EAAE,MAAM,uBAAuB,CAAC;AACvE,OAAO,EACL,KAAK,cAAc,EACnB,KAAK,aAAa,EAClB,KAAK,cAAc,EAGnB,QAAQ,EAET,MAAM,eAAe,CAAC;AAGvB,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAEpC,OAAO,EAAE,KAAK,EAAU,MAAM,gBAAgB,CAAC;AAG/C,eAAO,MAAM,SAAS,UAAmB,CAAC;
+{"version":3,"file":"iterator.d.ts","sourceRoot":"","sources":["../../../src/framer/iterator.ts"],"names":[],"mappings":"AASA,OAAO,EAAe,KAAK,YAAY,EAAE,MAAM,uBAAuB,CAAC;AACvE,OAAO,EACL,KAAK,cAAc,EACnB,KAAK,aAAa,EAClB,KAAK,cAAc,EAGnB,QAAQ,EAET,MAAM,eAAe,CAAC;AAGvB,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAEpC,OAAO,EAAE,KAAK,EAAU,MAAM,gBAAgB,CAAC;AAG/C,eAAO,MAAM,SAAS,UAAmB,CAAC;AAuC1C,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;;OAGG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED;;;;;;;GAOG;AACH,qBAAa,QAAQ;IACnB,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAwC;IAC/D,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAc;IACtC,KAAK,EAAE,KAAK,CAAC;IAEb,OAAO;IAMP;;;;;;;;;;OAUG;WACU,KAAK,CAChB,EAAE,EAAE,cAAc,EAClB,QAAQ,EAAE,OAAO,CAAC,MAAM,EACxB,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,MAAM,EAAE,YAAY,EACpB,IAAI,GAAE,cAAmB,GACxB,OAAO,CAAC,QAAQ,CAAC;IAcpB;;;;;;;;;;OAUG;IACG,IAAI,CAAC,IAAI,GAAE,aAAyB,GAAG,OAAO,CAAC,OAAO,CAAC;IAI7D;;;;;;;;;;OAUG;IACG,IAAI,CAAC,IAAI,GAAE,aAAyB,GAAG,OAAO,CAAC,OAAO,CAAC;IAI7D;;;;;;;OAOG;IACG,SAAS,IAAI,OAAO,CAAC,OAAO,CAAC;IAInC;;;;;;OAMG;IACG,QAAQ,IAAI,OAAO,CAAC,OAAO,CAAC;IAIlC;;;;;;;OAOG;IACG,MAAM,CAAC,KAAK,EAAE,cAAc,GAAG,OAAO,CAAC,OAAO,CAAC;IAIrD;;;;;;;OAOG;IACG,MAAM,CAAC,KAAK,EAAE,cAAc,GAAG,OAAO,CAAC,OAAO,CAAC;IAIrD;;;;OAIG;IACG,KAAK,IAAI,OAAO,CAAC,OAAO,CAAC;IAI/B;;;;OAIG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAI5B,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,KAAK,EAAE,GAAG,EAAE,SAAS,CAAC;YAIhD,OAAO;CAStB"}
package/dist/src/framer/reader.d.ts
CHANGED
@@ -1,11 +1,13 @@
 import { WebSocketClient } from '@synnaxlabs/freighter';
 import { CrudeTimeRange } from '@synnaxlabs/x';
 import { channel } from '../channel';
+import { IteratorConfig } from './iterator';
 export interface ReadRequest {
     channels: channel.Params;
     timeRange: CrudeTimeRange;
-    channelNames?:
+    channelNames?: Record<channel.KeyOrName, string>;
     responseType: "csv";
+    iteratorConfig?: IteratorConfig;
 }
 export declare class Reader {
     private readonly retriever;
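Read together with the reader.spec.ts changes below, the new ReadRequest shape looks like this in use: channelNames is now a plain object keyed by channel key or name, and iteratorConfig (including downsampleFactor) is forwarded to the iterator backing the CSV export. A sketch, assuming client, index, and data are set up as in the tests:

    const stream = await client.read({
      channels: [index.key, data.key],
      timeRange: { start: TimeStamp.seconds(0), end: TimeStamp.seconds(10) },
      channelNames: {
        [index.key]: "Time", // CSV column header overrides
        [data.key]: "Sensor1",
      },
      responseType: "csv",
      iteratorConfig: { downsampleFactor: 2 }, // optional; passed through to the iterator
    });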
package/dist/src/framer/reader.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"reader.d.ts","sourceRoot":"","sources":["../../../src/framer/reader.ts"],"names":[],"mappings":"AASA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAC7D,OAAO,EAAE,KAAK,cAAc,EAAgB,MAAM,eAAe,CAAC;AAElE,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;
+{"version":3,"file":"reader.d.ts","sourceRoot":"","sources":["../../../src/framer/reader.ts"],"names":[],"mappings":"AASA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAC7D,OAAO,EAAE,KAAK,cAAc,EAAgB,MAAM,eAAe,CAAC;AAElE,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;AAGzC,OAAO,EAAY,KAAK,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAElE,MAAM,WAAW,WAAW;IAC1B,QAAQ,EAAE,OAAO,CAAC,MAAM,CAAC;IACzB,SAAS,EAAE,cAAc,CAAC;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACjD,YAAY,EAAE,KAAK,CAAC;IACpB,cAAc,CAAC,EAAE,cAAc,CAAC;CACjC;AAED,qBAAa,MAAM;IACjB,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAoB;IAC9C,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAkB;gBAEnC,SAAS,EAAE,OAAO,CAAC,SAAS,EAAE,YAAY,EAAE,eAAe;IAKjE,IAAI,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC;CAiCtE"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@synnaxlabs/client",
-  "version": "0.49.2",
+  "version": "0.49.3",
   "description": "The Synnax Client Library",
   "keywords": [
     "synnax",
@@ -26,8 +26,8 @@
   "dependencies": {
     "async-mutex": "^0.5.0",
     "zod": "^4.1.12",
-    "@synnaxlabs/
-    "@synnaxlabs/
+    "@synnaxlabs/x": "^0.49.3",
+    "@synnaxlabs/freighter": "^0.49.0"
   },
   "devDependencies": {
     "@vitest/coverage-v8": "^3.2.4",
@@ -38,8 +38,8 @@
     "vite": "^7.1.12",
     "vitest": "^3.2.4",
     "@synnaxlabs/tsconfig": "^0.43.0",
-    "
-    "
+    "eslint-config-synnaxlabs": "^0.43.0",
+    "@synnaxlabs/vite-plugin": "^0.43.0"
   },
   "type": "module",
   "types": "dist/src/index.d.ts",
package/src/framer/iterator.spec.ts
CHANGED
@@ -99,4 +99,71 @@ describe("Iterator", () => {
       await iter.close();
     }
   });
+  test("downsample factor 2", async () => {
+    const channels = await newIndexedPair(client);
+    const [idx_ch, data_ch] = channels;
+    const writer = await client.openWriter({ start: TimeStamp.SECOND, channels });
+    await writer.write({
+      [idx_ch.key]: secondsLinspace(1, 8),
+      [data_ch.key]: new Float64Array([1, 2, 3, 4, 5, 6, 7, 8]),
+    });
+    await writer.close();
+    const iter = await client.openIterator(TimeRange.MAX, channels, {
+      downsampleFactor: 2,
+    });
+
+    try {
+      expect(await iter.seekFirst()).toBe(true);
+      expect(await iter.next(AUTO_SPAN)).toBe(true);
+      // [1, 2, 3, 4, 5, 6, 7, 8] downsampled by 2 = [1, 3, 5, 7]
+      expect(iter.value.get(data_ch.key).data).toEqual(new Float64Array([1, 3, 5, 7]));
+      expect(await iter.next(AUTO_SPAN)).toBe(false);
+    } finally {
+      await iter.close();
+    }
+  });
+  test("downsample factor 3", async () => {
+    const channels = await newIndexedPair(client);
+    const [idx_ch, data_ch] = channels;
+    const writer = await client.openWriter({ start: TimeStamp.SECOND, channels });
+    await writer.write({
+      [idx_ch.key]: secondsLinspace(1, 9),
+      [data_ch.key]: new Float64Array([1, 2, 3, 4, 5, 6, 7, 8, 9]),
+    });
+    await writer.close();
+    const iter = await client.openIterator(TimeRange.MAX, channels, {
+      downsampleFactor: 3,
+    });
+
+    try {
+      expect(await iter.seekFirst()).toBe(true);
+      expect(await iter.next(AUTO_SPAN)).toBe(true);
+      // [1, 2, 3, 4, 5, 6, 7, 8, 9] downsampled by 3 = [1, 4, 7]
+      expect(iter.value.get(data_ch.key).data).toEqual(new Float64Array([1, 4, 7]));
+      expect(await iter.next(AUTO_SPAN)).toBe(false);
+    } finally {
+      await iter.close();
+    }
+  });
+  test("no downsample when factor is 1", async () => {
+    const channels = await newIndexedPair(client);
+    const [idx_ch, data_ch] = channels;
+    const writer = await client.openWriter({ start: TimeStamp.SECOND, channels });
+    await writer.write({
+      [idx_ch.key]: secondsLinspace(1, 4),
+      [data_ch.key]: new Float64Array([1, 2, 3, 4]),
+    });
+    await writer.close();
+    const iter = await client.openIterator(TimeRange.MAX, channels, {
+      downsampleFactor: 1,
+    });
+    try {
+      expect(await iter.seekFirst()).toBe(true);
+      expect(await iter.next(AUTO_SPAN)).toBe(true);
+      expect(iter.value.get(data_ch.key).data).toEqual(new Float64Array([1, 2, 3, 4]));
+      expect(await iter.next(AUTO_SPAN)).toBe(false);
+    } finally {
+      await iter.close();
+    }
+  });
 });
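The expected frames in these tests are consistent with simple strided decimation: starting from the first sample, keep every downsampleFactor-th sample. A reference sketch of that rule (an illustration of the expectations above, not the server's implementation):

    const downsample = (samples: number[], factor: number): number[] =>
      factor <= 1 ? samples : samples.filter((_, i) => i % factor === 0);

    downsample([1, 2, 3, 4, 5, 6, 7, 8], 2); // [1, 3, 5, 7]
    downsample([1, 2, 3, 4, 5, 6, 7, 8, 9], 3); // [1, 4, 7]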
package/src/framer/iterator.ts
CHANGED
@@ -51,6 +51,7 @@ const reqZ = z.object({
   stamp: TimeStamp.z.optional(),
   keys: channel.keyZ.array().optional(),
   chunkSize: z.number().optional(),
+  downsampleFactor: z.int().optional(),
 });
 interface Request extends z.infer<typeof reqZ> {}
 
@@ -67,6 +68,11 @@ export interface IteratorConfig {
    * resulting from a call to next with {@link AUTO_SPAN}.
    */
   chunkSize?: number;
+  /**
+   * downsampleFactor is the factor to downsample the data by. If downsampleFactor is
+   * less than or equal to 1, no downsampling will be performed.
+   */
+  downsampleFactor?: number;
 }
 
 /**
@@ -114,6 +120,7 @@ export class Iterator {
       keys: Array.from(adapter.keys),
       bounds: new TimeRange(tr),
       chunkSize: opts.chunkSize ?? 1e5,
+      downsampleFactor: opts.downsampleFactor ?? 1,
     });
     return iter;
   }
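Two details of the wiring above are easy to miss: the wire schema accepts only integers (z.int() is zod v4's top-level integer schema, matching the "zod": "^4.1.12" dependency), and Iterator.open normalizes an omitted option to 1, i.e. no downsampling. A sketch:

    import { z } from "zod";

    const downsampleFactorZ = z.int().optional();
    downsampleFactorZ.parse(2); // ok
    downsampleFactorZ.parse(undefined); // ok - the field is optional
    // downsampleFactorZ.parse(2.5); // throws: not an integer

    const opts: { downsampleFactor?: number } = {};
    const downsampleFactor = opts.downsampleFactor ?? 1; // default sent in the request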
package/src/framer/reader.spec.ts
CHANGED
@@ -12,6 +12,7 @@ import { describe, expect, it } from "vitest";
 
 import { type channel } from "@/channel";
 import { createTestClient } from "@/testutil/client";
+import { secondsLinspace } from "@/testutil/telem";
 
 const client = createTestClient();
 
@@ -75,11 +76,11 @@ describe("Reader", () => {
     const stream = await client.read({
       channels: [index.key, data1.key, data2.key],
       timeRange: { start: TimeStamp.seconds(0), end: TimeStamp.seconds(10) },
-      channelNames:
-        [index.key
-        [data1.key
-        [data2.key
-
+      channelNames: {
+        [index.key]: "Time",
+        [data1.key]: "Sensor1",
+        [data2.key]: "Sensor2",
+      },
       responseType: "csv",
     });
     const records = await streamToRecords(stream);
@@ -145,12 +146,12 @@ describe("Reader", () => {
     const stream = await client.read({
       channels: [data1.key, data2.key], // Just data channels - indexes auto-included
       timeRange: { start: TimeStamp.seconds(0), end: TimeStamp.seconds(10) },
-      channelNames:
-        [index1.key
-        [data1.key
-        [index2.key
-        [data2.key
-
+      channelNames: {
+        [index1.key]: "Time1",
+        [data1.key]: "Data1",
+        [index2.key]: "Time2",
+        [data2.key]: "Data2",
+      },
       responseType: "csv",
     });
     const records = await streamToRecords(stream);
@@ -164,6 +165,47 @@ describe("Reader", () => {
       ["", "", "6000000000", "600"],
     ]);
   });
+  it("should allow downsampling", async () => {
+    const index = await client.channels.create({
+      name: id.create(),
+      dataType: DataType.TIMESTAMP,
+      isIndex: true,
+    });
+    const data = await client.channels.create({
+      name: id.create(),
+      dataType: DataType.FLOAT64,
+      index: index.key,
+    });
+    const writer = await client.openWriter({
+      start: TimeStamp.seconds(1),
+      channels: [index.key, data.key],
+    });
+    await writer.write({
+      [index.key]: [
+        TimeStamp.seconds(1),
+        TimeStamp.seconds(2),
+        TimeStamp.seconds(3),
+        TimeStamp.seconds(4),
+        TimeStamp.seconds(5),
+      ],
+      [data.key]: [10, 20, 30, 40, 50],
+    });
+    await writer.commit();
+    await writer.close();
+    const stream = await client.read({
+      channels: [data.key],
+      timeRange: { start: TimeStamp.seconds(0), end: TimeStamp.seconds(10) },
+      responseType: "csv",
+      iteratorConfig: { downsampleFactor: 2 },
+    });
+    const records = await streamToRecords(stream);
+    expect(records).toEqual([
+      [index.name, data.name],
+      ["1000000000", "10"],
+      ["3000000000", "30"],
+      ["5000000000", "50"],
+    ]);
+  });
   it("should handle channels at different uneven rates with correct row ordering", async () => {
     const indexFast = await client.channels.create({
       name: id.create(),
@@ -380,7 +422,7 @@ describe("Reader", () => {
     await writer.close();
     const stream = await client.read({
       channels: [data.key],
-      timeRange: { start: TimeStamp.seconds(0), end: TimeStamp.seconds(
+      timeRange: { start: TimeStamp.seconds(0), end: TimeStamp.seconds(100000) },
       responseType: "csv",
     });
     const records = await streamToRecords(stream);
@@ -389,7 +431,133 @@ describe("Reader", () => {
       ["1", "42"],
     ]);
   });
-
+  it("should handle channels across domains with gaps in them", async () => {
+    const index = await client.channels.create({
+      name: id.create(),
+      dataType: DataType.TIMESTAMP,
+      isIndex: true,
+    });
+    const data = await client.channels.create({
+      name: id.create(),
+      dataType: DataType.FLOAT64,
+      index: index.key,
+    });
+    let writer = await client.openWriter({
+      start: TimeStamp.nanoseconds(101),
+      channels: [index.key, data.key],
+    });
+    await writer.write({
+      [index.key]: [
+        TimeStamp.nanoseconds(101),
+        TimeStamp.nanoseconds(102),
+        TimeStamp.nanoseconds(103),
+      ],
+      [data.key]: [10, 11, 12],
+    });
+    await writer.commit();
+    await writer.close();
+    writer = await client.openWriter({
+      start: TimeStamp.nanoseconds(1),
+      channels: [index.key, data.key],
+    });
+    await writer.write({
+      [index.key]: [
+        TimeStamp.nanoseconds(1),
+        TimeStamp.nanoseconds(2),
+        TimeStamp.nanoseconds(3),
+      ],
+      [data.key]: [1, 2, 3],
+    });
+    await writer.commit();
+    await writer.close();
+    const stream = await client.read({
+      channels: [data.key],
+      timeRange: { start: TimeStamp.nanoseconds(3), end: TimeStamp.nanoseconds(103) },
+      responseType: "csv",
+    });
+    const rows = await streamToRecords(stream);
+    expect(rows).toEqual([
+      [index.name, data.name],
+      ["3", "3"],
+      ["101", "10"],
+      ["102", "11"],
+    ]);
+  });
+  it("should handle non-overlapping data across domains", async () => {
+    // first index will get written from times 10-15, second index from times 13-18
+    const index1 = await client.channels.create({
+      name: id.create(),
+      dataType: DataType.TIMESTAMP,
+      isIndex: true,
+    });
+    const index2 = await client.channels.create({
+      name: id.create(),
+      dataType: DataType.TIMESTAMP,
+      isIndex: true,
+    });
+    const data1 = await client.channels.create({
+      name: id.create(),
+      dataType: DataType.FLOAT64,
+      index: index1.key,
+    });
+    const data2 = await client.channels.create({
+      name: id.create(),
+      dataType: DataType.FLOAT64,
+      index: index2.key,
+    });
+    const writer1 = await client.openWriter({
+      start: TimeStamp.nanoseconds(10),
+      channels: [index1.key, data1.key],
+    });
+    await writer1.write({
+      [index1.key]: [
+        TimeStamp.nanoseconds(10),
+        TimeStamp.nanoseconds(11),
+        TimeStamp.nanoseconds(12),
+        TimeStamp.nanoseconds(13),
+        TimeStamp.nanoseconds(14),
+        TimeStamp.nanoseconds(15),
+      ],
+      [data1.key]: [1, 2, 3, 4, 5, 6],
+    });
+    await writer1.commit();
+    await writer1.close();
+    const writer2 = await client.openWriter({
+      start: TimeStamp.nanoseconds(15),
+      channels: [index2.key, data2.key],
+    });
+    await writer2.write({
+      [index2.key]: [
+        TimeStamp.nanoseconds(13),
+        TimeStamp.nanoseconds(14),
+        TimeStamp.nanoseconds(15),
+        TimeStamp.nanoseconds(16),
+        TimeStamp.nanoseconds(17),
+        TimeStamp.nanoseconds(18),
+      ],
+      [data2.key]: [11, 12, 13, 14, 15, 16],
+    });
+    await writer2.commit();
+    await writer2.close();
+    const stream = await client.read({
+      channels: [data1.key, data2.key],
+      timeRange: { start: TimeStamp.nanoseconds(0), end: TimeStamp.nanoseconds(19) },
+      responseType: "csv",
+    });
+    const rows = await streamToRecords(stream);
+    expect(rows).toEqual([
+      [index1.name, data1.name, index2.name, data2.name],
+      ["10", "1", "", ""],
+      ["11", "2", "", ""],
+      ["12", "3", "", ""],
+      ["13", "4", "13", "11"],
+      ["14", "5", "14", "12"],
+      ["15", "6", "15", "13"],
+      ["", "", "16", "14"],
+      ["", "", "17", "15"],
+      ["", "", "18", "16"],
+    ]);
+  });
   it("should handle large dataset requiring multiple iterator calls", async () => {
     // Create 4 groups with different indexes at different rates
     const numGroups = 4;
@@ -558,7 +726,6 @@ describe("Reader", () => {
     const sparseStep = 1_000;
     const sparseSamples = denseSamples / sparseStep;
 
-    // Fast (dense) index + data
     const indexFast = await client.channels.create({
       name: `dense_index_${id.create()}`,
       dataType: DataType.TIMESTAMP,
@@ -570,7 +737,6 @@ describe("Reader", () => {
       index: indexFast.key,
     });
 
-    // Slow (sparse) index + data
     const indexSlow = await client.channels.create({
       name: `sparse_index_${id.create()}`,
       dataType: DataType.TIMESTAMP,
@@ -581,79 +747,59 @@ describe("Reader", () => {
       dataType: DataType.FLOAT64,
       index: indexSlow.key,
     });
-    const
+    const start = TimeStamp.seconds(0);
     const denseWriter = await client.openWriter({
-      start
+      start,
       channels: [indexFast.key, dataFast.key],
     });
 
-    const
+    const maxBatchSize = 10_000;
     for (
       let batchStart = 1;
       batchStart <= denseSamples;
-      batchStart +=
+      batchStart += maxBatchSize
     ) {
-      const batchEnd = Math.min(batchStart +
-      const
-      const
-
-
-        // baseTime + i ns => underlying raw timestamps ~ [1..1_000_000]
-        tsBatch.push(baseTime.add(TimeSpan.nanoseconds(i)));
-        valBatch.push(i); // arbitrary data value
-      }
-
-      await denseWriter.write({
-        [indexFast.key]: tsBatch,
-        [dataFast.key]: valBatch,
-      });
+      const batchEnd = Math.min(batchStart + maxBatchSize - 1, denseSamples);
+      const batchSize = batchEnd - batchStart + 1;
+      const times = secondsLinspace(batchStart, batchSize);
+      const data = Array.from({ length: batchSize }, (_, i) => i + batchStart);
+      await denseWriter.write({ [indexFast.key]: times, [dataFast.key]: data });
     }
     await denseWriter.commit();
     await denseWriter.close();
 
-    // ---- Write sparse channel: timestamps 1..1_000_000 every 1000 ----
     const sparseWriter = await client.openWriter({
-      start
+      start,
       channels: [indexSlow.key, dataSlow.key],
     });
 
-    const sparseBatchSize = 1000; // at most 1000 sparse points total anyway
     for (
-      let batchStart =
+      let batchStart = 1;
       batchStart < sparseSamples;
-      batchStart +=
+      batchStart += maxBatchSize
     ) {
-      const batchEnd = Math.min(
-
-
-
-
-
-
-
+      const batchEnd = Math.min(
+        batchStart + (maxBatchSize - 1) * sparseStep,
+        batchStart + sparseSamples * sparseStep,
+      );
+      const times: TimeStamp[] = [];
+      const data: number[] = [];
+
+      for (let j = batchStart; j < batchEnd; j += sparseStep) {
+        times.push(start.add(TimeSpan.seconds(j)));
+        data.push(j); // arbitrary data value
       }
-
-      await sparseWriter.write({
-        [indexSlow.key]: tsBatch,
-        [dataSlow.key]: valBatch,
-      });
+      await sparseWriter.write({ [indexSlow.key]: times, [dataSlow.key]: data });
     }
     await sparseWriter.commit();
     await sparseWriter.close();
 
-    // ---- Export CSV with explicit headers so we know column order ----
     const stream = await client.read({
       channels: [dataFast.key, dataSlow.key],
       timeRange: {
-        start:
-        end:
+        start: TimeStamp.seconds(0),
+        end: start.add(TimeSpan.seconds(denseSamples + 1)),
       },
-      channelNames: new Map([
-        [indexFast.key, "FastTime"],
-        [dataFast.key, "FastValue"],
-        [indexSlow.key, "SlowTime"],
-        [dataSlow.key, "SlowValue"],
-      ]),
       responseType: "csv",
     });
 
@@ -671,9 +817,7 @@ describe("Reader", () => {
       const { done, value } = await reader.read();
       if (done) break;
       chunkCount++;
-
       buffer += decoder.decode(value);
-
       while (true) {
         const idx = buffer.indexOf(delimiter);
         if (idx === -1) break;
@@ -683,26 +827,20 @@ describe("Reader", () => {
         if (isHeader) {
           const headerCols = line.split(",");
           expect(headerCols).toEqual([
-
-
-
-
+            indexFast.name,
+            dataFast.name,
+            indexSlow.name,
+            dataSlow.name,
           ]);
           isHeader = false;
           continue;
         }
 
         totalRows++;
-
         const cols = line.split(",");
         expect(cols).toHaveLength(4);
+        const [fastTsStr, fastValStr, slowTsStr, slowValStr] = cols;
 
-        const fastTsStr = cols[0];
-        const slowTsStr = cols[2];
-        const fastValStr = cols[1];
-        const slowValStr = cols[3];
-
-        // Dense channel should always have a timestamp and value
         expect(fastTsStr).not.toBe("");
         expect(fastValStr).not.toBe("");
 
@@ -710,25 +848,16 @@ describe("Reader", () => {
         if (lastTimestamp !== null) expect(ts).toBeGreaterThan(lastTimestamp);
         lastTimestamp = ts;
 
-        // Sparse channel only has data every 1000 "ticks"
         if (slowValStr !== "") {
           sparseRows++;
           // When sparse has data, its timestamp should match dense's timestamp
           expect(slowTsStr).toBe(fastTsStr);
+          expect(slowValStr).not.toBe("");
         }
       }
     }
-
-    // Handle any final line without trailing CRLF
-    if (buffer.trim().length > 0) if (!isHeader) totalRows++;
-
-    // We should have streamed multiple chunks (proves AUTO_SPAN / multi-frame)
     expect(chunkCount).toBeGreaterThan(1);
-
-    // One row per dense timestamp
     expect(totalRows).toBe(denseSamples);
-
-    // One row per sparse timestamp (merged into dense rows)
     expect(sparseRows).toBe(sparseSamples);
   },
 );
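Across these reader tests, the CSV rows encode one merge rule: rows are emitted in global timestamp order across all indexes, and a channel's timestamp/value columns are blank on rows where its index has no sample. A reference sketch of that rule (illustration only, not the package's implementation), reproducing the expected rows of the non-overlapping-domains test above:

    type Series = { times: number[]; values: number[] };

    const mergeRows = (series: Series[]): string[][] => {
      // Union of all timestamps, sorted ascending: one output row per timestamp.
      const all = [...new Set(series.flatMap((s) => s.times))].sort((a, b) => a - b);
      return all.map((t) =>
        series.flatMap((s) => {
          const i = s.times.indexOf(t);
          return i === -1 ? ["", ""] : [String(t), String(s.values[i])];
        }),
      );
    };

    mergeRows([
      { times: [10, 11, 12, 13, 14, 15], values: [1, 2, 3, 4, 5, 6] },
      { times: [13, 14, 15, 16, 17, 18], values: [11, 12, 13, 14, 15, 16] },
    ]);
    // -> [["10", "1", "", ""], ["11", "2", "", ""], ["12", "3", "", ""],
    //     ["13", "4", "13", "11"], ["14", "5", "14", "12"], ["15", "6", "15", "13"],
    //     ["", "", "16", "14"], ["", "", "17", "15"], ["", "", "18", "16"]]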