@synnaxlabs/client 0.48.0 → 0.49.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +6 -6
- package/dist/client.cjs +33 -31
- package/dist/client.js +6522 -6167
- package/dist/src/access/client.d.ts +3 -1
- package/dist/src/access/client.d.ts.map +1 -1
- package/dist/src/access/enforce.d.ts +35 -0
- package/dist/src/access/enforce.d.ts.map +1 -0
- package/dist/src/access/enforce.spec.d.ts +2 -0
- package/dist/src/access/enforce.spec.d.ts.map +1 -0
- package/dist/src/access/external.d.ts +3 -0
- package/dist/src/access/external.d.ts.map +1 -1
- package/dist/src/access/payload.d.ts +0 -6
- package/dist/src/access/payload.d.ts.map +1 -1
- package/dist/src/access/policy/access.spec.d.ts +2 -0
- package/dist/src/access/policy/access.spec.d.ts.map +1 -0
- package/dist/src/access/policy/client.d.ts +485 -31
- package/dist/src/access/policy/client.d.ts.map +1 -1
- package/dist/src/access/policy/payload.d.ts +36 -113
- package/dist/src/access/policy/payload.d.ts.map +1 -1
- package/dist/src/access/role/client.d.ts +135 -0
- package/dist/src/access/role/client.d.ts.map +1 -0
- package/dist/src/access/role/external.d.ts.map +1 -0
- package/dist/src/access/role/index.d.ts +2 -0
- package/dist/src/access/role/index.d.ts.map +1 -0
- package/dist/src/access/role/payload.d.ts +27 -0
- package/dist/src/access/role/payload.d.ts.map +1 -0
- package/dist/src/access/role/role.spec.d.ts +2 -0
- package/dist/src/access/role/role.spec.d.ts.map +1 -0
- package/dist/src/arc/access.spec.d.ts +2 -0
- package/dist/src/arc/access.spec.d.ts.map +1 -0
- package/dist/src/arc/client.d.ts +5 -14
- package/dist/src/arc/client.d.ts.map +1 -1
- package/dist/src/arc/payload.d.ts +11 -2
- package/dist/src/arc/payload.d.ts.map +1 -1
- package/dist/src/auth/auth.d.ts +5 -3
- package/dist/src/auth/auth.d.ts.map +1 -1
- package/dist/src/channel/access.spec.d.ts +2 -0
- package/dist/src/channel/access.spec.d.ts.map +1 -0
- package/dist/src/channel/client.d.ts +0 -1
- package/dist/src/channel/client.d.ts.map +1 -1
- package/dist/src/channel/payload.d.ts +18 -8
- package/dist/src/channel/payload.d.ts.map +1 -1
- package/dist/src/channel/payload.spec.d.ts +2 -0
- package/dist/src/channel/payload.spec.d.ts.map +1 -0
- package/dist/src/channel/retriever.d.ts +4 -6
- package/dist/src/channel/retriever.d.ts.map +1 -1
- package/dist/src/channel/writer.d.ts.map +1 -1
- package/dist/src/client.d.ts +9 -5
- package/dist/src/client.d.ts.map +1 -1
- package/dist/src/device/access.spec.d.ts +2 -0
- package/dist/src/device/access.spec.d.ts.map +1 -0
- package/dist/src/{hardware/device → device}/client.d.ts +14 -7
- package/dist/src/device/client.d.ts.map +1 -0
- package/dist/src/device/device.spec.d.ts.map +1 -0
- package/dist/src/device/external.d.ts.map +1 -0
- package/dist/src/device/index.d.ts.map +1 -0
- package/dist/src/{hardware/device → device}/payload.d.ts +1 -1
- package/dist/src/device/payload.d.ts.map +1 -0
- package/dist/src/errors.d.ts +3 -0
- package/dist/src/errors.d.ts.map +1 -1
- package/dist/src/framer/client.d.ts +11 -1
- package/dist/src/framer/client.d.ts.map +1 -1
- package/dist/src/framer/frame.d.ts +10 -5
- package/dist/src/framer/frame.d.ts.map +1 -1
- package/dist/src/framer/iterator.d.ts +3 -3
- package/dist/src/framer/reader.d.ts +16 -0
- package/dist/src/framer/reader.d.ts.map +1 -0
- package/dist/src/framer/reader.spec.d.ts +2 -0
- package/dist/src/framer/reader.spec.d.ts.map +1 -0
- package/dist/src/framer/streamer.d.ts +24 -21
- package/dist/src/framer/streamer.d.ts.map +1 -1
- package/dist/src/framer/writer.d.ts +13 -13
- package/dist/src/index.d.ts +4 -5
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/label/access.spec.d.ts +2 -0
- package/dist/src/label/access.spec.d.ts.map +1 -0
- package/dist/src/label/client.d.ts +20 -11
- package/dist/src/label/client.d.ts.map +1 -1
- package/dist/src/ontology/client.d.ts +6 -6
- package/dist/src/ontology/client.d.ts.map +1 -1
- package/dist/src/ontology/group/access.spec.d.ts +2 -0
- package/dist/src/ontology/group/access.spec.d.ts.map +1 -0
- package/dist/src/ontology/group/client.d.ts +2 -2
- package/dist/src/ontology/group/client.d.ts.map +1 -1
- package/dist/src/ontology/group/payload.d.ts +1 -2
- package/dist/src/ontology/group/payload.d.ts.map +1 -1
- package/dist/src/ontology/payload.d.ts +23 -17
- package/dist/src/ontology/payload.d.ts.map +1 -1
- package/dist/src/ontology/writer.d.ts +10 -10
- package/dist/src/ontology/writer.d.ts.map +1 -1
- package/dist/src/rack/access.spec.d.ts +2 -0
- package/dist/src/rack/access.spec.d.ts.map +1 -0
- package/dist/src/{hardware/rack → rack}/client.d.ts +15 -8
- package/dist/src/rack/client.d.ts.map +1 -0
- package/dist/src/rack/external.d.ts.map +1 -0
- package/dist/src/rack/index.d.ts.map +1 -0
- package/dist/src/{hardware/rack → rack}/payload.d.ts +1 -1
- package/dist/src/rack/payload.d.ts.map +1 -0
- package/dist/src/rack/rack.spec.d.ts.map +1 -0
- package/dist/src/ranger/access.spec.d.ts +2 -0
- package/dist/src/ranger/access.spec.d.ts.map +1 -0
- package/dist/src/ranger/alias.d.ts +1 -8
- package/dist/src/ranger/alias.d.ts.map +1 -1
- package/dist/src/ranger/client.d.ts +12 -5
- package/dist/src/ranger/client.d.ts.map +1 -1
- package/dist/src/ranger/kv.d.ts +0 -3
- package/dist/src/ranger/kv.d.ts.map +1 -1
- package/dist/src/ranger/writer.d.ts +2 -2
- package/dist/src/ranger/writer.d.ts.map +1 -1
- package/dist/src/status/access.spec.d.ts +2 -0
- package/dist/src/status/access.spec.d.ts.map +1 -0
- package/dist/src/status/client.d.ts +4 -4
- package/dist/src/status/client.d.ts.map +1 -1
- package/dist/src/status/payload.d.ts +9 -2
- package/dist/src/status/payload.d.ts.map +1 -1
- package/dist/src/task/access.spec.d.ts +2 -0
- package/dist/src/task/access.spec.d.ts.map +1 -0
- package/dist/src/{hardware/task → task}/client.d.ts +26 -15
- package/dist/src/task/client.d.ts.map +1 -0
- package/dist/src/task/external.d.ts +3 -0
- package/dist/src/task/external.d.ts.map +1 -0
- package/dist/src/task/index.d.ts.map +1 -0
- package/dist/src/{hardware/task → task}/payload.d.ts +45 -6
- package/dist/src/task/payload.d.ts.map +1 -0
- package/dist/src/task/task.spec.d.ts.map +1 -0
- package/dist/src/testutil/access.d.ts +4 -0
- package/dist/src/testutil/access.d.ts.map +1 -0
- package/dist/src/transport.d.ts.map +1 -1
- package/dist/src/user/access.spec.d.ts +2 -0
- package/dist/src/user/access.spec.d.ts.map +1 -0
- package/dist/src/user/client.d.ts +10 -1
- package/dist/src/user/client.d.ts.map +1 -1
- package/dist/src/user/external.d.ts +1 -1
- package/dist/src/user/external.d.ts.map +1 -1
- package/dist/src/user/payload.d.ts.map +1 -1
- package/dist/src/workspace/access.spec.d.ts +2 -0
- package/dist/src/workspace/access.spec.d.ts.map +1 -0
- package/dist/src/workspace/client.d.ts +10 -5
- package/dist/src/workspace/client.d.ts.map +1 -1
- package/dist/src/workspace/lineplot/access.spec.d.ts +2 -0
- package/dist/src/workspace/lineplot/access.spec.d.ts.map +1 -0
- package/dist/src/workspace/lineplot/client.d.ts +8 -1
- package/dist/src/workspace/lineplot/client.d.ts.map +1 -1
- package/dist/src/workspace/log/access.spec.d.ts +2 -0
- package/dist/src/workspace/log/access.spec.d.ts.map +1 -0
- package/dist/src/workspace/log/client.d.ts +8 -1
- package/dist/src/workspace/log/client.d.ts.map +1 -1
- package/dist/src/workspace/schematic/access.spec.d.ts +2 -0
- package/dist/src/workspace/schematic/access.spec.d.ts.map +1 -0
- package/dist/src/workspace/schematic/client.d.ts +8 -1
- package/dist/src/workspace/schematic/client.d.ts.map +1 -1
- package/dist/src/workspace/schematic/symbol/access.spec.d.ts +2 -0
- package/dist/src/workspace/schematic/symbol/access.spec.d.ts.map +1 -0
- package/dist/src/workspace/schematic/symbol/client.d.ts +1 -5
- package/dist/src/workspace/schematic/symbol/client.d.ts.map +1 -1
- package/dist/src/workspace/schematic/symbol/payload.d.ts +2 -2
- package/dist/src/workspace/table/access.spec.d.ts +2 -0
- package/dist/src/workspace/table/access.spec.d.ts.map +1 -0
- package/dist/src/workspace/table/client.d.ts +8 -1
- package/dist/src/workspace/table/client.d.ts.map +1 -1
- package/package.json +3 -3
- package/src/access/client.ts +5 -2
- package/src/access/enforce.spec.ts +189 -0
- package/src/access/enforce.ts +84 -0
- package/src/access/external.ts +3 -0
- package/src/access/payload.ts +1 -13
- package/src/access/policy/access.spec.ts +147 -0
- package/src/access/policy/client.ts +21 -25
- package/src/access/policy/payload.ts +9 -5
- package/src/access/role/client.ts +135 -0
- package/src/access/role/external.ts +11 -0
- package/src/{hardware → access/role}/index.ts +1 -1
- package/src/access/role/payload.ts +32 -0
- package/src/access/role/role.spec.ts +95 -0
- package/src/arc/access.spec.ts +143 -0
- package/src/arc/client.ts +7 -31
- package/src/arc/payload.ts +4 -0
- package/src/auth/auth.ts +33 -11
- package/src/channel/access.spec.ts +116 -0
- package/src/channel/channel.spec.ts +63 -73
- package/src/channel/client.ts +2 -8
- package/src/channel/payload.spec.ts +171 -0
- package/src/channel/payload.ts +35 -7
- package/src/channel/retriever.ts +10 -11
- package/src/channel/writer.ts +3 -7
- package/src/client.ts +14 -18
- package/src/device/access.spec.ts +159 -0
- package/src/{hardware/device → device}/client.ts +12 -21
- package/src/{hardware/device → device}/device.spec.ts +70 -34
- package/src/device/external.ts +11 -0
- package/src/{hardware/rack → device}/index.ts +1 -1
- package/src/{hardware/device → device}/payload.ts +3 -3
- package/src/errors.ts +2 -0
- package/src/framer/adapter.spec.ts +14 -14
- package/src/framer/client.spec.ts +14 -20
- package/src/framer/client.ts +15 -20
- package/src/framer/deleter.spec.ts +1 -1
- package/src/framer/frame.spec.ts +131 -0
- package/src/framer/frame.ts +10 -2
- package/src/framer/iterator.ts +3 -3
- package/src/framer/reader.spec.ts +736 -0
- package/src/framer/reader.ts +265 -0
- package/src/framer/streamer.spec.ts +100 -12
- package/src/framer/streamer.ts +29 -9
- package/src/framer/writer.spec.ts +5 -5
- package/src/index.ts +4 -5
- package/src/label/access.spec.ts +109 -0
- package/src/label/client.ts +10 -14
- package/src/ontology/client.ts +4 -6
- package/src/ontology/group/access.spec.ts +77 -0
- package/src/ontology/group/client.ts +3 -7
- package/src/ontology/group/group.spec.ts +18 -0
- package/src/ontology/group/payload.ts +2 -2
- package/src/ontology/ontology.spec.ts +2 -0
- package/src/ontology/payload.ts +18 -2
- package/src/ontology/writer.ts +3 -7
- package/src/rack/access.spec.ts +102 -0
- package/src/{hardware/rack → rack}/client.ts +14 -19
- package/src/{hardware/device/index.ts → rack/external.ts} +2 -1
- package/src/{hardware/external.ts → rack/index.ts} +1 -1
- package/src/{hardware/rack → rack}/payload.ts +2 -2
- package/src/{hardware/rack → rack}/rack.spec.ts +43 -17
- package/src/ranger/access.spec.ts +115 -0
- package/src/ranger/alias.ts +6 -14
- package/src/ranger/client.ts +13 -14
- package/src/ranger/kv.ts +7 -9
- package/src/ranger/ranger.spec.ts +4 -4
- package/src/ranger/writer.ts +3 -7
- package/src/status/access.spec.ts +129 -0
- package/src/status/client.ts +5 -9
- package/src/status/payload.ts +3 -2
- package/src/task/access.spec.ts +131 -0
- package/src/{hardware/task → task}/client.ts +50 -25
- package/src/task/external.ts +11 -0
- package/src/{hardware/task → task}/index.ts +1 -1
- package/src/{hardware/task → task}/payload.ts +22 -3
- package/src/{hardware/task → task}/task.spec.ts +197 -34
- package/src/testutil/access.ts +34 -0
- package/src/testutil/channels.ts +3 -3
- package/src/transport.ts +1 -3
- package/src/user/access.spec.ts +107 -0
- package/src/user/client.ts +10 -12
- package/src/user/external.ts +12 -1
- package/src/user/payload.ts +3 -5
- package/src/workspace/access.spec.ts +108 -0
- package/src/workspace/client.ts +11 -27
- package/src/workspace/lineplot/access.spec.ts +134 -0
- package/src/workspace/lineplot/client.ts +8 -13
- package/src/workspace/log/access.spec.ts +134 -0
- package/src/workspace/log/client.ts +8 -13
- package/src/workspace/schematic/access.spec.ts +134 -0
- package/src/workspace/schematic/client.ts +9 -18
- package/src/workspace/schematic/symbol/access.spec.ts +172 -0
- package/src/workspace/schematic/symbol/client.ts +6 -17
- package/src/workspace/schematic/symbol/payload.ts +1 -1
- package/src/workspace/table/access.spec.ts +134 -0
- package/src/workspace/table/client.ts +8 -13
- package/dist/src/access/policy/policy.spec.d.ts +0 -2
- package/dist/src/access/policy/policy.spec.d.ts.map +0 -1
- package/dist/src/hardware/client.d.ts +0 -10
- package/dist/src/hardware/client.d.ts.map +0 -1
- package/dist/src/hardware/device/client.d.ts.map +0 -1
- package/dist/src/hardware/device/device.spec.d.ts.map +0 -1
- package/dist/src/hardware/device/external.d.ts.map +0 -1
- package/dist/src/hardware/device/index.d.ts.map +0 -1
- package/dist/src/hardware/device/payload.d.ts.map +0 -1
- package/dist/src/hardware/external.d.ts +0 -2
- package/dist/src/hardware/external.d.ts.map +0 -1
- package/dist/src/hardware/index.d.ts +0 -2
- package/dist/src/hardware/index.d.ts.map +0 -1
- package/dist/src/hardware/rack/client.d.ts.map +0 -1
- package/dist/src/hardware/rack/external.d.ts.map +0 -1
- package/dist/src/hardware/rack/index.d.ts.map +0 -1
- package/dist/src/hardware/rack/payload.d.ts.map +0 -1
- package/dist/src/hardware/rack/rack.spec.d.ts.map +0 -1
- package/dist/src/hardware/task/client.d.ts.map +0 -1
- package/dist/src/hardware/task/external.d.ts.map +0 -1
- package/dist/src/hardware/task/index.d.ts.map +0 -1
- package/dist/src/hardware/task/payload.d.ts.map +0 -1
- package/dist/src/hardware/task/task.spec.d.ts.map +0 -1
- package/dist/src/user/retriever.d.ts +0 -16
- package/dist/src/user/retriever.d.ts.map +0 -1
- package/dist/src/user/writer.d.ts +0 -11
- package/dist/src/user/writer.d.ts.map +0 -1
- package/src/access/policy/policy.spec.ts +0 -329
- package/src/hardware/client.ts +0 -24
- package/src/hardware/device/external.ts +0 -11
- package/src/hardware/rack/external.ts +0 -11
- package/src/hardware/task/external.ts +0 -11
- package/src/user/retriever.ts +0 -41
- package/src/user/writer.ts +0 -84
- /package/dist/src/{hardware/device → access/role}/external.d.ts +0 -0
- /package/dist/src/{hardware/device → device}/device.spec.d.ts +0 -0
- /package/dist/src/{hardware/rack → device}/external.d.ts +0 -0
- /package/dist/src/{hardware/device → device}/index.d.ts +0 -0
- /package/dist/src/{hardware/task → rack}/external.d.ts +0 -0
- /package/dist/src/{hardware/rack → rack}/index.d.ts +0 -0
- /package/dist/src/{hardware/rack → rack}/rack.spec.d.ts +0 -0
- /package/dist/src/{hardware/task → task}/index.d.ts +0 -0
- /package/dist/src/{hardware/task → task}/task.spec.d.ts +0 -0
|
@@ -0,0 +1,265 @@
|
|
|
1
|
+
// Copyright 2025 Synnax Labs, Inc.
|
|
2
|
+
//
|
|
3
|
+
// Use of this software is governed by the Business Source License included in the file
|
|
4
|
+
// licenses/BSL.txt.
|
|
5
|
+
//
|
|
6
|
+
// As of the Change Date specified in that file, in accordance with the Business Source
|
|
7
|
+
// License, use of this software will be governed by the Apache License, Version 2.0,
|
|
8
|
+
// included in the file licenses/APL.txt.
|
|
9
|
+
|
|
10
|
+
import { type WebSocketClient } from "@synnaxlabs/freighter";
|
|
11
|
+
import { type CrudeTimeRange, csv, runtime } from "@synnaxlabs/x";
|
|
12
|
+
|
|
13
|
+
import { type channel } from "@/channel";
|
|
14
|
+
import { UnexpectedError } from "@/errors";
|
|
15
|
+
import { type Frame } from "@/framer/frame";
|
|
16
|
+
import { Iterator } from "@/framer/iterator";
|
|
17
|
+
|
|
18
|
+
/** Parameters for a {@link Reader#read} call. */
export interface ReadRequest {
  /** The channels to read from, specified by key or name. */
  channels: channel.Params;
  /** The time range to read data for. */
  timeRange: CrudeTimeRange;
  /** Optional overrides for CSV column headers, keyed by channel key or name. */
  channelNames?: Map<channel.KeyOrName, string>;
  /** The response format. Only "csv" is currently supported. */
  responseType: "csv";
}
|
|
24
|
+
|
|
25
|
+
export class Reader {
|
|
26
|
+
private readonly retriever: channel.Retriever;
|
|
27
|
+
private readonly streamClient: WebSocketClient;
|
|
28
|
+
|
|
29
|
+
constructor(retriever: channel.Retriever, streamClient: WebSocketClient) {
|
|
30
|
+
this.retriever = retriever;
|
|
31
|
+
this.streamClient = streamClient;
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
async read(request: ReadRequest): Promise<ReadableStream<Uint8Array>> {
|
|
35
|
+
const { channels: channelParams, timeRange, channelNames } = request;
|
|
36
|
+
const channelPayloads = await this.retriever.retrieve(channelParams);
|
|
37
|
+
const allKeys = new Set<channel.Key>();
|
|
38
|
+
channelPayloads.forEach((ch) => {
|
|
39
|
+
allKeys.add(ch.key);
|
|
40
|
+
if (ch.index !== 0) allKeys.add(ch.index);
|
|
41
|
+
});
|
|
42
|
+
const missingIndexKeys = Array.from(allKeys).filter(
|
|
43
|
+
(k) => !channelPayloads.some((ch) => ch.key === k),
|
|
44
|
+
);
|
|
45
|
+
if (missingIndexKeys.length > 0) {
|
|
46
|
+
const indexChannels = await this.retriever.retrieve(missingIndexKeys);
|
|
47
|
+
channelPayloads.push(...indexChannels);
|
|
48
|
+
}
|
|
49
|
+
const iterator = await Iterator._open(
|
|
50
|
+
timeRange,
|
|
51
|
+
Array.from(allKeys),
|
|
52
|
+
this.retriever,
|
|
53
|
+
this.streamClient,
|
|
54
|
+
);
|
|
55
|
+
return createCSVReadableStream({
|
|
56
|
+
iterator,
|
|
57
|
+
channelPayloads,
|
|
58
|
+
headers: channelNames,
|
|
59
|
+
});
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
/** Inputs for {@link createCSVReadableStream}. */
interface CreateCSVExportStreamParams {
  /** Frame iterator over the requested range; closed by the returned stream. */
  iterator: Iterator;
  /** Payloads for all exported channels, including their index channels. */
  channelPayloads: channel.Payload[];
  /** Optional column header overrides, keyed by channel key or name. */
  headers?: Map<channel.KeyOrName, string>;
}
|
|
68
|
+
|
|
69
|
+
/**
 * Wraps a frame iterator in a ReadableStream that emits CSV bytes.
 *
 * Channels are grouped by index channel; each group contributes a run of
 * columns. Rows are keyed by timestamp: records from all groups sharing a
 * timestamp are joined into one row, and groups with no record at that
 * timestamp are filled with a pre-computed run of empty cells.
 *
 * Records flow through two buffers: `stagedRecords` (raw extraction order,
 * sorted and merged on demand) and `pendingRecords` (globally time-sorted,
 * consumed via `pendingCursor` so consumption is O(1) per record).
 */
const createCSVReadableStream = ({
  iterator,
  channelPayloads,
  headers,
}: CreateCSVExportStreamParams): ReadableStream<Uint8Array> => {
  // CRLF on Windows, LF elsewhere.
  const delimiter = runtime.getOS() === "Windows" ? "\r\n" : "\n";
  const encoder = new TextEncoder();
  let headerWritten = false;
  let seekDone = false;
  const groups = groupChannelsByIndex(channelPayloads);
  const { columns, columnsByIndexKey, emptyGroupStrings } = buildColumnMeta(
    channelPayloads,
    groups,
    headers,
  );
  // Use a cursor-based approach instead of having to call .shift() for O(1) access
  let pendingRecords: RecordEntry[] = [];
  let pendingCursor = 0;
  let stagedRecords: RecordEntry[] = [];

  // Pulls every sample out of a frame and stages one RecordEntry per
  // (timestamp, index group). Values are pre-formatted as CSV strings here.
  const extractRecordsFromFrame = (frame: Frame): void => {
    for (const [indexKey] of groups) {
      const indexSeries = frame.get(indexKey);
      if (indexSeries.length === 0) continue;
      const groupColumns = columnsByIndexKey.get(indexKey) ?? [];
      // Pre-fetch all series for this group to avoid repeated lookups
      const seriesData = groupColumns.map((col) => frame.get(col.key));
      for (let i = 0; i < indexSeries.length; i++) {
        // Index series values are timestamps; presumably nanosecond bigints —
        // TODO confirm against Series.at semantics.
        const time = indexSeries.at(i, true) as bigint;
        const values = seriesData.map((series) => csv.formatValue(series.at(i, true)));
        stagedRecords.push({ time, values, indexKey });
      }
    }
  };

  // Merges staged records into the sorted pending buffer, then emits up to
  // `maxRows` complete rows. Unless `flush` is set, rows at the most recent
  // timestamp are withheld because later frames may still add records to them.
  const buildCSVRows = (maxRows: number, flush: boolean = false): string[] => {
    if (stagedRecords.length > 0) {
      // bigint difference coerced to Number: only the sign matters here.
      stagedRecords.sort((a, b) => Number(a.time - b.time));
      if (pendingCursor > 0) {
        // Compact consumed records before merging so the merge stays linear.
        pendingRecords = pendingRecords.slice(pendingCursor);
        pendingCursor = 0;
      }
      pendingRecords = mergeSortedRecords(pendingRecords, stagedRecords);
      stagedRecords = [];
    }
    const rows: string[] = [];
    const pendingLen = pendingRecords.length;
    while (pendingCursor < pendingLen && rows.length < maxRows) {
      const minTime = pendingRecords[pendingCursor].time;
      // Don't output the last timestamp unless flushing - more data might arrive
      // Optimization: only check if last record has same time (since array is sorted)
      if (!flush && pendingRecords[pendingLen - 1].time === minTime) break;
      // Collect all records at this timestamp using cursor (O(1) per record)
      // Use Map keyed by indexKey for O(1) lookup instead of find()
      const recordsByGroup = new Map<channel.Key, RecordEntry>();
      while (
        pendingCursor < pendingLen &&
        pendingRecords[pendingCursor].time === minTime
      ) {
        const record = pendingRecords[pendingCursor++];
        recordsByGroup.set(record.indexKey, record);
      }
      // Build the row group-by-group; absent groups get their empty-cell run.
      const rowParts: string[] = [];
      for (const [indexKey] of groups) {
        const record = recordsByGroup.get(indexKey);
        rowParts.push(
          record?.values.join(",") ?? emptyGroupStrings.get(indexKey) ?? "",
        );
      }
      rows.push(rowParts.join(","));
    }
    return rows;
  };

  return new ReadableStream<Uint8Array>({
    async pull(controller): Promise<void> {
      try {
        // Lazily position the iterator on the first pull.
        if (!seekDone) {
          await iterator.seekFirst();
          seekDone = true;
        }
        if (!headerWritten) {
          const headerRow = columns.map((c) => csv.formatValue(c.header)).join(",");
          controller.enqueue(encoder.encode(`${headerRow}${delimiter}`));
          headerWritten = true;
        }
        // Refill the buffer when it runs low.
        const bufferedCount =
          pendingRecords.length - pendingCursor + stagedRecords.length;
        if (bufferedCount < BUFFER_SIZE) {
          const hasMore = await iterator.next();
          if (hasMore) extractRecordsFromFrame(iterator.value);
        }
        const rows = buildCSVRows(BUFFER_SIZE);
        if (rows.length > 0)
          controller.enqueue(encoder.encode(`${rows.join(delimiter)}${delimiter}`));
        // NOTE(review): iterator.next() can run twice in one pull (refill above,
        // EOF probe here). Both calls stage their frame's records, so no data is
        // dropped — confirm this double-advance is intended.
        const remainingPending = pendingRecords.length - pendingCursor;
        if (remainingPending === 0 || stagedRecords.length === 0) {
          const hasMore = await iterator.next();
          if (!hasMore) {
            // Flush remaining records
            const finalRows = buildCSVRows(Infinity, true);
            if (finalRows.length > 0)
              controller.enqueue(
                encoder.encode(`${finalRows.join(delimiter)}${delimiter}`),
              );
            await iterator.close();
            controller.close();
            return;
          }
          extractRecordsFromFrame(iterator.value);
        }
      } catch (error) {
        // Release server-side iterator resources before surfacing the error.
        await iterator.close();
        controller.error(error);
      }
    },

    async cancel(): Promise<void> {
      await iterator.close();
    },
  });
};
|
|
191
|
+
|
|
192
|
+
const groupChannelsByIndex = (
|
|
193
|
+
channels: channel.Payload[],
|
|
194
|
+
): Map<channel.Key, channel.Keys> => {
|
|
195
|
+
const groupMap = new Map<channel.Key, channel.Keys>();
|
|
196
|
+
for (const ch of channels) {
|
|
197
|
+
if (ch.index === 0) continue;
|
|
198
|
+
let group = groupMap.get(ch.index);
|
|
199
|
+
if (group == null) {
|
|
200
|
+
group = [ch.index];
|
|
201
|
+
groupMap.set(ch.index, group);
|
|
202
|
+
}
|
|
203
|
+
if (!ch.isIndex && !group.includes(ch.key)) group.push(ch.key);
|
|
204
|
+
}
|
|
205
|
+
return groupMap;
|
|
206
|
+
};
|
|
207
|
+
|
|
208
|
+
/** Metadata for a single CSV column. */
interface ColumnMeta {
  /** Key of the channel backing this column. */
  key: channel.Key;
  /** Header text written in the CSV's first row. */
  header: string;
}

/** Result of {@link buildColumnMeta}. */
interface ColumnMetaResult {
  /** All columns in output order (contiguous per index group). */
  columns: ColumnMeta[];
  /** Each group's columns, keyed by the group's index channel key. */
  columnsByIndexKey: Map<channel.Key, ColumnMeta[]>;
  /** Pre-computed comma run used when a group has no record at a timestamp. */
  emptyGroupStrings: Map<channel.Key, string>;
}
|
|
218
|
+
|
|
219
|
+
const buildColumnMeta = (
|
|
220
|
+
channels: channel.Payload[],
|
|
221
|
+
groups: Map<channel.Key, channel.Keys>,
|
|
222
|
+
headers?: Map<channel.KeyOrName, string>,
|
|
223
|
+
): ColumnMetaResult => {
|
|
224
|
+
const channelMap = new Map(channels.map((ch) => [ch.key, ch]));
|
|
225
|
+
const columns: ColumnMeta[] = [];
|
|
226
|
+
const columnsByIndexKey = new Map<channel.Key, ColumnMeta[]>();
|
|
227
|
+
const emptyGroupStrings = new Map<channel.Key, string>();
|
|
228
|
+
|
|
229
|
+
for (const [indexKey, channelKeys] of groups) {
|
|
230
|
+
const groupColumns: ColumnMeta[] = [];
|
|
231
|
+
for (const key of channelKeys) {
|
|
232
|
+
const ch = channelMap.get(key);
|
|
233
|
+
if (ch == null) throw new UnexpectedError(`Channel ${key} not found`);
|
|
234
|
+
const meta: ColumnMeta = {
|
|
235
|
+
key,
|
|
236
|
+
header: headers?.get(key) ?? headers?.get(ch.name) ?? ch.name,
|
|
237
|
+
};
|
|
238
|
+
columns.push(meta);
|
|
239
|
+
groupColumns.push(meta);
|
|
240
|
+
}
|
|
241
|
+
columnsByIndexKey.set(indexKey, groupColumns);
|
|
242
|
+
// Pre-compute empty group string for fast row building
|
|
243
|
+
emptyGroupStrings.set(indexKey, ",".repeat(groupColumns.length - 1));
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
return { columns, columnsByIndexKey, emptyGroupStrings };
|
|
247
|
+
};
|
|
248
|
+
/** One staged row fragment: a single index group's values at one timestamp. */
interface RecordEntry {
  /** Sample timestamp as a bigint — presumably nanoseconds; TODO confirm. */
  time: bigint;
  /** Pre-formatted CSV cell values for this group's columns, in column order. */
  values: string[];
  /** Key of the index channel group this record belongs to. */
  indexKey: channel.Key;
}
|
|
253
|
+
|
|
254
|
+
const mergeSortedRecords = (a: RecordEntry[], b: RecordEntry[]): RecordEntry[] => {
|
|
255
|
+
const result: RecordEntry[] = [];
|
|
256
|
+
let i = 0;
|
|
257
|
+
let j = 0;
|
|
258
|
+
while (i < a.length && j < b.length)
|
|
259
|
+
if (a[i].time <= b[j].time) result.push(a[i++]);
|
|
260
|
+
else result.push(b[j++]);
|
|
261
|
+
result.push(...a.slice(i), ...b.slice(j));
|
|
262
|
+
return result;
|
|
263
|
+
};
|
|
264
|
+
|
|
265
|
+
// Target number of buffered records, and the max rows emitted per pull.
const BUFFER_SIZE = 1000;
|
|
@@ -8,8 +8,7 @@
|
|
|
8
8
|
// included in the file licenses/APL.txt.
|
|
9
9
|
|
|
10
10
|
import { EOF, Unreachable } from "@synnaxlabs/freighter";
|
|
11
|
-
import { id, sleep } from "@synnaxlabs/x";
|
|
12
|
-
import { DataType, Series, TimeSpan, TimeStamp } from "@synnaxlabs/x/telem";
|
|
11
|
+
import { DataType, id, Rate, Series, sleep, TimeSpan, TimeStamp } from "@synnaxlabs/x";
|
|
13
12
|
import { describe, expect, it, test, vi } from "vitest";
|
|
14
13
|
|
|
15
14
|
import { type channel } from "@/channel";
|
|
@@ -114,11 +113,98 @@ describe("Streamer", () => {
|
|
|
114
113
|
});
|
|
115
114
|
});
|
|
116
115
|
|
|
116
|
+
describe("throttling", () => {
|
|
117
|
+
test("throttle at 60Hz", async () => {
|
|
118
|
+
const ch = await newVirtualChannel(client);
|
|
119
|
+
const streamer = await client.openStreamer({
|
|
120
|
+
channels: ch.key,
|
|
121
|
+
throttleRate: 60,
|
|
122
|
+
});
|
|
123
|
+
const writer = await client.openWriter({
|
|
124
|
+
start: TimeStamp.now(),
|
|
125
|
+
channels: ch.key,
|
|
126
|
+
});
|
|
127
|
+
try {
|
|
128
|
+
const startTime = Date.now();
|
|
129
|
+
// Write data rapidly
|
|
130
|
+
for (let i = 0; i < 10; i++) {
|
|
131
|
+
await writer.write(ch.key, new Float64Array([i]));
|
|
132
|
+
await sleep.sleep(TimeSpan.milliseconds(5));
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
// Read frames - should be throttled
|
|
136
|
+
const receivedFrames: Frame[] = [];
|
|
137
|
+
const timeout = Date.now() + 500;
|
|
138
|
+
while (Date.now() < timeout)
|
|
139
|
+
try {
|
|
140
|
+
const frame = await Promise.race([
|
|
141
|
+
streamer.read(),
|
|
142
|
+
sleep.sleep(TimeSpan.milliseconds(100)).then(() => null),
|
|
143
|
+
]);
|
|
144
|
+
if (frame) receivedFrames.push(frame);
|
|
145
|
+
else break;
|
|
146
|
+
} catch {
|
|
147
|
+
break;
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
expect(receivedFrames.length).toBeGreaterThan(0);
|
|
151
|
+
const elapsed = Date.now() - startTime;
|
|
152
|
+
// Should take at least the throttle period
|
|
153
|
+
expect(elapsed).toBeGreaterThanOrEqual(16); // ~1/60Hz
|
|
154
|
+
} finally {
|
|
155
|
+
await writer.close();
|
|
156
|
+
streamer.close();
|
|
157
|
+
}
|
|
158
|
+
});
|
|
159
|
+
|
|
160
|
+
test("no throttling with rate of 0", async () => {
|
|
161
|
+
const ch = await newVirtualChannel(client);
|
|
162
|
+
const streamer = await client.openStreamer({
|
|
163
|
+
channels: ch.key,
|
|
164
|
+
throttleRate: 0,
|
|
165
|
+
});
|
|
166
|
+
const writer = await client.openWriter({
|
|
167
|
+
start: TimeStamp.now(),
|
|
168
|
+
channels: ch.key,
|
|
169
|
+
});
|
|
170
|
+
try {
|
|
171
|
+
await writer.write(ch.key, new Float64Array([1, 2, 3]));
|
|
172
|
+
const d = await streamer.read();
|
|
173
|
+
expect(Array.from(d.get(ch.key))).toEqual([1, 2, 3]);
|
|
174
|
+
} finally {
|
|
175
|
+
await writer.close();
|
|
176
|
+
streamer.close();
|
|
177
|
+
}
|
|
178
|
+
});
|
|
179
|
+
|
|
180
|
+
test("combine throttling and downsampling", async () => {
|
|
181
|
+
const ch = await newVirtualChannel(client);
|
|
182
|
+
const streamer = await client.openStreamer({
|
|
183
|
+
channels: ch.key,
|
|
184
|
+
downsampleFactor: 2,
|
|
185
|
+
throttleRate: 10,
|
|
186
|
+
});
|
|
187
|
+
const writer = await client.openWriter({
|
|
188
|
+
start: TimeStamp.now(),
|
|
189
|
+
channels: ch.key,
|
|
190
|
+
});
|
|
191
|
+
try {
|
|
192
|
+
await writer.write(ch.key, new Float64Array([1, 2, 3, 4, 5, 6]));
|
|
193
|
+
const d = await streamer.read();
|
|
194
|
+
// Should be downsampled to [1, 3, 5] and throttled
|
|
195
|
+
expect(Array.from(d.get(ch.key))).toEqual([1, 3, 5]);
|
|
196
|
+
} finally {
|
|
197
|
+
await writer.close();
|
|
198
|
+
streamer.close();
|
|
199
|
+
}
|
|
200
|
+
});
|
|
201
|
+
});
|
|
202
|
+
|
|
117
203
|
describe("calculations", () => {
|
|
118
204
|
test("basic calculated channel streaming", async () => {
|
|
119
205
|
// Create a timestamp index channel
|
|
120
206
|
const timeChannel = await client.channels.create({
|
|
121
|
-
name:
|
|
207
|
+
name: id.create(),
|
|
122
208
|
isIndex: true,
|
|
123
209
|
dataType: DataType.TIMESTAMP,
|
|
124
210
|
});
|
|
@@ -139,7 +225,7 @@ describe("Streamer", () => {
|
|
|
139
225
|
|
|
140
226
|
// Create calculated channel that adds the two source channels
|
|
141
227
|
const calcChannel = await client.channels.create({
|
|
142
|
-
name:
|
|
228
|
+
name: id.create(),
|
|
143
229
|
dataType: DataType.FLOAT64,
|
|
144
230
|
virtual: true,
|
|
145
231
|
expression: `return ${channelA.name} + ${channelB.name}`,
|
|
@@ -193,7 +279,7 @@ describe("Streamer", () => {
|
|
|
193
279
|
|
|
194
280
|
// Create calculated channel that adds 5
|
|
195
281
|
const calcChannel = await client.channels.create({
|
|
196
|
-
name:
|
|
282
|
+
name: id.create(),
|
|
197
283
|
dataType: DataType.FLOAT64,
|
|
198
284
|
virtual: true,
|
|
199
285
|
expression: `return ${baseChannel.name} + 5`,
|
|
@@ -232,23 +318,24 @@ describe("Streamer", () => {
|
|
|
232
318
|
test("calculated channel with multiple operations", async () => {
|
|
233
319
|
// Create timestamp channel
|
|
234
320
|
const timeChannel = await client.channels.create({
|
|
235
|
-
name:
|
|
321
|
+
name: id.create(),
|
|
236
322
|
isIndex: true,
|
|
237
323
|
dataType: DataType.TIMESTAMP,
|
|
238
324
|
});
|
|
239
325
|
|
|
240
326
|
// Create source channels
|
|
327
|
+
const names = [id.create(), id.create()];
|
|
241
328
|
const [channelA, channelB] = await client.channels.create([
|
|
242
|
-
{ name:
|
|
243
|
-
{ name:
|
|
329
|
+
{ name: names[0], dataType: DataType.FLOAT64, index: timeChannel.key },
|
|
330
|
+
{ name: names[1], dataType: DataType.FLOAT64, index: timeChannel.key },
|
|
244
331
|
]);
|
|
245
332
|
|
|
246
333
|
// Create calculated channel with multiple operations
|
|
247
334
|
const calcChannel = await client.channels.create({
|
|
248
|
-
name:
|
|
335
|
+
name: id.create(),
|
|
249
336
|
dataType: DataType.FLOAT64,
|
|
250
337
|
virtual: true,
|
|
251
|
-
expression:
|
|
338
|
+
expression: `return (${names[0]} * 2) + (${names[1]} / 2)`,
|
|
252
339
|
});
|
|
253
340
|
|
|
254
341
|
const streamer = await client.openStreamer(calcChannel.key);
|
|
@@ -279,7 +366,7 @@ describe("Streamer", () => {
|
|
|
279
366
|
describe("legacy calculations", async () => {
|
|
280
367
|
it("should correctly execute a calculation with a requires field", async () => {
|
|
281
368
|
const timeChannel = await client.channels.create({
|
|
282
|
-
name:
|
|
369
|
+
name: id.create(),
|
|
283
370
|
isIndex: true,
|
|
284
371
|
dataType: DataType.TIMESTAMP,
|
|
285
372
|
});
|
|
@@ -298,7 +385,7 @@ describe("Streamer", () => {
|
|
|
298
385
|
]);
|
|
299
386
|
|
|
300
387
|
const calcChannel = await client.channels.create({
|
|
301
|
-
name:
|
|
388
|
+
name: id.create(),
|
|
302
389
|
dataType: DataType.FLOAT64,
|
|
303
390
|
virtual: true,
|
|
304
391
|
expression: `return ${channelA.name} + ${channelB.name}`,
|
|
@@ -398,6 +485,7 @@ describe("Streamer", () => {
|
|
|
398
485
|
expect(openMock).toHaveBeenCalledWith({
|
|
399
486
|
...config,
|
|
400
487
|
downsampleFactor: 1,
|
|
488
|
+
throttleRate: new Rate(0),
|
|
401
489
|
});
|
|
402
490
|
await hardened.update([1, 2, 3]);
|
|
403
491
|
expect(streamer.updateMock).toHaveBeenCalledWith([1, 2, 3]);
|
package/src/framer/streamer.ts
CHANGED
|
@@ -8,7 +8,7 @@
|
|
|
8
8
|
// included in the file licenses/APL.txt.
|
|
9
9
|
|
|
10
10
|
import { EOF, type Stream, type WebSocketClient } from "@synnaxlabs/freighter";
|
|
11
|
-
import { breaker, observe, TimeSpan } from "@synnaxlabs/x";
|
|
11
|
+
import { breaker, observe, Rate, TimeSpan } from "@synnaxlabs/x";
|
|
12
12
|
import { z } from "zod";
|
|
13
13
|
|
|
14
14
|
import { type channel } from "@/channel";
|
|
@@ -18,7 +18,11 @@ import { WSStreamerCodec } from "@/framer/codec";
|
|
|
18
18
|
import { Frame, frameZ } from "@/framer/frame";
|
|
19
19
|
import { StreamProxy } from "@/framer/streamProxy";
|
|
20
20
|
|
|
21
|
-
const reqZ = z.object({
|
|
21
|
+
const reqZ = z.object({
|
|
22
|
+
keys: z.number().array(),
|
|
23
|
+
downsampleFactor: z.int(),
|
|
24
|
+
throttleRate: Rate.z.optional(),
|
|
25
|
+
});
|
|
22
26
|
|
|
23
27
|
/**
|
|
24
28
|
* Request interface for streaming frames from a Synnax cluster.
|
|
@@ -38,10 +42,12 @@ const intermediateStreamerConfigZ = z.object({
|
|
|
38
42
|
/** The channels to stream data from. Can be channel keys, names, or payloads. */
|
|
39
43
|
channels: paramsZ,
|
|
40
44
|
/** Optional factor to downsample the data by. Defaults to 1 (no downsampling). */
|
|
41
|
-
downsampleFactor: z.
|
|
42
|
-
/**
|
|
43
|
-
|
|
44
|
-
useHighPerformanceCodec
|
|
45
|
+
downsampleFactor: z.int().default(1),
|
|
46
|
+
/** Optional throttle rate in Hz to limit the rate of frames sent to the client. Defaults to 0 (no throttling). */
|
|
47
|
+
throttleRate: Rate.z.default(new Rate(0)),
|
|
48
|
+
/** useHighPerformanceCodec sets whether the writer will use the Synnax frame encoder
|
|
49
|
+
as opposed to the standard JSON encoding mechanisms for frames. */
|
|
50
|
+
useHighPerformanceCodec: z.boolean().default(true),
|
|
45
51
|
});
|
|
46
52
|
|
|
47
53
|
export const streamerConfigZ = intermediateStreamerConfigZ.or(
|
|
@@ -105,10 +111,16 @@ export const createStreamOpener =
|
|
|
105
111
|
if (cfg.useHighPerformanceCodec)
|
|
106
112
|
client = client.withCodec(new WSStreamerCodec(adapter.codec));
|
|
107
113
|
const stream = await client.stream("/frame/stream", reqZ, resZ);
|
|
108
|
-
const streamer = new CoreStreamer(
|
|
114
|
+
const streamer = new CoreStreamer(
|
|
115
|
+
stream,
|
|
116
|
+
adapter,
|
|
117
|
+
cfg.downsampleFactor,
|
|
118
|
+
cfg.throttleRate,
|
|
119
|
+
);
|
|
109
120
|
stream.send({
|
|
110
121
|
keys: Array.from(adapter.keys),
|
|
111
122
|
downsampleFactor: cfg.downsampleFactor,
|
|
123
|
+
throttleRate: cfg.throttleRate,
|
|
112
124
|
});
|
|
113
125
|
const [, err] = await stream.receive();
|
|
114
126
|
if (err != null) throw err;
|
|
@@ -132,11 +144,18 @@ class CoreStreamer implements Streamer {
|
|
|
132
144
|
private readonly stream: StreamProxy<typeof reqZ, typeof resZ>;
|
|
133
145
|
private readonly adapter: ReadAdapter;
|
|
134
146
|
private readonly downsampleFactor: number;
|
|
147
|
+
private readonly throttleRate: Rate;
|
|
135
148
|
|
|
136
|
-
constructor(
|
|
149
|
+
constructor(
|
|
150
|
+
stream: Stream<typeof reqZ, typeof resZ>,
|
|
151
|
+
adapter: ReadAdapter,
|
|
152
|
+
downsampleFactor: number = 1,
|
|
153
|
+
throttleRate: Rate = new Rate(0),
|
|
154
|
+
) {
|
|
137
155
|
this.stream = new StreamProxy("Streamer", stream);
|
|
138
156
|
this.adapter = adapter;
|
|
139
|
-
this.downsampleFactor =
|
|
157
|
+
this.downsampleFactor = downsampleFactor;
|
|
158
|
+
this.throttleRate = throttleRate;
|
|
140
159
|
}
|
|
141
160
|
|
|
142
161
|
get keys(): channel.Key[] {
|
|
@@ -163,6 +182,7 @@ class CoreStreamer implements Streamer {
|
|
|
163
182
|
this.stream.send({
|
|
164
183
|
keys: Array.from(this.adapter.keys),
|
|
165
184
|
downsampleFactor: this.downsampleFactor,
|
|
185
|
+
throttleRate: this.throttleRate,
|
|
166
186
|
});
|
|
167
187
|
}
|
|
168
188
|
|
|
@@ -7,7 +7,7 @@
|
|
|
7
7
|
// License, use of this software will be governed by the Apache License, Version 2.0,
|
|
8
8
|
// included in the file licenses/APL.txt.
|
|
9
9
|
|
|
10
|
-
import { DataType, TimeRange, TimeSpan, TimeStamp } from "@synnaxlabs/x";
|
|
10
|
+
import { DataType, id, TimeRange, TimeSpan, TimeStamp } from "@synnaxlabs/x";
|
|
11
11
|
import { describe, expect, it, test } from "vitest";
|
|
12
12
|
|
|
13
13
|
import { UnauthorizedError, ValidationError } from "@/errors";
|
|
@@ -42,8 +42,8 @@ describe("Writer", () => {
|
|
|
42
42
|
const channels = await newIndexedPair(client);
|
|
43
43
|
const writer = await client.openWriter({ start: TimeStamp.now(), channels });
|
|
44
44
|
await expect(
|
|
45
|
-
writer.write("
|
|
46
|
-
).rejects.toThrow('Channel "
|
|
45
|
+
writer.write("nonexistent_channel", randomSeries(10, DataType.FLOAT64)),
|
|
46
|
+
).rejects.toThrow('Channel "nonexistent_channel" not found');
|
|
47
47
|
await writer.close();
|
|
48
48
|
});
|
|
49
49
|
|
|
@@ -152,13 +152,13 @@ describe("Writer", () => {
|
|
|
152
152
|
|
|
153
153
|
test("write with out of order timestamp", async () => {
|
|
154
154
|
const indexCh = await client.channels.create({
|
|
155
|
-
name:
|
|
155
|
+
name: id.create(),
|
|
156
156
|
dataType: DataType.TIMESTAMP,
|
|
157
157
|
isIndex: true,
|
|
158
158
|
});
|
|
159
159
|
|
|
160
160
|
const dataCh = await client.channels.create({
|
|
161
|
-
name:
|
|
161
|
+
name: id.create(),
|
|
162
162
|
dataType: DataType.FLOAT64,
|
|
163
163
|
index: indexCh.key,
|
|
164
164
|
});
|
package/src/index.ts
CHANGED
|
@@ -8,7 +8,6 @@
|
|
|
8
8
|
// included in the file licenses/APL.txt.
|
|
9
9
|
|
|
10
10
|
export { access } from "@/access";
|
|
11
|
-
export { policy } from "@/access/policy";
|
|
12
11
|
export { arc } from "@/arc";
|
|
13
12
|
export { channel } from "@/channel";
|
|
14
13
|
export { Channel, isCalculated } from "@/channel/client";
|
|
@@ -22,6 +21,7 @@ export {
|
|
|
22
21
|
} from "@/client";
|
|
23
22
|
export * from "@/connection";
|
|
24
23
|
export { control } from "@/control";
|
|
24
|
+
export { device } from "@/device";
|
|
25
25
|
export {
|
|
26
26
|
AuthError,
|
|
27
27
|
ContiguityError,
|
|
@@ -35,15 +35,14 @@ export {
|
|
|
35
35
|
} from "@/errors";
|
|
36
36
|
export { framer } from "@/framer";
|
|
37
37
|
export { Frame } from "@/framer/frame";
|
|
38
|
-
export { hardware } from "@/hardware";
|
|
39
|
-
export { device } from "@/hardware/device";
|
|
40
|
-
export { rack } from "@/hardware/rack";
|
|
41
|
-
export { task } from "@/hardware/task";
|
|
42
38
|
export { label } from "@/label";
|
|
43
39
|
export { ontology } from "@/ontology";
|
|
44
40
|
export { group } from "@/ontology/group";
|
|
41
|
+
export { rack } from "@/rack";
|
|
45
42
|
export { ranger } from "@/ranger";
|
|
46
43
|
export { status } from "@/status";
|
|
44
|
+
export { task } from "@/task";
|
|
45
|
+
export { createTestClientWithPolicy } from "@/testutil/access";
|
|
47
46
|
export { createTestClient, TEST_CLIENT_PARAMS } from "@/testutil/client";
|
|
48
47
|
export { user } from "@/user";
|
|
49
48
|
export { workspace } from "@/workspace";
|