@synnaxlabs/client 0.49.0 → 0.49.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,272 @@
1
+ // Copyright 2025 Synnax Labs, Inc.
2
+ //
3
+ // Use of this software is governed by the Business Source License included in the file
4
+ // licenses/BSL.txt.
5
+ //
6
+ // As of the Change Date specified in that file, in accordance with the Business Source
7
+ // License, use of this software will be governed by the Apache License, Version 2.0,
8
+ // included in the file licenses/APL.txt.
9
+
10
+ import { type WebSocketClient } from "@synnaxlabs/freighter";
11
+ import { type CrudeTimeRange, csv, runtime } from "@synnaxlabs/x";
12
+
13
+ import { type channel } from "@/channel";
14
+ import { UnexpectedError } from "@/errors";
15
+ import { type Frame } from "@/framer/frame";
16
+ import { Iterator, type IteratorConfig } from "@/framer/iterator";
17
+
18
/** Parameters for a bulk, streamed read of channel data encoded as CSV. */
export interface ReadRequest {
  /** Channels to read — channel keys or names. */
  channels: channel.Params;
  /** Time range to read data over. */
  timeRange: CrudeTimeRange;
  /** Optional CSV header-text overrides, keyed by channel key or name. */
  channelNames?: Record<channel.KeyOrName, string>;
  /** Output encoding; CSV is the only supported response type. */
  responseType: "csv";
  /** Optional tuning for the underlying frame iterator. */
  iteratorConfig?: IteratorConfig;
}
25
+
26
+ export class Reader {
27
+ private readonly retriever: channel.Retriever;
28
+ private readonly streamClient: WebSocketClient;
29
+
30
+ constructor(retriever: channel.Retriever, streamClient: WebSocketClient) {
31
+ this.retriever = retriever;
32
+ this.streamClient = streamClient;
33
+ }
34
+
35
+ async read(request: ReadRequest): Promise<ReadableStream<Uint8Array>> {
36
+ const {
37
+ channels: channelParams,
38
+ timeRange,
39
+ channelNames,
40
+ iteratorConfig,
41
+ } = request;
42
+ const channelPayloads = await this.retriever.retrieve(channelParams);
43
+ const allKeys = new Set<channel.Key>();
44
+ channelPayloads.forEach((ch) => {
45
+ allKeys.add(ch.key);
46
+ if (ch.index !== 0) allKeys.add(ch.index);
47
+ });
48
+ const missingIndexKeys = Array.from(allKeys).filter(
49
+ (k) => !channelPayloads.some((ch) => ch.key === k),
50
+ );
51
+ if (missingIndexKeys.length > 0) {
52
+ const indexChannels = await this.retriever.retrieve(missingIndexKeys);
53
+ channelPayloads.push(...indexChannels);
54
+ }
55
+ const iterator = await Iterator._open(
56
+ timeRange,
57
+ Array.from(allKeys),
58
+ this.retriever,
59
+ this.streamClient,
60
+ iteratorConfig,
61
+ );
62
+ return createCSVReadableStream({
63
+ iterator,
64
+ channelPayloads,
65
+ headers: channelNames,
66
+ });
67
+ }
68
+ }
69
+
70
/** Inputs for {@link createCSVReadableStream}. */
interface CreateCSVExportStreamParams {
  /** Open frame iterator over the requested time range and channels. */
  iterator: Iterator;
  /** Payloads for every exported channel, including index channels. */
  channelPayloads: channel.Payload[];
  /** Optional CSV header-text overrides keyed by channel key or name. */
  headers?: Record<channel.KeyOrName, string>;
}
75
+
76
/**
 * Adapts an open frame iterator into a `ReadableStream` of CSV bytes.
 *
 * Rows are built per unique timestamp: channels sharing an index channel form
 * a "group", and each output row concatenates one cell block per group (empty
 * cells when a group has no record at that timestamp). Records extracted from
 * incoming frames are staged, sorted, and merged into a pending buffer that is
 * drained with a cursor (no O(n) `shift()` calls).
 */
const createCSVReadableStream = ({
  iterator,
  channelPayloads,
  headers,
}: CreateCSVExportStreamParams): ReadableStream<Uint8Array> => {
  // Platform-appropriate line ending for the generated CSV.
  const delimiter = runtime.getOS() === "Windows" ? "\r\n" : "\n";
  const encoder = new TextEncoder();
  let headerWritten = false;
  let seekDone = false;
  const groups = groupChannelsByIndex(channelPayloads);
  const { columns, columnsByIndexKey, emptyGroupStrings } = buildColumnMeta(
    channelPayloads,
    groups,
    headers,
  );
  // Use a cursor-based approach instead of having to call .shift() for O(1) access.
  // pendingRecords[0..pendingCursor) have already been emitted; stagedRecords
  // holds newly extracted (not yet sorted/merged) records.
  let pendingRecords: RecordEntry[] = [];
  let pendingCursor = 0;
  let stagedRecords: RecordEntry[] = [];

  // Pulls one RecordEntry per sample out of each index group present in the
  // frame, pre-formatting the CSV cell values.
  const extractRecordsFromFrame = (frame: Frame): void => {
    for (const [indexKey] of groups) {
      const indexSeries = frame.get(indexKey);
      if (indexSeries.length === 0) continue;
      const groupColumns = columnsByIndexKey.get(indexKey) ?? [];
      // Pre-fetch all series for this group to avoid repeated lookups
      const seriesData = groupColumns.map((col) => frame.get(col.key));
      for (let i = 0; i < indexSeries.length; i++) {
        const time = indexSeries.at(i, true) as bigint;
        const values = seriesData.map((series) => csv.formatValue(series.at(i, true)));
        stagedRecords.push({ time, values, indexKey });
      }
    }
  };

  // Merges staged records into the pending buffer and emits up to maxRows CSV
  // rows, one per unique timestamp. Unless `flush` is set, the newest
  // timestamp is withheld since later frames may still add records to it.
  const buildCSVRows = (maxRows: number, flush: boolean = false): string[] => {
    if (stagedRecords.length > 0) {
      // Number(bigint - bigint) preserves sign, which is all sort() needs.
      stagedRecords.sort((a, b) => Number(a.time - b.time));
      if (pendingCursor > 0) {
        // Drop already-emitted records before merging so the merge stays linear.
        pendingRecords = pendingRecords.slice(pendingCursor);
        pendingCursor = 0;
      }
      pendingRecords = mergeSortedRecords(pendingRecords, stagedRecords);
      stagedRecords = [];
    }
    const rows: string[] = [];
    const pendingLen = pendingRecords.length;
    while (pendingCursor < pendingLen && rows.length < maxRows) {
      const minTime = pendingRecords[pendingCursor].time;
      // Don't output the last timestamp unless flushing - more data might arrive
      // Optimization: only check if last record has same time (since array is sorted)
      if (!flush && pendingRecords[pendingLen - 1].time === minTime) break;
      // Collect all records at this timestamp using cursor (O(1) per record)
      // Use Map keyed by indexKey for O(1) lookup instead of find()
      const recordsByGroup = new Map<channel.Key, RecordEntry>();
      while (
        pendingCursor < pendingLen &&
        pendingRecords[pendingCursor].time === minTime
      ) {
        const record = pendingRecords[pendingCursor++];
        recordsByGroup.set(record.indexKey, record);
      }
      // Emit one cell block per group, in stable group iteration order; groups
      // with no record at this timestamp get a pre-computed run of commas.
      const rowParts: string[] = [];
      for (const [indexKey] of groups) {
        const record = recordsByGroup.get(indexKey);
        rowParts.push(
          record?.values.join(",") ?? emptyGroupStrings.get(indexKey) ?? "",
        );
      }
      rows.push(rowParts.join(","));
    }
    return rows;
  };

  return new ReadableStream<Uint8Array>({
    async pull(controller): Promise<void> {
      try {
        if (!seekDone) {
          await iterator.seekFirst();
          seekDone = true;
        }
        if (!headerWritten) {
          const headerRow = columns.map((c) => csv.formatValue(c.header)).join(",");
          controller.enqueue(encoder.encode(`${headerRow}${delimiter}`));
          headerWritten = true;
        }
        // Top up the buffer only when it has drained below the target size.
        const bufferedCount =
          pendingRecords.length - pendingCursor + stagedRecords.length;
        if (bufferedCount < BUFFER_SIZE) {
          const hasMore = await iterator.next();
          if (hasMore) extractRecordsFromFrame(iterator.value);
        }
        const rows = buildCSVRows(BUFFER_SIZE);
        if (rows.length > 0)
          controller.enqueue(encoder.encode(`${rows.join(delimiter)}${delimiter}`));
        const remainingPending = pendingRecords.length - pendingCursor;
        // NOTE(review): buildCSVRows always leaves stagedRecords empty, so the
        // second operand of this || is always true and a second iterator.next()
        // runs on every pull. Confirm whether the guard was meant to throttle.
        if (remainingPending === 0 || stagedRecords.length === 0) {
          const hasMore = await iterator.next();
          if (!hasMore) {
            // Flush remaining records
            const finalRows = buildCSVRows(Infinity, true);
            if (finalRows.length > 0)
              controller.enqueue(
                encoder.encode(`${finalRows.join(delimiter)}${delimiter}`),
              );
            await iterator.close();
            controller.close();
            return;
          }
          extractRecordsFromFrame(iterator.value);
        }
      } catch (error) {
        // Release server-side iterator resources before surfacing the error.
        await iterator.close();
        controller.error(error);
      }
    },

    async cancel(): Promise<void> {
      // Consumer aborted the stream; close the iterator to free resources.
      await iterator.close();
    },
  });
};
198
+
199
+ const groupChannelsByIndex = (
200
+ channels: channel.Payload[],
201
+ ): Map<channel.Key, channel.Keys> => {
202
+ const groupMap = new Map<channel.Key, channel.Keys>();
203
+ for (const ch of channels) {
204
+ if (ch.index === 0) continue;
205
+ let group = groupMap.get(ch.index);
206
+ if (group == null) {
207
+ group = [ch.index];
208
+ groupMap.set(ch.index, group);
209
+ }
210
+ if (!ch.isIndex && !group.includes(ch.key)) group.push(ch.key);
211
+ }
212
+ return groupMap;
213
+ };
214
+
215
/** A single CSV column: the channel it maps to and its header text. */
interface ColumnMeta {
  key: channel.Key;
  header: string;
}

/** Result of {@link buildColumnMeta}. */
interface ColumnMetaResult {
  /** All columns in output order (used to build the CSV header row). */
  columns: ColumnMeta[];
  /** Columns grouped by their index channel's key. */
  columnsByIndexKey: Map<channel.Key, ColumnMeta[]>;
  /** Pre-computed comma runs used to render a group with all-empty cells. */
  emptyGroupStrings: Map<channel.Key, string>;
}
225
+
226
+ const buildColumnMeta = (
227
+ channels: channel.Payload[],
228
+ groups: Map<channel.Key, channel.Keys>,
229
+ headers?: Record<channel.KeyOrName, string>,
230
+ ): ColumnMetaResult => {
231
+ const channelMap = new Map(channels.map((ch) => [ch.key, ch]));
232
+ const columns: ColumnMeta[] = [];
233
+ const columnsByIndexKey = new Map<channel.Key, ColumnMeta[]>();
234
+ const emptyGroupStrings = new Map<channel.Key, string>();
235
+
236
+ for (const [indexKey, channelKeys] of groups) {
237
+ const groupColumns: ColumnMeta[] = [];
238
+ for (const key of channelKeys) {
239
+ const ch = channelMap.get(key);
240
+ if (ch == null) throw new UnexpectedError(`Channel ${key} not found`);
241
+ const meta: ColumnMeta = {
242
+ key,
243
+ header: headers?.[key] ?? headers?.[ch.name] ?? ch.name,
244
+ };
245
+ columns.push(meta);
246
+ groupColumns.push(meta);
247
+ }
248
+ columnsByIndexKey.set(indexKey, groupColumns);
249
+ // Pre-compute empty group string for fast row building
250
+ emptyGroupStrings.set(indexKey, ",".repeat(groupColumns.length - 1));
251
+ }
252
+
253
+ return { columns, columnsByIndexKey, emptyGroupStrings };
254
+ };
255
/** One timestamped row fragment produced from a single index group. */
interface RecordEntry {
  /** Timestamp sampled from the group's index series. */
  time: bigint;
  /** Pre-formatted CSV cell values for the group's columns. */
  values: string[];
  /** Key of the index channel whose group this record belongs to. */
  indexKey: channel.Key;
}
260
+
261
+ const mergeSortedRecords = (a: RecordEntry[], b: RecordEntry[]): RecordEntry[] => {
262
+ const result: RecordEntry[] = [];
263
+ let i = 0;
264
+ let j = 0;
265
+ while (i < a.length && j < b.length)
266
+ if (a[i].time <= b[j].time) result.push(a[i++]);
267
+ else result.push(b[j++]);
268
+ result.push(...a.slice(i), ...b.slice(j));
269
+ return result;
270
+ };
271
+
272
// Target number of records to keep buffered, and the maximum number of CSV
// rows emitted, per pull of the export stream.
const BUFFER_SIZE = 1000;