@powersync/common 1.51.0 → 1.52.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/dist/bundle.cjs +431 -445
  2. package/dist/bundle.cjs.map +1 -1
  3. package/dist/bundle.mjs +432 -444
  4. package/dist/bundle.mjs.map +1 -1
  5. package/dist/bundle.node.cjs +429 -445
  6. package/dist/bundle.node.cjs.map +1 -1
  7. package/dist/bundle.node.mjs +430 -444
  8. package/dist/bundle.node.mjs.map +1 -1
  9. package/dist/index.d.cts +41 -70
  10. package/lib/client/AbstractPowerSyncDatabase.js +3 -3
  11. package/lib/client/AbstractPowerSyncDatabase.js.map +1 -1
  12. package/lib/client/sync/stream/AbstractRemote.d.ts +29 -8
  13. package/lib/client/sync/stream/AbstractRemote.js +154 -177
  14. package/lib/client/sync/stream/AbstractRemote.js.map +1 -1
  15. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.d.ts +1 -0
  16. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js +69 -88
  17. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js.map +1 -1
  18. package/lib/index.d.ts +1 -1
  19. package/lib/index.js +0 -1
  20. package/lib/index.js.map +1 -1
  21. package/lib/utils/async.d.ts +0 -9
  22. package/lib/utils/async.js +0 -9
  23. package/lib/utils/async.js.map +1 -1
  24. package/lib/utils/stream_transform.d.ts +39 -0
  25. package/lib/utils/stream_transform.js +206 -0
  26. package/lib/utils/stream_transform.js.map +1 -0
  27. package/package.json +9 -7
  28. package/src/client/AbstractPowerSyncDatabase.ts +3 -3
  29. package/src/client/sync/stream/AbstractRemote.ts +182 -206
  30. package/src/client/sync/stream/AbstractStreamingSyncImplementation.ts +82 -83
  31. package/src/index.ts +1 -1
  32. package/src/utils/async.ts +0 -11
  33. package/src/utils/stream_transform.ts +252 -0
  34. package/lib/utils/DataStream.d.ts +0 -62
  35. package/lib/utils/DataStream.js +0 -169
  36. package/lib/utils/DataStream.js.map +0 -1
  37. package/src/utils/DataStream.ts +0 -222
@@ -4,7 +4,6 @@ import { InternalProgressInformation } from '../../../db/crud/SyncProgress.js';
4
4
  import { SyncStatus, SyncStatusOptions } from '../../../db/crud/SyncStatus.js';
5
5
  import { AbortOperation } from '../../../utils/AbortOperation.js';
6
6
  import { BaseListener, BaseObserver, BaseObserverInterface, Disposable } from '../../../utils/BaseObserver.js';
7
- import { DataStream } from '../../../utils/DataStream.js';
8
7
  import { throttleLeadingTrailing } from '../../../utils/async.js';
9
8
  import {
10
9
  BucketChecksum,
@@ -19,7 +18,6 @@ import { AbstractRemote, FetchStrategy, SyncStreamOptions } from './AbstractRemo
19
18
  import { EstablishSyncStream, Instruction, coreStatusToJs } from './core-instruction.js';
20
19
  import {
21
20
  BucketRequest,
22
- CrudUploadNotification,
23
21
  StreamingSyncLine,
24
22
  StreamingSyncLineOrCrudUploadComplete,
25
23
  StreamingSyncRequestParameterType,
@@ -30,6 +28,15 @@ import {
30
28
  isStreamingSyncCheckpointPartiallyComplete,
31
29
  isStreamingSyncData
32
30
  } from './streaming-sync-types.js';
31
+ import {
32
+ extractBsonObjects,
33
+ extractJsonLines,
34
+ injectable,
35
+ InjectableIterator,
36
+ map,
37
+ SimpleAsyncIterator
38
+ } from '../../../utils/stream_transform.js';
39
+ import type { BSON } from 'bson';
33
40
 
34
41
  export enum LockType {
35
42
  CRUD = 'crud',
@@ -682,6 +689,27 @@ The next upload iteration will be delayed.`);
682
689
  });
683
690
  }
684
691
 
692
+ private async receiveSyncLines(data: {
693
+ options: SyncStreamOptions;
694
+ connection: RequiredPowerSyncConnectionOptions;
695
+ bson?: typeof BSON;
696
+ }): Promise<SimpleAsyncIterator<Uint8Array | string>> {
697
+ const { options, connection, bson } = data;
698
+ const remote = this.options.remote;
699
+
700
+ if (connection.connectionMethod == SyncStreamConnectionMethod.HTTP) {
701
+ return await remote.fetchStream(options);
702
+ } else {
703
+ return await this.options.remote.socketStreamRaw(
704
+ {
705
+ ...options,
706
+ ...{ fetchStrategy: connection.fetchStrategy }
707
+ },
708
+ bson
709
+ );
710
+ }
711
+ }
712
+
685
713
  private async legacyStreamingSyncIteration(signal: AbortSignal, resolvedOptions: RequiredPowerSyncConnectionOptions) {
686
714
  const rawTables = resolvedOptions.serializedSchema?.raw_tables;
687
715
  if (rawTables != null && rawTables.length) {
@@ -717,46 +745,31 @@ The next upload iteration will be delayed.`);
717
745
  }
718
746
  };
719
747
 
720
- let stream: DataStream<StreamingSyncLineOrCrudUploadComplete>;
721
- if (resolvedOptions?.connectionMethod == SyncStreamConnectionMethod.HTTP) {
722
- stream = await this.options.remote.postStreamRaw(syncOptions, (line: string | CrudUploadNotification) => {
748
+ const bson = await this.options.remote.getBSON();
749
+ const source = await this.receiveSyncLines({
750
+ options: syncOptions,
751
+ connection: resolvedOptions,
752
+ bson
753
+ });
754
+ const stream: InjectableIterator<StreamingSyncLineOrCrudUploadComplete> = injectable(
755
+ map(source, (line) => {
723
756
  if (typeof line == 'string') {
724
757
  return JSON.parse(line) as StreamingSyncLine;
725
758
  } else {
726
- // Directly enqueued by us
727
- return line;
759
+ return bson.deserialize(line) as StreamingSyncLine;
728
760
  }
729
- });
730
- } else {
731
- const bson = await this.options.remote.getBSON();
732
- stream = await this.options.remote.socketStreamRaw(
733
- {
734
- ...syncOptions,
735
- ...{ fetchStrategy: resolvedOptions.fetchStrategy }
736
- },
737
- (payload: Uint8Array | CrudUploadNotification) => {
738
- if (payload instanceof Uint8Array) {
739
- return bson.deserialize(payload) as StreamingSyncLine;
740
- } else {
741
- // Directly enqueued by us
742
- return payload;
743
- }
744
- },
745
- bson
746
- );
747
- }
761
+ })
762
+ );
748
763
 
749
764
  this.logger.debug('Stream established. Processing events');
750
765
 
751
766
  this.notifyCompletedUploads = () => {
752
- if (!stream.closed) {
753
- stream.enqueueData({ crud_upload_completed: null });
754
- }
767
+ stream.inject({ crud_upload_completed: null });
755
768
  };
756
769
 
757
- while (!stream.closed) {
758
- const line = await stream.read();
759
- if (!line) {
770
+ while (true) {
771
+ const { value: line, done } = await stream.next();
772
+ if (done) {
760
773
  // The stream has closed while waiting
761
774
  return;
762
775
  }
@@ -942,6 +955,11 @@ The next upload iteration will be delayed.`);
942
955
  const syncImplementation = this;
943
956
  const adapter = this.options.adapter;
944
957
  const remote = this.options.remote;
958
+ const controller = new AbortController();
959
+ const abort = () => {
960
+ return controller.abort(signal.reason);
961
+ };
962
+ signal.addEventListener('abort', abort);
945
963
  let receivingLines: Promise<void> | null = null;
946
964
  let hadSyncLine = false;
947
965
  let hideDisconnectOnRestart = false;
@@ -949,64 +967,53 @@ The next upload iteration will be delayed.`);
949
967
  if (signal.aborted) {
950
968
  throw new AbortOperation('Connection request has been aborted');
951
969
  }
952
- const abortController = new AbortController();
953
- signal.addEventListener('abort', () => abortController.abort());
954
970
 
955
971
  // Pending sync lines received from the service, as well as local events that trigger a powersync_control
956
972
  // invocation (local events include refreshed tokens and completed uploads).
957
973
  // This is a single data stream so that we can handle all control calls from a single place.
958
- let controlInvocations: DataStream<EnqueuedCommand, Uint8Array | EnqueuedCommand> | null = null;
974
+ let controlInvocations: InjectableIterator<EnqueuedCommand> | null = null;
959
975
 
960
976
  async function connect(instr: EstablishSyncStream) {
961
977
  const syncOptions: SyncStreamOptions = {
962
978
  path: '/sync/stream',
963
- abortSignal: abortController.signal,
979
+ abortSignal: controller.signal,
964
980
  data: instr.request
965
981
  };
966
982
 
967
- if (resolvedOptions.connectionMethod == SyncStreamConnectionMethod.HTTP) {
968
- controlInvocations = await remote.postStreamRaw(syncOptions, (line: string | EnqueuedCommand) => {
969
- if (typeof line == 'string') {
970
- return {
971
- command: PowerSyncControlCommand.PROCESS_TEXT_LINE,
972
- payload: line
973
- };
974
- } else {
975
- // Directly enqueued by us
976
- return line;
977
- }
978
- });
979
- } else {
980
- controlInvocations = await remote.socketStreamRaw(
981
- {
982
- ...syncOptions,
983
- fetchStrategy: resolvedOptions.fetchStrategy
984
- },
985
- (payload: Uint8Array | EnqueuedCommand) => {
986
- if (payload instanceof Uint8Array) {
983
+ controlInvocations = injectable(
984
+ map(
985
+ await syncImplementation.receiveSyncLines({
986
+ options: syncOptions,
987
+ connection: resolvedOptions
988
+ }),
989
+ (line) => {
990
+ if (typeof line == 'string') {
987
991
  return {
988
- command: PowerSyncControlCommand.PROCESS_BSON_LINE,
989
- payload: payload
992
+ command: PowerSyncControlCommand.PROCESS_TEXT_LINE,
993
+ payload: line
990
994
  };
991
995
  } else {
992
- // Directly enqueued by us
993
- return payload;
996
+ return {
997
+ command: PowerSyncControlCommand.PROCESS_BSON_LINE,
998
+ payload: line
999
+ };
994
1000
  }
995
1001
  }
996
- );
997
- }
1002
+ )
1003
+ );
998
1004
 
999
1005
  // The rust client will set connected: true after the first sync line because that's when it gets invoked, but
1000
1006
  // we're already connected here and can report that.
1001
1007
  syncImplementation.updateSyncStatus({ connected: true });
1002
1008
 
1003
1009
  try {
1004
- while (!controlInvocations.closed) {
1005
- const line = await controlInvocations.read();
1006
- if (line == null) {
1007
- return;
1010
+ while (true) {
1011
+ let event = await controlInvocations.next();
1012
+ if (event.done) {
1013
+ break;
1008
1014
  }
1009
1015
 
1016
+ const line = event.value;
1010
1017
  await control(line.command, line.payload);
1011
1018
 
1012
1019
  if (!hadSyncLine) {
@@ -1015,12 +1022,8 @@ The next upload iteration will be delayed.`);
1015
1022
  }
1016
1023
  }
1017
1024
  } finally {
1018
- const activeInstructions = controlInvocations;
1019
- // We concurrently add events to the active data stream when e.g. a CRUD upload is completed or a token is
1020
- // refreshed. That would throw after closing (and we can't handle those events either way), so set this back
1021
- // to null.
1022
- controlInvocations = null;
1023
- await activeInstructions.close();
1025
+ abort();
1026
+ signal.removeEventListener('abort', abort);
1024
1027
  }
1025
1028
  }
1026
1029
 
@@ -1072,7 +1075,7 @@ The next upload iteration will be delayed.`);
1072
1075
  // Restart iteration after the credentials have been refreshed.
1073
1076
  remote.fetchCredentials().then(
1074
1077
  (_) => {
1075
- controlInvocations?.enqueueData({ command: PowerSyncControlCommand.NOTIFY_TOKEN_REFRESHED });
1078
+ controlInvocations?.inject({ command: PowerSyncControlCommand.NOTIFY_TOKEN_REFRESHED });
1076
1079
  },
1077
1080
  (err) => {
1078
1081
  syncImplementation.logger.warn('Could not prefetch credentials', err);
@@ -1080,7 +1083,7 @@ The next upload iteration will be delayed.`);
1080
1083
  );
1081
1084
  }
1082
1085
  } else if ('CloseSyncStream' in instruction) {
1083
- abortController.abort();
1086
+ controller.abort();
1084
1087
  hideDisconnectOnRestart = instruction.CloseSyncStream.hide_disconnect;
1085
1088
  } else if ('FlushFileSystem' in instruction) {
1086
1089
  // Not necessary on JS platforms.
@@ -1113,17 +1116,13 @@ The next upload iteration will be delayed.`);
1113
1116
  await control(PowerSyncControlCommand.START, JSON.stringify(options));
1114
1117
 
1115
1118
  this.notifyCompletedUploads = () => {
1116
- if (controlInvocations && !controlInvocations?.closed) {
1117
- controlInvocations.enqueueData({ command: PowerSyncControlCommand.NOTIFY_CRUD_UPLOAD_COMPLETED });
1118
- }
1119
+ controlInvocations?.inject({ command: PowerSyncControlCommand.NOTIFY_CRUD_UPLOAD_COMPLETED });
1119
1120
  };
1120
1121
  this.handleActiveStreamsChange = () => {
1121
- if (controlInvocations && !controlInvocations?.closed) {
1122
- controlInvocations.enqueueData({
1123
- command: PowerSyncControlCommand.UPDATE_SUBSCRIPTIONS,
1124
- payload: JSON.stringify(this.activeStreams)
1125
- });
1126
- }
1122
+ controlInvocations?.inject({
1123
+ command: PowerSyncControlCommand.UPDATE_SUBSCRIPTIONS,
1124
+ payload: JSON.stringify(this.activeStreams)
1125
+ });
1127
1126
  };
1128
1127
  await receivingLines;
1129
1128
  } finally {
package/src/index.ts CHANGED
@@ -59,9 +59,9 @@ export * from './client/watched/WatchedQuery.js';
59
59
  export * from './utils/AbortOperation.js';
60
60
  export * from './utils/BaseObserver.js';
61
61
  export * from './utils/ControlledExecutor.js';
62
- export * from './utils/DataStream.js';
63
62
  export * from './utils/Logger.js';
64
63
  export * from './utils/mutex.js';
65
64
  export * from './utils/parseQuery.js';
65
+ export type { SimpleAsyncIterator } from './utils/stream_transform.js';
66
66
 
67
67
  export * from './types/types.js';
@@ -1,14 +1,3 @@
1
- /**
2
- * A ponyfill for `Symbol.asyncIterator` that is compatible with the
3
- * [recommended polyfill](https://github.com/Azure/azure-sdk-for-js/blob/%40azure/core-asynciterator-polyfill_1.0.2/sdk/core/core-asynciterator-polyfill/src/index.ts#L4-L6)
4
- * we recommend for React Native.
5
- *
6
- * As long as we use this symbol (instead of `for await` and `async *`) in this package, we can be compatible with async
7
- * iterators without requiring them.
8
- */
9
- export const symbolAsyncIterator: typeof Symbol.asyncIterator =
10
- Symbol.asyncIterator ?? Symbol.for('Symbol.asyncIterator');
11
-
12
1
  /**
13
2
  * Throttle a function to be called at most once every "wait" milliseconds,
14
3
  * on the trailing edge.
@@ -0,0 +1,252 @@
1
+ /**
2
+ * An async iterator that can't be cancelled.
3
+ *
4
+ * To keep data flow simple, we always pass an explicit cancellation token when subscribing to async streams. Once the
5
+ * {@link AbortSignal} aborts, iterators are supposed to clean up and then emit a final `{done: true}` event. This means
6
+ * that there's no way to distinguish between streams that have completed normally and streams that have been cancelled,
7
+ * but that is acceptable for our uses of this.
8
+ */
9
+ export type SimpleAsyncIterator<T> = Pick<AsyncIterator<T>, 'next'>;
10
+
11
+ export const doneResult: IteratorReturnResult<any> = { done: true, value: undefined };
12
+
13
+ export function valueResult<T>(value: T) {
14
+ return { done: false, value };
15
+ }
16
+
17
+ /**
18
+ * A variant of {@link Array.map} for async iterators.
19
+ */
20
+ export function map<T1, T2>(source: SimpleAsyncIterator<T1>, map: (source: T1) => T2): SimpleAsyncIterator<T2> {
21
+ return {
22
+ next: async () => {
23
+ const value = await source.next();
24
+ if (value.done) {
25
+ return value;
26
+ } else {
27
+ return { value: map(value.value) };
28
+ }
29
+ }
30
+ };
31
+ }
32
+
33
+ export interface InjectableIterator<T> extends SimpleAsyncIterator<T> {
34
+ inject(event: T): void;
35
+ }
36
+
37
+ /**
38
+ * Expands a source async iterator by allowing to inject events asynchronously.
39
+ *
40
+ * The resulting iterator will emit all events from its source. Additionally though, events can be injected. These
41
+ * events are dropped once the main iterator completes, but are otherwise forwarded.
42
+ *
43
+ * The iterator completes when its source completes, and it supports backpressure by only calling `next()` on the source
44
+ * in response to a `next()` call from downstream if no pending injected events can be dispatched.
45
+ */
46
+ export function injectable<T>(source: SimpleAsyncIterator<T>): InjectableIterator<T> {
47
+ type Waiter = { resolve: (t: IteratorResult<T>) => void; reject: (e: unknown) => void };
48
+
49
+ let sourceIsDone = false;
50
+ let waiter: Waiter | undefined = undefined; // An active, waiting next() call.
51
+ // A pending upstream event that couldn't be dispatched because inject() has been called before it was resolved.
52
+ let pendingSourceEvent: ((w: Waiter) => void) | null = null;
53
+
54
+ let pendingInjectedEvents: T[] = [];
55
+
56
+ const consumeWaiter = () => {
57
+ const pending = waiter;
58
+ waiter = undefined;
59
+ return pending;
60
+ };
61
+
62
+ const fetchFromSource = () => {
63
+ const resolveWaiter = (propagate: (w: Waiter) => void) => {
64
+ const active = consumeWaiter();
65
+ if (active) {
66
+ propagate(active);
67
+ } else {
68
+ pendingSourceEvent = propagate;
69
+ }
70
+ };
71
+
72
+ const nextFromSource = source.next();
73
+ nextFromSource.then(
74
+ (value) => {
75
+ sourceIsDone = value.done == true;
76
+ resolveWaiter((w) => w.resolve(value));
77
+ },
78
+ (error) => {
79
+ resolveWaiter((w) => w.reject(error));
80
+ }
81
+ );
82
+ };
83
+
84
+ return {
85
+ next: () => {
86
+ return new Promise((resolve, reject) => {
87
+ // First priority: Dispatch ready upstream events.
88
+ if (sourceIsDone) {
89
+ return resolve(doneResult);
90
+ }
91
+ if (pendingSourceEvent) {
92
+ pendingSourceEvent({ resolve, reject });
93
+ pendingSourceEvent = null;
94
+ return;
95
+ }
96
+
97
+ // Second priority: Dispatch injected events
98
+ if (pendingInjectedEvents.length) {
99
+ return resolve(valueResult(pendingInjectedEvents.shift()!));
100
+ }
101
+
102
+ // Nothing pending? Fetch from source
103
+ waiter = { resolve, reject };
104
+ return fetchFromSource();
105
+ });
106
+ },
107
+ inject: (event) => {
108
+ const pending = consumeWaiter();
109
+ if (pending != null) {
110
+ pending.resolve(valueResult(event));
111
+ } else {
112
+ pendingInjectedEvents.push(event);
113
+ }
114
+ }
115
+ };
116
+ }
117
+
118
+ /**
119
+ * Splits a byte stream at line endings, emitting each line as a string.
120
+ */
121
+ export function extractJsonLines(
122
+ source: SimpleAsyncIterator<Uint8Array>,
123
+ decoder: TextDecoder
124
+ ): SimpleAsyncIterator<string> {
125
+ let buffer = '';
126
+ const pendingLines: string[] = [];
127
+ let isFinalEvent = false;
128
+
129
+ return {
130
+ next: async () => {
131
+ while (true) {
132
+ if (isFinalEvent) {
133
+ return doneResult;
134
+ }
135
+
136
+ {
137
+ const first = pendingLines.shift();
138
+ if (first) {
139
+ return { done: false, value: first };
140
+ }
141
+ }
142
+
143
+ const { done, value } = await source.next();
144
+ if (done) {
145
+ const remaining = buffer.trim();
146
+ if (remaining.length != 0) {
147
+ isFinalEvent = true;
148
+ return { done: false, value: remaining };
149
+ }
150
+
151
+ return doneResult;
152
+ }
153
+
154
+ const data = decoder.decode(value, { stream: true });
155
+ buffer += data;
156
+
157
+ const lines = buffer.split('\n');
158
+ for (let i = 0; i < lines.length - 1; i++) {
159
+ const l = lines[i].trim();
160
+ if (l.length > 0) {
161
+ pendingLines.push(l);
162
+ }
163
+ }
164
+
165
+ buffer = lines[lines.length - 1];
166
+ }
167
+ }
168
+ };
169
+ }
170
+
171
+ /**
172
+ * Splits a concatenated stream of BSON objects by emitting individual objects.
173
+ */
174
+ export function extractBsonObjects(source: SimpleAsyncIterator<Uint8Array>): SimpleAsyncIterator<Uint8Array> {
175
+ // Fully read but not emitted yet.
176
+ const completedObjects: Uint8Array[] = [];
177
+
178
+ // Whether source has returned { done: true }. We do the same once completed objects have been emitted.
179
+ let isDone = false;
180
+
181
+ const lengthBuffer = new DataView(new ArrayBuffer(4));
182
+ let objectBody: Uint8Array | null = null;
183
+ // If we're parsing the length field, a number between 1 and 4 (inclusive) describing remaining bytes in the header.
184
+ // If we're consuming a document, the bytes remaining.
185
+ let remainingLength = 4;
186
+
187
+ return {
188
+ async next(): Promise<IteratorResult<Uint8Array>> {
189
+ while (true) {
190
+ // Before fetching new data from upstream, return completed objects.
191
+ if (completedObjects.length) {
192
+ return valueResult(completedObjects.shift()!);
193
+ }
194
+ if (isDone) {
195
+ return doneResult;
196
+ }
197
+
198
+ const upstreamEvent = await source.next();
199
+ if (upstreamEvent.done) {
200
+ isDone = true;
201
+ if (objectBody || remainingLength != 4) {
202
+ throw new Error('illegal end of stream in BSON object');
203
+ }
204
+ return doneResult;
205
+ }
206
+
207
+ const chunk = upstreamEvent.value;
208
+ for (let i = 0; i < chunk.length; ) {
209
+ const availableInData = chunk.length - i;
210
+
211
+ if (objectBody) {
212
+ // We're in the middle of reading a BSON document.
213
+ const bytesToRead = Math.min(availableInData, remainingLength);
214
+ const copySource = new Uint8Array(chunk.buffer, chunk.byteOffset + i, bytesToRead);
215
+ objectBody.set(copySource, objectBody.length - remainingLength);
216
+ i += bytesToRead;
217
+ remainingLength -= bytesToRead;
218
+
219
+ if (remainingLength == 0) {
220
+ completedObjects.push(objectBody);
221
+
222
+ // Prepare to read another document, starting with its length
223
+ objectBody = null;
224
+ remainingLength = 4;
225
+ }
226
+ } else {
227
+ // Copy up to 4 bytes into lengthBuffer, depending on how many we still need.
228
+ const bytesToRead = Math.min(availableInData, remainingLength);
229
+ for (let j = 0; j < bytesToRead; j++) {
230
+ lengthBuffer.setUint8(4 - remainingLength + j, chunk[i + j]);
231
+ }
232
+ i += bytesToRead;
233
+ remainingLength -= bytesToRead;
234
+
235
+ if (remainingLength == 0) {
236
+ // Transition from reading length header to reading document. Subtracting 4 because the length of the
237
+ // header is included in length.
238
+ const length = lengthBuffer.getInt32(0, true /* little endian */);
239
+ remainingLength = length - 4;
240
+ if (remainingLength < 1) {
241
+ throw new Error(`invalid length for bson: ${length}`);
242
+ }
243
+
244
+ objectBody = new Uint8Array(length);
245
+ new DataView(objectBody.buffer).setInt32(0, length, true);
246
+ }
247
+ }
248
+ }
249
+ }
250
+ }
251
+ };
252
+ }
@@ -1,62 +0,0 @@
1
- import { ILogger } from 'js-logger';
2
- import { BaseListener, BaseObserver } from './BaseObserver.js';
3
- export type DataStreamOptions<ParsedData, SourceData> = {
4
- mapLine?: (line: SourceData) => ParsedData;
5
- /**
6
- * Close the stream if any consumer throws an error
7
- */
8
- closeOnError?: boolean;
9
- pressure?: {
10
- highWaterMark?: number;
11
- lowWaterMark?: number;
12
- };
13
- logger?: ILogger;
14
- };
15
- export type DataStreamCallback<Data extends any = any> = (data: Data) => Promise<void>;
16
- export interface DataStreamListener<Data extends any = any> extends BaseListener {
17
- data: (data: Data) => Promise<void>;
18
- closed: () => void;
19
- error: (error: Error) => void;
20
- highWater: () => Promise<void>;
21
- lowWater: () => Promise<void>;
22
- }
23
- export declare const DEFAULT_PRESSURE_LIMITS: {
24
- highWater: number;
25
- lowWater: number;
26
- };
27
- /**
28
- * A very basic implementation of a data stream with backpressure support which does not use
29
- * native JS streams or async iterators.
30
- * This is handy for environments such as React Native which need polyfills for the above.
31
- */
32
- export declare class DataStream<ParsedData, SourceData = any> extends BaseObserver<DataStreamListener<ParsedData>> {
33
- protected options?: DataStreamOptions<ParsedData, SourceData> | undefined;
34
- dataQueue: SourceData[];
35
- protected isClosed: boolean;
36
- protected processingPromise: Promise<void> | null;
37
- protected notifyDataAdded: (() => void) | null;
38
- protected logger: ILogger;
39
- protected mapLine: (line: SourceData) => ParsedData;
40
- constructor(options?: DataStreamOptions<ParsedData, SourceData> | undefined);
41
- get highWatermark(): number;
42
- get lowWatermark(): number;
43
- get closed(): boolean;
44
- close(): Promise<void>;
45
- /**
46
- * Enqueues data for the consumers to read
47
- */
48
- enqueueData(data: SourceData): void;
49
- /**
50
- * Reads data once from the data stream
51
- * @returns a Data payload or Null if the stream closed.
52
- */
53
- read(): Promise<ParsedData | null>;
54
- /**
55
- * Executes a callback for each data item in the stream
56
- */
57
- forEach(callback: DataStreamCallback<ParsedData>): () => void;
58
- protected processQueue(): Promise<void> | undefined;
59
- protected hasDataReader(): boolean;
60
- protected _processQueue(): Promise<void>;
61
- protected iterateAsyncErrored(cb: (l: Partial<DataStreamListener<ParsedData>>) => Promise<void>): Promise<void>;
62
- }