@bquery/bquery 1.9.0 → 1.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +181 -25
- package/dist/{a11y-_9X-kt-_.js → a11y-DG2i4iZN.js} +3 -3
- package/dist/{a11y-_9X-kt-_.js.map → a11y-DG2i4iZN.js.map} +1 -1
- package/dist/a11y.es.mjs +1 -1
- package/dist/{component-L3-JfOFz.js → component-DRotf1hl.js} +19 -18
- package/dist/{component-L3-JfOFz.js.map → component-DRotf1hl.js.map} +1 -1
- package/dist/component.es.mjs +1 -1
- package/dist/concurrency/errors.d.ts +29 -0
- package/dist/concurrency/errors.d.ts.map +1 -0
- package/dist/concurrency/high-level.d.ts +85 -0
- package/dist/concurrency/high-level.d.ts.map +1 -0
- package/dist/concurrency/index.d.ts +19 -0
- package/dist/concurrency/index.d.ts.map +1 -0
- package/dist/concurrency/internal.d.ts +26 -0
- package/dist/concurrency/internal.d.ts.map +1 -0
- package/dist/concurrency/pipeline.d.ts +30 -0
- package/dist/concurrency/pipeline.d.ts.map +1 -0
- package/dist/concurrency/pool.d.ts +48 -0
- package/dist/concurrency/pool.d.ts.map +1 -0
- package/dist/concurrency/reactive.d.ts +107 -0
- package/dist/concurrency/reactive.d.ts.map +1 -0
- package/dist/concurrency/rpc.d.ts +46 -0
- package/dist/concurrency/rpc.d.ts.map +1 -0
- package/dist/concurrency/support.d.ts +23 -0
- package/dist/concurrency/support.d.ts.map +1 -0
- package/dist/concurrency/task.d.ts +31 -0
- package/dist/concurrency/task.d.ts.map +1 -0
- package/dist/concurrency/types.d.ts +343 -0
- package/dist/concurrency/types.d.ts.map +1 -0
- package/dist/concurrency-BU1wPEsZ.js +826 -0
- package/dist/concurrency-BU1wPEsZ.js.map +1 -0
- package/dist/concurrency.es.mjs +29 -0
- package/dist/{constraints-D5RHQLmP.js → constraints-CqjhmpZC.js} +1 -1
- package/dist/{constraints-D5RHQLmP.js.map → constraints-CqjhmpZC.js.map} +1 -1
- package/dist/core-CongXJuo.js +87 -0
- package/dist/core-CongXJuo.js.map +1 -0
- package/dist/{custom-directives-Dr4C5lVV.js → custom-directives-BjFzFhuf.js} +1 -1
- package/dist/{custom-directives-Dr4C5lVV.js.map → custom-directives-BjFzFhuf.js.map} +1 -1
- package/dist/{devtools-BhB2iDPT.js → devtools-C5FExMwv.js} +2 -2
- package/dist/{devtools-BhB2iDPT.js.map → devtools-C5FExMwv.js.map} +1 -1
- package/dist/devtools.es.mjs +1 -1
- package/dist/{dnd-NwZBYh4l.js → dnd-BAqzPlSo.js} +1 -1
- package/dist/{dnd-NwZBYh4l.js.map → dnd-BAqzPlSo.js.map} +1 -1
- package/dist/dnd.es.mjs +1 -1
- package/dist/effect-Cc51IH91.js +87 -0
- package/dist/effect-Cc51IH91.js.map +1 -0
- package/dist/{env-CTdvLaH2.js → env-PvwYHnJq.js} +1 -1
- package/dist/{env-CTdvLaH2.js.map → env-PvwYHnJq.js.map} +1 -1
- package/dist/{forms-UhAeJEoO.js → forms-Dx1Scvh0.js} +41 -40
- package/dist/{forms-UhAeJEoO.js.map → forms-Dx1Scvh0.js.map} +1 -1
- package/dist/forms.es.mjs +1 -1
- package/dist/full.d.ts +2 -0
- package/dist/full.d.ts.map +1 -1
- package/dist/full.es.mjs +243 -214
- package/dist/full.iife.js +117 -33
- package/dist/full.iife.js.map +1 -1
- package/dist/full.umd.js +117 -33
- package/dist/full.umd.js.map +1 -1
- package/dist/{i18n-kuF6Ekj6.js → i18n-Cazyk9RD.js} +3 -3
- package/dist/{i18n-kuF6Ekj6.js.map → i18n-Cazyk9RD.js.map} +1 -1
- package/dist/i18n.es.mjs +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.es.mjs +270 -241
- package/dist/media/observers.d.ts.map +1 -1
- package/dist/{media-D4zLj9t-.js → media-dAKIGPk3.js} +3 -3
- package/dist/{media-D4zLj9t-.js.map → media-dAKIGPk3.js.map} +1 -1
- package/dist/media.es.mjs +1 -1
- package/dist/{motion-BJsAuULb.js → motion-BBMso9Ir.js} +1 -1
- package/dist/{motion-BJsAuULb.js.map → motion-BBMso9Ir.js.map} +1 -1
- package/dist/motion.es.mjs +1 -1
- package/dist/{mount-B-JvH6Y0.js → mount-C8O2vXkQ.js} +10 -9
- package/dist/{mount-B-JvH6Y0.js.map → mount-C8O2vXkQ.js.map} +1 -1
- package/dist/{platform-Dw2gE3zI.js → platform-BPHIXbw8.js} +17 -16
- package/dist/{platform-Dw2gE3zI.js.map → platform-BPHIXbw8.js.map} +1 -1
- package/dist/platform.es.mjs +1 -1
- package/dist/{plugin-C2WuC8SF.js → plugin-DjTqWg-P.js} +2 -2
- package/dist/{plugin-C2WuC8SF.js.map → plugin-DjTqWg-P.js.map} +1 -1
- package/dist/plugin.es.mjs +1 -1
- package/dist/reactive/watch.d.ts.map +1 -1
- package/dist/reactive/websocket.d.ts +6 -3
- package/dist/reactive/websocket.d.ts.map +1 -1
- package/dist/{reactive-BjpLkclt.js → reactive-BAd2hfl8.js} +436 -449
- package/dist/reactive-BAd2hfl8.js.map +1 -0
- package/dist/reactive.es.mjs +42 -40
- package/dist/readonly-C0ZwS1Tf.js +35 -0
- package/dist/readonly-C0ZwS1Tf.js.map +1 -0
- package/dist/{registry-B08iilIh.js → registry-Cr6VH8CR.js} +1 -1
- package/dist/{registry-B08iilIh.js.map → registry-Cr6VH8CR.js.map} +1 -1
- package/dist/{router-BieVwgci.js → router-CCepRMpC.js} +29 -28
- package/dist/{router-BieVwgci.js.map → router-CCepRMpC.js.map} +1 -1
- package/dist/router.es.mjs +1 -1
- package/dist/{ssr-CrGSJySz.js → ssr-D-1IPcfw.js} +4 -4
- package/dist/{ssr-CrGSJySz.js.map → ssr-D-1IPcfw.js.map} +1 -1
- package/dist/ssr.es.mjs +1 -1
- package/dist/{store-CY6sjTW3.js → store-CjmEeX9-.js} +6 -6
- package/dist/{store-CY6sjTW3.js.map → store-CjmEeX9-.js.map} +1 -1
- package/dist/store.es.mjs +2 -2
- package/dist/{testing-UjAtu9aQ.js → testing-TdfaL7VE.js} +7 -7
- package/dist/{testing-UjAtu9aQ.js.map → testing-TdfaL7VE.js.map} +1 -1
- package/dist/testing.es.mjs +1 -1
- package/dist/{untrack-D0fnO5k2.js → untrack-bjWDNdyE.js} +11 -10
- package/dist/{untrack-D0fnO5k2.js.map → untrack-bjWDNdyE.js.map} +1 -1
- package/dist/view.es.mjs +12 -11
- package/package.json +17 -13
- package/src/concurrency/errors.ts +57 -0
- package/src/concurrency/high-level.ts +387 -0
- package/src/concurrency/index.ts +63 -0
- package/src/concurrency/internal.ts +100 -0
- package/src/concurrency/pipeline.ts +133 -0
- package/src/concurrency/pool.ts +450 -0
- package/src/concurrency/reactive.ts +339 -0
- package/src/concurrency/rpc.ts +380 -0
- package/src/concurrency/support.ts +44 -0
- package/src/concurrency/task.ts +318 -0
- package/src/concurrency/types.ts +431 -0
- package/src/full.ts +65 -0
- package/src/index.ts +3 -0
- package/src/media/observers.ts +5 -8
- package/src/reactive/watch.ts +10 -9
- package/src/reactive/websocket.ts +31 -8
- package/dist/core-DdtZHzsS.js +0 -168
- package/dist/core-DdtZHzsS.js.map +0 -1
- package/dist/reactive-BjpLkclt.js.map +0 -1
package/src/concurrency/high-level.ts (new file)
@@ -0,0 +1,387 @@
/**
 * Thin high-level helpers layered on top of the explicit worker primitives.
 *
 * @module bquery/concurrency
 */

import { createTaskPool } from './pool';
import { runTask } from './task';
import { validateTaskHandler } from './internal';
import type {
  ParallelCollectionOptions,
  ParallelMapHandler,
  ParallelMapOptions,
  ParallelOptions,
  ParallelPredicateHandler,
  ParallelReduceHandler,
  ParallelResults,
  ParallelTask,
  TaskPool,
  TaskRunOptions,
  WorkerTaskHandler,
} from './types';

interface SerializedParallelTask {
  handlerSource: string;
  input: unknown;
}

interface SerializedChunk<TInput = unknown> {
  items: Array<{
    index: number;
    value: TInput;
  }>;
  handlerSource: string;
}

interface IndexedMapResult<TResult> {
  index: number;
  value: TResult;
}

const executeSerializedTask = async (job: SerializedParallelTask): Promise<unknown> => {
  const revive = new Function(`return (${job.handlerSource});`);
  const handler = revive() as ((input: unknown) => unknown | Promise<unknown>) | undefined;

  if (typeof handler !== 'function') {
    throw new TypeError('The serialized task handler did not revive as a function.');
  }

  return await handler(job.input);
};

interface SerializedReduceJob<TInput = unknown, TAccumulator = unknown> {
  initialValue: TAccumulator;
  reducerSource: string;
  values: readonly TInput[];
}

const executeSerializedChunk = async (
  job: SerializedChunk
): Promise<Array<IndexedMapResult<unknown>>> => {
  const revive = new Function(`return (${job.handlerSource});`);
  const handler = revive() as
    | ((value: unknown, index: number) => unknown | Promise<unknown>)
    | undefined;

  if (typeof handler !== 'function') {
    throw new TypeError('The serialized collection handler did not revive as a function.');
  }

  const results: Array<IndexedMapResult<unknown>> = [];
  for (const item of job.items) {
    results.push({
      index: item.index,
      value: await handler(item.value, item.index),
    });
  }

  return results;
};

const executeSerializedReduce = async (job: SerializedReduceJob): Promise<unknown> => {
  const revive = new Function(`return (${job.reducerSource});`);
  const reducer = revive() as
    | ((accumulator: unknown, value: unknown, index: number) => unknown | Promise<unknown>)
    | undefined;

  if (typeof reducer !== 'function') {
    throw new TypeError('The serialized reducer did not revive as a function.');
  }

  let accumulator = job.initialValue;
  for (let index = 0; index < job.values.length; index++) {
    accumulator = await reducer(accumulator, job.values[index], index);
  }

  return accumulator;
};

const normalizeBatchSize = (batchSize: number | undefined, label: string): number => {
  if (batchSize === undefined) {
    return 1;
  }

  if (!Number.isInteger(batchSize) || batchSize < 1) {
    throw new RangeError(`${label} batchSize must be a positive integer.`);
  }

  return batchSize;
};

const createSerializedTaskPool = (
  options: ParallelOptions
): TaskPool<SerializedParallelTask, unknown> => {
  return createTaskPool(executeSerializedTask, options);
};

const serializeTask = <TInput, TResult>(
  task: ParallelTask<TInput, TResult>
): SerializedParallelTask => ({
  handlerSource: validateTaskHandler(task.handler),
  input: task.input,
});

const runChunkedHandler = async <TInput, TResult>(
  values: readonly TInput[],
  handler: (value: TInput, index: number) => TResult | Promise<TResult>,
  options: ParallelCollectionOptions = {},
  label: string
): Promise<TResult[]> => {
  if (values.length === 0) {
    return [];
  }

  const handlerSource = validateTaskHandler(
    handler as unknown as WorkerTaskHandler<TInput, TResult>
  );
  const { batchSize, signal, ...poolOptions } = options;
  const normalizedBatchSize = normalizeBatchSize(batchSize, label);
  const pool = createTaskPool(executeSerializedChunk, poolOptions);
  const chunks: Array<SerializedChunk<TInput>> = [];

  for (let index = 0; index < values.length; index += normalizedBatchSize) {
    const end = Math.min(index + normalizedBatchSize, values.length);
    const items: SerializedChunk<TInput>['items'] = [];

    for (let itemIndex = index; itemIndex < end; itemIndex += 1) {
      items.push({
        index: itemIndex,
        value: values[itemIndex],
      });
    }

    chunks.push({ items, handlerSource });
  }

  try {
    const chunkResults = await Promise.all(
      chunks.map((chunk) => pool.run(chunk, signal ? { signal } : undefined))
    );
    const mapped = new Array<TResult>(values.length);

    for (const chunk of chunkResults) {
      for (const item of chunk) {
        mapped[item.index] = item.value as TResult;
      }
    }

    return mapped;
  } finally {
    pool.terminate();
  }
};

/**
 * Executes multiple standalone tasks in parallel using a bounded worker pool.
 *
 * @example
 * ```ts
 * import { parallel } from '@bquery/bquery/concurrency';
 *
 * const results = await parallel([
 *   { handler: (value: number) => value * 2, input: 5 },
 *   { handler: ({ a, b }: { a: number; b: number }) => a + b, input: { a: 1, b: 2 } },
 * ]);
 * ```
 */
export async function parallel<TTasks extends readonly ParallelTask[]>(
  tasks: TTasks,
  options: ParallelOptions = {}
): Promise<ParallelResults<TTasks>> {
  if (tasks.length === 0) {
    return [] as unknown as ParallelResults<TTasks>;
  }

  const pool = createSerializedTaskPool(options);

  try {
    const results = await Promise.all(
      tasks.map((task) => pool.run(serializeTask(task), task.options))
    );
    return results as ParallelResults<TTasks>;
  } finally {
    pool.terminate();
  }
}

/**
 * Executes tasks in sequential batches while each batch still uses parallel workers.
 *
 * This adapts `threadts-universal`'s batch helper to bQuery without colliding
 * with the reactive module's existing `batch()` export.
 *
 * @example
 * ```ts
 * import { batchTasks } from '@bquery/bquery/concurrency';
 *
 * const results = await batchTasks(
 *   [
 *     { handler: (value: number) => value * 2, input: 1 },
 *     { handler: (value: number) => value * 2, input: 2 },
 *     { handler: (value: number) => value * 2, input: 3 },
 *   ],
 *   2
 * );
 * ```
 */
export async function batchTasks<TTasks extends readonly ParallelTask[]>(
  tasks: TTasks,
  batchSize?: number,
  options: ParallelOptions = {}
): Promise<ParallelResults<TTasks>> {
  if (tasks.length === 0) {
    return [] as unknown as ParallelResults<TTasks>;
  }

  const normalizedBatchSize = normalizeBatchSize(batchSize, 'batchTasks');
  const pool = createSerializedTaskPool(options);
  const results: unknown[] = [];

  try {
    for (let index = 0; index < tasks.length; index += normalizedBatchSize) {
      const batch = tasks.slice(index, index + normalizedBatchSize);
      const batchResults = await Promise.all(
        batch.map((task) => pool.run(serializeTask(task), task.options))
      );
      results.push(...batchResults);
    }

    return results as ParallelResults<TTasks>;
  } finally {
    pool.terminate();
  }
}

/**
 * Maps an array in parallel using optional chunking on top of `createTaskPool()`.
 *
 * @example
 * ```ts
 * import { map } from '@bquery/bquery/concurrency';
 *
 * const results = await map([1, 2, 3], (value, index) => value + index, {
 *   batchSize: 2,
 *   concurrency: 2,
 * });
 * ```
 */
export async function map<TInput, TResult>(
  values: readonly TInput[],
  mapper: ParallelMapHandler<TInput, TResult>,
  options: ParallelMapOptions = {}
): Promise<TResult[]> {
  return runChunkedHandler(values, mapper, options, 'map');
}

/**
 * Filters an array in parallel using a standalone predicate with optional chunking.
 */
export async function filter<TInput>(
  values: readonly TInput[],
  predicate: ParallelPredicateHandler<TInput>,
  options: ParallelCollectionOptions = {}
): Promise<TInput[]> {
  const matches = await runChunkedHandler(values, predicate, options, 'filter');
  const filtered: TInput[] = [];

  for (let index = 0; index < values.length; index += 1) {
    if (!matches[index]) {
      continue;
    }

    if (index in values) {
      filtered.push(values[index] as TInput);
    } else {
      filtered.length += 1;
    }
  }

  return filtered;
}

/**
 * Returns whether at least one array item matches a standalone predicate.
 *
 * The current implementation evaluates predicate chunks explicitly and reduces
 * the final boolean result on the main thread instead of using hidden globals
 * or speculative worker cancellation.
 */
export async function some<TInput>(
  values: readonly TInput[],
  predicate: ParallelPredicateHandler<TInput>,
  options: ParallelCollectionOptions = {}
): Promise<boolean> {
  if (values.length === 0) {
    return false;
  }

  const matches = await runChunkedHandler(values, predicate, options, 'some');
  return matches.some(Boolean);
}

/**
 * Returns whether every array item matches a standalone predicate.
 *
 * The current implementation evaluates predicate chunks explicitly and reduces
 * the final boolean result on the main thread instead of using hidden globals
 * or speculative worker cancellation.
 */
export async function every<TInput>(
  values: readonly TInput[],
  predicate: ParallelPredicateHandler<TInput>,
  options: ParallelCollectionOptions = {}
): Promise<boolean> {
  if (values.length === 0) {
    return true;
  }

  const matches = await runChunkedHandler(values, predicate, options, 'every');
  return matches.every(Boolean);
}

/**
 * Finds the first array item that matches a standalone predicate.
 */
export async function find<TInput>(
  values: readonly TInput[],
  predicate: ParallelPredicateHandler<TInput>,
  options: ParallelCollectionOptions = {}
): Promise<TInput | undefined> {
  if (values.length === 0) {
    return undefined;
  }

  const matches = await runChunkedHandler(values, predicate, options, 'find');
  const index = matches.findIndex(Boolean);
  return index === -1 ? undefined : values[index];
}

/**
 * Reduces an array inside one isolated worker while preserving standard
 * left-to-right accumulator semantics.
 */
export async function reduce<TInput, TAccumulator>(
  values: readonly TInput[],
  reducer: ParallelReduceHandler<TAccumulator, TInput>,
  initialValue: TAccumulator,
  options: TaskRunOptions = {}
): Promise<TAccumulator> {
  if (values.length === 0) {
    return initialValue;
  }

  const reducerSource = validateTaskHandler(
    reducer as unknown as WorkerTaskHandler<unknown, unknown>
  );

  return runTask(
    executeSerializedReduce,
    {
      initialValue,
      reducerSource,
      values,
    },
    options
  ) as Promise<TAccumulator>;
}
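The helpers below `map()` ship without an `@example`, so here is a minimal usage sketch for `filter()`, `some()`, `find()`, and `reduce()`. It assumes the `@bquery/bquery/concurrency` subpath export declared in `src/concurrency/index.ts` (next hunk). Note that handlers are stringified by `validateTaskHandler()` and revived via `new Function()` inside a worker, so they must be standalone and cannot close over outer variables.

```ts
// Sketch only; assumes the '@bquery/bquery/concurrency' subpath export.
import { filter, find, reduce, some } from '@bquery/bquery/concurrency';

const values = [1, 2, 3, 4, 5];

// Predicates are serialized and revived in a worker: no closures allowed.
const evens = await filter(values, (value) => value % 2 === 0); // [2, 4]
const hasLarge = await some(values, (value) => value > 4); // true
const firstEven = await find(values, (value) => value % 2 === 0); // 2

// reduce() runs the whole left-to-right fold inside a single worker.
const sum = await reduce(values, (acc: number, value: number) => acc + value, 0); // 15
```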
package/src/concurrency/index.ts (new file)
@@ -0,0 +1,63 @@
/**
 * Optional concurrency helpers built on zero-build Web Workers.
 *
 * The concurrency surface intentionally stays browser-first and explicit:
 * worker tasks, RPC helpers, bounded pools, and thin high-level helpers
 * without decorators, hidden global runtimes, or build-time worker glue.
 *
 * @module bquery/concurrency
 */

export {
  TaskWorkerAbortError,
  TaskWorkerError,
  TaskWorkerSerializationError,
  TaskWorkerTimeoutError,
  TaskWorkerUnsupportedError,
} from './errors';
export { batchTasks, every, filter, find, map, parallel, reduce, some } from './high-level';
export { pipeline } from './pipeline';
export { createRpcPool, createTaskPool } from './pool';
export {
  createReactiveRpcPool,
  createReactiveRpcWorker,
  createReactiveTaskPool,
  createReactiveTaskWorker,
} from './reactive';
export { callWorkerMethod, createRpcWorker } from './rpc';
export { getConcurrencySupport, isConcurrencySupported } from './support';
export { createTaskWorker, runTask } from './task';

export type {
  CallWorkerMethodOptions,
  ConcurrencyPipeline,
  ConcurrencyPipelineOptions,
  ConcurrencySupport,
  CreateRpcPoolOptions,
  CreateRpcWorkerOptions,
  CreateTaskPoolOptions,
  CreateTaskWorkerOptions,
  ParallelCollectionOptions,
  ParallelMapHandler,
  ParallelMapOptions,
  ParallelOptions,
  ParallelPredicateHandler,
  ParallelReduceHandler,
  ParallelResults,
  ParallelTask,
  ReactiveRpcPool,
  ReactiveRpcWorker,
  ReactiveTaskPool,
  ReactiveTaskWorker,
  RpcPool,
  RpcWorker,
  RunTaskOptions,
  TaskPool,
  TaskRunOptions,
  TaskWorker,
  TaskWorkerErrorCode,
  TaskWorkerState,
  WorkerRpcHandler,
  WorkerRpcHandlers,
  WorkerTaskHandler,
} from './types';
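Since the surface is browser-first, a consumer will typically feature-detect before opting in. A hedged sketch, assuming `isConcurrencySupported()` returns a boolean as its name suggests:

```ts
// Sketch: fall back to a main-thread Array#map where workers are unavailable.
import { isConcurrencySupported, map } from '@bquery/bquery/concurrency';

async function doubleAll(values: number[]): Promise<number[]> {
  if (!isConcurrencySupported()) {
    // e.g. no Worker/Blob support in this environment
    return values.map((value) => value * 2);
  }

  return map(values, (value) => value * 2, { concurrency: 2 });
}
```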
package/src/concurrency/internal.ts (new file)
@@ -0,0 +1,100 @@
/**
 * Internal helpers shared across concurrency implementations.
 *
 * @internal
 */

import { TaskWorkerError, TaskWorkerSerializationError, TaskWorkerTimeoutError } from './errors';
import type { TaskWorkerErrorCode, WorkerTaskHandler } from './types';

/** @internal */
export interface SerializedWorkerError {
  /** Untrusted serialized worker payload; validate against TaskWorkerErrorCode before use. */
  code?: string;
  message?: string;
  name?: string;
  stack?: string;
}

const TASK_WORKER_ERROR_CODES = new Set<TaskWorkerErrorCode>([
  'ABORT',
  'BUSY',
  'METHOD_NOT_FOUND',
  'QUEUE_CLEARED',
  'QUEUE_FULL',
  'SERIALIZATION',
  'TERMINATED',
  'TIMEOUT',
  'UNSUPPORTED',
  'WORKER',
]);

const NATIVE_FUNCTION_SOURCE_RE = /\{\s*\[native code\]\s*\}$/u;

/** @internal */
export const isTaskWorkerErrorCode = (code: string | undefined): code is TaskWorkerErrorCode => {
  return typeof code === 'string' && TASK_WORKER_ERROR_CODES.has(code as TaskWorkerErrorCode);
};

/** @internal */
export const normalizeTimeout = (timeout?: number): number | undefined => {
  if (typeof timeout !== 'number' || !Number.isFinite(timeout) || timeout <= 0) {
    return undefined;
  }

  return timeout;
};

/** @internal */
export const validateTaskHandler = <TInput, TResult>(
  handler: WorkerTaskHandler<TInput, TResult>
): string => {
  const source = Function.prototype.toString.call(handler).trim();

  if (!source || NATIVE_FUNCTION_SOURCE_RE.test(source)) {
    throw new TaskWorkerSerializationError(
      'Task handlers must be standalone user-defined functions or arrow functions.'
    );
  }

  try {
    const revived = new Function(`return (${source});`)() as unknown;
    if (typeof revived !== 'function') {
      throw new TypeError('Task handler did not revive as a function.');
    }
  } catch (error) {
    throw new TaskWorkerSerializationError(
      'Task handlers must be standalone functions that can be reconstructed in a worker context.',
      error
    );
  }

  return source;
};

/** @internal */
export const createWorkerInstance = (scriptSource: string, name?: string): Worker => {
  const blob = new Blob([scriptSource], { type: 'text/javascript' });
  const scriptUrl = URL.createObjectURL(blob);

  try {
    return new Worker(scriptUrl, name ? { name } : undefined);
  } finally {
    URL.revokeObjectURL(scriptUrl);
  }
};

/** @internal */
export const restoreWorkerError = (payload: SerializedWorkerError | undefined): TaskWorkerError => {
  const message = payload?.message || 'Worker task failed.';
  const code = isTaskWorkerErrorCode(payload?.code) ? payload.code : 'WORKER';
  const error =
    code === 'TIMEOUT' ? new TaskWorkerTimeoutError(message) : new TaskWorkerError(message, code);

  error.name = payload?.name || error.name;
  if (payload?.stack) {
    error.stack = payload.stack;
  }

  return error;
};
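`createWorkerInstance()` is the zero-build core: it turns an inline script string into a `Blob`, mints an object URL, and constructs the `Worker` from it, revoking the URL immediately after construction. A standalone sketch of the same pattern using plain web APIs (not the `@internal` helper itself):

```ts
// Sketch of the inline-script -> Blob -> object URL -> Worker technique.
const source = `
  self.onmessage = (event) => {
    self.postMessage(event.data * 2);
  };
`;

const url = URL.createObjectURL(new Blob([source], { type: 'text/javascript' }));
try {
  const worker = new Worker(url);
  worker.onmessage = (event) => console.log(event.data); // 42
  worker.postMessage(21);
} finally {
  // Mirrors the helper above: the blob URL is dereferenced during
  // construction, so it can be revoked right away.
  URL.revokeObjectURL(url);
}
```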
package/src/concurrency/pipeline.ts (new file)
@@ -0,0 +1,133 @@
/**
 * Optional fluent pipeline helpers layered on top of the explicit collection helpers.
 *
 * @module bquery/concurrency
 */

import { every, filter, find, map, reduce, some } from './high-level';
import type {
  ConcurrencyPipeline,
  ConcurrencyPipelineOptions,
  ParallelCollectionOptions,
  ParallelMapHandler,
  ParallelPredicateHandler,
  ParallelReduceHandler,
  TaskRunOptions,
} from './types';

const mergeCollectionOptions = (
  defaults: ConcurrencyPipelineOptions,
  overrides: ParallelCollectionOptions = {}
): ParallelCollectionOptions => ({
  ...defaults,
  ...overrides,
});

const mergeTaskRunOptions = (
  defaults: ConcurrencyPipelineOptions,
  overrides: TaskRunOptions = {}
): TaskRunOptions => ({
  signal: 'signal' in overrides ? overrides.signal : defaults.signal,
  timeout: 'timeout' in overrides ? overrides.timeout : defaults.timeout,
  transfer: overrides.transfer,
});

class FluentConcurrencyPipeline<TValue> implements ConcurrencyPipeline<TValue> {
  constructor(
    private readonly valuesPromise: Promise<readonly TValue[]>,
    private readonly defaults: ConcurrencyPipelineOptions
  ) {}

  private createNext<TNext>(
    transform: (values: readonly TValue[]) => Promise<readonly TNext[]>
  ): ConcurrencyPipeline<TNext> {
    return new FluentConcurrencyPipeline(
      this.valuesPromise.then((values) => transform(values)),
      this.defaults
    );
  }

  map<TResult>(
    mapper: ParallelMapHandler<TValue, TResult>,
    options?: ParallelCollectionOptions
  ): ConcurrencyPipeline<TResult> {
    const resolvedOptions = mergeCollectionOptions(this.defaults, options);
    return this.createNext((values) => map(values, mapper, resolvedOptions));
  }

  filter(
    predicate: ParallelPredicateHandler<TValue>,
    options?: ParallelCollectionOptions
  ): ConcurrencyPipeline<TValue> {
    const resolvedOptions = mergeCollectionOptions(this.defaults, options);
    return this.createNext((values) => filter(values, predicate, resolvedOptions));
  }

  toArray(): Promise<TValue[]> {
    return this.valuesPromise.then((values) => values.slice());
  }

  some(
    predicate: ParallelPredicateHandler<TValue>,
    options?: ParallelCollectionOptions
  ): Promise<boolean> {
    const resolvedOptions = mergeCollectionOptions(this.defaults, options);
    return this.valuesPromise.then((values) => some(values, predicate, resolvedOptions));
  }

  every(
    predicate: ParallelPredicateHandler<TValue>,
    options?: ParallelCollectionOptions
  ): Promise<boolean> {
    const resolvedOptions = mergeCollectionOptions(this.defaults, options);
    return this.valuesPromise.then((values) => every(values, predicate, resolvedOptions));
  }

  find(
    predicate: ParallelPredicateHandler<TValue>,
    options?: ParallelCollectionOptions
  ): Promise<TValue | undefined> {
    const resolvedOptions = mergeCollectionOptions(this.defaults, options);
    return this.valuesPromise.then((values) => find(values, predicate, resolvedOptions));
  }

  reduce<TAccumulator>(
    reducer: ParallelReduceHandler<TAccumulator, TValue>,
    initialValue: TAccumulator,
    options?: TaskRunOptions
  ): Promise<TAccumulator> {
    const resolvedOptions = mergeTaskRunOptions(this.defaults, options);
    return this.valuesPromise.then((values) =>
      reduce(values, reducer, initialValue, resolvedOptions)
    );
  }
}

/**
 * Creates an optional fluent pipeline over the existing concurrency collection helpers.
 *
 * The pipeline itself does not create hidden global workers or proxies. Each stage
 * delegates to the already explicit `map()`, `filter()`, `some()`, `every()`,
 * `find()`, and `reduce()` helpers when the pipeline is executed.
 *
 * @example
 * ```ts
 * import { pipeline } from '@bquery/bquery/concurrency';
 *
 * const results = await pipeline([1, 2, 3, 4], {
 *   batchSize: 2,
 *   concurrency: 2,
 * })
 *   .map((value) => value * 2)
 *   .filter((value) => value > 4)
 *   .toArray();
 *
 * console.log(results); // [6, 8]
 * ```
 */
export function pipeline<TValue>(
  values: readonly TValue[],
  options: ConcurrencyPipelineOptions = {}
): ConcurrencyPipeline<TValue> {
  return new FluentConcurrencyPipeline(Promise.resolve(values.slice()), options);
}
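One subtlety in the option merging above: stage-level options win over pipeline defaults, and `mergeTaskRunOptions()` only ever takes `transfer` from the per-call `reduce()` options, never from the defaults. A short sketch, assuming `ConcurrencyPipelineOptions` carries the `concurrency`, `batchSize`, `signal`, and `timeout` fields implied by the two merge helpers:

```ts
// Sketch of per-stage overrides beating pipeline-level defaults.
import { pipeline } from '@bquery/bquery/concurrency';

const controller = new AbortController();

const total = await pipeline([1, 2, 3, 4], { concurrency: 2, signal: controller.signal })
  // batchSize here overrides whatever the pipeline defaults say for this stage only.
  .map((value) => value * value, { batchSize: 2 })
  // timeout applies to the single reduce worker; signal is inherited from the defaults.
  .reduce((acc: number, value: number) => acc + value, 0, { timeout: 5_000 });
```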