batchkit 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +267 -0
- package/dist/batch.d.ts +3 -0
- package/dist/batch.d.ts.map +1 -0
- package/dist/errors.d.ts +4 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +320 -0
- package/dist/indexed.d.ts +2 -0
- package/dist/indexed.d.ts.map +1 -0
- package/dist/match.d.ts +6 -0
- package/dist/match.d.ts.map +1 -0
- package/dist/schedulers.d.ts +8 -0
- package/dist/schedulers.d.ts.map +1 -0
- package/dist/trace.d.ts +6 -0
- package/dist/trace.d.ts.map +1 -0
- package/dist/types.d.ts +61 -0
- package/dist/types.d.ts.map +1 -0
- package/package.json +35 -0
- package/src/batch.ts +301 -0
- package/src/errors.ts +6 -0
- package/src/index.ts +16 -0
- package/src/indexed.ts +1 -0
- package/src/match.ts +37 -0
- package/src/schedulers.ts +42 -0
- package/src/trace.ts +25 -0
- package/src/types.ts +57 -0
package/README.md
ADDED
@@ -0,0 +1,267 @@
# batchkit

Automatic batching for async operations.

## Installation

```bash
npm install batchkit
# or
bun add batchkit
```

## Quick Start

```typescript
import { batch } from 'batchkit'

const users = batch(
  (ids) => db.users.findMany({ where: { id: { in: ids } } }),
  'id'
)

// These calls are automatically batched into ONE database query
const [alice, bob] = await Promise.all([
  users.get(1),
  users.get(2),
])
```

That's it. Two arguments:
1. A function that fetches many items at once
2. The field to match results by

## API

### `batch(fn, match, options?)`

Creates a batcher.

```typescript
const users = batch(
  // The batch function - receives keys and an AbortSignal
  async (ids: number[], signal: AbortSignal) => {
    return api.getUsers(ids, { signal })
  },
  // How to match results - just the field name
  'id',
  // Optional configuration
  {
    wait: 10,      // ms to wait before dispatch (default: 0 = microtask)
    max: 100,      // max batch size
    name: 'users', // for debugging
  }
)
```

### `batcher.get(key)` / `batcher.get(keys)`

Get one or many items:

```typescript
// Single item
const user = await users.get(1)

// Multiple items (batched together)
const [a, b] = await Promise.all([users.get(1), users.get(2)])

// Array syntax
const team = await users.get([1, 2, 3, 4, 5])
```

### `batcher.get(key, { signal })`

Cancel a request:

```typescript
const controller = new AbortController()
const user = await users.get(1, { signal: controller.signal })

// Later...
controller.abort() // Rejects with AbortError
```

### `batcher.flush()`

Execute pending batch immediately:

```typescript
users.get(1)
users.get(2)
await users.flush() // Don't wait for scheduler
```

### `batcher.abort()`

Abort the in-flight batch:

```typescript
users.abort() // All pending requests reject with AbortError
```

## Matching Results

### By Field Name (most common)

```typescript
batch(fn, 'id')
// Matches results where result.id === requestedKey
```

### For Record/Object Responses

```typescript
import { batch, indexed } from 'batchkit'

const users = batch(
  async (ids) => {
    // Returns { "1": {...}, "2": {...} }
    return fetchUsersAsRecord(ids)
  },
  indexed
)
```

### Custom Matching

```typescript
batch(
  fn,
  (results, key) => results.find(r => r.externalId === key)
)
```
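
If the batch function succeeds but no result matches a requested key, that key's promise rejects with `BatchError`, which the package also exports. A minimal sketch of handling it (`fetchUsers` here is a placeholder for your own fetcher):

```typescript
import { batch, BatchError } from 'batchkit'

const users = batch((ids: number[]) => fetchUsers(ids), 'id')

try {
  await users.get(999)
} catch (err) {
  if (err instanceof BatchError) {
    // The batch resolved, but nothing matched key 999
    console.warn(err.message) // "No result for key: 999"
  } else {
    throw err // abort, network failure, etc.
  }
}
```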

## Scheduling

### Default: Microtask

Batches all calls within the same event loop tick:

```typescript
const users = batch(fn, 'id')

// All batched into ONE request
users.get(1)
users.get(2)
users.get(3)
```

### Delayed

Wait before dispatching:

```typescript
batch(fn, 'id', { wait: 10 }) // 10ms window
```

### Animation Frame

Sync with rendering:

```typescript
import { batch, onAnimationFrame } from 'batchkit'

batch(fn, 'id', { schedule: onAnimationFrame })
```

### Idle

Background/low-priority work:

```typescript
import { batch, onIdle } from 'batchkit'

batch(fn, 'id', { schedule: onIdle({ timeout: 100 }) })
```
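
The `schedule` option accepts any function of the exported `Scheduler` type: it receives a `dispatch` callback and returns a cancel function. A minimal sketch of a custom scheduler that aligns dispatches to 50ms boundaries (illustrative only, not shipped with the package):

```typescript
import { batch, type Scheduler } from 'batchkit'

// Fire on the next 50ms tick; the returned function cancels the pending dispatch.
const every50ms: Scheduler = (dispatch) => {
  const id = setTimeout(dispatch, 50 - (Date.now() % 50))
  return () => clearTimeout(id)
}

const users = batch(fn, 'id', { schedule: every50ms })
```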

## Deduplication

Duplicate keys in the same batch are automatically deduplicated:

```typescript
// Only ONE request for id=1
await Promise.all([
  users.get(1),
  users.get(1),
  users.get(1),
])
```

For complex keys, provide a key function:

```typescript
batch(fn, match, {
  key: (query) => query.id // Dedupe by query.id
})
```

## Tracing

Debug batch behavior:

```typescript
batch(fn, 'id', {
  name: 'users',
  trace: (event) => {
    console.log(event.type, event)
    // 'get', 'schedule', 'dispatch', 'resolve', 'error', 'abort'
  }
})
```
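
Every event also carries a `timestamp` from `performance.now()`, `dispatch` events list the deduplicated `keys`, and `resolve` events include the batch `duration` in milliseconds. A small sketch of a handler that flags slow batches (the 200ms threshold is arbitrary):

```typescript
batch(fn, 'id', {
  name: 'users',
  trace: (event) => {
    if (event.type === 'dispatch') {
      console.debug(`${event.batchId}: ${event.keys.length} unique keys`)
    }
    if (event.type === 'resolve' && event.duration > 200) {
      console.warn(`slow batch ${event.batchId}: ${event.duration.toFixed(1)}ms`)
    }
  }
})
```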

## Examples

### React + TanStack Query

```typescript
import { batch } from 'batchkit'
import { useQuery } from '@tanstack/react-query'

const users = batch(
  (ids, signal) => fetch(`/api/users?ids=${ids.join(',')}`, { signal }).then(r => r.json()),
  'id'
)

function UserAvatar({ userId }: { userId: string }) {
  const { data } = useQuery({
    queryKey: ['user', userId],
    queryFn: ({ signal }) => users.get(userId, { signal })
  })

  return <img src={data?.avatar} />
}

// Rendering 100 UserAvatars = 1 HTTP request
```

### API with Rate Limits

```typescript
const products = batch(
  (ids) => shopify.products.list({ ids }),
  'id',
  { max: 50 } // Shopify's limit
)

// 200 product requests = 4 API calls (50 each)
```

## TypeScript

Full type inference:

```typescript
type User = { id: number; name: string }

const users = batch(
  async (ids: number[]): Promise<User[]> => fetchUsers(ids),
  'id'
)

const user = await users.get(1) // user: User
const many = await users.get([1, 2]) // many: User[]
```

## License

MIT
package/dist/batch.d.ts
ADDED
package/dist/batch.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"batch.d.ts","sourceRoot":"","sources":["../src/batch.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EACV,OAAO,EACP,OAAO,EACP,YAAY,EAEZ,KAAK,EAGN,MAAM,SAAS,CAAC;AAEjB,wBAAgB,KAAK,CAAC,CAAC,EAAE,CAAC,EACxB,EAAE,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,EACjB,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAClB,OAAO,GAAE,YAAY,CAAC,CAAC,CAAM,GAC5B,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CA0Rf"}
package/dist/errors.d.ts
ADDED
package/dist/errors.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AAAA,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,EAAE,MAAM;CAI5B"}
package/dist/index.d.ts
ADDED
@@ -0,0 +1,6 @@
export { batch } from './batch';
export { BatchError } from './errors';
export { indexed } from './indexed';
export { onAnimationFrame, onIdle } from './schedulers';
export type { Batcher, BatchFn, BatchOptions, GetOptions, Match, MatchFn, Scheduler, TraceEvent, TraceHandler, } from './types';
//# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AACtC,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACpC,OAAO,EAAE,gBAAgB,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAExD,YAAY,EACV,OAAO,EACP,OAAO,EACP,YAAY,EACZ,UAAU,EACV,KAAK,EACL,OAAO,EACP,SAAS,EACT,UAAU,EACV,YAAY,GACb,MAAM,SAAS,CAAC"}
package/dist/index.js
ADDED
@@ -0,0 +1,320 @@
// src/errors.ts
class BatchError extends Error {
  constructor(message) {
    super(message);
    this.name = "BatchError";
  }
}

// src/indexed.ts
var indexed = Symbol("indexed");

// src/match.ts
function isIndexed(match) {
  return match === indexed;
}
function isKeyMatch(match) {
  return typeof match === "string";
}
function normalizeMatch(match) {
  if (isIndexed(match)) {
    return null;
  }
  if (isKeyMatch(match)) {
    const key = match;
    return (results, requestedKey) => {
      return results.find((item) => item[key] === requestedKey);
    };
  }
  return match;
}
function createIndexedMatcher() {
  return (results, key) => results[String(key)];
}

// src/schedulers.ts
var microtask = (dispatch) => {
  let cancelled = false;
  queueMicrotask(() => {
    if (!cancelled) {
      dispatch();
    }
  });
  return () => {
    cancelled = true;
  };
};
function wait(ms) {
  return (dispatch) => {
    const id = setTimeout(dispatch, ms);
    return () => clearTimeout(id);
  };
}
var onAnimationFrame = (dispatch) => {
  const id = requestAnimationFrame(dispatch);
  return () => cancelAnimationFrame(id);
};
function onIdle(options) {
  return (dispatch) => {
    if (typeof requestIdleCallback !== "undefined") {
      const id2 = requestIdleCallback(dispatch, options?.timeout ? { timeout: options.timeout } : undefined);
      return () => cancelIdleCallback(id2);
    }
    const id = setTimeout(dispatch, options?.timeout ?? 1);
    return () => clearTimeout(id);
  };
}

// src/trace.ts
function createTracer(name, handler) {
  let batchCounter = 0;
  function emit(event) {
    if (!handler)
      return;
    const fullEvent = {
      ...event,
      timestamp: performance.now()
    };
    handler(fullEvent);
  }
  function nextBatchId() {
    return `${name ?? "batch"}-${++batchCounter}`;
  }
  return { emit, nextBatchId };
}

// src/batch.ts
function batch(fn, match, options = {}) {
  const {
    wait: waitMs,
    schedule,
    max,
    key: keyFn = (k) => k,
    name,
    trace: traceHandler
  } = options;
  const scheduler = schedule ?? (waitMs ? wait(waitMs) : microtask);
  const tracer = createTracer(name, traceHandler);
  const matchFn = normalizeMatch(match);
  const isIndexedMatch = isIndexed(match);
  const indexedMatcher = isIndexedMatch ? createIndexedMatcher() : null;
  let queue = [];
  const pendingKeys = new Set;
  let cleanup = null;
  let isScheduled = false;
  let currentAbortController = null;
  function scheduleDispatch() {
    if (isScheduled || queue.length === 0)
      return;
    isScheduled = true;
    const batchId = tracer.nextBatchId();
    tracer.emit({
      type: "schedule",
      batchId,
      size: queue.length
    });
    cleanup = scheduler(() => {
      isScheduled = false;
      dispatch(batchId);
    });
  }
  async function dispatch(batchId) {
    const activeQueue = queue.filter((req) => !req.aborted);
    if (activeQueue.length === 0) {
      queue = [];
      pendingKeys.clear();
      return;
    }
    if (cleanup) {
      cleanup();
      cleanup = null;
    }
    isScheduled = false;
    const batch2 = activeQueue;
    queue = [];
    pendingKeys.clear();
    const chunks = [];
    if (max && max > 0) {
      for (let i = 0;i < batch2.length; i += max) {
        chunks.push(batch2.slice(i, i + max));
      }
    } else {
      chunks.push(batch2);
    }
    for (let i = 0;i < chunks.length; i++) {
      const chunkBatchId = i === 0 ? batchId ?? tracer.nextBatchId() : tracer.nextBatchId();
      await processChunk(chunks[i], chunkBatchId);
    }
  }
  async function processChunk(chunk, batchId) {
    const keyToRequests = new Map;
    const uniqueKeys = [];
    for (const request of chunk) {
      if (request.aborted)
        continue;
      const cacheKey = keyFn(request.key);
      if (!keyToRequests.has(cacheKey)) {
        keyToRequests.set(cacheKey, []);
        uniqueKeys.push(request.key);
      }
      keyToRequests.get(cacheKey).push(request);
    }
    if (uniqueKeys.length === 0)
      return;
    tracer.emit({
      type: "dispatch",
      batchId,
      keys: uniqueKeys
    });
    currentAbortController = new AbortController;
    const signal = currentAbortController.signal;
    const startedAt = performance.now();
    try {
      const results = await fn(uniqueKeys, signal);
      const duration = performance.now() - startedAt;
      if (signal.aborted) {
        tracer.emit({ type: "abort", batchId });
        return;
      }
      tracer.emit({
        type: "resolve",
        batchId,
        duration
      });
      if (isIndexedMatch && indexedMatcher) {
        const recordResults = results;
        for (const key of uniqueKeys) {
          const cacheKey = keyFn(key);
          const requests = keyToRequests.get(cacheKey);
          const value = indexedMatcher(recordResults, key);
          for (const request of requests) {
            if (request.aborted)
              continue;
            if (value === undefined) {
              request.reject(new BatchError(`No result for key: ${String(key)}`));
            } else {
              request.resolve(value);
            }
          }
        }
      } else if (matchFn) {
        const arrayResults = results;
        if (!Array.isArray(arrayResults)) {
          throw new BatchError("Batch function returned a non-array result. Use `indexed` for Record responses.");
        }
        for (const key of uniqueKeys) {
          const cacheKey = keyFn(key);
          const requests = keyToRequests.get(cacheKey);
          const value = matchFn(arrayResults, key);
          for (const request of requests) {
            if (request.aborted)
              continue;
            if (value === undefined) {
              request.reject(new BatchError(`No result for key: ${String(key)}`));
            } else {
              request.resolve(value);
            }
          }
        }
      }
    } catch (error) {
      const err = error instanceof Error ? error : new Error(String(error));
      if (err.name === "AbortError" || signal.aborted) {
        tracer.emit({ type: "abort", batchId });
      } else {
        tracer.emit({
          type: "error",
          batchId,
          error: err
        });
      }
      for (const requests of keyToRequests.values()) {
        for (const request of requests) {
          if (!request.aborted) {
            request.reject(err);
          }
        }
      }
    } finally {
      currentAbortController = null;
    }
  }
  function getSingle(key, options2) {
    const externalSignal = options2?.signal;
    tracer.emit({ type: "get", key });
    if (externalSignal?.aborted) {
      return Promise.reject(new DOMException("Aborted", "AbortError"));
    }
    const cacheKey = keyFn(key);
    if (pendingKeys.has(cacheKey)) {
      tracer.emit({ type: "dedup", key });
    } else {
      pendingKeys.add(cacheKey);
    }
    return new Promise((resolve, reject) => {
      const request = {
        key,
        resolve,
        reject,
        signal: externalSignal,
        aborted: false
      };
      queue.push(request);
      if (externalSignal) {
        const onAbort = () => {
          request.aborted = true;
          reject(new DOMException("Aborted", "AbortError"));
          const allAborted = queue.every((r) => r.aborted);
          if (allAborted && currentAbortController) {
            currentAbortController.abort();
          }
        };
        externalSignal.addEventListener("abort", onAbort, { once: true });
      }
      scheduleDispatch();
    });
  }
  function get(keyOrKeys, options2) {
    if (Array.isArray(keyOrKeys)) {
      return Promise.all(keyOrKeys.map((k) => getSingle(k, options2)));
    }
    return getSingle(keyOrKeys, options2);
  }
  async function flush() {
    if (cleanup) {
      cleanup();
      cleanup = null;
    }
    isScheduled = false;
    await dispatch();
  }
  function abort() {
    for (const request of queue) {
      request.aborted = true;
      request.reject(new DOMException("Aborted", "AbortError"));
    }
    queue = [];
    pendingKeys.clear();
    if (currentAbortController) {
      currentAbortController.abort();
    }
    if (cleanup) {
      cleanup();
      cleanup = null;
    }
    isScheduled = false;
  }
  return {
    get,
    flush,
    abort,
    name
  };
}
export {
  onIdle,
  onAnimationFrame,
  indexed,
  batch,
  BatchError
};
package/dist/indexed.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"indexed.d.ts","sourceRoot":"","sources":["../src/indexed.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,OAAO,EAAE,OAAO,MAA0B,CAAC"}
package/dist/match.d.ts
ADDED
@@ -0,0 +1,6 @@
import type { Match, MatchFn } from './types';
export declare function isIndexed<K, V>(match: Match<K, V>): match is symbol;
export declare function isKeyMatch<K, V>(match: Match<K, V>): match is keyof V;
export declare function normalizeMatch<K, V>(match: Match<K, V>): MatchFn<K, V> | null;
export declare function createIndexedMatcher<K, V>(): (results: Record<string, V>, key: K) => V | undefined;
//# sourceMappingURL=match.d.ts.map
package/dist/match.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"match.d.ts","sourceRoot":"","sources":["../src/match.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AAE9C,wBAAgB,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,KAAK,IAAI,MAAM,CAEnE;AAED,wBAAgB,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,KAAK,IAAI,MAAM,CAAC,CAErE;AAED,wBAAgB,cAAc,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,IAAI,CAkB7E;AAED,wBAAgB,oBAAoB,CAAC,CAAC,EAAE,CAAC,KAAK,CAC5C,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,EAC1B,GAAG,EAAE,CAAC,KACH,CAAC,GAAG,SAAS,CAEjB"}
package/dist/schedulers.d.ts
ADDED
@@ -0,0 +1,8 @@
import type { Scheduler } from './types';
export declare const microtask: Scheduler;
export declare function wait(ms: number): Scheduler;
export declare const onAnimationFrame: Scheduler;
export declare function onIdle(options?: {
    timeout?: number;
}): Scheduler;
//# sourceMappingURL=schedulers.d.ts.map
package/dist/schedulers.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"schedulers.d.ts","sourceRoot":"","sources":["../src/schedulers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAEzC,eAAO,MAAM,SAAS,EAAE,SAYvB,CAAC;AAEF,wBAAgB,IAAI,CAAC,EAAE,EAAE,MAAM,GAAG,SAAS,CAK1C;AAED,eAAO,MAAM,gBAAgB,EAAE,SAG9B,CAAC;AAEF,wBAAgB,MAAM,CAAC,OAAO,CAAC,EAAE;IAAE,OAAO,CAAC,EAAE,MAAM,CAAA;CAAE,GAAG,SAAS,CAahE"}
package/dist/trace.d.ts
ADDED
@@ -0,0 +1,6 @@
import type { TraceEventData, TraceHandler } from './types';
export declare function createTracer<K>(name: string | undefined, handler: TraceHandler<K> | undefined): {
    emit: (event: TraceEventData<K>) => void;
    nextBatchId: () => string;
};
//# sourceMappingURL=trace.d.ts.map
package/dist/trace.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"trace.d.ts","sourceRoot":"","sources":["../src/trace.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAc,cAAc,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AAExE,wBAAgB,YAAY,CAAC,CAAC,EAC5B,IAAI,EAAE,MAAM,GAAG,SAAS,EACxB,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,GAAG,SAAS;kBAIf,eAAe,CAAC,CAAC;uBAWd,MAAM;EAK/B"}
package/dist/types.d.ts
ADDED
@@ -0,0 +1,61 @@
export type BatchFn<K, V> = (keys: K[], signal: AbortSignal) => Promise<V[] | Record<string, V>>;
export type Match<K, V> = keyof V | symbol | MatchFn<K, V>;
export type MatchFn<K, V> = (results: V[], key: K) => V | undefined;
export type IndexedMatchFn<K, V> = (results: Record<string, V>, key: K) => V | undefined;
export type Scheduler = (dispatch: () => void) => () => void;
export type TraceHandler<K = unknown> = (event: TraceEvent<K>) => void;
export type TraceEventData<K = unknown> = {
    type: 'get';
    key: K;
} | {
    type: 'dedup';
    key: K;
} | {
    type: 'schedule';
    batchId: string;
    size: number;
} | {
    type: 'dispatch';
    batchId: string;
    keys: K[];
} | {
    type: 'resolve';
    batchId: string;
    duration: number;
} | {
    type: 'error';
    batchId: string;
    error: Error;
} | {
    type: 'abort';
    batchId: string;
};
export type TraceEvent<K = unknown> = TraceEventData<K> & {
    timestamp: number;
};
export interface BatchOptions<K = unknown> {
    wait?: number;
    schedule?: Scheduler;
    max?: number;
    key?: (k: K) => unknown;
    name?: string;
    trace?: TraceHandler<K>;
}
export interface GetOptions {
    signal?: AbortSignal;
}
export interface Batcher<K, V> {
    get(key: K, options?: GetOptions): Promise<V>;
    get(keys: K[], options?: GetOptions): Promise<V[]>;
    flush(): Promise<void>;
    abort(): void;
    readonly name?: string;
}
export interface PendingRequest<K, V> {
    key: K;
    resolve: (value: V) => void;
    reject: (error: Error) => void;
    signal?: AbortSignal;
    aborted: boolean;
}
//# sourceMappingURL=types.d.ts.map
package/dist/types.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,OAAO,CAAC,CAAC,EAAE,CAAC,IAAI,CAC1B,IAAI,EAAE,CAAC,EAAE,EACT,MAAM,EAAE,WAAW,KAChB,OAAO,CAAC,CAAC,EAAE,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC;AAEtC,MAAM,MAAM,KAAK,CAAC,CAAC,EAAE,CAAC,IAAI,MAAM,CAAC,GAAG,MAAM,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAE3D,MAAM,MAAM,OAAO,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,EAAE,CAAC,KAAK,CAAC,GAAG,SAAS,CAAC;AAEpE,MAAM,MAAM,cAAc,CAAC,CAAC,EAAE,CAAC,IAAI,CACjC,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,EAC1B,GAAG,EAAE,CAAC,KACH,CAAC,GAAG,SAAS,CAAC;AAEnB,MAAM,MAAM,SAAS,GAAG,CAAC,QAAQ,EAAE,MAAM,IAAI,KAAK,MAAM,IAAI,CAAC;AAE7D,MAAM,MAAM,YAAY,CAAC,CAAC,GAAG,OAAO,IAAI,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC;AAEvE,MAAM,MAAM,cAAc,CAAC,CAAC,GAAG,OAAO,IAClC;IAAE,IAAI,EAAE,KAAK,CAAC;IAAC,GAAG,EAAE,CAAC,CAAA;CAAE,GACvB;IAAE,IAAI,EAAE,OAAO,CAAC;IAAC,GAAG,EAAE,CAAC,CAAA;CAAE,GACzB;IAAE,IAAI,EAAE,UAAU,CAAC;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,MAAM,CAAA;CAAE,GACnD;IAAE,IAAI,EAAE,UAAU,CAAC;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,CAAC,EAAE,CAAA;CAAE,GAChD;IAAE,IAAI,EAAE,SAAS,CAAC;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAA;CAAE,GACtD;IAAE,IAAI,EAAE,OAAO,CAAC;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,KAAK,EAAE,KAAK,CAAA;CAAE,GAChD;IAAE,IAAI,EAAE,OAAO,CAAC;IAAC,OAAO,EAAE,MAAM,CAAA;CAAE,CAAC;AAEvC,MAAM,MAAM,UAAU,CAAC,CAAC,GAAG,OAAO,IAAI,cAAc,CAAC,CAAC,CAAC,GAAG;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,CAAC;AAEhF,MAAM,WAAW,YAAY,CAAC,CAAC,GAAG,OAAO;IACvC,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,SAAS,CAAC;IACrB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,OAAO,CAAC;IACxB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC;CACzB;AAED,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,WAAW,CAAC;CACtB;AAED,MAAM,WAAW,OAAO,CAAC,CAAC,EAAE,CAAC;IAC3B,GAAG,CAAC,GAAG,EAAE,CAAC,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;IAC9C,GAAG,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC;IACnD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACvB,KAAK,IAAI,IAAI,CAAC;IACd,QAAQ,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC;CACxB;AAED,MAAM,WAAW,cAAc,CAAC,CAAC,EAAE,CAAC;IAClC,GAAG,EAAE,CAAC,CAAC;IACP,OAAO,EAAE,CAAC,KAAK,EAAE,CAAC,KAAK,IAAI,CAAC;IAC5B,MAAM,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IAC/B,MAAM,CAAC,EAAE,WAAW,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;CAClB"}
package/package.json
ADDED
@@ -0,0 +1,35 @@
{
  "name": "batchkit",
  "version": "0.1.0",
  "description": "A modern TypeScript library for batching async operations",
  "type": "module",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "bun": "./src/index.ts",
      "default": "./dist/index.js"
    }
  },
  "files": [
    "dist",
    "src"
  ],
  "scripts": {
    "build": "bun build ./src/index.ts --outdir ./dist --target node && tsc -p tsconfig.build.json",
    "test": "bun test",
    "typecheck": "tsc --noEmit"
  },
  "keywords": [
    "batch",
    "batching",
    "dataloader"
  ],
  "author": "",
  "license": "MIT",
  "devDependencies": {
    "bun-types": "latest",
    "typescript": "5.3.3"
  }
}
package/src/batch.ts
ADDED
@@ -0,0 +1,301 @@
import { BatchError } from './errors';
import { createIndexedMatcher, isIndexed, normalizeMatch } from './match';
import { microtask, wait } from './schedulers';
import { createTracer } from './trace';
import type {
  Batcher,
  BatchFn,
  BatchOptions,
  GetOptions,
  Match,
  PendingRequest,
  Scheduler,
} from './types';

export function batch<K, V>(
  fn: BatchFn<K, V>,
  match: Match<K, V>,
  options: BatchOptions<K> = {},
): Batcher<K, V> {
  const {
    wait: waitMs,
    schedule,
    max,
    key: keyFn = (k: K) => k,
    name,
    trace: traceHandler,
  } = options;

  const scheduler: Scheduler = schedule ?? (waitMs ? wait(waitMs) : microtask);
  const tracer = createTracer(name, traceHandler);

  const matchFn = normalizeMatch(match);
  const isIndexedMatch = isIndexed(match);
  const indexedMatcher = isIndexedMatch ? createIndexedMatcher<K, V>() : null;

  let queue: PendingRequest<K, V>[] = [];
  const pendingKeys = new Set<unknown>();
  let cleanup: (() => void) | null = null;
  let isScheduled = false;
  let currentAbortController: AbortController | null = null;

  function scheduleDispatch(): void {
    if (isScheduled || queue.length === 0) return;

    isScheduled = true;
    const batchId = tracer.nextBatchId();

    tracer.emit({
      type: 'schedule',
      batchId,
      size: queue.length,
    });

    cleanup = scheduler(() => {
      isScheduled = false;
      dispatch(batchId);
    });
  }

  async function dispatch(batchId?: string): Promise<void> {
    const activeQueue = queue.filter((req) => !req.aborted);

    if (activeQueue.length === 0) {
      queue = [];
      pendingKeys.clear();
      return;
    }

    if (cleanup) {
      cleanup();
      cleanup = null;
    }
    isScheduled = false;

    const batch = activeQueue;
    queue = [];
    pendingKeys.clear();

    const chunks: PendingRequest<K, V>[][] = [];
    if (max && max > 0) {
      for (let i = 0; i < batch.length; i += max) {
        chunks.push(batch.slice(i, i + max));
      }
    } else {
      chunks.push(batch);
    }

    for (let i = 0; i < chunks.length; i++) {
      const chunkBatchId =
        i === 0 ? (batchId ?? tracer.nextBatchId()) : tracer.nextBatchId();
      await processChunk(chunks[i], chunkBatchId);
    }
  }

  async function processChunk(
    chunk: PendingRequest<K, V>[],
    batchId: string,
  ): Promise<void> {
    const keyToRequests = new Map<unknown, PendingRequest<K, V>[]>();
    const uniqueKeys: K[] = [];

    for (const request of chunk) {
      if (request.aborted) continue;

      const cacheKey = keyFn(request.key);

      if (!keyToRequests.has(cacheKey)) {
        keyToRequests.set(cacheKey, []);
        uniqueKeys.push(request.key);
      }
      keyToRequests.get(cacheKey)!.push(request);
    }

    if (uniqueKeys.length === 0) return;

    tracer.emit({
      type: 'dispatch',
      batchId,
      keys: uniqueKeys,
    });

    currentAbortController = new AbortController();
    const signal = currentAbortController.signal;

    const startedAt = performance.now();

    try {
      const results = await fn(uniqueKeys, signal);

      const duration = performance.now() - startedAt;

      if (signal.aborted) {
        tracer.emit({ type: 'abort', batchId });
        return;
      }

      tracer.emit({
        type: 'resolve',
        batchId,
        duration,
      });

      if (isIndexedMatch && indexedMatcher) {
        const recordResults = results as Record<string, V>;
        for (const key of uniqueKeys) {
          const cacheKey = keyFn(key);
          const requests = keyToRequests.get(cacheKey)!;
          const value = indexedMatcher(recordResults, key);

          for (const request of requests) {
            if (request.aborted) continue;
            if (value === undefined) {
              request.reject(
                new BatchError(`No result for key: ${String(key)}`),
              );
            } else {
              request.resolve(value);
            }
          }
        }
      } else if (matchFn) {
        const arrayResults = results as V[];

        if (!Array.isArray(arrayResults)) {
          throw new BatchError(
            'Batch function returned a non-array result. Use `indexed` for Record responses.',
          );
        }

        for (const key of uniqueKeys) {
          const cacheKey = keyFn(key);
          const requests = keyToRequests.get(cacheKey)!;
          const value = matchFn(arrayResults, key);

          for (const request of requests) {
            if (request.aborted) continue;
            if (value === undefined) {
              request.reject(
                new BatchError(`No result for key: ${String(key)}`),
              );
            } else {
              request.resolve(value);
            }
          }
        }
      }
    } catch (error) {
      const err = error instanceof Error ? error : new Error(String(error));

      if (err.name === 'AbortError' || signal.aborted) {
        tracer.emit({ type: 'abort', batchId });
      } else {
        tracer.emit({
          type: 'error',
          batchId,
          error: err,
        });
      }

      for (const requests of keyToRequests.values()) {
        for (const request of requests) {
          if (!request.aborted) {
            request.reject(err);
          }
        }
      }
    } finally {
      currentAbortController = null;
    }
  }

  function getSingle(key: K, options?: GetOptions): Promise<V> {
    const externalSignal = options?.signal;

    tracer.emit({ type: 'get', key });

    if (externalSignal?.aborted) {
      return Promise.reject(new DOMException('Aborted', 'AbortError'));
    }

    const cacheKey = keyFn(key);

    if (pendingKeys.has(cacheKey)) {
      tracer.emit({ type: 'dedup', key });
    } else {
      pendingKeys.add(cacheKey);
    }

    return new Promise<V>((resolve, reject) => {
      const request: PendingRequest<K, V> = {
        key,
        resolve,
        reject,
        signal: externalSignal,
        aborted: false,
      };

      queue.push(request);

      if (externalSignal) {
        const onAbort = () => {
          request.aborted = true;
          reject(new DOMException('Aborted', 'AbortError'));

          const allAborted = queue.every((r) => r.aborted);
          if (allAborted && currentAbortController) {
            currentAbortController.abort();
          }
        };

        externalSignal.addEventListener('abort', onAbort, { once: true });
      }

      scheduleDispatch();
    });
  }

  function get(key: K, options?: GetOptions): Promise<V>;
  function get(keys: K[], options?: GetOptions): Promise<V[]>;
  function get(keyOrKeys: K | K[], options?: GetOptions): Promise<V | V[]> {
    if (Array.isArray(keyOrKeys)) {
      return Promise.all(keyOrKeys.map((k) => getSingle(k, options)));
    }
    return getSingle(keyOrKeys, options);
  }

  async function flush(): Promise<void> {
    if (cleanup) {
      cleanup();
      cleanup = null;
    }
    isScheduled = false;
    await dispatch();
  }

  function abort(): void {
    for (const request of queue) {
      request.aborted = true;
      request.reject(new DOMException('Aborted', 'AbortError'));
    }
    queue = [];
    pendingKeys.clear();

    if (currentAbortController) {
      currentAbortController.abort();
    }

    if (cleanup) {
      cleanup();
      cleanup = null;
    }
    isScheduled = false;
  }

  return {
    get,
    flush,
    abort,
    name,
  };
}
package/src/errors.ts
ADDED
package/src/index.ts
ADDED
@@ -0,0 +1,16 @@
export { batch } from './batch';
export { BatchError } from './errors';
export { indexed } from './indexed';
export { onAnimationFrame, onIdle } from './schedulers';

export type {
  Batcher,
  BatchFn,
  BatchOptions,
  GetOptions,
  Match,
  MatchFn,
  Scheduler,
  TraceEvent,
  TraceHandler,
} from './types';
package/src/indexed.ts
ADDED
@@ -0,0 +1 @@
export const indexed: unique symbol = Symbol('indexed');
package/src/match.ts
ADDED
@@ -0,0 +1,37 @@
import { indexed } from './indexed';
import type { Match, MatchFn } from './types';

export function isIndexed<K, V>(match: Match<K, V>): match is symbol {
  return match === indexed;
}

export function isKeyMatch<K, V>(match: Match<K, V>): match is keyof V {
  return typeof match === 'string';
}

export function normalizeMatch<K, V>(match: Match<K, V>): MatchFn<K, V> | null {
  if (isIndexed(match)) {
    // Indexed matching is handled separately
    return null;
  }

  if (isKeyMatch<K, V>(match)) {
    const key = match;
    return (results: V[], requestedKey: K) => {
      return results.find(
        (item) =>
          (item as Record<string, unknown>)[key as string] === requestedKey,
      );
    };
  }

  // Already a function
  return match as MatchFn<K, V>;
}

export function createIndexedMatcher<K, V>(): (
  results: Record<string, V>,
  key: K,
) => V | undefined {
  return (results, key) => results[String(key)];
}
package/src/schedulers.ts
ADDED
@@ -0,0 +1,42 @@
import type { Scheduler } from './types';

export const microtask: Scheduler = (dispatch) => {
  let cancelled = false;

  queueMicrotask(() => {
    if (!cancelled) {
      dispatch();
    }
  });

  return () => {
    cancelled = true;
  };
};

export function wait(ms: number): Scheduler {
  return (dispatch) => {
    const id = setTimeout(dispatch, ms);
    return () => clearTimeout(id);
  };
}

export const onAnimationFrame: Scheduler = (dispatch) => {
  const id = requestAnimationFrame(dispatch);
  return () => cancelAnimationFrame(id);
};

export function onIdle(options?: { timeout?: number }): Scheduler {
  return (dispatch) => {
    if (typeof requestIdleCallback !== 'undefined') {
      const id = requestIdleCallback(
        dispatch,
        options?.timeout ? { timeout: options.timeout } : undefined,
      );
      return () => cancelIdleCallback(id);
    }

    const id = setTimeout(dispatch, options?.timeout ?? 1);
    return () => clearTimeout(id);
  };
}
package/src/trace.ts
ADDED
@@ -0,0 +1,25 @@
import type { TraceEvent, TraceEventData, TraceHandler } from './types';

export function createTracer<K>(
  name: string | undefined,
  handler: TraceHandler<K> | undefined,
) {
  let batchCounter = 0;

  function emit(event: TraceEventData<K>) {
    if (!handler) return;

    const fullEvent = {
      ...event,
      timestamp: performance.now(),
    } as TraceEvent<K>;

    handler(fullEvent);
  }

  function nextBatchId(): string {
    return `${name ?? 'batch'}-${++batchCounter}`;
  }

  return { emit, nextBatchId };
}
package/src/types.ts
ADDED
@@ -0,0 +1,57 @@
export type BatchFn<K, V> = (
  keys: K[],
  signal: AbortSignal,
) => Promise<V[] | Record<string, V>>;

export type Match<K, V> = keyof V | symbol | MatchFn<K, V>;

export type MatchFn<K, V> = (results: V[], key: K) => V | undefined;

export type IndexedMatchFn<K, V> = (
  results: Record<string, V>,
  key: K,
) => V | undefined;

export type Scheduler = (dispatch: () => void) => () => void;

export type TraceHandler<K = unknown> = (event: TraceEvent<K>) => void;

export type TraceEventData<K = unknown> =
  | { type: 'get'; key: K }
  | { type: 'dedup'; key: K }
  | { type: 'schedule'; batchId: string; size: number }
  | { type: 'dispatch'; batchId: string; keys: K[] }
  | { type: 'resolve'; batchId: string; duration: number }
  | { type: 'error'; batchId: string; error: Error }
  | { type: 'abort'; batchId: string };

export type TraceEvent<K = unknown> = TraceEventData<K> & { timestamp: number };

export interface BatchOptions<K = unknown> {
  wait?: number;
  schedule?: Scheduler;
  max?: number;
  key?: (k: K) => unknown;
  name?: string;
  trace?: TraceHandler<K>;
}

export interface GetOptions {
  signal?: AbortSignal;
}

export interface Batcher<K, V> {
  get(key: K, options?: GetOptions): Promise<V>;
  get(keys: K[], options?: GetOptions): Promise<V[]>;
  flush(): Promise<void>;
  abort(): void;
  readonly name?: string;
}

export interface PendingRequest<K, V> {
  key: K;
  resolve: (value: V) => void;
  reject: (error: Error) => void;
  signal?: AbortSignal;
  aborted: boolean;
}