@livestore/sync-cf 0.4.0-dev.8 → 0.4.0-dev.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/.tsbuildinfo +1 -1
- package/dist/cf-worker/do/durable-object.d.ts.map +1 -1
- package/dist/cf-worker/do/durable-object.js +5 -9
- package/dist/cf-worker/do/durable-object.js.map +1 -1
- package/dist/cf-worker/do/layer.d.ts +1 -1
- package/dist/cf-worker/do/pull.d.ts +1 -1
- package/dist/cf-worker/do/pull.d.ts.map +1 -1
- package/dist/cf-worker/do/pull.js +9 -3
- package/dist/cf-worker/do/pull.js.map +1 -1
- package/dist/cf-worker/do/push.d.ts.map +1 -1
- package/dist/cf-worker/do/push.js +65 -34
- package/dist/cf-worker/do/push.js.map +1 -1
- package/dist/cf-worker/do/transport/do-rpc-server.d.ts +2 -1
- package/dist/cf-worker/do/transport/do-rpc-server.d.ts.map +1 -1
- package/dist/cf-worker/do/transport/do-rpc-server.js.map +1 -1
- package/dist/cf-worker/do/transport/http-rpc-server.d.ts +1 -1
- package/dist/cf-worker/do/ws-chunking.d.ts +22 -0
- package/dist/cf-worker/do/ws-chunking.d.ts.map +1 -0
- package/dist/cf-worker/do/ws-chunking.js +49 -0
- package/dist/cf-worker/do/ws-chunking.js.map +1 -0
- package/dist/cf-worker/shared.d.ts +19 -13
- package/dist/cf-worker/shared.d.ts.map +1 -1
- package/dist/cf-worker/shared.js +15 -4
- package/dist/cf-worker/shared.js.map +1 -1
- package/dist/cf-worker/worker.d.ts +30 -45
- package/dist/cf-worker/worker.d.ts.map +1 -1
- package/dist/cf-worker/worker.js +30 -25
- package/dist/cf-worker/worker.js.map +1 -1
- package/dist/common/sync-message-types.d.ts +5 -5
- package/package.json +5 -5
- package/src/cf-worker/do/durable-object.ts +6 -10
- package/src/cf-worker/do/pull.ts +15 -3
- package/src/cf-worker/do/push.ts +84 -38
- package/src/cf-worker/do/transport/do-rpc-server.ts +4 -2
- package/src/cf-worker/do/ws-chunking.ts +76 -0
- package/src/cf-worker/shared.ts +19 -6
- package/src/cf-worker/worker.ts +46 -69
package/dist/cf-worker/worker.d.ts
CHANGED

@@ -1,34 +1,19 @@
+import type { HelperTypes } from '@livestore/common-cf';
 import type { Schema } from '@livestore/utils/effect';
 import type { CfTypes, SearchParams } from '../common/mod.ts';
 import { type Env } from './shared.ts';
-export declare namespace HelperTypes {
-    type AnyDON = CfTypes.DurableObjectNamespace<undefined>;
-    type DOKeys<T> = {
-        [K in keyof T]-?: T[K] extends AnyDON ? K : never;
-    }[keyof T];
-    type NonBuiltins<T> = Omit<T, keyof Env>;
-    /**
-     * Helper type to extract DurableObject keys from Env to give consumer type safety.
-     *
-     * @example
-     * ```ts
-     * type PlatformEnv = {
-     *   DB: D1Database
-     *   ADMIN_TOKEN: string
-     *   SYNC_BACKEND_DO: DurableObjectNamespace<SyncBackendDO>
-     * }
-     * export default makeWorker<PlatformEnv>({
-     *   durableObject: { name: "SYNC_BACKEND_DO" },
-     *   //              ^ (property) name?: "SYNC_BACKEND_DO" | undefined
-     * });
-     */
-    export type ExtractDurableObjectKeys<TEnv = Env> = DOKeys<NonBuiltins<TEnv>> extends never ? string : DOKeys<NonBuiltins<TEnv>>;
-    export {};
-}
 export type CFWorker<TEnv extends Env = Env, _T extends CfTypes.Rpc.DurableObjectBranded | undefined = undefined> = {
     fetch: <CFHostMetada = unknown>(request: CfTypes.Request<CFHostMetada>, env: TEnv, ctx: CfTypes.ExecutionContext) => Promise<CfTypes.Response>;
 };
+/**
+ * Options accepted by {@link makeWorker}. The Durable Object binding has to be
+ * supplied explicitly so we never fall back to deprecated defaults when Cloudflare config changes.
+ */
 export type MakeWorkerOptions<TEnv extends Env = Env> = {
+    /**
+     * Binding name of the sync Durable Object declared in wrangler config.
+     */
+    syncBackendBinding: HelperTypes.ExtractDurableObjectKeys<TEnv>;
     /**
      * Validates the payload during WebSocket connection establishment.
      * Note: This runs only at connection time, not for individual push events.
@@ -39,16 +24,15 @@ export type MakeWorkerOptions<TEnv extends Env = Env> = {
     }) => void | Promise<void>;
     /** @default false */
     enableCORS?: boolean;
-    durableObject?: {
-        /**
-         * Needs to match the binding name from the wrangler config
-         *
-         * @default 'SYNC_BACKEND_DO'
-         */
-        name?: HelperTypes.ExtractDurableObjectKeys<TEnv>;
-    };
 };
-
+/**
+ * Produces a Cloudflare Worker `fetch` handler that delegates sync traffic to the
+ * Durable Object identified by `syncBackendBinding`.
+ *
+ * For more complex setups prefer implementing a custom `fetch` and call {@link handleSyncRequest}
+ * from the branch that handles LiveStore sync requests.
+ */
+export declare const makeWorker: <TEnv extends Env = Env, TDurableObjectRpc extends CfTypes.Rpc.DurableObjectBranded | undefined = undefined>(options: MakeWorkerOptions<TEnv>) => CFWorker<TEnv, TDurableObjectRpc>;
 /**
  * Handles `/sync` endpoint.
  *
@@ -63,16 +47,18 @@ export declare const makeWorker: <TEnv extends Env = Env, TDurableObjectRpc exte
  *
  * export default {
  *   fetch: async (request, env, ctx) => {
- *     const
+ *     const searchParams = matchSyncRequest(request)
  *
  *     // Is LiveStore sync request
- *     if (
+ *     if (searchParams !== undefined) {
  *       return handleSyncRequest({
  *         request,
- *         searchParams
+ *         searchParams,
  *         env,
 *         ctx,
- *
+ *         syncBackendBinding: 'SYNC_BACKEND_DO',
+ *         headers: {},
+ *         validatePayload,
 *       })
 *     }
 *
@@ -83,18 +69,17 @@ export declare const makeWorker: <TEnv extends Env = Env, TDurableObjectRpc exte
 *
 * @throws {UnexpectedError} If the payload is invalid
 */
-export declare const handleSyncRequest: <TEnv extends Env = Env, TDurableObjectRpc extends CfTypes.Rpc.DurableObjectBranded | undefined = undefined, CFHostMetada = unknown>({ request, searchParams, env,
+export declare const handleSyncRequest: <TEnv extends Env = Env, TDurableObjectRpc extends CfTypes.Rpc.DurableObjectBranded | undefined = undefined, CFHostMetada = unknown>({ request, searchParams: { storeId, payload, transport }, env, syncBackendBinding, headers, validatePayload, }: {
     request: CfTypes.Request<CFHostMetada>;
     searchParams: SearchParams;
     env: TEnv;
     /** Only there for type-level reasons */
     ctx: CfTypes.ExecutionContext;
-
-
-
-
-
-
-};
+    /** Binding name of the sync backend Durable Object */
+    syncBackendBinding: MakeWorkerOptions<TEnv>["syncBackendBinding"];
+    headers?: CfTypes.HeadersInit | undefined;
+    validatePayload?: (payload: Schema.JsonValue | undefined, context: {
+        storeId: string;
+    }) => void | Promise<void>;
 }) => Promise<CfTypes.Response>;
 //# sourceMappingURL=worker.d.ts.map
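Taken together, the `worker.d.ts` changes replace the optional `durableObject.name` option (which silently defaulted to `'SYNC_BACKEND_DO'`) with a required `syncBackendBinding` that is type-constrained to the Durable Object keys of the env. A minimal before/after sketch, reusing the `PlatformEnv` shape from the removed JSDoc example above (the `SyncBackendDO` class name is illustrative, not part of the package):

```ts
import { makeWorker } from '@livestore/sync-cf/cf-worker'

type PlatformEnv = {
  DB: D1Database
  ADMIN_TOKEN: string
  SYNC_BACKEND_DO: DurableObjectNamespace<SyncBackendDO> // SyncBackendDO: your DO class
}

// 0.4.0-dev.8 (removed API): the binding was optional with a hidden default
// export default makeWorker<PlatformEnv>({ durableObject: { name: 'SYNC_BACKEND_DO' } })

// 0.4.0-dev.9: the binding is required and, via ExtractDurableObjectKeys,
// only DurableObjectNamespace-valued keys of PlatformEnv typecheck here
export default makeWorker<PlatformEnv>({
  syncBackendBinding: 'SYNC_BACKEND_DO',
  enableCORS: true, // still optional, defaults to false
})
```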
package/dist/cf-worker/worker.js
CHANGED

@@ -1,7 +1,14 @@
 import { UnexpectedError } from '@livestore/common';
 import { Effect } from '@livestore/utils/effect';
-import {
-
+import { matchSyncRequest } from "./shared.js";
+/**
+ * Produces a Cloudflare Worker `fetch` handler that delegates sync traffic to the
+ * Durable Object identified by `syncBackendBinding`.
+ *
+ * For more complex setups prefer implementing a custom `fetch` and call {@link handleSyncRequest}
+ * from the branch that handles LiveStore sync requests.
+ */
+export const makeWorker = (options) => {
     return {
         fetch: async (request, env, _ctx) => {
             const url = new URL(request.url);
@@ -18,19 +25,17 @@ export const makeWorker = (options = {}) => {
                     headers: corsHeaders,
                 });
             }
-            const
+            const searchParams = matchSyncRequest(request);
             // Check if this is a sync request first, before showing info message
-            if (
+            if (searchParams !== undefined) {
                 return handleSyncRequest({
                     request,
-                    searchParams
+                    searchParams,
                     env,
                     ctx: _ctx,
-
-
-
-                    durableObject: options.durableObject,
-                },
+                    syncBackendBinding: options.syncBackendBinding,
+                    headers: corsHeaders,
+                    validatePayload: options.validatePayload,
                 });
             }
             // Only show info message for GET requests to / without sync parameters
@@ -66,16 +71,18 @@ export const makeWorker = (options = {}) => {
  *
  * export default {
  *   fetch: async (request, env, ctx) => {
- *     const
+ *     const searchParams = matchSyncRequest(request)
  *
  *     // Is LiveStore sync request
- *     if (
+ *     if (searchParams !== undefined) {
  *       return handleSyncRequest({
  *         request,
- *         searchParams
+ *         searchParams,
  *         env,
 *         ctx,
- *
+ *         syncBackendBinding: 'SYNC_BACKEND_DO',
+ *         headers: {},
+ *         validatePayload,
 *       })
 *     }
 *
@@ -86,23 +93,21 @@ export const makeWorker = (options = {}) => {
 *
 * @throws {UnexpectedError} If the payload is invalid
 */
-export const handleSyncRequest = ({ request, searchParams, env,
-
-
-    const result = yield* Effect.promise(async () => options.validatePayload(payload, { storeId })).pipe(UnexpectedError.mapToUnexpectedError, Effect.either);
+export const handleSyncRequest = ({ request, searchParams: { storeId, payload, transport }, env, syncBackendBinding, headers, validatePayload, }) => Effect.gen(function* () {
+    if (validatePayload !== undefined) {
+        const result = yield* Effect.promise(async () => validatePayload(payload, { storeId })).pipe(UnexpectedError.mapToUnexpectedError, Effect.either);
         if (result._tag === 'Left') {
             console.error('Invalid payload', result.left);
-            return new Response(result.left.toString(), { status: 400, headers
+            return new Response(result.left.toString(), { status: 400, headers });
         }
     }
-
-
-    return new Response(`Failed dependency: Required Durable Object binding '${durableObjectName}' not available`, {
+    if (!(syncBackendBinding in env)) {
+        return new Response(`Failed dependency: Required Durable Object binding '${syncBackendBinding}' not available`, {
            status: 424,
-            headers
+            headers,
        });
    }
-    const durableObjectNamespace = env[
+    const durableObjectNamespace = env[syncBackendBinding];
    const id = durableObjectNamespace.idFromName(storeId);
    const durableObject = durableObjectNamespace.get(id);
    // Handle WebSocket upgrade request
@@ -110,7 +115,7 @@ export const handleSyncRequest = ({ request, searchParams, env, options = {}, })
    if (transport === 'ws' && (upgradeHeader === null || upgradeHeader !== 'websocket')) {
        return new Response('Durable Object expected Upgrade: websocket', {
            status: 426,
-            headers
+            headers,
        });
    }
    return yield* Effect.promise(() => durableObject.fetch(request));
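The JSDoc example above now describes a complete routing pattern. Spelled out as a standalone entry point, a custom `fetch` that reserves non-sync routes for itself might look like this sketch (it assumes `matchSyncRequest` is re-exported from the `./cf-worker` entry point alongside `handleSyncRequest`; the `/health` route is illustrative):

```ts
import { handleSyncRequest, matchSyncRequest } from '@livestore/sync-cf/cf-worker'

export default {
  fetch: async (request, env, ctx) => {
    const searchParams = matchSyncRequest(request)

    // Is LiveStore sync request
    if (searchParams !== undefined) {
      return handleSyncRequest({
        request,
        searchParams,
        env,
        ctx,
        syncBackendBinding: 'SYNC_BACKEND_DO',
        headers: {},
      })
    }

    // Illustrative non-sync route handled by the same worker
    if (new URL(request.url).pathname === '/health') {
      return new Response('ok')
    }

    return new Response('Not found', { status: 404 })
  },
}
```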
package/dist/common/sync-message-types.d.ts
CHANGED

@@ -42,12 +42,7 @@ export declare const PullResponse: Schema.Struct<{
     backendId: Schema.SchemaClass<string, string, never>;
 }>;
 export declare const emptyPullResponse: (backendId: string) => {
-    readonly backendId: string;
     readonly batch: readonly {
-        readonly metadata: import("effect/Option").Option<{
-            readonly createdAt: string;
-            readonly _tag: "SyncMessage.SyncMetadata";
-        }>;
         readonly eventEncoded: {
             readonly name: string;
             readonly args: any;
@@ -56,6 +51,10 @@ export declare const emptyPullResponse: (backendId: string) => {
             readonly clientId: string;
             readonly sessionId: string;
         };
+        readonly metadata: import("effect/Option").Option<{
+            readonly _tag: "SyncMessage.SyncMetadata";
+            readonly createdAt: string;
+        }>;
     }[];
     readonly pageInfo: {
         readonly _tag: "MoreUnknown";
@@ -65,6 +64,7 @@ export declare const emptyPullResponse: (backendId: string) => {
     } | {
         readonly _tag: "NoMore";
     };
+    readonly backendId: string;
 };
 export type PullResponse = typeof PullResponse.Type;
 export declare const PushRequest: Schema.Struct<{
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@livestore/sync-cf",
-  "version": "0.4.0-dev.8",
+  "version": "0.4.0-dev.9",
   "type": "module",
   "sideEffects": false,
   "exports": {
@@ -9,10 +9,10 @@
     "./cf-worker": "./dist/cf-worker/mod.js"
   },
   "dependencies": {
-    "@cloudflare/workers-types": "4.
-    "@livestore/common": "0.4.0-dev.
-    "@livestore/
-    "@livestore/
+    "@cloudflare/workers-types": "4.20250923.0",
+    "@livestore/common": "0.4.0-dev.9",
+    "@livestore/common-cf": "0.4.0-dev.9",
+    "@livestore/utils": "0.4.0-dev.9"
   },
   "files": [
     "dist",
package/src/cf-worker/do/durable-object.ts
CHANGED

@@ -16,8 +16,8 @@ import {
 } from '@livestore/utils/effect'
 import {
   type Env,
-  getSyncRequestSearchParams,
   type MakeDurableObjectClassOptions,
+  matchSyncRequest,
   type SyncBackendRpcInterface,
   WebSocketAttachmentSchema,
 } from '../shared.ts'
@@ -33,10 +33,10 @@ declare class Response extends CfDeclare.Response {}
 declare class WebSocketPair extends CfDeclare.WebSocketPair {}
 declare class WebSocketRequestResponsePair extends CfDeclare.WebSocketRequestResponsePair {}
 
-const DurableObjectBase = DurableObject as any as new (
+const DurableObjectBase = DurableObject<Env> as any as new (
   state: CfTypes.DurableObjectState,
   env: Env,
-) => CfTypes.DurableObject
+) => CfTypes.DurableObject & { ctx: CfTypes.DurableObjectState; env: Env }
 
 // Type aliases needed to avoid TS bug https://github.com/microsoft/TypeScript/issues/55021
 export type DoState = CfTypes.DurableObjectState
@@ -99,13 +99,9 @@ export const makeDurableObject: MakeDurableObjectClass = (options) => {
 
   return class SyncBackendDOBase extends DurableObjectBase implements SyncBackendRpcInterface {
     __DURABLE_OBJECT_BRAND = 'SyncBackendDOBase' as never
-    ctx: CfTypes.DurableObjectState
-    env: Env
 
     constructor(ctx: CfTypes.DurableObjectState, env: Env) {
       super(ctx, env)
-      this.ctx = ctx
-      this.env = env
 
       const WebSocketRpcServerLive = makeRpcServer({ doSelf: this, doOptions: options })
 
@@ -148,12 +144,12 @@ export const makeDurableObject: MakeDurableObjectClass = (options) => {
 
   fetch = async (request: Request): Promise<Response> =>
     Effect.gen(this, function* () {
-      const
-      if (
+      const searchParams = matchSyncRequest(request)
+      if (searchParams === undefined) {
        throw new Error('No search params found in request URL')
      }
 
-      const { storeId, payload, transport } =
+      const { storeId, payload, transport } = searchParams
 
      if (enabledTransports.has(transport) === false) {
        throw new Error(`Transport ${transport} is not enabled (based on \`options.enabledTransports\`)`)
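The typed Durable Object base class now supplies `ctx`/`env` itself, so the generated class no longer re-declares and re-assigns them; the consumer-facing pattern is unchanged. A sketch of that pattern (only `onPullRes`, which `push.ts` below invokes, is shown here; the exact option set lives in `MakeDurableObjectClassOptions`):

```ts
import { makeDurableObject } from '@livestore/sync-cf/cf-worker'

// Subclass the generated base class and export it under the name your
// wrangler config binds as `SYNC_BACKEND_DO`.
export class SyncBackendDO extends makeDurableObject({
  // Called as each push is re-broadcast in pull-response chunks (see push.ts below)
  onPullRes: async (response) => {
    console.log(`broadcasting ${response.batch.length} events`)
  },
}) {}
```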
package/src/cf-worker/do/pull.ts
CHANGED

@@ -1,8 +1,11 @@
 import { BackendIdMismatchError, InvalidPullError, SyncBackend, UnexpectedError } from '@livestore/common'
-import { Chunk, Effect, Option,
+import { Chunk, Effect, Option, Schema, Stream } from '@livestore/utils/effect'
 import { SyncMessage } from '../../common/mod.ts'
-import {
+import { MAX_PULL_EVENTS_PER_MESSAGE, MAX_WS_MESSAGE_BYTES } from '../shared.ts'
 import { DoCtx } from './layer.ts'
+import { splitChunkBySize } from './ws-chunking.ts'
+
+const encodePullResponse = Schema.encodeSync(SyncMessage.PullResponse)
 
 // Notes on stream handling:
 // We're intentionally closing the stream once we've read all existing events
@@ -35,7 +38,16 @@ export const makeEndingPullStream = (
   )
 
   return storedEvents.pipe(
-    Stream.
+    Stream.mapChunks(
+      splitChunkBySize({
+        maxItems: MAX_PULL_EVENTS_PER_MESSAGE,
+        maxBytes: MAX_WS_MESSAGE_BYTES,
+        encode: (batch) =>
+          encodePullResponse(
+            SyncMessage.PullResponse.make({ batch, pageInfo: SyncBackend.pageInfoNoMore, backendId }),
+          ),
+      }),
+    ),
     Stream.mapAccum(total, (remaining, chunk) => {
       const asArray = Chunk.toReadonlyArray(chunk)
      const nextRemaining = Math.max(0, remaining - asArray.length)
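The inserted `Stream.mapChunks(splitChunkBySize(...))` stage regroups the stored-events stream so that each emitted element is one batch whose fully framed `PullResponse` encoding stays under both limits. A toy sketch of the same regrouping, with strings and made-up limits standing in for the real constants and schema (it assumes `@livestore/utils/effect` re-exports `Chunk`/`Effect`/`Stream`, as the package's own imports suggest):

```ts
import { Chunk, Effect, Stream } from '@livestore/utils/effect'
import { splitChunkBySize } from './ws-chunking.ts'

const events = Stream.fromIterable(['e1', 'e2', 'e3', 'e4', 'e5'])

const batches = events.pipe(
  Stream.mapChunks(
    splitChunkBySize({
      maxItems: 2, // stand-in for MAX_PULL_EVENTS_PER_MESSAGE
      maxBytes: 1024, // stand-in for MAX_WS_MESSAGE_BYTES
      // Measure the framed message, not the bare items, so envelope overhead counts
      encode: (batch) => ({ batch, pageInfo: { _tag: 'NoMore' } }),
    }),
  ),
)

// Each collected element is now a Chunk of at most 2 events:
// [['e1','e2'], ['e3','e4'], ['e5']]
Effect.runPromise(Stream.runCollect(batches)).then((out) =>
  console.log(Chunk.toReadonlyArray(out).map(Chunk.toReadonlyArray)),
)
```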
package/src/cf-worker/do/push.ts
CHANGED

@@ -6,10 +6,21 @@ import {
   UnexpectedError,
 } from '@livestore/common'
 import { type CfTypes, emitStreamResponse } from '@livestore/common-cf'
-import { Effect, Option, type RpcMessage, Schema } from '@livestore/utils/effect'
+import { Chunk, Effect, Option, type RpcMessage, Schema } from '@livestore/utils/effect'
 import { SyncMessage } from '../../common/mod.ts'
-import {
+import {
+  type Env,
+  MAX_PULL_EVENTS_PER_MESSAGE,
+  MAX_WS_MESSAGE_BYTES,
+  type MakeDurableObjectClassOptions,
+  type StoreId,
+  WebSocketAttachmentSchema,
+} from '../shared.ts'
 import { DoCtx } from './layer.ts'
+import { splitChunkBySize } from './ws-chunking.ts'
+
+const encodePullResponse = Schema.encodeSync(SyncMessage.PullResponse)
+type PullBatchItem = SyncMessage.PullResponse['batch'][number]
 
 export const makePush =
   ({
@@ -51,6 +62,13 @@ export const makePush =
       // Validate the batch
       const firstEventParent = pushRequest.batch[0]!.parentSeqNum
       if (firstEventParent !== currentHead) {
+        // yield* Effect.logDebug('ServerAheadError: backend head mismatch', {
+        //   expectedHead: currentHead,
+        //   providedHead: firstEventParent,
+        //   batchSize: pushRequest.batch.length,
+        //   backendId,
+        // })
+
         return yield* new ServerAheadError({ minimumExpectedNum: currentHead, providedNum: firstEventParent })
       }
 
@@ -68,40 +86,69 @@ export const makePush =
       yield* Effect.gen(function* () {
         const connectedClients = ctx.getWebSockets()
 
-        //
-        const
-
-
-
-
-
-
-
+        // Preparing chunks of responses to make sure we don't exceed the WS message size limit.
+        const responses = Chunk.fromIterable(pushRequest.batch).pipe(
+          splitChunkBySize({
+            maxItems: MAX_PULL_EVENTS_PER_MESSAGE,
+            maxBytes: MAX_WS_MESSAGE_BYTES,
+            encode: (items) =>
+              encodePullResponse(
+                SyncMessage.PullResponse.make({
+                  batch: items.map(
+                    (eventEncoded): PullBatchItem => ({
+                      eventEncoded,
+                      metadata: Option.some(SyncMessage.SyncMetadata.make({ createdAt })),
+                    }),
+                  ),
+                  pageInfo: SyncBackend.pageInfoNoMore,
+                  backendId,
+                }),
+              ),
+          }),
+          Chunk.map((eventsChunk) => {
+            const batchWithMetadata = Chunk.toReadonlyArray(eventsChunk).map((eventEncoded) => ({
+              eventEncoded,
+              metadata: Option.some(SyncMessage.SyncMetadata.make({ createdAt })),
+            }))
+
+            const response = SyncMessage.PullResponse.make({
+              batch: batchWithMetadata,
+              pageInfo: SyncBackend.pageInfoNoMore,
+              backendId,
+            })
+
+            return {
+              response,
+              encoded: Schema.encodeSync(SyncMessage.PullResponse)(response),
+            }
+          }),
+        )
 
-
+        // Dual broadcasting: WebSocket + RPC clients
 
         // Broadcast to WebSocket clients
         if (connectedClients.length > 0) {
-
-
-
-
+          for (const { response, encoded } of responses) {
+            // Only calling once for now.
+            if (options?.onPullRes) {
+              yield* Effect.tryAll(() => options.onPullRes!(response)).pipe(UnexpectedError.mapToUnexpectedError)
+            }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+            // NOTE we're also sending the pullRes chunk to the pushing ws client as confirmation
+            for (const conn of connectedClients) {
+              const attachment = Schema.decodeSync(WebSocketAttachmentSchema)(conn.deserializeAttachment())
+
+              // We're doing something a bit "advanced" here as we're directly emitting Effect RPC-compatible
+              // response messsages on the Effect RPC-managed websocket connection to the WS client.
+              // For this we need to get the RPC `requestId` from the WebSocket attachment.
+              for (const requestId of attachment.pullRequestIds) {
+                const res: RpcMessage.ResponseChunkEncoded = {
+                  _tag: 'Chunk',
+                  requestId,
+                  values: [encoded],
+                }
+                conn.send(JSON.stringify(res))
              }
-            conn.send(JSON.stringify(res))
            }
          }
 
@@ -110,17 +157,16 @@ export const makePush =
 
         // RPC broadcasting would require reconstructing client stubs from clientIds
         if (rpcSubscriptions.size > 0) {
-
-
-
-          emitStreamResponse({
+          for (const subscription of rpcSubscriptions.values()) {
+            for (const { encoded } of responses) {
+              yield* emitStreamResponse({
                callerContext: subscription.callerContext,
                env,
                requestId: subscription.requestId,
-            values: [
-          }).pipe(Effect.tapCauseLogPretty, Effect.exit)
-
-
+                values: [encoded],
+              }).pipe(Effect.tapCauseLogPretty, Effect.exit)
+            }
+          }
 
           yield* Effect.logDebug(`Broadcasted to ${rpcSubscriptions.size} RPC clients`)
         }
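Both broadcast paths emit the same payload per batch; on the hibernated-WebSocket path each batch is framed as an Effect RPC `Chunk` response keyed by the subscriber's `requestId`. A sketch of one such frame as it leaves the Durable Object (all concrete values illustrative):

```ts
declare const ws: WebSocket // one connection from ctx.getWebSockets()

// `requestId` is read from the connection's WebSocketAttachmentSchema attachment;
// the single element of `values` is one encoded PullResponse batch.
const frame = {
  _tag: 'Chunk',
  requestId: '1', // illustrative
  values: [
    {
      batch: [/* up to MAX_PULL_EVENTS_PER_MESSAGE events, each with metadata */],
      pageInfo: { _tag: 'NoMore' },
      backendId: 'example-backend-id', // illustrative
    },
  ],
}
ws.send(JSON.stringify(frame))
```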
package/src/cf-worker/do/transport/do-rpc-server.ts
CHANGED

@@ -1,5 +1,5 @@
 import { InvalidPullError, InvalidPushError } from '@livestore/common'
-import { toDurableObjectHandler } from '@livestore/common-cf'
+import { type CfTypes, toDurableObjectHandler } from '@livestore/common-cf'
 import {
   Effect,
   Headers,
@@ -22,7 +22,9 @@ export interface DoRpcHandlerOptions {
   input: Omit<DoCtxInput, 'from'>
 }
 
-export const createDoRpcHandler = (
+export const createDoRpcHandler = (
+  options: DoRpcHandlerOptions,
+): Effect.Effect<Uint8Array<ArrayBuffer> | CfTypes.ReadableStream> =>
   Effect.gen(this, function* () {
     const { payload, input } = options
     // const { rpcSubscriptions, backendId, doOptions, ctx, env } = yield* DoCtx
package/src/cf-worker/do/ws-chunking.ts
ADDED

@@ -0,0 +1,76 @@
+import { Chunk } from '@livestore/utils/effect'
+
+const textEncoder = new TextEncoder()
+
+/**
+ * Configuration describing how to break a chunk into smaller payload-safe chunks.
+ */
+export interface ChunkingOptions<A> {
+  /** Maximum number of items that may appear in any emitted chunk. */
+  readonly maxItems: number
+  /** Maximum encoded byte size allowed for any emitted chunk. */
+  readonly maxBytes: number
+  /**
+   * Callback that produces a JSON-serialisable structure whose byte size should
+   * fit within {@link maxBytes}. This lets callers control framing overhead.
+   */
+  readonly encode: (items: ReadonlyArray<A>) => unknown
+}
+
+/**
+ * Derives a function that splits an input chunk into sub-chunks confined by
+ * both item count and encoded byte size limits. Designed for transports with
+ * strict frame caps (e.g. Cloudflare hibernated WebSockets).
+ */
+export const splitChunkBySize =
+  <A>(options: ChunkingOptions<A>) =>
+  (chunk: Chunk.Chunk<A>): Chunk.Chunk<Chunk.Chunk<A>> => {
+    const maxItems = Math.max(1, options.maxItems)
+    const maxBytes = Math.max(1, options.maxBytes)
+    const encode = options.encode
+
+    const measure = (items: ReadonlyArray<A>) => {
+      const encoded = encode(items)
+      return textEncoder.encode(JSON.stringify(encoded)).byteLength
+    }
+
+    const items = Chunk.toReadonlyArray(chunk)
+    if (items.length === 0) {
+      return Chunk.fromIterable<Chunk.Chunk<A>>([])
+    }
+
+    const result: Array<Chunk.Chunk<A>> = []
+    let current: Array<A> = []
+
+    const flushCurrent = () => {
+      if (current.length > 0) {
+        result.push(Chunk.fromIterable(current))
+        current = []
+      }
+    }
+
+    for (const item of items) {
+      current.push(item)
+      const exceedsLimit = current.length > maxItems || measure(current) > maxBytes
+
+      if (exceedsLimit) {
+        // remove the item we just added and emit the previous chunk if it exists
+        const last = current.pop()!
+        flushCurrent()
+
+        if (last !== undefined) {
+          current = [last]
+          const singleItemTooLarge = measure(current) > maxBytes
+          if (singleItemTooLarge || current.length > maxItems) {
+            // Emit the oversized item on its own; downstream can decide how to handle it.
+            result.push(Chunk.of(last))
+            current = []
+          }
+        }
+      }
+    }
+
+    flushCurrent()
+
+    return Chunk.fromIterable(result)
+  }