@tldraw/sync-core 4.2.0 → 4.3.0-canary.3811ec6b3dcd

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
@@ -66,8 +66,8 @@ export declare class JsonChunkAssembler {
  *
  * @param msg - The message to process, either JSON or chunk format
  * @returns Result object with data/stringified on success, error object on failure, or null for incomplete chunks
- * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
- * - `\{ error: Error \}` - Parse error or invalid chunk sequence
+ * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
+ * - `\{ error: Error \}` - Parse error or invalid chunk sequence
  * - `null` - Chunk received but more chunks expected
  *
  * @example
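This hunk (repeated below for the CJS, ESM, and source copies of chunk.ts) only re-indents the two bullet lines of the `@returns` block; the documented contract of `handleMessage` is unchanged. As a minimal usage sketch of that contract, assuming `JsonChunkAssembler` is importable from the package entry point as its `@public` tag suggests:

```ts
import { JsonChunkAssembler } from '@tldraw/sync-core'

const assembler = new JsonChunkAssembler()

// A message that starts with '{' is treated as a complete JSON payload.
const whole = assembler.handleMessage('{"hello":"world"}')

// Chunked messages are prefixed with the number of chunks still to come,
// so everything before the final '0_' chunk yields null.
assembler.handleMessage('2_{"key')
assembler.handleMessage('1_":"val')
const assembled = assembler.handleMessage('0_ue"}')

for (const result of [whole, assembled]) {
	if (result === null) {
		// partial chunk sequence - keep feeding messages
	} else if ('error' in result) {
		console.error(result.error) // parse error or out-of-order chunk
	} else {
		console.log(result.data, result.stringified) // parsed object plus the raw string
	}
}
```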
package/dist-cjs/index.js CHANGED
@@ -51,7 +51,7 @@ var import_TLSyncClient = require("./lib/TLSyncClient");
  var import_TLSyncRoom = require("./lib/TLSyncRoom");
  (0, import_utils.registerTldrawLibraryVersion)(
  "@tldraw/sync-core",
- "4.2.0",
+ "4.3.0-canary.3811ec6b3dcd",
  "cjs"
  );
  //# sourceMappingURL=index.js.map
@@ -55,8 +55,8 @@ class JsonChunkAssembler {
  *
  * @param msg - The message to process, either JSON or chunk format
  * @returns Result object with data/stringified on success, error object on failure, or null for incomplete chunks
- * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
- * - `\{ error: Error \}` - Parse error or invalid chunk sequence
+ * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
+ * - `\{ error: Error \}` - Parse error or invalid chunk sequence
  * - `null` - Chunk received but more chunks expected
  *
  * @example
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/lib/chunk.ts"],
- "sourcesContent": ["// quarter of a megabyte, max possible utf-8 string size\n\n// cloudflare workers only accept messages of max 1mb\nconst MAX_CLIENT_SENT_MESSAGE_SIZE_BYTES = 1024 * 1024\n// utf-8 is max 4 bytes per char\nconst MAX_BYTES_PER_CHAR = 4\n\n// in the (admittedly impossible) worst case, the max size is 1/4 of a megabyte\nconst MAX_SAFE_MESSAGE_SIZE = MAX_CLIENT_SENT_MESSAGE_SIZE_BYTES / MAX_BYTES_PER_CHAR\n\n/**\n * Splits a string into smaller chunks suitable for transmission over WebSockets.\n * This function ensures messages don't exceed size limits imposed by platforms like Cloudflare Workers (1MB max).\n * Each chunk is prefixed with a number indicating how many more chunks follow.\n *\n * @param msg - The string to split into chunks\n * @param maxSafeMessageSize - Maximum safe size for each chunk in characters. Defaults to quarter megabyte to account for UTF-8 encoding\n * @returns Array of chunked strings, each prefixed with \"\\{number\\}_\" where number indicates remaining chunks\n *\n * @example\n * ```ts\n * // Small message - returns as single chunk\n * chunk('hello world') // ['hello world']\n *\n * // Large message - splits into multiple chunks\n * chunk('very long message...', 10)\n * // ['2_very long', '1_ message', '0_...']\n * ```\n *\n * @internal\n */\nexport function chunk(msg: string, maxSafeMessageSize = MAX_SAFE_MESSAGE_SIZE) {\n\tif (msg.length < maxSafeMessageSize) {\n\t\treturn [msg]\n\t} else {\n\t\tconst chunks = []\n\t\tlet chunkNumber = 0\n\t\tlet offset = msg.length\n\t\twhile (offset > 0) {\n\t\t\tconst prefix = `${chunkNumber}_`\n\t\t\tconst chunkSize = Math.max(Math.min(maxSafeMessageSize - prefix.length, offset), 1)\n\t\t\tchunks.unshift(prefix + msg.slice(offset - chunkSize, offset))\n\t\t\toffset -= chunkSize\n\t\t\tchunkNumber++\n\t\t}\n\t\treturn chunks\n\t}\n}\n\nconst chunkRe = /^(\\d+)_(.*)$/\n\n/**\n * Assembles chunked JSON messages back into complete objects.\n * Handles both regular JSON messages and chunked messages created by the chunk() function.\n * Maintains internal state to track partially received chunked messages.\n *\n * @example\n * ```ts\n * const assembler = new JsonChunkAssembler()\n *\n * // Handle regular JSON message\n * const result1 = assembler.handleMessage('{\"hello\": \"world\"}')\n * // Returns: { data: { hello: \"world\" }, stringified: '{\"hello\": \"world\"}' }\n *\n * // Handle chunked message\n * assembler.handleMessage('1_hello') // Returns: null (partial)\n * const result2 = assembler.handleMessage('0_ world')\n * // Returns: { data: \"hello world\", stringified: \"hello world\" }\n * ```\n *\n * @public\n */\nexport class JsonChunkAssembler {\n\t/**\n\t * Current assembly state - either 'idle' or tracking chunks being received\n\t */\n\tstate:\n\t\t| 'idle'\n\t\t| {\n\t\t\t\tchunksReceived: string[]\n\t\t\t\ttotalChunks: number\n\t\t } = 'idle'\n\n\t/**\n\t * Processes a single message, which can be either a complete JSON object or a chunk.\n\t * For complete JSON objects (starting with '\\{'), parses immediately.\n\t * For chunks (prefixed with \"\\{number\\}_\"), accumulates until all chunks received.\n\t *\n\t * @param msg - The message to process, either JSON or chunk format\n\t * @returns Result object with data/stringified on success, error object on failure, or null for incomplete chunks\n\t * - `\\{ data: object, stringified: string \\}` - Successfully parsed complete message\n\t * - `\\{ error: Error \\}` - Parse error or invalid chunk sequence\n\t * \t- `null` - Chunk received but more 
chunks expected\n\t *\n\t * @example\n\t * ```ts\n\t * const assembler = new JsonChunkAssembler()\n\t *\n\t * // Complete JSON message\n\t * const result = assembler.handleMessage('{\"key\": \"value\"}')\n\t * if (result && 'data' in result) {\n\t * console.log(result.data) // { key: \"value\" }\n\t * }\n\t *\n\t * // Chunked message sequence\n\t * assembler.handleMessage('2_hel') // null - more chunks expected\n\t * assembler.handleMessage('1_lo ') // null - more chunks expected\n\t * assembler.handleMessage('0_wor') // { data: \"hello wor\", stringified: \"hello wor\" }\n\t * ```\n\t */\n\thandleMessage(msg: string): { error: Error } | { stringified: string; data: object } | null {\n\t\tif (msg.startsWith('{')) {\n\t\t\tconst error = this.state === 'idle' ? undefined : new Error('Unexpected non-chunk message')\n\t\t\tthis.state = 'idle'\n\t\t\treturn error ? { error } : { data: JSON.parse(msg), stringified: msg }\n\t\t} else {\n\t\t\tconst match = chunkRe.exec(msg)!\n\t\t\tif (!match) {\n\t\t\t\tthis.state = 'idle'\n\t\t\t\treturn { error: new Error('Invalid chunk: ' + JSON.stringify(msg.slice(0, 20) + '...')) }\n\t\t\t}\n\t\t\tconst numChunksRemaining = Number(match[1])\n\t\t\tconst data = match[2]\n\n\t\t\tif (this.state === 'idle') {\n\t\t\t\tthis.state = {\n\t\t\t\t\tchunksReceived: [data],\n\t\t\t\t\ttotalChunks: numChunksRemaining + 1,\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tthis.state.chunksReceived.push(data)\n\t\t\t\tif (numChunksRemaining !== this.state.totalChunks - this.state.chunksReceived.length) {\n\t\t\t\t\tthis.state = 'idle'\n\t\t\t\t\treturn { error: new Error(`Chunks received in wrong order`) }\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (this.state.chunksReceived.length === this.state.totalChunks) {\n\t\t\t\ttry {\n\t\t\t\t\tconst stringified = this.state.chunksReceived.join('')\n\t\t\t\t\tconst data = JSON.parse(stringified)\n\t\t\t\t\treturn { data, stringified }\n\t\t\t\t} catch (e) {\n\t\t\t\t\treturn { error: e as Error }\n\t\t\t\t} finally {\n\t\t\t\t\tthis.state = 'idle'\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn null\n\t\t}\n\t}\n}\n"],
+ "sourcesContent": ["// quarter of a megabyte, max possible utf-8 string size\n\n// cloudflare workers only accept messages of max 1mb\nconst MAX_CLIENT_SENT_MESSAGE_SIZE_BYTES = 1024 * 1024\n// utf-8 is max 4 bytes per char\nconst MAX_BYTES_PER_CHAR = 4\n\n// in the (admittedly impossible) worst case, the max size is 1/4 of a megabyte\nconst MAX_SAFE_MESSAGE_SIZE = MAX_CLIENT_SENT_MESSAGE_SIZE_BYTES / MAX_BYTES_PER_CHAR\n\n/**\n * Splits a string into smaller chunks suitable for transmission over WebSockets.\n * This function ensures messages don't exceed size limits imposed by platforms like Cloudflare Workers (1MB max).\n * Each chunk is prefixed with a number indicating how many more chunks follow.\n *\n * @param msg - The string to split into chunks\n * @param maxSafeMessageSize - Maximum safe size for each chunk in characters. Defaults to quarter megabyte to account for UTF-8 encoding\n * @returns Array of chunked strings, each prefixed with \"\\{number\\}_\" where number indicates remaining chunks\n *\n * @example\n * ```ts\n * // Small message - returns as single chunk\n * chunk('hello world') // ['hello world']\n *\n * // Large message - splits into multiple chunks\n * chunk('very long message...', 10)\n * // ['2_very long', '1_ message', '0_...']\n * ```\n *\n * @internal\n */\nexport function chunk(msg: string, maxSafeMessageSize = MAX_SAFE_MESSAGE_SIZE) {\n\tif (msg.length < maxSafeMessageSize) {\n\t\treturn [msg]\n\t} else {\n\t\tconst chunks = []\n\t\tlet chunkNumber = 0\n\t\tlet offset = msg.length\n\t\twhile (offset > 0) {\n\t\t\tconst prefix = `${chunkNumber}_`\n\t\t\tconst chunkSize = Math.max(Math.min(maxSafeMessageSize - prefix.length, offset), 1)\n\t\t\tchunks.unshift(prefix + msg.slice(offset - chunkSize, offset))\n\t\t\toffset -= chunkSize\n\t\t\tchunkNumber++\n\t\t}\n\t\treturn chunks\n\t}\n}\n\nconst chunkRe = /^(\\d+)_(.*)$/\n\n/**\n * Assembles chunked JSON messages back into complete objects.\n * Handles both regular JSON messages and chunked messages created by the chunk() function.\n * Maintains internal state to track partially received chunked messages.\n *\n * @example\n * ```ts\n * const assembler = new JsonChunkAssembler()\n *\n * // Handle regular JSON message\n * const result1 = assembler.handleMessage('{\"hello\": \"world\"}')\n * // Returns: { data: { hello: \"world\" }, stringified: '{\"hello\": \"world\"}' }\n *\n * // Handle chunked message\n * assembler.handleMessage('1_hello') // Returns: null (partial)\n * const result2 = assembler.handleMessage('0_ world')\n * // Returns: { data: \"hello world\", stringified: \"hello world\" }\n * ```\n *\n * @public\n */\nexport class JsonChunkAssembler {\n\t/**\n\t * Current assembly state - either 'idle' or tracking chunks being received\n\t */\n\tstate:\n\t\t| 'idle'\n\t\t| {\n\t\t\t\tchunksReceived: string[]\n\t\t\t\ttotalChunks: number\n\t\t } = 'idle'\n\n\t/**\n\t * Processes a single message, which can be either a complete JSON object or a chunk.\n\t * For complete JSON objects (starting with '\\{'), parses immediately.\n\t * For chunks (prefixed with \"\\{number\\}_\"), accumulates until all chunks received.\n\t *\n\t * @param msg - The message to process, either JSON or chunk format\n\t * @returns Result object with data/stringified on success, error object on failure, or null for incomplete chunks\n\t * \t- `\\{ data: object, stringified: string \\}` - Successfully parsed complete message\n\t * \t- `\\{ error: Error \\}` - Parse error or invalid chunk sequence\n\t * \t- `null` - Chunk received but 
more chunks expected\n\t *\n\t * @example\n\t * ```ts\n\t * const assembler = new JsonChunkAssembler()\n\t *\n\t * // Complete JSON message\n\t * const result = assembler.handleMessage('{\"key\": \"value\"}')\n\t * if (result && 'data' in result) {\n\t * console.log(result.data) // { key: \"value\" }\n\t * }\n\t *\n\t * // Chunked message sequence\n\t * assembler.handleMessage('2_hel') // null - more chunks expected\n\t * assembler.handleMessage('1_lo ') // null - more chunks expected\n\t * assembler.handleMessage('0_wor') // { data: \"hello wor\", stringified: \"hello wor\" }\n\t * ```\n\t */\n\thandleMessage(msg: string): { error: Error } | { stringified: string; data: object } | null {\n\t\tif (msg.startsWith('{')) {\n\t\t\tconst error = this.state === 'idle' ? undefined : new Error('Unexpected non-chunk message')\n\t\t\tthis.state = 'idle'\n\t\t\treturn error ? { error } : { data: JSON.parse(msg), stringified: msg }\n\t\t} else {\n\t\t\tconst match = chunkRe.exec(msg)!\n\t\t\tif (!match) {\n\t\t\t\tthis.state = 'idle'\n\t\t\t\treturn { error: new Error('Invalid chunk: ' + JSON.stringify(msg.slice(0, 20) + '...')) }\n\t\t\t}\n\t\t\tconst numChunksRemaining = Number(match[1])\n\t\t\tconst data = match[2]\n\n\t\t\tif (this.state === 'idle') {\n\t\t\t\tthis.state = {\n\t\t\t\t\tchunksReceived: [data],\n\t\t\t\t\ttotalChunks: numChunksRemaining + 1,\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tthis.state.chunksReceived.push(data)\n\t\t\t\tif (numChunksRemaining !== this.state.totalChunks - this.state.chunksReceived.length) {\n\t\t\t\t\tthis.state = 'idle'\n\t\t\t\t\treturn { error: new Error(`Chunks received in wrong order`) }\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (this.state.chunksReceived.length === this.state.totalChunks) {\n\t\t\t\ttry {\n\t\t\t\t\tconst stringified = this.state.chunksReceived.join('')\n\t\t\t\t\tconst data = JSON.parse(stringified)\n\t\t\t\t\treturn { data, stringified }\n\t\t\t\t} catch (e) {\n\t\t\t\t\treturn { error: e as Error }\n\t\t\t\t} finally {\n\t\t\t\t\tthis.state = 'idle'\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn null\n\t\t}\n\t}\n}\n"],
  "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,MAAM,qCAAqC,OAAO;AAElD,MAAM,qBAAqB;AAG3B,MAAM,wBAAwB,qCAAqC;AAuB5D,SAAS,MAAM,KAAa,qBAAqB,uBAAuB;AAC9E,MAAI,IAAI,SAAS,oBAAoB;AACpC,WAAO,CAAC,GAAG;AAAA,EACZ,OAAO;AACN,UAAM,SAAS,CAAC;AAChB,QAAI,cAAc;AAClB,QAAI,SAAS,IAAI;AACjB,WAAO,SAAS,GAAG;AAClB,YAAM,SAAS,GAAG,WAAW;AAC7B,YAAM,YAAY,KAAK,IAAI,KAAK,IAAI,qBAAqB,OAAO,QAAQ,MAAM,GAAG,CAAC;AAClF,aAAO,QAAQ,SAAS,IAAI,MAAM,SAAS,WAAW,MAAM,CAAC;AAC7D,gBAAU;AACV;AAAA,IACD;AACA,WAAO;AAAA,EACR;AACD;AAEA,MAAM,UAAU;AAuBT,MAAM,mBAAmB;AAAA;AAAA;AAAA;AAAA,EAI/B,QAKO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6BP,cAAc,KAA8E;AAC3F,QAAI,IAAI,WAAW,GAAG,GAAG;AACxB,YAAM,QAAQ,KAAK,UAAU,SAAS,SAAY,IAAI,MAAM,8BAA8B;AAC1F,WAAK,QAAQ;AACb,aAAO,QAAQ,EAAE,MAAM,IAAI,EAAE,MAAM,KAAK,MAAM,GAAG,GAAG,aAAa,IAAI;AAAA,IACtE,OAAO;AACN,YAAM,QAAQ,QAAQ,KAAK,GAAG;AAC9B,UAAI,CAAC,OAAO;AACX,aAAK,QAAQ;AACb,eAAO,EAAE,OAAO,IAAI,MAAM,oBAAoB,KAAK,UAAU,IAAI,MAAM,GAAG,EAAE,IAAI,KAAK,CAAC,EAAE;AAAA,MACzF;AACA,YAAM,qBAAqB,OAAO,MAAM,CAAC,CAAC;AAC1C,YAAM,OAAO,MAAM,CAAC;AAEpB,UAAI,KAAK,UAAU,QAAQ;AAC1B,aAAK,QAAQ;AAAA,UACZ,gBAAgB,CAAC,IAAI;AAAA,UACrB,aAAa,qBAAqB;AAAA,QACnC;AAAA,MACD,OAAO;AACN,aAAK,MAAM,eAAe,KAAK,IAAI;AACnC,YAAI,uBAAuB,KAAK,MAAM,cAAc,KAAK,MAAM,eAAe,QAAQ;AACrF,eAAK,QAAQ;AACb,iBAAO,EAAE,OAAO,IAAI,MAAM,gCAAgC,EAAE;AAAA,QAC7D;AAAA,MACD;AACA,UAAI,KAAK,MAAM,eAAe,WAAW,KAAK,MAAM,aAAa;AAChE,YAAI;AACH,gBAAM,cAAc,KAAK,MAAM,eAAe,KAAK,EAAE;AACrD,gBAAMA,QAAO,KAAK,MAAM,WAAW;AACnC,iBAAO,EAAE,MAAAA,OAAM,YAAY;AAAA,QAC5B,SAAS,GAAG;AACX,iBAAO,EAAE,OAAO,EAAW;AAAA,QAC5B,UAAE;AACD,eAAK,QAAQ;AAAA,QACd;AAAA,MACD;AACA,aAAO;AAAA,IACR;AAAA,EACD;AACD;",
  "names": ["data"]
  }
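The `sourcesContent` above embeds the full chunking implementation: `chunk()` frames each piece as `"<chunks remaining>_<slice>"` and `JsonChunkAssembler` rebuilds the original string once the `0_` piece arrives. A round-trip sketch, assuming `chunk` is also re-exported from the package entry point (it is tagged `@internal` in the source, so that import is an assumption):

```ts
import { chunk, JsonChunkAssembler } from '@tldraw/sync-core'

// An artificially small chunk size (16 chars) forces the message to split.
const payload = JSON.stringify({ note: 'a payload long enough to be split into several chunks' })
const pieces = chunk(payload, 16)

const assembler = new JsonChunkAssembler()
let result: ReturnType<JsonChunkAssembler['handleMessage']> = null
for (const piece of pieces) {
	result = assembler.handleMessage(piece) // null until the final '0_' piece
}

if (result && 'data' in result) {
	console.log(result.stringified === payload) // true - the payload survives the round trip
}
```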
@@ -66,8 +66,8 @@ export declare class JsonChunkAssembler {
  *
  * @param msg - The message to process, either JSON or chunk format
  * @returns Result object with data/stringified on success, error object on failure, or null for incomplete chunks
- * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
- * - `\{ error: Error \}` - Parse error or invalid chunk sequence
+ * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
+ * - `\{ error: Error \}` - Parse error or invalid chunk sequence
  * - `null` - Chunk received but more chunks expected
  *
  * @example
@@ -26,7 +26,7 @@ import {
  } from "./lib/TLSyncRoom.mjs";
  registerTldrawLibraryVersion(
  "@tldraw/sync-core",
- "4.2.0",
+ "4.3.0-canary.3811ec6b3dcd",
  "esm"
  );
  export {
@@ -31,8 +31,8 @@ class JsonChunkAssembler {
  *
  * @param msg - The message to process, either JSON or chunk format
  * @returns Result object with data/stringified on success, error object on failure, or null for incomplete chunks
- * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
- * - `\{ error: Error \}` - Parse error or invalid chunk sequence
+ * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
+ * - `\{ error: Error \}` - Parse error or invalid chunk sequence
  * - `null` - Chunk received but more chunks expected
  *
  * @example
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/lib/chunk.ts"],
- "sourcesContent": ["// quarter of a megabyte, max possible utf-8 string size\n\n// cloudflare workers only accept messages of max 1mb\nconst MAX_CLIENT_SENT_MESSAGE_SIZE_BYTES = 1024 * 1024\n// utf-8 is max 4 bytes per char\nconst MAX_BYTES_PER_CHAR = 4\n\n// in the (admittedly impossible) worst case, the max size is 1/4 of a megabyte\nconst MAX_SAFE_MESSAGE_SIZE = MAX_CLIENT_SENT_MESSAGE_SIZE_BYTES / MAX_BYTES_PER_CHAR\n\n/**\n * Splits a string into smaller chunks suitable for transmission over WebSockets.\n * This function ensures messages don't exceed size limits imposed by platforms like Cloudflare Workers (1MB max).\n * Each chunk is prefixed with a number indicating how many more chunks follow.\n *\n * @param msg - The string to split into chunks\n * @param maxSafeMessageSize - Maximum safe size for each chunk in characters. Defaults to quarter megabyte to account for UTF-8 encoding\n * @returns Array of chunked strings, each prefixed with \"\\{number\\}_\" where number indicates remaining chunks\n *\n * @example\n * ```ts\n * // Small message - returns as single chunk\n * chunk('hello world') // ['hello world']\n *\n * // Large message - splits into multiple chunks\n * chunk('very long message...', 10)\n * // ['2_very long', '1_ message', '0_...']\n * ```\n *\n * @internal\n */\nexport function chunk(msg: string, maxSafeMessageSize = MAX_SAFE_MESSAGE_SIZE) {\n\tif (msg.length < maxSafeMessageSize) {\n\t\treturn [msg]\n\t} else {\n\t\tconst chunks = []\n\t\tlet chunkNumber = 0\n\t\tlet offset = msg.length\n\t\twhile (offset > 0) {\n\t\t\tconst prefix = `${chunkNumber}_`\n\t\t\tconst chunkSize = Math.max(Math.min(maxSafeMessageSize - prefix.length, offset), 1)\n\t\t\tchunks.unshift(prefix + msg.slice(offset - chunkSize, offset))\n\t\t\toffset -= chunkSize\n\t\t\tchunkNumber++\n\t\t}\n\t\treturn chunks\n\t}\n}\n\nconst chunkRe = /^(\\d+)_(.*)$/\n\n/**\n * Assembles chunked JSON messages back into complete objects.\n * Handles both regular JSON messages and chunked messages created by the chunk() function.\n * Maintains internal state to track partially received chunked messages.\n *\n * @example\n * ```ts\n * const assembler = new JsonChunkAssembler()\n *\n * // Handle regular JSON message\n * const result1 = assembler.handleMessage('{\"hello\": \"world\"}')\n * // Returns: { data: { hello: \"world\" }, stringified: '{\"hello\": \"world\"}' }\n *\n * // Handle chunked message\n * assembler.handleMessage('1_hello') // Returns: null (partial)\n * const result2 = assembler.handleMessage('0_ world')\n * // Returns: { data: \"hello world\", stringified: \"hello world\" }\n * ```\n *\n * @public\n */\nexport class JsonChunkAssembler {\n\t/**\n\t * Current assembly state - either 'idle' or tracking chunks being received\n\t */\n\tstate:\n\t\t| 'idle'\n\t\t| {\n\t\t\t\tchunksReceived: string[]\n\t\t\t\ttotalChunks: number\n\t\t } = 'idle'\n\n\t/**\n\t * Processes a single message, which can be either a complete JSON object or a chunk.\n\t * For complete JSON objects (starting with '\\{'), parses immediately.\n\t * For chunks (prefixed with \"\\{number\\}_\"), accumulates until all chunks received.\n\t *\n\t * @param msg - The message to process, either JSON or chunk format\n\t * @returns Result object with data/stringified on success, error object on failure, or null for incomplete chunks\n\t * - `\\{ data: object, stringified: string \\}` - Successfully parsed complete message\n\t * - `\\{ error: Error \\}` - Parse error or invalid chunk sequence\n\t * \t- `null` - Chunk received but more 
chunks expected\n\t *\n\t * @example\n\t * ```ts\n\t * const assembler = new JsonChunkAssembler()\n\t *\n\t * // Complete JSON message\n\t * const result = assembler.handleMessage('{\"key\": \"value\"}')\n\t * if (result && 'data' in result) {\n\t * console.log(result.data) // { key: \"value\" }\n\t * }\n\t *\n\t * // Chunked message sequence\n\t * assembler.handleMessage('2_hel') // null - more chunks expected\n\t * assembler.handleMessage('1_lo ') // null - more chunks expected\n\t * assembler.handleMessage('0_wor') // { data: \"hello wor\", stringified: \"hello wor\" }\n\t * ```\n\t */\n\thandleMessage(msg: string): { error: Error } | { stringified: string; data: object } | null {\n\t\tif (msg.startsWith('{')) {\n\t\t\tconst error = this.state === 'idle' ? undefined : new Error('Unexpected non-chunk message')\n\t\t\tthis.state = 'idle'\n\t\t\treturn error ? { error } : { data: JSON.parse(msg), stringified: msg }\n\t\t} else {\n\t\t\tconst match = chunkRe.exec(msg)!\n\t\t\tif (!match) {\n\t\t\t\tthis.state = 'idle'\n\t\t\t\treturn { error: new Error('Invalid chunk: ' + JSON.stringify(msg.slice(0, 20) + '...')) }\n\t\t\t}\n\t\t\tconst numChunksRemaining = Number(match[1])\n\t\t\tconst data = match[2]\n\n\t\t\tif (this.state === 'idle') {\n\t\t\t\tthis.state = {\n\t\t\t\t\tchunksReceived: [data],\n\t\t\t\t\ttotalChunks: numChunksRemaining + 1,\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tthis.state.chunksReceived.push(data)\n\t\t\t\tif (numChunksRemaining !== this.state.totalChunks - this.state.chunksReceived.length) {\n\t\t\t\t\tthis.state = 'idle'\n\t\t\t\t\treturn { error: new Error(`Chunks received in wrong order`) }\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (this.state.chunksReceived.length === this.state.totalChunks) {\n\t\t\t\ttry {\n\t\t\t\t\tconst stringified = this.state.chunksReceived.join('')\n\t\t\t\t\tconst data = JSON.parse(stringified)\n\t\t\t\t\treturn { data, stringified }\n\t\t\t\t} catch (e) {\n\t\t\t\t\treturn { error: e as Error }\n\t\t\t\t} finally {\n\t\t\t\t\tthis.state = 'idle'\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn null\n\t\t}\n\t}\n}\n"],
+ "sourcesContent": ["// quarter of a megabyte, max possible utf-8 string size\n\n// cloudflare workers only accept messages of max 1mb\nconst MAX_CLIENT_SENT_MESSAGE_SIZE_BYTES = 1024 * 1024\n// utf-8 is max 4 bytes per char\nconst MAX_BYTES_PER_CHAR = 4\n\n// in the (admittedly impossible) worst case, the max size is 1/4 of a megabyte\nconst MAX_SAFE_MESSAGE_SIZE = MAX_CLIENT_SENT_MESSAGE_SIZE_BYTES / MAX_BYTES_PER_CHAR\n\n/**\n * Splits a string into smaller chunks suitable for transmission over WebSockets.\n * This function ensures messages don't exceed size limits imposed by platforms like Cloudflare Workers (1MB max).\n * Each chunk is prefixed with a number indicating how many more chunks follow.\n *\n * @param msg - The string to split into chunks\n * @param maxSafeMessageSize - Maximum safe size for each chunk in characters. Defaults to quarter megabyte to account for UTF-8 encoding\n * @returns Array of chunked strings, each prefixed with \"\\{number\\}_\" where number indicates remaining chunks\n *\n * @example\n * ```ts\n * // Small message - returns as single chunk\n * chunk('hello world') // ['hello world']\n *\n * // Large message - splits into multiple chunks\n * chunk('very long message...', 10)\n * // ['2_very long', '1_ message', '0_...']\n * ```\n *\n * @internal\n */\nexport function chunk(msg: string, maxSafeMessageSize = MAX_SAFE_MESSAGE_SIZE) {\n\tif (msg.length < maxSafeMessageSize) {\n\t\treturn [msg]\n\t} else {\n\t\tconst chunks = []\n\t\tlet chunkNumber = 0\n\t\tlet offset = msg.length\n\t\twhile (offset > 0) {\n\t\t\tconst prefix = `${chunkNumber}_`\n\t\t\tconst chunkSize = Math.max(Math.min(maxSafeMessageSize - prefix.length, offset), 1)\n\t\t\tchunks.unshift(prefix + msg.slice(offset - chunkSize, offset))\n\t\t\toffset -= chunkSize\n\t\t\tchunkNumber++\n\t\t}\n\t\treturn chunks\n\t}\n}\n\nconst chunkRe = /^(\\d+)_(.*)$/\n\n/**\n * Assembles chunked JSON messages back into complete objects.\n * Handles both regular JSON messages and chunked messages created by the chunk() function.\n * Maintains internal state to track partially received chunked messages.\n *\n * @example\n * ```ts\n * const assembler = new JsonChunkAssembler()\n *\n * // Handle regular JSON message\n * const result1 = assembler.handleMessage('{\"hello\": \"world\"}')\n * // Returns: { data: { hello: \"world\" }, stringified: '{\"hello\": \"world\"}' }\n *\n * // Handle chunked message\n * assembler.handleMessage('1_hello') // Returns: null (partial)\n * const result2 = assembler.handleMessage('0_ world')\n * // Returns: { data: \"hello world\", stringified: \"hello world\" }\n * ```\n *\n * @public\n */\nexport class JsonChunkAssembler {\n\t/**\n\t * Current assembly state - either 'idle' or tracking chunks being received\n\t */\n\tstate:\n\t\t| 'idle'\n\t\t| {\n\t\t\t\tchunksReceived: string[]\n\t\t\t\ttotalChunks: number\n\t\t } = 'idle'\n\n\t/**\n\t * Processes a single message, which can be either a complete JSON object or a chunk.\n\t * For complete JSON objects (starting with '\\{'), parses immediately.\n\t * For chunks (prefixed with \"\\{number\\}_\"), accumulates until all chunks received.\n\t *\n\t * @param msg - The message to process, either JSON or chunk format\n\t * @returns Result object with data/stringified on success, error object on failure, or null for incomplete chunks\n\t * \t- `\\{ data: object, stringified: string \\}` - Successfully parsed complete message\n\t * \t- `\\{ error: Error \\}` - Parse error or invalid chunk sequence\n\t * \t- `null` - Chunk received but 
more chunks expected\n\t *\n\t * @example\n\t * ```ts\n\t * const assembler = new JsonChunkAssembler()\n\t *\n\t * // Complete JSON message\n\t * const result = assembler.handleMessage('{\"key\": \"value\"}')\n\t * if (result && 'data' in result) {\n\t * console.log(result.data) // { key: \"value\" }\n\t * }\n\t *\n\t * // Chunked message sequence\n\t * assembler.handleMessage('2_hel') // null - more chunks expected\n\t * assembler.handleMessage('1_lo ') // null - more chunks expected\n\t * assembler.handleMessage('0_wor') // { data: \"hello wor\", stringified: \"hello wor\" }\n\t * ```\n\t */\n\thandleMessage(msg: string): { error: Error } | { stringified: string; data: object } | null {\n\t\tif (msg.startsWith('{')) {\n\t\t\tconst error = this.state === 'idle' ? undefined : new Error('Unexpected non-chunk message')\n\t\t\tthis.state = 'idle'\n\t\t\treturn error ? { error } : { data: JSON.parse(msg), stringified: msg }\n\t\t} else {\n\t\t\tconst match = chunkRe.exec(msg)!\n\t\t\tif (!match) {\n\t\t\t\tthis.state = 'idle'\n\t\t\t\treturn { error: new Error('Invalid chunk: ' + JSON.stringify(msg.slice(0, 20) + '...')) }\n\t\t\t}\n\t\t\tconst numChunksRemaining = Number(match[1])\n\t\t\tconst data = match[2]\n\n\t\t\tif (this.state === 'idle') {\n\t\t\t\tthis.state = {\n\t\t\t\t\tchunksReceived: [data],\n\t\t\t\t\ttotalChunks: numChunksRemaining + 1,\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tthis.state.chunksReceived.push(data)\n\t\t\t\tif (numChunksRemaining !== this.state.totalChunks - this.state.chunksReceived.length) {\n\t\t\t\t\tthis.state = 'idle'\n\t\t\t\t\treturn { error: new Error(`Chunks received in wrong order`) }\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (this.state.chunksReceived.length === this.state.totalChunks) {\n\t\t\t\ttry {\n\t\t\t\t\tconst stringified = this.state.chunksReceived.join('')\n\t\t\t\t\tconst data = JSON.parse(stringified)\n\t\t\t\t\treturn { data, stringified }\n\t\t\t\t} catch (e) {\n\t\t\t\t\treturn { error: e as Error }\n\t\t\t\t} finally {\n\t\t\t\t\tthis.state = 'idle'\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn null\n\t\t}\n\t}\n}\n"],
  "mappings": "AAGA,MAAM,qCAAqC,OAAO;AAElD,MAAM,qBAAqB;AAG3B,MAAM,wBAAwB,qCAAqC;AAuB5D,SAAS,MAAM,KAAa,qBAAqB,uBAAuB;AAC9E,MAAI,IAAI,SAAS,oBAAoB;AACpC,WAAO,CAAC,GAAG;AAAA,EACZ,OAAO;AACN,UAAM,SAAS,CAAC;AAChB,QAAI,cAAc;AAClB,QAAI,SAAS,IAAI;AACjB,WAAO,SAAS,GAAG;AAClB,YAAM,SAAS,GAAG,WAAW;AAC7B,YAAM,YAAY,KAAK,IAAI,KAAK,IAAI,qBAAqB,OAAO,QAAQ,MAAM,GAAG,CAAC;AAClF,aAAO,QAAQ,SAAS,IAAI,MAAM,SAAS,WAAW,MAAM,CAAC;AAC7D,gBAAU;AACV;AAAA,IACD;AACA,WAAO;AAAA,EACR;AACD;AAEA,MAAM,UAAU;AAuBT,MAAM,mBAAmB;AAAA;AAAA;AAAA;AAAA,EAI/B,QAKO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6BP,cAAc,KAA8E;AAC3F,QAAI,IAAI,WAAW,GAAG,GAAG;AACxB,YAAM,QAAQ,KAAK,UAAU,SAAS,SAAY,IAAI,MAAM,8BAA8B;AAC1F,WAAK,QAAQ;AACb,aAAO,QAAQ,EAAE,MAAM,IAAI,EAAE,MAAM,KAAK,MAAM,GAAG,GAAG,aAAa,IAAI;AAAA,IACtE,OAAO;AACN,YAAM,QAAQ,QAAQ,KAAK,GAAG;AAC9B,UAAI,CAAC,OAAO;AACX,aAAK,QAAQ;AACb,eAAO,EAAE,OAAO,IAAI,MAAM,oBAAoB,KAAK,UAAU,IAAI,MAAM,GAAG,EAAE,IAAI,KAAK,CAAC,EAAE;AAAA,MACzF;AACA,YAAM,qBAAqB,OAAO,MAAM,CAAC,CAAC;AAC1C,YAAM,OAAO,MAAM,CAAC;AAEpB,UAAI,KAAK,UAAU,QAAQ;AAC1B,aAAK,QAAQ;AAAA,UACZ,gBAAgB,CAAC,IAAI;AAAA,UACrB,aAAa,qBAAqB;AAAA,QACnC;AAAA,MACD,OAAO;AACN,aAAK,MAAM,eAAe,KAAK,IAAI;AACnC,YAAI,uBAAuB,KAAK,MAAM,cAAc,KAAK,MAAM,eAAe,QAAQ;AACrF,eAAK,QAAQ;AACb,iBAAO,EAAE,OAAO,IAAI,MAAM,gCAAgC,EAAE;AAAA,QAC7D;AAAA,MACD;AACA,UAAI,KAAK,MAAM,eAAe,WAAW,KAAK,MAAM,aAAa;AAChE,YAAI;AACH,gBAAM,cAAc,KAAK,MAAM,eAAe,KAAK,EAAE;AACrD,gBAAMA,QAAO,KAAK,MAAM,WAAW;AACnC,iBAAO,EAAE,MAAAA,OAAM,YAAY;AAAA,QAC5B,SAAS,GAAG;AACX,iBAAO,EAAE,OAAO,EAAW;AAAA,QAC5B,UAAE;AACD,eAAK,QAAQ;AAAA,QACd;AAAA,MACD;AACA,aAAO;AAAA,IACR;AAAA,EACD;AACD;",
  "names": ["data"]
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@tldraw/sync-core",
  "description": "tldraw infinite canvas SDK (multiplayer sync).",
- "version": "4.2.0",
+ "version": "4.3.0-canary.3811ec6b3dcd",
  "author": {
  "name": "tldraw GB Ltd.",
  "email": "hello@tldraw.com"
@@ -48,17 +48,17 @@
  "@types/uuid-readable": "^0.0.3",
  "react": "^18.3.1",
  "react-dom": "^18.3.1",
- "tldraw": "4.2.0",
+ "tldraw": "4.3.0-canary.3811ec6b3dcd",
  "typescript": "^5.8.3",
  "uuid-by-string": "^4.0.0",
  "uuid-readable": "^0.0.2",
  "vitest": "^3.2.4"
  },
  "dependencies": {
- "@tldraw/state": "4.2.0",
- "@tldraw/store": "4.2.0",
- "@tldraw/tlschema": "4.2.0",
- "@tldraw/utils": "4.2.0",
+ "@tldraw/state": "4.3.0-canary.3811ec6b3dcd",
+ "@tldraw/store": "4.3.0-canary.3811ec6b3dcd",
+ "@tldraw/tlschema": "4.3.0-canary.3811ec6b3dcd",
+ "@tldraw/utils": "4.3.0-canary.3811ec6b3dcd",
  "nanoevents": "^7.0.1",
  "ws": "^8.18.0"
  },
package/src/lib/chunk.ts CHANGED
@@ -88,8 +88,8 @@ export class JsonChunkAssembler {
  *
  * @param msg - The message to process, either JSON or chunk format
  * @returns Result object with data/stringified on success, error object on failure, or null for incomplete chunks
- * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
- * - `\{ error: Error \}` - Parse error or invalid chunk sequence
+ * - `\{ data: object, stringified: string \}` - Successfully parsed complete message
+ * - `\{ error: Error \}` - Parse error or invalid chunk sequence
  * - `null` - Chunk received but more chunks expected
  *
  * @example
@@ -1,7 +1,6 @@
  import {
  Editor,
  PageRecordType,
- TLArrowBinding,
  TLPage,
  TLPageId,
  TLShape,
@@ -41,8 +40,8 @@ export type Op =
  }
  | {
  type: 'create-arrow'
- start: TLArrowBinding | VecModel
- end: TLArrowBinding | VecModel
+ start: VecModel
+ end: VecModel
  }
  | {
  type: 'delete-shape'
@@ -315,8 +314,8 @@ export class FuzzEditor extends RandomSource {
  x: 0,
  y: 0,
  props: {
- start: op.start,
- end: op.end,
+ start: op.start as any,
+ end: op.end as any,
  },
  })
  break
@@ -169,7 +169,7 @@ describe('TLSyncRoom', () => {

  const room = new TLSyncRoom({
  schema,
- snapshot: makeSnapshot([...records, oldArrow], {
+ snapshot: makeSnapshot([...records, oldArrow as any], {
  schema: oldSerializedSchema,
  }),
  })
@@ -1,7 +1,5 @@
  import {
  Editor,
- TLArrowBinding,
- TLArrowShape,
  TLRecord,
  TLStore,
  computed,
@@ -111,9 +109,9 @@ let totalNumShapes = 0
  let totalNumPages = 0

  function arrowsAreSound(editor: Editor) {
- const arrows = editor.getCurrentPageShapes().filter((s): s is TLArrowShape => s.type === 'arrow')
+ const arrows = editor.getCurrentPageShapes().filter((s) => s.type === 'arrow')
  for (const arrow of arrows) {
- const bindings = editor.getBindingsFromShape<TLArrowBinding>(arrow, 'arrow')
+ const bindings = editor.getBindingsFromShape(arrow, 'arrow')
  const terminalsSeen = new Set()
  for (const binding of bindings) {
  if (terminalsSeen.has(binding.props.terminal)) {