@liveblocks/server 1.0.1 → 1.0.3
This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two versions.
- package/dist/index.cjs +228 -937
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +134 -843
- package/dist/index.js.map +1 -1
- package/package.json +2 -1
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../../node_modules/@liveblocks/core/src/version.ts","../../../node_modules/@liveblocks/core/src/dupe-detection.ts","../../../node_modules/@liveblocks/core/src/lib/EventSource.ts","../../../node_modules/@liveblocks/core/src/lib/freeze.ts","../../../node_modules/@liveblocks/core/src/lib/utils.ts","../../../node_modules/@liveblocks/core/src/lib/signals.ts","../../../node_modules/@liveblocks/core/src/lib/SortedList.ts","../../../node_modules/@liveblocks/core/src/AiChatDB.ts","../../../node_modules/@liveblocks/core/src/convert-plain-data.ts","../../../node_modules/@liveblocks/core/src/lib/assert.ts","../../../node_modules/@liveblocks/core/src/lib/fancy-console.ts","../../../node_modules/@liveblocks/core/src/lib/guards.ts","../../../node_modules/@liveblocks/core/src/lib/autoRetry.ts","../../../node_modules/@liveblocks/core/src/lib/controlledPromise.ts","../../../node_modules/@liveblocks/core/src/lib/stringify.ts","../../../node_modules/@liveblocks/core/src/lib/batch.ts","../../../node_modules/@liveblocks/core/src/lib/chunk.ts","../../../node_modules/@liveblocks/core/src/lib/nanoid.ts","../../../node_modules/@liveblocks/core/src/lib/createIds.ts","../../../node_modules/@liveblocks/core/src/lib/DefaultMap.ts","../../../node_modules/@liveblocks/core/src/lib/objectToQuery.ts","../../../node_modules/@liveblocks/core/src/lib/url.ts","../../../node_modules/@liveblocks/core/src/api-client.ts","../../../node_modules/@liveblocks/core/src/lib/fsm.ts","../../../node_modules/@liveblocks/core/src/protocol/ServerMsg.ts","../../../node_modules/@liveblocks/core/src/types/IWebSocket.ts","../../../node_modules/@liveblocks/core/src/connection.ts","../../../node_modules/@liveblocks/core/src/internal.ts","../../../node_modules/@liveblocks/core/src/lib/IncrementalJsonParser.ts","../../../node_modules/@liveblocks/core/src/lib/shallow.ts","../../../node_modules/@liveblocks/core/src/lib/TreePool.ts","../../../node_modules/@liveblocks/core/src/ai.ts","../../../node_modules/@liveblocks/core/src/protocol/AuthToken.ts","../../../node_modules/@liveblocks/core/src/auth-manager.ts","../../../node_modules/@liveblocks/core/src/constants.ts","../../../node_modules/@liveblocks/core/src/devtools/bridge.ts","../../../node_modules/@liveblocks/core/src/devtools/index.ts","../../../node_modules/@liveblocks/core/src/lib/warnings.ts","../../../node_modules/@liveblocks/core/src/protocol/NotificationSettings.ts","../../../node_modules/@liveblocks/core/src/brand.ts","../../../node_modules/@liveblocks/core/src/lib/position.ts","../../../node_modules/@liveblocks/core/src/protocol/Op.ts","../../../node_modules/@liveblocks/core/src/crdts/AbstractCrdt.ts","../../../node_modules/@liveblocks/core/src/protocol/StorageNode.ts","../../../node_modules/@liveblocks/core/src/crdts/LiveRegister.ts","../../../node_modules/@liveblocks/core/src/crdts/LiveList.ts","../../../node_modules/@liveblocks/core/src/crdts/LiveMap.ts","../../../node_modules/@liveblocks/core/src/crdts/LiveObject.ts","../../../node_modules/@liveblocks/core/src/crdts/liveblocks-helpers.ts","../../../node_modules/@liveblocks/core/src/lib/Deque.ts","../../../node_modules/@liveblocks/core/src/lib/Json.ts","../../../node_modules/@liveblocks/core/src/protocol/ClientMsg.ts","../../../node_modules/@liveblocks/core/src/refs/ManagedOthers.ts","../../../node_modules/@liveblocks/core/src/types/LiveblocksError.ts","../../../node_modules/@liveblocks/core/src/room.ts","../../../node_modules/@liveblocks/core/src/client.ts","../../../node_modules/@liveblocks/core/src/comments/co
mment-body.ts","../../../node_modules/@liveblocks/core/src/crdts/utils.ts","../../../node_modules/@liveblocks/core/src/immutable.ts","../../../node_modules/@liveblocks/core/src/lib/abortController.ts","../../../node_modules/@liveblocks/core/src/lib/deprecation.ts","../../../node_modules/@liveblocks/core/src/lib/Poller.ts","../../../node_modules/@liveblocks/core/src/protocol/Subscriptions.ts","../../../node_modules/@liveblocks/core/src/types/Others.ts","../../../node_modules/@liveblocks/core/src/index.ts","../src/decoders/ClientMsg.ts","../src/decoders/jsonYolo.ts","../src/decoders/Op.ts","../src/decoders/y-types.ts","../src/formats/LossyJson.ts","../src/formats/NodeStream.ts","../src/formats/PlainLson.ts","../src/lib/DefaultMap.ts","../src/lib/NestedMap.ts","../src/makeInMemorySnapshot.ts","../src/MetadataDB.ts","../src/protocol/ProtocolVersion.ts","../src/Room.ts","../src/lib/Logger.ts","../src/plugins/InMemoryDriver.ts","../src/lib/text.ts","../src/Storage.ts","../src/YjsStorage.ts","../src/lib/tryCatch.ts","../src/lib/UniqueMap.ts","../src/utils.ts"],"sourcesContent":["declare const __VERSION__: string;\ndeclare const TSUP_FORMAT: string;\n\nexport const PKG_NAME = \"@liveblocks/core\";\nexport const PKG_VERSION = typeof __VERSION__ === \"string\" && __VERSION__;\nexport const PKG_FORMAT = typeof TSUP_FORMAT === \"string\" && TSUP_FORMAT;\n","import { PKG_NAME, PKG_VERSION } from \"./version\";\n\nconst g = (\n typeof globalThis !== \"undefined\"\n ? globalThis\n : typeof window !== \"undefined\"\n ? window\n : typeof global !== \"undefined\"\n ? global\n : {}\n) as { [key: symbol]: string };\n\nconst crossLinkedDocs = \"https://liveblocks.io/docs/errors/cross-linked\";\nconst dupesDocs = \"https://liveblocks.io/docs/errors/dupes\";\nconst SPACE = \" \"; // Important space to make sure links in errors are clickable in all browsers\n\nfunction error(msg: string): void {\n if (process.env.NODE_ENV === \"production\") {\n // eslint-disable-next-line rulesdir/console-must-be-fancy\n console.error(msg);\n } else {\n throw new Error(msg);\n }\n}\n\n/**\n * Throws an error if multiple copies of a Liveblocks package are being loaded\n * at runtime. This likely indicates a packaging issue with the project.\n */\nexport function detectDupes(\n pkgName: string,\n pkgVersion: string | false, // false if not built yet\n pkgFormat: string | false // false if not built yet\n): void {\n const pkgId = Symbol.for(pkgName);\n const pkgBuildInfo = pkgFormat\n ? `${pkgVersion || \"dev\"} (${pkgFormat})`\n : pkgVersion || \"dev\";\n\n if (!g[pkgId]) {\n g[pkgId] = pkgBuildInfo;\n } else if (g[pkgId] === pkgBuildInfo) {\n // Allow it, see https://github.com/liveblocks/liveblocks/pull/1004\n } else {\n const msg = [\n `Multiple copies of Liveblocks are being loaded in your project. This will cause issues! See ${\n dupesDocs + SPACE\n }`,\n \"\",\n \"Conflicts:\",\n `- ${pkgName} ${g[pkgId]} (already loaded)`,\n `- ${pkgName} ${pkgBuildInfo} (trying to load this now)`,\n ].join(\"\\n\");\n error(msg);\n }\n\n if (pkgVersion && PKG_VERSION && pkgVersion !== PKG_VERSION) {\n error(\n [\n `Cross-linked versions of Liveblocks found, which will cause issues! 
See ${\n crossLinkedDocs + SPACE\n }`,\n \"\",\n \"Conflicts:\",\n `- ${PKG_NAME} is at ${PKG_VERSION}`,\n `- ${pkgName} is at ${pkgVersion}`,\n \"\",\n \"Always upgrade all Liveblocks packages to the same version number.\",\n ].join(\"\\n\")\n );\n }\n}\n","export type Callback<T> = (event: T) => void;\nexport type UnsubscribeCallback = () => void;\n\nexport type Observable<T> = {\n /**\n * Register a callback function to be called whenever the event source emits\n * an event.\n */\n subscribe(callback: Callback<T>): UnsubscribeCallback;\n /**\n * Register a one-time callback function to be called whenever the event\n * source emits an event. After the event fires, the callback is\n * auto-unsubscribed.\n */\n subscribeOnce(callback: Callback<T>): UnsubscribeCallback;\n /**\n * Returns a promise that will resolve when an event is emitted by this\n * event source. Optionally, specify a predicate that has to match. The first\n * event matching that predicate will then resolve the promise.\n */\n waitUntil(predicate?: (event: T) => boolean): Promise<T>;\n};\n\nexport type EventSource<T> = Observable<T> & {\n /**\n * Notify all subscribers about the event. Will return `false` if there\n * weren't any subscribers at the time the .notify() was called, or `true` if\n * there was at least one subscriber.\n */\n notify(event: T): boolean;\n /**\n * Returns the number of active subscribers.\n */\n count(): number;\n /**\n * Observable instance, which can be used to subscribe to this event source\n * in a readonly fashion. Safe to publicly expose.\n */\n observable: Observable<T>;\n /**\n * Disposes of this event source.\n *\n * Will clears all registered event listeners. None of the registered\n * functions will ever get called again.\n *\n * WARNING!\n * Be careful when using this API, because the subscribers may not have any\n * idea they won't be notified anymore.\n */\n // NOTE: This can eventually become [Symbol.dispose] when it's widely\n // available in all browsers\n dispose(): void;\n};\n\nexport type BufferableEventSource<T> = EventSource<T> & {\n /**\n * Pauses event delivery until unpaused. Any .notify() calls made while\n * paused will get buffered into memory and emitted later.\n */\n pause(): void;\n /**\n * Emits all in-memory buffered events, and unpauses. 
Any .notify() calls\n * made after this will be synchronously delivered again.\n */\n unpause(): void;\n};\n\nexport type EventEmitter<T> = (event: T) => void;\n\n/**\n * makeEventSource allows you to generate a subscribe/notify pair of functions\n * to make subscribing easy and to get notified about events.\n *\n * The events are anonymous, so you can use it to define events, like so:\n *\n * const event1 = makeEventSource();\n * const event2 = makeEventSource();\n *\n * event1.subscribe(foo);\n * event1.subscribe(bar);\n * event2.subscribe(qux);\n *\n * // Unsubscription is pretty standard\n * const unsub = event2.subscribe(foo);\n * unsub();\n *\n * event1.notify(); // Now foo and bar will get called\n * event2.notify(); // Now qux will get called (but foo will not, since it's unsubscribed)\n *\n */\nexport function makeEventSource<T>(): EventSource<T> {\n const _observers = new Set<Callback<T>>();\n\n function subscribe(callback: Callback<T>): UnsubscribeCallback {\n _observers.add(callback);\n return () => _observers.delete(callback);\n }\n\n function subscribeOnce(callback: Callback<T>): UnsubscribeCallback {\n const unsub = subscribe((event: T) => {\n unsub();\n return callback(event);\n });\n return unsub;\n }\n\n async function waitUntil(predicate?: (event: T) => boolean): Promise<T> {\n let unsub: () => void | undefined;\n return new Promise<T>((res) => {\n unsub = subscribe((event) => {\n if (predicate === undefined || predicate(event)) {\n res(event);\n }\n });\n }).finally(() => unsub?.());\n }\n\n function notify(event: T) {\n let called = false;\n for (const callback of _observers) {\n callback(event);\n called = true;\n }\n return called;\n }\n\n function count() {\n return _observers.size;\n }\n\n return {\n // Private/internal control over event emission\n notify,\n subscribe,\n subscribeOnce,\n count,\n\n waitUntil,\n\n dispose(): void {\n _observers.clear();\n },\n\n // Publicly exposable subscription API\n observable: {\n subscribe,\n subscribeOnce,\n waitUntil,\n },\n };\n}\n\nexport function makeBufferableEventSource<T>(): BufferableEventSource<T> {\n const eventSource = makeEventSource<T>();\n let _buffer: T[] | null = null;\n\n function pause(): void {\n _buffer = [];\n }\n\n function unpause(): void {\n if (_buffer === null) {\n // Already unpaused\n return;\n }\n\n for (const event of _buffer) {\n eventSource.notify(event);\n }\n _buffer = null;\n }\n\n function notifyOrBuffer(event: T) {\n if (_buffer !== null) {\n _buffer.push(event);\n return false;\n } else {\n return eventSource.notify(event);\n }\n }\n\n return {\n ...eventSource,\n notify: notifyOrBuffer,\n pause,\n unpause,\n\n dispose(): void {\n eventSource.dispose();\n if (_buffer !== null) {\n _buffer.length = 0;\n }\n },\n };\n}\n","/**\n * Freezes the given argument, but only in development builds. In production\n * builds, this is a no-op for performance reasons.\n */\nexport const freeze: typeof Object.freeze =\n process.env.NODE_ENV === \"production\"\n ? /* istanbul ignore next */ (((x: unknown) => x) as typeof Object.freeze)\n : Object.freeze;\n","import type { Json } from \"./Json\";\n\ndeclare const brand: unique symbol;\nexport type Brand<T, TBrand extends string> = T & { [brand]: TBrand };\n\nexport type ISODateString = Brand<string, \"ISODateString\">;\n\nexport type DistributiveOmit<T, K extends PropertyKey> = T extends any\n ? Omit<T, K>\n : never;\n\n// export type DistributivePick<T, K extends keyof T> = T extends any\n// ? 
Pick<T, K>\n// : never;\n\nexport type WithRequired<T, K extends keyof T> = T & { [P in K]-?: T[P] };\n\nexport type WithOptional<T, K extends keyof T> = Omit<T, K> & {\n [P in K]?: T[P];\n};\n\n/**\n * Throw an error, but as an expression instead of a statement.\n */\nexport function raise(msg: string): never {\n throw new Error(msg);\n}\n\n/**\n * Drop-in replacement for Object.entries() that retains better types.\n */\nexport function entries<\n O extends { [key: string]: unknown },\n K extends keyof O,\n>(obj: O): [K, O[K]][] {\n return Object.entries(obj) as [K, O[K]][];\n}\n\n/**\n * Drop-in replacement for Object.keys() that retains better types.\n */\nexport function keys<O extends { [key: string]: unknown }, K extends keyof O>(\n obj: O\n): K[] {\n return Object.keys(obj) as K[];\n}\n\n/**\n * Drop-in replacement for Object.values() that retains better types.\n */\nexport function values<O extends Record<string, unknown>>(\n obj: O\n): O[keyof O][] {\n return Object.values(obj) as O[keyof O][];\n}\n\n/**\n * Drop-in replacement for Object.create() that retains better types.\n */\nexport function create<O extends Record<string, unknown>>(\n obj: O | null,\n descriptors?: PropertyDescriptorMap & ThisType<O>\n): O {\n if (typeof descriptors !== \"undefined\") {\n return Object.create(obj, descriptors) as O;\n }\n\n return Object.create(obj) as O;\n}\n\n/**\n * Creates a new object by mapping a function over all values. Keys remain the\n * same. Think Array.prototype.map(), but for values in an object.\n */\nexport function mapValues<V, O extends Record<string, unknown>>(\n obj: O,\n mapFn: (value: O[keyof O], key: keyof O) => V\n): { [K in keyof O]: V } {\n const result = {} as { [K in keyof O]: V };\n for (const pair of Object.entries(obj)) {\n const key: keyof O = pair[0];\n if (key === \"__proto__\") {\n // Avoid setting dangerous __proto__ keys\n continue;\n }\n const value = pair[1] as O[keyof O];\n result[key] = mapFn(value, key);\n }\n return result;\n}\n\n/**\n * Alternative to JSON.parse() that will not throw in production. 
If the passed\n * string cannot be parsed, this will return `undefined`.\n */\nexport function tryParseJson(rawMessage: string): Json | undefined {\n try {\n // eslint-disable-next-line no-restricted-syntax\n return JSON.parse(rawMessage) as Json;\n } catch (e) {\n return undefined;\n }\n}\n\n/**\n * Deep-clones a JSON-serializable value.\n *\n * NOTE: We should be able to replace `deepClone` by `structuredClone` once\n * we've upgraded to Node 18.\n */\nexport function deepClone<T extends Json>(value: T): T {\n // NOTE: In this case, the combination of JSON.parse() and JSON.stringify\n // won't lead to type unsafety, so this use case is okay.\n // eslint-disable-next-line no-restricted-syntax\n return JSON.parse(JSON.stringify(value)) as T;\n}\n\n/**\n * Decode base64 string.\n */\nexport function b64decode(b64value: string): string {\n try {\n const formattedValue = b64value.replace(/-/g, \"+\").replace(/_/g, \"/\");\n const decodedValue = decodeURIComponent(\n atob(formattedValue)\n .split(\"\")\n .map(function (c) {\n return \"%\" + (\"00\" + c.charCodeAt(0).toString(16)).slice(-2);\n })\n .join(\"\")\n );\n\n return decodedValue;\n } catch (err) {\n return atob(b64value);\n }\n}\n\n/**\n * Mutates the array in-place by removing the first occurrence of `item` from\n * the array.\n */\nexport function remove<T>(array: T[], item: T): void {\n for (let i = 0; i < array.length; i++) {\n if (array[i] === item) {\n array.splice(i, 1);\n break;\n }\n }\n}\n\n/**\n * Removes null and undefined values from the array, and reflects this in the\n * output type.\n */\nexport function compact<T>(items: readonly T[]): NonNullable<T>[] {\n return items.filter(\n (item: T): item is NonNullable<T> => item !== null && item !== undefined\n );\n}\n\nexport type RemoveUndefinedValues<T> = {\n [K in keyof T]-?: Exclude<T[K], undefined>;\n};\n\n/**\n * Returns a new object instance where all explictly-undefined values are\n * removed.\n */\nexport function compactObject<O extends Record<string, unknown>>(\n obj: O\n): RemoveUndefinedValues<O> {\n const newObj = { ...obj };\n Object.keys(obj).forEach((k) => {\n const key = k as keyof O;\n if (newObj[key] === undefined) {\n delete newObj[key];\n }\n });\n return newObj as RemoveUndefinedValues<O>;\n}\n\n/**\n * Returns a promise that resolves after the given number of milliseconds.\n */\nexport function wait(millis: number): Promise<void> {\n return new Promise((res) => setTimeout(res, millis));\n}\n\n/**\n * Returns whatever the given promise returns, but will be rejected with\n * a \"Timed out\" error if the given promise does not return or reject within\n * the given timeout period (in milliseconds).\n */\nexport async function withTimeout<T>(\n promise: Promise<T>,\n millis: number,\n errmsg: string\n): Promise<T> {\n let timerID: ReturnType<typeof setTimeout> | undefined;\n const timer$ = new Promise<never>((_, reject) => {\n timerID = setTimeout(() => {\n reject(new Error(errmsg));\n }, millis);\n });\n return (\n Promise\n // Race the given promise against the timer. Whichever one finishes\n // first wins the race.\n .race([promise, timer$])\n\n // Either way, clear the timeout, no matter who won\n .finally(() => clearTimeout(timerID))\n );\n}\n\n/**\n * Memoize a promise factory, so that each subsequent call will return the same\n * pending or success promise. 
If the promise rejects, will retain that failed\n * promise for a small time period, after which the next attempt will reset the\n * memoized value.\n */\nexport function memoizeOnSuccess<T>(\n factoryFn: () => Promise<T>\n): () => Promise<T> {\n let cached: Promise<T> | null = null;\n return () => {\n if (cached === null) {\n cached = factoryFn().catch((err) => {\n //\n // Keep returning the failed promise for any calls to the memoized\n // promise for the next 5 seconds. This time period is a bit arbitrary,\n // but exists to make this play nicely with frameworks like React.\n //\n // In React, after a component is suspended and its promise is\n // rejected, React will re-render the component, and expect the next\n // call to this function to return the rejected promise, so its error\n // can be shown. If we immediately reset this value, then such next\n // render would instantly trigger a new promise which would trigger an\n // infinite loop and keeping the component in loading state forever.\n //\n setTimeout(() => {\n cached = null;\n }, 5_000);\n throw err;\n });\n }\n return cached;\n };\n}\n\n/**\n * Polyfill for Array.prototype.findLastIndex()\n */\nexport function findLastIndex<T>(\n arr: T[],\n predicate: (value: T, index: number, obj: T[]) => boolean\n): number {\n for (let i = arr.length - 1; i >= 0; i--) {\n if (predicate(arr[i], i, arr)) {\n return i;\n }\n }\n return -1;\n}\n\nexport function iso(s: string): ISODateString {\n return new Date(s).toISOString() as ISODateString;\n}\n\nexport function partition<T, N extends T>(\n iterable: Iterable<T>,\n predicate: (item: T, index: number) => item is N\n): [N[], Exclude<T, N>[]];\nexport function partition<T>(\n iterable: Iterable<T>,\n predicate: (item: T, index: number) => boolean\n): [T[], T[]];\nexport function partition<T>(\n iterable: Iterable<T>,\n predicate: (item: T, index: number) => boolean\n): [T[], T[]] {\n const good = [];\n const bad = [];\n\n let index = 0;\n for (const item of iterable) {\n if (predicate(item, index++)) {\n good.push(item);\n } else {\n bad.push(item);\n }\n }\n\n return [good, bad];\n}\n","import type {\n Callback,\n EventSource,\n Observable,\n UnsubscribeCallback,\n} from \"../lib/EventSource\";\nimport { makeEventSource } from \"../lib/EventSource\";\nimport { freeze } from \"../lib/freeze\";\nimport type { JsonObject } from \"../lib/Json\";\nimport { compactObject, raise } from \"../lib/utils\";\n\nconst kSinks = Symbol(\"kSinks\");\nconst kTrigger = Symbol(\"kTrigger\");\n\n//\n// Before the batch is run, all sinks (recursively all the way down) are marked\n// dirty. This already is enough if we only ever use .get() calls.\n//\n// However, to ensure active subscription notifications also work, we need to\n// keep track of which Signals to notify. 
Any time the value of a Signal\n// changes, the Signal itself will notify its own subscribers, but \"sinks\" are\n// not \"normal\" subscribers.\n//\n// By treating sinks slightly differently, we can keep track of sink uniqueness\n// across the entire signal network, ensuring a sink will only be notified once\n// if more than one of its dependent Signals have changed.\n//\n// For example:\n//\n// A\n// / \\\n// B C\n// \\ /\n// D - - - - ( has 1 normal subscriber )\n//\n// Here, B and C are sinks of A, and D is a sink of both B and C.\n//\n// Here's what will happen when A changes:\n//\n// - If A changes, then all sinks (B, C, and D) will be marked dirty.\n//\n// - Because some of A's sinks are being watched (in this case, D has at least\n// one subscriber), A will trigger B and C that its value has changed.\n//\n// - Both B and C re-evaluate and may or may not have changed. Three\n// possibilities:\n// 1. Neither B and C have changed → D will *NOT* be triggered\n// 2. Either B or C has changed → D *will* be triggered\n// 3. Both B and C have changed → D *will* be triggered (but only once!)\n//\n// - If in the previous step D has been triggered, it will re-evaluate. If it\n// has changed itself, it will notify its normal subscriber.\n//\nlet signalsToTrigger: Set<AbstractSignal<any>> | null = null;\n\n//\n// If a derived signal is currently being computed, there is a global \"signals\n// that have been read\" registry that every call to `someSignal.get()` will\n// register itself under.\n//\nlet trackedReads: Set<AbstractSignal<any>> | null = null;\n\n/**\n * Runs a callback function that is allowed to change multiple signals. At the\n * end of the batch, all changed signals will be notified (at most once).\n *\n * Nesting batches is supported.\n */\nexport function batch(callback: Callback<void>): void {\n if (signalsToTrigger !== null) {\n // Already inside another batch, just run this inner callback\n callback();\n return;\n }\n\n signalsToTrigger = new Set();\n try {\n callback();\n } finally {\n for (const signal of signalsToTrigger) {\n signal[kTrigger]();\n }\n signalsToTrigger = null;\n }\n}\n\n/**\n * Ensures that the signal will be notified at the end of the current batch.\n * This should only be called within a batch callback. It's safe to call this\n * while notifications are being rolled out.\n */\nfunction enqueueTrigger(signal: AbstractSignal<any>) {\n if (!signalsToTrigger) raise(\"Expected to be in an active batch\");\n signalsToTrigger.add(signal);\n}\n\n/**\n * Patches a target object by \"merging in\" the provided fields. Patch\n * fields that are explicitly-undefined will delete keys from the target\n * object. Will return a new object.\n *\n * Important guarantee:\n * If the patch effectively did not mutate the target object because the\n * patch fields have the same value as the original, then the original\n * object reference will be returned.\n */\nexport function merge<T>(target: T, patch: Partial<T>): T {\n let updated = false;\n const newValue = { ...target };\n\n Object.keys(patch).forEach((k) => {\n const key = k as keyof T;\n const val = patch[key];\n if (newValue[key] !== val) {\n if (val === undefined) {\n delete newValue[key];\n } else {\n newValue[key] = val as T[keyof T];\n }\n updated = true;\n }\n });\n\n return updated ? newValue : target;\n}\n\nexport type SignalType<S extends ISignal<any>> =\n S extends ISignal<infer T> ? 
T : never;\n\nexport interface ISignal<T> {\n get(): T;\n subscribe(callback: Callback<void>): UnsubscribeCallback;\n addSink(sink: DerivedSignal<unknown>): void;\n removeSink(sink: DerivedSignal<unknown>): void;\n}\n\n/**\n * Base functionality every Signal implementation needs.\n */\nabstract class AbstractSignal<T> implements ISignal<T>, Observable<void> {\n /** @internal */\n protected readonly equals: (a: T, b: T) => boolean;\n readonly #eventSource: EventSource<void>;\n /** @internal */\n public readonly [kSinks]: Set<DerivedSignal<unknown>>;\n\n constructor(equals?: (a: T, b: T) => boolean) {\n this.equals = equals ?? Object.is;\n this.#eventSource = makeEventSource<void>();\n this[kSinks] = new Set();\n\n // Bind common methods to self\n this.get = this.get.bind(this);\n this.subscribe = this.subscribe.bind(this);\n this.subscribeOnce = this.subscribeOnce.bind(this);\n }\n\n dispose(): void {\n this.#eventSource.dispose();\n\n // @ts-expect-error make disposed object completely unusable\n this.#eventSource = \"(disposed)\";\n // @ts-expect-error make disposed object completely unusable\n this.equals = \"(disposed)\";\n }\n\n // Concrete subclasses implement this method in different ways\n abstract get(): T;\n\n get hasWatchers(): boolean {\n if (this.#eventSource.count() > 0) return true;\n\n for (const sink of this[kSinks]) {\n if (sink.hasWatchers) {\n return true;\n }\n }\n\n return false;\n }\n\n public [kTrigger](): void {\n this.#eventSource.notify();\n\n // While Signals are being triggered in the current unroll, we can enqueue\n // more signals to trigger (which will get added to the current unroll)\n for (const sink of this[kSinks]) {\n enqueueTrigger(sink);\n }\n }\n\n subscribe(callback: Callback<void>): UnsubscribeCallback {\n // If this is the first subscriber, we need to perform an initial .get()\n // now in case this is a DerivedSignal that has not been evaluated yet. 
The\n // reason we need to do this is that the .get() itself will register this\n // signal as sinks of the dependent signals, so we will actually get\n // notified here when one of the dependent signals changes.\n if (this.#eventSource.count() === 0) {\n this.get();\n }\n return this.#eventSource.subscribe(callback);\n }\n\n subscribeOnce(callback: Callback<void>): UnsubscribeCallback {\n const unsub = this.subscribe(() => {\n unsub();\n return callback();\n });\n return unsub;\n }\n\n waitUntil(): never {\n throw new Error(\"waitUntil not supported on Signals\");\n }\n\n markSinksDirty(): void {\n for (const sink of this[kSinks]) {\n sink.markDirty();\n }\n }\n\n addSink(sink: DerivedSignal<unknown>): void {\n this[kSinks].add(sink);\n }\n\n removeSink(sink: DerivedSignal<unknown>): void {\n this[kSinks].delete(sink);\n }\n\n asReadonly(): ISignal<T> {\n return this;\n }\n}\n\n// NOTE: This class is pretty similar to the Signal.State proposal\nexport class Signal<T> extends AbstractSignal<T> {\n #value: T;\n\n constructor(value: T, equals?: (a: T, b: T) => boolean) {\n super(equals);\n this.#value = freeze(value);\n }\n\n dispose(): void {\n super.dispose();\n // @ts-expect-error make disposed object completely unusable\n this.#value = \"(disposed)\";\n }\n\n get(): T {\n trackedReads?.add(this);\n return this.#value;\n }\n\n set(newValue: T | ((oldValue: T) => T)): void {\n batch(() => {\n if (typeof newValue === \"function\") {\n newValue = (newValue as (oldValue: T) => T)(this.#value);\n }\n if (!this.equals(this.#value, newValue)) {\n this.#value = freeze(newValue);\n this.markSinksDirty();\n enqueueTrigger(this);\n }\n });\n }\n}\n\nexport class PatchableSignal<J extends JsonObject> extends Signal<J> {\n constructor(data: J) {\n super(freeze(compactObject(data)));\n }\n\n set(): void {\n throw new Error(\"Don't call .set() directly, use .patch()\");\n }\n\n /**\n * Patches the current object.\n */\n patch(patch: Partial<J>): void {\n super.set((old) => merge(old, patch));\n }\n}\n\n/**\n * Placeholder for a deferred computation that has yet to happen on-demand in\n * the future.\n */\nconst INITIAL = Symbol();\n\n// NOTE: This class is pretty similar to the Signal.Computed proposal\nexport class DerivedSignal<T> extends AbstractSignal<T> {\n #prevValue: T;\n #dirty: boolean; // When true, the value in #value may not be up-to-date and needs re-checking\n\n #sources: Set<ISignal<unknown>>;\n readonly #deps: readonly ISignal<unknown>[];\n readonly #transform: (...values: unknown[]) => T;\n\n // Overload 1\n static from<Ts extends unknown[], V>(...args: [...signals: { [K in keyof Ts]: ISignal<Ts[K]> }, transform: (...values: Ts) => V]): DerivedSignal<V>; // prettier-ignore\n // Overload 2\n static from<Ts extends unknown[], V>(...args: [...signals: { [K in keyof Ts]: ISignal<Ts[K]> }, transform: (...values: Ts) => V, equals: (a: V, b: V) => boolean]): DerivedSignal<V>; // prettier-ignore\n static from<Ts extends unknown[], V>(\n // prettier-ignore\n ...args: [\n ...signals: { [K in keyof Ts]: ISignal<Ts[K]> },\n transform: (...values: Ts) => V,\n equals?: (a: V, b: V) => boolean,\n ]\n ): DerivedSignal<V> {\n const last = args.pop();\n if (typeof last !== \"function\")\n raise(\"Invalid .from() call, last argument expected to be a function\");\n\n if (typeof args[args.length - 1] === \"function\") {\n // Overload 2\n const equals = last as (a: V, b: V) => boolean;\n const transform = args.pop() as (...values: unknown[]) => V;\n return new DerivedSignal(args as ISignal<unknown>[], 
transform, equals);\n } else {\n // Overload 1\n const transform = last as (...values: unknown[]) => V;\n return new DerivedSignal(args as ISignal<unknown>[], transform);\n }\n }\n\n private constructor(\n deps: ISignal<unknown>[],\n transform: (...values: unknown[]) => T,\n equals?: (a: T, b: T) => boolean\n ) {\n super(equals);\n this.#dirty = true;\n this.#prevValue = INITIAL as unknown as T;\n this.#deps = deps;\n this.#sources = new Set();\n this.#transform = transform;\n }\n\n dispose(): void {\n for (const src of this.#sources) {\n src.removeSink(this as DerivedSignal<unknown>);\n }\n\n // @ts-expect-error make disposed object completely unusable\n this.#prevValue = \"(disposed)\";\n // @ts-expect-error make disposed object completely unusable\n this.#sources = \"(disposed)\";\n // @ts-expect-error make disposed object completely unusable\n this.#deps = \"(disposed)\";\n // @ts-expect-error make disposed object completely unusable\n this.#transform = \"(disposed)\";\n }\n\n get isDirty(): boolean {\n return this.#dirty;\n }\n\n #recompute(): boolean {\n const oldTrackedReads = trackedReads;\n\n let derived;\n trackedReads = new Set();\n try {\n derived = this.#transform(...this.#deps.map((p) => p.get()));\n } finally {\n const oldSources = this.#sources;\n this.#sources = new Set();\n\n for (const sig of trackedReads) {\n this.#sources.add(sig);\n oldSources.delete(sig);\n }\n\n for (const oldSource of oldSources) {\n oldSource.removeSink(this as DerivedSignal<unknown>);\n }\n for (const newSource of this.#sources) {\n newSource.addSink(this as DerivedSignal<unknown>);\n }\n\n trackedReads = oldTrackedReads;\n }\n\n this.#dirty = false;\n\n // Only emit a change to watchers if the value actually changed\n if (!this.equals(this.#prevValue, derived)) {\n this.#prevValue = derived;\n return true;\n }\n return false;\n }\n\n markDirty(): void {\n if (!this.#dirty) {\n this.#dirty = true;\n this.markSinksDirty();\n }\n }\n\n get(): T {\n if (this.#dirty) {\n this.#recompute();\n }\n trackedReads?.add(this);\n return this.#prevValue;\n }\n\n /**\n * Called by the Signal system if one or more of the dependent signals have\n * changed. In the case of a DerivedSignal, we'll only want to re-evaluate\n * the actual value if it's being watched, or any of their sinks are being\n * watched actively.\n */\n public [kTrigger](): void {\n if (!this.hasWatchers) {\n // If there are no watchers for this signal, we don't need to\n // re-evaluate. We can postpone re-evaluation until the next .get() call.\n return;\n }\n\n // Re-evaluate the current derived signal's value and if needed,\n // notify sinks. 
At this point, all sinks should already have been\n // marked dirty, so we won't have to do that again here now.\n const updated = this.#recompute();\n if (updated) {\n super[kTrigger](); // Actually notify subscribers\n }\n }\n}\n\n/**\n * A MutableSignal is a bit like Signal, except its state is managed by\n * a single value whose reference does not change but is mutated.\n *\n * Similar to how useSyncExternalState() works in React, there is a way to read\n * the current state at any point in time synchronously, and a way to update\n * its reference.\n */\nexport class MutableSignal<T extends object> extends AbstractSignal<T> {\n readonly #state: T;\n\n constructor(initialState: T) {\n super();\n this.#state = initialState;\n }\n\n dispose(): void {\n super.dispose();\n // @ts-expect-error make disposed object completely unusable\n this.#state = \"(disposed)\";\n }\n\n get(): T {\n trackedReads?.add(this);\n return this.#state;\n }\n\n /**\n * Invokes a callback function that is allowed to mutate the given state\n * value. Do not change the value outside of the callback.\n *\n * If the callback explicitly returns `false`, it's assumed that the state\n * was not changed.\n */\n mutate(callback?: (state: T) => void | boolean): void {\n batch(() => {\n const result = callback ? callback(this.#state) : true;\n if (result !== null && typeof result === \"object\" && \"then\" in result) {\n raise(\"MutableSignal.mutate() does not support async callbacks\");\n }\n\n if (result !== false) {\n this.markSinksDirty();\n enqueueTrigger(this);\n }\n });\n }\n}\n","/**\n * Returns the insertion index for the given item.\n */\nfunction bisectRight<T>(arr: readonly T[], x: T, lt: (a: T, b: T) => boolean) {\n let lo = 0;\n let hi = arr.length;\n while (lo < hi) {\n const mid = lo + ((hi - lo) >> 1); // equiv of Math.floor((lo + hi) / 2)\n if (lt(x, arr[mid])) {\n hi = mid;\n } else {\n lo = mid + 1;\n }\n }\n return lo;\n}\n\n/**\n * A datastructure to keep elements in ascending order, as defined by the \"less\n * than\" function you provide. 
The elements will be ordered according to\n * whatever you define as the \"less than\" for this element type, so that every\n * element is less than its successor in the list.\n *\n * const sorted = SortedList.from(\n * [{ id: 4 }, { id: 1 }, { id: 9 }, { id: 4 }],\n * (a, b) => a.id < b.id)\n * )\n * sorted.add({ id: 5 })\n * sorted.remove({ id: 4 }) // Assuming it's the same obj ref!\n *\n * Array.from(sorted)\n * [{ id: 1 }, { id: 4 }, { id: 5 }, { id: 9 }])\n */\nexport class SortedList<T> {\n #data: T[];\n #lt: (a: T, b: T) => boolean;\n\n private constructor(alreadySortedList: T[], lt: (a: T, b: T) => boolean) {\n this.#lt = lt;\n this.#data = alreadySortedList;\n }\n\n /**\n * Creates an empty SortedList with the given \"less than\" function.\n */\n public static with<T>(lt: (a: T, b: T) => boolean): SortedList<T> {\n return SortedList.fromAlreadySorted([], lt);\n }\n\n public static from<T>(\n arr: readonly T[],\n lt: (a: T, b: T) => boolean\n ): SortedList<T> {\n const sorted = new SortedList([], lt);\n for (const item of arr) {\n sorted.add(item);\n }\n return sorted;\n }\n\n public static fromAlreadySorted<T>(\n alreadySorted: T[],\n lt: (a: T, b: T) => boolean\n ): SortedList<T> {\n return new SortedList(alreadySorted, lt);\n }\n\n /**\n * Clones the sorted list to a new instance.\n */\n public clone(): SortedList<T> {\n return new SortedList(this.#data.slice(), this.#lt);\n }\n\n /**\n * Adds a new item to the sorted list, such that it remains sorted.\n * Returns the index where the item was inserted.\n */\n add(value: T): number {\n const idx = bisectRight(this.#data, value, this.#lt);\n this.#data.splice(idx, 0, value);\n return idx;\n }\n\n /**\n * Removes all values from the sorted list, making it empty again.\n * Returns whether the list was mutated or not.\n */\n clear(): boolean {\n const hadData = this.#data.length > 0;\n this.#data.length = 0;\n return hadData;\n }\n\n /**\n * Removes the first value matching the predicate.\n * Returns whether the list was mutated or not.\n */\n removeBy(\n predicate: (item: T) => boolean,\n limit: number = Number.POSITIVE_INFINITY\n ): boolean {\n let deleted = 0;\n for (let i = 0; i < this.#data.length; i++) {\n if (predicate(this.#data[i])) {\n this.#data.splice(i, 1);\n deleted++;\n if (deleted >= limit) {\n break;\n } else {\n i--;\n }\n }\n }\n return deleted > 0;\n }\n\n /**\n * Removes the given value from the sorted list, if it exists. The given\n * value must be `===` to one of the list items. Only the first entry will be\n * removed if the element exists in the sorted list multiple times.\n *\n * Returns whether the list was mutated or not.\n */\n remove(value: T): boolean {\n const idx = this.#data.indexOf(value);\n if (idx >= 0) {\n this.#data.splice(idx, 1);\n return true;\n }\n return false;\n }\n\n /**\n * Removes the item at the given index.\n * Returns the removed item, or undefined if index is out of bounds.\n */\n removeAt(index: number): T | undefined {\n if (index < 0 || index >= this.#data.length) {\n return undefined;\n }\n const [removed] = this.#data.splice(index, 1);\n return removed;\n }\n\n /**\n * Repositions an item to maintain sorted order after its sort key has\n * been mutated in-place. For example:\n *\n * const item = sorted.at(3);\n * item.updatedAt = new Date(); // mutate the item's sort key in-place\n * sorted.reposition(item); // restore sorted order\n *\n * Returns the new index of the item. 
Throws if the item is not in the list.\n *\n * Semantically equivalent to remove(value) + add(value), but optimized\n * to avoid array shifting when the item only moves a short distance.\n */\n reposition(value: T): number {\n const oldIdx = this.#data.indexOf(value);\n if (oldIdx < 0) {\n throw new Error(\"Cannot reposition item that is not in the list\");\n }\n\n // Quick check: if already in valid position, no need to move.\n // Valid means: prev < value < next (matching bisectRight insertion point)\n const prev = this.#data[oldIdx - 1];\n const next = this.#data[oldIdx + 1];\n const validLeft = prev === undefined || this.#lt(prev, value);\n const validRight = next === undefined || this.#lt(value, next);\n if (validLeft && validRight) {\n return oldIdx;\n }\n\n let newIdx = oldIdx;\n\n // Try moving left (value < prev means we're out of order on the left)\n while (newIdx > 0 && this.#lt(value, this.#data[newIdx - 1])) {\n this.#data[newIdx] = this.#data[newIdx - 1];\n newIdx--;\n }\n\n if (newIdx < oldIdx) {\n this.#data[newIdx] = value;\n return newIdx;\n }\n\n // Try moving right (next <= value means we need to move past it)\n while (\n newIdx < this.#data.length - 1 &&\n !this.#lt(value, this.#data[newIdx + 1])\n ) {\n this.#data[newIdx] = this.#data[newIdx + 1];\n newIdx++;\n }\n\n if (newIdx !== oldIdx) {\n this.#data[newIdx] = value;\n }\n\n return newIdx;\n }\n\n at(index: number): T | undefined {\n return this.#data[index];\n }\n\n get length(): number {\n return this.#data.length;\n }\n\n *filter(predicate: (value: T) => boolean): IterableIterator<T> {\n for (const item of this.#data) {\n if (predicate(item)) {\n yield item;\n }\n }\n }\n\n // XXXX If we keep this, add unit tests. Or remove it.\n *findAllRight(\n predicate: (value: T, index: number) => unknown\n ): IterableIterator<T> {\n for (let i = this.#data.length - 1; i >= 0; i--) {\n const item = this.#data[i];\n if (predicate(item, i)) {\n yield item;\n }\n }\n }\n\n [Symbol.iterator](): IterableIterator<T> {\n return this.#data[Symbol.iterator]();\n }\n\n *iterReversed(): IterableIterator<T> {\n for (let i = this.#data.length - 1; i >= 0; i--) {\n yield this.#data[i];\n }\n }\n\n /** Finds the leftmost item that matches the predicate. */\n find(\n predicate: (value: T, index: number) => unknown,\n start?: number\n ): T | undefined {\n const idx = this.findIndex(predicate, start);\n return idx > -1 ? this.#data.at(idx)! : undefined; // eslint-disable-line no-restricted-syntax\n }\n\n /** Finds the leftmost index that matches the predicate. */\n findIndex(\n predicate: (value: T, index: number) => unknown,\n start = 0\n ): number {\n for (let i = Math.max(0, start); i < this.#data.length; i++) {\n if (predicate(this.#data[i], i)) {\n return i;\n }\n }\n return -1;\n }\n\n /** Finds the rightmost item that matches the predicate. */\n findRight(\n predicate: (value: T, index: number) => unknown,\n start?: number\n ): T | undefined {\n const idx = this.findIndexRight(predicate, start);\n return idx > -1 ? this.#data.at(idx)! : undefined; // eslint-disable-line no-restricted-syntax\n }\n\n /** Finds the rightmost index that matches the predicate. 
*/\n findIndexRight(\n predicate: (value: T, index: number) => unknown,\n start = this.#data.length - 1\n ): number {\n for (let i = Math.min(start, this.#data.length - 1); i >= 0; i--) {\n if (predicate(this.#data[i], i)) {\n return i;\n }\n }\n return -1;\n }\n\n get rawArray(): readonly T[] {\n return this.#data;\n }\n}\n","import { MutableSignal } from \"./lib/signals\";\nimport { SortedList } from \"./lib/SortedList\";\nimport type { ISODateString } from \"./lib/utils\";\nimport type { AiChat, AiChatsQuery } from \"./types/ai\";\n\nexport class AiChatDB {\n #byId: Map<string, AiChat>; // A map of chat id to chat details\n #chats: SortedList<Omit<AiChat, \"deletedAt\">>; // Sorted list of non-deleted chats, most recent first\n\n public readonly signal: MutableSignal<this>;\n\n constructor() {\n this.#byId = new Map();\n this.#chats = SortedList.from<Omit<AiChat, \"deletedAt\">>([], (c1, c2) => {\n // Sort by 'lastMessageAt' if available, otherwise 'createdAt' (most recent first)\n const d2 = c2.lastMessageAt ?? c2.createdAt;\n const d1 = c1.lastMessageAt ?? c1.createdAt;\n return d2 < d1 ? true : d2 === d1 ? c2.id < c1.id : false;\n });\n\n this.signal = new MutableSignal(this);\n }\n\n public getEvenIfDeleted(chatId: string): AiChat | undefined {\n this.signal.get(); // Access signal to establish dependency tracking for any derived signals\n return this.#byId.get(chatId);\n }\n\n public markDeleted(chatId: string): void {\n const chat = this.#byId.get(chatId);\n if (chat === undefined || chat.deletedAt !== undefined) return;\n this.upsert({\n ...chat,\n deletedAt: new Date().toISOString() as ISODateString,\n });\n }\n\n public upsert(chat: AiChat): void {\n this.signal.mutate(() => {\n // If the chat already exists, remove it before deciding whether to add the incoming one\n const existingThread = this.#byId.get(chat.id);\n if (existingThread !== undefined) {\n if (existingThread.deletedAt !== undefined) return false;\n\n this.#chats.remove(existingThread);\n this.#byId.delete(existingThread.id);\n }\n\n // We only add non-deleted chats to the chat list\n if (chat.deletedAt === undefined) {\n this.#chats.add(chat);\n }\n this.#byId.set(chat.id, chat);\n return true;\n });\n }\n\n public findMany(query: AiChatsQuery): Omit<AiChat, \"deletedAt\">[] {\n return Array.from(\n this.#chats.filter((chat) => {\n // If metadata query is not provided, include all chats\n if (query.metadata === undefined) return true;\n\n for (const [key, value] of Object.entries(query.metadata)) {\n // If the value is null, check that the key does not exist in the chat's metadata\n if (value === null) {\n if (key in chat.metadata) return false;\n }\n // If the metadata key is a string, check for an exact match against the chat's metadata\n else if (typeof value === \"string\") {\n if (chat.metadata[key] !== value) return false;\n }\n // If the metadata key is an array, ensure all values are present in the chat's metadata array\n else {\n const chatValue = chat.metadata[key];\n if (\n !Array.isArray(chatValue) ||\n !value.every((v) => chatValue.includes(v))\n ) {\n return false;\n }\n }\n }\n\n return true;\n })\n );\n }\n}\n","import type {\n BaseMetadata,\n CommentData,\n CommentDataPlain,\n CommentUserReaction,\n CommentUserReactionPlain,\n ThreadData,\n ThreadDataPlain,\n ThreadDeleteInfo,\n ThreadDeleteInfoPlain,\n} from \"./protocol/Comments\";\nimport type { GroupData, GroupDataPlain } from \"./protocol/Groups\";\nimport type {\n InboxNotificationData,\n InboxNotificationDataPlain,\n 
InboxNotificationDeleteInfo,\n InboxNotificationDeleteInfoPlain,\n} from \"./protocol/InboxNotifications\";\nimport type {\n SubscriptionData,\n SubscriptionDataPlain,\n SubscriptionDeleteInfo,\n SubscriptionDeleteInfoPlain,\n UserSubscriptionData,\n UserSubscriptionDataPlain,\n} from \"./protocol/Subscriptions\";\n\n/**\n * Converts a plain comment data object (usually returned by the API) to a comment data object that can be used by the client.\n * This is necessary because the plain data object stores dates as ISO strings, but the client expects them as Date objects.\n * @param data The plain comment data object (usually returned by the API)\n * @returns The rich comment data object that can be used by the client.\n */\nexport function convertToCommentData<CM extends BaseMetadata>(\n data: CommentDataPlain<CM>\n): CommentData<CM> {\n const editedAt = data.editedAt ? new Date(data.editedAt) : undefined;\n const createdAt = new Date(data.createdAt);\n const reactions = data.reactions.map((reaction) => ({\n ...reaction,\n createdAt: new Date(reaction.createdAt),\n }));\n\n if (data.body) {\n return {\n ...data,\n reactions,\n createdAt,\n editedAt,\n };\n } else {\n const deletedAt = new Date(data.deletedAt);\n return {\n ...data,\n reactions,\n createdAt,\n editedAt,\n deletedAt,\n };\n }\n}\n\n/**\n * Converts a plain thread data object (usually returned by the API) to a thread data object that can be used by the client.\n * This is necessary because the plain data object stores dates as ISO strings, but the client expects them as Date objects.\n * @param data The plain thread data object (usually returned by the API)\n * @returns The rich thread data object that can be used by the client.\n */\nexport function convertToThreadData<\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n>(data: ThreadDataPlain<TM, CM>): ThreadData<TM, CM> {\n const createdAt = new Date(data.createdAt);\n const updatedAt = new Date(data.updatedAt);\n\n const comments = data.comments.map((comment) =>\n convertToCommentData(comment)\n );\n\n return {\n ...data,\n createdAt,\n updatedAt,\n comments,\n };\n}\n\n/**\n * Converts a plain comment reaction object (usually returned by the API) to a comment reaction object that can be used by the client.\n * This is necessary because the plain data object stores dates as ISO strings, but the client expects them as Date objects.\n * @param data The plain comment reaction object (usually returned by the API)\n * @returns The rich comment reaction object that can be used by the client.\n */\nexport function convertToCommentUserReaction(\n data: CommentUserReactionPlain\n): CommentUserReaction {\n return {\n ...data,\n createdAt: new Date(data.createdAt),\n };\n}\n\n/**\n * Converts a plain inbox notification data object (usually returned by the API) to an inbox notification data object that can be used by the client.\n * This is necessary because the plain data object stores dates as ISO strings, but the client expects them as Date objects.\n * @param data The plain inbox notification data object (usually returned by the API)\n * @returns The rich inbox notification data object that can be used by the client.\n */\nexport function convertToInboxNotificationData(\n data: InboxNotificationDataPlain\n): InboxNotificationData {\n const notifiedAt = new Date(data.notifiedAt);\n const readAt = data.readAt ? 
new Date(data.readAt) : null;\n\n if (\"activities\" in data) {\n const activities = data.activities.map((activity) => ({\n ...activity,\n createdAt: new Date(activity.createdAt),\n }));\n\n return {\n ...data,\n notifiedAt,\n readAt,\n activities,\n };\n }\n\n return {\n ...data,\n notifiedAt,\n readAt,\n };\n}\n\n/**\n * Converts a plain subscription data object (usually returned by the API) to a subscription data object that can be used by the client.\n * This is necessary because the plain data object stores dates as ISO strings, but the client expects them as Date objects.\n * @param data The plain subscription data object (usually returned by the API)\n * @returns The rich subscription data object that can be used by the client.\n */\nexport function convertToSubscriptionData(\n data: SubscriptionDataPlain\n): SubscriptionData {\n const createdAt = new Date(data.createdAt);\n\n return {\n ...data,\n createdAt,\n };\n}\n\n/**\n * Converts a plain user subscription data object (usually returned by the API) to a user subscription data object that can be used by the client.\n * This is necessary because the plain data object stores dates as ISO strings, but the client expects them as Date objects.\n * @param data The plain user subscription data object (usually returned by the API)\n * @returns The rich user subscription data object that can be used by the client.\n */\nexport function convertToUserSubscriptionData(\n data: UserSubscriptionDataPlain\n): UserSubscriptionData {\n const createdAt = new Date(data.createdAt);\n\n return {\n ...data,\n createdAt,\n };\n}\n\nexport function convertToThreadDeleteInfo(\n data: ThreadDeleteInfoPlain\n): ThreadDeleteInfo {\n const deletedAt = new Date(data.deletedAt);\n\n return {\n ...data,\n deletedAt,\n };\n}\n\nexport function convertToInboxNotificationDeleteInfo(\n data: InboxNotificationDeleteInfoPlain\n): InboxNotificationDeleteInfo {\n const deletedAt = new Date(data.deletedAt);\n\n return {\n ...data,\n deletedAt,\n };\n}\n\nexport function convertToSubscriptionDeleteInfo(\n data: SubscriptionDeleteInfoPlain\n): SubscriptionDeleteInfo {\n const deletedAt = new Date(data.deletedAt);\n\n return {\n ...data,\n deletedAt,\n };\n}\n\nexport function convertToGroupData(data: GroupDataPlain): GroupData {\n const createdAt = new Date(data.createdAt);\n const updatedAt = new Date(data.updatedAt);\n const members = data.members.map((member) => ({\n ...member,\n addedAt: new Date(member.addedAt),\n }));\n\n return {\n ...data,\n createdAt,\n updatedAt,\n members,\n };\n}\n","/**\n * Helper function that can be used to implement exhaustive switch statements\n * with TypeScript. Example usage:\n *\n * type Fruit = \"🍎\" | \"🍌\";\n *\n * switch (fruit) {\n * case \"🍎\":\n * case \"🍌\":\n * return doSomething();\n *\n * default:\n * return assertNever(fruit, \"Unknown fruit\");\n * }\n *\n * If now the Fruit union is extended (i.e. add \"🍒\"), TypeScript will catch\n * this *statically*, rather than at runtime, and force you to handle the\n * 🍒 case.\n */\n// istanbul ignore next\nexport function assertNever(_value: never, errmsg: string): never {\n throw new Error(errmsg);\n}\n\n/**\n * Asserts that a certain condition holds. 
If it does not hold, will throw\n * a runtime error in dev mode.\n *\n * In production, nothing is asserted and this acts as a no-op.\n */\nexport function assert(condition: boolean, errmsg: string): asserts condition {\n if (process.env.NODE_ENV !== \"production\") {\n // istanbul ignore if\n if (!condition) {\n const err = new Error(errmsg);\n err.name = \"Assertion failure\";\n throw err;\n }\n }\n}\n\n/**\n * Asserts that a given value is non-nullable. This is similar to TypeScript's\n * `!` operator, but will throw an error at runtime (dev-mode only) indicating\n * an incorrect assumption.\n *\n * Instead of:\n *\n * foo!.bar\n *\n * Use:\n *\n * nn(foo).bar\n *\n */\nexport function nn<T>(\n value: T,\n errmsg: string = \"Expected value to be non-nullable\"\n): NonNullable<T> {\n assert(value !== null && value !== undefined, errmsg);\n return value as NonNullable<T>;\n}\n","/* eslint-disable rulesdir/console-must-be-fancy */\n\nconst badge =\n \"background:#0e0d12;border-radius:9999px;color:#fff;padding:3px 7px;font-family:sans-serif;font-weight:600;\";\nconst bold = \"font-weight:600\";\n\nfunction wrap(\n method: \"log\" | \"warn\" | \"error\"\n): (message: string, ...args: readonly unknown[]) => void {\n return typeof window === \"undefined\" || process.env.NODE_ENV === \"test\"\n ? console[method]\n : /* istanbul ignore next */\n (message, ...args) =>\n console[method](\"%cLiveblocks\", badge, message, ...args);\n}\n\n// export const log = wrap(\"log\");\nexport const warn = wrap(\"warn\");\nexport const error = wrap(\"error\");\n\nfunction wrapWithTitle(\n method: \"log\" | \"warn\" | \"error\"\n): (title: string, message: string, ...args: readonly unknown[]) => void {\n return typeof window === \"undefined\" || process.env.NODE_ENV === \"test\"\n ? 
console[method]\n : /* istanbul ignore next */\n (title, message, ...args) =>\n console[method](\n `%cLiveblocks%c ${title}`,\n badge,\n bold,\n message,\n ...args\n );\n}\n\n// export const logWithTitle = wrapWithTitle(\"log\");\nexport const warnWithTitle = wrapWithTitle(\"warn\");\nexport const errorWithTitle = wrapWithTitle(\"error\");\n","export function isDefined<T>(value: T): value is NonNullable<T> {\n return value !== null && value !== undefined;\n}\n\nexport function isPlainObject(\n blob: unknown\n): blob is { [key: string]: unknown } {\n // Implementation borrowed from pojo decoder, see\n // https://github.com/nvie/decoders/blob/78849f843193647eb6b5307240387bdcff7161fb/src/lib/objects.js#L10-L41\n return (\n blob !== null &&\n typeof blob === \"object\" &&\n Object.prototype.toString.call(blob) === \"[object Object]\"\n );\n}\n\n/**\n * Check if value is of shape { startsWith: string }\n */\nexport function isStartsWithOperator(\n blob: unknown\n): blob is { startsWith: string } {\n return isPlainObject(blob) && typeof blob.startsWith === \"string\";\n}\n\nexport function isNumberOperator(\n blob: unknown\n): blob is { lt?: number; gt?: number; lte?: number; gte?: number } {\n return (\n isPlainObject(blob) &&\n (typeof blob.lt === \"number\" ||\n typeof blob.gt === \"number\" ||\n typeof blob.lte === \"number\" ||\n typeof blob.gte === \"number\")\n );\n}\n","import * as console from \"./fancy-console\";\nimport { isPlainObject } from \"./guards\";\nimport type { JsonObject } from \"./Json\";\nimport { tryParseJson, wait } from \"./utils\";\n\nexport class HttpError extends Error {\n public response: Response;\n public details?: JsonObject;\n\n private constructor(\n message: string,\n response: Response,\n details?: JsonObject\n ) {\n super(message);\n this.name = \"HttpError\";\n this.response = response;\n this.details = details;\n }\n\n static async fromResponse(response: Response): Promise<HttpError> {\n // Try to extract `message` and `details` from the response, and\n // construct the HttpError instance\n let bodyAsText: string | undefined;\n try {\n bodyAsText = await response.text();\n } catch {\n // Ignore\n }\n\n const bodyAsJson = bodyAsText ? tryParseJson(bodyAsText) : undefined;\n\n let bodyAsJsonObject: JsonObject | undefined;\n if (isPlainObject(bodyAsJson)) {\n bodyAsJsonObject = bodyAsJson;\n }\n\n let message = \"\";\n message ||=\n typeof bodyAsJsonObject?.message === \"string\"\n ? bodyAsJsonObject.message\n : \"\";\n message ||=\n typeof bodyAsJsonObject?.error === \"string\" ? bodyAsJsonObject.error : \"\";\n if (bodyAsJson === undefined) {\n message ||= bodyAsText || \"\";\n }\n message ||= response.statusText;\n\n let path: string | undefined;\n try {\n path = new URL(response.url).pathname;\n } catch {\n // Ignore\n }\n message +=\n path !== undefined\n ? ` (got status ${response.status} from ${path})`\n : ` (got status ${response.status})`;\n\n const details = bodyAsJsonObject;\n return new HttpError(message, response, details);\n }\n\n /**\n * Convenience accessor for response.status.\n */\n get status(): number {\n return this.response.status;\n }\n}\n\nconst DONT_RETRY_4XX = (x: unknown) =>\n x instanceof HttpError && x.status >= 400 && x.status < 500;\n\n/**\n * Wraps a promise factory. Will create promises until one succeeds. If\n * a promise rejects, it will retry calling the factory for at most `maxTries`\n * times. Between each attempt, it will inject a a backoff delay (in millis)\n * from the given array. 
If the array contains fewer items then `maxTries`,\n * then the last backoff number will be used indefinitely.\n *\n * If the last attempt is rejected too, the returned promise will fail too.\n *\n * @param promiseFn The promise factory to execute\n * @param maxTries The number of total tries (must be >=1)\n * @param backoff An array of timings to inject between each promise attempt\n * @param shouldStopRetrying An optional function to not auto-retry on certain errors\n */\nexport async function autoRetry<T>(\n promiseFn: () => Promise<T>,\n maxTries: number,\n backoff: number[],\n shouldStopRetrying: (err: any) => boolean = DONT_RETRY_4XX\n): Promise<T> {\n const fallbackBackoff = backoff.length > 0 ? backoff[backoff.length - 1] : 0;\n\n let attempt = 0;\n\n while (true) {\n attempt++;\n\n try {\n return await promiseFn();\n } catch (err) {\n if (shouldStopRetrying(err)) {\n throw err;\n }\n\n if (attempt >= maxTries) {\n // Fail the entire promise right now\n throw new Error(`Failed after ${maxTries} attempts: ${String(err)}`);\n }\n }\n\n // Do another retry\n const delay = backoff[attempt - 1] ?? fallbackBackoff;\n\n console.warn(\n `Attempt ${attempt} was unsuccessful. Retrying in ${delay} milliseconds.`\n );\n await wait(delay);\n }\n}\n","export type ControlledPromise<T> = {\n promise: Promise<T>;\n resolve: (value: T) => void;\n reject: (reason: unknown) => void;\n};\n\n/**\n * Returns a pair of a Promise, and a resolve function that can be passed\n * around to resolve the promise \"from anywhere\".\n *\n * The Promise will remain unresolved, until the resolve function is called.\n * Once the resolve function is called with a value, the Promise will resolve\n * to that value.\n *\n * Calling the resolve function beyond the first time is a no-op.\n */\nexport function controlledPromise<T>(): [\n promise: Promise<T>,\n resolve: (value: T) => void,\n reject: (reason: unknown) => void,\n] {\n let resolve: ((value: T) => void) | undefined;\n let reject: ((reason: unknown) => void) | undefined;\n const promise = new Promise<T>((res, rej) => {\n resolve = res;\n reject = rej;\n });\n // eslint-disable-next-line no-restricted-syntax\n return [promise, resolve!, reject!];\n}\n\n/**\n * Drop-in replacement for the ES2024 Promise.withResolvers() API.\n */\nexport function Promise_withResolvers<T>(): ControlledPromise<T> {\n const [promise, resolve, reject] = controlledPromise<T>();\n return { promise, resolve, reject };\n}\n","/* eslint-disable @typescript-eslint/no-unsafe-member-access */\n/* eslint-disable @typescript-eslint/no-unsafe-assignment */\n/* eslint-disable @typescript-eslint/no-unsafe-argument */\n\nfunction replacer(_key: string, value: unknown) {\n return value !== null && typeof value === \"object\" && !Array.isArray(value)\n ? 
Object.keys(value)\n .sort()\n .reduce((sorted, key) => {\n // @ts-expect-error this is fine\n sorted[key] = value[key];\n return sorted;\n }, {})\n : value;\n}\n\n/**\n * Like JSON.stringify(), but using stable (sorted) object key order, so that\n * it returns the same value for the same keys, no matter their order.\n */\nexport function stableStringify(value: unknown): string {\n return JSON.stringify(value, replacer);\n}\n\n/**\n * Drop-in replacement for JSON.stringify(), which will log any payload to the\n * console if it could not be stringified somehow.\n */\nexport function stringifyOrLog(value: unknown): string {\n try {\n return JSON.stringify(value);\n } catch (err) {\n /* eslint-disable rulesdir/console-must-be-fancy */\n console.error(`Could not stringify: ${(err as Error).message}`);\n console.error(value);\n /* eslint-enable rulesdir/console-must-be-fancy */\n throw err;\n }\n}\n","import type { AsyncResult } from \"./AsyncResult\";\nimport { Promise_withResolvers } from \"./controlledPromise\";\nimport type { Callback, UnsubscribeCallback } from \"./EventSource\";\nimport { MutableSignal } from \"./signals\";\nimport { stableStringify } from \"./stringify\";\n\nconst DEFAULT_SIZE = 50;\n\ntype Resolve<T> = (value: T) => void;\ntype Reject = (reason?: unknown) => void;\n\nexport type BatchCallback<O, I> = (\n inputs: I[]\n) => (O | Error)[] | Promise<(O | Error)[]>;\n\nexport type BatchStore<O, I> = {\n subscribe: (callback: Callback<void>) => UnsubscribeCallback;\n enqueue: (input: I) => Promise<void>;\n setData: (entries: [I, O][]) => void;\n getItemState: (input: I) => AsyncResult<O> | undefined;\n getData: (input: I) => O | undefined;\n invalidate: (inputs?: I[]) => void;\n\n /**\n * @internal\n */\n readonly batch: Batch<O, I>;\n\n /**\n * @internal\n *\n * Only for testing.\n */\n _cacheKeys: () => string[];\n};\n\ninterface BatchOptions {\n /**\n * How many calls to batch together at most.\n */\n size?: number;\n\n /**\n * How long to wait before flushing the batch.\n */\n delay: number;\n}\n\nclass BatchCall<O, I> {\n readonly input: I;\n readonly resolve: Resolve<O>;\n readonly reject: Reject;\n readonly promise: Promise<O>;\n\n constructor(input: I) {\n this.input = input;\n\n const { promise, resolve, reject } = Promise_withResolvers<O>();\n this.promise = promise;\n this.resolve = resolve;\n this.reject = reject;\n }\n}\n\n/**\n * Batch calls to a function, either by number of calls or by a maximum delay.\n */\nexport class Batch<O, I> {\n #queue: BatchCall<O, I>[] = [];\n #callback: BatchCallback<O, I>;\n #size: number;\n #delay: number;\n #delayTimeoutId?: ReturnType<typeof setTimeout>;\n public error = false;\n\n constructor(callback: BatchCallback<O, I>, options: BatchOptions) {\n this.#callback = callback;\n this.#size = options.size ?? 
DEFAULT_SIZE;\n this.#delay = options.delay;\n }\n\n #clearDelayTimeout(): void {\n if (this.#delayTimeoutId !== undefined) {\n clearTimeout(this.#delayTimeoutId);\n this.#delayTimeoutId = undefined;\n }\n }\n\n #schedule() {\n if (this.#queue.length === this.#size) {\n // If the queue is full, flush it immediately.\n void this.#flush();\n } else if (this.#queue.length === 1) {\n // If the call is the first in the queue, schedule a flush.\n this.#clearDelayTimeout();\n this.#delayTimeoutId = setTimeout(() => void this.#flush(), this.#delay);\n }\n }\n\n async #flush(): Promise<void> {\n // If the queue is empty, don't call the callback.\n if (this.#queue.length === 0) {\n return;\n }\n\n // Empty the queue and get its calls.\n const calls = this.#queue.splice(0);\n const inputs = calls.map((call) => call.input);\n\n try {\n // Call the batch callback with the queued arguments.\n const results = await this.#callback(inputs);\n this.error = false;\n\n // Resolve or reject each call.\n calls.forEach((call, index) => {\n const result = results?.[index];\n\n if (!Array.isArray(results)) {\n call.reject(new Error(\"Callback must return an array.\"));\n } else if (calls.length !== results.length) {\n call.reject(\n new Error(\n `Callback must return an array of the same length as the number of provided items. Expected ${calls.length}, but got ${results.length}.`\n )\n );\n } else if (result instanceof Error) {\n call.reject(result);\n } else {\n call.resolve(result);\n }\n });\n } catch (error) {\n this.error = true;\n\n // Reject all calls if the whole batch errored or was rejected.\n calls.forEach((call) => {\n call.reject(error);\n });\n }\n }\n\n get(input: I): Promise<O> {\n // Check if there's already an identical call in the queue.\n const existingCall = this.#queue.find(\n (call) => stableStringify(call.input) === stableStringify(input)\n );\n\n // If an existing call exists, return its promise.\n if (existingCall) {\n return existingCall.promise;\n }\n\n // If no existing call exists, add the call to the queue and schedule a flush.\n const call = new BatchCall<O, I>(input);\n this.#queue.push(call);\n this.#schedule();\n\n return call.promise;\n }\n\n clear(): void {\n this.#queue = [];\n this.error = false;\n this.#clearDelayTimeout();\n }\n}\n\n/**\n * Create a store around a Batch.\n * Each call will be cached and get its own state in addition to being batched.\n */\nexport function createBatchStore<O, I>(batch: Batch<O, I>): BatchStore<O, I> {\n const signal = new MutableSignal(new Map<string, AsyncResult<O>>());\n\n function getCacheKey(args: I): string {\n return stableStringify(args);\n }\n\n function update(\n entryOrEntries:\n | {\n key: string;\n state: AsyncResult<O>;\n }\n | {\n key: string;\n state: AsyncResult<O>;\n }[]\n ) {\n signal.mutate((cache) => {\n if (Array.isArray(entryOrEntries)) {\n for (const entry of entryOrEntries) {\n cache.set(entry.key, entry.state);\n }\n } else {\n cache.set(entryOrEntries.key, entryOrEntries.state);\n }\n });\n }\n\n function invalidate(inputs?: I[]): void {\n signal.mutate((cache) => {\n if (Array.isArray(inputs)) {\n // Invalidate the specific calls.\n for (const input of inputs) {\n cache.delete(getCacheKey(input));\n }\n } else {\n // Invalidate all calls.\n cache.clear();\n }\n });\n }\n\n async function enqueue(input: I): Promise<void> {\n const cacheKey = getCacheKey(input);\n\n // If this call already has a state, return early.\n const cache = signal.get();\n if (cache.has(cacheKey)) {\n return;\n }\n\n try {\n // Set the state 
to loading.\n update({ key: cacheKey, state: { isLoading: true } });\n\n // Wait for the batch to process this call.\n const result = await batch.get(input);\n\n // Set the state to the result.\n update({ key: cacheKey, state: { isLoading: false, data: result } });\n } catch (error) {\n // // TODO: Differentiate whole batch errors from individual errors.\n // if (batch.error) {\n // // If the whole batch errored, clear the state.\n // // TODO: Keep track of retries and only clear the state a few times because it will be retried each time.\n // // Also implement exponential backoff to delay retries to avoid hammering `resolveUsers`.\n // update(cacheKey, undefined);\n // } else {\n // // Otherwise, keep individual errors to avoid repeatedly loading the same error.\n // update(cacheKey, {\n // isLoading: false,\n // error: error as Error,\n // });\n // }\n\n // If there was an error (for various reasons), set the state to the error.\n update({\n key: cacheKey,\n state: { isLoading: false, error: error as Error },\n });\n }\n }\n\n function setData(entries: [I, O][]): void {\n update(\n entries.map((entry) => ({\n key: getCacheKey(entry[0]),\n state: { isLoading: false, data: entry[1] },\n }))\n );\n }\n\n function getItemState(input: I): AsyncResult<O> | undefined {\n const cacheKey = getCacheKey(input);\n const cache = signal.get();\n return cache.get(cacheKey);\n }\n\n function getData(input: I): O | undefined {\n const cacheKey = getCacheKey(input);\n const cache = signal.get();\n return cache.get(cacheKey)?.data;\n }\n\n /** @internal - Only for testing */\n function _cacheKeys() {\n const cache = signal.get();\n return [...cache.keys()];\n }\n\n return {\n subscribe: signal.subscribe,\n enqueue,\n setData,\n getItemState,\n getData,\n invalidate,\n\n batch,\n _cacheKeys,\n };\n}\n","export function chunk<T>(array: T[], size: number): T[][] {\n const chunks: T[][] = [];\n\n for (let i = 0, j = array.length; i < j; i += size) {\n chunks.push(array.slice(i, i + size));\n }\n\n return chunks;\n}\n","// Inlined version of 3.3.7 of nanoid.js\n// https://www.npmjs.com/package/nanoid/v/3.3.7?activeTab=code\nexport const nanoid = (t = 21): string =>\n crypto\n .getRandomValues(new Uint8Array(t))\n .reduce(\n (t, e) =>\n (t +=\n (e &= 63) < 36\n ? e.toString(36)\n : e < 62\n ? (e - 26).toString(36).toUpperCase()\n : e < 63\n ? 
\"_\"\n : \"-\"),\n \"\"\n );\n","import { nanoid } from \"./nanoid\";\n\nconst THREAD_ID_PREFIX = \"th\";\nconst COMMENT_ID_PREFIX = \"cm\";\nconst COMMENT_ATTACHMENT_ID_PREFIX = \"at\";\nconst INBOX_NOTIFICATION_ID_PREFIX = \"in\";\n\nfunction createOptimisticId(prefix: string): string {\n return `${prefix}_${nanoid()}`;\n}\n\nexport function createThreadId(): string {\n return createOptimisticId(THREAD_ID_PREFIX);\n}\n\nexport function createCommentId(): string {\n return createOptimisticId(COMMENT_ID_PREFIX);\n}\n\nexport function createCommentAttachmentId(): string {\n return createOptimisticId(COMMENT_ATTACHMENT_ID_PREFIX);\n}\n\nexport function createInboxNotificationId(): string {\n return createOptimisticId(INBOX_NOTIFICATION_ID_PREFIX);\n}\n","import { raise } from \"./utils\";\n\n/**\n * Like ES6 map, but takes a default (factory) function which will be used\n * to create entries for missing keys on the fly.\n *\n * Useful for code like:\n *\n * const map = new DefaultMap(() => []);\n * map.getOrCreate('foo').push('hello');\n * map.getOrCreate('foo').push('world');\n * map.getOrCreate('foo')\n * // ['hello', 'world']\n *\n */\nexport class DefaultMap<K, V> extends Map<K, V> {\n #defaultFn?: (key: K) => V;\n\n /**\n * If the default function is not provided to the constructor, it has to be\n * provided in each .getOrCreate() call individually.\n */\n constructor(\n defaultFn?: (key: K) => V,\n entries?: readonly (readonly [K, V])[] | null\n ) {\n super(entries);\n this.#defaultFn = defaultFn;\n }\n\n /**\n * Gets the value at the given key, or creates it.\n *\n * Difference from normal Map: if the key does not exist, it will be created\n * on the fly using the factory function, and that value will get returned\n * instead of `undefined`.\n */\n getOrCreate(key: K, defaultFn?: (key: K) => V): V {\n if (super.has(key)) {\n // eslint-disable-next-line no-restricted-syntax\n return super.get(key)!;\n } else {\n const fn =\n defaultFn ??\n this.#defaultFn ??\n raise(\"DefaultMap used without a factory function\");\n\n const value = fn(key);\n this.set(key, value);\n return value;\n }\n }\n}\n","import {\n isNumberOperator,\n isPlainObject,\n isStartsWithOperator,\n} from \"./guards\";\n\n/**\n * Converts an object to a query string\n * Example:\n * ```ts\n * const query = objectToQuery({\n * resolved: true,\n * subscribed: true,\n * metadata: {\n * status: \"open\",\n * priority: 3,\n * org: {\n * startsWith: \"liveblocks:\",\n * },\n * posX: {\n * gt: 100,\n * lt: 200,\n * },\n * posY: {\n * gte: 50,\n * lte: 300,\n * },\n * },\n * });\n *\n * console.log(query);\n * // resolved:true AND subscribed:true AND metadata[\"status\"]:open AND metadata[\"priority\"]:3 AND metadata[\"org\"]^\"liveblocks:\" AND metadata[\"posX\"]>100 AND metadata[\"posX\"]<200 AND metadata[\"posY\"]>=50 AND metadata[\"posY\"]<=300\n * ```\n */\ntype SimpleFilterValue = string | number | boolean | null;\ntype OperatorFilterValue =\n | { startsWith: string; gt?: never; lt?: never; gte?: never; lte?: never }\n | {\n lt?: number;\n gt?: number;\n lte?: number;\n gte?: number;\n startsWith?: never;\n };\n\ntype FilterValue = SimpleFilterValue | OperatorFilterValue;\n\ntype Filter = NumberFilter | StringFilter | BooleanFilter | NullFilter;\n\ntype NumberFilter = {\n key: string;\n operator: \":\" | \"<\" | \">\" | \"<=\" | \">=\";\n value: number;\n};\n\ntype StringFilter = {\n key: string;\n operator: \":\" | \"^\";\n value: string;\n};\n\ntype BooleanFilter = {\n key: string;\n operator: \":\";\n value: 
boolean;\n};\n\ntype NullFilter = {\n key: string;\n operator: \":\";\n value: null;\n};\n\n/**\n * Converts an object to a query string\n * @example\n * ```ts\n * const query = objectToQuery({\n * metadata: {\n * status: \"open\",\n * priority: 3,\n * org: {\n * startsWith: \"liveblocks:\",\n * },\n * },\n * });\n * console.log(query);\n * // metadata[\"status\"]:\"open\" AND metadata[\"priority\"]:3 AND metadata[\"org\"]^\"liveblocks:\"\n * ```\n * @example\n * ```ts\n * const query = objectToQuery({\n * resolved: true,\n * subscribed: true,\n * roomId: {\n * startsWith: \"engineering:\",\n * },\n * });\n * console.log(query);\n * // resolved:true AND subscribed:true AND roomId^\"engineering:\"\n * ```\n *\n */\n\nconst identifierRegex = /^[a-zA-Z_][a-zA-Z0-9_]*$/;\n\nexport function objectToQuery(obj: {\n [key: string]:\n | FilterValue\n | { [key: string]: FilterValue | undefined }\n | undefined;\n}): string {\n let filterList: Filter[] = [];\n const entries = Object.entries(obj);\n\n const keyValuePairs: [string, string | number | boolean | null][] = [];\n const keyValuePairsWithOperator: [string, OperatorFilterValue][] = [];\n const indexedKeys: [string, Record<string, FilterValue | undefined>][] = [];\n\n entries.forEach(([key, value]) => {\n if (!identifierRegex.test(key)) {\n throw new Error(\"Key must only contain letters, numbers, _\");\n }\n\n if (isSimpleValue(value)) {\n keyValuePairs.push([key, value]);\n } else if (isPlainObject(value)) {\n if (isStartsWithOperator(value) || isNumberOperator(value)) {\n keyValuePairsWithOperator.push([key, value]);\n } else {\n indexedKeys.push([key, value]);\n }\n }\n });\n\n filterList = [\n ...getFiltersFromKeyValuePairs(keyValuePairs),\n ...getFiltersFromKeyValuePairsWithOperator(keyValuePairsWithOperator),\n ];\n\n indexedKeys.forEach(([key, value]) => {\n const nestedEntries = Object.entries(value);\n const nKeyValuePairs: [string, SimpleFilterValue][] = [];\n const nKeyValuePairsWithOperator: [string, OperatorFilterValue][] = [];\n nestedEntries.forEach(([nestedKey, nestedValue]) => {\n if (isStringEmpty(nestedKey)) {\n throw new Error(\"Key cannot be empty\");\n }\n\n if (isSimpleValue(nestedValue)) {\n nKeyValuePairs.push([formatFilterKey(key, nestedKey), nestedValue]);\n } else if (\n isStartsWithOperator(nestedValue) ||\n isNumberOperator(nestedValue)\n ) {\n nKeyValuePairsWithOperator.push([\n formatFilterKey(key, nestedKey),\n nestedValue,\n ]);\n }\n });\n filterList = [\n ...filterList,\n ...getFiltersFromKeyValuePairs(nKeyValuePairs),\n ...getFiltersFromKeyValuePairsWithOperator(nKeyValuePairsWithOperator),\n ];\n });\n\n return filterList\n .map(({ key, operator, value }) => `${key}${operator}${quote(value)}`)\n .join(\" \");\n}\n\nconst getFiltersFromKeyValuePairs = (\n keyValuePairs: [string, string | number | boolean | null][]\n): Filter[] => {\n const filters: Filter[] = [];\n keyValuePairs.forEach(([key, value]) => {\n filters.push({\n key,\n operator: \":\",\n value,\n });\n });\n\n return filters;\n};\n\nconst getFiltersFromKeyValuePairsWithOperator = (\n keyValuePairsWithOperator: [string, OperatorFilterValue][]\n): Filter[] => {\n const filters: Filter[] = [];\n keyValuePairsWithOperator.forEach(([key, value]) => {\n if (\"startsWith\" in value && typeof value.startsWith === \"string\") {\n filters.push({\n key,\n operator: \"^\",\n value: value.startsWith,\n });\n }\n if (\"lt\" in value && typeof value.lt === \"number\") {\n filters.push({\n key,\n operator: \"<\",\n value: value.lt,\n });\n }\n if (\"gt\" in 
value && typeof value.gt === \"number\") {\n filters.push({\n key,\n operator: \">\",\n value: value.gt,\n });\n }\n if (\"gte\" in value && typeof value.gte === \"number\") {\n filters.push({\n key,\n operator: \">=\",\n value: value.gte,\n });\n }\n if (\"lte\" in value && typeof value.lte === \"number\") {\n filters.push({\n key,\n operator: \"<=\",\n value: value.lte,\n });\n }\n });\n\n return filters;\n};\n\nconst isSimpleValue = (value: unknown) => {\n return (\n typeof value === \"string\" ||\n typeof value === \"number\" ||\n typeof value === \"boolean\" ||\n value === null\n );\n};\n\nconst formatFilterKey = (key: string, nestedKey?: string) => {\n if (nestedKey) {\n return `${key}[${quote(nestedKey)}]`;\n }\n return key;\n};\n\nconst isStringEmpty = (value: string) => {\n return !value || value.toString().trim() === \"\";\n};\n\n/**\n * Quotes and escapes a string. Prefer to use single quotes when possible, but\n * falls back to JSON.stringify() (which uses double-quotes) when necessary.\n */\nexport function quote(input: unknown): string {\n const result = JSON.stringify(input);\n if (typeof input !== \"string\") {\n return result;\n }\n\n if (result.includes(\"'\")) {\n return result;\n }\n\n // See if we can turn this string into a single-quoted string, because those\n // generally are more readable in URLs\n return `'${result.slice(1, -1).replace(/\\\\\"/g, '\"')}'`;\n}\n","import type { Brand } from \"./utils\";\n\nconst PLACEHOLDER_BASE_URL = \"https://localhost:9999\";\nconst ABSOLUTE_URL_REGEX = /^[a-zA-Z][a-zA-Z\\d+\\-.]*?:/;\nconst TRAILING_SLASH_URL_REGEX = /\\/(?:(?:\\?|#).*)?$/;\n\nexport type QueryParams =\n | Record<string, string | number | null | undefined>\n | URLSearchParams;\n\n/**\n * Safely but conveniently build a URLSearchParams instance from a given\n * dictionary of values. For example:\n *\n * {\n * \"foo\": \"bar+qux/baz\",\n * \"empty\": \"\",\n * \"n\": 42,\n * \"nope\": undefined,\n * \"alsonope\": null,\n * }\n *\n * Will produce a value that will get serialized as\n * `foo=bar%2Bqux%2Fbaz&empty=&n=42`.\n *\n * Notice how the number is converted to its string representation\n * automatically and the `null`/`undefined` values simply don't end up in the\n * URL.\n */\nfunction toURLSearchParams(\n params: Record<string, string | number | null | undefined>\n): URLSearchParams {\n const result = new URLSearchParams();\n for (const [key, value] of Object.entries(params)) {\n if (value !== undefined && value !== null) {\n result.set(key, value.toString());\n }\n }\n return result;\n}\n\n/**\n * Concatenates a path to an existing URL.\n */\nexport function urljoin(\n baseUrl: string | URL,\n path: string,\n params?: QueryParams\n): string {\n // First, sanitize by removing user/passwd/search/hash parts from the URL\n const url = new URL(path, baseUrl);\n if (params !== undefined) {\n url.search = (\n params instanceof URLSearchParams ? 
params : toURLSearchParams(params)\n ).toString();\n }\n return url.toString();\n}\n\n/**\n * A string that is guaranteed to be URL safe (where all arguments are properly\n * encoded), only obtainable as the result of using `url` template strings.\n */\nexport type URLSafeString = Brand<string, \"URLSafeString\">;\n\n/**\n * Builds a URL where each \"hole\" in the template string will automatically be\n * encodeURIComponent()-escaped, so it's impossible to build invalid URLs.\n */\nexport function url(\n strings: TemplateStringsArray,\n ...values: string[]\n): URLSafeString {\n return strings.reduce(\n (result, str, i) => result + encodeURIComponent(values[i - 1] ?? \"\") + str\n ) as URLSafeString;\n}\n\n/**\n * Sanitize a URL (normalize www URLs, handle relative URLs, prevent XSS attacks, etc.)\n *\n * Accepted URLs:\n * - Absolute URLs with an http or https protocol (e.g. https://liveblocks.io)\n * - Absolute URLs with a `www` prefix (e.g. www.liveblocks.io)\n * - Relative URLs (e.g. /path/to/page)\n * - Hash-only URLs (e.g. #hash)\n *\n * The presence/absence of trailing slashes is preserved.\n * Rejected URLs are returned as `null`.\n */\nexport function sanitizeUrl(url: string): string | null {\n // If the URL starts with \"www.\", normalize it as an HTTPS URL\n if (url.startsWith(\"www.\")) {\n url = \"https://\" + url;\n }\n\n // If the URL is an empty hash, return it as is.\n if (url === \"#\") {\n return url;\n }\n\n try {\n const isAbsolute = ABSOLUTE_URL_REGEX.test(url);\n const urlObject = new URL(\n url,\n isAbsolute ? undefined : PLACEHOLDER_BASE_URL\n );\n\n if (urlObject.protocol !== \"http:\" && urlObject.protocol !== \"https:\") {\n return null;\n }\n\n const hasTrailingSlash = TRAILING_SLASH_URL_REGEX.test(url);\n\n // Instead of using URL.toString(), we rebuild the URL manually\n // to preserve the presence/absence of trailing slashes.\n const sanitizedUrl =\n // 1. Origin, only for absolute URLs\n (isAbsolute ? urlObject.origin : \"\") +\n // 2. Pathname, with a trailing slash if the original URL had one\n (urlObject.pathname === \"/\"\n ? // 2.a. Domain-only URLs, they always have their pathname set to \"/\"\n hasTrailingSlash\n ? \"/\"\n : \"\"\n : // 2.b. URLs with a path\n hasTrailingSlash && !urlObject.pathname.endsWith(\"/\")\n ? urlObject.pathname + \"/\"\n : urlObject.pathname) +\n // 3. Search params\n urlObject.search +\n // 4. Hash\n urlObject.hash;\n\n return sanitizedUrl !== \"\" ? sanitizedUrl : null;\n } catch {\n return null;\n }\n}\n\n/**\n * Construct a URL with optional parameters and hash.\n */\nexport function generateUrl(\n url: string,\n params?: Record<string, string | number | undefined>,\n hash?: string\n): string {\n const isAbsolute = ABSOLUTE_URL_REGEX.test(url);\n const urlObject = new URL(url, isAbsolute ? undefined : PLACEHOLDER_BASE_URL);\n\n if (params !== undefined) {\n for (const [param, value] of Object.entries(params)) {\n if (value) {\n urlObject.searchParams.set(param, String(value));\n }\n }\n }\n\n // Only add the new hash if the URL does not already have one\n if (!urlObject.hash && hash !== undefined) {\n urlObject.hash = `#${hash}`;\n }\n\n return isAbsolute\n ? 
urlObject.href\n : urlObject.href.replace(PLACEHOLDER_BASE_URL, \"\");\n}\n\nexport function isUrl(string: string): boolean {\n try {\n new URL(string);\n return true;\n } catch (_) {\n return false;\n }\n}\n","import type { AuthManager, AuthValue } from \"./auth-manager\";\nimport {\n convertToCommentData,\n convertToCommentUserReaction,\n convertToGroupData,\n convertToInboxNotificationData,\n convertToInboxNotificationDeleteInfo,\n convertToSubscriptionData,\n convertToSubscriptionDeleteInfo,\n convertToThreadData,\n convertToThreadDeleteInfo,\n} from \"./convert-plain-data\";\nimport { assertNever } from \"./lib/assert\";\nimport { autoRetry, HttpError } from \"./lib/autoRetry\";\nimport type { BatchStore } from \"./lib/batch\";\nimport { Batch, createBatchStore } from \"./lib/batch\";\nimport { chunk } from \"./lib/chunk\";\nimport { createCommentId, createThreadId } from \"./lib/createIds\";\nimport type { DateToString } from \"./lib/DateToString\";\nimport { DefaultMap } from \"./lib/DefaultMap\";\nimport type { Json, JsonObject } from \"./lib/Json\";\nimport { objectToQuery } from \"./lib/objectToQuery\";\nimport type { Signal } from \"./lib/signals\";\nimport { stringifyOrLog as stringify } from \"./lib/stringify\";\nimport type { QueryParams, URLSafeString } from \"./lib/url\";\nimport { url, urljoin } from \"./lib/url\";\nimport { raise } from \"./lib/utils\";\nimport type {\n ContextualPromptContext,\n ContextualPromptResponse,\n} from \"./protocol/Ai\";\nimport type { Permission } from \"./protocol/AuthToken\";\nimport type { ClientMsg } from \"./protocol/ClientMsg\";\nimport type {\n BaseMetadata,\n CommentAttachment,\n CommentBody,\n CommentData,\n CommentDataPlain,\n CommentLocalAttachment,\n CommentUserReaction,\n CommentUserReactionPlain,\n QueryMetadata,\n SearchCommentsResult,\n ThreadData,\n ThreadDataPlain,\n ThreadDeleteInfo,\n ThreadDeleteInfoPlain,\n} from \"./protocol/Comments\";\nimport type { GroupData, GroupDataPlain } from \"./protocol/Groups\";\nimport type {\n InboxNotificationData,\n InboxNotificationDataPlain,\n InboxNotificationDeleteInfo,\n InboxNotificationDeleteInfoPlain,\n} from \"./protocol/InboxNotifications\";\nimport type { MentionData } from \"./protocol/MentionData\";\nimport type {\n NotificationSettingsPlain,\n PartialNotificationSettings,\n} from \"./protocol/NotificationSettings\";\nimport type { RoomSubscriptionSettings } from \"./protocol/RoomSubscriptionSettings\";\nimport type { StorageNode } from \"./protocol/StorageNode\";\nimport type {\n SubscriptionData,\n SubscriptionDataPlain,\n SubscriptionDeleteInfo,\n SubscriptionDeleteInfoPlain,\n} from \"./protocol/Subscriptions\";\nimport type { UrlMetadata } from \"./protocol/UrlMetadata\";\nimport type { HistoryVersion } from \"./protocol/VersionHistory\";\nimport type { TextEditorType } from \"./types/Others\";\nimport type { Patchable } from \"./types/Patchable\";\nimport { PKG_VERSION } from \"./version\";\n\nexport interface RoomHttpApi<TM extends BaseMetadata, CM extends BaseMetadata> {\n getThreads(options: {\n roomId: string;\n cursor?: string;\n query?: {\n resolved?: boolean;\n subscribed?: boolean;\n metadata?: Partial<QueryMetadata<TM>>;\n };\n }): Promise<{\n threads: ThreadData<TM, CM>[];\n inboxNotifications: InboxNotificationData[];\n subscriptions: SubscriptionData[];\n requestedAt: Date;\n nextCursor: string | null;\n permissionHints: Record<string, Permission[]>;\n }>;\n\n getThreadsSince(options: {\n roomId: string;\n since: Date;\n signal?: AbortSignal;\n }): 
Promise<{\n threads: {\n updated: ThreadData<TM, CM>[];\n deleted: ThreadDeleteInfo[];\n };\n inboxNotifications: {\n updated: InboxNotificationData[];\n deleted: InboxNotificationDeleteInfo[];\n };\n subscriptions: {\n updated: SubscriptionData[];\n deleted: SubscriptionDeleteInfo[];\n };\n requestedAt: Date;\n permissionHints: Record<string, Permission[]>;\n }>;\n\n searchComments(\n options: {\n roomId: string;\n query: {\n threadMetadata?: Partial<QueryMetadata<TM>>;\n threadResolved?: boolean;\n hasAttachments?: boolean;\n hasMentions?: boolean;\n text: string;\n };\n },\n requestOptions?: {\n signal?: AbortSignal;\n }\n ): Promise<{\n data: Array<SearchCommentsResult>;\n }>;\n\n createThread({\n roomId,\n metadata,\n body,\n commentId,\n threadId,\n commentMetadata,\n attachmentIds,\n }: {\n roomId: string;\n threadId?: string;\n commentId?: string;\n metadata: TM | undefined;\n commentMetadata: CM | undefined;\n body: CommentBody;\n attachmentIds?: string[];\n }): Promise<ThreadData<TM, CM>>;\n\n getThread(options: { roomId: string; threadId: string }): Promise<{\n thread?: ThreadData<TM, CM>;\n inboxNotification?: InboxNotificationData;\n subscription?: SubscriptionData;\n }>;\n\n deleteThread({\n roomId,\n threadId,\n }: {\n roomId: string;\n threadId: string;\n }): Promise<void>;\n\n editThreadMetadata({\n roomId,\n metadata,\n threadId,\n }: {\n roomId: string;\n metadata: Patchable<TM>;\n threadId: string;\n }): Promise<TM>;\n\n editCommentMetadata({\n roomId,\n threadId,\n commentId,\n metadata,\n }: {\n roomId: string;\n threadId: string;\n commentId: string;\n metadata: Patchable<CM>;\n }): Promise<CM>;\n\n createComment({\n roomId,\n threadId,\n commentId,\n body,\n metadata,\n attachmentIds,\n }: {\n roomId: string;\n threadId: string;\n commentId?: string;\n body: CommentBody;\n metadata?: CM;\n attachmentIds?: string[];\n }): Promise<CommentData<CM>>;\n\n editComment({\n roomId,\n threadId,\n commentId,\n body,\n attachmentIds,\n metadata,\n }: {\n roomId: string;\n threadId: string;\n commentId: string;\n body: CommentBody;\n attachmentIds?: string[];\n metadata?: Patchable<CM>;\n }): Promise<CommentData<CM>>;\n\n deleteComment({\n roomId,\n threadId,\n commentId,\n }: {\n roomId: string;\n threadId: string;\n commentId: string;\n }): Promise<void>;\n\n addReaction({\n roomId,\n threadId,\n commentId,\n emoji,\n }: {\n roomId: string;\n threadId: string;\n commentId: string;\n emoji: string;\n }): Promise<CommentUserReaction>;\n\n removeReaction({\n roomId,\n threadId,\n commentId,\n emoji,\n }: {\n roomId: string;\n threadId: string;\n commentId: string;\n emoji: string;\n }): Promise<void>;\n\n markThreadAsResolved({\n roomId,\n threadId,\n }: {\n roomId: string;\n threadId: string;\n }): Promise<void>;\n\n markThreadAsUnresolved({\n roomId,\n threadId,\n }: {\n roomId: string;\n threadId: string;\n }): Promise<void>;\n\n subscribeToThread({\n roomId,\n threadId,\n }: {\n roomId: string;\n threadId: string;\n }): Promise<SubscriptionData>;\n\n unsubscribeFromThread({\n roomId,\n threadId,\n }: {\n roomId: string;\n threadId: string;\n }): Promise<void>;\n\n // Notifications\n markRoomInboxNotificationAsRead({\n roomId,\n inboxNotificationId,\n }: {\n roomId: string;\n inboxNotificationId: string;\n }): Promise<string>;\n\n getSubscriptionSettings({\n roomId,\n signal,\n }: {\n roomId: string;\n signal?: AbortSignal;\n }): Promise<RoomSubscriptionSettings>;\n\n updateSubscriptionSettings({\n roomId,\n settings,\n }: {\n roomId: string;\n settings: 
Partial<RoomSubscriptionSettings>;\n }): Promise<RoomSubscriptionSettings>;\n\n // Attachments\n getAttachmentUrl(options: {\n roomId: string;\n attachmentId: string;\n }): Promise<string>;\n\n uploadAttachment({\n roomId,\n attachment,\n signal,\n }: {\n roomId: string;\n attachment: CommentLocalAttachment;\n signal?: AbortSignal;\n }): Promise<CommentAttachment>;\n\n getOrCreateAttachmentUrlsStore(roomId: string): BatchStore<string, string>;\n\n uploadChatAttachment({\n chatId,\n attachment,\n signal,\n }: {\n chatId: string;\n attachment: { id: string; file: File };\n signal?: AbortSignal;\n }): Promise<void>;\n\n getOrCreateChatAttachmentUrlsStore(\n chatId: string\n ): BatchStore<string, string>;\n getChatAttachmentUrl(options: { attachmentId: string }): Promise<string>;\n\n // Text editor\n createTextMention({\n roomId,\n mentionId,\n mention,\n }: {\n roomId: string;\n mentionId: string;\n mention: MentionData;\n }): Promise<void>;\n\n deleteTextMention({\n roomId,\n mentionId,\n }: {\n roomId: string;\n mentionId: string;\n }): Promise<void>;\n\n getTextVersion({\n roomId,\n versionId,\n }: {\n roomId: string;\n versionId: string;\n }): Promise<Response>;\n\n createTextVersion({ roomId }: { roomId: string }): Promise<void>;\n\n reportTextEditor({\n roomId,\n type,\n rootKey,\n }: {\n roomId: string;\n type: TextEditorType;\n rootKey: string;\n }): Promise<void>;\n\n listTextVersions({ roomId }: { roomId: string }): Promise<{\n versions: {\n type: \"historyVersion\";\n kind: \"yjs\";\n id: string;\n authors: {\n id: string;\n }[];\n createdAt: Date;\n }[];\n requestedAt: Date;\n }>;\n\n listTextVersionsSince({\n roomId,\n since,\n signal,\n }: {\n roomId: string;\n since: Date;\n signal?: AbortSignal;\n }): Promise<{\n versions: {\n type: \"historyVersion\";\n kind: \"yjs\";\n id: string;\n authors: {\n id: string;\n }[];\n createdAt: Date;\n }[];\n requestedAt: Date;\n }>;\n\n streamStorage(options: { roomId: string }): Promise<StorageNode[]>;\n\n sendMessagesOverHTTP<P extends JsonObject, E extends Json>(options: {\n roomId: string;\n nonce: string | undefined;\n messages: ClientMsg<P, E>[];\n }): Promise<Response>;\n\n executeContextualPrompt({\n roomId,\n prompt,\n context,\n signal,\n }: {\n roomId: string;\n prompt: string;\n context: ContextualPromptContext;\n previous?: {\n prompt: string;\n response: ContextualPromptResponse;\n };\n signal: AbortSignal;\n }): Promise<string>;\n}\n\nexport interface NotificationHttpApi<\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n> {\n getInboxNotifications(options?: {\n cursor?: string;\n query?: { roomId?: string; kind?: string };\n }): Promise<{\n inboxNotifications: InboxNotificationData[];\n threads: ThreadData<TM, CM>[];\n subscriptions: SubscriptionData[];\n nextCursor: string | null;\n requestedAt: Date;\n }>;\n\n getInboxNotificationsSince(options: {\n since: Date;\n query?: { roomId?: string; kind?: string };\n signal?: AbortSignal;\n }): Promise<{\n inboxNotifications: {\n updated: InboxNotificationData[];\n deleted: InboxNotificationDeleteInfo[];\n };\n threads: {\n updated: ThreadData<TM, CM>[];\n deleted: ThreadDeleteInfo[];\n };\n subscriptions: {\n updated: SubscriptionData[];\n deleted: SubscriptionDeleteInfo[];\n };\n requestedAt: Date;\n }>;\n\n getUnreadInboxNotificationsCount(options?: {\n query?: {\n roomId?: string;\n kind?: string;\n };\n signal?: AbortSignal;\n }): Promise<number>;\n\n markAllInboxNotificationsAsRead(): Promise<void>;\n\n markInboxNotificationAsRead(inboxNotificationId: string): 
Promise<void>;\n\n deleteAllInboxNotifications(): Promise<void>;\n\n deleteInboxNotification(inboxNotificationId: string): Promise<void>;\n\n getNotificationSettings(options?: {\n signal?: AbortSignal;\n }): Promise<NotificationSettingsPlain>;\n\n updateNotificationSettings(\n settings: PartialNotificationSettings\n ): Promise<NotificationSettingsPlain>;\n}\n\nexport interface LiveblocksHttpApi<\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n> extends RoomHttpApi<TM, CM>,\n NotificationHttpApi<TM, CM> {\n getUrlMetadata(url: string): Promise<UrlMetadata>;\n\n getUserThreads_experimental(options?: {\n cursor?: string;\n query?: {\n resolved?: boolean;\n metadata?: Partial<QueryMetadata<TM>>;\n };\n }): Promise<{\n threads: ThreadData<TM, CM>[];\n inboxNotifications: InboxNotificationData[];\n subscriptions: SubscriptionData[];\n nextCursor: string | null;\n requestedAt: Date;\n permissionHints: Record<string, Permission[]>;\n }>;\n\n getUserThreadsSince_experimental(options: {\n since: Date;\n signal?: AbortSignal;\n }): Promise<{\n inboxNotifications: {\n updated: InboxNotificationData[];\n deleted: InboxNotificationDeleteInfo[];\n };\n threads: {\n updated: ThreadData<TM, CM>[];\n deleted: ThreadDeleteInfo[];\n };\n subscriptions: {\n updated: SubscriptionData[];\n deleted: SubscriptionDeleteInfo[];\n };\n requestedAt: Date;\n permissionHints: Record<string, Permission[]>;\n }>;\n\n groupsStore: BatchStore<GroupData | undefined, string>;\n\n getGroup(groupId: string): Promise<GroupData | undefined>;\n}\n\nexport function createApiClient<\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n>({\n baseUrl,\n authManager,\n currentUserId,\n fetchPolyfill,\n}: {\n baseUrl: string;\n authManager: AuthManager;\n currentUserId: Signal<string | undefined>;\n fetchPolyfill: typeof fetch;\n}): LiveblocksHttpApi<TM, CM> {\n const httpClient = new HttpClient(baseUrl, fetchPolyfill);\n\n /* -------------------------------------------------------------------------------------------------\n * Threads (Room level)\n * -----------------------------------------------------------------------------------------------*/\n async function getThreadsSince(options: {\n roomId: string;\n since: Date;\n signal?: AbortSignal;\n }) {\n const result = await httpClient.get<{\n data: ThreadDataPlain<TM, CM>[];\n inboxNotifications: InboxNotificationDataPlain[];\n subscriptions: SubscriptionDataPlain[];\n deletedThreads: ThreadDeleteInfoPlain[];\n deletedInboxNotifications: InboxNotificationDeleteInfoPlain[];\n deletedSubscriptions: SubscriptionDeleteInfoPlain[];\n meta: {\n requestedAt: string;\n permissionHints: Record<string, Permission[]>;\n };\n }>(\n url`/v2/c/rooms/${options.roomId}/threads/delta`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n {\n since: options.since.toISOString(),\n },\n { signal: options.signal }\n );\n\n return {\n threads: {\n updated: result.data.map(convertToThreadData),\n deleted: result.deletedThreads.map(convertToThreadDeleteInfo),\n },\n inboxNotifications: {\n updated: result.inboxNotifications.map(convertToInboxNotificationData),\n deleted: result.deletedInboxNotifications.map(\n convertToInboxNotificationDeleteInfo\n ),\n },\n subscriptions: {\n updated: result.subscriptions.map(convertToSubscriptionData),\n deleted: result.deletedSubscriptions.map(\n convertToSubscriptionDeleteInfo\n ),\n },\n requestedAt: new Date(result.meta.requestedAt),\n permissionHints: result.meta.permissionHints,\n };\n }\n\n async function 
getThreads(options: {\n roomId: string;\n cursor?: string;\n query?: {\n resolved?: boolean;\n subscribed?: boolean;\n metadata?: Partial<QueryMetadata<TM>>;\n };\n }) {\n let query: string | undefined;\n\n if (options.query) {\n query = objectToQuery(options.query);\n }\n\n const PAGE_SIZE = 50;\n\n try {\n const result = await httpClient.get<{\n data: ThreadDataPlain<TM, CM>[];\n inboxNotifications: InboxNotificationDataPlain[];\n subscriptions: SubscriptionDataPlain[];\n deletedThreads: ThreadDeleteInfoPlain[];\n deletedInboxNotifications: InboxNotificationDeleteInfoPlain[];\n deletedSubscriptions: SubscriptionDeleteInfoPlain[];\n meta: {\n requestedAt: string;\n nextCursor: string | null;\n permissionHints: Record<string, Permission[]>;\n };\n }>(\n url`/v2/c/rooms/${options.roomId}/threads`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n {\n cursor: options.cursor,\n query,\n limit: PAGE_SIZE,\n }\n );\n\n return {\n threads: result.data.map(convertToThreadData),\n inboxNotifications: result.inboxNotifications.map(\n convertToInboxNotificationData\n ),\n subscriptions: result.subscriptions.map(convertToSubscriptionData),\n nextCursor: result.meta.nextCursor,\n requestedAt: new Date(result.meta.requestedAt),\n permissionHints: result.meta.permissionHints,\n };\n } catch (err) {\n if (err instanceof HttpError && err.status === 404) {\n // If the room does (not) yet exist, the response will be a 404 error\n // response which we'll interpret as an empty list of threads.\n return {\n threads: [],\n inboxNotifications: [],\n subscriptions: [],\n nextCursor: null,\n //\n // HACK\n // requestedAt needs to be a *server* timestamp here. However, on\n // this 404 error response, there is no such timestamp. So out of\n // pure necessity we'll fall back to a local timestamp instead (and\n // allow for a possible 6 hour clock difference between client and\n // server).\n //\n requestedAt: new Date(Date.now() - 6 * 60 * 60 * 1000),\n permissionHints: {},\n };\n }\n\n throw err;\n }\n }\n\n async function searchComments(\n options: {\n roomId: string;\n query: {\n threadMetadata?: Partial<QueryMetadata<TM>>;\n threadResolved?: boolean;\n hasAttachments?: boolean;\n hasMentions?: boolean;\n text: string;\n };\n },\n requestOptions?: {\n signal?: AbortSignal;\n }\n ) {\n const result = await httpClient.get<{\n data: Array<SearchCommentsResult>;\n }>(\n url`/v2/c/rooms/${options.roomId}/threads/comments/search`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n {\n text: options.query.text,\n query: objectToQuery({\n threadMetadata: options.query.threadMetadata,\n threadResolved: options.query.threadResolved,\n hasAttachments: options.query.hasAttachments,\n hasMentions: options.query.hasMentions,\n }),\n },\n { signal: requestOptions?.signal }\n );\n return result;\n }\n\n async function createThread(options: {\n roomId: string;\n threadId?: string;\n commentId?: string;\n metadata: TM | undefined;\n body: CommentBody;\n commentMetadata?: CM;\n attachmentIds?: string[];\n }) {\n const commentId = options.commentId ?? createCommentId();\n const threadId = options.threadId ?? 
createThreadId();\n\n const thread = await httpClient.post<ThreadDataPlain<TM, CM>>(\n url`/v2/c/rooms/${options.roomId}/threads`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n {\n id: threadId,\n comment: {\n id: commentId,\n body: options.body,\n metadata: options.commentMetadata,\n attachmentIds: options.attachmentIds,\n },\n metadata: options.metadata,\n }\n );\n\n return convertToThreadData<TM, CM>(thread);\n }\n\n async function deleteThread(options: { roomId: string; threadId: string }) {\n await httpClient.delete(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n }\n\n async function getThread(options: { roomId: string; threadId: string }) {\n const response = await httpClient.rawGet(\n url`/v2/c/rooms/${options.roomId}/thread-with-notification/${options.threadId}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n\n if (response.ok) {\n const json = (await response.json()) as {\n thread: ThreadDataPlain<TM, CM>;\n inboxNotification?: InboxNotificationDataPlain;\n subscription?: SubscriptionDataPlain;\n };\n\n return {\n thread: convertToThreadData(json.thread),\n inboxNotification: json.inboxNotification\n ? convertToInboxNotificationData(json.inboxNotification)\n : undefined,\n subscription: json.subscription\n ? convertToSubscriptionData(json.subscription)\n : undefined,\n };\n } else if (response.status === 404) {\n return {\n thread: undefined,\n inboxNotification: undefined,\n subscription: undefined,\n };\n } else {\n throw new Error(\n `There was an error while getting thread ${options.threadId}.`\n );\n }\n }\n\n async function editThreadMetadata(options: {\n roomId: string;\n metadata: Patchable<TM>;\n threadId: string;\n }) {\n return await httpClient.post<TM>(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/metadata`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n options.metadata\n );\n }\n\n async function editCommentMetadata(options: {\n roomId: string;\n threadId: string;\n commentId: string;\n metadata: Patchable<CM>;\n }) {\n return await httpClient.post<CM>(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/comments/${options.commentId}/metadata`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n options.metadata\n );\n }\n\n async function createComment(options: {\n roomId: string;\n threadId: string;\n commentId?: string;\n body: CommentBody;\n metadata?: CM;\n attachmentIds?: string[];\n }) {\n const commentId = options.commentId ?? 
createCommentId();\n const comment = await httpClient.post<CommentDataPlain<CM>>(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/comments`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n {\n id: commentId,\n body: options.body,\n metadata: options.metadata,\n attachmentIds: options.attachmentIds,\n }\n );\n return convertToCommentData(comment);\n }\n\n async function editComment(options: {\n roomId: string;\n threadId: string;\n commentId: string;\n body: CommentBody;\n attachmentIds?: string[];\n metadata?: Patchable<CM>;\n }) {\n const comment = await httpClient.post<CommentDataPlain<CM>>(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/comments/${options.commentId}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n {\n body: options.body,\n attachmentIds: options.attachmentIds,\n metadata: options.metadata,\n }\n );\n\n return convertToCommentData(comment);\n }\n\n async function deleteComment(options: {\n roomId: string;\n threadId: string;\n commentId: string;\n }) {\n await httpClient.delete(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/comments/${options.commentId}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n }\n\n async function addReaction(options: {\n roomId: string;\n threadId: string;\n commentId: string;\n emoji: string;\n }) {\n const reaction = await httpClient.post<CommentUserReactionPlain>(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/comments/${options.commentId}/reactions`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n { emoji: options.emoji }\n );\n\n return convertToCommentUserReaction(reaction);\n }\n\n async function removeReaction(options: {\n roomId: string;\n threadId: string;\n commentId: string;\n emoji: string;\n }) {\n await httpClient.delete<CommentDataPlain<CM>>(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/comments/${options.commentId}/reactions/${options.emoji}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n }\n\n async function markThreadAsResolved(options: {\n roomId: string;\n threadId: string;\n }) {\n await httpClient.post(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/mark-as-resolved`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n }\n\n async function markThreadAsUnresolved(options: {\n roomId: string;\n threadId: string;\n }) {\n await httpClient.post(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/mark-as-unresolved`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n }\n\n async function subscribeToThread(options: {\n roomId: string;\n threadId: string;\n }) {\n const subscription = await httpClient.post<SubscriptionDataPlain>(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/subscribe`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n\n return convertToSubscriptionData(subscription);\n }\n\n async function unsubscribeFromThread(options: {\n roomId: string;\n threadId: string;\n }) {\n await httpClient.post(\n url`/v2/c/rooms/${options.roomId}/threads/${options.threadId}/unsubscribe`,\n await authManager.getAuthValue({\n 
requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n }\n\n /* -------------------------------------------------------------------------------------------------\n * Attachments (Room level)\n * -----------------------------------------------------------------------------------------------*/\n async function uploadAttachment(options: {\n roomId: string;\n attachment: CommentLocalAttachment;\n signal?: AbortSignal;\n }): Promise<CommentAttachment> {\n const roomId = options.roomId;\n const abortSignal = options.signal;\n const attachment = options.attachment;\n\n const abortError = abortSignal\n ? new DOMException(\n `Upload of attachment ${options.attachment.id} was aborted.`,\n \"AbortError\"\n )\n : undefined;\n\n if (abortSignal?.aborted) {\n throw abortError;\n }\n\n const handleRetryError = (err: Error) => {\n if (abortSignal?.aborted) {\n throw abortError;\n }\n\n if (err instanceof HttpError && err.status === 413) {\n throw err;\n }\n\n return false;\n };\n\n const ATTACHMENT_PART_SIZE = 5 * 1024 * 1024; // 5 MB\n const RETRY_ATTEMPTS = 10;\n const RETRY_DELAYS = [\n 2000, 2000, 2000, 2000, 2000, 2000, 2000, 2000, 2000, 2000,\n ];\n\n function splitFileIntoParts(file: File) {\n const parts: { partNumber: number; part: Blob }[] = [];\n\n let start = 0;\n\n while (start < file.size) {\n const end = Math.min(start + ATTACHMENT_PART_SIZE, file.size);\n\n parts.push({\n partNumber: parts.length + 1,\n part: file.slice(start, end),\n });\n\n start = end;\n }\n\n return parts;\n }\n\n if (attachment.size <= ATTACHMENT_PART_SIZE) {\n // If the file is small enough, upload it in a single request\n return autoRetry(\n async () =>\n httpClient.putBlob<CommentAttachment>(\n url`/v2/c/rooms/${roomId}/attachments/${attachment.id}/upload/${encodeURIComponent(attachment.name)}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId,\n }),\n attachment.file,\n { fileSize: attachment.size },\n { signal: abortSignal }\n ),\n RETRY_ATTEMPTS,\n RETRY_DELAYS,\n handleRetryError\n );\n } else {\n // Otherwise, upload it in multiple parts\n let uploadId: string | undefined;\n const uploadedParts: {\n etag: string;\n partNumber: number;\n }[] = [];\n\n // Create a multi-part upload\n const createMultiPartUpload = await autoRetry(\n async () =>\n httpClient.post<{\n uploadId: string;\n key: string;\n }>(\n url`/v2/c/rooms/${roomId}/attachments/${attachment.id}/multipart/${encodeURIComponent(attachment.name)}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId,\n }),\n undefined,\n { signal: abortSignal },\n { fileSize: attachment.size }\n ),\n RETRY_ATTEMPTS,\n RETRY_DELAYS,\n handleRetryError\n );\n\n try {\n uploadId = createMultiPartUpload.uploadId;\n\n const parts = splitFileIntoParts(attachment.file);\n\n // Check if the upload was aborted\n if (abortSignal?.aborted) {\n throw abortError;\n }\n\n const batches = chunk(parts, 5);\n\n // Batches are uploaded one after the other\n for (const parts of batches) {\n const uploadedPartsPromises: Promise<{\n partNumber: number;\n etag: string;\n }>[] = [];\n\n for (const { part, partNumber } of parts) {\n uploadedPartsPromises.push(\n autoRetry(\n async () =>\n httpClient.putBlob<{\n partNumber: number;\n etag: string;\n }>(\n url`/v2/c/rooms/${roomId}/attachments/${attachment.id}/multipart/${createMultiPartUpload.uploadId}/${String(partNumber)}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId,\n }),\n part,\n undefined,\n { signal: abortSignal }\n ),\n 
RETRY_ATTEMPTS,\n RETRY_DELAYS,\n handleRetryError\n )\n );\n }\n\n // Parts are uploaded in parallel\n uploadedParts.push(...(await Promise.all(uploadedPartsPromises)));\n }\n\n // Check if the upload was aborted\n if (abortSignal?.aborted) {\n throw abortError;\n }\n\n const sortedUploadedParts = uploadedParts.sort(\n (a, b) => a.partNumber - b.partNumber\n );\n\n return httpClient.post<CommentAttachment>(\n url`/v2/c/rooms/${roomId}/attachments/${attachment.id}/multipart/${uploadId}/complete`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId,\n }),\n { parts: sortedUploadedParts },\n { signal: abortSignal }\n );\n } catch (error) {\n if (\n uploadId &&\n (error as Error)?.name &&\n ((error as Error).name === \"AbortError\" ||\n (error as Error).name === \"TimeoutError\")\n ) {\n try {\n // Abort the multi-part upload if it was created\n await httpClient.rawDelete(\n url`/v2/c/rooms/${roomId}/attachments/${attachment.id}/multipart/${uploadId}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId,\n })\n );\n } catch (error) {\n // Ignore the error, we are probably offline\n }\n }\n\n throw error;\n }\n }\n }\n\n const attachmentUrlsBatchStoresByRoom = new DefaultMap<\n string,\n BatchStore<string, string>\n >((roomId) => {\n const batch = new Batch<string, string>(\n async (batchedAttachmentIds) => {\n const attachmentIds = batchedAttachmentIds.flat();\n const { urls } = await httpClient.post<{\n urls: (string | null)[];\n }>(\n url`/v2/c/rooms/${roomId}/attachments/presigned-urls`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId,\n }),\n { attachmentIds }\n );\n\n return urls.map(\n (url) =>\n url ??\n new Error(\"There was an error while getting this attachment's URL\")\n );\n },\n { delay: 50 }\n );\n return createBatchStore(batch);\n });\n\n function getOrCreateAttachmentUrlsStore(\n roomId: string\n ): BatchStore<string, string> {\n return attachmentUrlsBatchStoresByRoom.getOrCreate(roomId);\n }\n\n function getAttachmentUrl(options: { roomId: string; attachmentId: string }) {\n const batch = getOrCreateAttachmentUrlsStore(options.roomId).batch;\n return batch.get(options.attachmentId);\n }\n\n /* -------------------------------------------------------------------------------------------------\n * Attachments (Chat level)\n * -----------------------------------------------------------------------------------------------*/\n async function uploadChatAttachment(options: {\n chatId: string;\n attachment: {\n id: string;\n file: File;\n };\n signal?: AbortSignal;\n }): Promise<void> {\n const { chatId, attachment, signal } = options;\n const userId = currentUserId.get();\n if (userId === undefined) {\n throw new Error(\"Attachment upload requires an authenticated user.\");\n }\n const ATTACHMENT_PART_SIZE = 5 * 1024 * 1024; // 5 MB\n\n if (options.attachment.file.size <= ATTACHMENT_PART_SIZE) {\n await httpClient.putBlob(\n url`/v2/c/chats/${chatId}/attachments/${attachment.id}/upload/${encodeURIComponent(attachment.file.name)}`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n attachment.file,\n { fileSize: attachment.file.size },\n { signal }\n );\n } else {\n const multipartUpload = await httpClient.post<{\n uploadId: string;\n key: string;\n }>(\n url`/v2/c/chats/${chatId}/attachments/${attachment.id}/multipart/${encodeURIComponent(attachment.file.name)}`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n undefined,\n { signal },\n { 
fileSize: attachment.file.size }\n );\n\n try {\n const uploadedParts: { etag: string; number: number }[] = [];\n\n const parts: { number: number; part: Blob }[] = [];\n let start = 0;\n while (start < attachment.file.size) {\n const end = Math.min(\n start + ATTACHMENT_PART_SIZE,\n attachment.file.size\n );\n parts.push({\n number: parts.length + 1,\n part: attachment.file.slice(start, end),\n });\n start = end;\n }\n\n uploadedParts.push(\n ...(await Promise.all(\n parts.map(async ({ number, part }) => {\n return await httpClient.putBlob<{\n etag: string;\n number: number;\n }>(\n url`/v2/c/chats/${chatId}/attachments/${attachment.id}/multipart/${multipartUpload.uploadId}/${String(number)}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n }),\n part,\n undefined,\n { signal }\n );\n })\n ))\n );\n\n await httpClient.post(\n url`/v2/c/chats/${chatId}/attachments/${attachment.id}/multipart/${multipartUpload.uploadId}/complete`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n { parts: uploadedParts.sort((a, b) => a.number - b.number) },\n { signal }\n );\n } catch (err) {\n try {\n await httpClient.delete(\n url`/v2/c/chats/${chatId}/attachments/${attachment.id}/multipart/${multipartUpload.uploadId}`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" })\n );\n } catch (err) {\n // Ignore the error, we are probably offline\n }\n throw err;\n }\n }\n }\n\n const attachmentUrlsBatchStoresByChat = new DefaultMap<\n string,\n BatchStore<string, string>\n >((chatId) => {\n const batch = new Batch<string, string>(\n async (batchedAttachmentIds) => {\n const attachmentIds = batchedAttachmentIds.flat();\n const { urls } = await httpClient.post<{\n urls: (string | null)[];\n }>(\n url`/v2/c/chats/${chatId}/attachments/presigned-urls`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n }),\n { attachmentIds }\n );\n\n return urls.map(\n (url) =>\n url ??\n new Error(\"There was an error while getting this attachment's URL\")\n );\n },\n { delay: 50 }\n );\n return createBatchStore(batch);\n });\n\n function getOrCreateChatAttachmentUrlsStore(\n chatId: string\n ): BatchStore<string, string> {\n return attachmentUrlsBatchStoresByChat.getOrCreate(chatId);\n }\n\n function getChatAttachmentUrl(options: {\n chatId: string;\n attachmentId: string;\n }) {\n const batch = getOrCreateChatAttachmentUrlsStore(options.chatId).batch;\n return batch.get(options.attachmentId);\n }\n\n /* -------------------------------------------------------------------------------------------------\n * Notifications (Room level)\n * -----------------------------------------------------------------------------------------------*/\n async function getSubscriptionSettings(options: {\n roomId: string;\n signal?: AbortSignal;\n }): Promise<RoomSubscriptionSettings> {\n return httpClient.get<RoomSubscriptionSettings>(\n url`/v2/c/rooms/${options.roomId}/subscription-settings`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n undefined,\n {\n signal: options.signal,\n }\n );\n }\n\n async function updateSubscriptionSettings(options: {\n roomId: string;\n settings: Partial<RoomSubscriptionSettings>;\n }): Promise<RoomSubscriptionSettings> {\n return httpClient.post<RoomSubscriptionSettings>(\n url`/v2/c/rooms/${options.roomId}/subscription-settings`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n options.settings\n );\n }\n\n 
const markAsReadBatchesByRoom = new DefaultMap<string, Batch<string, string>>(\n (roomId) =>\n new Batch<string, string>(\n async (batchedInboxNotificationIds) => {\n const inboxNotificationIds = batchedInboxNotificationIds.flat();\n // This method (and the following batch handling) isn't the same as the one in\n // src/notifications.ts, this one is room-based: /v2/c/rooms/<roomId>/inbox-notifications/read.\n //\n // The reason for this is that unlike the room-based Comments ones, the Notifications endpoints\n // don't work with a public key. Since `markThreadAsRead` needs to mark the related inbox notifications\n // as read, this room-based method is necessary to keep all Comments features working with a public key.\n await httpClient.post(\n url`/v2/c/rooms/${roomId}/inbox-notifications/read`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId,\n }),\n { inboxNotificationIds }\n );\n return inboxNotificationIds;\n },\n { delay: 50 }\n )\n );\n\n async function markRoomInboxNotificationAsRead(options: {\n roomId: string;\n inboxNotificationId: string;\n }) {\n const batch = markAsReadBatchesByRoom.getOrCreate(options.roomId);\n return batch.get(options.inboxNotificationId);\n }\n\n /* -------------------------------------------------------------------------------------------------\n * Text editor (Room level)\n * -----------------------------------------------------------------------------------------------*/\n async function createTextMention(options: {\n roomId: string;\n mentionId: string;\n mention: MentionData;\n }) {\n if (options.mention.kind !== \"user\" && options.mention.kind !== \"group\") {\n return assertNever(options.mention, \"Unexpected mention kind\");\n }\n\n await httpClient.rawPost(\n url`/v2/c/rooms/${options.roomId}/text-mentions`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n {\n userId:\n options.mention.kind === \"user\" ? options.mention.id : undefined,\n groupId:\n options.mention.kind === \"group\" ? options.mention.id : undefined,\n userIds:\n options.mention.kind === \"group\"\n ? 
options.mention.userIds\n : undefined,\n mentionId: options.mentionId,\n }\n );\n }\n\n async function deleteTextMention(options: {\n roomId: string;\n mentionId: string;\n }) {\n await httpClient.rawDelete(\n url`/v2/c/rooms/${options.roomId}/text-mentions/${options.mentionId}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n }\n\n async function getTextVersion(options: {\n roomId: string;\n versionId: string;\n }) {\n return httpClient.rawGet(\n url`/v2/c/rooms/${options.roomId}/y-version/${options.versionId}`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n }\n\n async function createTextVersion(options: { roomId: string }) {\n await httpClient.rawPost(\n url`/v2/c/rooms/${options.roomId}/version`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n }\n\n async function reportTextEditor(options: {\n roomId: string;\n type: TextEditorType;\n rootKey: string;\n }) {\n await httpClient.rawPost(\n url`/v2/c/rooms/${options.roomId}/text-metadata`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n {\n type: options.type,\n rootKey: options.rootKey,\n }\n );\n }\n\n async function executeContextualPrompt(options: {\n roomId: string;\n prompt: string;\n context: ContextualPromptContext;\n previous?: {\n prompt: string;\n response: ContextualPromptResponse;\n };\n signal: AbortSignal;\n }): Promise<string> {\n const result = await httpClient.post<{\n content: { type: \"text\"; text: string }[];\n }>(\n url`/v2/c/rooms/${options.roomId}/ai/contextual-prompt`,\n await authManager.getAuthValue({\n requestedScope: \"room:read\",\n roomId: options.roomId,\n }),\n {\n prompt: options.prompt,\n context: {\n beforeSelection: options.context.beforeSelection,\n selection: options.context.selection,\n afterSelection: options.context.afterSelection,\n },\n previous: options.previous,\n },\n { signal: options.signal }\n );\n if (!result || result.content.length === 0) {\n throw new Error(\"No content returned from server\");\n }\n return result.content[0].text;\n }\n\n async function listTextVersions(options: { roomId: string }) {\n const result = await httpClient.get<{\n versions: DateToString<HistoryVersion>[];\n meta: {\n requestedAt: string;\n };\n }>(\n url`/v2/c/rooms/${options.roomId}/versions`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n })\n );\n\n return {\n versions: result.versions.map(({ createdAt, ...version }) => {\n return {\n createdAt: new Date(createdAt),\n ...version,\n };\n }),\n requestedAt: new Date(result.meta.requestedAt),\n };\n }\n\n async function listTextVersionsSince(options: {\n roomId: string;\n since: Date;\n signal?: AbortSignal;\n }) {\n const result = await httpClient.get<{\n versions: DateToString<HistoryVersion>[];\n meta: {\n requestedAt: string;\n };\n }>(\n url`/v2/c/rooms/${options.roomId}/versions/delta`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n roomId: options.roomId,\n }),\n { since: options.since.toISOString() },\n { signal: options.signal }\n );\n\n return {\n versions: result.versions.map(({ createdAt, ...version }) => {\n return {\n createdAt: new Date(createdAt),\n ...version,\n };\n }),\n requestedAt: new Date(result.meta.requestedAt),\n };\n }\n\n async function streamStorage(options: { roomId: string }) {\n const result = await 
httpClient.rawGet(\n url`/v2/c/rooms/${options.roomId}/storage`,\n await authManager.getAuthValue({\n requestedScope: \"room:read\",\n roomId: options.roomId,\n })\n );\n return (await result.json()) as StorageNode[];\n }\n\n async function sendMessagesOverHTTP<\n P extends JsonObject,\n E extends Json,\n >(options: {\n roomId: string;\n nonce: string | undefined;\n messages: ClientMsg<P, E>[];\n }) {\n return httpClient.rawPost(\n url`/v2/c/rooms/${options.roomId}/send-message`,\n await authManager.getAuthValue({\n requestedScope: \"room:read\",\n roomId: options.roomId,\n }),\n {\n nonce: options.nonce,\n messages: options.messages,\n }\n );\n }\n\n /* -------------------------------------------------------------------------------------------------\n * Inbox notifications (User-level)\n * -----------------------------------------------------------------------------------------------*/\n async function getInboxNotifications(options?: {\n cursor?: string;\n query?: { roomId?: string; kind?: string };\n }) {\n const PAGE_SIZE = 50;\n\n let query: string | undefined;\n\n if (options?.query) {\n query = objectToQuery(options.query);\n }\n\n const json = await httpClient.get<{\n threads: ThreadDataPlain<TM, CM>[];\n inboxNotifications: InboxNotificationDataPlain[];\n subscriptions: SubscriptionDataPlain[];\n groups: GroupDataPlain[];\n meta: {\n requestedAt: string;\n nextCursor: string | null;\n };\n }>(\n url`/v2/c/inbox-notifications`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n {\n cursor: options?.cursor,\n limit: PAGE_SIZE,\n query,\n }\n );\n\n const groups = json.groups.map(convertToGroupData);\n\n // Instead of being returned publicly, the user's groups are put in\n // a separate store which is also used for on-demand fetching.\n groupsStore.setData(groups.map((group) => [group.id, group]));\n\n return {\n inboxNotifications: json.inboxNotifications.map(\n convertToInboxNotificationData\n ),\n threads: json.threads.map(convertToThreadData),\n subscriptions: json.subscriptions.map(convertToSubscriptionData),\n nextCursor: json.meta.nextCursor,\n requestedAt: new Date(json.meta.requestedAt),\n };\n }\n\n async function getInboxNotificationsSince(options: {\n since: Date;\n query?: { roomId?: string; kind?: string };\n signal?: AbortSignal;\n }) {\n let query: string | undefined;\n\n if (options?.query) {\n query = objectToQuery(options.query);\n }\n\n const json = await httpClient.get<{\n threads: ThreadDataPlain<TM, CM>[];\n inboxNotifications: InboxNotificationDataPlain[];\n subscriptions: SubscriptionDataPlain[];\n deletedThreads: ThreadDeleteInfoPlain[];\n deletedInboxNotifications: InboxNotificationDeleteInfoPlain[];\n deletedSubscriptions: SubscriptionDeleteInfoPlain[];\n meta: {\n requestedAt: string;\n };\n }>(\n url`/v2/c/inbox-notifications/delta`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n { since: options.since.toISOString(), query },\n { signal: options.signal }\n );\n return {\n inboxNotifications: {\n updated: json.inboxNotifications.map(convertToInboxNotificationData),\n deleted: json.deletedInboxNotifications.map(\n convertToInboxNotificationDeleteInfo\n ),\n },\n threads: {\n updated: json.threads.map(convertToThreadData),\n deleted: json.deletedThreads.map(convertToThreadDeleteInfo),\n },\n subscriptions: {\n updated: json.subscriptions.map(convertToSubscriptionData),\n deleted: json.deletedSubscriptions.map(convertToSubscriptionDeleteInfo),\n },\n requestedAt: new Date(json.meta.requestedAt),\n };\n }\n\n 
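// A minimal sketch, not from the diffed sources: getInboxNotifications() above is
// cursor-paginated (PAGE_SIZE = 50, nextCursor is null on the last page), and
// getInboxNotificationsSince() returns only changes since a previous requestedAt
// timestamp. Assuming `api` stands for the object returned by this factory (see the
// return value near the end of this file), a caller could drain all pages and keep
// the snapshot time like this:
async function fetchAllInboxNotifications(api: {
  getInboxNotifications(options?: { cursor?: string }): Promise<{
    inboxNotifications: unknown[];
    nextCursor: string | null;
    requestedAt: Date;
  }>;
}) {
  const all: unknown[] = [];
  let requestedAt = new Date(0);
  let cursor: string | undefined;
  do {
    const page = await api.getInboxNotifications({ cursor });
    all.push(...page.inboxNotifications);
    requestedAt = page.requestedAt;
    cursor = page.nextCursor ?? undefined;
  } while (cursor !== undefined);
  // A follow-up api.getInboxNotificationsSince({ since: requestedAt }) would then
  // return only the updated/deleted records since this snapshot.
  return { all, requestedAt };
}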
async function getUnreadInboxNotificationsCount(options: {\n query?: {\n roomId?: string;\n kind?: string;\n };\n signal?: AbortSignal;\n }) {\n let query: string | undefined;\n\n if (options?.query) {\n query = objectToQuery(options.query);\n }\n\n const { count } = await httpClient.get<{ count: number }>(\n url`/v2/c/inbox-notifications/count`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n { query },\n { signal: options?.signal }\n );\n return count;\n }\n\n async function markAllInboxNotificationsAsRead() {\n await httpClient.post(\n url`/v2/c/inbox-notifications/read`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n {\n inboxNotificationIds: \"all\",\n }\n );\n }\n\n async function markInboxNotificationsAsRead(inboxNotificationIds: string[]) {\n await httpClient.post(\n url`/v2/c/inbox-notifications/read`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n {\n inboxNotificationIds,\n }\n );\n }\n\n const batchedMarkInboxNotificationsAsRead = new Batch<string, string>(\n async (batchedInboxNotificationIds) => {\n const inboxNotificationIds = batchedInboxNotificationIds.flat();\n\n await markInboxNotificationsAsRead(inboxNotificationIds);\n\n return inboxNotificationIds;\n },\n { delay: 50 }\n );\n\n async function markInboxNotificationAsRead(inboxNotificationId: string) {\n await batchedMarkInboxNotificationsAsRead.get(inboxNotificationId);\n }\n\n async function deleteAllInboxNotifications() {\n await httpClient.delete(\n url`/v2/c/inbox-notifications`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" })\n );\n }\n\n async function deleteInboxNotification(inboxNotificationId: string) {\n await httpClient.delete(\n url`/v2/c/inbox-notifications/${inboxNotificationId}`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" })\n );\n }\n\n /* -------------------------------------------------------------------------------------------------\n * Notifications settings (Project level)\n * -------------------------------------------------------------------------------------------------\n */\n async function getNotificationSettings(options?: {\n signal?: AbortSignal;\n }): Promise<NotificationSettingsPlain> {\n return httpClient.get<NotificationSettingsPlain>(\n url`/v2/c/notification-settings`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n undefined,\n { signal: options?.signal }\n );\n }\n\n async function updateNotificationSettings(\n settings: PartialNotificationSettings\n ): Promise<NotificationSettingsPlain> {\n return httpClient.post<NotificationSettingsPlain>(\n url`/v2/c/notification-settings`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n settings\n );\n }\n\n /* -------------------------------------------------------------------------------------------------\n * User threads\n * -------------------------------------------------------------------------------------------------\n */\n async function getUserThreads_experimental(options?: {\n cursor?: string;\n query?: {\n resolved?: boolean;\n metadata?: Partial<QueryMetadata<TM>>;\n };\n }) {\n let query: string | undefined;\n\n if (options?.query) {\n query = objectToQuery(options.query);\n }\n\n const PAGE_SIZE = 50;\n\n const json = await httpClient.get<{\n threads: ThreadDataPlain<TM, CM>[];\n inboxNotifications: InboxNotificationDataPlain[];\n subscriptions: SubscriptionDataPlain[];\n deletedThreads: ThreadDeleteInfoPlain[];\n deletedInboxNotifications: 
InboxNotificationDeleteInfoPlain[];\n deletedSubscriptions: SubscriptionDeleteInfoPlain[];\n meta: {\n requestedAt: string;\n nextCursor: string | null;\n permissionHints: Record<string, Permission[]>;\n };\n }>(\n url`/v2/c/threads`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n {\n cursor: options?.cursor,\n query,\n limit: PAGE_SIZE,\n }\n );\n\n return {\n threads: json.threads.map(convertToThreadData),\n inboxNotifications: json.inboxNotifications.map(\n convertToInboxNotificationData\n ),\n subscriptions: json.subscriptions.map(convertToSubscriptionData),\n nextCursor: json.meta.nextCursor,\n requestedAt: new Date(json.meta.requestedAt),\n permissionHints: json.meta.permissionHints,\n };\n }\n\n async function getUserThreadsSince_experimental(options: {\n since: Date;\n signal?: AbortSignal;\n }) {\n const json = await httpClient.get<{\n threads: ThreadDataPlain<TM, CM>[];\n inboxNotifications: InboxNotificationDataPlain[];\n subscriptions: SubscriptionDataPlain[];\n deletedThreads: ThreadDeleteInfoPlain[];\n deletedInboxNotifications: InboxNotificationDeleteInfoPlain[];\n deletedSubscriptions: SubscriptionDeleteInfoPlain[];\n meta: {\n requestedAt: string;\n permissionHints: Record<string, Permission[]>;\n };\n }>(\n url`/v2/c/threads/delta`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n { since: options.since.toISOString() },\n { signal: options.signal }\n );\n\n return {\n threads: {\n updated: json.threads.map(convertToThreadData),\n deleted: json.deletedThreads.map(convertToThreadDeleteInfo),\n },\n inboxNotifications: {\n updated: json.inboxNotifications.map(convertToInboxNotificationData),\n deleted: json.deletedInboxNotifications.map(\n convertToInboxNotificationDeleteInfo\n ),\n },\n subscriptions: {\n updated: json.subscriptions.map(convertToSubscriptionData),\n deleted: json.deletedSubscriptions.map(convertToSubscriptionDeleteInfo),\n },\n requestedAt: new Date(json.meta.requestedAt),\n permissionHints: json.meta.permissionHints,\n };\n }\n\n /* -------------------------------------------------------------------------------------------------\n * Groups\n * -------------------------------------------------------------------------------------------------\n */\n\n const batchedGetGroups = new Batch(\n async (batchedGroupIds: string[]) => {\n const groupIds = batchedGroupIds.flat();\n const { groups: plainGroups } = await httpClient.post<{\n groups: GroupDataPlain[];\n }>(\n url`/v2/c/groups/find`,\n await authManager.getAuthValue({\n requestedScope: \"comments:read\",\n }),\n { groupIds }\n );\n\n const groups = new Map<string, GroupData>();\n\n for (const group of plainGroups) {\n groups.set(group.id, convertToGroupData(group));\n }\n\n return groupIds.map((groupId) => groups.get(groupId));\n },\n { delay: 50 }\n );\n const groupsStore = createBatchStore(batchedGetGroups);\n\n function getGroup(groupId: string) {\n return batchedGetGroups.get(groupId);\n }\n\n /* -------------------------------------------------------------------------------------------------\n * URL metadata\n * -------------------------------------------------------------------------------------------------\n */\n async function getUrlMetadata(_url: string) {\n const { metadata } = await httpClient.get<{ metadata: UrlMetadata }>(\n url`/v2/c/urls/metadata`,\n await authManager.getAuthValue({ requestedScope: \"comments:read\" }),\n { url: _url }\n );\n\n return metadata;\n }\n\n return {\n // Room threads\n getThreads,\n getThreadsSince,\n 
searchComments,\n createThread,\n getThread,\n deleteThread,\n editThreadMetadata,\n createComment,\n editComment,\n editCommentMetadata,\n deleteComment,\n addReaction,\n removeReaction,\n markThreadAsResolved,\n markThreadAsUnresolved,\n subscribeToThread,\n unsubscribeFromThread,\n markRoomInboxNotificationAsRead,\n // Room subscription settings\n getSubscriptionSettings,\n updateSubscriptionSettings,\n // Room text editor\n createTextMention,\n deleteTextMention,\n getTextVersion,\n createTextVersion,\n reportTextEditor,\n listTextVersions,\n listTextVersionsSince,\n // Room attachments\n getAttachmentUrl,\n uploadAttachment,\n getOrCreateAttachmentUrlsStore,\n // User attachments\n uploadChatAttachment,\n getOrCreateChatAttachmentUrlsStore,\n getChatAttachmentUrl,\n // Room storage\n streamStorage,\n sendMessagesOverHTTP,\n // Notifications\n getInboxNotifications,\n getInboxNotificationsSince,\n getUnreadInboxNotificationsCount,\n markAllInboxNotificationsAsRead,\n markInboxNotificationAsRead,\n deleteAllInboxNotifications,\n deleteInboxNotification,\n getNotificationSettings,\n updateNotificationSettings,\n // User threads\n getUserThreads_experimental,\n getUserThreadsSince_experimental,\n // Groups\n groupsStore,\n getGroup,\n // AI\n executeContextualPrompt,\n // URL metadata\n getUrlMetadata,\n };\n}\n\nexport function getBearerTokenFromAuthValue(authValue: AuthValue): string {\n if (authValue.type === \"public\") {\n return authValue.publicApiKey;\n } else {\n return authValue.token.raw;\n }\n}\n\n/**\n * @internal\n *\n * Small HTTP client for client-only REST API requests (e.g. /v2/c/* URLs).\n * These URLs all use public key, ID token, or access token authorization. This\n * HTTP client can be shared and used by both the Liveblocks Client and\n * Liveblocks Room instances internally to talk to our client-only REST API\n * backend.\n */\nclass HttpClient {\n #baseUrl: string;\n #fetchPolyfill: typeof fetch;\n\n constructor(baseUrl: string, fetchPolyfill: typeof fetch) {\n this.#baseUrl = baseUrl;\n this.#fetchPolyfill = fetchPolyfill;\n }\n\n // ------------------------------------------------------------------\n // Public methods\n // ------------------------------------------------------------------\n\n /**\n * Constructs and makes the HTTP request, but does not handle the response.\n *\n * This is what .rawFetch() does: 👈 This method!\n * 1. Set Content-Type header\n * 2. Set Authorization header\n * 3. Call the callback to obtain the `authValue` to use in the Authorization header\n *\n * This is what .fetch() does ON TOP of that:\n * 4. Parse response body as Json\n * 5. ...but silently return `{}` if that parsing fails\n * 6. 
Throw HttpError if response is an error\n */\n async #rawFetch(\n endpoint: URLSafeString,\n authValue: AuthValue,\n options?: RequestInit,\n params?: QueryParams\n ): Promise<Response> {\n if (!endpoint.startsWith(\"/v2/c/\")) {\n raise(\"This client can only be used to make /v2/c/* requests\");\n }\n\n const url = urljoin(this.#baseUrl, endpoint, params);\n return await this.#fetchPolyfill(url, {\n ...options,\n headers: {\n // These headers are default, but can be overriden by custom headers\n \"Content-Type\": \"application/json; charset=utf-8\",\n\n // Possible header overrides\n ...options?.headers,\n\n // Cannot be overriden by custom headers\n Authorization: `Bearer ${getBearerTokenFromAuthValue(authValue)}`,\n \"X-LB-Client\": PKG_VERSION || \"dev\",\n },\n });\n }\n\n /**\n * Constructs, makes the HTTP request, and handles the response by parsing\n * JSON and/or throwing an HttpError if it failed.\n *\n * This is what .rawFetch() does:\n * 1. Set Content-Type header\n * 2. Set Authorization header\n * 3. Call the callback to obtain the `authValue` to use in the Authorization header\n *\n * This is what .fetch() does ON TOP of that: 👈 This method!\n * 4. Parse response body as Json\n * 5. ...but silently return `{}` if that parsing fails (🤔)\n * 6. Throw HttpError if response is an error\n */\n async #fetch<T extends JsonObject>(\n endpoint: URLSafeString,\n authValue: AuthValue,\n options?: RequestInit,\n params?: QueryParams\n ): Promise<T> {\n const response = await this.#rawFetch(endpoint, authValue, options, params);\n\n if (!response.ok) {\n throw await HttpError.fromResponse(response);\n }\n\n let body;\n try {\n body = (await response.json()) as T;\n } catch {\n // TODO This looks wrong 🤔 !\n // TODO Should we not be throwing this error if something fails to parse?\n body = {} as T;\n }\n return body;\n }\n\n /**\n * Makes a GET request and returns the raw response.\n * Won't throw if the reponse is a non-2xx.\n * @deprecated Ideally, use .get() instead.\n */\n public async rawGet(\n endpoint: URLSafeString,\n authValue: AuthValue,\n params?: QueryParams,\n options?: Omit<RequestInit, \"body\" | \"method\" | \"headers\">\n ): Promise<Response> {\n return await this.#rawFetch(endpoint, authValue, options, params);\n }\n\n /**\n * Makes a POST request and returns the raw response.\n * Won't throw if the reponse is a non-2xx.\n * @deprecated Ideally, use .post() instead.\n */\n public async rawPost(\n endpoint: URLSafeString,\n authValue: AuthValue,\n body?: JsonObject\n ): Promise<Response> {\n return await this.#rawFetch(endpoint, authValue, {\n method: \"POST\",\n body: stringify(body),\n });\n }\n\n /**\n * Makes a DELETE request and returns the raw response.\n * Won't throw if the reponse is a non-2xx.\n * @deprecated Ideally, use .delete() instead.\n */\n public async rawDelete(\n endpoint: URLSafeString,\n authValue: AuthValue\n ): Promise<Response> {\n return await this.#rawFetch(endpoint, authValue, { method: \"DELETE\" });\n }\n\n /**\n * Makes a GET request, and return the JSON response.\n * Will throw if the reponse is a non-2xx.\n */\n public async get<T extends JsonObject>(\n endpoint: URLSafeString,\n authValue: AuthValue,\n params?: QueryParams,\n options?: Omit<RequestInit, \"body\" | \"method\" | \"headers\">\n ): Promise<T> {\n return await this.#fetch<T>(endpoint, authValue, options, params);\n }\n\n /**\n * Makes a POST request, and return the JSON response.\n * Will throw if the reponse is a non-2xx.\n */\n public async post<T extends JsonObject>(\n 
endpoint: URLSafeString,\n authValue: AuthValue,\n body?: JsonObject,\n options?: Omit<RequestInit, \"body\" | \"method\" | \"headers\">,\n params?: QueryParams\n ): Promise<T> {\n return await this.#fetch<T>(\n endpoint,\n authValue,\n {\n ...options,\n method: \"POST\",\n body: stringify(body),\n },\n params\n );\n }\n\n /**\n * Makes a DELETE request, and return the JSON response.\n * Will throw if the reponse is a non-2xx.\n */\n public async delete<T extends JsonObject>(\n endpoint: URLSafeString,\n authValue: AuthValue\n ): Promise<T> {\n return await this.#fetch<T>(endpoint, authValue, { method: \"DELETE\" });\n }\n\n /**\n * Makes a PUT request for a Blob body, and return the JSON response.\n * Will throw if the reponse is a non-2xx.\n */\n public async putBlob<T extends JsonObject>(\n endpoint: URLSafeString,\n authValue: AuthValue,\n blob?: Blob,\n params?: QueryParams,\n options?: Omit<RequestInit, \"body\" | \"method\" | \"headers\">\n ): Promise<T> {\n return await this.#fetch<T>(\n endpoint,\n authValue,\n {\n ...options,\n method: \"PUT\",\n headers: {\n \"Content-Type\": \"application/octet-stream\",\n },\n body: blob,\n },\n params\n );\n }\n}\n","/**\n * A generic Finite State Machine (FSM) implementation.\n *\n * This is a generic implementation that is not Liveblocks specific. We could\n * put this in a separate NPM package if we wanted to make this more reusable.\n */\n\nimport type { EventSource, Observable } from \"./EventSource\";\nimport { makeEventSource } from \"./EventSource\";\n\n/**\n * Built-in event sent by .addTimedTransition().\n */\nexport type TimerEvent = { readonly type: \"TIMER\" };\n\n/**\n * Built-in events sent by .onEnterAsync().\n */\nexport type AsyncOKEvent<T> = {\n readonly type: \"ASYNC_OK\";\n readonly data: T;\n};\nexport type AsyncErrorEvent = {\n readonly type: \"ASYNC_ERROR\";\n readonly reason: unknown;\n};\n\nexport type BaseEvent = { readonly type: string };\nexport type BuiltinEvent = TimerEvent | AsyncOKEvent<unknown> | AsyncErrorEvent;\n\nexport type Patchable<TContext> = Readonly<TContext> & {\n patch(patch: Partial<TContext>): void;\n};\n\nexport type CleanupFn<TContext> = (context: Patchable<TContext>) => void;\nexport type EnterFn<TContext> = (\n context: Patchable<TContext>\n) => void | CleanupFn<TContext>;\n\nexport type TargetFn<\n TContext extends object,\n TEvent extends BaseEvent,\n TState extends string,\n> = (\n event: TEvent,\n context: Readonly<TContext>\n) => TState | TargetObject<TContext, TEvent, TState> | null;\n\nexport type Effect<TContext, TEvent extends BaseEvent> = (\n context: Patchable<TContext>,\n event: TEvent\n) => void;\n\n/**\n * \"Expanded\" object form to specify a target state with.\n */\nexport type TargetObject<\n TContext extends object,\n TEvent extends BaseEvent,\n TState extends string,\n> = {\n target: TState;\n\n /**\n * Emit a side effect (other than assigning to the context) when this\n * transition is taken.\n */\n effect: Effect<TContext, TEvent> | Effect<TContext, TEvent>[];\n};\n\nexport type Target<\n TContext extends object,\n TEvent extends BaseEvent,\n TState extends string,\n> =\n | TState // Static, e.g. 'complete'\n | TargetObject<TContext, TEvent, TState>\n | TargetFn<TContext, TEvent, TState>; // Dynamic, e.g. (context) => context.x ? 'complete' : 'other'\n\ntype Groups<T extends string> = T extends `${infer G}.${infer Rest}`\n ? 
G | `${G}.${Groups<Rest>}`\n : never;\nexport type Wildcard<T extends string> = \"*\" | `${Groups<T>}.*`;\n\n/** State or one of its parent group patterns (e.g., \"foo.bar.baz\" | \"foo.bar.*\" | \"foo.*\") */\nexport type StateOrGroupPattern<T extends string> = T | `${Groups<T>}.*`;\n\nfunction distance(state1: string, state2: string): [number, number] {\n if (state1 === state2) {\n return [0, 0];\n }\n\n const chunks1 = state1.split(\".\");\n const chunks2 = state2.split(\".\");\n const minLen = Math.min(chunks1.length, chunks2.length);\n let shared = 0;\n for (; shared < minLen; shared++) {\n if (chunks1[shared] !== chunks2[shared]) {\n break;\n }\n }\n\n const up = chunks1.length - shared;\n const down = chunks2.length - shared;\n return [up, down];\n}\n\nfunction patterns<TState extends string>(\n targetState: TState,\n levels: number\n): (Wildcard<TState> | TState)[] {\n const parts = targetState.split(\".\");\n if (levels < 1 || levels > parts.length + 1) {\n throw new Error(\"Invalid number of levels\");\n }\n\n const result: (Wildcard<TState> | TState)[] = [];\n if (levels > parts.length) {\n result.push(\"*\");\n }\n\n for (let i = parts.length - levels + 1; i < parts.length; i++) {\n const slice = parts.slice(0, i);\n if (slice.length > 0) {\n result.push((slice.join(\".\") + \".*\") as Wildcard<TState>);\n }\n }\n\n result.push(targetState);\n\n return result;\n}\n\nclass SafeContext<TContext extends object> {\n #curr: Readonly<TContext>;\n\n constructor(initialContext: TContext) {\n this.#curr = initialContext;\n }\n\n get current(): Readonly<TContext> {\n return this.#curr;\n }\n\n /**\n * Call a callback function that allows patching of the context, by\n * calling `context.patch()`. Patching is only allowed for the duration\n * of this window.\n */\n allowPatching(callback: (context: Patchable<TContext>) => void): void {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n let allowed = true;\n\n const patchableContext = {\n ...this.#curr,\n patch(patch: Partial<TContext>): void {\n if (allowed) {\n self.#curr = Object.assign({}, self.#curr, patch);\n\n // Also patch the temporary mutable context helper itself, in case\n // there are multiple calls in a succession that need\n for (const pair of Object.entries(patch)) {\n const [key, value] = pair as [\n keyof TContext,\n TContext[keyof TContext],\n ];\n if (key !== \"patch\") {\n (this as TContext)[key] = value;\n }\n }\n } else {\n throw new Error(\"Can no longer patch stale context\");\n }\n },\n };\n callback(patchableContext);\n\n // If ever the patch function is called after this temporary window,\n // disallow it\n allowed = false;\n return;\n }\n}\n\nenum RunningState {\n NOT_STARTED_YET, // Machine can be set up during this phase\n STARTED,\n STOPPED,\n}\n\nlet nextId = 1;\n\nexport class FSM<\n TContext extends object,\n TEvent extends BaseEvent,\n TState extends string,\n> {\n public id: number;\n\n // Indicates whether this state machine is still being configured, has\n // started, or has terminated\n #runningState: RunningState;\n\n readonly #currentContext: SafeContext<TContext>;\n\n #states: Set<TState>;\n #currentStateOrNull: TState | null;\n\n #allowedTransitions: Map<\n TState,\n Map<TEvent[\"type\"], TargetFn<TContext, TEvent, TState>>\n >;\n\n readonly #eventHub: {\n readonly didReceiveEvent: EventSource<TEvent | BuiltinEvent>;\n readonly willTransition: EventSource<{ from: TState; to: TState }>;\n readonly didIgnoreEvent: EventSource<TEvent | BuiltinEvent>;\n readonly 
willExitState: EventSource<TState>;\n readonly didEnterState: EventSource<TState>;\n readonly didExitState: EventSource<{\n state: StateOrGroupPattern<TState>;\n durationMs: number;\n }>;\n };\n\n public readonly events: {\n readonly didReceiveEvent: Observable<TEvent | BuiltinEvent>;\n readonly willTransition: Observable<{ from: TState; to: TState }>;\n readonly didIgnoreEvent: Observable<TEvent | BuiltinEvent>;\n readonly willExitState: Observable<TState>;\n readonly didEnterState: Observable<TState>;\n readonly didExitState: Observable<{\n state: StateOrGroupPattern<TState>;\n durationMs: number;\n }>;\n };\n\n //\n // The cleanup stack is a stack of (optional) callback functions that will\n // be run when exiting the current state. If a state (or state group) does\n // not have an exit handler, then the entry for that level may be\n // `undefined`, but there will be an explicit entry in the stack for it.\n //\n // This will always be true:\n //\n // cleanupStack.length == currentState.split('.').length + 1\n //\n // Each stack level represents a different state \"group\".\n //\n // For example, if you are in a state named `foo.bar.qux`, then the stack\n // will contain the exit handler for `foo.bar.qux` (at the top), then\n // `foo.bar.*`, then `foo.*`, and finally, `*`.\n //\n #cleanupStack: (CleanupFn<TContext> | null)[];\n\n //\n // The entry times stack tracks when each state level was entered, using\n // performance.now() timestamps. This parallels the cleanup stack structure.\n //\n // For example, if you are in state `foo.bar.qux`, the stack contains:\n // [timestamp for *, timestamp for foo.*, timestamp for foo.bar.*, timestamp for foo.bar.qux]\n //\n #entryTimesStack: number[];\n\n #enterFns: Map<TState | Wildcard<TState>, EnterFn<TContext>>;\n\n // Used to provide better error messages\n #knownEventTypes: Set<string>;\n\n /**\n * Returns the initial state, which is defined by the first call made to\n * .addState().\n */\n get #initialState(): TState {\n // Return the first state ever defined as the initial state\n const result = this.#states.values()[Symbol.iterator]().next();\n if (result.done) {\n throw new Error(\"No states defined yet\");\n } else {\n return result.value;\n }\n }\n\n public get currentState(): TState {\n if (this.#currentStateOrNull === null) {\n if (this.#runningState === RunningState.NOT_STARTED_YET) {\n throw new Error(\"Not started yet\");\n } else {\n throw new Error(\"Already stopped\");\n }\n }\n return this.#currentStateOrNull;\n }\n\n /**\n * Starts the machine by entering the initial state.\n */\n public start(): this {\n if (this.#runningState !== RunningState.NOT_STARTED_YET) {\n throw new Error(\"State machine has already started\");\n }\n\n this.#runningState = RunningState.STARTED;\n this.#currentStateOrNull = this.#initialState;\n this.#enter(null);\n return this;\n }\n\n /**\n * Stops the state machine. 
Stopping the state machine will call exit\n * handlers for the current state, but not enter a new state.\n */\n public stop(): void {\n if (this.#runningState !== RunningState.STARTED) {\n throw new Error(\"Cannot stop a state machine that hasn't started yet\");\n }\n this.#exit(null);\n this.#runningState = RunningState.STOPPED;\n this.#currentStateOrNull = null;\n }\n\n constructor(initialContext: Readonly<TContext>) {\n this.id = nextId++;\n this.#runningState = RunningState.NOT_STARTED_YET;\n this.#currentStateOrNull = null;\n this.#states = new Set();\n this.#enterFns = new Map();\n this.#cleanupStack = [];\n this.#entryTimesStack = [];\n this.#knownEventTypes = new Set();\n this.#allowedTransitions = new Map();\n this.#currentContext = new SafeContext(initialContext);\n this.#eventHub = {\n didReceiveEvent: makeEventSource(),\n willTransition: makeEventSource(),\n didIgnoreEvent: makeEventSource(),\n willExitState: makeEventSource(),\n didEnterState: makeEventSource(),\n didExitState: makeEventSource(),\n };\n this.events = {\n didReceiveEvent: this.#eventHub.didReceiveEvent.observable,\n willTransition: this.#eventHub.willTransition.observable,\n didIgnoreEvent: this.#eventHub.didIgnoreEvent.observable,\n willExitState: this.#eventHub.willExitState.observable,\n didEnterState: this.#eventHub.didEnterState.observable,\n didExitState: this.#eventHub.didExitState.observable,\n };\n }\n\n public get context(): Readonly<TContext> {\n return this.#currentContext.current;\n }\n\n /**\n * Define an explicit finite state in the state machine.\n */\n public addState(state: TState): this {\n if (this.#runningState !== RunningState.NOT_STARTED_YET) {\n throw new Error(\"Already started\");\n }\n this.#states.add(state);\n return this;\n }\n\n public onEnter(\n nameOrPattern: TState | Wildcard<TState>,\n enterFn: EnterFn<TContext>\n ): this {\n if (this.#runningState !== RunningState.NOT_STARTED_YET) {\n throw new Error(\"Already started\");\n } else if (this.#enterFns.has(nameOrPattern)) {\n throw new Error(\n // TODO We _currently_ don't support multiple .onEnters() for the same\n // state, but this is not a fundamental limitation. Just not\n // implemented yet. If we wanted to, we could make this an array.\n `enter/exit function for ${nameOrPattern} already exists`\n );\n }\n\n this.#enterFns.set(nameOrPattern, enterFn);\n return this;\n }\n\n /**\n * Defines a promise-based state. When the state is entered, the promise is\n * created. When the promise resolves, the machine will transition to the\n * provided `onOK` target state. When the promise rejects, the machine will\n * transition to the `onError` target state.\n *\n * Optionally, a `maxTimeout` can be set. If the timeout happens before the\n * promise is settled, then the machine will also transition to the `onError`\n * target state.\n *\n * @param stateOrPattern The state name, or state group pattern name.\n * @param promiseFn The callback to be invoked when the state is entered.\n * @param onOK The state to transition to when the promise resolves.\n * @param onError The state to transition to when the promise\n * rejects, or when the timeout happens before the\n * promise has been settled.\n * @param maxTimeout Optional timeout in milliseconds.\n *\n * When the promise callback function is invoked, it's provided with an\n * AbortSignal (2nd argument).\n * If a state transition happens while the promise is pending (for example,\n * an event, or a timeout happens), then an abort signal will be used to\n * indicate this. 
Implementers can use this abort signal to terminate the\n * in-flight promise, or ignore its results, etc.\n */\n public onEnterAsync<T>(\n nameOrPattern: TState | Wildcard<TState>,\n promiseFn: (context: Readonly<TContext>, signal: AbortSignal) => Promise<T>,\n onOK: Target<TContext, AsyncOKEvent<T>, TState>,\n onError: Target<TContext, AsyncErrorEvent, TState>,\n maxTimeout?: number\n ): this {\n return this.onEnter(nameOrPattern, () => {\n const abortController = new AbortController();\n const signal = abortController.signal;\n\n const timeoutId = maxTimeout\n ? setTimeout(() => {\n const reason = new Error(\"Timed out\");\n this.#transition({ type: \"ASYNC_ERROR\", reason }, onError);\n }, maxTimeout)\n : undefined;\n\n let done = false;\n void promiseFn(this.#currentContext.current, signal).then(\n // On OK\n (data: T) => {\n if (!signal.aborted) {\n done = true;\n this.#transition({ type: \"ASYNC_OK\", data }, onOK);\n }\n },\n\n // On Error\n (reason: unknown) => {\n if (!signal.aborted) {\n done = true;\n this.#transition({ type: \"ASYNC_ERROR\", reason }, onError);\n }\n }\n );\n\n return () => {\n clearTimeout(timeoutId);\n if (!done) {\n abortController.abort();\n }\n };\n });\n }\n\n #getStatesMatching(nameOrPattern: TState | Wildcard<TState>): TState[] {\n const matches: TState[] = [];\n\n // We're trying to match a group pattern here, i.e. `foo.*` (which might\n // match `foo.bar` and `foo.qux` states)\n if (nameOrPattern === \"*\") {\n for (const state of this.#states) {\n matches.push(state);\n }\n } else if (nameOrPattern.endsWith(\".*\")) {\n const prefix = nameOrPattern.slice(0, -1); // Strip only the \"*\", keep the \".\"\n for (const state of this.#states) {\n if (state.startsWith(prefix)) {\n matches.push(state);\n }\n }\n } else {\n // Just a single, explicit state name\n const name = nameOrPattern as TState;\n if (this.#states.has(name)) {\n matches.push(name);\n }\n }\n\n if (matches.length === 0) {\n throw new Error(`No states match ${JSON.stringify(nameOrPattern)}`);\n }\n\n return matches;\n }\n\n /**\n * Define all allowed outgoing transitions for a state.\n *\n * The targets for each event can be defined as a function which returns the\n * next state to transition to. These functions can look at the `event` or\n * `context` params to conditionally decide which next state to transition\n * to.\n *\n * If you set it to `null`, then the transition will be explicitly forbidden\n * and throw an error. If you don't define a target for a transition, then\n * such events will get ignored.\n */\n public addTransitions(\n nameOrPattern: TState | Wildcard<TState>,\n mapping: {\n [E in TEvent as E[\"type\"]]?: Target<TContext, E, TState> | null;\n }\n ): this {\n if (this.#runningState !== RunningState.NOT_STARTED_YET) {\n throw new Error(\"Already started\");\n }\n\n for (const srcState of this.#getStatesMatching(nameOrPattern)) {\n let map = this.#allowedTransitions.get(srcState);\n if (map === undefined) {\n map = new Map();\n this.#allowedTransitions.set(srcState, map);\n }\n\n for (const [type, target_] of Object.entries(mapping)) {\n if (map.has(type)) {\n throw new Error(\n `Trying to set transition \"${type}\" on \"${srcState}\" (via \"${nameOrPattern}\"), but a transition already exists there.`\n );\n }\n\n const target = target_ as\n | Target<TContext, TEvent, TState>\n | null\n | undefined;\n this.#knownEventTypes.add(type);\n\n if (target !== undefined) {\n const targetFn = typeof target === \"function\" ? 
target : () => target;\n map.set(type, targetFn);\n }\n }\n }\n return this;\n }\n\n /**\n * Like `.addTransition()`, but takes an (anonymous) transition whenever the\n * timer fires.\n *\n * @param stateOrPattern The state name, or state group pattern name.\n * @param after Number of milliseconds after which to take the\n * transition. If in the mean time, another transition\n * is taken, the timer will get cancelled.\n * @param target The target state to go to.\n */\n public addTimedTransition(\n stateOrPattern: TState | Wildcard<TState>,\n after: number | ((context: Readonly<TContext>) => number),\n target: Target<TContext, TimerEvent, TState>\n ): this {\n return this.onEnter(stateOrPattern, () => {\n const ms =\n typeof after === \"function\"\n ? after(this.#currentContext.current)\n : after;\n const timeoutID = setTimeout(() => {\n this.#transition({ type: \"TIMER\" }, target);\n }, ms);\n\n return () => {\n clearTimeout(timeoutID);\n };\n });\n }\n\n #getTargetFn(\n eventName: TEvent[\"type\"]\n ): TargetFn<TContext, TEvent, TState> | undefined {\n return this.#allowedTransitions.get(this.currentState)?.get(eventName);\n }\n\n /**\n * Exits the current state, and executes any necessary cleanup functions.\n * Call this before changing the current state to the next state.\n *\n * @param levels Defines how many \"levels\" of nesting will be\n * exited. For example, if you transition from `foo.bar.qux` to\n * `foo.bar.baz`, then the level is 1. But if you transition from\n * `foo.bar.qux` to `bla.bla`, then the level is 3.\n * If `null`, it will exit all levels.\n */\n #exit(levels: number | null) {\n this.#eventHub.willExitState.notify(this.currentState);\n\n const now = performance.now();\n const parts = this.currentState.split(\".\");\n\n this.#currentContext.allowPatching((patchableContext) => {\n levels = levels ?? this.#cleanupStack.length;\n for (let i = 0; i < levels; i++) {\n this.#cleanupStack.pop()?.(patchableContext);\n\n // Emit timing info for the exited state level\n const entryTime = this.#entryTimesStack.pop();\n if (\n entryTime !== undefined &&\n // ...but avoid computing state names if nobody is listening\n this.#eventHub.didExitState.count() > 0\n ) {\n // Compute the state prefix for this level\n // Stack depth corresponds to: *, foo.*, foo.bar.*, foo.bar.baz\n // So current stack length after pop tells us which prefix we exited\n const depth = this.#entryTimesStack.length;\n\n // Skip the root wildcard level (depth === 0)\n if (depth === 0) continue;\n\n const state: StateOrGroupPattern<TState> =\n depth === parts.length\n ? this.currentState // Leaf state: use exact name\n : (`${parts.slice(0, depth).join(\".\")}.*` as `${Groups<TState>}.*`);\n this.#eventHub.didExitState.notify({\n state,\n durationMs: now - entryTime,\n });\n }\n }\n });\n }\n\n /**\n * Enters the current state, and executes any necessary onEnter handlers.\n * Call this directly _after_ setting the current state to the next state.\n */\n #enter(levels: number | null) {\n const enterPatterns = patterns(\n this.currentState,\n levels ?? 
this.currentState.split(\".\").length + 1\n );\n\n const now = performance.now();\n\n this.#currentContext.allowPatching((patchableContext) => {\n for (const pattern of enterPatterns) {\n const enterFn = this.#enterFns.get(pattern);\n const cleanupFn = enterFn?.(patchableContext);\n if (typeof cleanupFn === \"function\") {\n this.#cleanupStack.push(cleanupFn);\n } else {\n this.#cleanupStack.push(null);\n }\n // Track entry time for this state level\n this.#entryTimesStack.push(now);\n }\n });\n\n this.#eventHub.didEnterState.notify(this.currentState);\n }\n\n /**\n * Sends an event to the machine, which may cause an internal state\n * transition to happen. When that happens, will trigger side effects.\n */\n public send(event: TEvent): void {\n // Throw if the event is unknown, which may likely be a configuration error\n if (!this.#knownEventTypes.has(event.type)) {\n throw new Error(`Invalid event ${JSON.stringify(event.type)}`);\n }\n\n if (this.#runningState === RunningState.STOPPED) {\n // Ignore all events sent to the machine after it has stopped. This is\n // similar to how we ignore events sent to the machine after it\n // transitioned to a phase in which the event won't be handled: it would\n // also get ignored.\n // However, if the machine _hasn't started yet_, we still let it throw an\n // error, because then it's most likely a usage error.\n return;\n }\n\n const targetFn = this.#getTargetFn(event.type);\n if (targetFn !== undefined) {\n return this.#transition(event, targetFn);\n } else {\n // Ignore the event otherwise\n this.#eventHub.didIgnoreEvent.notify(event);\n }\n }\n\n #transition<E extends TEvent | BuiltinEvent>(\n event: E,\n target: Target<TContext, E, TState>\n ) {\n this.#eventHub.didReceiveEvent.notify(event);\n\n const oldState = this.currentState;\n\n const targetFn = typeof target === \"function\" ? target : () => target;\n const nextTarget = targetFn(event, this.#currentContext.current);\n let nextState: TState;\n let effects: Effect<TContext, E>[] | undefined = undefined;\n if (nextTarget === null) {\n // Do not transition\n this.#eventHub.didIgnoreEvent.notify(event);\n return;\n }\n\n if (typeof nextTarget === \"string\") {\n nextState = nextTarget;\n } else {\n nextState = nextTarget.target;\n effects = Array.isArray(nextTarget.effect)\n ? nextTarget.effect\n : [nextTarget.effect];\n }\n\n if (!this.#states.has(nextState)) {\n throw new Error(`Invalid next state name: ${JSON.stringify(nextState)}`);\n }\n\n this.#eventHub.willTransition.notify({ from: oldState, to: nextState });\n\n const [up, down] = distance(this.currentState, nextState);\n if (up > 0) {\n this.#exit(up);\n }\n\n this.#currentStateOrNull = nextState; // NOTE: Could stay the same, but... 
there could be an action to execute here\n if (effects !== undefined) {\n const effectsToRun = effects;\n this.#currentContext.allowPatching((patchableContext) => {\n for (const effect of effectsToRun) {\n if (typeof effect === \"function\") {\n // May mutate context\n effect(patchableContext, event);\n } else {\n patchableContext.patch(effect);\n }\n }\n });\n }\n\n if (down > 0) {\n this.#enter(down);\n }\n }\n}\n\n/** @internal - For unit tests only */\nexport { distance, patterns };\n","import type { Json, JsonObject } from \"../lib/Json\";\nimport type { BaseUserMeta } from \"./BaseUserMeta\";\nimport type { ServerWireOp } from \"./Op\";\nimport type { CompactNode, StorageNode } from \"./StorageNode\";\n\nexport type ServerMsgCode = (typeof ServerMsgCode)[keyof typeof ServerMsgCode];\nexport const ServerMsgCode = Object.freeze({\n // For Presence\n UPDATE_PRESENCE: 100,\n USER_JOINED: 101,\n USER_LEFT: 102,\n BROADCASTED_EVENT: 103,\n ROOM_STATE: 104,\n\n // For Storage\n STORAGE_STATE_V7: 200, // Only sent in V7\n STORAGE_CHUNK: 210, // Used in V8+\n STORAGE_STREAM_END: 211, // Used in V8+\n UPDATE_STORAGE: 201,\n\n // For Yjs Docs\n UPDATE_YDOC: 300,\n\n // For Comments\n THREAD_CREATED: 400,\n THREAD_DELETED: 407,\n THREAD_METADATA_UPDATED: 401,\n THREAD_UPDATED: 408,\n COMMENT_CREATED: 402,\n COMMENT_EDITED: 403,\n COMMENT_DELETED: 404,\n COMMENT_REACTION_ADDED: 405,\n COMMENT_REACTION_REMOVED: 406,\n COMMENT_METADATA_UPDATED: 409,\n\n // Error codes\n REJECT_STORAGE_OP: 299, // Sent if a mutation was not allowed on the server (i.e. due to permissions, limit exceeded, etc)\n});\n\nexport namespace ServerMsgCode {\n export type UPDATE_PRESENCE = typeof ServerMsgCode.UPDATE_PRESENCE;\n export type USER_JOINED = typeof ServerMsgCode.USER_JOINED;\n export type USER_LEFT = typeof ServerMsgCode.USER_LEFT;\n export type BROADCASTED_EVENT = typeof ServerMsgCode.BROADCASTED_EVENT;\n export type ROOM_STATE = typeof ServerMsgCode.ROOM_STATE;\n export type STORAGE_STATE_V7 = typeof ServerMsgCode.STORAGE_STATE_V7;\n export type STORAGE_CHUNK = typeof ServerMsgCode.STORAGE_CHUNK;\n export type STORAGE_STREAM_END = typeof ServerMsgCode.STORAGE_STREAM_END;\n export type UPDATE_STORAGE = typeof ServerMsgCode.UPDATE_STORAGE;\n export type UPDATE_YDOC = typeof ServerMsgCode.UPDATE_YDOC;\n export type THREAD_CREATED = typeof ServerMsgCode.THREAD_CREATED;\n export type THREAD_DELETED = typeof ServerMsgCode.THREAD_DELETED;\n export type THREAD_METADATA_UPDATED =\n typeof ServerMsgCode.THREAD_METADATA_UPDATED;\n export type THREAD_UPDATED = typeof ServerMsgCode.THREAD_UPDATED;\n export type COMMENT_CREATED = typeof ServerMsgCode.COMMENT_CREATED;\n export type COMMENT_EDITED = typeof ServerMsgCode.COMMENT_EDITED;\n export type COMMENT_DELETED = typeof ServerMsgCode.COMMENT_DELETED;\n export type COMMENT_REACTION_ADDED =\n typeof ServerMsgCode.COMMENT_REACTION_ADDED;\n export type COMMENT_REACTION_REMOVED =\n typeof ServerMsgCode.COMMENT_REACTION_REMOVED;\n export type COMMENT_METADATA_UPDATED =\n typeof ServerMsgCode.COMMENT_METADATA_UPDATED;\n export type REJECT_STORAGE_OP = typeof ServerMsgCode.REJECT_STORAGE_OP;\n}\n\n/**\n * Messages that can be sent from the server to the client.\n */\nexport type ServerMsg<\n P extends JsonObject,\n U extends BaseUserMeta,\n E extends Json,\n> =\n // For Presence\n | UpdatePresenceServerMsg<P> // Broadcasted\n | UserJoinServerMsg<U> // Broadcasted\n | UserLeftServerMsg // Broadcasted\n | BroadcastedEventServerMsg<E> // Broadcasted\n | RoomStateServerMsg<U> // For a 
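// A minimal usage sketch (not from the diffed sources) of the generic FSM class
// defined in lib/fsm.ts above. The first .addState() call defines the initial state,
// .addTransitions() declares which events each state accepts (string targets, target
// objects, or target functions), and .onEnter() handlers may patch the context and
// return a cleanup function. All state and event names below are made up; dotted
// state names (e.g. "@ok.connected" in connection.ts further down) form groups that
// can be addressed with wildcard patterns such as "@ok.*".
type Ctx = { attempts: number };
type Ev = { type: "GO" } | { type: "FAIL" };
type St = "idle" | "busy" | "done";

const machine = new FSM<Ctx, Ev, St>({ attempts: 0 })
  .addState("idle") // first state added == initial state
  .addState("busy")
  .addState("done")
  .addTransitions("idle", { GO: "busy" })
  .addTransitions("busy", {
    GO: "done",
    FAIL: (_event, ctx) => (ctx.attempts >= 3 ? "done" : "idle"), // dynamic target
  })
  .onEnter("busy", (ctx) => {
    ctx.patch({ attempts: ctx.attempts + 1 });
    return () => { /* cleanup runs when leaving "busy" */ };
  });

machine.start();                        // enters "idle"
machine.send({ type: "GO" });           // "idle" -> "busy", onEnter bumps attempts
console.log(machine.currentState);      // "busy"
machine.stop();                         // runs cleanup handlers for the current state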
single client\n\n // For Storage\n | StorageStateServerMsg_V7 // Only used in protocol v7\n | StorageChunkServerMsg // Used in protocol v8+\n | StorageEndServerMsg // Used in protocol v8+\n | UpdateStorageServerMsg // Broadcasted\n | YDocUpdateServerMsg // For receiving doc from backend\n | RejectedStorageOpServerMsg // For a single client\n\n // Comments\n | CommentsEventServerMsg;\n\nexport type CommentsEventServerMsg =\n | ThreadCreatedEvent\n | ThreadDeletedEvent\n | ThreadMetadataUpdatedEvent\n | ThreadUpdatedEvent\n | CommentCreatedEvent\n | CommentEditedEvent\n | CommentDeletedEvent\n | CommentReactionAdded\n | CommentReactionRemoved\n | CommentMetadataUpdatedEvent;\n\ntype ThreadCreatedEvent = {\n type: ServerMsgCode.THREAD_CREATED;\n threadId: string;\n};\n\ntype ThreadDeletedEvent = {\n type: ServerMsgCode.THREAD_DELETED;\n threadId: string;\n};\n\ntype ThreadMetadataUpdatedEvent = {\n type: ServerMsgCode.THREAD_METADATA_UPDATED;\n threadId: string;\n};\n\ntype ThreadUpdatedEvent = {\n type: ServerMsgCode.THREAD_UPDATED;\n threadId: string;\n};\n\ntype CommentCreatedEvent = {\n type: ServerMsgCode.COMMENT_CREATED;\n threadId: string;\n commentId: string;\n};\n\ntype CommentEditedEvent = {\n type: ServerMsgCode.COMMENT_EDITED;\n threadId: string;\n commentId: string;\n};\n\ntype CommentDeletedEvent = {\n type: ServerMsgCode.COMMENT_DELETED;\n threadId: string;\n commentId: string;\n};\n\ntype CommentReactionAdded = {\n type: ServerMsgCode.COMMENT_REACTION_ADDED;\n threadId: string;\n commentId: string;\n emoji: string;\n};\n\ntype CommentReactionRemoved = {\n type: ServerMsgCode.COMMENT_REACTION_REMOVED;\n threadId: string;\n commentId: string;\n emoji: string;\n};\n\ntype CommentMetadataUpdatedEvent = {\n type: ServerMsgCode.COMMENT_METADATA_UPDATED;\n threadId: string;\n commentId: string;\n};\n\n/**\n * Sent by the WebSocket server and broadcasted to all clients to announce that\n * a User updated their presence. For example, when a user moves their cursor.\n *\n * In most cases, the data payload will only include the fields from the\n * Presence that have been changed since the last announcement. However, after\n * a new user joins a room, a \"full presence\" will be announced so the newly\n * connected user will get each other's user full presence at least once. In\n * those cases, the `targetActor` field indicates the newly connected client,\n * so all other existing clients can ignore this broadcasted message.\n */\nexport type UpdatePresenceServerMsg<P extends JsonObject> =\n //\n // Full Presence™ message\n //\n | {\n readonly type: ServerMsgCode.UPDATE_PRESENCE;\n /**\n * The User whose Presence has changed.\n */\n readonly actor: number;\n /**\n * When set, signifies that this is a Full Presence™ update, not a patch.\n *\n * The numeric value itself no longer has specific meaning. Historically,\n * this field was intended so that clients could ignore these broadcasted\n * full presence messages, but it turned out that getting a full presence\n * \"keyframe\" from time to time was useful.\n *\n * So nowadays, the presence (pun intended) of this `targetActor` field\n * is a backward-compatible way of expressing that the `data` contains\n * all presence fields, and isn't a partial \"patch\".\n */\n readonly targetActor: number;\n /**\n * The partial or full Presence of a User. 
If the `targetActor` field is set,\n * this will be the full Presence, otherwise it only contain the fields that\n * have changed since the last broadcast.\n */\n readonly data: P;\n }\n\n //\n // Partial Presence™ message\n //\n | {\n readonly type: ServerMsgCode.UPDATE_PRESENCE;\n /**\n * The User whose Presence has changed.\n */\n readonly actor: number;\n /**\n * Not set for partial presence updates.\n */\n readonly targetActor?: undefined;\n /**\n * A partial Presence patch to apply to the User. It will only contain the\n * fields that have changed since the last broadcast.\n */\n readonly data: Partial<P>;\n };\n\n/**\n * Sent by the WebSocket server and broadcasted to all clients to announce that\n * a new User has joined the Room.\n */\nexport type UserJoinServerMsg<U extends BaseUserMeta> = {\n readonly type: ServerMsgCode.USER_JOINED;\n readonly actor: number;\n /**\n * The id of the User that has been set in the authentication endpoint.\n * Useful to get additional information about the connected user.\n */\n readonly id: U[\"id\"];\n /**\n * Additional user information that has been set in the authentication\n * endpoint.\n */\n readonly info: U[\"info\"];\n /**\n * Informs the client what (public) permissions this (other) User has.\n */\n readonly scopes: string[];\n};\n\n/**\n * Sent by the WebSocket server and broadcasted to all clients to announce that\n * a new User has left the Room.\n */\nexport type UserLeftServerMsg = {\n readonly type: ServerMsgCode.USER_LEFT;\n readonly actor: number;\n};\n\n/**\n * Sent by the WebSocket server when the ydoc is updated or when requested based on stateVector passed.\n * Contains a base64 encoded update\n */\nexport type YDocUpdateServerMsg = {\n readonly type: ServerMsgCode.UPDATE_YDOC;\n readonly update: string;\n readonly isSync: boolean; // dropped after 1.2, we use presence of stateVector instead\n readonly stateVector: string | null; // server's state vector, sent in response to fetch\n readonly guid?: string; // an optional guid to identify which subdoc this update to\n readonly v2?: boolean; // whether this is a v2 update\n readonly remoteSnapshotHash: string; // The hash of snapshot of server's document. Used to detect if the client has the latest version of the document.\n};\n\n/**\n * Sent by the WebSocket server and broadcasted to all clients to announce that\n * a User broadcasted an Event to everyone in the Room.\n */\nexport type BroadcastedEventServerMsg<E extends Json> = {\n readonly type: ServerMsgCode.BROADCASTED_EVENT;\n /**\n * The User who broadcast the Event. Absent when this event is broadcast from\n * the REST API in the backend.\n */\n readonly actor: number;\n /**\n * The arbitrary payload of the Event. This can be any JSON value. Clients\n * will have to manually verify/decode this event.\n */\n readonly event: E;\n};\n\n/**\n * Sent by the WebSocket server to a single client in response to the client\n * joining the Room, to provide the initial state of the Room. The payload\n * includes a list of all other Users that already are in the Room.\n */\nexport type RoomStateServerMsg<U extends BaseUserMeta> = {\n readonly type: ServerMsgCode.ROOM_STATE;\n /** Informs the client what their actor ID is going to be. */\n readonly actor: number;\n /** Secure nonce for the current session. */\n readonly nonce: string;\n /** Informs the client what permissions the current User (self) has. 
*/\n readonly scopes: string[];\n readonly users: {\n readonly [otherActor: number]: U & { scopes: string[] };\n };\n /** Metadata sent from the server to the client. */\n readonly meta: JsonObject;\n};\n\n/**\n * No longer used as of WS API v8.\n */\nexport type StorageStateServerMsg_V7 = {\n readonly type: ServerMsgCode.STORAGE_STATE_V7;\n readonly items: StorageNode[];\n};\n\n/**\n * Sent by the WebSocket server to a single client in response to the client\n * sending a FetchStorageClientMsg message, to provide one chunk of the initial\n * Storage state of the Room.\n *\n * The server will respond with 1+ STORAGE_CHUNK messages, followed by exactly\n * one STORAGE_STREAM_END message to mark the end of the transmission.\n *\n * If the room is using the new storage engine that supports streaming, then\n * potentially multiple chunks might get sent. If the room is using the old\n * storage engine, then all nodes will be sent in a single/large chunk\n * (non-streaming).\n */\nexport type StorageChunkServerMsg = {\n readonly type: ServerMsgCode.STORAGE_CHUNK;\n readonly nodes: CompactNode[];\n};\n\nexport type StorageEndServerMsg = {\n readonly type: ServerMsgCode.STORAGE_STREAM_END;\n};\n\n/**\n * Sent by the WebSocket server and broadcasted to all clients to announce that\n * a change occurred in the Storage document.\n *\n * The payload of this message contains a list of Ops (aka incremental\n * mutations to make to the initially loaded document).\n */\nexport type UpdateStorageServerMsg = {\n readonly type: ServerMsgCode.UPDATE_STORAGE;\n readonly ops: ServerWireOp[];\n};\n\n/**\n * Sent by the WebSocket server to the client to indicate that certain opIds\n * have been rejected, possibly due to lack of permissions or exceeding\n * a limit.\n */\nexport type RejectedStorageOpServerMsg = {\n readonly type: ServerMsgCode.REJECT_STORAGE_OP;\n readonly opIds: string[];\n readonly reason: string;\n};\n","export interface IWebSocketEvent {\n type: string;\n}\n\nexport interface IWebSocketCloseEvent extends IWebSocketEvent {\n readonly code: WebsocketCloseCodes;\n readonly wasClean: boolean;\n readonly reason: string;\n}\n\nexport interface IWebSocketMessageEvent extends IWebSocketEvent {\n readonly data: string | Buffer | ArrayBuffer | readonly Buffer[];\n}\n\nexport interface IWebSocketInstance {\n readonly CONNECTING: number; // 0\n readonly OPEN: number; // 1\n readonly CLOSING: number; // 2\n readonly CLOSED: number; // 3\n\n readonly readyState: number;\n\n addEventListener(type: \"close\", listener: (this: IWebSocketInstance, ev: IWebSocketCloseEvent) => unknown): void; // prettier-ignore\n addEventListener(type: \"message\", listener: (this: IWebSocketInstance, ev: IWebSocketMessageEvent) => unknown): void; // prettier-ignore\n addEventListener(type: \"open\" | \"error\", listener: (this: IWebSocketInstance, ev: IWebSocketEvent) => unknown): void; // prettier-ignore\n\n removeEventListener(type: \"close\", listener: (this: IWebSocketInstance, ev: IWebSocketCloseEvent) => unknown): void; // prettier-ignore\n removeEventListener(type: \"message\", listener: (this: IWebSocketInstance, ev: IWebSocketMessageEvent) => unknown): void; // prettier-ignore\n removeEventListener(type: \"open\" | \"error\", listener: (this: IWebSocketInstance, ev: IWebSocketEvent) => unknown): void; // prettier-ignore\n\n close(): void;\n send(data: string): void;\n}\n\n/**\n * Either the browser-based WebSocket API or Node.js' WebSocket API (from the\n * 'ws' package).\n *\n * This type defines the minimal 
WebSocket API that Liveblocks needs from\n * a WebSocket implementation, and is a minimal subset of the browser-based\n * WebSocket APIs and Node.js' WebSocket API so that both implementations are\n * assignable to this type.\n */\nexport interface IWebSocket {\n new (address: string): IWebSocketInstance;\n}\n\n/**\n * The following ranges will be respected by the client:\n *\n * 10xx: client will reauthorize (just like 41xx)\n * 40xx: client will disconnect\n * 41xx: client will reauthorize\n * 42xx: client will retry without reauthorizing (currently not used)\n *\n */\nexport enum WebsocketCloseCodes {\n /** Normal close of connection, the connection fulfilled its purpose. */\n CLOSE_NORMAL = 1000,\n /** Unexpected error happened with the network/infra level. In spirit akin to HTTP 503 */\n CLOSE_ABNORMAL = 1006,\n /** Unexpected error happened. In spirit akin to HTTP 500 */\n UNEXPECTED_CONDITION = 1011,\n /** Please back off for now, but try again in a few moments */\n TRY_AGAIN_LATER = 1013,\n /** Message wasn't understood, disconnect */\n INVALID_MESSAGE_FORMAT = 4000,\n /** Server refused to allow connection. Re-authorizing won't help. Disconnect. In spirit akin to HTTP 403 */\n NOT_ALLOWED = 4001,\n /** Unused */\n MAX_NUMBER_OF_MESSAGES_PER_SECONDS = 4002,\n /** Unused */\n MAX_NUMBER_OF_CONCURRENT_CONNECTIONS = 4003,\n /** Unused */\n MAX_NUMBER_OF_MESSAGES_PER_DAY_PER_APP = 4004,\n /** Room is full, disconnect */\n MAX_NUMBER_OF_CONCURRENT_CONNECTIONS_PER_ROOM = 4005,\n /** The room's ID was updated, disconnect */\n ROOM_ID_UPDATED = 4006,\n /** The server kicked the connection from the room. */\n KICKED = 4100,\n /** The auth token is expired, reauthorize to get a fresh one. In spirit akin to HTTP 401 */\n TOKEN_EXPIRED = 4109,\n /** Disconnect immediately */\n CLOSE_WITHOUT_RETRY = 4999,\n}\n\nexport function shouldDisconnect(code: WebsocketCloseCodes): boolean {\n return (\n code === WebsocketCloseCodes.CLOSE_WITHOUT_RETRY ||\n ((code as number) >= 4000 && (code as number) < 4100)\n );\n}\n\nexport function shouldReauth(code: WebsocketCloseCodes): boolean {\n return (code as number) >= 4100 && (code as number) < 4200;\n}\n\nexport function shouldRetryWithoutReauth(code: WebsocketCloseCodes): boolean {\n return (\n code === WebsocketCloseCodes.TRY_AGAIN_LATER ||\n ((code as number) >= 4200 && (code as number) < 4300)\n );\n}\n","import { assertNever } from \"./lib/assert\";\nimport { controlledPromise } from \"./lib/controlledPromise\";\nimport type { Observable } from \"./lib/EventSource\";\nimport { makeBufferableEventSource, makeEventSource } from \"./lib/EventSource\";\nimport * as console from \"./lib/fancy-console\";\nimport type { BuiltinEvent, Patchable, Target } from \"./lib/fsm\";\nimport { FSM } from \"./lib/fsm\";\nimport type { Json } from \"./lib/Json\";\nimport { tryParseJson, withTimeout } from \"./lib/utils\";\nimport { ServerMsgCode } from \"./protocol/ServerMsg\";\nimport type {\n IWebSocketCloseEvent,\n IWebSocketEvent,\n IWebSocketInstance,\n IWebSocketMessageEvent,\n} from \"./types/IWebSocket\";\nimport {\n shouldDisconnect,\n shouldReauth,\n shouldRetryWithoutReauth,\n WebsocketCloseCodes,\n} from \"./types/IWebSocket\";\n\n/**\n * Returns a human-readable status indicating the current connection status of\n * a Room, as returned by `room.getStatus()`. 
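A sketch of routing a close code through the three helpers defined above; the `CloseAction` labels are illustrative, while the code ranges in the comments come from the enum's documentation.

```ts
// Sketch: map a WebSocket close code onto the client behaviours listed above.
type CloseAction = "disconnect" | "reauthorize" | "retry" | "reauth-after-backoff";

function classifyCloseCode(code: WebsocketCloseCodes): CloseAction {
  if (shouldDisconnect(code)) return "disconnect";     // 40xx, or CLOSE_WITHOUT_RETRY (4999)
  if (shouldReauth(code)) return "reauthorize";        // 41xx, e.g. TOKEN_EXPIRED (4109)
  if (shouldRetryWithoutReauth(code)) return "retry";  // TRY_AGAIN_LATER (1013) or 42xx
  return "reauth-after-backoff";                       // anything else, e.g. CLOSE_ABNORMAL (1006)
}
```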
Can be used to implement\n * a connection status badge.\n */\nexport type Status =\n | \"initial\"\n | \"connecting\"\n | \"connected\"\n | \"reconnecting\"\n | \"disconnected\";\n\n/**\n * Whether or not the status is an \"idle\" state. Here, idle means that nothing\n * will happen until some action is taken. Unsurprisingly, these statuses match\n * the start and end states of the state machine.\n */\nexport function isIdle(status: Status): status is \"initial\" | \"disconnected\" {\n return status === \"initial\" || status === \"disconnected\";\n}\n\n/**\n * Used to report about app-level reconnection issues.\n *\n * Normal (quick) reconnects won't be reported as a \"lost connection\". Instead,\n * the application will only get an event if the reconnection attempts by the\n * client are taking (much) longer than usual. Definitely a situation you want\n * to inform your users about, for example, by throwing a toast message on\n * screen, or show a \"trying to reconnect\" banner.\n */\nexport type LostConnectionEvent =\n | \"lost\" // the client is trying to reconnect to Liveblocks, but it's taking (much) longer than usual\n | \"restored\" // the client did reconnect after all\n | \"failed\"; // the client was told to stop trying\n\n/**\n * Maps internal machine state to the public Status API.\n */\nfunction toNewConnectionStatus(machine: FSM<Context, Event, State>): Status {\n const state = machine.currentState;\n switch (state) {\n case \"@ok.connected\":\n case \"@ok.awaiting-pong\":\n return \"connected\";\n\n case \"@idle.initial\":\n return \"initial\";\n\n case \"@auth.busy\":\n case \"@auth.backoff\":\n case \"@connecting.busy\":\n case \"@connecting.backoff\":\n case \"@idle.zombie\":\n return machine.context.successCount > 0 ? \"reconnecting\" : \"connecting\";\n\n case \"@idle.failed\":\n return \"disconnected\";\n\n // istanbul ignore next\n default:\n return assertNever(state, \"Unknown state\");\n }\n}\n\n/**\n * Events that can be sent to the machine externally.\n */\ntype Event =\n // Public events that can be called on the connection manager\n | { type: \"CONNECT\" } // e.g. when trying to enter a room\n | { type: \"RECONNECT\" } // e.g. user asking for an explicit reconnect of the socket\n | { type: \"DISCONNECT\" } // e.g. leaving the room\n | { type: \"WINDOW_GOT_FOCUS\" } // e.g. user's browser tab is refocused\n | { type: \"NAVIGATOR_ONLINE\" } // e.g. browser gets back online\n | { type: \"NAVIGATOR_OFFLINE\" } // e.g. browser goes offline\n\n // Events that the connection manager will internally deal with\n | { type: \"PONG\" }\n | { type: \"EXPLICIT_SOCKET_ERROR\"; event: IWebSocketEvent }\n | { type: \"EXPLICIT_SOCKET_CLOSE\"; event: IWebSocketCloseEvent }\n\n // Only used by the E2E testing app, to simulate a pong timeout :(\n | { type: \"PONG_TIMEOUT\" };\n\ntype State =\n | \"@idle.initial\"\n | \"@idle.failed\"\n | \"@idle.zombie\"\n | \"@auth.busy\"\n | \"@auth.backoff\"\n | \"@connecting.busy\"\n | \"@connecting.backoff\"\n | \"@ok.connected\"\n | \"@ok.awaiting-pong\";\n\n/**\n * Arbitrary record that will be used as the authentication \"authValue\". It's the\n * value that is returned by calling the authentication delegate, and will get\n * passed to the connection factory delegate. 
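Since the doc comment above suggests using `Status` for a connection badge, here is a minimal mapping; the badge labels are arbitrary and only the union members come from the file.

```ts
// Sketch: render a human-readable badge label from the public Status union above.
function statusLabel(status: Status): string {
  switch (status) {
    case "connected":
      return "Online";
    case "connecting":
      return "Connecting…";
    case "reconnecting":
      return "Reconnecting…"; // successCount > 0: we were connected before
    case "initial":       // idle: nothing happens until connect() is called
    case "disconnected":  // idle: the client has stopped retrying
      return "Offline";
  }
}
```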
This value will be remembered by\n * the connection manager, but its value will not be interpreted, so it can be\n * any value (except null).\n */\nexport type BaseAuthResult = NonNullable<Json>;\n\ntype Context = {\n /**\n * Count the number of times the machine reaches an \"@ok.*\" state. Once the\n * machine reaches idle state again, this count is reset to 0 again.\n *\n * This lets us distinguish:\n * - If successCount = 0, then it's an initial \"connecting\" state.\n * - If successCount > 0, then it's an \"reconnecting\" state.\n */\n successCount: number;\n\n /**\n * Will be populated with the last known auth authValue.\n */\n authValue: BaseAuthResult | null;\n\n /**\n * The current active WebSocket connection to the room. If this is not null\n * on the context, then the socket has successfully been opened.\n */\n socket: IWebSocketInstance | null;\n\n /**\n * The current retry delay when automatically retrying. Will get bumped to\n * the next \"tier\" every time a connection attempt fails. Reset every time\n * a connection succeeded.\n */\n backoffDelay: number;\n};\n\nconst BACKOFF_DELAYS = [250, 500, 1_000, 2_000, 4_000, 8_000, 10_000] as const;\n\n// Resetting the delay happens upon success. We could reset to 0, but that\n// would risk no delay, which generally isn't wise. Instead, we'll reset it to\n// the lowest safe delay minus 1 millisecond. The reason is that every time\n// a retry happens, the retry delay will first be bumped to the next \"tier\".\nconst RESET_DELAY = BACKOFF_DELAYS[0] - 1;\n\n/**\n * Used to back off from WebSocket reconnection attempts after a known\n * Liveblocks issue, like \"room full\" or a \"rate limit\" error.\n */\nconst BACKOFF_DELAYS_SLOW = [2_000, 30_000, 60_000, 300_000] as const;\n\n/**\n * The client will send a PING to the server every 30 seconds, after which it\n * must receive a PONG back within the next 2 seconds. If that doesn't happen,\n * this is interpreted as an implicit connection loss event.\n */\nconst HEARTBEAT_INTERVAL = 30_000;\nconst PONG_TIMEOUT = 2_000;\n\n/**\n * Maximum amount of time that the authentication delegate take to return an\n * auth authValue, or else we consider authentication timed out.\n */\nconst AUTH_TIMEOUT = 10_000;\n\n/**\n * Maximum amount of time that the socket connect delegate may take to return\n * an opened WebSocket connection, or else we consider the attempt timed out.\n */\nconst SOCKET_CONNECT_TIMEOUT = 20_000;\n\n/**\n * Special error class that can be thrown during authentication to stop the\n * connection manager from retrying.\n */\nexport class StopRetrying extends Error {\n constructor(reason: string) {\n super(reason);\n }\n}\n\nfunction nextBackoffDelay(\n currentDelay: number,\n delays: readonly number[]\n): number {\n return (\n delays.find((delay) => delay > currentDelay) ?? delays[delays.length - 1]\n );\n}\n\nfunction increaseBackoffDelay(context: Patchable<Context>) {\n context.patch({\n backoffDelay: nextBackoffDelay(context.backoffDelay, BACKOFF_DELAYS),\n });\n}\n\nfunction increaseBackoffDelayAggressively(context: Patchable<Context>) {\n context.patch({\n backoffDelay: nextBackoffDelay(context.backoffDelay, BACKOFF_DELAYS_SLOW),\n });\n}\n\nfunction resetSuccessCount(context: Patchable<Context>) {\n context.patch({ successCount: 0 });\n}\n\nenum LogLevel {\n INFO,\n WARN,\n ERROR,\n}\n\n/**\n * Generic \"log\" effect. Use it in `effect` handlers of state transitions.\n */\nfunction log(level: LogLevel, message: string) {\n const logger =\n level === LogLevel.ERROR\n ? 
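To make the retry cadence concrete, this traces `nextBackoffDelay()` (restated from the code above) starting from `RESET_DELAY`; the resulting numbers follow directly from `BACKOFF_DELAYS`.

```ts
// Trace of the backoff tiers above: every failed attempt bumps the delay to the
// next tier, and the last tier (10s) repeats until a connection succeeds.
const BACKOFF_DELAYS = [250, 500, 1_000, 2_000, 4_000, 8_000, 10_000] as const;
const RESET_DELAY = BACKOFF_DELAYS[0] - 1; // 249, so the first retry lands on 250

function nextBackoffDelay(current: number, delays: readonly number[]): number {
  return delays.find((d) => d > current) ?? delays[delays.length - 1];
}

let delay: number = RESET_DELAY;
const progression: number[] = [];
for (let attempt = 1; attempt <= 9; attempt++) {
  delay = nextBackoffDelay(delay, BACKOFF_DELAYS);
  progression.push(delay);
}
// progression → [250, 500, 1000, 2000, 4000, 8000, 10000, 10000, 10000]
```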
console.error\n : level === LogLevel.WARN\n ? console.warn\n : /* black hole */ () => {};\n return () => {\n logger(message);\n };\n}\n\nfunction logPrematureErrorOrCloseEvent(e: IWebSocketEvent | Error) {\n // Produce a useful log message\n const conn = \"Connection to Liveblocks websocket server\";\n return (ctx: Readonly<Context>) => {\n if (isCloseEvent(e)) {\n console.warn(\n `${conn} closed prematurely (code: ${e.code}). Retrying in ${ctx.backoffDelay}ms.`\n );\n } else {\n console.warn(`${conn} could not be established.`, e);\n }\n };\n}\n\nfunction logCloseEvent(event: IWebSocketCloseEvent) {\n const details = [`code: ${event.code}`];\n if (event.reason) {\n details.push(`reason: ${event.reason}`);\n }\n return (ctx: Readonly<Context>) => {\n console.warn(\n `Connection to Liveblocks websocket server closed (${details.join(\", \")}). Retrying in ${ctx.backoffDelay}ms.`\n );\n };\n}\n\nconst logPermanentClose = log(\n LogLevel.WARN,\n \"Connection to WebSocket closed permanently. Won't retry.\"\n);\n\nfunction isCloseEvent(\n error: IWebSocketEvent | Error\n): error is IWebSocketCloseEvent {\n return !(error instanceof Error) && error.type === \"close\";\n}\n\nexport type Delegates<T extends BaseAuthResult> = {\n authenticate: () => Promise<T>;\n createSocket: (authValue: T) => IWebSocketInstance;\n canZombie: () => boolean;\n};\n\n// istanbul ignore next\nfunction enableTracing(machine: FSM<Context, Event, State>) {\n function log(...args: unknown[]) {\n console.warn(`[FSM #${machine.id}]`, ...args);\n }\n\n const unsubs = [\n machine.events.didReceiveEvent.subscribe((e) => log(`Event ${e.type}`)),\n machine.events.willTransition.subscribe(({ from, to }) =>\n log(\"Transitioning\", from, \"→\", to)\n ),\n machine.events.didExitState.subscribe(({ state, durationMs }) =>\n log(`Exited ${state} after ${durationMs.toFixed(0)}ms`)\n ),\n machine.events.didIgnoreEvent.subscribe((e) =>\n log(\"Ignored event\", e.type, e, \"(current state won't handle it)\")\n ),\n ];\n return () => {\n for (const unsub of unsubs) {\n unsub();\n }\n };\n}\n\nfunction defineConnectivityEvents(machine: FSM<Context, Event, State>) {\n // Emitted whenever a new WebSocket connection attempt succeeds\n const statusDidChange = makeEventSource<Status>();\n const didConnect = makeEventSource<void>();\n const didDisconnect = makeEventSource<void>();\n\n let lastStatus: Status | null = null;\n\n const unsubscribe = machine.events.didEnterState.subscribe(() => {\n const currStatus = toNewConnectionStatus(machine);\n if (currStatus !== lastStatus) {\n statusDidChange.notify(currStatus);\n }\n\n if (lastStatus === \"connected\" && currStatus !== \"connected\") {\n didDisconnect.notify();\n } else if (lastStatus !== \"connected\" && currStatus === \"connected\") {\n didConnect.notify();\n }\n lastStatus = currStatus;\n });\n\n return {\n statusDidChange: statusDidChange.observable,\n didConnect: didConnect.observable,\n didDisconnect: didDisconnect.observable,\n unsubscribe,\n };\n}\n\nconst assign = (patch: Partial<Context>) => (ctx: Patchable<Context>) =>\n ctx.patch(patch);\n\n/**\n * A ConnectionError is a partial data structure to help build a proper\n * LiveblocksError down the line.\n */\ntype ConnectionError = { message: string; code: number };\n\nfunction createConnectionStateMachine<T extends BaseAuthResult>(\n delegates: Delegates<T>,\n options: {\n enableDebugLogging: boolean;\n /** In protocol V7, the actor will no longer be available on the token.\n * Instead, the `actor` will be sent to the client via a 
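A sketch of the three delegates the connection manager consumes, mirroring the `Delegates<T>` shape defined above locally rather than importing it. The auth endpoint, token shape, and WebSocket URL are hypothetical; the manager treats the auth value as opaque.

```ts
// Hypothetical Delegates implementation for a Node 18+ environment ('ws' package).
import { WebSocket } from "ws";

type MyAuthValue = { token: string }; // opaque to the connection manager

// Local mirror of the Delegates<T> contract shown above.
type MyDelegates = {
  authenticate: () => Promise<MyAuthValue>;
  createSocket: (authValue: MyAuthValue) => WebSocket;
  canZombie: () => boolean;
};

const delegates: MyDelegates = {
  async authenticate() {
    const resp = await fetch("https://example.com/api/auth", { method: "POST" });
    if (!resp.ok) throw new Error(`Auth failed with HTTP ${resp.status}`); // retried with backoff
    return (await resp.json()) as MyAuthValue;
  },

  createSocket(authValue) {
    // The manager attaches its own open/close/error/message listeners and
    // tears the socket down itself when leaving the OK state.
    return new WebSocket(`wss://example.com/ws?tok=${authValue.token}`);
  },

  // Consulted when the tab is hidden at heartbeat time; returning true lets
  // the connection go dormant ("zombie") instead of pinging.
  canZombie: () => false,
};
```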
ROOM_STATE message\n * over an established WebSocket connection. If this setting is set to\n * `true`, the state machine will only jump to \"connected\" state _after_\n * this message has been received. If this setting is `false`, the machine\n * won't wait for the actor to be received, and instead jump to \"connected\"\n * as soon as the WebSocket connection is established. */\n waitForActorId: boolean;\n }\n) {\n // Create observable event sources, which this machine will call into when\n // specific events happen\n const onMessage = makeBufferableEventSource<IWebSocketMessageEvent>();\n onMessage.pause(); // Pause all message delivery until status is OPEN\n\n // Emitted whenever the server deliberately closes the connection for\n // a specific Liveblocks reason\n const onConnectionError = makeEventSource<ConnectionError>();\n\n function fireErrorEvent(message: string, code: number) {\n return () => {\n onConnectionError.notify({ message, code });\n };\n }\n\n const initialContext: Context & { authValue: T | null } = {\n successCount: 0,\n authValue: null,\n socket: null,\n backoffDelay: RESET_DELAY,\n };\n\n // The `machine` is the actual finite state machine instance that will\n // maintain the WebSocket's connection\n const machine = new FSM<Context, Event, State>(initialContext)\n .addState(\"@idle.initial\")\n .addState(\"@idle.failed\")\n .addState(\"@idle.zombie\")\n .addState(\"@auth.busy\")\n .addState(\"@auth.backoff\")\n .addState(\"@connecting.busy\")\n .addState(\"@connecting.backoff\")\n .addState(\"@ok.connected\")\n .addState(\"@ok.awaiting-pong\");\n\n //\n // Configure events that can happen from anywhere\n //\n // It's always possible to explicitly get a .reconnect() or .disconnect()\n // from the user.\n //\n machine.addTransitions(\"*\", {\n RECONNECT: {\n target: \"@auth.backoff\",\n effect: [increaseBackoffDelay, resetSuccessCount],\n },\n\n DISCONNECT: \"@idle.initial\",\n });\n\n //\n // Configure the @idle.* states\n //\n machine\n .onEnter(\"@idle.*\", resetSuccessCount)\n\n .addTransitions(\"@idle.*\", {\n CONNECT: (_, ctx) =>\n // If we still have a known authValue, try to reconnect to the socket directly,\n // otherwise, try to obtain a new authValue\n ctx.authValue !== null ? \"@connecting.busy\" : \"@auth.busy\",\n });\n\n //\n // Configure the @auth.* states\n //\n machine\n .addTransitions(\"@auth.backoff\", {\n NAVIGATOR_ONLINE: {\n target: \"@auth.busy\",\n effect: assign({ backoffDelay: RESET_DELAY }),\n },\n })\n .addTimedTransition(\n \"@auth.backoff\",\n (ctx) => ctx.backoffDelay,\n \"@auth.busy\"\n )\n\n .onEnterAsync(\n \"@auth.busy\",\n\n () =>\n withTimeout(\n delegates.authenticate(),\n AUTH_TIMEOUT,\n \"Timed out during auth\"\n ),\n\n // On successful authentication\n (okEvent) => ({\n target: \"@connecting.busy\",\n effect: assign({\n authValue: okEvent.data,\n }),\n }),\n\n // Auth failed\n (failedEvent) => {\n if (failedEvent.reason instanceof StopRetrying) {\n return {\n target: \"@idle.failed\",\n effect: [\n log(LogLevel.ERROR, failedEvent.reason.message),\n fireErrorEvent(failedEvent.reason.message, -1),\n ],\n };\n }\n\n return {\n target: \"@auth.backoff\",\n effect: [\n increaseBackoffDelay,\n log(\n LogLevel.ERROR,\n `Authentication failed: ${\n failedEvent.reason instanceof Error\n ? 
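The auth failure branch above treats `StopRetrying` differently from any other rejection. A sketch of an authenticate delegate that uses that distinction; the endpoint and status codes are hypothetical, `StopRetrying` is the class exported above.

```ts
// Sketch: signal "stop retrying" vs. "retry with backoff" from the auth delegate.
async function authenticate(): Promise<{ token: string }> {
  const resp = await fetch("https://example.com/api/auth", { method: "POST" });

  if (resp.status === 403) {
    // Unrecoverable: asking again with the same credentials won't help.
    // The machine moves to "@idle.failed" and emits a connection error.
    throw new StopRetrying("This user is not allowed to enter the room");
  }
  if (!resp.ok) {
    // Any other error is treated as transient: "@auth.backoff", then retry.
    throw new Error(`Authentication failed with HTTP ${resp.status}`);
  }
  return (await resp.json()) as { token: string };
}
```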
failedEvent.reason.message\n : String(failedEvent.reason)\n }`\n ),\n ],\n };\n }\n );\n\n //\n // Configure the @connecting.* states\n //\n\n // Function references\n const onSocketError = (event: IWebSocketEvent) =>\n machine.send({ type: \"EXPLICIT_SOCKET_ERROR\", event });\n\n const onSocketClose = (event: IWebSocketCloseEvent) =>\n machine.send({ type: \"EXPLICIT_SOCKET_CLOSE\", event });\n\n const onSocketMessage = (event: IWebSocketMessageEvent) =>\n event.data === \"pong\"\n ? machine.send({ type: \"PONG\" })\n : onMessage.notify(event);\n\n function teardownSocket(socket: IWebSocketInstance | null) {\n if (socket) {\n socket.removeEventListener(\"error\", onSocketError);\n socket.removeEventListener(\"close\", onSocketClose);\n socket.removeEventListener(\"message\", onSocketMessage);\n socket.close();\n }\n }\n\n machine\n .addTransitions(\"@connecting.backoff\", {\n NAVIGATOR_ONLINE: {\n target: \"@connecting.busy\",\n effect: assign({ backoffDelay: RESET_DELAY }),\n },\n })\n .addTimedTransition(\n \"@connecting.backoff\",\n (ctx) => ctx.backoffDelay,\n \"@connecting.busy\"\n )\n\n .onEnterAsync(\n \"@connecting.busy\",\n\n //\n // Use the \"createSocket\" delegate function (provided to the\n // ManagedSocket) to create the actual WebSocket connection instance.\n // Then, set up all the necessary event listeners, and wait for the\n // \"open\" event to occur.\n //\n // When the \"open\" event happens, we're ready to transition to the\n // OK state. This is done by resolving the Promise.\n //\n async (ctx, signal) => {\n const socketEpoch = performance.now();\n let socketOpenAt: number | null = null;\n\n let capturedPrematureEvent: IWebSocketEvent | null = null;\n let unconfirmedSocket: IWebSocketInstance | null = null;\n\n const connect$ = new Promise<[IWebSocketInstance, () => void]>(\n (resolve, rej) => {\n // istanbul ignore next\n if (ctx.authValue === null) {\n throw new Error(\"No auth authValue\"); // This should never happen\n }\n\n const socket = delegates.createSocket(ctx.authValue as T);\n unconfirmedSocket = socket;\n\n function reject(event: IWebSocketEvent) {\n capturedPrematureEvent = event;\n socket.removeEventListener(\"message\", onSocketMessage);\n rej(event);\n }\n\n const [actor$, didReceiveActor] = controlledPromise<void>();\n if (!options.waitForActorId) {\n // Mark the promise as \"resolved\" immediately, so we won't wait\n // for a ROOM_STATE message to happen.\n didReceiveActor();\n }\n\n /** Waits until actor is received (from the ROOM_STATE message) */\n function waitForActorId(event: IWebSocketMessageEvent) {\n const serverMsg = tryParseJson(event.data as string) as\n | Record<string, Json>\n | undefined;\n if (serverMsg?.type === ServerMsgCode.ROOM_STATE) {\n if (options.enableDebugLogging && socketOpenAt !== null) {\n const elapsed = performance.now() - socketOpenAt;\n console.warn(\n `[FSM #${machine.id}] Socket open → ROOM_STATE: ${elapsed.toFixed(0)}ms`\n );\n }\n didReceiveActor();\n }\n }\n\n //\n // Part 1:\n // The `error` and `close` event handlers marked (*) are installed\n // here only temporarily, just to handle this promise-based state.\n // When those get triggered, we reject this promise.\n //\n socket.addEventListener(\"message\", onSocketMessage);\n if (options.waitForActorId) {\n socket.addEventListener(\"message\", waitForActorId);\n }\n socket.addEventListener(\"error\", reject); // (*)\n socket.addEventListener(\"close\", reject); // (*)\n socket.addEventListener(\"open\", () => {\n socketOpenAt = performance.now();\n if 
(options.enableDebugLogging) {\n const elapsed = socketOpenAt - socketEpoch;\n console.warn(\n `[FSM #${machine.id}] Socket epoch → open: ${elapsed.toFixed(0)}ms`\n );\n }\n\n //\n // Part 2:\n // The \"open\" event just fired, so the server accepted our\n // attempt to connect. We'll go on and resolve() our promise as\n // a result.\n //\n // However, we cannot safely remove our error/close rejection\n // handlers _just yet_. There is a small, unlikely-but-possible\n // edge case: if (and only if) any close/error events are\n // _already_ queued up in the event queue before this handler is\n // invoked, then those will fire before our promise will be\n // resolved.\n //\n // Scenario:\n // - Event queue is empty, listeners are installed\n // - Two events synchronously get scheduled in the event queue: [<open event>, <close event>]\n // - The open handler is invoked (= this very callback)\n // - Event queue now looks like: [<close event>]\n // - We happily continue and resolve the promise\n // - Event queue now looks like: [<close event>, <our resolved promise>]\n // - Close event handler fires, but we already resolved promise! 😣\n //\n // This is what's called a \"premature\" event here, we'll deal\n // with it in part 3.\n //\n socket.addEventListener(\"error\", onSocketError);\n socket.addEventListener(\"close\", onSocketClose);\n const unsub = () => {\n socket.removeEventListener(\"error\", reject); // Remove (*)\n socket.removeEventListener(\"close\", reject); // Remove (*)\n socket.removeEventListener(\"message\", waitForActorId);\n };\n\n // Resolve the promise only once we received the actor ID from\n // the server. This will act like a traffic light, going green\n // only once the actor is received. If the machine is configured\n // not to wait for the actor, the traffic light will already be\n // green.\n // All messages received in the mean time while waiting for the\n // green light will be played back to the client after the\n // transition to \"connected\".\n void actor$.then(() => {\n resolve([socket, unsub]);\n });\n });\n }\n );\n\n return withTimeout(\n connect$,\n SOCKET_CONNECT_TIMEOUT,\n \"Timed out during websocket connection\"\n )\n .then(\n //\n // Part 3:\n // By now, our \"open\" event has fired, and the promise has been\n // resolved. Two possible scenarios:\n //\n // 1. The happy path. Most likely.\n // 2. Uh-oh. A premature close/error event has been observed. Let's\n // reject the promise after all.\n //\n // Any close/error event that will get scheduled after this point\n // onwards, will be caught in the OK state, and dealt with\n // accordingly.\n //\n ([socket, unsub]) => {\n unsub();\n\n if (signal.aborted) {\n // Trigger cleanup logic in .catch() below. At this point, the\n // promise is already cancelled, so none of the ok/err\n // transitions will take place.\n throw new Error(\"Aborted\");\n }\n\n if (capturedPrematureEvent) {\n throw capturedPrematureEvent; // Take failure transition\n }\n\n return socket;\n }\n )\n .catch((e) => {\n teardownSocket(unconfirmedSocket);\n throw e;\n });\n },\n\n // Only transition to OK state after a successfully opened WebSocket connection\n (okEvent) => ({\n target: \"@ok.connected\",\n effect: assign({\n socket: okEvent.data,\n backoffDelay: RESET_DELAY,\n }),\n }),\n\n // If the WebSocket connection cannot be established\n (failure) => {\n const err = failure.reason as IWebSocketEvent | StopRetrying | Error;\n\n // Stop retrying if this promise explicitly tells us so. 
This should,\n // in the case of a WebSocket connection attempt only be the case if\n // there is a configuration error.\n if (err instanceof StopRetrying) {\n return {\n target: \"@idle.failed\",\n effect: [\n log(LogLevel.ERROR, err.message),\n fireErrorEvent(err.message, -1),\n ],\n };\n }\n\n // If the server actively refuses the connection attempt, stop trying.\n if (isCloseEvent(err)) {\n // The default fall-through behavior is going to be reauthorizing\n // with a back-off strategy. If we know the token was expired however\n // we can reauthorize immediately (without back-off).\n if (err.code === WebsocketCloseCodes.TOKEN_EXPIRED) {\n return \"@auth.busy\";\n }\n\n if (shouldRetryWithoutReauth(err.code)) {\n // Retry after backoff, but don't get a new token\n return {\n target: \"@connecting.backoff\",\n effect: [\n increaseBackoffDelayAggressively,\n logPrematureErrorOrCloseEvent(err),\n ],\n };\n }\n\n // If the token was not allowed we can stop trying because getting\n // another token for the same user won't help\n if (shouldDisconnect(err.code)) {\n return {\n target: \"@idle.failed\",\n effect: [\n log(LogLevel.ERROR, err.reason),\n fireErrorEvent(err.reason, err.code),\n ],\n };\n }\n }\n\n // In all other (unknown) cases, always re-authenticate (but after a back-off)\n return {\n target: \"@auth.backoff\",\n effect: [increaseBackoffDelay, logPrematureErrorOrCloseEvent(err)],\n };\n }\n );\n\n //\n // Configure the @ok.* states\n //\n // Keeps a heartbeat alive with the server whenever in the @ok.* state group.\n // 30 seconds after entering the \"@ok.connected\" state, it will emit\n // a heartbeat, and awaits a PONG back that should arrive within 2 seconds.\n // If this happens, then it transitions back to normal \"connected\" state, and\n // the cycle repeats. If the PONG is not received timely, then we interpret\n // it as an implicit connection loss, and transition to reconnect (throw away\n // this socket, and open a new one).\n //\n\n const sendHeartbeat: Target<Context, Event | BuiltinEvent, State> = {\n target: \"@ok.awaiting-pong\",\n effect: (ctx) => {\n ctx.socket?.send(\"ping\");\n },\n };\n\n const maybeHeartbeat: Target<Context, Event | BuiltinEvent, State> = () => {\n // If the browser tab isn't visible currently, ask the application if going\n // zombie is fine\n const doc = typeof document !== \"undefined\" ? document : undefined;\n const canZombie =\n doc?.visibilityState === \"hidden\" && delegates.canZombie();\n return canZombie ? \"@idle.zombie\" : sendHeartbeat;\n };\n\n machine\n .addTimedTransition(\"@ok.connected\", HEARTBEAT_INTERVAL, maybeHeartbeat)\n .addTransitions(\"@ok.connected\", {\n NAVIGATOR_OFFLINE: maybeHeartbeat, // Don't take the browser's word for it when it says it's offline. Do a ping/pong to make sure.\n WINDOW_GOT_FOCUS: sendHeartbeat,\n });\n\n machine.addTransitions(\"@idle.zombie\", {\n WINDOW_GOT_FOCUS: \"@connecting.backoff\", // When in zombie state, the client will try to wake up automatically when the window regains focus\n });\n\n machine\n .onEnter(\"@ok.*\", (ctx) => {\n ctx.patch({ successCount: ctx.successCount + 1 });\n\n const timerID = setTimeout(\n // On the next tick, start delivering all messages that have already\n // been received, and continue synchronous delivery of all future\n // incoming messages.\n onMessage.unpause,\n 0\n );\n\n // ...but when *leaving* OK state, always tear down the old socket. 
It's\n // no longer valid.\n return (ctx) => {\n teardownSocket(ctx.socket);\n ctx.patch({ socket: null });\n clearTimeout(timerID);\n onMessage.pause();\n };\n })\n\n .addTransitions(\"@ok.awaiting-pong\", { PONG: \"@ok.connected\" })\n .addTimedTransition(\"@ok.awaiting-pong\", PONG_TIMEOUT, {\n target: \"@connecting.busy\",\n // Log implicit connection loss and drop the current open socket\n effect: log(\n LogLevel.WARN,\n \"Received no pong from server, assume implicit connection loss.\"\n ),\n })\n\n .addTransitions(\"@ok.*\", {\n // When a socket receives an error, this can cause the closing of the\n // socket, or not. So always check to see if the socket is still OPEN or\n // not. When still OPEN, don't transition.\n EXPLICIT_SOCKET_ERROR: (_, context) => {\n if (context.socket?.readyState === 1 /* WebSocket.OPEN */) {\n // TODO Do we need to forward this error to the client?\n return null; /* Do not leave OK state, socket is still usable */\n }\n\n return {\n target: \"@connecting.backoff\",\n effect: increaseBackoffDelay,\n };\n },\n\n EXPLICIT_SOCKET_CLOSE: (e) => {\n // Server instructed us to stop retrying, so move to failed state\n if (shouldDisconnect(e.event.code)) {\n return {\n target: \"@idle.failed\",\n effect: [\n logPermanentClose,\n fireErrorEvent(e.event.reason, e.event.code),\n ],\n };\n }\n\n if (shouldReauth(e.event.code)) {\n if (e.event.code === WebsocketCloseCodes.TOKEN_EXPIRED) {\n // Token expiry is a special case, we can reauthorize immediately\n // (without back-off)\n return \"@auth.busy\";\n } else {\n return {\n target: \"@auth.backoff\",\n effect: [increaseBackoffDelay, logCloseEvent(e.event)],\n };\n }\n }\n\n if (shouldRetryWithoutReauth(e.event.code)) {\n // If this is a custom Liveblocks server close reason, back off more\n // aggressively, and emit a Liveblocks error event...\n return {\n target: \"@connecting.backoff\",\n effect: [increaseBackoffDelayAggressively, logCloseEvent(e.event)],\n };\n }\n\n // Consider any other close event a temporary network hiccup, and retry\n // after a normal backoff delay\n return {\n target: \"@connecting.backoff\",\n effect: [increaseBackoffDelay, logCloseEvent(e.event)],\n };\n },\n });\n\n // Lastly, register an event handler to listen for window-focus events as\n // soon as the machine starts, and use it to send itself \"WINDOW_GOT_FOCUS\"\n // events.\n if (typeof document !== \"undefined\") {\n const doc = typeof document !== \"undefined\" ? document : undefined;\n const win = typeof window !== \"undefined\" ? window : undefined;\n const root = win ?? 
doc;\n\n machine.onEnter(\"*\", (ctx) => {\n function onNetworkOffline() {\n machine.send({ type: \"NAVIGATOR_OFFLINE\" });\n }\n\n function onNetworkBackOnline() {\n machine.send({ type: \"NAVIGATOR_ONLINE\" });\n }\n\n function onVisibilityChange() {\n if (doc?.visibilityState === \"visible\") {\n machine.send({ type: \"WINDOW_GOT_FOCUS\" });\n }\n }\n\n win?.addEventListener(\"online\", onNetworkBackOnline);\n win?.addEventListener(\"offline\", onNetworkOffline);\n root?.addEventListener(\"visibilitychange\", onVisibilityChange);\n return () => {\n root?.removeEventListener(\"visibilitychange\", onVisibilityChange);\n win?.removeEventListener(\"online\", onNetworkBackOnline);\n win?.removeEventListener(\"offline\", onNetworkOffline);\n\n // Also tear down the old socket when stopping the machine, if there is one\n teardownSocket(ctx.socket);\n };\n });\n }\n\n const cleanups = [];\n\n const { statusDidChange, didConnect, didDisconnect, unsubscribe } =\n defineConnectivityEvents(machine);\n cleanups.push(unsubscribe);\n\n // Install debug logging\n // istanbul ignore next\n if (options.enableDebugLogging) {\n cleanups.push(enableTracing(machine));\n }\n\n // Start the machine\n machine.start();\n\n return {\n machine,\n cleanups,\n\n // Observable events that will be emitted by this machine\n events: {\n statusDidChange,\n didConnect,\n didDisconnect,\n onMessage: onMessage.observable,\n onConnectionError: onConnectionError.observable,\n },\n };\n}\n\n/**\n * The ManagedSocket will set up a WebSocket connection to a room, and maintain\n * that connection over time.\n *\n * It's a light wrapper around the actual FSM that implements the logic,\n * exposing just a few safe actions and events that can be called or observed\n * from the outside.\n */\nexport class ManagedSocket<T extends BaseAuthResult> {\n #machine: FSM<Context, Event, State>;\n #cleanups: (() => void)[];\n\n public readonly events: {\n /**\n * Emitted when the WebSocket connection goes in or out of \"connected\"\n * state.\n */\n readonly statusDidChange: Observable<Status>;\n /**\n * Emitted when the WebSocket connection is first opened.\n */\n readonly didConnect: Observable<void>;\n /**\n * Emitted when the current WebSocket connection is lost and the socket\n * becomes useless. A new WebSocket connection must be made after this to\n * restore connectivity.\n */\n readonly didDisconnect: Observable<void>; // Deliberate close, a connection loss, etc.\n\n /**\n * Emitted for every incoming message from the currently active WebSocket\n * connection.\n */\n readonly onMessage: Observable<IWebSocketMessageEvent>;\n\n /**\n * Emitted whenever a connection gets closed for a known error reason, e.g.\n * max number of connections, max number of messages, etc.\n */\n readonly onConnectionError: Observable<ConnectionError>;\n };\n\n constructor(\n delegates: Delegates<T>,\n enableDebugLogging: boolean = false,\n waitForActorId: boolean = true\n ) {\n const { machine, events, cleanups } = createConnectionStateMachine(\n delegates,\n { waitForActorId, enableDebugLogging }\n );\n this.#machine = machine;\n this.events = events;\n this.#cleanups = cleanups;\n }\n\n getStatus(): Status {\n try {\n return toNewConnectionStatus(this.#machine);\n } catch {\n return \"initial\";\n }\n }\n\n /**\n * Returns the current auth authValue.\n */\n get authValue(): T | null {\n return this.#machine.context.authValue as T | null;\n }\n\n /**\n * Call this method to try to connect to a WebSocket. 
This only has an effect\n * if the machine is idle at the moment, otherwise this is a no-op.\n */\n public connect(): void {\n this.#machine.send({ type: \"CONNECT\" });\n }\n\n /**\n * If idle, will try to connect. Otherwise, it will attempt to reconnect to\n * the socket, potentially obtaining a new authValue first, if needed.\n */\n public reconnect(): void {\n this.#machine.send({ type: \"RECONNECT\" });\n }\n\n /**\n * Call this method to disconnect from the current WebSocket. Is going to be\n * a no-op if there is no active connection.\n */\n public disconnect(): void {\n this.#machine.send({ type: \"DISCONNECT\" });\n }\n\n /**\n * Call this to stop the machine and run necessary cleanup functions. After\n * calling destroy(), you can no longer use this instance. Call this before\n * letting the instance get garbage collected.\n */\n public destroy(): void {\n this.#machine.stop();\n\n let cleanup: (() => void) | undefined;\n while ((cleanup = this.#cleanups.pop())) {\n cleanup();\n }\n }\n\n /**\n * Safely send a message to the current WebSocket connection. Will emit a log\n * message if this is somehow impossible.\n */\n public send(data: string): void {\n const socket = this.#machine.context?.socket;\n if (socket === null) {\n console.warn(\"Cannot send: not connected yet\", data);\n } else if (socket.readyState !== 1 /* WebSocket.OPEN */) {\n console.warn(\"Cannot send: WebSocket no longer open\", data);\n } else {\n socket.send(data);\n }\n }\n\n /**\n * NOTE: Used by the E2E app only, to simulate explicit events.\n * Not ideal to keep exposed :(\n */\n public _privateSendMachineEvent(event: Event): void {\n this.#machine.send(event);\n }\n}\n","/**\n * Use this symbol to brand an object property as internal.\n *\n * @example\n * Object.defineProperty(\n * {\n * public,\n * [kInternal]: {\n * private\n * },\n * },\n * kInternal,\n * {\n * enumerable: false,\n * }\n * );\n */\nexport const kInternal = Symbol();\n","import type { JsonObject } from \"./Json\";\nimport { tryParseJson } from \"./utils\";\n\nconst EMPTY_OBJECT = Object.freeze({}) as JsonObject;\n\n// Characters that can end partial keywords: n, u, l, t, r, e, f, a, s\nconst NULL_KEYWORD_CHARS = Array.from(new Set(\"null\"));\nconst TRUE_KEYWORD_CHARS = Array.from(new Set(\"true\"));\nconst FALSE_KEYWORD_CHARS = Array.from(new Set(\"false\"));\nconst ALL_KEYWORD_CHARS = Array.from(new Set(\"nulltruefalse\"));\n\n/**\n * Strips the last character from `str` if it is one of the chars in the given\n * `chars` string.\n */\nfunction stripChar(str: string, chars: string): string {\n const lastChar = str[str.length - 1];\n if (chars.includes(lastChar)) {\n return str.slice(0, -1);\n }\n return str;\n}\n\nexport class IncrementalJsonParser {\n // Input\n #sourceText: string = \"\";\n\n // Output\n #cachedJson?: JsonObject;\n\n /** How much we've already parsed */\n #scanIndex: number = 0;\n /** Whether the last char processed was a backslash */\n #escaped: boolean = false;\n /**\n * Start position of the last unterminated string, -1 if we're not inside\n * a string currently.\n *\n * Example: '{\"a\": \"foo'\n * ^\n */\n #lastUnterminatedString: number = -1;\n /**\n * Start position of the last fully terminated string we've seen.\n *\n * Example: '{\"a\": \"foo'\n * ^\n */\n #lastTerminatedString: number = -1;\n /** The bracket stack of expected closing chars. For input '{\"a\": [\"foo', the stack would be ['}', ']']. 
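A usage sketch for the `ManagedSocket` wrapper above, assuming a `delegates` object like the earlier example and assuming the `Observable.subscribe()` calls return unsubscribe functions, as the event sources in this file do.

```ts
// Sketch: drive a ManagedSocket and observe its public events.
const managed = new ManagedSocket(delegates, /* enableDebugLogging */ false);

const stopStatus = managed.events.statusDidChange.subscribe((status) => {
  console.log("status:", status); // "connecting" → "connected" → …
});
const stopMessages = managed.events.onMessage.subscribe((ev) => {
  console.log("message from server:", ev.data);
});

managed.connect();                                 // only acts when the machine is idle
managed.send(JSON.stringify({ hello: "world" }));  // warns instead of throwing if not open

// ...when done:
managed.disconnect();
stopMessages();
stopStatus();
managed.destroy(); // stop the machine and run all registered cleanups
```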
*/\n #stack: string[] = [];\n\n constructor(text: string = \"\") {\n this.append(text);\n }\n\n get source(): string {\n return this.#sourceText;\n }\n\n get json(): JsonObject {\n if (this.#cachedJson === undefined) {\n this.#cachedJson = this.#parse();\n }\n return this.#cachedJson;\n }\n\n /** Whether we're currently inside an unterminated string, e.g. '{\"hello' */\n get #inString(): boolean {\n return this.#lastUnterminatedString >= 0;\n }\n\n append(delta: string): void {\n if (delta) {\n // Trim leading whitespace only on the first delta\n if (this.#sourceText === \"\") {\n delta = delta.trimStart();\n }\n this.#sourceText += delta;\n this.#cachedJson = undefined; // Invalidate the cache\n }\n }\n\n #autocompleteTail(output: string): string {\n // Complete unambiguous partial JSON keywords,\n // e.g. '{\"a\": -' → '{\"a\": -0'\n // '{\"a\": n' → '{\"a\": null'\n // '{\"a\": t' → '{\"a\": true'\n // '{\"a\": f' → '{\"a\": false'\n\n if (this.#inString) {\n return \"\"; // Don't complete anything if we're in an unterminated string\n }\n\n const lastChar = output.charAt(output.length - 1);\n if (lastChar === \"\") return \"\";\n\n // Handle incomplete negative numbers\n if (lastChar === \"-\") {\n return \"0\"; // Complete to -0\n }\n\n // Skip keyword completion for most characters that can't be part of keywords\n if (!ALL_KEYWORD_CHARS.includes(lastChar)) return \"\";\n\n // Check the last few characters directly\n if (NULL_KEYWORD_CHARS.includes(lastChar)) {\n if (output.endsWith(\"nul\")) return \"l\";\n if (output.endsWith(\"nu\")) return \"ll\";\n if (output.endsWith(\"n\")) return \"ull\";\n }\n\n if (TRUE_KEYWORD_CHARS.includes(lastChar)) {\n if (output.endsWith(\"tru\")) return \"e\";\n if (output.endsWith(\"tr\")) return \"ue\";\n if (output.endsWith(\"t\")) return \"rue\";\n }\n\n if (FALSE_KEYWORD_CHARS.includes(lastChar)) {\n if (output.endsWith(\"fals\")) return \"e\";\n if (output.endsWith(\"fal\")) return \"se\";\n if (output.endsWith(\"fa\")) return \"lse\";\n if (output.endsWith(\"f\")) return \"alse\";\n }\n\n return \"\";\n }\n\n /**\n * Updates the internal parsing state by processing any new content\n * that has been appended since the last parse. This updates the state with\n * facts only. 
Any interpretation is left to the #parse() method.\n */\n #catchup(): void {\n const newContent = this.#sourceText.slice(this.#scanIndex);\n\n // Update internal parsing state by processing only the new content character by character\n for (let i = 0; i < newContent.length; i++) {\n const ch = newContent[i];\n const absolutePos = this.#scanIndex + i;\n\n if (this.#inString) {\n if (this.#escaped) {\n this.#escaped = false;\n } else if (ch === \"\\\\\") {\n this.#escaped = true;\n } else if (ch === '\"') {\n this.#lastTerminatedString = this.#lastUnterminatedString; // Save the terminated string's start\n this.#lastUnterminatedString = -1; // Exit string\n }\n } else {\n if (ch === '\"') {\n this.#lastUnterminatedString = absolutePos; // Enter string\n } else if (ch === \"{\") {\n this.#stack.push(\"}\");\n } else if (ch === \"[\") {\n this.#stack.push(\"]\");\n } else if (\n ch === \"}\" &&\n this.#stack.length > 0 &&\n this.#stack[this.#stack.length - 1] === \"}\"\n ) {\n this.#stack.pop();\n } else if (\n ch === \"]\" &&\n this.#stack.length > 0 &&\n this.#stack[this.#stack.length - 1] === \"]\"\n ) {\n this.#stack.pop();\n }\n }\n }\n\n this.#scanIndex = this.#sourceText.length;\n }\n\n #parse(): JsonObject {\n this.#catchup();\n\n let result = this.#sourceText; // Already trimmed on first append\n\n if (result.charAt(0) !== \"{\") {\n // Not an object, don't even try to parse it\n return EMPTY_OBJECT;\n }\n\n // If it's already valid JSON, return as-is\n if (result.endsWith(\"}\")) {\n const quickCheck = tryParseJson(result);\n if (quickCheck) {\n // Due to the '{' check above, we can safely assume it's an object\n return quickCheck as JsonObject;\n }\n }\n\n // Fix unterminated strings by appending a '\"' if needed\n // Use our tracked state instead of recalculating\n if (this.#inString) {\n // If we're in an escaped state (last char was \\), remove that incomplete escape\n if (this.#escaped) {\n result = result.slice(0, -1); // Remove the trailing backslash\n }\n result += '\"';\n }\n\n // If the last char is a ',' or '.', we can strip it, because it won't\n // change the value. Trim whitespace first, then check for comma/period.\n result = result.trimEnd();\n result = stripChar(result, \",.\");\n\n // Complete partial keywords at the end (if umambiguous)\n result = result + this.#autocompleteTail(result);\n\n // Use the bracket stack to compute the suffix\n const suffix = this.#stack.reduceRight((acc, ch) => acc + ch, \"\");\n\n // Attempt to \"just\" add the missing ] and }'s.\n {\n const attempt = tryParseJson(result + suffix);\n if (attempt) {\n // If it parses, return the result\n return attempt as JsonObject;\n }\n }\n\n // If there is a parse failure above, it's likely because we're missing\n // a \"value\" for a key in an object.\n\n if (this.#inString) {\n // We're in an unterminated string, just remove it - e.g. '{\"abc'\n result = result.slice(0, this.#lastUnterminatedString);\n } else {\n // If the last char is a \":\", just remove it - e.g. '{\"abc\"' or '{\"abc\":'\n result = stripChar(result, \":\");\n\n // If the last char is a '\"', remove that last string\n if (result.endsWith('\"')) {\n result = result.slice(0, this.#lastTerminatedString);\n }\n }\n\n // If the last char now is a trailing comma, strip it\n result = stripChar(result, \",\");\n\n // Re-add the missing brackets/braces\n result += suffix;\n\n // Run JSON.parse on the result again. it should now work!\n return (tryParseJson(result) as JsonObject | undefined) ?? 
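A short trace of the parser above being fed streamed deltas; each `json` read returns the best-effort completion of whatever has arrived so far, following the repair steps described in `#parse()`.

```ts
// Sketch: feeding streamed deltas to IncrementalJsonParser and reading the
// best-effort JSON after each append.
const parser = new IncrementalJsonParser();

parser.append('{"title": "Dra');
console.log(parser.json);   // { title: "Dra" }  (unterminated string closed)

parser.append('ft", "tags": ["a", "b');
console.log(parser.json);   // { title: "Draft", tags: ["a", "b"] }  (brackets re-added)

parser.append('"], "done": fals');
console.log(parser.json);   // { title: "Draft", tags: ["a", "b"], done: false }  (keyword completed)

parser.append("e}");
console.log(parser.json);   // complete object, parsed as-is
```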
EMPTY_OBJECT; // Still invalid JSON\n }\n}\n","import { isPlainObject } from \"./guards\";\n\nfunction shallowArray(xs: unknown[], ys: unknown[]): boolean {\n if (xs.length !== ys.length) {\n return false;\n }\n\n for (let i = 0; i < xs.length; i++) {\n if (!Object.is(xs[i], ys[i])) {\n return false;\n }\n }\n\n return true;\n}\n\nfunction shallowObj(objA: unknown, objB: unknown): boolean {\n // Only try to compare keys/values if these objects are both \"pojos\" (plain\n // old JavaScript objects)\n if (!isPlainObject(objA) || !isPlainObject(objB)) {\n return false;\n }\n\n const keysA = Object.keys(objA);\n if (keysA.length !== Object.keys(objB).length) {\n return false;\n }\n\n return keysA.every(\n (key) =>\n Object.prototype.hasOwnProperty.call(objB, key) &&\n Object.is(objA[key], objB[key])\n );\n}\n\n/**\n * Shallowly compares two given values.\n *\n * - Two simple values are considered equal if they're strictly equal\n * - Two arrays are considered equal if their members are strictly equal\n * - Two objects are considered equal if their values are strictly equal\n *\n * Testing goes one level deep.\n */\nexport function shallow(a: unknown, b: unknown): boolean {\n if (Object.is(a, b)) {\n return true;\n }\n\n const isArrayA = Array.isArray(a);\n const isArrayB = Array.isArray(b);\n if (isArrayA || isArrayB) {\n if (!isArrayA || !isArrayB) {\n return false;\n }\n\n return shallowArray(a, b);\n }\n\n return shallowObj(a, b);\n}\n\n/**\n * Two-level deep shallow check.\n * Useful for checking equality of { isLoading: false, myData: [ ... ] } like\n * data structures, where you want to do a shallow comparison on the \"data\"\n * key.\n *\n * NOTE: Works on objects only, not on arrays!\n */\nexport function shallow2(a: unknown, b: unknown): boolean {\n if (!isPlainObject(a) || !isPlainObject(b)) {\n return shallow(a, b);\n }\n\n const keysA = Object.keys(a);\n if (keysA.length !== Object.keys(b).length) {\n return false;\n }\n\n return keysA.every(\n (key) =>\n Object.prototype.hasOwnProperty.call(b, key) && shallow(a[key], b[key])\n );\n}\n","import { DefaultMap } from \"./DefaultMap\";\nimport { SortedList } from \"./SortedList\";\nimport { raise } from \"./utils\";\n\ntype PK = string;\n\n/**\n * A \"tree pool\" is a data structure that allows for easy insertion, deletion,\n * mutation, sorting, and accessing of an object pool of objects that have\n * tree-like relationships.\n *\n * const pool = new TreePool<Simpson>(\n * x => x.id,\n * x => x.parent,\n * (a, b) => a.name < b.name,\n * );\n *\n * The first argument is a function that returns the primary key of an item.\n * The second argument is a function that returns the parent ID for an item (or null if its a root).\n * The third argument is a function that returns how to compare two items, to\n * return queries in sorted order.\n *\n * To insert elements into the pool:\n *\n * pool.upsert({ id: \"1\", name: \"Homer\" });\n * pool.upsert({ id: \"2\", name: \"Marge\" });\n * pool.upsert({ id: \"3\", name: \"Bart\", parent: \"2\" });\n * pool.upsert({ id: \"4\", name: \"Lisa\", parent: \"2\" });\n * pool.upsert({ id: \"5\", name: \"Maggie\", parent: \"2\" });\n *\n * To get all items in the pool:\n *\n * // Items are sorted by the given comparison function, in this case\n * // alphabetically, so: Bart, Homer, Lisa, Maggie, Marge\n * Array.from(pool) // [{ id: \"3\", name: \"Bart\", parent: \"2\" }, ...]\n *\n * To get all children:\n *\n * // All kids are added as children of Marge\n * pool.getChildren(\"1\") // [] (Homer has no 
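A few concrete cases for `shallow()` and `shallow2()` above; the expected results follow from the one-level and two-level comparisons they implement.

```ts
// What shallow() and shallow2() consider equal (one vs. two levels deep):
shallow("a", "a");                                   // true  (Object.is)
shallow([1, 2, 3], [1, 2, 3]);                       // true  (members strictly equal)
shallow({ a: 1 }, { a: 1 });                         // true
shallow({ a: { b: 1 } }, { a: { b: 1 } });           // false (only one level deep)

shallow2(
  { isLoading: false, data: [1, 2] },
  { isLoading: false, data: [1, 2] }
);                                                   // true  (values compared with shallow())
shallow2(
  { data: { nested: [1] } },
  { data: { nested: [1] } }
);                                                   // false (comparison stops two levels deep)
```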
kids)\n * pool.getChildren(\"2\") // [Bart, Lisa, Maggie] (= alphabetically)\n *\n * To get all \"roots\":\n * pool.getChildren(null) // [Homer, Marge] (= alphabetically)\n *\n * To get all siblings:\n * pool.getSiblings(\"3\") // [Lisa, Maggie]\n * pool.getSiblings(\"4\") // [Bart, Maggie]\n * pool.getSiblings(\"5\") // [Bart, Lisa]\n *\n * A bit weird maybe, but Homer and Marge are siblings in this example:\n * pool.getSiblings(\"1\") // [Marge]\n * pool.getSiblings(\"2\") // [Homer]\n *\n * Changing data is no problem, as long as the primary key and parent key don't change:\n * pool.upsert({ id: \"1\", name: \"Homer Simpson\" });\n * pool.upsert({ id: \"3\", name: \"Bart, son of Marge\", parent: \"2\" });\n *\n * But... this will throw an error:\n * pool.upsert({ id: \"3\", name: \"Bart, son of Homer\", parent: \"1\" });\n * // ^^^\n * // Cannot change parent ID. If you want to ever\n * // do this, remove the entry, and recreate it!\n *\n * XXXX Idea for the API to iterate nodes in this tree in arbitrary ways:\n * Traversal can be done in all directions:\n * pool.walk(\"4\", \"up\", { includeSelf: true }) // Iterates: Lisa, Marge\n * pool.walk(\"4\", \"up\", { includeSelf: false }) // Iterates: Marge\n * pool.walk(\"4\", \"left\") // Iterates: Bart // Prev\n * pool.walk(\"4\", \"right\") // Iterates: Maggie // Next\n * pool.walk(\"4\", \"depth-first\")\n * pool.walk(\"4\", \"breadth-first\")\n * pool.walk(\"4\", \"depth-first-reversed\")\n * pool.walk(\"4\", \"breadth-first-reversed\")\n *\n */\nexport class TreePool<T> {\n #_items: Map<PK, T>;\n #_childrenOf: DefaultMap</* parent */ PK | null, /* children */ Set<PK>>;\n #_sorted: SortedList<T>;\n\n #_primaryKey: (item: T) => PK;\n #_parentKeyFn: (item: T) => PK | null;\n #_lt: (a: T, b: T) => boolean;\n\n constructor(\n primaryKey: (item: T) => PK,\n parentKey: (item: T) => PK | null,\n lt: (a: T, b: T) => boolean\n ) {\n this.#_primaryKey = primaryKey;\n this.#_parentKeyFn = parentKey;\n this.#_lt = lt;\n\n this.#_items = new Map();\n this.#_childrenOf = new DefaultMap(() => new Set());\n this.#_sorted = SortedList.with(lt);\n }\n\n public get(id: PK): T | undefined {\n return this.#_items.get(id);\n }\n\n public getOrThrow(id: PK): T {\n return this.get(id) ?? raise(`Item with id ${id} not found`);\n }\n\n public get sorted(): SortedList<T> {\n // XXXX While it's fine to expose this SortedList for efficiency, really we\n // should be exposing it as a readonly value.\n return this.#_sorted;\n }\n\n public getParentId(id: PK): PK | null {\n const item = this.getOrThrow(id);\n return this.#_parentKeyFn(item);\n }\n\n public getParent(id: PK): T | null {\n const parentId = this.getParentId(id);\n return parentId ? this.getOrThrow(parentId) : null;\n }\n\n public getChildren(id: PK | null): T[] {\n const childIds = this.#_childrenOf.get(id);\n if (!childIds) return [];\n\n // XXXX Should we return a sorted list here? From previous note: Think about *storing* it as a sorted list here!\n return Array.from(childIds).map(\n (id) => this.#_items.get(id)! // eslint-disable-line no-restricted-syntax\n );\n }\n\n public *walkUp(\n id: PK,\n predicate?: (item: T) => boolean\n // options?: { includeSelf?: boolean },\n ): IterableIterator<T> {\n // const includeSelf = options?.includeSelf ?? 
true;\n const includeSelf = true; // XXXX Generalize\n let nodeId: PK | null = id;\n do {\n const item = this.getOrThrow(nodeId);\n if (includeSelf || nodeId !== id) {\n if (!predicate || predicate(item)) {\n yield item;\n }\n }\n nodeId = this.#_parentKeyFn(item);\n } while (nodeId !== null);\n }\n\n // XXXX Generalize\n public *walkLeft(\n id: PK,\n predicate?: (item: T) => boolean\n ): IterableIterator<T> {\n // XXXX Calling getSiblings is too inefficient, optimize later!\n // XXXX But first make it work\n const self = this.getOrThrow(id);\n const siblings = SortedList.from(this.getSiblings(id), this.#_lt);\n for (const sibling of siblings.iterReversed()) {\n // Skip over all the \"right\" siblings\n if (this.#_lt(self, sibling)) continue;\n\n // If we get here, it's a \"left\" sibling\n if (!predicate || predicate(sibling)) {\n yield sibling;\n }\n }\n }\n\n // XXXX Generalize\n public *walkRight(\n id: PK,\n predicate?: (item: T) => boolean\n ): IterableIterator<T> {\n // XXXX Calling getSiblings is too inefficient, optimize later!\n // XXXX But first make it work\n const self = this.getOrThrow(id);\n const siblings = SortedList.from(this.getSiblings(id), this.#_lt);\n for (const sibling of siblings) {\n // Skip over all the \"left\" siblings\n if (this.#_lt(sibling, self)) continue;\n\n // If we get here, it's a \"right\" sibling\n if (!predicate || predicate(sibling)) {\n yield sibling;\n }\n }\n }\n\n // XXXX Generalize\n public *walkDown(\n id: PK,\n predicate?: (item: T) => boolean\n // _direction?: \"depth-first\",\n // _reversed?: true\n // | \"depth-first\"\n // | \"breadth-first\"\n // | \"breadth-first-rev\"\n // options?: {\n // _direction: \"depth-first\";\n // _reversed: true;\n // // _includeSelf?: boolean;\n // }\n ): IterableIterator<T> {\n const children = SortedList.from(this.getChildren(id), this.#_lt).rawArray;\n for (let i = children.length - 1; i >= 0; i--) {\n const child = children[i];\n yield* this.walkDown(\n this.#_primaryKey(child),\n predicate\n // \"depth-first\",\n // true\n );\n if (!predicate || predicate(child)) {\n yield child;\n }\n }\n\n // if (options?.includeSelf) {\n // yield this.getOrThrow(id);\n // }\n }\n\n /** Returns all siblings, not including the item itself. */\n public getSiblings(id: PK): readonly T[] {\n const self = this.getOrThrow(id);\n const parentId = this.getParentId(id);\n return this.getChildren(parentId).filter((item) => item !== self);\n }\n\n public [Symbol.iterator](): IterableIterator<T> {\n return this.#_sorted[Symbol.iterator]();\n }\n\n public upsert(item: T): void {\n const pk = this.#_primaryKey(item);\n const existing = this.#_items.get(pk);\n if (existing) {\n // Allow upserts if the parent ID hasn't changed, otherwise, remove the\n // entry and replace it with the new item\n if (this.#_parentKeyFn(existing) !== this.#_parentKeyFn(item)) {\n throw new Error(\n \"Cannot upsert parent ID changes that change the tree structure. Remove the entry first, and recreate it\"\n );\n }\n\n this.#_sorted.remove(existing);\n }\n\n this.#_items.set(pk, item);\n this.#_sorted.add(item);\n\n const parentId = this.#_parentKeyFn(item);\n this.#_childrenOf.getOrCreate(parentId).add(pk);\n }\n\n public remove(pk: PK): boolean {\n const item = this.#_items.get(pk);\n if (!item) return false;\n\n const childIds = this.#_childrenOf.get(pk);\n if (childIds) {\n throw new Error(\n `Cannot remove item '${pk}' while it still has children. 
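The Simpsons example in the doc comment above already covers insertion and children; this adds a small trace of `walkUp()` and `getSiblings()`, using the same illustrative item shape.

```ts
// Sketch: walking up the tree. walkUp() yields the node itself first,
// then each ancestor up to the root.
const pool = new TreePool<{ id: string; name: string; parent?: string }>(
  (x) => x.id,
  (x) => x.parent ?? null,
  (a, b) => a.name < b.name
);
pool.upsert({ id: "2", name: "Marge" });
pool.upsert({ id: "4", name: "Lisa", parent: "2" });

console.log([...pool.walkUp("4")].map((x) => x.name)); // ["Lisa", "Marge"]
console.log(pool.getSiblings("4").map((x) => x.name)); // [] (no siblings inserted yet)
```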
Remove children first.`\n );\n }\n\n const parentId = this.#_parentKeyFn(item);\n const siblings = this.#_childrenOf.get(parentId);\n if (siblings) {\n siblings.delete(pk);\n if (siblings.size === 0) {\n this.#_childrenOf.delete(parentId);\n }\n }\n\n this.#_sorted.remove(item);\n this.#_childrenOf.delete(pk);\n this.#_items.delete(pk);\n return true;\n }\n\n public clear(): boolean {\n if (this.#_items.size === 0) return false;\n\n this.#_childrenOf.clear();\n this.#_items.clear();\n this.#_sorted.clear();\n return true;\n }\n}\n","import { AiChatDB } from \"./AiChatDB\";\nimport { getBearerTokenFromAuthValue } from \"./api-client\";\nimport type { AuthValue } from \"./auth-manager\";\nimport type { Delegates, Status } from \"./connection\";\nimport { ManagedSocket, StopRetrying } from \"./connection\";\nimport { kInternal } from \"./internal\";\nimport { assertNever } from \"./lib/assert\";\nimport { Promise_withResolvers } from \"./lib/controlledPromise\";\nimport { DefaultMap } from \"./lib/DefaultMap\";\nimport * as console from \"./lib/fancy-console\";\nimport { isDefined } from \"./lib/guards\";\nimport { IncrementalJsonParser } from \"./lib/IncrementalJsonParser\";\nimport type { JsonObject } from \"./lib/Json\";\nimport { nanoid } from \"./lib/nanoid\";\nimport type { Resolve } from \"./lib/Resolve\";\nimport { shallow, shallow2 } from \"./lib/shallow\";\nimport { batch, DerivedSignal, MutableSignal, Signal } from \"./lib/signals\";\nimport { TreePool } from \"./lib/TreePool\";\nimport type { Brand, DistributiveOmit, ISODateString } from \"./lib/utils\";\nimport { findLastIndex, raise, tryParseJson } from \"./lib/utils\";\nimport type {\n DynamicSessionInfo,\n OptionalTupleUnless,\n Polyfills,\n StaticSessionInfo,\n TimeoutID,\n} from \"./room\";\nimport type {\n AbortAiResponse,\n AiAssistantContentPart,\n AiAssistantDeltaUpdate,\n AiAssistantMessage,\n AiChat,\n AiChatMessage,\n AiChatsQuery,\n AiFailedAssistantMessage,\n AiGeneratingAssistantMessage,\n AiGenerationOptions,\n AiKnowledgeSource,\n AiReceivingToolInvocationPart,\n AiToolDescription,\n AiToolInvocationPart,\n AiUserContentPart,\n AiUserMessage,\n AskInChatResponse,\n ClearChatResponse,\n ClientAiMsg,\n CmdId,\n CopilotId,\n CreateChatOptions,\n DeleteChatResponse,\n DeleteMessageResponse,\n GetChatsOptions,\n GetChatsResponse,\n GetMessageTreeResponse,\n GetOrCreateChatResponse,\n MessageId,\n ServerAiMsg,\n SetToolResultResponse,\n ToolResultResponse,\n} from \"./types/ai\";\nimport type { Awaitable } from \"./types/Awaitable\";\nimport type {\n InferFromSchema,\n JSONObjectSchema7,\n} from \"./types/InferFromSchema\";\nimport type {\n IWebSocket,\n IWebSocketInstance,\n IWebSocketMessageEvent,\n} from \"./types/IWebSocket\";\nimport { PKG_VERSION } from \"./version\";\n\n// Server must respond to any command within 4 seconds. Note that this timeout\n// isn't related to the timeout for long-running AI tasks. If a long-running AI\n// task is started, the initial command response from the server is \"okay, I'll\n// keep you posted about this long-running task\". That okay is the response\n// which must happen within 4 seconds. 
In practice it should only take a few\n// milliseconds at most.\nconst DEFAULT_REQUEST_TIMEOUT = 4_000;\n\nexport type AiToolTypePack<\n A extends JsonObject = JsonObject,\n R extends JsonObject = JsonObject,\n> = {\n A: A;\n R: R;\n};\n\nexport type AskUserMessageInChatOptions = Omit<AiGenerationOptions, \"tools\">;\n\nexport type SetToolResultOptions = Omit<\n AiGenerationOptions,\n \"tools\" | \"knowledge\"\n>;\n\nexport type AiToolInvocationProps<\n A extends JsonObject,\n R extends JsonObject,\n> = Resolve<\n DistributiveOmit<AiToolInvocationPart<A, R>, \"type\"> & {\n respond: (\n ...args: OptionalTupleUnless<R, [result: ToolResultResponse<R>]>\n ) => void;\n\n /**\n * These are the inferred types for your tool call which you can pass down\n * to UI components, like so:\n *\n * <AiTool.Confirmation\n * types={types}\n * confirm={\n * // Now fully type-safe!\n * (args) => result\n * } />\n *\n * This will make your AiTool.Confirmation component aware of the types for\n * `args` and `result`.\n */\n types: AiToolTypePack<A, R>;\n\n // Private APIs\n [kInternal]: {\n execute: AiToolExecuteCallback<A, R> | undefined;\n messageStatus: AiAssistantMessage[\"status\"];\n };\n }\n>;\n\nexport type AiOpaqueToolInvocationProps = AiToolInvocationProps<\n JsonObject,\n JsonObject\n>;\n\nexport type AiToolExecuteContext = {\n name: string;\n invocationId: string;\n};\n\nexport type AiToolExecuteCallback<\n A extends JsonObject,\n R extends JsonObject,\n> = (\n args: A,\n context: AiToolExecuteContext\n) => Record<string, never> extends R\n ? Awaitable<ToolResultResponse<R> | undefined | void>\n : Awaitable<ToolResultResponse<R>>;\n\nexport type AiToolDefinition<\n S extends JSONObjectSchema7,\n A extends JsonObject,\n R extends JsonObject,\n> = {\n description?: string;\n parameters: S;\n execute?: AiToolExecuteCallback<A, R>;\n render?: (props: AiToolInvocationProps<A, R>) => unknown;\n enabled?: boolean;\n};\n\nexport type AiOpaqueToolDefinition = AiToolDefinition<\n JSONObjectSchema7,\n JsonObject,\n JsonObject\n>;\n\n/**\n * Helper function to help infer the types of `args`, `render`, and `result`.\n * This function has no runtime implementation and is only needed to make it\n * possible for TypeScript to infer types.\n */\nexport function defineAiTool<R extends JsonObject>() {\n return <const S extends JSONObjectSchema7>(\n def: AiToolDefinition<\n S,\n InferFromSchema<S> extends JsonObject ? 
InferFromSchema<S> : JsonObject,\n R\n >\n ): AiOpaqueToolDefinition => {\n return def as AiOpaqueToolDefinition;\n };\n}\n\ntype NavigationInfo = {\n /**\n * The message ID of the parent message, or null if there is no parent.\n */\n parent: MessageId | null;\n /**\n * The message ID of the left sibling message, or null if there is no left sibling.\n */\n prev: MessageId | null;\n /**\n * The message ID of the right sibling message, or null if there is no right sibling.\n */\n next: MessageId | null;\n};\n\nexport type WithNavigation<T> = T & { navigation: NavigationInfo };\n\ntype UiChatMessage = WithNavigation<AiChatMessage>;\n\ntype AiContext = {\n staticSessionInfoSig: Signal<StaticSessionInfo | null>;\n dynamicSessionInfoSig: Signal<DynamicSessionInfo | null>;\n pendingCmds: Map<\n CmdId,\n {\n resolve: (value: ServerAiMsg) => void;\n reject: (reason: unknown) => void;\n }\n >;\n chatsStore: ReturnType<typeof createStore_forUserAiChats>;\n toolsStore: ReturnType<typeof createStore_forTools>;\n messagesStore: ReturnType<typeof createStore_forChatMessages>;\n knowledgeStore: ReturnType<typeof createStore_forKnowledge>;\n};\n\nexport type LayerKey = Brand<string, \"LayerKey\">;\n\nexport class KnowledgeStack {\n #_layers: Set<LayerKey>;\n\n #stack: DefaultMap<string, Map<LayerKey, AiKnowledgeSource | null>>;\n // / \\\n // knowledge key \"layer\" key\n // (random, or optionally (one entry per mounted component)\n // set by user)\n #_cache: AiKnowledgeSource[] | undefined;\n\n constructor() {\n this.#_layers = new Set<LayerKey>();\n this.#stack = new DefaultMap(\n () => new Map<LayerKey, AiKnowledgeSource | null>()\n );\n this.#_cache = undefined;\n }\n\n // Typically a useId()\n registerLayer(uniqueLayerId: string): LayerKey {\n const layerKey = uniqueLayerId as LayerKey;\n if (this.#_layers.has(layerKey))\n raise(`Layer '${layerKey}' already exists, provide a unique layer id`);\n this.#_layers.add(layerKey);\n return layerKey;\n }\n\n deregisterLayer(layerKey: LayerKey): void {\n this.#_layers.delete(layerKey);\n let deleted = false;\n for (const [key, knowledge] of this.#stack) {\n if (knowledge.delete(layerKey)) {\n deleted = true;\n }\n if (knowledge.size === 0)\n // Just memory cleanup\n this.#stack.delete(key);\n }\n if (deleted) {\n this.invalidate();\n }\n }\n\n get(): AiKnowledgeSource[] {\n return (this.#_cache ??= this.#recompute());\n }\n\n invalidate(): void {\n this.#_cache = undefined;\n }\n\n #recompute(): AiKnowledgeSource[] {\n return Array.from(this.#stack.values()).flatMap((layer) =>\n // Return only the last item (returns [] when empty)\n Array.from(layer.values()).slice(-1).filter(isDefined)\n );\n }\n\n updateKnowledge(\n layerKey: LayerKey,\n key: string,\n data: AiKnowledgeSource | null\n ): void {\n if (!this.#_layers.has(layerKey)) raise(`Unknown layer key: ${layerKey}`);\n this.#stack.getOrCreate(key).set(layerKey, data);\n this.invalidate();\n }\n}\n\nfunction createStore_forKnowledge() {\n const knowledgeByChatId = new DefaultMap(\n (_chatId: string | typeof kWILDCARD) => new KnowledgeStack()\n );\n\n function getKnowledgeStack(chatId?: string): KnowledgeStack {\n return knowledgeByChatId.getOrCreate(chatId ?? kWILDCARD);\n }\n\n function getKnowledgeForChat(chatId: string): AiKnowledgeSource[] {\n const globalKnowledge = knowledgeByChatId.getOrCreate(kWILDCARD).get();\n const scopedKnowledge = knowledgeByChatId.get(chatId)?.get() ?? 
[];\n return [...globalKnowledge, ...scopedKnowledge];\n }\n\n return {\n getKnowledgeStack,\n getKnowledgeForChat,\n };\n}\n\nexport type GetOrCreateChatOptions = {\n name: string;\n metadata?: AiChat[\"metadata\"];\n};\n\nfunction now(): ISODateString {\n return new Date().toISOString() as ISODateString;\n}\n\n// Symbol used to register tools globally. These tools are not scoped to\n// a particular chatId and made available to any AiChat instance.\nconst kWILDCARD = Symbol(\"*\");\n\nfunction createStore_forTools() {\n const toolsByChatIdΣ = new DefaultMap(\n (_chatId: string | typeof kWILDCARD) => {\n return new DefaultMap((_name: string) => {\n return new Signal<AiOpaqueToolDefinition | undefined>(undefined);\n });\n }\n );\n\n //\n // TODO This administration is pretty ugly at the moment.\n // Would be nice to have some kind of helper for constructing these\n // structures. Maintaining them in all these different DefaultMaps is pretty\n // getting pretty tricky. Ideas are very welcomed!\n //\n // Key here is: '[\"my-tool\",\"my-chat\"]' or just '[\"my-tool\"]' (for global tools)\n //\n const globalOrScopedToolΣ = new DefaultMap((nameAndChat: string) => {\n const [name, chatId] = tryParseJson(nameAndChat) as [\n string,\n string | undefined,\n ];\n return DerivedSignal.from(() => {\n return (\n // A tool that's registered and scoped to a specific chat ID...\n (chatId !== undefined\n ? toolsByChatIdΣ.getOrCreate(chatId).getOrCreate(name)\n : undefined\n )?.get() ??\n // ...or a globally registered tool\n toolsByChatIdΣ.getOrCreate(kWILDCARD).getOrCreate(name).get()\n );\n });\n });\n\n function getToolΣ(name: string, chatId?: string) {\n const key = JSON.stringify(chatId !== undefined ? [name, chatId] : [name]);\n return globalOrScopedToolΣ.getOrCreate(key);\n }\n\n function registerTool(\n name: string,\n tool: AiOpaqueToolDefinition,\n chatId?: string\n ) {\n if (!tool.execute && !tool.render) {\n throw new Error(\n \"A tool definition must have an execute() function, a render() function, or both.\"\n );\n }\n\n const key = chatId ?? kWILDCARD;\n toolsByChatIdΣ.getOrCreate(key).getOrCreate(name).set(tool);\n\n return () => unregisterTool(key, name);\n }\n\n function unregisterTool(chatId: string | typeof kWILDCARD, name: string) {\n const tools = toolsByChatIdΣ.get(chatId);\n if (tools === undefined) return;\n const tool = tools.get(name);\n if (tool === undefined) return;\n tool.set(undefined);\n }\n\n function getToolDescriptions(chatId: string): AiToolDescription[] {\n const globalToolsΣ = toolsByChatIdΣ.get(kWILDCARD);\n const scopedToolsΣ = toolsByChatIdΣ.get(chatId);\n return Array.from([\n ...(globalToolsΣ?.entries() ?? []),\n ...(scopedToolsΣ?.entries() ?? []),\n ]).flatMap(([name, toolΣ]) => {\n const tool = toolΣ.get();\n return tool && (tool.enabled ?? true)\n ? [{ name, description: tool.description, parameters: tool.parameters }]\n : [];\n });\n }\n\n return {\n getToolDescriptions,\n\n getToolΣ,\n registerTool,\n };\n}\n\nfunction createStore_forChatMessages(\n toolsStore: ReturnType<typeof createStore_forTools>,\n setToolResultFn: (\n chatId: string,\n messageId: MessageId,\n invocationId: string,\n result: ToolResultResponse,\n options?: SetToolResultOptions\n ) => Promise<void>\n) {\n // Keeps track of all message IDs that are originated from this client. 
We\n // use this concept of \"ownership\" to determine which client instance is\n // allowed to auto-execute tool invocations for this message.\n const myMessages = new Set<MessageId>();\n\n // Keeps track of any tool invocations that have been auto-executed by this\n // client. Note that this can also record invocations that don't have an\n // execute() function. In that case, we also handled it (by kicking off nothing).\n const handledInvocations = new Set<string>();\n\n // We maintain a Map with mutable signals. Each such signal contains\n // a mutable automatically-sorted list of chat messages by chat ID.\n const messagePoolByChatIdΣ = new DefaultMap(\n (_chatId: string) =>\n new MutableSignal(\n new TreePool<AiChatMessage>(\n (x) => x.id,\n (x) => x.parentId,\n (x, y) => x.createdAt < y.createdAt\n )\n )\n );\n\n // Separately from that, we track all _generating_ signals in a separate\n // administration. Because generating messages are likely to receive\n // many/frequent updates, updating them in a separate administration makes\n // rendering streaming contents much more efficient than if we had to\n // re-create and re-render the entire chat list on every such update.\n const generatingMessagesΣ = new MutableSignal(\n new Map<MessageId, AiGeneratingAssistantMessage>()\n );\n\n function createOptimistically(\n chatId: string,\n role: \"user\",\n parentId: MessageId | null,\n content: AiUserContentPart[]\n ): MessageId;\n function createOptimistically(\n chatId: string,\n role: \"assistant\",\n parentId: MessageId | null,\n copilotId?: CopilotId\n ): MessageId;\n function createOptimistically(\n chatId: string,\n role: \"user\" | \"assistant\",\n parentId: MessageId | null,\n third?: AiUserContentPart[] | CopilotId\n ) {\n const id = `ms_${nanoid()}` as MessageId;\n const createdAt = now();\n if (role === \"user\") {\n const content = third as AiUserContentPart[];\n upsert({\n id,\n chatId,\n role,\n parentId,\n createdAt,\n content,\n _optimistic: true,\n } satisfies AiUserMessage);\n } else {\n const copilotId = third as CopilotId | undefined;\n upsert({\n id,\n chatId,\n role,\n parentId,\n createdAt,\n status: \"generating\",\n contentSoFar: [],\n copilotId,\n _optimistic: true,\n } satisfies AiGeneratingAssistantMessage);\n }\n return id;\n }\n\n function upsertMany(messages: AiChatMessage[]): void {\n batch(() => {\n for (const message of messages) {\n upsert(message);\n }\n });\n }\n\n function remove(chatId: string, messageId: MessageId): void {\n const chatMsgsΣ = messagePoolByChatIdΣ.get(chatId);\n if (!chatMsgsΣ) return;\n\n const existing = chatMsgsΣ.get().get(messageId);\n if (!existing || existing.deletedAt) return;\n\n if (existing.role === \"assistant\" && existing.status !== \"completed\") {\n upsert({ ...existing, deletedAt: now(), contentSoFar: [] });\n } else {\n upsert({ ...existing, deletedAt: now(), content: [] });\n }\n }\n\n function removeByChatId(chatId: string): void {\n const chatMsgsΣ = messagePoolByChatIdΣ.get(chatId);\n if (chatMsgsΣ === undefined) return;\n chatMsgsΣ.mutate((pool) => pool.clear());\n }\n\n function upsert(message: AiChatMessage): void {\n batch(() => {\n const chatMsgsΣ = messagePoolByChatIdΣ.getOrCreate(message.chatId);\n chatMsgsΣ.mutate((pool) => pool.upsert(message));\n\n // If the message is a pending update, write it to the generating\n // messages LUT. 
If not, remove it from there.\n if (message.role === \"assistant\" && message.status === \"generating\") {\n generatingMessagesΣ.mutate((lut) => {\n lut.set(message.id, structuredClone(message));\n });\n } else {\n generatingMessagesΣ.mutate((lut) => {\n lut.delete(message.id);\n });\n }\n\n //\n // If this message has \"awaiting-tool\" status, it may be the client's\n // move to trigger an action / call an execute function.\n //\n // We will automatically invoke execute()...\n // - only if such function is provided by the user\n // - at most once (which is why we track it in seenToolCallIds)\n // - and only if the current client ID is the designated client ID\n //\n if (message.role === \"assistant\" && message.status === \"awaiting-tool\") {\n if (myMessages.has(message.id)) {\n for (const toolInvocation of message.contentSoFar.filter(\n (part) =>\n part.type === \"tool-invocation\" && part.stage === \"executing\"\n )) {\n if (!handledInvocations.has(toolInvocation.invocationId)) {\n handledInvocations.add(toolInvocation.invocationId);\n } else {\n // Do nothing, we already kicked this one off\n continue;\n }\n\n const executeFn = toolsStore\n .getToolΣ(toolInvocation.name, message.chatId)\n .get()?.execute;\n if (executeFn) {\n (async () => {\n const result = await executeFn(toolInvocation.args, {\n name: toolInvocation.name,\n invocationId: toolInvocation.invocationId,\n });\n return await setToolResultFn(\n message.chatId,\n message.id,\n toolInvocation.invocationId,\n result ?? { data: {} },\n { copilotId: message.copilotId } // TODO: Should we pass the other generation options (tools, knowledge) as well?\n );\n })().catch((err) => {\n console.error(\n `Error trying to respond to tool-call: ${String(err)} (in execute())`\n );\n });\n }\n }\n }\n } else {\n // Clean up the ownership administration\n if (message.role === \"assistant\" && message.status === \"generating\") {\n // ...unless it's still generating\n } else {\n myMessages.delete(message.id);\n }\n }\n });\n }\n\n function addDelta(messageId: MessageId, delta: AiAssistantDeltaUpdate): void {\n generatingMessagesΣ.mutate((lut) => {\n const message = lut.get(messageId);\n if (message === undefined) return false;\n\n patchContentWithDelta(message.contentSoFar, delta);\n lut.set(messageId, message);\n return true;\n });\n }\n\n function* iterGeneratingMessages() {\n for (const chatMsgsΣ of messagePoolByChatIdΣ.values()) {\n for (const m of chatMsgsΣ.get()) {\n if (\n m.role === \"assistant\" &&\n m.status === \"generating\" &&\n !m._optimistic\n ) {\n yield m;\n }\n }\n }\n }\n\n function failAllPending(): void {\n batch(() => {\n generatingMessagesΣ.mutate((lut) => {\n let deleted = false;\n for (const [k, v] of lut) {\n if (!v._optimistic) {\n lut.delete(k);\n deleted = true;\n }\n }\n return deleted;\n });\n\n upsertMany(\n Array.from(iterGeneratingMessages()).map(\n (message) =>\n ({\n ...message,\n status: \"failed\",\n errorReason: \"Lost connection\",\n }) as AiFailedAssistantMessage\n )\n );\n });\n }\n\n function getMessageById(messageId: MessageId): AiChatMessage | undefined {\n for (const messagesΣ of messagePoolByChatIdΣ.values()) {\n const message = messagesΣ.get().get(messageId);\n if (message) {\n return message;\n }\n }\n return undefined;\n }\n\n function first<T>(iterable: IterableIterator<T>): T | undefined {\n const result = iterable.next();\n return result.done ? 
undefined : result.value;\n }\n\n function selectBranch(\n pool: TreePool<AiChatMessage>,\n preferredBranch: MessageId | null\n ): UiChatMessage[] {\n function isAlive(message: AiChatMessage): boolean {\n // This could be generalized by doing a walk(\n // { direction: 'down',\n // type: 'breadth-first',\n // includeSelf: true,\n // predicate: m => !m.deletedAt,\n // })\n\n // If it's a non-deleted message, it's alive\n if (!message.deletedAt) {\n return true;\n }\n for (const _ of pool.walkDown(message.id, (m) => !m.deletedAt)) {\n return true;\n }\n return false;\n }\n\n function selectSpine(leaf: AiChatMessage): UiChatMessage[] {\n const spine = [];\n\n let lastVisitedMessage: UiChatMessage | null = null;\n for (const message of pool.walkUp(leaf.id)) {\n const prev = first(pool.walkLeft(message.id, isAlive))?.id ?? null;\n const next = first(pool.walkRight(message.id, isAlive))?.id ?? null;\n\n // Remove deleted messages only if they don't have any non-deleted\n // children, and also don't have a next/prev link, requiring the\n // deleted node to have an on-screen presence.\n if (!message.deletedAt || prev || next) {\n const node: UiChatMessage = {\n ...message,\n navigation: { parent: null, prev, next },\n };\n // Set the parent of the last visited to the id of the current node.\n if (lastVisitedMessage !== null) {\n lastVisitedMessage.navigation.parent = node.id;\n }\n lastVisitedMessage = node;\n spine.push(node);\n }\n }\n return spine.reverse();\n }\n\n function fallback(): UiChatMessage[] {\n const latest = pool.sorted.findRight((m) => !m.deletedAt);\n return latest ? selectSpine(latest) : [];\n }\n\n if (preferredBranch === null) {\n return fallback();\n }\n\n const message = pool.get(preferredBranch);\n if (!message) {\n return fallback();\n }\n\n // Find the first non-deleted grand child. 
If one doesn't exist, keep\n // walking up the tree and repeat, until we find one.\n for (const current of pool.walkUp(message.id)) {\n // If a non-deleted grandchild exists, select it.\n for (const desc of pool.walkDown(current.id, (m) => !m.deletedAt)) {\n return selectSpine(desc);\n }\n\n // If the current node is not deleted, select it.\n if (!current.deletedAt) {\n return selectSpine(current);\n }\n\n // Otherwise, continue looping by walking up one level and repeating.\n }\n\n return fallback();\n }\n\n const immutableMessagesByBranch = new DefaultMap((chatId: string) => {\n return new DefaultMap((branchId: MessageId | null) => {\n const messagesΣ = DerivedSignal.from(() => {\n const pool = messagePoolByChatIdΣ.getOrCreate(chatId).get();\n return selectBranch(pool, branchId);\n }, shallow2);\n\n return DerivedSignal.from((): UiChatMessage[] => {\n const generatingMessages = generatingMessagesΣ.get();\n return messagesΣ.get().map((message) => {\n if (message.role !== \"assistant\" || message.status !== \"generating\") {\n return message;\n }\n const generatingMessage = generatingMessages.get(message.id);\n if (generatingMessage === undefined) return message;\n return {\n ...message,\n contentSoFar: generatingMessage.contentSoFar,\n } satisfies AiGeneratingAssistantMessage;\n });\n }, shallow);\n });\n });\n\n function getChatMessagesForBranchΣ(chatId: string, branch?: MessageId) {\n return immutableMessagesByBranch\n .getOrCreate(chatId)\n .getOrCreate(branch || null);\n }\n\n function getLastUsedCopilotId(chatId: string): CopilotId | undefined {\n const pool = messagePoolByChatIdΣ.getOrCreate(chatId).get();\n // Find the most recent non-deleted assistant message\n const latest = pool.sorted.findRight(\n (m) => m.role === \"assistant\" && !m.deletedAt\n );\n return latest?.copilotId;\n }\n\n return {\n // Readers\n getMessageById,\n getChatMessagesForBranchΣ,\n getLastUsedCopilotId,\n\n // Mutations\n createOptimistically,\n upsert,\n upsertMany,\n remove,\n removeByChatId,\n addDelta,\n failAllPending,\n\n markMine(messageId: MessageId) {\n myMessages.add(messageId);\n },\n\n /**\n * Iterates over all my auto-executing messages.\n *\n * These are messages that match all these conditions:\n * - The message is an assistant message\n * - The message is owned by this client (\"mine\")\n * - The message is currently in \"awaiting-tool\" status\n * - The message has at least one tool invocation in \"executing\" stage\n * - The tool invocation has an execute() function defined\n */\n *getAutoExecutingMessageIds(): Iterable<MessageId> {\n for (const messageId of myMessages) {\n const message = getMessageById(messageId);\n if (\n message?.role === \"assistant\" &&\n message.status === \"awaiting-tool\"\n ) {\n const isAutoExecuting = message.contentSoFar.some((part) => {\n if (part.type === \"tool-invocation\" && part.stage === \"executing\") {\n const tool = toolsStore.getToolΣ(part.name, message.chatId).get();\n return typeof tool?.execute === \"function\";\n }\n return false;\n });\n\n if (isAutoExecuting) {\n yield message.id;\n }\n }\n }\n },\n };\n}\n\nfunction createStore_forUserAiChats() {\n const chatsDB = new AiChatDB();\n\n function upsertMany(chats: AiChat[]) {\n batch(() => {\n for (const chat of chats) {\n chatsDB.upsert(chat);\n }\n });\n }\n\n function upsert(chat: AiChat) {\n chatsDB.upsert(chat);\n }\n\n /**\n * \"Just\" deleting a chat we already know about might break assumptions in\n * clients that are currently displaying the chat on-screen. 
So instead,\n * we'll re-render those so they can display the chat is deleted.\n */\n function markDeleted(chatId: string) {\n chatsDB.markDeleted(chatId);\n }\n\n function getChatById(chatId: string) {\n return chatsDB.getEvenIfDeleted(chatId);\n }\n\n function findMany(query: AiChatsQuery): AiChat[] {\n return chatsDB.signal.get().findMany(query);\n }\n\n return {\n getChatById,\n\n findMany,\n\n // Mutations\n upsert,\n upsertMany,\n markDeleted,\n };\n}\n\n/** @private This API will change, and is not considered stable. DO NOT RELY on it. */\nexport type Ai = {\n [kInternal]: {\n context: AiContext;\n };\n connectInitially: () => void;\n // connect: () => void;\n // reconnect: () => void;\n disconnect: () => void;\n getStatus: () => Status;\n\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n getChats: (options?: GetChatsOptions) => Promise<GetChatsResponse>;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n getOrCreateChat: (\n /** A unique identifier for the chat. */\n chatId: string,\n options?: CreateChatOptions\n ) => Promise<GetOrCreateChatResponse>;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n deleteChat: (chatId: string) => Promise<DeleteChatResponse>;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n getMessageTree: (chatId: string) => Promise<GetMessageTreeResponse>;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n deleteMessage: (\n chatId: string,\n messageId: MessageId\n ) => Promise<DeleteMessageResponse>;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n clearChat: (chatId: string) => Promise<ClearChatResponse>;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n askUserMessageInChat: (\n chatId: string,\n userMessage:\n | MessageId\n | {\n id: MessageId;\n parentMessageId: MessageId | null;\n content: AiUserContentPart[];\n },\n targetMessageId: MessageId,\n options?: AskUserMessageInChatOptions\n ) => Promise<AskInChatResponse>;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n abort: (messageId: MessageId) => Promise<AbortAiResponse>;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n setToolResult: (\n chatId: string,\n messageId: MessageId,\n invocationId: string,\n result: ToolResultResponse,\n options?: SetToolResultOptions\n ) => Promise<void>;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n signals: {\n getChatMessagesForBranchΣ(\n chatId: string,\n branch?: MessageId\n ): DerivedSignal<UiChatMessage[]>;\n getToolΣ(\n name: string,\n chatId?: string\n ): DerivedSignal<AiOpaqueToolDefinition | undefined>;\n statusΣ: Signal<Status>;\n };\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n getChatById: (chatId: string) => AiChat | undefined;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n queryChats: (query: AiChatsQuery) => AiChat[];\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n getLastUsedCopilotId: (chatId: string) => CopilotId | undefined;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. 
*/\n registerKnowledgeLayer: (\n uniqueLayerId: string,\n chatId?: string\n ) => {\n layerKey: LayerKey;\n deregister: () => void;\n };\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n updateKnowledge: (\n layerKey: LayerKey,\n data: AiKnowledgeSource,\n key?: string,\n chatId?: string\n ) => void;\n /** @private This API will change, and is not considered stable. DO NOT RELY on it. */\n registerTool: (\n name: string,\n tool: AiOpaqueToolDefinition,\n chatId?: string\n ) => () => void;\n};\n\n/** @internal */\nexport type AiConfig = {\n delegates: Delegates<AuthValue>;\n\n userId?: string;\n lostConnectionTimeout: number;\n backgroundKeepAliveTimeout?: number;\n polyfills?: Polyfills;\n\n enableDebugLogging?: boolean;\n};\n\nexport function createAi(config: AiConfig): Ai {\n const managedSocket: ManagedSocket<AuthValue> = new ManagedSocket(\n config.delegates,\n config.enableDebugLogging,\n false // AI doesn't have actors (yet, but it will)\n );\n\n const chatsStore = createStore_forUserAiChats();\n const toolsStore = createStore_forTools();\n const knowledgeStore = createStore_forKnowledge();\n const messagesStore = createStore_forChatMessages(toolsStore, setToolResult);\n const context: AiContext = {\n staticSessionInfoSig: new Signal<StaticSessionInfo | null>(null),\n dynamicSessionInfoSig: new Signal<DynamicSessionInfo | null>(null),\n pendingCmds: new Map(),\n chatsStore,\n messagesStore,\n toolsStore,\n knowledgeStore,\n };\n\n const statusΣ = new Signal<Status>(\"initial\");\n\n // Delta batch processing system to throttle incoming delta updates. Incoming\n // deltas are buffered and only let through every every 25ms. This creates\n // a ceiling of max 40 rerenders/second during streaming.\n const DELTA_THROTTLE = 25;\n let pendingDeltas: { id: MessageId; delta: AiAssistantDeltaUpdate }[] = [];\n let deltaBatchTimer: ReturnType<typeof setTimeout> | null = null;\n\n function flushPendingDeltas() {\n const currentQueue = pendingDeltas;\n\n pendingDeltas = [];\n if (deltaBatchTimer !== null) {\n clearTimeout(deltaBatchTimer);\n deltaBatchTimer = null;\n }\n\n // Process all pending deltas in a single batch\n batch(() => {\n for (const { id, delta } of currentQueue) {\n context.messagesStore.addDelta(id, delta);\n }\n });\n }\n\n function enqueueDelta(id: MessageId, delta: AiAssistantDeltaUpdate) {\n pendingDeltas.push({ id, delta });\n\n // If no timer is running, start one to process the batch\n if (deltaBatchTimer === null) {\n deltaBatchTimer = setTimeout(flushPendingDeltas, DELTA_THROTTLE);\n }\n }\n\n let lastTokenKey: string | undefined;\n function onStatusDidChange(newStatus: Status) {\n const authValue = managedSocket.authValue;\n if (authValue !== null) {\n const tokenKey = getBearerTokenFromAuthValue(authValue);\n\n if (tokenKey !== lastTokenKey) {\n lastTokenKey = tokenKey;\n\n if (authValue.type === \"secret\") {\n const token = authValue.token.parsed;\n context.staticSessionInfoSig.set({\n userId: token.uid,\n userInfo: token.ui,\n });\n } else {\n context.staticSessionInfoSig.set({\n userId: undefined,\n userInfo: undefined,\n });\n }\n }\n }\n\n // Forward to the outside world\n statusΣ.set(newStatus);\n }\n let _connectionLossTimerId: TimeoutID | undefined;\n let _hasLostConnection = false;\n\n function handleConnectionLossEvent(newStatus: Status) {\n if (newStatus === \"reconnecting\") {\n _connectionLossTimerId = setTimeout(() => {\n _hasLostConnection = true;\n }, config.lostConnectionTimeout);\n } else {\n 
clearTimeout(_connectionLossTimerId);\n\n if (_hasLostConnection) {\n _hasLostConnection = false;\n }\n }\n }\n\n function onDidConnect() {\n // NoOp for now, but we should maybe fetch messages or something?\n }\n\n function onDidDisconnect() {\n // Flush any pending deltas before disconnect to prevent data loss\n flushPendingDeltas();\n }\n\n function handleServerMessage(event: IWebSocketMessageEvent) {\n if (typeof event.data !== \"string\")\n // Ignore binary (non-string) WebSocket messages\n return;\n\n const msg = tryParseJson(event.data) as ServerAiMsg | undefined;\n if (!msg)\n // Ignore non-JSON messages\n return;\n\n // If the current msg carries a cmdId, check to see if it's a known one,\n // and if it's still exists in our pendingRequest administration. If not,\n // it may have timed out already, or it wasn't intended for us.\n const cmdId =\n \"cmdId\" in msg\n ? msg.cmdId\n : msg.event === \"cmd-failed\"\n ? msg.failedCmdId\n : undefined;\n const pendingCmd = context.pendingCmds.get(cmdId!); // eslint-disable-line no-restricted-syntax\n\n if (cmdId && !pendingCmd) {\n console.warn(\"Ignoring unexpected command response. Already timed out, or not for us?\", msg); // prettier-ignore\n return;\n }\n\n if (\"event\" in msg) {\n // Delta's are handled separately\n if (msg.event === \"delta\") {\n const { id, delta } = msg;\n enqueueDelta(id, delta);\n } else {\n batch(() => {\n flushPendingDeltas();\n\n switch (msg.event) {\n case \"cmd-failed\":\n pendingCmd?.reject(new Error(msg.error));\n break;\n\n case \"settle\": {\n context.messagesStore.upsert(msg.message);\n break;\n }\n\n case \"warning\":\n console.warn(msg.message);\n break;\n\n case \"error\":\n console.error(msg.error);\n break;\n\n case \"rebooted\":\n context.messagesStore.failAllPending();\n break;\n\n case \"sync\":\n // Delete any resources?\n for (const m of msg[\"-messages\"] ?? []) {\n context.messagesStore.remove(m.chatId, m.id);\n }\n for (const chatId of msg[\"-chats\"] ?? []) {\n context.chatsStore.markDeleted(chatId);\n context.messagesStore.removeByChatId(chatId);\n }\n for (const chatId of msg.clear ?? 
[]) {\n context.messagesStore.removeByChatId(chatId);\n }\n\n // Add any new resources?\n if (msg.chats) {\n context.chatsStore.upsertMany(msg.chats);\n }\n if (msg.messages) {\n context.messagesStore.upsertMany(msg.messages);\n }\n break;\n\n default:\n return assertNever(msg, \"Unhandled case\");\n }\n });\n }\n } else {\n switch (msg.cmd) {\n case \"get-chats\":\n context.chatsStore.upsertMany(msg.chats);\n break;\n\n case \"get-or-create-chat\":\n context.chatsStore.upsert(msg.chat);\n break;\n\n case \"delete-chat\":\n context.chatsStore.markDeleted(msg.chatId);\n context.messagesStore.removeByChatId(msg.chatId);\n break;\n\n case \"get-message-tree\":\n context.chatsStore.upsert(msg.chat);\n context.messagesStore.upsertMany(msg.messages);\n break;\n\n case \"delete-message\":\n context.messagesStore.remove(msg.chatId, msg.messageId);\n break;\n\n case \"clear-chat\":\n context.messagesStore.removeByChatId(msg.chatId);\n break;\n\n case \"ask-in-chat\":\n if (msg.sourceMessage) {\n // This field will only be returned if the ask-in-chat command\n // created a new source message\n context.messagesStore.upsert(msg.sourceMessage);\n }\n context.messagesStore.upsert(msg.targetMessage);\n break;\n\n case \"abort-ai\":\n // TODO Not handled yet\n break;\n\n case \"set-tool-result\":\n if (msg.ok) {\n context.messagesStore.upsert(msg.message);\n }\n break;\n\n default:\n return assertNever(msg, \"Unhandled case\");\n }\n }\n\n // After handling the side-effects above, we can resolve the promise\n pendingCmd?.resolve(msg);\n }\n\n managedSocket.events.onMessage.subscribe(handleServerMessage);\n managedSocket.events.statusDidChange.subscribe(onStatusDidChange);\n managedSocket.events.statusDidChange.subscribe(handleConnectionLossEvent);\n managedSocket.events.didConnect.subscribe(onDidConnect);\n managedSocket.events.didDisconnect.subscribe(onDidDisconnect);\n managedSocket.events.onConnectionError.subscribe(({ message, code }) => {\n //const type = \"AI_CONNECTION_ERROR\";\n // const err = new LiveblocksError(message, { type, code });\n if (process.env.NODE_ENV !== \"production\") {\n console.error(\n `Connection to websocket server closed. 
Reason: ${message} (code: ${code}).`\n );\n }\n });\n\n function connectInitially() {\n if (managedSocket.getStatus() === \"initial\") {\n managedSocket.connect();\n }\n }\n\n async function sendClientMsgWithResponse<T extends ServerAiMsg>(\n msg: DistributiveOmit<ClientAiMsg, \"cmdId\">\n ): Promise<T> {\n connectInitially();\n if (managedSocket.getStatus() !== \"connected\") {\n await managedSocket.events.didConnect.waitUntil();\n }\n\n const { promise, resolve, reject } = Promise_withResolvers<ServerAiMsg>();\n\n // Automatically calls reject() when signal is aborted\n const abortSignal = AbortSignal.timeout(DEFAULT_REQUEST_TIMEOUT);\n abortSignal.addEventListener(\"abort\", () => reject(abortSignal.reason), {\n once: true,\n });\n\n const cmdId = nanoid(7) as CmdId;\n context.pendingCmds.set(cmdId, { resolve, reject });\n\n sendClientMsg({ ...msg, cmdId });\n return (\n (promise as Promise<T>)\n .finally(() => {\n // Always cleanup\n context.pendingCmds.delete(cmdId);\n })\n // Make sure these promises don't go uncaught (in contrast to the\n // promise instance we return to the caller)\n .catch((err: Error) => {\n console.error(err.message);\n throw err;\n })\n );\n }\n\n function sendClientMsg(msg: ClientAiMsg) {\n managedSocket.send(\n JSON.stringify({\n ...msg,\n })\n );\n }\n\n function getChats(options: GetChatsOptions = {}) {\n return sendClientMsgWithResponse<GetChatsResponse>({\n cmd: \"get-chats\",\n cursor: options.cursor,\n query: options.query,\n });\n }\n\n function getOrCreateChat(id: string, options?: CreateChatOptions) {\n return sendClientMsgWithResponse<GetOrCreateChatResponse>({\n cmd: \"get-or-create-chat\",\n id,\n options,\n });\n }\n\n function getMessageTree(chatId: string) {\n return sendClientMsgWithResponse<GetMessageTreeResponse>({\n cmd: \"get-message-tree\",\n chatId,\n });\n }\n\n async function setToolResult(\n chatId: string,\n messageId: MessageId,\n invocationId: string,\n result: ToolResultResponse,\n options?: SetToolResultOptions\n ): Promise<void> {\n const knowledge = context.knowledgeStore.getKnowledgeForChat(chatId);\n const tools = context.toolsStore.getToolDescriptions(chatId);\n\n const resp: SetToolResultResponse = await sendClientMsgWithResponse({\n cmd: \"set-tool-result\",\n chatId,\n messageId,\n invocationId,\n result,\n generationOptions: {\n copilotId: options?.copilotId,\n stream: options?.stream,\n timeout: options?.timeout,\n\n // Knowledge and tools aren't coming from the options, but retrieved\n // from the global context\n knowledge: knowledge.length > 0 ? knowledge : undefined,\n tools: tools.length > 0 ? tools : undefined,\n },\n });\n if (resp.ok) {\n messagesStore.markMine(resp.message.id);\n }\n }\n\n // Abort all my auto-executing messages when the page is unloaded\n function handleBeforeUnload() {\n for (const messageId of context.messagesStore.getAutoExecutingMessageIds()) {\n sendClientMsgWithResponse({ cmd: \"abort-ai\", messageId }).catch(() => {\n // Ignore errors during page unload\n });\n }\n }\n\n const win = typeof window !== \"undefined\" ? 
window : undefined;\n win?.addEventListener(\"beforeunload\", handleBeforeUnload, { once: true });\n\n return Object.defineProperty(\n {\n [kInternal]: {\n context,\n },\n\n connectInitially,\n // reconnect: () => managedSocket.reconnect(),\n disconnect: () => managedSocket.disconnect(),\n\n getChats,\n getOrCreateChat,\n\n deleteChat: (chatId: string) => {\n return sendClientMsgWithResponse({ cmd: \"delete-chat\", chatId });\n },\n\n getMessageTree,\n\n deleteMessage: (chatId: string, messageId: MessageId) =>\n sendClientMsgWithResponse({ cmd: \"delete-message\", chatId, messageId }),\n clearChat: (chatId: string) =>\n sendClientMsgWithResponse({ cmd: \"clear-chat\", chatId }),\n\n askUserMessageInChat: async (\n chatId: string,\n userMessage:\n | MessageId\n | {\n id: MessageId;\n parentMessageId: MessageId | null;\n content: AiUserContentPart[];\n },\n targetMessageId: MessageId,\n options?: AskUserMessageInChatOptions\n ): Promise<AskInChatResponse> => {\n const knowledge = context.knowledgeStore.getKnowledgeForChat(chatId);\n const requestKnowledge = options?.knowledge || [];\n const combinedKnowledge = [...knowledge, ...requestKnowledge];\n const tools = context.toolsStore.getToolDescriptions(chatId);\n\n messagesStore.markMine(targetMessageId);\n const resp: AskInChatResponse = await sendClientMsgWithResponse({\n cmd: \"ask-in-chat\",\n chatId,\n sourceMessage: userMessage,\n targetMessageId,\n generationOptions: {\n copilotId: options?.copilotId,\n stream: options?.stream,\n timeout: options?.timeout,\n\n // Combine global knowledge with request-specific knowledge\n knowledge:\n combinedKnowledge.length > 0 ? combinedKnowledge : undefined,\n tools: tools.length > 0 ? tools : undefined,\n },\n });\n return resp;\n },\n\n abort: (messageId: MessageId) =>\n sendClientMsgWithResponse({ cmd: \"abort-ai\", messageId }),\n\n setToolResult,\n\n getStatus: () => managedSocket.getStatus(),\n\n signals: {\n getChatMessagesForBranchΣ:\n context.messagesStore.getChatMessagesForBranchΣ,\n getToolΣ: context.toolsStore.getToolΣ,\n statusΣ,\n },\n\n getChatById: context.chatsStore.getChatById,\n queryChats: context.chatsStore.findMany,\n getLastUsedCopilotId: context.messagesStore.getLastUsedCopilotId,\n registerKnowledgeLayer: (uniqueLayerId: string, chatId?: string) => {\n const stack = context.knowledgeStore.getKnowledgeStack(chatId);\n const layerKey = stack.registerLayer(uniqueLayerId);\n const deregister = () => stack.deregisterLayer(layerKey);\n return {\n layerKey,\n deregister,\n };\n },\n updateKnowledge: (\n layerKey: LayerKey,\n data: AiKnowledgeSource,\n key?: string,\n chatId?: string\n ) => {\n context.knowledgeStore\n .getKnowledgeStack(chatId)\n .updateKnowledge(layerKey, key ?? nanoid(), data);\n },\n\n registerTool: context.toolsStore.registerTool,\n } satisfies Ai,\n kInternal,\n { enumerable: false }\n );\n}\n\nexport function makeCreateSocketDelegateForAi(\n baseUrl: string,\n WebSocketPolyfill?: IWebSocket\n) {\n return (authValue: AuthValue): IWebSocketInstance => {\n const ws: IWebSocket | undefined =\n WebSocketPolyfill ??\n (typeof WebSocket === \"undefined\" ? undefined : WebSocket);\n\n if (ws === undefined) {\n throw new StopRetrying(\n \"To use Liveblocks client in a non-DOM environment, you need to provide a WebSocket polyfill.\"\n );\n }\n\n const url = new URL(baseUrl);\n url.protocol = url.protocol === \"http:\" ? 
\"ws\" : \"wss\";\n url.pathname = \"/ai/v7\";\n // TODO: don't allow public key to do this\n if (authValue.type === \"secret\") {\n url.searchParams.set(\"tok\", authValue.token.raw);\n } else if (authValue.type === \"public\") {\n throw new Error(\"Public key not supported with AI Copilots\");\n } else {\n return assertNever(authValue, \"Unhandled case\");\n }\n url.searchParams.set(\"version\", PKG_VERSION || \"dev\");\n return new ws(url.toString());\n };\n}\n\n/**\n * Finds the last item in the content array that matches the type and the given\n * keyFn. If found, replaces that item with newItem in the content array. If\n * not found, appends newItem to the content array.\n * Mutates the content array in-place.\n */\nfunction replaceOrAppend<const T extends AiAssistantContentPart>(\n content: AiAssistantContentPart[],\n newItem: T,\n keyFn: (item: T) => string,\n now: ISODateString\n): void {\n const existingIndex = findLastIndex(\n content,\n (item) => item.type === newItem.type && keyFn(item as T) === keyFn(newItem)\n );\n\n if (existingIndex > -1) {\n // Replace the existing one\n content[existingIndex] = newItem;\n } else {\n // No existing one found, just append\n closePart(content[content.length - 1], now);\n content.push(newItem);\n }\n}\n\n/**\n * Given a part, mutates it in-place by setting its endedAt timestamp.\n */\nfunction closePart(\n prevPart: AiAssistantContentPart | undefined,\n endedAt: ISODateString\n) {\n // Currently, only reasoning parts have an endedAt timestamp\n if (prevPart?.type === \"reasoning\") {\n prevPart.endedAt ??= endedAt;\n }\n}\n\nexport function patchContentWithDelta(\n content: AiAssistantContentPart[],\n delta: AiAssistantDeltaUpdate | null\n): void {\n if (delta === null)\n // Nothing to do\n return;\n\n // Filter out sources parts from the content array to ensure we only process the other parts and handle sources separately\n const parts: AiAssistantContentPart[] = content.filter(\n (part) => part.type !== \"sources\"\n );\n\n // Collect all sources from the content array and flatten them so that we can add them to the content array at the end\n const sources = content\n .filter((part) => part.type === \"sources\")\n .flatMap((part) => part.sources);\n\n const now = new Date().toISOString() as ISODateString;\n const lastPart = parts[parts.length - 1];\n\n // Otherwise, append a new part type to the array, which we can start\n // writing into\n switch (delta.type) {\n case \"text-delta\":\n if (lastPart?.type === \"text\") {\n lastPart.text += delta.textDelta;\n } else {\n closePart(lastPart, now);\n parts.push({ type: \"text\", text: delta.textDelta });\n }\n break;\n\n case \"reasoning-delta\":\n if (lastPart?.type === \"reasoning\") {\n lastPart.text += delta.textDelta;\n } else {\n closePart(lastPart, now);\n parts.push({\n type: \"reasoning\",\n text: delta.textDelta,\n startedAt: now,\n });\n }\n break;\n\n case \"tool-stream\": {\n const toolInvocation = createReceivingToolInvocation(\n delta.invocationId,\n delta.name\n );\n parts.push(toolInvocation);\n break;\n }\n\n case \"tool-delta\": {\n // Take the last part, expect it to be a tool invocation in receiving\n // stage. If not, ignore this delta. 
If it is, append the delta to the\n // parser\n if (\n lastPart?.type === \"tool-invocation\" &&\n lastPart.stage === \"receiving\"\n ) {\n lastPart.__appendDelta?.(delta.delta);\n }\n // Otherwise ignore the delta - it's out of order or unexpected\n break;\n }\n\n case \"tool-invocation\":\n replaceOrAppend(parts, delta, (x) => x.invocationId, now);\n break;\n\n case \"retrieval\":\n replaceOrAppend(parts, delta, (x) => x.id, now);\n break;\n\n case \"source\": {\n sources.push(delta);\n break;\n }\n\n default:\n return assertNever(delta, \"Unhandled case\");\n }\n\n // Add the sources part to the parts array at the end if there are any sources\n if (sources.length > 0) {\n parts.push({\n type: \"sources\",\n sources,\n });\n }\n\n // Replace the content array with the parts array\n content.length = 0;\n content.push(...parts);\n}\n\n/**\n * Creates a receiving tool invocation part for testing purposes.\n * This helper eliminates the need to manually create fake tool invocation objects\n * and provides a clean API for tests.\n */\nexport function createReceivingToolInvocation(\n invocationId: string,\n name: string,\n partialArgsText: string = \"\"\n): AiReceivingToolInvocationPart {\n const parser = new IncrementalJsonParser(partialArgsText); // FRONTEND only\n return {\n type: \"tool-invocation\",\n stage: \"receiving\",\n invocationId,\n name,\n // --- Alternative implementation for FRONTEND only ------------------------\n get partialArgsText(): string { return parser.source; }, // prettier-ignore\n get partialArgs(): JsonObject { return parser.json; }, // prettier-ignore\n __appendDelta(delta: string) { parser.append(delta); }, // prettier-ignore\n // ------------------------------------------------------------------------\n } satisfies AiReceivingToolInvocationPart;\n}\n","import { isPlainObject } from \"../lib/guards\";\nimport type { Json } from \"../lib/Json\";\nimport { b64decode, tryParseJson } from \"../lib/utils\";\nimport type { IUserInfo } from \"./BaseUserMeta\";\n\nexport enum Permission {\n Read = \"room:read\",\n Write = \"room:write\",\n PresenceWrite = \"room:presence:write\",\n CommentsWrite = \"comments:write\",\n CommentsRead = \"comments:read\",\n}\n\nexport type LiveblocksPermissions = Record<string, Permission[]>;\n\nexport enum TokenKind {\n ACCESS_TOKEN = \"acc\",\n ID_TOKEN = \"id\",\n}\n\n/**\n * Infers from the given scopes whether the user can write the document (e.g.\n * Storage and/or YDoc).\n */\nexport function canWriteStorage(scopes: readonly string[]): boolean {\n return scopes.includes(Permission.Write);\n}\n\nexport function canComment(scopes: readonly string[]): boolean {\n return (\n scopes.includes(Permission.CommentsWrite) ||\n scopes.includes(Permission.Write)\n );\n}\n\ntype JwtMeta = {\n iat: number;\n exp: number;\n};\n\n/**\n * Access Token.\n */\nexport type AccessToken = {\n k: TokenKind.ACCESS_TOKEN;\n pid: string; // project id\n uid: string; // user id\n perms: LiveblocksPermissions; // permissions\n ui?: IUserInfo; // user info\n} & JwtMeta;\n\n/**\n * New authorization ID Token.\n */\nexport type IDToken = {\n k: TokenKind.ID_TOKEN;\n pid: string; // project id\n uid: string; // user id\n gids?: string[]; // group ids\n ui?: IUserInfo; // user info\n} & JwtMeta;\n\nexport type AuthToken = AccessToken | IDToken;\n\n// The \"rich\" token is data we obtain by parsing the JWT token and making all\n// metadata on it accessible. 
It's done right after hitting the backend, but\n// before the promise will get returned, so it's an inherent part of the\n// authentication step.\nexport type ParsedAuthToken = {\n readonly raw: string; // The raw JWT value, unchanged\n readonly parsed: AuthToken; // Rich data on the JWT value\n};\n\nfunction isValidAuthTokenPayload(data: Json): data is AccessToken | IDToken {\n return (\n isPlainObject(data) &&\n (data.k === TokenKind.ACCESS_TOKEN || data.k === TokenKind.ID_TOKEN)\n );\n}\n\n/**\n * Parses a raw JWT token string, which allows reading the metadata/payload of\n * the token.\n *\n * NOTE: Doesn't do any validation, so always treat the metadata as other user\n * input: never trust these values for anything important.\n */\nexport function parseAuthToken(rawTokenString: string): ParsedAuthToken {\n const tokenParts = rawTokenString.split(\".\");\n if (tokenParts.length !== 3) {\n throw new Error(\"Authentication error: invalid JWT token\");\n }\n\n const payload = tryParseJson(b64decode(tokenParts[1]));\n if (!(payload && isValidAuthTokenPayload(payload))) {\n throw new Error(\n \"Authentication error: expected a valid token but did not get one. Hint: if you are using a callback, ensure the room is passed when creating the token. For more information: https://liveblocks.io/docs/api-reference/liveblocks-client#createClientCallback\"\n );\n }\n\n return {\n raw: rawTokenString,\n parsed: payload,\n };\n}\n","import { StopRetrying } from \"./connection\";\nimport { isPlainObject } from \"./lib/guards\";\nimport type { Json } from \"./lib/Json\";\nimport type { Relax } from \"./lib/Relax\";\nimport { stringifyOrLog as stringify } from \"./lib/stringify\";\nimport type {\n Authentication,\n CustomAuthenticationResult,\n} from \"./protocol/Authentication\";\nimport type { AuthToken, ParsedAuthToken } from \"./protocol/AuthToken\";\nimport { parseAuthToken, Permission, TokenKind } from \"./protocol/AuthToken\";\nimport type { Polyfills } from \"./room\";\n\nexport type AuthValue =\n | { type: \"secret\"; token: ParsedAuthToken }\n | { type: \"public\"; publicApiKey: string };\n\nexport type RequestedScope = \"room:read\" | \"comments:read\";\n\nexport type AuthManager = {\n reset(): void;\n getAuthValue(requestOptions: {\n requestedScope: RequestedScope;\n roomId?: string;\n }): Promise<AuthValue>;\n};\n\ntype AuthEndpoint =\n | string\n | ((room?: string) => Promise<CustomAuthenticationResult>);\n\nexport type AuthenticationOptions = {\n polyfills?: Polyfills;\n} & Relax<{ publicApiKey: string } | { authEndpoint: AuthEndpoint }>;\n\nconst NON_RETRY_STATUS_CODES = [\n 400, 401, 403, 404, 405, 410, 412, 414, 422, 431, 451,\n];\n\nexport function createAuthManager(\n authOptions: AuthenticationOptions,\n onAuthenticate?: (token: AuthToken) => void\n): AuthManager {\n const authentication = prepareAuthentication(authOptions);\n\n const seenTokens: Set<string> = new Set();\n\n const tokens: ParsedAuthToken[] = [];\n const expiryTimes: number[] = []; // Supposed to always contain the same number of elements as `tokens`\n\n const requestPromises = new Map<string, Promise<ParsedAuthToken>>();\n\n function reset() {\n seenTokens.clear();\n tokens.length = 0;\n expiryTimes.length = 0;\n requestPromises.clear();\n }\n\n function hasCorrespondingScopes(\n requestedScope: RequestedScope,\n scopes: Permission[]\n ) {\n if (requestedScope === \"comments:read\") {\n return (\n scopes.includes(Permission.CommentsRead) ||\n scopes.includes(Permission.CommentsWrite) ||\n 
scopes.includes(Permission.Read) ||\n scopes.includes(Permission.Write)\n );\n } else if (requestedScope === \"room:read\") {\n return (\n scopes.includes(Permission.Read) || scopes.includes(Permission.Write)\n );\n }\n\n return false;\n }\n\n function getCachedToken(requestOptions: {\n requestedScope: RequestedScope;\n roomId?: string;\n }): ParsedAuthToken | undefined {\n const now = Math.ceil(Date.now() / 1000);\n\n for (let i = tokens.length - 1; i >= 0; i--) {\n const token = tokens[i];\n const expiresAt = expiryTimes[i];\n\n // If this token is expired, remove it from cache, as if it never existed\n // in the first place\n if (expiresAt <= now) {\n tokens.splice(i, 1);\n expiryTimes.splice(i, 1);\n continue;\n }\n\n if (token.parsed.k === TokenKind.ID_TOKEN) {\n // When ID token method is used, only one token per user should be used and cached at the same time.\n return token;\n } else if (token.parsed.k === TokenKind.ACCESS_TOKEN) {\n // In this version, we accept access tokens with zero permission when issuing token for resources outside a room.\n if (\n !requestOptions.roomId &&\n Object.entries(token.parsed.perms).length === 0\n ) {\n return token;\n }\n\n for (const [resource, scopes] of Object.entries(token.parsed.perms)) {\n // If the requester didn't pass a roomId,\n // it means they need the token to access the user's resources (inbox notifications for example).\n // We return any access token that contains a wildcard for the requested scope.\n if (!requestOptions.roomId) {\n if (\n resource.includes(\"*\") &&\n hasCorrespondingScopes(requestOptions.requestedScope, scopes)\n ) {\n return token;\n }\n } else if (\n (resource.includes(\"*\") &&\n requestOptions.roomId.startsWith(resource.replace(\"*\", \"\"))) ||\n (requestOptions.roomId === resource &&\n hasCorrespondingScopes(requestOptions.requestedScope, scopes))\n ) {\n return token;\n }\n }\n }\n }\n\n return undefined;\n }\n\n async function makeAuthRequest(options: {\n requestedScope: RequestedScope;\n roomId?: string;\n }): Promise<ParsedAuthToken> {\n const fetcher =\n authOptions.polyfills?.fetch ??\n (typeof window === \"undefined\" ? undefined : window.fetch);\n\n if (authentication.type === \"private\") {\n if (fetcher === undefined) {\n throw new StopRetrying(\n \"To use Liveblocks client in a non-DOM environment with a url as auth endpoint, you need to provide a fetch polyfill.\"\n );\n }\n\n const response = await fetchAuthEndpoint(fetcher, authentication.url, {\n room: options.roomId,\n });\n const parsed = parseAuthToken(response.token);\n\n if (seenTokens.has(parsed.raw)) {\n throw new StopRetrying(\n \"The same Liveblocks auth token was issued from the backend before. Caching Liveblocks tokens is not supported.\"\n );\n }\n\n onAuthenticate?.(parsed.parsed);\n return parsed;\n }\n\n if (authentication.type === \"custom\") {\n const response = await authentication.callback(options.roomId);\n if (response && typeof response === \"object\") {\n if (typeof response.token === \"string\") {\n const parsed = parseAuthToken(response.token);\n\n onAuthenticate?.(parsed.parsed);\n return parsed;\n } else if (typeof response.error === \"string\") {\n const reason = `Authentication failed: ${\n \"reason\" in response && typeof response.reason === \"string\"\n ? 
response.reason\n : \"Forbidden\"\n }`;\n\n // istanbul ignore else\n if (response.error === \"forbidden\") {\n throw new StopRetrying(reason);\n } else {\n throw new Error(reason);\n }\n }\n }\n\n throw new Error(\n 'Your authentication callback function should return a token, but it did not. Hint: the return value should look like: { token: \"...\" }'\n );\n }\n\n // istanbul ignore next\n throw new Error(\n \"Unexpected authentication type. Must be private or custom.\"\n );\n }\n\n async function getAuthValue(requestOptions: {\n requestedScope: RequestedScope;\n roomId?: string;\n }): Promise<AuthValue> {\n if (authentication.type === \"public\") {\n return { type: \"public\", publicApiKey: authentication.publicApiKey };\n }\n\n const cachedToken = getCachedToken(requestOptions);\n if (cachedToken !== undefined) {\n return { type: \"secret\", token: cachedToken };\n }\n\n let currentPromise;\n if (requestOptions.roomId) {\n currentPromise = requestPromises.get(requestOptions.roomId);\n if (currentPromise === undefined) {\n currentPromise = makeAuthRequest(requestOptions);\n requestPromises.set(requestOptions.roomId, currentPromise);\n }\n } else {\n currentPromise = requestPromises.get(\"liveblocks-user-token\");\n if (currentPromise === undefined) {\n currentPromise = makeAuthRequest(requestOptions);\n requestPromises.set(\"liveblocks-user-token\", currentPromise);\n }\n }\n\n try {\n const token = await currentPromise;\n // Translate \"server timestamps\" to \"local timestamps\" in case clocks aren't in sync\n const BUFFER = 30; // Expire tokens 30 seconds sooner than they have to\n const expiresAt =\n Math.floor(Date.now() / 1000) +\n (token.parsed.exp - token.parsed.iat) -\n BUFFER;\n\n seenTokens.add(token.raw);\n tokens.push(token);\n expiryTimes.push(expiresAt);\n\n return { type: \"secret\", token };\n } finally {\n if (requestOptions.roomId) {\n requestPromises.delete(requestOptions.roomId);\n } else {\n requestPromises.delete(\"liveblocks-user-token\");\n }\n }\n }\n\n return {\n reset,\n getAuthValue,\n };\n}\n\nfunction prepareAuthentication(\n authOptions: AuthenticationOptions\n): Authentication {\n const { publicApiKey, authEndpoint } = authOptions;\n\n if (authEndpoint !== undefined && publicApiKey !== undefined) {\n throw new Error(\n \"You cannot simultaneously use `publicApiKey` and `authEndpoint` options. Please pick one and leave the other option unspecified. For more information: https://liveblocks.io/docs/api-reference/liveblocks-client#createClient\"\n );\n }\n\n if (typeof publicApiKey === \"string\") {\n if (publicApiKey.startsWith(\"sk_\")) {\n throw new Error(\n \"Invalid `publicApiKey` option. The value you passed is a secret key, which should not be used from the client. Please only ever pass a public key here. For more information: https://liveblocks.io/docs/api-reference/liveblocks-client#createClientPublicKey\"\n );\n } else if (!publicApiKey.startsWith(\"pk_\")) {\n throw new Error(\n \"Invalid key. Please use the public key format: pk_<public key>. For more information: https://liveblocks.io/docs/api-reference/liveblocks-client#createClientPublicKey\"\n );\n }\n return {\n type: \"public\",\n publicApiKey,\n };\n }\n\n if (typeof authEndpoint === \"string\") {\n return {\n type: \"private\",\n url: authEndpoint,\n };\n } else if (typeof authEndpoint === \"function\") {\n return {\n type: \"custom\",\n callback: authEndpoint,\n };\n } else if (authEndpoint !== undefined) {\n throw new Error(\n \"The `authEndpoint` option must be a string or a function. 
For more information: https://liveblocks.io/docs/api-reference/liveblocks-client#createClientAuthEndpoint\"\n );\n }\n\n throw new Error(\n \"Invalid Liveblocks client options. Please provide either a `publicApiKey` or `authEndpoint` option. They cannot both be empty. For more information: https://liveblocks.io/docs/api-reference/liveblocks-client#createClient\"\n );\n}\n\nasync function fetchAuthEndpoint(\n fetch: typeof window.fetch,\n endpoint: string,\n body: {\n room?: string;\n }\n): Promise<{ token: string }> {\n const res = await fetch(endpoint, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: stringify(body),\n });\n if (!res.ok) {\n const reason = `${\n (await res.text()).trim() || \"reason not provided in auth response\"\n } (${res.status} returned by POST ${endpoint})`;\n\n if (NON_RETRY_STATUS_CODES.includes(res.status)) {\n // Throw a special error instance, which the connection manager will\n // recognize and understand that retrying will have no effect\n throw new StopRetrying(`Unauthorized: ${reason}`);\n } else {\n throw new Error(`Failed to authenticate: ${reason}`);\n }\n }\n\n let data: Json;\n try {\n data = await (res.json() as Promise<Json>);\n } catch (er) {\n throw new Error(\n `Expected a JSON response when doing a POST request on \"${endpoint}\". ${String(\n er\n )}`\n );\n }\n\n if (!isPlainObject(data) || typeof data.token !== \"string\") {\n throw new Error(\n `Expected a JSON response of the form \\`{ token: \"...\" }\\` when doing a POST request on \"${endpoint}\", but got ${stringify(\n data\n )}`\n );\n }\n const { token } = data;\n return { token };\n}\n","export const DEFAULT_BASE_URL = \"https://api.liveblocks.io\";\n\nexport const MENTION_CHARACTER = \"@\";\n","import { makeEventSource } from \"../lib/EventSource\";\nimport type * as DevTools from \"./protocol\";\n\ntype SendToPanelOptions = {\n /**\n * We'll only want to send messages from the client to the panel if the panel\n * has shown interest in this. 
To allow message passing to the dev panel,\n * call allowMessagePassing().\n */\n force: boolean;\n};\n\nlet _bridgeActive = false;\nexport function activateBridge(allowed: boolean): void {\n _bridgeActive = allowed;\n}\n\nexport function sendToPanel(\n message: DevTools.ClientToPanelMessage,\n options?: SendToPanelOptions\n): void {\n // DevTools communication only happens on the client side\n // Define it as a no-op in production environments or when run outside of a browser context\n if (process.env.NODE_ENV === \"production\" || typeof window === \"undefined\") {\n return;\n }\n\n const fullMsg = {\n ...message,\n source: \"liveblocks-devtools-client\",\n };\n\n if (!(options?.force || _bridgeActive)) {\n /*\n // eslint-disable-next-line rulesdir/console-must-be-fancy\n console.log(\n \"%c[client → panel] %c%s\",\n \"color: green\",\n \"color: gray; font-weight: bold\",\n fullMsg.msg,\n \"[🚫 NOT sent!]\"\n );\n */\n return;\n }\n /*\n // eslint-disable-next-line rulesdir/console-must-be-fancy\n console.log(\n \"%c[client → panel] %c%s\",\n \"color: green\",\n \"color: green; font-weight: bold\",\n fullMsg.msg,\n fullMsg\n );*/\n window.postMessage(fullMsg, \"*\");\n}\n\nconst eventSource = makeEventSource<DevTools.FullPanelToClientMessage>();\n\n// Define it as a no-op in production environments or when run outside of a browser context\nif (process.env.NODE_ENV !== \"production\" && typeof window !== \"undefined\") {\n window.addEventListener(\"message\", (event: MessageEvent<unknown>) => {\n if (\n event.source === window &&\n (event.data as Record<string, unknown>)?.source ===\n \"liveblocks-devtools-panel\"\n ) {\n // console.log(\n // \"%c[client ← panel] %c%s\",\n // \"color: purple\",\n // \"color: purple; font-weight: bold\",\n // (event.data as Record<string, unknown>).msg,\n // event.data\n // );\n eventSource.notify(event.data as DevTools.FullPanelToClientMessage);\n } else {\n // Message not for us\n }\n });\n}\n\nexport const onMessageFromPanel = eventSource.observable;\n","import { kInternal } from \"../internal\";\nimport type { Json, JsonObject } from \"../lib/Json\";\nimport type { BaseUserMeta } from \"../protocol/BaseUserMeta\";\nimport type { UpdateYDocClientMsg } from \"../protocol/ClientMsg\";\nimport type { YDocUpdateServerMsg } from \"../protocol/ServerMsg\";\nimport type { OpaqueRoom, RoomEventMessage } from \"../room\";\nimport { PKG_VERSION } from \"../version\";\nimport { activateBridge, onMessageFromPanel, sendToPanel } from \"./bridge\";\n\nconst VERSION = PKG_VERSION || \"dev\";\nlet _devtoolsSetupHasRun = false;\n\n/**\n * Sends a wake up message to the devtools panel, if any such panel exists, and\n * listens for the initial connect message, which would be the trigger to start\n * emitting updates.\n *\n * Must be called before linkDevTools() can be used.\n *\n * Will only run once, even when called multiple times.\n */\nexport function setupDevTools(getAllRooms: () => string[]): void {\n // Define it as a no-op in production environments or when run outside of a browser context\n if (process.env.NODE_ENV === \"production\" || typeof window === \"undefined\") {\n return;\n }\n\n if (_devtoolsSetupHasRun) {\n // This setup code should only happen the first time\n return;\n }\n\n _devtoolsSetupHasRun = true;\n\n onMessageFromPanel.subscribe((msg) => {\n switch (msg.msg) {\n // When a devtool panel sends an explicit \"connect\" message back to this\n // live running client (in response to the \"wake-up-devtools\" message,\n // or when the devtool panel is 
opened for the first time), it means that it's okay to\n // start emitting messages.\n // Before this explicit acknowledgement, any call to sendToPanel() will\n // be a no-op.\n case \"connect\": {\n // Allows future sendToPanel() messages to go through\n activateBridge(true);\n\n // Emit an explicit \"room::available\" message for every known room at\n // this point. These can be used by the devpanel to subscribe to such\n // room's updates.\n for (const roomId of getAllRooms()) {\n sendToPanel({\n msg: \"room::available\",\n roomId,\n clientVersion: VERSION,\n });\n }\n\n break;\n }\n\n // TODO: Later on, we can support explicit disconnects, too\n // case \"disconnect\": {\n // // Make sendToPanel() no-ops again\n // activateBridge(false);\n // break;\n // }\n }\n });\n\n // Send initial wake up message, in case the devtool panel is already open!\n sendToPanel({ msg: \"wake-up-devtools\" }, { force: true });\n}\n\nconst unsubsByRoomId = new Map<string, (() => void)[]>();\n\nfunction stopSyncStream(roomId: string): void {\n const unsubs = unsubsByRoomId.get(roomId) ?? [];\n unsubsByRoomId.delete(roomId); // Pop it off\n\n for (const unsub of unsubs) {\n // Cancel all of the subscriptions to room updates that are synchronizing\n // partial state to the devtools panel\n unsub();\n }\n}\n\n/**\n * Starts, or restarts, the stream of sync messages for the given room. A sync\n * stream consists of an initial \"full sync\" message, followed by many\n * \"partial\" messages that happen whenever part of the room changes.\n */\nfunction startSyncStream(room: OpaqueRoom): void {\n stopSyncStream(room.id);\n\n // Sync the room ID instantly, as soon as we know it\n fullSync(room);\n\n unsubsByRoomId.set(room.id, [\n // When the connection status changes\n room.events.status.subscribe(() => partialSyncConnection(room)),\n\n // When storage initializes, send the update\n room.events.storageDidLoad.subscribeOnce(() => partialSyncStorage(room)),\n\n // Any time storage updates, send the new storage root\n room.events.storageBatch.subscribe(() => partialSyncStorage(room)),\n\n // Any time \"me\" or \"others\" updates, send the new values accordingly\n room.events.self.subscribe(() => partialSyncMe(room)),\n room.events.others.subscribe(() => partialSyncOthers(room)),\n\n // Any time ydoc is updated, forward the update\n room.events.ydoc.subscribe((update) => syncYdocUpdate(room, update)),\n\n // Any time a custom room event is received, forward it\n room.events.customEvent.subscribe((eventData) =>\n forwardEvent(room, eventData)\n ),\n ]);\n}\n\nfunction syncYdocUpdate(\n room: OpaqueRoom,\n update: YDocUpdateServerMsg | UpdateYDocClientMsg\n) {\n sendToPanel({\n msg: \"room::sync::ydoc\",\n roomId: room.id,\n update,\n });\n}\n\nconst loadedAt = Date.now();\nlet eventCounter = 0;\n\nfunction nextEventId() {\n return `event-${loadedAt}-${eventCounter++}`;\n}\n\nfunction forwardEvent(\n room: OpaqueRoom,\n eventData: RoomEventMessage<JsonObject, BaseUserMeta, Json>\n) {\n sendToPanel({\n msg: \"room::events::custom-event\",\n roomId: room.id,\n event: {\n type: \"CustomEvent\",\n id: nextEventId(),\n key: \"Event\",\n connectionId: eventData.connectionId,\n payload: eventData.event,\n },\n });\n}\n\nfunction partialSyncConnection(room: OpaqueRoom) {\n sendToPanel({\n msg: \"room::sync::partial\",\n roomId: room.id,\n status: room.getStatus(),\n });\n}\n\nfunction partialSyncStorage(room: OpaqueRoom) {\n const root = room.getStorageSnapshot();\n if (root) {\n sendToPanel({\n msg: \"room::sync::partial\",\n 
roomId: room.id,\n storage: root.toTreeNode(\"root\").payload,\n });\n }\n}\n\nfunction partialSyncMe(room: OpaqueRoom) {\n const me = room[kInternal].getSelf_forDevTools();\n if (me) {\n sendToPanel({\n msg: \"room::sync::partial\",\n roomId: room.id,\n me,\n });\n }\n}\n\nfunction partialSyncOthers(room: OpaqueRoom) {\n // Any time others updates, send the new storage root to the dev panel\n const others = room[kInternal].getOthers_forDevTools();\n if (others) {\n sendToPanel({\n msg: \"room::sync::partial\",\n roomId: room.id,\n others,\n });\n }\n}\n\nfunction fullSync(room: OpaqueRoom) {\n const root = room.getStorageSnapshot();\n const me = room[kInternal].getSelf_forDevTools();\n const others = room[kInternal].getOthers_forDevTools();\n // Because the room doesn't have access to the YJS doc, we must tell it to go get the full doc\n // sending an empty vector will return the whole document and then devtools will be up to date\n room.fetchYDoc(\"\");\n sendToPanel({\n msg: \"room::sync::full\",\n roomId: room.id,\n status: room.getStatus(),\n storage: root?.toTreeNode(\"root\").payload ?? null,\n me,\n others,\n });\n}\n\n// Currently registered \"channel\" listeners, waiting for \"room::subscribe\" or\n// \"room::unsubscribe\" messages coming from the devtools panel\nconst roomChannelListeners = new Map<string, () => void>();\n\nfunction stopRoomChannelListener(roomId: string) {\n const listener = roomChannelListeners.get(roomId);\n roomChannelListeners.delete(roomId);\n if (listener) {\n listener();\n }\n}\n\n/**\n * Publicly announce to the devtool panel that a new room is available.\n */\nexport function linkDevTools(roomId: string, room: OpaqueRoom): void {\n // Define it as a no-op in production environments or when run outside of a browser context\n if (process.env.NODE_ENV === \"production\" || typeof window === \"undefined\") {\n return;\n }\n sendToPanel({ msg: \"room::available\", roomId, clientVersion: VERSION });\n\n // Before adding a new listener, stop all active listeners, so there is only\n // ever going to be one listener per room \"channel\"\n stopRoomChannelListener(roomId);\n roomChannelListeners.set(\n roomId,\n\n // Returns the unsubscribe callback, that we store in the\n // roomChannelListeners registry\n onMessageFromPanel.subscribe((msg) => {\n switch (msg.msg) {\n // Sent by the devtool panel when it wants to receive the sync stream\n // for a room\n case \"room::subscribe\": {\n // Only act on this message if it's intended for this room\n if (msg.roomId === roomId) {\n startSyncStream(room);\n }\n break;\n }\n\n case \"room::unsubscribe\": {\n // Only act on this message if it's intended for this room\n if (msg.roomId === roomId) {\n stopSyncStream(roomId);\n }\n break;\n }\n }\n })\n );\n}\n\nexport function unlinkDevTools(roomId: string): void {\n // Define it as a no-op in production environments or when run outside of a browser context\n if (process.env.NODE_ENV === \"production\" || typeof window === \"undefined\") {\n return;\n }\n\n // Immediately stop the sync stream of room updates to the dev panel\n stopSyncStream(roomId);\n\n stopRoomChannelListener(roomId);\n\n // Inform dev panel that this room is no longer available\n sendToPanel({\n msg: \"room::unavailable\",\n roomId,\n });\n}\n","import * as console from \"./fancy-console\";\n\n// Keeps a set of messages in memory that it has warned about\n// already. 
There will be only one message in the console, no\n// matter how often it gets called.\nconst _emittedWarnings: Set<string> = new Set();\n\n/**\n * Emit a warning only once.\n *\n * Only has effect in dev mode. In production, this is a no-op.\n */\n// istanbul ignore next\nexport function warnOnce(message: string, key = message): void {\n if (process.env.NODE_ENV !== \"production\") {\n if (!_emittedWarnings.has(key)) {\n _emittedWarnings.add(key);\n console.warn(message);\n }\n }\n}\n\n/**\n * Emit a warning only once if a condition is met.\n *\n * Only has effect in dev mode. In production, this is a no-op.\n */\n// istanbul ignore next\nexport function warnOnceIf(\n condition: boolean | (() => boolean),\n message: string,\n key = message\n): void {\n if (typeof condition === \"function\" ? condition() : condition) {\n warnOnce(message, key);\n }\n}\n","import type { DAD } from \"../globals/augmentation\";\nimport * as console from \"../lib/fancy-console\";\nimport { create, entries, keys, values } from \"../lib/utils\";\n\n/**\n * Pre-defined notification channels support list.\n */\nexport type NotificationChannel = \"email\" | \"slack\" | \"teams\" | \"webPush\";\n\n/**\n * `K` represents custom notification kinds\n * defined in the augmentation `ActivitiesData` (e.g `liveblocks.config.ts`).\n * It means the type `NotificationKind` will be shaped like:\n * thread | textMention | $customKind1 | $customKind2 | ...\n */\nexport type NotificationKind<K extends keyof DAD = keyof DAD> =\n | \"thread\"\n | \"textMention\"\n | K;\n\n/**\n * A notification channel settings is a set of notification kinds.\n * One setting can have multiple kinds (+ augmentation)\n */\nexport type NotificationChannelSettings = {\n [K in NotificationKind]: boolean;\n};\n\n/**\n * @private\n *\n * Base definition of notification settings.\n * Plain means it's a simple object coming from the remote backend.\n *\n * It's the raw settings object where somme channels cannot exists\n * because there are no notification kinds enabled on the dashboard.\n * And this object isn't yet proxied by the creator factory `createNotificationSettings`.\n */\nexport type NotificationSettingsPlain = {\n [C in NotificationChannel]?: NotificationChannelSettings;\n};\n\n/**\n * @internal\n *\n * Symbol to branch plain value of notification settings\n * inside the NotificationSettings object.\n */\nconst kPlain = Symbol(\"notification-settings-plain\");\n\n/**\n * @internal\n * Proxied `NotificationSettingsPlain` object.\n */\ntype ProxiedNotificationSettings = NotificationSettingsPlain;\n\n/**\n * Notification settings.\n * One channel for one set of settings.\n */\nexport type NotificationSettings = {\n [C in NotificationChannel]: NotificationChannelSettings | null;\n};\n\n/**\n * It creates a deep partial specific for `NotificationSettings`\n * to offer a nice DX when updating the settings (e.g not being forced to define every keys)\n * and at the same the some preserver the augmentation for custom kinds (e.g `liveblocks.config.ts`).\n */\ntype DeepPartialWithAugmentation<T> = T extends object\n ? {\n [P in keyof T]?: T[P] extends { [K in NotificationKind]: boolean }\n ? 
Partial<T[P]> & { [K in keyof DAD]?: boolean }\n : DeepPartialWithAugmentation<T[P]>;\n }\n : T;\n\n/**\n * Partial notification settings with augmentation preserved gracefully.\n * It means you can update the settings without being forced to define every keys.\n * Useful when implementing update functions.\n */\nexport type PartialNotificationSettings =\n DeepPartialWithAugmentation<NotificationSettingsPlain>;\n\n/**\n * @private\n *\n * Creates a `NotificationSettings` object with the given initial plain settings.\n * It defines a getter for each channel to access the settings and returns `null` with an error log\n * in case the required channel isn't enabled in the dashboard.\n *\n * You can see this function as `Proxy` like around `NotificationSettingsPlain` type.\n * We can't predict what will be enabled on the dashboard or not, so it's important\n * provide a good DX to developers by returning `null` completed by an error log\n * when they try to access a channel that isn't enabled in the dashboard.\n */\nexport function createNotificationSettings(\n plain: NotificationSettingsPlain\n): NotificationSettings {\n const channels: NotificationChannel[] = [\n \"email\",\n \"slack\",\n \"teams\",\n \"webPush\",\n ];\n const descriptors: PropertyDescriptorMap &\n ThisType<NotificationSettings & { [kPlain]: ProxiedNotificationSettings }> =\n {\n [kPlain]: {\n value: plain,\n enumerable: false,\n },\n };\n\n for (const channel of channels) {\n descriptors[channel] = {\n enumerable: true,\n /**\n * In the TypeScript standard library definitions, the built-in interface for a property descriptor\n * does not include a specialized type for the “this” context in the getter or setter functions.\n * As a result, both the get and set methods implicitly have this: any.\n * The reason is that property descriptors in JavaScript are used across various objects with\n * no enforced shape for this. And so the standard library definitions have to remain as broad as possible\n * to support any valid JavaScript usage (e.g `Object.defineProperty`).\n *\n * So we can safely tells that this getter is typed as `this: NotificationSettings` because we're\n * creating a well known shaped object → `NotificationSettings`.\n */\n get(\n this: NotificationSettings & {\n [kPlain]: ProxiedNotificationSettings;\n }\n ): NotificationChannelSettings | null {\n const value = this[kPlain][channel];\n if (typeof value === \"undefined\") {\n console.error(\n `In order to use the '${channel}' channel, please set up your project first. 
For more information: https://liveblocks.io/docs/errors/enable-a-notification-channel`\n );\n return null;\n }\n return value;\n },\n };\n }\n\n return create<NotificationSettings>(null, descriptors);\n}\n\n/**\n * @private\n *\n * Patch a `NotificationSettings` object by applying notification kind updates\n * coming from a `PartialNotificationSettings` object.\n */\nexport function patchNotificationSettings(\n existing: NotificationSettings,\n patch: PartialNotificationSettings\n): NotificationSettings {\n // Create a copy of the settings object to mutate\n const outcoming = createNotificationSettings({\n ...(\n existing as NotificationSettings & {\n [kPlain]: ProxiedNotificationSettings;\n }\n )[kPlain],\n });\n\n for (const channel of keys(patch)) {\n const updates = patch[channel];\n if (updates !== undefined) {\n const kindUpdates = Object.fromEntries(\n entries(updates).filter(([, value]) => value !== undefined)\n ) as NotificationChannelSettings; // Fine to type cast here because we've filtered out undefined values\n\n (\n outcoming as NotificationSettings & {\n [kPlain]: ProxiedNotificationSettings;\n }\n )[kPlain][channel] = {\n ...(\n outcoming as NotificationSettings & {\n [kPlain]: ProxiedNotificationSettings;\n }\n )[kPlain][channel],\n ...kindUpdates,\n };\n }\n }\n\n return outcoming;\n}\n\n/**\n *\n * Utility to check if a notification channel settings\n * is enabled for every notification kinds.\n *\n * Usage:\n * ```ts\n * const isEmailChannelEnabled = isNotificationChannelEnabled(settings.email);\n * ```\n */\nexport function isNotificationChannelEnabled(\n settings: NotificationChannelSettings | null\n): boolean {\n return settings !== null\n ? values(settings).every((enabled) => enabled === true)\n : false;\n}\n","import type { BadgeLocation } from \"./types/Others\";\n\nconst OFFSET = \"12px\";\n\nconst injectBrandBadge = (\n badgeLocation: BadgeLocation = \"bottom-right\"\n): void => {\n // Only inject in browser environments\n if (typeof document === \"undefined\") {\n return;\n }\n\n // Check if badge already exists to avoid duplicates\n if (document.getElementById(\"liveblocks-badge\")) {\n return;\n }\n\n // Create and inject the badge HTML\n const badgeDiv = document.createElement(\"div\");\n badgeDiv.id = \"liveblocks-badge\";\n badgeDiv.style.position = \"fixed\";\n badgeDiv.style.opacity = \"0\";\n badgeDiv.style.transition = \"opacity 300ms\";\n badgeDiv.style.zIndex = \"9999\";\n\n // Set position based on badgeLocation\n switch (badgeLocation) {\n case \"top-right\":\n badgeDiv.style.top = OFFSET;\n badgeDiv.style.right = OFFSET;\n break;\n case \"bottom-right\":\n badgeDiv.style.bottom = OFFSET;\n badgeDiv.style.right = OFFSET;\n break;\n case \"bottom-left\":\n badgeDiv.style.bottom = OFFSET;\n badgeDiv.style.left = OFFSET;\n break;\n case \"top-left\":\n badgeDiv.style.top = OFFSET;\n badgeDiv.style.left = OFFSET;\n break;\n }\n badgeDiv.onmouseenter = () => {\n const hideButton = document.getElementById(\"liveblocks-badge-hide-button\");\n if (hideButton) {\n hideButton.style.opacity = \"0.3\";\n }\n };\n badgeDiv.onmouseleave = () => {\n const hideButton = document.getElementById(\"liveblocks-badge-hide-button\");\n if (hideButton) {\n hideButton.style.opacity = \"0\";\n }\n };\n\n const link = document.createElement(\"a\");\n link.href = \"https://lblcks.io/badge\";\n link.target = \"_blank\";\n link.rel = \"noopener noreferrer\";\n link.title = \"Liveblocks\";\n\n const svg = document.createElementNS(\"http://www.w3.org/2000/svg\", \"svg\");\n 
svg.setAttribute(\"width\", \"111\");\n svg.setAttribute(\"height\", \"38\");\n svg.setAttribute(\"viewBox\", \"0 0 111 38\");\n svg.setAttribute(\"fill\", \"none\");\n\n const rect1 = document.createElementNS(\"http://www.w3.org/2000/svg\", \"rect\");\n rect1.setAttribute(\"x\", \"1\");\n rect1.setAttribute(\"y\", \"1\");\n rect1.setAttribute(\"width\", \"109\");\n rect1.setAttribute(\"height\", \"36\");\n rect1.setAttribute(\"rx\", \"8\");\n rect1.setAttribute(\"fill\", \"white\");\n svg.appendChild(rect1);\n\n const rect2 = document.createElementNS(\"http://www.w3.org/2000/svg\", \"rect\");\n rect2.setAttribute(\"x\", \"0.5\");\n rect2.setAttribute(\"y\", \"0.5\");\n rect2.setAttribute(\"width\", \"110\");\n rect2.setAttribute(\"height\", \"37\");\n rect2.setAttribute(\"rx\", \"8.5\");\n rect2.setAttribute(\"stroke\", \"black\");\n rect2.setAttribute(\"stroke-opacity\", \"0.1\");\n svg.appendChild(rect2);\n\n const path1 = document.createElementNS(\"http://www.w3.org/2000/svg\", \"path\");\n path1.setAttribute(\n \"d\",\n \"M39.8256 18.0832H38L38.0005 28.9009H39.8256V18.0832ZM43.213 21.1757H41.3879V28.9009H43.213V21.1757ZM43.213 18H41.3879V19.9688H43.213V18ZM45.8916 21.1757H43.9302L46.6915 28.9009H48.7283L51.4896 21.1757H49.5581L48.4039 24.8348C48.3739 24.93 48.2657 25.3024 48.0795 25.9511L47.7099 27.1959C47.4914 26.406 47.2626 25.6189 47.0237 24.8348L45.8916 21.1757ZM58.9659 23.59C58.8147 23.0721 58.5934 22.6317 58.3014 22.2694C57.9743 21.8628 57.5598 21.5482 57.0571 21.3268C56.5589 21.1004 56.0058 20.9877 55.3973 20.9877C54.2407 20.9877 53.3152 21.3495 52.6212 22.0735C52.275 22.4476 52.0103 22.8896 51.844 23.3715C51.6731 23.8691 51.5876 24.4199 51.5876 25.0233C51.5876 26.3157 51.9322 27.3142 52.6217 28.0184C53.3203 28.7323 54.2509 29.0894 55.4125 29.0894C56.4083 29.0894 57.2151 28.8658 57.8342 28.4181C58.453 27.9704 58.8628 27.3068 59.0638 26.4266L57.2834 26.2908C57.198 26.7685 57.0016 27.1257 56.6948 27.3618C56.3879 27.5933 55.9555 27.7093 55.3973 27.7093C54.1201 27.7093 53.4663 26.9899 53.4363 25.5514H59.1844L59.1922 25.295C59.1922 24.6763 59.1165 24.108 58.9659 23.59ZM53.8434 23.0471C54.1654 22.5943 54.6834 22.3679 55.3973 22.3679C55.7443 22.3679 56.0363 22.4155 56.2725 22.5111C56.509 22.6067 56.7128 22.7601 56.8837 22.9713C57.0167 23.1352 57.1189 23.3218 57.1855 23.5221C57.2605 23.731 57.3062 23.9493 57.3213 24.1708H53.451C53.4964 23.7231 53.6271 23.3484 53.8434 23.0471ZM66.2836 21.4779C65.7656 21.1406 65.1673 20.9724 64.4885 20.9724H64.4875C63.995 20.9724 63.5449 21.0727 63.1374 21.2742C62.7364 21.4696 62.3931 21.7659 62.1412 22.1341V18.0832H60.316V28.9009H62.1416V27.8373C62.373 28.2195 62.7008 28.5342 63.0921 28.7499C63.4996 28.9763 63.9497 29.0894 64.4422 29.0894C65.1266 29.0894 65.7324 28.9259 66.2614 28.5992C66.7891 28.2726 67.1989 27.8045 67.4905 27.1959C67.7871 26.5823 67.9359 25.8634 67.9359 25.0386C67.9359 24.2337 67.7895 23.5248 67.4984 22.9113C67.2115 22.2926 66.8066 21.8148 66.2836 21.4779ZM65.5064 27.0828C65.2148 27.5254 64.7172 27.7463 64.0125 27.7463C63.3786 27.7463 62.9013 27.5175 62.5797 27.0601C62.2576 26.6026 62.0968 25.9336 62.0968 25.0534C62.0968 24.2032 62.24 23.5397 62.5265 23.0619C62.8185 22.579 63.3088 22.3378 63.9978 22.3378C64.7019 22.3378 65.2023 22.5592 65.499 23.0013C65.7961 23.4389 65.944 24.1181 65.944 25.0386C65.944 25.9585 65.7985 26.6401 65.5064 27.0828ZM70.901 18.0832H69.0754V28.9009H70.901V18.0832ZM73.8582 28.6066C74.452 28.9286 75.1558 29.0894 75.9708 29.0894C76.7554 29.0894 77.4444 28.9208 78.0377 28.5844C78.6268 28.2569 79.1068 27.7637 
79.4183 27.1659C79.745 26.5523 79.9086 25.843 79.9086 25.0386C79.9086 24.2642 79.7478 23.5697 79.4257 22.9565C79.117 22.3516 78.6404 21.8487 78.0529 21.508C77.4592 21.161 76.7652 20.9872 75.9708 20.9872C75.171 20.9872 74.4719 21.161 73.8735 21.508C73.2876 21.8472 72.811 22.3471 72.5002 22.9486C72.1837 23.5572 72.0252 24.254 72.0252 25.0386C72.0252 25.8735 72.181 26.5948 72.4929 27.2033C72.7962 27.798 73.2722 28.2871 73.8582 28.6066ZM77.4869 27.0906C77.18 27.5231 76.6746 27.7393 75.9708 27.7393C75.493 27.7393 75.1082 27.6437 74.8166 27.4524C74.5246 27.2565 74.3134 26.9621 74.1826 26.5699C74.0518 26.1725 73.9867 25.6619 73.9867 25.0381C73.9867 24.1029 74.1401 23.419 74.4469 22.9866C74.7588 22.5541 75.2667 22.3378 75.9708 22.3378C76.6699 22.3378 77.1727 22.5541 77.4795 22.9866C77.7914 23.419 77.9471 24.1029 77.9471 25.0386C77.9471 25.9738 77.7937 26.6576 77.4869 27.0906ZM82.3982 28.6066C82.9869 28.9286 83.6883 29.0894 84.5033 29.0894C85.1369 29.0894 85.7029 28.9791 86.2005 28.7577C86.6989 28.5364 87.1031 28.2272 87.4152 27.8299C87.7279 27.4249 87.9329 26.9471 88.0109 26.4414L86.2534 26.2607C86.1328 26.7431 85.9316 27.1031 85.6497 27.3392C85.3683 27.5757 84.9857 27.694 84.5033 27.694C84.0103 27.694 83.6232 27.5933 83.3413 27.3918C83.0598 27.1858 82.8635 26.8919 82.7531 26.5093C82.6422 26.1221 82.5872 25.6318 82.5872 25.0386C82.5872 24.455 82.6422 23.9722 82.7531 23.59C82.8635 23.2028 83.0571 22.9062 83.3339 22.6997C83.6157 22.4884 84.0053 22.3831 84.5033 22.3831C85.0362 22.3831 85.4312 22.5314 85.6877 22.8281C85.949 23.1196 86.1354 23.5193 86.2456 24.0276L87.9735 23.718C87.8078 22.8932 87.4326 22.232 86.8491 21.7344C86.2708 21.2363 85.489 20.9877 84.5033 20.9877C83.6985 20.9877 83.0021 21.1587 82.4134 21.5006C81.8322 21.8351 81.3625 22.3337 81.0633 22.9339C80.7514 23.5424 80.5952 24.2439 80.5952 25.0386C80.5952 25.8781 80.7486 26.6026 81.0554 27.2112C81.3673 27.8197 81.8151 28.285 82.3982 28.6066ZM90.8104 26.0343L91.6627 25.2192L93.9257 28.9009H96.0308L92.8999 24.0498L95.9328 21.1757H93.5634L90.8104 23.9976V18.0832H88.9845V28.9009H90.8104V26.0343ZM97.6304 28.8103C98.1436 28.9965 98.7142 29.0894 99.3428 29.0894C100.303 29.0894 101.095 28.8981 101.719 28.5165C102.348 28.1339 102.663 27.5101 102.663 26.6451C102.663 26.0818 102.516 25.6392 102.225 25.3176C101.933 24.9905 101.586 24.7566 101.184 24.6157C100.786 24.4702 100.245 24.3219 99.562 24.1707C99.1642 24.0853 98.8502 24.0049 98.6188 23.9295C98.3873 23.8538 98.201 23.7558 98.0604 23.6352C97.925 23.5141 97.8567 23.3584 97.8567 23.1676C97.8567 22.8858 97.9824 22.6723 98.2338 22.5263C98.4853 22.3803 98.7973 22.3073 99.1694 22.3073C99.6619 22.3073 100.047 22.4255 100.324 22.6621C100.605 22.8987 100.756 23.2531 100.776 23.7258L102.466 23.4467C102.385 22.5817 102.046 21.9556 101.447 21.568C100.854 21.1812 100.095 20.9872 99.1694 20.9872C98.6258 20.9872 98.1236 21.0699 97.6607 21.2362C97.2029 21.397 96.8335 21.6511 96.5517 21.9986C96.27 22.3451 96.1294 22.7827 96.1294 23.3109C96.1294 23.7988 96.25 24.1934 96.4911 24.4951C96.7368 24.8001 97.0527 25.0408 97.4118 25.1965C97.7838 25.3578 98.2512 25.5112 98.8147 25.6567L99.2828 25.7699C99.5949 25.844 99.9044 25.9295 100.211 26.0264C100.421 26.092 100.595 26.1826 100.731 26.2981C100.867 26.4136 100.935 26.5647 100.935 26.7509C100.935 27.0776 100.799 27.3294 100.527 27.505C100.261 27.6815 99.8708 27.7693 99.3583 27.7693C98.8347 27.7693 98.4176 27.6385 98.1055 27.377C97.7941 27.1154 97.6355 26.7384 97.6304 26.2454L95.9109 26.4413C95.9302 27.0351 96.094 27.5304 96.4009 27.9278C96.7129 28.3252 97.1223 28.619 
97.6304 28.8103Z\"\n );\n path1.setAttribute(\"fill\", \"black\");\n svg.appendChild(path1);\n\n const path2 = document.createElementNS(\"http://www.w3.org/2000/svg\", \"path\");\n path2.setAttribute(\n \"d\",\n \"M37.6392 15V9.18182H39.7131C40.1657 9.18182 40.5407 9.2642 40.8381 9.42898C41.1354 9.59375 41.358 9.81913 41.5057 10.1051C41.6534 10.3892 41.7273 10.7093 41.7273 11.0653C41.7273 11.4233 41.6525 11.7453 41.5028 12.0312C41.3551 12.3153 41.1316 12.5407 40.8324 12.7074C40.535 12.8722 40.161 12.9545 39.7102 12.9545H38.2841V12.2102H39.6307C39.9167 12.2102 40.1487 12.161 40.3267 12.0625C40.5047 11.9621 40.6354 11.8258 40.7188 11.6534C40.8021 11.4811 40.8438 11.285 40.8438 11.0653C40.8438 10.8456 40.8021 10.6506 40.7188 10.4801C40.6354 10.3097 40.5038 10.1761 40.3239 10.0795C40.1458 9.98295 39.911 9.93466 39.6193 9.93466H38.517V15H37.6392ZM44.4716 15.0881C44.0625 15.0881 43.7055 14.9943 43.4006 14.8068C43.0956 14.6193 42.8589 14.357 42.6903 14.0199C42.5218 13.6828 42.4375 13.2888 42.4375 12.8381C42.4375 12.3854 42.5218 11.9896 42.6903 11.6506C42.8589 11.3116 43.0956 11.0483 43.4006 10.8608C43.7055 10.6733 44.0625 10.5795 44.4716 10.5795C44.8807 10.5795 45.2377 10.6733 45.5426 10.8608C45.8475 11.0483 46.0843 11.3116 46.2528 11.6506C46.4214 11.9896 46.5057 12.3854 46.5057 12.8381C46.5057 13.2888 46.4214 13.6828 46.2528 14.0199C46.0843 14.357 45.8475 14.6193 45.5426 14.8068C45.2377 14.9943 44.8807 15.0881 44.4716 15.0881ZM44.4744 14.375C44.7396 14.375 44.9593 14.3049 45.1335 14.1648C45.3078 14.0246 45.4366 13.8381 45.5199 13.6051C45.6051 13.3722 45.6477 13.1155 45.6477 12.8352C45.6477 12.5568 45.6051 12.3011 45.5199 12.0682C45.4366 11.8333 45.3078 11.6449 45.1335 11.5028C44.9593 11.3608 44.7396 11.2898 44.4744 11.2898C44.2074 11.2898 43.9858 11.3608 43.8097 11.5028C43.6354 11.6449 43.5057 11.8333 43.4205 12.0682C43.3371 12.3011 43.2955 12.5568 43.2955 12.8352C43.2955 13.1155 43.3371 13.3722 43.4205 13.6051C43.5057 13.8381 43.6354 14.0246 43.8097 14.1648C43.9858 14.3049 44.2074 14.375 44.4744 14.375ZM48.267 15L46.983 10.6364H47.8608L48.7159 13.8409H48.7585L49.6165 10.6364H50.4943L51.3466 13.8267H51.3892L52.2386 10.6364H53.1165L51.8352 15H50.9688L50.0824 11.8494H50.017L49.1307 15H48.267ZM55.6705 15.0881C55.2405 15.0881 54.8703 14.9962 54.5597 14.8125C54.2509 14.6269 54.0123 14.3665 53.8438 14.0312C53.6771 13.6941 53.5938 13.2992 53.5938 12.8466C53.5938 12.3996 53.6771 12.0057 53.8438 11.6648C54.0123 11.3239 54.2472 11.0578 54.5483 10.8665C54.8513 10.6752 55.2055 10.5795 55.6108 10.5795C55.857 10.5795 56.0956 10.6203 56.3267 10.7017C56.5578 10.7831 56.7652 10.911 56.9489 11.0852C57.1326 11.2595 57.2775 11.4858 57.3835 11.7642C57.4896 12.0407 57.5426 12.3769 57.5426 12.7727V13.0739H54.0739V12.4375H56.7102C56.7102 12.214 56.6648 12.0161 56.5739 11.8438C56.483 11.6695 56.3551 11.5322 56.1903 11.4318C56.0275 11.3314 55.8362 11.2812 55.6165 11.2812C55.3778 11.2812 55.1695 11.34 54.9915 11.4574C54.8153 11.5729 54.679 11.7244 54.5824 11.9119C54.4877 12.0975 54.4403 12.2992 54.4403 12.517V13.0142C54.4403 13.3059 54.4915 13.554 54.5938 13.7585C54.6979 13.9631 54.8428 14.1193 55.0284 14.2273C55.214 14.3333 55.4309 14.3864 55.679 14.3864C55.84 14.3864 55.9867 14.3636 56.1193 14.3182C56.2519 14.2708 56.3665 14.2008 56.4631 14.108C56.5597 14.0152 56.6335 13.9006 56.6847 13.7642L57.4886 13.9091C57.4242 14.1458 57.3087 14.3532 57.142 14.5312C56.9773 14.7074 56.7699 14.8447 56.5199 14.9432C56.2718 15.0398 55.9886 15.0881 55.6705 15.0881ZM58.4851 15V10.6364H59.3061V11.3295H59.3516C59.4311 11.0947 59.5713 10.91 
59.772 10.7756C59.9747 10.6392 60.2038 10.571 60.4595 10.571C60.5125 10.571 60.575 10.5729 60.647 10.5767C60.7209 10.5805 60.7786 10.5852 60.8203 10.5909V11.4034C60.7862 11.3939 60.7256 11.3835 60.6385 11.3722C60.5514 11.3589 60.4643 11.3523 60.3771 11.3523C60.1764 11.3523 59.9974 11.3949 59.8402 11.4801C59.6849 11.5634 59.5618 11.6799 59.4709 11.8295C59.38 11.9773 59.3345 12.1458 59.3345 12.3352V15H58.4851ZM63.2798 15.0881C62.8499 15.0881 62.4796 14.9962 62.169 14.8125C61.8603 14.6269 61.6217 14.3665 61.4531 14.0312C61.2865 13.6941 61.2031 13.2992 61.2031 12.8466C61.2031 12.3996 61.2865 12.0057 61.4531 11.6648C61.6217 11.3239 61.8565 11.0578 62.1577 10.8665C62.4607 10.6752 62.8149 10.5795 63.2202 10.5795C63.4664 10.5795 63.705 10.6203 63.9361 10.7017C64.1671 10.7831 64.3745 10.911 64.5582 11.0852C64.742 11.2595 64.8868 11.4858 64.9929 11.7642C65.099 12.0407 65.152 12.3769 65.152 12.7727V13.0739H61.6832V12.4375H64.3196C64.3196 12.214 64.2741 12.0161 64.1832 11.8438C64.0923 11.6695 63.9645 11.5322 63.7997 11.4318C63.6368 11.3314 63.4455 11.2812 63.2259 11.2812C62.9872 11.2812 62.7789 11.34 62.6009 11.4574C62.4247 11.5729 62.2884 11.7244 62.1918 11.9119C62.0971 12.0975 62.0497 12.2992 62.0497 12.517V13.0142C62.0497 13.3059 62.1009 13.554 62.2031 13.7585C62.3073 13.9631 62.4522 14.1193 62.6378 14.2273C62.8234 14.3333 63.0402 14.3864 63.2884 14.3864C63.4493 14.3864 63.5961 14.3636 63.7287 14.3182C63.8613 14.2708 63.9759 14.2008 64.0724 14.108C64.169 14.0152 64.2429 13.9006 64.294 13.7642L65.098 13.9091C65.0336 14.1458 64.9181 14.3532 64.7514 14.5312C64.5866 14.7074 64.3793 14.8447 64.1293 14.9432C63.8812 15.0398 63.598 15.0881 63.2798 15.0881ZM67.728 15.0852C67.3757 15.0852 67.0613 14.9953 66.7848 14.8153C66.5102 14.6335 66.2943 14.375 66.1371 14.0398C65.9818 13.7027 65.9041 13.2983 65.9041 12.8267C65.9041 12.3551 65.9827 11.9517 66.1399 11.6165C66.299 11.2812 66.5168 11.0246 66.7933 10.8466C67.0698 10.6686 67.3833 10.5795 67.7337 10.5795C68.0045 10.5795 68.2223 10.625 68.3871 10.7159C68.5537 10.8049 68.6825 10.9091 68.7734 11.0284C68.8662 11.1477 68.9382 11.2528 68.9893 11.3438H69.0405V9.18182H69.8899V15H69.0604V14.321H68.9893C68.9382 14.4138 68.8643 14.5199 68.7678 14.6392C68.6731 14.7585 68.5424 14.8627 68.3757 14.9517C68.209 15.0407 67.9931 15.0852 67.728 15.0852ZM67.9155 14.3608C68.1598 14.3608 68.3662 14.2964 68.5348 14.1676C68.7053 14.0369 68.834 13.8561 68.9212 13.625C69.0102 13.3939 69.0547 13.125 69.0547 12.8182C69.0547 12.5152 69.0111 12.25 68.924 12.0227C68.8369 11.7955 68.709 11.6184 68.5405 11.4915C68.3719 11.3646 68.1636 11.3011 67.9155 11.3011C67.6598 11.3011 67.4467 11.3674 67.2763 11.5C67.1058 11.6326 66.977 11.8134 66.8899 12.0426C66.8047 12.2718 66.7621 12.5303 66.7621 12.8182C66.7621 13.1098 66.8056 13.3722 66.8928 13.6051C66.9799 13.8381 67.1087 14.0227 67.2791 14.1591C67.4515 14.2936 67.6636 14.3608 67.9155 14.3608ZM73.2876 15V9.18182H74.1371V11.3438H74.1882C74.2375 11.2528 74.3085 11.1477 74.4013 11.0284C74.4941 10.9091 74.6229 10.8049 74.7876 10.7159C74.9524 10.625 75.1702 10.5795 75.4411 10.5795C75.7933 10.5795 76.1077 10.6686 76.3842 10.8466C76.6607 11.0246 76.8776 11.2812 77.0348 11.6165C77.1939 11.9517 77.2734 12.3551 77.2734 12.8267C77.2734 13.2983 77.1948 13.7027 77.0376 14.0398C76.8804 14.375 76.6645 14.6335 76.3899 14.8153C76.1153 14.9953 75.8018 15.0852 75.4496 15.0852C75.1844 15.0852 74.9676 15.0407 74.799 14.9517C74.6323 14.8627 74.5017 14.7585 74.407 14.6392C74.3123 14.5199 74.2393 14.4138 74.1882 14.321H74.1172V15H73.2876ZM74.12 12.8182C74.12 13.125 
74.1645 13.3939 74.2536 13.625C74.3426 13.8561 74.4714 14.0369 74.6399 14.1676C74.8085 14.2964 75.0149 14.3608 75.2592 14.3608C75.513 14.3608 75.7251 14.2936 75.8956 14.1591C76.0661 14.0227 76.1948 13.8381 76.282 13.6051C76.371 13.3722 76.4155 13.1098 76.4155 12.8182C76.4155 12.5303 76.3719 12.2718 76.2848 12.0426C76.1996 11.8134 76.0708 11.6326 75.8984 11.5C75.728 11.3674 75.5149 11.3011 75.2592 11.3011C75.013 11.3011 74.8047 11.3646 74.6342 11.4915C74.4657 11.6184 74.3378 11.7955 74.2507 12.0227C74.1636 12.25 74.12 12.5152 74.12 12.8182ZM78.6158 16.6364C78.4889 16.6364 78.3733 16.6259 78.2692 16.6051C78.165 16.5862 78.0874 16.5653 78.0362 16.5426L78.2408 15.8466C78.3961 15.8883 78.5343 15.9063 78.6555 15.9006C78.7768 15.8949 78.8838 15.8494 78.9766 15.7642C79.0713 15.679 79.1546 15.5398 79.2266 15.3466L79.3317 15.0568L77.7351 10.6364H78.6442L79.7493 14.0227H79.7947L80.8999 10.6364H81.8118L80.0135 15.5824C79.9302 15.8097 79.8241 16.0019 79.6953 16.1591C79.5665 16.3182 79.4131 16.4375 79.2351 16.517C79.0571 16.5966 78.8506 16.6364 78.6158 16.6364Z\"\n );\n path2.setAttribute(\"fill\", \"black\");\n path2.setAttribute(\"fill-opacity\", \"0.5\");\n svg.appendChild(path2);\n\n const rect3 = document.createElementNS(\"http://www.w3.org/2000/svg\", \"rect\");\n rect3.setAttribute(\"x\", \"7\");\n rect3.setAttribute(\"y\", \"7\");\n rect3.setAttribute(\"width\", \"24\");\n rect3.setAttribute(\"height\", \"24\");\n rect3.setAttribute(\"rx\", \"3\");\n rect3.setAttribute(\"fill\", \"black\");\n svg.appendChild(rect3);\n\n const path3 = document.createElementNS(\"http://www.w3.org/2000/svg\", \"path\");\n path3.setAttribute(\"fill-rule\", \"evenodd\");\n path3.setAttribute(\"clip-rule\", \"evenodd\");\n path3.setAttribute(\n \"d\",\n \"M22.0455 15H12.2273L15.1364 17.9091V21.9091L22.0455 15Z\"\n );\n path3.setAttribute(\"fill\", \"white\");\n svg.appendChild(path3);\n\n const path4 = document.createElementNS(\"http://www.w3.org/2000/svg\", \"path\");\n path4.setAttribute(\"fill-rule\", \"evenodd\");\n path4.setAttribute(\"clip-rule\", \"evenodd\");\n path4.setAttribute(\n \"d\",\n \"M16.9546 22.9999H26.7728L23.8637 20.0908V16.0908L16.9546 22.9999Z\"\n );\n path4.setAttribute(\"fill\", \"white\");\n svg.appendChild(path4);\n\n link.appendChild(svg);\n badgeDiv.appendChild(link);\n\n const hideButton = document.createElement(\"button\");\n hideButton.id = \"liveblocks-badge-hide-button\";\n hideButton.style.position = \"absolute\";\n hideButton.style.top = \"0\";\n hideButton.style.right = \"0\";\n hideButton.style.border = \"none\";\n hideButton.style.padding = \"0\";\n hideButton.style.margin = \"0\";\n hideButton.style.background = \"none\";\n hideButton.style.font = \"inherit\";\n hideButton.style.cursor = \"pointer\";\n hideButton.style.outline = \"none\";\n hideButton.style.setProperty(\"-webkit-appearance\", \"none\");\n hideButton.style.setProperty(\"-moz-appearance\", \"none\");\n hideButton.style.setProperty(\"appearance\", \"none\");\n hideButton.style.opacity = \"0\";\n hideButton.onclick = () => {\n const badge = document.getElementById(\"liveblocks-badge\");\n if (badge) {\n badge.style.display = \"none\";\n }\n };\n hideButton.onmouseenter = () => {\n hideButton.style.opacity = \"0.5\";\n };\n hideButton.onmouseleave = () => {\n hideButton.style.opacity = \"0.3\";\n };\n\n const hideSvg = document.createElementNS(\"http://www.w3.org/2000/svg\", \"svg\");\n hideSvg.setAttribute(\"width\", \"18\");\n hideSvg.setAttribute(\"height\", \"18\");\n hideSvg.setAttribute(\"viewBox\", \"0 0 18 
18\");\n hideSvg.setAttribute(\"fill\", \"none\");\n\n const hidePath = document.createElementNS(\n \"http://www.w3.org/2000/svg\",\n \"path\"\n );\n hidePath.setAttribute(\"d\", \"M6 6L9 9M12 12L9 9M9 9L12 6M9 9L6 12\");\n hidePath.setAttribute(\"stroke\", \"black\");\n hideSvg.appendChild(hidePath);\n\n hideButton.appendChild(hideSvg);\n badgeDiv.appendChild(hideButton);\n\n document.body.appendChild(badgeDiv);\n\n // trigger twice so it goes from 0 to 1\n requestAnimationFrame(() => {\n requestAnimationFrame(() => {\n badgeDiv.style.opacity = \"1\";\n });\n });\n};\n\nexport { injectBrandBadge };\n","/**\n * Positions, aka the Pos type, are efficient encodings of \"positions\" in\n * a list, using the following printable subset of the ASCII alphabet:\n *\n * !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\n * ^ ^\n * Lowest digit Highest digit\n *\n * Each Pos is a sequence of characters from the above alphabet, conceptually\n * codifying a floating point number 0 < n < 1. For example, the string \"31007\"\n * would be used to represent the number 0.31007, except that this\n * representation uses base 96.\n *\n * 0 ≃ ' ' (lowest digit)\n * 1 ≃ '!'\n * 2 ≃ '\"'\n * ...\n * 9 ≃ '~' (highest digit)\n *\n * So think:\n * '!' ≃ 0.1\n * '\"' ≃ 0.2\n * '!\"~' ≃ 0.129\n *\n * Three rules:\n * - All \"characters\" in the string should be valid digits (from the above\n * alphabet)\n * - The value 0.0 is not a valid Pos value\n * - A Pos cannot have trailing \"zeroes\"\n *\n * This representation has the following benefits:\n *\n * 1. It's always possible to get a number that lies before, after, or between\n * two arbitrary Pos values.\n * 2. Pos values can be compared using normal string comparison.\n *\n * Some examples:\n * - '!' < '\"' (like how .1 < .2)\n * - '!' < '~' (like how .1 < .9)\n * - '!!' < '!~' (like how .11 < .19)\n * - '~!' < '~~' (like how .91 < .99)\n * - '~' < '~!' (like how .9 < .91)\n * - '!!' < '!O' (like how .1 < .5)\n * - '!O' < '!~' (like how .5 < .9)\n *\n */\nimport type { Brand } from \"./utils\";\n\n/**\n * A valid/verified \"position\" string. These values are used as \"parentKey\"s by\n * LiveList children, and define their relative ordering.\n */\nexport type Pos = Brand<string, \"Pos\">;\n\nconst MIN_CODE = 32; // ASCII code of the lowest alphabet char (e.g. ' ')\nconst MAX_CODE = 126; // ASCII code of the highest alphabet char (e.g. '~')\n\nconst NUM_DIGITS = MAX_CODE - MIN_CODE + 1; // 95\n\nconst ZERO: string = nthDigit(0); // \" \"\n\n/**\n * The \"first\" canonical position.\n * In an equivalent decimal number system, think of this as the value .1.\n */\nconst ONE: Pos = nthDigit(1); // \"!\"\n\nconst ZERO_NINE = (ZERO + nthDigit(-1)) as Pos;\n\n/**\n * Returns the Pos value for the nth digit in the alphabet.\n * Value must be between 0 and 94.\n *\n * Just used to generate some static data, and for usage in test cases.\n */\nfunction nthDigit(n: 0): string; // \"0\" is a legal _digit_, but not a legal Pos value\nfunction nthDigit(n: number): Pos;\nfunction nthDigit(n: number): Pos {\n const code = MIN_CODE + (n < 0 ? 
NUM_DIGITS + n : n);\n if (code < MIN_CODE || code > MAX_CODE) {\n throw new Error(`Invalid n value: ${n}`);\n }\n return String.fromCharCode(code) as Pos;\n}\n\n/**\n * Given two positions, returns the position value that lies in the middle.\n * When given only a high bound, computes the canonical position \"before\" it.\n * When given only a low bound, computes the canonical position \"after\" it.\n * When given no bounds at all, returns the \"first\" canonical position.\n */\nfunction makePosition(x?: Pos, y?: Pos): Pos {\n if (x !== undefined && y !== undefined) {\n return between(x, y);\n } else if (x !== undefined) {\n return after(x);\n } else if (y !== undefined) {\n return before(y);\n } else {\n return ONE;\n }\n}\n\n/**\n * Given any position value, computes the canonical position \"before\" it.\n *\n * The equivalent in a decimal number system would be:\n * before(.1) // .09\n * before(.11) // .1\n * before(.111) // .1\n * before(.2) // .1\n * before(.23101) // .2\n * before(.3) // .2\n * ...\n * before(.8) // .7\n * before(.9) // .8\n * before(.91) // .9\n * before(.92) // .9\n * before(.93) // .9\n * ...\n * before(.98) // .9\n * before(.99) // .9\n *\n * Note:\n * before(.01) // .009\n * before(.001) // .0009\n * before(.002) // .001\n * before(.00283) // .002\n *\n */\nfunction before(pos: Pos): Pos {\n const lastIndex = pos.length - 1;\n for (let i = 0; i <= lastIndex; i++) {\n const code = pos.charCodeAt(i);\n\n // Scan away all leading zeros, if there are any\n if (code <= MIN_CODE) {\n continue;\n }\n\n //\n // Now, i points to the first non-zero digit\n //\n // Two options:\n // 1. It's the last digit.\n // a. If it's a 1, it's on the edge. Replace with \"09\".\n // b. Otherwise, just lower it.\n // 2. It's not the last digit, so we can just chop off the remainder.\n //\n if (i === lastIndex) {\n if (code === MIN_CODE + 1) {\n return (pos.substring(0, i) + ZERO_NINE) as Pos;\n } else {\n return (pos.substring(0, i) + String.fromCharCode(code - 1)) as Pos;\n }\n } else {\n return pos.substring(0, i + 1) as Pos;\n }\n }\n\n // If we end up here, it means the input consisted of only zeroes, which is\n // invalid, so return the canonical first value as a best effort\n return ONE;\n}\n\n/**\n * Given any position value, computes the canonical position \"after\" it.\n *\n * Uses \"viewport-based allocation\" (V=2+3) to bound position length growth\n * when repeatedly pushing items. 
Instead of always incrementing the last digit\n * (which leads to O(n/94) length growth), we treat positions as fixed-width\n * numbers within a \"viewport\" of V digits.\n *\n * - V=2: positions stay ≤2 chars for first ~8,900 pushes\n * - V=5: positions stay ≤5 chars for next ~848k pushes\n * - V=8, V=11, ...: each +3 adds capacity for ~848k more pushes\n *\n * This keeps position lengths dramatically smaller for typical usage while\n * remaining backward compatible with all existing position strings.\n *\n * Viewport growth: V=2 → V=5 → V=8 → V=11 → ...\n *\n * Examples (conceptually in decimal):\n * after(.1) // .11 (within V=2 viewport)\n * after(.11) // .12\n * after(.99) // .99001 (overflow V=2, extend to V=5)\n *\n */\nconst VIEWPORT_START = 2;\nconst VIEWPORT_STEP = 3;\n\nfunction after(pos: Pos): Pos {\n // For positions with any chars outside valid range, just append ONE.\n // This guarantees result > pos for any input string.\n for (let i = 0; i < pos.length; i++) {\n const code = pos.charCodeAt(i);\n if (code < MIN_CODE || code > MAX_CODE) {\n return (pos + ONE) as Pos;\n }\n }\n\n // Strip trailing zeros for canonical form\n while (pos.length > 1 && pos.charCodeAt(pos.length - 1) === MIN_CODE) {\n pos = pos.slice(0, -1) as Pos;\n }\n\n // Handle empty/zero input\n if (pos.length === 0 || pos === ZERO) {\n return ONE;\n }\n\n // Determine viewport: V=2, then 5, 8, 11, ...\n let viewport = VIEWPORT_START;\n if (pos.length > VIEWPORT_START) {\n viewport =\n VIEWPORT_START +\n Math.ceil((pos.length - VIEWPORT_START) / VIEWPORT_STEP) * VIEWPORT_STEP;\n }\n\n // Try to increment within current viewport\n const result = incrementWithinViewport(pos, viewport);\n if (result !== null) {\n return result;\n }\n\n // Overflow: extend viewport and increment\n viewport += VIEWPORT_STEP;\n const extendedResult = incrementWithinViewport(pos, viewport);\n if (extendedResult !== null) {\n return extendedResult;\n }\n\n // Fallback (should rarely happen): just append\n return (pos + ONE) as Pos;\n}\n\n/**\n * Increment a position string within a fixed viewport width.\n * Returns null if overflow occurs (all digits were at max).\n */\nfunction incrementWithinViewport(pos: Pos, viewport: number): Pos | null {\n // Build array of digit values, padded to viewport width\n const digits: number[] = [];\n for (let i = 0; i < viewport; i++) {\n if (i < pos.length) {\n digits.push(pos.charCodeAt(i) - MIN_CODE);\n } else {\n digits.push(0); // Pad with zeros\n }\n }\n\n // Increment from right to left with carry\n let carry = 1;\n for (let i = viewport - 1; i >= 0 && carry; i--) {\n const sum = digits[i] + carry;\n if (sum >= NUM_DIGITS) {\n digits[i] = 0;\n carry = 1;\n } else {\n digits[i] = sum;\n carry = 0;\n }\n }\n\n // If carry remains, we overflowed the viewport\n if (carry) {\n return null;\n }\n\n // Convert back to string, stripping trailing zeros\n let result = \"\";\n for (const d of digits) {\n result += String.fromCharCode(d + MIN_CODE);\n }\n\n // Strip trailing zeros\n while (result.length > 1 && result.charCodeAt(result.length - 1) === MIN_CODE) {\n result = result.slice(0, -1);\n }\n\n return result as Pos;\n}\n\n/**\n * Given two positions, returns the position value that lies in the middle.\n *\n * Think:\n * between('!', '%') // '#' (like how between(.1, .5) would be .3)\n * between('!', '\"') // '!O' (like how between(.1, .2) would be .15)\n *\n * between(.1, .3) // .2\n * between(.1, .4) // also .2\n * between(.1, .5) // .3\n * between(.11, .21) // .15\n * between(.1, .1003) // 
.1001\n * between(.11, .12) // .115\n * between(.09, .1) // .095\n * between(.19, .21) // .195\n *\n */\nfunction between(lo: Pos, hi: Pos): Pos {\n if (lo < hi) {\n return _between(lo, hi);\n } else if (lo > hi) {\n return _between(hi, lo);\n } else {\n throw new Error(\"Cannot compute value between two equal positions\");\n }\n}\n\n/**\n * Like between(), but guaranteed that lo < hi.\n */\nfunction _between(lo: Pos, hi: Pos | \"\"): Pos {\n let index = 0;\n\n const loLen = lo.length;\n const hiLen = hi.length;\n while (true) {\n const loCode = index < loLen ? lo.charCodeAt(index) : MIN_CODE;\n const hiCode = index < hiLen ? hi.charCodeAt(index) : MAX_CODE;\n\n if (loCode === hiCode) {\n index++;\n continue;\n }\n\n // Difference of only 1 means we'll have to settle this in the next digit\n if (hiCode - loCode === 1) {\n const size = index + 1;\n let prefix = lo.substring(0, size);\n if (prefix.length < size) {\n prefix += ZERO.repeat(size - prefix.length);\n }\n const suffix = lo.substring(size) as Pos;\n const nines = \"\"; // Will get interpreted like .999999…\n return (prefix + _between(suffix, nines)) as Pos;\n } else {\n // Difference of more than 1 means we take the \"middle\" between these digits\n return (takeN(lo, index) +\n String.fromCharCode((hiCode + loCode) >> 1)) as Pos;\n }\n }\n}\n\nfunction takeN(pos: string, n: number): string {\n return n < pos.length\n ? pos.substring(0, n)\n : pos + ZERO.repeat(n - pos.length);\n}\n\nconst MIN_NON_ZERO_CODE = MIN_CODE + 1;\n\n/**\n * Checks whether a given string is a valid Pos value. There are three rules:\n *\n * - The string must not be the empty string\n * - The string must not have any trailing \"zeroes\" (trailing \" \")\n * - All characters in the string must be from our alphabet\n *\n */\nfunction isPos(str: string): str is Pos {\n // May not be empty string\n if (str === \"\") {\n return false;\n }\n\n // Last digit may not be a \"0\" (no trailing zeroes)\n const lastIdx = str.length - 1;\n const last = str.charCodeAt(lastIdx);\n if (last < MIN_NON_ZERO_CODE || last > MAX_CODE) {\n return false;\n }\n\n for (let i = 0; i < lastIdx; i++) {\n const code = str.charCodeAt(i);\n if (code < MIN_CODE || code > MAX_CODE) {\n return false;\n }\n }\n\n return true;\n}\n\nfunction convertToPos(str: string): Pos {\n const codes: number[] = [];\n\n // All chars in the string must be in the min-max range\n for (let i = 0; i < str.length; i++) {\n const code = str.charCodeAt(i);\n\n // Clamp to min-max range\n codes.push(code < MIN_CODE ? MIN_CODE : code > MAX_CODE ? MAX_CODE : code);\n }\n\n // Strip all trailing zeros\n while (codes.length > 0 && codes[codes.length - 1] === MIN_CODE) {\n codes.length--;\n }\n\n return codes.length > 0\n ? (String.fromCharCode(...codes) as Pos)\n : // Edge case: the str was a 0-only string, which is invalid. Default back to .1\n ONE;\n}\n\n/**\n * Checks that a str is a valid Pos, and converts it to the nearest valid one\n * if not.\n */\nfunction asPos(str: string): Pos {\n // Calling convertToPos(str) would suffice here, but since this is a hot code\n // path, we prefer to just check, which is a lot faster.\n return isPos(str) ? 
str : convertToPos(str);\n}\n\nexport { asPos, makePosition };\n\n// For use in unit tests only\nexport {\n after as __after,\n before as __before,\n between as __between,\n isPos as __isPos,\n nthDigit as __nthDigit,\n NUM_DIGITS as __NUM_DIGITS,\n};\n","import type { Json, JsonObject } from \"../lib/Json\";\nimport type { DistributiveOmit } from \"../lib/utils\";\n\nexport type OpCode = (typeof OpCode)[keyof typeof OpCode];\nexport const OpCode = Object.freeze({\n INIT: 0,\n SET_PARENT_KEY: 1,\n CREATE_LIST: 2,\n UPDATE_OBJECT: 3,\n CREATE_OBJECT: 4,\n DELETE_CRDT: 5,\n DELETE_OBJECT_KEY: 6,\n CREATE_MAP: 7,\n CREATE_REGISTER: 8,\n});\n\nexport namespace OpCode {\n export type INIT = typeof OpCode.INIT;\n export type SET_PARENT_KEY = typeof OpCode.SET_PARENT_KEY;\n export type CREATE_LIST = typeof OpCode.CREATE_LIST;\n export type UPDATE_OBJECT = typeof OpCode.UPDATE_OBJECT;\n export type CREATE_OBJECT = typeof OpCode.CREATE_OBJECT;\n export type DELETE_CRDT = typeof OpCode.DELETE_CRDT;\n export type DELETE_OBJECT_KEY = typeof OpCode.DELETE_OBJECT_KEY;\n export type CREATE_MAP = typeof OpCode.CREATE_MAP;\n export type CREATE_REGISTER = typeof OpCode.CREATE_REGISTER;\n}\n\n/**\n * These operations are the payload for {@link UpdateStorageServerMsg} messages\n * only.\n */\nexport type Op =\n | CreateOp\n | UpdateObjectOp\n | DeleteCrdtOp\n | SetParentKeyOp // Only for lists!\n | DeleteObjectKeyOp;\n\nexport type CreateOp =\n | CreateObjectOp\n | CreateRegisterOp\n | CreateMapOp\n | CreateListOp;\n\nexport type UpdateObjectOp = {\n readonly opId?: string;\n readonly id: string;\n readonly type: OpCode.UPDATE_OBJECT;\n readonly data: Partial<JsonObject>;\n};\n\nexport type CreateObjectOp = {\n readonly opId?: string;\n readonly id: string;\n readonly intent?: \"set\";\n readonly deletedId?: string;\n readonly type: OpCode.CREATE_OBJECT;\n readonly parentId: string;\n readonly parentKey: string;\n readonly data: JsonObject;\n};\n\nexport type CreateListOp = {\n readonly opId?: string;\n readonly id: string;\n readonly intent?: \"set\";\n readonly deletedId?: string;\n readonly type: OpCode.CREATE_LIST;\n readonly parentId: string;\n readonly parentKey: string;\n};\n\nexport type CreateMapOp = {\n readonly opId?: string;\n readonly id: string;\n readonly intent?: \"set\";\n readonly deletedId?: string;\n readonly type: OpCode.CREATE_MAP;\n readonly parentId: string;\n readonly parentKey: string;\n};\n\nexport type CreateRegisterOp = {\n readonly opId?: string;\n readonly id: string;\n readonly intent?: \"set\";\n readonly deletedId?: string;\n readonly type: OpCode.CREATE_REGISTER;\n readonly parentId: string;\n readonly parentKey: string;\n readonly data: Json;\n};\n\nexport type DeleteCrdtOp = {\n readonly opId?: string;\n readonly id: string;\n readonly type: OpCode.DELETE_CRDT;\n};\n\n//\n// HACK:\n// Disguised as a \"DeleteCrdtOp\" for a nonexisting node \"ACK\", this Op that the\n// server may return to senders is effectively sent as a backward-compatible\n// way to trigger an acknowledgement for Ops that were seen by the server, but\n// deliberately ignored.\n//\nexport type IgnoredOp = {\n readonly type: OpCode.DELETE_CRDT; // Not a typo!\n readonly id: \"ACK\"; // (H)ACK\n readonly opId: string;\n};\n\nexport function isIgnoredOp(op: ServerWireOp): op is IgnoredOp {\n return op.type === OpCode.DELETE_CRDT && op.id === \"ACK\";\n}\n\nexport type SetParentKeyOp = {\n readonly opId?: string;\n readonly id: string;\n readonly type: OpCode.SET_PARENT_KEY;\n readonly parentKey: 
string;\n};\n\nexport type DeleteObjectKeyOp = {\n readonly opId?: string;\n readonly id: string;\n readonly type: OpCode.DELETE_OBJECT_KEY;\n readonly key: string;\n};\n\n//\n// ------------------------------------------------------------------------------\n// Wire types for Ops sent over the network\n// ------------------------------------------------------------------------------\n//\n\nexport type HasOpId = { opId: string };\n\n/**\n * Ops sent from client → server. Always includes an opId so the server can\n * acknowledge the receipt.\n */\nexport type ClientWireOp = Op & HasOpId;\nexport type ClientWireCreateOp = CreateOp & HasOpId;\n\n/**\n * ServerWireOp: Ops sent from server → client. Three variants:\n * 1. ClientWireOp — Full echo back of our own op, confirming it was applied\n * 2. IgnoredOp — Our op was seen but intentionally ignored (still counts as ack)\n * 3. Op without opId — Another client's op being forwarded to us\n */\nexport type ServerWireOp =\n | ClientWireOp // \"Our\" Op echoed back in full to ACK (V7 response)\n | IgnoredOp // \"Our\" Op was ignored by the server (not forwarded) in v7\n | TheirOp; // \"Their\" Op (V7 forward)\n\ntype TheirOp = DistributiveOmit<Op, \"opId\"> & { opId?: undefined };\n","import { assertNever } from \"../lib/assert\";\nimport type { Pos } from \"../lib/position\";\nimport { asPos } from \"../lib/position\";\nimport type {\n ClientWireCreateOp,\n ClientWireOp,\n CreateOp,\n Op,\n} from \"../protocol/Op\";\nimport { OpCode } from \"../protocol/Op\";\nimport type { SerializedCrdt } from \"../protocol/StorageNode\";\nimport type * as DevTools from \"../types/DevToolsTreeNode\";\nimport type { Immutable } from \"../types/Immutable\";\nimport type { LiveNode, Lson } from \"./Lson\";\nimport type { StorageUpdate } from \"./StorageUpdates\";\n\nexport type ApplyResult =\n | { reverse: Op[]; modified: StorageUpdate }\n | { modified: false };\n\n/**\n * The managed pool is a namespace registry (i.e. a context) that \"owns\" all\n * the individual live nodes, ensuring each one has a unique ID, and holding on\n * to live nodes before and after they are inter-connected.\n */\nexport interface ManagedPool {\n readonly roomId: string;\n readonly nodes: ReadonlyMap<string, LiveNode>;\n readonly generateId: () => string;\n readonly generateOpId: () => string;\n\n readonly getNode: (id: string) => LiveNode | undefined;\n readonly addNode: (id: string, node: LiveNode) => void;\n readonly deleteNode: (id: string) => void;\n\n /**\n * Dispatching has three responsibilities:\n * - Sends serialized ops to the WebSocket servers\n * - Add reverse operations to the undo/redo stack\n * - Notify room subscribers with updates (in-client, no networking)\n */\n dispatch: (\n ops: ClientWireOp[],\n reverseOps: Op[],\n storageUpdates: Map<string, StorageUpdate>\n ) => void;\n\n /**\n * Ensures storage can be written to else throws an error.\n * This is used to prevent writing to storage when the user does not have\n * permission to do so.\n * @throws {Error} if storage is not writable\n * @returns {void}\n */\n assertStorageIsWritable: () => void;\n}\n\nexport type CreateManagedPoolOptions = {\n /**\n * Returns the current connection ID. This is used to generate unique\n * prefixes for nodes created by this client. 
This number is allowed to\n * change over time (for example, when the client reconnects).\n */\n getCurrentConnectionId(): number;\n\n /**\n * Will get invoked when any Live structure calls .dispatch() on the pool.\n */\n onDispatch?: (\n ops: ClientWireOp[],\n reverse: Op[],\n storageUpdates: Map<string, StorageUpdate>\n ) => void;\n\n /**\n * Will get invoked when any Live structure calls .assertStorageIsWritable()\n * on the pool. Defaults to true when not provided. Return false if you want\n * to prevent writes to the pool locally early, because you know they won't\n * have an effect upstream.\n */\n isStorageWritable?: () => boolean;\n};\n\n/**\n * @private Private API, never use this API directly.\n */\nexport function createManagedPool(\n roomId: string,\n options: CreateManagedPoolOptions\n): ManagedPool {\n const {\n getCurrentConnectionId,\n onDispatch,\n isStorageWritable = () => true,\n } = options;\n\n let clock = 0;\n let opClock = 0;\n const nodes = new Map<string, LiveNode>();\n\n return {\n roomId,\n nodes,\n\n getNode: (id: string) => nodes.get(id),\n addNode: (id: string, node: LiveNode) => void nodes.set(id, node),\n deleteNode: (id: string) => void nodes.delete(id),\n\n generateId: () => `${getCurrentConnectionId()}:${clock++}`,\n generateOpId: () => `${getCurrentConnectionId()}:${opClock++}`,\n\n dispatch(\n ops: ClientWireOp[],\n reverse: Op[],\n storageUpdates: Map<string, StorageUpdate>\n ) {\n onDispatch?.(ops, reverse, storageUpdates);\n },\n\n assertStorageIsWritable: () => {\n if (!isStorageWritable()) {\n throw new Error(\n \"Cannot write to storage with a read only user, please ensure the user has write permissions\"\n );\n }\n },\n };\n}\n\n/**\n * When applying an op to a CRDT, we need to know where it came from to apply\n * it correctly.\n */\nexport enum OpSource {\n /**\n * Optimistic update applied locally (from an undo, redo, or reconnect). Not\n * yet acknowledged by the server. Will be sent to server and needs to be\n * tracked for conflict resolution.\n */\n LOCAL,\n\n /**\n * Op received from server, originated from another client. Apply it, unless\n * there's a pending local op for the same key (local ops take precedence\n * until acknowledged).\n *\n * Note that a \"fix Op\" sent by the server in response to a local mutation\n * that caused a conflict will also be classified as a THEIRS-like mutation.\n * (As if another client resolved the conflict.)\n */\n THEIRS,\n\n /**\n * Op received from server, originated from THIS client. Server echoed it\n * back to confirm.\n */\n OURS,\n}\n\n// TODO Temporary helper to help convert from AbstractCrdt -> LiveNode, only\n// needed for within this module. 
The reason is that AbstractCrdt is an\n// _abstract_ type, and in our LiveNode union we exhaustively include all\n// concrete types.\n// TODO Remove me later, if we inline the abstract base methods in the concrete\n// classes.\nfunction crdtAsLiveNode(\n value: AbstractCrdt // eslint-disable-line no-restricted-syntax\n): LiveNode {\n return value as LiveNode;\n}\n\ntype HasParent = {\n readonly type: \"HasParent\";\n readonly node: LiveNode;\n readonly key: string;\n\n // Typically the same as `key`, but checked to be a valid Pos value (needed\n // when used as item in a LiveList)\n readonly pos: Pos;\n};\n\ntype NoParent = {\n readonly type: \"NoParent\";\n};\n\ntype Orphaned = {\n readonly type: \"Orphaned\";\n readonly oldKey: string;\n\n // Typically the same as `key`, but checked to be a valid Pos value (needed\n // when used as item in a LiveList)\n readonly oldPos: Pos;\n};\n\nfunction HasParent(\n node: LiveNode,\n key: string,\n pos: Pos = asPos(key)\n): HasParent {\n return Object.freeze({ type: \"HasParent\", node, key, pos });\n}\n\nconst NoParent: NoParent = Object.freeze({ type: \"NoParent\" });\n\nfunction Orphaned(oldKey: string, oldPos: Pos = asPos(oldKey)): Orphaned {\n return Object.freeze({ type: \"Orphaned\", oldKey, oldPos });\n}\n\n/**\n * Represents the possible states of the parent field pointers.\n */\ntype ParentInfo =\n // Both the parent node and the parent key are set. This is a normal child.\n | HasParent\n\n // Neither are set. This is either the root node (if attached to a document),\n // or it's a dangling node that hasn't been attached yet.\n | NoParent\n\n // -------------------------------------------------------------------------\n // TODO Refactor this state away!\n // -------------------------------------------------------------------------\n // Tricky case! This state is used after the node is detached from its\n // parent, but we still need to retain the parent key that it was originally\n // attached under. For example we rely on this to derive the reverse Op to\n // add. We should be able to get rid of this case by structuring the code\n // differently!\n | Orphaned;\n\nexport abstract class AbstractCrdt {\n // ^^^^^^^^^^^^ TODO: Make this an interface\n #pool?: ManagedPool;\n #id?: string;\n\n #parent: ParentInfo = NoParent;\n\n /** @internal */\n _getParentKeyOrThrow(): string {\n switch (this.parent.type) {\n case \"HasParent\":\n return this.parent.key;\n\n case \"NoParent\":\n throw new Error(\"Parent key is missing\");\n\n case \"Orphaned\":\n return this.parent.oldKey;\n\n default:\n return assertNever(this.parent, \"Unknown state\");\n }\n }\n\n /** @internal */\n get _parentPos(): Pos {\n switch (this.parent.type) {\n case \"HasParent\":\n return this.parent.pos;\n\n case \"NoParent\":\n throw new Error(\"Parent key is missing\");\n\n case \"Orphaned\":\n return this.parent.oldPos;\n\n default:\n return assertNever(this.parent, \"Unknown state\");\n }\n }\n\n /** @internal */\n protected get _pool(): ManagedPool | undefined {\n return this.#pool;\n }\n\n get roomId(): string | null {\n return this.#pool ? 
this.#pool.roomId : null;\n }\n\n /** @internal */\n get _id(): string | undefined {\n return this.#id;\n }\n\n /** @internal */\n get parent(): ParentInfo {\n return this.#parent;\n }\n\n /** @internal */\n get _parentKey(): string | null {\n switch (this.parent.type) {\n case \"HasParent\":\n return this.parent.key;\n\n case \"NoParent\":\n return null;\n\n case \"Orphaned\":\n return this.parent.oldKey;\n\n default:\n return assertNever(this.parent, \"Unknown state\");\n }\n }\n\n /** @internal */\n _apply(op: Op, _isLocal: boolean): ApplyResult {\n switch (op.type) {\n case OpCode.DELETE_CRDT: {\n if (this.parent.type === \"HasParent\") {\n return this.parent.node._detachChild(crdtAsLiveNode(this));\n }\n\n return { modified: false };\n }\n }\n\n return { modified: false };\n }\n\n /** @internal */\n _setParentLink(newParentNode: LiveNode, newParentKey: string): void {\n switch (this.parent.type) {\n case \"HasParent\":\n if (this.parent.node !== newParentNode) {\n throw new Error(\"Cannot set parent: node already has a parent\");\n } else {\n // Ignore\n this.#parent = HasParent(newParentNode, newParentKey);\n return;\n }\n\n case \"Orphaned\":\n case \"NoParent\": {\n this.#parent = HasParent(newParentNode, newParentKey);\n return;\n }\n\n default:\n return assertNever(this.parent, \"Unknown state\");\n }\n }\n\n /** @internal */\n _attach(id: string, pool: ManagedPool): void {\n if (this.#id || this.#pool) {\n throw new Error(\"Cannot attach node: already attached\");\n }\n\n pool.addNode(id, crdtAsLiveNode(this));\n\n this.#id = id;\n this.#pool = pool;\n }\n\n /** @internal */\n abstract _attachChild(op: CreateOp, source: OpSource): ApplyResult;\n\n /** @internal */\n _detach(): void {\n if (this.#pool && this.#id) {\n this.#pool.deleteNode(this.#id);\n }\n\n switch (this.parent.type) {\n case \"HasParent\": {\n this.#parent = Orphaned(this.parent.key, this.parent.pos);\n break;\n }\n\n case \"NoParent\": {\n this.#parent = NoParent;\n break;\n }\n\n case \"Orphaned\": {\n // No change needed\n break;\n }\n\n default:\n assertNever(this.parent, \"Unknown state\");\n }\n\n this.#pool = undefined;\n }\n\n /** @internal */\n abstract _detachChild(crdt: LiveNode): ApplyResult;\n\n /**\n * Serializes this CRDT and all its children into a list of creation ops\n * without opIds. Used for creating reverse/undo operations, which get their\n * opIds assigned later when the undo is actually applied.\n *\n * @internal\n */\n abstract _toOps(parentId: string, parentKey: string): CreateOp[];\n\n /**\n * Serializes this CRDT and all its children into a list of creation ops\n * with opIds. Used for forward operations that will be sent over the wire\n * immediately. Each op gets a unique opId for server acknowledgement.\n *\n * @internal\n */\n _toOpsWithOpId(\n parentId: string,\n parentKey: string,\n pool: ManagedPool\n ): ClientWireCreateOp[] {\n return this._toOps(parentId, parentKey).map((op) => ({\n opId: pool.generateOpId(),\n ...op,\n }));\n }\n\n /** @internal */\n abstract _serialize(): SerializedCrdt;\n\n /** This caches the result of the last .toImmutable() call for this Live node. */\n #cachedImmutable?: Immutable;\n\n #cachedTreeNodeKey?: string | number;\n /** This caches the result of the last .toTreeNode() call for this Live node. */\n #cachedTreeNode?: DevTools.LsonTreeNode;\n\n /**\n * @internal\n *\n * Clear the Immutable cache, so that the next call to `.toImmutable()` will\n * recompute the equivalent Immutable value again. 
Call this after every\n * mutation to the Live node.\n */\n invalidate(): void {\n if (\n this.#cachedImmutable !== undefined ||\n this.#cachedTreeNode !== undefined\n ) {\n this.#cachedImmutable = undefined;\n this.#cachedTreeNode = undefined;\n\n if (this.parent.type === \"HasParent\") {\n this.parent.node.invalidate();\n }\n }\n }\n\n /** @internal */\n abstract _toTreeNode(key: string): DevTools.LsonTreeNode;\n\n /**\n * @internal\n *\n * Return an snapshot of this Live tree for use in DevTools.\n */\n toTreeNode(key: string): DevTools.LsonTreeNode {\n if (this.#cachedTreeNode === undefined || this.#cachedTreeNodeKey !== key) {\n this.#cachedTreeNodeKey = key;\n this.#cachedTreeNode = this._toTreeNode(key);\n }\n\n // Return cached version\n return this.#cachedTreeNode;\n }\n\n /** @internal */\n abstract _toImmutable(): Immutable;\n\n /**\n * Return an immutable snapshot of this Live node and its children.\n */\n toImmutable(): Immutable {\n if (this.#cachedImmutable === undefined) {\n this.#cachedImmutable = this._toImmutable();\n }\n\n // Return cached version\n return this.#cachedImmutable;\n }\n\n /**\n * Returns a deep clone of the current LiveStructure, suitable for insertion\n * in the tree elsewhere.\n */\n abstract clone(): Lson;\n}\n","import type { Json, JsonObject } from \"../lib/Json\";\n\nexport type IdTuple<T> = [id: string, value: T];\n\nexport type CrdtType = (typeof CrdtType)[keyof typeof CrdtType];\nexport const CrdtType = Object.freeze({\n OBJECT: 0,\n LIST: 1,\n MAP: 2,\n REGISTER: 3,\n});\n\nexport namespace CrdtType {\n export type OBJECT = typeof CrdtType.OBJECT;\n export type LIST = typeof CrdtType.LIST;\n export type MAP = typeof CrdtType.MAP;\n export type REGISTER = typeof CrdtType.REGISTER;\n}\n\nexport type SerializedCrdt = SerializedRootObject | SerializedChild;\n\nexport type SerializedChild =\n | SerializedObject\n | SerializedList\n | SerializedMap\n | SerializedRegister;\n\nexport type SerializedRootObject = {\n readonly type: CrdtType.OBJECT;\n readonly data: JsonObject;\n\n // Root objects don't have a parent relationship\n readonly parentId?: never;\n readonly parentKey?: never;\n};\n\nexport type SerializedObject = {\n readonly type: CrdtType.OBJECT;\n readonly parentId: string;\n readonly parentKey: string;\n readonly data: JsonObject;\n};\n\nexport type SerializedList = {\n readonly type: CrdtType.LIST;\n readonly parentId: string;\n readonly parentKey: string;\n};\n\nexport type SerializedMap = {\n readonly type: CrdtType.MAP;\n readonly parentId: string;\n readonly parentKey: string;\n};\n\nexport type SerializedRegister = {\n readonly type: CrdtType.REGISTER;\n readonly parentId: string;\n readonly parentKey: string;\n readonly data: Json;\n};\n\nexport type StorageNode = RootStorageNode | ChildStorageNode;\n\nexport type ChildStorageNode =\n | ObjectStorageNode\n | ListStorageNode\n | MapStorageNode\n | RegisterStorageNode;\n\nexport type RootStorageNode = [id: \"root\", value: SerializedRootObject];\nexport type ObjectStorageNode = [id: string, value: SerializedObject];\nexport type ListStorageNode = [id: string, value: SerializedList];\nexport type MapStorageNode = [id: string, value: SerializedMap];\nexport type RegisterStorageNode = [id: string, value: SerializedRegister];\n\nexport type NodeMap = Map<string, SerializedCrdt>;\nexport type NodeStream = Iterable<StorageNode>;\n\nexport function isRootStorageNode(node: StorageNode): node is RootStorageNode {\n return node[0] === \"root\";\n}\n\nexport function isObjectStorageNode(\n node: 
StorageNode\n): node is RootStorageNode | ObjectStorageNode {\n return node[1].type === CrdtType.OBJECT;\n}\n\nexport function isListStorageNode(node: StorageNode): node is ListStorageNode {\n return node[1].type === CrdtType.LIST;\n}\n\nexport function isMapStorageNode(node: StorageNode): node is MapStorageNode {\n return node[1].type === CrdtType.MAP;\n}\n\nexport function isRegisterStorageNode(\n node: StorageNode\n): node is RegisterStorageNode {\n return node[1].type === CrdtType.REGISTER;\n}\n\nexport type CompactNode = CompactRootNode | CompactChildNode;\n\nexport type CompactChildNode =\n | CompactObjectNode\n | CompactListNode\n | CompactMapNode\n | CompactRegisterNode;\n\nexport type CompactRootNode = readonly [id: \"root\", data: JsonObject];\n\nexport type CompactObjectNode = readonly [\n id: string,\n type: CrdtType.OBJECT,\n parentId: string,\n parentKey: string,\n data: JsonObject,\n];\n\nexport type CompactListNode = readonly [\n id: string,\n type: CrdtType.LIST,\n parentId: string,\n parentKey: string,\n];\n\nexport type CompactMapNode = readonly [\n id: string,\n type: CrdtType.MAP,\n parentId: string,\n parentKey: string,\n];\n\nexport type CompactRegisterNode = readonly [\n id: string,\n type: CrdtType.REGISTER,\n parentId: string,\n parentKey: string,\n data: Json,\n];\n\nfunction isCompactRootNode(node: CompactNode): node is CompactRootNode {\n return node[0] === \"root\";\n}\n\nexport function* compactNodesToNodeStream(\n compactNodes: CompactNode[]\n): NodeStream {\n for (const cnode of compactNodes) {\n // Handle root nodes first - they have format [\"root\", data] where data is JsonObject\n if (isCompactRootNode(cnode)) {\n yield [cnode[0], { type: CrdtType.OBJECT, data: cnode[1] }];\n continue;\n }\n\n switch (cnode[1]) {\n case CrdtType.OBJECT:\n // prettier-ignore\n yield [cnode[0], { type: CrdtType.OBJECT, parentId: cnode[2], parentKey: cnode[3], data: cnode[4] }];\n break;\n case CrdtType.LIST:\n // prettier-ignore\n yield [cnode[0], { type: CrdtType.LIST, parentId: cnode[2], parentKey: cnode[3] }];\n break;\n case CrdtType.MAP:\n // prettier-ignore\n yield [cnode[0], { type: CrdtType.MAP, parentId: cnode[2], parentKey: cnode[3] }];\n break;\n case CrdtType.REGISTER:\n // prettier-ignore\n yield [cnode[0], {type: CrdtType.REGISTER, parentId: cnode[2], parentKey: cnode[3], data: cnode[4], }];\n break;\n default:\n // Ignore\n }\n }\n}\n\nexport function* nodeStreamToCompactNodes(\n nodes: NodeStream\n): Iterable<CompactNode> {\n for (const node of nodes) {\n if (isObjectStorageNode(node)) {\n if (isRootStorageNode(node)) {\n const id = node[0];\n const crdt = node[1];\n yield [id, crdt.data];\n } else {\n const id = node[0];\n const crdt = node[1];\n yield [id, CrdtType.OBJECT, crdt.parentId, crdt.parentKey, crdt.data];\n }\n } else if (isListStorageNode(node)) {\n const id = node[0];\n const crdt = node[1];\n yield [id, CrdtType.LIST, crdt.parentId, crdt.parentKey];\n } else if (isMapStorageNode(node)) {\n const id = node[0];\n const crdt = node[1];\n yield [id, CrdtType.MAP, crdt.parentId, crdt.parentKey];\n } else if (isRegisterStorageNode(node)) {\n const id = node[0];\n const crdt = node[1];\n yield [id, CrdtType.REGISTER, crdt.parentId, crdt.parentKey, crdt.data];\n } else {\n // Ignore\n }\n }\n}\n","import type { LiveNode } from \"../crdts/Lson\";\nimport { nn } from \"../lib/assert\";\nimport type { Json } from \"../lib/Json\";\nimport { nanoid } from \"../lib/nanoid\";\nimport { deepClone } from \"../lib/utils\";\nimport type { CreateOp, 
CreateRegisterOp, Op } from \"../protocol/Op\";\nimport { OpCode } from \"../protocol/Op\";\nimport type {\n RegisterStorageNode,\n SerializedRegister,\n} from \"../protocol/StorageNode\";\nimport { CrdtType } from \"../protocol/StorageNode\";\nimport type * as DevTools from \"../types/DevToolsTreeNode\";\nimport type { Immutable } from \"../types/Immutable\";\nimport type { ParentToChildNodeMap } from \"../types/NodeMap\";\nimport type { ApplyResult, ManagedPool } from \"./AbstractCrdt\";\nimport { AbstractCrdt } from \"./AbstractCrdt\";\n\n/**\n * INTERNAL\n */\nexport class LiveRegister<TValue extends Json> extends AbstractCrdt {\n #data: TValue;\n\n constructor(data: TValue) {\n super();\n this.#data = data;\n }\n\n get data(): TValue {\n return this.#data;\n }\n\n /** @internal */\n static _deserialize(\n [id, item]: RegisterStorageNode,\n _parentToChildren: ParentToChildNodeMap,\n pool: ManagedPool\n ): LiveRegister<Json> {\n const register = new LiveRegister(item.data);\n register._attach(id, pool);\n return register;\n }\n\n /** @internal */\n _toOps(parentId: string, parentKey: string): CreateRegisterOp[] {\n if (this._id === undefined) {\n throw new Error(\n \"Cannot serialize register if parentId or parentKey is undefined\"\n );\n }\n\n return [\n {\n type: OpCode.CREATE_REGISTER,\n id: this._id,\n parentId,\n parentKey,\n data: this.data,\n },\n ];\n }\n\n /** @internal */\n _serialize(): SerializedRegister {\n if (this.parent.type !== \"HasParent\") {\n throw new Error(\"Cannot serialize LiveRegister if parent is missing\");\n }\n\n return {\n type: CrdtType.REGISTER,\n parentId: nn(this.parent.node._id, \"Parent node expected to have ID\"),\n parentKey: this.parent.key,\n data: this.data,\n };\n }\n\n /** @internal */\n _attachChild(_op: CreateOp): ApplyResult {\n throw new Error(\"Method not implemented.\");\n }\n\n /** @internal */\n _detachChild(_crdt: LiveNode): ApplyResult {\n throw new Error(\"Method not implemented.\");\n }\n\n /** @internal */\n _apply(op: Op, isLocal: boolean): ApplyResult {\n return super._apply(op, isLocal);\n }\n\n /** @internal */\n _toTreeNode(key: string): DevTools.LsonTreeNode {\n return {\n type: \"Json\",\n id: this._id ?? 
nanoid(),\n key,\n payload: this.#data,\n };\n }\n\n /** @internal */\n _toImmutable(): Immutable {\n return this.#data;\n }\n\n clone(): TValue {\n return deepClone(this.data);\n }\n}\n","import { nn } from \"../lib/assert\";\nimport { nanoid } from \"../lib/nanoid\";\nimport type { Pos } from \"../lib/position\";\nimport { asPos, makePosition } from \"../lib/position\";\nimport { SortedList } from \"../lib/SortedList\";\nimport type { ClientWireOp, CreateListOp, CreateOp, Op } from \"../protocol/Op\";\nimport { OpCode } from \"../protocol/Op\";\nimport type { ListStorageNode, SerializedList } from \"../protocol/StorageNode\";\nimport { CrdtType } from \"../protocol/StorageNode\";\nimport type * as DevTools from \"../types/DevToolsTreeNode\";\nimport type { ParentToChildNodeMap } from \"../types/NodeMap\";\nimport type { ApplyResult, ManagedPool } from \"./AbstractCrdt\";\nimport { AbstractCrdt, OpSource } from \"./AbstractCrdt\";\nimport {\n creationOpToLiveNode,\n deserialize,\n liveNodeToLson,\n lsonToLiveNode,\n} from \"./liveblocks-helpers\";\nimport { LiveRegister } from \"./LiveRegister\";\nimport type { LiveNode, Lson } from \"./Lson\";\nimport type { ToImmutable } from \"./utils\";\n\nexport type LiveListUpdateDelta =\n | { type: \"insert\"; index: number; item: Lson }\n | { type: \"delete\"; index: number; deletedItem: Lson }\n | { type: \"move\"; index: number; previousIndex: number; item: Lson }\n | { type: \"set\"; index: number; item: Lson };\n\n/**\n * A LiveList notification that is sent in-client to any subscribers whenever\n * one or more of the items inside the LiveList instance have changed.\n */\nexport type LiveListUpdates<TItem extends Lson> = {\n type: \"LiveList\";\n node: LiveList<TItem>;\n updates: LiveListUpdateDelta[];\n};\n\nfunction childNodeLt(a: LiveNode, b: LiveNode): boolean {\n return a._parentPos < b._parentPos;\n}\n\n/**\n * The LiveList class represents an ordered collection of items that is synchronized across clients.\n */\nexport class LiveList<TItem extends Lson> extends AbstractCrdt {\n #items: SortedList<LiveNode>;\n #implicitlyDeletedItems: WeakSet<LiveNode>;\n #unacknowledgedSets: Map<string, string>;\n\n constructor(items: TItem[]) {\n super();\n this.#implicitlyDeletedItems = new WeakSet();\n this.#unacknowledgedSets = new Map();\n\n const nodes: LiveNode[] = [];\n let lastPos: Pos | undefined;\n for (const item of items) {\n const pos = makePosition(lastPos);\n const node = lsonToLiveNode(item);\n node._setParentLink(this, pos);\n nodes.push(node);\n lastPos = pos;\n }\n this.#items = SortedList.fromAlreadySorted(nodes, childNodeLt);\n }\n\n /** @internal */\n static _deserialize(\n [id, _]: ListStorageNode,\n parentToChildren: ParentToChildNodeMap,\n pool: ManagedPool\n ): LiveList<Lson> {\n const list = new LiveList([]);\n list._attach(id, pool);\n\n const children = parentToChildren.get(id);\n if (children === undefined) {\n return list;\n }\n\n for (const node of children) {\n const crdt = node[1];\n const child = deserialize(node, parentToChildren, pool);\n\n child._setParentLink(list, crdt.parentKey);\n list.#insert(child);\n }\n\n return list;\n }\n\n /**\n * @internal\n * This function assumes that the resulting ops will be sent to the server if they have an 'opId'\n * so we mutate _unacknowledgedSets to avoid potential flickering\n * https://github.com/liveblocks/liveblocks/pull/1177\n *\n * This is quite unintuitive and should disappear as soon as\n * we introduce an explicit LiveList.Set operation\n */\n _toOps(parentId: string, 
parentKey: string): CreateOp[] {\n if (this._id === undefined) {\n throw new Error(\"Cannot serialize item is not attached\");\n }\n\n const ops: CreateOp[] = [];\n const op: CreateListOp = {\n id: this._id,\n type: OpCode.CREATE_LIST,\n parentId,\n parentKey,\n };\n\n ops.push(op);\n\n for (const item of this.#items) {\n const parentKey = item._getParentKeyOrThrow();\n const childOps = HACK_addIntentAndDeletedIdToOperation(\n item._toOps(this._id, parentKey),\n undefined\n );\n ops.push(...childOps);\n }\n\n return ops;\n }\n\n /**\n * Inserts a new child into the list in the correct location (binary search\n * finds correct position efficiently). Returns the insertion index.\n */\n #insert(childNode: LiveNode): number {\n const index = this.#items.add(childNode);\n this.invalidate();\n return index;\n }\n\n /**\n * Updates an item's position and repositions it in the sorted list.\n * Encapsulates the remove -> mutate -> add cycle needed when changing sort keys.\n *\n * IMPORTANT: Item must exist in this list. List count remains unchanged.\n */\n #updateItemPosition(item: LiveNode, newKey: string): void {\n item._setParentLink(this, newKey);\n this.#items.reposition(item);\n this.invalidate();\n }\n\n /**\n * Updates an item's position by index. Safer than #updateItemPosition when you have\n * an index, as it ensures the item exists and is from this list.\n */\n #updateItemPositionAt(index: number, newKey: string): void {\n const item = nn(this.#items.at(index));\n this.#updateItemPosition(item, newKey);\n }\n\n /** @internal */\n _indexOfPosition(position: string): number {\n return this.#items.findIndex(\n (item) => item._getParentKeyOrThrow() === position\n );\n }\n\n /** @internal */\n _attach(id: string, pool: ManagedPool): void {\n super._attach(id, pool);\n\n for (const item of this.#items) {\n item._attach(pool.generateId(), pool);\n }\n }\n\n /** @internal */\n _detach(): void {\n super._detach();\n\n for (const item of this.#items) {\n item._detach();\n }\n }\n\n #applySetRemote(op: CreateOp): ApplyResult {\n if (this._pool === undefined) {\n throw new Error(\"Can't attach child if managed pool is not present\");\n }\n\n const { id, parentKey: key } = op;\n const child = creationOpToLiveNode(op);\n child._attach(id, this._pool);\n child._setParentLink(this, key);\n\n const deletedId = op.deletedId;\n\n const indexOfItemWithSamePosition = this._indexOfPosition(key);\n\n // If there is already an item at this position\n if (indexOfItemWithSamePosition !== -1) {\n const itemWithSamePosition = nn(\n this.#items.removeAt(indexOfItemWithSamePosition)\n );\n\n // No conflict, the item that is being replaced is the same that was deleted on the sender\n if (itemWithSamePosition._id === deletedId) {\n itemWithSamePosition._detach();\n\n // Replace the existing item with the newly created item\n this.#items.add(child);\n\n return {\n modified: makeUpdate(this, [\n setDelta(indexOfItemWithSamePosition, child),\n ]),\n reverse: [],\n };\n } else {\n // Item at position to be replaced is different from server, so we\n // remember it in case we need to restore it later.\n // This scenario can happen if an other item has been put at this position\n // while getting the acknowledgement of the set (move, insert or set)\n this.#implicitlyDeletedItems.add(itemWithSamePosition);\n\n // Replace the existing item with the newly created item without sorting the list\n this.#items.remove(itemWithSamePosition);\n this.#items.add(child);\n\n const delta: LiveListUpdateDelta[] = [\n 
setDelta(indexOfItemWithSamePosition, child),\n ];\n\n // Even if we implicitly delete the item at the set position\n // We still need to delete the item that was orginaly deleted by the set\n const deleteDelta = this.#detachItemAssociatedToSetOperation(\n op.deletedId\n );\n\n if (deleteDelta) {\n delta.push(deleteDelta);\n }\n\n return {\n modified: makeUpdate(this, delta),\n reverse: [],\n };\n }\n } else {\n // Item at position to be replaced doesn't exist\n const updates: LiveListUpdateDelta[] = [];\n const deleteDelta = this.#detachItemAssociatedToSetOperation(\n op.deletedId\n );\n if (deleteDelta) {\n updates.push(deleteDelta);\n }\n\n this.#insert(child);\n\n updates.push(insertDelta(this._indexOfPosition(key), child));\n\n return {\n reverse: [],\n modified: makeUpdate(this, updates),\n };\n }\n }\n\n #applySetAck(op: CreateOp): ApplyResult {\n if (this._pool === undefined) {\n throw new Error(\"Can't attach child if managed pool is not present\");\n }\n\n const delta: LiveListUpdateDelta[] = [];\n\n // Deleted item can be re-inserted by remote undo/redo\n const deletedDelta = this.#detachItemAssociatedToSetOperation(op.deletedId);\n if (deletedDelta) {\n delta.push(deletedDelta);\n }\n\n const unacknowledgedOpId = this.#unacknowledgedSets.get(op.parentKey);\n\n if (unacknowledgedOpId !== undefined) {\n if (unacknowledgedOpId !== op.opId) {\n return delta.length === 0\n ? { modified: false }\n : { modified: makeUpdate(this, delta), reverse: [] };\n } else {\n this.#unacknowledgedSets.delete(op.parentKey);\n }\n }\n\n const indexOfItemWithSamePosition = this._indexOfPosition(op.parentKey);\n\n const existingItem = this.#items.find((item) => item._id === op.id);\n\n // If item already exists...\n if (existingItem !== undefined) {\n // ...and if it's at the right position\n if (existingItem._parentKey === op.parentKey) {\n // ... do nothing\n return {\n modified: delta.length > 0 ? makeUpdate(this, delta) : false,\n reverse: [],\n };\n }\n\n // Item exists but not at the right position (local move after set)\n if (indexOfItemWithSamePosition !== -1) {\n const itemAtPosition = nn(\n this.#items.removeAt(indexOfItemWithSamePosition)\n );\n this.#implicitlyDeletedItems.add(itemAtPosition);\n delta.push(deleteDelta(indexOfItemWithSamePosition, itemAtPosition));\n }\n\n const prevIndex = this.#items.findIndex((item) => item === existingItem);\n this.#updateItemPosition(existingItem, op.parentKey);\n const newIndex = this.#items.findIndex((item) => item === existingItem);\n if (newIndex !== prevIndex) {\n delta.push(moveDelta(prevIndex, newIndex, existingItem));\n }\n\n return {\n modified: delta.length > 0 ? makeUpdate(this, delta) : false,\n reverse: [],\n };\n } else {\n // Item associated to the set ack does not exist either deleted localy or via remote undo/redo\n const orphan = this._pool.getNode(op.id);\n if (orphan && this.#implicitlyDeletedItems.has(orphan)) {\n // Reattach orphan at the new position\n orphan._setParentLink(this, op.parentKey);\n // And delete it from the orphan cache\n this.#implicitlyDeletedItems.delete(orphan);\n\n const recreatedItemIndex = this.#insert(orphan);\n return {\n modified: makeUpdate(this, [\n // If there is an item at this position, update is a set, else it's an insert\n indexOfItemWithSamePosition === -1\n ? 
insertDelta(recreatedItemIndex, orphan)\n : setDelta(recreatedItemIndex, orphan),\n ...delta,\n ]),\n reverse: [],\n };\n } else {\n if (indexOfItemWithSamePosition !== -1) {\n nn(this.#items.removeAt(indexOfItemWithSamePosition));\n }\n\n const { newItem, newIndex } = this.#createAttachItemAndSort(\n op,\n op.parentKey\n );\n\n return {\n modified: makeUpdate(this, [\n // If there is an item at this position, update is a set, else it's an insert\n indexOfItemWithSamePosition === -1\n ? insertDelta(newIndex, newItem)\n : setDelta(newIndex, newItem),\n ...delta,\n ]),\n reverse: [],\n };\n }\n }\n }\n\n /**\n * Returns the update delta of the deletion or null\n */\n #detachItemAssociatedToSetOperation(\n deletedId?: string\n ): LiveListUpdateDelta | null {\n if (deletedId === undefined || this._pool === undefined) {\n return null;\n }\n\n const deletedItem = this._pool.getNode(deletedId);\n if (deletedItem === undefined) {\n return null;\n }\n\n const result = this._detachChild(deletedItem);\n if (result.modified === false) {\n return null;\n }\n\n return result.modified.updates[0];\n }\n\n #applyRemoteInsert(op: CreateOp): ApplyResult {\n if (this._pool === undefined) {\n throw new Error(\"Can't attach child if managed pool is not present\");\n }\n\n const key = asPos(op.parentKey);\n\n const existingItemIndex = this._indexOfPosition(key);\n\n if (existingItemIndex !== -1) {\n // If change is remote => assign a temporary position to existing child until we get the fix from the backend\n this.#shiftItemPosition(existingItemIndex, key);\n }\n\n const { newItem, newIndex } = this.#createAttachItemAndSort(op, key);\n\n // TODO: add move update?\n return {\n modified: makeUpdate(this, [insertDelta(newIndex, newItem)]),\n reverse: [],\n };\n }\n\n #applyInsertAck(op: CreateOp): ApplyResult {\n const existingItem = this.#items.find((item) => item._id === op.id);\n const key = asPos(op.parentKey);\n\n const itemIndexAtPosition = this._indexOfPosition(key);\n\n if (existingItem) {\n if (existingItem._parentKey === key) {\n // Normal case, no modification\n return {\n modified: false,\n };\n } else {\n const oldPositionIndex = this.#items.findIndex(\n (item) => item === existingItem\n );\n if (itemIndexAtPosition !== -1) {\n this.#shiftItemPosition(itemIndexAtPosition, key);\n }\n\n this.#updateItemPosition(existingItem, key);\n\n const newIndex = this._indexOfPosition(key);\n\n if (newIndex === oldPositionIndex) {\n return { modified: false };\n }\n\n return {\n modified: makeUpdate(this, [\n moveDelta(oldPositionIndex, newIndex, existingItem),\n ]),\n reverse: [],\n };\n }\n } else {\n const orphan = nn(this._pool).getNode(op.id);\n if (orphan && this.#implicitlyDeletedItems.has(orphan)) {\n // Implicit delete after set\n orphan._setParentLink(this, key);\n this.#implicitlyDeletedItems.delete(orphan);\n\n this.#insert(orphan);\n\n const newIndex = this._indexOfPosition(key);\n\n return {\n modified: makeUpdate(this, [insertDelta(newIndex, orphan)]),\n reverse: [],\n };\n } else {\n if (itemIndexAtPosition !== -1) {\n this.#shiftItemPosition(itemIndexAtPosition, key);\n }\n\n const { newItem, newIndex } = this.#createAttachItemAndSort(op, key);\n\n return {\n modified: makeUpdate(this, [insertDelta(newIndex, newItem)]),\n reverse: [],\n };\n }\n }\n }\n\n #applyInsertUndoRedo(op: CreateOp): ApplyResult {\n const { id, parentKey: key } = op;\n const child = creationOpToLiveNode(op);\n\n if (this._pool?.getNode(id) !== undefined) {\n return { modified: false };\n }\n\n child._attach(id, 
nn(this._pool));\n child._setParentLink(this, key);\n\n const existingItemIndex = this._indexOfPosition(key);\n\n let newKey = key;\n\n if (existingItemIndex !== -1) {\n const before = this.#items.at(existingItemIndex)?._parentPos;\n const after = this.#items.at(existingItemIndex + 1)?._parentPos;\n\n newKey = makePosition(before, after);\n child._setParentLink(this, newKey);\n }\n\n this.#insert(child);\n\n const newIndex = this._indexOfPosition(newKey);\n\n return {\n modified: makeUpdate(this, [insertDelta(newIndex, child)]),\n reverse: [{ type: OpCode.DELETE_CRDT, id }],\n };\n }\n\n #applySetUndoRedo(op: CreateOp): ApplyResult {\n const { id, parentKey: key } = op;\n const child = creationOpToLiveNode(op);\n\n if (this._pool?.getNode(id) !== undefined) {\n return { modified: false };\n }\n\n this.#unacknowledgedSets.set(key, nn(op.opId));\n\n const indexOfItemWithSameKey = this._indexOfPosition(key);\n\n child._attach(id, nn(this._pool));\n child._setParentLink(this, key);\n\n const newKey = key;\n\n // If there is already an item at this position\n if (indexOfItemWithSameKey !== -1) {\n // TODO: Should we add this item to implictly deleted item?\n const existingItem = this.#items.at(indexOfItemWithSameKey)!; // eslint-disable-line no-restricted-syntax\n existingItem._detach();\n\n this.#items.remove(existingItem);\n this.#items.add(child);\n\n const reverse = HACK_addIntentAndDeletedIdToOperation(\n existingItem._toOps(nn(this._id), key),\n op.id\n );\n\n const delta = [setDelta(indexOfItemWithSameKey, child)];\n const deletedDelta = this.#detachItemAssociatedToSetOperation(\n op.deletedId\n );\n if (deletedDelta) {\n delta.push(deletedDelta);\n }\n\n return {\n modified: makeUpdate(this, delta),\n reverse,\n };\n } else {\n this.#insert(child);\n\n // TODO: Use delta\n this.#detachItemAssociatedToSetOperation(op.deletedId);\n\n const newIndex = this._indexOfPosition(newKey);\n\n return {\n reverse: [{ type: OpCode.DELETE_CRDT, id }],\n modified: makeUpdate(this, [insertDelta(newIndex, child)]),\n };\n }\n }\n\n /** @internal */\n _attachChild(op: CreateOp, source: OpSource): ApplyResult {\n if (this._pool === undefined) {\n throw new Error(\"Can't attach child if managed pool is not present\");\n }\n\n let result: ApplyResult;\n\n if (op.intent === \"set\") {\n if (source === OpSource.THEIRS) {\n result = this.#applySetRemote(op);\n } else if (source === OpSource.OURS) {\n result = this.#applySetAck(op);\n } else {\n result = this.#applySetUndoRedo(op);\n }\n } else {\n if (source === OpSource.THEIRS) {\n result = this.#applyRemoteInsert(op);\n } else if (source === OpSource.OURS) {\n result = this.#applyInsertAck(op);\n } else {\n result = this.#applyInsertUndoRedo(op);\n }\n }\n\n if (result.modified !== false) {\n this.invalidate();\n }\n\n return result;\n }\n\n /** @internal */\n _detachChild(\n child: LiveNode\n ): { reverse: Op[]; modified: LiveListUpdates<TItem> } | { modified: false } {\n if (child) {\n const parentKey = nn(child._parentKey);\n const reverse = child._toOps(nn(this._id), parentKey);\n\n const indexToDelete = this.#items.findIndex((item) => item === child);\n\n if (indexToDelete === -1) {\n return {\n modified: false,\n };\n }\n\n const previousNode = this.#items.at(indexToDelete)!; // eslint-disable-line no-restricted-syntax\n this.#items.remove(child);\n this.invalidate();\n\n child._detach();\n\n return {\n modified: makeUpdate(this, [deleteDelta(indexToDelete, previousNode)]),\n reverse,\n };\n }\n\n return { modified: false };\n }\n\n 
#applySetChildKeyRemote(newKey: Pos, child: LiveNode): ApplyResult {\n if (this.#implicitlyDeletedItems.has(child)) {\n this.#implicitlyDeletedItems.delete(child);\n\n child._setParentLink(this, newKey);\n const newIndex = this.#insert(child);\n\n // TODO: Shift existing item?\n return {\n modified: makeUpdate(this, [insertDelta(newIndex, child)]),\n reverse: [],\n };\n }\n\n const previousKey = child._parentKey;\n\n if (newKey === previousKey) {\n return {\n modified: false,\n };\n }\n\n // TODO: should we look at orphan\n const existingItemIndex = this._indexOfPosition(newKey);\n\n // Normal case\n if (existingItemIndex === -1) {\n const previousIndex = this.#items.findIndex((item) => item === child);\n this.#updateItemPosition(child, newKey);\n const newIndex = this.#items.findIndex((item) => item === child);\n\n if (newIndex === previousIndex) {\n return {\n modified: false,\n };\n }\n\n return {\n modified: makeUpdate(this, [moveDelta(previousIndex, newIndex, child)]),\n reverse: [],\n };\n } else {\n this.#updateItemPositionAt(\n existingItemIndex,\n makePosition(newKey, this.#items.at(existingItemIndex + 1)?._parentPos)\n );\n\n const previousIndex = this.#items.findIndex((item) => item === child);\n this.#updateItemPosition(child, newKey);\n const newIndex = this.#items.findIndex((item) => item === child);\n\n if (newIndex === previousIndex) {\n return {\n modified: false,\n };\n }\n\n return {\n modified: makeUpdate(this, [moveDelta(previousIndex, newIndex, child)]),\n reverse: [],\n };\n }\n }\n\n #applySetChildKeyAck(newKey: Pos, child: LiveNode): ApplyResult {\n const previousKey = nn(child._parentKey);\n\n if (this.#implicitlyDeletedItems.has(child)) {\n const existingItemIndex = this._indexOfPosition(newKey);\n\n this.#implicitlyDeletedItems.delete(child);\n\n if (existingItemIndex !== -1) {\n const existingItem = this.#items.at(existingItemIndex)!; // eslint-disable-line no-restricted-syntax\n existingItem._setParentLink(\n this,\n makePosition(\n newKey,\n this.#items.at(existingItemIndex + 1)?._parentPos\n )\n );\n this.#items.reposition(existingItem);\n }\n\n child._setParentLink(this, newKey);\n const newIndex = this.#insert(child);\n return {\n modified: makeUpdate(this, [insertDelta(newIndex, child)]),\n reverse: [],\n };\n } else {\n if (newKey === previousKey) {\n return {\n modified: false,\n };\n }\n\n // At this point, it means that the item has been moved before receiving the ack\n // so we replace it at the right position\n\n const previousIndex = this.#items.findIndex((item) => item === child);\n\n const existingItemIndex = this._indexOfPosition(newKey);\n\n if (existingItemIndex !== -1) {\n this.#updateItemPositionAt(\n existingItemIndex,\n makePosition(\n newKey,\n this.#items.at(existingItemIndex + 1)?._parentPos\n )\n );\n }\n\n this.#updateItemPosition(child, newKey);\n\n const newIndex = this.#items.findIndex((item) => item === child);\n\n if (previousIndex === newIndex) {\n // parentKey changed but final position in the list didn't\n return {\n modified: false,\n };\n } else {\n return {\n modified: makeUpdate(this, [\n moveDelta(previousIndex, newIndex, child),\n ]),\n reverse: [],\n };\n }\n }\n }\n\n #applySetChildKeyUndoRedo(newKey: Pos, child: LiveNode): ApplyResult {\n const previousKey = nn(child._parentKey);\n\n const previousIndex = this.#items.findIndex((item) => item === child);\n const existingItemIndex = this._indexOfPosition(newKey);\n\n // If position is occupied, find a free position for item being moved\n let actualNewKey = newKey;\n if 
(existingItemIndex !== -1) {\n // Find a free position near the desired position\n actualNewKey = makePosition(\n newKey,\n this.#items.at(existingItemIndex + 1)?._parentPos\n );\n }\n\n this.#updateItemPosition(child, actualNewKey);\n\n const newIndex = this.#items.findIndex((item) => item === child);\n\n if (previousIndex === newIndex) {\n return {\n modified: false,\n };\n }\n\n return {\n modified: makeUpdate(this, [moveDelta(previousIndex, newIndex, child)]),\n reverse: [\n {\n type: OpCode.SET_PARENT_KEY,\n id: nn(child._id),\n parentKey: previousKey,\n },\n ],\n };\n }\n\n /** @internal */\n _setChildKey(newKey: Pos, child: LiveNode, source: OpSource): ApplyResult {\n if (source === OpSource.THEIRS) {\n return this.#applySetChildKeyRemote(newKey, child);\n } else if (source === OpSource.OURS) {\n return this.#applySetChildKeyAck(newKey, child);\n } else {\n return this.#applySetChildKeyUndoRedo(newKey, child);\n }\n }\n\n /** @internal */\n _apply(op: Op, isLocal: boolean): ApplyResult {\n return super._apply(op, isLocal);\n }\n\n /** @internal */\n _serialize(): SerializedList {\n if (this.parent.type !== \"HasParent\") {\n throw new Error(\"Cannot serialize LiveList if parent is missing\");\n }\n\n return {\n type: CrdtType.LIST,\n parentId: nn(this.parent.node._id, \"Parent node expected to have ID\"),\n parentKey: this.parent.key,\n };\n }\n\n /**\n * Returns the number of elements.\n */\n get length(): number {\n return this.#items.length;\n }\n\n /**\n * Adds one element to the end of the LiveList.\n * @param element The element to add to the end of the LiveList.\n */\n push(element: TItem): void {\n this._pool?.assertStorageIsWritable();\n return this.insert(element, this.length);\n }\n\n /**\n * Inserts one element at a specified index.\n * @param element The element to insert.\n * @param index The index at which you want to insert the element.\n */\n insert(element: TItem, index: number): void {\n this._pool?.assertStorageIsWritable();\n if (index < 0 || index > this.#items.length) {\n throw new Error(\n `Cannot insert list item at index \"\u001d${index}\". 
index should be between 0 and ${this.#items.length}`\n );\n }\n\n const before = this.#items.at(index - 1)?._parentPos;\n const after = this.#items.at(index)?._parentPos;\n\n const position = makePosition(before, after);\n\n const value = lsonToLiveNode(element);\n value._setParentLink(this, position);\n\n this.#insert(value);\n\n if (this._pool && this._id) {\n const id = this._pool.generateId();\n value._attach(id, this._pool);\n\n this._pool.dispatch(\n value._toOpsWithOpId(this._id, position, this._pool),\n [{ type: OpCode.DELETE_CRDT, id }],\n new Map<string, LiveListUpdates<TItem>>([\n [this._id, makeUpdate(this, [insertDelta(index, value)])],\n ])\n );\n }\n }\n\n /**\n * Move one element from one index to another.\n * @param index The index of the element to move\n * @param targetIndex The index where the element should be after moving.\n */\n move(index: number, targetIndex: number): void {\n this._pool?.assertStorageIsWritable();\n if (targetIndex < 0) {\n throw new Error(\"targetIndex cannot be less than 0\");\n }\n\n if (targetIndex >= this.#items.length) {\n throw new Error(\n \"targetIndex cannot be greater or equal than the list length\"\n );\n }\n\n if (index < 0) {\n throw new Error(\"index cannot be less than 0\");\n }\n\n if (index >= this.#items.length) {\n throw new Error(\"index cannot be greater or equal than the list length\");\n }\n\n let beforePosition = null;\n let afterPosition = null;\n\n if (index < targetIndex) {\n afterPosition =\n targetIndex === this.#items.length - 1\n ? undefined\n : this.#items.at(targetIndex + 1)?._parentPos;\n beforePosition = this.#items.at(targetIndex)!._parentPos; // eslint-disable-line no-restricted-syntax\n } else {\n afterPosition = this.#items.at(targetIndex)!._parentPos; // eslint-disable-line no-restricted-syntax\n beforePosition =\n targetIndex === 0\n ? undefined\n : this.#items.at(targetIndex - 1)?._parentPos;\n }\n\n const position = makePosition(beforePosition, afterPosition);\n\n const item = this.#items.at(index)!; // eslint-disable-line no-restricted-syntax\n const previousPosition = item._getParentKeyOrThrow();\n this.#updateItemPositionAt(index, position);\n\n if (this._pool && this._id) {\n const storageUpdates = new Map<string, LiveListUpdates<TItem>>([\n [this._id, makeUpdate(this, [moveDelta(index, targetIndex, item)])],\n ]);\n\n this._pool.dispatch(\n [\n {\n type: OpCode.SET_PARENT_KEY,\n id: nn(item._id),\n opId: this._pool.generateOpId(),\n parentKey: position,\n },\n ],\n [\n {\n type: OpCode.SET_PARENT_KEY,\n id: nn(item._id),\n parentKey: previousPosition,\n },\n ],\n storageUpdates\n );\n }\n }\n\n /**\n * Deletes an element at the specified index\n * @param index The index of the element to delete\n */\n delete(index: number): void {\n this._pool?.assertStorageIsWritable();\n if (index < 0 || index >= this.#items.length) {\n throw new Error(\n `Cannot delete list item at index \"${index}\". 
index should be between 0 and ${\n this.#items.length - 1\n }`\n );\n }\n\n const item = this.#items.at(index)!; // eslint-disable-line no-restricted-syntax\n item._detach();\n this.#items.remove(item);\n this.invalidate();\n\n if (this._pool) {\n const childRecordId = item._id;\n if (childRecordId) {\n const storageUpdates = new Map<string, LiveListUpdates<TItem>>();\n storageUpdates.set(\n nn(this._id),\n makeUpdate(this, [deleteDelta(index, item)])\n );\n\n this._pool.dispatch(\n [\n {\n id: childRecordId,\n opId: this._pool.generateOpId(),\n type: OpCode.DELETE_CRDT,\n },\n ],\n item._toOps(nn(this._id), item._getParentKeyOrThrow()),\n storageUpdates\n );\n }\n }\n }\n\n clear(): void {\n this._pool?.assertStorageIsWritable();\n if (this._pool) {\n const ops: ClientWireOp[] = [];\n const reverseOps: Op[] = [];\n\n const updateDelta: LiveListUpdateDelta[] = [];\n\n for (const item of this.#items) {\n item._detach();\n const childId = item._id;\n if (childId) {\n ops.push({\n type: OpCode.DELETE_CRDT,\n id: childId,\n opId: this._pool.generateOpId(),\n });\n reverseOps.push(\n ...item._toOps(nn(this._id), item._getParentKeyOrThrow())\n );\n\n // Index is always 0 because updates are applied one after another\n // when applied on an immutable state\n updateDelta.push(deleteDelta(0, item));\n }\n }\n\n this.#items.clear();\n this.invalidate();\n\n const storageUpdates = new Map<string, LiveListUpdates<TItem>>();\n storageUpdates.set(nn(this._id), makeUpdate(this, updateDelta));\n\n this._pool.dispatch(ops, reverseOps, storageUpdates);\n } else {\n for (const item of this.#items) {\n item._detach();\n }\n this.#items.clear();\n this.invalidate();\n }\n }\n\n set(index: number, item: TItem): void {\n this._pool?.assertStorageIsWritable();\n if (index < 0 || index >= this.#items.length) {\n throw new Error(\n `Cannot set list item at index \"\u001d${index}\". index should be between 0 and ${\n this.#items.length - 1\n }`\n );\n }\n\n const existingItem = this.#items.at(index)!; // eslint-disable-line no-restricted-syntax\n const position = existingItem._getParentKeyOrThrow();\n\n const existingId = existingItem._id;\n existingItem._detach();\n\n const value = lsonToLiveNode(item);\n value._setParentLink(this, position);\n this.#items.remove(existingItem);\n this.#items.add(value);\n this.invalidate();\n\n if (this._pool && this._id) {\n const id = this._pool.generateId();\n value._attach(id, this._pool);\n\n const storageUpdates = new Map<string, LiveListUpdates<TItem>>();\n storageUpdates.set(this._id, makeUpdate(this, [setDelta(index, value)]));\n\n const ops = HACK_addIntentAndDeletedIdToOperation(\n value._toOpsWithOpId(this._id, position, this._pool),\n existingId\n );\n this.#unacknowledgedSets.set(position, nn(ops[0].opId));\n const reverseOps = HACK_addIntentAndDeletedIdToOperation(\n existingItem._toOps(this._id, position),\n id\n );\n\n this._pool.dispatch(ops, reverseOps, storageUpdates);\n }\n }\n\n /**\n * Returns an Array of all the elements in the LiveList.\n */\n toArray(): TItem[] {\n return Array.from(this.#items, (entry) => liveNodeToLson(entry) as TItem);\n // ^^^^^^^^\n // FIXME! This isn't safe.\n }\n\n /**\n * Tests whether all elements pass the test implemented by the provided function.\n * @param predicate Function to test for each element, taking two arguments (the element and its index).\n * @returns true if the predicate function returns a truthy value for every element. 
Otherwise, false.\n */\n every(predicate: (value: TItem, index: number) => unknown): boolean {\n return this.toArray().every(predicate);\n }\n\n /**\n * Creates an array with all elements that pass the test implemented by the provided function.\n * @param predicate Function to test each element of the LiveList. Return a value that coerces to true to keep the element, or to false otherwise.\n * @returns An array with the elements that pass the test.\n */\n filter(predicate: (value: TItem, index: number) => unknown): TItem[] {\n return this.toArray().filter(predicate);\n }\n\n /**\n * Returns the first element that satisfies the provided testing function.\n * @param predicate Function to execute on each value.\n * @returns The value of the first element in the LiveList that satisfies the provided testing function. Otherwise, undefined is returned.\n */\n find(predicate: (value: TItem, index: number) => unknown): TItem | undefined {\n return this.toArray().find(predicate);\n }\n\n /**\n * Returns the index of the first element in the LiveList that satisfies the provided testing function.\n * @param predicate Function to execute on each value until the function returns true, indicating that the satisfying element was found.\n * @returns The index of the first element in the LiveList that passes the test. Otherwise, -1.\n */\n findIndex(predicate: (value: TItem, index: number) => unknown): number {\n return this.toArray().findIndex(predicate);\n }\n\n /**\n * Executes a provided function once for each element.\n * @param callbackfn Function to execute on each element.\n */\n forEach(callbackfn: (value: TItem, index: number) => void): void {\n return this.toArray().forEach(callbackfn);\n }\n\n /**\n * Get the element at the specified index.\n * @param index The index on the element to get.\n * @returns The element at the specified index or undefined.\n */\n get(index: number): TItem | undefined {\n if (index < 0 || index >= this.#items.length) {\n return undefined;\n }\n\n const item = this.#items.at(index);\n return item ? (liveNodeToLson(item) as TItem | undefined) : undefined;\n // ^^^^^^^^^^^^^^^^^\n // FIXME! This isn't safe.\n }\n\n /**\n * Returns the first index at which a given element can be found in the LiveList, or -1 if it is not present.\n * @param searchElement Element to locate.\n * @param fromIndex The index to start the search at.\n * @returns The first index of the element in the LiveList; -1 if not found.\n */\n indexOf(searchElement: TItem, fromIndex?: number): number {\n return this.toArray().indexOf(searchElement, fromIndex);\n }\n\n /**\n * Returns the last index at which a given element can be found in the LiveList, or -1 if it is not present. The LiveLsit is searched backwards, starting at fromIndex.\n * @param searchElement Element to locate.\n * @param fromIndex The index at which to start searching backwards.\n * @returns\n */\n lastIndexOf(searchElement: TItem, fromIndex?: number): number {\n return this.toArray().lastIndexOf(searchElement, fromIndex);\n }\n\n /**\n * Creates an array populated with the results of calling a provided function on every element.\n * @param callback Function that is called for every element.\n * @returns An array with each element being the result of the callback function.\n */\n map<U>(callback: (value: TItem, index: number) => U): U[] {\n const result: U[] = [];\n let i = 0;\n for (const entry of this.#items) {\n result.push(\n callback(\n liveNodeToLson(entry) as TItem,\n // ^^^^^^^^\n // FIXME! 
This isn't safe.\n i\n )\n );\n i++;\n }\n return result;\n }\n\n /**\n * Tests whether at least one element in the LiveList passes the test implemented by the provided function.\n * @param predicate Function to test for each element.\n * @returns true if the callback function returns a truthy value for at least one element. Otherwise, false.\n */\n some(predicate: (value: TItem, index: number) => unknown): boolean {\n return this.toArray().some(predicate);\n }\n\n [Symbol.iterator](): IterableIterator<TItem> {\n return new LiveListIterator(this.#items);\n }\n\n #createAttachItemAndSort(\n op: CreateOp,\n key: string\n ): {\n newItem: LiveNode;\n newIndex: number;\n } {\n const newItem = creationOpToLiveNode(op);\n\n newItem._attach(op.id, nn(this._pool));\n newItem._setParentLink(this, key);\n\n this.#insert(newItem);\n\n const newIndex = this._indexOfPosition(key);\n\n return { newItem, newIndex };\n }\n\n #shiftItemPosition(index: number, key: Pos) {\n const shiftedPosition = makePosition(\n key,\n this.#items.length > index + 1\n ? this.#items.at(index + 1)?._parentPos\n : undefined\n );\n\n this.#updateItemPositionAt(index, shiftedPosition);\n }\n\n /** @internal */\n _toTreeNode(key: string): DevTools.LsonTreeNode {\n const payload: DevTools.LsonTreeNode[] = [];\n let index = 0;\n for (const item of this.#items) {\n payload.push(item.toTreeNode(index.toString()));\n index++;\n }\n return {\n type: \"LiveList\",\n id: this._id ?? nanoid(),\n key,\n payload,\n };\n }\n\n toImmutable(): readonly ToImmutable<TItem>[] {\n // Don't implement actual toJson logic in here. Implement it in ._toImmutable()\n // instead. This helper merely exists to help TypeScript infer better\n // return types.\n return super.toImmutable() as readonly ToImmutable<TItem>[];\n }\n\n /** @internal */\n _toImmutable(): readonly ToImmutable<TItem>[] {\n const result = Array.from(this.#items, (node) => node.toImmutable());\n return (\n process.env.NODE_ENV === \"production\" ? result : Object.freeze(result)\n ) as readonly ToImmutable<TItem>[];\n }\n\n clone(): LiveList<TItem> {\n return new LiveList(\n Array.from(this.#items, (item) => item.clone() as TItem)\n );\n }\n}\n\nclass LiveListIterator<T extends Lson> implements IterableIterator<T> {\n #innerIterator: IterableIterator<LiveNode>;\n\n constructor(items: SortedList<LiveNode>) {\n this.#innerIterator = items[Symbol.iterator]();\n }\n\n [Symbol.iterator](): IterableIterator<T> {\n return this;\n }\n\n next(): IteratorResult<T> {\n const result = this.#innerIterator.next();\n\n if (result.done) {\n return {\n done: true,\n value: undefined,\n };\n }\n\n const value = liveNodeToLson(result.value) as T;\n // ^^^^\n // FIXME! This isn't safe.\n return { value };\n }\n}\n\nfunction makeUpdate<TItem extends Lson>(\n liveList: LiveList<TItem>,\n deltaUpdates: LiveListUpdateDelta[]\n): LiveListUpdates<TItem> {\n return {\n node: liveList,\n type: \"LiveList\",\n updates: deltaUpdates,\n };\n}\n\nfunction setDelta(index: number, item: LiveNode): LiveListUpdateDelta {\n return {\n index,\n type: \"set\",\n item: item instanceof LiveRegister ? item.data : item,\n };\n}\n\nfunction deleteDelta(\n index: number,\n deletedNode: LiveNode\n): LiveListUpdateDelta {\n return {\n type: \"delete\",\n index,\n deletedItem:\n deletedNode instanceof LiveRegister ? deletedNode.data : deletedNode,\n };\n}\n\nfunction insertDelta(index: number, item: LiveNode): LiveListUpdateDelta {\n return {\n index,\n type: \"insert\",\n item: item instanceof LiveRegister ? 
item.data : item,\n };\n}\n\nfunction moveDelta(\n previousIndex: number,\n index: number,\n item: LiveNode\n): LiveListUpdateDelta {\n return {\n type: \"move\",\n index,\n item: item instanceof LiveRegister ? item.data : item,\n previousIndex,\n };\n}\n\n/**\n * This function is only temporary.\n * As soon as we refactor the operations structure,\n * serializing a LiveStructure should not know anything about intent\n */\nfunction HACK_addIntentAndDeletedIdToOperation<T extends CreateOp>(\n ops: T[],\n deletedId: string | undefined\n): T[] {\n return ops.map((op, index) => {\n if (index === 0) {\n // NOTE: Only patch the first Op here\n const firstOp = op;\n return {\n ...firstOp,\n intent: \"set\",\n deletedId,\n };\n } else {\n return op;\n }\n });\n}\n","import { nn } from \"../lib/assert\";\nimport { freeze } from \"../lib/freeze\";\nimport { nanoid } from \"../lib/nanoid\";\nimport type { CreateMapOp, CreateOp, Op } from \"../protocol/Op\";\nimport { OpCode } from \"../protocol/Op\";\nimport type { MapStorageNode, SerializedMap } from \"../protocol/StorageNode\";\nimport { CrdtType } from \"../protocol/StorageNode\";\nimport type * as DevTools from \"../types/DevToolsTreeNode\";\nimport type { ParentToChildNodeMap } from \"../types/NodeMap\";\nimport type { ApplyResult, ManagedPool } from \"./AbstractCrdt\";\nimport { AbstractCrdt, OpSource } from \"./AbstractCrdt\";\nimport {\n creationOpToLiveNode,\n deserialize,\n isLiveNode,\n liveNodeToLson,\n lsonToLiveNode,\n} from \"./liveblocks-helpers\";\nimport type { LiveNode, Lson } from \"./Lson\";\nimport type { UpdateDelta } from \"./UpdateDelta\";\nimport type { ToImmutable } from \"./utils\";\n\n/**\n * A LiveMap notification that is sent in-client to any subscribers whenever\n * one or more of the values inside the LiveMap instance have changed.\n */\nexport type LiveMapUpdates<TKey extends string, TValue extends Lson> = {\n type: \"LiveMap\";\n node: LiveMap<TKey, TValue>;\n updates: { [key: string]: UpdateDelta };\n // ^^^^^^\n // FIXME: `string` is not specific enough here. 
See if we can\n // improve this type to match TKey!\n};\n\n/**\n * The LiveMap class is similar to a JavaScript Map that is synchronized on all clients.\n * Keys should be a string, and values should be serializable to JSON.\n * If multiple clients update the same property simultaneously, the last modification received by the Liveblocks servers is the winner.\n */\nexport class LiveMap<\n TKey extends string,\n TValue extends Lson,\n> extends AbstractCrdt {\n #map: Map<TKey, LiveNode>;\n #unacknowledgedSet: Map<TKey, string>;\n\n constructor(entries?: readonly (readonly [TKey, TValue])[] | undefined) {\n super();\n this.#unacknowledgedSet = new Map<TKey, string>();\n\n if (entries) {\n const mappedEntries: [TKey, LiveNode][] = [];\n for (const [key, value] of entries) {\n const node = lsonToLiveNode(value);\n node._setParentLink(this, key);\n mappedEntries.push([key, node]);\n }\n this.#map = new Map(mappedEntries);\n } else {\n this.#map = new Map();\n }\n }\n\n /** @internal */\n _toOps(parentId: string, parentKey: string): CreateOp[] {\n if (this._id === undefined) {\n throw new Error(\"Cannot serialize item is not attached\");\n }\n\n const ops: CreateOp[] = [];\n const op: CreateMapOp = {\n id: this._id,\n type: OpCode.CREATE_MAP,\n parentId,\n parentKey,\n };\n\n ops.push(op);\n\n for (const [key, value] of this.#map) {\n ops.push(...value._toOps(this._id, key));\n }\n\n return ops;\n }\n\n /** @internal */\n static _deserialize(\n [id, _item]: MapStorageNode,\n parentToChildren: ParentToChildNodeMap,\n pool: ManagedPool\n ): LiveMap<string, Lson> {\n const map = new LiveMap();\n map._attach(id, pool);\n\n const children = parentToChildren.get(id);\n if (children === undefined) {\n return map;\n }\n\n for (const node of children) {\n const crdt = node[1];\n const child = deserialize(node, parentToChildren, pool);\n child._setParentLink(map, crdt.parentKey);\n map.#map.set(crdt.parentKey, child);\n map.invalidate();\n }\n\n return map;\n }\n\n /** @internal */\n _attach(id: string, pool: ManagedPool): void {\n super._attach(id, pool);\n\n for (const [_key, value] of this.#map) {\n if (isLiveNode(value)) {\n value._attach(pool.generateId(), pool);\n }\n }\n }\n\n /** @internal */\n _attachChild(op: CreateOp, source: OpSource): ApplyResult {\n if (this._pool === undefined) {\n throw new Error(\"Can't attach child if managed pool is not present\");\n }\n\n const { id, parentKey, opId } = op;\n\n const key = parentKey as TKey;\n // ^^^^^^^ TODO: Fix me!\n\n const child = creationOpToLiveNode(op);\n\n if (this._pool.getNode(id) !== undefined) {\n return { modified: false };\n }\n\n if (source === OpSource.OURS) {\n const lastUpdateOpId = this.#unacknowledgedSet.get(key);\n if (lastUpdateOpId === opId) {\n // Acknowlegment from local operation\n this.#unacknowledgedSet.delete(key);\n return { modified: false };\n } else if (lastUpdateOpId !== undefined) {\n // Another local set has overriden the value, so we do nothing\n return { modified: false };\n }\n } else if (source === OpSource.THEIRS) {\n // If a remote operation set an item,\n // delete the unacknowledgedSet associated to the key\n // to make sure any future ack can override it\n this.#unacknowledgedSet.delete(key);\n }\n\n const previousValue = this.#map.get(key);\n let reverse: Op[];\n if (previousValue) {\n const thisId = nn(this._id);\n reverse = previousValue._toOps(thisId, key);\n previousValue._detach();\n } else {\n reverse = [{ type: OpCode.DELETE_CRDT, id }];\n }\n\n child._setParentLink(this, key);\n child._attach(id, 
this._pool);\n this.#map.set(key, child);\n this.invalidate();\n\n return {\n modified: {\n node: this,\n type: \"LiveMap\",\n updates: { [key]: { type: \"update\" } },\n },\n reverse,\n };\n }\n\n /** @internal */\n _detach(): void {\n super._detach();\n\n for (const item of this.#map.values()) {\n item._detach();\n }\n }\n\n /** @internal */\n _detachChild(child: LiveNode): ApplyResult {\n const id = nn(this._id);\n const parentKey = nn(child._parentKey);\n const reverse = child._toOps(id, parentKey);\n\n for (const [key, value] of this.#map) {\n if (value === child) {\n this.#map.delete(key);\n this.invalidate();\n }\n }\n\n child._detach();\n\n const storageUpdate: LiveMapUpdates<TKey, TValue> = {\n node: this,\n type: \"LiveMap\",\n updates: {\n [parentKey]: {\n type: \"delete\",\n deletedItem: liveNodeToLson(child),\n },\n },\n };\n\n return { modified: storageUpdate, reverse };\n }\n\n /** @internal */\n _serialize(): SerializedMap {\n if (this.parent.type !== \"HasParent\") {\n throw new Error(\"Cannot serialize LiveMap if parent is missing\");\n }\n\n return {\n type: CrdtType.MAP,\n parentId: nn(this.parent.node._id, \"Parent node expected to have ID\"),\n parentKey: this.parent.key,\n };\n }\n\n /**\n * Returns a specified element from the LiveMap.\n * @param key The key of the element to return.\n * @returns The element associated with the specified key, or undefined if the key can't be found in the LiveMap.\n */\n get(key: TKey): TValue | undefined {\n const value = this.#map.get(key);\n if (value === undefined) {\n return undefined;\n }\n return liveNodeToLson(value) as TValue | undefined;\n // ^^^^^^^^^^^^^^^^^^^^^\n // FIXME! This isn't safe.\n }\n\n /**\n * Adds or updates an element with a specified key and a value.\n * @param key The key of the element to add. Should be a string.\n * @param value The value of the element to add. Should be serializable to JSON.\n */\n set(key: TKey, value: TValue): void {\n this._pool?.assertStorageIsWritable();\n const oldValue = this.#map.get(key);\n\n if (oldValue) {\n oldValue._detach();\n }\n\n const item = lsonToLiveNode(value);\n item._setParentLink(this, key);\n\n this.#map.set(key, item);\n this.invalidate();\n\n if (this._pool && this._id) {\n const id = this._pool.generateId();\n item._attach(id, this._pool);\n\n const storageUpdates = new Map<string, LiveMapUpdates<TKey, TValue>>();\n storageUpdates.set(this._id, {\n node: this,\n type: \"LiveMap\",\n updates: { [key]: { type: \"update\" } },\n });\n\n const ops = item._toOpsWithOpId(this._id, key, this._pool);\n\n this.#unacknowledgedSet.set(key, nn(ops[0].opId));\n\n this._pool.dispatch(\n ops,\n oldValue\n ? 
oldValue._toOps(this._id, key)\n : [{ type: OpCode.DELETE_CRDT, id }],\n storageUpdates\n );\n }\n }\n\n /**\n * Returns the number of elements in the LiveMap.\n */\n get size(): number {\n return this.#map.size;\n }\n\n /**\n * Returns a boolean indicating whether an element with the specified key exists or not.\n * @param key The key of the element to test for presence.\n */\n has(key: TKey): boolean {\n return this.#map.has(key);\n }\n\n /**\n * Removes the specified element by key.\n * @param key The key of the element to remove.\n * @returns true if an element existed and has been removed, or false if the element does not exist.\n */\n delete(key: TKey): boolean {\n this._pool?.assertStorageIsWritable();\n const item = this.#map.get(key);\n\n if (item === undefined) {\n return false;\n }\n\n item._detach();\n this.#map.delete(key);\n this.invalidate();\n\n if (this._pool && item._id) {\n const thisId = nn(this._id);\n const storageUpdates = new Map<string, LiveMapUpdates<TKey, TValue>>();\n storageUpdates.set(thisId, {\n node: this,\n type: \"LiveMap\",\n updates: {\n [key]: {\n type: \"delete\",\n deletedItem: liveNodeToLson(item),\n },\n },\n });\n this._pool.dispatch(\n [\n {\n type: OpCode.DELETE_CRDT,\n id: item._id,\n opId: this._pool.generateOpId(),\n },\n ],\n item._toOps(thisId, key),\n storageUpdates\n );\n }\n\n return true;\n }\n\n /**\n * Returns a new Iterator object that contains the [key, value] pairs for each element.\n */\n entries(): IterableIterator<[TKey, TValue]> {\n const innerIterator = this.#map.entries();\n\n return {\n [Symbol.iterator]() {\n return this;\n },\n next() {\n const iteratorValue = innerIterator.next();\n\n if (iteratorValue.done) {\n return {\n done: true,\n value: undefined,\n };\n }\n\n const entry = iteratorValue.value;\n\n const key = entry[0];\n const value = liveNodeToLson(iteratorValue.value[1]) as TValue;\n // ^^^^^^^^^\n // FIXME! This isn't safe.\n return {\n value: [key, value],\n };\n },\n };\n }\n\n /**\n * Same function object as the initial value of the entries method.\n */\n [Symbol.iterator](): IterableIterator<[TKey, TValue]> {\n return this.entries();\n }\n\n /**\n * Returns a new Iterator object that contains the keys for each element.\n */\n keys(): IterableIterator<TKey> {\n return this.#map.keys();\n }\n\n /**\n * Returns a new Iterator object that contains the values for each element.\n */\n values(): IterableIterator<TValue> {\n const innerIterator = this.#map.values();\n\n return {\n [Symbol.iterator]() {\n return this;\n },\n next() {\n const iteratorValue = innerIterator.next();\n\n if (iteratorValue.done) {\n return {\n done: true,\n value: undefined,\n };\n }\n\n const value = liveNodeToLson(iteratorValue.value) as TValue;\n // ^^^^^^^^^\n // FIXME! This isn't safe.\n\n return { value };\n },\n };\n }\n\n /**\n * Executes a provided function once per each key/value pair in the Map object, in insertion order.\n * @param callback Function to execute for each entry in the map.\n */\n forEach(\n callback: (value: TValue, key: TKey, map: LiveMap<TKey, TValue>) => void\n ): void {\n for (const entry of this) {\n callback(entry[1], entry[0], this);\n }\n }\n\n /** @internal */\n _toTreeNode(key: string): DevTools.LsonTreeNode {\n return {\n type: \"LiveMap\",\n id: this._id ?? nanoid(),\n key,\n payload: Array.from(this.#map.entries()).map(([key, val]) =>\n val.toTreeNode(key)\n ),\n };\n }\n\n toImmutable(): ReadonlyMap<TKey, ToImmutable<TValue>> {\n // Don't implement actual toImmutable logic in here. 
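// Iteration sketch for the LiveMap iterators defined above: entries() backs
// [Symbol.iterator], so for..of yields [key, value] pairs, and forEach mirrors
// Map#forEach with a (value, key, map) callback.
import { LiveMap } from "@liveblocks/client";

const fruits = new LiveMap<string, number>([
  ["apples", 3],
  ["pears", 5],
]);

for (const [name, count] of fruits) {
  console.log(name, count); // "apples 3", then "pears 5"
}

fruits.forEach((count, name) => console.log(`${name}: ${count}`));

[...fruits.keys()];   // -> ["apples", "pears"]
[...fruits.values()]; // -> [3, 5]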
Implement it in\n // ._toImmutable() instead. This helper merely exists to help TypeScript\n // infer better return types.\n return super.toImmutable() as ReadonlyMap<TKey, ToImmutable<TValue>>;\n }\n\n /** @internal */\n _toImmutable(): ReadonlyMap<TKey, ToImmutable<TValue>> {\n const result: Map<TKey, ToImmutable<TValue>> = new Map();\n for (const [key, value] of this.#map) {\n result.set(key, value.toImmutable() as ToImmutable<TValue>);\n }\n return freeze(result);\n }\n\n clone(): LiveMap<TKey, TValue> {\n return new LiveMap(\n Array.from(this.#map).map(([key, node]) => [key, node.clone() as TValue])\n );\n }\n}\n","import type { LiveNode, Lson, LsonObject } from \"../crdts/Lson\";\nimport { nn } from \"../lib/assert\";\nimport type { Json, JsonObject } from \"../lib/Json\";\nimport { nanoid } from \"../lib/nanoid\";\nimport type { RemoveUndefinedValues } from \"../lib/utils\";\nimport { compactObject, deepClone } from \"../lib/utils\";\nimport type {\n ClientWireOp,\n CreateObjectOp,\n CreateOp,\n DeleteObjectKeyOp,\n Op,\n UpdateObjectOp,\n} from \"../protocol/Op\";\nimport { OpCode } from \"../protocol/Op\";\nimport type {\n NodeStream,\n ObjectStorageNode,\n RootStorageNode,\n SerializedObject,\n SerializedRootObject,\n} from \"../protocol/StorageNode\";\nimport { CrdtType, isRootStorageNode } from \"../protocol/StorageNode\";\nimport type * as DevTools from \"../types/DevToolsTreeNode\";\nimport type { ParentToChildNodeMap } from \"../types/NodeMap\";\nimport type { ApplyResult, ManagedPool } from \"./AbstractCrdt\";\nimport { AbstractCrdt, OpSource } from \"./AbstractCrdt\";\nimport {\n creationOpToLson,\n deserializeToLson,\n isLiveNode,\n isLiveStructure,\n} from \"./liveblocks-helpers\";\nimport type { UpdateDelta } from \"./UpdateDelta\";\nimport type { ToImmutable } from \"./utils\";\n\nexport type LiveObjectUpdateDelta<O extends { [key: string]: unknown }> = {\n [K in keyof O]?: UpdateDelta | undefined;\n};\n\n// One key platform limit is that a LiveObject cannot exceed 128 kB when\n// totalling the size of the keys and values.\n// See https://liveblocks.io/docs/platform/limits#Liveblocks-Storage-limits\nconst MAX_LIVE_OBJECT_SIZE = 128 * 1024;\n\n/**\n * A LiveObject notification that is sent in-client to any subscribers whenever\n * one or more of the entries inside the LiveObject instance have changed.\n */\nexport type LiveObjectUpdates<TData extends LsonObject> = {\n type: \"LiveObject\";\n node: LiveObject<TData>;\n updates: LiveObjectUpdateDelta<TData>;\n};\n\n/**\n * The LiveObject class is similar to a JavaScript object that is synchronized on all clients.\n * Keys should be a string, and values should be serializable to JSON.\n * If multiple clients update the same property simultaneously, the last modification received by the Liveblocks servers is the winner.\n */\nexport class LiveObject<O extends LsonObject> extends AbstractCrdt {\n #map: Map<string, Lson>;\n\n /**\n * Tracks unacknowledged local changes per property to preserve optimistic\n * updates. Maps property keys to their pending operation IDs.\n *\n * INVARIANT: Only locally-generated opIds are ever stored here. Remote opIds\n * are only compared against (to detect ACKs), never stored.\n *\n * When a local change is made, the opId is stored here. 
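// Snapshot sketch for the LiveMap conversion helpers above: toImmutable()
// produces a frozen, plain ReadonlyMap suitable for rendering, while clone()
// produces an independent (detached) copy.
import { LiveMap } from "@liveblocks/client";

const live = new LiveMap<string, number>([["count", 1]]);

const snapshot: ReadonlyMap<string, number> = live.toImmutable();
snapshot.get("count"); // -> 1 (plain data; safe to hand to UI code)

const copy = live.clone();
copy.set("count", 2);  // does not affect `live`
live.get("count");     // -> 1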
When a remote op\n * arrives for the same key:\n * - If no entry exists → apply remote op\n * - If opId matches → it's an ACK, clear the entry\n * - If opId differs → ignore remote op to preserve optimistic update\n */\n #unackedOpsByKey: Map<string, string>;\n\n /**\n * Enable or disable detection of too large LiveObjects.\n * When enabled, throws an error if LiveObject static data exceeds 128KB, which\n * is the maximum value the server will be able to accept.\n * By default, this behavior is disabled to avoid the runtime performance\n * overhead on every LiveObject.set() or LiveObject.update() call.\n *\n * @experimental\n */\n public static detectLargeObjects = false;\n\n static #buildRootAndParentToChildren(\n nodes: NodeStream\n ): [root: SerializedRootObject, nodeMap: ParentToChildNodeMap] {\n const parentToChildren: ParentToChildNodeMap = new Map();\n let root: SerializedRootObject | null = null;\n\n for (const node of nodes) {\n if (isRootStorageNode(node)) {\n root = node[1];\n } else {\n const crdt = node[1];\n const children = parentToChildren.get(crdt.parentId);\n if (children !== undefined) {\n children.push(node);\n } else {\n parentToChildren.set(crdt.parentId, [node]);\n }\n }\n }\n\n if (root === null) {\n throw new Error(\"Root can't be null\");\n }\n\n return [root, parentToChildren];\n }\n\n /** @private Do not use this API directly */\n static _fromItems<O extends LsonObject>(\n nodes: NodeStream,\n pool: ManagedPool\n ): LiveObject<O> {\n const [root, parentToChildren] =\n LiveObject.#buildRootAndParentToChildren(nodes);\n return LiveObject._deserialize(\n [\"root\", root],\n parentToChildren,\n pool\n ) as LiveObject<O>;\n }\n\n constructor(obj: O = {} as O) {\n super();\n\n this.#unackedOpsByKey = new Map();\n\n const o: RemoveUndefinedValues<LsonObject> = compactObject(obj);\n for (const key of Object.keys(o)) {\n const value = o[key];\n if (isLiveNode(value)) {\n value._setParentLink(this, key);\n }\n }\n\n this.#map = new Map(Object.entries(o));\n }\n\n /** @internal */\n _toOps(parentId: string, parentKey: string): CreateOp[] {\n if (this._id === undefined) {\n throw new Error(\"Cannot serialize item is not attached\");\n }\n\n const ops: CreateOp[] = [];\n const op: CreateObjectOp = {\n type: OpCode.CREATE_OBJECT,\n id: this._id,\n parentId,\n parentKey,\n data: {},\n };\n\n ops.push(op);\n\n for (const [key, value] of this.#map) {\n if (isLiveNode(value)) {\n ops.push(...value._toOps(this._id, key));\n } else {\n op.data[key] = value;\n }\n }\n\n return ops;\n }\n\n /** @internal */\n static _deserialize(\n [id, item]: RootStorageNode | ObjectStorageNode,\n parentToChildren: ParentToChildNodeMap,\n pool: ManagedPool\n ): LiveObject<LsonObject> {\n const liveObj = new LiveObject(item.data);\n liveObj._attach(id, pool);\n return this._deserializeChildren(liveObj, parentToChildren, pool);\n }\n\n /** @internal */\n static _deserializeChildren(\n liveObj: LiveObject<JsonObject>,\n parentToChildren: ParentToChildNodeMap,\n pool: ManagedPool\n ): LiveObject<LsonObject> {\n const children = parentToChildren.get(nn(liveObj._id));\n if (children === undefined) {\n return liveObj;\n }\n\n for (const node of children) {\n const child = deserializeToLson(node, parentToChildren, pool);\n const crdt = node[1];\n if (isLiveStructure(child)) {\n child._setParentLink(liveObj, crdt.parentKey);\n }\n liveObj.#map.set(crdt.parentKey, child);\n liveObj.invalidate();\n }\n\n return liveObj;\n }\n\n /** @internal */\n _attach(id: string, pool: ManagedPool): void {\n 
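// Construction sketch for LiveObject, plus the opt-in size guard defined above.
// detectLargeObjects is off by default (to avoid per-write overhead); when
// enabled, update()/set() throw as soon as the object's plain JSON data would
// exceed the 128 kB storage limit. Assumes the public client entry point.
import { LiveList, LiveObject } from "@liveblocks/client";

LiveObject.detectLargeObjects = true; // experimental runtime check

const profile = new LiveObject({
  name: "Ada",
  visits: 1,
  tags: new LiveList(["admin"]), // nested Live structures are parent-linked
});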
super._attach(id, pool);\n\n for (const [_key, value] of this.#map) {\n if (isLiveNode(value)) {\n value._attach(pool.generateId(), pool);\n }\n }\n }\n\n /** @internal */\n _attachChild(op: CreateOp, source: OpSource): ApplyResult {\n if (this._pool === undefined) {\n throw new Error(\"Can't attach child if managed pool is not present\");\n }\n\n const { id, opId, parentKey: key } = op;\n const child = creationOpToLson(op);\n\n if (this._pool.getNode(id) !== undefined) {\n if (this.#unackedOpsByKey.get(key) === opId) {\n // Acknowlegment from local operation\n this.#unackedOpsByKey.delete(key);\n }\n\n return { modified: false };\n }\n\n if (source === OpSource.LOCAL) {\n // Track locally-generated opId to preserve optimistic update\n this.#unackedOpsByKey.set(key, nn(opId));\n } else if (this.#unackedOpsByKey.get(key) === undefined) {\n // Remote operation with no local change => apply operation\n } else if (this.#unackedOpsByKey.get(key) === opId) {\n // Acknowlegment from local operation\n this.#unackedOpsByKey.delete(key);\n return { modified: false };\n } else {\n // Conflict, ignore remote operation\n return { modified: false };\n }\n\n const thisId = nn(this._id);\n const previousValue = this.#map.get(key);\n let reverse: Op[];\n if (isLiveNode(previousValue)) {\n reverse = previousValue._toOps(thisId, key);\n previousValue._detach();\n } else if (previousValue === undefined) {\n reverse = [{ type: OpCode.DELETE_OBJECT_KEY, id: thisId, key }];\n } else {\n reverse = [\n {\n type: OpCode.UPDATE_OBJECT,\n id: thisId,\n data: { [key]: previousValue },\n },\n ];\n }\n\n this.#map.set(key, child);\n this.invalidate();\n\n if (isLiveStructure(child)) {\n child._setParentLink(this, key);\n child._attach(id, this._pool);\n }\n\n return {\n reverse,\n modified: {\n node: this,\n type: \"LiveObject\",\n updates: { [key]: { type: \"update\" } },\n },\n };\n }\n\n /** @internal */\n _detachChild(child: LiveNode): ApplyResult {\n if (child) {\n const id = nn(this._id);\n const parentKey = nn(child._parentKey);\n const reverse = child._toOps(id, parentKey);\n\n for (const [key, value] of this.#map) {\n if (value === child) {\n this.#map.delete(key);\n this.invalidate();\n }\n }\n\n child._detach();\n\n const storageUpdate: LiveObjectUpdates<O> = {\n node: this,\n type: \"LiveObject\",\n updates: {\n [parentKey]: { type: \"delete\" },\n } as { [K in keyof O]: UpdateDelta },\n };\n\n return { modified: storageUpdate, reverse };\n }\n\n return { modified: false };\n }\n\n /** @internal */\n _detach(): void {\n super._detach();\n\n for (const value of this.#map.values()) {\n if (isLiveNode(value)) {\n value._detach();\n }\n }\n }\n\n /** @internal */\n _apply(op: Op, isLocal: boolean): ApplyResult {\n if (op.type === OpCode.UPDATE_OBJECT) {\n return this.#applyUpdate(op, isLocal);\n } else if (op.type === OpCode.DELETE_OBJECT_KEY) {\n return this.#applyDeleteObjectKey(op, isLocal);\n }\n\n return super._apply(op, isLocal);\n }\n\n /** @internal */\n _serialize(): SerializedObject | SerializedRootObject {\n const data: JsonObject = {};\n\n // Add only the static Json data fields into the objects\n for (const [key, value] of this.#map) {\n if (!isLiveNode(value)) {\n data[key] = value;\n }\n }\n\n if (this.parent.type === \"HasParent\" && this.parent.node._id) {\n return {\n type: CrdtType.OBJECT,\n parentId: this.parent.node._id,\n parentKey: this.parent.key,\n data,\n };\n } else {\n // Root object has no parent ID/key\n return {\n type: CrdtType.OBJECT,\n data,\n };\n }\n }\n\n #applyUpdate(op: 
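// Illustrative restatement (not library API) of the per-key acknowledgement
// rules used above by #unackedOpsByKey: `resolveRemoteWrite` is a hypothetical
// helper that mirrors how an incoming remote op for a key is reconciled against
// a pending local opId.
type Resolution = "apply-remote" | "ack-clear-pending" | "ignore-remote";

function resolveRemoteWrite(
  pendingLocalOpId: string | undefined,
  remoteOpId: string | undefined
): Resolution {
  if (pendingLocalOpId === undefined) return "apply-remote";       // no local change in flight
  if (pendingLocalOpId === remoteOpId) return "ack-clear-pending"; // our own op echoed back
  return "ignore-remote";                                          // keep the optimistic local value
}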
UpdateObjectOp, isLocal: boolean): ApplyResult {\n let isModified = false;\n const id = nn(this._id);\n const reverse: Op[] = [];\n const reverseUpdate: UpdateObjectOp = {\n type: OpCode.UPDATE_OBJECT,\n id,\n data: {},\n };\n\n for (const key in op.data as Partial<O>) {\n const oldValue = this.#map.get(key);\n if (isLiveNode(oldValue)) {\n reverse.push(...oldValue._toOps(id, key));\n oldValue._detach();\n } else if (oldValue !== undefined) {\n reverseUpdate.data[key] = oldValue;\n } else if (oldValue === undefined) {\n reverse.push({ type: OpCode.DELETE_OBJECT_KEY, id, key });\n }\n }\n\n const updateDelta: LiveObjectUpdateDelta<O> = {};\n for (const key in op.data as Partial<O>) {\n const value = op.data[key];\n if (value === undefined) {\n continue;\n }\n\n if (isLocal) {\n // Track locally-generated opId to preserve optimistic update\n this.#unackedOpsByKey.set(key, nn(op.opId));\n } else if (this.#unackedOpsByKey.get(key) === undefined) {\n // Not modified localy so we apply update\n isModified = true;\n } else if (this.#unackedOpsByKey.get(key) === op.opId) {\n // Acknowlegment from local operation\n this.#unackedOpsByKey.delete(key);\n continue;\n } else {\n // Conflict, ignore remote operation\n continue;\n }\n\n const oldValue = this.#map.get(key);\n\n if (isLiveNode(oldValue)) {\n oldValue._detach();\n }\n\n isModified = true;\n updateDelta[key] = { type: \"update\" };\n this.#map.set(key, value);\n this.invalidate();\n }\n\n if (Object.keys(reverseUpdate.data).length !== 0) {\n reverse.unshift(reverseUpdate);\n }\n\n return isModified\n ? {\n modified: {\n node: this,\n type: \"LiveObject\",\n updates: updateDelta,\n },\n reverse,\n }\n : { modified: false };\n }\n\n #applyDeleteObjectKey(op: DeleteObjectKeyOp, isLocal: boolean): ApplyResult {\n const key = op.key;\n\n // If property does not exist, exit without notifying\n const oldValue = this.#map.get(key);\n if (oldValue === undefined) {\n return { modified: false };\n }\n\n // If a local operation exists on the same key and we receive a remote\n // one prevent flickering by not applying delete op.\n if (!isLocal && this.#unackedOpsByKey.get(key) !== undefined) {\n return { modified: false };\n }\n\n const id = nn(this._id);\n let reverse: Op[] = [];\n if (isLiveNode(oldValue)) {\n reverse = oldValue._toOps(id, op.key);\n oldValue._detach();\n } else if (oldValue !== undefined) {\n reverse = [\n {\n type: OpCode.UPDATE_OBJECT,\n id,\n data: { [key]: oldValue },\n },\n ];\n }\n\n this.#map.delete(key);\n this.invalidate();\n return {\n modified: {\n node: this,\n type: \"LiveObject\",\n updates: {\n [op.key]: { type: \"delete\", deletedItem: oldValue satisfies Lson },\n },\n },\n reverse,\n };\n }\n\n /**\n * Transform the LiveObject into a javascript object\n */\n toObject(): O {\n return Object.fromEntries(this.#map) as O;\n }\n\n /**\n * Adds or updates a property with a specified key and a value.\n * @param key The key of the property to add\n * @param value The value of the property to add\n */\n set<TKey extends keyof O>(key: TKey, value: O[TKey]): void {\n // TODO: Find out why typescript complains\n this._pool?.assertStorageIsWritable();\n this.update({ [key]: value } as unknown as Partial<O>);\n }\n\n /**\n * Returns a specified property from the LiveObject.\n * @param key The key of the property to get\n */\n get<TKey extends keyof O>(key: TKey): O[TKey] {\n return this.#map.get(key as string) as O[TKey];\n }\n\n /**\n * Deletes a key from the LiveObject\n * @param key The key of the property to delete\n */\n 
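// Accessor sketch for the LiveObject methods defined above
// (get/set/delete/toObject, with update() covering multi-key patches).
import { LiveObject } from "@liveblocks/client";

const doc = new LiveObject({ title: "Untitled", stars: 0, draft: true });

doc.set("stars", 1);                          // single-key write (delegates to update)
doc.update({ title: "Hello", draft: false }); // multi-key patch, dispatched together
doc.get("title");                             // -> "Hello"
doc.delete("draft");                          // removes the key
doc.toObject();                               // -> { title: "Hello", stars: 1 }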
delete(key: keyof O): void {\n this._pool?.assertStorageIsWritable();\n const keyAsString = key as string;\n\n const oldValue = this.#map.get(keyAsString);\n if (oldValue === undefined) {\n return;\n }\n\n if (this._pool === undefined || this._id === undefined) {\n if (isLiveNode(oldValue)) {\n oldValue._detach();\n }\n this.#map.delete(keyAsString);\n this.invalidate();\n return;\n }\n\n let reverse: Op[];\n\n if (isLiveNode(oldValue)) {\n oldValue._detach();\n reverse = oldValue._toOps(this._id, keyAsString);\n } else {\n reverse = [\n {\n type: OpCode.UPDATE_OBJECT,\n data: { [keyAsString]: oldValue },\n id: this._id,\n },\n ];\n }\n\n this.#map.delete(keyAsString);\n this.invalidate();\n\n const storageUpdates = new Map<string, LiveObjectUpdates<O>>();\n storageUpdates.set(this._id, {\n node: this,\n type: \"LiveObject\",\n updates: {\n [key]: { type: \"delete\", deletedItem: oldValue } satisfies UpdateDelta,\n } as {\n [K in keyof O]: UpdateDelta;\n },\n });\n\n this._pool.dispatch(\n [\n {\n type: OpCode.DELETE_OBJECT_KEY,\n key: keyAsString,\n id: this._id,\n opId: this._pool.generateOpId(),\n },\n ],\n reverse,\n storageUpdates\n );\n }\n\n /**\n * Adds or updates multiple properties at once with an object.\n * @param patch The object used to overrides properties\n */\n update(patch: Partial<O>): void {\n this._pool?.assertStorageIsWritable();\n\n // If detectLargeObjects is enabled, perform a runtime size check now so we\n // can immediately throw as soon as the max object size is exceeded.\n if (LiveObject.detectLargeObjects) {\n const data: Record<string, Json> = {};\n for (const [key, value] of this.#map) {\n if (!isLiveNode(value)) {\n data[key] = value;\n }\n }\n for (const key of Object.keys(patch)) {\n const value = patch[key];\n if (value === undefined) continue;\n if (!isLiveNode(value)) {\n data[key] = value;\n }\n }\n\n // Fast upper-bound check: multiply JSON string length by 4 (worst-case UTF-8)\n // This is much faster than TextEncoder and gives us an upper bound\n const jsonString = JSON.stringify(data);\n const upperBoundSize = jsonString.length * 4;\n\n // Only do the precise calculation if the fast check suggests we might be close\n if (upperBoundSize > MAX_LIVE_OBJECT_SIZE) {\n const preciseSize = new TextEncoder().encode(jsonString).length;\n if (preciseSize > MAX_LIVE_OBJECT_SIZE) {\n throw new Error(\n `LiveObject size exceeded limit: ${preciseSize} bytes > ${MAX_LIVE_OBJECT_SIZE} bytes. 
See https://liveblocks.io/docs/platform/limits#Liveblocks-Storage-limits`\n );\n }\n }\n }\n\n if (this._pool === undefined || this._id === undefined) {\n for (const key in patch) {\n const newValue = patch[key];\n if (newValue === undefined) {\n continue;\n }\n\n const oldValue = this.#map.get(key);\n if (isLiveNode(oldValue)) {\n oldValue._detach();\n }\n\n if (isLiveNode(newValue)) {\n newValue._setParentLink(this, key);\n }\n\n this.#map.set(key, newValue);\n this.invalidate();\n }\n\n return;\n }\n\n const ops: ClientWireOp[] = [];\n const reverseOps: Op[] = [];\n\n const opId = this._pool.generateOpId();\n const updatedProps: JsonObject = {};\n\n const reverseUpdateOp: UpdateObjectOp = {\n id: this._id,\n type: OpCode.UPDATE_OBJECT,\n data: {},\n };\n\n const updateDelta: LiveObjectUpdateDelta<O> = {};\n\n for (const key in patch) {\n const newValue: Lson | undefined = patch[key];\n if (newValue === undefined) {\n continue;\n }\n\n const oldValue = this.#map.get(key);\n\n if (isLiveNode(oldValue)) {\n reverseOps.push(...oldValue._toOps(this._id, key));\n oldValue._detach();\n } else if (oldValue === undefined) {\n reverseOps.push({ type: OpCode.DELETE_OBJECT_KEY, id: this._id, key });\n } else {\n reverseUpdateOp.data[key] = oldValue;\n }\n\n if (isLiveNode(newValue)) {\n newValue._setParentLink(this, key);\n newValue._attach(this._pool.generateId(), this._pool);\n const newAttachChildOps = newValue._toOpsWithOpId(\n this._id,\n key,\n this._pool\n );\n\n const createCrdtOp = newAttachChildOps.find(\n (op: Op & { parentId?: string }) => op.parentId === this._id\n );\n if (createCrdtOp) {\n // Track locally-generated opId to preserve optimistic update\n this.#unackedOpsByKey.set(key, nn(createCrdtOp.opId));\n }\n\n ops.push(...newAttachChildOps);\n } else {\n updatedProps[key] = newValue;\n // Track locally-generated opId to preserve optimistic update\n this.#unackedOpsByKey.set(key, opId);\n }\n\n this.#map.set(key, newValue);\n this.invalidate();\n updateDelta[key] = { type: \"update\" };\n }\n\n if (Object.keys(reverseUpdateOp.data).length !== 0) {\n reverseOps.unshift(reverseUpdateOp);\n }\n\n if (Object.keys(updatedProps).length !== 0) {\n ops.unshift({\n opId,\n id: this._id,\n type: OpCode.UPDATE_OBJECT,\n data: updatedProps,\n });\n }\n\n const storageUpdates = new Map<string, LiveObjectUpdates<O>>();\n storageUpdates.set(this._id, {\n node: this,\n type: \"LiveObject\",\n updates: updateDelta,\n });\n this._pool.dispatch(ops, reverseOps, storageUpdates);\n }\n\n toImmutable(): ToImmutable<O> {\n // Don't implement actual toImmutable logic in here. Implement it in\n // ._toImmutable() instead. This helper merely exists to help TypeScript\n // infer better return types.\n return super.toImmutable() as ToImmutable<O>;\n }\n\n /** @internal */\n toTreeNode(key: string): DevTools.LiveTreeNode<\"LiveObject\"> {\n // Don't implement actual toTreeNode logic in here. Implement it in\n // ._toTreeNode() instead. This helper merely exists to help TypeScript\n // infer better return types.\n return super.toTreeNode(key) as DevTools.LiveTreeNode<\"LiveObject\">;\n }\n\n /** @internal */\n _toTreeNode(key: string): DevTools.LsonTreeNode {\n const nodeId = this._id ?? nanoid();\n return {\n type: \"LiveObject\",\n id: nodeId,\n key,\n payload: Array.from(this.#map.entries()).map(([key, value]) =>\n isLiveNode(value)\n ? 
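// Sketch of the two-step size check used by update() above: first a cheap upper
// bound (JSON string length × 4), and only if that bound crosses the limit, a
// precise UTF-8 byte count via TextEncoder. `wouldExceedLimit` is a hypothetical
// helper name, not part of the library.
const MAX_BYTES = 128 * 1024;

function wouldExceedLimit(data: Record<string, unknown>): boolean {
  const json = JSON.stringify(data);
  if (json.length * 4 <= MAX_BYTES) return false;            // fast path: cannot possibly exceed
  return new TextEncoder().encode(json).length > MAX_BYTES;  // exact byte size
}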
value.toTreeNode(key)\n : { type: \"Json\", id: `${nodeId}:${key}`, key, payload: value }\n ),\n };\n }\n\n /** @internal */\n _toImmutable(): ToImmutable<O> {\n const result: { [key: string]: unknown } = {};\n for (const [key, val] of this.#map) {\n result[key] = isLiveStructure(val) ? val.toImmutable() : val;\n }\n return (\n process.env.NODE_ENV === \"production\" ? result : Object.freeze(result)\n ) as ToImmutable<O>;\n }\n\n clone(): LiveObject<O> {\n return new LiveObject(\n Object.fromEntries(\n Array.from(this.#map).map(([key, value]) => [\n key,\n isLiveStructure(value) ? value.clone() : deepClone(value),\n ])\n ) as O\n );\n }\n}\n","import { assertNever, nn } from \"../lib/assert\";\nimport { isPlainObject } from \"../lib/guards\";\nimport type { Json } from \"../lib/Json\";\nimport { stringifyOrLog as stringify } from \"../lib/stringify\";\nimport { deepClone, entries } from \"../lib/utils\";\nimport type { CreateOp, Op } from \"../protocol/Op\";\nimport { OpCode } from \"../protocol/Op\";\nimport type { NodeMap, StorageNode } from \"../protocol/StorageNode\";\nimport {\n CrdtType,\n isListStorageNode,\n isMapStorageNode,\n isObjectStorageNode,\n isRegisterStorageNode,\n} from \"../protocol/StorageNode\";\nimport type { ParentToChildNodeMap } from \"../types/NodeMap\";\nimport type { ManagedPool } from \"./AbstractCrdt\";\nimport { LiveList, type LiveListUpdates } from \"./LiveList\";\nimport { LiveMap, type LiveMapUpdates } from \"./LiveMap\";\nimport { LiveObject, type LiveObjectUpdates } from \"./LiveObject\";\nimport { LiveRegister } from \"./LiveRegister\";\nimport type { LiveNode, LiveStructure, Lson, LsonObject } from \"./Lson\";\nimport type { StorageUpdate } from \"./StorageUpdates\";\n\nexport function creationOpToLiveNode(op: CreateOp): LiveNode {\n return lsonToLiveNode(creationOpToLson(op));\n}\n\nexport function creationOpToLson(op: CreateOp): Lson {\n switch (op.type) {\n case OpCode.CREATE_REGISTER:\n return op.data;\n case OpCode.CREATE_OBJECT:\n return new LiveObject(op.data);\n case OpCode.CREATE_MAP:\n return new LiveMap();\n case OpCode.CREATE_LIST:\n return new LiveList([]);\n default:\n return assertNever(op, \"Unknown creation Op\");\n }\n}\n\nexport function isSameNodeOrChildOf(node: LiveNode, parent: LiveNode): boolean {\n if (node === parent) {\n return true;\n }\n if (node.parent.type === \"HasParent\") {\n return isSameNodeOrChildOf(node.parent.node, parent);\n }\n return false;\n}\n\nexport function deserialize(\n node: StorageNode,\n parentToChildren: ParentToChildNodeMap,\n pool: ManagedPool\n): LiveNode {\n if (isObjectStorageNode(node)) {\n return LiveObject._deserialize(node, parentToChildren, pool);\n } else if (isListStorageNode(node)) {\n return LiveList._deserialize(node, parentToChildren, pool);\n } else if (isMapStorageNode(node)) {\n return LiveMap._deserialize(node, parentToChildren, pool);\n } else if (isRegisterStorageNode(node)) {\n return LiveRegister._deserialize(node, parentToChildren, pool);\n } else {\n throw new Error(\"Unexpected CRDT type\");\n }\n}\n\nexport function deserializeToLson(\n node: StorageNode,\n parentToChildren: ParentToChildNodeMap,\n pool: ManagedPool\n): Lson {\n if (isObjectStorageNode(node)) {\n return LiveObject._deserialize(node, parentToChildren, pool);\n } else if (isListStorageNode(node)) {\n return LiveList._deserialize(node, parentToChildren, pool);\n } else if (isMapStorageNode(node)) {\n return LiveMap._deserialize(node, parentToChildren, pool);\n } else if (isRegisterStorageNode(node)) {\n 
return node[1].data;\n } else {\n throw new Error(\"Unexpected CRDT type\");\n }\n}\n\nexport function isLiveStructure(value: unknown): value is LiveStructure {\n return isLiveList(value) || isLiveMap(value) || isLiveObject(value);\n}\n\nexport function isLiveNode(value: unknown): value is LiveNode {\n return isLiveStructure(value) || isLiveRegister(value);\n}\n\nexport function isLiveList(value: unknown): value is LiveList<Lson> {\n return value instanceof LiveList;\n}\n\nexport function isLiveMap(value: unknown): value is LiveMap<string, Lson> {\n return value instanceof LiveMap;\n}\n\nexport function isLiveObject(value: unknown): value is LiveObject<LsonObject> {\n return value instanceof LiveObject;\n}\n\nexport function isLiveRegister(value: unknown): value is LiveRegister<Json> {\n return value instanceof LiveRegister;\n}\n\nexport function cloneLson<L extends Lson | undefined>(value: L): L {\n return value === undefined\n ? (undefined as L)\n : isLiveStructure(value)\n ? (value.clone() as L)\n : (deepClone(value) as L);\n}\n\nexport function liveNodeToLson(obj: LiveNode): Lson {\n if (obj instanceof LiveRegister) {\n return obj.data;\n } else if (\n obj instanceof LiveList ||\n obj instanceof LiveMap ||\n obj instanceof LiveObject\n ) {\n return obj;\n } else {\n return assertNever(obj, \"Unknown AbstractCrdt\");\n }\n}\n\nexport function lsonToLiveNode(value: Lson): LiveNode {\n if (\n value instanceof LiveObject ||\n value instanceof LiveMap ||\n value instanceof LiveList\n ) {\n return value;\n } else {\n return new LiveRegister(value);\n }\n}\n\n/**\n * Computes the operations needed to transform one NodeMap into another.\n *\n * Used when the client receives a fresh storage snapshot from the server\n * (e.g. after reconnecting). The local state may have diverged, so we diff\n * the two trees and apply the resulting ops to bring local state in sync\n * with the server's authoritative version.\n *\n * Returns ops for:\n * - DELETE_CRDT: nodes in current but not in new\n * - CREATE_*: nodes in new but not in current\n * - UPDATE_OBJECT: objects whose data changed\n * - SET_PARENT_KEY: nodes whose position changed\n *\n * Example:\n * - Current: { \"root\": { a: 1 }, \"node1\": { b: 2 } }\n * - New: { \"root\": { a: 99 }, \"node2\": { c: 3 } }\n *\n * Returns:\n * - DELETE_CRDT for \"node1\" (removed)\n * - UPDATE_OBJECT for \"root\" (data changed: a: 1 → 99)\n * - CREATE_OBJECT for \"node2\" (added)\n */\nexport function getTreesDiffOperations(\n currentItems: NodeMap,\n newItems: NodeMap\n): Op[] {\n const ops: Op[] = [];\n\n currentItems.forEach((_, id) => {\n if (!newItems.get(id)) {\n // Delete crdt\n ops.push({ type: OpCode.DELETE_CRDT, id });\n }\n });\n\n newItems.forEach((crdt, id) => {\n const currentCrdt = currentItems.get(id);\n if (currentCrdt) {\n if (crdt.type === CrdtType.OBJECT) {\n if (\n currentCrdt.type !== CrdtType.OBJECT ||\n stringify(crdt.data) !== stringify(currentCrdt.data)\n ) {\n ops.push({\n type: OpCode.UPDATE_OBJECT,\n id,\n data: crdt.data,\n });\n }\n }\n if (crdt.parentKey !== currentCrdt.parentKey) {\n ops.push({\n type: OpCode.SET_PARENT_KEY,\n id,\n parentKey: nn(crdt.parentKey, \"Parent key must not be missing\"),\n });\n }\n } else {\n // new Crdt\n switch (crdt.type) {\n case CrdtType.REGISTER:\n ops.push({\n type: OpCode.CREATE_REGISTER,\n id,\n parentId: crdt.parentId,\n parentKey: crdt.parentKey,\n data: crdt.data,\n });\n break;\n case CrdtType.LIST:\n ops.push({\n type: OpCode.CREATE_LIST,\n id,\n parentId: crdt.parentId,\n parentKey: 
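// Narrowing sketch for the runtime guards defined above (isLiveList, isLiveMap,
// isLiveObject, ...): they are thin instanceof checks, so application code can
// distinguish Live structures from plain JSON the same way.
import { LiveList, LiveMap, LiveObject } from "@liveblocks/client";

function describe(value: unknown): string {
  if (value instanceof LiveObject) return "LiveObject";
  if (value instanceof LiveMap) return "LiveMap";
  if (value instanceof LiveList) return "LiveList";
  return "plain JSON value";
}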
crdt.parentKey,\n });\n break;\n case CrdtType.OBJECT:\n if (crdt.parentId === undefined || crdt.parentKey === undefined) {\n throw new Error(\n \"Internal error. Cannot serialize storage root into an operation\"\n );\n }\n ops.push({\n type: OpCode.CREATE_OBJECT,\n id,\n parentId: crdt.parentId,\n parentKey: crdt.parentKey,\n data: crdt.data,\n });\n break;\n case CrdtType.MAP:\n ops.push({\n type: OpCode.CREATE_MAP,\n id,\n parentId: crdt.parentId,\n parentKey: crdt.parentKey,\n });\n break;\n }\n }\n });\n\n return ops;\n}\n\nfunction mergeObjectStorageUpdates<A extends LsonObject, B extends LsonObject>(\n first: LiveObjectUpdates<A>,\n second: LiveObjectUpdates<B>\n): LiveObjectUpdates<B> {\n const updates = first.updates as (typeof second)[\"updates\"];\n for (const [key, value] of entries(second.updates)) {\n updates[key] = value;\n }\n return {\n ...second,\n updates,\n };\n}\n\nfunction mergeMapStorageUpdates<K2 extends string, V2 extends Lson>(\n first: LiveMapUpdates<string, Lson>,\n second: LiveMapUpdates<K2, V2>\n): LiveMapUpdates<K2, V2> {\n const updates = first.updates;\n for (const [key, value] of entries(second.updates)) {\n updates[key] = value;\n }\n return {\n ...second,\n updates,\n };\n}\n\nfunction mergeListStorageUpdates<T extends Lson>(\n first: LiveListUpdates<Lson>,\n second: LiveListUpdates<T>\n): LiveListUpdates<T> {\n const updates = first.updates;\n return {\n ...second,\n updates: updates.concat(second.updates),\n };\n}\n\nexport function mergeStorageUpdates(\n first: StorageUpdate | undefined,\n second: StorageUpdate\n): StorageUpdate {\n if (first === undefined) {\n return second;\n }\n\n if (first.type === \"LiveObject\" && second.type === \"LiveObject\") {\n return mergeObjectStorageUpdates(first, second);\n } else if (first.type === \"LiveMap\" && second.type === \"LiveMap\") {\n return mergeMapStorageUpdates(first, second);\n } else if (first.type === \"LiveList\" && second.type === \"LiveList\") {\n return mergeListStorageUpdates(first, second);\n } else {\n /* Mismatching merge types. Throw an error here? */\n }\n\n return second;\n}\n\nfunction isPlain(\n value: unknown\n): value is\n | undefined\n | null\n | string\n | boolean\n | number\n | unknown[]\n | { [key: string]: unknown } {\n const type = typeof value;\n return (\n value === undefined ||\n value === null ||\n type === \"string\" ||\n type === \"boolean\" ||\n type === \"number\" ||\n Array.isArray(value) ||\n isPlainObject(value)\n );\n}\n\nexport function findNonSerializableValue(\n value: unknown,\n path: string = \"\"\n): { path: string; value: unknown } | false {\n if (!isPlain) {\n return {\n path: path || \"root\",\n value,\n };\n }\n\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n for (const [key, nestedValue] of Object.entries(value)) {\n const nestedPath = path ? 
path + \".\" + key : key;\n\n if (!isPlain(nestedValue)) {\n return {\n path: nestedPath,\n value: nestedValue,\n };\n }\n\n if (typeof nestedValue === \"object\") {\n const nonSerializableNestedValue = findNonSerializableValue(\n nestedValue,\n nestedPath\n );\n\n if (nonSerializableNestedValue) {\n return nonSerializableNestedValue;\n }\n }\n }\n\n return false;\n}\n","import { raise } from \"./utils\";\n\n/**\n * A Deque (= Double Ended Queue) is like a stack, but where elements can be\n * efficiently pushed or popped from either side.\n *\n * The following calls are equivalent with arrays (but insertions are O(n)\n * instead of O(n^2)):\n *\n * - deque.push(1) ⇔ array.push(1)\n * - deque.push([1, 2, 3]) ⇔ array.push(1, 2, 3)\n * - deque.push(many) ⇔ array.push(...many)\n * - deque.pop() ⇔ array.pop()\n *\n * - deque.pushLeft(1) ⇔ array.unshift(1)\n * - deque.pushLeft([1, 2, 3]) ⇔ array.unshift(1, 2, 3)\n * - deque.pushLeft(many) ⇔ array.unshift(...many)\n * - deque.popLeft() ⇔ array.shift()\n *\n */\nexport class Deque<T> {\n readonly #data: Record<number, T>;\n #front: number;\n #back: number;\n #size: number;\n\n constructor() {\n this.#data = {};\n this.#front = 0; // Inclusive\n this.#back = 1; // Exclusive\n this.#size = 0;\n }\n\n get length(): number {\n return this.#size;\n }\n\n *[Symbol.iterator](): IterableIterator<T> {\n const size = this.#size;\n const front = this.#front;\n for (let i = 0; i < size; i++) {\n yield this.#data[front + i];\n }\n }\n\n push(value: T | readonly T[]): void {\n const values: readonly T[] = Array.isArray(value)\n ? value\n : ([value] as readonly T[]);\n if (this.#back > Number.MAX_SAFE_INTEGER - values.length - 1)\n raise(\"Deque full\");\n for (const value of values) {\n this.#data[this.#back++ - 1] = value;\n }\n this.#size += values.length;\n }\n\n pop(): T | undefined {\n if (this.#size < 1) return undefined;\n\n this.#back--;\n const value = this.#data[this.#back - 1];\n delete this.#data[this.#back - 1];\n this.#size--;\n return value;\n }\n\n pushLeft(value: T | readonly T[]): void {\n const values: readonly T[] = Array.isArray(value)\n ? value\n : ([value] as readonly T[]);\n if (this.#front < Number.MIN_SAFE_INTEGER + values.length)\n raise(\"Deque full\");\n for (let i = values.length - 1; i >= 0; i--) {\n this.#data[--this.#front] = values[i];\n }\n this.#size += values.length;\n }\n\n popLeft(): T | undefined {\n if (this.#size < 1) return undefined;\n\n const value = this.#data[this.#front];\n delete this.#data[this.#front];\n this.#front++;\n this.#size--;\n return value;\n }\n}\n","/**\n * Represents an indefinitely deep arbitrary JSON data structure. 
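// Usage sketch for the internal Deque helper defined above (not a public
// export; shown for illustration). It spells out the array equivalences from
// the docblock: pushes on either end are O(1) per element, unlike
// Array#unshift/shift which are O(n).
const dq = new Deque<number>();
dq.push(1);      // like array.push(1)
dq.push([2, 3]); // like array.push(2, 3)
dq.pushLeft(0);  // like array.unshift(0)
[...dq];         // -> [0, 1, 2, 3]
dq.popLeft();    // -> 0 (like array.shift())
dq.pop();        // -> 3 (like array.pop())
dq.length;       // -> 2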
There are\n * four types that make up the Json family:\n *\n * - Json any legal JSON value\n * - JsonScalar any legal JSON leaf value (no lists or objects)\n * - JsonArray a JSON value whose outer type is an array\n * - JsonObject a JSON value whose outer type is an object\n *\n */\nexport type Json = JsonScalar | JsonArray | JsonObject;\nexport type JsonScalar = string | number | boolean | null;\nexport type JsonArray = Json[];\n/**\n * Any valid JSON object.\n */\nexport type JsonObject = { [key: string]: Json | undefined };\n\nexport function isJsonScalar(data: Json): data is JsonScalar {\n return (\n data === null ||\n typeof data === \"string\" ||\n typeof data === \"number\" ||\n typeof data === \"boolean\"\n );\n}\n\nexport function isJsonArray(data: Json): data is JsonArray {\n return Array.isArray(data);\n}\n\nexport function isJsonObject(data: Json): data is JsonObject {\n return !isJsonScalar(data) && !isJsonArray(data);\n}\n","import type { Json, JsonObject } from \"../lib/Json\";\nimport type { ClientWireOp } from \"./Op\";\n\nexport type ClientMsgCode = (typeof ClientMsgCode)[keyof typeof ClientMsgCode];\nexport const ClientMsgCode = Object.freeze({\n // For Presence\n UPDATE_PRESENCE: 100,\n BROADCAST_EVENT: 103,\n\n // For Storage\n FETCH_STORAGE: 200,\n UPDATE_STORAGE: 201,\n\n // For Yjs support\n FETCH_YDOC: 300,\n UPDATE_YDOC: 301,\n});\n\nexport namespace ClientMsgCode {\n export type UPDATE_PRESENCE = typeof ClientMsgCode.UPDATE_PRESENCE;\n export type BROADCAST_EVENT = typeof ClientMsgCode.BROADCAST_EVENT;\n export type FETCH_STORAGE = typeof ClientMsgCode.FETCH_STORAGE;\n export type UPDATE_STORAGE = typeof ClientMsgCode.UPDATE_STORAGE;\n export type FETCH_YDOC = typeof ClientMsgCode.FETCH_YDOC;\n export type UPDATE_YDOC = typeof ClientMsgCode.UPDATE_YDOC;\n}\n\n/**\n * Messages that can be sent from the client to the server.\n */\nexport type ClientMsg<P extends JsonObject, E extends Json> =\n // For Presence\n | BroadcastEventClientMsg<E>\n | UpdatePresenceClientMsg<P>\n\n // For Storage\n | UpdateStorageClientMsg\n | FetchStorageClientMsg\n\n // For Yjs support\n | FetchYDocClientMsg\n | UpdateYDocClientMsg;\n\nexport type BroadcastEventClientMsg<E extends Json> = {\n type: ClientMsgCode.BROADCAST_EVENT;\n event: E;\n};\n\nexport type UpdatePresenceClientMsg<P extends JsonObject> =\n //\n // Full Presence™ message\n //\n | {\n readonly type: ClientMsgCode.UPDATE_PRESENCE;\n /**\n * Set this to any number to signify that this is a Full Presence™\n * update, not a patch.\n *\n * The numeric value itself no longer has specific meaning. 
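// Narrowing sketch for the Json helpers defined above: isJsonScalar,
// isJsonArray and isJsonObject partition any Json value into its three shapes
// (internal helpers, shown for illustration).
function summarize(data: Json): string {
  if (isJsonScalar(data)) return `scalar: ${String(data)}`;
  if (isJsonArray(data)) return `array with ${data.length} items`;
  return `object with keys: ${Object.keys(data).join(", ")}`; // isJsonObject(data)
}

summarize(null);              // -> "scalar: null"
summarize([1, 2, 3]);         // -> "array with 3 items"
summarize({ a: 1, b: true }); // -> "object with keys: a, b"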
Historically,\n * this field was intended so that clients could ignore these broadcasted\n * full presence messages, but it turned out that getting a full presence\n * \"keyframe\" from time to time was useful.\n *\n * So nowadays, the presence (pun intended) of this `targetActor` field\n * is a backward-compatible way of expressing that the `data` contains\n * all presence fields, and isn't a partial \"patch\".\n */\n readonly targetActor: number;\n readonly data: P;\n }\n\n //\n // Partial Presence™ message\n //\n | {\n readonly type: ClientMsgCode.UPDATE_PRESENCE;\n /**\n * Absence of the `targetActor` field signifies that this is a Partial\n * Presence™ \"patch\".\n */\n readonly targetActor?: undefined;\n readonly data: Partial<P>;\n };\n\nexport type UpdateStorageClientMsg = {\n readonly type: ClientMsgCode.UPDATE_STORAGE;\n readonly ops: ClientWireOp[];\n};\n\nexport type FetchStorageClientMsg = {\n readonly type: ClientMsgCode.FETCH_STORAGE;\n};\n\nexport type FetchYDocClientMsg = {\n readonly type: ClientMsgCode.FETCH_YDOC;\n readonly vector: string; // base64 encoded stateVector a from yjs doc\n readonly guid?: string; // an optional guid to identify a subdoc\n readonly v2?: boolean; // if it's a v2 update\n};\n\nexport type UpdateYDocClientMsg = {\n readonly type: ClientMsgCode.UPDATE_YDOC;\n readonly update: string; // base64 encoded update from a yjs doc\n readonly guid?: string; // an optional guid to identify a subdoc\n readonly v2?: boolean; // if it's a v2 update\n};\n","import { freeze } from \"../lib/freeze\";\nimport type { JsonObject } from \"../lib/Json\";\nimport { DerivedSignal, merge, MutableSignal } from \"../lib/signals\";\nimport { compact, compactObject } from \"../lib/utils\";\nimport { canComment, canWriteStorage } from \"../protocol/AuthToken\";\nimport type { BaseUserMeta } from \"../protocol/BaseUserMeta\";\nimport type { User } from \"../types/User\";\n\ntype Connection<U extends BaseUserMeta> = {\n readonly connectionId: number;\n readonly scopes: string[];\n readonly id: U[\"id\"];\n readonly info: U[\"info\"];\n};\n\nfunction makeUser<P extends JsonObject, U extends BaseUserMeta>(\n conn: Connection<U>,\n presence: P\n): User<P, U> {\n const { connectionId, id, info } = conn;\n const canWrite = canWriteStorage(conn.scopes);\n return freeze(\n compactObject({\n connectionId,\n id,\n info,\n canWrite,\n canComment: canComment(conn.scopes),\n isReadOnly: !canWrite, // Deprecated, kept for backward-compatibility\n presence,\n })\n );\n}\n\nexport class ManagedOthers<P extends JsonObject, U extends BaseUserMeta> {\n // Track mutable state internally, but signal to the outside when the\n // observable derived state changes only\n readonly #internal: MutableSignal<{\n connections: Map</* connectionId */ number, Connection<U>>;\n presences: Map</* connectionId */ number, P>;\n }>;\n readonly #userCache: Map</* connectionId */ number, User<P, U>>;\n\n // The \"clean\" signal that is exposed to the outside world\n public readonly signal: DerivedSignal<readonly User<P, U>[]>;\n\n constructor() {\n this.#internal = new MutableSignal({\n connections: new Map</* connectionId */ number, Connection<U>>(),\n presences: new Map</* connectionId */ number, P>(),\n });\n\n this.signal = DerivedSignal.from(\n this.#internal,\n (_ignore): readonly User<P, U>[] =>\n compact(\n Array.from(this.#internal.get().presences.keys()).map(\n (connectionId) => this.getUser(Number(connectionId))\n )\n )\n );\n\n // Others\n this.#userCache = new Map();\n }\n\n // Shorthand for 
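// Wire-shape sketch for UpdatePresenceClientMsg above: the mere presence of the
// `targetActor` field marks a Full Presence™ keyframe (all fields included),
// while its absence marks a partial patch. Values are illustrative;
// ClientMsgCode.UPDATE_PRESENCE is 100 per the enum above.
const fullPresenceMsg = {
  type: 100,      // ClientMsgCode.UPDATE_PRESENCE
  targetActor: 0, // any number: "data contains every presence field"
  data: { cursor: { x: 10, y: 20 }, selectedId: null },
};

const partialPresenceMsg = {
  type: 100,      // ClientMsgCode.UPDATE_PRESENCE
  data: { cursor: { x: 11, y: 20 } }, // only the fields that changed
};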
.signal.get()\n get(): readonly User<P, U>[] {\n return this.signal.get();\n }\n\n public connectionIds(): IterableIterator<number> {\n return this.#internal.get().connections.keys();\n }\n\n clearOthers(): void {\n this.#internal.mutate((state) => {\n state.connections.clear();\n state.presences.clear();\n this.#userCache.clear();\n });\n }\n\n #_getUser(connectionId: number): User<P, U> | undefined {\n const state = this.#internal.get();\n const conn = state.connections.get(connectionId);\n const presence = state.presences.get(connectionId);\n if (conn !== undefined && presence !== undefined) {\n return makeUser(conn, presence);\n }\n return undefined;\n }\n\n getUser(connectionId: number): User<P, U> | undefined {\n const cachedUser = this.#userCache.get(connectionId);\n if (cachedUser) {\n return cachedUser;\n }\n\n const computedUser = this.#_getUser(connectionId);\n if (computedUser) {\n this.#userCache.set(connectionId, computedUser);\n return computedUser;\n }\n\n return undefined;\n }\n\n #invalidateUser(connectionId: number): void {\n this.#userCache.delete(connectionId);\n }\n\n /**\n * Records a known connection. This records the connection ID and the\n * associated metadata.\n */\n setConnection(\n connectionId: number,\n metaUserId: U[\"id\"],\n metaUserInfo: U[\"info\"],\n scopes: string[]\n ): void {\n this.#internal.mutate((state) => {\n state.connections.set(\n connectionId,\n freeze({\n connectionId,\n id: metaUserId,\n info: metaUserInfo,\n scopes,\n })\n );\n if (!state.presences.has(connectionId)) {\n return false;\n }\n return this.#invalidateUser(connectionId);\n });\n }\n\n /**\n * Removes a known connectionId. Removes both the connection's metadata and\n * the presence information.\n */\n removeConnection(connectionId: number): void {\n this.#internal.mutate((state) => {\n state.connections.delete(connectionId);\n state.presences.delete(connectionId);\n this.#invalidateUser(connectionId);\n });\n }\n\n /**\n * Stores a new user from a full presence update. If the user already exists,\n * its known presence data is overwritten.\n */\n setOther(connectionId: number, presence: P): void {\n this.#internal.mutate((state) => {\n state.presences.set(connectionId, freeze(compactObject(presence)));\n if (!state.connections.has(connectionId)) {\n return false;\n }\n return this.#invalidateUser(connectionId);\n });\n }\n\n /**\n * Patches the presence data for an existing \"other\". 
If we don't know the\n * initial presence data for this user yet, discard this patch and await the\n * full .setOther() call first.\n */\n patchOther(connectionId: number, patch: Partial<P>): void {\n this.#internal.mutate((state) => {\n const oldPresence = state.presences.get(connectionId);\n if (oldPresence === undefined) {\n return false;\n }\n\n const newPresence = merge(oldPresence, patch);\n if (oldPresence === newPresence) {\n return false;\n }\n\n state.presences.set(connectionId, freeze(newPresence));\n return this.#invalidateUser(connectionId);\n });\n }\n}\n","import { assertNever } from \"../lib/assert\";\nimport type { Relax } from \"../lib/Relax\";\nimport type { BaseMetadata, CommentBody } from \"../protocol/Comments\";\nimport type { Patchable } from \"./Patchable\";\n\n// All possible error originating from using Presence, Storage, or Yjs\n\ntype AiConnectionErrorContext = {\n type: \"AI_CONNECTION_ERROR\";\n code: -1 | 4001 | (number & {}); // eslint-disable-line @typescript-eslint/ban-types\n};\n\ntype RoomConnectionErrorContext = {\n type: \"ROOM_CONNECTION_ERROR\";\n code: -1 | 4001 | 4005 | 4006 | (number & {}); // eslint-disable-line @typescript-eslint/ban-types\n roomId: string;\n};\n\ntype LargeMessageErrorContext = {\n type: \"LARGE_MESSAGE_ERROR\";\n};\n\n// All possible errors originating from using Comments or Notifications\ntype CommentsOrNotificationsErrorContext =\n | {\n type: \"CREATE_THREAD_ERROR\";\n roomId: string;\n threadId: string;\n commentId: string;\n body: CommentBody;\n metadata: BaseMetadata;\n commentMetadata: BaseMetadata;\n }\n | {\n type: \"DELETE_THREAD_ERROR\";\n roomId: string;\n threadId: string;\n }\n | {\n type: \"EDIT_THREAD_METADATA_ERROR\";\n roomId: string;\n threadId: string;\n metadata: Patchable<BaseMetadata>;\n }\n | {\n type: \"EDIT_COMMENT_METADATA_ERROR\";\n roomId: string;\n threadId: string;\n commentId: string;\n metadata: Patchable<BaseMetadata>;\n }\n | {\n type:\n | \"MARK_THREAD_AS_RESOLVED_ERROR\"\n | \"MARK_THREAD_AS_UNRESOLVED_ERROR\"\n | \"SUBSCRIBE_TO_THREAD_ERROR\"\n | \"UNSUBSCRIBE_FROM_THREAD_ERROR\";\n roomId: string;\n threadId: string;\n }\n | {\n type: \"CREATE_COMMENT_ERROR\" | \"EDIT_COMMENT_ERROR\";\n roomId: string;\n threadId: string;\n commentId: string;\n body: CommentBody;\n metadata: BaseMetadata;\n }\n | {\n type: \"DELETE_COMMENT_ERROR\";\n roomId: string;\n threadId: string;\n commentId: string;\n }\n | {\n type: \"ADD_REACTION_ERROR\" | \"REMOVE_REACTION_ERROR\";\n roomId: string;\n threadId: string;\n commentId: string;\n emoji: string;\n }\n | {\n type: \"MARK_INBOX_NOTIFICATION_AS_READ_ERROR\";\n inboxNotificationId: string;\n roomId?: string;\n }\n | {\n type: \"DELETE_INBOX_NOTIFICATION_ERROR\";\n inboxNotificationId: string;\n }\n | {\n type:\n | \"MARK_ALL_INBOX_NOTIFICATIONS_AS_READ_ERROR\"\n | \"DELETE_ALL_INBOX_NOTIFICATIONS_ERROR\";\n }\n | {\n type: \"UPDATE_ROOM_SUBSCRIPTION_SETTINGS_ERROR\";\n roomId: string;\n }\n | {\n type: \"UPDATE_NOTIFICATION_SETTINGS_ERROR\";\n };\n\nexport type LiveblocksErrorContext = Relax<\n | RoomConnectionErrorContext // from Presence, Storage, or Yjs\n | CommentsOrNotificationsErrorContext // from Comments or Notifications or UserNotificationSettings\n | AiConnectionErrorContext // from AI\n | LargeMessageErrorContext // whena message is too large\n>;\n\nexport class LiveblocksError extends Error {\n public readonly context: LiveblocksErrorContext;\n\n constructor(message: string, context: LiveblocksErrorContext, cause?: Error) {\n super(message, { 
cause });\n this.context = context;\n this.name = \"LiveblocksError\";\n }\n\n /** Convenience accessor for error.context.roomId (if available) */\n get roomId(): LiveblocksErrorContext[\"roomId\"] {\n return this.context.roomId;\n }\n\n /** @internal Use `context.code` instead, to enable type narrowing */\n get code(): LiveblocksErrorContext[\"code\"] {\n return this.context.code;\n }\n\n /**\n * Creates a LiveblocksError from a generic error, by attaching Liveblocks\n * contextual information like room ID, thread ID, etc.\n */\n static from(context: LiveblocksErrorContext, cause?: Error): LiveblocksError {\n return new LiveblocksError(\n defaultMessageFromContext(context),\n context,\n cause\n );\n }\n}\n\n/**\n * Return a default, human-friendly error message for each possible error.\n */\nfunction defaultMessageFromContext(context: LiveblocksErrorContext): string {\n // prettier-ignore\n switch (context.type) {\n case \"ROOM_CONNECTION_ERROR\": {\n switch (context.code) {\n case 4001: return \"Not allowed to connect to the room\";\n case 4005: return \"Room is already full\";\n case 4006: return \"Kicked out of the room, because the room ID changed\";\n default: return \"Could not connect to the room\";\n }\n }\n\n case \"AI_CONNECTION_ERROR\": {\n switch (context.code) {\n case 4001: return \"Not allowed to connect to ai\";\n default: return \"Could not connect to the room\";\n }\n }\n\n case \"CREATE_THREAD_ERROR\": return \"Could not create new thread\";\n case \"DELETE_THREAD_ERROR\": return \"Could not delete thread\";\n case \"EDIT_THREAD_METADATA_ERROR\": return \"Could not edit thread metadata\";\n case \"EDIT_COMMENT_METADATA_ERROR\": return \"Could not edit comment metadata\";\n case \"MARK_THREAD_AS_RESOLVED_ERROR\": return \"Could not mark thread as resolved\";\n case \"MARK_THREAD_AS_UNRESOLVED_ERROR\": return \"Could not mark thread as unresolved\";\n case \"SUBSCRIBE_TO_THREAD_ERROR\": return \"Could not subscribe to thread\";\n case \"UNSUBSCRIBE_FROM_THREAD_ERROR\": return \"Could not unsubscribe from thread\";\n case \"CREATE_COMMENT_ERROR\": return \"Could not create new comment\";\n case \"EDIT_COMMENT_ERROR\": return \"Could not edit comment\";\n case \"DELETE_COMMENT_ERROR\": return \"Could not delete comment\";\n case \"ADD_REACTION_ERROR\": return \"Could not add reaction\";\n case \"REMOVE_REACTION_ERROR\": return \"Could not remove reaction\";\n case \"MARK_INBOX_NOTIFICATION_AS_READ_ERROR\": return \"Could not mark inbox notification as read\";\n case \"DELETE_INBOX_NOTIFICATION_ERROR\": return \"Could not delete inbox notification\";\n case \"MARK_ALL_INBOX_NOTIFICATIONS_AS_READ_ERROR\": return \"Could not mark all inbox notifications as read\";\n case \"DELETE_ALL_INBOX_NOTIFICATIONS_ERROR\": return \"Could not delete all inbox notifications\";\n case \"UPDATE_ROOM_SUBSCRIPTION_SETTINGS_ERROR\": return \"Could not update room subscription settings\";\n case \"UPDATE_NOTIFICATION_SETTINGS_ERROR\": return \"Could not update notification settings\";\n case \"LARGE_MESSAGE_ERROR\": return \"Could not send large message\";\n\n default:\n return assertNever(context, \"Unhandled case\");\n }\n}\n","import { getBearerTokenFromAuthValue, type RoomHttpApi } from \"./api-client\";\nimport type { AuthManager, AuthValue } from \"./auth-manager\";\nimport { injectBrandBadge } from \"./brand\";\nimport type { InternalSyncStatus } from \"./client\";\nimport type { Delegates, LostConnectionEvent, Status } from \"./connection\";\nimport { ManagedSocket, StopRetrying } from 
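// Handling sketch for the LiveblocksError class defined above: narrowing on
// context.type (and, for connection errors, context.code) recovers the typed
// fields of each failure. `onLiveblocksError` is a hypothetical callback name;
// in practice it would be wired to an "error" event subscription.
function onLiveblocksError(err: LiveblocksError): void {
  switch (err.context.type) {
    case "ROOM_CONNECTION_ERROR":
      if (err.context.code === 4001) {
        console.warn(`Not allowed to connect to room ${err.context.roomId}`);
      }
      break;
    case "CREATE_COMMENT_ERROR":
      console.warn(`Could not create comment ${err.context.commentId}`);
      break;
    default:
      console.error(err.message);
  }
}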
\"./connection\";\nimport type { ApplyResult, ManagedPool } from \"./crdts/AbstractCrdt\";\nimport { createManagedPool, OpSource } from \"./crdts/AbstractCrdt\";\nimport {\n cloneLson,\n getTreesDiffOperations,\n isLiveList,\n isLiveNode,\n isSameNodeOrChildOf,\n mergeStorageUpdates,\n} from \"./crdts/liveblocks-helpers\";\nimport { LiveObject } from \"./crdts/LiveObject\";\nimport type { LiveStructure, LsonObject } from \"./crdts/Lson\";\nimport type { StorageCallback, StorageUpdate } from \"./crdts/StorageUpdates\";\nimport type { DCM, DE, DP, DS, DTM, DU } from \"./globals/augmentation\";\nimport { kInternal } from \"./internal\";\nimport { assertNever, nn } from \"./lib/assert\";\nimport type { BatchStore } from \"./lib/batch\";\nimport { Promise_withResolvers } from \"./lib/controlledPromise\";\nimport { createCommentAttachmentId } from \"./lib/createIds\";\nimport { Deque } from \"./lib/Deque\";\nimport type { Callback, EventSource, Observable } from \"./lib/EventSource\";\nimport { makeEventSource } from \"./lib/EventSource\";\nimport * as console from \"./lib/fancy-console\";\nimport type { Json, JsonObject } from \"./lib/Json\";\nimport { isJsonArray, isJsonObject } from \"./lib/Json\";\nimport { asPos } from \"./lib/position\";\nimport { DerivedSignal, PatchableSignal, Signal } from \"./lib/signals\";\nimport { stringifyOrLog as stringify } from \"./lib/stringify\";\nimport {\n compact,\n deepClone,\n memoizeOnSuccess,\n partition,\n raise,\n tryParseJson,\n} from \"./lib/utils\";\nimport type {\n ContextualPromptContext,\n ContextualPromptResponse,\n} from \"./protocol/Ai\";\nimport type { Permission } from \"./protocol/AuthToken\";\nimport { canComment, canWriteStorage } from \"./protocol/AuthToken\";\nimport type { BaseUserMeta, IUserInfo } from \"./protocol/BaseUserMeta\";\nimport type {\n ClientMsg,\n UpdateStorageClientMsg,\n UpdateYDocClientMsg,\n} from \"./protocol/ClientMsg\";\nimport { ClientMsgCode } from \"./protocol/ClientMsg\";\nimport type {\n BaseMetadata,\n CommentAttachment,\n CommentBody,\n CommentData,\n CommentLocalAttachment,\n CommentUserReaction,\n QueryMetadata,\n ThreadData,\n ThreadDeleteInfo,\n} from \"./protocol/Comments\";\nimport type {\n InboxNotificationData,\n InboxNotificationDeleteInfo,\n} from \"./protocol/InboxNotifications\";\nimport type { MentionData } from \"./protocol/MentionData\";\nimport type { ClientWireOp, Op, ServerWireOp } from \"./protocol/Op\";\nimport { isIgnoredOp, OpCode } from \"./protocol/Op\";\nimport type { RoomSubscriptionSettings } from \"./protocol/RoomSubscriptionSettings\";\nimport type {\n CommentsEventServerMsg,\n RoomStateServerMsg,\n ServerMsg,\n UpdatePresenceServerMsg,\n UserJoinServerMsg,\n UserLeftServerMsg,\n YDocUpdateServerMsg,\n} from \"./protocol/ServerMsg\";\nimport { ServerMsgCode } from \"./protocol/ServerMsg\";\nimport type {\n NodeMap,\n NodeStream,\n SerializedCrdt,\n} from \"./protocol/StorageNode\";\nimport { compactNodesToNodeStream } from \"./protocol/StorageNode\";\nimport type {\n SubscriptionData,\n SubscriptionDeleteInfo,\n} from \"./protocol/Subscriptions\";\nimport type { HistoryVersion } from \"./protocol/VersionHistory\";\nimport { ManagedOthers } from \"./refs/ManagedOthers\";\nimport type * as DevTools from \"./types/DevToolsTreeNode\";\nimport type {\n IWebSocket,\n IWebSocketCloseEvent,\n IWebSocketInstance,\n IWebSocketMessageEvent,\n} from \"./types/IWebSocket\";\nimport { LiveblocksError } from \"./types/LiveblocksError\";\nimport type {\n BadgeLocation,\n InternalOthersEvent,\n 
OthersEvent,\n TextEditorType,\n} from \"./types/Others\";\nimport type { Patchable } from \"./types/Patchable\";\nimport type { User } from \"./types/User\";\nimport { PKG_VERSION } from \"./version\";\n\nexport type TimeoutID = ReturnType<typeof setTimeout>;\n\n//\n// NOTE:\n// This type looks an awful lot like InternalOthersEvent, but don't change this\n// type definition or DRY this up!\n// The type LegacyOthersEvent is used in the signature of some public APIs, and\n// as such should remain backward compatible.\n//\ntype LegacyOthersEvent<P extends JsonObject, U extends BaseUserMeta> =\n | { type: \"leave\"; user: User<P, U> }\n | { type: \"enter\"; user: User<P, U> }\n | {\n type: \"update\";\n user: User<P, U>;\n updates: Partial<P>;\n }\n | { type: \"reset\" };\n\ntype LegacyOthersEventCallback<P extends JsonObject, U extends BaseUserMeta> = (\n others: readonly User<P, U>[],\n event: LegacyOthersEvent<P, U>\n) => void;\n\nexport type RoomEventMessage<\n P extends JsonObject,\n U extends BaseUserMeta,\n E extends Json,\n> = {\n /**\n * The connection ID of the client that sent the event.\n * If this message was broadcast from the server (via the REST API), then\n * this value will be -1.\n */\n connectionId: number;\n /**\n * The User (from the others list) that sent the event.\n * If this message was broadcast from the server (via the REST API), then\n * this value will be null.\n */\n user: User<P, U> | null;\n event: E;\n};\n\nexport type SyncStatus =\n /* Liveblocks is in the process of writing changes */\n | \"synchronizing\"\n /* Liveblocks has persisted all pending changes */\n | \"synchronized\";\n\nexport type StorageStatus =\n /* The storage is not loaded and has not been requested. */\n | \"not-loaded\"\n /* The storage is loading from Liveblocks servers */\n | \"loading\"\n /* Some storage modifications has not been acknowledged yet by the server */\n | \"synchronizing\"\n /* The storage is sync with Liveblocks servers */\n | \"synchronized\";\n\ntype RoomEventCallbackMap<\n P extends JsonObject,\n U extends BaseUserMeta,\n E extends Json,\n> = {\n status: Callback<Status>; // New/recommended API\n \"lost-connection\": Callback<LostConnectionEvent>;\n event: Callback<RoomEventMessage<P, U, E>>;\n \"my-presence\": Callback<P>;\n //\n // NOTE: LegacyOthersEventCallback is the only one not taking a Callback<T>\n // shape, since this API historically has taken _two_ callback arguments\n // instead of just one.\n others: LegacyOthersEventCallback<P, U>;\n error: Callback<Error>;\n history: Callback<HistoryEvent>;\n \"storage-status\": Callback<StorageStatus>;\n comments: Callback<CommentsEventServerMsg>;\n};\n\nexport interface History {\n /**\n * Undoes the last operation executed by the current client.\n * It does not impact operations made by other clients.\n *\n * @example\n * room.updatePresence({ selectedId: \"xx\" }, { addToHistory: true });\n * room.updatePresence({ selectedId: \"yy\" }, { addToHistory: true });\n * room.history.undo();\n * // room.getPresence() equals { selectedId: \"xx\" }\n */\n undo: () => void;\n\n /**\n * Redoes the last operation executed by the current client.\n * It does not impact operations made by other clients.\n *\n * @example\n * room.updatePresence({ selectedId: \"xx\" }, { addToHistory: true });\n * room.updatePresence({ selectedId: \"yy\" }, { addToHistory: true });\n * room.history.undo();\n * // room.getPresence() equals { selectedId: \"xx\" }\n * room.history.redo();\n * // room.getPresence() equals { selectedId: \"yy\" }\n */\n 
redo: () => void;\n\n /**\n * Returns whether there are any operations to undo.\n *\n * @example\n * room.updatePresence({ selectedId: \"xx\" }, { addToHistory: true });\n * // room.history.canUndo() is true\n * room.history.undo();\n * // room.history.canUndo() is false\n */\n canUndo: () => boolean;\n\n /**\n * Returns whether there are any operations to redo.\n *\n * @example\n * room.updatePresence({ selectedId: \"xx\" }, { addToHistory: true });\n * room.history.undo();\n * // room.history.canRedo() is true\n * room.history.redo();\n * // room.history.canRedo() is false\n */\n canRedo: () => boolean;\n\n /**\n * Clears the undo and redo stacks. This operation cannot be undone ;)\n */\n clear: () => void;\n\n /**\n * All future modifications made on the Room will be merged together to create a single history item until resume is called.\n *\n * @example\n * room.updatePresence({ cursor: { x: 0, y: 0 } }, { addToHistory: true });\n * room.history.pause();\n * room.updatePresence({ cursor: { x: 1, y: 1 } }, { addToHistory: true });\n * room.updatePresence({ cursor: { x: 2, y: 2 } }, { addToHistory: true });\n * room.history.resume();\n * room.history.undo();\n * // room.getPresence() equals { cursor: { x: 0, y: 0 } }\n */\n pause: () => void;\n\n /**\n * Resumes history. Modifications made on the Room are not merged into a single history item anymore.\n *\n * @example\n * room.updatePresence({ cursor: { x: 0, y: 0 } }, { addToHistory: true });\n * room.history.pause();\n * room.updatePresence({ cursor: { x: 1, y: 1 } }, { addToHistory: true });\n * room.updatePresence({ cursor: { x: 2, y: 2 } }, { addToHistory: true });\n * room.history.resume();\n * room.history.undo();\n * // room.getPresence() equals { cursor: { x: 0, y: 0 } }\n */\n resume: () => void;\n}\n\nexport type HistoryEvent = {\n canUndo: boolean;\n canRedo: boolean;\n};\n\nexport type RoomEventName = Extract<\n keyof RoomEventCallbackMap<never, never, never>,\n string\n>;\n\nexport type RoomEventCallbackFor<\n K extends RoomEventName,\n P extends JsonObject,\n U extends BaseUserMeta,\n E extends Json,\n> = RoomEventCallbackMap<P, U, E>[K];\n\nexport type RoomEventCallback = RoomEventCallbackFor<\n RoomEventName,\n JsonObject,\n BaseUserMeta,\n Json\n>;\n\nexport type BroadcastOptions = {\n /**\n * Whether or not event is queued if the connection is currently closed.\n *\n * ❗ We are not sure if we want to support this option in the future so it might be deprecated to be replaced by something else\n */\n shouldQueueEventIfNotReady: boolean;\n};\n\ntype SubscribeFn<\n P extends JsonObject,\n _TStorage extends LsonObject,\n U extends BaseUserMeta,\n E extends Json,\n> = {\n /**\n * Subscribes to changes made on any Live structure. 
Returns an unsubscribe function.\n *\n * @internal This legacy API works, but was never documented publicly.\n */\n (callback: StorageCallback): () => void;\n\n /**\n * Subscribe to the current user presence updates.\n *\n * @param listener the callback that is called every time the current user presence is updated with {@link Room.updatePresence}.\n *\n * @returns Unsubscribe function.\n *\n * @example\n * room.subscribe(\"my-presence\", (presence) => {\n * // Do something\n * });\n */\n (type: \"my-presence\", listener: Callback<P>): () => void;\n\n /**\n * Subscribe to the other users updates.\n *\n * @param listener the callback that is called when a user enters or leaves the room or when a user update its presence.\n *\n * @returns Unsubscribe function.\n *\n * @example\n * room.subscribe(\"others\", (others) => {\n * // Do something\n * });\n *\n */\n (type: \"others\", listener: LegacyOthersEventCallback<P, U>): () => void;\n\n /**\n * Subscribe to events broadcasted by {@link Room.broadcastEvent}\n *\n * @param listener the callback that is called when a user calls {@link Room.broadcastEvent}\n *\n * @returns Unsubscribe function.\n *\n * @example\n * room.subscribe(\"event\", ({ event, connectionId }) => {\n * // Do something\n * });\n *\n */\n (type: \"event\", listener: Callback<RoomEventMessage<P, U, E>>): () => void;\n\n /**\n * Subscribe to errors thrown in the room.\n *\n * @returns Unsubscribe function.\n *\n */\n (type: \"error\", listener: Callback<LiveblocksError>): () => void;\n\n /**\n * Subscribe to connection status updates. The callback will be called any\n * time the status changes.\n *\n * @returns Unsubscribe function.\n *\n */\n (type: \"status\", listener: Callback<Status>): () => void;\n\n /**\n * Subscribe to the exceptional event where reconnecting to the Liveblocks\n * servers is taking longer than usual. This typically is a sign of a client\n * that has lost internet connectivity.\n *\n * This isn't problematic (because the Liveblocks client is still trying to\n * reconnect), but it's typically a good idea to inform users about it if\n * the connection takes too long to recover.\n */\n (\n type: \"lost-connection\",\n listener: Callback<LostConnectionEvent>\n ): () => void;\n\n /**\n * Subscribes to changes made on a Live structure. Returns an unsubscribe function.\n *\n * @param callback The callback this called when the Live structure changes.\n *\n * @returns Unsubscribe function.\n *\n * @example\n * const liveMap = new LiveMap(); // Could also be LiveList or LiveObject\n * const unsubscribe = room.subscribe(liveMap, (liveMap) => { });\n * unsubscribe();\n */\n <L extends LiveStructure>(\n liveStructure: L,\n callback: (node: L) => void\n ): () => void;\n\n /**\n * Subscribes to changes made on a Live structure and all the nested data\n * structures. Returns an unsubscribe function. 
In a future version, we\n * will also expose what exactly changed in the Live structure.\n *\n * @param callback The callback this called when the Live structure, or any\n * of its nested values, changes.\n *\n * @returns Unsubscribe function.\n *\n * @example\n * const liveMap = new LiveMap(); // Could also be LiveList or LiveObject\n * const unsubscribe = room.subscribe(liveMap, (updates) => { }, { isDeep: true });\n * unsubscribe();\n */\n <L extends LiveStructure>(\n liveStructure: L,\n callback: StorageCallback,\n options: { isDeep: true }\n ): () => void;\n\n /**\n * Subscribe to the current user's history changes.\n *\n * @returns Unsubscribe function.\n *\n * @example\n * room.subscribe(\"history\", ({ canUndo, canRedo }) => {\n * // Do something\n * });\n */\n (type: \"history\", listener: Callback<HistoryEvent>): () => void;\n\n /**\n * Subscribe to storage status changes.\n *\n * @returns Unsubscribe function.\n *\n * @example\n * room.subscribe(\"storage-status\", (status) => {\n * switch(status) {\n * case \"not-loaded\":\n * break;\n * case \"loading\":\n * break;\n * case \"synchronizing\":\n * break;\n * case \"synchronized\":\n * break;\n * default:\n * break;\n * }\n * });\n */\n (type: \"storage-status\", listener: Callback<StorageStatus>): () => void;\n\n (type: \"comments\", listener: Callback<CommentsEventServerMsg>): () => void;\n};\n\nexport type GetThreadsOptions<TM extends BaseMetadata> = {\n cursor?: string;\n query?: {\n resolved?: boolean;\n subscribed?: boolean;\n metadata?: Partial<QueryMetadata<TM>>;\n };\n};\n\nexport type GetThreadsSinceOptions = {\n since: Date;\n signal?: AbortSignal;\n};\n\nexport type UploadAttachmentOptions = {\n signal?: AbortSignal;\n};\n\ntype ListTextVersionsSinceOptions = {\n since: Date;\n signal?: AbortSignal;\n};\n\ntype GetSubscriptionSettingsOptions = {\n signal?: AbortSignal;\n};\n\n/**\n * @private Widest-possible Room type, matching _any_ Room instance. Note that\n * this type is different from `Room`-without-type-arguments. That represents\n * a Room instance using globally augmented types only, which is narrower.\n */\nexport type OpaqueRoom = Room<\n JsonObject,\n LsonObject,\n BaseUserMeta,\n Json,\n BaseMetadata\n>;\n\nexport type Room<\n P extends JsonObject = DP,\n S extends LsonObject = DS,\n U extends BaseUserMeta = DU,\n E extends Json = DE,\n TM extends BaseMetadata = DTM,\n CM extends BaseMetadata = DCM,\n> = {\n /**\n * @private\n *\n * Private methods and variables used in the core internals, but as a user\n * of Liveblocks, NEVER USE ANY OF THESE DIRECTLY, because bad things\n * will probably happen if you do.\n */\n readonly [kInternal]: PrivateRoomApi;\n\n /**\n * The id of the room.\n */\n readonly id: string;\n\n /**\n * Return the current connection status for this room. 
Can be used to display\n * a status badge for your Liveblocks connection.\n */\n getStatus(): Status;\n readonly subscribe: SubscribeFn<P, S, U, E>;\n\n /**\n * Room's history contains functions that let you undo and redo operation made on by the current client on the presence and storage.\n */\n readonly history: History;\n\n /**\n * Gets the current user.\n * Returns null if not it is not yet connected to the room.\n *\n * @example\n * const user = room.getSelf();\n */\n getSelf(): User<P, U> | null;\n\n /**\n * Gets the presence of the current user.\n *\n * @example\n * const presence = room.getPresence();\n */\n getPresence(): P;\n\n /**\n * Gets all the other users in the room.\n *\n * @example\n * const others = room.getOthers();\n */\n getOthers(): readonly User<P, U>[];\n\n /**\n * Updates the presence of the current user. Only pass the properties you want to update. No need to send the full presence.\n * @param patch A partial object that contains the properties you want to update.\n * @param options Optional object to configure the behavior of updatePresence.\n *\n * @example\n * room.updatePresence({ x: 0 });\n * room.updatePresence({ y: 0 });\n *\n * const presence = room.getPresence();\n * // presence is equivalent to { x: 0, y: 0 }\n */\n updatePresence(\n patch: Partial<P>,\n options?: {\n /**\n * Whether or not the presence should have an impact on the undo/redo history.\n */\n addToHistory: boolean;\n }\n ): void;\n\n /**\n * Sends Yjs document updates to Liveblocks server.\n *\n * @param {string} data the doc update to send to the server, base64 encoded uint8array\n */\n updateYDoc(data: string, guid?: string, isV2?: boolean): void;\n\n /**\n * Sends a request for the current document from liveblocks server\n */\n fetchYDoc(stateVector: string, guid?: string, isV2?: boolean): void;\n\n /**\n * Broadcasts an event to other users in the room. Event broadcasted to the room can be listened with {@link Room.subscribe}(\"event\").\n * @param {any} event the event to broadcast. Should be serializable to JSON\n *\n * @example\n * // On client A\n * room.broadcastEvent({ type: \"EMOJI\", emoji: \"🔥\" });\n *\n * // On client B\n * room.subscribe(\"event\", ({ event }) => {\n * if(event.type === \"EMOJI\") {\n * // Do something\n * }\n * });\n */\n broadcastEvent(event: E, options?: BroadcastOptions): void;\n\n /**\n * Get the room's storage asynchronously.\n * The storage's root is a {@link LiveObject}.\n *\n * @example\n * const { root } = await room.getStorage();\n */\n getStorage(): Promise<{\n root: LiveObject<S>;\n }>;\n\n /**\n * Get the room's storage synchronously.\n * The storage's root is a {@link LiveObject}.\n *\n * @example\n * const root = room.getStorageSnapshot();\n */\n getStorageSnapshot(): LiveObject<S> | null;\n\n /**\n * All possible room events, subscribable from a single place.\n *\n * @private These event sources are private for now, but will become public\n * once they're stable.\n */\n readonly events: {\n readonly status: Observable<Status>;\n readonly lostConnection: Observable<LostConnectionEvent>;\n\n readonly customEvent: Observable<RoomEventMessage<P, U, E>>; // prettier-ignore\n readonly self: Observable<User<P, U>>;\n readonly myPresence: Observable<P>;\n readonly others: Observable<OthersEvent<P, U>>;\n readonly storageBatch: Observable<StorageUpdate[]>;\n readonly history: Observable<HistoryEvent>;\n\n /**\n * Subscribe to the storage loaded event. Will fire any time a full Storage\n * copy is downloaded. 
(This happens after the initial connect, and on\n * every reconnect.)\n */\n readonly storageDidLoad: Observable<void>;\n\n readonly storageStatus: Observable<StorageStatus>;\n readonly ydoc: Observable<YDocUpdateServerMsg | UpdateYDocClientMsg>;\n readonly comments: Observable<CommentsEventServerMsg>;\n\n /**\n * Called right before the room is destroyed. The event cannot be used to\n * prevent the room from being destroyed, only to be informed that this is\n * imminent.\n */\n readonly roomWillDestroy: Observable<void>;\n };\n\n /**\n * Batches modifications made during the given function.\n * All the modifications are sent to other clients in a single message.\n * All the subscribers are called only after the batch is over.\n * All the modifications are merged in a single history item (undo/redo).\n *\n * @example\n * const { root } = await room.getStorage();\n * room.batch(() => {\n * root.set(\"x\", 0);\n * room.updatePresence({ cursor: { x: 100, y: 100 }});\n * });\n */\n batch<T>(fn: () => T): T;\n\n /**\n * Get the storage status.\n *\n * - `not-loaded`: Initial state when entering the room.\n * - `loading`: Once the storage has been requested via room.getStorage().\n * - `synchronizing`: When some local updates have not been acknowledged by Liveblocks servers.\n * - `synchronized`: Storage is in sync with Liveblocks servers.\n */\n getStorageStatus(): StorageStatus;\n\n isPresenceReady(): boolean;\n isStorageReady(): boolean;\n\n /**\n * Returns a Promise that resolves as soon as Presence is available, which\n * happens shortly after the WebSocket connection has been established. Once\n * this happens, `self` and `others` are known and available to use. After\n * awaiting this promise, `.isPresenceReady()` will be guaranteed to be true.\n * Even when calling this function multiple times, it's guaranteed to return\n * the same Promise instance.\n */\n waitUntilPresenceReady(): Promise<void>;\n\n /**\n * Returns a Promise that resolves as soon as Storage has been loaded and\n * available. After awaiting this promise, `.isStorageReady()` will be\n * guaranteed to be true. Even when calling this function multiple times,\n * it's guaranteed to return the same Promise instance.\n */\n waitUntilStorageReady(): Promise<void>;\n\n /**\n * Start an attempt to connect the room (aka \"enter\" it). Calling\n * `.connect()` only has an effect if the room is still in its idle initial\n * state, or the room was explicitly disconnected, or reconnection attempts\n * were stopped (for example, because the user isn't authorized to enter the\n * room). Will be a no-op otherwise.\n */\n connect(): void;\n\n /**\n * Disconnect the room's connection to the Liveblocks server, if any. Puts\n * the room back into an idle state. It will not do anything until either\n * `.connect()` or `.reconnect()` is called.\n *\n * Only use this API if you wish to connect the room again at a later time.\n * If you want to disconnect the room because you no longer need it, call\n * `.destroy()` instead.\n */\n disconnect(): void;\n\n /**\n * @internal (for now)\n *\n * Disconnect the room's connection to the Liveblocks server, if any. Runs\n * cleanup functions. The room instance can no longer be used to (re)connect.\n */\n destroy(): void;\n\n /**\n * Reconnect the room to the Liveblocks server by re-establishing a fresh\n * connection. 
If the room is not connected yet, initiate it.\n */\n reconnect(): void;\n\n /**\n * Returns the threads within the current room and their associated inbox notifications.\n * It also returns the request date that can be used for subsequent polling.\n *\n * @example\n * const {\n * threads,\n * inboxNotifications,\n * subscriptions,\n * requestedAt\n * } = await room.getThreads({ query: { resolved: false }});\n */\n getThreads(options?: GetThreadsOptions<TM>): Promise<{\n threads: ThreadData<TM, CM>[];\n inboxNotifications: InboxNotificationData[];\n subscriptions: SubscriptionData[];\n requestedAt: Date;\n nextCursor: string | null;\n permissionHints: Record<string, Permission[]>;\n }>;\n\n /**\n * Returns the updated and deleted threads and their associated inbox notifications and subscriptions since the requested date.\n *\n * @example\n * const result = await room.getThreads();\n * // ... //\n * await room.getThreadsSince({ since: result.requestedAt });\n */\n getThreadsSince(options: GetThreadsSinceOptions): Promise<{\n threads: {\n updated: ThreadData<TM, CM>[];\n deleted: ThreadDeleteInfo[];\n };\n inboxNotifications: {\n updated: InboxNotificationData[];\n deleted: InboxNotificationDeleteInfo[];\n };\n subscriptions: {\n updated: SubscriptionData[];\n deleted: SubscriptionDeleteInfo[];\n };\n requestedAt: Date;\n permissionHints: Record<string, Permission[]>;\n }>;\n\n /**\n * Returns a thread and the associated inbox notification and subscription if it exists.\n *\n * @example\n * const { thread, inboxNotification, subscription } = await room.getThread(\"th_xxx\");\n */\n getThread(threadId: string): Promise<{\n thread?: ThreadData<TM, CM>;\n inboxNotification?: InboxNotificationData;\n subscription?: SubscriptionData;\n }>;\n\n /**\n * Creates a thread.\n *\n * @example\n * const thread = await room.createThread({\n * body: {\n * version: 1,\n * content: [{ type: \"paragraph\", children: [{ text: \"Hello\" }] }],\n * },\n * })\n */\n createThread(options: {\n threadId?: string;\n commentId?: string;\n metadata: TM | undefined;\n body: CommentBody;\n commentMetadata?: CM;\n attachmentIds?: string[];\n }): Promise<ThreadData<TM, CM>>;\n\n /**\n * Deletes a thread.\n *\n * @example\n * await room.deleteThread(\"th_xxx\");\n */\n deleteThread(threadId: string): Promise<void>;\n\n /**\n * Edits a thread's metadata.\n * To delete an existing metadata property, set its value to `null`.\n *\n * @example\n * await room.editThreadMetadata({ threadId: \"th_xxx\", metadata: { x: 100, y: 100 } })\n */\n editThreadMetadata(options: {\n metadata: Patchable<TM>;\n threadId: string;\n }): Promise<TM>;\n\n /**\n * Edits a comment's metadata.\n * To delete an existing metadata property, set its value to `null`.\n *\n * @example\n * await room.editCommentMetadata({ threadId: \"th_xxx\", commentId: \"cm_xxx\", metadata: { tag: \"important\", externalId: 1234 } })\n */\n editCommentMetadata(options: {\n threadId: string;\n commentId: string;\n metadata: Patchable<CM>;\n }): Promise<CM>;\n\n /**\n * Marks a thread as resolved.\n *\n * @example\n * await room.markThreadAsResolved(\"th_xxx\");\n */\n markThreadAsResolved(threadId: string): Promise<void>;\n\n /**\n * Marks a thread as unresolved.\n *\n * @example\n * await room.markThreadAsUnresolved(\"th_xxx\");\n */\n markThreadAsUnresolved(threadId: string): Promise<void>;\n\n /**\n * Subscribes the user to a thread.\n *\n * @example\n * await room.subscribeToThread(\"th_xxx\");\n */\n subscribeToThread(threadId: string): Promise<SubscriptionData>;\n\n 
/**\n * Unsubscribes the user from a thread.\n *\n * @example\n * await room.unsubscribeFromThread(\"th_xxx\");\n */\n unsubscribeFromThread(threadId: string): Promise<void>;\n\n /**\n * Creates a comment.\n *\n * @example\n * await room.createComment({\n * threadId: \"th_xxx\",\n * body: {\n * version: 1,\n * content: [{ type: \"paragraph\", children: [{ text: \"Hello\" }] }],\n * },\n * });\n */\n createComment(options: {\n threadId: string;\n commentId?: string;\n body: CommentBody;\n metadata?: CM;\n attachmentIds?: string[];\n }): Promise<CommentData<CM>>;\n\n /**\n * Edits a comment.\n *\n * @example\n * await room.editComment({\n * threadId: \"th_xxx\",\n * commentId: \"cm_xxx\"\n * body: {\n * version: 1,\n * content: [{ type: \"paragraph\", children: [{ text: \"Hello\" }] }],\n * },\n * });\n */\n editComment(options: {\n threadId: string;\n commentId: string;\n body: CommentBody;\n metadata?: Patchable<CM>;\n attachmentIds?: string[];\n }): Promise<CommentData<CM>>;\n\n /**\n * Deletes a comment.\n * If it is the last non-deleted comment, the thread also gets deleted.\n *\n * @example\n * await room.deleteComment({\n * threadId: \"th_xxx\",\n * commentId: \"cm_xxx\"\n * });\n */\n deleteComment(options: {\n threadId: string;\n commentId: string;\n }): Promise<void>;\n\n /**\n * Adds a reaction from a comment for the current user.\n *\n * @example\n * await room.addReaction({ threadId: \"th_xxx\", commentId: \"cm_xxx\", emoji: \"👍\" })\n */\n addReaction(options: {\n threadId: string;\n commentId: string;\n emoji: string;\n }): Promise<CommentUserReaction>;\n\n /**\n * Removes a reaction from a comment.\n *\n * @example\n * await room.removeReaction({ threadId: \"th_xxx\", commentId: \"cm_xxx\", emoji: \"👍\" })\n */\n removeReaction(options: {\n threadId: string;\n commentId: string;\n emoji: string;\n }): Promise<void>;\n\n /**\n * Creates a local attachment from a file.\n *\n * @example\n * room.prepareAttachment(file);\n */\n prepareAttachment(file: File): CommentLocalAttachment;\n\n /**\n * Uploads a local attachment.\n *\n * @example\n * const attachment = room.prepareAttachment(file);\n * await room.uploadAttachment(attachment);\n */\n uploadAttachment(\n attachment: CommentLocalAttachment,\n options?: UploadAttachmentOptions\n ): Promise<CommentAttachment>;\n\n /**\n * Returns a presigned URL for an attachment by its ID.\n *\n * @example\n * await room.getAttachmentUrl(\"at_xxx\");\n */\n getAttachmentUrl(attachmentId: string): Promise<string>;\n\n /**\n * Gets the user's subscription settings for the current room.\n *\n * @example\n * const settings = await room.getSubscriptionSettings();\n */\n getSubscriptionSettings(\n options?: GetSubscriptionSettingsOptions\n ): Promise<RoomSubscriptionSettings>;\n\n /**\n * Updates the user's subscription settings for the current room.\n *\n * @example\n * await room.updateSubscriptionSettings({ threads: \"replies_and_mentions\" });\n */\n updateSubscriptionSettings(\n settings: Partial<RoomSubscriptionSettings>\n ): Promise<RoomSubscriptionSettings>;\n\n /**\n * @private\n *\n * Internal use only. Signature might change in the future.\n */\n markInboxNotificationAsRead(notificationId: string): Promise<void>;\n};\n\nexport type YjsSyncStatus = \"loading\" | \"synchronizing\" | \"synchronized\";\n\n/**\n * Interface that @liveblocks/yjs must respect.\n * This interface type is declare in @liveblocks/core, so we don't have to\n * depend on `yjs`. 
It's only used to determine the API contract between\n * @liveblocks/core and @liveblocks/yjs.\n */\nexport interface IYjsProvider {\n synced: boolean;\n getStatus: () => YjsSyncStatus;\n on(event: \"sync\", listener: (synced: boolean) => void): void;\n on(event: \"status\", listener: (status: YjsSyncStatus) => void): void;\n off(event: \"sync\", listener: (synced: boolean) => void): void;\n off(event: \"status\", listener: (status: YjsSyncStatus) => void): void;\n}\n\n/**\n * A \"Sync Source\" can be a Storage document, a Yjs document, Comments,\n * Notifications, etc.\n * The Client keeps a registry of all active sync sources, and will use it to\n * determine the global \"sync status\" for Liveblocks.\n */\nexport interface SyncSource {\n setSyncStatus(status: InternalSyncStatus): void;\n destroy(): void;\n}\n\n/**\n * @private\n *\n * Private methods to directly control the underlying state machine for this\n * room. Used in the core internals and for unit testing, but as a user of\n * Liveblocks, NEVER USE ANY OF THESE METHODS DIRECTLY, because bad things\n * will probably happen if you do.\n */\nexport type PrivateRoomApi = {\n // For introspection in unit tests only\n presenceBuffer: Json | undefined;\n undoStack: readonly (readonly Readonly<Stackframe<JsonObject>>[])[];\n nodeCount: number;\n\n // Get/set the associated Yjs provider on this room\n getYjsProvider(): IYjsProvider | undefined;\n setYjsProvider(provider: IYjsProvider | undefined): void;\n yjsProviderDidChange: Observable<void>;\n\n // For DevTools support (Liveblocks browser extension)\n getSelf_forDevTools(): DevTools.UserTreeNode | null;\n getOthers_forDevTools(): readonly DevTools.UserTreeNode[];\n\n // For reporting editor metadata\n reportTextEditor(editor: TextEditorType, rootKey: string): Promise<void>;\n\n createTextMention(mentionId: string, mention: MentionData): Promise<void>;\n deleteTextMention(mentionId: string): Promise<void>;\n listTextVersions(): Promise<{\n versions: HistoryVersion[];\n requestedAt: Date;\n }>;\n listTextVersionsSince(options: ListTextVersionsSinceOptions): Promise<{\n versions: HistoryVersion[];\n requestedAt: Date;\n }>;\n\n getTextVersion(versionId: string): Promise<Response>;\n createTextVersion(): Promise<void>;\n\n executeContextualPrompt(options: {\n prompt: string;\n context: ContextualPromptContext;\n previous?: {\n prompt: string;\n response: ContextualPromptResponse;\n };\n signal: AbortSignal;\n }): Promise<string>;\n\n // NOTE: These are only used in our e2e test app!\n simulate: {\n explicitClose(event: IWebSocketCloseEvent): void;\n rawSend(data: string): void;\n };\n\n attachmentUrlsStore: BatchStore<string, string>;\n};\n\n//\n// The maximum message size on websockets is 1MB. 
If a message larger than this\n// threshold is attempted to be sent, the strategy picked via the\n// `largeMessageStrategy` option will be used.\n//\n// In practice, we'll set threshold to slightly less than 1 MB.\nconst MAX_SOCKET_MESSAGE_SIZE = 1024 * 1024 - 512;\n\nfunction makeIdFactory(connectionId: number): IdFactory {\n let count = 0;\n return () => `${connectionId}:${count++}`;\n}\n\ntype Stackframe<P extends JsonObject> = Op | PresenceStackframe<P>;\n\ntype PresenceStackframe<P extends JsonObject> = {\n readonly type: \"presence\";\n readonly data: P;\n};\n\ntype IdFactory = () => string;\n\nexport type StaticSessionInfo = {\n readonly userId?: string;\n readonly userInfo?: IUserInfo;\n};\n\nexport type DynamicSessionInfo = {\n readonly actor: number;\n readonly nonce: string;\n readonly scopes: string[];\n readonly meta: JsonObject;\n};\n\ntype RoomState<\n P extends JsonObject,\n S extends LsonObject,\n U extends BaseUserMeta,\n E extends Json,\n> = {\n /**\n * All pending changes that yet need to be synced.\n */\n buffer: {\n flushTimerID: TimeoutID | undefined;\n\n // When the last flush happened. Together with config.throttleDelay, this\n // will control whether the next flush will be sent out immediately, or if\n // a flush will get scheduled for a few milliseconds into the future.\n readonly lastFlushedAt: number;\n\n // Queued-up \"my presence\" updates to be flushed at the earliest convenience\n presenceUpdates:\n | { type: \"partial\"; data: Partial<P> }\n | { type: \"full\"; data: P }\n | null;\n messages: ClientMsg<P, E>[];\n storageOperations: ClientWireOp[];\n };\n\n //\n // The \"self\" User takes assembly of three sources-of-truth:\n // - The JWT token provides the userId and userInfo metadata (static)\n // - The server, in its initial ROOM_STATE message, will provide the actor ID\n // and the scopes (dynamic)\n // - The presence is provided by the client's initialPresence configuration (presence)\n //\n readonly staticSessionInfoSig: Signal<StaticSessionInfo | null>;\n readonly dynamicSessionInfoSig: Signal<DynamicSessionInfo | null>;\n readonly myPresence: PatchableSignal<P>;\n readonly others: ManagedOthers<P, U>;\n\n idFactory: IdFactory | null;\n initialStorage: S;\n\n yjsProvider: IYjsProvider | undefined;\n readonly yjsProviderDidChange: EventSource<void>;\n\n pool: ManagedPool;\n root: LiveObject<S> | undefined;\n\n readonly undoStack: Stackframe<P>[][];\n readonly redoStack: Stackframe<P>[][];\n\n /**\n * When history is paused, all operations will get queued up here. When\n * history is resumed, these operations get \"committed\" to the undo stack.\n */\n pausedHistory: null | Deque<Stackframe<P>>;\n\n /**\n * Place to collect all mutations during a batch. Ops will be sent over the\n * wire after the batch is ended.\n */\n activeBatch: {\n ops: ClientWireOp[];\n reverseOps: Deque<Stackframe<P>>;\n updates: {\n others: [];\n presence: boolean;\n storageUpdates: Map<string, StorageUpdate>;\n };\n } | null;\n\n // A registry of yet-unacknowledged Ops. 
These Ops have already been\n // submitted to the server, but have not yet been acknowledged.\n readonly unacknowledgedOps: Map<string, ClientWireOp>;\n};\n\nexport type Polyfills = {\n atob?: (data: string) => string;\n fetch?: typeof fetch;\n WebSocket?: IWebSocket;\n};\n\n/**\n * Makes all tuple positions optional.\n * Example, turns:\n * [foo: string; bar: number]\n * into:\n * [foo?: string; bar?: number]\n */\ntype OptionalTuple<T extends any[]> = { [K in keyof T]?: T[K] };\n\n/**\n * Returns Partial<T> if all fields on C are optional, T otherwise.\n */\nexport type PartialUnless<C, T> =\n Record<string, never> extends C\n ? Partial<T>\n : // Extra test. We'll want to treat \"never\" as if the empty object is\n // assignable to it, because otherwise it will not\n [C] extends [never]\n ? Partial<T>\n : T;\n\n/**\n * Returns OptionalTupleUnless<T> if all fields on C are optional, T otherwise.\n */\nexport type OptionalTupleUnless<C, T extends any[]> =\n Record<string, never> extends C\n ? OptionalTuple<T>\n : // Extra test. We'll want to treat \"never\" as if the empty object is\n // assignable to it, because otherwise it will not\n [C] extends [never]\n ? OptionalTuple<T>\n : T;\n\nexport type RoomDelegates = Omit<Delegates<AuthValue>, \"canZombie\">;\n\nexport type LargeMessageStrategy =\n | \"default\"\n | \"split\"\n | \"experimental-fallback-to-http\";\n\n/** @internal */\nexport type RoomConfig<TM extends BaseMetadata, CM extends BaseMetadata> = {\n delegates: RoomDelegates;\n\n roomId: string;\n throttleDelay: number;\n lostConnectionTimeout: number;\n backgroundKeepAliveTimeout?: number;\n largeMessageStrategy?: LargeMessageStrategy;\n\n unstable_streamData?: boolean;\n\n polyfills?: Polyfills;\n\n roomHttpClient: RoomHttpApi<TM, CM>;\n\n baseUrl: string;\n enableDebugLogging?: boolean;\n\n badgeLocation?: BadgeLocation;\n\n // We would not have to pass this complicated factory/callback functions to\n // the createRoom() function if we would simply pass the Client instance to\n // the Room instance, so it can directly call this back on the Client.\n createSyncSource: () => SyncSource;\n errorEventSource: EventSource<LiveblocksError>;\n};\n\nfunction userToTreeNode(\n key: string,\n user: User<JsonObject, BaseUserMeta>\n): DevTools.UserTreeNode {\n return {\n type: \"User\",\n id: `${user.connectionId}`,\n key,\n payload: {\n connectionId: user.connectionId,\n id: user.id,\n info: user.info,\n presence: user.presence,\n isReadOnly: !user.canWrite,\n },\n };\n}\n\n/**\n * Returns a ref to access if, and if so, how long the current tab is in the\n * background and an unsubscribe function.\n *\n * The `inBackgroundSince` value will either be a JS timestamp indicating the\n * moment the tab was put into the background, or `null` in case the tab isn't\n * currently in the background. In non-DOM environments, this will always\n * return `null`.\n */\nfunction installBackgroundTabSpy(): [\n inBackgroundSince: { readonly current: number | null },\n unsub: () => void,\n] {\n const doc = typeof document !== \"undefined\" ? document : undefined;\n const inBackgroundSince: { current: number | null } = { current: null };\n\n function onVisibilityChange() {\n if (doc?.visibilityState === \"hidden\") {\n inBackgroundSince.current = inBackgroundSince.current ?? 
Date.now();\n } else {\n inBackgroundSince.current = null;\n }\n }\n\n doc?.addEventListener(\"visibilitychange\", onVisibilityChange);\n const unsub = () => {\n doc?.removeEventListener(\"visibilitychange\", onVisibilityChange);\n };\n\n return [inBackgroundSince, unsub];\n}\n\nfunction makeNodeMapBuffer() {\n let map: NodeMap = new Map();\n return {\n /** Append a \"page\" of nodes to the current NodeMap buffer. */\n append(chunk: NodeStream) {\n for (const [id, node] of chunk) {\n map.set(id, node);\n }\n },\n /** Return the contents of the current NodeMap buffer, and create a fresh new one. */\n take(): NodeMap {\n const result = map;\n map = new Map();\n return result;\n },\n };\n}\n\n/**\n * @internal\n * Initializes a new Room, and returns its public API.\n */\nexport function createRoom<\n P extends JsonObject,\n S extends LsonObject,\n U extends BaseUserMeta,\n E extends Json,\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n>(\n options: { initialPresence: P; initialStorage: S },\n config: RoomConfig<TM, CM>\n): Room<P, S, U, E, TM, CM> {\n const roomId = config.roomId;\n const initialPresence = options.initialPresence; // ?? {};\n const initialStorage = options.initialStorage; // ?? {};\n\n const httpClient = config.roomHttpClient;\n\n const [inBackgroundSince, uninstallBgTabSpy] = installBackgroundTabSpy();\n\n // Create a delegate pair for (a specific) Live Room socket connection(s)\n const delegates = {\n ...config.delegates,\n\n // A connection is allowed to go into \"zombie state\" only if all of the\n // following conditions apply:\n //\n // - The `backgroundKeepAliveTimeout` client option is configured\n // - The browser window has been in the background for at least\n // `backgroundKeepAliveTimeout` milliseconds\n // - There are no pending changes\n //\n canZombie() {\n return (\n config.backgroundKeepAliveTimeout !== undefined &&\n inBackgroundSince.current !== null &&\n Date.now() >\n inBackgroundSince.current + config.backgroundKeepAliveTimeout &&\n getStorageStatus() !== \"synchronizing\"\n );\n },\n };\n\n const managedSocket: ManagedSocket<AuthValue> = new ManagedSocket(\n delegates,\n config.enableDebugLogging\n );\n\n // The room's internal stateful context\n const context: RoomState<P, S, U, E> = {\n buffer: {\n flushTimerID: undefined,\n lastFlushedAt: 0,\n presenceUpdates:\n // Queue up the initial presence message as a Full Presence™ update\n {\n type: \"full\",\n data: initialPresence,\n },\n messages: [],\n storageOperations: [],\n },\n\n staticSessionInfoSig: new Signal<StaticSessionInfo | null>(null),\n dynamicSessionInfoSig: new Signal<DynamicSessionInfo | null>(null),\n myPresence: new PatchableSignal(initialPresence),\n others: new ManagedOthers<P, U>(),\n\n initialStorage,\n idFactory: null,\n\n // The Yjs provider associated to this room\n yjsProvider: undefined,\n yjsProviderDidChange: makeEventSource(),\n\n // Storage\n pool: createManagedPool(roomId, {\n getCurrentConnectionId,\n onDispatch,\n isStorageWritable,\n }),\n root: undefined,\n\n undoStack: [],\n redoStack: [],\n pausedHistory: null,\n\n activeBatch: null,\n unacknowledgedOps: new Map<string, ClientWireOp>(),\n };\n\n // Accumulates nodes as initial storage arrives in chunks via\n // STORAGE_CHUNK messages. 
Once the final chunk arrives (with\n // done: true), the complete map is passed to processInitialStorage().\n const nodeMapBuffer = makeNodeMapBuffer();\n\n let lastTokenKey: string | undefined;\n function onStatusDidChange(newStatus: Status) {\n const authValue = managedSocket.authValue;\n if (authValue !== null) {\n const tokenKey = getBearerTokenFromAuthValue(authValue);\n\n if (tokenKey !== lastTokenKey) {\n lastTokenKey = tokenKey;\n\n if (authValue.type === \"secret\") {\n const token = authValue.token.parsed;\n context.staticSessionInfoSig.set({\n userId: token.uid,\n userInfo: token.ui,\n });\n } else {\n context.staticSessionInfoSig.set({\n userId: undefined,\n userInfo: undefined,\n });\n }\n }\n }\n\n // Forward to the outside world\n eventHub.status.notify(newStatus);\n notifySelfChanged();\n }\n\n let _connectionLossTimerId: TimeoutID | undefined;\n let _hasLostConnection = false;\n\n function handleConnectionLossEvent(newStatus: Status) {\n if (newStatus === \"reconnecting\") {\n _connectionLossTimerId = setTimeout(() => {\n eventHub.lostConnection.notify(\"lost\");\n _hasLostConnection = true;\n\n // Clear the others\n context.others.clearOthers();\n notify({ others: [{ type: \"reset\" }] });\n }, config.lostConnectionTimeout);\n } else {\n clearTimeout(_connectionLossTimerId);\n\n if (_hasLostConnection) {\n if (newStatus === \"disconnected\") {\n eventHub.lostConnection.notify(\"failed\");\n } else {\n // Typically the case when going back to \"connected\", but really take\n // *any* other state change as a recovery sign\n eventHub.lostConnection.notify(\"restored\");\n }\n\n _hasLostConnection = false;\n }\n }\n }\n\n function onDidConnect() {\n // Re-broadcast the full user presence as soon as we (re)connect\n context.buffer.presenceUpdates = {\n type: \"full\",\n data:\n // Because context.me.current is a readonly object, we'll have to\n // make a copy here. Otherwise, type errors happen later when\n // \"patching\" my presence.\n { ...context.myPresence.get() },\n };\n\n // NOTE: There was a flush here before, but I don't think it's really\n // needed anymore. We're now combining this flush with the one below, to\n // combine them in a single batch.\n // tryFlushing();\n\n // If a storage fetch has ever been initiated, we assume the client is\n // interested in storage, so we will refresh it after a reconnection.\n if (_getStorage$ !== null) {\n refreshStorage({ flush: false });\n }\n flushNowOrSoon();\n }\n\n function onDidDisconnect() {\n clearTimeout(context.buffer.flushTimerID);\n }\n\n // Register events handlers for events coming from the socket\n // We never have to unsubscribe, because the Room and the Connection Manager\n // will have the same life-time.\n managedSocket.events.onMessage.subscribe(handleServerMessage);\n managedSocket.events.statusDidChange.subscribe(onStatusDidChange);\n managedSocket.events.statusDidChange.subscribe(handleConnectionLossEvent);\n managedSocket.events.didConnect.subscribe(onDidConnect);\n managedSocket.events.didDisconnect.subscribe(onDidDisconnect);\n managedSocket.events.onConnectionError.subscribe(({ message, code }) => {\n const type = \"ROOM_CONNECTION_ERROR\";\n const err = new LiveblocksError(message, { type, code, roomId });\n const didNotify = config.errorEventSource.notify(err);\n if (!didNotify) {\n if (process.env.NODE_ENV !== \"production\") {\n console.error(\n `Connection to websocket server closed. 
Reason: ${message} (code: ${code}).`\n );\n }\n }\n });\n\n function onDispatch(\n ops: ClientWireOp[],\n reverse: Op[],\n storageUpdates: Map<string, StorageUpdate>\n ): void {\n if (context.activeBatch) {\n for (const op of ops) {\n context.activeBatch.ops.push(op);\n }\n for (const [key, value] of storageUpdates) {\n context.activeBatch.updates.storageUpdates.set(\n key,\n mergeStorageUpdates(\n context.activeBatch.updates.storageUpdates.get(key),\n value\n )\n );\n }\n context.activeBatch.reverseOps.pushLeft(reverse);\n } else {\n addToUndoStack(reverse);\n context.redoStack.length = 0;\n dispatchOps(ops);\n notify({ storageUpdates });\n }\n }\n\n function isStorageWritable(): boolean {\n const scopes = context.dynamicSessionInfoSig.get()?.scopes;\n // If we aren't connected yet, assume we can write\n return scopes !== undefined ? canWriteStorage(scopes) : true;\n }\n\n const eventHub = {\n status: makeEventSource<Status>(), // New/recommended API\n lostConnection: makeEventSource<LostConnectionEvent>(),\n\n customEvent: makeEventSource<RoomEventMessage<P, U, E>>(),\n self: makeEventSource<User<P, U>>(),\n myPresence: makeEventSource<P>(),\n others: makeEventSource<OthersEvent<P, U>>(),\n storageBatch: makeEventSource<StorageUpdate[]>(),\n history: makeEventSource<HistoryEvent>(),\n storageDidLoad: makeEventSource<void>(),\n storageStatus: makeEventSource<StorageStatus>(),\n ydoc: makeEventSource<YDocUpdateServerMsg | UpdateYDocClientMsg>(),\n\n comments: makeEventSource<CommentsEventServerMsg>(),\n roomWillDestroy: makeEventSource<void>(),\n };\n\n async function createTextMention(mentionId: string, mention: MentionData) {\n return httpClient.createTextMention({ roomId, mentionId, mention });\n }\n\n async function deleteTextMention(mentionId: string) {\n return httpClient.deleteTextMention({ roomId, mentionId });\n }\n\n async function reportTextEditor(type: TextEditorType, rootKey: string) {\n await httpClient.reportTextEditor({ roomId, type, rootKey });\n }\n\n async function listTextVersions() {\n return httpClient.listTextVersions({ roomId });\n }\n\n async function listTextVersionsSince(options: ListTextVersionsSinceOptions) {\n return httpClient.listTextVersionsSince({\n roomId,\n since: options.since,\n signal: options.signal,\n });\n }\n\n async function getTextVersion(versionId: string) {\n return httpClient.getTextVersion({ roomId, versionId });\n }\n\n async function createTextVersion() {\n return httpClient.createTextVersion({ roomId });\n }\n\n async function executeContextualPrompt(options: {\n prompt: string;\n context: ContextualPromptContext;\n previous?: {\n prompt: string;\n response: ContextualPromptResponse;\n };\n signal: AbortSignal;\n }) {\n return httpClient.executeContextualPrompt({\n roomId,\n ...options,\n });\n }\n\n /**\n * Split a single large UPDATE_STORAGE message into smaller chunks, by\n * splitting the ops list recursively in half.\n */\n function* chunkOps(msg: UpdateStorageClientMsg): IterableIterator<string> {\n const { ops, ...rest } = msg;\n if (ops.length < 2) {\n throw new Error(\"Cannot split ops into smaller chunks\");\n }\n\n const mid = Math.floor(ops.length / 2);\n const firstHalf = ops.slice(0, mid);\n const secondHalf = ops.slice(mid);\n\n for (const halfOps of [firstHalf, secondHalf]) {\n const half: UpdateStorageClientMsg = { ops: halfOps, ...rest };\n const text = stringify([half]);\n if (!isTooBigForWebSocket(text)) {\n yield text;\n } else {\n yield* chunkOps(half);\n }\n }\n }\n\n /**\n * Split the message array in half (two 
chunks), and try to send each chunk\n * separately. If the chunk is still too big, repeat the process. If a chunk\n * can no longer be split up (i.e. is 1 message), then error.\n */\n function* chunkMessages(\n messages: ClientMsg<P, E>[]\n ): IterableIterator<string> {\n if (messages.length < 2) {\n if (messages[0].type === ClientMsgCode.UPDATE_STORAGE) {\n yield* chunkOps(messages[0]);\n return;\n } else {\n throw new Error(\n \"Cannot split into chunks smaller than the allowed message size\"\n );\n }\n }\n\n const mid = Math.floor(messages.length / 2);\n const firstHalf = messages.slice(0, mid);\n const secondHalf = messages.slice(mid);\n\n for (const half of [firstHalf, secondHalf]) {\n const text = stringify(half);\n if (!isTooBigForWebSocket(text)) {\n yield text;\n } else {\n yield* chunkMessages(half);\n }\n }\n }\n\n function isTooBigForWebSocket(text: string): boolean {\n // The theoretical worst case is that each character in the string is\n // a 4-byte UTF-8 character. String.prototype.length is an O(1) operation,\n // so we can spare ourselves the TextEncoder() measurement overhead with\n // this heuristic.\n if (text.length * 4 < MAX_SOCKET_MESSAGE_SIZE) {\n return false;\n }\n\n // Otherwise we need to measure to be sure\n return new TextEncoder().encode(text).length >= MAX_SOCKET_MESSAGE_SIZE;\n }\n\n function sendMessages(messages: ClientMsg<P, E>[]) {\n const strategy = config.largeMessageStrategy ?? \"default\";\n\n const text = stringify(messages);\n if (!isTooBigForWebSocket(text)) {\n return managedSocket.send(text); // Happy path\n }\n\n // If message is too big for WebSockets, we need to follow a strategy\n switch (strategy) {\n case \"default\": {\n const type = \"LARGE_MESSAGE_ERROR\";\n const err = new LiveblocksError(\"Message is too large for websockets\", {\n type,\n });\n const didNotify = config.errorEventSource.notify(err);\n if (!didNotify) {\n console.error(\n \"Message is too large for websockets. 
Configure largeMessageStrategy option or useErrorListener to handle this.\"\n );\n }\n return;\n }\n\n case \"split\": {\n console.warn(\"Message is too large for websockets, splitting into smaller chunks\"); // prettier-ignore\n for (const chunk of chunkMessages(messages)) {\n managedSocket.send(chunk);\n }\n return;\n }\n\n // NOTE: This strategy is experimental as it will not work in all situations.\n // It should only be used for broadcasting, presence updates, but isn't suitable\n // for Storage or Yjs updates yet (because through this channel the server does\n // not respond with acks or rejections, causing the client's reported status to\n // be stuck in \"synchronizing\" forever).\n case \"experimental-fallback-to-http\": {\n console.warn(\"Message is too large for websockets, so sending over HTTP instead\"); // prettier-ignore\n const nonce =\n context.dynamicSessionInfoSig.get()?.nonce ??\n raise(\"Session is not authorized to send message over HTTP\");\n\n void httpClient\n .sendMessagesOverHTTP<P, E>({ roomId, nonce, messages })\n .then((resp) => {\n if (!resp.ok && resp.status === 403) {\n managedSocket.reconnect();\n }\n })\n .catch((err) => {\n console.error(\n `Failed to deliver message over HTTP: ${String(err)}`\n );\n });\n return;\n }\n }\n }\n\n const self = DerivedSignal.from(\n context.staticSessionInfoSig,\n context.dynamicSessionInfoSig,\n context.myPresence,\n (staticSession, dynamicSession, myPresence): User<P, U> | null => {\n if (staticSession === null || dynamicSession === null) {\n return null;\n } else {\n const canWrite = canWriteStorage(dynamicSession.scopes);\n return {\n connectionId: dynamicSession.actor,\n id: staticSession.userId,\n info: staticSession.userInfo,\n presence: myPresence,\n canWrite,\n canComment: canComment(dynamicSession.scopes),\n };\n }\n }\n );\n\n let _lastSelf: Readonly<User<P, U>> | undefined;\n function notifySelfChanged() {\n const currSelf = self.get();\n if (currSelf !== null && currSelf !== _lastSelf) {\n eventHub.self.notify(currSelf);\n _lastSelf = currSelf;\n }\n }\n\n // For use in DevTools\n const selfAsTreeNode = DerivedSignal.from(self, (me) =>\n me !== null ? userToTreeNode(\"Me\", me) : null\n );\n\n function createOrUpdateRootFromMessage(nodes: NodeMap) {\n if (nodes.size === 0) {\n throw new Error(\"Internal error: cannot load storage without items\");\n }\n\n if (context.root !== undefined) {\n const currentItems: NodeMap = new Map();\n for (const [id, crdt] of context.pool.nodes) {\n currentItems.set(id, crdt._serialize());\n }\n\n // Get operations that represent the diff between 2 states.\n const ops = getTreesDiffOperations(currentItems, nodes);\n\n const result = applyRemoteOps(ops);\n notify(result.updates);\n } else {\n context.root = LiveObject._fromItems<S>(\n nodes as NodeStream,\n context.pool\n );\n }\n\n const canWrite = self.get()?.canWrite ?? 
true;\n\n // Populate missing top-level keys using `initialStorage`\n const stackSizeBefore = context.undoStack.length;\n for (const key in context.initialStorage) {\n if (context.root.get(key) === undefined) {\n if (canWrite) {\n context.root.set(key, cloneLson(context.initialStorage[key]));\n } else {\n console.warn(\n `Attempted to populate missing storage key '${key}', but current user has no write access`\n );\n }\n }\n }\n\n // Initial storage is populated using normal \"set\" operations in the loop\n // above, those updates can end up in the undo stack, so let's prune it.\n context.undoStack.length = stackSizeBefore;\n }\n\n function _addToRealUndoStack(frames: Stackframe<P>[]) {\n // If undo stack is too large, we remove the older item\n if (context.undoStack.length >= 50) {\n context.undoStack.shift();\n }\n\n context.undoStack.push(frames);\n onHistoryChange();\n }\n\n function addToUndoStack(frames: Stackframe<P>[]) {\n if (context.pausedHistory !== null) {\n context.pausedHistory.pushLeft(frames);\n } else {\n _addToRealUndoStack(frames);\n }\n }\n\n type NotifyUpdates = {\n storageUpdates?: Map<string, StorageUpdate>;\n presence?: boolean;\n others?: InternalOthersEvent<P, U>[];\n };\n\n function notify(updates: NotifyUpdates) {\n const storageUpdates = updates.storageUpdates;\n const othersUpdates = updates.others;\n\n if (othersUpdates !== undefined && othersUpdates.length > 0) {\n const others = context.others.get();\n for (const event of othersUpdates) {\n eventHub.others.notify({ ...event, others });\n }\n }\n\n if (updates.presence ?? false) {\n notifySelfChanged();\n eventHub.myPresence.notify(context.myPresence.get());\n }\n\n if (storageUpdates !== undefined && storageUpdates.size > 0) {\n const updates = Array.from(storageUpdates.values());\n eventHub.storageBatch.notify(updates);\n }\n notifyStorageStatus();\n }\n\n function getCurrentConnectionId() {\n const info = context.dynamicSessionInfoSig.get();\n if (info) {\n return info.actor;\n }\n\n throw new Error(\n \"Internal. Tried to get connection id but connection was never open\"\n );\n }\n\n function applyLocalOps(frames: readonly Stackframe<P>[]): {\n opsToEmit: ClientWireOp[]; // Ops to send over the wire afterwards\n reverse: Stackframe<P>[]; // Reverse ops to add to the undo stack aftwards\n // Updates to notify about afterwards\n updates: {\n storageUpdates: Map<string, StorageUpdate>;\n presence: boolean;\n };\n } {\n const [pframes, ops] = partition(\n frames,\n (f): f is PresenceStackframe<P> => f.type === \"presence\"\n );\n\n // Ensure all local ops have opIds assigned before applying them\n const opsWithOpIds = ops.map((op: Op) =>\n op.opId === undefined\n ? 
{ ...op, opId: context.pool.generateOpId() }\n : (op as ClientWireOp)\n );\n\n const { reverse, updates } = applyOps(\n pframes,\n opsWithOpIds,\n /* isLocal */ true\n );\n return { opsToEmit: opsWithOpIds, reverse, updates };\n }\n\n function applyRemoteOps(ops: readonly ServerWireOp[]): {\n // Updates to notify about afterwards\n updates: {\n storageUpdates: Map<string, StorageUpdate>;\n presence: boolean;\n };\n } {\n return applyOps([], ops, /* isLocal */ false);\n }\n\n function applyOps(\n pframes: readonly PresenceStackframe<P>[],\n ops: readonly Op[],\n isLocal: boolean\n ): {\n reverse: Stackframe<P>[];\n updates: {\n storageUpdates: Map<string, StorageUpdate>;\n presence: boolean;\n };\n } {\n const output = {\n reverse: new Deque<Stackframe<P>>(),\n storageUpdates: new Map<string, StorageUpdate>(),\n presence: false,\n };\n\n for (const pf of pframes) {\n const reverse = {\n type: \"presence\" as const,\n data: {} as P,\n };\n\n for (const key in pf.data) {\n reverse.data[key] = context.myPresence.get()[key];\n }\n\n context.myPresence.patch(pf.data);\n\n if (context.buffer.presenceUpdates === null) {\n context.buffer.presenceUpdates = { type: \"partial\", data: pf.data };\n } else {\n // Merge the new fields with whatever is already queued up (doesn't\n // matter whether its a partial or full update)\n for (const key in pf.data) {\n context.buffer.presenceUpdates.data[key] = pf.data[key];\n }\n }\n\n output.reverse.pushLeft(reverse);\n output.presence = true;\n }\n\n const createdNodeIds = new Set<string>();\n for (const op of ops) {\n let source: OpSource;\n\n if (isLocal) {\n source = OpSource.LOCAL;\n } else if (op.opId !== undefined) {\n context.unacknowledgedOps.delete(op.opId);\n source = OpSource.OURS;\n } else {\n // Remotely generated Ops (and fix Ops as a special case of that)\n // don't have opId anymore.\n source = OpSource.THEIRS;\n }\n\n const applyOpResult = applyOp(op, source);\n if (applyOpResult.modified) {\n const nodeId = applyOpResult.modified.node._id;\n\n // If the modified node was created in the same batch, we don't want\n // to notify storage updates for it (children of newly created nodes\n // shouldn't trigger separate updates).\n if (!(nodeId && createdNodeIds.has(nodeId))) {\n output.storageUpdates.set(\n nn(applyOpResult.modified.node._id),\n mergeStorageUpdates(\n output.storageUpdates.get(nn(applyOpResult.modified.node._id)),\n applyOpResult.modified\n )\n );\n output.reverse.pushLeft(applyOpResult.reverse);\n }\n\n if (\n op.type === OpCode.CREATE_LIST ||\n op.type === OpCode.CREATE_MAP ||\n op.type === OpCode.CREATE_OBJECT\n ) {\n createdNodeIds.add(op.id);\n }\n }\n }\n\n return {\n reverse: Array.from(output.reverse),\n updates: {\n storageUpdates: output.storageUpdates,\n presence: output.presence,\n },\n };\n }\n\n function applyOp(op: Op, source: OpSource): ApplyResult {\n // Explicit case to handle ignored Ops\n if (isIgnoredOp(op)) {\n return { modified: false };\n }\n\n switch (op.type) {\n case OpCode.DELETE_OBJECT_KEY:\n case OpCode.UPDATE_OBJECT:\n case OpCode.DELETE_CRDT: {\n const node = context.pool.nodes.get(op.id);\n if (node === undefined) {\n return { modified: false };\n }\n\n return node._apply(op, source === OpSource.LOCAL);\n }\n\n case OpCode.SET_PARENT_KEY: {\n const node = context.pool.nodes.get(op.id);\n if (node === undefined) {\n return { modified: false };\n }\n\n if (node.parent.type === \"HasParent\" && isLiveList(node.parent.node)) {\n return node.parent.node._setChildKey(\n asPos(op.parentKey),\n node,\n 
source\n );\n }\n return { modified: false };\n }\n case OpCode.CREATE_OBJECT:\n case OpCode.CREATE_LIST:\n case OpCode.CREATE_MAP:\n case OpCode.CREATE_REGISTER: {\n if (op.parentId === undefined) {\n return { modified: false };\n }\n\n const parentNode = context.pool.nodes.get(op.parentId);\n if (parentNode === undefined) {\n return { modified: false };\n }\n\n return parentNode._attachChild(op, source);\n }\n }\n }\n\n function updatePresence(\n patch: Partial<P>,\n options?: { addToHistory: boolean }\n ) {\n const oldValues = {} as P;\n\n if (context.buffer.presenceUpdates === null) {\n // try {\n context.buffer.presenceUpdates = {\n type: \"partial\",\n data: {},\n };\n // } catch (err) {\n // window.console.log({ context, patch, err });\n // throw err;\n // }\n }\n\n for (const key in patch) {\n type K = typeof key;\n const overrideValue: P[K] | undefined = patch[key];\n if (overrideValue === undefined) {\n continue;\n }\n context.buffer.presenceUpdates.data[key] = overrideValue;\n oldValues[key] = context.myPresence.get()[key];\n }\n\n context.myPresence.patch(patch);\n\n if (context.activeBatch) {\n if (options?.addToHistory) {\n context.activeBatch.reverseOps.pushLeft({\n type: \"presence\",\n data: oldValues,\n });\n }\n context.activeBatch.updates.presence = true;\n } else {\n flushNowOrSoon();\n if (options?.addToHistory) {\n addToUndoStack([{ type: \"presence\", data: oldValues }]);\n }\n notify({ presence: true });\n }\n }\n\n function onUpdatePresenceMessage(\n message: UpdatePresenceServerMsg<P>\n ): InternalOthersEvent<P, U> | undefined {\n if (message.targetActor !== undefined) {\n // The incoming message is a full presence update. We are obliged to\n // handle it if `targetActor` matches our own connection ID, but we can\n // use the opportunity to effectively reset the known presence as\n // a \"keyframe\" update, while we have free access to it.\n const oldUser = context.others.getUser(message.actor);\n context.others.setOther(message.actor, message.data);\n\n const newUser = context.others.getUser(message.actor);\n if (oldUser === undefined && newUser !== undefined) {\n // The user just became \"visible\" due to this update, so fire the\n // \"enter\" event\n return { type: \"enter\", user: newUser };\n }\n } else {\n // The incoming message is a partial presence update\n context.others.patchOther(message.actor, message.data), message;\n }\n\n const user = context.others.getUser(message.actor);\n if (user) {\n return {\n type: \"update\",\n updates: message.data,\n user,\n };\n } else {\n return undefined;\n }\n }\n\n function onUserLeftMessage(\n message: UserLeftServerMsg\n ): InternalOthersEvent<P, U> | null {\n const user = context.others.getUser(message.actor);\n if (user) {\n context.others.removeConnection(message.actor);\n return { type: \"leave\", user };\n }\n return null;\n }\n\n function onRoomStateMessage(\n message: RoomStateServerMsg<U>\n ): InternalOthersEvent<P, U> {\n // The server will inform the client about its assigned actor ID and scopes\n context.dynamicSessionInfoSig.set({\n actor: message.actor,\n nonce: message.nonce,\n scopes: message.scopes,\n meta: message.meta,\n });\n context.idFactory = makeIdFactory(message.actor);\n notifySelfChanged();\n\n // Inject brand badge if meta.showBrand is true\n if (message.meta.showBrand === true) {\n injectBrandBadge(config.badgeLocation ?? 
\"bottom-right\");\n }\n\n for (const connectionId of context.others.connectionIds()) {\n const user = message.users[connectionId];\n if (user === undefined) {\n context.others.removeConnection(connectionId);\n }\n }\n\n for (const key in message.users) {\n const user = message.users[key];\n const connectionId = Number(key);\n context.others.setConnection(\n connectionId,\n user.id,\n user.info,\n user.scopes\n );\n }\n\n // NOTE: We could be notifying the \"others\" event here, but the reality is\n // that ROOM_STATE is often the first message to be received from the\n // server, and it won't contain all the information needed to update the\n // other views yet. Instead, we'll let the others' presences trickle in,\n // and notify each time that happens.\n return { type: \"reset\" };\n }\n\n function canUndo() { return context.undoStack.length > 0; } // prettier-ignore\n function canRedo() { return context.redoStack.length > 0; } // prettier-ignore\n function onHistoryChange() {\n eventHub.history.notify({ canUndo: canUndo(), canRedo: canRedo() });\n }\n\n function onUserJoinedMessage(\n message: UserJoinServerMsg<U>\n ): InternalOthersEvent<P, U> | undefined {\n context.others.setConnection(\n message.actor,\n message.id,\n message.info,\n message.scopes\n );\n // Send current presence to new user\n // TODO: Consider storing it on the backend\n context.buffer.messages.push({\n type: ClientMsgCode.UPDATE_PRESENCE,\n data: context.myPresence.get(),\n targetActor: message.actor,\n });\n flushNowOrSoon();\n\n // We recorded the connection, but we won't make the new user visible\n // unless we also know their initial presence data at this point.\n const user = context.others.getUser(message.actor);\n return user ? { type: \"enter\", user } : undefined;\n }\n\n function parseServerMessage(data: Json): ServerMsg<P, U, E> | null {\n if (!isJsonObject(data)) {\n return null;\n }\n return data as ServerMsg<P, U, E>;\n }\n\n function parseServerMessages(text: string): ServerMsg<P, U, E>[] | null {\n const data: Json | undefined = tryParseJson(text);\n if (data === undefined) {\n return null;\n } else if (isJsonArray(data)) {\n return compact(data.map((item) => parseServerMessage(item)));\n } else {\n return compact([parseServerMessage(data)]);\n }\n }\n\n function applyAndSendOfflineOps(unackedOps: Map<string, ClientWireOp>) {\n if (unackedOps.size === 0) {\n return;\n }\n\n const messages: ClientMsg<P, E>[] = [];\n const inOps = Array.from(unackedOps.values());\n const result = applyLocalOps(inOps);\n messages.push({\n type: ClientMsgCode.UPDATE_STORAGE,\n ops: result.opsToEmit,\n });\n\n notify(result.updates);\n sendMessages(messages);\n }\n\n /**\n * Handles a message received on the WebSocket. Will never be a \"pong\". 
The\n * \"pong\" is handled at the connection manager level.\n */\n function handleServerMessage(event: IWebSocketMessageEvent) {\n if (typeof event.data !== \"string\") {\n // istanbul ignore next: Unknown incoming message\n return;\n }\n\n const messages = parseServerMessages(event.data);\n if (messages === null || messages.length === 0) {\n // istanbul ignore next: Unknown incoming message\n return;\n }\n\n const updates = {\n storageUpdates: new Map<string, StorageUpdate>(),\n others: [] as InternalOthersEvent<P, U>[],\n };\n\n for (const message of messages) {\n switch (message.type) {\n case ServerMsgCode.USER_JOINED: {\n const userJoinedUpdate = onUserJoinedMessage(message);\n if (userJoinedUpdate) {\n updates.others.push(userJoinedUpdate);\n }\n break;\n }\n\n case ServerMsgCode.UPDATE_PRESENCE: {\n const othersPresenceUpdate = onUpdatePresenceMessage(message);\n if (othersPresenceUpdate) {\n updates.others.push(othersPresenceUpdate);\n }\n break;\n }\n\n case ServerMsgCode.BROADCASTED_EVENT: {\n const others = context.others.get();\n eventHub.customEvent.notify({\n connectionId: message.actor,\n user:\n message.actor < 0\n ? null\n : (others.find((u) => u.connectionId === message.actor) ??\n null),\n event: message.event,\n });\n break;\n }\n\n case ServerMsgCode.USER_LEFT: {\n const event = onUserLeftMessage(message);\n if (event) {\n updates.others.push(event);\n }\n break;\n }\n\n case ServerMsgCode.UPDATE_YDOC: {\n eventHub.ydoc.notify(message);\n break;\n }\n\n case ServerMsgCode.ROOM_STATE: {\n updates.others.push(onRoomStateMessage(message));\n break;\n }\n\n case ServerMsgCode.STORAGE_CHUNK:\n nodeMapBuffer.append(compactNodesToNodeStream(message.nodes));\n break;\n\n case ServerMsgCode.STORAGE_STREAM_END:\n processInitialStorage(nodeMapBuffer.take());\n break;\n\n case ServerMsgCode.UPDATE_STORAGE: {\n const applyResult = applyRemoteOps(message.ops);\n for (const [key, value] of applyResult.updates.storageUpdates) {\n updates.storageUpdates.set(\n key,\n mergeStorageUpdates(updates.storageUpdates.get(key), value)\n );\n }\n break;\n }\n\n // Receiving a RejectedOps message in the client means that the server is no\n // longer in sync with the client. Trying to synchronize the client again by\n // rolling back particular Ops may be hard/impossible. 
It's fine to not try and\n // accept the out-of-sync reality and throw an error.\n case ServerMsgCode.REJECT_STORAGE_OP: {\n console.errorWithTitle(\n \"Storage mutation rejection error\",\n message.reason\n );\n\n if (process.env.NODE_ENV !== \"production\") {\n throw new Error(\n `Storage mutations rejected by server: ${message.reason}`\n );\n }\n\n break;\n }\n\n case ServerMsgCode.THREAD_CREATED:\n case ServerMsgCode.THREAD_DELETED:\n case ServerMsgCode.THREAD_METADATA_UPDATED:\n case ServerMsgCode.THREAD_UPDATED:\n case ServerMsgCode.COMMENT_REACTION_ADDED:\n case ServerMsgCode.COMMENT_REACTION_REMOVED:\n case ServerMsgCode.COMMENT_CREATED:\n case ServerMsgCode.COMMENT_EDITED:\n case ServerMsgCode.COMMENT_DELETED:\n case ServerMsgCode.COMMENT_METADATA_UPDATED: {\n eventHub.comments.notify(message);\n break;\n }\n\n case ServerMsgCode.STORAGE_STATE_V7: // No longer used in V8\n default:\n // Ignore unknown server messages\n break;\n }\n }\n\n notify(updates);\n }\n\n function flushNowOrSoon() {\n const storageOps = context.buffer.storageOperations;\n if (storageOps.length > 0) {\n for (const op of storageOps) {\n context.unacknowledgedOps.set(op.opId, op);\n }\n notifyStorageStatus();\n }\n\n if (managedSocket.getStatus() !== \"connected\") {\n context.buffer.storageOperations = [];\n return;\n }\n\n const now = Date.now();\n const elapsedMillis = now - context.buffer.lastFlushedAt;\n\n if (elapsedMillis >= config.throttleDelay) {\n // Flush the buffer right now\n const messagesToFlush = serializeBuffer();\n if (messagesToFlush.length === 0) {\n return;\n }\n\n sendMessages(messagesToFlush);\n context.buffer = {\n flushTimerID: undefined,\n lastFlushedAt: now,\n messages: [],\n storageOperations: [],\n presenceUpdates: null,\n };\n } else {\n // Or schedule the flush a few millis into the future\n clearTimeout(context.buffer.flushTimerID);\n context.buffer.flushTimerID = setTimeout(\n flushNowOrSoon,\n config.throttleDelay - elapsedMillis\n );\n }\n }\n\n /**\n * Returns a list of ClientMsgs to flush to the network, computed from all\n * pending changes in the buffer. Has no side effects.\n */\n function serializeBuffer() {\n const messages: ClientMsg<P, E>[] = [];\n if (context.buffer.presenceUpdates) {\n messages.push(\n context.buffer.presenceUpdates.type === \"full\"\n ? 
{\n type: ClientMsgCode.UPDATE_PRESENCE,\n // Populating the `targetActor` field turns this message into\n // a Full Presence™ update message (not a patch), which will get\n // interpreted by other clients as such.\n targetActor: -1,\n data: context.buffer.presenceUpdates.data,\n }\n : {\n type: ClientMsgCode.UPDATE_PRESENCE,\n data: context.buffer.presenceUpdates.data,\n }\n );\n }\n for (const event of context.buffer.messages) {\n messages.push(event);\n }\n if (context.buffer.storageOperations.length > 0) {\n messages.push({\n type: ClientMsgCode.UPDATE_STORAGE,\n ops: context.buffer.storageOperations,\n });\n }\n return messages;\n }\n\n function updateYDoc(update: string, guid?: string, isV2?: boolean) {\n const clientMsg: UpdateYDocClientMsg = {\n type: ClientMsgCode.UPDATE_YDOC,\n update,\n guid,\n v2: isV2,\n };\n context.buffer.messages.push(clientMsg);\n eventHub.ydoc.notify(clientMsg);\n flushNowOrSoon();\n }\n\n function broadcastEvent(\n event: E,\n options: BroadcastOptions = {\n shouldQueueEventIfNotReady: false,\n }\n ) {\n if (\n managedSocket.getStatus() !== \"connected\" &&\n !options.shouldQueueEventIfNotReady\n ) {\n return;\n }\n\n context.buffer.messages.push({\n type: ClientMsgCode.BROADCAST_EVENT,\n event,\n });\n flushNowOrSoon();\n }\n\n /**\n * Schedule Ops to be sent to the server (now or soon). All ops should be\n * \"wire-ready\" (have an opId), once dispatched there is no going back.\n */\n function dispatchOps(ops: ClientWireOp[]) {\n const { storageOperations } = context.buffer;\n for (const op of ops) {\n storageOperations.push(op);\n }\n flushNowOrSoon();\n }\n\n let _getStorage$: Promise<void> | null = null;\n let _resolveStoragePromise: (() => void) | null = null;\n\n function processInitialStorage(nodes: NodeMap) {\n const unacknowledgedOps = new Map(context.unacknowledgedOps);\n createOrUpdateRootFromMessage(nodes);\n applyAndSendOfflineOps(unacknowledgedOps);\n _resolveStoragePromise?.();\n notifyStorageStatus();\n eventHub.storageDidLoad.notify();\n }\n\n async function streamStorage() {\n // TODO: Handle potential race conditions where the room get disconnected while the request is pending\n if (!managedSocket.authValue) return;\n const nodes = new Map<string, SerializedCrdt>(\n await httpClient.streamStorage({ roomId })\n );\n processInitialStorage(nodes);\n }\n\n function refreshStorage(options: { flush: boolean }) {\n const messages = context.buffer.messages;\n if (config.unstable_streamData) {\n // instead of sending a fetch message over WS, stream over HTTP\n void streamStorage();\n } else if (\n !messages.some((msg) => msg.type === ClientMsgCode.FETCH_STORAGE)\n ) {\n // Only add the fetch message to the outgoing message queue if it isn't\n // already there\n messages.push({ type: ClientMsgCode.FETCH_STORAGE });\n nodeMapBuffer.take(); // Reset any partial state from previous fetch\n }\n\n if (options.flush) {\n flushNowOrSoon();\n }\n }\n\n function startLoadingStorage(): Promise<void> {\n if (_getStorage$ === null) {\n refreshStorage({ flush: true });\n _getStorage$ = new Promise((resolve) => {\n _resolveStoragePromise = resolve;\n });\n notifyStorageStatus();\n }\n return _getStorage$;\n }\n\n /**\n * Closely related to .getStorage(), but synchronously. Will be `null`\n * initially. 
When requested for the first time, will kick off the loading of\n * Storage if it hasn't happened yet.\n *\n * Once Storage is loaded, will return a stable reference to the storage\n * root.\n */\n function getStorageSnapshot(): LiveObject<S> | null {\n const root = context.root;\n if (root !== undefined) {\n // Done loading\n return root;\n } else {\n // Not done loading, kick off the loading (will not do anything if already kicked off)\n void startLoadingStorage();\n return null;\n }\n }\n\n async function getStorage(): Promise<{\n root: LiveObject<S>;\n }> {\n if (context.root !== undefined) {\n // Store has already loaded, so we can resolve it directly\n return Promise.resolve({\n root: context.root,\n });\n }\n\n await startLoadingStorage();\n return {\n root: nn(context.root) as LiveObject<S>,\n };\n }\n\n function fetchYDoc(vector: string, guid?: string, isV2?: boolean): void {\n // don't allow multiple fetches in the same buffer with the same vector\n // dev tools may also call with a different vector (if its opened later), and that's okay\n // because the updates will be ignored by the provider\n if (\n !context.buffer.messages.find((m) => {\n return (\n m.type === ClientMsgCode.FETCH_YDOC &&\n m.vector === vector &&\n m.guid === guid &&\n m.v2 === isV2\n );\n })\n ) {\n context.buffer.messages.push({\n type: ClientMsgCode.FETCH_YDOC,\n vector,\n guid,\n v2: isV2,\n });\n }\n\n flushNowOrSoon();\n }\n\n function undo() {\n if (context.activeBatch) {\n throw new Error(\"undo is not allowed during a batch\");\n }\n const frames = context.undoStack.pop();\n if (frames === undefined) {\n return;\n }\n\n context.pausedHistory = null;\n const result = applyLocalOps(frames);\n\n notify(result.updates);\n context.redoStack.push(result.reverse);\n onHistoryChange();\n\n for (const op of result.opsToEmit) {\n context.buffer.storageOperations.push(op);\n }\n flushNowOrSoon();\n }\n\n function redo() {\n if (context.activeBatch) {\n throw new Error(\"redo is not allowed during a batch\");\n }\n\n const frames = context.redoStack.pop();\n if (frames === undefined) {\n return;\n }\n\n context.pausedHistory = null;\n const result = applyLocalOps(frames);\n\n notify(result.updates);\n context.undoStack.push(result.reverse);\n onHistoryChange();\n\n for (const op of result.opsToEmit) {\n context.buffer.storageOperations.push(op);\n }\n flushNowOrSoon();\n }\n\n function clear() {\n context.undoStack.length = 0;\n context.redoStack.length = 0;\n }\n\n function batch<T>(callback: () => T): T {\n if (context.activeBatch) {\n // If there already is an active batch, we don't have to handle this in\n // any special way. That outer active batch will handle the batch. 
This\n // nested call can be a no-op.\n return callback();\n }\n\n let returnValue: T = undefined as unknown as T;\n\n context.activeBatch = {\n ops: [],\n updates: {\n storageUpdates: new Map(),\n presence: false,\n others: [],\n },\n reverseOps: new Deque(),\n };\n try {\n returnValue = callback();\n } finally {\n // \"Pop\" the current batch of the state, closing the active batch, but\n // handling it separately here\n const currentBatch = context.activeBatch;\n context.activeBatch = null;\n\n if (currentBatch.reverseOps.length > 0) {\n addToUndoStack(Array.from(currentBatch.reverseOps));\n }\n\n if (currentBatch.ops.length > 0) {\n // Only clear the redo stack if something has changed during a batch\n // Clear the redo stack because batch is always called from a local operation\n context.redoStack.length = 0;\n }\n\n if (currentBatch.ops.length > 0) {\n dispatchOps(currentBatch.ops);\n }\n\n notify(currentBatch.updates);\n flushNowOrSoon();\n }\n\n return returnValue;\n }\n\n function pauseHistory() {\n if (context.pausedHistory === null) {\n context.pausedHistory = new Deque();\n }\n }\n\n function resumeHistory() {\n const frames = context.pausedHistory;\n context.pausedHistory = null;\n if (frames !== null && frames.length > 0) {\n _addToRealUndoStack(Array.from(frames));\n }\n }\n\n // Register a global source of pending changes for Storage™, so that the\n // useSyncStatus() hook will be able to report this to end users\n const syncSourceForStorage = config.createSyncSource();\n\n function getStorageStatus(): StorageStatus {\n if (context.root === undefined) {\n return _getStorage$ === null ? \"not-loaded\" : \"loading\";\n } else {\n return context.unacknowledgedOps.size === 0\n ? \"synchronized\"\n : \"synchronizing\";\n }\n }\n\n /**\n * Storage status is a computed value based other internal states so we need to keep a reference to the previous computed value to avoid triggering events when it does not change\n * This is far from ideal because we need to call this function whenever we update our internal states.\n *\n * TODO: Encapsulate our internal state differently to make sure this event is triggered whenever necessary.\n * Currently okay because we only have 4 callers and shielded by tests.\n */\n let _lastStorageStatus = getStorageStatus();\n function notifyStorageStatus() {\n const storageStatus = getStorageStatus();\n if (_lastStorageStatus !== storageStatus) {\n _lastStorageStatus = storageStatus;\n eventHub.storageStatus.notify(storageStatus);\n }\n syncSourceForStorage.setSyncStatus(\n storageStatus === \"synchronizing\" ? 
\"synchronizing\" : \"synchronized\"\n );\n }\n\n function isPresenceReady() {\n return self.get() !== null;\n }\n\n async function waitUntilPresenceReady(): Promise<void> {\n while (!isPresenceReady()) {\n const { promise, resolve } = Promise_withResolvers();\n\n const unsub1 = events.self.subscribeOnce(resolve);\n const unsub2 = events.status.subscribeOnce(resolve);\n // Return whenever one of these returns, whichever is first\n await promise;\n unsub1();\n unsub2();\n }\n }\n\n function isStorageReady() {\n return getStorageSnapshot() !== null;\n }\n\n async function waitUntilStorageReady(): Promise<void> {\n while (!isStorageReady()) {\n // Trigger a load of Storage and wait until it finished\n await getStorage();\n }\n }\n\n // Derived cached state for use in DevTools\n const others_forDevTools = DerivedSignal.from(\n context.others.signal,\n (others) =>\n others.map((other, index) => userToTreeNode(`Other ${index}`, other))\n );\n\n const events = {\n status: eventHub.status.observable,\n lostConnection: eventHub.lostConnection.observable,\n\n customEvent: eventHub.customEvent.observable,\n others: eventHub.others.observable,\n self: eventHub.self.observable,\n myPresence: eventHub.myPresence.observable,\n storageBatch: eventHub.storageBatch.observable,\n history: eventHub.history.observable,\n storageDidLoad: eventHub.storageDidLoad.observable,\n storageStatus: eventHub.storageStatus.observable,\n ydoc: eventHub.ydoc.observable,\n\n comments: eventHub.comments.observable,\n roomWillDestroy: eventHub.roomWillDestroy.observable,\n };\n\n async function getThreadsSince(options: GetThreadsSinceOptions) {\n return httpClient.getThreadsSince({\n roomId,\n since: options.since,\n signal: options.signal,\n });\n }\n\n async function getThreads(options?: GetThreadsOptions<TM>) {\n return httpClient.getThreads({\n roomId,\n query: options?.query,\n cursor: options?.cursor,\n });\n }\n\n async function getThread(threadId: string) {\n return httpClient.getThread({ roomId, threadId });\n }\n\n // TODO 4.0: Update API to be similar to `@liveblocks/node`'s `createThread` method.\n // Instead of a flat list of options (`commentId`, `metadata`, `body`, `commentMetadata`, etc.),\n // we could move to using a nested `comment` object to differentiate between thread and comment properties.\n //\n // {\n // roomId: string;\n // threadId?: string;\n // metadata: TM | undefined;\n // comment: {\n // id?: string;\n // metadata: CM | undefined;\n // body: CommentBody;\n // attachmentIds?: string[];\n // };\n // }\n async function createThread(options: {\n roomId: string;\n threadId?: string;\n commentId?: string;\n metadata: TM | undefined;\n commentMetadata: CM | undefined;\n body: CommentBody;\n attachmentIds?: string[];\n }) {\n return httpClient.createThread({\n roomId,\n threadId: options.threadId,\n commentId: options.commentId,\n metadata: options.metadata,\n body: options.body,\n commentMetadata: options.commentMetadata,\n attachmentIds: options.attachmentIds,\n });\n }\n\n async function deleteThread(threadId: string) {\n return httpClient.deleteThread({ roomId, threadId });\n }\n\n async function editThreadMetadata({\n metadata,\n threadId,\n }: {\n roomId: string;\n metadata: Patchable<TM>;\n threadId: string;\n }) {\n return httpClient.editThreadMetadata({ roomId, threadId, metadata });\n }\n\n async function editCommentMetadata({\n threadId,\n commentId,\n metadata,\n }: {\n roomId: string;\n threadId: string;\n commentId: string;\n metadata: Patchable<CM>;\n }) {\n return 
httpClient.editCommentMetadata({\n roomId,\n threadId,\n commentId,\n metadata,\n });\n }\n\n async function markThreadAsResolved(threadId: string) {\n return httpClient.markThreadAsResolved({ roomId, threadId });\n }\n\n async function markThreadAsUnresolved(threadId: string) {\n return httpClient.markThreadAsUnresolved({\n roomId,\n threadId,\n });\n }\n\n async function subscribeToThread(threadId: string) {\n return httpClient.subscribeToThread({ roomId, threadId });\n }\n\n async function unsubscribeFromThread(threadId: string) {\n return httpClient.unsubscribeFromThread({ roomId, threadId });\n }\n\n async function createComment(options: {\n threadId: string;\n commentId?: string;\n body: CommentBody;\n metadata?: CM;\n attachmentIds?: string[];\n }) {\n return httpClient.createComment({\n roomId,\n threadId: options.threadId,\n commentId: options.commentId,\n body: options.body,\n metadata: options.metadata,\n attachmentIds: options.attachmentIds,\n });\n }\n\n async function editComment(options: {\n threadId: string;\n commentId: string;\n body: CommentBody;\n metadata?: Patchable<CM>;\n attachmentIds?: string[];\n }) {\n return httpClient.editComment({\n roomId,\n threadId: options.threadId,\n commentId: options.commentId,\n body: options.body,\n metadata: options.metadata,\n attachmentIds: options.attachmentIds,\n });\n }\n\n async function deleteComment({\n threadId,\n commentId,\n }: {\n roomId: string;\n threadId: string;\n commentId: string;\n }) {\n return httpClient.deleteComment({ roomId, threadId, commentId });\n }\n\n async function addReaction({\n threadId,\n commentId,\n emoji,\n }: {\n threadId: string;\n commentId: string;\n emoji: string;\n }) {\n return httpClient.addReaction({ roomId, threadId, commentId, emoji });\n }\n\n async function removeReaction({\n threadId,\n commentId,\n emoji,\n }: {\n threadId: string;\n commentId: string;\n emoji: string;\n }) {\n return await httpClient.removeReaction({\n roomId,\n threadId,\n commentId,\n emoji,\n });\n }\n\n function prepareAttachment(file: File): CommentLocalAttachment {\n return {\n type: \"localAttachment\",\n status: \"idle\",\n id: createCommentAttachmentId(),\n name: file.name,\n size: file.size,\n mimeType: file.type,\n file,\n };\n }\n\n async function uploadAttachment(\n attachment: CommentLocalAttachment,\n options: UploadAttachmentOptions = {}\n ): Promise<CommentAttachment> {\n return httpClient.uploadAttachment({\n roomId,\n attachment,\n signal: options.signal,\n });\n }\n\n function getAttachmentUrl(attachmentId: string) {\n return httpClient.getAttachmentUrl({ roomId, attachmentId });\n }\n\n function getSubscriptionSettings(\n options?: GetSubscriptionSettingsOptions\n ): Promise<RoomSubscriptionSettings> {\n return httpClient.getSubscriptionSettings({\n roomId,\n signal: options?.signal,\n });\n }\n\n function updateSubscriptionSettings(\n settings: Partial<RoomSubscriptionSettings>\n ): Promise<RoomSubscriptionSettings> {\n return httpClient.updateSubscriptionSettings({ roomId, settings });\n }\n\n async function markInboxNotificationAsRead(inboxNotificationId: string) {\n await httpClient.markRoomInboxNotificationAsRead({\n roomId,\n inboxNotificationId,\n });\n }\n\n // Register a global source of pending changes for Storage™, so that the\n // useSyncStatus() hook will be able to report this to end users\n const syncSourceForYjs = config.createSyncSource();\n\n function yjsStatusDidChange(status: YjsSyncStatus) {\n return syncSourceForYjs.setSyncStatus(\n status === \"synchronizing\" || status 
=== \"loading\"\n ? \"synchronizing\"\n : \"synchronized\"\n );\n }\n\n return Object.defineProperty(\n {\n [kInternal]: {\n get presenceBuffer() { return deepClone(context.buffer.presenceUpdates?.data ?? null) }, // prettier-ignore\n get undoStack() { return deepClone(context.undoStack) }, // prettier-ignore\n get nodeCount() { return context.pool.nodes.size }, // prettier-ignore\n\n getYjsProvider() {\n return context.yjsProvider;\n },\n\n setYjsProvider(newProvider: IYjsProvider | undefined) {\n // Deregister status change listener for the old Yjs provider\n // Register status change listener for the new Yjs provider\n context.yjsProvider?.off(\"status\", yjsStatusDidChange);\n context.yjsProvider = newProvider;\n newProvider?.on(\"status\", yjsStatusDidChange);\n context.yjsProviderDidChange.notify();\n },\n\n yjsProviderDidChange: context.yjsProviderDidChange.observable,\n\n // send metadata when using a text editor\n reportTextEditor,\n // create a text mention when using a text editor\n createTextMention,\n // delete a text mention when using a text editor\n deleteTextMention,\n // list versions of the document\n listTextVersions,\n // List versions of the document since the specified date\n listTextVersionsSince,\n // get a specific version\n getTextVersion,\n // create a version\n createTextVersion,\n // execute a contextual prompt\n executeContextualPrompt,\n\n // Support for the Liveblocks browser extension\n getSelf_forDevTools: () => selfAsTreeNode.get(),\n getOthers_forDevTools: (): readonly DevTools.UserTreeNode[] =>\n others_forDevTools.get(),\n\n // prettier-ignore\n simulate: {\n // These exist only for our E2E testing app\n explicitClose: (event) => managedSocket._privateSendMachineEvent({ type: \"EXPLICIT_SOCKET_CLOSE\", event }),\n rawSend: (data) => managedSocket.send(data),\n },\n\n attachmentUrlsStore: httpClient.getOrCreateAttachmentUrlsStore(roomId),\n },\n\n id: roomId,\n subscribe: makeClassicSubscribeFn(\n roomId,\n events,\n config.errorEventSource\n ),\n\n connect: () => managedSocket.connect(),\n reconnect: () => managedSocket.reconnect(),\n disconnect: () => managedSocket.disconnect(),\n destroy: () => {\n // remove the roomWillDestroy event from the event hub\n const { roomWillDestroy, ...eventsExceptDestroy } = eventHub;\n // Unregister all registered callbacks\n for (const source of Object.values(eventsExceptDestroy)) {\n source.dispose();\n }\n eventHub.roomWillDestroy.notify();\n context.yjsProvider?.off(\"status\", yjsStatusDidChange);\n syncSourceForStorage.destroy();\n syncSourceForYjs.destroy();\n uninstallBgTabSpy();\n managedSocket.destroy();\n\n // cleanup will destroy listener\n roomWillDestroy.dispose();\n },\n\n // Presence\n updatePresence,\n updateYDoc,\n broadcastEvent,\n\n // Storage\n batch,\n history: {\n undo,\n redo,\n canUndo,\n canRedo,\n clear,\n pause: pauseHistory,\n resume: resumeHistory,\n },\n\n fetchYDoc,\n getStorage,\n getStorageSnapshot,\n getStorageStatus,\n\n isPresenceReady,\n isStorageReady,\n waitUntilPresenceReady: memoizeOnSuccess(waitUntilPresenceReady),\n waitUntilStorageReady: memoizeOnSuccess(waitUntilStorageReady),\n\n events,\n\n // Core\n getStatus: () => managedSocket.getStatus(),\n getSelf: () => self.get(),\n\n // Presence\n getPresence: () => context.myPresence.get(),\n getOthers: () => context.others.get(),\n\n // Comments\n getThreads,\n getThreadsSince,\n getThread,\n createThread,\n deleteThread,\n editThreadMetadata,\n markThreadAsResolved,\n markThreadAsUnresolved,\n subscribeToThread,\n 
unsubscribeFromThread,\n createComment,\n editComment,\n editCommentMetadata,\n deleteComment,\n addReaction,\n removeReaction,\n prepareAttachment,\n uploadAttachment,\n getAttachmentUrl,\n\n // Notifications\n getNotificationSettings: getSubscriptionSettings,\n getSubscriptionSettings,\n updateNotificationSettings: updateSubscriptionSettings,\n updateSubscriptionSettings,\n markInboxNotificationAsRead,\n },\n\n // Explictly make the internal field non-enumerable, to avoid aggressive\n // freezing when used with Immer\n kInternal,\n { enumerable: false }\n );\n}\n\n/**\n * @internal\n * This recreates the classic single `.subscribe()` method for the Room API, as\n * documented here https://liveblocks.io/docs/api-reference/liveblocks-client#Room.subscribe(storageItem)\n */\nfunction makeClassicSubscribeFn<\n P extends JsonObject,\n S extends LsonObject,\n U extends BaseUserMeta,\n E extends Json,\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n>(\n roomId: string,\n events: Room<P, S, U, E, TM, CM>[\"events\"],\n errorEvents: EventSource<LiveblocksError>\n): SubscribeFn<P, S, U, E> {\n // Set up the \"subscribe\" wrapper API\n function subscribeToLiveStructureDeeply<L extends LiveStructure>(\n node: L,\n callback: (updates: StorageUpdate[]) => void\n ): () => void {\n return events.storageBatch.subscribe((updates) => {\n const relatedUpdates = updates.filter((update) =>\n isSameNodeOrChildOf(update.node, node)\n );\n if (relatedUpdates.length > 0) {\n callback(relatedUpdates);\n }\n });\n }\n\n function subscribeToLiveStructureShallowly<L extends LiveStructure>(\n node: L,\n callback: (node: L) => void\n ): () => void {\n return events.storageBatch.subscribe((updates) => {\n for (const update of updates) {\n if (update.node._id === node._id) {\n callback(update.node as L);\n }\n }\n });\n }\n\n // Generic storage callbacks\n function subscribe(callback: StorageCallback): () => void; // prettier-ignore\n // Storage callbacks filtered by Live structure\n function subscribe<L extends LiveStructure>(liveStructure: L, callback: (node: L) => void): () => void; // prettier-ignore\n function subscribe(node: LiveStructure, callback: StorageCallback, options: { isDeep: true }): () => void; // prettier-ignore\n // Room event callbacks\n function subscribe<K extends RoomEventName>(type: K, listener: RoomEventCallbackFor<K, P, U, E>): () => void; // prettier-ignore\n\n function subscribe<L extends LiveStructure, K extends RoomEventName>(\n first: StorageCallback | L | K,\n second?: ((node: L) => void) | StorageCallback | RoomEventCallback,\n options?: { isDeep: boolean }\n ): () => void {\n if (typeof first === \"string\" && isRoomEventName(first)) {\n if (typeof second !== \"function\") {\n throw new Error(\"Second argument must be a callback function\");\n }\n const callback = second;\n switch (first) {\n case \"event\":\n return events.customEvent.subscribe(\n callback as Callback<RoomEventMessage<P, U, E>>\n );\n\n case \"my-presence\":\n return events.myPresence.subscribe(callback as Callback<P>);\n\n case \"others\": {\n // NOTE: Others have a different callback structure, where the API\n // exposed on the outside takes _two_ callback arguments!\n const cb = callback as LegacyOthersEventCallback<P, U>;\n return events.others.subscribe((event) => {\n const { others, ...internalEvent } = event;\n return cb(others, internalEvent);\n });\n }\n\n case \"error\": {\n return errorEvents.subscribe((err) => {\n if (err.roomId === roomId) {\n return (callback as Callback<Error>)(err);\n }\n });\n 
}\n\n case \"status\":\n return events.status.subscribe(callback as Callback<Status>);\n\n case \"lost-connection\":\n return events.lostConnection.subscribe(\n callback as Callback<LostConnectionEvent>\n );\n\n case \"history\":\n return events.history.subscribe(callback as Callback<HistoryEvent>);\n\n case \"storage-status\":\n return events.storageStatus.subscribe(\n callback as Callback<StorageStatus>\n );\n\n case \"comments\":\n return events.comments.subscribe(\n callback as Callback<CommentsEventServerMsg>\n );\n\n // istanbul ignore next\n default:\n return assertNever(\n first,\n `\"${String(first)}\" is not a valid event name`\n );\n }\n }\n\n if (second === undefined || typeof first === \"function\") {\n if (typeof first === \"function\") {\n const storageCallback = first;\n return events.storageBatch.subscribe(storageCallback);\n } else {\n // istanbul ignore next\n throw new Error(\"Please specify a listener callback\");\n }\n }\n\n if (isLiveNode(first)) {\n const node = first;\n if (options?.isDeep) {\n const storageCallback = second as StorageCallback;\n return subscribeToLiveStructureDeeply(node, storageCallback);\n } else {\n const nodeCallback = second as (node: L) => void;\n return subscribeToLiveStructureShallowly(node, nodeCallback);\n }\n }\n\n throw new Error(\n `${String(first)} is not a value that can be subscribed to.`\n );\n }\n\n return subscribe;\n}\n\nfunction isRoomEventName(value: string) {\n return (\n value === \"my-presence\" ||\n value === \"others\" ||\n value === \"event\" ||\n value === \"error\" ||\n value === \"history\" ||\n value === \"status\" ||\n value === \"storage-status\" ||\n value === \"lost-connection\" ||\n value === \"connection\" ||\n value === \"comments\"\n );\n}\n\nexport function makeAuthDelegateForRoom(\n roomId: string,\n authManager: AuthManager\n): () => Promise<AuthValue> {\n return async () => {\n return authManager.getAuthValue({ requestedScope: \"room:read\", roomId });\n };\n}\n\nexport function makeCreateSocketDelegateForRoom(\n roomId: string,\n baseUrl: string,\n WebSocketPolyfill?: IWebSocket,\n engine?: 1 | 2\n) {\n return (authValue: AuthValue): IWebSocketInstance => {\n const ws: IWebSocket | undefined =\n WebSocketPolyfill ??\n (typeof WebSocket === \"undefined\" ? undefined : WebSocket);\n\n if (ws === undefined) {\n throw new StopRetrying(\n \"To use Liveblocks client in a non-DOM environment, you need to provide a WebSocket polyfill.\"\n );\n }\n\n const url = new URL(baseUrl);\n url.protocol = url.protocol === \"http:\" ? 
\"ws\" : \"wss\";\n url.pathname = \"/v8\";\n url.searchParams.set(\"roomId\", roomId);\n if (authValue.type === \"secret\") {\n url.searchParams.set(\"tok\", authValue.token.raw);\n } else if (authValue.type === \"public\") {\n url.searchParams.set(\"pubkey\", authValue.publicApiKey);\n } else {\n return assertNever(authValue, \"Unhandled case\");\n }\n url.searchParams.set(\"version\", PKG_VERSION || \"dev\");\n if (engine !== undefined) {\n url.searchParams.set(\"e\", String(engine));\n }\n return new ws(url.toString());\n };\n}\n","import { type Ai, createAi, makeCreateSocketDelegateForAi } from \"./ai\";\nimport type { LiveblocksHttpApi } from \"./api-client\";\nimport { createApiClient } from \"./api-client\";\nimport { createAuthManager } from \"./auth-manager\";\nimport { isIdle, StopRetrying } from \"./connection\";\nimport { DEFAULT_BASE_URL } from \"./constants\";\nimport type { LsonObject } from \"./crdts/Lson\";\nimport { linkDevTools, setupDevTools, unlinkDevTools } from \"./devtools\";\nimport type {\n DCM,\n DE,\n DGI,\n DP,\n DRI,\n DS,\n DTM,\n DU,\n} from \"./globals/augmentation\";\nimport { kInternal } from \"./internal\";\nimport type { BatchStore } from \"./lib/batch\";\nimport { Batch, createBatchStore } from \"./lib/batch\";\nimport type { Observable } from \"./lib/EventSource\";\nimport { makeEventSource } from \"./lib/EventSource\";\nimport * as console from \"./lib/fancy-console\";\nimport type { Json, JsonObject } from \"./lib/Json\";\nimport type { NoInfr } from \"./lib/NoInfer\";\nimport type { Relax } from \"./lib/Relax\";\nimport type { Resolve } from \"./lib/Resolve\";\nimport { Signal } from \"./lib/signals\";\nimport { warnOnceIf } from \"./lib/warnings\";\nimport type { CustomAuthenticationResult } from \"./protocol/Authentication\";\nimport type { BaseUserMeta } from \"./protocol/BaseUserMeta\";\nimport type {\n BaseMetadata,\n ThreadData,\n ThreadDeleteInfo,\n} from \"./protocol/Comments\";\nimport type {\n InboxNotificationData,\n InboxNotificationDeleteInfo,\n} from \"./protocol/InboxNotifications\";\nimport type { MentionData } from \"./protocol/MentionData\";\nimport type {\n NotificationSettings,\n PartialNotificationSettings,\n} from \"./protocol/NotificationSettings\";\nimport { createNotificationSettings } from \"./protocol/NotificationSettings\";\nimport type {\n SubscriptionData,\n SubscriptionDeleteInfo,\n} from \"./protocol/Subscriptions\";\nimport type {\n LargeMessageStrategy,\n OpaqueRoom,\n OptionalTupleUnless,\n PartialUnless,\n Polyfills,\n Room,\n RoomDelegates,\n SyncSource,\n} from \"./room\";\nimport {\n createRoom,\n makeAuthDelegateForRoom,\n makeCreateSocketDelegateForRoom,\n} from \"./room\";\nimport type { Awaitable } from \"./types/Awaitable\";\nimport type { LiveblocksErrorContext } from \"./types/LiveblocksError\";\nimport { LiveblocksError } from \"./types/LiveblocksError\";\nimport type { BadgeLocation } from \"./types/Others\";\n\nconst MIN_THROTTLE = 16;\nconst MAX_THROTTLE = 1_000;\nconst DEFAULT_THROTTLE = 100;\n\nconst MIN_BACKGROUND_KEEP_ALIVE_TIMEOUT = 15_000;\nconst MIN_LOST_CONNECTION_TIMEOUT = 200;\nconst RECOMMENDED_MIN_LOST_CONNECTION_TIMEOUT = 1_000;\nconst MAX_LOST_CONNECTION_TIMEOUT = 30_000;\nconst DEFAULT_LOST_CONNECTION_TIMEOUT = 5_000;\n\nconst RESOLVE_USERS_BATCH_DELAY = 50;\nconst RESOLVE_ROOMS_INFO_BATCH_DELAY = 50;\nconst RESOLVE_GROUPS_INFO_BATCH_DELAY = 50;\n\nexport type ResolveMentionSuggestionsArgs = {\n /**\n * The ID of the current room.\n */\n roomId: string;\n\n /**\n * The text to search 
for.\n */\n text: string;\n};\n\nexport type ResolveUsersArgs = {\n /**\n * The IDs of the users to resolve.\n */\n userIds: string[];\n};\n\nexport type ResolveRoomsInfoArgs = {\n /**\n * The IDs of the rooms to resolve.\n */\n roomIds: string[];\n};\n\nexport type ResolveGroupsInfoArgs = {\n /**\n * The IDs of the groups to resolve.\n */\n groupIds: string[];\n};\n\nexport type EnterOptions<P extends JsonObject = DP, S extends LsonObject = DS> =\n // prettier-ignore\n Resolve<\n {\n /**\n * Whether or not the room automatically connects to Liveblock servers.\n * Default is true.\n *\n * Usually set to false when the client is used from the server to not call\n * the authentication endpoint or connect via WebSocket.\n */\n autoConnect?: boolean;\n\n /**\n * @private Preferred storage engine version to use when creating the\n * room. Only takes effect if the room doesn't exist yet. Version\n * 2 supports streaming and will become the default in the future.\n */\n engine?: 1 | 2;\n }\n\n // Initial presence is only mandatory if the custom type requires it to be\n & PartialUnless<\n P,\n {\n /**\n * The initial Presence to use and announce when you enter the Room. The\n * Presence is available on all users in the Room (me & others).\n */\n initialPresence: P | ((roomId: string) => P);\n }\n >\n \n // Initial storage is only mandatory if the custom type requires it to be\n & PartialUnless<\n S,\n {\n /**\n * The initial Storage to use when entering a new Room.\n */\n initialStorage: S | ((roomId: string) => S);\n }\n >\n>;\n\nexport type SyncStatus =\n /* Liveblocks is in the process of writing changes */\n | \"synchronizing\"\n /* Liveblocks has persisted all pending changes */\n | \"synchronized\";\n\n/**\n * \"synchronizing\" - Liveblocks is in the process of writing changes\n * \"synchronized\" - Liveblocks has persisted all pending changes\n * \"has-local-changes\" - There is local pending state inputted by the user, but\n * we're not yet \"synchronizing\" it until a user\n * interaction, like the draft text in a comment box.\n */\nexport type InternalSyncStatus = SyncStatus | \"has-local-changes\";\n\n/**\n * @private\n *\n * Private methods and variables used in the core internals, but as a user\n * of Liveblocks, NEVER USE ANY OF THESE DIRECTLY, because bad things\n * will probably happen if you do.\n */\nexport type PrivateClientApi<\n U extends BaseUserMeta,\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n> = {\n readonly currentUserId: Signal<string | undefined>;\n readonly mentionSuggestionsCache: Map<string, MentionData[]>;\n readonly resolveMentionSuggestions: ClientOptions<U>[\"resolveMentionSuggestions\"];\n readonly usersStore: BatchStore<U[\"info\"] | undefined, string>;\n readonly roomsInfoStore: BatchStore<DRI | undefined, string>;\n readonly groupsInfoStore: BatchStore<DGI | undefined, string>;\n readonly getRoomIds: () => string[];\n readonly httpClient: LiveblocksHttpApi<TM, CM>;\n // Type-level helper\n as<TM2 extends BaseMetadata, CM2 extends BaseMetadata>(): Client<U, TM2, CM2>;\n // Tracking pending changes globally\n createSyncSource(): SyncSource;\n emitError(context: LiveblocksErrorContext, cause?: Error): void;\n ai: Ai;\n};\n\nexport type NotificationsApi<\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n> = {\n /**\n * Gets a page (or the initial page) for user inbox notifications and their\n * associated threads and thread subscriptions.\n *\n * This function should NOT be used for delta updates, only for pagination\n * (including the first page 
fetch). For delta updates (done during the\n * periodic polling), use the `getInboxNotificationsSince` function.\n *\n * @example\n * const {\n * inboxNotifications,\n * threads,\n * subscriptions,\n * nextCursor,\n * } = await client.getInboxNotifications();\n * const data = await client.getInboxNotifications(); // Fetch initial page (of 20 inbox notifications)\n * const data = await client.getInboxNotifications({ cursor: nextCursor }); // Fetch next page (= next 20 inbox notifications)\n */\n getInboxNotifications(options?: {\n cursor?: string;\n query?: { roomId?: string; kind?: string };\n }): Promise<{\n inboxNotifications: InboxNotificationData[];\n threads: ThreadData<TM, CM>[];\n subscriptions: SubscriptionData[];\n nextCursor: string | null;\n requestedAt: Date;\n }>;\n\n /**\n * Fetches a \"delta update\" since the last time we updated.\n *\n * This function should NOT be used for pagination, for that, see the\n * `getInboxNotifications` function.\n *\n * @example\n * const {\n * inboxNotifications: {\n * updated,\n * deleted,\n * },\n * threads: {\n * updated,\n * deleted,\n * },\n * subscriptions: {\n * updated,\n * deleted,\n * },\n * requestedAt,\n * } = await client.getInboxNotificationsSince({ since: result.requestedAt }});\n */\n getInboxNotificationsSince(options: {\n since: Date;\n query?: { roomId?: string; kind?: string };\n signal?: AbortSignal;\n }): Promise<{\n inboxNotifications: {\n updated: InboxNotificationData[];\n deleted: InboxNotificationDeleteInfo[];\n };\n threads: {\n updated: ThreadData<TM, CM>[];\n deleted: ThreadDeleteInfo[];\n };\n subscriptions: {\n updated: SubscriptionData[];\n deleted: SubscriptionDeleteInfo[];\n };\n requestedAt: Date;\n }>;\n\n /**\n * Gets the number of unread inbox notifications for the current user.\n *\n * @example\n * const count = await client.getUnreadInboxNotificationsCount();\n */\n getUnreadInboxNotificationsCount(options?: {\n query?: {\n roomId?: string;\n kind?: string;\n };\n signal?: AbortSignal;\n }): Promise<number>;\n\n /**\n * Marks all inbox notifications as read.\n *\n * @example\n * await client.markAllInboxNotificationsAsRead();\n */\n markAllInboxNotificationsAsRead(): Promise<void>;\n\n /**\n * Marks an inbox notification as read.\n *\n * @example\n * await client.markInboxNotificationAsRead(\"in_xxx\");\n */\n markInboxNotificationAsRead(inboxNotificationId: string): Promise<void>;\n\n /**\n * Deletes all inbox notifications for the current user.\n *\n * @example\n * await client.deleteAllInboxNotifications();\n */\n deleteAllInboxNotifications(): Promise<void>;\n\n /**\n * Deletes an inbox notification for the current user.\n *\n * @example\n * await client.deleteInboxNotification(\"in_xxx\");\n */\n deleteInboxNotification(inboxNotificationId: string): Promise<void>;\n\n /**\n * Gets notifications settings for a user for a project.\n *\n * @example\n * const notificationSettings = await client.getNotificationSettings();\n */\n getNotificationSettings(options?: {\n signal?: AbortSignal;\n }): Promise<NotificationSettings>;\n\n /**\n * Update notifications settings for a user for a project.\n *\n * @example\n * await client.updateNotificationSettings({\n * email: {\n * thread: true,\n * textMention: false,\n * $customKind1: true,\n * }\n * })\n */\n updateNotificationSettings(\n settings: PartialNotificationSettings\n ): Promise<NotificationSettings>;\n};\n\n/**\n * @private Widest-possible Client type, matching _any_ Client instance. 
Note\n * that this type is different from `Client`-without-type-arguments. That\n * represents a Client instance using globally augmented types only, which is\n * narrower.\n */\nexport type OpaqueClient = Client<BaseUserMeta>;\n\nexport type Client<\n U extends BaseUserMeta = DU,\n TM extends BaseMetadata = DTM,\n CM extends BaseMetadata = DCM,\n> = {\n /**\n * Gets a room. Returns null if {@link Client.enter} has not been called previously.\n *\n * @param roomId The id of the room\n */\n getRoom<\n P extends JsonObject = DP,\n S extends LsonObject = DS,\n E extends Json = DE,\n TM2 extends BaseMetadata = TM,\n CM2 extends BaseMetadata = CM,\n >(\n roomId: string\n ): Room<P, S, U, E, TM2, CM2> | null;\n\n /**\n * Enter a room.\n * @param roomId The id of the room\n * @param options Optional. You can provide initializers for the Presence or Storage when entering the Room.\n * @returns The room and a leave function. Call the returned leave() function when you no longer need the room.\n */\n enterRoom<\n P extends JsonObject = DP,\n S extends LsonObject = DS,\n E extends Json = DE,\n TM2 extends BaseMetadata = TM,\n CM2 extends BaseMetadata = CM,\n >(\n roomId: string,\n ...args: OptionalTupleUnless<\n P & S,\n [options: EnterOptions<NoInfr<P>, NoInfr<S>>]\n >\n ): {\n room: Room<P, S, U, E, TM2, CM2>;\n leave: () => void;\n };\n\n /**\n * Purges all cached auth tokens and reconnects all rooms that are still\n * connected, if any.\n *\n * Call this whenever you log out a user in your application.\n */\n logout(): void;\n\n /**\n * Advanced APIs related to the resolvers.\n */\n resolvers: {\n /**\n * Invalidate some or all users that were previously cached by `resolveUsers`.\n *\n * @example\n * // Invalidate all users\n * client.resolvers.invalidateUsers();\n *\n * @example\n * // Invalidate specific users\n * client.resolvers.invalidateUsers([\"user-1\", \"user-2\"]);\n */\n invalidateUsers(userIds?: string[]): void;\n\n /**\n * Invalidate some or all rooms info that were previously cached by `resolveRoomsInfo`.\n *\n * @example\n * // Invalidate all rooms\n * client.resolvers.invalidateRoomsInfo();\n *\n * @example\n * // Invalidate specific rooms\n * client.resolvers.invalidateRoomsInfo([\"room-1\", \"room-2\"]);\n */\n invalidateRoomsInfo(roomIds?: string[]): void;\n\n /**\n * Invalidate some or all groups info that were previously cached by `resolveGroupsInfo`.\n *\n * @example\n * // Invalidate all groups\n * client.resolvers.invalidateGroupsInfo();\n *\n * @example\n * // Invalidate specific groups\n * client.resolvers.invalidateGroupsInfo([\"group-1\", \"group-2\"]);\n */\n invalidateGroupsInfo(groupIds?: string[]): void;\n\n /**\n * Invalidate all mention suggestions cached by `resolveMentionSuggestions`.\n *\n * @example\n * // Invalidate all mention suggestions\n * client.resolvers.invalidateMentionSuggestions();\n */\n invalidateMentionSuggestions(): void;\n };\n\n /**\n * @private\n *\n * Private methods and variables used in the core internals, but as a user\n * of Liveblocks, NEVER USE ANY OF THESE DIRECTLY, because bad things\n * will probably happen if you do.\n */\n // TODO Make this a getter, so we can provide M\n readonly [kInternal]: PrivateClientApi<U, TM, CM>;\n\n /**\n * Returns the current global sync status of the Liveblocks client. 
If any\n * part of Liveblocks has any local pending changes that haven't been\n * confirmed by or persisted by the server yet, this will be \"synchronizing\",\n * otherwise \"synchronized\".\n *\n * This is a combined status for all of the below parts of Liveblocks:\n * - Storage (realtime APIs)\n * - Text Editors\n * - Comments\n * - Notifications\n *\n * @example\n * const status = client.getSyncStatus(); // \"synchronizing\" | \"synchronized\"\n */\n getSyncStatus(): SyncStatus;\n\n /**\n * All possible client events, subscribable from a single place.\n */\n readonly events: {\n readonly error: Observable<LiveblocksError>;\n readonly syncStatus: Observable<void>;\n };\n} & NotificationsApi<TM, CM>;\n\nexport type AuthEndpoint =\n | string\n | ((room?: string) => Promise<CustomAuthenticationResult>);\n\n/**\n * The authentication endpoint that is called to ensure that the current user has access to a room.\n * Can be an url or a callback if you need to add additional headers.\n */\nexport type ClientOptions<U extends BaseUserMeta = DU> = {\n throttle?: number; // in milliseconds\n lostConnectionTimeout?: number; // in milliseconds\n backgroundKeepAliveTimeout?: number; // in milliseconds\n polyfills?: Polyfills;\n largeMessageStrategy?: LargeMessageStrategy;\n unstable_streamData?: boolean;\n /**\n * A function that returns a list of mention suggestions matching a string.\n */\n resolveMentionSuggestions?: (\n args: ResolveMentionSuggestionsArgs\n ) => Awaitable<string[] | MentionData[]>;\n\n /**\n * A function that returns user info from user IDs.\n * You should return a list of user objects of the same size, in the same order.\n */\n resolveUsers?: (\n args: ResolveUsersArgs\n ) => Awaitable<(U[\"info\"] | undefined)[] | undefined>;\n\n /**\n * A function that returns room info from room IDs.\n * You should return a list of room info objects of the same size, in the same order.\n */\n resolveRoomsInfo?: (\n args: ResolveRoomsInfoArgs\n ) => Awaitable<(DRI | undefined)[] | undefined>;\n\n /**\n * A function that returns group info from group IDs.\n * You should return a list of group info objects of the same size, in the same order.\n */\n resolveGroupsInfo?: (\n args: ResolveGroupsInfoArgs\n ) => Awaitable<(DGI | undefined)[] | undefined>;\n\n /**\n * Prevent the current browser tab from being closed if there are any locally\n * pending Liveblocks changes that haven't been submitted to or confirmed by\n * the server yet.\n */\n preventUnsavedChanges?: boolean;\n\n /**\n * The location where the brand badge should be displayed when using a free plan.\n * Default is \"bottom-right\".\n */\n badgeLocation?: BadgeLocation;\n\n /** Point the client to an alternative Liveblocks server. 
*/\n baseUrl?: string;\n\n /** @internal */\n mockedDelegates?: RoomDelegates;\n\n /** @internal */\n enableDebugLogging?: boolean;\n\n /** @internal */\n __DANGEROUSLY_disableThrottling?: true; // for unit testing purposes only, never use this in production\n} & Relax<{ publicApiKey: string } | { authEndpoint: AuthEndpoint }>;\n\nfunction getBaseUrl(baseUrl?: string | undefined): string {\n if (\n typeof baseUrl === \"string\" &&\n baseUrl.startsWith(\"http\") // Must be http or https URL\n ) {\n return baseUrl;\n } else {\n return DEFAULT_BASE_URL;\n }\n}\n\n/**\n * Create a client that will be responsible to communicate with liveblocks servers.\n *\n * @example\n * const client = createClient({\n * authEndpoint: \"/api/auth\"\n * });\n *\n * // It's also possible to use a function to call your authentication endpoint.\n * // Useful to add additional headers or use an API wrapper (like Firebase functions)\n * const client = createClient({\n * authEndpoint: async (room?) => {\n * const response = await fetch(\"/api/auth\", {\n * method: \"POST\",\n * headers: {\n * Authentication: \"token\",\n * \"Content-Type\": \"application/json\"\n * },\n * body: JSON.stringify({ room })\n * });\n *\n * return await response.json(); // should be: { token: \"...\" }\n * }\n * });\n */\nexport function createClient<U extends BaseUserMeta = DU>(\n options: ClientOptions<U>\n): Client<U> {\n const clientOptions = options;\n const throttleDelay =\n process.env.NODE_ENV !== \"production\" &&\n clientOptions.__DANGEROUSLY_disableThrottling\n ? 0\n : getThrottle(clientOptions.throttle ?? DEFAULT_THROTTLE);\n const lostConnectionTimeout = getLostConnectionTimeout(\n clientOptions.lostConnectionTimeout ?? DEFAULT_LOST_CONNECTION_TIMEOUT\n );\n const backgroundKeepAliveTimeout = getBackgroundKeepAliveTimeout(\n clientOptions.backgroundKeepAliveTimeout\n );\n const baseUrl = getBaseUrl(clientOptions.baseUrl);\n\n const currentUserId = new Signal<string | undefined>(undefined);\n\n const authManager = createAuthManager(options, (token) => {\n currentUserId.set(() => token.uid);\n });\n\n const fetchPolyfill =\n clientOptions.polyfills?.fetch ||\n /* istanbul ignore next */ globalThis.fetch?.bind(globalThis);\n\n const httpClient = createApiClient({\n baseUrl,\n fetchPolyfill,\n currentUserId,\n authManager,\n });\n\n type RoomDetails = {\n room: OpaqueRoom;\n unsubs: Set<() => void>;\n };\n\n const roomsById = new Map<string, RoomDetails>();\n\n const ai = createAi({\n userId: currentUserId.get(),\n lostConnectionTimeout,\n backgroundKeepAliveTimeout: getBackgroundKeepAliveTimeout(\n clientOptions.backgroundKeepAliveTimeout\n ),\n polyfills: clientOptions.polyfills,\n delegates: {\n createSocket: makeCreateSocketDelegateForAi(\n baseUrl,\n clientOptions.polyfills?.WebSocket\n ),\n authenticate: async () => {\n const resp = await authManager.getAuthValue({\n requestedScope: \"room:read\",\n });\n if (resp.type === \"public\") {\n throw new StopRetrying(\n \"Cannot use AI Copilots with a public API key\"\n );\n }\n return resp;\n },\n canZombie: () => false,\n },\n });\n\n function teardownRoom(room: OpaqueRoom) {\n unlinkDevTools(room.id);\n roomsById.delete(room.id);\n room.destroy();\n }\n\n function leaseRoom<\n P extends JsonObject,\n S extends LsonObject,\n U extends BaseUserMeta,\n E extends Json,\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n >(\n details: RoomDetails\n ): {\n room: Room<P, S, U, E, TM, CM>;\n leave: () => void;\n } {\n // Create a new self-destructing leave function\n const leave = 
() => {\n const self = leave; // A reference to the currently executing function itself\n\n if (!details.unsubs.delete(self)) {\n console.warn(\n \"This leave function was already called. Calling it more than once has no effect.\"\n );\n } else {\n // Was this the last room lease? If so, tear down the room\n if (details.unsubs.size === 0) {\n teardownRoom(details.room);\n }\n }\n };\n\n details.unsubs.add(leave);\n return {\n room: details.room as Room<P, S, U, E, TM, CM>,\n leave,\n };\n }\n\n function enterRoom<\n P extends JsonObject,\n S extends LsonObject,\n U extends BaseUserMeta,\n E extends Json,\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n >(\n roomId: string,\n ...args: OptionalTupleUnless<\n P & S,\n [options: EnterOptions<NoInfr<P>, NoInfr<S>>]\n >\n ): {\n room: Room<P, S, U, E, TM, CM>;\n leave: () => void;\n } {\n const existing = roomsById.get(roomId);\n if (existing !== undefined) {\n return leaseRoom(existing);\n }\n\n const options = args[0] ?? ({} as EnterOptions<P, S>);\n const initialPresence =\n (typeof options.initialPresence === \"function\"\n ? options.initialPresence(roomId)\n : options.initialPresence) ?? ({} as P);\n\n const initialStorage =\n (typeof options.initialStorage === \"function\"\n ? options.initialStorage(roomId)\n : options.initialStorage) ?? ({} as S);\n\n const newRoom = createRoom<P, S, U, E, TM, CM>(\n { initialPresence, initialStorage },\n {\n roomId,\n throttleDelay,\n lostConnectionTimeout,\n backgroundKeepAliveTimeout,\n polyfills: clientOptions.polyfills,\n delegates: clientOptions.mockedDelegates ?? {\n createSocket: makeCreateSocketDelegateForRoom(\n roomId,\n baseUrl,\n clientOptions.polyfills?.WebSocket,\n options.engine\n ),\n authenticate: makeAuthDelegateForRoom(roomId, authManager),\n },\n enableDebugLogging: clientOptions.enableDebugLogging,\n baseUrl,\n errorEventSource: liveblocksErrorSource,\n largeMessageStrategy: clientOptions.largeMessageStrategy,\n unstable_streamData: !!clientOptions.unstable_streamData,\n roomHttpClient: httpClient as LiveblocksHttpApi<TM, CM>,\n createSyncSource,\n badgeLocation: clientOptions.badgeLocation ?? \"bottom-right\",\n }\n );\n\n const newRoomDetails: RoomDetails = {\n room: newRoom,\n unsubs: new Set(),\n };\n roomsById.set(roomId, newRoomDetails);\n\n setupDevTools(() => Array.from(roomsById.keys()));\n linkDevTools(roomId, newRoom);\n\n const shouldConnect = options.autoConnect ?? true;\n if (shouldConnect) {\n // we need to check here because nextjs would fail earlier with Node < 16\n if (typeof atob === \"undefined\") {\n if (clientOptions.polyfills?.atob === undefined) {\n throw new Error(\n \"You need to polyfill atob to use the client in your environment. Please follow the instructions at https://liveblocks.io/docs/errors/liveblocks-client/atob-polyfill\"\n );\n }\n // At this point, atob does not exist so we are either on React Native or on Node < 16, hence global is available.\n global.atob = clientOptions.polyfills.atob;\n }\n\n newRoom.connect();\n }\n\n return leaseRoom(newRoomDetails);\n }\n\n function getRoom<\n P extends JsonObject,\n S extends LsonObject,\n U extends BaseUserMeta,\n E extends Json,\n TM extends BaseMetadata,\n CM extends BaseMetadata,\n >(roomId: string): Room<P, S, U, E, TM, CM> | null {\n const room = roomsById.get(roomId)?.room;\n return room ? 
(room as Room<P, S, U, E, TM, CM>) : null;\n }\n\n function logout() {\n authManager.reset();\n\n // Reset the current user id store when the client is logged out\n currentUserId.set(() => undefined);\n\n // Reconnect all rooms that aren't idle, if any. This ensures that those\n // rooms will get reauthorized now that the auth cache is reset. If that\n // fails, they might disconnect.\n for (const { room } of roomsById.values()) {\n if (!isIdle(room.getStatus())) {\n room.reconnect();\n }\n }\n }\n\n const resolveUsers = clientOptions.resolveUsers;\n const batchedResolveUsers = new Batch(\n async (batchedUserIds: string[]) => {\n const userIds = batchedUserIds.flat();\n const users = await resolveUsers?.({ userIds });\n\n warnOnceIf(\n !resolveUsers,\n \"Set the resolveUsers option in createClient to specify user info.\"\n );\n\n return users ?? userIds.map(() => undefined);\n },\n { delay: RESOLVE_USERS_BATCH_DELAY }\n );\n const usersStore = createBatchStore(batchedResolveUsers);\n\n function invalidateResolvedUsers(userIds?: string[]) {\n usersStore.invalidate(userIds);\n }\n\n const resolveRoomsInfo = clientOptions.resolveRoomsInfo;\n const batchedResolveRoomsInfo = new Batch(\n async (batchedRoomIds: string[]) => {\n const roomIds = batchedRoomIds.flat();\n const roomsInfo = await resolveRoomsInfo?.({ roomIds });\n\n warnOnceIf(\n !resolveRoomsInfo,\n \"Set the resolveRoomsInfo option in createClient to specify room info.\"\n );\n\n return roomsInfo ?? roomIds.map(() => undefined);\n },\n { delay: RESOLVE_ROOMS_INFO_BATCH_DELAY }\n );\n const roomsInfoStore = createBatchStore(batchedResolveRoomsInfo);\n\n function invalidateResolvedRoomsInfo(roomIds?: string[]) {\n roomsInfoStore.invalidate(roomIds);\n }\n\n const resolveGroupsInfo = clientOptions.resolveGroupsInfo;\n const batchedResolveGroupsInfo = new Batch(\n async (batchedGroupIds: string[]) => {\n const groupIds = batchedGroupIds.flat();\n const groupsInfo = await resolveGroupsInfo?.({ groupIds });\n\n warnOnceIf(\n !resolveGroupsInfo,\n \"Set the resolveGroupsInfo option in createClient to specify group info.\"\n );\n\n return groupsInfo ?? groupIds.map(() => undefined);\n },\n { delay: RESOLVE_GROUPS_INFO_BATCH_DELAY }\n );\n const groupsInfoStore = createBatchStore(batchedResolveGroupsInfo);\n\n function invalidateResolvedGroupsInfo(groupIds?: string[]) {\n groupsInfoStore.invalidate(groupIds);\n }\n\n const mentionSuggestionsCache = new Map<string, MentionData[]>();\n\n function invalidateResolvedMentionSuggestions() {\n mentionSuggestionsCache.clear();\n }\n\n // ----------------------------------------------------------------\n\n const syncStatusSources: Signal<InternalSyncStatus>[] = [];\n const syncStatusSignal = new Signal<InternalSyncStatus>(\"synchronized\");\n\n const liveblocksErrorSource = makeEventSource<LiveblocksError>();\n\n function getSyncStatus(): SyncStatus {\n const status = syncStatusSignal.get();\n return status === \"synchronizing\" ? status : \"synchronized\";\n }\n\n function recompute() {\n syncStatusSignal.set(\n syncStatusSources.some((src) => src.get() === \"synchronizing\")\n ? \"synchronizing\"\n : syncStatusSources.some((src) => src.get() === \"has-local-changes\")\n ? 
\"has-local-changes\"\n : \"synchronized\"\n );\n }\n\n function createSyncSource(): SyncSource {\n const source = new Signal<InternalSyncStatus>(\"synchronized\");\n syncStatusSources.push(source);\n\n const unsub = source.subscribe(() => recompute());\n\n function setSyncStatus(status: InternalSyncStatus) {\n source.set(status);\n }\n\n function destroy() {\n unsub();\n const index = syncStatusSources.findIndex((item) => item === source);\n if (index > -1) {\n const [ref] = syncStatusSources.splice(index, 1);\n const wasStillPending = ref.get() !== \"synchronized\";\n if (wasStillPending) {\n // We only have to recompute if it was still pending. Otherwise it\n // could not have an effect on the global state anyway.\n recompute();\n }\n }\n }\n\n return { setSyncStatus, destroy };\n }\n\n // ----------------------------------------------------------------\n\n // Set up event handler that will prevent the browser tab from being closed\n // if there are locally pending changes to any part of Liveblocks (Storage,\n // text editors, Threads, Notifications, etc)\n {\n const maybePreventClose = (e: BeforeUnloadEvent) => {\n if (\n clientOptions.preventUnsavedChanges &&\n syncStatusSignal.get() !== \"synchronized\"\n ) {\n e.preventDefault();\n }\n };\n\n // A Liveblocks client is currently never destroyed.\n // TODO Call win.removeEventListener(\"beforeunload\", maybePreventClose)\n // once we have a client.destroy() method\n const win = typeof window !== \"undefined\" ? window : undefined;\n win?.addEventListener(\"beforeunload\", maybePreventClose);\n }\n\n async function getNotificationSettings(options?: {\n signal?: AbortSignal;\n }): Promise<NotificationSettings> {\n const plainSettings = await httpClient.getNotificationSettings(options);\n const settings = createNotificationSettings(plainSettings);\n\n return settings;\n }\n\n async function updateNotificationSettings(\n settings: PartialNotificationSettings\n ): Promise<NotificationSettings> {\n const plainSettings = await httpClient.updateNotificationSettings(settings);\n const settingsObject = createNotificationSettings(plainSettings);\n\n return settingsObject;\n }\n\n const client: Client<U> = Object.defineProperty(\n {\n enterRoom,\n getRoom,\n\n logout,\n\n // Public inbox notifications API\n getInboxNotifications: httpClient.getInboxNotifications,\n getInboxNotificationsSince: httpClient.getInboxNotificationsSince,\n getUnreadInboxNotificationsCount:\n httpClient.getUnreadInboxNotificationsCount,\n markAllInboxNotificationsAsRead:\n httpClient.markAllInboxNotificationsAsRead,\n markInboxNotificationAsRead: httpClient.markInboxNotificationAsRead,\n deleteAllInboxNotifications: httpClient.deleteAllInboxNotifications,\n deleteInboxNotification: httpClient.deleteInboxNotification,\n\n // Public notification settings API\n getNotificationSettings,\n updateNotificationSettings,\n\n // Advanced resolvers APIs\n resolvers: {\n invalidateUsers: invalidateResolvedUsers,\n invalidateRoomsInfo: invalidateResolvedRoomsInfo,\n invalidateGroupsInfo: invalidateResolvedGroupsInfo,\n invalidateMentionSuggestions: invalidateResolvedMentionSuggestions,\n },\n\n getSyncStatus,\n events: {\n error: liveblocksErrorSource,\n syncStatus: syncStatusSignal,\n },\n\n // Internal\n [kInternal]: {\n currentUserId,\n mentionSuggestionsCache,\n ai,\n resolveMentionSuggestions: clientOptions.resolveMentionSuggestions,\n usersStore,\n roomsInfoStore,\n groupsInfoStore,\n getRoomIds() {\n return Array.from(roomsById.keys());\n },\n httpClient,\n // Type-level 
helper only, it's effectively only an identity-function at runtime\n as: <TM2 extends BaseMetadata, CM2 extends BaseMetadata>() =>\n client as Client<U, TM2, CM2>,\n createSyncSource,\n emitError: (context: LiveblocksErrorContext, cause?: Error) => {\n const error = LiveblocksError.from(context, cause);\n const didNotify = liveblocksErrorSource.notify(error);\n if (!didNotify) {\n console.error(error.message);\n }\n },\n },\n },\n kInternal,\n {\n enumerable: false,\n }\n );\n\n return client;\n}\n\n/**\n * @private Private API, don't use this directly.\n */\nexport function checkBounds(\n option: string,\n value: unknown,\n min: number,\n max?: number,\n recommendedMin?: number\n): number {\n if (\n typeof value !== \"number\" ||\n value < min ||\n (max !== undefined && value > max)\n ) {\n throw new Error(\n max !== undefined\n ? `${option} should be between ${recommendedMin ?? min} and ${max}.`\n : `${option} should be at least ${recommendedMin ?? min}.`\n );\n }\n return value;\n}\n\nfunction getBackgroundKeepAliveTimeout(\n value: number | undefined\n): number | undefined {\n if (value === undefined) return undefined;\n return checkBounds(\n \"backgroundKeepAliveTimeout\",\n value,\n MIN_BACKGROUND_KEEP_ALIVE_TIMEOUT\n );\n}\n\nfunction getThrottle(value: number): number {\n return checkBounds(\"throttle\", value, MIN_THROTTLE, MAX_THROTTLE);\n}\n\nfunction getLostConnectionTimeout(value: number): number {\n return checkBounds(\n \"lostConnectionTimeout\",\n value,\n MIN_LOST_CONNECTION_TIMEOUT,\n MAX_LOST_CONNECTION_TIMEOUT,\n RECOMMENDED_MIN_LOST_CONNECTION_TIMEOUT\n );\n}\n","import type { ResolveGroupsInfoArgs, ResolveUsersArgs } from \"../client\";\nimport type { DGI, DU } from \"../globals/augmentation\";\nimport { nn } from \"../lib/assert\";\nimport { sanitizeUrl } from \"../lib/url\";\nimport type { BaseUserMeta } from \"../protocol/BaseUserMeta\";\nimport type {\n CommentBody,\n CommentBodyBlockElement,\n CommentBodyElement,\n CommentBodyInlineElement,\n CommentBodyLink,\n CommentBodyMention,\n CommentBodyParagraph,\n CommentBodyText,\n} from \"../protocol/Comments\";\nimport type { Awaitable } from \"../types/Awaitable\";\n\ntype CommentBodyBlockElementName = Exclude<\n CommentBodyBlockElement,\n CommentBodyText\n>[\"type\"];\n\ntype CommentBodyInlineElementName =\n | Exclude<CommentBodyInlineElement, CommentBodyText>[\"type\"]\n | \"text\";\n\ntype CommentBodyElementName =\n | CommentBodyBlockElementName\n | CommentBodyInlineElementName;\n\ntype CommentBodyBlockElements = {\n paragraph: CommentBodyParagraph;\n};\n\ntype CommentBodyInlineElements = {\n text: CommentBodyText;\n link: CommentBodyLink;\n mention: CommentBodyMention;\n};\n\ntype CommentBodyElements = CommentBodyBlockElements & CommentBodyInlineElements;\n\ntype CommentBodyVisitor<T extends CommentBodyElement = CommentBodyElement> = (\n element: T\n) => void;\n\nexport type CommentBodyParagraphElementArgs = {\n /**\n * The paragraph element.\n */\n element: CommentBodyParagraph;\n\n /**\n * The text content of the paragraph.\n */\n children: string;\n};\n\nexport type CommentBodyTextElementArgs = {\n /**\n * The text element.\n */\n element: CommentBodyText;\n};\n\nexport type CommentBodyLinkElementArgs = {\n /**\n * The link element.\n */\n element: CommentBodyLink;\n\n /**\n * The absolute URL of the link.\n */\n href: string;\n};\n\nexport type CommentBodyMentionElementArgs<U extends BaseUserMeta = DU> = {\n /**\n * The mention element.\n */\n element: CommentBodyMention;\n\n /**\n * The mention's user info, 
if the mention is a user mention and the `resolveUsers` option was provided.\n */\n user?: U[\"info\"];\n\n /**\n * The mention's group info, if the mention is a group mention and the `resolveGroupsInfo` option was provided.\n */\n group?: DGI;\n};\n\nexport type StringifyCommentBodyElements<U extends BaseUserMeta = DU> = {\n /**\n * The element used to display paragraphs.\n */\n paragraph: (args: CommentBodyParagraphElementArgs, index: number) => string;\n\n /**\n * The element used to display text elements.\n */\n text: (args: CommentBodyTextElementArgs, index: number) => string;\n\n /**\n * The element used to display links.\n */\n link: (args: CommentBodyLinkElementArgs, index: number) => string;\n\n /**\n * The element used to display mentions.\n */\n mention: (args: CommentBodyMentionElementArgs<U>, index: number) => string;\n};\n\nexport type StringifyCommentBodyOptions<U extends BaseUserMeta = DU> = {\n /**\n * Which format to convert the comment to.\n */\n format?: \"plain\" | \"html\" | \"markdown\";\n\n /**\n * The elements used to customize the resulting string. Each element has\n * priority over the defaults inherited from the `format` option.\n */\n elements?: Partial<StringifyCommentBodyElements<U>>;\n\n /**\n * The separator used between paragraphs.\n */\n separator?: string;\n\n /**\n * A function that returns user info from user IDs.\n * You should return a list of user objects of the same size, in the same order.\n */\n resolveUsers?: (\n args: ResolveUsersArgs\n ) => Awaitable<(U[\"info\"] | undefined)[] | undefined>;\n\n /**\n * A function that returns group info from group IDs.\n * You should return a list of group info objects of the same size, in the same order.\n */\n resolveGroupsInfo?: (\n args: ResolveGroupsInfoArgs\n ) => Awaitable<(DGI | undefined)[] | undefined>;\n};\n\nexport function isCommentBodyParagraph(\n element: CommentBodyElement\n): element is CommentBodyParagraph {\n return \"type\" in element && element.type === \"paragraph\";\n}\n\nexport function isCommentBodyText(\n element: CommentBodyElement\n): element is CommentBodyText {\n return (\n !(\"type\" in element) &&\n \"text\" in element &&\n typeof element.text === \"string\"\n );\n}\n\nexport function isCommentBodyMention(\n element: CommentBodyElement\n): element is CommentBodyMention {\n return \"type\" in element && element.type === \"mention\";\n}\n\nexport function isCommentBodyLink(\n element: CommentBodyElement\n): element is CommentBodyLink {\n return \"type\" in element && element.type === \"link\";\n}\n\nconst commentBodyElementsGuards = {\n paragraph: isCommentBodyParagraph,\n text: isCommentBodyText,\n link: isCommentBodyLink,\n mention: isCommentBodyMention,\n};\n\nconst commentBodyElementsTypes: Record<\n CommentBodyElementName,\n \"block\" | \"inline\"\n> = {\n paragraph: \"block\",\n text: \"inline\",\n link: \"inline\",\n mention: \"inline\",\n};\n\nfunction traverseCommentBody(\n body: CommentBody,\n visitor: CommentBodyVisitor\n): void;\nfunction traverseCommentBody<T extends CommentBodyElementName>(\n body: CommentBody,\n element: T,\n visitor: CommentBodyVisitor<CommentBodyElements[T]>\n): void;\nfunction traverseCommentBody(\n body: CommentBody,\n elementOrVisitor: CommentBodyElementName | CommentBodyVisitor,\n possiblyVisitor?: CommentBodyVisitor\n): void {\n if (!body || !body?.content) {\n return;\n }\n\n const element =\n typeof elementOrVisitor === \"string\" ? elementOrVisitor : undefined;\n const type = element ? 
commentBodyElementsTypes[element] : \"all\";\n const guard = element ? commentBodyElementsGuards[element] : () => true;\n const visitor =\n typeof elementOrVisitor === \"function\" ? elementOrVisitor : possiblyVisitor;\n\n for (const block of body.content) {\n if (type === \"all\" || type === \"block\") {\n if (guard(block)) {\n visitor?.(block);\n }\n }\n\n if (type === \"all\" || type === \"inline\") {\n for (const inline of block.children) {\n if (guard(inline)) {\n visitor?.(inline);\n }\n }\n }\n }\n}\n\n/**\n * Get an array of all mentions in a `CommentBody`.\n *\n * Narrow results with an optional predicate, e.g.\n * `(mention) => mention.kind === \"user\"` to only get user mentions.\n */\nexport function getMentionsFromCommentBody(\n body: CommentBody,\n predicate?: (mention: CommentBodyMention) => boolean\n): CommentBodyMention[] {\n const mentionIds = new Set<string>();\n const mentions: CommentBodyMention[] = [];\n\n traverseCommentBody(body, \"mention\", (mention) => {\n if (\n // If this mention isn't already in the list\n !mentionIds.has(mention.id) &&\n // And the provided predicate is true\n (predicate ? predicate(mention) : true)\n ) {\n mentionIds.add(mention.id);\n mentions.push(mention);\n }\n });\n\n return mentions;\n}\n\nexport async function resolveMentionsInCommentBody<U extends BaseUserMeta>(\n body: CommentBody,\n resolveUsers?: (\n args: ResolveUsersArgs\n ) => Awaitable<(U[\"info\"] | undefined)[] | undefined>,\n resolveGroupsInfo?: (\n args: ResolveGroupsInfoArgs\n ) => Awaitable<(DGI | undefined)[] | undefined>\n): Promise<{\n users: Map<string, U[\"info\"]>;\n groups: Map<string, DGI>;\n}> {\n const resolvedUsers = new Map<string, U[\"info\"]>();\n const resolvedGroupsInfo = new Map<string, DGI>();\n\n if (!resolveUsers && !resolveGroupsInfo) {\n return {\n users: resolvedUsers,\n groups: resolvedGroupsInfo,\n };\n }\n\n const mentions = getMentionsFromCommentBody(body);\n const userIds = mentions\n .filter((mention) => mention.kind === \"user\")\n .map((mention) => mention.id);\n const groupIds = mentions\n .filter((mention) => mention.kind === \"group\")\n .map((mention) => mention.id);\n\n const [users, groups] = await Promise.all([\n resolveUsers && userIds.length > 0 ? resolveUsers({ userIds }) : undefined,\n resolveGroupsInfo && groupIds.length > 0\n ? 
resolveGroupsInfo({ groupIds })\n : undefined,\n ]);\n\n if (users) {\n for (const [index, userId] of userIds.entries()) {\n const user = users[index];\n if (user) {\n resolvedUsers.set(userId, user);\n }\n }\n }\n\n if (groups) {\n for (const [index, groupId] of groupIds.entries()) {\n const group = groups[index];\n if (group) {\n resolvedGroupsInfo.set(groupId, group);\n }\n }\n }\n\n return {\n users: resolvedUsers,\n groups: resolvedGroupsInfo,\n };\n}\n\nconst htmlEscapables = {\n \"&\": \"&amp;\",\n \"<\": \"&lt;\",\n \">\": \"&gt;\",\n '\"': \"&quot;\",\n \"'\": \"&#39;\",\n};\n\nconst htmlEscapablesRegex = new RegExp(\n Object.keys(htmlEscapables)\n .map((entity) => `\\\\${entity}`)\n .join(\"|\"),\n \"g\"\n);\n\nexport function htmlSafe(value: string): HtmlSafeString {\n return new HtmlSafeString([String(value)], []);\n}\n\nfunction joinHtml(strings: (string | HtmlSafeString)[]) {\n if (strings.length <= 0) {\n return new HtmlSafeString([\"\"], []);\n }\n\n return new HtmlSafeString(\n [\"\", ...(Array(strings.length - 1).fill(\"\") as string[]), \"\"],\n strings\n );\n}\n\nfunction escapeHtml(\n value: string | string[] | HtmlSafeString | HtmlSafeString[]\n) {\n if (value instanceof HtmlSafeString) {\n return value.toString();\n }\n\n if (Array.isArray(value)) {\n return joinHtml(value).toString();\n }\n\n return String(value).replace(\n htmlEscapablesRegex,\n (character) => htmlEscapables[character as keyof typeof htmlEscapables]\n );\n}\n\n// Adapted from https://github.com/Janpot/escape-html-template-tag\nexport class HtmlSafeString {\n #strings: readonly string[];\n #values: readonly (string | string[] | HtmlSafeString | HtmlSafeString[])[];\n\n constructor(\n strings: readonly string[],\n values: readonly (string | string[] | HtmlSafeString | HtmlSafeString[])[]\n ) {\n this.#strings = strings;\n this.#values = values;\n }\n\n toString(): string {\n return this.#strings.reduce((result, str, i) => {\n return result + escapeHtml(nn(this.#values[i - 1])) + str;\n });\n }\n}\n\n/**\n * Build an HTML string from a template literal where the values are escaped.\n * Nested calls are supported and won't be escaped.\n */\nexport function html(\n strings: TemplateStringsArray,\n ...values: (string | string[] | HtmlSafeString | HtmlSafeString[])[]\n): string {\n return new HtmlSafeString(strings, values) as unknown as string;\n}\n\nconst markdownEscapables = {\n _: \"\\\\_\",\n \"*\": \"\\\\*\",\n \"#\": \"\\\\#\",\n \"`\": \"\\\\`\",\n \"~\": \"\\\\~\",\n \"!\": \"\\\\!\",\n \"|\": \"\\\\|\",\n \"(\": \"\\\\(\",\n \")\": \"\\\\)\",\n \"{\": \"\\\\{\",\n \"}\": \"\\\\}\",\n \"[\": \"\\\\[\",\n \"]\": \"\\\\]\",\n};\n\nconst markdownEscapablesRegex = new RegExp(\n Object.keys(markdownEscapables)\n .map((entity) => `\\\\${entity}`)\n .join(\"|\"),\n \"g\"\n);\n\nfunction joinMarkdown(strings: (string | MarkdownSafeString)[]) {\n if (strings.length <= 0) {\n return new MarkdownSafeString([\"\"], []);\n }\n\n return new MarkdownSafeString(\n [\"\", ...(Array(strings.length - 1).fill(\"\") as string[]), \"\"],\n strings\n );\n}\n\nfunction escapeMarkdown(\n value: string | string[] | MarkdownSafeString | MarkdownSafeString[]\n) {\n if (value instanceof MarkdownSafeString) {\n return value.toString();\n }\n\n if (Array.isArray(value)) {\n return joinMarkdown(value).toString();\n }\n\n return String(value).replace(\n markdownEscapablesRegex,\n (character) =>\n markdownEscapables[character as keyof typeof markdownEscapables]\n );\n}\n\n// Adapted from https://github.com/Janpot/escape-html-template-tag\nexport 
class MarkdownSafeString {\n #strings: readonly string[];\n #values: readonly (\n | string\n | string[]\n | MarkdownSafeString\n | MarkdownSafeString[]\n )[];\n\n constructor(\n strings: readonly string[],\n values: readonly (\n | string\n | string[]\n | MarkdownSafeString\n | MarkdownSafeString[]\n )[]\n ) {\n this.#strings = strings;\n this.#values = values;\n }\n\n toString(): string {\n return this.#strings.reduce((result, str, i) => {\n return result + escapeMarkdown(nn(this.#values[i - 1])) + str;\n });\n }\n}\n\n/**\n * Build a Markdown string from a template literal where the values are escaped.\n * Nested calls are supported and won't be escaped.\n */\nfunction markdown(\n strings: TemplateStringsArray,\n ...values: (string | string[] | MarkdownSafeString | MarkdownSafeString[])[]\n) {\n return new MarkdownSafeString(strings, values) as unknown as string;\n}\n\nconst stringifyCommentBodyPlainElements: StringifyCommentBodyElements<BaseUserMeta> =\n {\n paragraph: ({ children }) => children,\n text: ({ element }) => element.text,\n link: ({ element }) => element.text ?? element.url,\n mention: ({ element, user, group }) => {\n return `@${user?.name ?? group?.name ?? element.id}`;\n },\n };\n\nconst stringifyCommentBodyHtmlElements: StringifyCommentBodyElements<BaseUserMeta> =\n {\n paragraph: ({ children }) => {\n // prettier-ignore\n return children ? html`<p>${htmlSafe(children)}</p>` : children;\n },\n text: ({ element }) => {\n // <code><s><em><strong>text</strong></s></em></code>\n let children = element.text;\n\n if (!children) {\n return html`${children}`;\n }\n\n if (element.bold) {\n // prettier-ignore\n children = html`<strong>${children}</strong>`;\n }\n\n if (element.italic) {\n // prettier-ignore\n children = html`<em>${children}</em>`;\n }\n\n if (element.strikethrough) {\n // prettier-ignore\n children = html`<s>${children}</s>`;\n }\n\n if (element.code) {\n // prettier-ignore\n children = html`<code>${children}</code>`;\n }\n\n return html`${children}`;\n },\n link: ({ element, href }) => {\n // prettier-ignore\n return html`<a href=\"${href}\" target=\"_blank\" rel=\"noopener noreferrer\">${element.text ? html`${element.text}` : element.url}</a>`;\n },\n mention: ({ element, user, group }) => {\n // prettier-ignore\n return html`<span data-mention>@${user?.name ? html`${user?.name}` : group?.name ? html`${group?.name}` : element.id}</span>`;\n },\n };\n\nconst stringifyCommentBodyMarkdownElements: StringifyCommentBodyElements<BaseUserMeta> =\n {\n paragraph: ({ children }) => {\n return children;\n },\n text: ({ element }) => {\n // <code><s><em><strong>text</strong></s></em></code>\n let children = element.text;\n\n if (!children) {\n return children;\n }\n\n if (element.bold) {\n // prettier-ignore\n children = markdown`**${children}**`;\n }\n\n if (element.italic) {\n // prettier-ignore\n children = markdown`_${children}_`;\n }\n\n if (element.strikethrough) {\n // prettier-ignore\n children = markdown`~~${children}~~`;\n }\n\n if (element.code) {\n // prettier-ignore\n children = markdown`\\`${children}\\``;\n }\n\n return children;\n },\n link: ({ element, href }) => {\n // prettier-ignore\n return markdown`[${element.text ?? element.url}](${href})`;\n },\n mention: ({ element, user, group }) => {\n // prettier-ignore\n return markdown`@${user?.name ?? group?.name ?? 
element.id}`;\n },\n };\n\n/**\n * Convert a `CommentBody` into either a plain string,\n * Markdown, HTML, or a custom format.\n */\nexport async function stringifyCommentBody(\n body: CommentBody,\n options?: StringifyCommentBodyOptions<BaseUserMeta>\n): Promise<string> {\n const format = options?.format ?? \"plain\";\n const separator =\n options?.separator ?? (format === \"markdown\" ? \"\\n\\n\" : \"\\n\");\n const elements = {\n ...(format === \"html\"\n ? stringifyCommentBodyHtmlElements\n : format === \"markdown\"\n ? stringifyCommentBodyMarkdownElements\n : stringifyCommentBodyPlainElements),\n ...options?.elements,\n };\n const { users: resolvedUsers, groups: resolvedGroupsInfo } =\n await resolveMentionsInCommentBody(\n body,\n options?.resolveUsers,\n options?.resolveGroupsInfo\n );\n\n const blocks = body.content.flatMap((block, blockIndex) => {\n switch (block.type) {\n case \"paragraph\": {\n const inlines = block.children.flatMap((inline, inlineIndex) => {\n if (isCommentBodyMention(inline)) {\n return inline.id\n ? [\n elements.mention(\n {\n element: inline,\n user:\n inline.kind === \"user\"\n ? resolvedUsers.get(inline.id)\n : undefined,\n group:\n inline.kind === \"group\"\n ? resolvedGroupsInfo.get(inline.id)\n : undefined,\n },\n inlineIndex\n ),\n ]\n : [];\n }\n\n if (isCommentBodyLink(inline)) {\n const href = sanitizeUrl(inline.url);\n\n // If the URL is invalid, its text/URL are used as plain text.\n if (href === null) {\n return [\n elements.text(\n {\n element: { text: inline.text ?? inline.url },\n },\n inlineIndex\n ),\n ];\n }\n\n return [\n elements.link(\n {\n element: inline,\n href,\n },\n inlineIndex\n ),\n ];\n }\n\n if (isCommentBodyText(inline)) {\n return [elements.text({ element: inline }, inlineIndex)];\n }\n\n return [];\n });\n\n return [\n elements.paragraph(\n { element: block, children: inlines.join(\"\") },\n blockIndex\n ),\n ];\n }\n\n default:\n return [];\n }\n });\n\n return blocks.join(separator);\n}\n","import type { Json } from \"../lib/Json\";\nimport type { PlainLson } from \"../types/PlainLson\";\nimport { LiveList } from \"./LiveList\";\nimport { LiveMap } from \"./LiveMap\";\nimport { LiveObject } from \"./LiveObject\";\nimport type { Lson, LsonObject } from \"./Lson\";\n\n/**\n * Helper type to convert any valid Lson type to the equivalent Json type.\n *\n * Examples:\n *\n * ToImmutable<42> // 42\n * ToImmutable<'hi'> // 'hi'\n * ToImmutable<number> // number\n * ToImmutable<string> // string\n * ToImmutable<string | LiveList<number>> // string | readonly number[]\n * ToImmutable<LiveMap<string, LiveList<number>>>\n * // ReadonlyMap<string, readonly number[]>\n * ToImmutable<LiveObject<{ a: number, b: LiveList<string>, c?: number }>>\n * // { readonly a: null, readonly b: readonly string[], readonly c?: number }\n *\n */\n// prettier-ignore\nexport type ToImmutable<L extends Lson | LsonObject> =\n // A LiveList serializes to an equivalent JSON array\n L extends LiveList<infer I> ? readonly ToImmutable<I>[] :\n\n // A LiveObject serializes to an equivalent JSON object\n L extends LiveObject<infer O> ? ToImmutable<O> :\n\n // A LiveMap serializes to a JSON object with string-V pairs\n L extends LiveMap<infer K, infer V> ? ReadonlyMap<K, ToImmutable<V>> :\n\n // Any LsonObject recursively becomes a JsonObject\n L extends LsonObject ?\n { readonly [K in keyof L]: ToImmutable<Exclude<L[K], undefined>>\n | (undefined extends L[K] ? undefined : never) } :\n\n // Any Json value already is a legal Json value\n L extends Json ? 
L :\n\n // Otherwise, this is not possible\n never;\n\n/**\n * Returns PlainLson for a given Json or LiveStructure, suitable for calling the storage init api\n */\nexport function toPlainLson(lson: Lson): PlainLson {\n if (lson instanceof LiveObject) {\n return {\n liveblocksType: \"LiveObject\",\n data: Object.fromEntries(\n Object.entries(lson.toObject()).flatMap(([key, value]) =>\n value !== undefined ? [[key, toPlainLson(value)]] : []\n )\n ),\n };\n } else if (lson instanceof LiveMap) {\n return {\n liveblocksType: \"LiveMap\",\n data: Object.fromEntries(\n [...lson].map(([key, value]) => [key, toPlainLson(value)])\n ),\n };\n } else if (lson instanceof LiveList) {\n return {\n liveblocksType: \"LiveList\",\n data: [...lson].map((item) => toPlainLson(item)),\n };\n } else {\n return lson;\n }\n}\n","import {\n findNonSerializableValue,\n isLiveList,\n isLiveObject,\n} from \"./crdts/liveblocks-helpers\";\nimport { LiveList } from \"./crdts/LiveList\";\nimport { LiveMap } from \"./crdts/LiveMap\";\nimport { LiveObject } from \"./crdts/LiveObject\";\nimport { LiveRegister } from \"./crdts/LiveRegister\";\nimport type { LiveNode, Lson, LsonObject, ToJson } from \"./crdts/Lson\";\nimport type { StorageUpdate } from \"./crdts/StorageUpdates\";\nimport * as console from \"./lib/fancy-console\";\nimport { isPlainObject } from \"./lib/guards\";\nimport type { Json, JsonObject } from \"./lib/Json\";\nimport { isJsonObject } from \"./lib/Json\";\n\nfunction lsonObjectToJson<O extends LsonObject>(\n obj: O\n): { [K in keyof O]: Json } {\n const result = {} as { [K in keyof O]: Json };\n for (const key in obj) {\n const val = obj[key];\n if (val !== undefined) {\n result[key] = lsonToJson(val);\n }\n }\n return result;\n}\n\nexport function liveObjectToJson<O extends LsonObject>(\n liveObject: LiveObject<O>\n): { [K in keyof O]: Json } {\n return lsonObjectToJson(liveObject.toObject());\n}\n\nfunction liveMapToJson<TKey extends string>(\n map: LiveMap<TKey, Lson>\n): { [K in TKey]: Json } {\n const result = {} as { [K in TKey]: Json };\n for (const [key, value] of map.entries()) {\n result[key] = lsonToJson(value);\n }\n return result;\n}\n\nfunction lsonListToJson(value: Lson[]): Json[] {\n return value.map(lsonToJson);\n}\n\nfunction liveListToJson(value: LiveList<Lson>): Json[] {\n return lsonListToJson(value.toArray());\n}\n\nexport function lsonToJson(value: Lson): Json {\n // Check for LiveStructure datastructures first\n if (value instanceof LiveObject) {\n return liveObjectToJson(value);\n } else if (value instanceof LiveList) {\n return liveListToJson(value);\n } else if (value instanceof LiveMap) {\n return liveMapToJson(value);\n } else if (value instanceof LiveRegister) {\n // NOTE: This branch should never be taken, because LiveRegister isn't a valid Lson value\n return value.data as Json;\n }\n\n // Then for composite Lson values\n if (Array.isArray(value)) {\n return lsonListToJson(value);\n } else if (isPlainObject(value)) {\n return lsonObjectToJson(value);\n }\n\n // Finally, if value is an LsonScalar, then it's also a valid JsonScalar\n return value;\n}\n\n/**\n * Deeply converts all nested lists to LiveLists, and all nested objects to\n * LiveObjects.\n *\n * As such, the returned result will not contain any Json arrays or Json\n * objects anymore.\n */\nfunction deepLiveify(value: Lson | LsonObject): Lson {\n if (Array.isArray(value)) {\n return new LiveList(value.map(deepLiveify));\n } else if (isPlainObject(value)) {\n const init: LsonObject = {};\n for (const key in value) 
{\n const val = value[key];\n if (val === undefined) {\n continue;\n }\n init[key] = deepLiveify(val);\n }\n return new LiveObject(init);\n } else {\n return value;\n }\n}\n\nexport function patchLiveList<T extends Lson>(\n liveList: LiveList<T>,\n prev: Array<T>,\n next: Array<T>\n): void {\n let i = 0;\n let prevEnd = prev.length - 1;\n let nextEnd = next.length - 1;\n\n let prevNode = prev[0];\n let nextNode = next[0];\n\n /**\n * For A,B,C => A,B,C,D\n * i = 3, prevEnd = 2, nextEnd = 3\n *\n * For A,B,C => B,C\n * i = 2, prevEnd = 2, nextEnd = 1\n *\n * For B,C => A,B,C\n * i = 0, pre\n */\n\n outer: {\n while (prevNode === nextNode) {\n ++i;\n if (i > prevEnd || i > nextEnd) {\n break outer;\n }\n prevNode = prev[i];\n nextNode = next[i];\n }\n\n prevNode = prev[prevEnd];\n nextNode = next[nextEnd];\n\n while (prevNode === nextNode) {\n prevEnd--;\n nextEnd--;\n\n if (i > prevEnd || i > nextEnd) {\n break outer;\n }\n\n prevNode = prev[prevEnd];\n nextNode = next[nextEnd];\n }\n }\n\n if (i > prevEnd) {\n if (i <= nextEnd) {\n while (i <= nextEnd) {\n liveList.insert(deepLiveify(next[i]) as T, i);\n // ^^^^ FIXME Not entirely true\n i++;\n }\n }\n } else if (i > nextEnd) {\n let localI = i;\n while (localI <= prevEnd) {\n liveList.delete(i);\n localI++;\n }\n } else {\n while (i <= prevEnd && i <= nextEnd) {\n prevNode = prev[i];\n nextNode = next[i];\n const liveListNode = liveList.get(i);\n\n if (\n isLiveObject(liveListNode) &&\n isPlainObject(prevNode) &&\n isPlainObject(nextNode)\n ) {\n patchLiveObject(liveListNode, prevNode, nextNode);\n } else {\n liveList.set(i, deepLiveify(nextNode) as T);\n // ^^^^ FIXME Not entirely true\n }\n\n i++;\n }\n while (i <= nextEnd) {\n liveList.insert(deepLiveify(next[i]) as T, i);\n // ^^^^ FIXME Not entirely true\n i++;\n }\n let localI = i;\n while (localI <= prevEnd) {\n liveList.delete(i);\n localI++;\n }\n }\n}\n\nexport function patchLiveObjectKey<\n O extends LsonObject,\n K extends keyof O,\n V extends Json,\n>(liveObject: LiveObject<O>, key: K, prev?: V, next?: V): void {\n if (process.env.NODE_ENV !== \"production\") {\n const nonSerializableValue = findNonSerializableValue(next);\n if (nonSerializableValue) {\n console.error(\n `New state path: '${nonSerializableValue.path}' value: '${String(\n nonSerializableValue.value\n )}' is not serializable.\\nOnly serializable value can be synced with Liveblocks.`\n );\n return;\n }\n }\n\n const value = liveObject.get(key);\n\n if (next === undefined) {\n liveObject.delete(key);\n } else if (value === undefined) {\n liveObject.set(key, deepLiveify(next) as O[K]);\n // ^^^^^^^ FIXME Not entirely true\n } else if (prev === next) {\n return;\n } else if (isLiveList(value) && Array.isArray(prev) && Array.isArray(next)) {\n patchLiveList(value, prev, next);\n } else if (\n isLiveObject(value) &&\n isPlainObject(prev) &&\n isPlainObject(next)\n ) {\n patchLiveObject(value, prev, next);\n } else {\n liveObject.set(key, deepLiveify(next) as O[K]);\n // ^^^^^^^ FIXME Not entirely true\n }\n}\n\nexport function patchLiveObject<O extends LsonObject>(\n root: LiveObject<O>,\n prev: ToJson<O>,\n next: ToJson<O>\n): void {\n const updates: Partial<O> = {};\n\n for (const key in next) {\n patchLiveObjectKey(root, key, prev[key] as Json, next[key] as Json);\n }\n\n for (const key in prev) {\n if (next[key] === undefined) {\n root.delete(key);\n }\n }\n\n if (Object.keys(updates).length > 0) {\n root.update(updates);\n }\n}\n\nfunction getParentsPath(node: LiveNode): Array<string | number> {\n const path = 
[];\n while (node.parent.type === \"HasParent\") {\n if (isLiveList(node.parent.node)) {\n path.push(node.parent.node._indexOfPosition(node.parent.key));\n } else {\n path.push(node.parent.key);\n }\n node = node.parent.node;\n }\n return path;\n}\n\n//\n// TODO: Remove `patchImmutableObject`!\n//\n// This helper is now only used internally, to support our Zustand and\n// Redux packages. We should be able to reimplement those using the new\n// `.toImmutable()` APIs.\n//\nexport function legacy_patchImmutableObject<TState extends JsonObject>(\n state: TState,\n updates: StorageUpdate[]\n): TState {\n return updates.reduce(\n (state, update) => legacy_patchImmutableObjectWithUpdate(state, update),\n state\n );\n}\n\nfunction legacy_patchImmutableObjectWithUpdate<TState extends JsonObject>(\n state: TState,\n update: StorageUpdate\n): TState {\n const path = getParentsPath(update.node);\n return legacy_patchImmutableNode(state, path, update);\n}\n\nfunction legacy_patchImmutableNode<S extends Json>(\n state: S,\n path: Array<string | number>,\n update: StorageUpdate\n): S {\n // FIXME: Split this function up into a few smaller ones! In each of them,\n // the types can be define much more narrowly and correctly, and there will\n // be less type shoehorning necessary.\n\n const pathItem = path.pop();\n if (pathItem === undefined) {\n switch (update.type) {\n case \"LiveObject\": {\n if (!isJsonObject(state)) {\n throw new Error(\n \"Internal: received update on LiveObject but state was not an object\"\n );\n }\n\n const newState: JsonObject = Object.assign({}, state);\n\n for (const key in update.updates) {\n if (update.updates[key]?.type === \"update\") {\n const val = update.node.get(key);\n if (val !== undefined) {\n newState[key] = lsonToJson(val);\n }\n } else if (update.updates[key]?.type === \"delete\") {\n delete newState[key];\n }\n }\n\n return newState as S;\n // ^^^^\n // FIXME Not completely true, because we could have been\n // updating keys from StorageUpdate here that aren't in S,\n // technically.\n }\n\n case \"LiveList\": {\n if (!Array.isArray(state)) {\n throw new Error(\n \"Internal: received update on LiveList but state was not an array\"\n );\n }\n\n let newState: Json[] = state.map((x: Json) => x);\n\n for (const listUpdate of update.updates) {\n if (listUpdate.type === \"set\") {\n newState = newState.map((item, index) =>\n index === listUpdate.index ? 
lsonToJson(listUpdate.item) : item\n );\n } else if (listUpdate.type === \"insert\") {\n if (listUpdate.index === newState.length) {\n newState.push(lsonToJson(listUpdate.item));\n } else {\n newState = [\n ...newState.slice(0, listUpdate.index),\n lsonToJson(listUpdate.item),\n ...newState.slice(listUpdate.index),\n ];\n }\n } else if (listUpdate.type === \"delete\") {\n newState.splice(listUpdate.index, 1);\n } else if (listUpdate.type === \"move\") {\n if (listUpdate.previousIndex > listUpdate.index) {\n newState = [\n ...newState.slice(0, listUpdate.index),\n lsonToJson(listUpdate.item),\n ...newState.slice(listUpdate.index, listUpdate.previousIndex),\n ...newState.slice(listUpdate.previousIndex + 1),\n ];\n } else {\n newState = [\n ...newState.slice(0, listUpdate.previousIndex),\n ...newState.slice(\n listUpdate.previousIndex + 1,\n listUpdate.index + 1\n ),\n lsonToJson(listUpdate.item),\n ...newState.slice(listUpdate.index + 1),\n ];\n }\n }\n }\n\n return newState as S;\n // ^^^^\n // FIXME Not completely true, because we could have been\n // updating keys from StorageUpdate here that aren't in S,\n // technically.\n }\n\n case \"LiveMap\": {\n if (!isJsonObject(state)) {\n throw new Error(\n \"Internal: received update on LiveMap but state was not an object\"\n );\n }\n const newState: JsonObject = Object.assign({}, state);\n\n for (const key in update.updates) {\n if (update.updates[key]?.type === \"update\") {\n const value = update.node.get(key);\n if (value !== undefined) {\n newState[key] = lsonToJson(value);\n }\n } else if (update.updates[key]?.type === \"delete\") {\n delete newState[key];\n }\n }\n\n return newState as S;\n // ^^^^\n // FIXME Not completely true, because we could have been\n // updating keys from StorageUpdate here that aren't in S,\n // technically.\n }\n }\n }\n\n if (Array.isArray(state)) {\n const newArray: Json[] = [...state];\n newArray[pathItem as number] = legacy_patchImmutableNode(\n state[pathItem as number],\n path,\n update\n );\n return newArray as S;\n // ^^^^\n // FIXME Not completely true, because we could have been\n // updating indexes from StorageUpdate here that aren't in S,\n // technically.\n } else if (isJsonObject(state)) {\n const node = state[pathItem];\n if (node === undefined) {\n return state;\n } else {\n const stateAsObj: JsonObject = state;\n return {\n ...stateAsObj,\n [pathItem]: legacy_patchImmutableNode(node, path, update),\n } as S;\n // ^\n // FIXME Not completely true, because we could have been updating\n // indexes from StorageUpdate here that aren't in S, technically.\n }\n } else {\n return state;\n }\n}\n","/**\n * Like `new AbortController()`, but where the result can be unpacked\n * safely, i.e. `const { signal, abort } = makeAbortController()`.\n *\n * This unpacking is unsafe to do with a regular `AbortController` because\n * the `abort` method is not bound to the controller instance.\n *\n * In addition to this, you can also pass an optional (external)\n * AbortSignal to \"wrap\", in which case the returned signal will be in\n * aborted state when either the signal is aborted externally or\n * internally.\n */\nexport function makeAbortController(externalSignal?: AbortSignal): {\n signal: AbortSignal;\n abort: (reason?: unknown) => void;\n} {\n const ctl = new AbortController();\n return {\n signal: externalSignal\n ? 
AbortSignal.any([ctl.signal, externalSignal])\n : ctl.signal,\n abort: ctl.abort.bind(ctl),\n };\n}\n","import * as console from \"./fancy-console\";\n\n/**\n * Tools to help with the controlled deprecation of public APIs.\n *\n * First warn, then error, then remove eventually.\n */\n\n// Keeps a set of deprecation messages in memory that it has warned about\n// already. There will be only one deprecation message in the console, no\n// matter how often it gets called.\nconst _emittedDeprecationWarnings: Set<string> = new Set();\n\n/**\n * Displays a deprecation warning in the dev console. Only in dev mode, and\n * only once per message/key. In production, this is a no-op.\n */\n// istanbul ignore next\nexport function deprecate(message: string, key = message): void {\n if (process.env.NODE_ENV !== \"production\") {\n if (!_emittedDeprecationWarnings.has(key)) {\n _emittedDeprecationWarnings.add(key);\n console.errorWithTitle(\"Deprecation warning\", message);\n }\n }\n}\n\n/**\n * Conditionally displays a deprecation warning in the dev\n * console if the first argument is truthy. Only in dev mode, and\n * only once per message/key. In production, this is a no-op.\n */\n// istanbul ignore next\nexport function deprecateIf(\n condition: unknown,\n message: string,\n key = message\n): void {\n if (process.env.NODE_ENV !== \"production\") {\n // istanbul ignore if\n if (condition) {\n deprecate(message, key);\n }\n }\n}\n\n/**\n * Throws a deprecation error in the dev console.\n *\n * Only triggers in dev mode. In production, this is a no-op.\n */\n// istanbul ignore next\nexport function throwUsageError(message: string): void {\n if (process.env.NODE_ENV !== \"production\") {\n const usageError = new Error(message);\n usageError.name = \"Usage error\";\n console.errorWithTitle(\"Usage error\", message);\n throw usageError;\n }\n}\n\n/**\n * Conditionally throws a usage error in the dev console if the first argument\n * is truthy. Use this to \"escalate\" usage patterns that in previous versions\n * we already warned about with deprecation warnings.\n *\n * Only has effect in dev mode. In production, this is a no-op.\n */\n// istanbul ignore next\nexport function errorIf(condition: unknown, message: string): void {\n if (process.env.NODE_ENV !== \"production\") {\n if (condition) {\n throwUsageError(message);\n }\n }\n}\n","import { FSM } from \"./fsm\";\n\nexport type Poller = {\n /**\n * Increments the subscriber count for this poller. If it becomes > 0, the\n * poller will be enabled.\n */\n inc(): void;\n /**\n * Decrements the subscriber count for this poller. If it becomes = 0, the\n * poller will be disabled.\n */\n dec(): void;\n\n /**\n * Polls immediately only if it has been more than `maxStaleTimeMs` milliseconds since\n * the last poll and no poll is currently in progress. After polling, schedules\n * the next poll at the regular interval.\n */\n pollNowIfStale(): void;\n\n /**\n * Marks the poller as stale. 
This can be used to force the next call\n * to `.pollNowIfStale()` to poll immediately.\n */\n markAsStale(): void;\n\n /**\n * Used in unit tests only.\n * @internal\n */\n setInForeground(condition: boolean): void;\n};\n\ntype Context = {\n inForeground: boolean; // Whether the visibility state is visible\n lastSuccessfulPollAt: number; // The timestamp of the last successful poll (or when the poller was initialized)\n count: number; // Subscriber count\n backoff: number; // Backoff delay in ms\n};\n\ntype State =\n | \"@idle\" //\n | \"@enabled\" //\n | \"@polling\";\n\ntype Event =\n | { type: \"START\" } //\n | { type: \"STOP\" } //\n | { type: \"POLL\" };\n\nconst BACKOFF_DELAYS = [1_000, 2_000, 4_000, 8_000, 10_000] as const;\n\n/**\n * Makes a poller that will call `await callback()` at the desired interval (in\n * millis).\n *\n * The poller has only three public APIs, all side effects:\n * - .inc(): void\n * - .dec(): void\n * - .pollNowIfStale(): void\n *\n * It has the following behaviors/guarantees:\n * - Performing a \"poll\" literally means calling the provided callback (and\n * awaiting it)\n * - It will only ever start polling if .inc() was called (more often than .dec())\n * - It will not _immediately_ poll if .inc() is called. The first poll\n * can be expected no earlier than the specified interval.\n * - If .dec() is called as many times as .inc(), it stops the poller. This\n * means that any next poll will get unscheduled. If .dev() is called while\n * a poll is ongoing, it will still finish that poll, but after that stop\n * further polling.\n * - If the document's visibility state changes to hidden (tab is moved to the\n * background), polling will be paused until the document's made visible again\n * - If the document becomes visible again, the poller will:\n * - Still do nothing if the poller isn't enabled\n * - Still do nothing if the poller is enabled, but the last time a poll\n * happened recently enough (= less than the maxStaleTimeMs, which defaults\n * to infinity)\n * - Trigger a poll right away otherwise. If an existing poll was already\n * scheduled, think of it as if this future poll is \"earlied\" and just\n * happening right now instead\n */\nexport function makePoller(\n callback: (signal: AbortSignal) => Promise<void> | void,\n intervalMs: number,\n options?: {\n maxStaleTimeMs?: number;\n }\n): Poller {\n const startTime = performance.now();\n const doc = typeof document !== \"undefined\" ? document : undefined;\n const win = typeof window !== \"undefined\" ? window : undefined;\n\n const maxStaleTimeMs = options?.maxStaleTimeMs ?? 
Number.POSITIVE_INFINITY;\n const context: Context = {\n inForeground: doc?.visibilityState !== \"hidden\",\n lastSuccessfulPollAt: startTime,\n count: 0,\n backoff: 0,\n };\n\n function mayPoll() {\n return context.count > 0 && context.inForeground;\n }\n\n /**\n * +----------+\n * +-------------------------> | @idle |\n * | else +----------+\n * | | ^\n * | on STOP | | on START\n * | v |\n * +--------+ if mayPoll() +----------+ on POLL +----------+\n * | decide |---------------------> | @enabled | -----------------------> | @polling |\n * +--------+ +----------+ after POLL_INTERVAL +----------+\n * ^ |\n * | |\n * +----------------------------------------------------------------------+\n */\n const fsm = new FSM<object, Event, State>({})\n .addState(\"@idle\")\n .addState(\"@enabled\")\n .addState(\"@polling\");\n\n fsm.addTransitions(\"@idle\", { START: \"@enabled\" });\n fsm.addTransitions(\"@enabled\", { STOP: \"@idle\", POLL: \"@polling\" });\n fsm.addTimedTransition(\n \"@enabled\",\n () => {\n const lastPoll = context.lastSuccessfulPollAt;\n const nextPoll = lastPoll + intervalMs;\n return Math.max(0, nextPoll - performance.now()) + context.backoff;\n },\n \"@polling\"\n );\n\n fsm.onEnterAsync(\n \"@polling\",\n async (_ctx, signal) => {\n await callback(signal);\n if (!signal.aborted) {\n context.lastSuccessfulPollAt = performance.now();\n }\n },\n // When OK\n () => {\n return {\n target: mayPoll() ? \"@enabled\" : \"@idle\",\n effect: () => {\n // Reset backoff delay to 0 if the callback was successful\n context.backoff = 0;\n },\n };\n },\n // When error\n () => {\n return {\n target: mayPoll() ? \"@enabled\" : \"@idle\",\n effect: () => {\n // Increase the backoff delay if an error occured\n context.backoff =\n BACKOFF_DELAYS.find((delay) => delay > context.backoff) ??\n BACKOFF_DELAYS[BACKOFF_DELAYS.length - 1];\n },\n };\n },\n 30_000 // Abort the poll if the callback takes more than 30 seconds to complete\n );\n\n function startOrStop() {\n if (mayPoll()) {\n fsm.send({ type: \"START\" });\n } else {\n fsm.send({ type: \"STOP\" });\n }\n }\n\n function inc() {\n context.count++;\n startOrStop();\n }\n\n function dec() {\n context.count--;\n if (context.count < 0) {\n context.count = 0;\n }\n startOrStop();\n }\n\n function pollNowIfStale() {\n if (performance.now() - context.lastSuccessfulPollAt > maxStaleTimeMs) {\n fsm.send({ type: \"POLL\" });\n }\n }\n\n function markAsStale() {\n // Set the last successful poll timestamp to a stale time\n context.lastSuccessfulPollAt = performance.now() - maxStaleTimeMs - 1;\n }\n\n function setInForeground(inForeground: boolean) {\n context.inForeground = inForeground;\n startOrStop();\n pollNowIfStale(); // Won't do anything if in @idle\n }\n\n function onVisibilityChange() {\n setInForeground(doc?.visibilityState !== \"hidden\");\n }\n\n // NOTE: Currently, poller instances are only ever created and never\n // destroyed. 
If we add a destroy() method in the future, then we should also\n // unregister these event handlers.\n doc?.addEventListener(\"visibilitychange\", onVisibilityChange);\n win?.addEventListener(\"online\", onVisibilityChange);\n win?.addEventListener(\"focus\", pollNowIfStale);\n\n fsm.start();\n return {\n inc,\n dec,\n pollNowIfStale,\n markAsStale,\n\n // Internal API, used by unit tests only to simulate visibility events\n setInForeground,\n };\n}\n","import type { DAD } from \"../globals/augmentation\";\nimport type { DateToString } from \"../lib/DateToString\";\nimport type { NotificationKind } from \"./NotificationSettings\";\n\nexport type SubscriptionData<K extends keyof DAD = keyof DAD> = {\n kind: NotificationKind<K>;\n subjectId: string;\n createdAt: Date;\n};\n\nexport type SubscriptionDataPlain = DateToString<SubscriptionData>;\n\nexport type UserSubscriptionData<K extends keyof DAD = keyof DAD> =\n SubscriptionData<K> & {\n userId: string;\n };\n\nexport type UserSubscriptionDataPlain = DateToString<UserSubscriptionData>;\n\nexport type SubscriptionDeleteInfo = {\n type: \"deletedSubscription\";\n kind: NotificationKind;\n subjectId: string;\n deletedAt: Date;\n};\n\nexport type SubscriptionDeleteInfoPlain = DateToString<SubscriptionDeleteInfo>;\n\nexport type SubscriptionKey = `${NotificationKind}:${string}`;\n\nexport function getSubscriptionKey(\n subscription: SubscriptionData | SubscriptionDeleteInfo\n): SubscriptionKey;\nexport function getSubscriptionKey(\n kind: NotificationKind,\n subjectId: string\n): SubscriptionKey;\nexport function getSubscriptionKey(\n subscription: SubscriptionData | SubscriptionDeleteInfo | NotificationKind,\n subjectId?: string\n): SubscriptionKey {\n if (typeof subscription === \"string\") {\n return `${subscription}:${subjectId}`;\n }\n\n return `${subscription.kind}:${subscription.subjectId}`;\n}\n","import type { DP, DU } from \"../globals/augmentation\";\nimport type { JsonObject } from \"../lib/Json\";\nimport type { Relax } from \"../lib/Relax\";\nimport type { Resolve } from \"../lib/Resolve\";\nimport type { BaseUserMeta } from \"../protocol/BaseUserMeta\";\nimport type { User } from \"./User\";\n\nexport type InternalOthersEvent<\n P extends JsonObject,\n U extends BaseUserMeta,\n> = Relax<\n | { type: \"leave\"; user: User<P, U> }\n | { type: \"enter\"; user: User<P, U> }\n | {\n type: \"update\";\n user: User<P, U>;\n updates: Partial<P>;\n }\n | { type: \"reset\" }\n>;\n\nexport type OthersEvent<\n P extends JsonObject = DP,\n U extends BaseUserMeta = DU,\n> = Resolve<\n InternalOthersEvent<P, U> & {\n others: readonly User<P, U>[];\n }\n>;\n\nexport enum TextEditorType {\n Lexical = \"lexical\",\n TipTap = \"tiptap\",\n BlockNote = \"blocknote\",\n}\n\nexport type BadgeLocation =\n | \"top-right\"\n | \"bottom-right\"\n | \"bottom-left\"\n | \"top-left\";\n","import { detectDupes } from \"./dupe-detection\";\nimport { PKG_FORMAT, PKG_NAME, PKG_VERSION } from \"./version\";\n\ndetectDupes(PKG_NAME, PKG_VERSION, PKG_FORMAT);\n\n/**\n * PRIVATE / INTERNAL APIS\n * -----------------------\n *\n * This module is intended for internal use only, PLEASE DO NOT RELY ON ANY OF\n * THE EXPORTS IN HERE. These are implementation details that can change at any\n * time and without announcement. This module purely exists to share code\n * between the several Liveblocks packages.\n *\n * But since you're so deep inside Liveblocks code... 
we're hiring!\n * https://join.team/liveblocks ;)\n */\n\nexport type {\n AiOpaqueToolDefinition,\n AiOpaqueToolInvocationProps,\n AiToolDefinition,\n AiToolExecuteCallback,\n AiToolExecuteContext,\n AiToolInvocationProps,\n AiToolTypePack,\n LayerKey,\n WithNavigation,\n} from \"./ai\";\nexport { defineAiTool } from \"./ai\";\nexport type {\n Client,\n ClientOptions,\n EnterOptions,\n OpaqueClient,\n PrivateClientApi,\n ResolveGroupsInfoArgs,\n ResolveMentionSuggestionsArgs,\n ResolveRoomsInfoArgs,\n ResolveUsersArgs,\n SyncStatus,\n} from \"./client\";\nexport { checkBounds, createClient } from \"./client\";\nexport type {\n CommentBodyLinkElementArgs,\n CommentBodyMentionElementArgs,\n CommentBodyParagraphElementArgs,\n CommentBodyTextElementArgs,\n StringifyCommentBodyElements,\n StringifyCommentBodyOptions,\n} from \"./comments/comment-body\";\nexport {\n getMentionsFromCommentBody,\n html,\n htmlSafe,\n isCommentBodyLink,\n isCommentBodyMention,\n isCommentBodyText,\n resolveMentionsInCommentBody,\n stringifyCommentBody,\n} from \"./comments/comment-body\";\nexport type { BaseAuthResult, Delegates } from \"./connection\";\nexport type { LostConnectionEvent, Status } from \"./connection\";\nexport { MENTION_CHARACTER } from \"./constants\";\nexport {\n convertToCommentData,\n convertToCommentUserReaction,\n convertToGroupData,\n convertToInboxNotificationData,\n convertToSubscriptionData,\n convertToThreadData,\n convertToUserSubscriptionData,\n} from \"./convert-plain-data\";\nexport type {\n CreateManagedPoolOptions,\n ManagedPool,\n} from \"./crdts/AbstractCrdt\";\nexport { createManagedPool } from \"./crdts/AbstractCrdt\";\nexport { cloneLson, isLiveNode } from \"./crdts/liveblocks-helpers\";\nexport { LiveList } from \"./crdts/LiveList\";\nexport { LiveMap } from \"./crdts/LiveMap\";\nexport { LiveObject } from \"./crdts/LiveObject\";\nexport type {\n LiveNode,\n LiveStructure,\n Lson,\n LsonObject,\n ToJson,\n} from \"./crdts/Lson\";\nexport type {\n LiveListUpdate,\n LiveMapUpdate,\n LiveObjectUpdate,\n StorageUpdate,\n} from \"./crdts/StorageUpdates\";\nexport type { ToImmutable } from \"./crdts/utils\";\nexport { toPlainLson } from \"./crdts/utils\";\nexport type {\n DAD,\n DCM,\n DE,\n DGI,\n DP,\n DRI,\n DS,\n DTM,\n DU,\n KDAD,\n} from \"./globals/augmentation\";\nexport {\n legacy_patchImmutableObject,\n lsonToJson,\n patchLiveObjectKey,\n} from \"./immutable\";\nexport { kInternal } from \"./internal\";\nexport { makeAbortController } from \"./lib/abortController\";\nexport { assert, assertNever, nn } from \"./lib/assert\";\nexport type {\n AsyncError,\n AsyncLoading,\n AsyncResult,\n AsyncSuccess,\n} from \"./lib/AsyncResult\";\nexport { autoRetry, HttpError } from \"./lib/autoRetry\";\nexport { chunk } from \"./lib/chunk\";\nexport { Promise_withResolvers } from \"./lib/controlledPromise\";\nexport {\n createCommentAttachmentId,\n createCommentId,\n createInboxNotificationId,\n createThreadId,\n} from \"./lib/createIds\";\nexport { DefaultMap } from \"./lib/DefaultMap\";\nexport {\n deprecate,\n deprecateIf,\n errorIf,\n throwUsageError,\n} from \"./lib/deprecation\";\nexport { Deque } from \"./lib/Deque\";\nexport type {\n EventSource,\n Observable,\n UnsubscribeCallback,\n} from \"./lib/EventSource\";\nexport { makeEventSource } from \"./lib/EventSource\";\nexport * as console from \"./lib/fancy-console\";\nexport { freeze } from \"./lib/freeze\";\nexport {\n isNumberOperator,\n isPlainObject,\n isStartsWithOperator,\n} from \"./lib/guards\";\nexport type { Json, 
JsonArray, JsonObject, JsonScalar } from \"./lib/Json\";\nexport { isJsonArray, isJsonObject, isJsonScalar } from \"./lib/Json\";\nexport { nanoid } from \"./lib/nanoid\";\nexport type { NoInfr } from \"./lib/NoInfer\";\nexport { objectToQuery } from \"./lib/objectToQuery\";\nexport type { Poller } from \"./lib/Poller\";\nexport { makePoller } from \"./lib/Poller\";\nexport { asPos, makePosition } from \"./lib/position\";\nexport type { Relax } from \"./lib/Relax\";\nexport type { Resolve } from \"./lib/Resolve\";\nexport { shallow, shallow2 } from \"./lib/shallow\";\nexport type { ISignal, SignalType } from \"./lib/signals\";\nexport { batch, DerivedSignal, MutableSignal, Signal } from \"./lib/signals\";\nexport { SortedList } from \"./lib/SortedList\";\nexport { stableStringify } from \"./lib/stringify\";\nexport type { QueryParams, URLSafeString } from \"./lib/url\";\nexport { generateUrl, isUrl, sanitizeUrl, url, urljoin } from \"./lib/url\";\nexport type {\n Brand,\n DistributiveOmit,\n ISODateString,\n WithOptional,\n WithRequired,\n} from \"./lib/utils\";\nexport {\n b64decode,\n compactObject,\n entries,\n findLastIndex,\n keys,\n mapValues,\n memoizeOnSuccess,\n raise,\n tryParseJson,\n wait,\n withTimeout,\n} from \"./lib/utils\";\nexport { warnOnce, warnOnceIf } from \"./lib/warnings\";\nexport type {\n ContextualPromptContext,\n ContextualPromptResponse,\n} from \"./protocol/Ai\";\nexport type { CustomAuthenticationResult } from \"./protocol/Authentication\";\nexport { Permission } from \"./protocol/AuthToken\";\nexport type { BaseActivitiesData } from \"./protocol/BaseActivitiesData\";\nexport type { BaseGroupInfo } from \"./protocol/BaseGroupInfo\";\nexport type { BaseRoomInfo } from \"./protocol/BaseRoomInfo\";\nexport type { BaseUserMeta, IUserInfo } from \"./protocol/BaseUserMeta\";\nexport type {\n BroadcastEventClientMsg,\n ClientMsg,\n FetchStorageClientMsg,\n FetchYDocClientMsg,\n UpdatePresenceClientMsg,\n UpdateStorageClientMsg,\n UpdateYDocClientMsg,\n} from \"./protocol/ClientMsg\";\nexport { ClientMsgCode } from \"./protocol/ClientMsg\";\nexport type { BaseMetadata } from \"./protocol/Comments\";\nexport type {\n CommentBody,\n CommentBodyBlockElement,\n CommentBodyElement,\n CommentBodyInlineElement,\n CommentBodyLink,\n CommentBodyMention,\n CommentBodyParagraph,\n CommentBodyText,\n} from \"./protocol/Comments\";\nexport type {\n CommentAttachment,\n CommentData,\n CommentDataPlain,\n CommentLocalAttachment,\n CommentMixedAttachment,\n CommentReaction,\n} from \"./protocol/Comments\";\nexport type {\n CommentUserReaction,\n CommentUserReactionPlain,\n} from \"./protocol/Comments\";\nexport type { QueryMetadata } from \"./protocol/Comments\";\nexport type {\n SearchCommentsResult,\n ThreadData,\n ThreadDataPlain,\n ThreadDataWithDeleteInfo,\n} from \"./protocol/Comments\";\nexport type { ThreadDeleteInfo } from \"./protocol/Comments\";\nexport type {\n GroupData,\n GroupDataPlain,\n GroupMemberData,\n GroupScopes,\n} from \"./protocol/Groups\";\nexport type {\n ActivityData,\n InboxNotificationCustomData,\n InboxNotificationCustomDataPlain,\n InboxNotificationData,\n InboxNotificationDataPlain,\n InboxNotificationTextMentionData,\n InboxNotificationTextMentionDataPlain,\n InboxNotificationThreadData,\n InboxNotificationThreadDataPlain,\n} from \"./protocol/InboxNotifications\";\nexport type { InboxNotificationDeleteInfo } from \"./protocol/InboxNotifications\";\nexport type {\n NotificationChannel,\n NotificationChannelSettings,\n NotificationKind,\n 
NotificationSettings,\n NotificationSettingsPlain,\n PartialNotificationSettings,\n} from \"./protocol/NotificationSettings\";\nexport {\n createNotificationSettings,\n isNotificationChannelEnabled,\n patchNotificationSettings,\n} from \"./protocol/NotificationSettings\";\nexport type {\n ClientWireOp,\n CreateListOp,\n CreateMapOp,\n CreateObjectOp,\n CreateOp,\n CreateRegisterOp,\n DeleteCrdtOp,\n DeleteObjectKeyOp,\n HasOpId,\n IgnoredOp,\n Op,\n ServerWireOp,\n SetParentKeyOp,\n UpdateObjectOp,\n} from \"./protocol/Op\";\nexport { OpCode } from \"./protocol/Op\";\nexport type {\n RoomSubscriptionSettings,\n UserRoomSubscriptionSettings,\n} from \"./protocol/RoomSubscriptionSettings\";\nexport type {\n BroadcastedEventServerMsg,\n CommentsEventServerMsg,\n RejectedStorageOpServerMsg,\n RoomStateServerMsg,\n ServerMsg,\n StorageChunkServerMsg,\n UpdatePresenceServerMsg,\n UpdateStorageServerMsg,\n UserJoinServerMsg,\n UserLeftServerMsg,\n YDocUpdateServerMsg,\n} from \"./protocol/ServerMsg\";\nexport { ServerMsgCode } from \"./protocol/ServerMsg\";\nexport type {\n ChildStorageNode,\n CompactChildNode,\n CompactListNode,\n CompactMapNode,\n CompactNode,\n CompactObjectNode,\n CompactRegisterNode,\n CompactRootNode,\n ListStorageNode,\n MapStorageNode,\n NodeMap,\n NodeStream,\n ObjectStorageNode,\n RegisterStorageNode,\n RootStorageNode,\n SerializedChild,\n SerializedCrdt,\n SerializedList,\n SerializedMap,\n SerializedObject,\n SerializedRegister,\n SerializedRootObject,\n StorageNode,\n} from \"./protocol/StorageNode\";\nexport {\n compactNodesToNodeStream,\n CrdtType,\n isListStorageNode,\n isMapStorageNode,\n isObjectStorageNode,\n isRegisterStorageNode,\n isRootStorageNode,\n nodeStreamToCompactNodes,\n} from \"./protocol/StorageNode\";\nexport type {\n SubscriptionData,\n SubscriptionDataPlain,\n SubscriptionDeleteInfo,\n SubscriptionDeleteInfoPlain,\n SubscriptionKey,\n UserSubscriptionData,\n UserSubscriptionDataPlain,\n} from \"./protocol/Subscriptions\";\nexport { getSubscriptionKey } from \"./protocol/Subscriptions\";\nexport type { UrlMetadata } from \"./protocol/UrlMetadata\";\nexport type { HistoryVersion } from \"./protocol/VersionHistory\";\nexport type {\n IYjsProvider,\n LargeMessageStrategy,\n PrivateRoomApi,\n YjsSyncStatus,\n} from \"./room\";\nexport type {\n BroadcastOptions,\n History,\n OpaqueRoom,\n OptionalTupleUnless,\n PartialUnless,\n Room,\n RoomEventMessage,\n StorageStatus,\n SyncSource,\n} from \"./room\";\nexport type { GetThreadsOptions, UploadAttachmentOptions } from \"./room\";\nexport type {\n AiAssistantContentPart,\n AiAssistantMessage,\n AiChat,\n AiChatMessage,\n AiChatsQuery,\n AiKnowledgeRetrievalPart,\n AiKnowledgeSource,\n AiReasoningPart,\n AiRetrievalPart,\n AiSourcesPart,\n AiTextPart,\n AiToolInvocationPart,\n AiUrlSource,\n AiUserMessage,\n AiWebRetrievalPart,\n CopilotId,\n Cursor,\n MessageId,\n RenderableToolResultResponse,\n ToolResultResponse,\n} from \"./types/ai\";\nexport type { Awaitable } from \"./types/Awaitable\";\nexport type { Immutable } from \"./types/Immutable\";\nexport type { InferFromSchema } from \"./types/InferFromSchema\";\nexport type {\n IWebSocket,\n IWebSocketCloseEvent,\n IWebSocketEvent,\n IWebSocketInstance,\n IWebSocketMessageEvent,\n} from \"./types/IWebSocket\";\nexport { WebsocketCloseCodes } from \"./types/IWebSocket\";\nexport type { LiveblocksErrorContext } from \"./types/LiveblocksError\";\nexport { LiveblocksError } from \"./types/LiveblocksError\";\nexport type { ParentToChildNodeMap } from 
\"./types/NodeMap\";\nexport type { OthersEvent } from \"./types/Others\";\nexport { TextEditorType } from \"./types/Others\";\nexport type { Patchable } from \"./types/Patchable\";\nexport type {\n PlainLson,\n PlainLsonFields,\n PlainLsonList,\n PlainLsonMap,\n PlainLsonObject,\n} from \"./types/PlainLson\";\nexport type { User } from \"./types/User\";\nexport { detectDupes };\nexport type {\n GroupMentionData,\n MentionData,\n UserMentionData,\n} from \"./protocol/MentionData\";\n\n/**\n * Helper type to help users adopt to Lson types from interface definitions.\n * You should only use this to wrap interfaces you don't control. For more\n * information, see\n * https://liveblocks.io/docs/guides/limits#lson-constraint-and-interfaces\n */\n// prettier-ignore\nexport type EnsureJson<T> =\n // Retain all valid `JSON` fields\n T extends Json ? T :\n // Retain all valid arrays\n T extends Array<infer I> ? (EnsureJson<I>)[] :\n // Retain `unknown` fields, but just treat them as if they're Json | undefined\n [unknown] extends [T] ? Json | undefined :\n // Dates become strings when serialized to JSON\n T extends Date ? string :\n // Remove functions\n T extends (...args: any[]) => any ? never :\n // Resolve all other values explicitly\n { [K in keyof T as EnsureJson<T[K]> extends never ? never : K]: EnsureJson<T[K]> };\n\n// Support for DevTools\nimport type * as DevToolsMsg from \"./devtools/protocol\";\nexport type { DevToolsMsg };\nimport type { Json } from \"./lib/Json\";\nimport type * as DevTools from \"./types/DevToolsTreeNode\";\nexport type { DevTools };\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Json, JsonObject } from \"@liveblocks/core\";\nimport { ClientMsgCode } from \"@liveblocks/core\";\nimport type { Decoder } from \"decoders\";\nimport {\n array,\n boolean,\n constant,\n number,\n object,\n optional,\n string,\n taggedUnion,\n} from \"decoders\";\n\nimport type {\n BroadcastEventClientMsg,\n ClientMsg,\n FetchStorageClientMsg,\n FetchYDocClientMsg,\n UpdatePresenceClientMsg,\n UpdateStorageClientMsg,\n UpdateYDocClientMsg,\n} from \"~/protocol\";\n\nimport { jsonObjectYolo, jsonYolo } from \"./jsonYolo\";\nimport { op } from \"./Op\";\nimport type { YUpdate, YVector } from \"./y-types\";\nimport { guidDecoder } from \"./y-types\";\n\nconst updatePresenceClientMsg: Decoder<UpdatePresenceClientMsg<JsonObject>> =\n object({\n type: constant(ClientMsgCode.UPDATE_PRESENCE),\n data: jsonObjectYolo,\n targetActor: optional(number),\n });\n\nconst broadcastEventClientMsg: Decoder<BroadcastEventClientMsg<Json>> = object({\n type: constant(ClientMsgCode.BROADCAST_EVENT),\n event: jsonYolo,\n});\n\nconst fetchStorageClientMsg: Decoder<FetchStorageClientMsg> = object({\n type: constant(ClientMsgCode.FETCH_STORAGE),\n});\n\nconst updateStorageClientMsg: Decoder<UpdateStorageClientMsg> = object({\n type: constant(ClientMsgCode.UPDATE_STORAGE),\n ops: array(op),\n});\n\nconst fetchYDocClientMsg: Decoder<FetchYDocClientMsg> = object({\n type: constant(ClientMsgCode.FETCH_YDOC),\n vector: string.refineType<YVector>(),\n guid: optional(guidDecoder), // Don't specify to update the root doc\n v2: optional(boolean),\n});\n\nconst updateYDocClientMsg: Decoder<UpdateYDocClientMsg> = object({\n type: constant(ClientMsgCode.UPDATE_YDOC),\n update: string.refineType<YUpdate>(),\n guid: optional(guidDecoder), // Don't specify to update the root doc\n v2: optional(boolean),\n});\n\nexport const clientMsgDecoder: Decoder<ClientMsg<JsonObject, Json>> =\n taggedUnion(\"type\", {\n [ClientMsgCode.UPDATE_PRESENCE]: updatePresenceClientMsg,\n [ClientMsgCode.BROADCAST_EVENT]: broadcastEventClientMsg,\n [ClientMsgCode.FETCH_STORAGE]: fetchStorageClientMsg,\n [ClientMsgCode.UPDATE_STORAGE]: updateStorageClientMsg,\n [ClientMsgCode.FETCH_YDOC]: fetchYDocClientMsg,\n [ClientMsgCode.UPDATE_YDOC]: updateYDocClientMsg,\n }).describe(\"Must be a valid client message\");\n\nexport const transientClientMsgDecoder: Decoder<ClientMsg<JsonObject, Json>> =\n taggedUnion(\"type\", {\n // [ClientMsgCode.UPDATE_PRESENCE]: updatePresenceClientMsg,\n // [ClientMsgCode.BROADCAST_EVENT]: broadcastEventClientMsg,\n // [ClientMsgCode.FETCH_STORAGE]: fetchStorageClientMsg,\n [ClientMsgCode.UPDATE_STORAGE]: updateStorageClientMsg,\n // [ClientMsgCode.FETCH_YDOC]: fetchYDocClientMsg,\n // [ClientMsgCode.UPDATE_YDOC]: updateYDocClientMsg,\n }).describe(\"Must be a valid transient client message\");\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Json, JsonObject } from \"@liveblocks/core\";\nimport type { Decoder } from \"decoders\";\nimport { unknown } from \"decoders\";\n\n/**\n * Drop-in replacement for the `json` decoder from the decoders standard\n * library, but implemented as a no-op. This is, of course, only safe to use in\n * contexts where you know that the input already is valid JSON.\n *\n * You know this for sure, for example, if you're decoding the result of\n * a `JSON.parse()` call.\n *\n * Done for performance reasons!\n */\nexport const jsonYolo: Decoder<Json> = unknown as Decoder<Json>;\n\n/**\n * Drop-in replacement for the `jsonObject` decoder from the decoders standard\n * library, but implemented as just a check for plain old JavaScript object.\n * This is, of course, only safe to use in contexts where you know that the\n * input already is valid JSON.\n *\n * You know this for sure, for example, if you're decoding the result of\n * a `JSON.parse()` call.\n *\n * Done for performance reasons!\n */\nexport const jsonObjectYolo: Decoder<JsonObject> = jsonYolo.refine(\n (value): value is JsonObject =>\n value !== null && typeof value === \"object\" && !Array.isArray(value),\n \"Must be JSON object\"\n);\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { OpCode } from \"@liveblocks/core\";\nimport type { Decoder } from \"decoders\";\nimport { constant, object, optional, string, taggedUnion } from \"decoders\";\n\nimport type {\n ClientWireOp,\n CreateListOp,\n CreateMapOp,\n CreateObjectOp,\n CreateRegisterOp,\n DeleteCrdtOp,\n DeleteObjectKeyOp,\n SetParentKeyOp,\n UpdateObjectOp,\n} from \"~/protocol\";\n\nimport { jsonObjectYolo, jsonYolo } from \"./jsonYolo\";\n\ntype HasOpId = { opId: string };\n\nconst updateObjectOp: Decoder<UpdateObjectOp & HasOpId> = object({\n type: constant(OpCode.UPDATE_OBJECT),\n opId: string,\n id: string,\n data: jsonObjectYolo,\n});\n\nconst createObjectOp: Decoder<CreateObjectOp & HasOpId> = object({\n type: constant(OpCode.CREATE_OBJECT),\n opId: string,\n id: string,\n parentId: string,\n parentKey: string,\n data: jsonObjectYolo,\n intent: optional(constant(\"set\")),\n deletedId: optional(string),\n});\n\nconst createListOp: Decoder<CreateListOp & HasOpId> = object({\n type: constant(OpCode.CREATE_LIST),\n opId: string,\n id: string,\n parentId: string,\n parentKey: string,\n intent: optional(constant(\"set\")),\n deletedId: optional(string),\n});\n\nconst createMapOp: Decoder<CreateMapOp & HasOpId> = object({\n type: constant(OpCode.CREATE_MAP),\n opId: string,\n id: string,\n parentId: string,\n parentKey: string,\n intent: optional(constant(\"set\")),\n deletedId: optional(string),\n});\n\nconst createRegisterOp: Decoder<CreateRegisterOp & HasOpId> = object({\n type: constant(OpCode.CREATE_REGISTER),\n opId: string,\n id: string,\n parentId: string,\n parentKey: string,\n data: jsonYolo,\n intent: optional(constant(\"set\")),\n deletedId: optional(string),\n});\n\nconst deleteCrdtOp: Decoder<DeleteCrdtOp & HasOpId> = object({\n type: constant(OpCode.DELETE_CRDT),\n opId: string,\n id: string,\n});\n\nconst setParentKeyOp: Decoder<SetParentKeyOp & HasOpId> = object({\n type: constant(OpCode.SET_PARENT_KEY),\n opId: string,\n id: string,\n parentKey: string,\n});\n\nconst deleteObjectKeyOp: Decoder<DeleteObjectKeyOp & HasOpId> = object({\n type: constant(OpCode.DELETE_OBJECT_KEY),\n opId: string,\n id: string,\n key: string,\n});\n\nexport const op: Decoder<ClientWireOp> = taggedUnion(\"type\", {\n [OpCode.UPDATE_OBJECT]: updateObjectOp,\n [OpCode.CREATE_OBJECT]: createObjectOp,\n [OpCode.CREATE_LIST]: createListOp,\n [OpCode.CREATE_MAP]: createMapOp,\n [OpCode.CREATE_REGISTER]: createRegisterOp,\n [OpCode.DELETE_CRDT]: deleteCrdtOp,\n [OpCode.SET_PARENT_KEY]: setParentKeyOp,\n [OpCode.DELETE_OBJECT_KEY]: deleteObjectKeyOp,\n});\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Brand } from \"@liveblocks/core\";\nimport { uuid } from \"decoders\";\n\n/**\n * A guid, a unique identifier for a Yjs sub document.\n */\nexport type Guid = Brand<string, \"Guid\">;\n\nexport const guidDecoder = uuid.refineType<Guid>();\n\nexport const ROOT_YDOC_ID = \"root\";\nexport type YDocId = typeof ROOT_YDOC_ID | Guid /* unique ID for subdoc */;\n\n/**\n * Any string that is a valid base64 encoded YJS update.\n */\nexport type YUpdate = Brand<string, \"YUpdate\">;\n\n/**\n * Any string that is a valid base64 encoded YJS state vector.\n */\nexport type YVector = Brand<string, \"YVector\">;\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Json, JsonObject } from \"@liveblocks/core\";\nimport { CrdtType } from \"@liveblocks/core\";\n\nimport type { IReadableSnapshot } from \"~/interfaces\";\n\n// ---------------------------------------------------------------------------\n// Non-streaming version\n// ---------------------------------------------------------------------------\n\n/**\n * Serialize a storage snapshot to a simple JSON representation, returning a\n * full in-memory JsonObject. Faster than snapshotToLossyJson_lazy for\n * small/medium documents because the result can be passed straight to\n * JSON.stringify(). 
This format is lossy — the original storage structure\n * cannot be reconstructed from it, so it's output-only.\n */\nexport function snapshotToLossyJson_eager(\n snapshot: IReadableSnapshot\n): JsonObject {\n try {\n return buildObject(snapshot, \"root\", snapshot.get_root().data);\n } finally {\n snapshot.destroy();\n }\n}\n\nfunction buildNode(snapshot: IReadableSnapshot, id: string): Json {\n const node = snapshot.get_node(id);\n if (node.type === CrdtType.OBJECT) {\n return buildObject(snapshot, id, node.data);\n } else if (node.type === CrdtType.LIST) {\n return buildList(snapshot, id);\n } else if (node.type === CrdtType.MAP) {\n return buildMap(snapshot, id);\n } else {\n return node.data;\n }\n}\n\nfunction buildObject(\n snapshot: IReadableSnapshot,\n id: string,\n staticData: JsonObject\n): JsonObject {\n const data = Object.assign(Object.create(null), staticData) as JsonObject;\n for (const [key, childId] of snapshot.iter_children(id)) {\n data[key] = buildNode(snapshot, childId);\n }\n return data;\n}\n\nfunction buildList(snapshot: IReadableSnapshot, id: string): Json[] {\n const data: Json[] = [];\n for (const [_, childId] of snapshot.iter_children(id)) {\n data.push(buildNode(snapshot, childId));\n }\n return data;\n}\n\nfunction buildMap(snapshot: IReadableSnapshot, id: string): JsonObject {\n const data = Object.create(null) as JsonObject;\n for (const [key, childId] of snapshot.iter_children(id)) {\n data[key] = buildNode(snapshot, childId);\n }\n return data;\n}\n\n// ---------------------------------------------------------------------------\n// Streaming version\n// ---------------------------------------------------------------------------\n\n// Generator-of-strings type alias for brevity of signatures\ntype StringGen = Generator<string, void, never>;\n\n/**\n * Serialize a storage snapshot to a simple JSON representation. This format is\n * easy to consume but lossy — the original storage structure cannot be\n * reconstructed from it, so it's an output-only format. Slower than\n * snapshotToLossyJson_eager but can stream documents that don't fit entirely\n * in memory.\n *\n * This generator yields text chunks that together, when concatenated, form the\n * output JSON document.\n */\nexport function* snapshotToLossyJson_lazy(\n snapshot: IReadableSnapshot\n): StringGen {\n try {\n const staticJson = JSON.stringify(snapshot.get_root().data).slice(1, -1);\n yield* emitObject(snapshot, \"root\", staticJson);\n } finally {\n snapshot.destroy();\n }\n}\n\nfunction* emit(snapshot: IReadableSnapshot, id: string): StringGen {\n const node = snapshot.get_node(id);\n if (node.type === CrdtType.OBJECT) {\n yield* emitObject(snapshot, id, JSON.stringify(node.data).slice(1, -1));\n } else if (node.type === CrdtType.LIST) {\n yield* emitList(snapshot, id);\n } else if (node.type === CrdtType.MAP) {\n yield* emitMap(snapshot, id);\n } else if (node.type === CrdtType.REGISTER) {\n yield JSON.stringify(node.data);\n }\n}\n\n/**\n * @param staticJson - The object's static (non-CRDT) properties as a raw JSON\n * string without the surrounding braces, e.g. `\"foo\":1,\"bar\":\"hi\"`.\n *\n * Children are emitted _after_ the static properties. 
If a child key\n * collides with a static key (which shouldn't normally happen, but\n * defensively), the child wins because JSON.parse keeps the last value\n * for duplicate keys.\n */\nfunction* emitObject(\n snapshot: IReadableSnapshot,\n id: string,\n staticJson: string\n): StringGen {\n let comma = staticJson.length > 0;\n\n yield \"{\";\n yield staticJson;\n\n for (const [key, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n\n yield `${JSON.stringify(key)}:`;\n yield* emit(snapshot, childId);\n }\n yield \"}\";\n}\n\nfunction* emitList(snapshot: IReadableSnapshot, id: string): StringGen {\n let comma = false;\n\n yield \"[\";\n for (const [_, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n yield* emit(snapshot, childId);\n }\n yield \"]\";\n}\n\nfunction* emitMap(snapshot: IReadableSnapshot, id: string): StringGen {\n let comma = false;\n\n yield \"{\";\n for (const [key, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n\n yield `${JSON.stringify(key)}:`;\n yield* emit(snapshot, childId);\n }\n yield \"}\";\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { StorageNode } from \"@liveblocks/core\";\n\nimport type { IReadableSnapshot } from \"~/interfaces\";\n\n/**\n * Yield all nodes from a snapshot as [id, crdt] tuples.\n * Destroys the snapshot when done (or aborted).\n */\nexport function* snapshotToNodeStream(\n snapshot: IReadableSnapshot\n): Generator<StorageNode, void, never> {\n try {\n yield* snapshot.iter_all();\n } finally {\n snapshot.destroy();\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type {\n JsonObject,\n ObjectStorageNode,\n PlainLson,\n PlainLsonFields,\n PlainLsonList,\n PlainLsonMap,\n PlainLsonObject,\n RootStorageNode,\n SerializedList,\n StorageNode,\n} from \"@liveblocks/core\";\nimport {\n assertNever,\n CrdtType,\n isJsonObject,\n makePosition,\n} from \"@liveblocks/core\";\n\nimport type { IReadableSnapshot } from \"~/interfaces\";\n\nconst SERVER_INIT_OP_PREFIX = \"si\";\n\nfunction generateId(state: { clock: number }) {\n return `${SERVER_INIT_OP_PREFIX}:${state.clock++}`;\n}\n\nfunction isSpecialPlainLsonValue(\n value: PlainLson\n): value is PlainLsonObject | PlainLsonMap | PlainLsonList {\n return isJsonObject(value) && value.liveblocksType !== undefined;\n}\n\n/**\n * Generator that yields NodeTuples for a JSON value.\n * Always yields parent nodes before their children.\n */\nfunction* iterJson(\n key: string,\n data: PlainLson,\n parent: StorageNode,\n state: { clock: number }\n): Generator<StorageNode, void, undefined> {\n if (isSpecialPlainLsonValue(data)) {\n switch (data.liveblocksType) {\n case \"LiveObject\":\n yield* iterObjectInner(key, data.data, parent, state);\n return;\n\n case \"LiveList\":\n yield* iterList(key, data.data, parent, state);\n return;\n\n case \"LiveMap\":\n yield* iterMap(key, data.data, parent, state);\n return;\n\n // istanbul ignore next\n default:\n assertNever(data, \"Unknown `liveblocksType` field\");\n }\n } else {\n yield [\n generateId(state),\n {\n type: CrdtType.REGISTER,\n data,\n parentId: parent[0],\n parentKey: key,\n },\n ];\n }\n}\n\n/**\n * Generator that yields NodeTuples for a LiveMap.\n * Yields the map node first, then its children.\n */\nfunction* iterMap(\n key: string,\n map: PlainLsonFields,\n parent: StorageNode,\n state: { clock: number }\n): Generator<StorageNode, void, undefined> {\n const mapTuple: StorageNode = [\n generateId(state),\n { type: CrdtType.MAP, parentId: parent[0], parentKey: key },\n ];\n\n // Yield the map node first (parent before children)\n yield mapTuple;\n\n // Then yield all children\n for (const [subKey, subValue] of Object.entries(map)) {\n yield* iterJson(subKey, subValue, mapTuple, state);\n }\n}\n\n/**\n * Generator that yields NodeTuples for a LiveList.\n * Yields the list node first, then its children.\n */\nfunction* iterList(\n key: string,\n list: PlainLson[],\n parent: StorageNode,\n state: { clock: number }\n): Generator<StorageNode, void, undefined> {\n const id = generateId(state);\n const crdt: SerializedList = {\n type: CrdtType.LIST,\n parentId: parent[0],\n parentKey: key,\n };\n const listTuple: StorageNode = [id, crdt];\n\n // Yield the list node first (parent before children)\n yield listTuple;\n\n // Then yield all children\n let position = makePosition();\n for (const subValue of list) {\n yield* iterJson(position, subValue, listTuple, state);\n position = makePosition(position);\n }\n}\n\n/**\n * Generator that yields NodeTuples for a LiveObject.\n * Yields the object node first, then its children.\n *\n * Note: The object's data field is populated with non-special values\n * (primitives, arrays, plain objects), while special values (LiveObject,\n * LiveList, LiveMap) are yielded as separate nodes.\n */\nfunction* iterObjectInner(\n key: string,\n value: PlainLsonFields,\n parent: StorageNode | null,\n state: { clock: number }\n): Generator<StorageNode, void, undefined> {\n // First pass: collect non-special data and identify special children\n const data: JsonObject = {};\n const 
specialChildren: Array<[string, PlainLson]> = [];\n\n for (const [subKey, subValue] of Object.entries(value)) {\n if (isSpecialPlainLsonValue(subValue)) {\n specialChildren.push([subKey, subValue]);\n } else {\n data[subKey] = subValue;\n }\n }\n\n // Create the object tuple with collected data\n const objectTuple: RootStorageNode | ObjectStorageNode =\n parent !== null\n ? [\n generateId(state),\n {\n type: CrdtType.OBJECT,\n data,\n parentId: parent[0],\n parentKey: key,\n },\n ]\n : [\"root\", { type: CrdtType.OBJECT, data }];\n\n // Yield the object node first (parent before children)\n yield objectTuple;\n\n // Then yield all special children\n for (const [subKey, subValue] of specialChildren) {\n yield* iterJson(subKey, subValue, objectTuple, state);\n }\n}\n\n/**\n * Transform a \"Plain LSON\" document to a lazy NodeStream. Used to initialize\n * the storage with a predefined state.\n * Always emits parent nodes before their children.\n */\nexport function* plainLsonToNodeStream(\n root: PlainLsonObject\n): Generator<StorageNode, void, undefined> {\n const state = { clock: 1 };\n yield* iterObjectInner(\"root\", root.data, null, state);\n}\n\n// ---------------------------------------------------------------------------\n// Non-streaming serialization: builds a full PlainLsonObject in memory.\n// ---------------------------------------------------------------------------\n\n/**\n * Serialize a storage snapshot to \"Plain LSON\" format, returning a full\n * in-memory PlainLsonObject. Faster than snapshotToPlainLson_lazy for\n * small/medium documents because the result can be passed straight to\n * JSON.stringify().\n */\nexport function snapshotToPlainLson_eager(\n snapshot: IReadableSnapshot\n): PlainLsonObject {\n try {\n return buildObject(snapshot, \"root\", snapshot.get_root().data);\n } finally {\n snapshot.destroy();\n }\n}\n\nfunction buildNode(snapshot: IReadableSnapshot, id: string): PlainLson {\n const node = snapshot.get_node(id);\n if (node.type === CrdtType.OBJECT) {\n return buildObject(snapshot, id, node.data);\n } else if (node.type === CrdtType.LIST) {\n return buildList(snapshot, id);\n } else if (node.type === CrdtType.MAP) {\n return buildMap(snapshot, id);\n } else {\n return node.data;\n }\n}\n\nfunction buildObject(\n snapshot: IReadableSnapshot,\n id: string,\n staticData: JsonObject\n): PlainLsonObject {\n // Static data values are Json, which is a subset of PlainLson\n const data: PlainLsonFields = Object.assign(\n Object.create(null),\n staticData\n ) as PlainLsonFields;\n for (const [key, childId] of snapshot.iter_children(id)) {\n data[key] = buildNode(snapshot, childId);\n }\n return { liveblocksType: \"LiveObject\", data };\n}\n\nfunction buildList(snapshot: IReadableSnapshot, id: string): PlainLsonList {\n const data: PlainLson[] = [];\n for (const [_, childId] of snapshot.iter_children(id)) {\n data.push(buildNode(snapshot, childId));\n }\n return { liveblocksType: \"LiveList\", data };\n}\n\nfunction buildMap(snapshot: IReadableSnapshot, id: string): PlainLsonMap {\n const data = Object.create(null) as PlainLsonFields;\n for (const [key, childId] of snapshot.iter_children(id)) {\n data[key] = buildNode(snapshot, childId);\n }\n return { liveblocksType: \"LiveMap\", data };\n}\n\n// ---------------------------------------------------------------------------\n// Streaming serialization: yields string chunks that concatenate to JSON.\n// ---------------------------------------------------------------------------\n\n// Generator-of-strings type alias 
for brevity of signatures\ntype StringGen = Generator<string, void, never>;\n\n/**\n * Serialize a storage snapshot to \"Plain LSON\" format. Yields string chunks\n * that, when concatenated, form a valid JSON string representing the storage\n * document. Slower than snapshotToPlainLson_eager but can stream documents\n * that don't fit entirely in memory.\n */\nexport function* snapshotToPlainLson_lazy(\n snapshot: IReadableSnapshot\n): StringGen {\n try {\n const staticJson = JSON.stringify(snapshot.get_root().data).slice(1, -1);\n yield* emitObject(snapshot, \"root\", staticJson);\n } finally {\n snapshot.destroy();\n }\n}\n\nfunction* emit(snapshot: IReadableSnapshot, id: string): StringGen {\n const node = snapshot.get_node(id);\n if (node.type === CrdtType.OBJECT) {\n yield* emitObject(snapshot, id, JSON.stringify(node.data).slice(1, -1));\n } else if (node.type === CrdtType.LIST) {\n yield* emitList(snapshot, id);\n } else if (node.type === CrdtType.MAP) {\n yield* emitMap(snapshot, id);\n } else if (node.type === CrdtType.REGISTER) {\n yield JSON.stringify(node.data);\n }\n}\n\n/**\n * @param staticJson - The object's static (non-CRDT) properties as a raw JSON\n * string without the surrounding braces, e.g. `\"foo\":1,\"bar\":\"hi\"`.\n *\n * Children are emitted _after_ the static properties. If a child key\n * collides with a static key (which shouldn't normally happen, but\n * defensively), the child wins because JSON.parse keeps the last value\n * for duplicate keys.\n */\nfunction* emitObject(\n snapshot: IReadableSnapshot,\n id: string,\n staticJson: string\n): StringGen {\n let comma = staticJson.length > 0;\n\n yield '{\"liveblocksType\":\"LiveObject\",\"data\":{';\n yield staticJson;\n\n for (const [key, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n\n yield `${JSON.stringify(key)}:`;\n yield* emit(snapshot, childId);\n }\n yield \"}}\";\n}\n\nfunction* emitList(snapshot: IReadableSnapshot, id: string): StringGen {\n let comma = false;\n\n yield '{\"liveblocksType\":\"LiveList\",\"data\":[';\n for (const [_, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n yield* emit(snapshot, childId);\n }\n yield \"]}\";\n}\n\nfunction* emitMap(snapshot: IReadableSnapshot, id: string): StringGen {\n let comma = false;\n\n yield '{\"liveblocksType\":\"LiveMap\",\"data\":{';\n for (const [key, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n\n yield `${JSON.stringify(key)}:`;\n yield* emit(snapshot, childId);\n }\n yield \"}}\";\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { raise } from \"@liveblocks/core\";\n\n/**\n * Like ES6 map, but takes a default (factory) function which will be used\n * to create entries for missing keys on the fly.\n *\n * Useful for code like:\n *\n * const map = new DefaultMap(() => []);\n * map.getOrCreate('foo').push('hello');\n * map.getOrCreate('foo').push('world');\n * map.getOrCreate('foo')\n * // ['hello', 'world']\n *\n */\nexport class DefaultMap<K, V> extends Map<K, V> {\n #defaultFn?: (key: K) => V;\n\n /**\n * If the default function is not provided to the constructor, it has to be\n * provided in each .getOrCreate() call individually.\n */\n constructor(\n defaultFn?: (key: K) => V,\n entries?: readonly (readonly [K, V])[] | null\n ) {\n super(entries);\n this.#defaultFn = defaultFn;\n }\n\n /**\n * Gets the value at the given key, or creates it.\n *\n * Difference from normal Map: if the key does not exist, it will be created\n * on the fly using the factory function, and that value will get returned\n * instead of `undefined`.\n */\n getOrCreate(key: K, defaultFn?: (key: K) => V): V {\n if (super.has(key)) {\n // eslint-disable-next-line no-restricted-syntax\n return super.get(key)!;\n } else {\n const fn =\n defaultFn ??\n this.#defaultFn ??\n raise(\"DefaultMap used without a factory function\");\n\n const value = fn(key);\n this.set(key, value);\n return value;\n }\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { DefaultMap } from \"./DefaultMap\";\n\nfunction emptyIterator(): IterableIterator<never> {\n return [][Symbol.iterator]();\n}\n\n/**\n * Like an ES6 Map, but two levels deep. Useful for building reverse lookup\n * tables. Will automatically delete second-level maps when they are empty.\n */\nexport class NestedMap<K1, K2, V> {\n #map: DefaultMap<K1, Map<K2, V>>;\n\n constructor() {\n this.#map = new DefaultMap(() => new Map<K2, V>());\n }\n\n get size(): number {\n let total = 0;\n for (const value of this.#map.values()) {\n total += value.size;\n }\n return total;\n }\n\n count(key1: K1): number {\n return this.#map.get(key1)?.size ?? 0;\n }\n\n *keys(): IterableIterator<[K1, K2]> {\n for (const [key1, nested] of this.#map) {\n for (const key2 of nested.keys()) {\n yield [key1, key2];\n }\n }\n }\n\n has(key1: K1, key2: K2): boolean {\n return this.#map.get(key1)?.has(key2) ?? 
false;\n }\n\n get(key1: K1, key2: K2): V | undefined {\n return this.#map.get(key1)?.get(key2);\n }\n\n set(key1: K1, key2: K2, value: V): this {\n this.#map.getOrCreate(key1).set(key2, value);\n return this;\n }\n\n delete(key1: K1, key2: K2): void {\n if (!this.#map.has(key1)) {\n return;\n }\n\n const nested = this.#map.get(key1)!;\n nested.delete(key2);\n if (nested.size === 0) {\n this.#map.delete(key1);\n }\n }\n\n clear(): void {\n this.#map.clear();\n }\n\n *[Symbol.iterator](): IterableIterator<[K1, K2, V]> {\n for (const [key1, nested] of this.#map) {\n for (const [key2, value] of nested) {\n yield [key1, key2, value];\n }\n }\n }\n\n entriesAt(key1: K1): IterableIterator<[K2, V]> {\n return this.#map.get(key1)?.entries() ?? emptyIterator();\n }\n\n *filterAt(key1: K1, keys: Iterable<K2>): Iterable<[K2, V]> {\n const nested = this.#map.get(key1);\n if (nested === undefined) {\n return;\n }\n\n for (const k2 of keys) {\n const value = nested.get(k2);\n if (value !== undefined) {\n yield [k2, value];\n }\n }\n }\n\n keysAt(key1: K1): IterableIterator<K2> {\n return this.#map.get(key1)?.keys() ?? emptyIterator();\n }\n\n valuesAt(key1: K1): IterableIterator<V> {\n return this.#map.get(key1)?.values() ?? emptyIterator();\n }\n\n deleteAll(key1: K1): void {\n this.#map.delete(key1);\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type {\n NodeMap,\n NodeStream,\n SerializedChild,\n SerializedCrdt,\n SerializedRootObject,\n StorageNode,\n} from \"@liveblocks/core\";\nimport { CrdtType, isRootStorageNode, nn } from \"@liveblocks/core\";\n\nimport type { IReadableSnapshot } from \"~/interfaces\";\nimport { NestedMap } from \"~/lib/NestedMap\";\n\n/**\n * Create a basic in-memory snapshot from a set of storage nodes.\n *\n * Takes a copy of the provided nodes, so the snapshot is isolated from\n * subsequent mutations to the source.\n */\nexport function makeInMemorySnapshot(\n values: NodeMap | NodeStream\n): IReadableSnapshot {\n const map: NodeMap = new Map<string, SerializedCrdt>(values as NodeStream);\n\n if (!map.has(\"root\")) {\n map.set(\"root\", { type: CrdtType.OBJECT, data: {} });\n }\n\n // Collect child entries, sort by (parentId, parentKey), then insert into\n // the revMap so that entriesAt() returns children in parent_key order\n // without needing to re-sort on every iter_children call.\n const entries: Array<[parentId: string, parentKey: string, id: string]> = [];\n const nodeStream = map as NodeStream;\n for (const node of nodeStream) {\n if (isRootStorageNode(node)) continue;\n const [id, crdt] = node;\n entries.push([crdt.parentId, crdt.parentKey, id]);\n }\n entries.sort((a, b) =>\n a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : a[1] < b[1] ? -1 : a[1] > b[1] ? 
1 : 0\n );\n\n const revMap = new NestedMap<string, string, string>();\n for (const [parentId, parentKey, id] of entries) {\n revMap.set(parentId, parentKey, id);\n }\n\n function get_node(id: string): SerializedChild {\n return nn(map.get(id), `Node not found: ${id}`) as SerializedChild;\n }\n\n return {\n get_root: () =>\n nn(\n map.get(\"root\"),\n \"Root not found\"\n ) as SerializedCrdt as SerializedRootObject,\n get_node,\n iter_children: (nodeId) => revMap.entriesAt(nodeId),\n iter_all: () => map as Iterable<StorageNode>,\n destroy() {\n map.clear();\n revMap.clear();\n },\n };\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Awaitable, Json } from \"@liveblocks/core\";\nimport type { Decoder } from \"decoders\";\n\nimport type { IStorageDriver } from \"~/interfaces\";\n\nexport interface MetadataDB {\n // Getter supports optional decoder\n get(key: string): Promise<Json | undefined>;\n get<T>(decoder: Decoder<T>, key: string): Promise<T | undefined>;\n\n put(key: string, value: Json): Awaitable<void>;\n delete(key: string): Awaitable<void>;\n}\n\n/**\n * Returns a thin wrapper around an IStorageDriver to provide MetadataDB\n * functionality, including type-safe reads.\n */\nexport function makeMetadataDB(driver: IStorageDriver): MetadataDB {\n async function get(key: string): Promise<Json | undefined>;\n async function get<T>(\n decoder: Decoder<T>,\n key: string\n ): Promise<T | undefined>;\n async function get<T>(\n a1: string | Decoder<T>,\n a2?: string\n ): Promise<T | Json | undefined> {\n if (a2 === undefined) {\n return await driver.get_meta(a1 as string);\n } else {\n return (a1 as Decoder<T>).value(await driver.get_meta(a2));\n }\n }\n\n return {\n get,\n put: driver.put_meta.bind(driver),\n delete: driver.delete_meta.bind(driver),\n };\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { enum_ } from \"decoders\";\n\nexport enum ProtocolVersion {\n //\n // NOTE:\n // The following versions were once used, but there is no usage of it anymore\n // in the wild, so we've removed support for them:\n //\n // V1 - Initial version\n // V2 - ?\n // V3 - started to broadcast storage operations to the sender to fix some\n // conflicts\n // V4 - created a virtual root to fix an issue where multiple people\n // initialize the storage at the same time\n // V5 - started to broadcast messages in a batch (arrays) for clients\n // V6 - started to validate inputs with decoders.\n //\n\n /**\n * V7 changes the URL params used to authorize the user.\n *\n * In V6 and lower, the ?token= URL param is used, which will only ever\n * contain a `pub-legacy` or `sec-legacy` token.\n *\n * URL PARAM CHANGES:\n * Starting with V7, the ?token= is no longer a legal URL param. Instead,\n * either of the following params is used:\n *\n * - ?tok=... for ID tokens\n * - ?tok=... for Access tokens\n * - ?tok=... for Secret Legacy tokens\n * - ?pubkey=... for public keys (no token, public key can be directly used here)\n *\n * Note that `pub-legacy` tokens are no longer accepted in V7, and are\n * replaced by the direct use of the public key.\n *\n * BEHAVIORAL CHANGES:\n * Starting with V7, the RoomState server message that gets sent when\n * a client initially connects will now include new fields:\n *\n * - `actor`\n * - `scopes`\n *\n * Since v1.2.0 (Jul 31, 2023)\n */\n V7 = 7,\n\n /**\n * V8 changes storage response format and allows streaming.\n *\n * MESSAGE FORMAT CHANGES:\n * - V8: sends 1+ STORAGE_CHUNK messages, followed by 1 final\n * STORAGE_STREAM_END message (with compact nodes)\n * - V7: sends 1 STORAGE_STATE_V7 message (with full nodes)\n *\n * STREAMING BEHAVIOR in V8:\n * - For SQLite-backed rooms: nodes are split into multiple STORAGE_CHUNK\n * messages, followed by STORAGE_STREAM_END\n * - For KV-backed rooms: all nodes are sent in a single STORAGE_CHUNK\n * message that will contain all nodes, followed by STORAGE_STREAM_END\n *\n * Since 3.14.0\n */\n V8 = 8,\n}\n\nexport const protocolVersionDecoder = enum_(ProtocolVersion).describe(\n \"Unsupported protocol version\"\n);\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type {\n BaseUserMeta,\n Brand,\n IUserInfo,\n Json,\n JsonObject,\n} from \"@liveblocks/core\";\nimport {\n assertNever,\n ClientMsgCode,\n nodeStreamToCompactNodes,\n OpCode,\n raise,\n ServerMsgCode,\n tryParseJson,\n WebsocketCloseCodes as CloseCode,\n} from \"@liveblocks/core\";\nimport { Mutex } from \"async-mutex\";\nimport { array, formatInline } from \"decoders\";\nimport { chunked } from \"itertools\";\nimport { nanoid } from \"nanoid\";\n\nimport type { Guid } from \"~/decoders\";\nimport { clientMsgDecoder } from \"~/decoders\";\nimport type { IServerWebSocket, IStorageDriver } from \"~/interfaces\";\nimport { Logger } from \"~/lib/Logger\";\nimport { makeNewInMemoryDriver } from \"~/plugins/InMemoryDriver\";\nimport type {\n ClientMsg as GenericClientMsg,\n IgnoredOp,\n Op,\n ServerMsg as GenericServerMsg,\n ServerWireOp,\n} from \"~/protocol\";\nimport { ProtocolVersion } from \"~/protocol\";\nimport { Storage } from \"~/Storage\";\nimport { YjsStorage } from \"~/YjsStorage\";\n\nimport { tryCatch } from \"./lib/tryCatch\";\nimport { UniqueMap } from \"./lib/UniqueMap\";\nimport { makeRoomStateMsg } from \"./utils\";\n\nconst messagesDecoder = array(clientMsgDecoder);\n\nconst HIGHEST_PROTOCOL_VERSION = Math.max(\n ...Object.values(ProtocolVersion).filter(\n (v): v is number => typeof v === \"number\"\n )\n) as ProtocolVersion;\n\n// Reverse lookup for ServerMsgCodes\nconst SERVER_MSG_CODE_NAMES = Object.fromEntries(\n Object.entries(ServerMsgCode).map(([k, v]) => [v, k])\n) as Record<(typeof ServerMsgCode)[keyof typeof ServerMsgCode], string>;\n\nconst BLACK_HOLE = new Logger([\n /* No targets, i.e. black hole logger */\n]);\n\nexport type LoadingState = \"initial\" | \"loading\" | \"loaded\";\nexport type ActorID = Brand<number, \"ActorID\">;\n\n/**\n * Session keys are also known as the \"nonce\" in the protocol. It's a random,\n * unique, but PRIVATE, identifier for the session, and it's important that\n * this ID is never shared to anyone except the connected client, which\n * receives it as part of its ROOM_STATE message.\n */\nexport type SessionKey = Brand<string, \"SessionKey\">;\n\nexport type PreSerializedServerMsg = Brand<string, \"PreSerializedServerMsg\">;\ntype ClientMsg = GenericClientMsg<JsonObject, Json>;\ntype ServerMsg = GenericServerMsg<JsonObject, BaseUserMeta, Json>;\n\n/**\n * Creates a collector for deferred promises (side effects that should run\n * outside a mutex). Call `defer` to collect promises, then `waitAll` to\n * await them all.\n */\nfunction collectSideEffects() {\n const deferred: Promise<void>[] = [];\n return {\n defer: (p: Promise<void>) => void deferred.push(p),\n waitAll: () => Promise.allSettled(deferred),\n };\n}\n\nfunction serialize(\n msgs: ServerMsg | readonly ServerMsg[]\n): PreSerializedServerMsg {\n return JSON.stringify(msgs) as PreSerializedServerMsg;\n}\n\nexport function ackIgnoredOp(opId: string): IgnoredOp {\n return { type: OpCode.DELETE_CRDT, id: \"ACK\", opId }; // (H)Ack Op\n}\n\nfunction stripOpId(op: Op): ServerWireOp {\n // TODO: Optimize later! 
Instead of duplicating every op and\n // stripping the opId explicitly, it would be generally more\n // efficient if we treated the opIds as \"envelopes\" around Ops (or\n // send them in a separate array altogether at the protocol level\n // in V8 soon--even better, as it would not even require any stripping!)\n const { opId: _, ...rest } = op; // Strip opIds from all outgoing messages!\n return rest;\n}\n\n/**\n * A known or anonymous user.\n *\n * BY DEFINITION:\n * A User with an assigned `id` property is a non-anonymous user.\n * A User with an assigned `anonymousId` property is an anonymous user.\n * A User with neither of those properties is also an anonymous user.\n *\n * WHAT'S THE DIFFERENCE?\n * When creating a non-anonymous user, other users in the room will be able to\n * observe the assigned `id` property in Presence (e.g. via the `other.user.id`\n * in the Liveblocks client).\n *\n * When creating an anonymous user, you can _optionally_ provide an anonymous\n * ID to (re)use. While not authorized, this still allows you to correlate\n * unique users.\n */\nexport type IUserData = AuthorizedUser | AnonymousUser; // YYY Remove this export before launch. It's a private API, but only needed temporarily, while refactoring our CF server\n\ntype AuthorizedUser = {\n readonly id: string;\n readonly anonymousId?: never;\n readonly info?: IUserInfo;\n};\n\n// Anonymous users, by definition, have no ID, or have an explicitly-assigned\n// anonymous ID (in case you need to control anonymous ID generation, e.g. by\n// tracking a cookie). The anonymous ID will not show up in other clients. To\n// those clients, it will appear as a user without an ID.\ntype AnonymousUser = {\n readonly anonymousId: string;\n readonly id?: never;\n readonly info?: IUserInfo;\n};\n\n/*\n\nSession Types: \n| | Browser Session | Backend Session | Virtual Session |\n|-----------------------------------|-----------------|-----------------|-----------------|\n| Sends enter/leave/presence events | ✓ | | ✓ |\n| Visible to other users in room | ✓ | | ✓ |\n| Has WebSocket connection | ✓ | | |\n| Updated from | Browser | REST API | REST API |\n\n*Note: VirtualSession is not yet implemented. \n\n*/\n\n/**\n * Each BrowserSession is an abstraction around a socket instance, and maintains\n * metadata about the connection.\n */\nexport class BrowserSession<SM, CM extends JsonObject> {\n // ^^ User-defined Session Metadata\n // ^^ User-defined Client Metadata (sent to client in ROOM_STATE)\n\n public readonly version: ProtocolVersion; // Liveblocks protocol version this client will speak\n public readonly actor: ActorID; // Must be unique within the room\n public readonly createdAt: Date;\n\n // Externally provided (public!) user metadata. This information will get shared with other clients\n public readonly user: IUserData;\n public readonly scopes: string[]; // Permissions for this session, sent to connected clients (so consider public info)\n public readonly meta: SM; // Arbitrary *private* meta data to attach to this session (will NOT be shared)\n public readonly publicMeta?: CM; // Metadata sent to client in ROOM_STATE message's \"meta\" field\n\n readonly #_socket: IServerWebSocket;\n readonly #_debug: boolean;\n #_lastActiveAt: Date;\n\n // We keep a status in-memory in the session of whether we already sent a rejected ops message to the client.\n #_hasNotifiedClientStorageUpdateError: boolean;\n\n /** @internal - Never create a BrowserSession instance manually. Use the room.startBrowserSession() API instead. 
*/\n constructor(\n ticket: Ticket<SM, CM>,\n socket: IServerWebSocket,\n debug: boolean\n ) {\n this.version = ticket.version;\n this.actor = ticket.actor;\n this.user = ticket.user;\n this.scopes = ticket.scopes;\n this.meta = ticket.meta ?? (undefined as unknown as SM);\n this.publicMeta = ticket.publicMeta;\n this.#_socket = socket;\n this.#_debug = debug;\n\n const now = new Date();\n this.createdAt = now;\n this.#_lastActiveAt = now;\n this.#_hasNotifiedClientStorageUpdateError = false;\n }\n\n get lastActiveAt(): Date {\n const lastPing = this.#_socket.getLastPongTimestamp?.();\n if (lastPing && lastPing > this.#_lastActiveAt) {\n return lastPing;\n } else {\n return this.#_lastActiveAt;\n }\n }\n\n get hasNotifiedClientStorageUpdateError(): boolean {\n return this.#_hasNotifiedClientStorageUpdateError;\n }\n\n markActive(now = new Date()): void {\n if (now > this.#_lastActiveAt) {\n this.#_lastActiveAt = now;\n }\n }\n\n setHasNotifiedClientStorageUpdateError(): void {\n this.#_hasNotifiedClientStorageUpdateError = true;\n }\n\n sendPong(): number {\n this.markActive();\n\n const sent = this.#_socket.send(\"pong\");\n if (this.#_debug) {\n if (sent < 0) {\n console.error(\n `failed to send \"pong\" to actor=${this.actor} (back pressure)`\n );\n } else if (sent === 0) {\n console.error(\n `failed to send \"pong\" to actor=${this.actor} (connection issue)`\n );\n } else {\n // Success\n console.log(`sent to actor=${this.actor}: \"pong\"`);\n }\n }\n return sent;\n }\n\n send(serverMsg: ServerMsg | ServerMsg[] | PreSerializedServerMsg): number {\n const data =\n typeof serverMsg === \"string\" ? serverMsg : serialize(serverMsg);\n const sent = this.#_socket.send(data);\n if (this.#_debug) {\n if (sent < 0) {\n console.error(\n `failed to send message to actor=${this.actor} (back pressure)`\n );\n } else if (sent === 0) {\n console.error(\n `failed to send message to actor=${this.actor} (connection issue)`\n );\n }\n\n const msgs = JSON.parse(data) as ServerMsg | ServerMsg[];\n for (const msg of Array.isArray(msgs) ? msgs : [msgs]) {\n console.log(\n `sent to actor=${this.actor}: [${\n SERVER_MSG_CODE_NAMES[msg.type] ?? msg.type\n }] ${JSON.stringify(msg)}`\n );\n }\n }\n return sent;\n }\n\n /**\n * @internal\n * Closes the socket associated to this BrowserSession.\n *\n * NOTE: Never call this API directly! Call .endBrowserSession() instead.\n */\n closeSocket(code: number, reason?: string): void {\n this.#_socket.close(code, reason);\n }\n}\n\nexport class BackendSession extends BrowserSession<never, never> {\n /** @internal Never call this constructor directly */\n constructor(\n ticket: Ticket<never, never>,\n socket: IServerWebSocket,\n debug: boolean\n ) {\n super(ticket, socket, debug);\n }\n}\n\nexport type Ticket<SM, CM extends JsonObject> = {\n readonly sessionKey: SessionKey; // Should stay private\n readonly version: ProtocolVersion;\n readonly actor: ActorID;\n readonly meta?: SM; // Private Session metadata\n readonly publicMeta?: CM; // Client metadata is *public* metadata sent to client in ROOM_STATE message\n readonly user: IUserData; // User-provided, public, metadata\n readonly scopes: string[];\n};\n\nexport type CreateTicketOptions<SM, CM extends JsonObject> = {\n /** The Liveblocks protocol version this client will speak */\n version?: ProtocolVersion;\n meta?: SM;\n publicMeta?: CM;\n /** A user-provided ID to externally recognize the user by */\n id?: string;\n /**\n * A user-provided anonymous ID to use. When `id` is provided, this field is\n * ignored. 
When both fields are missing, a new anonymous ID will be\n * generated.\n */\n anonymousId?: string;\n /** Static user metadata to assign this session, will get broadcasted to other clients */\n info?: IUserInfo;\n /** Permissions to assign this session */\n scopes?: string[];\n\n /** An explicit actor ID to use. Supported for legacy use cases only. It's best to not set this and let it get assigned dynamically, as it's important for this identifier to be unique. */\n actor?: ActorID;\n};\n\ntype InternalData = {\n readonly storage: Storage;\n readonly yjsStorage: YjsStorage;\n readonly mutex: Mutex;\n};\n\ntype RoomOptions<SM, CM extends JsonObject, C> = {\n /**\n * Bring your own persistence backend\n */\n storage?: IStorageDriver;\n logger?: Logger;\n\n /**\n * Whether to allow streaming storage responses. Only safe with drivers\n * that can guarantee that no Ops from other clients can get interleaved\n * between the chunk generation until the last chunk has been sent.\n * Defaults to true, but is notably NOT safe to use from DOS-KV backends.\n */\n allowStreaming?: boolean;\n\n // YYY Restructure these hooks to all take a single `event` param\n hooks?: {\n /** Customize which incoming messages from a client are allowed or disallowed. */\n isClientMsgAllowed?: (\n msg: ClientMsg,\n session: BrowserSession<SM, CM>\n ) => { allowed: true } | { allowed: false; reason: string };\n\n /** Called whenever the server acknowledged a ping with a pong */\n onDidPong?: (ctx?: C) => void | Promise<void>;\n\n /** Called before the room is attempted to be loaded */\n onRoomWillLoad?: (ctx?: C) => void | Promise<void>;\n /** Called right after the room's contents are loaded, but before any session has been started */\n onRoomDidLoad?: (ctx?: C) => void | Promise<void>;\n\n /** Called right before the room is attempted to be unloaded. Synchronous. May throw to abort the unloading. */\n onRoomWillUnload?: (ctx?: C) => void;\n /** Called right after the room has been unloaded from memory. Synchronous. */\n onRoomDidUnload?: (ctx?: C) => void;\n\n /** Called when a new user entered the room. */\n onSessionDidStart?: (\n session: BrowserSession<SM, CM>,\n ctx?: C\n ) => void | Promise<void>;\n /** Called when a user left the room. */\n onSessionDidEnd?: (\n session: BrowserSession<SM, CM>,\n ctx?: C\n ) => void | Promise<void>;\n\n /**\n * Called when Liveblocks Storage for the room was updated.\n *\n * IMPORTANT! If you implement these as async functions, it's important to\n * note that these run outside of the storage mutex that guarantees\n * a consistent view of storage.\n * Therefore, only ever use this hook to implement a side effect (like\n * trigger a notification), don't read storage in this hook directly.\n */\n postClientMsgStorageDidUpdate?: (ctx?: C) => void | Promise<void>;\n /**\n * Called when Yjs Storage for the room was updated.\n *\n * IMPORTANT! 
If you implement these as async functions, it's important to\n * note that these run outside of the storage mutex that guarantees\n * a consistent view of storage.\n * Therefore, only ever use this hook to implement a side effect (like\n * trigger a notification), don't read storage in this hook directly.\n */\n postClientMsgYdocDidUpdate?: (\n ctx?: C,\n sess?: BrowserSession<SM, CM>\n ) => void | Promise<void>;\n };\n\n /** Enable debug logging */\n enableDebugLogging?: boolean;\n};\n\n/**\n * A Liveblocks Room server.\n */\nexport class Room<RM, SM, CM extends JsonObject, C = undefined> {\n // ^^^^^^^^^^ User-defined Room Metadata, Session Metadata, and Client Metadata\n\n public meta: RM;\n public readonly driver: IStorageDriver;\n public logger: Logger;\n\n private _loadData$: Promise<void> | null = null;\n private _data: InternalData | null = null;\n private _qsize = 0;\n\n private readonly sessions = new UniqueMap<\n SessionKey,\n BrowserSession<SM, CM>,\n ActorID\n >((s) => s.actor);\n\n private readonly hooks: {\n isClientMsgAllowed: (\n msg: ClientMsg,\n session: BrowserSession<SM, CM>\n ) => { allowed: true } | { allowed: false; reason: string };\n\n onDidPong?: (ctx?: C) => void | Promise<void>;\n\n onRoomWillLoad?: (ctx?: C) => void | Promise<void>;\n onRoomDidLoad?: (ctx?: C) => void | Promise<void>;\n\n onRoomWillUnload?: (ctx?: C) => void;\n onRoomDidUnload?: (ctx?: C) => void;\n\n onSessionDidStart?: (\n session: BrowserSession<SM, CM>,\n ctx: C | undefined\n ) => void | Promise<void>;\n onSessionDidEnd?: (\n session: BrowserSession<SM, CM>,\n ctx: C | undefined\n ) => void | Promise<void>;\n\n // Don't like these callback names yet. Think about how to better abstract it later.\n postClientMsgStorageDidUpdate?: (ctx?: C) => void | Promise<void>;\n postClientMsgYdocDidUpdate?: (\n ctx?: C,\n sess?: BrowserSession<SM, CM>\n ) => void | Promise<void>;\n };\n\n readonly #_debug: boolean;\n readonly #_allowStreaming: boolean;\n\n constructor(meta: RM, options?: RoomOptions<SM, CM, C>) {\n const driver = options?.storage ?? makeNewInMemoryDriver();\n this.meta = meta;\n this.driver = driver;\n this.logger = options?.logger ?? BLACK_HOLE;\n this.#_allowStreaming = options?.allowStreaming ?? true;\n this.hooks = {\n isClientMsgAllowed:\n options?.hooks?.isClientMsgAllowed ??\n (() => {\n return {\n allowed: true,\n };\n }),\n\n // YYY .load() isn't called on the RoomServer yet! As soon as it does, these hooks will get called\n onRoomWillLoad: options?.hooks?.onRoomWillLoad,\n onRoomDidLoad: options?.hooks?.onRoomDidLoad,\n\n onRoomWillUnload: options?.hooks?.onRoomWillUnload,\n onRoomDidUnload: options?.hooks?.onRoomDidUnload,\n\n onSessionDidStart: options?.hooks?.onSessionDidStart,\n onSessionDidEnd: options?.hooks?.onSessionDidEnd,\n\n postClientMsgStorageDidUpdate:\n options?.hooks?.postClientMsgStorageDidUpdate,\n postClientMsgYdocDidUpdate: options?.hooks?.postClientMsgYdocDidUpdate,\n };\n this.#_debug = options?.enableDebugLogging ?? 
false;\n }\n\n public get loadingState(): LoadingState {\n if (this._loadData$ === null) {\n return \"initial\";\n } else if (this._data === null) {\n return \"loading\";\n } else {\n return \"loaded\";\n }\n }\n\n public get numSessions(): number { return this.sessions.size; } // prettier-ignore\n\n public get storage(): Storage { return this.data.storage; } // prettier-ignore\n public get yjsStorage(): YjsStorage { return this.data.yjsStorage; } // prettier-ignore\n\n public get mutex(): Mutex { return this.data.mutex; } // prettier-ignore\n\n private get data(): InternalData { return this._data ?? raise(\"Cannot use room before it's loaded\"); } // prettier-ignore\n\n // ------------------------------------------------------------------------------------\n // Public API\n // ------------------------------------------------------------------------------------\n\n /**\n * Initializes the Room, so it's ready to start accepting connections. Safe\n * to call multiple times. After awaiting `room.load()` the Room is ready to\n * be used.\n */\n public async load(ctx?: C): Promise<void> {\n if (this._loadData$ === null) {\n this._data = null;\n this._loadData$ = this._load(ctx).catch((e) => {\n this._data = null;\n this._loadData$ = null;\n throw e;\n });\n }\n return this._loadData$;\n }\n\n /**\n * Releases the currently-loaded storage tree from worker memory, freeing it\n * up to be garbage collected. The next time a user will join the room, the\n * room will be reloaded from storage.\n */\n public unload(ctx?: C): void {\n this.hooks.onRoomWillUnload?.(ctx); // May throw to cancel unloading\n if (this._data) {\n this.storage.unload();\n this.yjsStorage.unload();\n }\n // YYY Abort any potentially in-flight _loadData$ calls here\n this._loadData$ = null;\n // this._data = null; // YYY Should we also clear _data? I think so!\n this.hooks.onRoomDidUnload?.(ctx);\n }\n\n /**\n * Issues a Ticket with a new/unique actor ID\n *\n * IMPORTANT! As the caller of this function, you are responsible for\n * ensuring you trust the values passed in here. Never pass unauthorized\n * values in here.\n *\n * The returned Ticket can be turned into a active Session once the socket\n * connection is established. If the socket is never established, this\n * unused Ticket will simply get garbage collected.\n */\n public async createTicket(\n options?: CreateTicketOptions<SM, CM>\n ): Promise<Ticket<SM, CM>> {\n const actor$ = options?.actor ?? this.getNextActor();\n const sessionKey = nanoid() as SessionKey;\n const info = options?.info;\n const ticket: Ticket<SM, CM> = {\n version: options?.version ?? HIGHEST_PROTOCOL_VERSION,\n actor: await actor$,\n sessionKey,\n meta: options?.meta,\n publicMeta: options?.publicMeta,\n user: options?.id\n ? { id: options.id, info }\n : { anonymousId: options?.anonymousId ?? nanoid(), info },\n scopes: options?.scopes ?? 
[\"room:write\"],\n };\n if (this.#_debug) {\n console.log(`new ticket created: ${JSON.stringify(ticket)}`);\n }\n return ticket;\n }\n\n public async createBackendSession_experimental(): Promise<\n [session: BackendSession, outgoingMessages: PreSerializedServerMsg[]]\n > {\n const ticket = (await this.createTicket()) as Ticket<never, never>;\n const capturedServerMsgs: PreSerializedServerMsg[] = [];\n const stub = {\n send: (data) => {\n if (typeof data === \"string\") {\n capturedServerMsgs.push(data as PreSerializedServerMsg);\n }\n return 0;\n },\n close: () => {}, // noop\n } satisfies IServerWebSocket;\n const session = new BackendSession(ticket, stub, false);\n return [session, capturedServerMsgs];\n }\n\n /**\n * Restores the given sessions as the Room server's session list. Can only be\n * called as long as there are no existing sessions.\n *\n * The key difference with the .startBrowserSession() API is that restoreSessions is\n * used in cases where a session was hibernated and needs to be restored,\n * without _conceptually_ starting a new session.\n *\n * Because there are no side effects to restoreSession, it's synchronous.\n */\n public restoreSessions(\n sessions: {\n ticket: Ticket<SM, CM>;\n socket: IServerWebSocket;\n lastActivity: Date;\n }[]\n ): void {\n if (this.sessions.size > 0) {\n throw new Error(\"This API can only be called before any sessions exist\");\n }\n\n for (const { ticket, socket, lastActivity } of sessions) {\n const newSession = new BrowserSession(ticket, socket, this.#_debug);\n this.sessions.set(ticket.sessionKey, newSession);\n newSession.markActive(lastActivity);\n }\n }\n\n /**\n * Registers a new BrowserSession into the Room server's session list, along with\n * the socket connection to use for that BrowserSession, now that it is known.\n *\n * This kicks off a few side effects:\n * - Sends a ROOM_STATE message to the socket.\n * - Broadcasts a USER_JOINED message to all other sessions in the room.\n */\n public startBrowserSession(\n ticket: Ticket<SM, CM>,\n socket: IServerWebSocket,\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"One of your hook handlers returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to startBrowserSession() to collect async side effects.\"\n );\n }\n ): void {\n let existing: SessionKey | undefined;\n while (\n (existing = this.sessions.lookupPrimaryKey(ticket.actor)) !== undefined\n ) {\n // If this happens, it means a new connection attempt is happening for an\n // existing actor ID. It's most likely from a reconnection attempt using\n // a legacy token (which has the actor ID hardcoded in it), where the old\n // session hasn't been closed explicitly. 
We'll actively kill it now.\n\n // Terminate old session\n this.endBrowserSession(\n existing,\n CloseCode.KICKED,\n \"Closed stale connection\",\n ctx,\n defer\n );\n\n this.logger.warn(\n `Previous session for actor ${ticket.actor} killed in favor of new session`\n );\n }\n\n const newSession = new BrowserSession(ticket, socket, this.#_debug);\n this.sessions.set(ticket.sessionKey, newSession);\n\n const users: Record<ActorID, BaseUserMeta & { scopes: string[] }> = {};\n for (const session of this.otherSessions(ticket.sessionKey)) {\n users[session.actor] = {\n id: session.user.id,\n info: session.user.info,\n scopes: session.scopes,\n };\n }\n\n newSession.send(\n makeRoomStateMsg(\n newSession.actor,\n ticket.sessionKey, // called \"nonce\" in the protocol\n newSession.scopes,\n users,\n ticket.publicMeta\n )\n );\n\n this.sendToOthers(\n ticket.sessionKey,\n {\n type: ServerMsgCode.USER_JOINED,\n actor: newSession.actor,\n id: newSession.user.id,\n info: newSession.user.info,\n scopes: newSession.scopes,\n },\n ctx,\n defer\n );\n\n // Call the hook, but don't await the results here\n const p$ = this.hooks.onSessionDidStart?.(newSession, ctx);\n if (p$) defer(p$);\n }\n\n /**\n * Unregisters the BrowserSession for the given actor. Call this when the socket has\n * been closed from the client's end.\n *\n * This kicks off a few side effects:\n * - Broadcasts a USER_LEFT message to all other sessions in the room.\n */\n public endBrowserSession(\n key: SessionKey,\n code: number,\n reason: string,\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"Your onSessionDidEnd handler returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to endBrowserSession() to collect async side effects.\"\n );\n }\n ): void {\n const sessions = this.sessions;\n\n const session = sessions.get(key);\n if (session === undefined) return;\n\n session.closeSocket(code, reason);\n\n const deleted = sessions.delete(key);\n if (deleted) {\n for (const other of this.otherSessions(key)) {\n other.send({ type: ServerMsgCode.USER_LEFT, actor: session.actor });\n }\n\n // Call the hook\n const p$ = this.hooks.onSessionDidEnd?.(session, ctx);\n if (p$) defer(p$);\n }\n }\n\n /**\n * Force-closes all sessions matching the given predicate.\n */\n public endSessionBy(\n predicate: (session: BrowserSession<SM, CM>) => boolean,\n code: number,\n reason: string,\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"Your onSessionDidEnd handler returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to endSessionBy() to collect async side effects.\"\n );\n }\n ): number {\n let count = 0;\n for (const [key, session] of this.sessions) {\n if (predicate(session)) {\n count++;\n this.endBrowserSession(key, code, reason, ctx, defer);\n }\n }\n return count;\n }\n\n /**\n * Handles a raw incoming socket message, which can be a ping, or an\n * JSON-encoded message batch.\n */\n public async handleData(\n key: SessionKey,\n data: unknown,\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"One of your hook handlers returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to handleData() to collect async side effects.\"\n );\n }\n ): Promise<void> {\n const text =\n typeof data === \"string\" ? 
data : raise(\"Unsupported message format\");\n\n if (text === \"ping\") {\n await this.handlePing(key, ctx);\n } else {\n const json = tryParseJson(text);\n const messages = messagesDecoder.decode(json);\n\n if (!messages.ok) {\n const reason =\n process.env.NODE_ENV !== \"production\"\n ? formatInline(messages.error)\n : \"Invalid message format\";\n\n this.endBrowserSession(\n key,\n CloseCode.INVALID_MESSAGE_FORMAT,\n reason,\n ctx,\n defer\n );\n return;\n }\n\n // TODO: Decide on these limits later.\n // If qsize is > 0, then it means there is a traffic jam. This shouldn't\n // be a problem for a while, but it grows beyond a certain (soft or hard)\n // limit, we may want to take measures.\n if (this._qsize > 10_000) {\n // Over hard limit\n // TODO: Maybe disconnect this sockets with a 42xx close code? This\n // will make the client back off more aggressively. See\n // https://github.com/liveblocks/liveblocks/blob/223f7ce0d77380fecd3b08ed9454ca8c330bbe16/packages/liveblocks-core/src/types/IWebSocket.ts#L53\n } else if (this._qsize > 5_000) {\n // Over soft limit\n // TODO: Maybe instruct clients to increase their throttle values?\n }\n\n this._qsize++;\n\n // Run this.handleMsgs(), but guarded by a mutex lock, ensuring that no\n // two messages will get processed simultaneously. This provides similar\n // concurrency protection as Cloudflare's I/O gates\n try {\n await this.processClientMsg(key, messages.value, ctx);\n } finally {\n this._qsize--;\n }\n }\n }\n\n /**\n * Processes an incoming batch of 1 or more ClientMsgs on behalf of\n * a (regular user/browser) session.\n *\n * IMPORTANT: Only use this API on \"trusted\" data!\n * To handle untrusted input data, use `.handleData()` instead.\n *\n * Before calling this API, make sure:\n * 1. The call site is entitled to call this message on behalf of this session; and\n * 2. 
The ClientMsg payload has been validated to be correct.\n */\n public async processClientMsg(\n key: SessionKey,\n messages: ClientMsg[],\n ctx?: C\n ): Promise<void> {\n await this.load(ctx);\n const { defer, waitAll } = collectSideEffects();\n await this.mutex.runExclusive(() =>\n this._processClientMsg_withExclusiveAccess(key, messages, ctx, defer)\n );\n\n // Run all deferred work (like queueing messages, sending notifications,\n // etc) outside of the mutex\n await waitAll();\n }\n\n /**\n * Processes an incoming batch of 1 or more ClientMsgs on behalf of\n * a BACKEND session.\n *\n * Difference 1: HTTP RESPONSE instead of WEB SOCKET RESPONSE\n * ----------------------------------------------------------\n * For \"normal\" sessions that have a socket attached, any \"responses\" (i.e.\n * server messages like acks or fixops) will be sent back through that\n * existing socket connection.\n *\n * The key difference when using this method is that there is no such socket,\n * so any \"response\" ServerMsgs will get sent back as an HTTP response.\n *\n * Difference 2: No auth check\n * ---------------------------\n * Another key difference is that when processing a backend session, no\n * \"isClientMsgAllowed()\" check is performed, because those checks assume\n * a session.\n */\n public async processClientMsgFromBackendSession(\n session: BackendSession,\n messages: ClientMsg[],\n ctx?: C\n ): Promise<void> {\n await this.load(ctx);\n const { defer, waitAll } = collectSideEffects();\n await this.mutex.runExclusive(() =>\n this._processClientMsgFromBackendSession_withExclusiveAccess(\n session,\n messages,\n ctx,\n defer\n )\n );\n\n // Run all deferred work (like queueing messages, sending notifications,\n // etc) outside of the mutex\n await waitAll();\n }\n\n public getSession(\n sessionKey: SessionKey\n ): BrowserSession<SM, CM> | undefined {\n return this.sessions.get(sessionKey);\n }\n\n public listSessions(): BrowserSession<SM, CM>[] {\n return Array.from(this.sessions.values());\n }\n\n /**\n * Will send the given ServerMsg to all Sessions, except the Session\n * where the message originates from.\n */\n public sendToOthers(\n sender: SessionKey,\n serverMsg: ServerMsg | readonly ServerMsg[],\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"One of your hook handlers returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to sendToOthers() to collect async side effects.\"\n );\n }\n ): void {\n const msg = serialize(serverMsg);\n for (const [key, session] of this.otherSessionEntries(sender)) {\n const success = session.send(msg);\n if (success === 0) {\n // If there is a connection issue, terminate the session at once.\n // Note that in the case of -1 (= back pressure), we don't terminate\n // the connection.\n this.endBrowserSession(\n key,\n CloseCode.KICKED,\n \"Closed broken connection\",\n ctx,\n defer\n );\n }\n }\n }\n\n /**\n * Will broadcast the given ServerMsg to all Sessions in the Room.\n */\n public sendToAll(\n serverMsg: ServerMsg | readonly ServerMsg[],\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"One of your hook handlers returned a promise, but no side effect collector was provided. 
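The processClientMsg() path embedded above gathers async hook work through a { defer, waitAll } pair so that deferred side effects (notifications, queued messages) only run after the storage mutex is released. A minimal sketch of such a collector, assuming only the shape visible in this source; the package's own collectSideEffects() may differ:

    function collectSideEffects() {
      const pending: Promise<void>[] = [];
      return {
        // Queue a side effect while the mutex is still held.
        defer: (p: Promise<void>): void => {
          pending.push(p);
        },
        // Await everything that was queued, after the mutex is released.
        // allSettled is used in this sketch so one failing side effect does not hide the rest.
        waitAll: async (): Promise<void> => {
          await Promise.allSettled(pending);
        },
      };
    }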
\" +\n \"Pass a `defer` callback to sendToAll() to collect async side effects.\"\n );\n }\n ): void {\n const msg = serialize(serverMsg);\n for (const [key, session] of this.sessions) {\n const success = session.send(msg);\n if (success === 0) {\n // If there is a connection issue, terminate the session at once.\n // Note that in the case of -1 (= back pressure), we don't terminate\n // the connection.\n this.endBrowserSession(\n key,\n CloseCode.KICKED,\n \"Closed broken connection\",\n ctx,\n defer\n );\n }\n }\n }\n\n // ------------------------------------------------------------------------------------\n // Private APIs\n // ------------------------------------------------------------------------------------\n\n private async _loadStorage(): Promise<Storage> {\n const storage = new Storage(this.driver);\n await storage.load(this.logger);\n return storage;\n }\n\n private async _loadYjsStorage(): Promise<YjsStorage> {\n const yjsStorage = new YjsStorage(this.driver);\n await yjsStorage.load(this.logger);\n return yjsStorage;\n }\n\n // Don't ever manually call this!\n private async _load(ctx?: C): Promise<void> {\n await this.hooks.onRoomWillLoad?.(ctx);\n\n // YYY Maybe later run these in parallel? See https://github.com/liveblocks/liveblocks-cloudflare/pull/721#discussion_r1489076389\n const storage = await this._loadStorage();\n const yjsStorage = await this._loadYjsStorage();\n\n this._data = {\n mutex: new Mutex(),\n storage,\n yjsStorage,\n };\n\n await this.hooks.onRoomDidLoad?.(ctx);\n }\n\n /**\n * Returns a new, unique, actor ID.\n */\n private async getNextActor(): Promise<ActorID> {\n return (await this.driver.next_actor()) as ActorID;\n }\n\n /**\n * Iterates over all *other* Sessions and their session keys.\n */\n private *otherSessionEntries(\n currentKey: SessionKey\n ): Generator<[SessionKey, BrowserSession<SM, CM>]> {\n for (const [key, session] of this.sessions) {\n if (key !== currentKey) {\n yield [key, session];\n }\n }\n }\n\n /**\n * Iterates over all *other* Sessions.\n */\n private *otherSessions(\n currentKey: SessionKey\n ): Generator<BrowserSession<SM, CM>> {\n for (const [key, session] of this.sessions) {\n if (key !== currentKey) {\n yield session;\n }\n }\n }\n\n /**\n * @internal\n * Handles an incoming ping, by sending a pong back.\n */\n // eslint-disable-next-line @typescript-eslint/require-await\n private async handlePing(sessionKey: SessionKey, ctx?: C): Promise<void> {\n const session = this.sessions.get(sessionKey);\n if (session === undefined) {\n this.logger\n .withContext({ sessionKey })\n .warn(\"[probe] in handlePing, no such session exists\");\n return;\n }\n\n const sent = session.sendPong();\n\n // 0 means there was a connection issue\n // -1 means there was back pressure, which is no issue (we'll just count the ping)\n if (sent !== 0) {\n await this.hooks.onDidPong?.(ctx);\n }\n }\n\n private async _processClientMsg_withExclusiveAccess(\n sessionKey: SessionKey,\n messages: ClientMsg[],\n ctx: C | undefined,\n defer: (p: Promise<void>) => void\n ): Promise<void> {\n const session = this.sessions.get(sessionKey);\n if (!session) {\n this.logger\n .withContext({ sessionKey })\n .warn(\"[probe] in handleClientMsgs, no such session exists\");\n return;\n }\n\n // Keep two ServerMsg buffers to send at the end:\n // - Messages to fan-out to all *others* (current session not included)\n // - Messages to reply back to the current sender (i.e. 
acks and rejections)\n const toFanOut: ServerMsg[] = [];\n const toReply: ServerMsg[] = [];\n const replyImmediately = (msg: ServerMsg | ServerMsg[]) =>\n void session.send(msg);\n const scheduleFanOut = (msg: ServerMsg) => void toFanOut.push(msg);\n const scheduleReply = (msg: ServerMsg) => void toReply.push(msg);\n\n for (const msg of messages) {\n const isMsgAllowed = this.hooks.isClientMsgAllowed(msg, session);\n if (isMsgAllowed.allowed) {\n await this.handleOne(\n session,\n msg,\n replyImmediately,\n scheduleFanOut,\n scheduleReply,\n ctx,\n defer\n );\n } else {\n if (!session.hasNotifiedClientStorageUpdateError) {\n toReply.push({\n type: ServerMsgCode.REJECT_STORAGE_OP,\n opIds:\n msg.type === ClientMsgCode.UPDATE_STORAGE\n ? msg.ops.map((op) => op.opId)\n : [],\n reason: isMsgAllowed.reason,\n });\n session.setHasNotifiedClientStorageUpdateError();\n }\n }\n }\n\n if (toFanOut.length > 0) {\n this.sendToOthers(sessionKey, toFanOut, ctx, defer);\n }\n\n if (toReply.length > 0) {\n session.send(toReply);\n }\n }\n\n // TODO It's a bit bothering how much duplication there is between this method\n // and the _processClientMsg_withExclusiveAccess version. A better\n // abstraction is needed.\n private async _processClientMsgFromBackendSession_withExclusiveAccess(\n session: BackendSession,\n messages: ClientMsg[],\n ctx: C | undefined,\n defer: (p: Promise<void>) => void\n ): Promise<void> {\n // Keep two ServerMsg buffers to send at the end:\n // - Messages to fan-out to all *others* (current session not included)\n // - Messages to reply back to the current sender (i.e. acks and rejections)\n const toFanOut: ServerMsg[] = [];\n const toReplyImmediately: ServerMsg[] = [];\n const toReplyAfter: ServerMsg[] = [];\n\n const replyImmediately = (msg: ServerMsg | ServerMsg[]) => {\n if (Array.isArray(msg)) {\n for (const m of msg) {\n toReplyImmediately.push(m);\n }\n } else {\n toReplyImmediately.push(msg);\n }\n };\n const scheduleFanOut = (msg: ServerMsg) => void toFanOut.push(msg);\n const scheduleReply = (msg: ServerMsg) => void toReplyAfter.push(msg);\n\n for (const msg of messages) {\n await this.handleOne(\n session,\n msg,\n replyImmediately,\n scheduleFanOut,\n scheduleReply,\n ctx,\n defer\n );\n }\n\n if (toReplyImmediately.length > 0) {\n session.send(toReplyImmediately);\n toReplyImmediately.length = 0;\n }\n\n if (toFanOut.length > 0) {\n this.sendToOthers(\"(transient)\" as SessionKey, toFanOut, ctx, defer);\n toFanOut.length = 0;\n }\n\n if (toReplyAfter.length > 0) {\n session.send(toReplyAfter);\n toReplyAfter.length = 0;\n }\n }\n\n private async handleOne(\n session: BrowserSession<SM, CM>,\n msg: ClientMsg,\n replyImmediately: (msg: ServerMsg | ServerMsg[]) => void,\n scheduleFanOut: (msg: ServerMsg) => void,\n scheduleReply: (msg: ServerMsg) => void,\n ctx: C | undefined,\n defer: (p: Promise<void>) => void\n ): Promise<void> {\n if (!this.mutex.isLocked()) {\n throw new Error(\"Handling messages requires exclusive access\");\n }\n\n switch (msg.type) {\n case ClientMsgCode.UPDATE_PRESENCE: {\n // YYY Maybe consider calling session.sendToOthers() directly here instead of queueing for fan-out?\n scheduleFanOut({\n type: ServerMsgCode.UPDATE_PRESENCE,\n actor: session.actor,\n data: msg.data,\n targetActor: msg.targetActor,\n });\n break;\n }\n\n case ClientMsgCode.BROADCAST_EVENT: {\n // YYY Maybe consider calling session.sendToOthers() directly here instead of queueing for fan-out?\n scheduleFanOut({\n type: ServerMsgCode.BROADCASTED_EVENT,\n actor: session.actor,\n 
event: msg.event,\n });\n break;\n }\n\n case ClientMsgCode.FETCH_STORAGE: {\n if (session.version >= ProtocolVersion.V8) {\n if (this.#_allowStreaming) {\n const NODES_PER_CHUNK = 250; // = arbitrary! Could be tuned later\n\n for (const chunk of chunked(\n nodeStreamToCompactNodes(this.storage.loadedDriver.iter_nodes()),\n NODES_PER_CHUNK\n )) {\n // NOTE: We don't take a storage snapshot here, because this\n // iteration is happening synchronously, so consistency of the\n // current document automatically guaranteed. If we ever make\n // this streaming asynchronous, however, we need to take\n // a storage snapshot to guarantee document consistency.\n replyImmediately({\n type: ServerMsgCode.STORAGE_CHUNK,\n nodes: chunk,\n });\n }\n } else {\n replyImmediately({\n type: ServerMsgCode.STORAGE_CHUNK,\n nodes: Array.from(\n nodeStreamToCompactNodes(this.storage.loadedDriver.iter_nodes())\n ),\n });\n }\n\n replyImmediately({ type: ServerMsgCode.STORAGE_STREAM_END });\n } else {\n replyImmediately({\n type: ServerMsgCode.STORAGE_STATE_V7,\n items: Array.from(this.storage.loadedDriver.iter_nodes()),\n });\n }\n break;\n }\n\n case ClientMsgCode.UPDATE_STORAGE: {\n // Bump storage version to indicate data will get mutated\n // A driver can use this information to implement copy-on-write\n // semantics to provide snapshot isolation.\n this.driver.bump_storage_version?.();\n\n const result = await this.storage.applyOps(msg.ops);\n\n const opsToForward: ServerWireOp[] = result.flatMap((r) =>\n r.action === \"accepted\" ? [r.op] : []\n );\n\n const opsToSendBack: ServerWireOp[] = result.flatMap((r) => {\n switch (r.action) {\n case \"ignored\":\n // HACK! We send a cleverly composed message, that will act\n // as an acknowledgement to all old clients out there in\n // the wild.\n return r.ignoredOpId !== undefined\n ? [ackIgnoredOp(r.ignoredOpId)]\n : [];\n\n case \"accepted\":\n return r.fix !== undefined ? [r.fix] : [];\n\n // istanbul ignore next\n default:\n return assertNever(r, \"Unhandled case\");\n }\n });\n\n if (opsToForward.length > 0) {\n scheduleFanOut({\n type: ServerMsgCode.UPDATE_STORAGE,\n ops: opsToForward.map(stripOpId),\n });\n scheduleReply({\n type: ServerMsgCode.UPDATE_STORAGE,\n ops: opsToForward,\n });\n }\n\n if (opsToSendBack.length > 0) {\n replyImmediately({\n type: ServerMsgCode.UPDATE_STORAGE,\n ops: opsToSendBack,\n });\n }\n\n if (opsToForward.length > 0) {\n // NOTE! These are being called after *every* handleOne() call\n // currently. 
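The FETCH_STORAGE branch embedded above streams the storage tree in batches of 250 compact nodes per STORAGE_CHUNK message. A sketch of a chunking generator with the shape used there, assuming the package's internal chunked() helper behaves roughly like this:

    // Split a (possibly large) iterable into arrays of at most `size` items,
    // so each outgoing STORAGE_CHUNK message stays bounded in size.
    function* chunked<T>(items: Iterable<T>, size: number): Generator<T[]> {
      let buf: T[] = [];
      for (const item of items) {
        buf.push(item);
        if (buf.length === size) {
          yield buf;
          buf = [];
        }
      }
      if (buf.length > 0) yield buf; // emit the final, possibly smaller, chunk
    }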
Should we not just call these once at the end of\n // handleClientMsgs()?\n const p$ = this.hooks.postClientMsgStorageDidUpdate?.(ctx);\n if (p$) defer(p$);\n }\n break;\n }\n\n case ClientMsgCode.FETCH_YDOC: {\n const vector = msg.vector;\n const guid = msg.guid as Guid | undefined;\n const isV2 = msg.v2;\n const [update, stateVector, snapshotHash] = await Promise.all([\n this.yjsStorage.getYDocUpdate(this.logger, vector, guid, isV2),\n this.yjsStorage.getYStateVector(guid),\n this.yjsStorage.getSnapshotHash({ guid, isV2 }),\n ]);\n\n if (update !== null && snapshotHash !== null) {\n replyImmediately({\n type: ServerMsgCode.UPDATE_YDOC,\n update,\n isSync: true, // this is no longer used by the client, instead we use the presence of stateVector\n stateVector,\n guid,\n v2: isV2,\n remoteSnapshotHash: snapshotHash,\n });\n }\n break;\n }\n\n case ClientMsgCode.UPDATE_YDOC: {\n const update = msg.update;\n const guid = msg.guid as Guid | undefined;\n const isV2 = msg.v2;\n const [result, error] = await tryCatch(\n this.yjsStorage.addYDocUpdate(this.logger, update, guid, isV2)\n );\n\n if (error)\n // Ignore any errors\n break;\n\n this.sendToAll(\n {\n type: ServerMsgCode.UPDATE_YDOC,\n update,\n guid,\n isSync: false,\n stateVector: null,\n v2: isV2,\n remoteSnapshotHash: result.snapshotHash,\n },\n ctx,\n defer\n );\n if (result.isUpdated) {\n const p$ = this.hooks.postClientMsgYdocDidUpdate?.(ctx, session);\n if (p$) defer(p$);\n }\n\n break;\n }\n\n default: {\n try {\n return assertNever(msg, \"Unrecognized client msg\");\n } catch {\n // Ignore\n }\n }\n }\n }\n}\n\nexport { serialize as serializeServerMsg };\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { JsonObject } from \"@liveblocks/core\";\nimport { raise } from \"@liveblocks/core\";\n\nexport enum LogLevel {\n DEBUG = 0,\n INFO = 1,\n WARNING = 2,\n ERROR = 3,\n}\n\nfunction formatError(err: Error) {\n const prefix = `${err.name}: ${err.message}`;\n return (\n err.stack?.startsWith(prefix) ? err.stack : `${prefix}\\n${err.stack ?? \"\"}`\n ).trimEnd();\n}\n\n/**\n * Inherit from this abstract log target to implement your own custom\n * LogTarget.\n */\nexport abstract class LogTarget {\n public readonly level: LogLevel;\n\n #cache = new WeakMap<JsonObject, string>();\n\n constructor(level: LogLevel | keyof typeof LogLevelNames = LogLevel.INFO) {\n this.level =\n typeof level === \"number\"\n ? level\n : (LogLevelNames[level] ?? 
LogLevel.INFO);\n }\n\n /** Helper for formatting a log level */\n protected formatLevel(level: LogLevel): string {\n switch (level) {\n case LogLevel.DEBUG:\n return \"debug\";\n case LogLevel.INFO:\n return \"info\";\n case LogLevel.WARNING:\n return \"warn\";\n case LogLevel.ERROR:\n return \"error\";\n default:\n return raise(\"Invalid log level\");\n }\n }\n\n /** Helper for formatting an Arg */\n protected formatArg(arg: string | Error): string {\n return typeof arg === \"object\"\n ? arg instanceof Error\n ? formatError(arg)\n : JSON.stringify(arg)\n : String(arg); // Coerce to string in case TypeScript is bypassed\n }\n\n /**\n * Helper for formatting a Context. Override this in a subclass to change the\n * formatting.\n */\n protected formatContextImpl(context: JsonObject): string {\n const parts = [];\n for (const [k, v] of Object.entries(context ?? {})) {\n if (v !== undefined) {\n // Object, or null, or array\n const sv = typeof v === \"object\" ? JSON.stringify(v) : v;\n parts.push(`${k}=${sv}`);\n }\n }\n return parts.length > 0 ? `[${parts.join(\" \")}]` : \"\";\n }\n\n /**\n * Helper for formatting a Context. Will only compute the string once for\n * every Context instance, and keep its computed string value cached for\n * performance.\n */\n protected formatContext(context: JsonObject): string {\n let formatted = this.#cache.get(context);\n if (formatted === undefined) {\n formatted = this.formatContextImpl(context);\n this.#cache.set(context, formatted);\n }\n return formatted;\n }\n\n /**\n * Implement this in a concrete subclass. The goal is to do whatever to log\n * the given log level, context, and log arg. You'll typically want to\n * utilize the pre-defined helper methods .formatContext() and .formatArg()\n * to implement this.\n */\n abstract log(level: LogLevel, context: JsonObject, arg: string | Error): void;\n}\n\n//\n// Console log target ----------------------------------------------------------\n//\n\nconst CONSOLE_METHOD = {\n [LogLevel.DEBUG]: \"info\",\n [LogLevel.INFO]: \"info\",\n [LogLevel.WARNING]: \"warn\",\n [LogLevel.ERROR]: \"error\",\n} as const;\n\nexport class ConsoleTarget extends LogTarget {\n log(level: LogLevel, context: JsonObject, arg: string | Error): void {\n console[CONSOLE_METHOD[level]](\n this.formatArg(arg),\n this.formatContext(context)\n );\n }\n}\n\n//\n// Logger implementation ------------------------------------------------------\n//\n\n// Friendly names to pass to the constructor\nconst LogLevelNames = {\n debug: LogLevel.DEBUG,\n info: LogLevel.INFO,\n warning: LogLevel.WARNING,\n error: LogLevel.ERROR,\n} as const;\n\ntype LogFn = (arg: string | Error) => void;\n\n/**\n * Structured logger with configurable log targets.\n */\nexport class Logger {\n public readonly debug: LogFn;\n public readonly info: LogFn;\n public readonly warn: LogFn;\n public readonly error: LogFn;\n\n public readonly o: {\n readonly debug?: LogFn;\n readonly info?: LogFn;\n readonly warn?: LogFn;\n readonly error?: LogFn;\n };\n\n private readonly _context: JsonObject;\n private readonly _targets: readonly LogTarget[];\n\n constructor(\n target: LogTarget | readonly LogTarget[] = new ConsoleTarget(),\n context: JsonObject = {}\n ) {\n this._context = context;\n this._targets = Array.isArray(target) ? 
target : [target];\n\n const minLevel: number = Math.min(...this._targets.map((t) => t.level));\n\n const noop = () => {};\n const makeLogFn = (lvl: LogLevel) => (arg: string | Error) =>\n this._targets.forEach((target) => {\n if (target.level <= lvl) {\n target.log(lvl, this._context, arg);\n }\n });\n\n this.o = {\n /* eslint-disable @typescript-eslint/no-unsafe-enum-comparison */\n debug: minLevel <= LogLevel.DEBUG ? makeLogFn(LogLevel.DEBUG) : undefined,\n info: minLevel <= LogLevel.INFO ? makeLogFn(LogLevel.INFO) : undefined,\n warn:\n minLevel <= LogLevel.WARNING ? makeLogFn(LogLevel.WARNING) : undefined,\n error: minLevel <= LogLevel.ERROR ? makeLogFn(LogLevel.ERROR) : undefined,\n /* eslint-enable @typescript-eslint/no-unsafe-enum-comparison */\n };\n\n this.debug = this.o.debug ?? noop;\n this.info = this.o.info ?? noop;\n this.warn = this.o.warn ?? noop;\n this.error = this.o.error ?? noop;\n }\n\n /**\n * Creates a new Logger instance with the given extra context applied. All\n * log calls made from that new Logger will carry all current _and_ the extra\n * context, with the extra context taking precedence. Assign an explicit\n * `undefined` value to a key to \"remove\" it from the context.\n */\n withContext(extra: JsonObject): Logger {\n const combined: JsonObject = { ...this._context, ...extra };\n return new Logger(this._targets, combined);\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
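The Logger embedded above fans each call out to one or more LogTargets and attaches structured context via withContext(). A hypothetical call site, assuming these classes were importable from the package internals:

    // Logs at "warning" level and above to the console.
    const baseLogger = new Logger(new ConsoleTarget("warning"));

    // The derived logger carries extra context on every line it emits.
    const roomLogger = baseLogger.withContext({ roomId: "my-room" });

    roomLogger.warn("slow storage load");  // printed as: slow storage load [roomId=my-room]
    roomLogger.info("presence updated");   // dropped: below the ConsoleTarget's level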
If not, see <https://www.gnu.org/licenses/>.\n */\n\n/* eslint-disable @typescript-eslint/explicit-module-boundary-types */\n/* eslint-disable @typescript-eslint/require-await */\nimport type {\n Json,\n JsonObject,\n NodeMap,\n NodeStream,\n PlainLsonObject,\n SerializedChild,\n SerializedCrdt,\n SerializedObject,\n SerializedRootObject,\n} from \"@liveblocks/core\";\nimport { asPos, CrdtType, isRootStorageNode, nn } from \"@liveblocks/core\";\nimport { ifilter, imap } from \"itertools\";\n\nimport type { YDocId } from \"~/decoders/y-types\";\nimport { plainLsonToNodeStream } from \"~/formats/PlainLson\";\nimport type {\n IReadableSnapshot,\n IStorageDriver,\n IStorageDriverNodeAPI,\n} from \"~/interfaces\";\nimport { NestedMap } from \"~/lib/NestedMap\";\nimport { quote } from \"~/lib/text\";\nimport { makeInMemorySnapshot } from \"~/makeInMemorySnapshot\";\nimport type { Pos } from \"~/types\";\n\nfunction buildRevNodes(nodeStream: NodeStream) {\n const result = new NestedMap<string, string, string>();\n for (const node of nodeStream) {\n if (isRootStorageNode(node)) continue;\n\n // Highest node id wins in case of conflict (deterministic across backends)\n const [id, crdt] = node;\n const existing = result.get(crdt.parentId, crdt.parentKey);\n if (existing === undefined || id > existing) {\n result.set(crdt.parentId, crdt.parentKey, id);\n }\n }\n return result;\n}\n\n/**\n * Builds the reverse node index, and corrects any data corruption found\n * along the way.\n */\nfunction buildReverseLookup(nodes: NodeMap) {\n const revNodes = buildRevNodes(nodes as NodeStream);\n\n const queue: string[] = [\"root\"];\n const reachableNodes: Set<string> = new Set();\n\n while (queue.length > 0) {\n const nodeId = queue.pop()!;\n const node = nn(nodes.get(nodeId));\n\n if (node.type === CrdtType.OBJECT) {\n for (const key of revNodes.keysAt(nodeId)) {\n delete node.data[key]; // Remove static data that conflicts with child nodes\n }\n }\n\n if (node.type !== CrdtType.REGISTER) {\n queue.push(...revNodes.valuesAt(nodeId));\n } else {\n const parent = nodes.get(node.parentId);\n if (parent?.type === CrdtType.OBJECT) {\n continue;\n }\n }\n\n reachableNodes.add(nodeId);\n }\n\n // Delete unreachable nodes (safe to delete from Map during iteration)\n let deletedCount = 0;\n for (const [id] of nodes) {\n if (!reachableNodes.has(id)) {\n nodes.delete(id);\n deletedCount++;\n }\n }\n\n // If no nodes were dropped (99% happy path), revNodes is correct already.\n // Otherwise, recompute it.\n return deletedCount === 0 ? revNodes : buildRevNodes(nodes as NodeStream);\n}\n\nfunction hasStaticDataAt(\n node: SerializedCrdt,\n key: string\n): node is SerializedObject | SerializedRootObject {\n return (\n node.type === CrdtType.OBJECT &&\n Object.prototype.hasOwnProperty.call(node.data, key) &&\n node.data[key] !== undefined\n );\n}\n\n/**\n * Implements the most basic in-memory store. Used if no explicit store is\n * provided.\n */\nexport class InMemoryDriver implements IStorageDriver {\n private _nextActor;\n private _nodes: NodeMap;\n private _metadb: Map<string, Json>;\n private _ydb: Map<string, Uint8Array>;\n\n constructor(options?: {\n initialActor?: number;\n initialNodes?: Iterable<[string, SerializedCrdt]>;\n }) {\n this._nodes = new Map();\n this._metadb = new Map();\n this._ydb = new Map();\n\n this._nextActor = options?.initialActor ?? -1;\n\n for (const [key, value] of options?.initialNodes ?? 
[]) {\n this._nodes.set(key, value);\n }\n }\n\n raw_iter_nodes() {\n return this._nodes[Symbol.iterator]();\n }\n\n /** Deletes all nodes and replaces them with the given document. */\n DANGEROUSLY_reset_nodes(doc: PlainLsonObject) {\n this._nodes.clear();\n for (const [id, node] of plainLsonToNodeStream(doc)) {\n this._nodes.set(id, node);\n }\n }\n\n async get_meta(key: string) {\n return this._metadb.get(key);\n }\n async put_meta(key: string, value: Json) {\n this._metadb.set(key, value);\n }\n async delete_meta(key: string) {\n this._metadb.delete(key);\n }\n\n next_actor() {\n return ++this._nextActor;\n }\n\n async iter_y_updates(docId: YDocId) {\n const prefix = `${docId}@|@`;\n return imap(\n ifilter(this._ydb.entries(), ([k]) => k.startsWith(prefix)),\n ([k, v]) => [k.slice(prefix.length), v] as [string, Uint8Array]\n );\n }\n async write_y_updates(docId: YDocId, key: string, data: Uint8Array) {\n this._ydb.set(`${docId}@|@${key}`, data);\n }\n async delete_y_updates(docId: YDocId, keys: string[]) {\n for (const key of keys) {\n this._ydb.delete(`${docId}@|@${key}`);\n }\n }\n\n /** @private Only use this in unit tests, never in production. */\n async DANGEROUSLY_wipe_all_y_updates() {\n this._ydb.clear();\n }\n\n // Intercept load_nodes_api to add caching layer\n load_nodes_api(): IStorageDriverNodeAPI {\n // For the in-memory backend, this._nodes IS the \"on-disk\" storage,\n // so we operate on it directly (no separate cache needed).\n const nodes = this._nodes;\n if (!nodes.has(\"root\")) {\n nodes.set(\"root\", { type: CrdtType.OBJECT, data: {} });\n }\n\n const revNodes = buildReverseLookup(nodes);\n\n function get_next_sibling(parentId: string, pos: Pos): Pos | undefined {\n let nextPos: Pos | undefined;\n // Find the smallest position greater than current\n for (const siblingKey of revNodes.keysAt(parentId)) {\n const siblingPos = asPos(siblingKey);\n if (\n siblingPos > pos &&\n (nextPos === undefined || siblingPos < nextPos)\n ) {\n nextPos = siblingPos;\n }\n }\n return nextPos;\n }\n\n /**\n * Inserts a node in the storage tree, deleting any nodes that already exist\n * under this key (including all of its children), if any.\n */\n async function set_child(\n id: string,\n node: SerializedChild,\n allowOverwrite = false\n ): Promise<void> {\n const parentNode = nodes.get(node.parentId);\n // Reject orphans - parent must exist\n if (parentNode === undefined) {\n throw new Error(`No such parent ${quote(node.parentId)}`);\n }\n\n if (\n node.type === CrdtType.REGISTER &&\n parentNode.type === CrdtType.OBJECT\n ) {\n throw new Error(\"Cannot add register under object\");\n }\n\n const conflictingSiblingId = revNodes.get(node.parentId, node.parentKey);\n if (conflictingSiblingId !== id) {\n // Conflict!\n const parentNode = nodes.get(node.parentId);\n const hasConflictingData =\n parentNode !== undefined &&\n hasStaticDataAt(parentNode, node.parentKey);\n if (conflictingSiblingId !== undefined || hasConflictingData) {\n if (allowOverwrite) {\n delete_child_key(node.parentId, node.parentKey);\n } else {\n throw new Error(`Key ${quote(node.parentKey)} already exists`); // prettier-ignore\n }\n }\n\n // Finally, modify revNodes\n revNodes.set(node.parentId, node.parentKey, id);\n }\n\n nodes.set(id, node);\n }\n\n /**\n * Conceptually this is like \"detaching\" the node from its parent, and\n * \"reattaching\" it at the new position.\n *\n * However, this is a native operation, because doing a naive\n * delete-then-insert would would immediately destroy all (grand)children\n 
* when it's deleted.\n */\n async function move_sibling(id: string, newPos: Pos): Promise<void> {\n const node = nodes.get(id);\n if (node?.parentId === undefined) {\n return;\n }\n\n // If there is a conflicting sibling at the new position, disallow the move\n if (revNodes.has(node.parentId, newPos))\n throw new Error(`Pos ${quote(newPos)} already taken`); // prettier-ignore\n\n revNodes.delete(node.parentId, node.parentKey);\n const newNode = { ...node, parentKey: newPos };\n nodes.set(id, newNode);\n revNodes.set(node.parentId, newPos, id);\n }\n\n /**\n * Sets some static data on a node. The node must be an OBJECT node, or this\n * method will be a no-op.\n *\n * If any keys exist that also conflict with a child node, then the conflict\n * mode will determine what will happen. By default, an error will be thrown.\n * But if `allowOverwrite` is set to true, the conflicting child node (and\n * its entire subtree) will be deleted to make room for the new static data.\n */\n async function set_object_data(\n id: string,\n data: JsonObject,\n allowOverwrite = false\n ): Promise<void> {\n const node = nodes.get(id);\n if (node?.type !== CrdtType.OBJECT) {\n // Nothing to do\n return;\n }\n\n for (const key of Object.keys(data)) {\n // Handle if conflict!\n const childId = revNodes.get(id, key);\n if (childId !== undefined) {\n if (allowOverwrite) {\n delete_node(childId);\n } else {\n throw new Error(`Child node already exists under ${quote(key)}`); // prettier-ignore\n }\n }\n }\n\n nodes.set(id, { ...node, data: { ...node.data, ...data } });\n }\n\n /**\n * Delete a node from the tree, including all of its children.\n */\n function delete_node(id: string): void {\n const node = nodes.get(id);\n if (node?.parentId === undefined) {\n return;\n }\n\n // Delete the entry in the parent's children administration for this node\n revNodes.delete(node.parentId, node.parentKey);\n\n // Now proceed to deleting the node tree recursively\n const queue = [id];\n while (queue.length > 0) {\n const currid = queue.pop()!;\n queue.push(...revNodes.valuesAt(currid));\n nodes.delete(currid);\n revNodes.deleteAll(currid);\n }\n }\n\n /**\n * Deletes the child key under a given node, whether it's a static object\n * field, or a child node.\n */\n function delete_child_key(id: string, key: string): void {\n // At most one of these will do something, the other is a no-op\n const node = nodes.get(id);\n if (node !== undefined && hasStaticDataAt(node, key)) {\n const { [key]: _, ...rest } = node.data;\n nodes.set(id, { ...node, data: rest });\n }\n\n const childId = revNodes.get(id, key);\n if (childId !== undefined) {\n delete_node(childId);\n }\n }\n\n const api: IStorageDriverNodeAPI = {\n /**\n * Return the node with the given id, or undefined if no such node exists.\n * Must always return a valid root node for id=\"root\", even if empty.\n */\n get_node: (id) => nodes.get(id),\n\n /**\n * Yield all nodes as [id, node] pairs. Must always include the root node.\n */\n iter_nodes: () => nodes as NodeStream,\n\n /**\n * Return true iff a node with the given id exists. Must return true for \"root\".\n */\n has_node: (id) => nodes.has(id),\n\n /**\n * Return the id of the child node at (parentId, parentKey), or undefined if\n * none. Only checks child nodes registered via set_child, NOT static data\n * keys on OBJECT nodes.\n */\n get_child_at: (id, key) => revNodes.get(id, key),\n\n /**\n * Return true iff a child node exists at (parentId, parentKey). 
Static data\n * keys on OBJECT nodes do not count—return false for those.\n */\n has_child_at: (id, key) => revNodes.has(id, key),\n\n /**\n * Return the position of the closest sibling \"to the right\" of `pos` under\n * parentId, or undefined if no such sibling exists. The given `pos` may, but\n * does not have to exist already. Positions compare lexicographically.\n */\n get_next_sibling,\n\n /**\n * Insert a child node with the given id.\n *\n * If allowOverwrite=false (default): throw if a node with this id exists.\n * If allowOverwrite=true: replace any existing node at this id, deleting its\n * entire subtree if it has children.\n */\n set_child,\n\n /**\n * Change a node's parentKey, effectively repositioning the node within its\n * parent. The new position must be free.\n * Throw if another node already occupies (parentId, newPos).\n */\n move_sibling,\n\n /**\n * Delete a node and its entire subtree recursively.\n * Ignore if id=\"root\" (root is immortal).\n */\n delete_node,\n\n /**\n * Delete a key from node `id`. Handle two cases:\n *\n * 1. If id is an OBJECT with `key` in its data: remove that data field.\n * 2. If a child exists at (id, key): delete that child and all its\n * descendants recursively.\n *\n * No-op if neither applies or if the node doesn't exist.\n */\n delete_child_key,\n\n /**\n * Replace the data object of an OBJECT node.\n *\n * If allowOverwrite=false (default): throw if any key in `data` conflicts\n * with an existing child's parentKey.\n * If allowOverwrite=true: first delete any conflicting children (and their\n * entire subtrees), then set the data.\n */\n set_object_data,\n\n /**\n * Return a readable snapshot of the storage tree.\n *\n * @param lowMemory When true, the call site hints that the snapshot should\n * be optimized for lower memory consumption, even if that means slower\n * access.\n */\n get_snapshot(_lowMemory?: boolean): IReadableSnapshot {\n return makeInMemorySnapshot(nodes);\n },\n };\n return api;\n }\n}\n\nexport function makeNewInMemoryDriver(options?: {\n initialActor?: number;\n initialNodes?: Iterable<[string, SerializedCrdt]>;\n}): IStorageDriver {\n return new InMemoryDriver(options);\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\n/**\n * Wraps single-quotes around any string value. Useful for displaying field\n * names or other identifiers in error messages or logs.\n *\n * Examples:\n * quote(\"hi\") // \"'hi'\"\n * quote(\"i'm\") // \"'i'm'\"\n *\n * Note: no \"escaping\" happens here to the string value. This is because this\n * is intended to be used for human consumption, not machine consumption.\n */\nexport function quote(value: string | undefined): string {\n return value !== undefined ? 
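makeNewInMemoryDriver() shown above is what a Room falls back to when no storage option is provided. A hypothetical wiring example, assuming Room and InMemoryDriver were importable and the code runs in an async context:

    // Type parameters: Room metadata, session metadata, client metadata.
    const room = new Room<{ name: string }, unknown, JsonObject>(
      { name: "demo" },
      { storage: new InMemoryDriver({ initialActor: 0 }) }
    );

    await room.load();              // loads Storage and YjsStorage; repeated calls reuse the same promise
    console.log(room.loadingState); // "loaded"
    console.log(room.numSessions);  // 0 until startBrowserSession() registers a session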
`'${value}'` : \"???\";\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type {\n Awaitable,\n SerializedChild,\n SerializedCrdt,\n} from \"@liveblocks/core\";\nimport {\n asPos,\n assertNever,\n CrdtType,\n makePosition,\n OpCode,\n} from \"@liveblocks/core\";\n\nimport type { IStorageDriver, IStorageDriverNodeAPI } from \"~/interfaces\";\nimport type { Logger } from \"~/lib/Logger\";\nimport type {\n ClientWireOp,\n CreateOp,\n DeleteCrdtOp,\n DeleteObjectKeyOp,\n FixOp,\n HasOpId,\n SetParentKeyOp,\n UpdateObjectOp,\n} from \"~/protocol\";\nimport type { Pos } from \"~/types\";\n\ntype ApplyOpResult = OpAccepted | OpIgnored;\n\nexport type OpAccepted = {\n action: \"accepted\";\n op: ClientWireOp;\n fix?: FixOp;\n};\n\nexport type OpIgnored = {\n action: \"ignored\";\n ignoredOpId?: string;\n};\n\nfunction accept(op: ClientWireOp, fix?: FixOp): OpAccepted {\n return { action: \"accepted\", op, fix };\n}\n\nfunction ignore(ignoredOp: ClientWireOp): OpIgnored {\n return { action: \"ignored\", ignoredOpId: ignoredOp.opId };\n}\n\nfunction nodeFromCreateChildOp(op: CreateOp): SerializedChild {\n switch (op.type) {\n case OpCode.CREATE_LIST:\n return {\n type: CrdtType.LIST,\n parentId: op.parentId,\n parentKey: op.parentKey,\n };\n\n case OpCode.CREATE_MAP:\n return {\n type: CrdtType.MAP,\n parentId: op.parentId,\n parentKey: op.parentKey,\n };\n\n case OpCode.CREATE_OBJECT:\n return {\n type: CrdtType.OBJECT,\n parentId: op.parentId,\n parentKey: op.parentKey,\n data: op.data,\n };\n\n case OpCode.CREATE_REGISTER:\n return {\n type: CrdtType.REGISTER,\n parentId: op.parentId,\n parentKey: op.parentKey,\n data: op.data,\n };\n\n // istanbul ignore next\n default:\n return assertNever(op, \"Unknown op code\");\n }\n}\n\nexport class Storage {\n // The actual underlying storage API (could be backed by in-memory store,\n // SQLite, Redis, Postgres, Cloudflare Durable Object Storage, etc.)\n private readonly coreDriver: IStorageDriver;\n private _loadedDriver: IStorageDriverNodeAPI | undefined;\n\n constructor(coreDriver: IStorageDriver) {\n this.coreDriver = coreDriver;\n }\n\n // -------------------------------------------------------------------------\n // Public API (for Storage)\n // -------------------------------------------------------------------------\n\n get loadedDriver(): IStorageDriverNodeAPI {\n if (this._loadedDriver === undefined) {\n throw new Error(\"Cannot access tree before it's been loaded\");\n }\n return this._loadedDriver;\n }\n\n // REFACTOR NOTE: Eventually raw_iter_nodes has to be removed here\n raw_iter_nodes(): Awaitable<Iterable<[string, SerializedCrdt]>> {\n return this.coreDriver.raw_iter_nodes();\n }\n\n /**\n * Load the room data from object storage into memory. 
Persisted room\n * data consists of the main node map, which represents the Liveblocks\n * Storage tree, and special keys where we store usage metrics, or room\n * metadata.\n */\n async load(logger: Logger): Promise<void> {\n this._loadedDriver = await this.coreDriver.load_nodes_api(logger);\n }\n\n unload(): void {\n this._loadedDriver = undefined;\n }\n\n /**\n * Applies a batch of Ops.\n */\n async applyOps(ops: ClientWireOp[]): Promise<ApplyOpResult[]> {\n const results: ApplyOpResult[] = [];\n for (const op of ops) {\n results.push(await this.applyOp(op));\n }\n return results;\n }\n\n // -------------------------------------------------------------------------\n // Private APIs (for Storage)\n // -------------------------------------------------------------------------\n\n /**\n * Applies a single Op.\n */\n private async applyOp(op: ClientWireOp): Promise<ApplyOpResult> {\n switch (op.type) {\n case OpCode.CREATE_LIST:\n case OpCode.CREATE_MAP:\n case OpCode.CREATE_REGISTER:\n case OpCode.CREATE_OBJECT:\n return await this.applyCreateOp(op);\n\n case OpCode.UPDATE_OBJECT:\n return await this.applyUpdateObjectOp(op);\n\n case OpCode.SET_PARENT_KEY:\n return await this.applySetParentKeyOp(op);\n\n case OpCode.DELETE_OBJECT_KEY:\n return await this.applyDeleteObjectKeyOp(op);\n\n case OpCode.DELETE_CRDT:\n return await this.applyDeleteCrdtOp(op);\n\n // istanbul ignore next\n default:\n if (process.env.NODE_ENV === \"production\") {\n return ignore(op);\n } else {\n return assertNever(op, \"Invalid op\");\n }\n }\n }\n\n private async applyCreateOp(op: CreateOp & HasOpId): Promise<ApplyOpResult> {\n if (this.loadedDriver.has_node(op.id)) {\n // Node already exists, the operation is ignored\n return ignore(op);\n }\n\n const node = nodeFromCreateChildOp(op);\n\n const parent = this.loadedDriver.get_node(node.parentId);\n if (parent === undefined) {\n // Parent does not exist because the op is invalid or because it was deleted in race condition.\n return ignore(op);\n }\n\n // How to create this node in the node map depends on the parent node's type\n switch (parent.type) {\n case CrdtType.OBJECT:\n // Register children under object nodes are forbidden. We'll simply\n // ignore these Ops. This matches the eventual storage behavior: if\n // we'd persist them, they would get ignored when re-loading the\n // persisted room data into memory the next time the room loads.\n if (op.type === OpCode.CREATE_REGISTER) {\n return ignore(op);\n }\n // fall through\n\n case CrdtType.MAP:\n // Children of maps and objects require no special needs\n await this.loadedDriver.set_child(op.id, node, true);\n return accept(op);\n\n case CrdtType.LIST:\n // List items need special handling around conflicting resolution,\n // which depends on the users intention\n return this.createChildAsListItem(op, node);\n\n case CrdtType.REGISTER:\n // It's illegal for registers to have children\n return ignore(op);\n\n // istanbul ignore next\n default:\n return assertNever(parent, \"Unhandled CRDT type\");\n }\n }\n\n private async createChildAsListItem(\n op: CreateOp & HasOpId,\n node: SerializedChild\n ): Promise<ApplyOpResult> {\n let fix: FixOp | undefined;\n\n // The default intent, when not explicitly provided, is to insert, not set,\n // into the list.\n const intent: \"insert\" | \"set\" = op.intent ?? 
\"insert\";\n\n // istanbul ignore else\n if (intent === \"insert\") {\n const insertedParentKey = await this.insertIntoList(op.id, node);\n\n // If the inserted parent key is different from the input, it means there\n // was a conflict and the node has been inserted in an alternative free\n // list position. We should broadcast a modified Op to all clients that\n // has the modified position, and send a \"fix\" op back to the originating\n // client.\n if (insertedParentKey !== node.parentKey) {\n op = { ...op, parentKey: insertedParentKey };\n fix = {\n type: OpCode.SET_PARENT_KEY,\n id: op.id,\n parentKey: insertedParentKey,\n };\n return accept(op, fix);\n }\n\n // No conflict, node got inserted as intended\n return accept(op);\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n else if (intent === \"set\") {\n // The intent here is to \"set\", not insert, into the list, replacing the\n // existing item that\n\n // Special handling required here. They will include a \"deletedId\" that\n // points to the object they expect to be replacing. If in the mean time,\n // that object disappeared there (because it was moved, for example), be\n // sure to delete it anyway.\n // We should not just trust the given value, because we're about to\n // delete a node. It's only safe to delete the node if it indeed is\n // a sibling of the current node.\n const deletedId =\n op.deletedId !== undefined &&\n op.deletedId !== op.id &&\n this.loadedDriver.get_node(op.deletedId)?.parentId === node.parentId\n ? op.deletedId\n : undefined;\n\n if (deletedId !== undefined) {\n await this.loadedDriver.delete_node(deletedId);\n }\n\n const prevItemId = this.loadedDriver.get_child_at(\n node.parentId,\n node.parentKey\n );\n if (prevItemId !== undefined && prevItemId !== deletedId) {\n // If this \"set\" operation indeed removed an item, but it wasn't the\n // expected `deletedId`, let the invoking client know that they'll\n // have to delete this object, too.\n fix = {\n type: OpCode.DELETE_CRDT,\n id: prevItemId,\n };\n }\n\n await this.loadedDriver.set_child(op.id, node, true);\n\n return accept(op, fix);\n } else {\n return assertNever(intent, \"Invalid intent\");\n }\n }\n\n private async applyDeleteObjectKeyOp(\n op: DeleteObjectKeyOp & HasOpId\n ): Promise<ApplyOpResult> {\n await this.loadedDriver.delete_child_key(op.id, op.key);\n return accept(op);\n }\n\n private async applyUpdateObjectOp(\n op: UpdateObjectOp & HasOpId\n ): Promise<ApplyOpResult> {\n await this.loadedDriver.set_object_data(op.id, op.data, true);\n return accept(op);\n }\n\n private async applyDeleteCrdtOp(\n op: DeleteCrdtOp & HasOpId\n ): Promise<ApplyOpResult> {\n await this.loadedDriver.delete_node(op.id);\n return accept(op);\n }\n\n private async applySetParentKeyOp(\n op: SetParentKeyOp & HasOpId\n ): Promise<ApplyOpResult> {\n const newPosition = await this.moveToPosInList(op.id, op.parentKey);\n if (newPosition === undefined) {\n // The operation got rejected because it didn't make sense, ignore it\n return ignore(op);\n }\n\n // If the inserted node is different from the input, it means there was\n // a conflict and the node has been inserted in a new, free, list position.\n // We should broadcast a modified Op to all clients that has the modified\n // position, and send a \"fix\" op back to the originating client.\n if (newPosition !== op.parentKey) {\n const modifiedOp = { ...op, parentKey: newPosition };\n const fix: FixOp = {\n type: OpCode.SET_PARENT_KEY,\n id: op.id,\n parentKey: 
newPosition,\n };\n return accept(modifiedOp, fix);\n } else {\n return accept(op);\n }\n }\n\n /**\n * Inserts a new node in the storage tree, under a list parent. If an\n * existing sibling node already exist under this key, however, it will look\n * for another free position under that parent and insert it under\n * a different parent key that is guaranteed to be available.\n *\n * Returns the key that was used for the insertion.\n */\n private async insertIntoList(\n id: string,\n node: SerializedChild\n ): Promise<string> {\n // First, compute the key to use to insert this node\n const key = this.findFreeListPosition(node.parentId, asPos(node.parentKey));\n if (key !== node.parentKey) {\n node = { ...node, parentKey: key };\n }\n await this.loadedDriver.set_child(id, node);\n return node.parentKey;\n }\n\n /**\n * Tries to move a node to the given position under the same parent. If\n * a conflicting sibling node already exist at this position, it will use\n * another free position instead, to avoid the conflict.\n *\n * Returns the position (parentKey) that the node was eventually placed at.\n * If the node could be inserted without conflict, it will return the same\n * parentKey position.\n *\n * Will return `undefined` if this action could not be interpreted. Will be\n * a no-op for non-list items.\n */\n private async moveToPosInList(\n id: string,\n targetKey: string\n ): Promise<string | undefined> {\n const node = this.loadedDriver.get_node(id);\n if (node?.parentId === undefined) {\n return; /* reject */\n }\n\n if (this.loadedDriver.get_node(node.parentId)?.type !== CrdtType.LIST) {\n // SetParentKeyOp is a no-op for all nodes, except list items\n return; /* reject */\n }\n\n if (node.parentKey === targetKey) {\n // Already there\n return targetKey; /* no-op */\n }\n\n // First, compute the key to use to insert this node\n const key = this.findFreeListPosition(node.parentId, asPos(targetKey));\n if (key !== node.parentKey) {\n await this.loadedDriver.move_sibling(id, key);\n }\n return key;\n }\n\n /**\n * Checks whether the given parentKey is a \"free position\" under the\n * parentId, i.e. there are no siblings that have the same key. If a sibling\n * exists under that key, it tries to generate new positions until it finds\n * a free slot, and returns that. The returned value is therefore always safe\n * to use as parentKey.\n */\n private findFreeListPosition(parentId: string, parentPos: Pos): Pos {\n if (!this.loadedDriver.has_child_at(parentId, parentPos)) {\n return parentPos;\n }\n\n const currPos = parentPos;\n const nextPos = this.loadedDriver.get_next_sibling(parentId, currPos);\n if (nextPos !== undefined) {\n return makePosition(currPos, nextPos); // Between current and next\n } else {\n return makePosition(currPos); // After current (fallback)\n }\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { DefaultMap } from \"@liveblocks/core\";\nimport { Base64 } from \"js-base64\";\nimport { nanoid } from \"nanoid\";\nimport * as Y from \"yjs\";\n\nimport type { Guid, YDocId } from \"~/decoders\";\nimport { ROOT_YDOC_ID } from \"~/decoders\";\nimport type { IStorageDriver } from \"~/interfaces\";\nimport type { Logger } from \"~/lib/Logger\";\n\n// How big an update can be until we compress all individual updates into\n// a single vector and persist that instead (i.e. when we trigger \"garbage\n// collection\")\nconst MAX_Y_UPDATE_SIZE = 100_000;\n\ntype YUpdateInfo = {\n currentKey: string;\n lastVector: Uint8Array | undefined;\n};\n\nexport class YjsStorage {\n private readonly driver: IStorageDriver;\n\n private readonly doc: Y.Doc = new Y.Doc(); // the root document\n private readonly lastUpdatesById = new Map<YDocId, YUpdateInfo>();\n private readonly lastSnapshotById = new Map<YDocId, Y.Snapshot>();\n // Keeps track of which keys are loaded, so we can clean them up without calling `.list()`\n private readonly keysById = new DefaultMap<YDocId, Set<string>>(\n () => new Set()\n );\n private readonly initPromisesById: Map<YDocId, Promise<Y.Doc>> = new Map();\n\n constructor(driver: IStorageDriver) {\n this.driver = driver;\n this.doc.on(\"subdocs\", ({ removed }) => {\n removed.forEach((subdoc: Y.Doc) => {\n subdoc.destroy(); // will remove listeners\n });\n });\n }\n\n // ------------------------------------------------------------------------------------\n // Public API\n // ------------------------------------------------------------------------------------\n\n public async getYDoc(docId: YDocId): Promise<Y.Doc> {\n const doc = await this.loadDocByIdIfNotAlreadyLoaded(docId);\n return doc;\n }\n\n /**\n * If passed a state vector, an update with diff will be returned, if not the entire doc is returned.\n *\n * @param stateVector a base64 encoded target state vector created by running Y.encodeStateVector(Doc) on the client\n * @returns a base64 encoded array of YJS updates\n */\n public async getYDocUpdate(\n logger: Logger,\n stateVector: string = \"\",\n guid?: Guid,\n isV2: boolean = false\n ): Promise<string | null> {\n const update = await this.getYDocUpdateBinary(\n logger,\n stateVector,\n guid,\n isV2\n );\n if (!update) return null;\n return Base64.fromUint8Array(update);\n }\n\n public async getYDocUpdateBinary(\n logger: Logger,\n stateVector: string = \"\",\n guid?: Guid,\n isV2: boolean = false\n ): Promise<Uint8Array | null> {\n const doc = guid !== undefined ? await this.getYSubdoc(guid) : this.doc;\n if (!doc) {\n return null;\n }\n let encodedTargetVector;\n try {\n // if given a state vector, attempt to decode it a single diffed update\n encodedTargetVector =\n stateVector.length > 0 ? Base64.toUint8Array(stateVector) : undefined;\n } catch (e) {\n logger.warn(\n \"Could not get update from passed vector, returning all updates\"\n );\n }\n if (isV2) {\n return Y.encodeStateAsUpdateV2(doc, encodedTargetVector);\n }\n return Y.encodeStateAsUpdate(doc, encodedTargetVector);\n }\n\n public async getYStateVector(guid?: Guid): Promise<string | null> {\n const doc = guid !== undefined ? await this.getYSubdoc(guid) : this.doc;\n if (!doc) {\n return null;\n }\n return Base64.fromUint8Array(Y.encodeStateVector(doc));\n }\n\n public async getSnapshotHash(options: {\n guid?: Guid;\n isV2?: boolean;\n }): Promise<string | null> {\n const doc =\n options.guid !== undefined\n ? 
await this.getYSubdoc(options.guid)\n : this.doc;\n if (!doc) {\n return null;\n }\n const snapshot = this._getOrPutLastSnapshot(doc);\n return this.calculateSnapshotHash(snapshot, { isV2: options.isV2 });\n }\n\n /**\n * @param update base64 encoded uint8array\n * @returns\n */\n public async addYDocUpdate(\n logger: Logger,\n update: string | Uint8Array,\n guid?: Guid,\n isV2?: boolean\n ): Promise<{ isUpdated: boolean; snapshotHash: string }> {\n const doc = guid !== undefined ? await this.getYSubdoc(guid) : this.doc;\n if (!doc) {\n throw new Error(`YDoc with guid ${guid} not found`);\n }\n\n try {\n // takes a snapshot if none is stored in memory - NOTE: snapshots are a combination of statevector + deleteset, not a full doc\n const beforeSnapshot = this._getOrPutLastSnapshot(doc);\n const updateAsU8 =\n typeof update === \"string\" ? Base64.toUint8Array(update) : update;\n const applyUpdate = isV2 ? Y.applyUpdateV2 : Y.applyUpdate;\n applyUpdate(doc, updateAsU8, \"client\");\n // put the new \"after update\" snapshot\n const afterSnapshot = this._putLastSnapshot(doc);\n // Check the snapshot before/after to see if the update had an effect\n const updated = !Y.equalSnapshots(beforeSnapshot, afterSnapshot);\n if (updated) {\n await this.handleYDocUpdate(doc);\n }\n\n return {\n isUpdated: updated,\n snapshotHash: await this.calculateSnapshotHash(afterSnapshot, { isV2 }),\n };\n } catch (e) {\n // The only reason this would happen is if a user would send bad data\n logger.warn(`Ignored bad YDoc update: ${String(e)}`);\n throw new Error(\n \"Bad YDoc update. Data is corrupted, or data does not match the encoding.\"\n );\n }\n }\n\n public loadDocByIdIfNotAlreadyLoaded(docId: YDocId): Promise<Y.Doc> {\n let loaded$ = this.initPromisesById.get(docId);\n let doc = docId === ROOT_YDOC_ID ? this.doc : this.findYSubdocByGuid(docId);\n if (!doc) {\n // An API call can load a subdoc without the root doc (this._doc) being loaded, we account for that by just instantiating a doc here.\n doc = new Y.Doc();\n }\n if (loaded$ === undefined) {\n loaded$ = this._loadYDocFromDurableStorage(doc, docId);\n this.initPromisesById.set(docId, loaded$);\n }\n return loaded$;\n }\n\n public async load(_logger: Logger): Promise<void> {\n await this.loadDocByIdIfNotAlreadyLoaded(ROOT_YDOC_ID);\n }\n\n /**\n * Unloads the Yjs documents from memory.\n */\n public unload(): void {\n // YYY Implement this later!\n // YYY We're currently never unloading data read into memory, but let's\n // sync this with the .unload() method from Storage, so there will not be\n // any surprises here later!\n //\n // this.doc = new Y.Doc();\n // this.initPromisesById.clear();\n // this.lastUpdatesById.clear();\n // this.keysById.clear();\n // this.initPromisesById.clear();\n }\n\n // ------------------------------------------------------------------------------------\n // Private APIs\n // ------------------------------------------------------------------------------------\n\n // NOTE: We could instead store the hash of snapshot instead of the whole snapshot to optimize memory usage.\n private _getOrPutLastSnapshot(doc: Y.Doc): Y.Snapshot {\n const docId: YDocId =\n doc.guid === this.doc.guid ? 
ROOT_YDOC_ID : (doc.guid as Guid);\n const snapshot = this.lastSnapshotById.get(docId);\n if (snapshot) {\n return snapshot;\n }\n return this._putLastSnapshot(doc);\n }\n\n // NOTE: We could instead store the hash of snapshot instead of the whole snapshot to optimize memory usage.\n private _putLastSnapshot(doc: Y.Doc): Y.Snapshot {\n const docId: YDocId =\n doc.guid === this.doc.guid ? ROOT_YDOC_ID : (doc.guid as Guid);\n const snapshot = Y.snapshot(doc);\n this.lastSnapshotById.set(docId, snapshot);\n return snapshot;\n }\n /**\n * Given a record of updates, merge them and compress if savings are significant\n */\n private _loadAndCompressYJSUpdates = async (\n docUpdates: Record<string, Uint8Array>,\n doc: Y.Doc,\n docId: YDocId\n ): Promise<void> => {\n // the percent we need to save to trigger re-writing storage, ie. only rewrite storage if we save more than 20%\n const SAVINGS_THRESHOLD = 0.2;\n // get all updates from disk\n const updates = Object.values(docUpdates);\n // uint8arrays size on disk is equal to their length, combine them to see how much we're using\n const sizeOnDisk = updates.reduce((acc, update) => {\n return acc + update.length;\n }, 0);\n if (updates.length > 0) {\n const docKeys = Object.keys(docUpdates);\n // keep track of keys in use\n this.keysById.set(docId, new Set(docKeys));\n\n const mergedUpdate = Y.mergeUpdates(updates);\n // Garbage collection won't happen unless we actually apply the update\n Y.applyUpdate(doc, mergedUpdate);\n\n // get the update so we can check out how big it is\n const garbageCollectedUpdate = Y.encodeStateAsUpdate(doc);\n\n if (\n garbageCollectedUpdate.length <\n sizeOnDisk * (1 - SAVINGS_THRESHOLD)\n ) {\n const newKey = nanoid();\n await this.driver.write_y_updates(\n docId,\n newKey,\n garbageCollectedUpdate\n );\n // delete all old keys, we're going to write new merged updates\n await this.driver.delete_y_updates(docId, docKeys);\n this.keysById.set(docId, new Set([newKey]));\n }\n }\n };\n\n private _loadYDocFromDurableStorage = async (\n doc: Y.Doc,\n docId: YDocId\n ): Promise<Y.Doc> => {\n const docUpdates = Object.fromEntries(\n await this.driver.iter_y_updates(docId)\n );\n await this._loadAndCompressYJSUpdates(docUpdates, doc, docId);\n // store the vector of the last update\n this.lastUpdatesById.set(docId, {\n currentKey: nanoid(),\n lastVector: Y.encodeStateVector(doc),\n });\n doc.emit(\"load\", [doc]); // sets the \"isLoaded\" to true on the doc\n\n return doc;\n };\n\n private findYSubdocByGuid(guid: Guid): Y.Doc | null {\n for (const subdoc of this.doc.getSubdocs()) {\n if (subdoc.guid === guid) {\n return subdoc;\n }\n }\n return null;\n }\n\n private async calculateSnapshotHash(\n snapshot: Y.Snapshot,\n { isV2 }: { isV2?: boolean }\n ): Promise<string> {\n const encodedSnapshot = isV2\n ? Y.encodeSnapshotV2(snapshot)\n : Y.encodeSnapshot(snapshot);\n return Base64.fromUint8Array(\n new Uint8Array(\n await crypto.subtle.digest(\"SHA-256\", new Uint8Array(encodedSnapshot))\n )\n );\n }\n\n // gets a subdoc, it will be loaded if not already loaded\n private async getYSubdoc(guid: Guid): Promise<Y.Doc | null> {\n const subdoc = this.findYSubdocByGuid(guid);\n if (!subdoc) {\n return null;\n }\n await this.loadDocByIdIfNotAlreadyLoaded(guid);\n return subdoc;\n }\n\n // When the YJS doc changes, update it in durable storage\n private async handleYDocUpdate(doc: Y.Doc): Promise<void> {\n const docId: YDocId =\n doc.guid === this.doc.guid ? 
ROOT_YDOC_ID : (doc.guid as Guid);\n const docUpdateInfo = this.lastUpdatesById.get(docId);\n // get the update since last vector\n const updateSinceLastVector = Y.encodeStateAsUpdate(\n doc,\n docUpdateInfo?.lastVector\n );\n // this should happen before the await on putYDoc to avoid race conditions\n // but we need the current key before, so store it here\n const storageKey = docUpdateInfo?.currentKey ?? nanoid();\n if (updateSinceLastVector.length > MAX_Y_UPDATE_SIZE) {\n // compress update, not using the vector, we want to write the whole doc\n const newKey = nanoid();\n await this.driver.write_y_updates(\n docId,\n newKey,\n Y.encodeStateAsUpdate(doc)\n );\n // delete all old keys on disk\n await this.driver.delete_y_updates(\n docId,\n Array.from(this.keysById.getOrCreate(docId))\n );\n // update the keys we have stored\n this.keysById.set(docId, new Set([newKey]));\n // future updates will write from this vector and to this key\n this.lastUpdatesById.set(docId, {\n currentKey: nanoid(), // start writing to a new key\n lastVector: Y.encodeStateVector(doc),\n });\n } else {\n // in this case, the update is small enough, just overwrite it\n await this.driver.write_y_updates(\n docId,\n storageKey,\n updateSinceLastVector\n );\n const keys = [storageKey];\n // keep track of keys used\n const currentKeys = this.keysById.getOrCreate(docId);\n for (const key of keys) {\n currentKeys.add(key);\n }\n }\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\n/**\n * Given a promise or promise factory, returns a 2-tuple of success or failure.\n * This pattern avoids having to build deeply nested try / catch clauses, where\n * success variables need to be defined as a `let` outside of the `try` block.\n *\n * Turns:\n *\n * let result;\n * try {\n * result = await doSomething();\n * } catch (error) {\n * // do something with error\n * }\n *\n * doAnotherThing(result);\n *\n * Into:\n *\n * const [result, error] = await tryCatch(doSomething());\n * if (error) {\n * // do something with error\n * }\n * doAnotherThing(result);\n *\n */\nexport async function tryCatch<T, E = Error>(\n promise: Promise<T> | (() => Promise<T>) | (() => T)\n): Promise<[T, undefined] | [undefined, E]> {\n try {\n const data = await (typeof promise === \"function\" ? 
promise() : promise);\n return [data, undefined];\n } catch (error) {\n return [undefined, error as E];\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\n/**\n * Like ES6 map, but also provides a unique reverse lookup index for values\n * stored in the map.\n *\n * Useful for code like:\n *\n * // Store a list of persons by their IDs, but each person's email must also\n * // be unique\n * const map = new UniqueMap((person) => person.email);\n * map.set(1, { name: 'John Doe', email: 'john@example.org' });\n * map.set(2, { name: 'John Foo', email: 'john@example.org' }); // Will error!\n * map.delete(1);\n * map.set(3, { name: 'Johnny', email: 'john@example.org' }); // Now it's allowed\n *\n * map.getReverseKey('john@example.org') // 3\n * map.getReverse('john@example.org') // { name: 'Johnny', email: 'john@example.org' }\n *\n */\nexport class UniqueMap<K, V, UK> extends Map<K, V> {\n // / \\\n // Primary key Unique key\n #_revMap: Map<UK, K>;\n #_keyFn: (value: V) => UK;\n\n constructor(\n keyFn: (value: V) => UK\n // entries?: readonly (readonly [K, V])[] | null\n ) {\n super(); // super(entries)\n this.#_keyFn = keyFn;\n this.#_revMap = new Map();\n }\n\n lookupPrimaryKey(uniqKey: UK): K | undefined {\n return this.#_revMap.get(uniqKey);\n }\n\n lookup(uniqKey: UK): V | undefined {\n const key = this.#_revMap.get(uniqKey);\n return key !== undefined ? this.get(key) : undefined;\n }\n\n set(key: K, value: V): this {\n const uniqKey = this.#_keyFn(value);\n const primaryKey = this.#_revMap.get(uniqKey);\n if (primaryKey !== undefined && primaryKey !== key) {\n throw new Error(`Unique key ${String(uniqKey)} already exists`);\n }\n this.#_revMap.set(uniqKey, key);\n return super.set(key, value);\n }\n\n delete(primaryKey: K): boolean {\n const value = this.get(primaryKey);\n if (value !== undefined) {\n const indexedKey = this.#_keyFn(value);\n this.#_revMap.delete(indexedKey);\n }\n return super.delete(primaryKey);\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { BaseUserMeta, JsonObject } from \"@liveblocks/core\";\nimport { ServerMsgCode } from \"@liveblocks/core\";\n\nimport type { RoomStateServerMsg } from \"~/protocol\";\n\n/**\n * Concatenates multiple Uint8Arrays into a single Uint8Array.\n */\nexport function concatUint8Arrays(arrays: Uint8Array[]): Uint8Array {\n const totalLength = arrays.reduce((sum, arr) => sum + arr.length, 0);\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const arr of arrays) {\n result.set(arr, offset);\n offset += arr.length;\n }\n return result;\n}\n\nexport function makeRoomStateMsg(\n actor: number,\n nonce: string,\n scopes: string[],\n users: Record<number, BaseUserMeta & { scopes: string[] }>,\n publicMeta?: JsonObject\n): RoomStateServerMsg<BaseUserMeta> {\n return {\n type: ServerMsgCode.ROOM_STATE,\n actor,\n nonce,\n scopes,\n users,\n meta: publicMeta ?? {},\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAGO,IAAM,WAAW;AACjB,IAAM,cAAiD;AACvD,IAAM,aAAgD;ACH7D,IAAM,IACJ,OAAO,eAAe,cAClB,aACA,OAAO,WAAW,cAChB,SACA,OAAO,WAAW,cAChB,SACA,CAAC;AAGX,IAAM,kBAAkB;AACxB,IAAM,YAAY;AAClB,IAAM,QAAQ;AAEd,SAAS,MAAM,KAAmB;AAChC,MAAI,QAAQ,IAAI,aAAa,cAAc;AAEzC,YAAQ,MAAM,GAAG;EACnB,OAAO;AACL,UAAM,IAAI,MAAM,GAAG;EACrB;AACF;AAMO,SAAS,YACd,SACA,YACA,WACM;AACN,QAAM,QAAQ,OAAO,IAAI,OAAO;AAChC,QAAM,eAAe,YACjB,GAAG,cAAc,KAAK,KAAK,SAAS,MACpC,cAAc;AAElB,MAAI,CAAC,EAAE,KAAK,GAAG;AACb,MAAE,KAAK,IAAI;EACb,WAAW,EAAE,KAAK,MAAM,cAAc;EAEtC,OAAO;AACL,UAAM,MAAM;MACV,+FACE,YAAY,KACd;MACA;MACA;MACA,KAAK,OAAO,IAAI,EAAE,KAAK,CAAC;MACxB,KAAK,OAAO,IAAI,YAAY;IAC9B,EAAE,KAAK,IAAI;AACX,UAAM,GAAG;EACX;AAEA,MAAI,cAAc,eAAe,eAAe,aAAa;AAC3D;MACE;QACE,2EACE,kBAAkB,KACpB;QACA;QACA;QACA,KAAK,QAAQ,UAAU,WAAW;QAClC,KAAK,OAAO,UAAU,UAAU;QAChC;QACA;MACF,EAAE,KAAK,IAAI;IACb;EACF;AACF;ACmBO,SAAS,kBAAqC;AACnD,QAAM,aAAa,oBAAI,IAAiB;AAExC,WAAS,UAAU,UAA4C;AAC7D,eAAW,IAAI,QAAQ;AACvB,WAAO,MAAM,WAAW,OAAO,QAAQ;EACzC;AAEA,WAAS,cAAc,UAA4C;AACjE,UAAM,QAAQ,UAAU,CAAC,UAAa;AACpC,YAAM;AACN,aAAO,SAAS,KAAK;IACvB,CAAC;AACD,WAAO;EACT;AAEA,iBAAe,UAAU,WAA+C;AACtE,QAAI;AACJ,WAAO,IAAI,QAAW,CAAC,QAAQ;AAC7B,cAAQ,UAAU,CAAC,UAAU;AAC3B,YAAI,cAAc,UAAa,UAAU,KAAK,GAAG;AAC/C,cAAI,KAAK;QACX;MACF,CAAC;IACH,CAAC,EAAE,QAAQ,MAAM,QAAQ,CAAC;EAC5B;AAEA,WAAS,OAAO,OAAU;AACxB,QAAI,SAAS;AACb,eAAW,YAAY,YAAY;AACjC,eAAS,KAAK;AACd,eAAS;IACX;AACA,WAAO;EACT;AAEA,WAAS,QAAQ;AACf,WAAO,WAAW;EACpB;AAEA,SAAO;;IAEL;IACA;IACA;IACA;IAEA;IAEA,UAAgB;AACd,iBAAW,MAAM;IACnB;;IAGA,YAAY;MACV;MACA;MACA;IACF;EACF;AACF;AClJO,IAAM,SACX,QAAQ,IAAI,aAAa;;EACQ,CAAC,MAAe;IAC7C,OAAO;ACiBN,SAAS,MAAM,KAAoB;AACxC,QAAM,IAAI,MAAM,GAAG;AACrB;AAqEO,SAAS,aAAa,YAAsC;AACjE,MAAI;AAEF,WAAO,KAAK,MAAM,UAAU;EAC9B,SAAS,GAAG;AACV,WAAO;EACT;AACF;AC3FA,IAAM,SAAS,OAAO,QAAQ;AAC9B,IAAM,WAAW,OAAO,UAAU;AAyClC,IAAI,mBAAoD;AAOxD,IAAI,eAAgD;AA+BpD,SAAS,eAAe,QAA6B;AACnD,MAAI,CAAC,iBAAkB,OAAM,mCAAmC;AAChE,mBAAiB,IAAI,MAAM;AAC7B;;AA6CA,IAAe,kBAAf,WAAyE;EAOvE,YAAY,QAAkC;AAL3B;;AACV;AAET;wBAAiB;AAGf,SAAK,SAAS,UAAU,OAAO;AAC/B,uBAAK,cAAe,gBAAsB;AAC1C,SAAK,MAAM,IAAI,oBAAI,IAAI;AAGvB,SAAK,MAAM,KAAK,IAAI,KAAK,IAAI;AAC7B,SAAK,YAAY,KAAK,UAAU,KAAK,IAAI;AACzC,SAAK,gBAAgB,KAAK,cAAc,KAAK,IAAI;EACnD;EAEA,UAAgB;AACd,uBAAK,cAAa,QAAQ;AAG1B,uBAAK,cAAe;AAEpB,SAAK,SAAS;EAChB;EAKA,IAAI,cAAuB;AACzB,QAAI,mBAAK,cAAa,MAAM,IAAI,EAAG,QAAO;AAE1C,eAAW,QAAQ,KAAK,MAAM,GAAG;AAC/B,UAAI,KAAK,aAAa;AACpB,eAAO;MACT;IACF;AAEA,WAAO;EACT;EAEA,EArCiB,aAqCT,SAAQ,IAAU;AACxB,uBAAK,cAAa,OAAO;AAIzB,eAAW,QAAQ,KAAK,MAAM,GAAG;AAC/B,qBAAe,IAAI;IACrB;EACF;EAEA,UAAU,UAA+C;AAMvD,QAAI,mBAAK,cAAa,MAAM,MAAM,GAAG;AACnC,WAAK,IAAI;IACX;AACA,WAAO,m
BAAK,cAAa,UAAU,QAAQ;EAC7C;EAEA,cAAc,UAA+C;AAC3D,UAAM,QAAQ,KAAK,UAAU,MAAM;AACjC,YAAM;AACN,aAAO,SAAS;IAClB,CAAC;AACD,WAAO;EACT;EAEA,YAAmB;AACjB,UAAM,IAAI,MAAM,oCAAoC;EACtD;EAEA,iBAAuB;AACrB,eAAW,QAAQ,KAAK,MAAM,GAAG;AAC/B,WAAK,UAAU;IACjB;EACF;EAEA,QAAQ,MAAoC;AAC1C,SAAK,MAAM,EAAE,IAAI,IAAI;EACvB;EAEA,WAAW,MAAoC;AAC7C,SAAK,MAAM,EAAE,OAAO,IAAI;EAC1B;EAEA,aAAyB;AACvB,WAAO;EACT;AACF,GA1FW,8BAHX;AAsJA,IAAM,UAAU,OAAO;;AAGhB,IAAM,iBAANA,MAAA,cAA+B,eAAkB;EAoC9C,YACN,MACA,WACA,QACA;AACA,UAAM,MAAM;AAzCT;AACL;AACA;AAEA;;AACS;AACA;AAoCP,uBAAK,QAAS;AACd,uBAAK,YAAa;AAClB,uBAAK,OAAQ;AACb,uBAAK,UAAW,oBAAI,IAAI;AACxB,uBAAK,YAAa;EACpB;;EAnCA,OAAO,QAEF,MAKe;AAClB,UAAM,OAAO,KAAK,IAAI;AACtB,QAAI,OAAO,SAAS;AAClB,YAAM,+DAA+D;AAEvE,QAAI,OAAO,KAAK,KAAK,SAAS,CAAC,MAAM,YAAY;AAE/C,YAAM,SAAS;AACf,YAAM,YAAY,KAAK,IAAI;AAC3B,aAAO,IAAIA,IAAc,MAA4B,WAAW,MAAM;IACxE,OAAO;AAEL,YAAM,YAAY;AAClB,aAAO,IAAIA,IAAc,MAA4B,SAAS;IAChE;EACF;EAeA,UAAgB;AACd,eAAW,OAAO,mBAAK,WAAU;AAC/B,UAAI,WAAW,IAA8B;IAC/C;AAGA,uBAAK,YAAa;AAElB,uBAAK,UAAW;AAEhB,uBAAK,OAAQ;AAEb,uBAAK,YAAa;EACpB;EAEA,IAAI,UAAmB;AACrB,WAAO,mBAAK;EACd;EAsCA,YAAkB;AAChB,QAAI,CAAC,mBAAK,SAAQ;AAChB,yBAAK,QAAS;AACd,WAAK,eAAe;IACtB;EACF;EAEA,MAAS;AACP,QAAI,mBAAK,SAAQ;AACf,4BAAK,yCAAL;IACF;AACA,kBAAc,IAAI,IAAI;AACtB,WAAO,mBAAK;EACd;;;;;;;EAQA,CAAQ,QAAQ,IAAU;AACxB,QAAI,CAAC,KAAK,aAAa;AAGrB;IACF;AAKA,UAAM,UAAU,sBAAK,yCAAL;AAChB,QAAI,SAAS;AACX,YAAM,QAAQ,EAAE;IAClB;EACF;AACF,GA3IE,4BACA,wBAEA,0BACS,uBACA,4BANJ,2CAoEL,eAAA,WAAsB;AACpB,QAAM,kBAAkB;AAExB,MAAI;AACJ,iBAAe,oBAAI,IAAI;AACvB,MAAI;AACF,cAAU,mBAAK,YAAL,WAAgB,GAAG,mBAAK,OAAM,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;EAC5D,UAAA;AACE,UAAM,aAAa,mBAAK;AACxB,uBAAK,UAAW,oBAAI,IAAI;AAExB,eAAW,OAAO,cAAc;AAC9B,yBAAK,UAAS,IAAI,GAAG;AACrB,iBAAW,OAAO,GAAG;IACvB;AAEA,eAAW,aAAa,YAAY;AAClC,gBAAU,WAAW,IAA8B;IACrD;AACA,eAAW,aAAa,mBAAK,WAAU;AACrC,gBAAU,QAAQ,IAA8B;IAClD;AAEA,mBAAe;EACjB;AAEA,qBAAK,QAAS;AAGd,MAAI,CAAC,KAAK,OAAO,mBAAK,aAAY,OAAO,GAAG;AAC1C,uBAAK,YAAa;AAClB,WAAO;EACT;AACA,SAAO;AACT,GAtGKA;AIhRA,SAAS,YAAY,QAAe,QAAuB;AAChE,QAAM,IAAI,MAAM,MAAM;AACxB;AAQO,SAAS,OAAO,WAAoB,QAAmC;AAC5E,MAAI,QAAQ,IAAI,aAAa,cAAc;AAEzC,QAAI,CAAC,WAAW;AACd,YAAM,MAAM,IAAI,MAAM,MAAM;AAC5B,UAAI,OAAO;AACX,YAAM;IACR;EACF;AACF;AAgBO,SAAS,GACd,OACA,SAAiB,qCACD;AAChB,SAAO,UAAU,QAAQ,UAAU,QAAW,MAAM;AACpD,SAAO;AACT;AC7DA,IAAA,wBAAA,CAAA;AAAA,SAAA,uBAAA;EAAA,OAAA,MAAAC;EAAA,gBAAA,MAAA;EAAA,MAAA,MAAA;EAAA,eAAA,MAAA;AAAA,CAAA;AAEA,IAAM,QACJ;AACF,IAAM,OAAO;AAEb,SAAS,KACP,QACwD;AACxD,SAAO,OAAO,WAAW,eAAe,QAAQ,IAAI,aAAa,SAC7D,QAAQ,MAAM;;IAEd,CAAC,YAAY,SACX,QAAQ,MAAM,EAAE,gBAAgB,OAAO,SAAS,GAAG,IAAI;;AAC/D;AAGO,IAAM,OAAO,KAAK,MAAM;AACxB,IAAMA,SAAQ,KAAK,OAAO;AAEjC,SAAS,cACP,QACuE;AACvE,SAAO,OAAO,WAAW,eAAe,QAAQ,IAAI,aAAa,SAC7D,QAAQ,MAAM;;IAEd,CAAC,OAAO,YAAY,SAClB,QAAQ,MAAM;MACZ,kBAAkB,KAAK;MACvB;MACA;MACA;MACA,GAAG;IACL;;AACR;AAGO,IAAM,gBAAgB,cAAc,MAAM;AAC1C,IAAM,iBAAiB,cAAc,OAAO;;ASvB5C,IAAM,cAANC,MAAA,cAA+B,IAAU;;;;;EAO9C,YACE,WACAC,UACA;AACA,UAAMA,QAAO;AAVf;AAWE,uBAAK,YAAa;EACpB;;;;;;;;EASA,YAAY,KAAQ,WAA8B;AAChD,QAAI,MAAM,IAAI,GAAG,GAAG;AAElB,aAAO,MAAM,IAAI,GAAG;IACtB,OAAO;AACL,YAAM,KACJ,aACA,mBAAK,eACL,MAAM,4CAA4C;AAEpD,YAAM,QAAQ,GAAG,GAAG;AACpB,WAAK,IAAI,KAAK,KAAK;AACnB,aAAO;IACT;EACF;AACF,GApCE,4BADKD;AKTA,IAAM,gBAAgB,OAAO,OAAO;;EAEzC,iBAAiB;EACjB,aAAa;EACb,WAAW;EACX,mBAAmB;EACnB,YAAY;;EAGZ,kBAAkB;;EAClB,eAAe;;EACf,oBAAoB;;EACpB,gBAAgB;;EAGhB,aAAa;;EAGb,gBAAgB;EAChB,gBAAgB;EAChB,yBAAyB;EACzB,gBAAgB;EAChB,iBAAiB;EACjB,gBAAgB;EAChB,iBAAiB;EACjB,wBAAwB;EACxB,0BAA0B;EAC1B,0BAA0B;;EAG1B,mBAAmB;;AACrB,CAAC;ACmBM,IAAK,sBAAL,kBAAKE,yBAAL;AAELA,uBAAAA,qBAAA,cAAA,IAAe,GAAA,IAAf;AAEAA,uBAAAA,qBAAA,gBAAA,IA
AiB,IAAA,IAAjB;AAEAA,uBAAAA,qBAAA,sBAAA,IAAuB,IAAA,IAAvB;AAEAA,uBAAAA,qBAAA,iBAAA,IAAkB,IAAA,IAAlB;AAEAA,uBAAAA,qBAAA,wBAAA,IAAyB,GAAA,IAAzB;AAEAA,uBAAAA,qBAAA,aAAA,IAAc,IAAA,IAAd;AAEAA,uBAAAA,qBAAA,oCAAA,IAAqC,IAAA,IAArC;AAEAA,uBAAAA,qBAAA,sCAAA,IAAuC,IAAA,IAAvC;AAEAA,uBAAAA,qBAAA,wCAAA,IAAyC,IAAA,IAAzC;AAEAA,uBAAAA,qBAAA,+CAAA,IAAgD,IAAA,IAAhD;AAEAA,uBAAAA,qBAAA,iBAAA,IAAkB,IAAA,IAAlB;AAEAA,uBAAAA,qBAAA,QAAA,IAAS,IAAA,IAAT;AAEAA,uBAAAA,qBAAA,eAAA,IAAgB,IAAA,IAAhB;AAEAA,uBAAAA,qBAAA,qBAAA,IAAsB,IAAA,IAAtB;AA5BU,SAAAA;AAAA,GAAA,uBAAA,CAAA,CAAA;ACqGZ,IAAM,iBAAiB,CAAC,KAAK,KAAK,KAAO,KAAO,KAAO,KAAO,GAAM;AAMpE,IAAM,cAAc,eAAe,CAAC,IAAI;AAwExC,SAAS,IAAI,OAAiB,SAAiB;AAC7C,QAAM,SACJ,UAAU,IACEC,SACR,UAAU,IACA;;IACS,MAAM;IAAC;;AAChC,SAAO,MAAM;AACX,WAAO,OAAO;EAChB;AACF;AA4BA,IAAM,oBAAoB;EACxB;EACA;AACF;ACnQO,IAAM,YAAY,OAAO;ACdhC,IAAM,eAAe,OAAO,OAAO,CAAC,CAAC;AAGrC,IAAM,qBAAqB,MAAM,KAAK,IAAI,IAAI,MAAM,CAAC;AACrD,IAAM,qBAAqB,MAAM,KAAK,IAAI,IAAI,MAAM,CAAC;AACrD,IAAM,sBAAsB,MAAM,KAAK,IAAI,IAAI,OAAO,CAAC;AACvD,IAAM,oBAAoB,MAAM,KAAK,IAAI,IAAI,eAAe,CAAC;AGyT7D,IAAM,YAAY,OAAO,GAAG;AIzQ5B,IAAM,cAAc,gBAAmD;AAGvE,IAAI,QAAQ,IAAI,aAAa,gBAAgB,OAAO,WAAW,aAAa;AAC1E,SAAO,iBAAiB,WAAW,CAAC,UAAiC;AACnE,QACE,MAAM,WAAW,UAChB,MAAM,MAAkC,WACvC,6BACF;AAQA,kBAAY,OAAO,MAAM,IAAyC;IACpE,OAAO;IAEP;EACF,CAAC;AACH;AAEO,IAAM,qBAAqB,YAAY;ACmD9C,IAAM,WAAW,KAAK,IAAI;AEpF1B,IAAM,SAAS,OAAO,6BAA6B;AEMnD,IAAM,WAAW;AACjB,IAAM,WAAW;AAEjB,IAAM,aAAa,WAAW,WAAW;AAEzC,IAAM,OAAe,SAAS,CAAC;AAM/B,IAAM,MAAW,SAAS,CAAC;AAE3B,IAAM,YAAa,OAAO,SAAS,EAAE;AAUrC,SAAS,SAAS,GAAgB;AAChC,QAAM,OAAO,YAAY,IAAI,IAAI,aAAa,IAAI;AAClD,MAAI,OAAO,YAAY,OAAO,UAAU;AACtC,UAAM,IAAI,MAAM,oBAAoB,CAAC,EAAE;EACzC;AACA,SAAO,OAAO,aAAa,IAAI;AACjC;AAQA,SAAS,aAAa,GAAS,GAAc;AAC3C,MAAI,MAAM,UAAa,MAAM,QAAW;AACtC,WAAO,QAAQ,GAAG,CAAC;EACrB,WAAW,MAAM,QAAW;AAC1B,WAAO,MAAM,CAAC;EAChB,WAAW,MAAM,QAAW;AAC1B,WAAO,OAAO,CAAC;EACjB,OAAO;AACL,WAAO;EACT;AACF;AA6BA,SAAS,OAAO,KAAe;AAC7B,QAAM,YAAY,IAAI,SAAS;AAC/B,WAAS,IAAI,GAAG,KAAK,WAAW,KAAK;AACnC,UAAM,OAAO,IAAI,WAAW,CAAC;AAG7B,QAAI,QAAQ,UAAU;AACpB;IACF;AAWA,QAAI,MAAM,WAAW;AACnB,UAAI,SAAS,WAAW,GAAG;AACzB,eAAQ,IAAI,UAAU,GAAG,CAAC,IAAI;MAChC,OAAO;AACL,eAAQ,IAAI,UAAU,GAAG,CAAC,IAAI,OAAO,aAAa,OAAO,CAAC;MAC5D;IACF,OAAO;AACL,aAAO,IAAI,UAAU,GAAG,IAAI,CAAC;IAC/B;EACF;AAIA,SAAO;AACT;AAyBA,IAAM,iBAAiB;AACvB,IAAM,gBAAgB;AAEtB,SAAS,MAAM,KAAe;AAG5B,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACnC,UAAM,OAAO,IAAI,WAAW,CAAC;AAC7B,QAAI,OAAO,YAAY,OAAO,UAAU;AACtC,aAAQ,MAAM;IAChB;EACF;AAGA,SAAO,IAAI,SAAS,KAAK,IAAI,WAAW,IAAI,SAAS,CAAC,MAAM,UAAU;AACpE,UAAM,IAAI,MAAM,GAAG,EAAE;EACvB;AAGA,MAAI,IAAI,WAAW,KAAK,QAAQ,MAAM;AACpC,WAAO;EACT;AAGA,MAAI,WAAW;AACf,MAAI,IAAI,SAAS,gBAAgB;AAC/B,eACE,iBACA,KAAK,MAAM,IAAI,SAAS,kBAAkB,aAAa,IAAI;EAC/D;AAGA,QAAM,SAAS,wBAAwB,KAAK,QAAQ;AACpD,MAAI,WAAW,MAAM;AACnB,WAAO;EACT;AAGA,cAAY;AACZ,QAAM,iBAAiB,wBAAwB,KAAK,QAAQ;AAC5D,MAAI,mBAAmB,MAAM;AAC3B,WAAO;EACT;AAGA,SAAQ,MAAM;AAChB;AAMA,SAAS,wBAAwB,KAAU,UAA8B;AAEvE,QAAM,SAAmB,CAAC;AAC1B,WAAS,IAAI,GAAG,IAAI,UAAU,KAAK;AACjC,QAAI,IAAI,IAAI,QAAQ;AAClB,aAAO,KAAK,IAAI,WAAW,CAAC,IAAI,QAAQ;IAC1C,OAAO;AACL,aAAO,KAAK,CAAC;IACf;EACF;AAGA,MAAI,QAAQ;AACZ,WAAS,IAAI,WAAW,GAAG,KAAK,KAAK,OAAO,KAAK;AAC/C,UAAM,MAAM,OAAO,CAAC,IAAI;AACxB,QAAI,OAAO,YAAY;AACrB,aAAO,CAAC,IAAI;AACZ,cAAQ;IACV,OAAO;AACL,aAAO,CAAC,IAAI;AACZ,cAAQ;IACV;EACF;AAGA,MAAI,OAAO;AACT,WAAO;EACT;AAGA,MAAI,SAAS;AACb,aAAW,KAAK,QAAQ;AACtB,cAAU,OAAO,aAAa,IAAI,QAAQ;EAC5C;AAGA,SAAO,OAAO,SAAS,KAAK,OAAO,WAAW,OAAO,SAAS,CAAC,MAAM,UAAU;AAC7E,aAAS,OAAO,MAAM,GAAG,EAAE;EAC7B;AAEA,SAAO;AACT;AAmBA,SAAS,QAAQ,IAAS,IAAc;AACtC,MAAI,KAAK,IAAI;AACX,WAAO,SAAS,IAAI,EAAE;EACxB,WAAW,KAAK,IAAI;AA
ClB,WAAO,SAAS,IAAI,EAAE;EACxB,OAAO;AACL,UAAM,IAAI,MAAM,kDAAkD;EACpE;AACF;AAKA,SAAS,SAAS,IAAS,IAAmB;AAC5C,MAAI,QAAQ;AAEZ,QAAM,QAAQ,GAAG;AACjB,QAAM,QAAQ,GAAG;AACjB,SAAO,MAAM;AACX,UAAM,SAAS,QAAQ,QAAQ,GAAG,WAAW,KAAK,IAAI;AACtD,UAAM,SAAS,QAAQ,QAAQ,GAAG,WAAW,KAAK,IAAI;AAEtD,QAAI,WAAW,QAAQ;AACrB;AACA;IACF;AAGA,QAAI,SAAS,WAAW,GAAG;AACzB,YAAM,OAAO,QAAQ;AACrB,UAAI,SAAS,GAAG,UAAU,GAAG,IAAI;AACjC,UAAI,OAAO,SAAS,MAAM;AACxB,kBAAU,KAAK,OAAO,OAAO,OAAO,MAAM;MAC5C;AACA,YAAM,SAAS,GAAG,UAAU,IAAI;AAChC,YAAM,QAAQ;AACd,aAAQ,SAAS,SAAS,QAAQ,KAAK;IACzC,OAAO;AAEL,aAAQ,MAAM,IAAI,KAAK,IACrB,OAAO,aAAc,SAAS,UAAW,CAAC;IAC9C;EACF;AACF;AAEA,SAAS,MAAM,KAAa,GAAmB;AAC7C,SAAO,IAAI,IAAI,SACX,IAAI,UAAU,GAAG,CAAC,IAClB,MAAM,KAAK,OAAO,IAAI,IAAI,MAAM;AACtC;AAEA,IAAM,oBAAoB,WAAW;AAUrC,SAAS,MAAM,KAAyB;AAEtC,MAAI,QAAQ,IAAI;AACd,WAAO;EACT;AAGA,QAAM,UAAU,IAAI,SAAS;AAC7B,QAAM,OAAO,IAAI,WAAW,OAAO;AACnC,MAAI,OAAO,qBAAqB,OAAO,UAAU;AAC/C,WAAO;EACT;AAEA,WAAS,IAAI,GAAG,IAAI,SAAS,KAAK;AAChC,UAAM,OAAO,IAAI,WAAW,CAAC;AAC7B,QAAI,OAAO,YAAY,OAAO,UAAU;AACtC,aAAO;IACT;EACF;AAEA,SAAO;AACT;AAEA,SAAS,aAAa,KAAkB;AACtC,QAAM,QAAkB,CAAC;AAGzB,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACnC,UAAM,OAAO,IAAI,WAAW,CAAC;AAG7B,UAAM,KAAK,OAAO,WAAW,WAAW,OAAO,WAAW,WAAW,IAAI;EAC3E;AAGA,SAAO,MAAM,SAAS,KAAK,MAAM,MAAM,SAAS,CAAC,MAAM,UAAU;AAC/D,UAAM;EACR;AAEA,SAAO,MAAM,SAAS,IACjB,OAAO,aAAa,GAAG,KAAK;;IAE7B;;AACN;AAMA,SAAS,MAAM,KAAkB;AAG/B,SAAO,MAAM,GAAG,IAAI,MAAM,aAAa,GAAG;AAC5C;AC1ZO,IAAM,SAAS,OAAO,OAAO;EAClC,MAAM;EACN,gBAAgB;EAChB,aAAa;EACb,eAAe;EACf,eAAe;EACf,aAAa;EACb,mBAAmB;EACnB,YAAY;EACZ,iBAAiB;AACnB,CAAC;AC4LD,IAAM,WAAqB,OAAO,OAAO,EAAE,MAAM,WAAW,CAAC;ACrMtD,IAAM,WAAW,OAAO,OAAO;EACpC,QAAQ;EACR,MAAM;EACN,KAAK;EACL,UAAU;AACZ,CAAC;AAqEM,SAAS,kBAAkB,MAA4C;AAC5E,SAAO,KAAK,CAAC,MAAM;AACrB;AAEO,SAAS,oBACd,MAC6C;AAC7C,SAAO,KAAK,CAAC,EAAE,SAAS,SAAS;AACnC;AAEO,SAAS,kBAAkB,MAA4C;AAC5E,SAAO,KAAK,CAAC,EAAE,SAAS,SAAS;AACnC;AAEO,SAAS,iBAAiB,MAA2C;AAC1E,SAAO,KAAK,CAAC,EAAE,SAAS,SAAS;AACnC;AAEO,SAAS,sBACd,MAC6B;AAC7B,SAAO,KAAK,CAAC,EAAE,SAAS,SAAS;AACnC;AA+EO,UAAU,yBACf,OACuB;AACvB,aAAW,QAAQ,OAAO;AACxB,QAAI,oBAAoB,IAAI,GAAG;AAC7B,UAAI,kBAAkB,IAAI,GAAG;AAC3B,cAAM,KAAK,KAAK,CAAC;AACjB,cAAM,OAAO,KAAK,CAAC;AACnB,cAAM,CAAC,IAAI,KAAK,IAAI;MACtB,OAAO;AACL,cAAM,KAAK,KAAK,CAAC;AACjB,cAAM,OAAO,KAAK,CAAC;AACnB,cAAM,CAAC,IAAI,SAAS,QAAQ,KAAK,UAAU,KAAK,WAAW,KAAK,IAAI;MACtE;IACF,WAAW,kBAAkB,IAAI,GAAG;AAClC,YAAM,KAAK,KAAK,CAAC;AACjB,YAAM,OAAO,KAAK,CAAC;AACnB,YAAM,CAAC,IAAI,SAAS,MAAM,KAAK,UAAU,KAAK,SAAS;IACzD,WAAW,iBAAiB,IAAI,GAAG;AACjC,YAAM,KAAK,KAAK,CAAC;AACjB,YAAM,OAAO,KAAK,CAAC;AACnB,YAAM,CAAC,IAAI,SAAS,KAAK,KAAK,UAAU,KAAK,SAAS;IACxD,WAAW,sBAAsB,IAAI,GAAG;AACtC,YAAM,KAAK,KAAK,CAAC;AACjB,YAAM,OAAO,KAAK,CAAC;AACnB,YAAM,CAAC,IAAI,SAAS,UAAU,KAAK,UAAU,KAAK,WAAW,KAAK,IAAI;IACxE,OAAO;IAEP;EACF;AACF;AIvKA,IAAM,uBAAuB,MAAM;AGzB5B,SAAS,aAAa,MAAgC;AAC3D,SACE,SAAS,QACT,OAAO,SAAS,YAChB,OAAO,SAAS,YAChB,OAAO,SAAS;AAEpB;AAEO,SAAS,YAAY,MAA+B;AACzD,SAAO,MAAM,QAAQ,IAAI;AAC3B;AAEO,SAAS,aAAa,MAAgC;AAC3D,SAAO,CAAC,aAAa,IAAI,KAAK,CAAC,YAAY,IAAI;AACjD;AC7BO,IAAM,gBAAgB,OAAO,OAAO;;EAEzC,iBAAiB;EACjB,iBAAiB;;EAGjB,eAAe;EACf,gBAAgB;;EAGhB,YAAY;EACZ,aAAa;AACf,CAAC;AG6lCD,IAAM,0BAA0B,OAAO,OAAO;AEvyB9C,IAAM,iBAAiB;EACrB,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;AACP;AAEA,IAAM,sBAAsB,IAAI;EAC9B,OAAO,KAAK,cAAc,EACvB,IAAI,CAAC,WAAW,KAAK,MAAM,EAAE,EAC7B,KAAK,GAAG;EACX;AACF;AAiEA,IAAM,qBAAqB;EACzB,GAAG;EACH,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;AACP;AAEA,IAAM,0BAA0B,IAAI;EAClC,OAAO,KAAK,kBAAkB,EAC3B,IAAI,CAAC,WAAW,KAAK,MAAM,EAAE,EAC7B,KAAK,GAAG;EACX;AACF;AQtaA
,YAAY,UAAU,aAAa,UAAU;;;ACiB7C;AAAA,EACE;AAAA,EACA;AAAA,EACA,YAAAC;AAAA,EACA;AAAA,EACA,UAAAC;AAAA,EACA,YAAAC;AAAA,EACA,UAAAC;AAAA,EACA,eAAAC;AAAA,OACK;;;ACVP,SAAS,eAAe;AAYjB,IAAM,WAA0B;AAahC,IAAM,iBAAsC,SAAS;AAAA,EAC1D,CAAC,UACC,UAAU,QAAQ,OAAO,UAAU,YAAY,CAAC,MAAM,QAAQ,KAAK;AAAA,EACrE;AACF;;;AC7BA,SAAS,UAAU,QAAQ,UAAU,QAAQ,mBAAmB;AAkBhE,IAAM,iBAAoD,OAAO;AAAA,EAC/D,MAAM,SAAS,OAAO,aAAa;AAAA,EACnC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,MAAM;AACR,CAAC;AAED,IAAM,iBAAoD,OAAO;AAAA,EAC/D,MAAM,SAAS,OAAO,aAAa;AAAA,EACnC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,UAAU;AAAA,EACV,WAAW;AAAA,EACX,MAAM;AAAA,EACN,QAAQ,SAAS,SAAS,KAAK,CAAC;AAAA,EAChC,WAAW,SAAS,MAAM;AAC5B,CAAC;AAED,IAAM,eAAgD,OAAO;AAAA,EAC3D,MAAM,SAAS,OAAO,WAAW;AAAA,EACjC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,UAAU;AAAA,EACV,WAAW;AAAA,EACX,QAAQ,SAAS,SAAS,KAAK,CAAC;AAAA,EAChC,WAAW,SAAS,MAAM;AAC5B,CAAC;AAED,IAAM,cAA8C,OAAO;AAAA,EACzD,MAAM,SAAS,OAAO,UAAU;AAAA,EAChC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,UAAU;AAAA,EACV,WAAW;AAAA,EACX,QAAQ,SAAS,SAAS,KAAK,CAAC;AAAA,EAChC,WAAW,SAAS,MAAM;AAC5B,CAAC;AAED,IAAM,mBAAwD,OAAO;AAAA,EACnE,MAAM,SAAS,OAAO,eAAe;AAAA,EACrC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,UAAU;AAAA,EACV,WAAW;AAAA,EACX,MAAM;AAAA,EACN,QAAQ,SAAS,SAAS,KAAK,CAAC;AAAA,EAChC,WAAW,SAAS,MAAM;AAC5B,CAAC;AAED,IAAM,eAAgD,OAAO;AAAA,EAC3D,MAAM,SAAS,OAAO,WAAW;AAAA,EACjC,MAAM;AAAA,EACN,IAAI;AACN,CAAC;AAED,IAAM,iBAAoD,OAAO;AAAA,EAC/D,MAAM,SAAS,OAAO,cAAc;AAAA,EACpC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,WAAW;AACb,CAAC;AAED,IAAM,oBAA0D,OAAO;AAAA,EACrE,MAAM,SAAS,OAAO,iBAAiB;AAAA,EACvC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,KAAK;AACP,CAAC;AAEM,IAAM,KAA4B,YAAY,QAAQ;AAAA,EAC3D,CAAC,OAAO,aAAa,GAAG;AAAA,EACxB,CAAC,OAAO,aAAa,GAAG;AAAA,EACxB,CAAC,OAAO,WAAW,GAAG;AAAA,EACtB,CAAC,OAAO,UAAU,GAAG;AAAA,EACrB,CAAC,OAAO,eAAe,GAAG;AAAA,EAC1B,CAAC,OAAO,WAAW,GAAG;AAAA,EACtB,CAAC,OAAO,cAAc,GAAG;AAAA,EACzB,CAAC,OAAO,iBAAiB,GAAG;AAC9B,CAAC;;;ACjGD,SAAS,YAAY;AAOd,IAAM,cAAc,KAAK,WAAiB;AAE1C,IAAM,eAAe;;;AHmB5B,IAAM,0BACJC,QAAO;AAAA,EACL,MAAMC,UAAS,cAAc,eAAe;AAAA,EAC5C,MAAM;AAAA,EACN,aAAaC,UAAS,MAAM;AAC9B,CAAC;AAEH,IAAM,0BAAkEF,QAAO;AAAA,EAC7E,MAAMC,UAAS,cAAc,eAAe;AAAA,EAC5C,OAAO;AACT,CAAC;AAED,IAAM,wBAAwDD,QAAO;AAAA,EACnE,MAAMC,UAAS,cAAc,aAAa;AAC5C,CAAC;AAED,IAAM,yBAA0DD,QAAO;AAAA,EACrE,MAAMC,UAAS,cAAc,cAAc;AAAA,EAC3C,KAAK,MAAM,EAAE;AACf,CAAC;AAED,IAAM,qBAAkDD,QAAO;AAAA,EAC7D,MAAMC,UAAS,cAAc,UAAU;AAAA,EACvC,QAAQE,QAAO,WAAoB;AAAA,EACnC,MAAMD,UAAS,WAAW;AAAA;AAAA,EAC1B,IAAIA,UAAS,OAAO;AACtB,CAAC;AAED,IAAM,sBAAoDF,QAAO;AAAA,EAC/D,MAAMC,UAAS,cAAc,WAAW;AAAA,EACxC,QAAQE,QAAO,WAAoB;AAAA,EACnC,MAAMD,UAAS,WAAW;AAAA;AAAA,EAC1B,IAAIA,UAAS,OAAO;AACtB,CAAC;AAEM,IAAM,mBACXE,aAAY,QAAQ;AAAA,EAClB,CAAC,cAAc,eAAe,GAAG;AAAA,EACjC,CAAC,cAAc,eAAe,GAAG;AAAA,EACjC,CAAC,cAAc,aAAa,GAAG;AAAA,EAC/B,CAAC,cAAc,cAAc,GAAG;AAAA,EAChC,CAAC,cAAc,UAAU,GAAG;AAAA,EAC5B,CAAC,cAAc,WAAW,GAAG;AAC/B,CAAC,EAAE,SAAS,gCAAgC;AAEvC,IAAM,4BACXA,aAAY,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlB,CAAC,cAAc,cAAc,GAAG;AAAA;AAAA;AAGlC,CAAC,EAAE,SAAS,0CAA0C;;;AIlEjD,SAAS,0BACdC,WACY;AACZ,MAAI;AACF,WAAO,YAAYA,WAAU,QAAQA,UAAS,SAAS,EAAE,IAAI;AAAA,EAC/D,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;AAEA,SAAS,UAAUA,WAA6B,IAAkB;AAChE,QAAM,OAAOA,UAAS,SAAS,EAAE;AACjC,MAAI,KAAK,SAAS,SAAS,QAAQ;AACjC,WAAO,YAAYA,WAAU,IAAI,KAAK,IAAI;AAAA,EAC5C,WAAW,KAAK,SAAS,SAAS,MAAM;AACtC,WAAO,UAAUA,WAAU,EAAE;AAAA,EAC/B,WAAW,KAAK,SAAS,SAAS,KAAK;AACrC,WAAO,SAASA,WAAU,EAAE;AAAA,EAC9B,OAAO;AACL,WAAO,KAAK;AAAA,EACd;AACF;AAEA,SAAS,YACPA,WACA,IACA,YACY;AACZ,QAAM,OAAO,OAAO,OAAO,uBAAO,OAAO,IAAI,GAAG,UAAU;AAC1D,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,SAAK,GAAG,IAAI,UAAUA,WAAU,OAAO;AAAA,EACzC;AACA,SAAO;AACT;AAEA,SAAS,UAAUA,WAA6B,IAAoB;AAClE,QAAM,OAAe
,CAAC;AACtB,aAAW,CAAC,GAAG,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACrD,SAAK,KAAK,UAAUA,WAAU,OAAO,CAAC;AAAA,EACxC;AACA,SAAO;AACT;AAEA,SAAS,SAASA,WAA6B,IAAwB;AACrE,QAAM,OAAO,uBAAO,OAAO,IAAI;AAC/B,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,SAAK,GAAG,IAAI,UAAUA,WAAU,OAAO;AAAA,EACzC;AACA,SAAO;AACT;AAmBO,UAAU,yBACfA,WACW;AACX,MAAI;AACF,UAAM,aAAa,KAAK,UAAUA,UAAS,SAAS,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AACvE,WAAO,WAAWA,WAAU,QAAQ,UAAU;AAAA,EAChD,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;AAEA,UAAU,KAAKA,WAA6B,IAAuB;AACjE,QAAM,OAAOA,UAAS,SAAS,EAAE;AACjC,MAAI,KAAK,SAAS,SAAS,QAAQ;AACjC,WAAO,WAAWA,WAAU,IAAI,KAAK,UAAU,KAAK,IAAI,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,EACxE,WAAW,KAAK,SAAS,SAAS,MAAM;AACtC,WAAO,SAASA,WAAU,EAAE;AAAA,EAC9B,WAAW,KAAK,SAAS,SAAS,KAAK;AACrC,WAAO,QAAQA,WAAU,EAAE;AAAA,EAC7B,WAAW,KAAK,SAAS,SAAS,UAAU;AAC1C,UAAM,KAAK,UAAU,KAAK,IAAI;AAAA,EAChC;AACF;AAWA,UAAU,WACRA,WACA,IACA,YACW;AACX,MAAI,QAAQ,WAAW,SAAS;AAEhC,QAAM;AACN,QAAM;AAEN,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AAEb,UAAM,GAAG,KAAK,UAAU,GAAG,CAAC;AAC5B,WAAO,KAAKA,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;AAEA,UAAU,SAASA,WAA6B,IAAuB;AACrE,MAAI,QAAQ;AAEZ,QAAM;AACN,aAAW,CAAC,GAAG,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACrD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AACb,WAAO,KAAKA,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;AAEA,UAAU,QAAQA,WAA6B,IAAuB;AACpE,MAAI,QAAQ;AAEZ,QAAM;AACN,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AAEb,UAAM,GAAG,KAAK,UAAU,GAAG,CAAC;AAC5B,WAAO,KAAKA,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;;;ACzJO,UAAU,qBACfC,WACqC;AACrC,MAAI;AACF,WAAOA,UAAS,SAAS;AAAA,EAC3B,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;;;ACKA,IAAM,wBAAwB;AAE9B,SAAS,WAAW,OAA0B;AAC5C,SAAO,GAAG,qBAAqB,IAAI,MAAM,OAAO;AAClD;AAEA,SAAS,wBACP,OACyD;AACzD,SAAO,aAAa,KAAK,KAAK,MAAM,mBAAmB;AACzD;AAMA,UAAU,SACR,KACA,MACA,QACA,OACyC;AACzC,MAAI,wBAAwB,IAAI,GAAG;AACjC,YAAQ,KAAK,gBAAgB;AAAA,MAC3B,KAAK;AACH,eAAO,gBAAgB,KAAK,KAAK,MAAM,QAAQ,KAAK;AACpD;AAAA,MAEF,KAAK;AACH,eAAO,SAAS,KAAK,KAAK,MAAM,QAAQ,KAAK;AAC7C;AAAA,MAEF,KAAK;AACH,eAAO,QAAQ,KAAK,KAAK,MAAM,QAAQ,KAAK;AAC5C;AAAA,MAGF;AACE,oBAAY,MAAM,gCAAgC;AAAA,IACtD;AAAA,EACF,OAAO;AACL,UAAM;AAAA,MACJ,WAAW,KAAK;AAAA,MAChB;AAAA,QACE,MAAM,SAAS;AAAA,QACf;AAAA,QACA,UAAU,OAAO,CAAC;AAAA,QAClB,WAAW;AAAA,MACb;AAAA,IACF;AAAA,EACF;AACF;AAMA,UAAU,QACR,KACA,KACA,QACA,OACyC;AACzC,QAAM,WAAwB;AAAA,IAC5B,WAAW,KAAK;AAAA,IAChB,EAAE,MAAM,SAAS,KAAK,UAAU,OAAO,CAAC,GAAG,WAAW,IAAI;AAAA,EAC5D;AAGA,QAAM;AAGN,aAAW,CAAC,QAAQ,QAAQ,KAAK,OAAO,QAAQ,GAAG,GAAG;AACpD,WAAO,SAAS,QAAQ,UAAU,UAAU,KAAK;AAAA,EACnD;AACF;AAMA,UAAU,SACR,KACA,MACA,QACA,OACyC;AACzC,QAAM,KAAK,WAAW,KAAK;AAC3B,QAAM,OAAuB;AAAA,IAC3B,MAAM,SAAS;AAAA,IACf,UAAU,OAAO,CAAC;AAAA,IAClB,WAAW;AAAA,EACb;AACA,QAAM,YAAyB,CAAC,IAAI,IAAI;AAGxC,QAAM;AAGN,MAAI,WAAW,aAAa;AAC5B,aAAW,YAAY,MAAM;AAC3B,WAAO,SAAS,UAAU,UAAU,WAAW,KAAK;AACpD,eAAW,aAAa,QAAQ;AAAA,EAClC;AACF;AAUA,UAAU,gBACR,KACA,OACA,QACA,OACyC;AAEzC,QAAM,OAAmB,CAAC;AAC1B,QAAM,kBAA8C,CAAC;AAErD,aAAW,CAAC,QAAQ,QAAQ,KAAK,OAAO,QAAQ,KAAK,GAAG;AACtD,QAAI,wBAAwB,QAAQ,GAAG;AACrC,sBAAgB,KAAK,CAAC,QAAQ,QAAQ,CAAC;AAAA,IACzC,OAAO;AACL,WAAK,MAAM,IAAI;AAAA,IACjB;AAAA,EACF;AAGA,QAAM,cACJ,WAAW,OACP;AAAA,IACE,WAAW,KAAK;AAAA,IAChB;AAAA,MACE,MAAM,SAAS;AAAA,MACf;AAAA,MACA,UAAU,OAAO,CAAC;AAAA,MAClB,WAAW;AAAA,IACb;AAAA,EACF,IACA,CAAC,QAAQ,EAAE,MAAM,SAAS,QAAQ,KAAK,CAAC;AAG9C,QAAM;AAGN,aAAW,CAAC,QAAQ,QAAQ,KAAK,iBAAiB;AAChD,WAAO,SAAS,QAAQ,UAAU,aAAa,KAAK;AAAA,EACtD;AACF;AAOO,UAAU,sBACf,MACyC;AACzC,QAAM,QAAQ,EAAE,OAAO,EAAE;AACzB,SAAO,gBAAgB,QAAQ,KAAK,MAAM,MAAM,KAAK;AACvD;AAYO,SAAS,0BACdC,WACiB;AACjB,MAAI;AACF,WAAOC,aAAYD,WAAU,QAAQA,UAAS,SAAS,E
AAE,IAAI;AAAA,EAC/D,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;AAEA,SAASE,WAAUF,WAA6B,IAAuB;AACrE,QAAM,OAAOA,UAAS,SAAS,EAAE;AACjC,MAAI,KAAK,SAAS,SAAS,QAAQ;AACjC,WAAOC,aAAYD,WAAU,IAAI,KAAK,IAAI;AAAA,EAC5C,WAAW,KAAK,SAAS,SAAS,MAAM;AACtC,WAAOG,WAAUH,WAAU,EAAE;AAAA,EAC/B,WAAW,KAAK,SAAS,SAAS,KAAK;AACrC,WAAOI,UAASJ,WAAU,EAAE;AAAA,EAC9B,OAAO;AACL,WAAO,KAAK;AAAA,EACd;AACF;AAEA,SAASC,aACPD,WACA,IACA,YACiB;AAEjB,QAAM,OAAwB,OAAO;AAAA,IACnC,uBAAO,OAAO,IAAI;AAAA,IAClB;AAAA,EACF;AACA,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,SAAK,GAAG,IAAIE,WAAUF,WAAU,OAAO;AAAA,EACzC;AACA,SAAO,EAAE,gBAAgB,cAAc,KAAK;AAC9C;AAEA,SAASG,WAAUH,WAA6B,IAA2B;AACzE,QAAM,OAAoB,CAAC;AAC3B,aAAW,CAAC,GAAG,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACrD,SAAK,KAAKE,WAAUF,WAAU,OAAO,CAAC;AAAA,EACxC;AACA,SAAO,EAAE,gBAAgB,YAAY,KAAK;AAC5C;AAEA,SAASI,UAASJ,WAA6B,IAA0B;AACvE,QAAM,OAAO,uBAAO,OAAO,IAAI;AAC/B,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,SAAK,GAAG,IAAIE,WAAUF,WAAU,OAAO;AAAA,EACzC;AACA,SAAO,EAAE,gBAAgB,WAAW,KAAK;AAC3C;AAeO,UAAU,yBACfA,WACW;AACX,MAAI;AACF,UAAM,aAAa,KAAK,UAAUA,UAAS,SAAS,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AACvE,WAAOK,YAAWL,WAAU,QAAQ,UAAU;AAAA,EAChD,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;AAEA,UAAUM,MAAKN,WAA6B,IAAuB;AACjE,QAAM,OAAOA,UAAS,SAAS,EAAE;AACjC,MAAI,KAAK,SAAS,SAAS,QAAQ;AACjC,WAAOK,YAAWL,WAAU,IAAI,KAAK,UAAU,KAAK,IAAI,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,EACxE,WAAW,KAAK,SAAS,SAAS,MAAM;AACtC,WAAOO,UAASP,WAAU,EAAE;AAAA,EAC9B,WAAW,KAAK,SAAS,SAAS,KAAK;AACrC,WAAOQ,SAAQR,WAAU,EAAE;AAAA,EAC7B,WAAW,KAAK,SAAS,SAAS,UAAU;AAC1C,UAAM,KAAK,UAAU,KAAK,IAAI;AAAA,EAChC;AACF;AAWA,UAAUK,YACRL,WACA,IACA,YACW;AACX,MAAI,QAAQ,WAAW,SAAS;AAEhC,QAAM;AACN,QAAM;AAEN,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AAEb,UAAM,GAAG,KAAK,UAAU,GAAG,CAAC;AAC5B,WAAOM,MAAKN,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;AAEA,UAAUO,UAASP,WAA6B,IAAuB;AACrE,MAAI,QAAQ;AAEZ,QAAM;AACN,aAAW,CAAC,GAAG,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACrD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AACb,WAAOM,MAAKN,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;AAEA,UAAUQ,SAAQR,WAA6B,IAAuB;AACpE,MAAI,QAAQ;AAEZ,QAAM;AACN,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AAEb,UAAM,GAAG,KAAK,UAAU,GAAG,CAAC;AAC5B,WAAOM,MAAKN,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;;;ACxWA,IAAAS;AAgCO,IAAMC,cAAN,cAA+B,IAAU;AAAA;AAAA;AAAA;AAAA;AAAA,EAO9C,YACE,WACA,SACA;AACA,UAAM,OAAO;AAVf,uBAAAD;AAWE,uBAAKA,aAAa;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,YAAY,KAAQ,WAA8B;AAChD,QAAI,MAAM,IAAI,GAAG,GAAG;AAElB,aAAO,MAAM,IAAI,GAAG;AAAA,IACtB,OAAO;AACL,YAAM,KACJ,aACA,mBAAKA,gBACL,MAAM,4CAA4C;AAEpD,YAAM,QAAQ,GAAG,GAAG;AACpB,WAAK,IAAI,KAAK,KAAK;AACnB,aAAO;AAAA,IACT;AAAA,EACF;AACF;AApCEA,cAAA;;;ACdF,SAAS,gBAAyC;AAChD,SAAO,CAAC,EAAE,OAAO,QAAQ,EAAE;AAC7B;AArBA;AA2BO,IAAM,YAAN,MAA2B;AAAA,EAGhC,cAAc;AAFd;AAGE,uBAAK,MAAO,IAAIE,YAAW,MAAM,oBAAI,IAAW,CAAC;AAAA,EACnD;AAAA,EAEA,IAAI,OAAe;AACjB,QAAI,QAAQ;AACZ,eAAW,SAAS,mBAAK,MAAK,OAAO,GAAG;AACtC,eAAS,MAAM;AAAA,IACjB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,MAAkB;AACtB,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,QAAQ;AAAA,EACtC;AAAA,EAEA,CAAC,OAAmC;AAClC,eAAW,CAAC,MAAM,MAAM,KAAK,mBAAK,OAAM;AACtC,iBAAW,QAAQ,OAAO,KAAK,GAAG;AAChC,cAAM,CAAC,MAAM,IAAI;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,MAAU,MAAmB;AAC/B,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,KAAK;AAAA,EAC3C;AAAA,EAEA,IAAI,MAAU,MAAyB;AACrC,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI;AAAA,EACtC;AAAA,EAEA,IAAI,MAAU,MAAU,OAAgB;AACtC,uBAAK,MAAK,YAAY,IAAI,EAAE,IAAI,MAAM,KAAK;AAC3C,WAAO;AAAA,EACT;AAAA,EAEA,OAAO,MAAU,MAAgB;AAC/B,QAAI,CAAC,mBAAK,MAAK,IAAI,IAAI,GAAG;AACxB;AAAA,IACF;AAEA,UAAM,SAAS,mBAAK,MAAK,
IAAI,IAAI;AACjC,WAAO,OAAO,IAAI;AAClB,QAAI,OAAO,SAAS,GAAG;AACrB,yBAAK,MAAK,OAAO,IAAI;AAAA,IACvB;AAAA,EACF;AAAA,EAEA,QAAc;AACZ,uBAAK,MAAK,MAAM;AAAA,EAClB;AAAA,EAEA,EAAE,OAAO,QAAQ,IAAmC;AAClD,eAAW,CAAC,MAAM,MAAM,KAAK,mBAAK,OAAM;AACtC,iBAAW,CAAC,MAAM,KAAK,KAAK,QAAQ;AAClC,cAAM,CAAC,MAAM,MAAM,KAAK;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAAA,EAEA,UAAU,MAAqC;AAC7C,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,QAAQ,KAAK,cAAc;AAAA,EACzD;AAAA,EAEA,CAAC,SAAS,MAAU,MAAuC;AACzD,UAAM,SAAS,mBAAK,MAAK,IAAI,IAAI;AACjC,QAAI,WAAW,QAAW;AACxB;AAAA,IACF;AAEA,eAAW,MAAM,MAAM;AACrB,YAAM,QAAQ,OAAO,IAAI,EAAE;AAC3B,UAAI,UAAU,QAAW;AACvB,cAAM,CAAC,IAAI,KAAK;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,OAAO,MAAgC;AACrC,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,KAAK,KAAK,cAAc;AAAA,EACtD;AAAA,EAEA,SAAS,MAA+B;AACtC,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,OAAO,KAAK,cAAc;AAAA,EACxD;AAAA,EAEA,UAAU,MAAgB;AACxB,uBAAK,MAAK,OAAO,IAAI;AAAA,EACvB;AACF;AA5FE;;;ACQK,SAAS,qBACd,QACmB;AACnB,QAAM,MAAe,IAAI,IAA4B,MAAoB;AAEzE,MAAI,CAAC,IAAI,IAAI,MAAM,GAAG;AACpB,QAAI,IAAI,QAAQ,EAAE,MAAM,SAAS,QAAQ,MAAM,CAAC,EAAE,CAAC;AAAA,EACrD;AAKA,QAAM,UAAoE,CAAC;AAC3E,QAAM,aAAa;AACnB,aAAW,QAAQ,YAAY;AAC7B,QAAI,kBAAkB,IAAI,EAAG;AAC7B,UAAM,CAAC,IAAI,IAAI,IAAI;AACnB,YAAQ,KAAK,CAAC,KAAK,UAAU,KAAK,WAAW,EAAE,CAAC;AAAA,EAClD;AACA,UAAQ;AAAA,IAAK,CAAC,GAAG,MACf,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI;AAAA,EAC5E;AAEA,QAAM,SAAS,IAAI,UAAkC;AACrD,aAAW,CAAC,UAAU,WAAW,EAAE,KAAK,SAAS;AAC/C,WAAO,IAAI,UAAU,WAAW,EAAE;AAAA,EACpC;AAEA,WAAS,SAAS,IAA6B;AAC7C,WAAO,GAAG,IAAI,IAAI,EAAE,GAAG,mBAAmB,EAAE,EAAE;AAAA,EAChD;AAEA,SAAO;AAAA,IACL,UAAU,MACR;AAAA,MACE,IAAI,IAAI,MAAM;AAAA,MACd;AAAA,IACF;AAAA,IACF;AAAA,IACA,eAAe,CAAC,WAAW,OAAO,UAAU,MAAM;AAAA,IAClD,UAAU,MAAM;AAAA,IAChB,UAAU;AACR,UAAI,MAAM;AACV,aAAO,MAAM;AAAA,IACf;AAAA,EACF;AACF;;;AC/CO,SAAS,eAAe,QAAoC;AAMjE,iBAAe,IACb,IACA,IAC+B;AAC/B,QAAI,OAAO,QAAW;AACpB,aAAO,MAAM,OAAO,SAAS,EAAY;AAAA,IAC3C,OAAO;AACL,aAAQ,GAAkB,MAAM,MAAM,OAAO,SAAS,EAAE,CAAC;AAAA,IAC3D;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,KAAK,OAAO,SAAS,KAAK,MAAM;AAAA,IAChC,QAAQ,OAAO,YAAY,KAAK,MAAM;AAAA,EACxC;AACF;;;ACxCA,SAAS,aAAa;AAEf,IAAK,kBAAL,kBAAKC,qBAAL;AA2CL,EAAAA,kCAAA,QAAK,KAAL;AAkBA,EAAAA,kCAAA,QAAK,KAAL;AA7DU,SAAAA;AAAA,GAAA;AAgEL,IAAM,yBAAyB,MAAM,eAAe,EAAE;AAAA,EAC3D;AACF;;;ACnDA,SAAS,aAAa;AACtB,SAAS,SAAAC,QAAO,oBAAoB;AACpC,SAAS,eAAe;AACxB,SAAS,UAAAC,eAAc;;;ACjBhB,IAAK,WAAL,kBAAKC,cAAL;AACL,EAAAA,oBAAA,WAAQ,KAAR;AACA,EAAAA,oBAAA,UAAO,KAAP;AACA,EAAAA,oBAAA,aAAU,KAAV;AACA,EAAAA,oBAAA,WAAQ,KAAR;AAJU,SAAAA;AAAA,GAAA;AAOZ,SAAS,YAAY,KAAY;AAC/B,QAAM,SAAS,GAAG,IAAI,IAAI,KAAK,IAAI,OAAO;AAC1C,UACE,IAAI,OAAO,WAAW,MAAM,IAAI,IAAI,QAAQ,GAAG,MAAM;AAAA,EAAK,IAAI,SAAS,EAAE,IACzE,QAAQ;AACZ;AAhCA;AAsCO,IAAe,YAAf,MAAyB;AAAA,EAK9B,YAAY,QAA+C,cAAe;AAJ1E,wBAAgB;AAEhB,+BAAS,oBAAI,QAA4B;AAGvC,SAAK,QACH,OAAO,UAAU,WACb,QACC,cAAc,KAAK,KAAK;AAAA,EACjC;AAAA;AAAA,EAGU,YAAY,OAAyB;AAC7C,YAAQ,OAAO;AAAA,MACb,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT;AACE,eAAO,MAAM,mBAAmB;AAAA,IACpC;AAAA,EACF;AAAA;AAAA,EAGU,UAAU,KAA6B;AAC/C,WAAO,OAAO,QAAQ,WAClB,eAAe,QACb,YAAY,GAAG,IACf,KAAK,UAAU,GAAG,IACpB,OAAO,GAAG;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMU,kBAAkB,SAA6B;AACvD,UAAM,QAAQ,CAAC;AACf,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,WAAW,CAAC,CAAC,GAAG;AAClD,UAAI,MAAM,QAAW;AAEnB,cAAM,KAAK,OAAO,MAAM,WAAW,KAAK,UAAU,CAAC,IAAI;AACvD,cAAM,KAAK,GAAG,CAAC,IAAI,EAAE,EAAE;AAAA,MACzB;AAAA,IACF;AACA,WAAO,MAAM,SAAS,IAAI,IAAI,MAAM,KAAK,GAAG,CAAC,MAAM;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOU,cAAc,SAA6B;AACnD,QAAI,YAAY,mBA
AK,QAAO,IAAI,OAAO;AACvC,QAAI,cAAc,QAAW;AAC3B,kBAAY,KAAK,kBAAkB,OAAO;AAC1C,yBAAK,QAAO,IAAI,SAAS,SAAS;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AASF;AAvEE;AA6EF,IAAM,iBAAiB;AAAA,EACrB,CAAC,aAAc,GAAG;AAAA,EAClB,CAAC,YAAa,GAAG;AAAA,EACjB,CAAC,eAAgB,GAAG;AAAA,EACpB,CAAC,aAAc,GAAG;AACpB;AAEO,IAAM,gBAAN,cAA4B,UAAU;AAAA,EAC3C,IAAI,OAAiB,SAAqB,KAA2B;AACnE,YAAQ,eAAe,KAAK,CAAC;AAAA,MAC3B,KAAK,UAAU,GAAG;AAAA,MAClB,KAAK,cAAc,OAAO;AAAA,IAC5B;AAAA,EACF;AACF;AAOA,IAAM,gBAAgB;AAAA,EACpB,OAAO;AAAA,EACP,MAAM;AAAA,EACN,SAAS;AAAA,EACT,OAAO;AACT;AAOO,IAAM,SAAN,MAAM,QAAO;AAAA,EAgBlB,YACE,SAA2C,IAAI,cAAc,GAC7D,UAAsB,CAAC,GACvB;AAlBF,wBAAgB;AAChB,wBAAgB;AAChB,wBAAgB;AAChB,wBAAgB;AAEhB,wBAAgB;AAOhB,wBAAiB;AACjB,wBAAiB;AAMf,SAAK,WAAW;AAChB,SAAK,WAAW,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAExD,UAAM,WAAmB,KAAK,IAAI,GAAG,KAAK,SAAS,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAEtE,UAAM,OAAO,MAAM;AAAA,IAAC;AACpB,UAAM,YAAY,CAAC,QAAkB,CAAC,QACpC,KAAK,SAAS,QAAQ,CAACC,YAAW;AAChC,UAAIA,QAAO,SAAS,KAAK;AACvB,QAAAA,QAAO,IAAI,KAAK,KAAK,UAAU,GAAG;AAAA,MACpC;AAAA,IACF,CAAC;AAEH,SAAK,IAAI;AAAA;AAAA,MAEP,OAAO,YAAY,gBAAiB,UAAU,aAAc,IAAI;AAAA,MAChE,MAAM,YAAY,eAAgB,UAAU,YAAa,IAAI;AAAA,MAC7D,MACE,YAAY,kBAAmB,UAAU,eAAgB,IAAI;AAAA,MAC/D,OAAO,YAAY,gBAAiB,UAAU,aAAc,IAAI;AAAA;AAAA,IAElE;AAEA,SAAK,QAAQ,KAAK,EAAE,SAAS;AAC7B,SAAK,OAAO,KAAK,EAAE,QAAQ;AAC3B,SAAK,OAAO,KAAK,EAAE,QAAQ;AAC3B,SAAK,QAAQ,KAAK,EAAE,SAAS;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,YAAY,OAA2B;AACrC,UAAM,WAAuB,EAAE,GAAG,KAAK,UAAU,GAAG,MAAM;AAC1D,WAAO,IAAI,QAAO,KAAK,UAAU,QAAQ;AAAA,EAC3C;AACF;;;ACnLA,SAAS,SAAS,YAAY;;;ACHvB,SAAS,MAAM,OAAmC;AACvD,SAAO,UAAU,SAAY,IAAI,KAAK,MAAM;AAC9C;;;ADeA,SAAS,cAAc,YAAwB;AAC7C,QAAM,SAAS,IAAI,UAAkC;AACrD,aAAW,QAAQ,YAAY;AAC7B,QAAI,kBAAkB,IAAI,EAAG;AAG7B,UAAM,CAAC,IAAI,IAAI,IAAI;AACnB,UAAM,WAAW,OAAO,IAAI,KAAK,UAAU,KAAK,SAAS;AACzD,QAAI,aAAa,UAAa,KAAK,UAAU;AAC3C,aAAO,IAAI,KAAK,UAAU,KAAK,WAAW,EAAE;AAAA,IAC9C;AAAA,EACF;AACA,SAAO;AACT;AAMA,SAAS,mBAAmB,OAAgB;AAC1C,QAAM,WAAW,cAAc,KAAmB;AAElD,QAAM,QAAkB,CAAC,MAAM;AAC/B,QAAM,iBAA8B,oBAAI,IAAI;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,SAAS,MAAM,IAAI;AACzB,UAAM,OAAO,GAAG,MAAM,IAAI,MAAM,CAAC;AAEjC,QAAI,KAAK,SAAS,SAAS,QAAQ;AACjC,iBAAW,OAAO,SAAS,OAAO,MAAM,GAAG;AACzC,eAAO,KAAK,KAAK,GAAG;AAAA,MACtB;AAAA,IACF;AAEA,QAAI,KAAK,SAAS,SAAS,UAAU;AACnC,YAAM,KAAK,GAAG,SAAS,SAAS,MAAM,CAAC;AAAA,IACzC,OAAO;AACL,YAAM,SAAS,MAAM,IAAI,KAAK,QAAQ;AACtC,UAAI,QAAQ,SAAS,SAAS,QAAQ;AACpC;AAAA,MACF;AAAA,IACF;AAEA,mBAAe,IAAI,MAAM;AAAA,EAC3B;AAGA,MAAI,eAAe;AACnB,aAAW,CAAC,EAAE,KAAK,OAAO;AACxB,QAAI,CAAC,eAAe,IAAI,EAAE,GAAG;AAC3B,YAAM,OAAO,EAAE;AACf;AAAA,IACF;AAAA,EACF;AAIA,SAAO,iBAAiB,IAAI,WAAW,cAAc,KAAmB;AAC1E;AAEA,SAAS,gBACP,MACA,KACiD;AACjD,SACE,KAAK,SAAS,SAAS,UACvB,OAAO,UAAU,eAAe,KAAK,KAAK,MAAM,GAAG,KACnD,KAAK,KAAK,GAAG,MAAM;AAEvB;AAMO,IAAM,iBAAN,MAA+C;AAAA,EAMpD,YAAY,SAGT;AARH,wBAAQ;AACR,wBAAQ;AACR,wBAAQ;AACR,wBAAQ;AAMN,SAAK,SAAS,oBAAI,IAAI;AACtB,SAAK,UAAU,oBAAI,IAAI;AACvB,SAAK,OAAO,oBAAI,IAAI;AAEpB,SAAK,aAAa,SAAS,gBAAgB;AAE3C,eAAW,CAAC,KAAK,KAAK,KAAK,SAAS,gBAAgB,CAAC,GAAG;AACtD,WAAK,OAAO,IAAI,KAAK,KAAK;AAAA,IAC5B;AAAA,EACF;AAAA,EAEA,iBAAiB;AACf,WAAO,KAAK,OAAO,OAAO,QAAQ,EAAE;AAAA,EACtC;AAAA;AAAA,EAGA,wBAAwB,KAAsB;AAC5C,SAAK,OAAO,MAAM;AAClB,eAAW,CAAC,IAAI,IAAI,KAAK,sBAAsB,GAAG,GAAG;AACnD,WAAK,OAAO,IAAI,IAAI,IAAI;AAAA,IAC1B;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,KAAa;AAC1B,WAAO,KAAK,QAAQ,IAAI,GAAG;AAAA,EAC7B;AAAA,EACA,MAAM,SAAS,KAAa,OAAa;AACvC,SAAK,QAAQ,IAAI,KAAK,KAAK;AAAA,EAC7B;AAAA,EACA,MAAM,YAAY,KAAa;AAC7B,SAAK,QAAQ,OAAO,GAAG;AAAA,EACzB;AAAA,EAEA,aAAa;AACX,WAAO,EAAE,KAAK;AAAA,EAChB;AAAA,EAEA,MAAM,eAAe,OAAe;AAClC,UAAM,SAAS,GAAG,KAAK;AACvB,WAAO;AAAA,MACL,QAAQ,KAAK,KAAK,QAA
Q,GAAG,CAAC,CAAC,CAAC,MAAM,EAAE,WAAW,MAAM,CAAC;AAAA,MAC1D,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,MAAM,OAAO,MAAM,GAAG,CAAC;AAAA,IACxC;AAAA,EACF;AAAA,EACA,MAAM,gBAAgB,OAAe,KAAa,MAAkB;AAClE,SAAK,KAAK,IAAI,GAAG,KAAK,MAAM,GAAG,IAAI,IAAI;AAAA,EACzC;AAAA,EACA,MAAM,iBAAiB,OAAe,MAAgB;AACpD,eAAW,OAAO,MAAM;AACtB,WAAK,KAAK,OAAO,GAAG,KAAK,MAAM,GAAG,EAAE;AAAA,IACtC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,iCAAiC;AACrC,SAAK,KAAK,MAAM;AAAA,EAClB;AAAA;AAAA,EAGA,iBAAwC;AAGtC,UAAM,QAAQ,KAAK;AACnB,QAAI,CAAC,MAAM,IAAI,MAAM,GAAG;AACtB,YAAM,IAAI,QAAQ,EAAE,MAAM,SAAS,QAAQ,MAAM,CAAC,EAAE,CAAC;AAAA,IACvD;AAEA,UAAM,WAAW,mBAAmB,KAAK;AAEzC,aAAS,iBAAiB,UAAkB,KAA2B;AACrE,UAAI;AAEJ,iBAAW,cAAc,SAAS,OAAO,QAAQ,GAAG;AAClD,cAAM,aAAa,MAAM,UAAU;AACnC,YACE,aAAa,QACZ,YAAY,UAAa,aAAa,UACvC;AACA,oBAAU;AAAA,QACZ;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAMA,mBAAe,UACb,IACA,MACA,iBAAiB,OACF;AACf,YAAM,aAAa,MAAM,IAAI,KAAK,QAAQ;AAE1C,UAAI,eAAe,QAAW;AAC5B,cAAM,IAAI,MAAM,kBAAkB,MAAM,KAAK,QAAQ,CAAC,EAAE;AAAA,MAC1D;AAEA,UACE,KAAK,SAAS,SAAS,YACvB,WAAW,SAAS,SAAS,QAC7B;AACA,cAAM,IAAI,MAAM,kCAAkC;AAAA,MACpD;AAEA,YAAM,uBAAuB,SAAS,IAAI,KAAK,UAAU,KAAK,SAAS;AACvE,UAAI,yBAAyB,IAAI;AAE/B,cAAMC,cAAa,MAAM,IAAI,KAAK,QAAQ;AAC1C,cAAM,qBACJA,gBAAe,UACf,gBAAgBA,aAAY,KAAK,SAAS;AAC5C,YAAI,yBAAyB,UAAa,oBAAoB;AAC5D,cAAI,gBAAgB;AAClB,6BAAiB,KAAK,UAAU,KAAK,SAAS;AAAA,UAChD,OAAO;AACL,kBAAM,IAAI,MAAM,OAAO,MAAM,KAAK,SAAS,CAAC,iBAAiB;AAAA,UAC/D;AAAA,QACF;AAGA,iBAAS,IAAI,KAAK,UAAU,KAAK,WAAW,EAAE;AAAA,MAChD;AAEA,YAAM,IAAI,IAAI,IAAI;AAAA,IACpB;AAUA,mBAAe,aAAa,IAAY,QAA4B;AAClE,YAAM,OAAO,MAAM,IAAI,EAAE;AACzB,UAAI,MAAM,aAAa,QAAW;AAChC;AAAA,MACF;AAGA,UAAI,SAAS,IAAI,KAAK,UAAU,MAAM;AACpC,cAAM,IAAI,MAAM,OAAO,MAAM,MAAM,CAAC,gBAAgB;AAEtD,eAAS,OAAO,KAAK,UAAU,KAAK,SAAS;AAC7C,YAAM,UAAU,EAAE,GAAG,MAAM,WAAW,OAAO;AAC7C,YAAM,IAAI,IAAI,OAAO;AACrB,eAAS,IAAI,KAAK,UAAU,QAAQ,EAAE;AAAA,IACxC;AAWA,mBAAe,gBACb,IACA,MACA,iBAAiB,OACF;AACf,YAAM,OAAO,MAAM,IAAI,EAAE;AACzB,UAAI,MAAM,SAAS,SAAS,QAAQ;AAElC;AAAA,MACF;AAEA,iBAAW,OAAO,OAAO,KAAK,IAAI,GAAG;AAEnC,cAAM,UAAU,SAAS,IAAI,IAAI,GAAG;AACpC,YAAI,YAAY,QAAW;AACzB,cAAI,gBAAgB;AAClB,wBAAY,OAAO;AAAA,UACrB,OAAO;AACL,kBAAM,IAAI,MAAM,mCAAmC,MAAM,GAAG,CAAC,EAAE;AAAA,UACjE;AAAA,QACF;AAAA,MACF;AAEA,YAAM,IAAI,IAAI,EAAE,GAAG,MAAM,MAAM,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK,EAAE,CAAC;AAAA,IAC5D;AAKA,aAAS,YAAY,IAAkB;AACrC,YAAM,OAAO,MAAM,IAAI,EAAE;AACzB,UAAI,MAAM,aAAa,QAAW;AAChC;AAAA,MACF;AAGA,eAAS,OAAO,KAAK,UAAU,KAAK,SAAS;AAG7C,YAAM,QAAQ,CAAC,EAAE;AACjB,aAAO,MAAM,SAAS,GAAG;AACvB,cAAM,SAAS,MAAM,IAAI;AACzB,cAAM,KAAK,GAAG,SAAS,SAAS,MAAM,CAAC;AACvC,cAAM,OAAO,MAAM;AACnB,iBAAS,UAAU,MAAM;AAAA,MAC3B;AAAA,IACF;AAMA,aAAS,iBAAiB,IAAY,KAAmB;AAEvD,YAAM,OAAO,MAAM,IAAI,EAAE;AACzB,UAAI,SAAS,UAAa,gBAAgB,MAAM,GAAG,GAAG;AACpD,cAAM,EAAE,CAAC,GAAG,GAAG,GAAG,GAAG,KAAK,IAAI,KAAK;AACnC,cAAM,IAAI,IAAI,EAAE,GAAG,MAAM,MAAM,KAAK,CAAC;AAAA,MACvC;AAEA,YAAM,UAAU,SAAS,IAAI,IAAI,GAAG;AACpC,UAAI,YAAY,QAAW;AACzB,oBAAY,OAAO;AAAA,MACrB;AAAA,IACF;AAEA,UAAM,MAA6B;AAAA;AAAA;AAAA;AAAA;AAAA,MAKjC,UAAU,CAAC,OAAO,MAAM,IAAI,EAAE;AAAA;AAAA;AAAA;AAAA,MAK9B,YAAY,MAAM;AAAA;AAAA;AAAA;AAAA,MAKlB,UAAU,CAAC,OAAO,MAAM,IAAI,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAO9B,cAAc,CAAC,IAAI,QAAQ,SAAS,IAAI,IAAI,GAAG;AAAA;AAAA;AAAA;AAAA;AAAA,MAM/C,cAAc,CAAC,IAAI,QAAQ,SAAS,IAAI,IAAI,GAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAO/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAUA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,aAAa,YAAyC;AACpD,eAAO,qBAAqB,KAAK;AAAA,MACnC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;AAEO,SAAS,sBAAs
B,SAGnB;AACjB,SAAO,IAAI,eAAe,OAAO;AACnC;;;AEjZA,SAAS,OAAOC,KAAkB,KAAyB;AACzD,SAAO,EAAE,QAAQ,YAAY,IAAAA,KAAI,IAAI;AACvC;AAEA,SAAS,OAAO,WAAoC;AAClD,SAAO,EAAE,QAAQ,WAAW,aAAa,UAAU,KAAK;AAC1D;AAEA,SAAS,sBAAsBA,KAA+B;AAC5D,UAAQA,IAAG,MAAM;AAAA,IACf,KAAK,OAAO;AACV,aAAO;AAAA,QACL,MAAM,SAAS;AAAA,QACf,UAAUA,IAAG;AAAA,QACb,WAAWA,IAAG;AAAA,MAChB;AAAA,IAEF,KAAK,OAAO;AACV,aAAO;AAAA,QACL,MAAM,SAAS;AAAA,QACf,UAAUA,IAAG;AAAA,QACb,WAAWA,IAAG;AAAA,MAChB;AAAA,IAEF,KAAK,OAAO;AACV,aAAO;AAAA,QACL,MAAM,SAAS;AAAA,QACf,UAAUA,IAAG;AAAA,QACb,WAAWA,IAAG;AAAA,QACd,MAAMA,IAAG;AAAA,MACX;AAAA,IAEF,KAAK,OAAO;AACV,aAAO;AAAA,QACL,MAAM,SAAS;AAAA,QACf,UAAUA,IAAG;AAAA,QACb,WAAWA,IAAG;AAAA,QACd,MAAMA,IAAG;AAAA,MACX;AAAA,IAGF;AACE,aAAO,YAAYA,KAAI,iBAAiB;AAAA,EAC5C;AACF;AAEO,IAAM,UAAN,MAAc;AAAA,EAMnB,YAAY,YAA4B;AAHxC;AAAA;AAAA,wBAAiB;AACjB,wBAAQ;AAGN,SAAK,aAAa;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,eAAsC;AACxC,QAAI,KAAK,kBAAkB,QAAW;AACpC,YAAM,IAAI,MAAM,4CAA4C;AAAA,IAC9D;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,iBAAgE;AAC9D,WAAO,KAAK,WAAW,eAAe;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAK,QAA+B;AACxC,SAAK,gBAAgB,MAAM,KAAK,WAAW,eAAe,MAAM;AAAA,EAClE;AAAA,EAEA,SAAe;AACb,SAAK,gBAAgB;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAS,KAA+C;AAC5D,UAAM,UAA2B,CAAC;AAClC,eAAWA,OAAM,KAAK;AACpB,cAAQ,KAAK,MAAM,KAAK,QAAQA,GAAE,CAAC;AAAA,IACrC;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,QAAQA,KAA0C;AAC9D,YAAQA,IAAG,MAAM;AAAA,MACf,KAAK,OAAO;AAAA,MACZ,KAAK,OAAO;AAAA,MACZ,KAAK,OAAO;AAAA,MACZ,KAAK,OAAO;AACV,eAAO,MAAM,KAAK,cAAcA,GAAE;AAAA,MAEpC,KAAK,OAAO;AACV,eAAO,MAAM,KAAK,oBAAoBA,GAAE;AAAA,MAE1C,KAAK,OAAO;AACV,eAAO,MAAM,KAAK,oBAAoBA,GAAE;AAAA,MAE1C,KAAK,OAAO;AACV,eAAO,MAAM,KAAK,uBAAuBA,GAAE;AAAA,MAE7C,KAAK,OAAO;AACV,eAAO,MAAM,KAAK,kBAAkBA,GAAE;AAAA,MAGxC;AACE,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,iBAAO,OAAOA,GAAE;AAAA,QAClB,OAAO;AACL,iBAAO,YAAYA,KAAI,YAAY;AAAA,QACrC;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,MAAc,cAAcA,KAAgD;AAC1E,QAAI,KAAK,aAAa,SAASA,IAAG,EAAE,GAAG;AAErC,aAAO,OAAOA,GAAE;AAAA,IAClB;AAEA,UAAM,OAAO,sBAAsBA,GAAE;AAErC,UAAM,SAAS,KAAK,aAAa,SAAS,KAAK,QAAQ;AACvD,QAAI,WAAW,QAAW;AAExB,aAAO,OAAOA,GAAE;AAAA,IAClB;AAGA,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK,SAAS;AAKZ,YAAIA,IAAG,SAAS,OAAO,iBAAiB;AACtC,iBAAO,OAAOA,GAAE;AAAA,QAClB;AAAA,MAGF,KAAK,SAAS;AAEZ,cAAM,KAAK,aAAa,UAAUA,IAAG,IAAI,MAAM,IAAI;AACnD,eAAO,OAAOA,GAAE;AAAA,MAElB,KAAK,SAAS;AAGZ,eAAO,KAAK,sBAAsBA,KAAI,IAAI;AAAA,MAE5C,KAAK,SAAS;AAEZ,eAAO,OAAOA,GAAE;AAAA,MAGlB;AACE,eAAO,YAAY,QAAQ,qBAAqB;AAAA,IACpD;AAAA,EACF;AAAA,EAEA,MAAc,sBACZA,KACA,MACwB;AACxB,QAAI;AAIJ,UAAM,SAA2BA,IAAG,UAAU;AAG9C,QAAI,WAAW,UAAU;AACvB,YAAM,oBAAoB,MAAM,KAAK,eAAeA,IAAG,IAAI,IAAI;AAO/D,UAAI,sBAAsB,KAAK,WAAW;AACxC,QAAAA,MAAK,EAAE,GAAGA,KAAI,WAAW,kBAAkB;AAC3C,cAAM;AAAA,UACJ,MAAM,OAAO;AAAA,UACb,IAAIA,IAAG;AAAA,UACP,WAAW;AAAA,QACb;AACA,eAAO,OAAOA,KAAI,GAAG;AAAA,MACvB;AAGA,aAAO,OAAOA,GAAE;AAAA,IAClB,WAES,WAAW,OAAO;AAWzB,YAAM,YACJA,IAAG,cAAc,UACjBA,IAAG,cAAcA,IAAG,MACpB,KAAK,aAAa,SAASA,IAAG,SAAS,GAAG,aAAa,KAAK,WACxDA,IAAG,YACH;AAEN,UAAI,cAAc,QAAW;AAC3B,cAAM,KAAK,aAAa,YAAY,SAAS;AAAA,MAC/C;AAEA,YAAM,aAAa,KAAK,aAAa;AAAA,QACnC,KAAK;AAAA,QACL,KAAK;AAAA,MACP;AACA,UAAI,eAAe,UAAa,eAAe,WAAW;AAIxD,cAAM;AAAA,UACJ,MAAM,OAAO;AAAA,UACb,IAAI;AAAA,QACN;AAAA,MACF;AAEA,YAAM,KAAK,aAAa,UAAUA,IAAG,IAAI,MAAM,IAAI;AAEnD,aAAO,OAAOA,KAAI,GAAG;AAAA,IACvB,OAAO;AACL,aAAO,YAAY,QAAQ,gBAAgB;AAAA,IAC7C;AAAA,EACF;AAAA,EAEA,MAAc,uBACZA,KACwB;AACxB,UAAM,KAAK,aAAa,iBAAiBA,IAAG,IAAIA,IAAG,GAAG;AACtD,WAAO,OAAOA,GAAE;AAAA,EAClB;AAAA,EAEA,MAAc,oBACZA,KACwB;AACxB,UAAM,KAAK,aAAa,gBAAgBA,IAAG,IAAIA,IAAG,MAAM,IAAI;AAC5D,WAAO,OAAOA,GAAE;AAAA,EAClB;AAAA,EAEA,MAAc,kBA
CZA,KACwB;AACxB,UAAM,KAAK,aAAa,YAAYA,IAAG,EAAE;AACzC,WAAO,OAAOA,GAAE;AAAA,EAClB;AAAA,EAEA,MAAc,oBACZA,KACwB;AACxB,UAAM,cAAc,MAAM,KAAK,gBAAgBA,IAAG,IAAIA,IAAG,SAAS;AAClE,QAAI,gBAAgB,QAAW;AAE7B,aAAO,OAAOA,GAAE;AAAA,IAClB;AAMA,QAAI,gBAAgBA,IAAG,WAAW;AAChC,YAAM,aAAa,EAAE,GAAGA,KAAI,WAAW,YAAY;AACnD,YAAM,MAAa;AAAA,QACjB,MAAM,OAAO;AAAA,QACb,IAAIA,IAAG;AAAA,QACP,WAAW;AAAA,MACb;AACA,aAAO,OAAO,YAAY,GAAG;AAAA,IAC/B,OAAO;AACL,aAAO,OAAOA,GAAE;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,eACZ,IACA,MACiB;AAEjB,UAAM,MAAM,KAAK,qBAAqB,KAAK,UAAU,MAAM,KAAK,SAAS,CAAC;AAC1E,QAAI,QAAQ,KAAK,WAAW;AAC1B,aAAO,EAAE,GAAG,MAAM,WAAW,IAAI;AAAA,IACnC;AACA,UAAM,KAAK,aAAa,UAAU,IAAI,IAAI;AAC1C,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAc,gBACZ,IACA,WAC6B;AAC7B,UAAM,OAAO,KAAK,aAAa,SAAS,EAAE;AAC1C,QAAI,MAAM,aAAa,QAAW;AAChC;AAAA,IACF;AAEA,QAAI,KAAK,aAAa,SAAS,KAAK,QAAQ,GAAG,SAAS,SAAS,MAAM;AAErE;AAAA,IACF;AAEA,QAAI,KAAK,cAAc,WAAW;AAEhC,aAAO;AAAA,IACT;AAGA,UAAM,MAAM,KAAK,qBAAqB,KAAK,UAAU,MAAM,SAAS,CAAC;AACrE,QAAI,QAAQ,KAAK,WAAW;AAC1B,YAAM,KAAK,aAAa,aAAa,IAAI,GAAG;AAAA,IAC9C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,qBAAqB,UAAkB,WAAqB;AAClE,QAAI,CAAC,KAAK,aAAa,aAAa,UAAU,SAAS,GAAG;AACxD,aAAO;AAAA,IACT;AAEA,UAAM,UAAU;AAChB,UAAM,UAAU,KAAK,aAAa,iBAAiB,UAAU,OAAO;AACpE,QAAI,YAAY,QAAW;AACzB,aAAO,aAAa,SAAS,OAAO;AAAA,IACtC,OAAO;AACL,aAAO,aAAa,OAAO;AAAA,IAC7B;AAAA,EACF;AACF;;;ACvaA,SAAS,cAAc;AACvB,SAAS,cAAc;AACvB,YAAY,OAAO;AAUnB,IAAM,oBAAoB;AAOnB,IAAM,aAAN,MAAiB;AAAA,EAYtB,YAAY,QAAwB;AAXpC,wBAAiB;AAEjB,wBAAiB,OAAa,IAAM,MAAI;AACxC;AAAA,wBAAiB,mBAAkB,oBAAI,IAAyB;AAChE,wBAAiB,oBAAmB,oBAAI,IAAwB;AAEhE;AAAA,wBAAiB,YAAW,IAAI;AAAA,MAC9B,MAAM,oBAAI,IAAI;AAAA,IAChB;AACA,wBAAiB,oBAAgD,oBAAI,IAAI;AAkMzE;AAAA;AAAA;AAAA,wBAAQ,8BAA6B,OACnC,YACA,KACA,UACkB;AAElB,YAAM,oBAAoB;AAE1B,YAAM,UAAU,OAAO,OAAO,UAAU;AAExC,YAAM,aAAa,QAAQ,OAAO,CAAC,KAAK,WAAW;AACjD,eAAO,MAAM,OAAO;AAAA,MACtB,GAAG,CAAC;AACJ,UAAI,QAAQ,SAAS,GAAG;AACtB,cAAM,UAAU,OAAO,KAAK,UAAU;AAEtC,aAAK,SAAS,IAAI,OAAO,IAAI,IAAI,OAAO,CAAC;AAEzC,cAAM,eAAiB,eAAa,OAAO;AAE3C,QAAE,cAAY,KAAK,YAAY;AAG/B,cAAM,yBAA2B,sBAAoB,GAAG;AAExD,YACE,uBAAuB,SACvB,cAAc,IAAI,oBAClB;AACA,gBAAM,SAAS,OAAO;AACtB,gBAAM,KAAK,OAAO;AAAA,YAChB;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,KAAK,OAAO,iBAAiB,OAAO,OAAO;AACjD,eAAK,SAAS,IAAI,OAAO,oBAAI,IAAI,CAAC,MAAM,CAAC,CAAC;AAAA,QAC5C;AAAA,MACF;AAAA,IACF;AAEA,wBAAQ,+BAA8B,OACpC,KACA,UACmB;AACnB,YAAM,aAAa,OAAO;AAAA,QACxB,MAAM,KAAK,OAAO,eAAe,KAAK;AAAA,MACxC;AACA,YAAM,KAAK,2BAA2B,YAAY,KAAK,KAAK;AAE5D,WAAK,gBAAgB,IAAI,OAAO;AAAA,QAC9B,YAAY,OAAO;AAAA,QACnB,YAAc,oBAAkB,GAAG;AAAA,MACrC,CAAC;AACD,UAAI,KAAK,QAAQ,CAAC,GAAG,CAAC;AAEtB,aAAO;AAAA,IACT;AAzPE,SAAK,SAAS;AACd,SAAK,IAAI,GAAG,WAAW,CAAC,EAAE,QAAQ,MAAM;AACtC,cAAQ,QAAQ,CAAC,WAAkB;AACjC,eAAO,QAAQ;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAMA,MAAa,QAAQ,OAA+B;AAClD,UAAM,MAAM,MAAM,KAAK,8BAA8B,KAAK;AAC1D,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAa,cACX,QACA,cAAsB,IACtB,MACA,OAAgB,OACQ;AACxB,UAAM,SAAS,MAAM,KAAK;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,QAAI,CAAC,OAAQ,QAAO;AACpB,WAAO,OAAO,eAAe,MAAM;AAAA,EACrC;AAAA,EAEA,MAAa,oBACX,QACA,cAAsB,IACtB,MACA,OAAgB,OACY;AAC5B,UAAM,MAAM,SAAS,SAAY,MAAM,KAAK,WAAW,IAAI,IAAI,KAAK;AACpE,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AACA,QAAI;AACJ,QAAI;AAEF,4BACE,YAAY,SAAS,IAAI,OAAO,aAAa,WAAW,IAAI;AAAA,IAChE,SAAS,GAAG;AACV,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IACF;AACA,QAAI,MAAM;AACR,aAAS,wBAAsB,KAAK,mBAAmB;AAAA,IACzD;AACA,WAAS,sBAAoB,KAAK,mBAAmB;AAAA,EACvD;AAAA,EAEA,MAAa,gBAAgB,MAAqC;AAChE,U
AAM,MAAM,SAAS,SAAY,MAAM,KAAK,WAAW,IAAI,IAAI,KAAK;AACpE,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AACA,WAAO,OAAO,eAAiB,oBAAkB,GAAG,CAAC;AAAA,EACvD;AAAA,EAEA,MAAa,gBAAgB,SAGF;AACzB,UAAM,MACJ,QAAQ,SAAS,SACb,MAAM,KAAK,WAAW,QAAQ,IAAI,IAClC,KAAK;AACX,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AACA,UAAMC,YAAW,KAAK,sBAAsB,GAAG;AAC/C,WAAO,KAAK,sBAAsBA,WAAU,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAa,cACX,QACA,QACA,MACA,MACuD;AACvD,UAAM,MAAM,SAAS,SAAY,MAAM,KAAK,WAAW,IAAI,IAAI,KAAK;AACpE,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,kBAAkB,IAAI,YAAY;AAAA,IACpD;AAEA,QAAI;AAEF,YAAM,iBAAiB,KAAK,sBAAsB,GAAG;AACrD,YAAM,aACJ,OAAO,WAAW,WAAW,OAAO,aAAa,MAAM,IAAI;AAC7D,YAAMC,eAAc,OAAS,kBAAkB;AAC/C,MAAAA,aAAY,KAAK,YAAY,QAAQ;AAErC,YAAM,gBAAgB,KAAK,iBAAiB,GAAG;AAE/C,YAAM,UAAU,CAAG,iBAAe,gBAAgB,aAAa;AAC/D,UAAI,SAAS;AACX,cAAM,KAAK,iBAAiB,GAAG;AAAA,MACjC;AAEA,aAAO;AAAA,QACL,WAAW;AAAA,QACX,cAAc,MAAM,KAAK,sBAAsB,eAAe,EAAE,KAAK,CAAC;AAAA,MACxE;AAAA,IACF,SAAS,GAAG;AAEV,aAAO,KAAK,4BAA4B,OAAO,CAAC,CAAC,EAAE;AACnD,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEO,8BAA8B,OAA+B;AAClE,QAAI,UAAU,KAAK,iBAAiB,IAAI,KAAK;AAC7C,QAAI,MAAM,UAAU,eAAe,KAAK,MAAM,KAAK,kBAAkB,KAAK;AAC1E,QAAI,CAAC,KAAK;AAER,YAAM,IAAM,MAAI;AAAA,IAClB;AACA,QAAI,YAAY,QAAW;AACzB,gBAAU,KAAK,4BAA4B,KAAK,KAAK;AACrD,WAAK,iBAAiB,IAAI,OAAO,OAAO;AAAA,IAC1C;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,KAAK,SAAgC;AAChD,UAAM,KAAK,8BAA8B,YAAY;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA,EAKO,SAAe;AAAA,EAWtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,sBAAsB,KAAwB;AACpD,UAAM,QACJ,IAAI,SAAS,KAAK,IAAI,OAAO,eAAgB,IAAI;AACnD,UAAMD,YAAW,KAAK,iBAAiB,IAAI,KAAK;AAChD,QAAIA,WAAU;AACZ,aAAOA;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,GAAG;AAAA,EAClC;AAAA;AAAA,EAGQ,iBAAiB,KAAwB;AAC/C,UAAM,QACJ,IAAI,SAAS,KAAK,IAAI,OAAO,eAAgB,IAAI;AACnD,UAAMA,YAAa,WAAS,GAAG;AAC/B,SAAK,iBAAiB,IAAI,OAAOA,SAAQ;AACzC,WAAOA;AAAA,EACT;AAAA,EAgEQ,kBAAkB,MAA0B;AAClD,eAAW,UAAU,KAAK,IAAI,WAAW,GAAG;AAC1C,UAAI,OAAO,SAAS,MAAM;AACxB,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,sBACZA,WACA,EAAE,KAAK,GACU;AACjB,UAAM,kBAAkB,OAClB,mBAAiBA,SAAQ,IACzB,iBAAeA,SAAQ;AAC7B,WAAO,OAAO;AAAA,MACZ,IAAI;AAAA,QACF,MAAM,OAAO,OAAO,OAAO,WAAW,IAAI,WAAW,eAAe,CAAC;AAAA,MACvE;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,MAAc,WAAW,MAAmC;AAC1D,UAAM,SAAS,KAAK,kBAAkB,IAAI;AAC1C,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AACA,UAAM,KAAK,8BAA8B,IAAI;AAC7C,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAc,iBAAiB,KAA2B;AACxD,UAAM,QACJ,IAAI,SAAS,KAAK,IAAI,OAAO,eAAgB,IAAI;AACnD,UAAM,gBAAgB,KAAK,gBAAgB,IAAI,KAAK;AAEpD,UAAM,wBAA0B;AAAA,MAC9B;AAAA,MACA,eAAe;AAAA,IACjB;AAGA,UAAM,aAAa,eAAe,cAAc,OAAO;AACvD,QAAI,sBAAsB,SAAS,mBAAmB;AAEpD,YAAM,SAAS,OAAO;AACtB,YAAM,KAAK,OAAO;AAAA,QAChB;AAAA,QACA;AAAA,QACE,sBAAoB,GAAG;AAAA,MAC3B;AAEA,YAAM,KAAK,OAAO;AAAA,QAChB;AAAA,QACA,MAAM,KAAK,KAAK,SAAS,YAAY,KAAK,CAAC;AAAA,MAC7C;AAEA,WAAK,SAAS,IAAI,OAAO,oBAAI,IAAI,CAAC,MAAM,CAAC,CAAC;AAE1C,WAAK,gBAAgB,IAAI,OAAO;AAAA,QAC9B,YAAY,OAAO;AAAA;AAAA,QACnB,YAAc,oBAAkB,GAAG;AAAA,MACrC,CAAC;AAAA,IACH,OAAO;AAEL,YAAM,KAAK,OAAO;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,YAAM,OAAO,CAAC,UAAU;AAExB,YAAM,cAAc,KAAK,SAAS,YAAY,KAAK;AACnD,iBAAW,OAAO,MAAM;AACtB,oBAAY,IAAI,GAAG;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AACF;;;ACpVA,eAAsB,SACpB,SAC0C;AAC1C,MAAI;AACF,UAAM,OAAO,OAAO,OAAO,YAAY,aAAa,QAAQ,IAAI;AAChE,WAAO,CAAC,MAAM,MAAS;AAAA,EACzB,SAASE,QAAO;AACd,WAAO,CAAC,QAAWA,MAAU;AAAA,EAC/B;AACF;;;ACnDA;AAmCO,IAAM,YAAN,cAAkC,IAAU;AAAA,EAMjD,YACE,OAEA;AACA,UAAM;AAPR;AAAA;AAAA;AACA;AAOE,uBAAK,SAAU;AACf,uBAAK,UAAW,oBAAI,IAAI;AAAA,EAC1B;AAAA,EAEA,iBAAiB,SAA4B;AAC3C,WAAO,mBAAK,UAAS,IAAI,OAAO;AAAA,EAClC;AAAA,EAEA,OAAO,SAA4B;AACjC,UAAM,MAAM,mBAAK,UAAS,IAAI,OAAO;AACrC,WAAO,
QAAQ,SAAY,KAAK,IAAI,GAAG,IAAI;AAAA,EAC7C;AAAA,EAEA,IAAI,KAAQ,OAAgB;AAC1B,UAAM,UAAU,mBAAK,SAAL,WAAa;AAC7B,UAAM,aAAa,mBAAK,UAAS,IAAI,OAAO;AAC5C,QAAI,eAAe,UAAa,eAAe,KAAK;AAClD,YAAM,IAAI,MAAM,cAAc,OAAO,OAAO,CAAC,iBAAiB;AAAA,IAChE;AACA,uBAAK,UAAS,IAAI,SAAS,GAAG;AAC9B,WAAO,MAAM,IAAI,KAAK,KAAK;AAAA,EAC7B;AAAA,EAEA,OAAO,YAAwB;AAC7B,UAAM,QAAQ,KAAK,IAAI,UAAU;AACjC,QAAI,UAAU,QAAW;AACvB,YAAM,aAAa,mBAAK,SAAL,WAAa;AAChC,yBAAK,UAAS,OAAO,UAAU;AAAA,IACjC;AACA,WAAO,MAAM,OAAO,UAAU;AAAA,EAChC;AACF;AAvCE;AACA;;;ACdK,SAAS,kBAAkB,QAAkC;AAClE,QAAM,cAAc,OAAO,OAAO,CAAC,KAAK,QAAQ,MAAM,IAAI,QAAQ,CAAC;AACnE,QAAM,SAAS,IAAI,WAAW,WAAW;AACzC,MAAI,SAAS;AACb,aAAW,OAAO,QAAQ;AACxB,WAAO,IAAI,KAAK,MAAM;AACtB,cAAU,IAAI;AAAA,EAChB;AACA,SAAO;AACT;AAEO,SAAS,iBACd,OACA,OACA,QACA,OACA,YACkC;AAClC,SAAO;AAAA,IACL,MAAM,cAAc;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,cAAc,CAAC;AAAA,EACvB;AACF;;;ARQA,IAAM,kBAAkBC,OAAM,gBAAgB;AAE9C,IAAM,2BAA2B,KAAK;AAAA,EACpC,GAAG,OAAO,OAAO,eAAe,EAAE;AAAA,IAChC,CAAC,MAAmB,OAAO,MAAM;AAAA,EACnC;AACF;AAGA,IAAM,wBAAwB,OAAO;AAAA,EACnC,OAAO,QAAQ,aAAa,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;AACtD;AAEA,IAAM,aAAa,IAAI,OAAO;AAAA;AAE9B,CAAC;AAsBD,SAAS,qBAAqB;AAC5B,QAAM,WAA4B,CAAC;AACnC,SAAO;AAAA,IACL,OAAO,CAAC,MAAqB,KAAK,SAAS,KAAK,CAAC;AAAA,IACjD,SAAS,MAAM,QAAQ,WAAW,QAAQ;AAAA,EAC5C;AACF;AAEA,SAAS,UACP,MACwB;AACxB,SAAO,KAAK,UAAU,IAAI;AAC5B;AAEO,SAAS,aAAa,MAAyB;AACpD,SAAO,EAAE,MAAM,OAAO,aAAa,IAAI,OAAO,KAAK;AACrD;AAEA,SAAS,UAAUC,KAAsB;AAMvC,QAAM,EAAE,MAAM,GAAG,GAAG,KAAK,IAAIA;AAC7B,SAAO;AACT;AA1HA;AAiLO,IAAM,iBAAN,MAAgD;AAAA;AAAA,EAsBrD,YACE,QACA,QACA,OACA;AAtBF;AAAA;AAAA,wBAAgB;AAChB;AAAA,wBAAgB;AAChB;AAAA,wBAAgB;AAGhB;AAAA,wBAAgB;AAChB,wBAAgB;AAChB;AAAA,wBAAgB;AAChB;AAAA,wBAAgB;AAEhB;AAAA,uBAAS;AACT,uBAAS;AACT;AAGA;AAAA;AAQE,SAAK,UAAU,OAAO;AACtB,SAAK,QAAQ,OAAO;AACpB,SAAK,OAAO,OAAO;AACnB,SAAK,SAAS,OAAO;AACrB,SAAK,OAAO,OAAO,QAAS;AAC5B,SAAK,aAAa,OAAO;AACzB,uBAAK,UAAW;AAChB,uBAAK,SAAU;AAEf,UAAM,MAAM,oBAAI,KAAK;AACrB,SAAK,YAAY;AACjB,uBAAK,gBAAiB;AACtB,uBAAK,uCAAwC;AAAA,EAC/C;AAAA,EAEA,IAAI,eAAqB;AACvB,UAAM,WAAW,mBAAK,UAAS,uBAAuB;AACtD,QAAI,YAAY,WAAW,mBAAK,iBAAgB;AAC9C,aAAO;AAAA,IACT,OAAO;AACL,aAAO,mBAAK;AAAA,IACd;AAAA,EACF;AAAA,EAEA,IAAI,sCAA+C;AACjD,WAAO,mBAAK;AAAA,EACd;AAAA,EAEA,WAAW,MAAM,oBAAI,KAAK,GAAS;AACjC,QAAI,MAAM,mBAAK,iBAAgB;AAC7B,yBAAK,gBAAiB;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,yCAA+C;AAC7C,uBAAK,uCAAwC;AAAA,EAC/C;AAAA,EAEA,WAAmB;AACjB,SAAK,WAAW;AAEhB,UAAM,OAAO,mBAAK,UAAS,KAAK,MAAM;AACtC,QAAI,mBAAK,UAAS;AAChB,UAAI,OAAO,GAAG;AACZ,gBAAQ;AAAA,UACN,kCAAkC,KAAK,KAAK;AAAA,QAC9C;AAAA,MACF,WAAW,SAAS,GAAG;AACrB,gBAAQ;AAAA,UACN,kCAAkC,KAAK,KAAK;AAAA,QAC9C;AAAA,MACF,OAAO;AAEL,gBAAQ,IAAI,iBAAiB,KAAK,KAAK,UAAU;AAAA,MACnD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,KAAK,WAAqE;AACxE,UAAM,OACJ,OAAO,cAAc,WAAW,YAAY,UAAU,SAAS;AACjE,UAAM,OAAO,mBAAK,UAAS,KAAK,IAAI;AACpC,QAAI,mBAAK,UAAS;AAChB,UAAI,OAAO,GAAG;AACZ,gBAAQ;AAAA,UACN,mCAAmC,KAAK,KAAK;AAAA,QAC/C;AAAA,MACF,WAAW,SAAS,GAAG;AACrB,gBAAQ;AAAA,UACN,mCAAmC,KAAK,KAAK;AAAA,QAC/C;AAAA,MACF;AAEA,YAAM,OAAO,KAAK,MAAM,IAAI;AAC5B,iBAAW,OAAO,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,IAAI,GAAG;AACrD,gBAAQ;AAAA,UACN,iBAAiB,KAAK,KAAK,MACzB,sBAAsB,IAAI,IAAI,KAAK,IAAI,IACzC,KAAK,KAAK,UAAU,GAAG,CAAC;AAAA,QAC1B;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,YAAY,MAAc,QAAuB;AAC/C,uBAAK,UAAS,MAAM,MAAM,MAAM;AAAA,EAClC;AACF;AA5GW;AACA;AACT;AAGA;AAyGK,IAAM,iBAAN,cAA6B,eAA6B;AAAA;AAAA,EAE/D,YACE,QACA,QACA,OACA;AACA,UAAM,QAAQ,QAAQ,KAAK;AAAA,EAC7B;AACF;AAtTA,IAAAC,UAAA;AA6aO,IAAM,OAAN,MAAyD;AAAA,EAmD9D,YAAY,MAAU,SAAkC;AAhDxD;AAAA,wBAAO;AACP,wBAAgB;AAChB,wBAAO;AAEP,wB
AAQ,cAAmC;AAC3C,wBAAQ,SAA6B;AACrC,wBAAQ,UAAS;AAEjB,wBAAiB,YAAW,IAAI,UAI9B,CAAC,MAAM,EAAE,KAAK;AAEhB,wBAAiB;AA+BjB,uBAASA;AACT,uBAAS;AAGP,UAAM,SAAS,SAAS,WAAW,sBAAsB;AACzD,SAAK,OAAO;AACZ,SAAK,SAAS;AACd,SAAK,SAAS,SAAS,UAAU;AACjC,uBAAK,kBAAmB,SAAS,kBAAkB;AACnD,SAAK,QAAQ;AAAA,MACX,oBACE,SAAS,OAAO,uBACf,MAAM;AACL,eAAO;AAAA,UACL,SAAS;AAAA,QACX;AAAA,MACF;AAAA;AAAA,MAGF,gBAAgB,SAAS,OAAO;AAAA,MAChC,eAAe,SAAS,OAAO;AAAA,MAE/B,kBAAkB,SAAS,OAAO;AAAA,MAClC,iBAAiB,SAAS,OAAO;AAAA,MAEjC,mBAAmB,SAAS,OAAO;AAAA,MACnC,iBAAiB,SAAS,OAAO;AAAA,MAEjC,+BACE,SAAS,OAAO;AAAA,MAClB,4BAA4B,SAAS,OAAO;AAAA,IAC9C;AACA,uBAAKA,UAAU,SAAS,sBAAsB;AAAA,EAChD;AAAA,EAEA,IAAW,eAA6B;AACtC,QAAI,KAAK,eAAe,MAAM;AAC5B,aAAO;AAAA,IACT,WAAW,KAAK,UAAU,MAAM;AAC9B,aAAO;AAAA,IACT,OAAO;AACL,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,IAAY,cAA0B;AAAE,WAAO,KAAK,SAAS;AAAA,EAAM;AAAA;AAAA,EAEnE,IAAY,UAA0B;AAAE,WAAO,KAAK,KAAK;AAAA,EAAS;AAAA;AAAA,EAClE,IAAY,aAA0B;AAAE,WAAO,KAAK,KAAK;AAAA,EAAY;AAAA;AAAA,EAErE,IAAW,QAA0B;AAAE,WAAO,KAAK,KAAK;AAAA,EAAO;AAAA;AAAA,EAE/D,IAAY,OAA0B;AAAE,WAAO,KAAK,SAAS,MAAM,oCAAoC;AAAA,EAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAW1G,MAAa,KAAK,KAAwB;AACxC,QAAI,KAAK,eAAe,MAAM;AAC5B,WAAK,QAAQ;AACb,WAAK,aAAa,KAAK,MAAM,GAAG,EAAE,MAAM,CAAC,MAAM;AAC7C,aAAK,QAAQ;AACb,aAAK,aAAa;AAClB,cAAM;AAAA,MACR,CAAC;AAAA,IACH;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOO,OAAO,KAAe;AAC3B,SAAK,MAAM,mBAAmB,GAAG;AACjC,QAAI,KAAK,OAAO;AACd,WAAK,QAAQ,OAAO;AACpB,WAAK,WAAW,OAAO;AAAA,IACzB;AAEA,SAAK,aAAa;AAElB,SAAK,MAAM,kBAAkB,GAAG;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAa,aACX,SACyB;AACzB,UAAM,SAAS,SAAS,SAAS,KAAK,aAAa;AACnD,UAAM,aAAaC,QAAO;AAC1B,UAAM,OAAO,SAAS;AACtB,UAAM,SAAyB;AAAA,MAC7B,SAAS,SAAS,WAAW;AAAA,MAC7B,OAAO,MAAM;AAAA,MACb;AAAA,MACA,MAAM,SAAS;AAAA,MACf,YAAY,SAAS;AAAA,MACrB,MAAM,SAAS,KACX,EAAE,IAAI,QAAQ,IAAI,KAAK,IACvB,EAAE,aAAa,SAAS,eAAeA,QAAO,GAAG,KAAK;AAAA,MAC1D,QAAQ,SAAS,UAAU,CAAC,YAAY;AAAA,IAC1C;AACA,QAAI,mBAAKD,WAAS;AAChB,cAAQ,IAAI,uBAAuB,KAAK,UAAU,MAAM,CAAC,EAAE;AAAA,IAC7D;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,oCAEX;AACA,UAAM,SAAU,MAAM,KAAK,aAAa;AACxC,UAAM,qBAA+C,CAAC;AACtD,UAAM,OAAO;AAAA,MACX,MAAM,CAAC,SAAS;AACd,YAAI,OAAO,SAAS,UAAU;AAC5B,6BAAmB,KAAK,IAA8B;AAAA,QACxD;AACA,eAAO;AAAA,MACT;AAAA,MACA,OAAO,MAAM;AAAA,MAAC;AAAA;AAAA,IAChB;AACA,UAAM,UAAU,IAAI,eAAe,QAAQ,MAAM,KAAK;AACtD,WAAO,CAAC,SAAS,kBAAkB;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYO,gBACL,UAKM;AACN,QAAI,KAAK,SAAS,OAAO,GAAG;AAC1B,YAAM,IAAI,MAAM,uDAAuD;AAAA,IACzE;AAEA,eAAW,EAAE,QAAQ,QAAQ,aAAa,KAAK,UAAU;AACvD,YAAM,aAAa,IAAI,eAAe,QAAQ,QAAQ,mBAAKA,SAAO;AAClE,WAAK,SAAS,IAAI,OAAO,YAAY,UAAU;AAC/C,iBAAW,WAAW,YAAY;AAAA,IACpC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUO,oBACL,QACA,QACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACM;AACN,QAAI;AACJ,YACG,WAAW,KAAK,SAAS,iBAAiB,OAAO,KAAK,OAAO,QAC9D;AAOA,WAAK;AAAA,QACH;AAAA,QACA,oBAAU;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,WAAK,OAAO;AAAA,QACV,8BAA8B,OAAO,KAAK;AAAA,MAC5C;AAAA,IACF;AAEA,UAAM,aAAa,IAAI,eAAe,QAAQ,QAAQ,mBAAKA,SAAO;AAClE,SAAK,SAAS,IAAI,OAAO,YAAY,UAAU;AAE/C,UAAM,QAA8D,CAAC;AACrE,eAAW,WAAW,KAAK,cAAc,OAAO,UAAU,GAAG;AAC3D,YAAM,QAAQ,KAAK,IAAI;AAAA,QACrB,IAAI,QAAQ,KAAK;AAAA,QACjB,MAAM,QAAQ,KAAK;AAAA,QACnB,QAAQ,QAAQ;AAAA,MAClB;AAAA,IACF;AAEA,eAAW;AAAA,MACT;AAAA,QACE,WAAW;AAAA,QACX,OAAO;AAAA;AAAA,QACP,WAAW;AAAA,QACX;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,SAAK;AAAA,MACH,OAAO;AAAA,MACP;AAAA,QACE,MAAM,cAAc;AAAA,QACpB,OAAO,WAAW;AAAA,QAClB,IAAI,WAAW,KAAK;AAAA,QACpB,MAAM,WAAW,KAAK;AAAA,QACtB,QAAQ,WAAW;AAAA,MACrB;AAAA,MACA;AAAA,MACA;A
AAA,IACF;AAGA,UAAM,KAAK,KAAK,MAAM,oBAAoB,YAAY,GAAG;AACzD,QAAI,GAAI,OAAM,EAAE;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASO,kBACL,KACA,MACA,QACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACM;AACN,UAAM,WAAW,KAAK;AAEtB,UAAM,UAAU,SAAS,IAAI,GAAG;AAChC,QAAI,YAAY,OAAW;AAE3B,YAAQ,YAAY,MAAM,MAAM;AAEhC,UAAM,UAAU,SAAS,OAAO,GAAG;AACnC,QAAI,SAAS;AACX,iBAAW,SAAS,KAAK,cAAc,GAAG,GAAG;AAC3C,cAAM,KAAK,EAAE,MAAM,cAAc,WAAW,OAAO,QAAQ,MAAM,CAAC;AAAA,MACpE;AAGA,YAAM,KAAK,KAAK,MAAM,kBAAkB,SAAS,GAAG;AACpD,UAAI,GAAI,OAAM,EAAE;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKO,aACL,WACA,MACA,QACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACQ;AACR,QAAI,QAAQ;AACZ,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,UAAU;AAC1C,UAAI,UAAU,OAAO,GAAG;AACtB;AACA,aAAK,kBAAkB,KAAK,MAAM,QAAQ,KAAK,KAAK;AAAA,MACtD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAa,WACX,KACA,MACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACe;AACf,UAAM,OACJ,OAAO,SAAS,WAAW,OAAO,MAAM,4BAA4B;AAEtE,QAAI,SAAS,QAAQ;AACnB,YAAM,KAAK,WAAW,KAAK,GAAG;AAAA,IAChC,OAAO;AACL,YAAM,OAAO,aAAa,IAAI;AAC9B,YAAM,WAAW,gBAAgB,OAAO,IAAI;AAE5C,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,SACJ,QAAQ,IAAI,aAAa,eACrB,aAAa,SAAS,KAAK,IAC3B;AAEN,aAAK;AAAA,UACH;AAAA,UACA,oBAAU;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF;AAMA,UAAI,KAAK,SAAS,KAAQ;AAAA,MAK1B,WAAW,KAAK,SAAS,KAAO;AAAA,MAGhC;AAEA,WAAK;AAKL,UAAI;AACF,cAAM,KAAK,iBAAiB,KAAK,SAAS,OAAO,GAAG;AAAA,MACtD,UAAE;AACA,aAAK;AAAA,MACP;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAa,iBACX,KACA,UACA,KACe;AACf,UAAM,KAAK,KAAK,GAAG;AACnB,UAAM,EAAE,OAAO,QAAQ,IAAI,mBAAmB;AAC9C,UAAM,KAAK,MAAM;AAAA,MAAa,MAC5B,KAAK,sCAAsC,KAAK,UAAU,KAAK,KAAK;AAAA,IACtE;AAIA,UAAM,QAAQ;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBA,MAAa,mCACX,SACA,UACA,KACe;AACf,UAAM,KAAK,KAAK,GAAG;AACnB,UAAM,EAAE,OAAO,QAAQ,IAAI,mBAAmB;AAC9C,UAAM,KAAK,MAAM;AAAA,MAAa,MAC5B,KAAK;AAAA,QACH;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAIA,UAAM,QAAQ;AAAA,EAChB;AAAA,EAEO,WACL,YACoC;AACpC,WAAO,KAAK,SAAS,IAAI,UAAU;AAAA,EACrC;AAAA,EAEO,eAAyC;AAC9C,WAAO,MAAM,KAAK,KAAK,SAAS,OAAO,CAAC;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,aACL,QACA,WACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACM;AACN,UAAM,MAAM,UAAU,SAAS;AAC/B,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,oBAAoB,MAAM,GAAG;AAC7D,YAAM,UAAU,QAAQ,KAAK,GAAG;AAChC,UAAI,YAAY,GAAG;AAIjB,aAAK;AAAA,UACH;AAAA,UACA,oBAAU;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKO,UACL,WACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACM;AACN,UAAM,MAAM,UAAU,SAAS;AAC/B,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,UAAU;AAC1C,YAAM,UAAU,QAAQ,KAAK,GAAG;AAChC,UAAI,YAAY,GAAG;AAIjB,aAAK;AAAA,UACH;AAAA,UACA,oBAAU;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,eAAiC;AAC7C,UAAM,UAAU,IAAI,QAAQ,KAAK,MAAM;AACvC,UAAM,QAAQ,KAAK,KAAK,MAAM;AAC9B,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,kBAAuC;AACnD,UAAM,aAAa,IAAI,WAAW,KAAK,MAAM;AAC7C,UAAM,WAAW,KAAK,KAAK,MAAM;AACjC,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAc,MAAM,KAAwB;AAC1C,UAAM,KAAK,MAAM,iBAAiB,GAAG;AAGrC,UAAM,UAAU,MAAM,KAAK,aAAa;AACxC,UAAM,aAAa,MAAM,KAAK,gBAAgB;AAE9C,SAAK,QAAQ;AAAA,MACX,OAAO,IAAI,MAAM;AAAA,MACjB;AAAA,MACA;AAAA,IACF;AAEA,UAAM,KAAK,MAAM,gBAAgB,GAAG;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eAAiC;AAC7C,WAAQ,MAAM,KAAK,OAAO,WAAW;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,CAAS,oBACP,YACiD;AACjD,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,UAAU;AAC1C,UAAI,QAAQ,YAAY;AACtB,cAAM,CAAC,
KAAK,OAAO;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,CAAS,cACP,YACmC;AACnC,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,UAAU;AAC1C,UAAI,QAAQ,YAAY;AACtB,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,WAAW,YAAwB,KAAwB;AACvE,UAAM,UAAU,KAAK,SAAS,IAAI,UAAU;AAC5C,QAAI,YAAY,QAAW;AACzB,WAAK,OACF,YAAY,EAAE,WAAW,CAAC,EAC1B,KAAK,+CAA+C;AACvD;AAAA,IACF;AAEA,UAAM,OAAO,QAAQ,SAAS;AAI9B,QAAI,SAAS,GAAG;AACd,YAAM,KAAK,MAAM,YAAY,GAAG;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,MAAc,sCACZ,YACA,UACA,KACA,OACe;AACf,UAAM,UAAU,KAAK,SAAS,IAAI,UAAU;AAC5C,QAAI,CAAC,SAAS;AACZ,WAAK,OACF,YAAY,EAAE,WAAW,CAAC,EAC1B,KAAK,qDAAqD;AAC7D;AAAA,IACF;AAKA,UAAM,WAAwB,CAAC;AAC/B,UAAM,UAAuB,CAAC;AAC9B,UAAM,mBAAmB,CAAC,QACxB,KAAK,QAAQ,KAAK,GAAG;AACvB,UAAM,iBAAiB,CAAC,QAAmB,KAAK,SAAS,KAAK,GAAG;AACjE,UAAM,gBAAgB,CAAC,QAAmB,KAAK,QAAQ,KAAK,GAAG;AAE/D,eAAW,OAAO,UAAU;AAC1B,YAAM,eAAe,KAAK,MAAM,mBAAmB,KAAK,OAAO;AAC/D,UAAI,aAAa,SAAS;AACxB,cAAM,KAAK;AAAA,UACT;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF,OAAO;AACL,YAAI,CAAC,QAAQ,qCAAqC;AAChD,kBAAQ,KAAK;AAAA,YACX,MAAM,cAAc;AAAA,YACpB,OACE,IAAI,SAAS,cAAc,iBACvB,IAAI,IAAI,IAAI,CAACD,QAAOA,IAAG,IAAI,IAC3B,CAAC;AAAA,YACP,QAAQ,aAAa;AAAA,UACvB,CAAC;AACD,kBAAQ,uCAAuC;AAAA,QACjD;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,SAAS,GAAG;AACvB,WAAK,aAAa,YAAY,UAAU,KAAK,KAAK;AAAA,IACpD;AAEA,QAAI,QAAQ,SAAS,GAAG;AACtB,cAAQ,KAAK,OAAO;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,wDACZ,SACA,UACA,KACA,OACe;AAIf,UAAM,WAAwB,CAAC;AAC/B,UAAM,qBAAkC,CAAC;AACzC,UAAM,eAA4B,CAAC;AAEnC,UAAM,mBAAmB,CAAC,QAAiC;AACzD,UAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,mBAAW,KAAK,KAAK;AACnB,6BAAmB,KAAK,CAAC;AAAA,QAC3B;AAAA,MACF,OAAO;AACL,2BAAmB,KAAK,GAAG;AAAA,MAC7B;AAAA,IACF;AACA,UAAM,iBAAiB,CAAC,QAAmB,KAAK,SAAS,KAAK,GAAG;AACjE,UAAM,gBAAgB,CAAC,QAAmB,KAAK,aAAa,KAAK,GAAG;AAEpE,eAAW,OAAO,UAAU;AAC1B,YAAM,KAAK;AAAA,QACT;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,QAAI,mBAAmB,SAAS,GAAG;AACjC,cAAQ,KAAK,kBAAkB;AAC/B,yBAAmB,SAAS;AAAA,IAC9B;AAEA,QAAI,SAAS,SAAS,GAAG;AACvB,WAAK,aAAa,eAA6B,UAAU,KAAK,KAAK;AACnE,eAAS,SAAS;AAAA,IACpB;AAEA,QAAI,aAAa,SAAS,GAAG;AAC3B,cAAQ,KAAK,YAAY;AACzB,mBAAa,SAAS;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAc,UACZ,SACA,KACA,kBACA,gBACA,eACA,KACA,OACe;AACf,QAAI,CAAC,KAAK,MAAM,SAAS,GAAG;AAC1B,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,YAAQ,IAAI,MAAM;AAAA,MAChB,KAAK,cAAc,iBAAiB;AAElC,uBAAe;AAAA,UACb,MAAM,cAAc;AAAA,UACpB,OAAO,QAAQ;AAAA,UACf,MAAM,IAAI;AAAA,UACV,aAAa,IAAI;AAAA,QACnB,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,cAAc,iBAAiB;AAElC,uBAAe;AAAA,UACb,MAAM,cAAc;AAAA,UACpB,OAAO,QAAQ;AAAA,UACf,OAAO,IAAI;AAAA,QACb,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,cAAc,eAAe;AAChC,YAAI,QAAQ,uBAA+B;AACzC,cAAI,mBAAK,mBAAkB;AACzB,kBAAM,kBAAkB;AAExB,uBAAW,SAAS;AAAA,cAClB,yBAAyB,KAAK,QAAQ,aAAa,WAAW,CAAC;AAAA,cAC/D;AAAA,YACF,GAAG;AAMD,+BAAiB;AAAA,gBACf,MAAM,cAAc;AAAA,gBACpB,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAAA,UACF,OAAO;AACL,6BAAiB;AAAA,cACf,MAAM,cAAc;AAAA,cACpB,OAAO,MAAM;AAAA,gBACX,yBAAyB,KAAK,QAAQ,aAAa,WAAW,CAAC;AAAA,cACjE;AAAA,YACF,CAAC;AAAA,UACH;AAEA,2BAAiB,EAAE,MAAM,cAAc,mBAAmB,CAAC;AAAA,QAC7D,OAAO;AACL,2BAAiB;AAAA,YACf,MAAM,cAAc;AAAA,YACpB,OAAO,MAAM,KAAK,KAAK,QAAQ,aAAa,WAAW,CAAC;AAAA,UAC1D,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,KAAK,cAAc,gBAAgB;AAIjC,aAAK,OAAO,uBAAuB;AAEnC,cAAM,SAAS,MAAM,KAAK,QAAQ,SAAS,IAAI,GAAG;AAElD,cAAM,eAA+B,OAAO;AAAA,UAAQ,CAAC,MACnD,EAAE,WAAW,aAAa,CAAC,EAAE,EAAE,IAAI,CAAC;AAAA,QACtC;AAEA,cAAM,gBAAgC,OAAO,QAAQ,CAAC,MAAM;AAC1D,kBAAQ,EAAE,QAAQ;AAAA,YAChB,KAAK;AAIH,qBAAO,EAAE,gBAAgB,SACrB,CAAC,aAAa,EAAE,WAAW,CAAC,IAC5B,CAAC;AAAA,YAEP,KAAK;AACH,qBAAO,EAAE,QAAQ,SAAY,CAAC,EAAE
,GAAG,IAAI,CAAC;AAAA,YAG1C;AACE,qBAAO,YAAY,GAAG,gBAAgB;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,YAAI,aAAa,SAAS,GAAG;AAC3B,yBAAe;AAAA,YACb,MAAM,cAAc;AAAA,YACpB,KAAK,aAAa,IAAI,SAAS;AAAA,UACjC,CAAC;AACD,wBAAc;AAAA,YACZ,MAAM,cAAc;AAAA,YACpB,KAAK;AAAA,UACP,CAAC;AAAA,QACH;AAEA,YAAI,cAAc,SAAS,GAAG;AAC5B,2BAAiB;AAAA,YACf,MAAM,cAAc;AAAA,YACpB,KAAK;AAAA,UACP,CAAC;AAAA,QACH;AAEA,YAAI,aAAa,SAAS,GAAG;AAI3B,gBAAM,KAAK,KAAK,MAAM,gCAAgC,GAAG;AACzD,cAAI,GAAI,OAAM,EAAE;AAAA,QAClB;AACA;AAAA,MACF;AAAA,MAEA,KAAK,cAAc,YAAY;AAC7B,cAAM,SAAS,IAAI;AACnB,cAAM,OAAO,IAAI;AACjB,cAAM,OAAO,IAAI;AACjB,cAAM,CAAC,QAAQ,aAAa,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,UAC5D,KAAK,WAAW,cAAc,KAAK,QAAQ,QAAQ,MAAM,IAAI;AAAA,UAC7D,KAAK,WAAW,gBAAgB,IAAI;AAAA,UACpC,KAAK,WAAW,gBAAgB,EAAE,MAAM,KAAK,CAAC;AAAA,QAChD,CAAC;AAED,YAAI,WAAW,QAAQ,iBAAiB,MAAM;AAC5C,2BAAiB;AAAA,YACf,MAAM,cAAc;AAAA,YACpB;AAAA,YACA,QAAQ;AAAA;AAAA,YACR;AAAA,YACA;AAAA,YACA,IAAI;AAAA,YACJ,oBAAoB;AAAA,UACtB,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,KAAK,cAAc,aAAa;AAC9B,cAAM,SAAS,IAAI;AACnB,cAAM,OAAO,IAAI;AACjB,cAAM,OAAO,IAAI;AACjB,cAAM,CAAC,QAAQG,MAAK,IAAI,MAAM;AAAA,UAC5B,KAAK,WAAW,cAAc,KAAK,QAAQ,QAAQ,MAAM,IAAI;AAAA,QAC/D;AAEA,YAAIA;AAEF;AAEF,aAAK;AAAA,UACH;AAAA,YACE,MAAM,cAAc;AAAA,YACpB;AAAA,YACA;AAAA,YACA,QAAQ;AAAA,YACR,aAAa;AAAA,YACb,IAAI;AAAA,YACJ,oBAAoB,OAAO;AAAA,UAC7B;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,YAAI,OAAO,WAAW;AACpB,gBAAM,KAAK,KAAK,MAAM,6BAA6B,KAAK,OAAO;AAC/D,cAAI,GAAI,OAAM,EAAE;AAAA,QAClB;AAEA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,YAAI;AACF,iBAAO,YAAY,KAAK,yBAAyB;AAAA,QACnD,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAh6BWF,WAAA;AACA;","names":["_a","error","_a","entries","WebsocketCloseCodes","error","constant","object","optional","string","taggedUnion","object","constant","optional","string","taggedUnion","snapshot","snapshot","snapshot","buildObject","buildNode","buildList","buildMap","emitObject","emit","emitList","emitMap","_defaultFn","DefaultMap","DefaultMap","ProtocolVersion","array","nanoid","LogLevel","target","parentNode","op","snapshot","applyUpdate","error","array","op","__debug","nanoid","error"]}
+
{"version":3,"sources":["../src/decoders/ClientMsg.ts","../src/decoders/jsonYolo.ts","../src/decoders/Op.ts","../src/decoders/y-types.ts","../src/formats/LossyJson.ts","../src/formats/NodeStream.ts","../src/formats/PlainLson.ts","../src/makeInMemorySnapshot.ts","../src/lib/DefaultMap.ts","../src/lib/NestedMap.ts","../src/MetadataDB.ts","../src/protocol/ProtocolVersion.ts","../src/Room.ts","../src/lib/Logger.ts","../src/plugins/InMemoryDriver.ts","../src/lib/text.ts","../src/Storage.ts","../src/YjsStorage.ts","../src/lib/tryCatch.ts","../src/lib/UniqueMap.ts","../src/utils.ts"],"sourcesContent":["/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Json, JsonObject } from \"@liveblocks/core\";\nimport { ClientMsgCode } from \"@liveblocks/core\";\nimport type { Decoder } from \"decoders\";\nimport {\n array,\n boolean,\n constant,\n number,\n object,\n optional,\n string,\n taggedUnion,\n} from \"decoders\";\n\nimport type {\n BroadcastEventClientMsg,\n ClientMsg,\n FetchStorageClientMsg,\n FetchYDocClientMsg,\n UpdatePresenceClientMsg,\n UpdateStorageClientMsg,\n UpdateYDocClientMsg,\n} from \"~/protocol\";\n\nimport { jsonObjectYolo, jsonYolo } from \"./jsonYolo\";\nimport { op } from \"./Op\";\nimport type { YUpdate, YVector } from \"./y-types\";\nimport { guidDecoder } from \"./y-types\";\n\nconst updatePresenceClientMsg: Decoder<UpdatePresenceClientMsg<JsonObject>> =\n object({\n type: constant(ClientMsgCode.UPDATE_PRESENCE),\n data: jsonObjectYolo,\n targetActor: optional(number),\n });\n\nconst broadcastEventClientMsg: Decoder<BroadcastEventClientMsg<Json>> = object({\n type: constant(ClientMsgCode.BROADCAST_EVENT),\n event: jsonYolo,\n});\n\nconst fetchStorageClientMsg: Decoder<FetchStorageClientMsg> = object({\n type: constant(ClientMsgCode.FETCH_STORAGE),\n});\n\nconst updateStorageClientMsg: Decoder<UpdateStorageClientMsg> = object({\n type: constant(ClientMsgCode.UPDATE_STORAGE),\n ops: array(op),\n});\n\nconst fetchYDocClientMsg: Decoder<FetchYDocClientMsg> = object({\n type: constant(ClientMsgCode.FETCH_YDOC),\n vector: string.refineType<YVector>(),\n guid: optional(guidDecoder), // Don't specify to update the root doc\n v2: optional(boolean),\n});\n\nconst updateYDocClientMsg: Decoder<UpdateYDocClientMsg> = object({\n type: constant(ClientMsgCode.UPDATE_YDOC),\n update: string.refineType<YUpdate>(),\n guid: optional(guidDecoder), // Don't specify to update the root doc\n v2: optional(boolean),\n});\n\nexport const clientMsgDecoder: Decoder<ClientMsg<JsonObject, Json>> =\n taggedUnion(\"type\", {\n [ClientMsgCode.UPDATE_PRESENCE]: updatePresenceClientMsg,\n [ClientMsgCode.BROADCAST_EVENT]: broadcastEventClientMsg,\n [ClientMsgCode.FETCH_STORAGE]: fetchStorageClientMsg,\n [ClientMsgCode.UPDATE_STORAGE]: updateStorageClientMsg,\n [ClientMsgCode.FETCH_YDOC]: fetchYDocClientMsg,\n 
[ClientMsgCode.UPDATE_YDOC]: updateYDocClientMsg,\n }).describe(\"Must be a valid client message\");\n\nexport const transientClientMsgDecoder: Decoder<ClientMsg<JsonObject, Json>> =\n taggedUnion(\"type\", {\n // [ClientMsgCode.UPDATE_PRESENCE]: updatePresenceClientMsg,\n // [ClientMsgCode.BROADCAST_EVENT]: broadcastEventClientMsg,\n // [ClientMsgCode.FETCH_STORAGE]: fetchStorageClientMsg,\n [ClientMsgCode.UPDATE_STORAGE]: updateStorageClientMsg,\n // [ClientMsgCode.FETCH_YDOC]: fetchYDocClientMsg,\n // [ClientMsgCode.UPDATE_YDOC]: updateYDocClientMsg,\n }).describe(\"Must be a valid transient client message\");\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Json, JsonObject } from \"@liveblocks/core\";\nimport type { Decoder } from \"decoders\";\nimport { unknown } from \"decoders\";\n\n/**\n * Drop-in replacement for the `json` decoder from the decoders standard\n * library, but implemented as a no-op. This is, of course, only safe to use in\n * contexts where you know that the input already is valid JSON.\n *\n * You know this for sure, for example, if you're decoding the result of\n * a `JSON.parse()` call.\n *\n * Done for performance reasons!\n */\nexport const jsonYolo: Decoder<Json> = unknown as Decoder<Json>;\n\n/**\n * Drop-in replacement for the `jsonObject` decoder from the decoders standard\n * library, but implemented as just a check for plain old JavaScript object.\n * This is, of course, only safe to use in contexts where you know that the\n * input already is valid JSON.\n *\n * You know this for sure, for example, if you're decoding the result of\n * a `JSON.parse()` call.\n *\n * Done for performance reasons!\n */\nexport const jsonObjectYolo: Decoder<JsonObject> = jsonYolo.refine(\n (value): value is JsonObject =>\n value !== null && typeof value === \"object\" && !Array.isArray(value),\n \"Must be JSON object\"\n);\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { OpCode } from \"@liveblocks/core\";\nimport type { Decoder } from \"decoders\";\nimport { constant, object, optional, string, taggedUnion } from \"decoders\";\n\nimport type {\n ClientWireOp,\n CreateListOp,\n CreateMapOp,\n CreateObjectOp,\n CreateRegisterOp,\n DeleteCrdtOp,\n DeleteObjectKeyOp,\n SetParentKeyOp,\n UpdateObjectOp,\n} from \"~/protocol\";\n\nimport { jsonObjectYolo, jsonYolo } from \"./jsonYolo\";\n\ntype HasOpId = { opId: string };\n\nconst updateObjectOp: Decoder<UpdateObjectOp & HasOpId> = object({\n type: constant(OpCode.UPDATE_OBJECT),\n opId: string,\n id: string,\n data: jsonObjectYolo,\n});\n\nconst createObjectOp: Decoder<CreateObjectOp & HasOpId> = object({\n type: constant(OpCode.CREATE_OBJECT),\n opId: string,\n id: string,\n parentId: string,\n parentKey: string,\n data: jsonObjectYolo,\n intent: optional(constant(\"set\")),\n deletedId: optional(string),\n});\n\nconst createListOp: Decoder<CreateListOp & HasOpId> = object({\n type: constant(OpCode.CREATE_LIST),\n opId: string,\n id: string,\n parentId: string,\n parentKey: string,\n intent: optional(constant(\"set\")),\n deletedId: optional(string),\n});\n\nconst createMapOp: Decoder<CreateMapOp & HasOpId> = object({\n type: constant(OpCode.CREATE_MAP),\n opId: string,\n id: string,\n parentId: string,\n parentKey: string,\n intent: optional(constant(\"set\")),\n deletedId: optional(string),\n});\n\nconst createRegisterOp: Decoder<CreateRegisterOp & HasOpId> = object({\n type: constant(OpCode.CREATE_REGISTER),\n opId: string,\n id: string,\n parentId: string,\n parentKey: string,\n data: jsonYolo,\n intent: optional(constant(\"set\")),\n deletedId: optional(string),\n});\n\nconst deleteCrdtOp: Decoder<DeleteCrdtOp & HasOpId> = object({\n type: constant(OpCode.DELETE_CRDT),\n opId: string,\n id: string,\n});\n\nconst setParentKeyOp: Decoder<SetParentKeyOp & HasOpId> = object({\n type: constant(OpCode.SET_PARENT_KEY),\n opId: string,\n id: string,\n parentKey: string,\n});\n\nconst deleteObjectKeyOp: Decoder<DeleteObjectKeyOp & HasOpId> = object({\n type: constant(OpCode.DELETE_OBJECT_KEY),\n opId: string,\n id: string,\n key: string,\n});\n\nexport const op: Decoder<ClientWireOp> = taggedUnion(\"type\", {\n [OpCode.UPDATE_OBJECT]: updateObjectOp,\n [OpCode.CREATE_OBJECT]: createObjectOp,\n [OpCode.CREATE_LIST]: createListOp,\n [OpCode.CREATE_MAP]: createMapOp,\n [OpCode.CREATE_REGISTER]: createRegisterOp,\n [OpCode.DELETE_CRDT]: deleteCrdtOp,\n [OpCode.SET_PARENT_KEY]: setParentKeyOp,\n [OpCode.DELETE_OBJECT_KEY]: deleteObjectKeyOp,\n});\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Brand } from \"@liveblocks/core\";\nimport { uuid } from \"decoders\";\n\n/**\n * A guid, a unique identifier for a Yjs sub document.\n */\nexport type Guid = Brand<string, \"Guid\">;\n\nexport const guidDecoder = uuid.refineType<Guid>();\n\nexport const ROOT_YDOC_ID = \"root\";\nexport type YDocId = typeof ROOT_YDOC_ID | Guid /* unique ID for subdoc */;\n\n/**\n * Any string that is a valid base64 encoded YJS update.\n */\nexport type YUpdate = Brand<string, \"YUpdate\">;\n\n/**\n * Any string that is a valid base64 encoded YJS state vector.\n */\nexport type YVector = Brand<string, \"YVector\">;\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Json, JsonObject } from \"@liveblocks/core\";\nimport { CrdtType } from \"@liveblocks/core\";\n\nimport type { IReadableSnapshot } from \"~/interfaces\";\n\n// ---------------------------------------------------------------------------\n// Non-streaming version\n// ---------------------------------------------------------------------------\n\n/**\n * Serialize a storage snapshot to a simple JSON representation, returning a\n * full in-memory JsonObject. Faster than snapshotToLossyJson_lazy for\n * small/medium documents because the result can be passed straight to\n * JSON.stringify(). 
This format is lossy — the original storage structure\n * cannot be reconstructed from it, so it's output-only.\n */\nexport function snapshotToLossyJson_eager(\n snapshot: IReadableSnapshot\n): JsonObject {\n try {\n return buildObject(snapshot, \"root\", snapshot.get_root().data);\n } finally {\n snapshot.destroy();\n }\n}\n\nfunction buildNode(snapshot: IReadableSnapshot, id: string): Json {\n const node = snapshot.get_node(id);\n if (node.type === CrdtType.OBJECT) {\n return buildObject(snapshot, id, node.data);\n } else if (node.type === CrdtType.LIST) {\n return buildList(snapshot, id);\n } else if (node.type === CrdtType.MAP) {\n return buildMap(snapshot, id);\n } else {\n return node.data;\n }\n}\n\nfunction buildObject(\n snapshot: IReadableSnapshot,\n id: string,\n staticData: JsonObject\n): JsonObject {\n const data = Object.assign(Object.create(null), staticData) as JsonObject;\n for (const [key, childId] of snapshot.iter_children(id)) {\n data[key] = buildNode(snapshot, childId);\n }\n return data;\n}\n\nfunction buildList(snapshot: IReadableSnapshot, id: string): Json[] {\n const data: Json[] = [];\n for (const [_, childId] of snapshot.iter_children(id)) {\n data.push(buildNode(snapshot, childId));\n }\n return data;\n}\n\nfunction buildMap(snapshot: IReadableSnapshot, id: string): JsonObject {\n const data = Object.create(null) as JsonObject;\n for (const [key, childId] of snapshot.iter_children(id)) {\n data[key] = buildNode(snapshot, childId);\n }\n return data;\n}\n\n// ---------------------------------------------------------------------------\n// Streaming version\n// ---------------------------------------------------------------------------\n\n// Generator-of-strings type alias for brevity of signatures\ntype StringGen = Generator<string, void, never>;\n\n/**\n * Serialize a storage snapshot to a simple JSON representation. This format is\n * easy to consume but lossy — the original storage structure cannot be\n * reconstructed from it, so it's an output-only format. Slower than\n * snapshotToLossyJson_eager but can stream documents that don't fit entirely\n * in memory.\n *\n * This generator yields text chunks that together, when concatenated, form the\n * output JSON document.\n */\nexport function* snapshotToLossyJson_lazy(\n snapshot: IReadableSnapshot\n): StringGen {\n try {\n const staticJson = JSON.stringify(snapshot.get_root().data).slice(1, -1);\n yield* emitObject(snapshot, \"root\", staticJson);\n } finally {\n snapshot.destroy();\n }\n}\n\nfunction* emit(snapshot: IReadableSnapshot, id: string): StringGen {\n const node = snapshot.get_node(id);\n if (node.type === CrdtType.OBJECT) {\n yield* emitObject(snapshot, id, JSON.stringify(node.data).slice(1, -1));\n } else if (node.type === CrdtType.LIST) {\n yield* emitList(snapshot, id);\n } else if (node.type === CrdtType.MAP) {\n yield* emitMap(snapshot, id);\n } else if (node.type === CrdtType.REGISTER) {\n yield JSON.stringify(node.data);\n }\n}\n\n/**\n * @param staticJson - The object's static (non-CRDT) properties as a raw JSON\n * string without the surrounding braces, e.g. `\"foo\":1,\"bar\":\"hi\"`.\n *\n * Children are emitted _after_ the static properties. 
If a child key\n * collides with a static key (which shouldn't normally happen, but\n * defensively), the child wins because JSON.parse keeps the last value\n * for duplicate keys.\n */\nfunction* emitObject(\n snapshot: IReadableSnapshot,\n id: string,\n staticJson: string\n): StringGen {\n let comma = staticJson.length > 0;\n\n yield \"{\";\n yield staticJson;\n\n for (const [key, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n\n yield `${JSON.stringify(key)}:`;\n yield* emit(snapshot, childId);\n }\n yield \"}\";\n}\n\nfunction* emitList(snapshot: IReadableSnapshot, id: string): StringGen {\n let comma = false;\n\n yield \"[\";\n for (const [_, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n yield* emit(snapshot, childId);\n }\n yield \"]\";\n}\n\nfunction* emitMap(snapshot: IReadableSnapshot, id: string): StringGen {\n let comma = false;\n\n yield \"{\";\n for (const [key, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n\n yield `${JSON.stringify(key)}:`;\n yield* emit(snapshot, childId);\n }\n yield \"}\";\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { StorageNode } from \"@liveblocks/core\";\n\nimport type { IReadableSnapshot } from \"~/interfaces\";\n\n/**\n * Yield all nodes from a snapshot as [id, crdt] tuples.\n * Destroys the snapshot when done (or aborted).\n */\nexport function* snapshotToNodeStream(\n snapshot: IReadableSnapshot\n): Generator<StorageNode, void, never> {\n try {\n yield* snapshot.iter_all();\n } finally {\n snapshot.destroy();\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type {\n JsonObject,\n ObjectStorageNode,\n PlainLson,\n PlainLsonFields,\n PlainLsonList,\n PlainLsonMap,\n PlainLsonObject,\n RootStorageNode,\n SerializedList,\n StorageNode,\n} from \"@liveblocks/core\";\nimport {\n assertNever,\n CrdtType,\n isJsonObject,\n makePosition,\n} from \"@liveblocks/core\";\n\nimport type { IReadableSnapshot } from \"~/interfaces\";\n\nconst SERVER_INIT_OP_PREFIX = \"si\";\n\nfunction generateId(state: { clock: number }) {\n return `${SERVER_INIT_OP_PREFIX}:${state.clock++}`;\n}\n\nfunction isSpecialPlainLsonValue(\n value: PlainLson\n): value is PlainLsonObject | PlainLsonMap | PlainLsonList {\n return isJsonObject(value) && value.liveblocksType !== undefined;\n}\n\n/**\n * Generator that yields NodeTuples for a JSON value.\n * Always yields parent nodes before their children.\n */\nfunction* iterJson(\n key: string,\n data: PlainLson,\n parent: StorageNode,\n state: { clock: number }\n): Generator<StorageNode, void, undefined> {\n if (isSpecialPlainLsonValue(data)) {\n switch (data.liveblocksType) {\n case \"LiveObject\":\n yield* iterObjectInner(key, data.data, parent, state);\n return;\n\n case \"LiveList\":\n yield* iterList(key, data.data, parent, state);\n return;\n\n case \"LiveMap\":\n yield* iterMap(key, data.data, parent, state);\n return;\n\n // istanbul ignore next\n default:\n assertNever(data, \"Unknown `liveblocksType` field\");\n }\n } else {\n yield [\n generateId(state),\n {\n type: CrdtType.REGISTER,\n data,\n parentId: parent[0],\n parentKey: key,\n },\n ];\n }\n}\n\n/**\n * Generator that yields NodeTuples for a LiveMap.\n * Yields the map node first, then its children.\n */\nfunction* iterMap(\n key: string,\n map: PlainLsonFields,\n parent: StorageNode,\n state: { clock: number }\n): Generator<StorageNode, void, undefined> {\n const mapTuple: StorageNode = [\n generateId(state),\n { type: CrdtType.MAP, parentId: parent[0], parentKey: key },\n ];\n\n // Yield the map node first (parent before children)\n yield mapTuple;\n\n // Then yield all children\n for (const [subKey, subValue] of Object.entries(map)) {\n yield* iterJson(subKey, subValue, mapTuple, state);\n }\n}\n\n/**\n * Generator that yields NodeTuples for a LiveList.\n * Yields the list node first, then its children.\n */\nfunction* iterList(\n key: string,\n list: PlainLson[],\n parent: StorageNode,\n state: { clock: number }\n): Generator<StorageNode, void, undefined> {\n const id = generateId(state);\n const crdt: SerializedList = {\n type: CrdtType.LIST,\n parentId: parent[0],\n parentKey: key,\n };\n const listTuple: StorageNode = [id, crdt];\n\n // Yield the list node first (parent before children)\n yield listTuple;\n\n // Then yield all children\n let position = makePosition();\n for (const subValue of list) {\n yield* iterJson(position, subValue, listTuple, state);\n position = makePosition(position);\n }\n}\n\n/**\n * Generator that yields NodeTuples for a LiveObject.\n * Yields the object node first, then its children.\n *\n * Note: The object's data field is populated with non-special values\n * (primitives, arrays, plain objects), while special values (LiveObject,\n * LiveList, LiveMap) are yielded as separate nodes.\n */\nfunction* iterObjectInner(\n key: string,\n value: PlainLsonFields,\n parent: StorageNode | null,\n state: { clock: number }\n): Generator<StorageNode, void, undefined> {\n // First pass: collect non-special data and identify special children\n const data: JsonObject = {};\n const 
specialChildren: Array<[string, PlainLson]> = [];\n\n for (const [subKey, subValue] of Object.entries(value)) {\n if (isSpecialPlainLsonValue(subValue)) {\n specialChildren.push([subKey, subValue]);\n } else {\n data[subKey] = subValue;\n }\n }\n\n // Create the object tuple with collected data\n const objectTuple: RootStorageNode | ObjectStorageNode =\n parent !== null\n ? [\n generateId(state),\n {\n type: CrdtType.OBJECT,\n data,\n parentId: parent[0],\n parentKey: key,\n },\n ]\n : [\"root\", { type: CrdtType.OBJECT, data }];\n\n // Yield the object node first (parent before children)\n yield objectTuple;\n\n // Then yield all special children\n for (const [subKey, subValue] of specialChildren) {\n yield* iterJson(subKey, subValue, objectTuple, state);\n }\n}\n\n/**\n * Transform a \"Plain LSON\" document to a lazy NodeStream. Used to initialize\n * the storage with a predefined state.\n * Always emits parent nodes before their children.\n */\nexport function* plainLsonToNodeStream(\n root: PlainLsonObject\n): Generator<StorageNode, void, undefined> {\n const state = { clock: 1 };\n yield* iterObjectInner(\"root\", root.data, null, state);\n}\n\n// ---------------------------------------------------------------------------\n// Non-streaming serialization: builds a full PlainLsonObject in memory.\n// ---------------------------------------------------------------------------\n\n/**\n * Serialize a storage snapshot to \"Plain LSON\" format, returning a full\n * in-memory PlainLsonObject. Faster than snapshotToPlainLson_lazy for\n * small/medium documents because the result can be passed straight to\n * JSON.stringify().\n */\nexport function snapshotToPlainLson_eager(\n snapshot: IReadableSnapshot\n): PlainLsonObject {\n try {\n return buildObject(snapshot, \"root\", snapshot.get_root().data);\n } finally {\n snapshot.destroy();\n }\n}\n\nfunction buildNode(snapshot: IReadableSnapshot, id: string): PlainLson {\n const node = snapshot.get_node(id);\n if (node.type === CrdtType.OBJECT) {\n return buildObject(snapshot, id, node.data);\n } else if (node.type === CrdtType.LIST) {\n return buildList(snapshot, id);\n } else if (node.type === CrdtType.MAP) {\n return buildMap(snapshot, id);\n } else {\n return node.data;\n }\n}\n\nfunction buildObject(\n snapshot: IReadableSnapshot,\n id: string,\n staticData: JsonObject\n): PlainLsonObject {\n // Static data values are Json, which is a subset of PlainLson\n const data: PlainLsonFields = Object.assign(\n Object.create(null),\n staticData\n ) as PlainLsonFields;\n for (const [key, childId] of snapshot.iter_children(id)) {\n data[key] = buildNode(snapshot, childId);\n }\n return { liveblocksType: \"LiveObject\", data };\n}\n\nfunction buildList(snapshot: IReadableSnapshot, id: string): PlainLsonList {\n const data: PlainLson[] = [];\n for (const [_, childId] of snapshot.iter_children(id)) {\n data.push(buildNode(snapshot, childId));\n }\n return { liveblocksType: \"LiveList\", data };\n}\n\nfunction buildMap(snapshot: IReadableSnapshot, id: string): PlainLsonMap {\n const data = Object.create(null) as PlainLsonFields;\n for (const [key, childId] of snapshot.iter_children(id)) {\n data[key] = buildNode(snapshot, childId);\n }\n return { liveblocksType: \"LiveMap\", data };\n}\n\n// ---------------------------------------------------------------------------\n// Streaming serialization: yields string chunks that concatenate to JSON.\n// ---------------------------------------------------------------------------\n\n// Generator-of-strings type alias 
for brevity of signatures\ntype StringGen = Generator<string, void, never>;\n\n/**\n * Serialize a storage snapshot to \"Plain LSON\" format. Yields string chunks\n * that, when concatenated, form a valid JSON string representing the storage\n * document. Slower than snapshotToPlainLson_eager but can stream documents\n * that don't fit entirely in memory.\n */\nexport function* snapshotToPlainLson_lazy(\n snapshot: IReadableSnapshot\n): StringGen {\n try {\n const staticJson = JSON.stringify(snapshot.get_root().data).slice(1, -1);\n yield* emitObject(snapshot, \"root\", staticJson);\n } finally {\n snapshot.destroy();\n }\n}\n\nfunction* emit(snapshot: IReadableSnapshot, id: string): StringGen {\n const node = snapshot.get_node(id);\n if (node.type === CrdtType.OBJECT) {\n yield* emitObject(snapshot, id, JSON.stringify(node.data).slice(1, -1));\n } else if (node.type === CrdtType.LIST) {\n yield* emitList(snapshot, id);\n } else if (node.type === CrdtType.MAP) {\n yield* emitMap(snapshot, id);\n } else if (node.type === CrdtType.REGISTER) {\n yield JSON.stringify(node.data);\n }\n}\n\n/**\n * @param staticJson - The object's static (non-CRDT) properties as a raw JSON\n * string without the surrounding braces, e.g. `\"foo\":1,\"bar\":\"hi\"`.\n *\n * Children are emitted _after_ the static properties. If a child key\n * collides with a static key (which shouldn't normally happen, but\n * defensively), the child wins because JSON.parse keeps the last value\n * for duplicate keys.\n */\nfunction* emitObject(\n snapshot: IReadableSnapshot,\n id: string,\n staticJson: string\n): StringGen {\n let comma = staticJson.length > 0;\n\n yield '{\"liveblocksType\":\"LiveObject\",\"data\":{';\n yield staticJson;\n\n for (const [key, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n\n yield `${JSON.stringify(key)}:`;\n yield* emit(snapshot, childId);\n }\n yield \"}}\";\n}\n\nfunction* emitList(snapshot: IReadableSnapshot, id: string): StringGen {\n let comma = false;\n\n yield '{\"liveblocksType\":\"LiveList\",\"data\":[';\n for (const [_, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n yield* emit(snapshot, childId);\n }\n yield \"]}\";\n}\n\nfunction* emitMap(snapshot: IReadableSnapshot, id: string): StringGen {\n let comma = false;\n\n yield '{\"liveblocksType\":\"LiveMap\",\"data\":{';\n for (const [key, childId] of snapshot.iter_children(id)) {\n if (comma) yield \",\";\n else comma = true;\n\n yield `${JSON.stringify(key)}:`;\n yield* emit(snapshot, childId);\n }\n yield \"}}\";\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type {\n NodeMap,\n NodeStream,\n SerializedChild,\n SerializedCrdt,\n SerializedRootObject,\n StorageNode,\n} from \"@liveblocks/core\";\nimport { CrdtType, isRootStorageNode, nn } from \"@liveblocks/core\";\n\nimport type { IReadableSnapshot } from \"~/interfaces\";\nimport { NestedMap } from \"~/lib/NestedMap\";\n\n/**\n * Create a basic in-memory snapshot from a set of storage nodes.\n *\n * Takes a copy of the provided nodes, so the snapshot is isolated from\n * subsequent mutations to the source.\n */\nexport function makeInMemorySnapshot(\n values: NodeMap | NodeStream\n): IReadableSnapshot {\n const map: NodeMap = new Map<string, SerializedCrdt>(values as NodeStream);\n\n if (!map.has(\"root\")) {\n map.set(\"root\", { type: CrdtType.OBJECT, data: {} });\n }\n\n // Collect child entries, sort by (parentId, parentKey), then insert into\n // the revMap so that entriesAt() returns children in parent_key order\n // without needing to re-sort on every iter_children call.\n const entries: Array<[parentId: string, parentKey: string, id: string]> = [];\n const nodeStream = map as NodeStream;\n for (const node of nodeStream) {\n if (isRootStorageNode(node)) continue;\n const [id, crdt] = node;\n entries.push([crdt.parentId, crdt.parentKey, id]);\n }\n entries.sort((a, b) =>\n a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : a[1] < b[1] ? -1 : a[1] > b[1] ? 1 : 0\n );\n\n const revMap = new NestedMap<string, string, string>();\n for (const [parentId, parentKey, id] of entries) {\n revMap.set(parentId, parentKey, id);\n }\n\n function get_node(id: string): SerializedChild {\n return nn(map.get(id), `Node not found: ${id}`) as SerializedChild;\n }\n\n return {\n get_root: () =>\n nn(\n map.get(\"root\"),\n \"Root not found\"\n ) as SerializedCrdt as SerializedRootObject,\n get_node,\n iter_children: (nodeId) => revMap.entriesAt(nodeId),\n iter_all: () => map as Iterable<StorageNode>,\n destroy() {\n map.clear();\n revMap.clear();\n },\n };\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { raise } from \"@liveblocks/core\";\n\n/**\n * Like ES6 map, but takes a default (factory) function which will be used\n * to create entries for missing keys on the fly.\n *\n * Useful for code like:\n *\n * const map = new DefaultMap(() => []);\n * map.getOrCreate('foo').push('hello');\n * map.getOrCreate('foo').push('world');\n * map.getOrCreate('foo')\n * // ['hello', 'world']\n *\n */\nexport class DefaultMap<K, V> extends Map<K, V> {\n #defaultFn?: (key: K) => V;\n\n /**\n * If the default function is not provided to the constructor, it has to be\n * provided in each .getOrCreate() call individually.\n */\n constructor(\n defaultFn?: (key: K) => V,\n entries?: readonly (readonly [K, V])[] | null\n ) {\n super(entries);\n this.#defaultFn = defaultFn;\n }\n\n /**\n * Gets the value at the given key, or creates it.\n *\n * Difference from normal Map: if the key does not exist, it will be created\n * on the fly using the factory function, and that value will get returned\n * instead of `undefined`.\n */\n getOrCreate(key: K, defaultFn?: (key: K) => V): V {\n if (super.has(key)) {\n // eslint-disable-next-line no-restricted-syntax\n return super.get(key)!;\n } else {\n const fn =\n defaultFn ??\n this.#defaultFn ??\n raise(\"DefaultMap used without a factory function\");\n\n const value = fn(key);\n this.set(key, value);\n return value;\n }\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { DefaultMap } from \"./DefaultMap\";\n\nfunction emptyIterator(): IterableIterator<never> {\n return [][Symbol.iterator]();\n}\n\n/**\n * Like an ES6 Map, but two levels deep. Useful for building reverse lookup\n * tables. Will automatically delete second-level maps when they are empty.\n */\nexport class NestedMap<K1, K2, V> {\n #map: DefaultMap<K1, Map<K2, V>>;\n\n constructor() {\n this.#map = new DefaultMap(() => new Map<K2, V>());\n }\n\n get size(): number {\n let total = 0;\n for (const value of this.#map.values()) {\n total += value.size;\n }\n return total;\n }\n\n count(key1: K1): number {\n return this.#map.get(key1)?.size ?? 0;\n }\n\n *keys(): IterableIterator<[K1, K2]> {\n for (const [key1, nested] of this.#map) {\n for (const key2 of nested.keys()) {\n yield [key1, key2];\n }\n }\n }\n\n has(key1: K1, key2: K2): boolean {\n return this.#map.get(key1)?.has(key2) ?? 
false;\n }\n\n get(key1: K1, key2: K2): V | undefined {\n return this.#map.get(key1)?.get(key2);\n }\n\n set(key1: K1, key2: K2, value: V): this {\n this.#map.getOrCreate(key1).set(key2, value);\n return this;\n }\n\n delete(key1: K1, key2: K2): void {\n if (!this.#map.has(key1)) {\n return;\n }\n\n const nested = this.#map.get(key1)!;\n nested.delete(key2);\n if (nested.size === 0) {\n this.#map.delete(key1);\n }\n }\n\n clear(): void {\n this.#map.clear();\n }\n\n *[Symbol.iterator](): IterableIterator<[K1, K2, V]> {\n for (const [key1, nested] of this.#map) {\n for (const [key2, value] of nested) {\n yield [key1, key2, value];\n }\n }\n }\n\n entriesAt(key1: K1): IterableIterator<[K2, V]> {\n return this.#map.get(key1)?.entries() ?? emptyIterator();\n }\n\n *filterAt(key1: K1, keys: Iterable<K2>): Iterable<[K2, V]> {\n const nested = this.#map.get(key1);\n if (nested === undefined) {\n return;\n }\n\n for (const k2 of keys) {\n const value = nested.get(k2);\n if (value !== undefined) {\n yield [k2, value];\n }\n }\n }\n\n keysAt(key1: K1): IterableIterator<K2> {\n return this.#map.get(key1)?.keys() ?? emptyIterator();\n }\n\n valuesAt(key1: K1): IterableIterator<V> {\n return this.#map.get(key1)?.values() ?? emptyIterator();\n }\n\n deleteAll(key1: K1): void {\n this.#map.delete(key1);\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { Awaitable, Json } from \"@liveblocks/core\";\nimport type { Decoder } from \"decoders\";\n\nimport type { IStorageDriver } from \"~/interfaces\";\n\nexport interface MetadataDB {\n // Getter supports optional decoder\n get(key: string): Promise<Json | undefined>;\n get<T>(decoder: Decoder<T>, key: string): Promise<T | undefined>;\n\n put(key: string, value: Json): Awaitable<void>;\n delete(key: string): Awaitable<void>;\n}\n\n/**\n * Returns a thin wrapper around an IStorageDriver to provide MetadataDB\n * functionality, including type-safe reads.\n */\nexport function makeMetadataDB(driver: IStorageDriver): MetadataDB {\n async function get(key: string): Promise<Json | undefined>;\n async function get<T>(\n decoder: Decoder<T>,\n key: string\n ): Promise<T | undefined>;\n async function get<T>(\n a1: string | Decoder<T>,\n a2?: string\n ): Promise<T | Json | undefined> {\n if (a2 === undefined) {\n return await driver.get_meta(a1 as string);\n } else {\n return (a1 as Decoder<T>).value(await driver.get_meta(a2));\n }\n }\n\n return {\n get,\n put: driver.put_meta.bind(driver),\n delete: driver.delete_meta.bind(driver),\n };\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { enum_ } from \"decoders\";\n\nexport enum ProtocolVersion {\n //\n // NOTE:\n // The following versions were once used, but there is no usage of it anymore\n // in the wild, so we've removed support for them:\n //\n // V1 - Initial version\n // V2 - ?\n // V3 - started to broadcast storage operations to the sender to fix some\n // conflicts\n // V4 - created a virtual root to fix an issue where multiple people\n // initialize the storage at the same time\n // V5 - started to broadcast messages in a batch (arrays) for clients\n // V6 - started to validate inputs with decoders.\n //\n\n /**\n * V7 changes the URL params used to authorize the user.\n *\n * In V6 and lower, the ?token= URL param is used, which will only ever\n * contain a `pub-legacy` or `sec-legacy` token.\n *\n * URL PARAM CHANGES:\n * Starting with V7, the ?token= is no longer a legal URL param. Instead,\n * either of the following params is used:\n *\n * - ?tok=... for ID tokens\n * - ?tok=... for Access tokens\n * - ?tok=... for Secret Legacy tokens\n * - ?pubkey=... 
for public keys (no token, public key can be directly used here)\n *\n * Note that `pub-legacy` tokens are no longer accepted in V7, and are\n * replaced by the direct use of the public key.\n *\n * BEHAVIORAL CHANGES:\n * Starting with V7, the RoomState server message that gets sent when\n * a client initially connects will now include new fields:\n *\n * - `actor`\n * - `scopes`\n *\n * Since v1.2.0 (Jul 31, 2023)\n */\n V7 = 7,\n\n /**\n * V8 changes storage response format and allows streaming.\n *\n * MESSAGE FORMAT CHANGES:\n * - V8: sends 1+ STORAGE_CHUNK messages, followed by 1 final\n * STORAGE_STREAM_END message (with compact nodes)\n * - V7: sends 1 STORAGE_STATE_V7 message (with full nodes)\n *\n * STREAMING BEHAVIOR in V8:\n * - For SQLite-backed rooms: nodes are split into multiple STORAGE_CHUNK\n * messages, followed by STORAGE_STREAM_END\n * - For KV-backed rooms: all nodes are sent in a single STORAGE_CHUNK\n * message that will contain all nodes, followed by STORAGE_STREAM_END\n *\n * Since 3.14.0\n */\n V8 = 8,\n}\n\nexport const protocolVersionDecoder = enum_(ProtocolVersion).describe(\n \"Unsupported protocol version\"\n);\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type {\n BaseUserMeta,\n Brand,\n IUserInfo,\n Json,\n JsonObject,\n} from \"@liveblocks/core\";\nimport {\n assertNever,\n ClientMsgCode,\n nodeStreamToCompactNodes,\n OpCode,\n raise,\n ServerMsgCode,\n tryParseJson,\n WebsocketCloseCodes as CloseCode,\n} from \"@liveblocks/core\";\nimport { Mutex } from \"async-mutex\";\nimport { array, formatInline } from \"decoders\";\nimport { chunked } from \"itertools\";\nimport { nanoid } from \"nanoid\";\n\nimport type { Guid } from \"~/decoders\";\nimport { clientMsgDecoder } from \"~/decoders\";\nimport type { IServerWebSocket, IStorageDriver } from \"~/interfaces\";\nimport { Logger } from \"~/lib/Logger\";\nimport { makeNewInMemoryDriver } from \"~/plugins/InMemoryDriver\";\nimport type {\n ClientMsg as GenericClientMsg,\n IgnoredOp,\n Op,\n ServerMsg as GenericServerMsg,\n ServerWireOp,\n} from \"~/protocol\";\nimport { ProtocolVersion } from \"~/protocol\";\nimport { Storage } from \"~/Storage\";\nimport { YjsStorage } from \"~/YjsStorage\";\n\nimport { tryCatch } from \"./lib/tryCatch\";\nimport { UniqueMap } from \"./lib/UniqueMap\";\nimport { makeRoomStateMsg } from \"./utils\";\n\nconst messagesDecoder = array(clientMsgDecoder);\n\nconst HIGHEST_PROTOCOL_VERSION = Math.max(\n ...Object.values(ProtocolVersion).filter(\n (v): v is number => typeof v === \"number\"\n )\n) as ProtocolVersion;\n\n// Reverse lookup for ServerMsgCodes\nconst SERVER_MSG_CODE_NAMES = Object.fromEntries(\n Object.entries(ServerMsgCode).map(([k, v]) => [v, k])\n) as Record<(typeof ServerMsgCode)[keyof typeof ServerMsgCode], string>;\n\nconst BLACK_HOLE = new Logger([\n /* No targets, i.e. 
black hole logger */\n]);\n\nexport type LoadingState = \"initial\" | \"loading\" | \"loaded\";\nexport type ActorID = Brand<number, \"ActorID\">;\n\n/**\n * Session keys are also known as the \"nonce\" in the protocol. It's a random,\n * unique, but PRIVATE, identifier for the session, and it's important that\n * this ID is never shared to anyone except the connected client, which\n * receives it as part of its ROOM_STATE message.\n */\nexport type SessionKey = Brand<string, \"SessionKey\">;\n\nexport type PreSerializedServerMsg = Brand<string, \"PreSerializedServerMsg\">;\ntype ClientMsg = GenericClientMsg<JsonObject, Json>;\ntype ServerMsg = GenericServerMsg<JsonObject, BaseUserMeta, Json>;\n\n/**\n * Creates a collector for deferred promises (side effects that should run\n * outside a mutex). Call `defer` to collect promises, then `waitAll` to\n * await them all.\n */\nfunction collectSideEffects() {\n const deferred: Promise<void>[] = [];\n return {\n defer: (p: Promise<void>) => void deferred.push(p),\n waitAll: () => Promise.allSettled(deferred),\n };\n}\n\nfunction serialize(\n msgs: ServerMsg | readonly ServerMsg[]\n): PreSerializedServerMsg {\n return JSON.stringify(msgs) as PreSerializedServerMsg;\n}\n\nexport function ackIgnoredOp(opId: string): IgnoredOp {\n return { type: OpCode.DELETE_CRDT, id: \"ACK\", opId }; // (H)Ack Op\n}\n\nfunction stripOpId(op: Op): ServerWireOp {\n // TODO: Optimize later! Instead of duplicating every op and\n // stripping the opId explicitly, it would be generally more\n // efficient if we treated the opIds as \"envelopes\" around Ops (or\n // send them in a separate array altogether at the protocol level\n // in V8 soon--even better, as it would not even require any stripping!)\n const { opId: _, ...rest } = op; // Strip opIds from all outgoing messages!\n return rest;\n}\n\n/**\n * A known or anonymous user.\n *\n * BY DEFINITION:\n * A User with an assigned `id` property is a non-anonymous user.\n * A User with an assigned `anonymousId` property is an anonymous user.\n * A User with neither of those properties is also an anonymous user.\n *\n * WHAT'S THE DIFFERENCE?\n * When creating a non-anonymous user, other users in the room will be able to\n * observe the assigned `id` property in Presence (e.g. via the `other.user.id`\n * in the Liveblocks client).\n *\n * When creating an anonymous user, you can _optionally_ provide an anonymous\n * ID to (re)use. While not authorized, this still allows you to correlate\n * unique users.\n */\nexport type IUserData = AuthorizedUser | AnonymousUser; // YYY Remove this export before launch. It's a private API, but only needed temporarily, while refactoring our CF server\n\ntype AuthorizedUser = {\n readonly id: string;\n readonly anonymousId?: never;\n readonly info?: IUserInfo;\n};\n\n// Anonymous users, by definition, have no ID, or have an explicitly-assigned\n// anonymous ID (in case you need to control anonymous ID generation, e.g. by\n// tracking a cookie). The anonymous ID will not show up in other clients. 
To\n// those clients, it will appear as a user without an ID.\ntype AnonymousUser = {\n readonly anonymousId: string;\n readonly id?: never;\n readonly info?: IUserInfo;\n};\n\n/*\n\nSession Types: \n| | Browser Session | Backend Session | Virtual Session |\n|-----------------------------------|-----------------|-----------------|-----------------|\n| Sends enter/leave/presence events | ✓ | | ✓ |\n| Visible to other users in room | ✓ | | ✓ |\n| Has WebSocket connection | ✓ | | |\n| Updated from | Browser | REST API | REST API |\n\n*Note: VirtualSession is not yet implemented. \n\n*/\n\n/**\n * Each BrowserSession is an abstraction around a socket instance, and maintains\n * metadata about the connection.\n */\nexport class BrowserSession<SM, CM extends JsonObject> {\n // ^^ User-defined Session Metadata\n // ^^ User-defined Client Metadata (sent to client in ROOM_STATE)\n\n public readonly version: ProtocolVersion; // Liveblocks protocol version this client will speak\n public readonly actor: ActorID; // Must be unique within the room\n public readonly createdAt: Date;\n\n // Externally provided (public!) user metadata. This information will get shared with other clients\n public readonly user: IUserData;\n public readonly scopes: string[]; // Permissions for this session, sent to connected clients (so consider public info)\n public readonly meta: SM; // Arbitrary *private* meta data to attach to this session (will NOT be shared)\n public readonly publicMeta?: CM; // Metadata sent to client in ROOM_STATE message's \"meta\" field\n\n readonly #_socket: IServerWebSocket;\n readonly #_debug: boolean;\n #_lastActiveAt: Date;\n\n // We keep a status in-memory in the session of whether we already sent a rejected ops message to the client.\n #_hasNotifiedClientStorageUpdateError: boolean;\n\n /** @internal - Never create a BrowserSession instance manually. Use the room.startBrowserSession() API instead. */\n constructor(\n ticket: Ticket<SM, CM>,\n socket: IServerWebSocket,\n debug: boolean\n ) {\n this.version = ticket.version;\n this.actor = ticket.actor;\n this.user = ticket.user;\n this.scopes = ticket.scopes;\n this.meta = ticket.meta ?? 
(undefined as unknown as SM);\n this.publicMeta = ticket.publicMeta;\n this.#_socket = socket;\n this.#_debug = debug;\n\n const now = new Date();\n this.createdAt = now;\n this.#_lastActiveAt = now;\n this.#_hasNotifiedClientStorageUpdateError = false;\n }\n\n get lastActiveAt(): Date {\n const lastPing = this.#_socket.getLastPongTimestamp?.();\n if (lastPing && lastPing > this.#_lastActiveAt) {\n return lastPing;\n } else {\n return this.#_lastActiveAt;\n }\n }\n\n get hasNotifiedClientStorageUpdateError(): boolean {\n return this.#_hasNotifiedClientStorageUpdateError;\n }\n\n markActive(now = new Date()): void {\n if (now > this.#_lastActiveAt) {\n this.#_lastActiveAt = now;\n }\n }\n\n setHasNotifiedClientStorageUpdateError(): void {\n this.#_hasNotifiedClientStorageUpdateError = true;\n }\n\n sendPong(): number {\n this.markActive();\n\n const sent = this.#_socket.send(\"pong\");\n if (this.#_debug) {\n if (sent < 0) {\n console.error(\n `failed to send \"pong\" to actor=${this.actor} (back pressure)`\n );\n } else if (sent === 0) {\n console.error(\n `failed to send \"pong\" to actor=${this.actor} (connection issue)`\n );\n } else {\n // Success\n console.log(`sent to actor=${this.actor}: \"pong\"`);\n }\n }\n return sent;\n }\n\n send(serverMsg: ServerMsg | ServerMsg[] | PreSerializedServerMsg): number {\n const data =\n typeof serverMsg === \"string\" ? serverMsg : serialize(serverMsg);\n const sent = this.#_socket.send(data);\n if (this.#_debug) {\n if (sent < 0) {\n console.error(\n `failed to send message to actor=${this.actor} (back pressure)`\n );\n } else if (sent === 0) {\n console.error(\n `failed to send message to actor=${this.actor} (connection issue)`\n );\n }\n\n const msgs = JSON.parse(data) as ServerMsg | ServerMsg[];\n for (const msg of Array.isArray(msgs) ? msgs : [msgs]) {\n console.log(\n `sent to actor=${this.actor}: [${\n SERVER_MSG_CODE_NAMES[msg.type] ?? msg.type\n }] ${JSON.stringify(msg)}`\n );\n }\n }\n return sent;\n }\n\n /**\n * @internal\n * Closes the socket associated to this BrowserSession.\n *\n * NOTE: Never call this API directly! Call .endBrowserSession() instead.\n */\n closeSocket(code: number, reason?: string): void {\n this.#_socket.close(code, reason);\n }\n}\n\nexport class BackendSession extends BrowserSession<never, never> {\n /** @internal Never call this constructor directly */\n constructor(\n ticket: Ticket<never, never>,\n socket: IServerWebSocket,\n debug: boolean\n ) {\n super(ticket, socket, debug);\n }\n}\n\nexport type Ticket<SM, CM extends JsonObject> = {\n readonly sessionKey: SessionKey; // Should stay private\n readonly version: ProtocolVersion;\n readonly actor: ActorID;\n readonly meta?: SM; // Private Session metadata\n readonly publicMeta?: CM; // Client metadata is *public* metadata sent to client in ROOM_STATE message\n readonly user: IUserData; // User-provided, public, metadata\n readonly scopes: string[];\n};\n\nexport type CreateTicketOptions<SM, CM extends JsonObject> = {\n /** The Liveblocks protocol version this client will speak */\n version?: ProtocolVersion;\n meta?: SM;\n publicMeta?: CM;\n /** A user-provided ID to externally recognize the user by */\n id?: string;\n /**\n * A user-provided anonymous ID to use. When `id` is provided, this field is\n * ignored. 
When both fields are missing, a new anonymous ID will be\n * generated.\n */\n anonymousId?: string;\n /** Static user metadata to assign this session, will get broadcasted to other clients */\n info?: IUserInfo;\n /** Permissions to assign this session */\n scopes?: string[];\n\n /** An explicit actor ID to use. Supported for legacy use cases only. It's best to not set this and let it get assigned dynamically, as it's important for this identifier to be unique. */\n actor?: ActorID;\n};\n\ntype InternalData = {\n readonly storage: Storage;\n readonly yjsStorage: YjsStorage;\n readonly mutex: Mutex;\n};\n\ntype RoomOptions<SM, CM extends JsonObject, C> = {\n /**\n * Bring your own persistence backend\n */\n storage?: IStorageDriver;\n logger?: Logger;\n\n /**\n * Whether to allow streaming storage responses. Only safe with drivers\n * that can guarantee that no Ops from other clients can get interleaved\n * between the chunk generation until the last chunk has been sent.\n * Defaults to true, but is notably NOT safe to use from DOS-KV backends.\n */\n allowStreaming?: boolean;\n\n // YYY Restructure these hooks to all take a single `event` param\n hooks?: {\n /** Customize which incoming messages from a client are allowed or disallowed. */\n isClientMsgAllowed?: (\n msg: ClientMsg,\n session: BrowserSession<SM, CM>\n ) => { allowed: true } | { allowed: false; reason: string };\n\n /** Called whenever the server acknowledged a ping with a pong */\n onDidPong?: (ctx?: C) => void | Promise<void>;\n\n /** Called before the room is attempted to be loaded */\n onRoomWillLoad?: (ctx?: C) => void | Promise<void>;\n /** Called right after the room's contents are loaded, but before any session has been started */\n onRoomDidLoad?: (ctx?: C) => void | Promise<void>;\n\n /** Called right before the room is attempted to be unloaded. Synchronous. May throw to abort the unloading. */\n onRoomWillUnload?: (ctx?: C) => void;\n /** Called right after the room has been unloaded from memory. Synchronous. */\n onRoomDidUnload?: (ctx?: C) => void;\n\n /** Called when a new user entered the room. */\n onSessionDidStart?: (\n session: BrowserSession<SM, CM>,\n ctx?: C\n ) => void | Promise<void>;\n /** Called when a user left the room. */\n onSessionDidEnd?: (\n session: BrowserSession<SM, CM>,\n ctx?: C\n ) => void | Promise<void>;\n\n /**\n * Called when Liveblocks Storage for the room was updated.\n *\n * IMPORTANT! If you implement these as async functions, it's important to\n * note that these run outside of the storage mutex that guarantees\n * a consistent view of storage.\n * Therefore, only ever use this hook to implement a side effect (like\n * trigger a notification), don't read storage in this hook directly.\n */\n postClientMsgStorageDidUpdate?: (ctx?: C) => void | Promise<void>;\n /**\n * Called when Yjs Storage for the room was updated.\n *\n * IMPORTANT! 
If you implement these as async functions, it's important to\n * note that these run outside of the storage mutex that guarantees\n * a consistent view of storage.\n * Therefore, only ever use this hook to implement a side effect (like\n * trigger a notification), don't read storage in this hook directly.\n */\n postClientMsgYdocDidUpdate?: (\n ctx?: C,\n sess?: BrowserSession<SM, CM>\n ) => void | Promise<void>;\n };\n\n /** Enable debug logging */\n enableDebugLogging?: boolean;\n};\n\n/**\n * A Liveblocks Room server.\n */\nexport class Room<RM, SM, CM extends JsonObject, C = undefined> {\n // ^^^^^^^^^^ User-defined Room Metadata, Session Metadata, and Client Metadata\n\n public meta: RM;\n public readonly driver: IStorageDriver;\n public logger: Logger;\n\n private _loadData$: Promise<void> | null = null;\n private _data: InternalData | null = null;\n private _qsize = 0;\n\n private readonly sessions = new UniqueMap<\n SessionKey,\n BrowserSession<SM, CM>,\n ActorID\n >((s) => s.actor);\n\n private readonly hooks: {\n isClientMsgAllowed: (\n msg: ClientMsg,\n session: BrowserSession<SM, CM>\n ) => { allowed: true } | { allowed: false; reason: string };\n\n onDidPong?: (ctx?: C) => void | Promise<void>;\n\n onRoomWillLoad?: (ctx?: C) => void | Promise<void>;\n onRoomDidLoad?: (ctx?: C) => void | Promise<void>;\n\n onRoomWillUnload?: (ctx?: C) => void;\n onRoomDidUnload?: (ctx?: C) => void;\n\n onSessionDidStart?: (\n session: BrowserSession<SM, CM>,\n ctx: C | undefined\n ) => void | Promise<void>;\n onSessionDidEnd?: (\n session: BrowserSession<SM, CM>,\n ctx: C | undefined\n ) => void | Promise<void>;\n\n // Don't like these callback names yet. Think about how to better abstract it later.\n postClientMsgStorageDidUpdate?: (ctx?: C) => void | Promise<void>;\n postClientMsgYdocDidUpdate?: (\n ctx?: C,\n sess?: BrowserSession<SM, CM>\n ) => void | Promise<void>;\n };\n\n readonly #_debug: boolean;\n readonly #_allowStreaming: boolean;\n\n constructor(meta: RM, options?: RoomOptions<SM, CM, C>) {\n const driver = options?.storage ?? makeNewInMemoryDriver();\n this.meta = meta;\n this.driver = driver;\n this.logger = options?.logger ?? BLACK_HOLE;\n this.#_allowStreaming = options?.allowStreaming ?? true;\n this.hooks = {\n isClientMsgAllowed:\n options?.hooks?.isClientMsgAllowed ??\n (() => {\n return {\n allowed: true,\n };\n }),\n\n // YYY .load() isn't called on the RoomServer yet! As soon as it does, these hooks will get called\n onRoomWillLoad: options?.hooks?.onRoomWillLoad,\n onRoomDidLoad: options?.hooks?.onRoomDidLoad,\n\n onRoomWillUnload: options?.hooks?.onRoomWillUnload,\n onRoomDidUnload: options?.hooks?.onRoomDidUnload,\n\n onSessionDidStart: options?.hooks?.onSessionDidStart,\n onSessionDidEnd: options?.hooks?.onSessionDidEnd,\n\n postClientMsgStorageDidUpdate:\n options?.hooks?.postClientMsgStorageDidUpdate,\n postClientMsgYdocDidUpdate: options?.hooks?.postClientMsgYdocDidUpdate,\n };\n this.#_debug = options?.enableDebugLogging ?? 
false;\n }\n\n public get loadingState(): LoadingState {\n if (this._loadData$ === null) {\n return \"initial\";\n } else if (this._data === null) {\n return \"loading\";\n } else {\n return \"loaded\";\n }\n }\n\n public get numSessions(): number { return this.sessions.size; } // prettier-ignore\n\n public get storage(): Storage { return this.data.storage; } // prettier-ignore\n public get yjsStorage(): YjsStorage { return this.data.yjsStorage; } // prettier-ignore\n\n public get mutex(): Mutex { return this.data.mutex; } // prettier-ignore\n\n private get data(): InternalData { return this._data ?? raise(\"Cannot use room before it's loaded\"); } // prettier-ignore\n\n // ------------------------------------------------------------------------------------\n // Public API\n // ------------------------------------------------------------------------------------\n\n /**\n * Initializes the Room, so it's ready to start accepting connections. Safe\n * to call multiple times. After awaiting `room.load()` the Room is ready to\n * be used.\n */\n public async load(ctx?: C): Promise<void> {\n if (this._loadData$ === null) {\n this._data = null;\n this._loadData$ = this._load(ctx).catch((e) => {\n this._data = null;\n this._loadData$ = null;\n throw e;\n });\n }\n return this._loadData$;\n }\n\n /**\n * Releases the currently-loaded storage tree from worker memory, freeing it\n * up to be garbage collected. The next time a user will join the room, the\n * room will be reloaded from storage.\n */\n public unload(ctx?: C): void {\n this.hooks.onRoomWillUnload?.(ctx); // May throw to cancel unloading\n if (this._data) {\n this.storage.unload();\n this.yjsStorage.unload();\n }\n // YYY Abort any potentially in-flight _loadData$ calls here\n this._loadData$ = null;\n // this._data = null; // YYY Should we also clear _data? I think so!\n this.hooks.onRoomDidUnload?.(ctx);\n }\n\n /**\n * Issues a Ticket with a new/unique actor ID\n *\n * IMPORTANT! As the caller of this function, you are responsible for\n * ensuring you trust the values passed in here. Never pass unauthorized\n * values in here.\n *\n * The returned Ticket can be turned into a active Session once the socket\n * connection is established. If the socket is never established, this\n * unused Ticket will simply get garbage collected.\n */\n public async createTicket(\n options?: CreateTicketOptions<SM, CM>\n ): Promise<Ticket<SM, CM>> {\n const actor$ = options?.actor ?? this.getNextActor();\n const sessionKey = nanoid() as SessionKey;\n const info = options?.info;\n const ticket: Ticket<SM, CM> = {\n version: options?.version ?? HIGHEST_PROTOCOL_VERSION,\n actor: await actor$,\n sessionKey,\n meta: options?.meta,\n publicMeta: options?.publicMeta,\n user: options?.id\n ? { id: options.id, info }\n : { anonymousId: options?.anonymousId ?? nanoid(), info },\n scopes: options?.scopes ?? 
[\"room:write\"],\n };\n if (this.#_debug) {\n console.log(`new ticket created: ${JSON.stringify(ticket)}`);\n }\n return ticket;\n }\n\n public async createBackendSession_experimental(): Promise<\n [session: BackendSession, outgoingMessages: PreSerializedServerMsg[]]\n > {\n const ticket = (await this.createTicket()) as Ticket<never, never>;\n const capturedServerMsgs: PreSerializedServerMsg[] = [];\n const stub = {\n send: (data) => {\n if (typeof data === \"string\") {\n capturedServerMsgs.push(data as PreSerializedServerMsg);\n }\n return 0;\n },\n close: () => {}, // noop\n } satisfies IServerWebSocket;\n const session = new BackendSession(ticket, stub, false);\n return [session, capturedServerMsgs];\n }\n\n /**\n * Restores the given sessions as the Room server's session list. Can only be\n * called as long as there are no existing sessions.\n *\n * The key difference with the .startBrowserSession() API is that restoreSessions is\n * used in cases where a session was hibernated and needs to be restored,\n * without _conceptually_ starting a new session.\n *\n * Because there are no side effects to restoreSession, it's synchronous.\n */\n public restoreSessions(\n sessions: {\n ticket: Ticket<SM, CM>;\n socket: IServerWebSocket;\n lastActivity: Date;\n }[]\n ): void {\n if (this.sessions.size > 0) {\n throw new Error(\"This API can only be called before any sessions exist\");\n }\n\n for (const { ticket, socket, lastActivity } of sessions) {\n const newSession = new BrowserSession(ticket, socket, this.#_debug);\n this.sessions.set(ticket.sessionKey, newSession);\n newSession.markActive(lastActivity);\n }\n }\n\n /**\n * Registers a new BrowserSession into the Room server's session list, along with\n * the socket connection to use for that BrowserSession, now that it is known.\n *\n * This kicks off a few side effects:\n * - Sends a ROOM_STATE message to the socket.\n * - Broadcasts a USER_JOINED message to all other sessions in the room.\n */\n public startBrowserSession(\n ticket: Ticket<SM, CM>,\n socket: IServerWebSocket,\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"One of your hook handlers returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to startBrowserSession() to collect async side effects.\"\n );\n }\n ): void {\n let existing: SessionKey | undefined;\n while (\n (existing = this.sessions.lookupPrimaryKey(ticket.actor)) !== undefined\n ) {\n // If this happens, it means a new connection attempt is happening for an\n // existing actor ID. It's most likely from a reconnection attempt using\n // a legacy token (which has the actor ID hardcoded in it), where the old\n // session hasn't been closed explicitly. 
We'll actively kill it now.\n\n // Terminate old session\n this.endBrowserSession(\n existing,\n CloseCode.KICKED,\n \"Closed stale connection\",\n ctx,\n defer\n );\n\n this.logger.warn(\n `Previous session for actor ${ticket.actor} killed in favor of new session`\n );\n }\n\n const newSession = new BrowserSession(ticket, socket, this.#_debug);\n this.sessions.set(ticket.sessionKey, newSession);\n\n const users: Record<ActorID, BaseUserMeta & { scopes: string[] }> = {};\n for (const session of this.otherSessions(ticket.sessionKey)) {\n users[session.actor] = {\n id: session.user.id,\n info: session.user.info,\n scopes: session.scopes,\n };\n }\n\n newSession.send(\n makeRoomStateMsg(\n newSession.actor,\n ticket.sessionKey, // called \"nonce\" in the protocol\n newSession.scopes,\n users,\n ticket.publicMeta\n )\n );\n\n this.sendToOthers(\n ticket.sessionKey,\n {\n type: ServerMsgCode.USER_JOINED,\n actor: newSession.actor,\n id: newSession.user.id,\n info: newSession.user.info,\n scopes: newSession.scopes,\n },\n ctx,\n defer\n );\n\n // Call the hook, but don't await the results here\n const p$ = this.hooks.onSessionDidStart?.(newSession, ctx);\n if (p$) defer(p$);\n }\n\n /**\n * Unregisters the BrowserSession for the given actor. Call this when the socket has\n * been closed from the client's end.\n *\n * This kicks off a few side effects:\n * - Broadcasts a USER_LEFT message to all other sessions in the room.\n */\n public endBrowserSession(\n key: SessionKey,\n code: number,\n reason: string,\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"Your onSessionDidEnd handler returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to endBrowserSession() to collect async side effects.\"\n );\n }\n ): void {\n const sessions = this.sessions;\n\n const session = sessions.get(key);\n if (session === undefined) return;\n\n session.closeSocket(code, reason);\n\n const deleted = sessions.delete(key);\n if (deleted) {\n for (const other of this.otherSessions(key)) {\n other.send({ type: ServerMsgCode.USER_LEFT, actor: session.actor });\n }\n\n // Call the hook\n const p$ = this.hooks.onSessionDidEnd?.(session, ctx);\n if (p$) defer(p$);\n }\n }\n\n /**\n * Force-closes all sessions matching the given predicate.\n */\n public endSessionBy(\n predicate: (session: BrowserSession<SM, CM>) => boolean,\n code: number,\n reason: string,\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"Your onSessionDidEnd handler returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to endSessionBy() to collect async side effects.\"\n );\n }\n ): number {\n let count = 0;\n for (const [key, session] of this.sessions) {\n if (predicate(session)) {\n count++;\n this.endBrowserSession(key, code, reason, ctx, defer);\n }\n }\n return count;\n }\n\n /**\n * Handles a raw incoming socket message, which can be a ping, or an\n * JSON-encoded message batch.\n */\n public async handleData(\n key: SessionKey,\n data: unknown,\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"One of your hook handlers returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to handleData() to collect async side effects.\"\n );\n }\n ): Promise<void> {\n const text =\n typeof data === \"string\" ? 
data : raise(\"Unsupported message format\");\n\n if (text === \"ping\") {\n await this.handlePing(key, ctx);\n } else {\n const json = tryParseJson(text);\n const messages = messagesDecoder.decode(json);\n\n if (!messages.ok) {\n const reason =\n process.env.NODE_ENV !== \"production\"\n ? formatInline(messages.error)\n : \"Invalid message format\";\n\n this.endBrowserSession(\n key,\n CloseCode.INVALID_MESSAGE_FORMAT,\n reason,\n ctx,\n defer\n );\n return;\n }\n\n // TODO: Decide on these limits later.\n // If qsize is > 0, then it means there is a traffic jam. This shouldn't\n // be a problem for a while, but it grows beyond a certain (soft or hard)\n // limit, we may want to take measures.\n if (this._qsize > 10_000) {\n // Over hard limit\n // TODO: Maybe disconnect this sockets with a 42xx close code? This\n // will make the client back off more aggressively. See\n // https://github.com/liveblocks/liveblocks/blob/223f7ce0d77380fecd3b08ed9454ca8c330bbe16/packages/liveblocks-core/src/types/IWebSocket.ts#L53\n } else if (this._qsize > 5_000) {\n // Over soft limit\n // TODO: Maybe instruct clients to increase their throttle values?\n }\n\n this._qsize++;\n\n // Run this.handleMsgs(), but guarded by a mutex lock, ensuring that no\n // two messages will get processed simultaneously. This provides similar\n // concurrency protection as Cloudflare's I/O gates\n try {\n await this.processClientMsg(key, messages.value, ctx);\n } finally {\n this._qsize--;\n }\n }\n }\n\n /**\n * Processes an incoming batch of 1 or more ClientMsgs on behalf of\n * a (regular user/browser) session.\n *\n * IMPORTANT: Only use this API on \"trusted\" data!\n * To handle untrusted input data, use `.handleData()` instead.\n *\n * Before calling this API, make sure:\n * 1. The call site is entitled to call this message on behalf of this session; and\n * 2. 
The ClientMsg payload has been validated to be correct.\n */\n public async processClientMsg(\n key: SessionKey,\n messages: ClientMsg[],\n ctx?: C\n ): Promise<void> {\n await this.load(ctx);\n const { defer, waitAll } = collectSideEffects();\n await this.mutex.runExclusive(() =>\n this._processClientMsg_withExclusiveAccess(key, messages, ctx, defer)\n );\n\n // Run all deferred work (like queueing messages, sending notifications,\n // etc) outside of the mutex\n await waitAll();\n }\n\n /**\n * Processes an incoming batch of 1 or more ClientMsgs on behalf of\n * a BACKEND session.\n *\n * Difference 1: HTTP RESPONSE instead of WEB SOCKET RESPONSE\n * ----------------------------------------------------------\n * For \"normal\" sessions that have a socket attached, any \"responses\" (i.e.\n * server messages like acks or fixops) will be sent back through that\n * existing socket connection.\n *\n * The key difference when using this method is that there is no such socket,\n * so any \"response\" ServerMsgs will get sent back as an HTTP response.\n *\n * Difference 2: No auth check\n * ---------------------------\n * Another key difference is that when processing a backend session, no\n * \"isClientMsgAllowed()\" check is performed, because those checks assume\n * a session.\n */\n public async processClientMsgFromBackendSession(\n session: BackendSession,\n messages: ClientMsg[],\n ctx?: C\n ): Promise<void> {\n await this.load(ctx);\n const { defer, waitAll } = collectSideEffects();\n await this.mutex.runExclusive(() =>\n this._processClientMsgFromBackendSession_withExclusiveAccess(\n session,\n messages,\n ctx,\n defer\n )\n );\n\n // Run all deferred work (like queueing messages, sending notifications,\n // etc) outside of the mutex\n await waitAll();\n }\n\n public getSession(\n sessionKey: SessionKey\n ): BrowserSession<SM, CM> | undefined {\n return this.sessions.get(sessionKey);\n }\n\n public listSessions(): BrowserSession<SM, CM>[] {\n return Array.from(this.sessions.values());\n }\n\n /**\n * Will send the given ServerMsg to all Sessions, except the Session\n * where the message originates from.\n */\n public sendToOthers(\n sender: SessionKey,\n serverMsg: ServerMsg | readonly ServerMsg[],\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"One of your hook handlers returned a promise, but no side effect collector was provided. \" +\n \"Pass a `defer` callback to sendToOthers() to collect async side effects.\"\n );\n }\n ): void {\n const msg = serialize(serverMsg);\n for (const [key, session] of this.otherSessionEntries(sender)) {\n const success = session.send(msg);\n if (success === 0) {\n // If there is a connection issue, terminate the session at once.\n // Note that in the case of -1 (= back pressure), we don't terminate\n // the connection.\n this.endBrowserSession(\n key,\n CloseCode.KICKED,\n \"Closed broken connection\",\n ctx,\n defer\n );\n }\n }\n }\n\n /**\n * Will broadcast the given ServerMsg to all Sessions in the Room.\n */\n public sendToAll(\n serverMsg: ServerMsg | readonly ServerMsg[],\n ctx?: C,\n defer: (promise: Promise<void>) => void = () => {\n throw new Error(\n \"One of your hook handlers returned a promise, but no side effect collector was provided. 
\" +\n \"Pass a `defer` callback to sendToAll() to collect async side effects.\"\n );\n }\n ): void {\n const msg = serialize(serverMsg);\n for (const [key, session] of this.sessions) {\n const success = session.send(msg);\n if (success === 0) {\n // If there is a connection issue, terminate the session at once.\n // Note that in the case of -1 (= back pressure), we don't terminate\n // the connection.\n this.endBrowserSession(\n key,\n CloseCode.KICKED,\n \"Closed broken connection\",\n ctx,\n defer\n );\n }\n }\n }\n\n // ------------------------------------------------------------------------------------\n // Private APIs\n // ------------------------------------------------------------------------------------\n\n private async _loadStorage(): Promise<Storage> {\n const storage = new Storage(this.driver);\n await storage.load(this.logger);\n return storage;\n }\n\n private async _loadYjsStorage(): Promise<YjsStorage> {\n const yjsStorage = new YjsStorage(this.driver);\n await yjsStorage.load(this.logger);\n return yjsStorage;\n }\n\n // Don't ever manually call this!\n private async _load(ctx?: C): Promise<void> {\n await this.hooks.onRoomWillLoad?.(ctx);\n\n // YYY Maybe later run these in parallel? See https://github.com/liveblocks/liveblocks-cloudflare/pull/721#discussion_r1489076389\n const storage = await this._loadStorage();\n const yjsStorage = await this._loadYjsStorage();\n\n this._data = {\n mutex: new Mutex(),\n storage,\n yjsStorage,\n };\n\n await this.hooks.onRoomDidLoad?.(ctx);\n }\n\n /**\n * Returns a new, unique, actor ID.\n */\n private async getNextActor(): Promise<ActorID> {\n return (await this.driver.next_actor()) as ActorID;\n }\n\n /**\n * Iterates over all *other* Sessions and their session keys.\n */\n private *otherSessionEntries(\n currentKey: SessionKey\n ): Generator<[SessionKey, BrowserSession<SM, CM>]> {\n for (const [key, session] of this.sessions) {\n if (key !== currentKey) {\n yield [key, session];\n }\n }\n }\n\n /**\n * Iterates over all *other* Sessions.\n */\n private *otherSessions(\n currentKey: SessionKey\n ): Generator<BrowserSession<SM, CM>> {\n for (const [key, session] of this.sessions) {\n if (key !== currentKey) {\n yield session;\n }\n }\n }\n\n /**\n * @internal\n * Handles an incoming ping, by sending a pong back.\n */\n // eslint-disable-next-line @typescript-eslint/require-await\n private async handlePing(sessionKey: SessionKey, ctx?: C): Promise<void> {\n const session = this.sessions.get(sessionKey);\n if (session === undefined) {\n this.logger\n .withContext({ sessionKey })\n .warn(\"[probe] in handlePing, no such session exists\");\n return;\n }\n\n const sent = session.sendPong();\n\n // 0 means there was a connection issue\n // -1 means there was back pressure, which is no issue (we'll just count the ping)\n if (sent !== 0) {\n await this.hooks.onDidPong?.(ctx);\n }\n }\n\n private async _processClientMsg_withExclusiveAccess(\n sessionKey: SessionKey,\n messages: ClientMsg[],\n ctx: C | undefined,\n defer: (p: Promise<void>) => void\n ): Promise<void> {\n const session = this.sessions.get(sessionKey);\n if (!session) {\n this.logger\n .withContext({ sessionKey })\n .warn(\"[probe] in handleClientMsgs, no such session exists\");\n return;\n }\n\n // Keep two ServerMsg buffers to send at the end:\n // - Messages to fan-out to all *others* (current session not included)\n // - Messages to reply back to the current sender (i.e. 
acks and rejections)\n const toFanOut: ServerMsg[] = [];\n const toReply: ServerMsg[] = [];\n const replyImmediately = (msg: ServerMsg | ServerMsg[]) =>\n void session.send(msg);\n const scheduleFanOut = (msg: ServerMsg) => void toFanOut.push(msg);\n const scheduleReply = (msg: ServerMsg) => void toReply.push(msg);\n\n for (const msg of messages) {\n const isMsgAllowed = this.hooks.isClientMsgAllowed(msg, session);\n if (isMsgAllowed.allowed) {\n await this.handleOne(\n session,\n msg,\n replyImmediately,\n scheduleFanOut,\n scheduleReply,\n ctx,\n defer\n );\n } else {\n if (!session.hasNotifiedClientStorageUpdateError) {\n toReply.push({\n type: ServerMsgCode.REJECT_STORAGE_OP,\n opIds:\n msg.type === ClientMsgCode.UPDATE_STORAGE\n ? msg.ops.map((op) => op.opId)\n : [],\n reason: isMsgAllowed.reason,\n });\n session.setHasNotifiedClientStorageUpdateError();\n }\n }\n }\n\n if (toFanOut.length > 0) {\n this.sendToOthers(sessionKey, toFanOut, ctx, defer);\n }\n\n if (toReply.length > 0) {\n session.send(toReply);\n }\n }\n\n // TODO It's a bit bothering how much duplication there is between this method\n // and the _processClientMsg_withExclusiveAccess version. A better\n // abstraction is needed.\n private async _processClientMsgFromBackendSession_withExclusiveAccess(\n session: BackendSession,\n messages: ClientMsg[],\n ctx: C | undefined,\n defer: (p: Promise<void>) => void\n ): Promise<void> {\n // Keep two ServerMsg buffers to send at the end:\n // - Messages to fan-out to all *others* (current session not included)\n // - Messages to reply back to the current sender (i.e. acks and rejections)\n const toFanOut: ServerMsg[] = [];\n const toReplyImmediately: ServerMsg[] = [];\n const toReplyAfter: ServerMsg[] = [];\n\n const replyImmediately = (msg: ServerMsg | ServerMsg[]) => {\n if (Array.isArray(msg)) {\n for (const m of msg) {\n toReplyImmediately.push(m);\n }\n } else {\n toReplyImmediately.push(msg);\n }\n };\n const scheduleFanOut = (msg: ServerMsg) => void toFanOut.push(msg);\n const scheduleReply = (msg: ServerMsg) => void toReplyAfter.push(msg);\n\n for (const msg of messages) {\n await this.handleOne(\n session,\n msg,\n replyImmediately,\n scheduleFanOut,\n scheduleReply,\n ctx,\n defer\n );\n }\n\n if (toReplyImmediately.length > 0) {\n session.send(toReplyImmediately);\n toReplyImmediately.length = 0;\n }\n\n if (toFanOut.length > 0) {\n this.sendToOthers(\"(transient)\" as SessionKey, toFanOut, ctx, defer);\n toFanOut.length = 0;\n }\n\n if (toReplyAfter.length > 0) {\n session.send(toReplyAfter);\n toReplyAfter.length = 0;\n }\n }\n\n private async handleOne(\n session: BrowserSession<SM, CM>,\n msg: ClientMsg,\n replyImmediately: (msg: ServerMsg | ServerMsg[]) => void,\n scheduleFanOut: (msg: ServerMsg) => void,\n scheduleReply: (msg: ServerMsg) => void,\n ctx: C | undefined,\n defer: (p: Promise<void>) => void\n ): Promise<void> {\n if (!this.mutex.isLocked()) {\n throw new Error(\"Handling messages requires exclusive access\");\n }\n\n switch (msg.type) {\n case ClientMsgCode.UPDATE_PRESENCE: {\n // YYY Maybe consider calling session.sendToOthers() directly here instead of queueing for fan-out?\n scheduleFanOut({\n type: ServerMsgCode.UPDATE_PRESENCE,\n actor: session.actor,\n data: msg.data,\n targetActor: msg.targetActor,\n });\n break;\n }\n\n case ClientMsgCode.BROADCAST_EVENT: {\n // YYY Maybe consider calling session.sendToOthers() directly here instead of queueing for fan-out?\n scheduleFanOut({\n type: ServerMsgCode.BROADCASTED_EVENT,\n actor: session.actor,\n 
event: msg.event,\n });\n break;\n }\n\n case ClientMsgCode.FETCH_STORAGE: {\n if (session.version >= ProtocolVersion.V8) {\n if (this.#_allowStreaming) {\n const NODES_PER_CHUNK = 250; // = arbitrary! Could be tuned later\n\n for (const chunk of chunked(\n nodeStreamToCompactNodes(this.storage.loadedDriver.iter_nodes()),\n NODES_PER_CHUNK\n )) {\n // NOTE: We don't take a storage snapshot here, because this\n // iteration is happening synchronously, so consistency of the\n // current document automatically guaranteed. If we ever make\n // this streaming asynchronous, however, we need to take\n // a storage snapshot to guarantee document consistency.\n replyImmediately({\n type: ServerMsgCode.STORAGE_CHUNK,\n nodes: chunk,\n });\n }\n } else {\n replyImmediately({\n type: ServerMsgCode.STORAGE_CHUNK,\n nodes: Array.from(\n nodeStreamToCompactNodes(this.storage.loadedDriver.iter_nodes())\n ),\n });\n }\n\n replyImmediately({ type: ServerMsgCode.STORAGE_STREAM_END });\n } else {\n replyImmediately({\n type: ServerMsgCode.STORAGE_STATE_V7,\n items: Array.from(this.storage.loadedDriver.iter_nodes()),\n });\n }\n break;\n }\n\n case ClientMsgCode.UPDATE_STORAGE: {\n // Bump storage version to indicate data will get mutated\n // A driver can use this information to implement copy-on-write\n // semantics to provide snapshot isolation.\n this.driver.bump_storage_version?.();\n\n const result = await this.storage.applyOps(msg.ops);\n\n const opsToForward: ServerWireOp[] = result.flatMap((r) =>\n r.action === \"accepted\" ? [r.op] : []\n );\n\n const opsToSendBack: ServerWireOp[] = result.flatMap((r) => {\n switch (r.action) {\n case \"ignored\":\n // HACK! We send a cleverly composed message, that will act\n // as an acknowledgement to all old clients out there in\n // the wild.\n return r.ignoredOpId !== undefined\n ? [ackIgnoredOp(r.ignoredOpId)]\n : [];\n\n case \"accepted\":\n return r.fix !== undefined ? [r.fix] : [];\n\n // istanbul ignore next\n default:\n return assertNever(r, \"Unhandled case\");\n }\n });\n\n if (opsToForward.length > 0) {\n scheduleFanOut({\n type: ServerMsgCode.UPDATE_STORAGE,\n ops: opsToForward.map(stripOpId),\n });\n scheduleReply({\n type: ServerMsgCode.UPDATE_STORAGE,\n ops: opsToForward,\n });\n }\n\n if (opsToSendBack.length > 0) {\n replyImmediately({\n type: ServerMsgCode.UPDATE_STORAGE,\n ops: opsToSendBack,\n });\n }\n\n if (opsToForward.length > 0) {\n // NOTE! These are being called after *every* handleOne() call\n // currently. 
Should we not just call these once at the end of\n // handleClientMsgs()?\n const p$ = this.hooks.postClientMsgStorageDidUpdate?.(ctx);\n if (p$) defer(p$);\n }\n break;\n }\n\n case ClientMsgCode.FETCH_YDOC: {\n const vector = msg.vector;\n const guid = msg.guid as Guid | undefined;\n const isV2 = msg.v2;\n const [update, stateVector, snapshotHash] = await Promise.all([\n this.yjsStorage.getYDocUpdate(this.logger, vector, guid, isV2),\n this.yjsStorage.getYStateVector(guid),\n this.yjsStorage.getSnapshotHash({ guid, isV2 }),\n ]);\n\n if (update !== null && snapshotHash !== null) {\n replyImmediately({\n type: ServerMsgCode.UPDATE_YDOC,\n update,\n isSync: true, // this is no longer used by the client, instead we use the presence of stateVector\n stateVector,\n guid,\n v2: isV2,\n remoteSnapshotHash: snapshotHash,\n });\n }\n break;\n }\n\n case ClientMsgCode.UPDATE_YDOC: {\n const update = msg.update;\n const guid = msg.guid as Guid | undefined;\n const isV2 = msg.v2;\n const [result, error] = await tryCatch(\n this.yjsStorage.addYDocUpdate(this.logger, update, guid, isV2)\n );\n\n if (error)\n // Ignore any errors\n break;\n\n this.sendToAll(\n {\n type: ServerMsgCode.UPDATE_YDOC,\n update,\n guid,\n isSync: false,\n stateVector: null,\n v2: isV2,\n remoteSnapshotHash: result.snapshotHash,\n },\n ctx,\n defer\n );\n if (result.isUpdated) {\n const p$ = this.hooks.postClientMsgYdocDidUpdate?.(ctx, session);\n if (p$) defer(p$);\n }\n\n break;\n }\n\n default: {\n try {\n return assertNever(msg, \"Unrecognized client msg\");\n } catch {\n // Ignore\n }\n }\n }\n }\n}\n\nexport { serialize as serializeServerMsg };\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { JsonObject } from \"@liveblocks/core\";\nimport { raise } from \"@liveblocks/core\";\n\nexport enum LogLevel {\n DEBUG = 0,\n INFO = 1,\n WARNING = 2,\n ERROR = 3,\n}\n\nfunction formatError(err: Error) {\n const prefix = `${err.name}: ${err.message}`;\n return (\n err.stack?.startsWith(prefix) ? err.stack : `${prefix}\\n${err.stack ?? \"\"}`\n ).trimEnd();\n}\n\n/**\n * Inherit from this abstract log target to implement your own custom\n * LogTarget.\n */\nexport abstract class LogTarget {\n public readonly level: LogLevel;\n\n #cache = new WeakMap<JsonObject, string>();\n\n constructor(level: LogLevel | keyof typeof LogLevelNames = LogLevel.INFO) {\n this.level =\n typeof level === \"number\"\n ? level\n : (LogLevelNames[level] ?? 
LogLevel.INFO);\n }\n\n /** Helper for formatting a log level */\n protected formatLevel(level: LogLevel): string {\n switch (level) {\n case LogLevel.DEBUG:\n return \"debug\";\n case LogLevel.INFO:\n return \"info\";\n case LogLevel.WARNING:\n return \"warn\";\n case LogLevel.ERROR:\n return \"error\";\n default:\n return raise(\"Invalid log level\");\n }\n }\n\n /** Helper for formatting an Arg */\n protected formatArg(arg: string | Error): string {\n return typeof arg === \"object\"\n ? arg instanceof Error\n ? formatError(arg)\n : JSON.stringify(arg)\n : String(arg); // Coerce to string in case TypeScript is bypassed\n }\n\n /**\n * Helper for formatting a Context. Override this in a subclass to change the\n * formatting.\n */\n protected formatContextImpl(context: JsonObject): string {\n const parts = [];\n for (const [k, v] of Object.entries(context ?? {})) {\n if (v !== undefined) {\n // Object, or null, or array\n const sv = typeof v === \"object\" ? JSON.stringify(v) : v;\n parts.push(`${k}=${sv}`);\n }\n }\n return parts.length > 0 ? `[${parts.join(\" \")}]` : \"\";\n }\n\n /**\n * Helper for formatting a Context. Will only compute the string once for\n * every Context instance, and keep its computed string value cached for\n * performance.\n */\n protected formatContext(context: JsonObject): string {\n let formatted = this.#cache.get(context);\n if (formatted === undefined) {\n formatted = this.formatContextImpl(context);\n this.#cache.set(context, formatted);\n }\n return formatted;\n }\n\n /**\n * Implement this in a concrete subclass. The goal is to do whatever to log\n * the given log level, context, and log arg. You'll typically want to\n * utilize the pre-defined helper methods .formatContext() and .formatArg()\n * to implement this.\n */\n abstract log(level: LogLevel, context: JsonObject, arg: string | Error): void;\n}\n\n//\n// Console log target ----------------------------------------------------------\n//\n\nconst CONSOLE_METHOD = {\n [LogLevel.DEBUG]: \"info\",\n [LogLevel.INFO]: \"info\",\n [LogLevel.WARNING]: \"warn\",\n [LogLevel.ERROR]: \"error\",\n} as const;\n\nexport class ConsoleTarget extends LogTarget {\n log(level: LogLevel, context: JsonObject, arg: string | Error): void {\n console[CONSOLE_METHOD[level]](\n this.formatArg(arg),\n this.formatContext(context)\n );\n }\n}\n\n//\n// Logger implementation ------------------------------------------------------\n//\n\n// Friendly names to pass to the constructor\nconst LogLevelNames = {\n debug: LogLevel.DEBUG,\n info: LogLevel.INFO,\n warning: LogLevel.WARNING,\n error: LogLevel.ERROR,\n} as const;\n\ntype LogFn = (arg: string | Error) => void;\n\n/**\n * Structured logger with configurable log targets.\n */\nexport class Logger {\n public readonly debug: LogFn;\n public readonly info: LogFn;\n public readonly warn: LogFn;\n public readonly error: LogFn;\n\n public readonly o: {\n readonly debug?: LogFn;\n readonly info?: LogFn;\n readonly warn?: LogFn;\n readonly error?: LogFn;\n };\n\n private readonly _context: JsonObject;\n private readonly _targets: readonly LogTarget[];\n\n constructor(\n target: LogTarget | readonly LogTarget[] = new ConsoleTarget(),\n context: JsonObject = {}\n ) {\n this._context = context;\n this._targets = Array.isArray(target) ? 
target : [target];\n\n const minLevel: number = Math.min(...this._targets.map((t) => t.level));\n\n const noop = () => {};\n const makeLogFn = (lvl: LogLevel) => (arg: string | Error) =>\n this._targets.forEach((target) => {\n if (target.level <= lvl) {\n target.log(lvl, this._context, arg);\n }\n });\n\n this.o = {\n /* eslint-disable @typescript-eslint/no-unsafe-enum-comparison */\n debug: minLevel <= LogLevel.DEBUG ? makeLogFn(LogLevel.DEBUG) : undefined,\n info: minLevel <= LogLevel.INFO ? makeLogFn(LogLevel.INFO) : undefined,\n warn:\n minLevel <= LogLevel.WARNING ? makeLogFn(LogLevel.WARNING) : undefined,\n error: minLevel <= LogLevel.ERROR ? makeLogFn(LogLevel.ERROR) : undefined,\n /* eslint-enable @typescript-eslint/no-unsafe-enum-comparison */\n };\n\n this.debug = this.o.debug ?? noop;\n this.info = this.o.info ?? noop;\n this.warn = this.o.warn ?? noop;\n this.error = this.o.error ?? noop;\n }\n\n /**\n * Creates a new Logger instance with the given extra context applied. All\n * log calls made from that new Logger will carry all current _and_ the extra\n * context, with the extra context taking precedence. Assign an explicit\n * `undefined` value to a key to \"remove\" it from the context.\n */\n withContext(extra: JsonObject): Logger {\n const combined: JsonObject = { ...this._context, ...extra };\n return new Logger(this._targets, combined);\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\n/* eslint-disable @typescript-eslint/explicit-module-boundary-types */\n/* eslint-disable @typescript-eslint/require-await */\nimport type {\n Json,\n JsonObject,\n NodeMap,\n NodeStream,\n PlainLsonObject,\n SerializedChild,\n SerializedCrdt,\n SerializedObject,\n SerializedRootObject,\n} from \"@liveblocks/core\";\nimport { asPos, CrdtType, isRootStorageNode, nn } from \"@liveblocks/core\";\nimport { ifilter, imap } from \"itertools\";\n\nimport type { YDocId } from \"~/decoders/y-types\";\nimport { plainLsonToNodeStream } from \"~/formats/PlainLson\";\nimport type {\n IReadableSnapshot,\n IStorageDriver,\n IStorageDriverNodeAPI,\n} from \"~/interfaces\";\nimport { NestedMap } from \"~/lib/NestedMap\";\nimport { quote } from \"~/lib/text\";\nimport { makeInMemorySnapshot } from \"~/makeInMemorySnapshot\";\nimport type { Pos } from \"~/types\";\n\nfunction buildRevNodes(nodeStream: NodeStream) {\n const result = new NestedMap<string, string, string>();\n for (const node of nodeStream) {\n if (isRootStorageNode(node)) continue;\n\n // Highest node id wins in case of conflict (deterministic across backends)\n const [id, crdt] = node;\n const existing = result.get(crdt.parentId, crdt.parentKey);\n if (existing === undefined || id > existing) {\n result.set(crdt.parentId, crdt.parentKey, id);\n }\n }\n return result;\n}\n\n/**\n * Builds the reverse node index, and corrects any data corruption found\n * along the way.\n */\nfunction buildReverseLookup(nodes: NodeMap) {\n const revNodes = buildRevNodes(nodes as NodeStream);\n\n const queue: string[] = [\"root\"];\n const reachableNodes: Set<string> = new Set();\n\n while (queue.length > 0) {\n const nodeId = queue.pop()!;\n const node = nn(nodes.get(nodeId));\n\n if (node.type === CrdtType.OBJECT) {\n for (const key of revNodes.keysAt(nodeId)) {\n delete node.data[key]; // Remove static data that conflicts with child nodes\n }\n }\n\n if (node.type !== CrdtType.REGISTER) {\n queue.push(...revNodes.valuesAt(nodeId));\n } else {\n const parent = nodes.get(node.parentId);\n if (parent?.type === CrdtType.OBJECT) {\n continue;\n }\n }\n\n reachableNodes.add(nodeId);\n }\n\n // Delete unreachable nodes (safe to delete from Map during iteration)\n let deletedCount = 0;\n for (const [id] of nodes) {\n if (!reachableNodes.has(id)) {\n nodes.delete(id);\n deletedCount++;\n }\n }\n\n // If no nodes were dropped (99% happy path), revNodes is correct already.\n // Otherwise, recompute it.\n return deletedCount === 0 ? revNodes : buildRevNodes(nodes as NodeStream);\n}\n\nfunction hasStaticDataAt(\n node: SerializedCrdt,\n key: string\n): node is SerializedObject | SerializedRootObject {\n return (\n node.type === CrdtType.OBJECT &&\n Object.prototype.hasOwnProperty.call(node.data, key) &&\n node.data[key] !== undefined\n );\n}\n\n/**\n * Implements the most basic in-memory store. Used if no explicit store is\n * provided.\n */\nexport class InMemoryDriver implements IStorageDriver {\n private _nextActor;\n private _nodes: NodeMap;\n private _metadb: Map<string, Json>;\n private _ydb: Map<string, Uint8Array>;\n\n constructor(options?: {\n initialActor?: number;\n initialNodes?: Iterable<[string, SerializedCrdt]>;\n }) {\n this._nodes = new Map();\n this._metadb = new Map();\n this._ydb = new Map();\n\n this._nextActor = options?.initialActor ?? -1;\n\n for (const [key, value] of options?.initialNodes ?? 
[]) {\n this._nodes.set(key, value);\n }\n }\n\n raw_iter_nodes() {\n return this._nodes[Symbol.iterator]();\n }\n\n /** Deletes all nodes and replaces them with the given document. */\n DANGEROUSLY_reset_nodes(doc: PlainLsonObject) {\n this._nodes.clear();\n for (const [id, node] of plainLsonToNodeStream(doc)) {\n this._nodes.set(id, node);\n }\n }\n\n async get_meta(key: string) {\n return this._metadb.get(key);\n }\n async put_meta(key: string, value: Json) {\n this._metadb.set(key, value);\n }\n async delete_meta(key: string) {\n this._metadb.delete(key);\n }\n\n next_actor() {\n return ++this._nextActor;\n }\n\n async iter_y_updates(docId: YDocId) {\n const prefix = `${docId}@|@`;\n return imap(\n ifilter(this._ydb.entries(), ([k]) => k.startsWith(prefix)),\n ([k, v]) => [k.slice(prefix.length), v] as [string, Uint8Array]\n );\n }\n async write_y_updates(docId: YDocId, key: string, data: Uint8Array) {\n this._ydb.set(`${docId}@|@${key}`, data);\n }\n async delete_y_updates(docId: YDocId, keys: string[]) {\n for (const key of keys) {\n this._ydb.delete(`${docId}@|@${key}`);\n }\n }\n\n /** @private Only use this in unit tests, never in production. */\n async DANGEROUSLY_wipe_all_y_updates() {\n this._ydb.clear();\n }\n\n // Intercept load_nodes_api to add caching layer\n load_nodes_api(): IStorageDriverNodeAPI {\n // For the in-memory backend, this._nodes IS the \"on-disk\" storage,\n // so we operate on it directly (no separate cache needed).\n const nodes = this._nodes;\n if (!nodes.has(\"root\")) {\n nodes.set(\"root\", { type: CrdtType.OBJECT, data: {} });\n }\n\n const revNodes = buildReverseLookup(nodes);\n\n function get_next_sibling(parentId: string, pos: Pos): Pos | undefined {\n let nextPos: Pos | undefined;\n // Find the smallest position greater than current\n for (const siblingKey of revNodes.keysAt(parentId)) {\n const siblingPos = asPos(siblingKey);\n if (\n siblingPos > pos &&\n (nextPos === undefined || siblingPos < nextPos)\n ) {\n nextPos = siblingPos;\n }\n }\n return nextPos;\n }\n\n /**\n * Inserts a node in the storage tree, deleting any nodes that already exist\n * under this key (including all of its children), if any.\n */\n async function set_child(\n id: string,\n node: SerializedChild,\n allowOverwrite = false\n ): Promise<void> {\n const parentNode = nodes.get(node.parentId);\n // Reject orphans - parent must exist\n if (parentNode === undefined) {\n throw new Error(`No such parent ${quote(node.parentId)}`);\n }\n\n if (\n node.type === CrdtType.REGISTER &&\n parentNode.type === CrdtType.OBJECT\n ) {\n throw new Error(\"Cannot add register under object\");\n }\n\n const conflictingSiblingId = revNodes.get(node.parentId, node.parentKey);\n if (conflictingSiblingId !== id) {\n // Conflict!\n const parentNode = nodes.get(node.parentId);\n const hasConflictingData =\n parentNode !== undefined &&\n hasStaticDataAt(parentNode, node.parentKey);\n if (conflictingSiblingId !== undefined || hasConflictingData) {\n if (allowOverwrite) {\n delete_child_key(node.parentId, node.parentKey);\n } else {\n throw new Error(`Key ${quote(node.parentKey)} already exists`); // prettier-ignore\n }\n }\n\n // Finally, modify revNodes\n revNodes.set(node.parentId, node.parentKey, id);\n }\n\n nodes.set(id, node);\n }\n\n /**\n * Conceptually this is like \"detaching\" the node from its parent, and\n * \"reattaching\" it at the new position.\n *\n * However, this is a native operation, because doing a naive\n * delete-then-insert would would immediately destroy all (grand)children\n 
* when it's deleted.\n */\n async function move_sibling(id: string, newPos: Pos): Promise<void> {\n const node = nodes.get(id);\n if (node?.parentId === undefined) {\n return;\n }\n\n // If there is a conflicting sibling at the new position, disallow the move\n if (revNodes.has(node.parentId, newPos))\n throw new Error(`Pos ${quote(newPos)} already taken`); // prettier-ignore\n\n revNodes.delete(node.parentId, node.parentKey);\n const newNode = { ...node, parentKey: newPos };\n nodes.set(id, newNode);\n revNodes.set(node.parentId, newPos, id);\n }\n\n /**\n * Sets some static data on a node. The node must be an OBJECT node, or this\n * method will be a no-op.\n *\n * If any keys exist that also conflict with a child node, then the conflict\n * mode will determine what will happen. By default, an error will be thrown.\n * But if `allowOverwrite` is set to true, the conflicting child node (and\n * its entire subtree) will be deleted to make room for the new static data.\n */\n async function set_object_data(\n id: string,\n data: JsonObject,\n allowOverwrite = false\n ): Promise<void> {\n const node = nodes.get(id);\n if (node?.type !== CrdtType.OBJECT) {\n // Nothing to do\n return;\n }\n\n for (const key of Object.keys(data)) {\n // Handle if conflict!\n const childId = revNodes.get(id, key);\n if (childId !== undefined) {\n if (allowOverwrite) {\n delete_node(childId);\n } else {\n throw new Error(`Child node already exists under ${quote(key)}`); // prettier-ignore\n }\n }\n }\n\n nodes.set(id, { ...node, data: { ...node.data, ...data } });\n }\n\n /**\n * Delete a node from the tree, including all of its children.\n */\n function delete_node(id: string): void {\n const node = nodes.get(id);\n if (node?.parentId === undefined) {\n return;\n }\n\n // Delete the entry in the parent's children administration for this node\n revNodes.delete(node.parentId, node.parentKey);\n\n // Now proceed to deleting the node tree recursively\n const queue = [id];\n while (queue.length > 0) {\n const currid = queue.pop()!;\n queue.push(...revNodes.valuesAt(currid));\n nodes.delete(currid);\n revNodes.deleteAll(currid);\n }\n }\n\n /**\n * Deletes the child key under a given node, whether it's a static object\n * field, or a child node.\n */\n function delete_child_key(id: string, key: string): void {\n // At most one of these will do something, the other is a no-op\n const node = nodes.get(id);\n if (node !== undefined && hasStaticDataAt(node, key)) {\n const { [key]: _, ...rest } = node.data;\n nodes.set(id, { ...node, data: rest });\n }\n\n const childId = revNodes.get(id, key);\n if (childId !== undefined) {\n delete_node(childId);\n }\n }\n\n const api: IStorageDriverNodeAPI = {\n /**\n * Return the node with the given id, or undefined if no such node exists.\n * Must always return a valid root node for id=\"root\", even if empty.\n */\n get_node: (id) => nodes.get(id),\n\n /**\n * Yield all nodes as [id, node] pairs. Must always include the root node.\n */\n iter_nodes: () => nodes as NodeStream,\n\n /**\n * Return true iff a node with the given id exists. Must return true for \"root\".\n */\n has_node: (id) => nodes.has(id),\n\n /**\n * Return the id of the child node at (parentId, parentKey), or undefined if\n * none. Only checks child nodes registered via set_child, NOT static data\n * keys on OBJECT nodes.\n */\n get_child_at: (id, key) => revNodes.get(id, key),\n\n /**\n * Return true iff a child node exists at (parentId, parentKey). 
Static data\n * keys on OBJECT nodes do not count—return false for those.\n */\n has_child_at: (id, key) => revNodes.has(id, key),\n\n /**\n * Return the position of the closest sibling \"to the right\" of `pos` under\n * parentId, or undefined if no such sibling exists. The given `pos` may, but\n * does not have to exist already. Positions compare lexicographically.\n */\n get_next_sibling,\n\n /**\n * Insert a child node with the given id.\n *\n * If allowOverwrite=false (default): throw if a node with this id exists.\n * If allowOverwrite=true: replace any existing node at this id, deleting its\n * entire subtree if it has children.\n */\n set_child,\n\n /**\n * Change a node's parentKey, effectively repositioning the node within its\n * parent. The new position must be free.\n * Throw if another node already occupies (parentId, newPos).\n */\n move_sibling,\n\n /**\n * Delete a node and its entire subtree recursively.\n * Ignore if id=\"root\" (root is immortal).\n */\n delete_node,\n\n /**\n * Delete a key from node `id`. Handle two cases:\n *\n * 1. If id is an OBJECT with `key` in its data: remove that data field.\n * 2. If a child exists at (id, key): delete that child and all its\n * descendants recursively.\n *\n * No-op if neither applies or if the node doesn't exist.\n */\n delete_child_key,\n\n /**\n * Replace the data object of an OBJECT node.\n *\n * If allowOverwrite=false (default): throw if any key in `data` conflicts\n * with an existing child's parentKey.\n * If allowOverwrite=true: first delete any conflicting children (and their\n * entire subtrees), then set the data.\n */\n set_object_data,\n\n /**\n * Return a readable snapshot of the storage tree.\n *\n * @param lowMemory When true, the call site hints that the snapshot should\n * be optimized for lower memory consumption, even if that means slower\n * access.\n */\n get_snapshot(_lowMemory?: boolean): IReadableSnapshot {\n return makeInMemorySnapshot(nodes);\n },\n };\n return api;\n }\n}\n\nexport function makeNewInMemoryDriver(options?: {\n initialActor?: number;\n initialNodes?: Iterable<[string, SerializedCrdt]>;\n}): IStorageDriver {\n return new InMemoryDriver(options);\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\n/**\n * Wraps single-quotes around any string value. Useful for displaying field\n * names or other identifiers in error messages or logs.\n *\n * Examples:\n * quote(\"hi\") // \"'hi'\"\n * quote(\"i'm\") // \"'i'm'\"\n *\n * Note: no \"escaping\" happens here to the string value. This is because this\n * is intended to be used for human consumption, not machine consumption.\n */\nexport function quote(value: string | undefined): string {\n return value !== undefined ? 
`'${value}'` : \"???\";\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type {\n Awaitable,\n SerializedChild,\n SerializedCrdt,\n} from \"@liveblocks/core\";\nimport {\n asPos,\n assertNever,\n CrdtType,\n makePosition,\n OpCode,\n} from \"@liveblocks/core\";\n\nimport type { IStorageDriver, IStorageDriverNodeAPI } from \"~/interfaces\";\nimport type { Logger } from \"~/lib/Logger\";\nimport type {\n ClientWireOp,\n CreateOp,\n DeleteCrdtOp,\n DeleteObjectKeyOp,\n FixOp,\n HasOpId,\n SetParentKeyOp,\n UpdateObjectOp,\n} from \"~/protocol\";\nimport type { Pos } from \"~/types\";\n\ntype ApplyOpResult = OpAccepted | OpIgnored;\n\nexport type OpAccepted = {\n action: \"accepted\";\n op: ClientWireOp;\n fix?: FixOp;\n};\n\nexport type OpIgnored = {\n action: \"ignored\";\n ignoredOpId?: string;\n};\n\nfunction accept(op: ClientWireOp, fix?: FixOp): OpAccepted {\n return { action: \"accepted\", op, fix };\n}\n\nfunction ignore(ignoredOp: ClientWireOp): OpIgnored {\n return { action: \"ignored\", ignoredOpId: ignoredOp.opId };\n}\n\nfunction nodeFromCreateChildOp(op: CreateOp): SerializedChild {\n switch (op.type) {\n case OpCode.CREATE_LIST:\n return {\n type: CrdtType.LIST,\n parentId: op.parentId,\n parentKey: op.parentKey,\n };\n\n case OpCode.CREATE_MAP:\n return {\n type: CrdtType.MAP,\n parentId: op.parentId,\n parentKey: op.parentKey,\n };\n\n case OpCode.CREATE_OBJECT:\n return {\n type: CrdtType.OBJECT,\n parentId: op.parentId,\n parentKey: op.parentKey,\n data: op.data,\n };\n\n case OpCode.CREATE_REGISTER:\n return {\n type: CrdtType.REGISTER,\n parentId: op.parentId,\n parentKey: op.parentKey,\n data: op.data,\n };\n\n // istanbul ignore next\n default:\n return assertNever(op, \"Unknown op code\");\n }\n}\n\nexport class Storage {\n // The actual underlying storage API (could be backed by in-memory store,\n // SQLite, Redis, Postgres, Cloudflare Durable Object Storage, etc.)\n private readonly coreDriver: IStorageDriver;\n private _loadedDriver: IStorageDriverNodeAPI | undefined;\n\n constructor(coreDriver: IStorageDriver) {\n this.coreDriver = coreDriver;\n }\n\n // -------------------------------------------------------------------------\n // Public API (for Storage)\n // -------------------------------------------------------------------------\n\n get loadedDriver(): IStorageDriverNodeAPI {\n if (this._loadedDriver === undefined) {\n throw new Error(\"Cannot access tree before it's been loaded\");\n }\n return this._loadedDriver;\n }\n\n // REFACTOR NOTE: Eventually raw_iter_nodes has to be removed here\n raw_iter_nodes(): Awaitable<Iterable<[string, SerializedCrdt]>> {\n return this.coreDriver.raw_iter_nodes();\n }\n\n /**\n * Load the room data from object storage into memory. 
Persisted room\n * data consists of the main node map, which represents the Liveblocks\n * Storage tree, and special keys where we store usage metrics, or room\n * metadata.\n */\n async load(logger: Logger): Promise<void> {\n this._loadedDriver = await this.coreDriver.load_nodes_api(logger);\n }\n\n unload(): void {\n this._loadedDriver = undefined;\n }\n\n /**\n * Applies a batch of Ops.\n */\n async applyOps(ops: ClientWireOp[]): Promise<ApplyOpResult[]> {\n const results: ApplyOpResult[] = [];\n for (const op of ops) {\n results.push(await this.applyOp(op));\n }\n return results;\n }\n\n // -------------------------------------------------------------------------\n // Private APIs (for Storage)\n // -------------------------------------------------------------------------\n\n /**\n * Applies a single Op.\n */\n private async applyOp(op: ClientWireOp): Promise<ApplyOpResult> {\n switch (op.type) {\n case OpCode.CREATE_LIST:\n case OpCode.CREATE_MAP:\n case OpCode.CREATE_REGISTER:\n case OpCode.CREATE_OBJECT:\n return await this.applyCreateOp(op);\n\n case OpCode.UPDATE_OBJECT:\n return await this.applyUpdateObjectOp(op);\n\n case OpCode.SET_PARENT_KEY:\n return await this.applySetParentKeyOp(op);\n\n case OpCode.DELETE_OBJECT_KEY:\n return await this.applyDeleteObjectKeyOp(op);\n\n case OpCode.DELETE_CRDT:\n return await this.applyDeleteCrdtOp(op);\n\n // istanbul ignore next\n default:\n if (process.env.NODE_ENV === \"production\") {\n return ignore(op);\n } else {\n return assertNever(op, \"Invalid op\");\n }\n }\n }\n\n private async applyCreateOp(op: CreateOp & HasOpId): Promise<ApplyOpResult> {\n if (this.loadedDriver.has_node(op.id)) {\n // Node already exists, the operation is ignored\n return ignore(op);\n }\n\n const node = nodeFromCreateChildOp(op);\n\n const parent = this.loadedDriver.get_node(node.parentId);\n if (parent === undefined) {\n // Parent does not exist because the op is invalid or because it was deleted in race condition.\n return ignore(op);\n }\n\n // How to create this node in the node map depends on the parent node's type\n switch (parent.type) {\n case CrdtType.OBJECT:\n // Register children under object nodes are forbidden. We'll simply\n // ignore these Ops. This matches the eventual storage behavior: if\n // we'd persist them, they would get ignored when re-loading the\n // persisted room data into memory the next time the room loads.\n if (op.type === OpCode.CREATE_REGISTER) {\n return ignore(op);\n }\n // fall through\n\n case CrdtType.MAP:\n // Children of maps and objects require no special needs\n await this.loadedDriver.set_child(op.id, node, true);\n return accept(op);\n\n case CrdtType.LIST:\n // List items need special handling around conflicting resolution,\n // which depends on the users intention\n return this.createChildAsListItem(op, node);\n\n case CrdtType.REGISTER:\n // It's illegal for registers to have children\n return ignore(op);\n\n // istanbul ignore next\n default:\n return assertNever(parent, \"Unhandled CRDT type\");\n }\n }\n\n private async createChildAsListItem(\n op: CreateOp & HasOpId,\n node: SerializedChild\n ): Promise<ApplyOpResult> {\n let fix: FixOp | undefined;\n\n // The default intent, when not explicitly provided, is to insert, not set,\n // into the list.\n const intent: \"insert\" | \"set\" = op.intent ?? 
\"insert\";\n\n // istanbul ignore else\n if (intent === \"insert\") {\n const insertedParentKey = await this.insertIntoList(op.id, node);\n\n // If the inserted parent key is different from the input, it means there\n // was a conflict and the node has been inserted in an alternative free\n // list position. We should broadcast a modified Op to all clients that\n // has the modified position, and send a \"fix\" op back to the originating\n // client.\n if (insertedParentKey !== node.parentKey) {\n op = { ...op, parentKey: insertedParentKey };\n fix = {\n type: OpCode.SET_PARENT_KEY,\n id: op.id,\n parentKey: insertedParentKey,\n };\n return accept(op, fix);\n }\n\n // No conflict, node got inserted as intended\n return accept(op);\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n else if (intent === \"set\") {\n // The intent here is to \"set\", not insert, into the list, replacing the\n // existing item that\n\n // Special handling required here. They will include a \"deletedId\" that\n // points to the object they expect to be replacing. If in the mean time,\n // that object disappeared there (because it was moved, for example), be\n // sure to delete it anyway.\n // We should not just trust the given value, because we're about to\n // delete a node. It's only safe to delete the node if it indeed is\n // a sibling of the current node.\n const deletedId =\n op.deletedId !== undefined &&\n op.deletedId !== op.id &&\n this.loadedDriver.get_node(op.deletedId)?.parentId === node.parentId\n ? op.deletedId\n : undefined;\n\n if (deletedId !== undefined) {\n await this.loadedDriver.delete_node(deletedId);\n }\n\n const prevItemId = this.loadedDriver.get_child_at(\n node.parentId,\n node.parentKey\n );\n if (prevItemId !== undefined && prevItemId !== deletedId) {\n // If this \"set\" operation indeed removed an item, but it wasn't the\n // expected `deletedId`, let the invoking client know that they'll\n // have to delete this object, too.\n fix = {\n type: OpCode.DELETE_CRDT,\n id: prevItemId,\n };\n }\n\n await this.loadedDriver.set_child(op.id, node, true);\n\n return accept(op, fix);\n } else {\n return assertNever(intent, \"Invalid intent\");\n }\n }\n\n private async applyDeleteObjectKeyOp(\n op: DeleteObjectKeyOp & HasOpId\n ): Promise<ApplyOpResult> {\n await this.loadedDriver.delete_child_key(op.id, op.key);\n return accept(op);\n }\n\n private async applyUpdateObjectOp(\n op: UpdateObjectOp & HasOpId\n ): Promise<ApplyOpResult> {\n await this.loadedDriver.set_object_data(op.id, op.data, true);\n return accept(op);\n }\n\n private async applyDeleteCrdtOp(\n op: DeleteCrdtOp & HasOpId\n ): Promise<ApplyOpResult> {\n await this.loadedDriver.delete_node(op.id);\n return accept(op);\n }\n\n private async applySetParentKeyOp(\n op: SetParentKeyOp & HasOpId\n ): Promise<ApplyOpResult> {\n const newPosition = await this.moveToPosInList(op.id, op.parentKey);\n if (newPosition === undefined) {\n // The operation got rejected because it didn't make sense, ignore it\n return ignore(op);\n }\n\n // If the inserted node is different from the input, it means there was\n // a conflict and the node has been inserted in a new, free, list position.\n // We should broadcast a modified Op to all clients that has the modified\n // position, and send a \"fix\" op back to the originating client.\n if (newPosition !== op.parentKey) {\n const modifiedOp = { ...op, parentKey: newPosition };\n const fix: FixOp = {\n type: OpCode.SET_PARENT_KEY,\n id: op.id,\n parentKey: 
newPosition,\n };\n return accept(modifiedOp, fix);\n } else {\n return accept(op);\n }\n }\n\n /**\n * Inserts a new node in the storage tree, under a list parent. If an\n * existing sibling node already exist under this key, however, it will look\n * for another free position under that parent and insert it under\n * a different parent key that is guaranteed to be available.\n *\n * Returns the key that was used for the insertion.\n */\n private async insertIntoList(\n id: string,\n node: SerializedChild\n ): Promise<string> {\n // First, compute the key to use to insert this node\n const key = this.findFreeListPosition(node.parentId, asPos(node.parentKey));\n if (key !== node.parentKey) {\n node = { ...node, parentKey: key };\n }\n await this.loadedDriver.set_child(id, node);\n return node.parentKey;\n }\n\n /**\n * Tries to move a node to the given position under the same parent. If\n * a conflicting sibling node already exist at this position, it will use\n * another free position instead, to avoid the conflict.\n *\n * Returns the position (parentKey) that the node was eventually placed at.\n * If the node could be inserted without conflict, it will return the same\n * parentKey position.\n *\n * Will return `undefined` if this action could not be interpreted. Will be\n * a no-op for non-list items.\n */\n private async moveToPosInList(\n id: string,\n targetKey: string\n ): Promise<string | undefined> {\n const node = this.loadedDriver.get_node(id);\n if (node?.parentId === undefined) {\n return; /* reject */\n }\n\n if (this.loadedDriver.get_node(node.parentId)?.type !== CrdtType.LIST) {\n // SetParentKeyOp is a no-op for all nodes, except list items\n return; /* reject */\n }\n\n if (node.parentKey === targetKey) {\n // Already there\n return targetKey; /* no-op */\n }\n\n // First, compute the key to use to insert this node\n const key = this.findFreeListPosition(node.parentId, asPos(targetKey));\n if (key !== node.parentKey) {\n await this.loadedDriver.move_sibling(id, key);\n }\n return key;\n }\n\n /**\n * Checks whether the given parentKey is a \"free position\" under the\n * parentId, i.e. there are no siblings that have the same key. If a sibling\n * exists under that key, it tries to generate new positions until it finds\n * a free slot, and returns that. The returned value is therefore always safe\n * to use as parentKey.\n */\n private findFreeListPosition(parentId: string, parentPos: Pos): Pos {\n if (!this.loadedDriver.has_child_at(parentId, parentPos)) {\n return parentPos;\n }\n\n const currPos = parentPos;\n const nextPos = this.loadedDriver.get_next_sibling(parentId, currPos);\n if (nextPos !== undefined) {\n return makePosition(currPos, nextPos); // Between current and next\n } else {\n return makePosition(currPos); // After current (fallback)\n }\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport { DefaultMap } from \"@liveblocks/core\";\nimport { Base64 } from \"js-base64\";\nimport { nanoid } from \"nanoid\";\nimport * as Y from \"yjs\";\n\nimport type { Guid, YDocId } from \"~/decoders\";\nimport { ROOT_YDOC_ID } from \"~/decoders\";\nimport type { IStorageDriver } from \"~/interfaces\";\nimport type { Logger } from \"~/lib/Logger\";\n\n// How big an update can be until we compress all individual updates into\n// a single vector and persist that instead (i.e. when we trigger \"garbage\n// collection\")\nconst MAX_Y_UPDATE_SIZE = 100_000;\n\ntype YUpdateInfo = {\n currentKey: string;\n lastVector: Uint8Array | undefined;\n};\n\nexport class YjsStorage {\n private readonly driver: IStorageDriver;\n\n private readonly doc: Y.Doc = new Y.Doc(); // the root document\n private readonly lastUpdatesById = new Map<YDocId, YUpdateInfo>();\n private readonly lastSnapshotById = new Map<YDocId, Y.Snapshot>();\n // Keeps track of which keys are loaded, so we can clean them up without calling `.list()`\n private readonly keysById = new DefaultMap<YDocId, Set<string>>(\n () => new Set()\n );\n private readonly initPromisesById: Map<YDocId, Promise<Y.Doc>> = new Map();\n\n constructor(driver: IStorageDriver) {\n this.driver = driver;\n this.doc.on(\"subdocs\", ({ removed }) => {\n removed.forEach((subdoc: Y.Doc) => {\n subdoc.destroy(); // will remove listeners\n });\n });\n }\n\n // ------------------------------------------------------------------------------------\n // Public API\n // ------------------------------------------------------------------------------------\n\n public async getYDoc(docId: YDocId): Promise<Y.Doc> {\n const doc = await this.loadDocByIdIfNotAlreadyLoaded(docId);\n return doc;\n }\n\n /**\n * If passed a state vector, an update with diff will be returned, if not the entire doc is returned.\n *\n * @param stateVector a base64 encoded target state vector created by running Y.encodeStateVector(Doc) on the client\n * @returns a base64 encoded array of YJS updates\n */\n public async getYDocUpdate(\n logger: Logger,\n stateVector: string = \"\",\n guid?: Guid,\n isV2: boolean = false\n ): Promise<string | null> {\n const update = await this.getYDocUpdateBinary(\n logger,\n stateVector,\n guid,\n isV2\n );\n if (!update) return null;\n return Base64.fromUint8Array(update);\n }\n\n public async getYDocUpdateBinary(\n logger: Logger,\n stateVector: string = \"\",\n guid?: Guid,\n isV2: boolean = false\n ): Promise<Uint8Array | null> {\n const doc = guid !== undefined ? await this.getYSubdoc(guid) : this.doc;\n if (!doc) {\n return null;\n }\n let encodedTargetVector;\n try {\n // if given a state vector, attempt to decode it a single diffed update\n encodedTargetVector =\n stateVector.length > 0 ? Base64.toUint8Array(stateVector) : undefined;\n } catch (e) {\n logger.warn(\n \"Could not get update from passed vector, returning all updates\"\n );\n }\n if (isV2) {\n return Y.encodeStateAsUpdateV2(doc, encodedTargetVector);\n }\n return Y.encodeStateAsUpdate(doc, encodedTargetVector);\n }\n\n public async getYStateVector(guid?: Guid): Promise<string | null> {\n const doc = guid !== undefined ? await this.getYSubdoc(guid) : this.doc;\n if (!doc) {\n return null;\n }\n return Base64.fromUint8Array(Y.encodeStateVector(doc));\n }\n\n public async getSnapshotHash(options: {\n guid?: Guid;\n isV2?: boolean;\n }): Promise<string | null> {\n const doc =\n options.guid !== undefined\n ? 
await this.getYSubdoc(options.guid)\n : this.doc;\n if (!doc) {\n return null;\n }\n const snapshot = this._getOrPutLastSnapshot(doc);\n return this.calculateSnapshotHash(snapshot, { isV2: options.isV2 });\n }\n\n /**\n * @param update base64 encoded uint8array\n * @returns\n */\n public async addYDocUpdate(\n logger: Logger,\n update: string | Uint8Array,\n guid?: Guid,\n isV2?: boolean\n ): Promise<{ isUpdated: boolean; snapshotHash: string }> {\n const doc = guid !== undefined ? await this.getYSubdoc(guid) : this.doc;\n if (!doc) {\n throw new Error(`YDoc with guid ${guid} not found`);\n }\n\n try {\n // takes a snapshot if none is stored in memory - NOTE: snapshots are a combination of statevector + deleteset, not a full doc\n const beforeSnapshot = this._getOrPutLastSnapshot(doc);\n const updateAsU8 =\n typeof update === \"string\" ? Base64.toUint8Array(update) : update;\n const applyUpdate = isV2 ? Y.applyUpdateV2 : Y.applyUpdate;\n applyUpdate(doc, updateAsU8, \"client\");\n // put the new \"after update\" snapshot\n const afterSnapshot = this._putLastSnapshot(doc);\n // Check the snapshot before/after to see if the update had an effect\n const updated = !Y.equalSnapshots(beforeSnapshot, afterSnapshot);\n if (updated) {\n await this.handleYDocUpdate(doc);\n }\n\n return {\n isUpdated: updated,\n snapshotHash: await this.calculateSnapshotHash(afterSnapshot, { isV2 }),\n };\n } catch (e) {\n // The only reason this would happen is if a user would send bad data\n logger.warn(`Ignored bad YDoc update: ${String(e)}`);\n throw new Error(\n \"Bad YDoc update. Data is corrupted, or data does not match the encoding.\"\n );\n }\n }\n\n public loadDocByIdIfNotAlreadyLoaded(docId: YDocId): Promise<Y.Doc> {\n let loaded$ = this.initPromisesById.get(docId);\n let doc = docId === ROOT_YDOC_ID ? this.doc : this.findYSubdocByGuid(docId);\n if (!doc) {\n // An API call can load a subdoc without the root doc (this._doc) being loaded, we account for that by just instantiating a doc here.\n doc = new Y.Doc();\n }\n if (loaded$ === undefined) {\n loaded$ = this._loadYDocFromDurableStorage(doc, docId);\n this.initPromisesById.set(docId, loaded$);\n }\n return loaded$;\n }\n\n public async load(_logger: Logger): Promise<void> {\n await this.loadDocByIdIfNotAlreadyLoaded(ROOT_YDOC_ID);\n }\n\n /**\n * Unloads the Yjs documents from memory.\n */\n public unload(): void {\n // YYY Implement this later!\n // YYY We're currently never unloading data read into memory, but let's\n // sync this with the .unload() method from Storage, so there will not be\n // any surprises here later!\n //\n // this.doc = new Y.Doc();\n // this.initPromisesById.clear();\n // this.lastUpdatesById.clear();\n // this.keysById.clear();\n // this.initPromisesById.clear();\n }\n\n // ------------------------------------------------------------------------------------\n // Private APIs\n // ------------------------------------------------------------------------------------\n\n // NOTE: We could instead store the hash of snapshot instead of the whole snapshot to optimize memory usage.\n private _getOrPutLastSnapshot(doc: Y.Doc): Y.Snapshot {\n const docId: YDocId =\n doc.guid === this.doc.guid ? 
ROOT_YDOC_ID : (doc.guid as Guid);\n const snapshot = this.lastSnapshotById.get(docId);\n if (snapshot) {\n return snapshot;\n }\n return this._putLastSnapshot(doc);\n }\n\n // NOTE: We could instead store the hash of snapshot instead of the whole snapshot to optimize memory usage.\n private _putLastSnapshot(doc: Y.Doc): Y.Snapshot {\n const docId: YDocId =\n doc.guid === this.doc.guid ? ROOT_YDOC_ID : (doc.guid as Guid);\n const snapshot = Y.snapshot(doc);\n this.lastSnapshotById.set(docId, snapshot);\n return snapshot;\n }\n /**\n * Given a record of updates, merge them and compress if savings are significant\n */\n private _loadAndCompressYJSUpdates = async (\n docUpdates: Record<string, Uint8Array>,\n doc: Y.Doc,\n docId: YDocId\n ): Promise<void> => {\n // the percent we need to save to trigger re-writing storage, ie. only rewrite storage if we save more than 20%\n const SAVINGS_THRESHOLD = 0.2;\n // get all updates from disk\n const updates = Object.values(docUpdates);\n // uint8arrays size on disk is equal to their length, combine them to see how much we're using\n const sizeOnDisk = updates.reduce((acc, update) => {\n return acc + update.length;\n }, 0);\n if (updates.length > 0) {\n const docKeys = Object.keys(docUpdates);\n // keep track of keys in use\n this.keysById.set(docId, new Set(docKeys));\n\n const mergedUpdate = Y.mergeUpdates(updates);\n // Garbage collection won't happen unless we actually apply the update\n Y.applyUpdate(doc, mergedUpdate);\n\n // get the update so we can check out how big it is\n const garbageCollectedUpdate = Y.encodeStateAsUpdate(doc);\n\n if (\n garbageCollectedUpdate.length <\n sizeOnDisk * (1 - SAVINGS_THRESHOLD)\n ) {\n const newKey = nanoid();\n await this.driver.write_y_updates(\n docId,\n newKey,\n garbageCollectedUpdate\n );\n // delete all old keys, we're going to write new merged updates\n await this.driver.delete_y_updates(docId, docKeys);\n this.keysById.set(docId, new Set([newKey]));\n }\n }\n };\n\n private _loadYDocFromDurableStorage = async (\n doc: Y.Doc,\n docId: YDocId\n ): Promise<Y.Doc> => {\n const docUpdates = Object.fromEntries(\n await this.driver.iter_y_updates(docId)\n );\n await this._loadAndCompressYJSUpdates(docUpdates, doc, docId);\n // store the vector of the last update\n this.lastUpdatesById.set(docId, {\n currentKey: nanoid(),\n lastVector: Y.encodeStateVector(doc),\n });\n doc.emit(\"load\", [doc]); // sets the \"isLoaded\" to true on the doc\n\n return doc;\n };\n\n private findYSubdocByGuid(guid: Guid): Y.Doc | null {\n for (const subdoc of this.doc.getSubdocs()) {\n if (subdoc.guid === guid) {\n return subdoc;\n }\n }\n return null;\n }\n\n private async calculateSnapshotHash(\n snapshot: Y.Snapshot,\n { isV2 }: { isV2?: boolean }\n ): Promise<string> {\n const encodedSnapshot = isV2\n ? Y.encodeSnapshotV2(snapshot)\n : Y.encodeSnapshot(snapshot);\n return Base64.fromUint8Array(\n new Uint8Array(\n await crypto.subtle.digest(\"SHA-256\", new Uint8Array(encodedSnapshot))\n )\n );\n }\n\n // gets a subdoc, it will be loaded if not already loaded\n private async getYSubdoc(guid: Guid): Promise<Y.Doc | null> {\n const subdoc = this.findYSubdocByGuid(guid);\n if (!subdoc) {\n return null;\n }\n await this.loadDocByIdIfNotAlreadyLoaded(guid);\n return subdoc;\n }\n\n // When the YJS doc changes, update it in durable storage\n private async handleYDocUpdate(doc: Y.Doc): Promise<void> {\n const docId: YDocId =\n doc.guid === this.doc.guid ? 
ROOT_YDOC_ID : (doc.guid as Guid);\n const docUpdateInfo = this.lastUpdatesById.get(docId);\n // get the update since last vector\n const updateSinceLastVector = Y.encodeStateAsUpdate(\n doc,\n docUpdateInfo?.lastVector\n );\n // this should happen before the await on putYDoc to avoid race conditions\n // but we need the current key before, so store it here\n const storageKey = docUpdateInfo?.currentKey ?? nanoid();\n if (updateSinceLastVector.length > MAX_Y_UPDATE_SIZE) {\n // compress update, not using the vector, we want to write the whole doc\n const newKey = nanoid();\n await this.driver.write_y_updates(\n docId,\n newKey,\n Y.encodeStateAsUpdate(doc)\n );\n // delete all old keys on disk\n await this.driver.delete_y_updates(\n docId,\n Array.from(this.keysById.getOrCreate(docId))\n );\n // update the keys we have stored\n this.keysById.set(docId, new Set([newKey]));\n // future updates will write from this vector and to this key\n this.lastUpdatesById.set(docId, {\n currentKey: nanoid(), // start writing to a new key\n lastVector: Y.encodeStateVector(doc),\n });\n } else {\n // in this case, the update is small enough, just overwrite it\n await this.driver.write_y_updates(\n docId,\n storageKey,\n updateSinceLastVector\n );\n const keys = [storageKey];\n // keep track of keys used\n const currentKeys = this.keysById.getOrCreate(docId);\n for (const key of keys) {\n currentKeys.add(key);\n }\n }\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\n/**\n * Given a promise or promise factory, returns a 2-tuple of success or failure.\n * This pattern avoids having to build deeply nested try / catch clauses, where\n * success variables need to be defined as a `let` outside of the `try` block.\n *\n * Turns:\n *\n * let result;\n * try {\n * result = await doSomething();\n * } catch (error) {\n * // do something with error\n * }\n *\n * doAnotherThing(result);\n *\n * Into:\n *\n * const [result, error] = await tryCatch(doSomething());\n * if (error) {\n * // do something with error\n * }\n * doAnotherThing(result);\n *\n */\nexport async function tryCatch<T, E = Error>(\n promise: Promise<T> | (() => Promise<T>) | (() => T)\n): Promise<[T, undefined] | [undefined, E]> {\n try {\n const data = await (typeof promise === \"function\" ? 
promise() : promise);\n return [data, undefined];\n } catch (error) {\n return [undefined, error as E];\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. If not, see <https://www.gnu.org/licenses/>.\n */\n\n/**\n * Like ES6 map, but also provides a unique reverse lookup index for values\n * stored in the map.\n *\n * Useful for code like:\n *\n * // Store a list of persons by their IDs, but each person's email must also\n * // be unique\n * const map = new UniqueMap((person) => person.email);\n * map.set(1, { name: 'John Doe', email: 'john@example.org' });\n * map.set(2, { name: 'John Foo', email: 'john@example.org' }); // Will error!\n * map.delete(1);\n * map.set(3, { name: 'Johnny', email: 'john@example.org' }); // Now it's allowed\n *\n * map.getReverseKey('john@example.org') // 3\n * map.getReverse('john@example.org') // { name: 'Johnny', email: 'john@example.org' }\n *\n */\nexport class UniqueMap<K, V, UK> extends Map<K, V> {\n // / \\\n // Primary key Unique key\n #_revMap: Map<UK, K>;\n #_keyFn: (value: V) => UK;\n\n constructor(\n keyFn: (value: V) => UK\n // entries?: readonly (readonly [K, V])[] | null\n ) {\n super(); // super(entries)\n this.#_keyFn = keyFn;\n this.#_revMap = new Map();\n }\n\n lookupPrimaryKey(uniqKey: UK): K | undefined {\n return this.#_revMap.get(uniqKey);\n }\n\n lookup(uniqKey: UK): V | undefined {\n const key = this.#_revMap.get(uniqKey);\n return key !== undefined ? this.get(key) : undefined;\n }\n\n set(key: K, value: V): this {\n const uniqKey = this.#_keyFn(value);\n const primaryKey = this.#_revMap.get(uniqKey);\n if (primaryKey !== undefined && primaryKey !== key) {\n throw new Error(`Unique key ${String(uniqKey)} already exists`);\n }\n this.#_revMap.set(uniqKey, key);\n return super.set(key, value);\n }\n\n delete(primaryKey: K): boolean {\n const value = this.get(primaryKey);\n if (value !== undefined) {\n const indexedKey = this.#_keyFn(value);\n this.#_revMap.delete(indexedKey);\n }\n return super.delete(primaryKey);\n }\n}\n","/**\n * Copyright (c) Liveblocks Inc.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Affero General Public License as published\n * by the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU Affero General Public License for more details.\n *\n * You should have received a copy of the GNU Affero General Public License\n * along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n */\n\nimport type { BaseUserMeta, JsonObject } from \"@liveblocks/core\";\nimport { ServerMsgCode } from \"@liveblocks/core\";\n\nimport type { RoomStateServerMsg } from \"~/protocol\";\n\n/**\n * Concatenates multiple Uint8Arrays into a single Uint8Array.\n */\nexport function concatUint8Arrays(arrays: Uint8Array[]): Uint8Array {\n const totalLength = arrays.reduce((sum, arr) => sum + arr.length, 0);\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const arr of arrays) {\n result.set(arr, offset);\n offset += arr.length;\n }\n return result;\n}\n\nexport function makeRoomStateMsg(\n actor: number,\n nonce: string,\n scopes: string[],\n users: Record<number, BaseUserMeta & { scopes: string[] }>,\n publicMeta?: JsonObject\n): RoomStateServerMsg<BaseUserMeta> {\n return {\n type: ServerMsgCode.ROOM_STATE,\n actor,\n nonce,\n scopes,\n users,\n meta: publicMeta ?? {},\n };\n}\n"],"mappings":";;;;;;;;;;;;AAkBA,SAAS,qBAAqB;AAE9B;AAAA,EACE;AAAA,EACA;AAAA,EACA,YAAAA;AAAA,EACA;AAAA,EACA,UAAAC;AAAA,EACA,YAAAC;AAAA,EACA,UAAAC;AAAA,EACA,eAAAC;AAAA,OACK;;;ACVP,SAAS,eAAe;AAYjB,IAAM,WAA0B;AAahC,IAAM,iBAAsC,SAAS;AAAA,EAC1D,CAAC,UACC,UAAU,QAAQ,OAAO,UAAU,YAAY,CAAC,MAAM,QAAQ,KAAK;AAAA,EACrE;AACF;;;AC/BA,SAAS,cAAc;AAEvB,SAAS,UAAU,QAAQ,UAAU,QAAQ,mBAAmB;AAkBhE,IAAM,iBAAoD,OAAO;AAAA,EAC/D,MAAM,SAAS,OAAO,aAAa;AAAA,EACnC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,MAAM;AACR,CAAC;AAED,IAAM,iBAAoD,OAAO;AAAA,EAC/D,MAAM,SAAS,OAAO,aAAa;AAAA,EACnC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,UAAU;AAAA,EACV,WAAW;AAAA,EACX,MAAM;AAAA,EACN,QAAQ,SAAS,SAAS,KAAK,CAAC;AAAA,EAChC,WAAW,SAAS,MAAM;AAC5B,CAAC;AAED,IAAM,eAAgD,OAAO;AAAA,EAC3D,MAAM,SAAS,OAAO,WAAW;AAAA,EACjC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,UAAU;AAAA,EACV,WAAW;AAAA,EACX,QAAQ,SAAS,SAAS,KAAK,CAAC;AAAA,EAChC,WAAW,SAAS,MAAM;AAC5B,CAAC;AAED,IAAM,cAA8C,OAAO;AAAA,EACzD,MAAM,SAAS,OAAO,UAAU;AAAA,EAChC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,UAAU;AAAA,EACV,WAAW;AAAA,EACX,QAAQ,SAAS,SAAS,KAAK,CAAC;AAAA,EAChC,WAAW,SAAS,MAAM;AAC5B,CAAC;AAED,IAAM,mBAAwD,OAAO;AAAA,EACnE,MAAM,SAAS,OAAO,eAAe;AAAA,EACrC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,UAAU;AAAA,EACV,WAAW;AAAA,EACX,MAAM;AAAA,EACN,QAAQ,SAAS,SAAS,KAAK,CAAC;AAAA,EAChC,WAAW,SAAS,MAAM;AAC5B,CAAC;AAED,IAAM,eAAgD,OAAO;AAAA,EAC3D,MAAM,SAAS,OAAO,WAAW;AAAA,EACjC,MAAM;AAAA,EACN,IAAI;AACN,CAAC;AAED,IAAM,iBAAoD,OAAO;AAAA,EAC/D,MAAM,SAAS,OAAO,cAAc;AAAA,EACpC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,WAAW;AACb,CAAC;AAED,IAAM,oBAA0D,OAAO;AAAA,EACrE,MAAM,SAAS,OAAO,iBAAiB;AAAA,EACvC,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,KAAK;AACP,CAAC;AAEM,IAAM,KAA4B,YAAY,QAAQ;AAAA,EAC3D,CAAC,OAAO,aAAa,GAAG;AAAA,EACxB,CAAC,OAAO,aAAa,GAAG;AAAA,EACxB,CAAC,OAAO,WAAW,GAAG;AAAA,EACtB,CAAC,OAAO,UAAU,GAAG;AAAA,EACrB,CAAC,OAAO,eAAe,GAAG;AAAA,EAC1B,CAAC,OAAO,WAAW,GAAG;AAAA,EACtB,CAAC,OAAO,cAAc,GAAG;AAAA,EACzB,CAAC,OAAO,iBAAiB,GAAG;AAC9B,CAAC;;;ACjGD,SAAS,YAAY;AAOd,IAAM,cAAc,KAAK,WAAiB;AAE1C,IAAM,eAAe;;;AHmB5B,IAAM,0BACJC,QAAO;AAAA,EACL,MAAMC,UAAS,cAAc,eAAe;AAAA,EAC5C,MAAM;AAAA,EACN,aAAaC,UAAS,MAAM;AAC9B,CAAC;AAEH,IAAM,0BAAkEF,QAAO;AAAA,EAC7E,MAAMC,UAAS,cAAc,eAAe;AAAA,EAC5C,OAAO;AACT,CAAC;AAED,IAAM,wBAAwDD,QAAO;AAAA,EACnE,MAAMC,UAAS,cAAc,aAAa;AAC5C,CAAC;AAED,IAAM,yBAA0DD,QAAO;AAAA,EACrE,MAAMC,UAAS,cAAc,cAAc;AAAA,EAC3C,KAAK,MAAM,EAAE;AACf,CAAC;AAED,IAAM,qBAAkDD,QAAO;AAAA,EAC7D,MAAMC,UAAS,cAAc,UAAU;AAAA,EACvC,QAAQE,QAAO,WAAoB;AAAA,EACnC,MAAMD,UAAS,WAAW;AAAA;AAAA,EAC1B,IAAIA,UAAS,OAAO;AACtB,CAAC;AAED,IAAM,sBAAoDF,QAAO;AAAA,EAC/D,MAAMC,UAAS,cAAc,WAAW;AAAA,EACxC,QAAQE,QAAO,WAAoB;AAAA,EACnC,MAAMD,UAAS,WAAW;AAAA;AAAA,EAC1B,IAAIA,UAAS,OAAO;AACtB,CAAC;AAEM,IAAM,mBACXE,aAAY,QAAQ;AAAA,EACl
B,CAAC,cAAc,eAAe,GAAG;AAAA,EACjC,CAAC,cAAc,eAAe,GAAG;AAAA,EACjC,CAAC,cAAc,aAAa,GAAG;AAAA,EAC/B,CAAC,cAAc,cAAc,GAAG;AAAA,EAChC,CAAC,cAAc,UAAU,GAAG;AAAA,EAC5B,CAAC,cAAc,WAAW,GAAG;AAC/B,CAAC,EAAE,SAAS,gCAAgC;AAEvC,IAAM,4BACXA,aAAY,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlB,CAAC,cAAc,cAAc,GAAG;AAAA;AAAA;AAGlC,CAAC,EAAE,SAAS,0CAA0C;;;AIjFxD,SAAS,gBAAgB;AAelB,SAAS,0BACdC,WACY;AACZ,MAAI;AACF,WAAO,YAAYA,WAAU,QAAQA,UAAS,SAAS,EAAE,IAAI;AAAA,EAC/D,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;AAEA,SAAS,UAAUA,WAA6B,IAAkB;AAChE,QAAM,OAAOA,UAAS,SAAS,EAAE;AACjC,MAAI,KAAK,SAAS,SAAS,QAAQ;AACjC,WAAO,YAAYA,WAAU,IAAI,KAAK,IAAI;AAAA,EAC5C,WAAW,KAAK,SAAS,SAAS,MAAM;AACtC,WAAO,UAAUA,WAAU,EAAE;AAAA,EAC/B,WAAW,KAAK,SAAS,SAAS,KAAK;AACrC,WAAO,SAASA,WAAU,EAAE;AAAA,EAC9B,OAAO;AACL,WAAO,KAAK;AAAA,EACd;AACF;AAEA,SAAS,YACPA,WACA,IACA,YACY;AACZ,QAAM,OAAO,OAAO,OAAO,uBAAO,OAAO,IAAI,GAAG,UAAU;AAC1D,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,SAAK,GAAG,IAAI,UAAUA,WAAU,OAAO;AAAA,EACzC;AACA,SAAO;AACT;AAEA,SAAS,UAAUA,WAA6B,IAAoB;AAClE,QAAM,OAAe,CAAC;AACtB,aAAW,CAAC,GAAG,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACrD,SAAK,KAAK,UAAUA,WAAU,OAAO,CAAC;AAAA,EACxC;AACA,SAAO;AACT;AAEA,SAAS,SAASA,WAA6B,IAAwB;AACrE,QAAM,OAAO,uBAAO,OAAO,IAAI;AAC/B,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,SAAK,GAAG,IAAI,UAAUA,WAAU,OAAO;AAAA,EACzC;AACA,SAAO;AACT;AAmBO,UAAU,yBACfA,WACW;AACX,MAAI;AACF,UAAM,aAAa,KAAK,UAAUA,UAAS,SAAS,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AACvE,WAAO,WAAWA,WAAU,QAAQ,UAAU;AAAA,EAChD,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;AAEA,UAAU,KAAKA,WAA6B,IAAuB;AACjE,QAAM,OAAOA,UAAS,SAAS,EAAE;AACjC,MAAI,KAAK,SAAS,SAAS,QAAQ;AACjC,WAAO,WAAWA,WAAU,IAAI,KAAK,UAAU,KAAK,IAAI,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,EACxE,WAAW,KAAK,SAAS,SAAS,MAAM;AACtC,WAAO,SAASA,WAAU,EAAE;AAAA,EAC9B,WAAW,KAAK,SAAS,SAAS,KAAK;AACrC,WAAO,QAAQA,WAAU,EAAE;AAAA,EAC7B,WAAW,KAAK,SAAS,SAAS,UAAU;AAC1C,UAAM,KAAK,UAAU,KAAK,IAAI;AAAA,EAChC;AACF;AAWA,UAAU,WACRA,WACA,IACA,YACW;AACX,MAAI,QAAQ,WAAW,SAAS;AAEhC,QAAM;AACN,QAAM;AAEN,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AAEb,UAAM,GAAG,KAAK,UAAU,GAAG,CAAC;AAC5B,WAAO,KAAKA,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;AAEA,UAAU,SAASA,WAA6B,IAAuB;AACrE,MAAI,QAAQ;AAEZ,QAAM;AACN,aAAW,CAAC,GAAG,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACrD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AACb,WAAO,KAAKA,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;AAEA,UAAU,QAAQA,WAA6B,IAAuB;AACpE,MAAI,QAAQ;AAEZ,QAAM;AACN,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AAEb,UAAM,GAAG,KAAK,UAAU,GAAG,CAAC;AAC5B,WAAO,KAAKA,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;;;ACzJO,UAAU,qBACfC,WACqC;AACrC,MAAI;AACF,WAAOA,UAAS,SAAS;AAAA,EAC3B,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;;;ACJA;AAAA,EACE;AAAA,EACA,YAAAC;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAIP,IAAM,wBAAwB;AAE9B,SAAS,WAAW,OAA0B;AAC5C,SAAO,GAAG,qBAAqB,IAAI,MAAM,OAAO;AAClD;AAEA,SAAS,wBACP,OACyD;AACzD,SAAO,aAAa,KAAK,KAAK,MAAM,mBAAmB;AACzD;AAMA,UAAU,SACR,KACA,MACA,QACA,OACyC;AACzC,MAAI,wBAAwB,IAAI,GAAG;AACjC,YAAQ,KAAK,gBAAgB;AAAA,MAC3B,KAAK;AACH,eAAO,gBAAgB,KAAK,KAAK,MAAM,QAAQ,KAAK;AACpD;AAAA,MAEF,KAAK;AACH,eAAO,SAAS,KAAK,KAAK,MAAM,QAAQ,KAAK;AAC7C;AAAA,MAEF,KAAK;AACH,eAAO,QAAQ,KAAK,KAAK,MAAM,QAAQ,KAAK;AAC5C;AAAA,MAGF;AACE,oBAAY,MAAM,gCAAgC;AAAA,IACtD;AAAA,EACF,OAAO;AACL,UAAM;AAAA,MACJ,WAAW,KAAK;AAAA,MAChB;AAAA,QACE,MAAMA,UAAS;AAAA,QACf;AAAA,QACA,UAAU,OAAO,CAAC;AAAA,QAClB,WAAW;AAAA,MACb;AAAA,IACF;AAAA,EACF;AACF;AAMA,UAAU,QACR,KACA,KACA,QACA,OACyC;AACzC,QAAM,WAAwB;AAAA,IAC5B,WAAW,KAAK;AAAA,IAChB,EAAE,MAAMA,UAAS,KAAK,UAAU,OAAO,CAAC,GAAG,WAAW,IAAI;AAAA,EAC5D;AAGA,QAAM;AAGN,aAAW,CAAC,QAAQ,QAAQ,KAAK,OAAO,QAAQ,GAAG,GAAG;AACpD,WA
AO,SAAS,QAAQ,UAAU,UAAU,KAAK;AAAA,EACnD;AACF;AAMA,UAAU,SACR,KACA,MACA,QACA,OACyC;AACzC,QAAM,KAAK,WAAW,KAAK;AAC3B,QAAM,OAAuB;AAAA,IAC3B,MAAMA,UAAS;AAAA,IACf,UAAU,OAAO,CAAC;AAAA,IAClB,WAAW;AAAA,EACb;AACA,QAAM,YAAyB,CAAC,IAAI,IAAI;AAGxC,QAAM;AAGN,MAAI,WAAW,aAAa;AAC5B,aAAW,YAAY,MAAM;AAC3B,WAAO,SAAS,UAAU,UAAU,WAAW,KAAK;AACpD,eAAW,aAAa,QAAQ;AAAA,EAClC;AACF;AAUA,UAAU,gBACR,KACA,OACA,QACA,OACyC;AAEzC,QAAM,OAAmB,CAAC;AAC1B,QAAM,kBAA8C,CAAC;AAErD,aAAW,CAAC,QAAQ,QAAQ,KAAK,OAAO,QAAQ,KAAK,GAAG;AACtD,QAAI,wBAAwB,QAAQ,GAAG;AACrC,sBAAgB,KAAK,CAAC,QAAQ,QAAQ,CAAC;AAAA,IACzC,OAAO;AACL,WAAK,MAAM,IAAI;AAAA,IACjB;AAAA,EACF;AAGA,QAAM,cACJ,WAAW,OACP;AAAA,IACE,WAAW,KAAK;AAAA,IAChB;AAAA,MACE,MAAMA,UAAS;AAAA,MACf;AAAA,MACA,UAAU,OAAO,CAAC;AAAA,MAClB,WAAW;AAAA,IACb;AAAA,EACF,IACA,CAAC,QAAQ,EAAE,MAAMA,UAAS,QAAQ,KAAK,CAAC;AAG9C,QAAM;AAGN,aAAW,CAAC,QAAQ,QAAQ,KAAK,iBAAiB;AAChD,WAAO,SAAS,QAAQ,UAAU,aAAa,KAAK;AAAA,EACtD;AACF;AAOO,UAAU,sBACf,MACyC;AACzC,QAAM,QAAQ,EAAE,OAAO,EAAE;AACzB,SAAO,gBAAgB,QAAQ,KAAK,MAAM,MAAM,KAAK;AACvD;AAYO,SAAS,0BACdC,WACiB;AACjB,MAAI;AACF,WAAOC,aAAYD,WAAU,QAAQA,UAAS,SAAS,EAAE,IAAI;AAAA,EAC/D,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;AAEA,SAASE,WAAUF,WAA6B,IAAuB;AACrE,QAAM,OAAOA,UAAS,SAAS,EAAE;AACjC,MAAI,KAAK,SAASD,UAAS,QAAQ;AACjC,WAAOE,aAAYD,WAAU,IAAI,KAAK,IAAI;AAAA,EAC5C,WAAW,KAAK,SAASD,UAAS,MAAM;AACtC,WAAOI,WAAUH,WAAU,EAAE;AAAA,EAC/B,WAAW,KAAK,SAASD,UAAS,KAAK;AACrC,WAAOK,UAASJ,WAAU,EAAE;AAAA,EAC9B,OAAO;AACL,WAAO,KAAK;AAAA,EACd;AACF;AAEA,SAASC,aACPD,WACA,IACA,YACiB;AAEjB,QAAM,OAAwB,OAAO;AAAA,IACnC,uBAAO,OAAO,IAAI;AAAA,IAClB;AAAA,EACF;AACA,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,SAAK,GAAG,IAAIE,WAAUF,WAAU,OAAO;AAAA,EACzC;AACA,SAAO,EAAE,gBAAgB,cAAc,KAAK;AAC9C;AAEA,SAASG,WAAUH,WAA6B,IAA2B;AACzE,QAAM,OAAoB,CAAC;AAC3B,aAAW,CAAC,GAAG,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACrD,SAAK,KAAKE,WAAUF,WAAU,OAAO,CAAC;AAAA,EACxC;AACA,SAAO,EAAE,gBAAgB,YAAY,KAAK;AAC5C;AAEA,SAASI,UAASJ,WAA6B,IAA0B;AACvE,QAAM,OAAO,uBAAO,OAAO,IAAI;AAC/B,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,SAAK,GAAG,IAAIE,WAAUF,WAAU,OAAO;AAAA,EACzC;AACA,SAAO,EAAE,gBAAgB,WAAW,KAAK;AAC3C;AAeO,UAAU,yBACfA,WACW;AACX,MAAI;AACF,UAAM,aAAa,KAAK,UAAUA,UAAS,SAAS,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AACvE,WAAOK,YAAWL,WAAU,QAAQ,UAAU;AAAA,EAChD,UAAE;AACA,IAAAA,UAAS,QAAQ;AAAA,EACnB;AACF;AAEA,UAAUM,MAAKN,WAA6B,IAAuB;AACjE,QAAM,OAAOA,UAAS,SAAS,EAAE;AACjC,MAAI,KAAK,SAASD,UAAS,QAAQ;AACjC,WAAOM,YAAWL,WAAU,IAAI,KAAK,UAAU,KAAK,IAAI,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,EACxE,WAAW,KAAK,SAASD,UAAS,MAAM;AACtC,WAAOQ,UAASP,WAAU,EAAE;AAAA,EAC9B,WAAW,KAAK,SAASD,UAAS,KAAK;AACrC,WAAOS,SAAQR,WAAU,EAAE;AAAA,EAC7B,WAAW,KAAK,SAASD,UAAS,UAAU;AAC1C,UAAM,KAAK,UAAU,KAAK,IAAI;AAAA,EAChC;AACF;AAWA,UAAUM,YACRL,WACA,IACA,YACW;AACX,MAAI,QAAQ,WAAW,SAAS;AAEhC,QAAM;AACN,QAAM;AAEN,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AAEb,UAAM,GAAG,KAAK,UAAU,GAAG,CAAC;AAC5B,WAAOM,MAAKN,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;AAEA,UAAUO,UAASP,WAA6B,IAAuB;AACrE,MAAI,QAAQ;AAEZ,QAAM;AACN,aAAW,CAAC,GAAG,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACrD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AACb,WAAOM,MAAKN,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;AAEA,UAAUQ,SAAQR,WAA6B,IAAuB;AACpE,MAAI,QAAQ;AAEZ,QAAM;AACN,aAAW,CAAC,KAAK,OAAO,KAAKA,UAAS,cAAc,EAAE,GAAG;AACvD,QAAI,MAAO,OAAM;AAAA,QACZ,SAAQ;AAEb,UAAM,GAAG,KAAK,UAAU,GAAG,CAAC;AAC5B,WAAOM,MAAKN,WAAU,OAAO;AAAA,EAC/B;AACA,QAAM;AACR;;;AC/UA,SAAS,YAAAS,WAAU,mBAAmB,UAAU;;;ACRhD,SAAS,aAAa;AAjBtB;AAgCO,IAAM,aAAN,cAA+B,IAAU;AAAA;AAAA;AAAA;AAAA;AAAA,EAO9C,YACE,WACA,SACA;AACA,UAAM,OAAO;AAVf;AAWE,uBAAK,YAAa;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,YAAY,KAA
Q,WAA8B;AAChD,QAAI,MAAM,IAAI,GAAG,GAAG;AAElB,aAAO,MAAM,IAAI,GAAG;AAAA,IACtB,OAAO;AACL,YAAM,KACJ,aACA,mBAAK,eACL,MAAM,4CAA4C;AAEpD,YAAM,QAAQ,GAAG,GAAG;AACpB,WAAK,IAAI,KAAK,KAAK;AACnB,aAAO;AAAA,IACT;AAAA,EACF;AACF;AApCE;;;ACdF,SAAS,gBAAyC;AAChD,SAAO,CAAC,EAAE,OAAO,QAAQ,EAAE;AAC7B;AArBA;AA2BO,IAAM,YAAN,MAA2B;AAAA,EAGhC,cAAc;AAFd;AAGE,uBAAK,MAAO,IAAI,WAAW,MAAM,oBAAI,IAAW,CAAC;AAAA,EACnD;AAAA,EAEA,IAAI,OAAe;AACjB,QAAI,QAAQ;AACZ,eAAW,SAAS,mBAAK,MAAK,OAAO,GAAG;AACtC,eAAS,MAAM;AAAA,IACjB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,MAAkB;AACtB,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,QAAQ;AAAA,EACtC;AAAA,EAEA,CAAC,OAAmC;AAClC,eAAW,CAAC,MAAM,MAAM,KAAK,mBAAK,OAAM;AACtC,iBAAW,QAAQ,OAAO,KAAK,GAAG;AAChC,cAAM,CAAC,MAAM,IAAI;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,MAAU,MAAmB;AAC/B,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,KAAK;AAAA,EAC3C;AAAA,EAEA,IAAI,MAAU,MAAyB;AACrC,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI;AAAA,EACtC;AAAA,EAEA,IAAI,MAAU,MAAU,OAAgB;AACtC,uBAAK,MAAK,YAAY,IAAI,EAAE,IAAI,MAAM,KAAK;AAC3C,WAAO;AAAA,EACT;AAAA,EAEA,OAAO,MAAU,MAAgB;AAC/B,QAAI,CAAC,mBAAK,MAAK,IAAI,IAAI,GAAG;AACxB;AAAA,IACF;AAEA,UAAM,SAAS,mBAAK,MAAK,IAAI,IAAI;AACjC,WAAO,OAAO,IAAI;AAClB,QAAI,OAAO,SAAS,GAAG;AACrB,yBAAK,MAAK,OAAO,IAAI;AAAA,IACvB;AAAA,EACF;AAAA,EAEA,QAAc;AACZ,uBAAK,MAAK,MAAM;AAAA,EAClB;AAAA,EAEA,EAAE,OAAO,QAAQ,IAAmC;AAClD,eAAW,CAAC,MAAM,MAAM,KAAK,mBAAK,OAAM;AACtC,iBAAW,CAAC,MAAM,KAAK,KAAK,QAAQ;AAClC,cAAM,CAAC,MAAM,MAAM,KAAK;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAAA,EAEA,UAAU,MAAqC;AAC7C,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,QAAQ,KAAK,cAAc;AAAA,EACzD;AAAA,EAEA,CAAC,SAAS,MAAU,MAAuC;AACzD,UAAM,SAAS,mBAAK,MAAK,IAAI,IAAI;AACjC,QAAI,WAAW,QAAW;AACxB;AAAA,IACF;AAEA,eAAW,MAAM,MAAM;AACrB,YAAM,QAAQ,OAAO,IAAI,EAAE;AAC3B,UAAI,UAAU,QAAW;AACvB,cAAM,CAAC,IAAI,KAAK;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,OAAO,MAAgC;AACrC,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,KAAK,KAAK,cAAc;AAAA,EACtD;AAAA,EAEA,SAAS,MAA+B;AACtC,WAAO,mBAAK,MAAK,IAAI,IAAI,GAAG,OAAO,KAAK,cAAc;AAAA,EACxD;AAAA,EAEA,UAAU,MAAgB;AACxB,uBAAK,MAAK,OAAO,IAAI;AAAA,EACvB;AACF;AA5FE;;;AFQK,SAAS,qBACd,QACmB;AACnB,QAAM,MAAe,IAAI,IAA4B,MAAoB;AAEzE,MAAI,CAAC,IAAI,IAAI,MAAM,GAAG;AACpB,QAAI,IAAI,QAAQ,EAAE,MAAMC,UAAS,QAAQ,MAAM,CAAC,EAAE,CAAC;AAAA,EACrD;AAKA,QAAM,UAAoE,CAAC;AAC3E,QAAM,aAAa;AACnB,aAAW,QAAQ,YAAY;AAC7B,QAAI,kBAAkB,IAAI,EAAG;AAC7B,UAAM,CAAC,IAAI,IAAI,IAAI;AACnB,YAAQ,KAAK,CAAC,KAAK,UAAU,KAAK,WAAW,EAAE,CAAC;AAAA,EAClD;AACA,UAAQ;AAAA,IAAK,CAAC,GAAG,MACf,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI;AAAA,EAC5E;AAEA,QAAM,SAAS,IAAI,UAAkC;AACrD,aAAW,CAAC,UAAU,WAAW,EAAE,KAAK,SAAS;AAC/C,WAAO,IAAI,UAAU,WAAW,EAAE;AAAA,EACpC;AAEA,WAAS,SAAS,IAA6B;AAC7C,WAAO,GAAG,IAAI,IAAI,EAAE,GAAG,mBAAmB,EAAE,EAAE;AAAA,EAChD;AAEA,SAAO;AAAA,IACL,UAAU,MACR;AAAA,MACE,IAAI,IAAI,MAAM;AAAA,MACd;AAAA,IACF;AAAA,IACF;AAAA,IACA,eAAe,CAAC,WAAW,OAAO,UAAU,MAAM;AAAA,IAClD,UAAU,MAAM;AAAA,IAChB,UAAU;AACR,UAAI,MAAM;AACV,aAAO,MAAM;AAAA,IACf;AAAA,EACF;AACF;;;AG/CO,SAAS,eAAe,QAAoC;AAMjE,iBAAe,IACb,IACA,IAC+B;AAC/B,QAAI,OAAO,QAAW;AACpB,aAAO,MAAM,OAAO,SAAS,EAAY;AAAA,IAC3C,OAAO;AACL,aAAQ,GAAkB,MAAM,MAAM,OAAO,SAAS,EAAE,CAAC;AAAA,IAC3D;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,KAAK,OAAO,SAAS,KAAK,MAAM;AAAA,IAChC,QAAQ,OAAO,YAAY,KAAK,MAAM;AAAA,EACxC;AACF;;;ACxCA,SAAS,aAAa;AAEf,IAAK,kBAAL,kBAAKC,qBAAL;AA2CL,EAAAA,kCAAA,QAAK,KAAL;AAkBA,EAAAA,kCAAA,QAAK,KAAL;AA7DU,SAAAA;AAAA,GAAA;AAgEL,IAAM,yBAAyB,MAAM,eAAe,EAAE;AAAA,EAC3D;AACF;;;AC7DA;AAAA,EACE,eAAAC;AAAA,EACA,iBAAAC;AAAA,EACA;AAAA,EACA,UAAAC;AAAA,EACA,SAAAC;AAAA,EACA,iBAAAC;AAAA,EACA;AAAA,EACA,uBAAuB;AAAA,OAClB;AACP,SAAS,aAAa
;AACtB,SAAS,SAAAC,QAAO,oBAAoB;AACpC,SAAS,eAAe;AACxB,SAAS,UAAAC,eAAc;;;ACnBvB,SAAS,SAAAC,cAAa;AAEf,IAAK,WAAL,kBAAKC,cAAL;AACL,EAAAA,oBAAA,WAAQ,KAAR;AACA,EAAAA,oBAAA,UAAO,KAAP;AACA,EAAAA,oBAAA,aAAU,KAAV;AACA,EAAAA,oBAAA,WAAQ,KAAR;AAJU,SAAAA;AAAA,GAAA;AAOZ,SAAS,YAAY,KAAY;AAC/B,QAAM,SAAS,GAAG,IAAI,IAAI,KAAK,IAAI,OAAO;AAC1C,UACE,IAAI,OAAO,WAAW,MAAM,IAAI,IAAI,QAAQ,GAAG,MAAM;AAAA,EAAK,IAAI,SAAS,EAAE,IACzE,QAAQ;AACZ;AAhCA;AAsCO,IAAe,YAAf,MAAyB;AAAA,EAK9B,YAAY,QAA+C,cAAe;AAJ1E,wBAAgB;AAEhB,+BAAS,oBAAI,QAA4B;AAGvC,SAAK,QACH,OAAO,UAAU,WACb,QACC,cAAc,KAAK,KAAK;AAAA,EACjC;AAAA;AAAA,EAGU,YAAY,OAAyB;AAC7C,YAAQ,OAAO;AAAA,MACb,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT;AACE,eAAOC,OAAM,mBAAmB;AAAA,IACpC;AAAA,EACF;AAAA;AAAA,EAGU,UAAU,KAA6B;AAC/C,WAAO,OAAO,QAAQ,WAClB,eAAe,QACb,YAAY,GAAG,IACf,KAAK,UAAU,GAAG,IACpB,OAAO,GAAG;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMU,kBAAkB,SAA6B;AACvD,UAAM,QAAQ,CAAC;AACf,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,WAAW,CAAC,CAAC,GAAG;AAClD,UAAI,MAAM,QAAW;AAEnB,cAAM,KAAK,OAAO,MAAM,WAAW,KAAK,UAAU,CAAC,IAAI;AACvD,cAAM,KAAK,GAAG,CAAC,IAAI,EAAE,EAAE;AAAA,MACzB;AAAA,IACF;AACA,WAAO,MAAM,SAAS,IAAI,IAAI,MAAM,KAAK,GAAG,CAAC,MAAM;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOU,cAAc,SAA6B;AACnD,QAAI,YAAY,mBAAK,QAAO,IAAI,OAAO;AACvC,QAAI,cAAc,QAAW;AAC3B,kBAAY,KAAK,kBAAkB,OAAO;AAC1C,yBAAK,QAAO,IAAI,SAAS,SAAS;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AASF;AAvEE;AA6EF,IAAM,iBAAiB;AAAA,EACrB,CAAC,aAAc,GAAG;AAAA,EAClB,CAAC,YAAa,GAAG;AAAA,EACjB,CAAC,eAAgB,GAAG;AAAA,EACpB,CAAC,aAAc,GAAG;AACpB;AAEO,IAAM,gBAAN,cAA4B,UAAU;AAAA,EAC3C,IAAI,OAAiB,SAAqB,KAA2B;AACnE,YAAQ,eAAe,KAAK,CAAC;AAAA,MAC3B,KAAK,UAAU,GAAG;AAAA,MAClB,KAAK,cAAc,OAAO;AAAA,IAC5B;AAAA,EACF;AACF;AAOA,IAAM,gBAAgB;AAAA,EACpB,OAAO;AAAA,EACP,MAAM;AAAA,EACN,SAAS;AAAA,EACT,OAAO;AACT;AAOO,IAAM,SAAN,MAAM,QAAO;AAAA,EAgBlB,YACE,SAA2C,IAAI,cAAc,GAC7D,UAAsB,CAAC,GACvB;AAlBF,wBAAgB;AAChB,wBAAgB;AAChB,wBAAgB;AAChB,wBAAgB;AAEhB,wBAAgB;AAOhB,wBAAiB;AACjB,wBAAiB;AAMf,SAAK,WAAW;AAChB,SAAK,WAAW,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAExD,UAAM,WAAmB,KAAK,IAAI,GAAG,KAAK,SAAS,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAEtE,UAAM,OAAO,MAAM;AAAA,IAAC;AACpB,UAAM,YAAY,CAAC,QAAkB,CAAC,QACpC,KAAK,SAAS,QAAQ,CAACC,YAAW;AAChC,UAAIA,QAAO,SAAS,KAAK;AACvB,QAAAA,QAAO,IAAI,KAAK,KAAK,UAAU,GAAG;AAAA,MACpC;AAAA,IACF,CAAC;AAEH,SAAK,IAAI;AAAA;AAAA,MAEP,OAAO,YAAY,gBAAiB,UAAU,aAAc,IAAI;AAAA,MAChE,MAAM,YAAY,eAAgB,UAAU,YAAa,IAAI;AAAA,MAC7D,MACE,YAAY,kBAAmB,UAAU,eAAgB,IAAI;AAAA,MAC/D,OAAO,YAAY,gBAAiB,UAAU,aAAc,IAAI;AAAA;AAAA,IAElE;AAEA,SAAK,QAAQ,KAAK,EAAE,SAAS;AAC7B,SAAK,OAAO,KAAK,EAAE,QAAQ;AAC3B,SAAK,OAAO,KAAK,EAAE,QAAQ;AAC3B,SAAK,QAAQ,KAAK,EAAE,SAAS;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,YAAY,OAA2B;AACrC,UAAM,WAAuB,EAAE,GAAG,KAAK,UAAU,GAAG,MAAM;AAC1D,WAAO,IAAI,QAAO,KAAK,UAAU,QAAQ;AAAA,EAC3C;AACF;;;ACpLA,SAAS,OAAO,YAAAC,WAAU,qBAAAC,oBAAmB,MAAAC,WAAU;AACvD,SAAS,SAAS,YAAY;;;ACHvB,SAAS,MAAM,OAAmC;AACvD,SAAO,UAAU,SAAY,IAAI,KAAK,MAAM;AAC9C;;;ADeA,SAAS,cAAc,YAAwB;AAC7C,QAAM,SAAS,IAAI,UAAkC;AACrD,aAAW,QAAQ,YAAY;AAC7B,QAAIC,mBAAkB,IAAI,EAAG;AAG7B,UAAM,CAAC,IAAI,IAAI,IAAI;AACnB,UAAM,WAAW,OAAO,IAAI,KAAK,UAAU,KAAK,SAAS;AACzD,QAAI,aAAa,UAAa,KAAK,UAAU;AAC3C,aAAO,IAAI,KAAK,UAAU,KAAK,WAAW,EAAE;AAAA,IAC9C;AAAA,EACF;AACA,SAAO;AACT;AAMA,SAAS,mBAAmB,OAAgB;AAC1C,QAAM,WAAW,cAAc,KAAmB;AAElD,QAAM,QAAkB,CAAC,MAAM;AAC/B,QAAM,iBAA8B,oBAAI,IAAI;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,SAAS,MAAM,IAAI;AACzB,UAAM,OAAOC,IAAG,MAAM,IAAI,MAAM,CAAC;AAEjC,QAAI,KAAK,SAASC,UAAS,QAAQ;AACjC,iBAAW,OAAO,SAAS,OAAO,MAAM,GAAG;AACzC,eAAO,KAAK,KAAK,GAAG;AAAA,MACtB;AAAA,IACF;AAEA,QAAI,KAAK,SAASA,UAAS,UAAU;A
ACnC,YAAM,KAAK,GAAG,SAAS,SAAS,MAAM,CAAC;AAAA,IACzC,OAAO;AACL,YAAM,SAAS,MAAM,IAAI,KAAK,QAAQ;AACtC,UAAI,QAAQ,SAASA,UAAS,QAAQ;AACpC;AAAA,MACF;AAAA,IACF;AAEA,mBAAe,IAAI,MAAM;AAAA,EAC3B;AAGA,MAAI,eAAe;AACnB,aAAW,CAAC,EAAE,KAAK,OAAO;AACxB,QAAI,CAAC,eAAe,IAAI,EAAE,GAAG;AAC3B,YAAM,OAAO,EAAE;AACf;AAAA,IACF;AAAA,EACF;AAIA,SAAO,iBAAiB,IAAI,WAAW,cAAc,KAAmB;AAC1E;AAEA,SAAS,gBACP,MACA,KACiD;AACjD,SACE,KAAK,SAASA,UAAS,UACvB,OAAO,UAAU,eAAe,KAAK,KAAK,MAAM,GAAG,KACnD,KAAK,KAAK,GAAG,MAAM;AAEvB;AAMO,IAAM,iBAAN,MAA+C;AAAA,EAMpD,YAAY,SAGT;AARH,wBAAQ;AACR,wBAAQ;AACR,wBAAQ;AACR,wBAAQ;AAMN,SAAK,SAAS,oBAAI,IAAI;AACtB,SAAK,UAAU,oBAAI,IAAI;AACvB,SAAK,OAAO,oBAAI,IAAI;AAEpB,SAAK,aAAa,SAAS,gBAAgB;AAE3C,eAAW,CAAC,KAAK,KAAK,KAAK,SAAS,gBAAgB,CAAC,GAAG;AACtD,WAAK,OAAO,IAAI,KAAK,KAAK;AAAA,IAC5B;AAAA,EACF;AAAA,EAEA,iBAAiB;AACf,WAAO,KAAK,OAAO,OAAO,QAAQ,EAAE;AAAA,EACtC;AAAA;AAAA,EAGA,wBAAwB,KAAsB;AAC5C,SAAK,OAAO,MAAM;AAClB,eAAW,CAAC,IAAI,IAAI,KAAK,sBAAsB,GAAG,GAAG;AACnD,WAAK,OAAO,IAAI,IAAI,IAAI;AAAA,IAC1B;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,KAAa;AAC1B,WAAO,KAAK,QAAQ,IAAI,GAAG;AAAA,EAC7B;AAAA,EACA,MAAM,SAAS,KAAa,OAAa;AACvC,SAAK,QAAQ,IAAI,KAAK,KAAK;AAAA,EAC7B;AAAA,EACA,MAAM,YAAY,KAAa;AAC7B,SAAK,QAAQ,OAAO,GAAG;AAAA,EACzB;AAAA,EAEA,aAAa;AACX,WAAO,EAAE,KAAK;AAAA,EAChB;AAAA,EAEA,MAAM,eAAe,OAAe;AAClC,UAAM,SAAS,GAAG,KAAK;AACvB,WAAO;AAAA,MACL,QAAQ,KAAK,KAAK,QAAQ,GAAG,CAAC,CAAC,CAAC,MAAM,EAAE,WAAW,MAAM,CAAC;AAAA,MAC1D,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,MAAM,OAAO,MAAM,GAAG,CAAC;AAAA,IACxC;AAAA,EACF;AAAA,EACA,MAAM,gBAAgB,OAAe,KAAa,MAAkB;AAClE,SAAK,KAAK,IAAI,GAAG,KAAK,MAAM,GAAG,IAAI,IAAI;AAAA,EACzC;AAAA,EACA,MAAM,iBAAiB,OAAe,MAAgB;AACpD,eAAW,OAAO,MAAM;AACtB,WAAK,KAAK,OAAO,GAAG,KAAK,MAAM,GAAG,EAAE;AAAA,IACtC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,iCAAiC;AACrC,SAAK,KAAK,MAAM;AAAA,EAClB;AAAA;AAAA,EAGA,iBAAwC;AAGtC,UAAM,QAAQ,KAAK;AACnB,QAAI,CAAC,MAAM,IAAI,MAAM,GAAG;AACtB,YAAM,IAAI,QAAQ,EAAE,MAAMA,UAAS,QAAQ,MAAM,CAAC,EAAE,CAAC;AAAA,IACvD;AAEA,UAAM,WAAW,mBAAmB,KAAK;AAEzC,aAAS,iBAAiB,UAAkB,KAA2B;AACrE,UAAI;AAEJ,iBAAW,cAAc,SAAS,OAAO,QAAQ,GAAG;AAClD,cAAM,aAAa,MAAM,UAAU;AACnC,YACE,aAAa,QACZ,YAAY,UAAa,aAAa,UACvC;AACA,oBAAU;AAAA,QACZ;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAMA,mBAAe,UACb,IACA,MACA,iBAAiB,OACF;AACf,YAAM,aAAa,MAAM,IAAI,KAAK,QAAQ;AAE1C,UAAI,eAAe,QAAW;AAC5B,cAAM,IAAI,MAAM,kBAAkB,MAAM,KAAK,QAAQ,CAAC,EAAE;AAAA,MAC1D;AAEA,UACE,KAAK,SAASA,UAAS,YACvB,WAAW,SAASA,UAAS,QAC7B;AACA,cAAM,IAAI,MAAM,kCAAkC;AAAA,MACpD;AAEA,YAAM,uBAAuB,SAAS,IAAI,KAAK,UAAU,KAAK,SAAS;AACvE,UAAI,yBAAyB,IAAI;AAE/B,cAAMC,cAAa,MAAM,IAAI,KAAK,QAAQ;AAC1C,cAAM,qBACJA,gBAAe,UACf,gBAAgBA,aAAY,KAAK,SAAS;AAC5C,YAAI,yBAAyB,UAAa,oBAAoB;AAC5D,cAAI,gBAAgB;AAClB,6BAAiB,KAAK,UAAU,KAAK,SAAS;AAAA,UAChD,OAAO;AACL,kBAAM,IAAI,MAAM,OAAO,MAAM,KAAK,SAAS,CAAC,iBAAiB;AAAA,UAC/D;AAAA,QACF;AAGA,iBAAS,IAAI,KAAK,UAAU,KAAK,WAAW,EAAE;AAAA,MAChD;AAEA,YAAM,IAAI,IAAI,IAAI;AAAA,IACpB;AAUA,mBAAe,aAAa,IAAY,QAA4B;AAClE,YAAM,OAAO,MAAM,IAAI,EAAE;AACzB,UAAI,MAAM,aAAa,QAAW;AAChC;AAAA,MACF;AAGA,UAAI,SAAS,IAAI,KAAK,UAAU,MAAM;AACpC,cAAM,IAAI,MAAM,OAAO,MAAM,MAAM,CAAC,gBAAgB;AAEtD,eAAS,OAAO,KAAK,UAAU,KAAK,SAAS;AAC7C,YAAM,UAAU,EAAE,GAAG,MAAM,WAAW,OAAO;AAC7C,YAAM,IAAI,IAAI,OAAO;AACrB,eAAS,IAAI,KAAK,UAAU,QAAQ,EAAE;AAAA,IACxC;AAWA,mBAAe,gBACb,IACA,MACA,iBAAiB,OACF;AACf,YAAM,OAAO,MAAM,IAAI,EAAE;AACzB,UAAI,MAAM,SAASD,UAAS,QAAQ;AAElC;AAAA,MACF;AAEA,iBAAW,OAAO,OAAO,KAAK,IAAI,GAAG;AAEnC,cAAM,UAAU,SAAS,IAAI,IAAI,GAAG;AACpC,YAAI,YAAY,QAAW;AACzB,cAAI,gBAAgB;AAClB,wBAAY,OAAO;AAAA,UACrB,OAAO;AACL,kBAAM,IAAI,MAAM,mCAAmC,MAAM,GAAG,CAAC,EAAE;AAAA,UACjE;AAAA,QACF;AAAA,MACF;AAEA,YAAM,IAAI,IAAI,EAAE,GAAG,MAAM,MAAM,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK,EAAE,CAAC;AAAA,IAC5D;
AAKA,aAAS,YAAY,IAAkB;AACrC,YAAM,OAAO,MAAM,IAAI,EAAE;AACzB,UAAI,MAAM,aAAa,QAAW;AAChC;AAAA,MACF;AAGA,eAAS,OAAO,KAAK,UAAU,KAAK,SAAS;AAG7C,YAAM,QAAQ,CAAC,EAAE;AACjB,aAAO,MAAM,SAAS,GAAG;AACvB,cAAM,SAAS,MAAM,IAAI;AACzB,cAAM,KAAK,GAAG,SAAS,SAAS,MAAM,CAAC;AACvC,cAAM,OAAO,MAAM;AACnB,iBAAS,UAAU,MAAM;AAAA,MAC3B;AAAA,IACF;AAMA,aAAS,iBAAiB,IAAY,KAAmB;AAEvD,YAAM,OAAO,MAAM,IAAI,EAAE;AACzB,UAAI,SAAS,UAAa,gBAAgB,MAAM,GAAG,GAAG;AACpD,cAAM,EAAE,CAAC,GAAG,GAAG,GAAG,GAAG,KAAK,IAAI,KAAK;AACnC,cAAM,IAAI,IAAI,EAAE,GAAG,MAAM,MAAM,KAAK,CAAC;AAAA,MACvC;AAEA,YAAM,UAAU,SAAS,IAAI,IAAI,GAAG;AACpC,UAAI,YAAY,QAAW;AACzB,oBAAY,OAAO;AAAA,MACrB;AAAA,IACF;AAEA,UAAM,MAA6B;AAAA;AAAA;AAAA;AAAA;AAAA,MAKjC,UAAU,CAAC,OAAO,MAAM,IAAI,EAAE;AAAA;AAAA;AAAA;AAAA,MAK9B,YAAY,MAAM;AAAA;AAAA;AAAA;AAAA,MAKlB,UAAU,CAAC,OAAO,MAAM,IAAI,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAO9B,cAAc,CAAC,IAAI,QAAQ,SAAS,IAAI,IAAI,GAAG;AAAA;AAAA;AAAA;AAAA;AAAA,MAM/C,cAAc,CAAC,IAAI,QAAQ,SAAS,IAAI,IAAI,GAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAO/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAUA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,aAAa,YAAyC;AACpD,eAAO,qBAAqB,KAAK;AAAA,MACnC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;AAEO,SAAS,sBAAsB,SAGnB;AACjB,SAAO,IAAI,eAAe,OAAO;AACnC;;;AEpbA;AAAA,EACE,SAAAE;AAAA,EACA,eAAAC;AAAA,EACA,YAAAC;AAAA,EACA,gBAAAC;AAAA,EACA,UAAAC;AAAA,OACK;AA6BP,SAAS,OAAOC,KAAkB,KAAyB;AACzD,SAAO,EAAE,QAAQ,YAAY,IAAAA,KAAI,IAAI;AACvC;AAEA,SAAS,OAAO,WAAoC;AAClD,SAAO,EAAE,QAAQ,WAAW,aAAa,UAAU,KAAK;AAC1D;AAEA,SAAS,sBAAsBA,KAA+B;AAC5D,UAAQA,IAAG,MAAM;AAAA,IACf,KAAKC,QAAO;AACV,aAAO;AAAA,QACL,MAAMC,UAAS;AAAA,QACf,UAAUF,IAAG;AAAA,QACb,WAAWA,IAAG;AAAA,MAChB;AAAA,IAEF,KAAKC,QAAO;AACV,aAAO;AAAA,QACL,MAAMC,UAAS;AAAA,QACf,UAAUF,IAAG;AAAA,QACb,WAAWA,IAAG;AAAA,MAChB;AAAA,IAEF,KAAKC,QAAO;AACV,aAAO;AAAA,QACL,MAAMC,UAAS;AAAA,QACf,UAAUF,IAAG;AAAA,QACb,WAAWA,IAAG;AAAA,QACd,MAAMA,IAAG;AAAA,MACX;AAAA,IAEF,KAAKC,QAAO;AACV,aAAO;AAAA,QACL,MAAMC,UAAS;AAAA,QACf,UAAUF,IAAG;AAAA,QACb,WAAWA,IAAG;AAAA,QACd,MAAMA,IAAG;AAAA,MACX;AAAA,IAGF;AACE,aAAOG,aAAYH,KAAI,iBAAiB;AAAA,EAC5C;AACF;AAEO,IAAM,UAAN,MAAc;AAAA,EAMnB,YAAY,YAA4B;AAHxC;AAAA;AAAA,wBAAiB;AACjB,wBAAQ;AAGN,SAAK,aAAa;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,eAAsC;AACxC,QAAI,KAAK,kBAAkB,QAAW;AACpC,YAAM,IAAI,MAAM,4CAA4C;AAAA,IAC9D;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,iBAAgE;AAC9D,WAAO,KAAK,WAAW,eAAe;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAK,QAA+B;AACxC,SAAK,gBAAgB,MAAM,KAAK,WAAW,eAAe,MAAM;AAAA,EAClE;AAAA,EAEA,SAAe;AACb,SAAK,gBAAgB;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAS,KAA+C;AAC5D,UAAM,UAA2B,CAAC;AAClC,eAAWA,OAAM,KAAK;AACpB,cAAQ,KAAK,MAAM,KAAK,QAAQA,GAAE,CAAC;AAAA,IACrC;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,QAAQA,KAA0C;AAC9D,YAAQA,IAAG,MAAM;AAAA,MACf,KAAKC,QAAO;AAAA,MACZ,KAAKA,QAAO;AAAA,MACZ,KAAKA,QAAO;AAAA,MACZ,KAAKA,QAAO;AACV,eAAO,MAAM,KAAK,cAAcD,GAAE;AAAA,MAEpC,KAAKC,QAAO;AACV,eAAO,MAAM,KAAK,oBAAoBD,GAAE;AAAA,MAE1C,KAAKC,QAAO;AACV,eAAO,MAAM,KAAK,oBAAoBD,GAAE;AAAA,MAE1C,KAAKC,QAAO;AACV,eAAO,MAAM,KAAK,uBAAuBD,GAAE;AAAA,MAE7C,KAAKC,QAAO;AACV,eAAO,MAAM,KAAK,kBAAkBD,GAAE;AAAA,MAGxC;AACE,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,iBAAO,OAAOA,GAAE;AAAA,QAClB,OAAO;AACL,iBAAOG,aAAYH,KAAI,YAAY;AAAA,QACrC;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,MAAc,cAAcA,KAAgD;AAC1E,QAAI,KAAK,aAAa,SAASA,IAAG,EAAE,GAAG;AAErC,aAAO,OAAOA,GAAE;AAAA,IAClB;AAEA,UAAM,OAAO,sBAAsBA,GAAE;AAErC,UAAM,SAAS,KAAK,aAAa,SAAS,KAAK,QAAQ;AACvD,QAAI,WAAW,QAAW;AAExB,aAAO,OAAOA,GAAE;AAAA,IACl
B;AAGA,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAKE,UAAS;AAKZ,YAAIF,IAAG,SAASC,QAAO,iBAAiB;AACtC,iBAAO,OAAOD,GAAE;AAAA,QAClB;AAAA,MAGF,KAAKE,UAAS;AAEZ,cAAM,KAAK,aAAa,UAAUF,IAAG,IAAI,MAAM,IAAI;AACnD,eAAO,OAAOA,GAAE;AAAA,MAElB,KAAKE,UAAS;AAGZ,eAAO,KAAK,sBAAsBF,KAAI,IAAI;AAAA,MAE5C,KAAKE,UAAS;AAEZ,eAAO,OAAOF,GAAE;AAAA,MAGlB;AACE,eAAOG,aAAY,QAAQ,qBAAqB;AAAA,IACpD;AAAA,EACF;AAAA,EAEA,MAAc,sBACZH,KACA,MACwB;AACxB,QAAI;AAIJ,UAAM,SAA2BA,IAAG,UAAU;AAG9C,QAAI,WAAW,UAAU;AACvB,YAAM,oBAAoB,MAAM,KAAK,eAAeA,IAAG,IAAI,IAAI;AAO/D,UAAI,sBAAsB,KAAK,WAAW;AACxC,QAAAA,MAAK,EAAE,GAAGA,KAAI,WAAW,kBAAkB;AAC3C,cAAM;AAAA,UACJ,MAAMC,QAAO;AAAA,UACb,IAAID,IAAG;AAAA,UACP,WAAW;AAAA,QACb;AACA,eAAO,OAAOA,KAAI,GAAG;AAAA,MACvB;AAGA,aAAO,OAAOA,GAAE;AAAA,IAClB,WAES,WAAW,OAAO;AAWzB,YAAM,YACJA,IAAG,cAAc,UACjBA,IAAG,cAAcA,IAAG,MACpB,KAAK,aAAa,SAASA,IAAG,SAAS,GAAG,aAAa,KAAK,WACxDA,IAAG,YACH;AAEN,UAAI,cAAc,QAAW;AAC3B,cAAM,KAAK,aAAa,YAAY,SAAS;AAAA,MAC/C;AAEA,YAAM,aAAa,KAAK,aAAa;AAAA,QACnC,KAAK;AAAA,QACL,KAAK;AAAA,MACP;AACA,UAAI,eAAe,UAAa,eAAe,WAAW;AAIxD,cAAM;AAAA,UACJ,MAAMC,QAAO;AAAA,UACb,IAAI;AAAA,QACN;AAAA,MACF;AAEA,YAAM,KAAK,aAAa,UAAUD,IAAG,IAAI,MAAM,IAAI;AAEnD,aAAO,OAAOA,KAAI,GAAG;AAAA,IACvB,OAAO;AACL,aAAOG,aAAY,QAAQ,gBAAgB;AAAA,IAC7C;AAAA,EACF;AAAA,EAEA,MAAc,uBACZH,KACwB;AACxB,UAAM,KAAK,aAAa,iBAAiBA,IAAG,IAAIA,IAAG,GAAG;AACtD,WAAO,OAAOA,GAAE;AAAA,EAClB;AAAA,EAEA,MAAc,oBACZA,KACwB;AACxB,UAAM,KAAK,aAAa,gBAAgBA,IAAG,IAAIA,IAAG,MAAM,IAAI;AAC5D,WAAO,OAAOA,GAAE;AAAA,EAClB;AAAA,EAEA,MAAc,kBACZA,KACwB;AACxB,UAAM,KAAK,aAAa,YAAYA,IAAG,EAAE;AACzC,WAAO,OAAOA,GAAE;AAAA,EAClB;AAAA,EAEA,MAAc,oBACZA,KACwB;AACxB,UAAM,cAAc,MAAM,KAAK,gBAAgBA,IAAG,IAAIA,IAAG,SAAS;AAClE,QAAI,gBAAgB,QAAW;AAE7B,aAAO,OAAOA,GAAE;AAAA,IAClB;AAMA,QAAI,gBAAgBA,IAAG,WAAW;AAChC,YAAM,aAAa,EAAE,GAAGA,KAAI,WAAW,YAAY;AACnD,YAAM,MAAa;AAAA,QACjB,MAAMC,QAAO;AAAA,QACb,IAAID,IAAG;AAAA,QACP,WAAW;AAAA,MACb;AACA,aAAO,OAAO,YAAY,GAAG;AAAA,IAC/B,OAAO;AACL,aAAO,OAAOA,GAAE;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,eACZ,IACA,MACiB;AAEjB,UAAM,MAAM,KAAK,qBAAqB,KAAK,UAAUI,OAAM,KAAK,SAAS,CAAC;AAC1E,QAAI,QAAQ,KAAK,WAAW;AAC1B,aAAO,EAAE,GAAG,MAAM,WAAW,IAAI;AAAA,IACnC;AACA,UAAM,KAAK,aAAa,UAAU,IAAI,IAAI;AAC1C,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAc,gBACZ,IACA,WAC6B;AAC7B,UAAM,OAAO,KAAK,aAAa,SAAS,EAAE;AAC1C,QAAI,MAAM,aAAa,QAAW;AAChC;AAAA,IACF;AAEA,QAAI,KAAK,aAAa,SAAS,KAAK,QAAQ,GAAG,SAASF,UAAS,MAAM;AAErE;AAAA,IACF;AAEA,QAAI,KAAK,cAAc,WAAW;AAEhC,aAAO;AAAA,IACT;AAGA,UAAM,MAAM,KAAK,qBAAqB,KAAK,UAAUE,OAAM,SAAS,CAAC;AACrE,QAAI,QAAQ,KAAK,WAAW;AAC1B,YAAM,KAAK,aAAa,aAAa,IAAI,GAAG;AAAA,IAC9C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,qBAAqB,UAAkB,WAAqB;AAClE,QAAI,CAAC,KAAK,aAAa,aAAa,UAAU,SAAS,GAAG;AACxD,aAAO;AAAA,IACT;AAEA,UAAM,UAAU;AAChB,UAAM,UAAU,KAAK,aAAa,iBAAiB,UAAU,OAAO;AACpE,QAAI,YAAY,QAAW;AACzB,aAAOC,cAAa,SAAS,OAAO;AAAA,IACtC,OAAO;AACL,aAAOA,cAAa,OAAO;AAAA,IAC7B;AAAA,EACF;AACF;;;ACxaA,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,cAAc;AACvB,SAAS,cAAc;AACvB,YAAY,OAAO;AAUnB,IAAM,oBAAoB;AAOnB,IAAM,aAAN,MAAiB;AAAA,EAYtB,YAAY,QAAwB;AAXpC,wBAAiB;AAEjB,wBAAiB,OAAa,IAAM,MAAI;AACxC;AAAA,wBAAiB,mBAAkB,oBAAI,IAAyB;AAChE,wBAAiB,oBAAmB,oBAAI,IAAwB;AAEhE;AAAA,wBAAiB,YAAW,IAAIC;AAAA,MAC9B,MAAM,oBAAI,IAAI;AAAA,IAChB;AACA,wBAAiB,oBAAgD,oBAAI,IAAI;AAkMzE;AAAA;AAAA;AAAA,wBAAQ,8BAA6B,OACnC,YACA,KACA,UACkB;AAElB,YAAM,oBAAoB;AAE1B,YAAM,UAAU,OAAO,OAAO,UAAU;AAExC,YAAM,aAAa,QAAQ,OAAO,CAAC,KAAK,WAAW;AACjD,eAAO,MAAM,OAAO;AAAA,MACtB,GAAG,CAAC;AACJ,UAAI,QAAQ,SAAS,GAAG;AACtB,cAAM,UAAU,OAAO,KAAK,UAAU;AAEtC,aAAK,SAAS,IAAI,OAAO,IAAI,IAAI,OAAO,CAAC;AAEzC,cAAM
,eAAiB,eAAa,OAAO;AAE3C,QAAE,cAAY,KAAK,YAAY;AAG/B,cAAM,yBAA2B,sBAAoB,GAAG;AAExD,YACE,uBAAuB,SACvB,cAAc,IAAI,oBAClB;AACA,gBAAM,SAAS,OAAO;AACtB,gBAAM,KAAK,OAAO;AAAA,YAChB;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,KAAK,OAAO,iBAAiB,OAAO,OAAO;AACjD,eAAK,SAAS,IAAI,OAAO,oBAAI,IAAI,CAAC,MAAM,CAAC,CAAC;AAAA,QAC5C;AAAA,MACF;AAAA,IACF;AAEA,wBAAQ,+BAA8B,OACpC,KACA,UACmB;AACnB,YAAM,aAAa,OAAO;AAAA,QACxB,MAAM,KAAK,OAAO,eAAe,KAAK;AAAA,MACxC;AACA,YAAM,KAAK,2BAA2B,YAAY,KAAK,KAAK;AAE5D,WAAK,gBAAgB,IAAI,OAAO;AAAA,QAC9B,YAAY,OAAO;AAAA,QACnB,YAAc,oBAAkB,GAAG;AAAA,MACrC,CAAC;AACD,UAAI,KAAK,QAAQ,CAAC,GAAG,CAAC;AAEtB,aAAO;AAAA,IACT;AAzPE,SAAK,SAAS;AACd,SAAK,IAAI,GAAG,WAAW,CAAC,EAAE,QAAQ,MAAM;AACtC,cAAQ,QAAQ,CAAC,WAAkB;AACjC,eAAO,QAAQ;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAMA,MAAa,QAAQ,OAA+B;AAClD,UAAM,MAAM,MAAM,KAAK,8BAA8B,KAAK;AAC1D,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAa,cACX,QACA,cAAsB,IACtB,MACA,OAAgB,OACQ;AACxB,UAAM,SAAS,MAAM,KAAK;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,QAAI,CAAC,OAAQ,QAAO;AACpB,WAAO,OAAO,eAAe,MAAM;AAAA,EACrC;AAAA,EAEA,MAAa,oBACX,QACA,cAAsB,IACtB,MACA,OAAgB,OACY;AAC5B,UAAM,MAAM,SAAS,SAAY,MAAM,KAAK,WAAW,IAAI,IAAI,KAAK;AACpE,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AACA,QAAI;AACJ,QAAI;AAEF,4BACE,YAAY,SAAS,IAAI,OAAO,aAAa,WAAW,IAAI;AAAA,IAChE,SAAS,GAAG;AACV,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IACF;AACA,QAAI,MAAM;AACR,aAAS,wBAAsB,KAAK,mBAAmB;AAAA,IACzD;AACA,WAAS,sBAAoB,KAAK,mBAAmB;AAAA,EACvD;AAAA,EAEA,MAAa,gBAAgB,MAAqC;AAChE,UAAM,MAAM,SAAS,SAAY,MAAM,KAAK,WAAW,IAAI,IAAI,KAAK;AACpE,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AACA,WAAO,OAAO,eAAiB,oBAAkB,GAAG,CAAC;AAAA,EACvD;AAAA,EAEA,MAAa,gBAAgB,SAGF;AACzB,UAAM,MACJ,QAAQ,SAAS,SACb,MAAM,KAAK,WAAW,QAAQ,IAAI,IAClC,KAAK;AACX,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AACA,UAAMC,YAAW,KAAK,sBAAsB,GAAG;AAC/C,WAAO,KAAK,sBAAsBA,WAAU,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAa,cACX,QACA,QACA,MACA,MACuD;AACvD,UAAM,MAAM,SAAS,SAAY,MAAM,KAAK,WAAW,IAAI,IAAI,KAAK;AACpE,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,kBAAkB,IAAI,YAAY;AAAA,IACpD;AAEA,QAAI;AAEF,YAAM,iBAAiB,KAAK,sBAAsB,GAAG;AACrD,YAAM,aACJ,OAAO,WAAW,WAAW,OAAO,aAAa,MAAM,IAAI;AAC7D,YAAMC,eAAc,OAAS,kBAAkB;AAC/C,MAAAA,aAAY,KAAK,YAAY,QAAQ;AAErC,YAAM,gBAAgB,KAAK,iBAAiB,GAAG;AAE/C,YAAM,UAAU,CAAG,iBAAe,gBAAgB,aAAa;AAC/D,UAAI,SAAS;AACX,cAAM,KAAK,iBAAiB,GAAG;AAAA,MACjC;AAEA,aAAO;AAAA,QACL,WAAW;AAAA,QACX,cAAc,MAAM,KAAK,sBAAsB,eAAe,EAAE,KAAK,CAAC;AAAA,MACxE;AAAA,IACF,SAAS,GAAG;AAEV,aAAO,KAAK,4BAA4B,OAAO,CAAC,CAAC,EAAE;AACnD,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEO,8BAA8B,OAA+B;AAClE,QAAI,UAAU,KAAK,iBAAiB,IAAI,KAAK;AAC7C,QAAI,MAAM,UAAU,eAAe,KAAK,MAAM,KAAK,kBAAkB,KAAK;AAC1E,QAAI,CAAC,KAAK;AAER,YAAM,IAAM,MAAI;AAAA,IAClB;AACA,QAAI,YAAY,QAAW;AACzB,gBAAU,KAAK,4BAA4B,KAAK,KAAK;AACrD,WAAK,iBAAiB,IAAI,OAAO,OAAO;AAAA,IAC1C;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,KAAK,SAAgC;AAChD,UAAM,KAAK,8BAA8B,YAAY;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA,EAKO,SAAe;AAAA,EAWtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,sBAAsB,KAAwB;AACpD,UAAM,QACJ,IAAI,SAAS,KAAK,IAAI,OAAO,eAAgB,IAAI;AACnD,UAAMD,YAAW,KAAK,iBAAiB,IAAI,KAAK;AAChD,QAAIA,WAAU;AACZ,aAAOA;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,GAAG;AAAA,EAClC;AAAA;AAAA,EAGQ,iBAAiB,KAAwB;AAC/C,UAAM,QACJ,IAAI,SAAS,KAAK,IAAI,OAAO,eAAgB,IAAI;AACnD,UAAMA,YAAa,WAAS,GAAG;AAC/B,SAAK,iBAAiB,IAAI,OAAOA,SAAQ;AACzC,WAAOA;AAAA,EACT;AAAA,EAgEQ,kBAAkB,MAA0B;AAClD,eAAW,UAAU,KAAK,IAAI,WAAW,GAAG;AAC1C,UAAI,OAAO,SAAS,MAAM;AACxB,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,sBACZA,WACA,EAAE,KAAK,GACU;AACjB,UAAM,kBAAkB,OAClB,mBAAiBA,SAAQ,IACzB,iBAAeA,SAAQ;AAC7B,WAAO,OAAO;AAAA,MA
CZ,IAAI;AAAA,QACF,MAAM,OAAO,OAAO,OAAO,WAAW,IAAI,WAAW,eAAe,CAAC;AAAA,MACvE;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,MAAc,WAAW,MAAmC;AAC1D,UAAM,SAAS,KAAK,kBAAkB,IAAI;AAC1C,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AACA,UAAM,KAAK,8BAA8B,IAAI;AAC7C,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAc,iBAAiB,KAA2B;AACxD,UAAM,QACJ,IAAI,SAAS,KAAK,IAAI,OAAO,eAAgB,IAAI;AACnD,UAAM,gBAAgB,KAAK,gBAAgB,IAAI,KAAK;AAEpD,UAAM,wBAA0B;AAAA,MAC9B;AAAA,MACA,eAAe;AAAA,IACjB;AAGA,UAAM,aAAa,eAAe,cAAc,OAAO;AACvD,QAAI,sBAAsB,SAAS,mBAAmB;AAEpD,YAAM,SAAS,OAAO;AACtB,YAAM,KAAK,OAAO;AAAA,QAChB;AAAA,QACA;AAAA,QACE,sBAAoB,GAAG;AAAA,MAC3B;AAEA,YAAM,KAAK,OAAO;AAAA,QAChB;AAAA,QACA,MAAM,KAAK,KAAK,SAAS,YAAY,KAAK,CAAC;AAAA,MAC7C;AAEA,WAAK,SAAS,IAAI,OAAO,oBAAI,IAAI,CAAC,MAAM,CAAC,CAAC;AAE1C,WAAK,gBAAgB,IAAI,OAAO;AAAA,QAC9B,YAAY,OAAO;AAAA;AAAA,QACnB,YAAc,oBAAkB,GAAG;AAAA,MACrC,CAAC;AAAA,IACH,OAAO;AAEL,YAAM,KAAK,OAAO;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,YAAM,OAAO,CAAC,UAAU;AAExB,YAAM,cAAc,KAAK,SAAS,YAAY,KAAK;AACnD,iBAAW,OAAO,MAAM;AACtB,oBAAY,IAAI,GAAG;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AACF;;;ACpVA,eAAsB,SACpB,SAC0C;AAC1C,MAAI;AACF,UAAM,OAAO,OAAO,OAAO,YAAY,aAAa,QAAQ,IAAI;AAChE,WAAO,CAAC,MAAM,MAAS;AAAA,EACzB,SAAS,OAAO;AACd,WAAO,CAAC,QAAW,KAAU;AAAA,EAC/B;AACF;;;ACnDA;AAmCO,IAAM,YAAN,cAAkC,IAAU;AAAA,EAMjD,YACE,OAEA;AACA,UAAM;AAPR;AAAA;AAAA;AACA;AAOE,uBAAK,SAAU;AACf,uBAAK,UAAW,oBAAI,IAAI;AAAA,EAC1B;AAAA,EAEA,iBAAiB,SAA4B;AAC3C,WAAO,mBAAK,UAAS,IAAI,OAAO;AAAA,EAClC;AAAA,EAEA,OAAO,SAA4B;AACjC,UAAM,MAAM,mBAAK,UAAS,IAAI,OAAO;AACrC,WAAO,QAAQ,SAAY,KAAK,IAAI,GAAG,IAAI;AAAA,EAC7C;AAAA,EAEA,IAAI,KAAQ,OAAgB;AAC1B,UAAM,UAAU,mBAAK,SAAL,WAAa;AAC7B,UAAM,aAAa,mBAAK,UAAS,IAAI,OAAO;AAC5C,QAAI,eAAe,UAAa,eAAe,KAAK;AAClD,YAAM,IAAI,MAAM,cAAc,OAAO,OAAO,CAAC,iBAAiB;AAAA,IAChE;AACA,uBAAK,UAAS,IAAI,SAAS,GAAG;AAC9B,WAAO,MAAM,IAAI,KAAK,KAAK;AAAA,EAC7B;AAAA,EAEA,OAAO,YAAwB;AAC7B,UAAM,QAAQ,KAAK,IAAI,UAAU;AACjC,QAAI,UAAU,QAAW;AACvB,YAAM,aAAa,mBAAK,SAAL,WAAa;AAChC,yBAAK,UAAS,OAAO,UAAU;AAAA,IACjC;AACA,WAAO,MAAM,OAAO,UAAU;AAAA,EAChC;AACF;AAvCE;AACA;;;ACrBF,SAAS,qBAAqB;AAOvB,SAAS,kBAAkB,QAAkC;AAClE,QAAM,cAAc,OAAO,OAAO,CAAC,KAAK,QAAQ,MAAM,IAAI,QAAQ,CAAC;AACnE,QAAM,SAAS,IAAI,WAAW,WAAW;AACzC,MAAI,SAAS;AACb,aAAW,OAAO,QAAQ;AACxB,WAAO,IAAI,KAAK,MAAM;AACtB,cAAU,IAAI;AAAA,EAChB;AACA,SAAO;AACT;AAEO,SAAS,iBACd,OACA,OACA,QACA,OACA,YACkC;AAClC,SAAO;AAAA,IACL,MAAM,cAAc;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,cAAc,CAAC;AAAA,EACvB;AACF;;;ARQA,IAAM,kBAAkBE,OAAM,gBAAgB;AAE9C,IAAM,2BAA2B,KAAK;AAAA,EACpC,GAAG,OAAO,OAAO,eAAe,EAAE;AAAA,IAChC,CAAC,MAAmB,OAAO,MAAM;AAAA,EACnC;AACF;AAGA,IAAM,wBAAwB,OAAO;AAAA,EACnC,OAAO,QAAQC,cAAa,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;AACtD;AAEA,IAAM,aAAa,IAAI,OAAO;AAAA;AAE9B,CAAC;AAsBD,SAAS,qBAAqB;AAC5B,QAAM,WAA4B,CAAC;AACnC,SAAO;AAAA,IACL,OAAO,CAAC,MAAqB,KAAK,SAAS,KAAK,CAAC;AAAA,IACjD,SAAS,MAAM,QAAQ,WAAW,QAAQ;AAAA,EAC5C;AACF;AAEA,SAAS,UACP,MACwB;AACxB,SAAO,KAAK,UAAU,IAAI;AAC5B;AAEO,SAAS,aAAa,MAAyB;AACpD,SAAO,EAAE,MAAMC,QAAO,aAAa,IAAI,OAAO,KAAK;AACrD;AAEA,SAAS,UAAUC,KAAsB;AAMvC,QAAM,EAAE,MAAM,GAAG,GAAG,KAAK,IAAIA;AAC7B,SAAO;AACT;AA1HA;AAiLO,IAAM,iBAAN,MAAgD;AAAA;AAAA,EAsBrD,YACE,QACA,QACA,OACA;AAtBF;AAAA;AAAA,wBAAgB;AAChB;AAAA,wBAAgB;AAChB;AAAA,wBAAgB;AAGhB;AAAA,wBAAgB;AAChB,wBAAgB;AAChB;AAAA,wBAAgB;AAChB;AAAA,wBAAgB;AAEhB;AAAA,uBAAS;AACT,uBAAS;AACT;AAGA;AAAA;AAQE,SAAK,UAAU,OAAO;AACtB,SAAK,QAAQ,OAAO;AACpB,SAAK,OAAO,OAAO;AACnB,SAAK,SAAS,OAAO;AACrB,SAAK,OAAO,OAAO,QAAS;AAC5B,SAAK,aAAa,OAAO;AACzB,uBAAK,UAAW;AAChB,uBAAK,SAAU;AAEf,UAAM,MAAM,oBAAI,KAAK;AACrB,SAAK,YAAY;AACjB,uBAAK,gBAAiB;AACtB,uBAAK,uCAAwC;AAAA,EAC/C;AAAA,EAEA,IAAI,eAAqB;AACvB,UAAM,WAAW,mBAAK,U
AAS,uBAAuB;AACtD,QAAI,YAAY,WAAW,mBAAK,iBAAgB;AAC9C,aAAO;AAAA,IACT,OAAO;AACL,aAAO,mBAAK;AAAA,IACd;AAAA,EACF;AAAA,EAEA,IAAI,sCAA+C;AACjD,WAAO,mBAAK;AAAA,EACd;AAAA,EAEA,WAAW,MAAM,oBAAI,KAAK,GAAS;AACjC,QAAI,MAAM,mBAAK,iBAAgB;AAC7B,yBAAK,gBAAiB;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,yCAA+C;AAC7C,uBAAK,uCAAwC;AAAA,EAC/C;AAAA,EAEA,WAAmB;AACjB,SAAK,WAAW;AAEhB,UAAM,OAAO,mBAAK,UAAS,KAAK,MAAM;AACtC,QAAI,mBAAK,UAAS;AAChB,UAAI,OAAO,GAAG;AACZ,gBAAQ;AAAA,UACN,kCAAkC,KAAK,KAAK;AAAA,QAC9C;AAAA,MACF,WAAW,SAAS,GAAG;AACrB,gBAAQ;AAAA,UACN,kCAAkC,KAAK,KAAK;AAAA,QAC9C;AAAA,MACF,OAAO;AAEL,gBAAQ,IAAI,iBAAiB,KAAK,KAAK,UAAU;AAAA,MACnD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,KAAK,WAAqE;AACxE,UAAM,OACJ,OAAO,cAAc,WAAW,YAAY,UAAU,SAAS;AACjE,UAAM,OAAO,mBAAK,UAAS,KAAK,IAAI;AACpC,QAAI,mBAAK,UAAS;AAChB,UAAI,OAAO,GAAG;AACZ,gBAAQ;AAAA,UACN,mCAAmC,KAAK,KAAK;AAAA,QAC/C;AAAA,MACF,WAAW,SAAS,GAAG;AACrB,gBAAQ;AAAA,UACN,mCAAmC,KAAK,KAAK;AAAA,QAC/C;AAAA,MACF;AAEA,YAAM,OAAO,KAAK,MAAM,IAAI;AAC5B,iBAAW,OAAO,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,IAAI,GAAG;AACrD,gBAAQ;AAAA,UACN,iBAAiB,KAAK,KAAK,MACzB,sBAAsB,IAAI,IAAI,KAAK,IAAI,IACzC,KAAK,KAAK,UAAU,GAAG,CAAC;AAAA,QAC1B;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,YAAY,MAAc,QAAuB;AAC/C,uBAAK,UAAS,MAAM,MAAM,MAAM;AAAA,EAClC;AACF;AA5GW;AACA;AACT;AAGA;AAyGK,IAAM,iBAAN,cAA6B,eAA6B;AAAA;AAAA,EAE/D,YACE,QACA,QACA,OACA;AACA,UAAM,QAAQ,QAAQ,KAAK;AAAA,EAC7B;AACF;AAtTA,IAAAC,UAAA;AA6aO,IAAM,OAAN,MAAyD;AAAA,EAmD9D,YAAY,MAAU,SAAkC;AAhDxD;AAAA,wBAAO;AACP,wBAAgB;AAChB,wBAAO;AAEP,wBAAQ,cAAmC;AAC3C,wBAAQ,SAA6B;AACrC,wBAAQ,UAAS;AAEjB,wBAAiB,YAAW,IAAI,UAI9B,CAAC,MAAM,EAAE,KAAK;AAEhB,wBAAiB;AA+BjB,uBAASA;AACT,uBAAS;AAGP,UAAM,SAAS,SAAS,WAAW,sBAAsB;AACzD,SAAK,OAAO;AACZ,SAAK,SAAS;AACd,SAAK,SAAS,SAAS,UAAU;AACjC,uBAAK,kBAAmB,SAAS,kBAAkB;AACnD,SAAK,QAAQ;AAAA,MACX,oBACE,SAAS,OAAO,uBACf,MAAM;AACL,eAAO;AAAA,UACL,SAAS;AAAA,QACX;AAAA,MACF;AAAA;AAAA,MAGF,gBAAgB,SAAS,OAAO;AAAA,MAChC,eAAe,SAAS,OAAO;AAAA,MAE/B,kBAAkB,SAAS,OAAO;AAAA,MAClC,iBAAiB,SAAS,OAAO;AAAA,MAEjC,mBAAmB,SAAS,OAAO;AAAA,MACnC,iBAAiB,SAAS,OAAO;AAAA,MAEjC,+BACE,SAAS,OAAO;AAAA,MAClB,4BAA4B,SAAS,OAAO;AAAA,IAC9C;AACA,uBAAKA,UAAU,SAAS,sBAAsB;AAAA,EAChD;AAAA,EAEA,IAAW,eAA6B;AACtC,QAAI,KAAK,eAAe,MAAM;AAC5B,aAAO;AAAA,IACT,WAAW,KAAK,UAAU,MAAM;AAC9B,aAAO;AAAA,IACT,OAAO;AACL,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,IAAY,cAA0B;AAAE,WAAO,KAAK,SAAS;AAAA,EAAM;AAAA;AAAA,EAEnE,IAAY,UAA0B;AAAE,WAAO,KAAK,KAAK;AAAA,EAAS;AAAA;AAAA,EAClE,IAAY,aAA0B;AAAE,WAAO,KAAK,KAAK;AAAA,EAAY;AAAA;AAAA,EAErE,IAAW,QAA0B;AAAE,WAAO,KAAK,KAAK;AAAA,EAAO;AAAA;AAAA,EAE/D,IAAY,OAA0B;AAAE,WAAO,KAAK,SAASC,OAAM,oCAAoC;AAAA,EAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAW1G,MAAa,KAAK,KAAwB;AACxC,QAAI,KAAK,eAAe,MAAM;AAC5B,WAAK,QAAQ;AACb,WAAK,aAAa,KAAK,MAAM,GAAG,EAAE,MAAM,CAAC,MAAM;AAC7C,aAAK,QAAQ;AACb,aAAK,aAAa;AAClB,cAAM;AAAA,MACR,CAAC;AAAA,IACH;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOO,OAAO,KAAe;AAC3B,SAAK,MAAM,mBAAmB,GAAG;AACjC,QAAI,KAAK,OAAO;AACd,WAAK,QAAQ,OAAO;AACpB,WAAK,WAAW,OAAO;AAAA,IACzB;AAEA,SAAK,aAAa;AAElB,SAAK,MAAM,kBAAkB,GAAG;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAa,aACX,SACyB;AACzB,UAAM,SAAS,SAAS,SAAS,KAAK,aAAa;AACnD,UAAM,aAAaC,QAAO;AAC1B,UAAM,OAAO,SAAS;AACtB,UAAM,SAAyB;AAAA,MAC7B,SAAS,SAAS,WAAW;AAAA,MAC7B,OAAO,MAAM;AAAA,MACb;AAAA,MACA,MAAM,SAAS;AAAA,MACf,YAAY,SAAS;AAAA,MACrB,MAAM,SAAS,KACX,EAAE,IAAI,QAAQ,IAAI,KAAK,IACvB,EAAE,aAAa,SAAS,eAAeA,QAAO,GAAG,KAAK;AAAA,MAC1D,QAAQ,SAAS,UAAU,CAAC,YAAY;AAAA,IAC1C;AACA,QAAI,mBAAKF,WAAS;AAChB,cAAQ,IAAI,uBAAuB,KAAK,UAAU,MAAM,CAAC,EAAE;AAAA,IAC7D;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,o
CAEX;AACA,UAAM,SAAU,MAAM,KAAK,aAAa;AACxC,UAAM,qBAA+C,CAAC;AACtD,UAAM,OAAO;AAAA,MACX,MAAM,CAAC,SAAS;AACd,YAAI,OAAO,SAAS,UAAU;AAC5B,6BAAmB,KAAK,IAA8B;AAAA,QACxD;AACA,eAAO;AAAA,MACT;AAAA,MACA,OAAO,MAAM;AAAA,MAAC;AAAA;AAAA,IAChB;AACA,UAAM,UAAU,IAAI,eAAe,QAAQ,MAAM,KAAK;AACtD,WAAO,CAAC,SAAS,kBAAkB;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYO,gBACL,UAKM;AACN,QAAI,KAAK,SAAS,OAAO,GAAG;AAC1B,YAAM,IAAI,MAAM,uDAAuD;AAAA,IACzE;AAEA,eAAW,EAAE,QAAQ,QAAQ,aAAa,KAAK,UAAU;AACvD,YAAM,aAAa,IAAI,eAAe,QAAQ,QAAQ,mBAAKA,SAAO;AAClE,WAAK,SAAS,IAAI,OAAO,YAAY,UAAU;AAC/C,iBAAW,WAAW,YAAY;AAAA,IACpC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUO,oBACL,QACA,QACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACM;AACN,QAAI;AACJ,YACG,WAAW,KAAK,SAAS,iBAAiB,OAAO,KAAK,OAAO,QAC9D;AAOA,WAAK;AAAA,QACH;AAAA,QACA,UAAU;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,WAAK,OAAO;AAAA,QACV,8BAA8B,OAAO,KAAK;AAAA,MAC5C;AAAA,IACF;AAEA,UAAM,aAAa,IAAI,eAAe,QAAQ,QAAQ,mBAAKA,SAAO;AAClE,SAAK,SAAS,IAAI,OAAO,YAAY,UAAU;AAE/C,UAAM,QAA8D,CAAC;AACrE,eAAW,WAAW,KAAK,cAAc,OAAO,UAAU,GAAG;AAC3D,YAAM,QAAQ,KAAK,IAAI;AAAA,QACrB,IAAI,QAAQ,KAAK;AAAA,QACjB,MAAM,QAAQ,KAAK;AAAA,QACnB,QAAQ,QAAQ;AAAA,MAClB;AAAA,IACF;AAEA,eAAW;AAAA,MACT;AAAA,QACE,WAAW;AAAA,QACX,OAAO;AAAA;AAAA,QACP,WAAW;AAAA,QACX;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,SAAK;AAAA,MACH,OAAO;AAAA,MACP;AAAA,QACE,MAAMH,eAAc;AAAA,QACpB,OAAO,WAAW;AAAA,QAClB,IAAI,WAAW,KAAK;AAAA,QACpB,MAAM,WAAW,KAAK;AAAA,QACtB,QAAQ,WAAW;AAAA,MACrB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,UAAM,KAAK,KAAK,MAAM,oBAAoB,YAAY,GAAG;AACzD,QAAI,GAAI,OAAM,EAAE;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASO,kBACL,KACA,MACA,QACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACM;AACN,UAAM,WAAW,KAAK;AAEtB,UAAM,UAAU,SAAS,IAAI,GAAG;AAChC,QAAI,YAAY,OAAW;AAE3B,YAAQ,YAAY,MAAM,MAAM;AAEhC,UAAM,UAAU,SAAS,OAAO,GAAG;AACnC,QAAI,SAAS;AACX,iBAAW,SAAS,KAAK,cAAc,GAAG,GAAG;AAC3C,cAAM,KAAK,EAAE,MAAMA,eAAc,WAAW,OAAO,QAAQ,MAAM,CAAC;AAAA,MACpE;AAGA,YAAM,KAAK,KAAK,MAAM,kBAAkB,SAAS,GAAG;AACpD,UAAI,GAAI,OAAM,EAAE;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKO,aACL,WACA,MACA,QACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACQ;AACR,QAAI,QAAQ;AACZ,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,UAAU;AAC1C,UAAI,UAAU,OAAO,GAAG;AACtB;AACA,aAAK,kBAAkB,KAAK,MAAM,QAAQ,KAAK,KAAK;AAAA,MACtD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAa,WACX,KACA,MACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACe;AACf,UAAM,OACJ,OAAO,SAAS,WAAW,OAAOI,OAAM,4BAA4B;AAEtE,QAAI,SAAS,QAAQ;AACnB,YAAM,KAAK,WAAW,KAAK,GAAG;AAAA,IAChC,OAAO;AACL,YAAM,OAAO,aAAa,IAAI;AAC9B,YAAM,WAAW,gBAAgB,OAAO,IAAI;AAE5C,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,SACJ,QAAQ,IAAI,aAAa,eACrB,aAAa,SAAS,KAAK,IAC3B;AAEN,aAAK;AAAA,UACH;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF;AAMA,UAAI,KAAK,SAAS,KAAQ;AAAA,MAK1B,WAAW,KAAK,SAAS,KAAO;AAAA,MAGhC;AAEA,WAAK;AAKL,UAAI;AACF,cAAM,KAAK,iBAAiB,KAAK,SAAS,OAAO,GAAG;AAAA,MACtD,UAAE;AACA,aAAK;AAAA,MACP;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAa,iBACX,KACA,UACA,KACe;AACf,UAAM,KAAK,KAAK,GAAG;AACnB,UAAM,EAAE,OAAO,QAAQ,IAAI,mBAAmB;AAC9C,UAAM,KAAK,MAAM;AAAA,MAAa,MAC5B,KAAK,sCAAsC,KAAK,UAAU,KAAK,KAAK;AAAA,IACtE;AAIA,UAAM,QAAQ;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBA,MAAa,mCACX,SACA,UACA,KACe;AACf,UAAM,KAAK,KAAK,GAAG;AACnB,UAAM,EAAE,OAAO,QAAQ,IAAI,mBAAmB;AAC9C,UAAM,KAAK,MAAM;AAAA,MAAa,MAC5B,KAAK;AAAA,QACH;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF
;AAIA,UAAM,QAAQ;AAAA,EAChB;AAAA,EAEO,WACL,YACoC;AACpC,WAAO,KAAK,SAAS,IAAI,UAAU;AAAA,EACrC;AAAA,EAEO,eAAyC;AAC9C,WAAO,MAAM,KAAK,KAAK,SAAS,OAAO,CAAC;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,aACL,QACA,WACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACM;AACN,UAAM,MAAM,UAAU,SAAS;AAC/B,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,oBAAoB,MAAM,GAAG;AAC7D,YAAM,UAAU,QAAQ,KAAK,GAAG;AAChC,UAAI,YAAY,GAAG;AAIjB,aAAK;AAAA,UACH;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKO,UACL,WACA,KACA,QAA0C,MAAM;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF,GACM;AACN,UAAM,MAAM,UAAU,SAAS;AAC/B,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,UAAU;AAC1C,YAAM,UAAU,QAAQ,KAAK,GAAG;AAChC,UAAI,YAAY,GAAG;AAIjB,aAAK;AAAA,UACH;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,eAAiC;AAC7C,UAAM,UAAU,IAAI,QAAQ,KAAK,MAAM;AACvC,UAAM,QAAQ,KAAK,KAAK,MAAM;AAC9B,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,kBAAuC;AACnD,UAAM,aAAa,IAAI,WAAW,KAAK,MAAM;AAC7C,UAAM,WAAW,KAAK,KAAK,MAAM;AACjC,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAc,MAAM,KAAwB;AAC1C,UAAM,KAAK,MAAM,iBAAiB,GAAG;AAGrC,UAAM,UAAU,MAAM,KAAK,aAAa;AACxC,UAAM,aAAa,MAAM,KAAK,gBAAgB;AAE9C,SAAK,QAAQ;AAAA,MACX,OAAO,IAAI,MAAM;AAAA,MACjB;AAAA,MACA;AAAA,IACF;AAEA,UAAM,KAAK,MAAM,gBAAgB,GAAG;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eAAiC;AAC7C,WAAQ,MAAM,KAAK,OAAO,WAAW;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,CAAS,oBACP,YACiD;AACjD,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,UAAU;AAC1C,UAAI,QAAQ,YAAY;AACtB,cAAM,CAAC,KAAK,OAAO;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,CAAS,cACP,YACmC;AACnC,eAAW,CAAC,KAAK,OAAO,KAAK,KAAK,UAAU;AAC1C,UAAI,QAAQ,YAAY;AACtB,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,WAAW,YAAwB,KAAwB;AACvE,UAAM,UAAU,KAAK,SAAS,IAAI,UAAU;AAC5C,QAAI,YAAY,QAAW;AACzB,WAAK,OACF,YAAY,EAAE,WAAW,CAAC,EAC1B,KAAK,+CAA+C;AACvD;AAAA,IACF;AAEA,UAAM,OAAO,QAAQ,SAAS;AAI9B,QAAI,SAAS,GAAG;AACd,YAAM,KAAK,MAAM,YAAY,GAAG;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,MAAc,sCACZ,YACA,UACA,KACA,OACe;AACf,UAAM,UAAU,KAAK,SAAS,IAAI,UAAU;AAC5C,QAAI,CAAC,SAAS;AACZ,WAAK,OACF,YAAY,EAAE,WAAW,CAAC,EAC1B,KAAK,qDAAqD;AAC7D;AAAA,IACF;AAKA,UAAM,WAAwB,CAAC;AAC/B,UAAM,UAAuB,CAAC;AAC9B,UAAM,mBAAmB,CAAC,QACxB,KAAK,QAAQ,KAAK,GAAG;AACvB,UAAM,iBAAiB,CAAC,QAAmB,KAAK,SAAS,KAAK,GAAG;AACjE,UAAM,gBAAgB,CAAC,QAAmB,KAAK,QAAQ,KAAK,GAAG;AAE/D,eAAW,OAAO,UAAU;AAC1B,YAAM,eAAe,KAAK,MAAM,mBAAmB,KAAK,OAAO;AAC/D,UAAI,aAAa,SAAS;AACxB,cAAM,KAAK;AAAA,UACT;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF,OAAO;AACL,YAAI,CAAC,QAAQ,qCAAqC;AAChD,kBAAQ,KAAK;AAAA,YACX,MAAMJ,eAAc;AAAA,YACpB,OACE,IAAI,SAASM,eAAc,iBACvB,IAAI,IAAI,IAAI,CAACJ,QAAOA,IAAG,IAAI,IAC3B,CAAC;AAAA,YACP,QAAQ,aAAa;AAAA,UACvB,CAAC;AACD,kBAAQ,uCAAuC;AAAA,QACjD;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,SAAS,GAAG;AACvB,WAAK,aAAa,YAAY,UAAU,KAAK,KAAK;AAAA,IACpD;AAEA,QAAI,QAAQ,SAAS,GAAG;AACtB,cAAQ,KAAK,OAAO;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,wDACZ,SACA,UACA,KACA,OACe;AAIf,UAAM,WAAwB,CAAC;AAC/B,UAAM,qBAAkC,CAAC;AACzC,UAAM,eAA4B,CAAC;AAEnC,UAAM,mBAAmB,CAAC,QAAiC;AACzD,UAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,mBAAW,KAAK,KAAK;AACnB,6BAAmB,KAAK,CAAC;AAAA,QAC3B;AAAA,MACF,OAAO;AACL,2BAAmB,KAAK,GAAG;AAAA,MAC7B;AAAA,IACF;AACA,UAAM,iBAAiB,CAAC,QAAmB,KAAK,SAAS,KAAK,GAAG;AACjE,UAAM,gBAAgB,CAAC,QAAmB,KAAK,aAAa,KAAK,GAAG;AAEpE,eAAW,OAAO,UAAU;AAC1B,YAAM,KAAK;AAAA,QACT;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,QAAI,mBAAmB,SAAS,GAAG;AACjC,cAAQ,KAAK,kBAAkB;AAC/B,yBAAmB,SAAS;AAAA,IAC9B;AAEA,QAAI,SAAS,SAAS,GAAG;AACvB,WAAK,aAAa,eAA6B,UAAU,KAA
K,KAAK;AACnE,eAAS,SAAS;AAAA,IACpB;AAEA,QAAI,aAAa,SAAS,GAAG;AAC3B,cAAQ,KAAK,YAAY;AACzB,mBAAa,SAAS;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAc,UACZ,SACA,KACA,kBACA,gBACA,eACA,KACA,OACe;AACf,QAAI,CAAC,KAAK,MAAM,SAAS,GAAG;AAC1B,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,YAAQ,IAAI,MAAM;AAAA,MAChB,KAAKI,eAAc,iBAAiB;AAElC,uBAAe;AAAA,UACb,MAAMN,eAAc;AAAA,UACpB,OAAO,QAAQ;AAAA,UACf,MAAM,IAAI;AAAA,UACV,aAAa,IAAI;AAAA,QACnB,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAKM,eAAc,iBAAiB;AAElC,uBAAe;AAAA,UACb,MAAMN,eAAc;AAAA,UACpB,OAAO,QAAQ;AAAA,UACf,OAAO,IAAI;AAAA,QACb,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAKM,eAAc,eAAe;AAChC,YAAI,QAAQ,uBAA+B;AACzC,cAAI,mBAAK,mBAAkB;AACzB,kBAAM,kBAAkB;AAExB,uBAAW,SAAS;AAAA,cAClB,yBAAyB,KAAK,QAAQ,aAAa,WAAW,CAAC;AAAA,cAC/D;AAAA,YACF,GAAG;AAMD,+BAAiB;AAAA,gBACf,MAAMN,eAAc;AAAA,gBACpB,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAAA,UACF,OAAO;AACL,6BAAiB;AAAA,cACf,MAAMA,eAAc;AAAA,cACpB,OAAO,MAAM;AAAA,gBACX,yBAAyB,KAAK,QAAQ,aAAa,WAAW,CAAC;AAAA,cACjE;AAAA,YACF,CAAC;AAAA,UACH;AAEA,2BAAiB,EAAE,MAAMA,eAAc,mBAAmB,CAAC;AAAA,QAC7D,OAAO;AACL,2BAAiB;AAAA,YACf,MAAMA,eAAc;AAAA,YACpB,OAAO,MAAM,KAAK,KAAK,QAAQ,aAAa,WAAW,CAAC;AAAA,UAC1D,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,KAAKM,eAAc,gBAAgB;AAIjC,aAAK,OAAO,uBAAuB;AAEnC,cAAM,SAAS,MAAM,KAAK,QAAQ,SAAS,IAAI,GAAG;AAElD,cAAM,eAA+B,OAAO;AAAA,UAAQ,CAAC,MACnD,EAAE,WAAW,aAAa,CAAC,EAAE,EAAE,IAAI,CAAC;AAAA,QACtC;AAEA,cAAM,gBAAgC,OAAO,QAAQ,CAAC,MAAM;AAC1D,kBAAQ,EAAE,QAAQ;AAAA,YAChB,KAAK;AAIH,qBAAO,EAAE,gBAAgB,SACrB,CAAC,aAAa,EAAE,WAAW,CAAC,IAC5B,CAAC;AAAA,YAEP,KAAK;AACH,qBAAO,EAAE,QAAQ,SAAY,CAAC,EAAE,GAAG,IAAI,CAAC;AAAA,YAG1C;AACE,qBAAOC,aAAY,GAAG,gBAAgB;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,YAAI,aAAa,SAAS,GAAG;AAC3B,yBAAe;AAAA,YACb,MAAMP,eAAc;AAAA,YACpB,KAAK,aAAa,IAAI,SAAS;AAAA,UACjC,CAAC;AACD,wBAAc;AAAA,YACZ,MAAMA,eAAc;AAAA,YACpB,KAAK;AAAA,UACP,CAAC;AAAA,QACH;AAEA,YAAI,cAAc,SAAS,GAAG;AAC5B,2BAAiB;AAAA,YACf,MAAMA,eAAc;AAAA,YACpB,KAAK;AAAA,UACP,CAAC;AAAA,QACH;AAEA,YAAI,aAAa,SAAS,GAAG;AAI3B,gBAAM,KAAK,KAAK,MAAM,gCAAgC,GAAG;AACzD,cAAI,GAAI,OAAM,EAAE;AAAA,QAClB;AACA;AAAA,MACF;AAAA,MAEA,KAAKM,eAAc,YAAY;AAC7B,cAAM,SAAS,IAAI;AACnB,cAAM,OAAO,IAAI;AACjB,cAAM,OAAO,IAAI;AACjB,cAAM,CAAC,QAAQ,aAAa,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,UAC5D,KAAK,WAAW,cAAc,KAAK,QAAQ,QAAQ,MAAM,IAAI;AAAA,UAC7D,KAAK,WAAW,gBAAgB,IAAI;AAAA,UACpC,KAAK,WAAW,gBAAgB,EAAE,MAAM,KAAK,CAAC;AAAA,QAChD,CAAC;AAED,YAAI,WAAW,QAAQ,iBAAiB,MAAM;AAC5C,2BAAiB;AAAA,YACf,MAAMN,eAAc;AAAA,YACpB;AAAA,YACA,QAAQ;AAAA;AAAA,YACR;AAAA,YACA;AAAA,YACA,IAAI;AAAA,YACJ,oBAAoB;AAAA,UACtB,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,KAAKM,eAAc,aAAa;AAC9B,cAAM,SAAS,IAAI;AACnB,cAAM,OAAO,IAAI;AACjB,cAAM,OAAO,IAAI;AACjB,cAAM,CAAC,QAAQ,KAAK,IAAI,MAAM;AAAA,UAC5B,KAAK,WAAW,cAAc,KAAK,QAAQ,QAAQ,MAAM,IAAI;AAAA,QAC/D;AAEA,YAAI;AAEF;AAEF,aAAK;AAAA,UACH;AAAA,YACE,MAAMN,eAAc;AAAA,YACpB;AAAA,YACA;AAAA,YACA,QAAQ;AAAA,YACR,aAAa;AAAA,YACb,IAAI;AAAA,YACJ,oBAAoB,OAAO;AAAA,UAC7B;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,YAAI,OAAO,WAAW;AACpB,gBAAM,KAAK,KAAK,MAAM,6BAA6B,KAAK,OAAO;AAC/D,cAAI,GAAI,OAAM,EAAE;AAAA,QAClB;AAEA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,YAAI;AACF,iBAAOO,aAAY,KAAK,yBAAyB;AAAA,QACnD,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAh6BWJ,WAAA;AACA;","names":["constant","object","optional","string","taggedUnion","object","constant","optional","string","taggedUnion","snapshot","snapshot","CrdtType","snapshot","buildObject","buildNode","buildList","buildMap","emitObject","emit","emitList","emitMap","CrdtType","CrdtType","ProtocolVersion","assertNever","ClientMsgCode","OpCode","raise","ServerMsgCode","array","nanoid","raise","LogLevel","raise","target","CrdtType","isRootStorageNode","nn","isRootStorageNo
de","nn","CrdtType","parentNode","asPos","assertNever","CrdtType","makePosition","OpCode","op","OpCode","CrdtType","assertNever","asPos","makePosition","DefaultMap","DefaultMap","snapshot","applyUpdate","array","ServerMsgCode","OpCode","op","__debug","raise","nanoid","ClientMsgCode","assertNever"]}