@rocicorp/zero 0.24.2025092700 → 0.24.2025093001

This diff shows the changes between publicly available package versions released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
Files changed (28)
  1. package/out/{chunk-C524XUFX.js → chunk-L2ZHMO4E.js} +3 -3
  2. package/out/{chunk-C524XUFX.js.map → chunk-L2ZHMO4E.js.map} +2 -2
  3. package/out/{chunk-YHZFOFXO.js → chunk-QCPDXNFF.js} +3 -3
  4. package/out/{chunk-MGNNTSXN.js → chunk-YXYKEMHQ.js} +2 -2
  5. package/out/{chunk-MGNNTSXN.js.map → chunk-YXYKEMHQ.js.map} +1 -1
  6. package/out/{lazy-inspector-VMXLTE36.js → lazy-inspector-A5VQLZYJ.js} +2 -2
  7. package/out/react.js +2 -2
  8. package/out/solid.js +3 -3
  9. package/out/zero/package.json +1 -1
  10. package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.d.ts.map +1 -1
  11. package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.js +13 -6
  12. package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.js.map +1 -1
  13. package/out/zero-cache/src/services/replicator/replication-status.d.ts.map +1 -1
  14. package/out/zero-cache/src/services/replicator/replication-status.js +10 -6
  15. package/out/zero-cache/src/services/replicator/replication-status.js.map +1 -1
  16. package/out/zero-events/src/status.d.ts +44 -18
  17. package/out/zero-events/src/status.d.ts.map +1 -1
  18. package/out/zero-events/src/status.js +8 -3
  19. package/out/zero-events/src/status.js.map +1 -1
  20. package/out/zero.js +3 -3
  21. package/out/zql/src/query/named.d.ts +2 -1
  22. package/out/zql/src/query/named.d.ts.map +1 -1
  23. package/out/zql/src/query/named.js.map +1 -1
  24. package/out/zql/src/query/query.d.ts +1 -1
  25. package/out/zql/src/query/query.d.ts.map +1 -1
  26. package/package.json +1 -1
  27. package/out/{chunk-YHZFOFXO.js.map → chunk-QCPDXNFF.js.map} +0 -0
  28. package/out/{lazy-inspector-VMXLTE36.js.map → lazy-inspector-A5VQLZYJ.js.map} +0 -0
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../replicache/src/format-version-enum.ts", "../../shared/src/valita.ts", "../../shared/src/json.ts", "../../shared/src/random-uint64.ts", "../../replicache/src/hash.ts", "../../replicache/src/size-of-value.ts", "../../replicache/src/btree/node.ts", "../../shared/src/binary-search.ts", "../../shared/src/iterables.ts", "../../replicache/src/btree/splice.ts", "../../replicache/src/btree/read.ts", "../../shared/src/string-compare.ts", "../../replicache/src/cookies.ts", "../../replicache/src/dag/chunk.ts", "../../replicache/src/dag/store.ts", "../../replicache/src/db/meta-type-enum.ts", "../../replicache/src/db/commit.ts", "../../replicache/src/db/index-operation-enum.ts", "../../replicache/src/db/index.ts", "../../replicache/src/db/read.ts", "../../replicache/src/with-transactions.ts", "../../replicache/src/index-defs.ts", "../../replicache/src/persist/client-groups.ts", "../../replicache/src/async-iterable-to-array.ts", "../../replicache/src/btree/diff.ts", "../../replicache/src/btree/write.ts", "../../replicache/src/lazy.ts", "../../replicache/src/sync/diff.ts", "../../replicache/src/db/write.ts", "../../replicache/src/sync/ids.ts", "../../replicache/src/persist/make-client-id.ts", "../../replicache/src/persist/clients.ts", "../../shared/src/objects.ts", "../../shared/src/must.ts", "../../zql/src/ivm/data.ts", "../../zql/src/ivm/view-apply-change.ts", "../../shared/src/json-schema.ts", "../../shared/src/tdigest-schema.ts", "../../zero-protocol/src/data.ts", "../../zero-protocol/src/analyze-query-result.ts", "../../zero-protocol/src/ast.ts", "../../shared/src/arrays.ts", "../../zero-protocol/src/inspect-down.ts", "../../shared/src/random-values.ts", "../../zero-client/src/util/nanoid.ts", "../../shared/src/hash.ts", "../../zero-protocol/src/primary-key.ts", "../../zero-client/src/client/keys.ts", "../../zero-client/src/client/inspector/client-group.ts", "../../zero-client/src/client/inspector/client.ts", "../../shared/src/centroid.ts", "../../shared/src/tdigest.ts", "../../zero-client/src/client/inspector/inspector.ts", "../../zero-schema/src/table-schema.ts", "../../shared/src/sentinels.ts", "../../zql/src/query/query.ts", "../../zql/src/query/ttl.ts", "../../zql/src/query/query-impl.ts", "../../zero-protocol/src/query-hash.ts", "../../zql/src/ivm/filter-operators.ts", "../../zql/src/ivm/operator.ts", "../../zql/src/ivm/stream.ts", "../../zql/src/ivm/exists.ts", "../../zql/src/ivm/push-accumulated.ts", "../../zql/src/ivm/fan-in.ts", "../../zql/src/ivm/fan-out.ts", "../../zql/src/ivm/maybe-split-and-push-edit-change.ts", "../../zql/src/ivm/filter-push.ts", "../../zql/src/ivm/filter.ts", "../../zql/src/ivm/constraint.ts", "../../zql/src/ivm/join-utils.ts", "../../zql/src/ivm/flipped-join.ts", "../../zql/src/ivm/join.ts", "../../zql/src/ivm/skip.ts", "../../zql/src/ivm/take.ts", "../../zql/src/ivm/union-fan-in.ts", "../../zql/src/ivm/union-fan-out.ts", "../../zql/src/query/expression.ts", "../../zql/src/builder/like.ts", "../../zql/src/builder/filter.ts", "../../zql/src/builder/builder.ts", "../../zql/src/error.ts", "../../zql/src/ivm/array-view.ts", "../../zql/src/query/assert-no-not-exists.ts"],
- "sourcesContent": ["/* eslint-disable @typescript-eslint/naming-convention */\n\nexport const SDD = 4;\nexport const DD31 = 5;\n// V6 added refreshHashes and persistHash to Client to fix ChunkNotFound errors\nexport const V6 = 6;\n// V7 added sizeOfEntry to the BTree chunk data.\nexport const V7 = 7;\nexport const Latest = V7;\n\nexport type SDD = typeof SDD;\nexport type DD31 = typeof DD31;\nexport type V6 = typeof V6;\nexport type V7 = typeof V7;\nexport type Latest = typeof Latest;\n", "import * as v from '@badrap/valita';\n\nexport * from '@badrap/valita';\n\nfunction toDisplay(value: unknown): string {\n switch (typeof value) {\n case 'string':\n case 'number':\n case 'boolean':\n return JSON.stringify(value);\n case 'undefined':\n return 'undefined';\n case 'bigint':\n return value.toString() + 'n';\n default:\n if (value === null) {\n return 'null';\n }\n if (Array.isArray(value)) {\n return 'array';\n }\n return typeof value;\n }\n}\n\ntype Key = string | number;\n\nfunction toDisplayAtPath(v: unknown, path: Key[] | undefined): string {\n if (!path?.length) {\n return toDisplay(v);\n }\n\n let cur = v;\n for (const p of path) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n cur = (cur as any)[p];\n }\n return toDisplay(cur);\n}\n\nfunction displayList<T>(\n word: string,\n expected: T[],\n toDisplay: (x: T) => string | number = x => String(x),\n): string | number {\n if (expected.length === 1) {\n return toDisplay(expected[0]);\n }\n\n const suffix = `${toDisplay(\n expected[expected.length - 2],\n )} ${word} ${toDisplay(expected[expected.length - 1])}`;\n if (expected.length === 2) {\n return suffix;\n }\n return `${expected.slice(0, -2).map(toDisplay).join(', ')}, ${suffix}`;\n}\n\nfunction getMessage(\n err: v.Err | v.ValitaError,\n v: unknown,\n schema: v.Type | v.Optional,\n mode: ParseOptionsMode | undefined,\n): string {\n const firstIssue = err.issues[0];\n const {path} = firstIssue;\n const atPath = path?.length ? ` at ${path.join('.')}` : '';\n\n switch (firstIssue.code) {\n case 'invalid_type':\n return `Expected ${displayList(\n 'or',\n firstIssue.expected,\n )}${atPath}. Got ${toDisplayAtPath(v, path)}`;\n case 'missing_value': {\n const atPath =\n path && path.length > 1 ? ` at ${path.slice(0, -1).join('.')}` : '';\n\n if (firstIssue.path?.length) {\n return `Missing property ${firstIssue.path.at(-1)}${atPath}`;\n }\n return `TODO Unknown missing property${atPath}`;\n }\n\n case 'invalid_literal':\n return `Expected literal value ${displayList(\n 'or',\n firstIssue.expected,\n toDisplay,\n )}${atPath} Got ${toDisplayAtPath(v, path)}`;\n\n case 'invalid_length': {\n return `Expected array with length ${\n firstIssue.minLength === firstIssue.maxLength\n ? firstIssue.minLength\n : `between ${firstIssue.minLength} and ${firstIssue.maxLength}`\n }${atPath}. Got array with length ${(v as {length: number}).length}`;\n }\n\n case 'unrecognized_keys':\n if (firstIssue.keys.length === 1) {\n return `Unexpected property ${firstIssue.keys[0]}${atPath}`;\n }\n return `Unexpected properties ${displayList(\n 'and',\n firstIssue.keys,\n )}${atPath}`;\n\n case 'invalid_union':\n return schema.name === 'union'\n ? getDeepestUnionParseError(v, schema as v.UnionType, mode ?? 'strict')\n : `Invalid union value${atPath}`;\n\n case 'custom_error': {\n const {error} = firstIssue;\n const message = !error\n ? 'unknown'\n : typeof error === 'string'\n ? error\n : (error.message ?? 'unknown');\n return `${message}${atPath}. 
Got ${toDisplayAtPath(v, path)}`;\n }\n }\n}\n\ntype FailedType = {type: v.Type; err: v.Err};\n\nfunction getDeepestUnionParseError(\n value: unknown,\n schema: v.UnionType,\n mode: ParseOptionsMode,\n): string {\n const failures: FailedType[] = [];\n for (const type of schema.options) {\n const r = type.try(value, {mode});\n if (!r.ok) {\n failures.push({type, err: r});\n }\n }\n if (failures.length) {\n // compare the first and second longest-path errors\n failures.sort(pathCmp);\n if (failures.length === 1 || pathCmp(failures[0], failures[1]) < 0) {\n return getMessage(failures[0].err, value, failures[0].type, mode);\n }\n }\n // paths are equivalent\n try {\n const str = JSON.stringify(value);\n return `Invalid union value: ${str}`;\n } catch {\n // fallback if the value could not be stringified\n return `Invalid union value`;\n }\n}\n\n// Descending-order comparison of Issue paths.\n// * [1, 'a'] sorts before [1]\n// * [1] sorts before [0] (i.e. errors later in the tuple sort before earlier errors)\nfunction pathCmp(a: FailedType, b: FailedType) {\n const aPath = a.err.issues[0].path;\n const bPath = b.err.issues[0].path;\n if (aPath.length !== bPath.length) {\n return bPath.length - aPath.length;\n }\n for (let i = 0; i < aPath.length; i++) {\n if (bPath[i] > aPath[i]) {\n return -1;\n }\n if (bPath[i] < aPath[i]) {\n return 1;\n }\n }\n return 0;\n}\n\n/**\n * 'strip' allows unknown properties and removes unknown properties.\n * 'strict' errors if there are unknown properties.\n * 'passthrough' allows unknown properties.\n */\nexport type ParseOptionsMode = 'passthrough' | 'strict' | 'strip';\n\nexport function parse<T>(\n value: unknown,\n schema: v.Type<T>,\n mode?: ParseOptionsMode,\n): T {\n const res = test(value, schema, mode);\n if (!res.ok) {\n throw new TypeError(res.error);\n }\n return res.value;\n}\n\nexport function is<T>(\n value: unknown,\n schema: v.Type<T>,\n mode?: ParseOptionsMode,\n): value is T {\n return test(value, schema, mode).ok;\n}\n\nexport function assert<T>(\n value: unknown,\n schema: v.Type<T>,\n mode?: ParseOptionsMode,\n): asserts value is T {\n parse(value, schema, mode);\n}\n\ntype Result<T> = {ok: true; value: T} | {ok: false; error: string};\n\nexport function test<T>(\n value: unknown,\n schema: v.Type<T>,\n mode?: ParseOptionsMode,\n): Result<T> {\n const res = schema.try(value, mode ? {mode} : undefined);\n if (!res.ok) {\n return {\n ok: false,\n error: getMessage(res, value, schema, mode),\n };\n }\n return res;\n}\n\n/**\n * Similar to {@link test} but works for AbstractTypes such as Optional.\n * This is for advanced usage. 
Prefer {@link test} unless you really need\n * to operate directly on an Optional field.\n */\nexport function testOptional<T>(\n value: unknown,\n schema: v.Type<T> | v.Optional<T>,\n mode?: ParseOptionsMode,\n): Result<T | undefined> {\n let flags = 0x1; // FLAG_FORBID_EXTRA_KEYS;\n if (mode === 'passthrough') {\n flags = 0;\n } else if (mode === 'strip') {\n flags = 0x2; // FLAG_STRIP_EXTRA_KEYS;\n }\n const res = schema.func(value, flags);\n if (res === undefined) {\n return {ok: true, value} as Result<T>;\n } else if (res.ok) {\n return res;\n }\n const err = new v.ValitaError(res);\n return {ok: false, error: getMessage(err, value, schema, mode)};\n}\n\n/**\n * Shallowly marks the schema as readonly.\n */\nexport function readonly<T extends v.Type>(t: T): v.Type<Readonly<v.Infer<T>>> {\n return t as v.Type<Readonly<v.Infer<T>>>;\n}\n\nexport function readonlyObject<T extends Record<string, v.Type | v.Optional>>(\n t: T,\n): v.ObjectType<Readonly<T>, undefined> {\n return v.object(t);\n}\n\nexport function readonlyArray<T extends v.Type>(\n t: T,\n): v.Type<readonly v.Infer<T>[]> {\n return v.array(t);\n}\n\nexport function readonlyRecord<T extends v.Type>(\n t: T,\n): v.Type<Readonly<Record<string, v.Infer<T>>>> {\n return v.record(t);\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nconst AbstractType = Object.getPrototypeOf(\n Object.getPrototypeOf(v.string().optional()),\n).constructor;\n\nexport function instanceOfAbstractType<T = unknown>(\n obj: unknown,\n): obj is v.Type<T> | v.Optional<T> {\n return obj instanceof AbstractType;\n}\n\ntype ObjectShape = Record<string, typeof AbstractType>;\n\n/**\n * Similar to `ObjectType.partial()` except it recurses into nested objects.\n * Rest types are not supported.\n */\nexport function deepPartial<Shape extends ObjectShape>(\n s: v.ObjectType<Shape, undefined>,\n) {\n const shape = {} as Record<string, unknown>;\n for (const [key, type] of Object.entries(s.shape)) {\n if (type.name === 'object') {\n shape[key] = deepPartial(type as v.ObjectType).optional();\n } else {\n shape[key] = type.optional();\n }\n }\n return v.object(shape as {[K in keyof Shape]: v.Optional<v.Infer<Shape[K]>>});\n}\n\ntype Literal = string | number | bigint | boolean;\n\nexport function literalUnion<T extends [...Literal[]]>(\n ...literals: T\n): v.Type<T[number]> {\n return v.union(...literals.map(v.literal));\n}\n", "import {assertObject, throwInvalidType} from './asserts.ts';\nimport {skipAssertJSONValue} from './config.ts';\nimport {hasOwn} from './has-own.ts';\n\n/** The values that can be represented in JSON */\nexport type JSONValue =\n | null\n | string\n | boolean\n | number\n | Array<JSONValue>\n | JSONObject;\n\n/**\n * A JSON object. This is a map from strings to JSON values or `undefined`. We\n * allow `undefined` values as a convenience... but beware that the `undefined`\n * values do not round trip to the server. 
For example:\n *\n * ```\n * // Time t1\n * await tx.set('a', {a: undefined});\n *\n * // time passes, in a new transaction\n * const v = await tx.get('a');\n * console.log(v); // either {a: undefined} or {}\n * ```\n */\nexport type JSONObject = {[key: string]: JSONValue | undefined};\n\n/** Like {@link JSONValue} but deeply readonly */\nexport type ReadonlyJSONValue =\n | null\n | string\n | boolean\n | number\n | ReadonlyArray<ReadonlyJSONValue>\n | ReadonlyJSONObject;\n\n/** Like {@link JSONObject} but deeply readonly */\nexport type ReadonlyJSONObject = {\n readonly [key: string]: ReadonlyJSONValue | undefined;\n};\n\n/**\n * Checks deep equality of two JSON value with (almost) same semantics as\n * `JSON.stringify`. The only difference is that with `JSON.stringify` the\n * ordering of the properties in an object/map/dictionary matters. In\n * {@link deepEqual} the following two values are consider equal, even though the\n * strings JSON.stringify would produce is different:\n *\n * ```js\n * assert(deepEqual(t({a: 1, b: 2}, {b: 2, a: 1}))\n * ```\n */\nexport function deepEqual(\n a: ReadonlyJSONValue | undefined,\n b: ReadonlyJSONValue | undefined,\n): boolean {\n if (a === b) {\n return true;\n }\n\n if (typeof a !== typeof b) {\n return false;\n }\n\n switch (typeof a) {\n case 'boolean':\n case 'number':\n case 'string':\n return false;\n }\n\n // a cannot be undefined here because either a and b are undefined or their\n // types are different.\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n a = a!;\n\n // 'object'\n if (Array.isArray(a)) {\n if (!Array.isArray(b)) {\n return false;\n }\n if (a.length !== b.length) {\n return false;\n }\n for (let i = 0; i < a.length; i++) {\n if (!deepEqual(a[i], b[i])) {\n return false;\n }\n }\n return true;\n }\n\n if (a === null || b === null) {\n return false;\n }\n\n if (Array.isArray(b)) {\n return false;\n }\n\n // We know a and b are objects here but type inference is not smart enough.\n a = a as ReadonlyJSONObject;\n b = b as ReadonlyJSONObject;\n\n // We use for-in loops instead of for of Object.keys() to make sure deepEquals\n // does not allocate any objects.\n\n let aSize = 0;\n for (const key in a) {\n if (hasOwn(a, key)) {\n if (!deepEqual(a[key], b[key])) {\n return false;\n }\n aSize++;\n }\n }\n\n let bSize = 0;\n for (const key in b) {\n if (hasOwn(b, key)) {\n bSize++;\n }\n }\n\n return aSize === bSize;\n}\n\nexport function assertJSONValue(v: unknown): asserts v is JSONValue {\n if (skipAssertJSONValue) {\n return;\n }\n switch (typeof v) {\n case 'boolean':\n case 'number':\n case 'string':\n return;\n case 'object':\n if (v === null) {\n return;\n }\n if (Array.isArray(v)) {\n return assertJSONArray(v);\n }\n return assertObjectIsJSONObject(v as Record<string, unknown>);\n }\n throwInvalidType(v, 'JSON value');\n}\n\nexport function assertJSONObject(v: unknown): asserts v is JSONObject {\n assertObject(v);\n assertObjectIsJSONObject(v);\n}\n\nfunction assertObjectIsJSONObject(\n v: Record<string, unknown>,\n): asserts v is JSONObject {\n for (const k in v) {\n if (hasOwn(v, k)) {\n const value = v[k];\n if (value !== undefined) {\n assertJSONValue(value);\n }\n }\n }\n}\n\nfunction assertJSONArray(v: unknown[]): asserts v is JSONValue[] {\n for (const item of v) {\n assertJSONValue(item);\n }\n}\n\ninterface Path {\n push(key: string | number): void;\n pop(): void;\n}\n\n/**\n * Checks if a value is a JSON value. 
If there is a value that is not a JSON\n * value, the path parameter is updated to the path of the invalid value.\n */\nexport function isJSONValue(v: unknown, path: Path): v is JSONValue {\n switch (typeof v) {\n case 'boolean':\n case 'number':\n case 'string':\n return true;\n case 'object':\n if (v === null) {\n return true;\n }\n if (Array.isArray(v)) {\n return isJSONArray(v, path);\n }\n return objectIsJSONObject(v as Record<string, unknown>, path);\n }\n return false;\n}\n\nexport function isJSONObject(v: unknown, path: Path): v is JSONObject {\n if (typeof v !== 'object' || v === null) {\n return false;\n }\n return objectIsJSONObject(v as Record<string, unknown>, path);\n}\n\nfunction objectIsJSONObject(\n v: Record<string, unknown>,\n path: Path,\n): v is JSONObject {\n for (const k in v) {\n if (hasOwn(v, k)) {\n path.push(k);\n const value = v[k];\n if (value !== undefined && !isJSONValue(value, path)) {\n return false;\n }\n path.pop();\n }\n }\n return true;\n}\n\nfunction isJSONArray(v: unknown[], path: Path): v is JSONValue[] {\n for (let i = 0; i < v.length; i++) {\n path.push(i);\n if (!isJSONValue(v[i], path)) {\n return false;\n }\n path.pop();\n }\n return true;\n}\n\n/** Basic deep readonly type. It works for {@link JSONValue} types. */\nexport type DeepReadonly<T> = T extends\n | null\n | boolean\n | string\n | number\n | undefined\n ? T\n : {readonly [K in keyof T]: DeepReadonly<T[K]>};\n", "export function randomUint64(): bigint {\n // Generate two random 32-bit unsigned integers using Math.random()\n const high = Math.floor(Math.random() * 0xffffffff); // High 32 bits\n const low = Math.floor(Math.random() * 0xffffffff); // Low 32 bits\n\n // Combine the high and low parts to form a 64-bit unsigned integer\n return (BigInt(high) << 32n) | BigInt(low);\n}\n", "import {assert} from '../../shared/src/asserts.ts';\nimport {randomUint64} from '../../shared/src/random-uint64.ts';\nimport * as valita from '../../shared/src/valita.ts';\n\nexport const STRING_LENGTH = 22;\n\n// We use an opaque type so that we can make sure that a hash is always a hash.\n// TypeScript does not have direct support but we can use a trick described\n// here:\n//\n// https://evertpot.com/opaque-ts-types/\n//\n// The basic idea is to declare a type that cannot be created. We then use\n// functions that cast a string to this type.\n//\n\n// By using declare we tell the type system that there is a unique symbol.\n// However, there is no such symbol but the type system does not care.\ndeclare const hashTag: unique symbol;\n\n/**\n * Opaque type representing a hash. The only way to create one is using `parse`\n * or `hashOf` (except for static unsafe cast of course).\n */\nexport type Hash = string & {[hashTag]: true};\n\n// We are no longer using hashes but due to legacy reason we still refer to\n// them as hashes. 
We use UUID and counters instead.\nconst hashRe = /^[0-9a-v-]+$/;\n\nexport function parse(s: string): Hash {\n assertHash(s);\n return s;\n}\n\nconst emptyUUID = '0'.repeat(STRING_LENGTH);\nexport const emptyHash = emptyUUID as unknown as Hash;\n\n/**\n * Creates a function that generates random hashes.\n */\nexport const newRandomHash = makeNewRandomHashFunctionInternal();\n\n/**\n * Creates a function that generates UUID hashes for tests.\n */\nexport function makeNewFakeHashFunction(hashPrefix = 'fake'): () => Hash {\n assert(\n /^[0-9a-v]{0,8}$/.test(hashPrefix),\n `Invalid hash prefix: ${hashPrefix}`,\n );\n let i = 0;\n return () => {\n const count = String(i++);\n return (hashPrefix +\n '0'.repeat(STRING_LENGTH - hashPrefix.length - count.length) +\n count) as Hash;\n };\n}\n\nfunction toStringAndSlice(n: number | bigint, len: number): string {\n return n.toString(32).slice(-len).padStart(len, '0');\n}\n\n/**\n * This creates an ID that looks like `<RANDOM><COUNTER>`. The random part is\n * a random number encoded with base 32 and the length is 12 characters. The\n * is 10 characters long and encoded as base 32. The total length is 22 characters.\n *\n * Do the math: https://devina.io/collision-calculator\n */\nfunction makeNewRandomHashFunctionInternal(): () => Hash {\n let base = '';\n let i = 0;\n\n return () => {\n if (!base) {\n // This needs to be lazy because the cloudflare worker environment will\n // throw an error if crypto.getRandomValues is used statically. Specifically:\n // Error: Some functionality, such as asynchronous I/O, timeouts, and\n // generating random values, can only be performed while handling a\n // request.\n base = toStringAndSlice(randomUint64(), 12);\n }\n const tail = toStringAndSlice(i++, 10);\n return (base + tail) as Hash;\n };\n}\n\n/**\n * Generates a fake hash useful for testing.\n */\nexport function fakeHash(word: string | number): Hash {\n if (typeof word === 'number') {\n word = String(word);\n }\n return ('fake' + '0'.repeat(STRING_LENGTH - 4 - word.length) + word) as Hash;\n}\n\nexport function isHash(value: unknown): value is Hash {\n return typeof value === 'string' && hashRe.test(value);\n}\n\nexport function assertHash(value: unknown): asserts value is Hash {\n valita.assert(value, hashSchema);\n}\n\nexport const hashSchema = valita.string().assert(isHash, 'Invalid hash');\n", "import {hasOwn} from '../../shared/src/has-own.ts';\nimport type {ReadonlyJSONObject} from '../../shared/src/json.ts';\n\nconst SIZE_TAG = 1;\nconst SIZE_INT32 = 4;\nconst SIZE_SMI = 5;\nconst SIZE_DOUBLE = 8;\n\n/**\n * Gives a size of a value. The size is modelled after the size used by\n * Chromium/V8's structuredClone algorithm. It does not match exactly so the\n * size is just an approximation.\n * https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/value-serializer.cc;l=102;drc=f0b6f7d12ea47ad7c08fb554f678c1e73801ca36;bpv=1;bpt=1\n * For example we follow JSC/Mozilla for ints and skip the varint encoding.\n *\n * Mozilla does things similarly. 
Main difference is that there is no varint\n * encoding and every value uses multiples of 64bits\n * https://searchfox.org/mozilla-central/source/js/src/vm/StructuredClone.cpp#94\n *\n * And JSC:\n * https://github.com/WebKit/WebKit/blob/main/Source/WebCore/bindings/js/SerializedScriptValue.cpp#L356\n * - Use 1 byte tag\n * - Numbers are either stored as Int32 or Float64\n */\nexport function getSizeOfValue(value: unknown): number {\n switch (typeof value) {\n case 'string':\n // Assumes all strings are one byte strings. V8 writes OneByteString and\n // TwoByteString. We could check the string but it would require iterating\n // over all the characters.\n return SIZE_TAG + SIZE_INT32 + value.length;\n case 'number':\n if (isSmi(value)) {\n if (value <= -(2 ** 30) || value >= 2 ** 30 - 1) {\n return SIZE_TAG + SIZE_SMI;\n }\n return SIZE_TAG + SIZE_INT32;\n }\n return SIZE_TAG + SIZE_DOUBLE;\n case 'boolean':\n return SIZE_TAG;\n case 'object':\n if (value === null) {\n return SIZE_TAG;\n }\n\n if (Array.isArray(value)) {\n let sum = 2 * SIZE_TAG + SIZE_INT32;\n for (const element of value) {\n sum += getSizeOfValue(element);\n }\n return sum;\n }\n\n {\n const val = value as ReadonlyJSONObject;\n let sum: number = 2 * SIZE_TAG + SIZE_INT32;\n for (const k in val) {\n if (hasOwn(val, k)) {\n // Skip undefined values. undefined values in an object gets\n // stripped if we round trip through JSON.stringif which is what we\n // use when syncing.\n const propertyValue = val[k];\n if (propertyValue !== undefined) {\n sum += getSizeOfValue(k) + getSizeOfValue(propertyValue);\n }\n }\n }\n return sum;\n }\n }\n\n throw new Error(`Invalid value. type: ${typeof value}, value: ${value}`);\n}\n\nfunction isSmi(value: number): boolean {\n return value === (value | 0);\n}\n\nconst entryFixed = 2 * SIZE_TAG + SIZE_INT32 + SIZE_TAG + SIZE_INT32;\n\nexport function getSizeOfEntry<K, V>(key: K, value: V): number {\n // Entries are stored as [key, value, sizeOfEntry]\n return entryFixed + getSizeOfValue(key) + getSizeOfValue(value);\n}\n", "import {compareUTF8} from 'compare-utf8';\nimport {\n assert,\n assertArray,\n assertNumber,\n assertString,\n} from '../../../shared/src/asserts.ts';\nimport {binarySearch as binarySearchWithFunc} from '../../../shared/src/binary-search.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {joinIterables} from '../../../shared/src/iterables.ts';\nimport {\n type JSONValue,\n type ReadonlyJSONValue,\n assertJSONValue,\n} from '../../../shared/src/json.ts';\nimport {skipBTreeNodeAsserts} from '../config.ts';\nimport type {IndexKey} from '../db/index.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport {\n type FrozenJSONValue,\n type FrozenTag,\n assertDeepFrozen,\n deepFreeze,\n} from '../frozen-json.ts';\nimport {type Hash, emptyHash, newRandomHash} from '../hash.ts';\nimport type {BTreeRead} from './read.ts';\nimport type {BTreeWrite} from './write.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport type Entry<V> = readonly [key: string, value: V, sizeOfEntry: number];\n\nexport const NODE_LEVEL = 0;\nexport const NODE_ENTRIES = 1;\n\n/**\n * The type of B+Tree node chunk data\n */\ntype BaseNode<V> = FrozenTag<\n readonly [level: number, entries: ReadonlyArray<Entry<V>>]\n>;\nexport type InternalNode = BaseNode<Hash>;\n\nexport type DataNode = BaseNode<FrozenJSONValue>;\n\nexport function makeNodeChunkData<V>(\n level: number,\n entries: ReadonlyArray<Entry<V>>,\n formatVersion: FormatVersion,\n): BaseNode<V> {\n 
return deepFreeze([\n level,\n (formatVersion >= FormatVersion.V7\n ? entries\n : entries.map(e => e.slice(0, 2))) as readonly ReadonlyJSONValue[],\n ]) as BaseNode<V>;\n}\n\nexport type Node = DataNode | InternalNode;\n\n/**\n * Describes the changes that happened to Replicache after a\n * {@link WriteTransaction} was committed.\n *\n * @experimental This type is experimental and may change in the future.\n */\nexport type Diff = IndexDiff | NoIndexDiff;\n\n/**\n * @experimental This type is experimental and may change in the future.\n */\nexport type IndexDiff = readonly DiffOperation<IndexKey>[];\n\n/**\n * @experimental This type is experimental and may change in the future.\n */\nexport type NoIndexDiff = readonly DiffOperation<string>[];\n\n/**\n * InternalDiff uses string keys even for the secondary index maps.\n */\nexport type InternalDiff = readonly InternalDiffOperation[];\n\nexport type DiffOperationAdd<Key, Value = ReadonlyJSONValue> = {\n readonly op: 'add';\n readonly key: Key;\n readonly newValue: Value;\n};\n\nexport type DiffOperationDel<Key, Value = ReadonlyJSONValue> = {\n readonly op: 'del';\n readonly key: Key;\n readonly oldValue: Value;\n};\n\nexport type DiffOperationChange<Key, Value = ReadonlyJSONValue> = {\n readonly op: 'change';\n readonly key: Key;\n readonly oldValue: Value;\n readonly newValue: Value;\n};\n\n/**\n * The individual parts describing the changes that happened to the Replicache\n * data. There are three different kinds of operations:\n * - `add`: A new entry was added.\n * - `del`: An entry was deleted.\n * - `change`: An entry was changed.\n *\n * @experimental This type is experimental and may change in the future.\n */\nexport type DiffOperation<Key> =\n | DiffOperationAdd<Key>\n | DiffOperationDel<Key>\n | DiffOperationChange<Key>;\n\n// Duplicated with DiffOperation to make the docs less confusing.\nexport type InternalDiffOperation<Key = string, Value = FrozenJSONValue> =\n | DiffOperationAdd<Key, Value>\n | DiffOperationDel<Key, Value>\n | DiffOperationChange<Key, Value>;\n\n/**\n * Finds the leaf where a key is (if present) or where it should go if not\n * present.\n */\nexport async function findLeaf(\n key: string,\n hash: Hash,\n source: BTreeRead,\n expectedRootHash: Hash,\n): Promise<DataNodeImpl> {\n const node = await source.getNode(hash);\n // The root changed. 
Try again\n if (expectedRootHash !== source.rootHash) {\n return findLeaf(key, source.rootHash, source, source.rootHash);\n }\n if (isDataNodeImpl(node)) {\n return node;\n }\n const {entries} = node;\n let i = binarySearch(key, entries);\n if (i === entries.length) {\n i--;\n }\n const entry = entries[i];\n return findLeaf(key, entry[1], source, expectedRootHash);\n}\n\ntype BinarySearchEntries = readonly Entry<unknown>[];\n\n/**\n * Does a binary search over entries\n *\n * If the key found then the return value is the index it was found at.\n *\n * If the key was *not* found then the return value is the index where it should\n * be inserted at\n */\nexport function binarySearch(\n key: string,\n entries: BinarySearchEntries,\n): number {\n return binarySearchWithFunc(entries.length, i =>\n compareUTF8(key, entries[i][0]),\n );\n}\n\nexport function binarySearchFound(\n i: number,\n entries: BinarySearchEntries,\n key: string,\n): boolean {\n return i !== entries.length && entries[i][0] === key;\n}\n\nexport function parseBTreeNode(\n v: unknown,\n formatVersion: FormatVersion,\n getSizeOfEntry: <K, V>(key: K, value: V) => number,\n): InternalNode | DataNode {\n if (skipBTreeNodeAsserts && formatVersion >= FormatVersion.V7) {\n return v as InternalNode | DataNode;\n }\n\n assertArray(v);\n assertDeepFrozen(v);\n // Be relaxed about what we accept.\n assert(v.length >= 2);\n const [level, entries] = v;\n assertNumber(level);\n assertArray(entries);\n\n const f = level > 0 ? assertString : assertJSONValue;\n\n // For V7 we do not need to change the entries. Just assert that they are correct.\n if (formatVersion >= FormatVersion.V7) {\n for (const e of entries) {\n assertEntry(e, f);\n }\n return v as unknown as InternalNode | DataNode;\n }\n\n const newEntries = entries.map(e => convertNonV7Entry(e, f, getSizeOfEntry));\n return [level, newEntries] as unknown as InternalNode | DataNode;\n}\n\nfunction assertEntry(\n entry: unknown,\n f:\n | ((v: unknown) => asserts v is Hash)\n | ((v: unknown) => asserts v is JSONValue),\n): asserts entry is Entry<Hash | JSONValue> {\n assertArray(entry);\n // Be relaxed about what we accept.\n assert(entry.length >= 3);\n assertString(entry[0]);\n f(entry[1]);\n assertNumber(entry[2]);\n}\n\n/**\n * Converts an entry that was from a format version before V7 to the format\n * wanted by V7.\n */\nfunction convertNonV7Entry(\n entry: unknown,\n f:\n | ((v: unknown) => asserts v is Hash)\n | ((v: unknown) => asserts v is JSONValue),\n getSizeOfEntry: <K, V>(key: K, value: V) => number,\n): Entry<Hash | JSONValue> {\n assertArray(entry);\n assert(entry.length >= 2);\n assertString(entry[0]);\n f(entry[1]);\n const entrySize = getSizeOfEntry(entry[0], entry[1]);\n return [entry[0], entry[1], entrySize] as Entry<Hash | JSONValue>;\n}\n\nexport function isInternalNode(node: Node): node is InternalNode {\n return node[NODE_LEVEL] > 0;\n}\n\nabstract class NodeImpl<Value> {\n entries: Array<Entry<Value>>;\n hash: Hash;\n abstract readonly level: number;\n readonly isMutable: boolean;\n\n #childNodeSize = -1;\n\n constructor(entries: Array<Entry<Value>>, hash: Hash, isMutable: boolean) {\n this.entries = entries;\n this.hash = hash;\n this.isMutable = isMutable;\n }\n\n abstract set(\n key: string,\n value: FrozenJSONValue,\n entrySize: number,\n tree: BTreeWrite,\n ): Promise<NodeImpl<Value>>;\n\n abstract del(\n key: string,\n tree: BTreeWrite,\n ): Promise<NodeImpl<Value> | DataNodeImpl>;\n\n maxKey(): string {\n return this.entries[this.entries.length - 1][0];\n 
}\n\n getChildNodeSize(tree: BTreeRead): number {\n if (this.#childNodeSize !== -1) {\n return this.#childNodeSize;\n }\n\n let sum = tree.chunkHeaderSize;\n for (const entry of this.entries) {\n sum += entry[2];\n }\n return (this.#childNodeSize = sum);\n }\n\n protected _updateNode(tree: BTreeWrite) {\n this.#childNodeSize = -1;\n tree.updateNode(\n this as NodeImpl<unknown> as DataNodeImpl | InternalNodeImpl,\n );\n }\n}\n\nexport function toChunkData<V>(\n node: NodeImpl<V>,\n formatVersion: FormatVersion,\n): BaseNode<V> {\n return makeNodeChunkData(node.level, node.entries, formatVersion);\n}\n\nexport class DataNodeImpl extends NodeImpl<FrozenJSONValue> {\n readonly level = 0;\n\n set(\n key: string,\n value: FrozenJSONValue,\n entrySize: number,\n tree: BTreeWrite,\n ): Promise<DataNodeImpl> {\n let deleteCount: number;\n const i = binarySearch(key, this.entries);\n if (!binarySearchFound(i, this.entries, key)) {\n // Not found, insert.\n deleteCount = 0;\n } else {\n deleteCount = 1;\n }\n\n return Promise.resolve(\n this.#splice(tree, i, deleteCount, [key, value, entrySize]),\n );\n }\n\n #splice(\n tree: BTreeWrite,\n start: number,\n deleteCount: number,\n ...items: Entry<FrozenJSONValue>[]\n ): DataNodeImpl {\n if (this.isMutable) {\n this.entries.splice(start, deleteCount, ...items);\n this._updateNode(tree);\n return this;\n }\n\n const entries = readonlySplice(this.entries, start, deleteCount, ...items);\n return tree.newDataNodeImpl(entries);\n }\n\n del(key: string, tree: BTreeWrite): Promise<DataNodeImpl> {\n const i = binarySearch(key, this.entries);\n if (!binarySearchFound(i, this.entries, key)) {\n // Not found. Return this without changes.\n return Promise.resolve(this);\n }\n\n // Found. Create new node or mutate existing one.\n return Promise.resolve(this.#splice(tree, i, 1));\n }\n\n async *keys(_tree: BTreeRead): AsyncGenerator<string, void> {\n for (const entry of this.entries) {\n yield entry[0];\n }\n }\n\n async *entriesIter(\n _tree: BTreeRead,\n ): AsyncGenerator<Entry<FrozenJSONValue>, void> {\n for (const entry of this.entries) {\n yield entry;\n }\n }\n}\n\nfunction readonlySplice<T>(\n array: ReadonlyArray<T>,\n start: number,\n deleteCount: number,\n ...items: T[]\n): T[] {\n const arr = array.slice(0, start);\n for (let i = 0; i < items.length; i++) {\n arr.push(items[i]);\n }\n for (let i = start + deleteCount; i < array.length; i++) {\n arr.push(array[i]);\n }\n return arr;\n}\n\nexport class InternalNodeImpl extends NodeImpl<Hash> {\n readonly level: number;\n\n constructor(\n entries: Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n ) {\n super(entries, hash, isMutable);\n this.level = level;\n }\n\n async set(\n key: string,\n value: FrozenJSONValue,\n entrySize: number,\n tree: BTreeWrite,\n ): Promise<InternalNodeImpl> {\n let i = binarySearch(key, this.entries);\n if (i === this.entries.length) {\n // We are going to insert into last (right most) leaf.\n i--;\n }\n\n const childHash = this.entries[i][1];\n const oldChildNode = await tree.getNode(childHash);\n\n const childNode = await oldChildNode.set(key, value, entrySize, tree);\n\n const childNodeSize = childNode.getChildNodeSize(tree);\n if (childNodeSize > tree.maxSize || childNodeSize < tree.minSize) {\n return this.#mergeAndPartition(tree, i, childNode);\n }\n\n const newEntry = createNewInternalEntryForNode(\n childNode,\n tree.getEntrySize,\n );\n return this.#replaceChild(tree, i, newEntry);\n }\n\n /**\n * This merges the child node entries with previous 
or next sibling and then\n * partitions the merged entries.\n */\n async #mergeAndPartition(\n tree: BTreeWrite,\n i: number,\n childNode: DataNodeImpl | InternalNodeImpl,\n ): Promise<InternalNodeImpl> {\n const level = this.level - 1;\n const thisEntries = this.entries;\n\n type IterableHashEntries = Iterable<Entry<Hash>>;\n\n let values: IterableHashEntries;\n let startIndex: number;\n let removeCount: number;\n if (i > 0) {\n const hash = thisEntries[i - 1][1];\n const previousSibling = await tree.getNode(hash);\n values = joinIterables(\n previousSibling.entries as IterableHashEntries,\n childNode.entries as IterableHashEntries,\n );\n startIndex = i - 1;\n removeCount = 2;\n } else if (i < thisEntries.length - 1) {\n const hash = thisEntries[i + 1][1];\n const nextSibling = await tree.getNode(hash);\n values = joinIterables(\n childNode.entries as IterableHashEntries,\n nextSibling.entries as IterableHashEntries,\n );\n startIndex = i;\n removeCount = 2;\n } else {\n values = childNode.entries as IterableHashEntries;\n startIndex = i;\n removeCount = 1;\n }\n\n const partitions = partition(\n values,\n value => value[2],\n tree.minSize - tree.chunkHeaderSize,\n tree.maxSize - tree.chunkHeaderSize,\n );\n\n // TODO: There are cases where we can reuse the old nodes. Creating new ones\n // means more memory churn but also more writes to the underlying KV store.\n const newEntries: Entry<Hash>[] = [];\n for (const entries of partitions) {\n const node = tree.newNodeImpl(entries, level);\n const newHashEntry = createNewInternalEntryForNode(\n node,\n tree.getEntrySize,\n );\n newEntries.push(newHashEntry);\n }\n\n if (this.isMutable) {\n this.entries.splice(startIndex, removeCount, ...newEntries);\n this._updateNode(tree);\n return this;\n }\n\n const entries = readonlySplice(\n thisEntries,\n startIndex,\n removeCount,\n ...newEntries,\n );\n\n return tree.newInternalNodeImpl(entries, this.level);\n }\n\n #replaceChild(\n tree: BTreeWrite,\n index: number,\n newEntry: Entry<Hash>,\n ): InternalNodeImpl {\n if (this.isMutable) {\n this.entries.splice(index, 1, newEntry);\n this._updateNode(tree);\n return this;\n }\n const entries = readonlySplice(this.entries, index, 1, newEntry);\n return tree.newInternalNodeImpl(entries, this.level);\n }\n\n async del(\n key: string,\n tree: BTreeWrite,\n ): Promise<InternalNodeImpl | DataNodeImpl> {\n const i = binarySearch(key, this.entries);\n if (i === this.entries.length) {\n // Key is larger than maxKey of rightmost entry so it is not present.\n return this;\n }\n\n const childHash = this.entries[i][1];\n const oldChildNode = await tree.getNode(childHash);\n const oldHash = oldChildNode.hash;\n\n const childNode = await oldChildNode.del(key, tree);\n if (childNode.hash === oldHash) {\n // Not changed so not found.\n return this;\n }\n\n if (childNode.entries.length === 0) {\n // Subtree is now empty. Remove internal node.\n const entries = readonlySplice(this.entries, i, 1);\n return tree.newInternalNodeImpl(entries, this.level);\n }\n\n if (i === 0 && this.entries.length === 1) {\n // There was only one node at this level and it was removed. 
We can return\n // the modified subtree.\n return childNode;\n }\n\n // The child node is still a good size.\n if (childNode.getChildNodeSize(tree) > tree.minSize) {\n // No merging needed.\n const entry = createNewInternalEntryForNode(childNode, tree.getEntrySize);\n return this.#replaceChild(tree, i, entry);\n }\n\n // Child node size is too small.\n return this.#mergeAndPartition(tree, i, childNode);\n }\n\n async *keys(tree: BTreeRead): AsyncGenerator<string, void> {\n for (const entry of this.entries) {\n const childNode = await tree.getNode(entry[1]);\n yield* childNode.keys(tree);\n }\n }\n\n async *entriesIter(\n tree: BTreeRead,\n ): AsyncGenerator<Entry<FrozenJSONValue>, void> {\n for (const entry of this.entries) {\n const childNode = await tree.getNode(entry[1]);\n yield* childNode.entriesIter(tree);\n }\n }\n\n getChildren(\n start: number,\n length: number,\n tree: BTreeRead,\n ): Promise<Array<InternalNodeImpl | DataNodeImpl>> {\n const ps: Promise<DataNodeImpl | InternalNodeImpl>[] = [];\n for (let i = start; i < length && i < this.entries.length; i++) {\n ps.push(tree.getNode(this.entries[i][1]));\n }\n return Promise.all(ps);\n }\n\n async getCompositeChildren(\n start: number,\n length: number,\n tree: BTreeRead,\n ): Promise<InternalNodeImpl | DataNodeImpl> {\n const {level} = this;\n\n if (length === 0) {\n return new InternalNodeImpl([], newRandomHash(), level - 1, true);\n }\n\n const output = await this.getChildren(start, start + length, tree);\n\n if (level > 1) {\n const entries: Entry<Hash>[] = [];\n for (const child of output as InternalNodeImpl[]) {\n entries.push(...child.entries);\n }\n return new InternalNodeImpl(entries, newRandomHash(), level - 1, true);\n }\n\n assert(level === 1);\n const entries: Entry<FrozenJSONValue>[] = [];\n for (const child of output as DataNodeImpl[]) {\n entries.push(...child.entries);\n }\n return new DataNodeImpl(entries, newRandomHash(), true);\n }\n}\n\nexport function newNodeImpl(\n entries: Array<Entry<FrozenJSONValue>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): DataNodeImpl;\nexport function newNodeImpl(\n entries: Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): InternalNodeImpl;\nexport function newNodeImpl(\n entries: Array<Entry<FrozenJSONValue>> | Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): DataNodeImpl | InternalNodeImpl;\nexport function newNodeImpl(\n entries: Array<Entry<FrozenJSONValue>> | Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): DataNodeImpl | InternalNodeImpl {\n if (level === 0) {\n return new DataNodeImpl(\n entries as Entry<FrozenJSONValue>[],\n hash,\n isMutable,\n );\n }\n return new InternalNodeImpl(entries as Entry<Hash>[], hash, level, isMutable);\n}\n\nexport function isDataNodeImpl(\n node: DataNodeImpl | InternalNodeImpl,\n): node is DataNodeImpl {\n return node.level === 0;\n}\n\nexport function partition<T>(\n values: Iterable<T>,\n // This is the size of each Entry\n getSizeOfEntry: (v: T) => number,\n min: number,\n max: number,\n): T[][] {\n const partitions: T[][] = [];\n const sizes: number[] = [];\n let sum = 0;\n let accum: T[] = [];\n for (const value of values) {\n const size = getSizeOfEntry(value);\n if (size >= max) {\n if (accum.length > 0) {\n partitions.push(accum);\n sizes.push(sum);\n }\n partitions.push([value]);\n sizes.push(size);\n sum = 0;\n accum = [];\n } else if (sum + size >= min) {\n accum.push(value);\n partitions.push(accum);\n sizes.push(sum + size);\n sum = 
0;\n accum = [];\n } else {\n sum += size;\n accum.push(value);\n }\n }\n\n if (sum > 0) {\n if (sizes.length > 0 && sum + sizes[sizes.length - 1] <= max) {\n partitions[partitions.length - 1].push(...accum);\n } else {\n partitions.push(accum);\n }\n }\n\n return partitions;\n}\n\nexport const emptyDataNode = makeNodeChunkData<ReadonlyJSONValue>(\n 0,\n [],\n FormatVersion.Latest,\n);\nexport const emptyDataNodeImpl = new DataNodeImpl([], emptyHash, false);\n\nexport function createNewInternalEntryForNode(\n node: NodeImpl<unknown>,\n getSizeOfEntry: <K, V>(k: K, v: V) => number,\n): [string, Hash, number] {\n const key = node.maxKey();\n const value = node.hash;\n const size = getSizeOfEntry(key, value);\n return [key, value, size];\n}\n", "/**\n * This is a binary search that returns the index of the first element in the\n * array that is greater than or equal to the given value.\n *\n * Typical usage:\n *\n * ```\n * const haystack = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];\n * const needle = 3;\n * const index = binarySearch(haystack.length, i => needle - haystack[i]);\n * const found = index < haystack.length && haystack[index] === needle;\n * ```\n */\nexport function binarySearch(high: number, compare: (i: number) => number) {\n let low = 0;\n while (low < high) {\n const mid = low + ((high - low) >> 1);\n const i = compare(mid);\n if (i === 0) {\n return mid;\n }\n if (i > 0) {\n low = mid + 1;\n } else {\n high = mid;\n }\n }\n return low;\n}\n", "import {assert} from './asserts.ts';\n\nexport function* joinIterables<T>(...iters: Iterable<T>[]) {\n for (const iter of iters) {\n yield* iter;\n }\n}\n\nfunction* filterIter<T>(\n iter: Iterable<T>,\n p: (t: T, index: number) => boolean,\n): Iterable<T> {\n let index = 0;\n for (const t of iter) {\n if (p(t, index++)) {\n yield t;\n }\n }\n}\n\nfunction* mapIter<T, U>(\n iter: Iterable<T>,\n f: (t: T, index: number) => U,\n): Iterable<U> {\n let index = 0;\n for (const t of iter) {\n yield f(t, index++);\n }\n}\n\nexport function first<T>(stream: Iterable<T>): T | undefined {\n const it = stream[Symbol.iterator]();\n const {value} = it.next();\n it.return?.();\n return value;\n}\n\nexport function* once<T>(stream: Iterable<T>): Iterable<T> {\n const it = stream[Symbol.iterator]();\n const {value} = it.next();\n if (value !== undefined) {\n yield value;\n }\n it.return?.();\n}\n\n// TODO(arv): Use ES2024 Iterable.from when available\n// https://github.com/tc39/proposal-iterator-helpers\n\nclass IterWrapper<T> implements Iterable<T> {\n iter: Iterable<T>;\n constructor(iter: Iterable<T>) {\n this.iter = iter;\n }\n\n [Symbol.iterator]() {\n return this.iter[Symbol.iterator]();\n }\n\n map<U>(f: (t: T, index: number) => U): IterWrapper<U> {\n return new IterWrapper(mapIter(this.iter, f));\n }\n\n filter(p: (t: T, index: number) => boolean): IterWrapper<T> {\n return new IterWrapper(filterIter(this.iter, p));\n }\n}\n\nexport function wrapIterable<T>(iter: Iterable<T>): IterWrapper<T> {\n return new IterWrapper(iter);\n}\n\nexport function* mergeIterables<T>(\n iterables: Iterable<T>[],\n comparator: (l: T, r: T) => number,\n distinct = false,\n): IterableIterator<T> {\n const iterators = iterables.map(i => i[Symbol.iterator]());\n try {\n const current = iterators.map(i => i.next());\n let lastYielded: T | undefined;\n while (current.some(c => !c.done)) {\n const min = current.reduce(\n (acc: [T, number] | undefined, c, i): [T, number] | undefined => {\n if (c.done) {\n return acc;\n }\n if (acc === undefined || comparator(c.value, acc[0]) < 0) 
{\n return [c.value, i];\n }\n return acc;\n },\n undefined,\n );\n\n assert(min !== undefined, 'min is undefined');\n current[min[1]] = iterators[min[1]].next();\n if (\n lastYielded !== undefined &&\n distinct &&\n comparator(lastYielded, min[0]) === 0\n ) {\n continue;\n }\n lastYielded = min[0];\n yield min[0];\n }\n } finally {\n for (const it of iterators) {\n it.return?.();\n }\n }\n}\n", "import {deepEqual, type ReadonlyJSONValue} from '../../../shared/src/json.ts';\n\nexport type Splice = [at: number, removed: number, added: number, from: number];\n\nconst SPLICE_UNASSIGNED = -1;\nexport const SPLICE_AT = 0;\nexport const SPLICE_REMOVED = 1;\nexport const SPLICE_ADDED = 2;\nexport const SPLICE_FROM = 3;\n\nconst KEY = 0;\nconst VALUE = 1;\n\ntype Entry<V> = readonly [key: string, value: V, ...rest: unknown[]];\n\nexport function* computeSplices<T>(\n previous: readonly Entry<T>[],\n current: readonly Entry<T>[],\n): Generator<Splice, void> {\n let previousIndex = 0;\n let currentIndex = 0;\n let splice: Splice | undefined;\n\n function ensureAssigned(splice: Splice, index: number): void {\n if (splice[SPLICE_FROM] === SPLICE_UNASSIGNED) {\n splice[SPLICE_FROM] = index;\n }\n }\n\n function newSplice(): Splice {\n return [previousIndex, 0, 0, SPLICE_UNASSIGNED];\n }\n\n while (previousIndex < previous.length && currentIndex < current.length) {\n if (previous[previousIndex][KEY] === current[currentIndex][KEY]) {\n if (\n deepEqual(\n // These are really Hash | InternalValue\n previous[previousIndex][VALUE] as ReadonlyJSONValue,\n current[currentIndex][VALUE] as ReadonlyJSONValue,\n )\n ) {\n if (splice) {\n ensureAssigned(splice, 0);\n yield splice;\n splice = undefined;\n }\n } else {\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_ADDED]++;\n splice[SPLICE_REMOVED]++;\n ensureAssigned(splice, currentIndex);\n }\n previousIndex++;\n currentIndex++;\n } else if (previous[previousIndex][KEY] < current[currentIndex][KEY]) {\n // previous was removed\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_REMOVED]++;\n\n previousIndex++;\n } else {\n // current was added\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_ADDED]++;\n ensureAssigned(splice, currentIndex);\n\n currentIndex++;\n }\n }\n\n if (currentIndex < current.length) {\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_ADDED] += current.length - currentIndex;\n ensureAssigned(splice, currentIndex);\n }\n\n if (previousIndex < previous.length) {\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_REMOVED] += previous.length - previousIndex;\n }\n\n if (splice) {\n ensureAssigned(splice, 0);\n yield splice;\n }\n}\n", "import type {Enum} from '../../../shared/src/enum.ts';\nimport {deepEqual} from '../../../shared/src/json.ts';\nimport type {Read} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash} from '../hash.ts';\nimport {getSizeOfEntry} from '../size-of-value.ts';\nimport {\n DataNodeImpl,\n type Entry,\n type InternalDiff,\n type InternalDiffOperation,\n InternalNodeImpl,\n NODE_ENTRIES,\n NODE_LEVEL,\n binarySearch,\n binarySearchFound,\n emptyDataNodeImpl,\n findLeaf,\n isDataNodeImpl,\n newNodeImpl,\n parseBTreeNode,\n} from './node.ts';\nimport {\n SPLICE_ADDED,\n SPLICE_AT,\n SPLICE_FROM,\n SPLICE_REMOVED,\n computeSplices,\n} from './splice.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\n/**\n * The size of the header of a node. 
(If we had compile time\n * constants we would have used that).\n *\n * There is a test ensuring this is correct.\n */\nexport const NODE_HEADER_SIZE = 11;\n\nexport class BTreeRead implements AsyncIterable<Entry<FrozenJSONValue>> {\n protected readonly _cache: Map<Hash, DataNodeImpl | InternalNodeImpl> =\n new Map();\n\n protected readonly _dagRead: Read;\n protected readonly _formatVersion: FormatVersion;\n rootHash: Hash;\n readonly getEntrySize: <K, V>(k: K, v: V) => number;\n readonly chunkHeaderSize: number;\n\n constructor(\n dagRead: Read,\n formatVersion: FormatVersion,\n root: Hash = emptyHash,\n getEntrySize: <K, V>(k: K, v: V) => number = getSizeOfEntry,\n chunkHeaderSize = NODE_HEADER_SIZE,\n ) {\n this._dagRead = dagRead;\n this._formatVersion = formatVersion;\n this.rootHash = root;\n this.getEntrySize = getEntrySize;\n this.chunkHeaderSize = chunkHeaderSize;\n }\n\n async getNode(hash: Hash): Promise<DataNodeImpl | InternalNodeImpl> {\n if (hash === emptyHash) {\n return emptyDataNodeImpl;\n }\n\n const cached = this._cache.get(hash);\n if (cached) {\n return cached;\n }\n\n const chunk = await this._dagRead.mustGetChunk(hash);\n const data = parseBTreeNode(\n chunk.data,\n this._formatVersion,\n this.getEntrySize,\n );\n const impl = newNodeImpl(\n data[NODE_ENTRIES] as Entry<FrozenJSONValue>[],\n hash,\n data[NODE_LEVEL],\n false,\n );\n this._cache.set(hash, impl);\n return impl;\n }\n\n async get(key: string): Promise<FrozenJSONValue | undefined> {\n const leaf = await findLeaf(key, this.rootHash, this, this.rootHash);\n const index = binarySearch(key, leaf.entries);\n if (!binarySearchFound(index, leaf.entries, key)) {\n return undefined;\n }\n return leaf.entries[index][1];\n }\n\n async has(key: string): Promise<boolean> {\n const leaf = await findLeaf(key, this.rootHash, this, this.rootHash);\n const index = binarySearch(key, leaf.entries);\n return binarySearchFound(index, leaf.entries, key);\n }\n\n async isEmpty(): Promise<boolean> {\n const {rootHash} = this;\n const node = await this.getNode(this.rootHash);\n // The root hash has changed, so the tree has been modified.\n if (this.rootHash !== rootHash) {\n return this.isEmpty();\n }\n return node.entries.length === 0;\n }\n\n // We don't do any encoding of the key in the map, so we have no way of\n // determining from an entry.key alone whether it is a regular key or an\n // encoded IndexKey in an index map. Without encoding regular map keys the\n // caller has to deal with encoding and decoding the keys for the index map.\n scan(fromKey: string): AsyncIterableIterator<Entry<FrozenJSONValue>> {\n return scanForHash(\n this.rootHash,\n () => this.rootHash,\n this.rootHash,\n fromKey,\n async hash => {\n const cached = await this.getNode(hash);\n if (cached) {\n return [\n cached.level,\n cached.isMutable ? 
cached.entries.slice() : cached.entries,\n ];\n }\n const chunk = await this._dagRead.mustGetChunk(hash);\n return parseBTreeNode(\n chunk.data,\n this._formatVersion,\n this.getEntrySize,\n );\n },\n );\n }\n\n async *keys(): AsyncIterableIterator<string> {\n const node = await this.getNode(this.rootHash);\n yield* node.keys(this);\n }\n\n async *entries(): AsyncIterableIterator<Entry<FrozenJSONValue>> {\n const node = await this.getNode(this.rootHash);\n yield* node.entriesIter(this);\n }\n\n [Symbol.asyncIterator](): AsyncIterableIterator<Entry<FrozenJSONValue>> {\n return this.entries();\n }\n\n async *diff(last: BTreeRead): AsyncIterableIterator<InternalDiffOperation> {\n const [currentNode, lastNode] = await Promise.all([\n this.getNode(this.rootHash),\n last.getNode(last.rootHash),\n ]);\n yield* diffNodes(lastNode, currentNode, last, this);\n }\n}\n\nasync function* diffNodes(\n last: InternalNodeImpl | DataNodeImpl,\n current: InternalNodeImpl | DataNodeImpl,\n lastTree: BTreeRead,\n currentTree: BTreeRead,\n): AsyncIterableIterator<InternalDiffOperation> {\n if (last.level > current.level) {\n // merge all of last's children into a new node\n // We know last is an internal node because level > 0.\n const lastChild = (await (last as InternalNodeImpl).getCompositeChildren(\n 0,\n last.entries.length,\n lastTree,\n )) as InternalNodeImpl;\n yield* diffNodes(lastChild, current, lastTree, currentTree);\n return;\n }\n\n if (current.level > last.level) {\n // We know current is an internal node because level > 0.\n const currentChild = (await (\n current as InternalNodeImpl\n ).getCompositeChildren(\n 0,\n current.entries.length,\n currentTree,\n )) as InternalNodeImpl;\n yield* diffNodes(last, currentChild, lastTree, currentTree);\n return;\n }\n\n if (isDataNodeImpl(last) && isDataNodeImpl(current)) {\n yield* diffEntries(\n (last as DataNodeImpl).entries,\n (current as DataNodeImpl).entries,\n );\n return;\n }\n\n // Now we have two internal nodes with the same level. We compute the diff as\n // splices for the internal node entries. 
We then flatten these and call diff\n // recursively.\n const initialSplices = computeSplices(\n (last as InternalNodeImpl).entries,\n (current as InternalNodeImpl).entries,\n );\n for (const splice of initialSplices) {\n const [lastChild, currentChild] = await Promise.all([\n (last as InternalNodeImpl).getCompositeChildren(\n splice[SPLICE_AT],\n splice[SPLICE_REMOVED],\n lastTree,\n ),\n (current as InternalNodeImpl).getCompositeChildren(\n splice[SPLICE_FROM],\n splice[SPLICE_ADDED],\n currentTree,\n ),\n ]);\n yield* diffNodes(lastChild, currentChild, lastTree, currentTree);\n }\n}\n\nfunction* diffEntries(\n lastEntries: readonly Entry<FrozenJSONValue>[],\n currentEntries: readonly Entry<FrozenJSONValue>[],\n): IterableIterator<InternalDiffOperation> {\n const lastLength = lastEntries.length;\n const currentLength = currentEntries.length;\n let i = 0;\n let j = 0;\n while (i < lastLength && j < currentLength) {\n const lastKey = lastEntries[i][0];\n const currentKey = currentEntries[j][0];\n if (lastKey === currentKey) {\n if (!deepEqual(lastEntries[i][1], currentEntries[j][1])) {\n yield {\n op: 'change',\n key: lastKey,\n oldValue: lastEntries[i][1],\n newValue: currentEntries[j][1],\n };\n }\n i++;\n j++;\n } else if (lastKey < currentKey) {\n yield {\n op: 'del',\n key: lastKey,\n oldValue: lastEntries[i][1],\n };\n i++;\n } else {\n yield {\n op: 'add',\n key: currentKey,\n newValue: currentEntries[j][1],\n };\n j++;\n }\n }\n for (; i < lastLength; i++) {\n yield {\n op: 'del',\n key: lastEntries[i][0],\n oldValue: lastEntries[i][1],\n };\n }\n for (; j < currentLength; j++) {\n yield {\n op: 'add',\n key: currentEntries[j][0],\n newValue: currentEntries[j][1],\n };\n }\n}\n\n// Redefine the type here to allow the optional size in the tuple.\ntype ReadNodeResult = readonly [\n level: number,\n data: readonly Entry<FrozenJSONValue>[] | readonly Entry<Hash>[],\n];\n\ntype ReadNode = (hash: Hash) => Promise<ReadNodeResult>;\n\nasync function* scanForHash(\n expectedRootHash: Hash,\n getRootHash: () => Hash,\n hash: Hash,\n fromKey: string,\n readNode: ReadNode,\n): AsyncIterableIterator<Entry<FrozenJSONValue>> {\n if (hash === emptyHash) {\n return;\n }\n\n const data = await readNode(hash);\n const entries = data[NODE_ENTRIES];\n let i = 0;\n if (fromKey) {\n i = binarySearch(fromKey, entries);\n }\n if (data[NODE_LEVEL] > 0) {\n for (; i < entries.length; i++) {\n yield* scanForHash(\n expectedRootHash,\n getRootHash,\n (entries[i] as Entry<Hash>)[1],\n fromKey,\n readNode,\n );\n fromKey = '';\n }\n } else {\n for (; i < entries.length; i++) {\n const rootHash = getRootHash();\n // If rootHash changed then we start a new iterator from the key.\n if (expectedRootHash !== rootHash) {\n yield* scanForHash(\n rootHash,\n getRootHash,\n rootHash,\n entries[i][0],\n readNode,\n );\n return;\n }\n yield entries[i] as Entry<FrozenJSONValue>;\n }\n }\n}\n\nexport async function allEntriesAsDiff(\n map: BTreeRead,\n op: 'add' | 'del',\n): Promise<InternalDiff> {\n const diff: InternalDiffOperation[] = [];\n const make: (entry: Entry<FrozenJSONValue>) => InternalDiffOperation =\n op === 'add'\n ? 
entry => ({\n op: 'add',\n key: entry[0],\n newValue: entry[1],\n })\n : entry => ({\n op: 'del',\n key: entry[0],\n oldValue: entry[1],\n });\n\n for await (const entry of map.entries()) {\n diff.push(make(entry));\n }\n return diff;\n}\n", "export function stringCompare(a: string, b: string): number {\n if (a === b) {\n return 0;\n }\n if (a < b) {\n return -1;\n }\n return 1;\n}\n", "import {\n assertJSONObject,\n type ReadonlyJSONValue,\n} from '../../shared/src/json.ts';\nimport {stringCompare} from '../../shared/src/string-compare.ts';\nimport type {FrozenJSONValue} from './frozen-json.ts';\n\n/**\n * A cookie is a value that is used to determine the order of snapshots. It\n * needs to be comparable. This can be a `string`, `number` or if you want to\n * use a more complex value, you can use an object with an `order` property. The\n * value `null` is considered to be less than any other cookie and it is used\n * for the first pull when no cookie has been set.\n *\n * The order is the natural order of numbers and strings. If one of the cookies\n * is an object then the value of the `order` property is treated as the cookie\n * when doing comparison.\n *\n * If one of the cookies is a string and the other is a number, the number is\n * first converted to a string (using `toString()`).\n */\nexport type Cookie =\n | null\n | string\n | number\n | (ReadonlyJSONValue & {readonly order: number | string});\n\nexport type FrozenCookie =\n | null\n | string\n | number\n | (FrozenJSONValue & {readonly order: number | string});\n\n/**\n * Compare two cookies.\n * `null` is considered to be less than any other cookie.\n */\nexport function compareCookies(a: Cookie, b: Cookie): number {\n if (a === b) {\n return 0;\n }\n if (a === null) {\n return -1;\n }\n if (b === null) {\n return 1;\n }\n\n const cva = getCompareValue(a);\n const cvb = getCompareValue(b);\n\n // If either a or b is a string, compare by string.\n if (typeof cva === 'string' || typeof cvb === 'string') {\n return stringCompare(String(cva), String(cvb));\n }\n\n return cva - cvb;\n}\n\ntype NonNull<T> = T extends null ? never : T;\n\nfunction getCompareValue(cookie: NonNull<Cookie>): string | number {\n if (typeof cookie === 'string' || typeof cookie === 'number') {\n return cookie;\n }\n return cookie.order;\n}\n\nexport function assertCookie(v: unknown): asserts v is Cookie {\n if (v === null || typeof v === 'string' || typeof v === 'number') {\n return;\n }\n\n assertJSONObject(v);\n if (typeof v.order === 'string' || typeof v.order === 'number') {\n return;\n }\n\n throw new Error('Invalid cookie');\n}\n", "import {assert, assertString} from '../../../shared/src/asserts.ts';\nimport {assertDeepFrozen} from '../frozen-json.ts';\nimport type {Hash} from '../hash.ts';\n\n// By using declare we tell the type system that there is a unique symbol.\n// However, there is no such symbol but the type system does not care.\ndeclare const refsTag: unique symbol;\n\n/**\n * Opaque type representing a Refs. The reason to use an opaque type here is to\n * make sure that Refs are always sorted and have no duplicates.\n */\nexport type Refs = [] | readonly [Hash] | (readonly Hash[] & {[refsTag]: true});\n\n/**\n * Convert to a Refs when we already know it is sorted and has no duplicates.\n */\nexport function asRefs(sortedRefs: Hash[]): Refs {\n return sortedRefs as unknown as Refs;\n}\n\n/**\n * Sorts and tags as Refs. If an Array is passed in the array is sorted in\n * place, otherwise a copy of the iterable is created. 
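A quick sketch of the cookie ordering rules documented above: `null` sorts first, objects compare by their `order` property, and a mixed string/number pair is compared as strings. The sample values are invented; `compareCookies` and `Cookie` are the exports defined here.

```ts
import {compareCookies, type Cookie} from './cookies.ts';

const cookies: Cookie[] = [42, null, '41', {order: 40}];
cookies.sort(compareCookies);
// -> [null, {order: 40}, '41', 42]
// {order: 40} and 42 compare numerically (40 < 42); {order: 40} vs '41'
// and 42 vs '41' compare as strings ('40' < '41' < '42').
```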
This checks for duplicates.\n */\nexport function toRefs(refs: Hash[] | Set<Hash>): Refs {\n if (Array.isArray(refs)) {\n refs.sort();\n for (let i = 1; i < refs.length; i++) {\n assert(refs[i - 1] !== refs[i], 'Refs must not have duplicates');\n }\n return asRefs(refs);\n }\n\n const refsArray = [...refs];\n refsArray.sort();\n // no need to check for duplicates as Set cannot have duplicates.\n return asRefs(refsArray);\n}\n\nexport class Chunk<V = unknown> {\n readonly hash: Hash;\n readonly data: V;\n\n /**\n * Meta is an array of refs. If there are no refs we do not write a meta\n * chunk.\n */\n readonly meta: Refs;\n\n constructor(hash: Hash, data: V, refs: Refs) {\n assert(\n !(refs as unknown[]).includes(hash),\n 'Chunk cannot reference itself',\n );\n assertDeepFrozen(data);\n this.hash = hash;\n this.data = data;\n this.meta = refs;\n }\n}\n\nexport function assertRefs(v: unknown): asserts v is Refs {\n if (!Array.isArray(v)) {\n throw new Error('Refs must be an array');\n }\n if (v.length > 0) {\n assertString(v[0]);\n for (let i = 1; i < v.length; i++) {\n assertString(v[i]);\n }\n }\n}\n\nexport function createChunk<V>(\n data: V,\n refs: Refs,\n chunkHasher: ChunkHasher,\n): Chunk<V> {\n const hash = chunkHasher();\n return new Chunk(hash, data, refs);\n}\n\nexport type CreateChunk = <V>(data: V, refs: Refs) => Chunk<V>;\n\nexport type ChunkHasher = () => Hash;\n\nexport function throwChunkHasher(): Hash {\n throw new Error('unexpected call to compute chunk hash');\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport type {Hash} from '../hash.ts';\nimport type {Release} from '../with-transactions.ts';\nimport type {Chunk, Refs} from './chunk.ts';\n\nexport interface Store {\n read(): Promise<Read>;\n write(): Promise<Write>;\n close(): Promise<void>;\n}\n\ninterface GetChunk {\n getChunk(hash: Hash): Promise<Chunk | undefined>;\n}\n\nexport interface MustGetChunk {\n mustGetChunk(hash: Hash): Promise<Chunk>;\n}\n\nexport interface Read extends GetChunk, MustGetChunk, Release {\n hasChunk(hash: Hash): Promise<boolean>;\n getHead(name: string): Promise<Hash | undefined>;\n get closed(): boolean;\n}\n\nexport interface Write extends Read {\n createChunk<V>(data: V, refs: Refs): Chunk<V>;\n putChunk<V>(c: Chunk<V>): Promise<void>;\n setHead(name: string, hash: Hash): Promise<void>;\n removeHead(name: string): Promise<void>;\n assertValidHash(hash: Hash): void;\n commit(): Promise<void>;\n}\n\nexport class ChunkNotFoundError extends Error {\n name = 'ChunkNotFoundError';\n readonly hash: Hash;\n constructor(hash: Hash) {\n super(`Chunk not found ${hash}`);\n this.hash = hash;\n }\n}\n\nexport async function mustGetChunk(\n store: GetChunk,\n hash: Hash,\n): Promise<Chunk> {\n const chunk = await store.getChunk(hash);\n if (chunk) {\n return chunk;\n }\n throw new ChunkNotFoundError(hash);\n}\n\nexport async function mustGetHeadHash(\n name: string,\n store: Read,\n): Promise<Hash> {\n const hash = await store.getHead(name);\n assert(hash, `Missing head ${name}`);\n return hash;\n}\n", "/* eslint-disable @typescript-eslint/naming-convention */\n\n// These three were used before...\n// IndexChangeSDD = 1;\n// LocalSDD = 2;\n// SnapshotSDD = 3;\nexport const LocalDD31 = 4;\nexport const SnapshotDD31 = 5;\n\nexport type LocalDD31 = typeof LocalDD31;\nexport type SnapshotDD31 = typeof SnapshotDD31;\n", "import {\n assert,\n assertArray,\n assertBoolean,\n assertNumber,\n assertObject,\n assertString,\n unreachable,\n} from '../../../shared/src/asserts.ts';\nimport 
{assertJSONValue} from '../../../shared/src/json.ts';\nimport {skipCommitDataAsserts} from '../config.ts';\nimport {type FrozenCookie, compareCookies} from '../cookies.ts';\nimport {type Chunk, type CreateChunk, type Refs, toRefs} from '../dag/chunk.ts';\nimport {type MustGetChunk, type Read, mustGetHeadHash} from '../dag/store.ts';\nimport {\n type FrozenJSONValue,\n type FrozenTag,\n assertDeepFrozen,\n deepFreeze,\n} from '../frozen-json.ts';\nimport {type Hash, assertHash} from '../hash.ts';\nimport type {IndexDefinition} from '../index-defs.ts';\nimport type {ClientID} from '../sync/ids.ts';\nimport * as MetaType from './meta-type-enum.ts';\n\nexport const DEFAULT_HEAD_NAME = 'main';\n\nexport function commitIsLocalDD31(\n commit: Commit<Meta>,\n): commit is Commit<LocalMetaDD31> {\n return isLocalMetaDD31(commit.meta);\n}\n\nexport function commitIsLocal(\n commit: Commit<Meta>,\n): commit is Commit<LocalMetaDD31> {\n return commitIsLocalDD31(commit);\n}\n\nexport function commitIsSnapshot(\n commit: Commit<Meta>,\n): commit is Commit<SnapshotMetaDD31> {\n return isSnapshotMetaDD31(commit.meta);\n}\n\nexport class Commit<M extends Meta> {\n readonly chunk: Chunk<CommitData<M>>;\n\n constructor(chunk: Chunk<CommitData<M>>) {\n this.chunk = chunk;\n }\n\n get meta(): M {\n return this.chunk.data.meta;\n }\n\n get valueHash(): Hash {\n // Already validated!\n return this.chunk.data.valueHash;\n }\n\n getMutationID(clientID: ClientID, dagRead: MustGetChunk): Promise<number> {\n return getMutationID(clientID, dagRead, this.meta);\n }\n\n async getNextMutationID(\n clientID: ClientID,\n dagRead: MustGetChunk,\n ): Promise<number> {\n return (await this.getMutationID(clientID, dagRead)) + 1;\n }\n\n get indexes(): readonly IndexRecord[] {\n // Already validated!\n return this.chunk.data.indexes;\n }\n}\n\nexport async function getMutationID(\n clientID: ClientID,\n dagRead: MustGetChunk,\n meta: Meta,\n): Promise<number> {\n switch (meta.type) {\n case MetaType.SnapshotDD31:\n return meta.lastMutationIDs[clientID] ?? 0;\n\n case MetaType.LocalDD31: {\n if (meta.clientID === clientID) {\n return meta.mutationID;\n }\n const {basisHash} = meta;\n const basisCommit = await commitFromHash(basisHash, dagRead);\n return getMutationID(clientID, dagRead, basisCommit.meta);\n }\n\n default:\n unreachable(meta);\n }\n}\n\n/**\n * Returns the set of local commits from the given `fromCommitHash` back to but not\n * including its base snapshot. If `fromCommitHash` is a snapshot, the returned vector\n * will be empty. 
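As a usage sketch for `localMutations` (defined just below), paired with `withRead` and `mustGetHeadHash` from this bundle. A minimal sketch, assuming an existing dag `Store`; import paths mirror the sources shown here.

```ts
import {withRead} from '../with-transactions.ts';
import {mustGetHeadHash, type Store} from '../dag/store.ts';
import {DEFAULT_HEAD_NAME, localMutations} from './commit.ts';

// Gather the pending (unpushed) local commits from the default head.
async function pendingMutations(store: Store) {
  return withRead(store, async dagRead => {
    const headHash = await mustGetHeadHash(DEFAULT_HEAD_NAME, dagRead);
    // Reverse chain order: newest local commit first, base snapshot excluded.
    return localMutations(headHash, dagRead);
  });
}
```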
When, as typical, `fromCommitHash` is the head of the default chain\n * then the returned commits are the set of pending commits, ie the set of local commits\n * that have not yet been pushed to the data layer.\n *\n * The vector of commits is returned in reverse chain order, that is, starting\n * with the commit with hash `fromCommitHash` and walking backwards.\n */\nexport async function localMutations(\n fromCommitHash: Hash,\n dagRead: Read,\n): Promise<Commit<LocalMetaDD31>[]> {\n const commits = await commitChain(fromCommitHash, dagRead);\n // Filter does not deal with type narrowing.\n return commits.filter(c => commitIsLocal(c)) as Commit<LocalMetaDD31>[];\n}\n\nexport async function localMutationsDD31(\n fromCommitHash: Hash,\n dagRead: Read,\n): Promise<Commit<LocalMetaDD31>[]> {\n const commits = await commitChain(fromCommitHash, dagRead);\n // Filter does not deal with type narrowing.\n return commits.filter(c => commitIsLocalDD31(c)) as Commit<LocalMetaDD31>[];\n}\n\nexport async function localMutationsGreaterThan(\n commit: Commit<Meta>,\n mutationIDLimits: Record<ClientID, number>,\n dagRead: Read,\n): Promise<Commit<LocalMetaDD31>[]> {\n const commits: Commit<LocalMetaDD31>[] = [];\n const remainingMutationIDLimits = new Map(Object.entries(mutationIDLimits));\n while (!commitIsSnapshot(commit) && remainingMutationIDLimits.size > 0) {\n if (commitIsLocalDD31(commit)) {\n const {meta} = commit;\n const mutationIDLowerLimit = remainingMutationIDLimits.get(meta.clientID);\n if (mutationIDLowerLimit !== undefined) {\n if (meta.mutationID <= mutationIDLowerLimit) {\n remainingMutationIDLimits.delete(meta.clientID);\n } else {\n commits.push(commit as Commit<LocalMetaDD31>);\n }\n }\n }\n const {basisHash} = commit.meta;\n if (basisHash === null) {\n throw new Error(`Commit ${commit.chunk.hash} has no basis`);\n }\n commit = await commitFromHash(basisHash, dagRead);\n }\n return commits;\n}\n\nexport async function baseSnapshotFromHead(\n name: string,\n dagRead: Read,\n): Promise<Commit<SnapshotMetaDD31>> {\n const hash = await dagRead.getHead(name);\n assert(hash, `Missing head ${name}`);\n return baseSnapshotFromHash(hash, dagRead);\n}\n\nexport async function baseSnapshotHashFromHash(\n hash: Hash,\n dagRead: Read,\n): Promise<Hash> {\n return (await baseSnapshotFromHash(hash, dagRead)).chunk.hash;\n}\n\nexport async function baseSnapshotFromHash(\n hash: Hash,\n dagRead: Read,\n): Promise<Commit<SnapshotMetaDD31>> {\n const commit = await commitFromHash(hash, dagRead);\n return baseSnapshotFromCommit(commit, dagRead);\n}\n\nexport async function baseSnapshotFromCommit(\n commit: Commit<Meta>,\n dagRead: Read,\n): Promise<Commit<SnapshotMetaDD31>> {\n while (!commitIsSnapshot(commit)) {\n const {meta} = commit;\n if (isLocalMetaDD31(meta)) {\n commit = await commitFromHash(meta.baseSnapshotHash, dagRead);\n } else {\n const {basisHash} = meta;\n if (basisHash === null) {\n throw new Error(`Commit ${commit.chunk.hash} has no basis`);\n }\n commit = await commitFromHash(basisHash, dagRead);\n }\n }\n return commit;\n}\n\nexport function snapshotMetaParts(\n c: Commit<SnapshotMetaDD31>,\n clientID: ClientID,\n): [lastMutationID: number, cookie: FrozenCookie | FrozenJSONValue] {\n const m = c.meta;\n const lmid = m.lastMutationIDs[clientID] ?? 
0;\n return [lmid, m.cookieJSON];\n}\n\nexport function compareCookiesForSnapshots(\n a: Commit<SnapshotMetaDD31>,\n b: Commit<SnapshotMetaDD31>,\n): number {\n return compareCookies(a.meta.cookieJSON, b.meta.cookieJSON);\n}\n\n/**\n * Returns all commits from the commit with fromCommitHash to its base snapshot,\n * inclusive of both. Resulting vector is in chain-head-first order (so snapshot\n * comes last).\n */\nexport async function commitChain(\n fromCommitHash: Hash,\n dagRead: Read,\n): Promise<Commit<Meta>[]> {\n let commit = await commitFromHash(fromCommitHash, dagRead);\n const commits = [];\n while (!commitIsSnapshot(commit)) {\n const {meta} = commit;\n const {basisHash} = meta;\n if (basisHash === null) {\n throw new Error(`Commit ${commit.chunk.hash} has no basis`);\n }\n commits.push(commit);\n commit = await commitFromHash(basisHash, dagRead);\n }\n commits.push(commit);\n return commits;\n}\n\nexport async function commitFromHash(\n hash: Hash,\n dagRead: MustGetChunk,\n): Promise<Commit<Meta>> {\n const chunk = await dagRead.mustGetChunk(hash);\n return fromChunk(chunk);\n}\n\nexport async function commitFromHead(\n name: string,\n dagRead: Read,\n): Promise<Commit<Meta>> {\n const hash = await mustGetHeadHash(name, dagRead);\n return commitFromHash(hash, dagRead);\n}\n\nexport type LocalMetaDD31 = {\n readonly type: MetaType.LocalDD31;\n readonly basisHash: Hash;\n readonly mutationID: number;\n readonly mutatorName: string;\n readonly mutatorArgsJSON: FrozenJSONValue;\n readonly originalHash: Hash | null;\n readonly timestamp: number;\n readonly clientID: ClientID;\n readonly baseSnapshotHash: Hash;\n};\n\nexport type LocalMeta = LocalMetaDD31;\n\nexport function assertLocalMetaDD31(\n v: Record<string, unknown>,\n): asserts v is LocalMetaDD31 {\n // type already asserted\n assertString(v.clientID);\n assertNumber(v.mutationID);\n assertString(v.mutatorName);\n if (!v.mutatorName) {\n throw new Error('Missing mutator name');\n }\n assertJSONValue(v.mutatorArgsJSON);\n if (v.originalHash !== null) {\n assertHash(v.originalHash);\n }\n assertNumber(v.timestamp);\n}\n\nexport function isLocalMetaDD31(meta: Meta): meta is LocalMetaDD31 {\n return meta.type === MetaType.LocalDD31;\n}\n\nexport function assertLocalCommitDD31(\n c: Commit<Meta>,\n): asserts c is Commit<LocalMetaDD31> {\n assertLocalMetaDD31(c.meta);\n}\n\nexport type SnapshotMetaDD31 = {\n readonly type: MetaType.SnapshotDD31;\n readonly basisHash: Hash | null;\n readonly lastMutationIDs: Record<ClientID, number>;\n readonly cookieJSON: FrozenCookie;\n};\n\nexport type SnapshotMeta = SnapshotMetaDD31;\n\nexport function assertSnapshotMetaDD31(\n v: Record<string, unknown>,\n): asserts v is SnapshotMetaDD31 {\n // type already asserted\n if (v.basisHash !== null) {\n assertHash(v.basisHash);\n }\n assertJSONValue(v.cookieJSON);\n assertLastMutationIDs(v.lastMutationIDs);\n}\n\nfunction assertLastMutationIDs(\n v: unknown,\n): asserts v is Record<ClientID, number> {\n assertObject(v);\n for (const e of Object.values(v)) {\n assertNumber(e);\n }\n}\n\nexport type Meta = LocalMetaDD31 | SnapshotMetaDD31;\n\nexport function assertSnapshotCommitDD31(\n c: Commit<Meta>,\n): asserts c is Commit<SnapshotMetaDD31> {\n assertSnapshotMetaDD31(c.meta);\n}\n\nfunction isSnapshotMetaDD31(meta: Meta): meta is SnapshotMetaDD31 {\n return meta.type === MetaType.SnapshotDD31;\n}\n\nfunction assertMeta(v: unknown): asserts v is Meta {\n assertObject(v);\n assertDeepFrozen(v);\n if (v.basisHash !== null) {\n 
assertString(v.basisHash);\n }\n\n assertNumber(v.type);\n switch (v.type) {\n case MetaType.LocalDD31:\n assertLocalMetaDD31(v);\n break;\n case MetaType.SnapshotDD31:\n assertSnapshotMetaDD31(v);\n break;\n default:\n throw new Error(`Invalid enum value ${v.type}`);\n }\n}\n\n/**\n * This is the type used for index definitions as defined in the Commit chunk data.\n *\n * Changing this requires a REPLICACHE_FORMAT_VERSION bump.\n */\nexport type ChunkIndexDefinition = {\n readonly name: string;\n readonly keyPrefix: string;\n readonly jsonPointer: string;\n // Used to not exist\n readonly allowEmpty?: boolean;\n};\n\nexport function chunkIndexDefinitionEqualIgnoreName(\n a: ChunkIndexDefinition,\n b: ChunkIndexDefinition,\n): boolean {\n return (\n a.jsonPointer === b.jsonPointer &&\n (a.allowEmpty ?? false) === (b.allowEmpty ?? false) &&\n a.keyPrefix === b.keyPrefix\n );\n}\n\nfunction assertChunkIndexDefinition(\n v: unknown,\n): asserts v is ChunkIndexDefinition {\n assertObject(v);\n assertDeepFrozen(v);\n assertString(v.name);\n assertString(v.keyPrefix);\n assertString(v.jsonPointer);\n if (v.allowEmpty !== undefined) {\n assertBoolean(v.allowEmpty);\n }\n}\n\nexport function toChunkIndexDefinition(\n name: string,\n indexDefinition: IndexDefinition,\n): Required<ChunkIndexDefinition> {\n return {\n name,\n keyPrefix: indexDefinition.prefix ?? '',\n jsonPointer: indexDefinition.jsonPointer,\n allowEmpty: indexDefinition.allowEmpty ?? false,\n };\n}\n\nexport type IndexRecord = {\n readonly definition: ChunkIndexDefinition;\n readonly valueHash: Hash;\n};\n\nfunction assertIndexRecord(v: unknown): asserts v is IndexRecord {\n assertObject(v);\n assertDeepFrozen(v);\n assertChunkIndexDefinition(v.definition);\n assertString(v.valueHash);\n}\n\nfunction assertIndexRecords(v: unknown): asserts v is IndexRecord[] {\n assertArray(v);\n assertDeepFrozen(v);\n for (const ir of v) {\n assertIndexRecord(ir);\n }\n}\n\nexport function newLocalDD31(\n createChunk: CreateChunk,\n basisHash: Hash,\n baseSnapshotHash: Hash,\n mutationID: number,\n mutatorName: string,\n mutatorArgsJSON: FrozenJSONValue,\n originalHash: Hash | null,\n valueHash: Hash,\n indexes: readonly IndexRecord[],\n timestamp: number,\n clientID: ClientID,\n): Commit<LocalMetaDD31> {\n const meta: LocalMetaDD31 = {\n type: MetaType.LocalDD31,\n basisHash,\n baseSnapshotHash,\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n timestamp,\n clientID,\n };\n return commitFromCommitData(\n createChunk,\n makeCommitData(meta, valueHash, indexes),\n );\n}\n\nexport function newSnapshotDD31(\n createChunk: CreateChunk,\n basisHash: Hash | null,\n lastMutationIDs: Record<ClientID, number>,\n cookieJSON: FrozenCookie,\n valueHash: Hash,\n indexes: readonly IndexRecord[],\n): Commit<SnapshotMetaDD31> {\n return commitFromCommitData(\n createChunk,\n newSnapshotCommitDataDD31(\n basisHash,\n lastMutationIDs,\n cookieJSON,\n valueHash,\n indexes,\n ),\n );\n}\n\nexport function newSnapshotCommitDataDD31(\n basisHash: Hash | null,\n lastMutationIDs: Record<ClientID, number>,\n cookieJSON: FrozenCookie,\n valueHash: Hash,\n indexes: readonly IndexRecord[],\n): CommitData<SnapshotMetaDD31> {\n const meta: SnapshotMetaDD31 = {\n type: MetaType.SnapshotDD31,\n basisHash,\n lastMutationIDs,\n cookieJSON,\n };\n return makeCommitData(meta, valueHash, indexes);\n}\n\nexport function fromChunk(chunk: Chunk): Commit<Meta> {\n validateChunk(chunk);\n return new Commit(chunk);\n}\n\nfunction commitFromCommitData<M extends Meta>(\n 
createChunk: CreateChunk,\n data: CommitData<M>,\n): Commit<M> {\n return new Commit(createChunk(data, getRefs(data)));\n}\n\nexport function getRefs(data: CommitData<Meta>): Refs {\n const refs: Set<Hash> = new Set();\n refs.add(data.valueHash);\n const {meta} = data;\n switch (meta.type) {\n case MetaType.LocalDD31:\n meta.basisHash && refs.add(meta.basisHash);\n // Local has weak originalHash\n break;\n case MetaType.SnapshotDD31:\n // Snapshot has weak basisHash\n break;\n default:\n unreachable(meta);\n }\n\n for (const index of data.indexes) {\n refs.add(index.valueHash);\n }\n\n return toRefs(refs);\n}\n\nexport type CommitData<M extends Meta> = FrozenTag<{\n readonly meta: M;\n readonly valueHash: Hash;\n readonly indexes: readonly IndexRecord[];\n}>;\n\nexport function makeCommitData<M extends Meta>(\n meta: M,\n valueHash: Hash,\n indexes: readonly IndexRecord[],\n): CommitData<M> {\n return deepFreeze({\n meta,\n valueHash,\n indexes,\n }) as unknown as CommitData<M>;\n}\n\nexport function assertCommitData(v: unknown): asserts v is CommitData<Meta> {\n if (skipCommitDataAsserts) {\n return;\n }\n\n assertObject(v);\n assertDeepFrozen(v);\n assertMeta(v.meta);\n assertString(v.valueHash);\n assertIndexRecords(v.indexes);\n}\n\nfunction validateChunk(chunk: Chunk): asserts chunk is Chunk<CommitData<Meta>> {\n const {data} = chunk;\n assertCommitData(data);\n\n const seen = new Set();\n for (const index of data.indexes) {\n const {name} = index.definition;\n if (seen.has(name)) {\n throw new Error(`Duplicate index ${name}`);\n }\n seen.add(name);\n }\n}\n", "/* eslint-disable @typescript-eslint/naming-convention */\n\nexport const Add = 0;\nexport const Remove = 1;\n\nexport type Add = typeof Add;\nexport type Remove = typeof Remove;\n", "import type {LogContext} from '@rocicorp/logger';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport type {BTreeRead} from '../btree/read.ts';\nimport type {BTreeWrite} from '../btree/write.ts';\nimport type {FrozenJSONObject, FrozenJSONValue} from '../frozen-json.ts';\nimport type {Hash} from '../hash.ts';\nimport type {IndexRecord} from './commit.ts';\nimport * as IndexOperation from './index-operation-enum.ts';\n\ntype IndexOperation = Enum<typeof IndexOperation>;\n\nexport class IndexRead<BTree = BTreeRead> {\n readonly meta: IndexRecord;\n readonly map: BTree;\n\n constructor(meta: IndexRecord, map: BTree) {\n this.meta = meta;\n this.map = map;\n }\n}\n\nexport class IndexWrite extends IndexRead<BTreeWrite> {\n // Note: does not update self.meta.valueHash (doesn't need to at this point as flush\n // is only called during commit.)\n flush(): Promise<Hash> {\n return this.map.flush();\n }\n\n clear(): Promise<void> {\n return this.map.clear();\n }\n}\n\n// Index or de-index a single primary entry.\nexport async function indexValue(\n lc: LogContext,\n index: BTreeWrite,\n op: IndexOperation,\n key: string,\n val: FrozenJSONValue,\n jsonPointer: string,\n allowEmpty: boolean,\n): Promise<void> {\n try {\n for (const entry of getIndexKeys(key, val, jsonPointer, allowEmpty)) {\n switch (op) {\n case IndexOperation.Add:\n await index.put(entry, val);\n break;\n case IndexOperation.Remove:\n await index.del(entry);\n break;\n }\n }\n } catch (e) {\n // Right now all the errors that index_value() returns are customers' dev\n // problems: either the value is not json, the pointer is into nowhere, etc.\n // So we ignore them.\n lc.info?.('Not indexing value', val, ':', e);\n }\n}\n\n// Gets the set of index keys for a given primary key and 
value.\nexport function getIndexKeys(\n primary: string,\n value: FrozenJSONValue,\n jsonPointer: string,\n allowEmpty: boolean,\n): string[] {\n const target = evaluateJSONPointer(value, jsonPointer);\n if (target === undefined) {\n if (allowEmpty) {\n return [];\n }\n throw new Error(`No value at path: ${jsonPointer}`);\n }\n\n const values = Array.isArray(target) ? target : [target];\n\n const indexKeys: string[] = [];\n for (const value of values) {\n if (typeof value === 'string') {\n indexKeys.push(encodeIndexKey([value, primary]));\n } else {\n throw new Error('Unsupported target type');\n }\n }\n\n return indexKeys;\n}\n\nexport const KEY_VERSION_0 = '\u0000';\nexport const KEY_SEPARATOR = '\u0000';\n\n/**\n * When using indexes the key is a tuple of the secondary key and the primary\n * key.\n */\nexport type IndexKey = readonly [secondary: string, primary: string];\n\n// An index key is encoded to a vec of bytes in the following order:\n// - key version byte(s), followed by\n// - the secondary key bytes (which for now is a UTF8 encoded string), followed by\n// - the key separator, a null byte, followed by\n// - the primary key bytes\n//\n// The null separator byte ensures that if a secondary key A is longer than B then\n// A always sorts after B. Appending the primary key ensures index keys with\n// identical secondary keys sort in primary key order. Secondary keys must not\n// contain a zero (null) byte.\nexport function encodeIndexKey(indexKey: IndexKey): string {\n const secondary = indexKey[0];\n const primary = indexKey[1];\n\n if (secondary.includes('\u0000')) {\n throw new Error('Secondary key cannot contain null byte');\n }\n return KEY_VERSION_0 + secondary + KEY_SEPARATOR + primary;\n}\n\n// Returns bytes that can be used to scan for the given secondary index value.\n//\n// Consider a scan for start_secondary_key=\"a\" (97). We want to scan with scan\n// key [0, 97]. We could also scan with [0, 97, 0], but then we couldn't use\n// this function for prefix scans, so we lop off the null byte. If we want\n// the scan to be exclusive, we scan with the next greater value, [0, 97, 1]\n// (we disallow zero bytes in secondary keys).\n//\n// Now it gets a little tricky. We also want to be able to scan using the\n// primary key, start_key. When we do this we have to encode the scan key\n// a little differently. We essentially have to fix the value of the\n// secondary key so we can vary the start_key. That is, the match on\n// start_secondary_key becomes an exact match.\n//\n// Consider the scan for start_secondary_key=\"a\" and start_key=[2]. We want\n// to scan with [0, 97, 0, 2]. If we want exclusive we want to scan with\n// the next highest value, [0, 97, 0, 2, 0] (zero bytes are allowed in primary\n// keys). So far so good. It is important to notice that we need to\n// be able to distinguish between not wanting to use start_key and wanting to\n// use start_key=[]. In the former case we want to scan with the secondary\n// key value, possibly followed by a 1 with no trailing zero byte ([0, 97]\n// or [0, 97, 1]). 
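Concretely, the layout described in the comments above round-trips like this. A sketch using `encodeIndexKey` together with `encodeIndexScanKey` and `decodeIndexKey` (both defined just below); the sample keys are made up.

```ts
const encoded = encodeIndexKey(['age:42', 'user/123']);
// encoded === '\u0000age:42\u0000user/123'
const [secondary, primary] = decodeIndexKey(encoded);
// secondary === 'age:42', primary === 'user/123'

// Prefix scan over secondary keys starting at 'age:42' (separator lopped off):
encodeIndexScanKey('age:42', undefined); // '\u0000age:42'
// Exact secondary match, scanning from primary key 'user/123':
encodeIndexScanKey('age:42', 'user/123'); // '\u0000age:42\u0000user/123'
```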
In the latter case we want to scan by the secondary\n// key value, followed by the zero byte, followed by the primary key value\n// and another zero if it is exclusive ([0, 97, 0] or [0, 97, 0, 0]).\n// This explains why we need the Option around start_key.\nexport function encodeIndexScanKey(\n secondary: string,\n primary: string | undefined,\n): string {\n const k = encodeIndexKey([secondary, primary || '']);\n if (primary === undefined) {\n return k.slice(0, k.length - 1);\n }\n return k;\n}\n\n// Decodes an IndexKey encoded by encode_index_key.\nexport function decodeIndexKey(encodedIndexKey: string): IndexKey {\n if (encodedIndexKey[0] !== KEY_VERSION_0) {\n throw new Error('Invalid version');\n }\n\n const versionLen = KEY_VERSION_0.length;\n const separatorLen = KEY_SEPARATOR.length;\n const separatorOffset = encodedIndexKey.indexOf(KEY_SEPARATOR, versionLen);\n if (separatorOffset === -1) {\n throw new Error('Invalid formatting');\n }\n\n const secondary = encodedIndexKey.slice(versionLen, separatorOffset);\n const primary = encodedIndexKey.slice(separatorOffset + separatorLen);\n return [secondary, primary];\n}\n\nexport function evaluateJSONPointer(\n value: FrozenJSONValue,\n pointer: string,\n): FrozenJSONValue | undefined {\n function parseIndex(s: string): number | undefined {\n if (s.startsWith('+') || (s.startsWith('0') && s.length !== 1)) {\n return undefined;\n }\n return parseInt(s, 10);\n }\n\n if (pointer === '') {\n return value;\n }\n if (!pointer.startsWith('/')) {\n throw new Error(`Invalid JSON pointer: ${pointer}`);\n }\n\n const tokens = pointer\n .split('/')\n .slice(1)\n .map(x => x.replace(/~1/g, '/').replace(/~0/g, '~'));\n\n let target = value;\n for (const token of tokens) {\n let targetOpt;\n if (Array.isArray(target)) {\n const i = parseIndex(token);\n if (i === undefined) {\n return undefined;\n }\n targetOpt = target[i];\n } else if (target === null) {\n return undefined;\n } else if (typeof target === 'object') {\n target = target as FrozenJSONObject;\n targetOpt = target[token];\n }\n if (targetOpt === undefined) {\n return undefined;\n }\n target = targetOpt;\n }\n return target;\n}\n", "import type {Enum} from '../../../shared/src/enum.ts';\nimport {BTreeRead} from '../btree/read.ts';\nimport type {Read as DagRead} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport type {Hash} from '../hash.ts';\nimport {\n Commit,\n DEFAULT_HEAD_NAME,\n type Meta,\n commitFromHash,\n commitFromHead,\n} from './commit.ts';\nimport {IndexRead} from './index.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class Read {\n readonly #dagRead: DagRead;\n map: BTreeRead;\n readonly indexes: Map<string, IndexRead>;\n\n constructor(\n dagRead: DagRead,\n map: BTreeRead,\n indexes: Map<string, IndexRead>,\n ) {\n this.#dagRead = dagRead;\n this.map = map;\n this.indexes = indexes;\n }\n\n has(key: string): Promise<boolean> {\n return this.map.has(key);\n }\n\n get(key: string): Promise<FrozenJSONValue | undefined> {\n return this.map.get(key);\n }\n\n isEmpty(): Promise<boolean> {\n return this.map.isEmpty();\n }\n\n getMapForIndex(indexName: string): BTreeRead {\n const idx = this.indexes.get(indexName);\n if (idx === undefined) {\n throw new Error(`Unknown index name: ${indexName}`);\n }\n return idx.map;\n }\n\n get closed(): boolean {\n return this.#dagRead.closed;\n }\n\n close(): void {\n this.#dagRead.release();\n }\n}\n\nexport function 
readFromDefaultHead(\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Promise<Read> {\n return readFromHead(DEFAULT_HEAD_NAME, dagRead, formatVersion);\n}\n\nexport async function readFromHead(\n name: string,\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Promise<Read> {\n const commit = await commitFromHead(name, dagRead);\n return readFromCommit(commit, dagRead, formatVersion);\n}\n\nexport async function readFromHash(\n hash: Hash,\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Promise<Read> {\n const commit = await commitFromHash(hash, dagRead);\n return readFromCommit(commit, dagRead, formatVersion);\n}\n\nfunction readFromCommit(\n commit: Commit<Meta>,\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Read {\n const indexes = readIndexesForRead(commit, dagRead, formatVersion);\n const map = new BTreeRead(dagRead, formatVersion, commit.valueHash);\n return new Read(dagRead, map, indexes);\n}\n\nexport function readIndexesForRead(\n commit: Commit<Meta>,\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Map<string, IndexRead> {\n const m = new Map();\n for (const index of commit.indexes) {\n m.set(\n index.definition.name,\n new IndexRead(\n index,\n new BTreeRead(dagRead, formatVersion, index.valueHash),\n ),\n );\n }\n return m;\n}\n", "export interface Release {\n release(): void;\n}\n\nexport interface Commit {\n commit(): Promise<void>;\n}\n\ninterface ReadStore<Read extends Release> {\n read(): Promise<Read>;\n}\n\ninterface WriteStore<Write extends Release> {\n write(): Promise<Write>;\n}\n\nexport function withRead<Read extends Release, Return>(\n store: ReadStore<Read>,\n fn: (read: Read) => Return | Promise<Return>,\n): Promise<Return> {\n return using(store.read(), fn);\n}\n\nexport function withWriteNoImplicitCommit<Write extends Release, Return>(\n store: WriteStore<Write>,\n fn: (write: Write) => Return | Promise<Return>,\n): Promise<Return> {\n return using(store.write(), fn);\n}\n\nexport function withWrite<Write extends Release & Commit, Return>(\n store: WriteStore<Write>,\n fn: (write: Write) => Return | Promise<Return>,\n): Promise<Return> {\n return using(store.write(), async write => {\n const result = await fn(write);\n await write.commit();\n return result;\n });\n}\n\n/**\n * This function takes a promise for a resource and a function that uses that\n * resource. It will release the resource after the function returns by calling\n * the `release` function\n */\nexport async function using<TX extends Release, Return>(\n x: Promise<TX>,\n fn: (tx: TX) => Return | Promise<Return>,\n): Promise<Return> {\n const write = await x;\n try {\n return await fn(write);\n } finally {\n write.release();\n }\n}\n", "import * as valita from '../../shared/src/valita.ts';\n\n/**\n * The definition of a single index.\n */\nexport type IndexDefinition = {\n /**\n * The prefix, if any, to limit the index over. If not provided the values of\n * all keys are indexed.\n */\n readonly prefix?: string | undefined;\n\n /**\n * A [JSON Pointer](https://tools.ietf.org/html/rfc6901) pointing at the sub\n * value inside each value to index over.\n *\n * For example, one might index over users' ages like so:\n * `{prefix: '/user/', jsonPointer: '/age'}`\n */\n readonly jsonPointer: string;\n\n /**\n * If `true`, indexing empty values will not emit a warning. 
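The `using`/`withWrite` helpers above encode the commit-then-release discipline. A minimal sketch, assuming an existing dag `Store` and a module placed next to `with-transactions.ts`; the head name and data are made up.

```ts
import {withWrite} from './with-transactions.ts';
import type {Store} from './dag/store.ts';
import {deepFreeze} from './frozen-json.ts';

async function writeGreeting(store: Store) {
  return withWrite(store, async dagWrite => {
    const chunk = dagWrite.createChunk(deepFreeze({greeting: 'hi'}), []);
    await dagWrite.putChunk(chunk);
    await dagWrite.setHead('demo-head', chunk.hash);
    return chunk.hash; // commit() runs after fn; release() always runs
  });
}
```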
Defaults to `false`.\n */\n readonly allowEmpty?: boolean | undefined;\n};\n\nexport const indexDefinitionSchema: valita.Type<IndexDefinition> =\n valita.readonlyObject({\n prefix: valita.string().optional(),\n jsonPointer: valita.string(),\n allowEmpty: valita.boolean().optional(),\n });\n\n/**\n * An object as a map defining the indexes. The keys are the index names and the\n * values are the index definitions.\n */\nexport type IndexDefinitions = {readonly [name: string]: IndexDefinition};\n\nexport const indexDefinitionsSchema = valita.readonlyRecord(\n indexDefinitionSchema,\n);\n\nexport function indexDefinitionEqual(\n a: IndexDefinition,\n b: IndexDefinition,\n): boolean {\n return (\n a.jsonPointer === b.jsonPointer &&\n (a.allowEmpty ?? false) === (b.allowEmpty ?? false) &&\n (a.prefix ?? '') === (b.prefix ?? '')\n );\n}\n\nexport function indexDefinitionsEqual(\n a: IndexDefinitions,\n b: IndexDefinitions,\n): boolean {\n if (Object.keys(a).length !== Object.keys(b).length) {\n return false;\n }\n for (const [aKey, aValue] of Object.entries(a)) {\n const bValue = b[aKey];\n if (!bValue || !indexDefinitionEqual(aValue, bValue)) {\n return false;\n }\n }\n return true;\n}\n\nexport function assertIndexDefinitions(\n value: unknown,\n): asserts value is IndexDefinitions {\n valita.assert(value, indexDefinitionsSchema);\n}\n", "import {assert, assertObject} from '../../../shared/src/asserts.ts';\nimport * as valita from '../../../shared/src/valita.ts';\nimport {toRefs} from '../dag/chunk.ts';\nimport type {Read, Write} from '../dag/store.ts';\nimport {deepFreeze, type FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, hashSchema} from '../hash.ts';\nimport {indexDefinitionsEqual, indexDefinitionsSchema} from '../index-defs.ts';\nimport type {ClientGroupID} from '../sync/ids.ts';\n\nexport type ClientGroupMap = ReadonlyMap<ClientGroupID, ClientGroup>;\n\nconst clientGroupSchema = valita.readonlyObject({\n /**\n * The hash of the commit in the perdag last persisted to this client group.\n * Should only be updated by clients assigned to this client group.\n */\n headHash: hashSchema,\n\n /**\n * Set of mutator names common to all clients assigned to this client group.\n */\n mutatorNames: valita.readonlyArray(valita.string()),\n\n /**\n * Index definitions common to all clients assigned to this client group.\n */\n indexes: indexDefinitionsSchema,\n\n /**\n * The highest mutation ID of every client assigned to this client group.\n * Should only be updated by clients assigned to this client group. Read by\n * other clients to determine if there are unacknowledged pending mutations\n * for them to try to recover. This is redundant with information in the\n * commit graph at `headHash`, but allows other clients to determine if there\n * are unacknowledged pending mutations without having to load the commit\n * graph.\n */\n mutationIDs: valita.readonlyRecord(valita.number()),\n\n /**\n * The highest lastMutationID received from the server for every client\n * assigned to this client group.\n *\n * Should be updated by the clients assigned to this client group whenever\n * they persist to this client group. 
Read by other clients to determine if\n * there are unacknowledged pending mutations for them to recover and\n * *updated* by other clients upon successfully recovering pending mutations\n * to avoid redundant pushes of pending mutations.\n *\n * Note: This will be the same as the `lastMutationIDs` of the base snapshot\n * of the client group's commit graph when written by clients assigned to this\n * client group. However, when written by another client recovering mutations\n * it may be different because the other client does not update the commit\n * graph.\n */\n lastServerAckdMutationIDs: valita.record(valita.number()),\n\n /**\n * If the server deletes this client group it can signal that the client group\n * was deleted. If that happens we mark this client group as disabled so that\n * we do not use it again when creating new clients.\n */\n disabled: valita.boolean(),\n});\n\nexport type ClientGroup = valita.Infer<typeof clientGroupSchema>;\n\nexport const CLIENT_GROUPS_HEAD_NAME = 'client-groups';\n\nfunction assertClientGroup(value: unknown): asserts value is ClientGroup {\n valita.assert(value, clientGroupSchema);\n}\n\nfunction chunkDataToClientGroupMap(chunkData: unknown): ClientGroupMap {\n assertObject(chunkData);\n const clientGroups = new Map<ClientGroupID, ClientGroup>();\n for (const [key, value] of Object.entries(chunkData)) {\n if (value !== undefined) {\n assertClientGroup(value);\n clientGroups.set(key, value);\n }\n }\n return clientGroups;\n}\n\nfunction clientGroupMapToChunkData(\n clientGroups: ClientGroupMap,\n dagWrite: Write,\n): FrozenJSONValue {\n const chunkData: {[id: ClientGroupID]: ClientGroup} = {};\n for (const [clientGroupID, clientGroup] of clientGroups.entries()) {\n dagWrite.assertValidHash(clientGroup.headHash);\n chunkData[clientGroupID] = {\n ...clientGroup,\n mutatorNames: [...clientGroup.mutatorNames.values()],\n };\n }\n return deepFreeze(chunkData);\n}\n\nasync function getClientGroupsAtHash(\n hash: Hash,\n dagRead: Read,\n): Promise<ClientGroupMap> {\n const chunk = await dagRead.getChunk(hash);\n return chunkDataToClientGroupMap(chunk?.data);\n}\n\nexport async function getClientGroups(dagRead: Read): Promise<ClientGroupMap> {\n const hash = await dagRead.getHead(CLIENT_GROUPS_HEAD_NAME);\n if (!hash) {\n return new Map();\n }\n return getClientGroupsAtHash(hash, dagRead);\n}\n\nexport async function setClientGroups(\n clientGroups: ClientGroupMap,\n dagWrite: Write,\n): Promise<ClientGroupMap> {\n const currClientGroups = await getClientGroups(dagWrite);\n for (const [clientGroupID, clientGroup] of clientGroups) {\n const currClientGroup = currClientGroups.get(clientGroupID);\n validateClientGroupUpdate(clientGroup, currClientGroup);\n }\n return setValidatedClientGroups(clientGroups, dagWrite);\n}\n\nexport async function setClientGroup(\n clientGroupID: ClientGroupID,\n clientGroup: ClientGroup,\n dagWrite: Write,\n): Promise<ClientGroupMap> {\n const currClientGroups = await getClientGroups(dagWrite);\n const currClientGroup = currClientGroups.get(clientGroupID);\n validateClientGroupUpdate(clientGroup, currClientGroup);\n const newClientGroups = new Map(currClientGroups);\n newClientGroups.set(clientGroupID, clientGroup);\n return setValidatedClientGroups(newClientGroups, dagWrite);\n}\n\nexport async function deleteClientGroup(\n clientGroupID: ClientGroupID,\n dagWrite: Write,\n): Promise<ClientGroupMap> {\n const currClientGroups = await getClientGroups(dagWrite);\n if (!currClientGroups.has(clientGroupID)) {\n return 
currClientGroups;\n }\n const newClientGroups = new Map(currClientGroups.entries());\n newClientGroups.delete(clientGroupID);\n return setValidatedClientGroups(newClientGroups, dagWrite);\n}\n\nfunction validateClientGroupUpdate(\n clientGroup: ClientGroup,\n currClientGroup: ClientGroup | undefined,\n) {\n const mutatorNamesSet = new Set(clientGroup.mutatorNames);\n assert(\n mutatorNamesSet.size === clientGroup.mutatorNames.length,\n \"A client group's mutatorNames must be a set.\",\n );\n if (currClientGroup !== undefined) {\n assert(\n indexDefinitionsEqual(currClientGroup.indexes, clientGroup.indexes),\n \"A client group's index definitions must never change.\",\n );\n assert(\n mutatorNamesEqual(mutatorNamesSet, currClientGroup.mutatorNames),\n \"A client group's mutatorNames must never change.\",\n );\n }\n}\n\nasync function setValidatedClientGroups(\n clientGroups: ClientGroupMap,\n dagWrite: Write,\n): Promise<ClientGroupMap> {\n const chunkData = clientGroupMapToChunkData(clientGroups, dagWrite);\n const refs: Set<Hash> = new Set();\n for (const clientGroup of clientGroups.values()) {\n refs.add(clientGroup.headHash);\n }\n const chunk = dagWrite.createChunk(chunkData, toRefs(refs));\n await dagWrite.putChunk(chunk);\n await dagWrite.setHead(CLIENT_GROUPS_HEAD_NAME, chunk.hash);\n return clientGroups;\n}\n\nexport function mutatorNamesEqual(\n mutatorNamesSet: ReadonlySet<string>,\n mutatorNames: readonly string[],\n): boolean {\n if (mutatorNames.length !== mutatorNamesSet.size) {\n return false;\n }\n for (const mutatorName of mutatorNames) {\n if (!mutatorNamesSet.has(mutatorName)) {\n return false;\n }\n }\n return true;\n}\n\nexport async function getClientGroup(\n id: ClientGroupID,\n dagRead: Read,\n): Promise<ClientGroup | undefined> {\n const clientGroups = await getClientGroups(dagRead);\n return clientGroups.get(id);\n}\n\nexport function clientGroupHasPendingMutations(clientGroup: ClientGroup) {\n for (const [clientID, mutationID] of Object.entries(\n clientGroup.mutationIDs,\n )) {\n const lastServerAckdMutationID =\n clientGroup.lastServerAckdMutationIDs[clientID];\n if (\n (lastServerAckdMutationID === undefined && mutationID !== 0) ||\n lastServerAckdMutationID < mutationID\n ) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Marks a client group as disabled. 
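For intuition on `clientGroupHasPendingMutations` above, a sketch with made-up client IDs; the placeholder hash is for illustration only.

```ts
import type {Hash} from '../hash.ts';
import {
  clientGroupHasPendingMutations,
  type ClientGroup,
} from './client-groups.ts';

const group: ClientGroup = {
  headHash: 'h1' as unknown as Hash, // placeholder, not a real hash
  mutatorNames: ['addTodo'],
  indexes: {},
  mutationIDs: {c1: 5, c2: 2},
  lastServerAckdMutationIDs: {c1: 5},
  disabled: false,
};
clientGroupHasPendingMutations(group); // true: c2 is at mutation 2, never acked
```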
This can happen if the server deletes the\n * client group (servers should not delete clients or client groups but it often\n * happens in practice when developing).\n *\n * A disabled client group prevents pulls and pushes from happening.\n */\nexport async function disableClientGroup(\n clientGroupID: string,\n dagWrite: Write,\n): Promise<void> {\n const clientGroup = await getClientGroup(clientGroupID, dagWrite);\n if (!clientGroup) {\n // No client group matching in the database, so nothing to do.\n return;\n }\n const disabledClientGroup = {\n ...clientGroup,\n disabled: true,\n };\n await setClientGroup(clientGroupID, disabledClientGroup, dagWrite);\n}\n", "export async function asyncIterableToArray<T>(\n it: AsyncIterable<T>,\n): Promise<T[]> {\n const arr: T[] = [];\n for await (const v of it) {\n arr.push(v);\n }\n return arr;\n}\n", "import {asyncIterableToArray} from '../async-iterable-to-array.ts';\nimport type {InternalDiff} from './node.ts';\nimport type {BTreeRead} from './read.ts';\n\nexport function diff(\n oldMap: BTreeRead,\n newMap: BTreeRead,\n): Promise<InternalDiff> {\n // Return an array to ensure we do not compute the diff more than once.\n return asyncIterableToArray(newMap.diff(oldMap));\n}\n", "import {Lock} from '@rocicorp/lock';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {type Chunk, type CreateChunk, toRefs} from '../dag/chunk.ts';\nimport type {Write} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash, newRandomHash} from '../hash.ts';\nimport {getSizeOfEntry} from '../size-of-value.ts';\nimport {\n DataNodeImpl,\n type Entry,\n InternalNodeImpl,\n createNewInternalEntryForNode,\n emptyDataNode,\n isDataNodeImpl,\n newNodeImpl,\n partition,\n toChunkData,\n} from './node.ts';\nimport {BTreeRead} from './read.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class BTreeWrite extends BTreeRead {\n /**\n * This rw lock is used to ensure we do not mutate the btree in parallel. 
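The race described in this doc comment can be sketched as follows (illustration only, assuming a dag `Write` is in scope; `FormatVersion` values are numeric, so `number` stands in for the enum type here).

```ts
import {BTreeWrite} from './write.ts';
import type {Write} from '../dag/store.ts';
import {deepFreeze} from '../frozen-json.ts';

async function concurrentPuts(dagWrite: Write, formatVersion: number) {
  const tree = new BTreeWrite(dagWrite, formatVersion);
  // Without #lock both puts would read the same old root hash and one
  // write would be lost; withLock serializes them, so both land.
  const p1 = tree.put('a', deepFreeze(0));
  const p2 = tree.put('b', deepFreeze(1));
  await Promise.all([p1, p2]);
}
```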
It\n * would be a problem if we didn't have the lock in cases like this:\n *\n * ```ts\n * const p1 = tree.put('a', 0);\n * const p2 = tree.put('b', 1);\n * await p1;\n * await p2;\n * ```\n *\n * because both `p1` and `p2` would start from the old root hash but a put\n * changes the root hash so the two concurrent puts would lead to only one of\n * them actually working, and it is not deterministic which one would finish\n * last.\n */\n readonly #lock = new Lock();\n readonly #modified: Map<Hash, DataNodeImpl | InternalNodeImpl> = new Map();\n\n declare protected _dagRead: Write;\n\n readonly minSize: number;\n readonly maxSize: number;\n\n constructor(\n dagWrite: Write,\n formatVersion: FormatVersion,\n root: Hash = emptyHash,\n minSize = 8 * 1024,\n maxSize = 16 * 1024,\n getEntrySize: <K, V>(k: K, v: V) => number = getSizeOfEntry,\n chunkHeaderSize?: number,\n ) {\n super(dagWrite, formatVersion, root, getEntrySize, chunkHeaderSize);\n\n this.minSize = minSize;\n this.maxSize = maxSize;\n }\n\n #addToModified(node: DataNodeImpl | InternalNodeImpl): void {\n assert(node.isMutable);\n this.#modified.set(node.hash, node);\n this._cache.set(node.hash, node);\n }\n\n updateNode(node: DataNodeImpl | InternalNodeImpl): void {\n assert(node.isMutable);\n this.#modified.delete(node.hash);\n node.hash = newRandomHash();\n this.#addToModified(node);\n }\n\n newInternalNodeImpl(\n entries: Array<Entry<Hash>>,\n level: number,\n ): InternalNodeImpl {\n const n = new InternalNodeImpl(entries, newRandomHash(), level, true);\n this.#addToModified(n);\n return n;\n }\n\n newDataNodeImpl(entries: Entry<FrozenJSONValue>[]): DataNodeImpl {\n const n = new DataNodeImpl(entries, newRandomHash(), true);\n this.#addToModified(n);\n return n;\n }\n\n newNodeImpl(entries: Entry<FrozenJSONValue>[], level: number): DataNodeImpl;\n newNodeImpl(entries: Entry<Hash>[], level: number): InternalNodeImpl;\n newNodeImpl(\n entries: Entry<Hash>[] | Entry<FrozenJSONValue>[],\n level: number,\n ): InternalNodeImpl | DataNodeImpl;\n newNodeImpl(\n entries: Entry<Hash>[] | Entry<FrozenJSONValue>[],\n level: number,\n ): InternalNodeImpl | DataNodeImpl {\n const n = newNodeImpl(entries, newRandomHash(), level, true);\n this.#addToModified(n);\n return n;\n }\n\n put(key: string, value: FrozenJSONValue): Promise<void> {\n return this.#lock.withLock(async () => {\n const oldRootNode = await this.getNode(this.rootHash);\n const entrySize = this.getEntrySize(key, value);\n const rootNode = await oldRootNode.set(key, value, entrySize, this);\n\n // We do the rebalancing in the parent so we need to do it here as well.\n if (rootNode.getChildNodeSize(this) > this.maxSize) {\n const headerSize = this.chunkHeaderSize;\n const partitions = partition(\n rootNode.entries,\n value => value[2],\n this.minSize - headerSize,\n this.maxSize - headerSize,\n );\n const {level} = rootNode;\n const entries: Entry<Hash>[] = partitions.map(entries => {\n const node = this.newNodeImpl(entries, level);\n return createNewInternalEntryForNode(node, this.getEntrySize);\n });\n const newRoot = this.newInternalNodeImpl(entries, level + 1);\n this.rootHash = newRoot.hash;\n return;\n }\n\n this.rootHash = rootNode.hash;\n });\n }\n\n del(key: string): Promise<boolean> {\n return this.#lock.withLock(async () => {\n const oldRootNode = await this.getNode(this.rootHash);\n const newRootNode = await oldRootNode.del(key, this);\n\n // No need to rebalance here since if root gets too small there is nothing\n // we can do about that.\n const found = 
this.rootHash !== newRootNode.hash;\n if (found) {\n // Flatten one layer.\n if (newRootNode.level > 0 && newRootNode.entries.length === 1) {\n this.rootHash = (newRootNode as InternalNodeImpl).entries[0][1];\n } else {\n this.rootHash = newRootNode.hash;\n }\n }\n\n return found;\n });\n }\n\n clear(): Promise<void> {\n return this.#lock.withLock(() => {\n this.#modified.clear();\n this.rootHash = emptyHash;\n });\n }\n\n flush(): Promise<Hash> {\n return this.#lock.withLock(async () => {\n const dagWrite = this._dagRead;\n\n if (this.rootHash === emptyHash) {\n // Write a chunk for the empty tree.\n const chunk = dagWrite.createChunk(emptyDataNode, []);\n await dagWrite.putChunk(chunk as Chunk<ReadonlyJSONValue>);\n return chunk.hash;\n }\n\n const newChunks: Chunk[] = [];\n const newRoot = gatherNewChunks(\n this.rootHash,\n newChunks,\n dagWrite.createChunk,\n this.#modified,\n this._formatVersion,\n );\n await Promise.all(newChunks.map(chunk => dagWrite.putChunk(chunk)));\n this.#modified.clear();\n this.rootHash = newRoot;\n return newRoot;\n });\n }\n}\n\nfunction gatherNewChunks(\n hash: Hash,\n newChunks: Chunk[],\n createChunk: CreateChunk,\n modified: Map<Hash, DataNodeImpl | InternalNodeImpl>,\n formatVersion: FormatVersion,\n): Hash {\n const node = modified.get(hash);\n if (node === undefined) {\n // Not modified, use the original.\n return hash;\n }\n\n if (isDataNodeImpl(node)) {\n const chunk = createChunk(toChunkData(node, formatVersion), []);\n newChunks.push(chunk);\n return chunk.hash;\n }\n\n // The BTree cannot have duplicate keys so the child entry hashes are unique.\n // No need for a set to dedupe here.\n const refs: Hash[] = [];\n const {entries} = node;\n for (let i = 0; i < entries.length; i++) {\n const entry = entries[i];\n const childHash = entry[1];\n const newChildHash = gatherNewChunks(\n childHash,\n newChunks,\n createChunk,\n modified,\n formatVersion,\n );\n if (newChildHash !== childHash) {\n // MUTATES the entries!\n // Hashes do not change the size of the entry because all hashes have the same length.\n entries[i] = [entry[0], newChildHash, entry[2]];\n }\n refs.push(newChildHash);\n }\n const chunk = createChunk(toChunkData(node, formatVersion), toRefs(refs));\n newChunks.push(chunk);\n return chunk.hash;\n}\n", "export function lazy<T>(factory: () => T): () => T {\n let value: T | undefined;\n return () => {\n if (value === undefined) {\n value = factory();\n }\n return value;\n };\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {diff as btreeDiff} from '../btree/diff.ts';\nimport type {InternalDiff} from '../btree/node.ts';\nimport {allEntriesAsDiff, BTreeRead} from '../btree/read.ts';\nimport type {Read} from '../dag/store.ts';\nimport {Commit, commitFromHash, type Meta} from '../db/commit.ts';\nimport {readIndexesForRead} from '../db/read.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {Hash} from '../hash.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\n/**\n * Interface allowing different diff functions to skip costly diff computations.\n */\nexport interface DiffComputationConfig {\n shouldComputeDiffs(): boolean;\n shouldComputeDiffsForIndex(name: string): boolean;\n}\n\n/**\n * The diffs in different indexes. 
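The `lazy` helper above memoizes the first defined result of its factory. A small usage sketch, with a caveat that is visible in the implementation itself:

```ts
import {lazy} from './lazy.ts';

let calls = 0;
const getConfig = lazy(() => {
  calls++;
  return {verbose: true};
});
getConfig();
getConfig();
// calls === 1. Caveat: the cache check is `value === undefined`, so a
// factory that returns undefined is re-invoked on every call.
```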
The key of the map is the index name.\n * \"\" is used for the primary index.\n */\nexport class DiffsMap extends Map<string, InternalDiff> {\n override set(key: string, value: InternalDiff): this {\n if (value.length === 0) {\n return this;\n }\n return super.set(key, value);\n }\n}\n\n/**\n * Diffs the state of the db at two different hashes.\n * It will include the primary indexes as well as all the secondary indexes.\n */\nexport async function diff(\n oldHash: Hash,\n newHash: Hash,\n read: Read,\n diffConfig: DiffComputationConfig,\n formatVersion: FormatVersion,\n): Promise<DiffsMap> {\n const [oldCommit, newCommit] = await Promise.all([\n commitFromHash(oldHash, read),\n commitFromHash(newHash, read),\n ]);\n\n return diffCommits(oldCommit, newCommit, read, diffConfig, formatVersion);\n}\n\n/**\n * Diffs the state of the db at two different commits.\n * It will include the primary indexes as well as all the secondary indexes.\n */\n// TODO: this should probably move to db/\nexport async function diffCommits(\n oldCommit: Commit<Meta>,\n newCommit: Commit<Meta>,\n read: Read,\n diffConfig: DiffComputationConfig,\n formatVersion: FormatVersion,\n): Promise<DiffsMap> {\n const diffsMap = new DiffsMap();\n if (!diffConfig.shouldComputeDiffs()) {\n return diffsMap;\n }\n\n const oldMap = new BTreeRead(read, formatVersion, oldCommit.valueHash);\n const newMap = new BTreeRead(read, formatVersion, newCommit.valueHash);\n const valueDiff = await btreeDiff(oldMap, newMap);\n diffsMap.set('', valueDiff);\n\n await addDiffsForIndexes(\n oldCommit,\n newCommit,\n read,\n diffsMap,\n diffConfig,\n formatVersion,\n );\n\n return diffsMap;\n}\n\nexport async function addDiffsForIndexes(\n mainCommit: Commit<Meta>,\n syncCommit: Commit<Meta>,\n read: Read,\n diffsMap: DiffsMap,\n diffConfig: DiffComputationConfig,\n formatVersion: FormatVersion,\n) {\n const oldIndexes = readIndexesForRead(mainCommit, read, formatVersion);\n const newIndexes = readIndexesForRead(syncCommit, read, formatVersion);\n\n for (const [oldIndexName, oldIndex] of oldIndexes) {\n if (!diffConfig.shouldComputeDiffsForIndex(oldIndexName)) {\n continue;\n }\n\n const newIndex = newIndexes.get(oldIndexName);\n if (newIndex !== undefined) {\n assert(newIndex !== oldIndex);\n const diffs = await btreeDiff(oldIndex.map, newIndex.map);\n newIndexes.delete(oldIndexName);\n diffsMap.set(oldIndexName, diffs);\n } else {\n // old index name is not in the new indexes. All entries removed!\n const diffs = await allEntriesAsDiff(oldIndex.map, 'del');\n diffsMap.set(oldIndexName, diffs);\n }\n }\n\n for (const [newIndexName, newIndex] of newIndexes) {\n if (!diffConfig.shouldComputeDiffsForIndex(newIndexName)) {\n continue;\n }\n // new index name is not in the old indexes. 
All keys added!\n const diffs = await allEntriesAsDiff(newIndex.map, 'add');\n diffsMap.set(newIndexName, diffs);\n }\n}\n", "import type {LogContext} from '@rocicorp/logger';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {diff} from '../btree/diff.ts';\nimport type {InternalDiff} from '../btree/node.ts';\nimport {BTreeRead, allEntriesAsDiff} from '../btree/read.ts';\nimport {BTreeWrite} from '../btree/write.ts';\nimport type {FrozenCookie} from '../cookies.ts';\nimport type {Write as DagWrite} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash} from '../hash.ts';\nimport {lazy} from '../lazy.ts';\nimport type {DiffComputationConfig} from '../sync/diff.ts';\nimport {DiffsMap} from '../sync/diff.ts';\nimport type {ClientID} from '../sync/ids.ts';\nimport {\n Commit,\n type Meta as CommitMeta,\n type IndexRecord,\n type Meta,\n baseSnapshotHashFromHash,\n commitFromHash,\n newLocalDD31 as commitNewLocalDD31,\n newSnapshotDD31 as commitNewSnapshotDD31,\n getMutationID,\n} from './commit.ts';\nimport * as IndexOperation from './index-operation-enum.ts';\nimport {IndexRead, IndexWrite, indexValue} from './index.ts';\nimport * as MetaType from './meta-type-enum.ts';\nimport {Read, readIndexesForRead} from './read.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class Write extends Read {\n readonly #dagWrite: DagWrite;\n readonly #basis: Commit<CommitMeta> | undefined;\n readonly #meta: CommitMeta;\n\n declare map: BTreeWrite;\n\n declare readonly indexes: Map<string, IndexWrite>;\n readonly #clientID: ClientID;\n readonly #formatVersion: FormatVersion;\n\n constructor(\n dagWrite: DagWrite,\n map: BTreeWrite,\n basis: Commit<CommitMeta> | undefined,\n meta: CommitMeta,\n indexes: Map<string, IndexWrite>,\n clientID: ClientID,\n formatVersion: FormatVersion,\n ) {\n // TypeScript has trouble\n super(dagWrite, map, indexes);\n this.#dagWrite = dagWrite;\n this.#basis = basis;\n this.#meta = meta;\n this.#clientID = clientID;\n this.#formatVersion = formatVersion;\n\n // TODO(arv): if (DEBUG) { ...\n if (basis === undefined) {\n assert(meta.basisHash === emptyHash);\n } else {\n assert(meta.basisHash === basis.chunk.hash);\n }\n }\n\n /**\n * The value needs to be frozen since it is kept in memory and used later for\n * comparison as well as returned in `get`.\n */\n async put(\n lc: LogContext,\n key: string,\n value: FrozenJSONValue,\n ): Promise<void> {\n const oldVal = lazy(() => this.map.get(key));\n await updateIndexes(lc, this.indexes, key, oldVal, value);\n\n await this.map.put(key, value);\n }\n\n getMutationID(): Promise<number> {\n return getMutationID(this.#clientID, this.#dagWrite, this.#meta);\n }\n\n async del(lc: LogContext, key: string): Promise<boolean> {\n // TODO(arv): This does the binary search twice. 
We can do better.\n const oldVal = lazy(() => this.map.get(key));\n if (oldVal !== undefined) {\n await updateIndexes(lc, this.indexes, key, oldVal, undefined);\n }\n return this.map.del(key);\n }\n\n async clear(): Promise<void> {\n await this.map.clear();\n const ps = [];\n for (const idx of this.indexes.values()) {\n ps.push(idx.clear());\n }\n await Promise.all(ps);\n }\n\n async putCommit(): Promise<Commit<CommitMeta>> {\n const valueHash = await this.map.flush();\n const indexRecords: IndexRecord[] = [];\n\n for (const index of this.indexes.values()) {\n const valueHash = await index.flush();\n const indexRecord: IndexRecord = {\n definition: index.meta.definition,\n valueHash,\n };\n indexRecords.push(indexRecord);\n }\n\n let commit: Commit<Meta>;\n const meta = this.#meta;\n switch (meta.type) {\n case MetaType.LocalDD31: {\n assert(this.#formatVersion >= FormatVersion.DD31);\n const {\n basisHash,\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n timestamp,\n } = meta;\n commit = commitNewLocalDD31(\n this.#dagWrite.createChunk,\n basisHash,\n await baseSnapshotHashFromHash(basisHash, this.#dagWrite),\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n valueHash,\n indexRecords,\n timestamp,\n this.#clientID,\n );\n break;\n }\n\n case MetaType.SnapshotDD31: {\n assert(this.#formatVersion > FormatVersion.DD31);\n const {basisHash, lastMutationIDs, cookieJSON} = meta;\n commit = commitNewSnapshotDD31(\n this.#dagWrite.createChunk,\n basisHash,\n lastMutationIDs,\n cookieJSON,\n valueHash,\n indexRecords,\n );\n break;\n }\n }\n await this.#dagWrite.putChunk(commit.chunk);\n return commit;\n }\n\n // Return value is the hash of the new commit.\n async commit(headName: string): Promise<Hash> {\n const commit = await this.putCommit();\n const commitHash = commit.chunk.hash;\n await this.#dagWrite.setHead(headName, commitHash);\n await this.#dagWrite.commit();\n return commitHash;\n }\n\n async commitWithDiffs(\n headName: string,\n diffConfig: DiffComputationConfig,\n ): Promise<[Hash, DiffsMap]> {\n const commit = this.putCommit();\n const diffMap = await this.#generateDiffs(diffConfig);\n const commitHash = (await commit).chunk.hash;\n await this.#dagWrite.setHead(headName, commitHash);\n await this.#dagWrite.commit();\n return [commitHash, diffMap];\n }\n\n async #generateDiffs(diffConfig: DiffComputationConfig): Promise<DiffsMap> {\n const diffsMap = new DiffsMap();\n if (!diffConfig.shouldComputeDiffs()) {\n return diffsMap;\n }\n\n let valueDiff: InternalDiff = [];\n if (this.#basis) {\n const basisMap = new BTreeRead(\n this.#dagWrite,\n this.#formatVersion,\n this.#basis.valueHash,\n );\n valueDiff = await diff(basisMap, this.map);\n }\n diffsMap.set('', valueDiff);\n let basisIndexes: Map<string, IndexRead>;\n if (this.#basis) {\n basisIndexes = readIndexesForRead(\n this.#basis,\n this.#dagWrite,\n this.#formatVersion,\n );\n } else {\n basisIndexes = new Map();\n }\n\n for (const [name, index] of this.indexes) {\n if (!diffConfig.shouldComputeDiffsForIndex(name)) {\n continue;\n }\n const basisIndex = basisIndexes.get(name);\n assert(index !== basisIndex);\n\n const indexDiffResult = await (basisIndex\n ? diff(basisIndex.map, index.map)\n : // No basis. All keys are new.\n allEntriesAsDiff(index.map, 'add'));\n diffsMap.set(name, indexDiffResult);\n }\n\n // Handle indexes in basisIndex but not in this.indexes. 
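Note how commitWithDiffs starts putCommit() without awaiting it, computes the diffs while the commit chunk is being assembled, and only awaits the commit afterwards. The same overlap pattern in isolation (overlap is a hypothetical helper, not part of this package):

async function overlap<T, U>(
  start: () => Promise<T>,
  meanwhile: () => Promise<U>,
): Promise<[T, U]> {
  const pending = start(); // kicked off, intentionally not awaited yet
  const side = await meanwhile(); // runs while `pending` is in flight
  return [await pending, side];
}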
All keys are\n // deleted.\n for (const [name, basisIndex] of basisIndexes) {\n if (\n !this.indexes.has(name) &&\n diffConfig.shouldComputeDiffsForIndex(name)\n ) {\n const indexDiffResult = await allEntriesAsDiff(basisIndex.map, 'del');\n diffsMap.set(name, indexDiffResult);\n }\n }\n return diffsMap;\n }\n\n close(): void {\n this.#dagWrite.release();\n }\n}\n\nexport async function newWriteLocal(\n basisHash: Hash,\n mutatorName: string,\n mutatorArgsJSON: FrozenJSONValue,\n originalHash: Hash | null,\n dagWrite: DagWrite,\n timestamp: number,\n clientID: ClientID,\n formatVersion: FormatVersion,\n): Promise<Write> {\n const basis = await commitFromHash(basisHash, dagWrite);\n const bTreeWrite = new BTreeWrite(dagWrite, formatVersion, basis.valueHash);\n const mutationID = await basis.getNextMutationID(clientID, dagWrite);\n const indexes = readIndexesForWrite(basis, dagWrite, formatVersion);\n assert(formatVersion >= FormatVersion.DD31);\n return new Write(\n dagWrite,\n bTreeWrite,\n basis,\n\n {\n type: MetaType.LocalDD31,\n basisHash,\n baseSnapshotHash: await baseSnapshotHashFromHash(basisHash, dagWrite),\n mutatorName,\n mutatorArgsJSON,\n mutationID,\n originalHash,\n timestamp,\n clientID,\n },\n indexes,\n clientID,\n formatVersion,\n );\n}\n\nexport async function newWriteSnapshotDD31(\n basisHash: Hash,\n lastMutationIDs: Record<ClientID, number>,\n cookieJSON: FrozenCookie,\n dagWrite: DagWrite,\n clientID: ClientID,\n formatVersion: FormatVersion,\n): Promise<Write> {\n const basis = await commitFromHash(basisHash, dagWrite);\n const bTreeWrite = new BTreeWrite(dagWrite, formatVersion, basis.valueHash);\n return new Write(\n dagWrite,\n bTreeWrite,\n basis,\n {basisHash, type: MetaType.SnapshotDD31, lastMutationIDs, cookieJSON},\n readIndexesForWrite(basis, dagWrite, formatVersion),\n clientID,\n formatVersion,\n );\n}\n\nexport async function updateIndexes(\n lc: LogContext,\n indexes: Map<string, IndexWrite>,\n key: string,\n oldValGetter: () => Promise<FrozenJSONValue | undefined>,\n newVal: FrozenJSONValue | undefined,\n): Promise<void> {\n const ps: Promise<void>[] = [];\n for (const idx of indexes.values()) {\n const {keyPrefix} = idx.meta.definition;\n if (!keyPrefix || key.startsWith(keyPrefix)) {\n const oldVal = await oldValGetter();\n if (oldVal !== undefined) {\n ps.push(\n indexValue(\n lc,\n idx.map,\n IndexOperation.Remove,\n key,\n oldVal,\n idx.meta.definition.jsonPointer,\n idx.meta.definition.allowEmpty ?? false,\n ),\n );\n }\n if (newVal !== undefined) {\n ps.push(\n indexValue(\n lc,\n idx.map,\n IndexOperation.Add,\n key,\n newVal,\n idx.meta.definition.jsonPointer,\n idx.meta.definition.allowEmpty ?? 
false,\n ),\n );\n }\n }\n }\n await Promise.all(ps);\n}\n\nexport function readIndexesForWrite(\n commit: Commit<CommitMeta>,\n dagWrite: DagWrite,\n formatVersion: FormatVersion,\n): Map<string, IndexWrite> {\n const m = new Map();\n for (const index of commit.indexes) {\n m.set(\n index.definition.name,\n new IndexWrite(\n index,\n new BTreeWrite(dagWrite, formatVersion, index.valueHash),\n ),\n );\n }\n return m;\n}\n\nexport async function createIndexBTree(\n lc: LogContext,\n dagWrite: DagWrite,\n valueMap: BTreeRead,\n prefix: string,\n jsonPointer: string,\n allowEmpty: boolean,\n formatVersion: FormatVersion,\n): Promise<BTreeWrite> {\n const indexMap = new BTreeWrite(dagWrite, formatVersion);\n for await (const entry of valueMap.scan(prefix)) {\n const key = entry[0];\n if (!key.startsWith(prefix)) {\n break;\n }\n await indexValue(\n lc,\n indexMap,\n IndexOperation.Add,\n key,\n entry[1],\n jsonPointer,\n allowEmpty,\n );\n }\n return indexMap;\n}\n", "import * as valita from '../../../shared/src/valita.ts';\n\n/**\n * The ID describing a group of clients. All clients in the same group share a\n * persistent storage (IDB).\n */\nexport type ClientGroupID = string;\n\nexport const clientGroupIDSchema: valita.Type<ClientGroupID> = valita.string();\n\n/**\n * The ID describing a client.\n */\nexport type ClientID = string;\n\nexport const clientIDSchema: valita.Type<ClientID> = valita.string();\n", "import {randomUint64} from '../../../shared/src/random-uint64.ts';\n\n/**\n * Returns a random 18 character string encoded in base32 suitable as a client\n * ID.\n */\nexport function makeClientID(): string {\n const length = 18;\n const high = randomUint64();\n const low = randomUint64();\n const combined = (high << 64n) | low;\n return combined.toString(32).slice(-length).padStart(length, '0');\n}\n", "import type {LogContext} from '@rocicorp/logger';\nimport {assert, assertObject} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {hasOwn} from '../../../shared/src/has-own.ts';\nimport * as valita from '../../../shared/src/valita.ts';\nimport {emptyDataNode} from '../btree/node.ts';\nimport {BTreeRead} from '../btree/read.ts';\nimport {type FrozenCookie, compareCookies} from '../cookies.ts';\nimport {type Refs, toRefs} from '../dag/chunk.ts';\nimport type {Read, Store, Write} from '../dag/store.ts';\nimport {\n type ChunkIndexDefinition,\n Commit,\n type IndexRecord,\n type SnapshotMetaDD31,\n assertSnapshotCommitDD31,\n baseSnapshotFromHash,\n chunkIndexDefinitionEqualIgnoreName,\n getRefs,\n newSnapshotCommitDataDD31,\n toChunkIndexDefinition,\n} from '../db/commit.ts';\nimport {createIndexBTree} from '../db/write.ts';\nimport type {DeletedClients} from '../deleted-clients.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport {type FrozenJSONValue, deepFreeze} from '../frozen-json.ts';\nimport {type Hash, hashSchema} from '../hash.ts';\nimport {type IndexDefinitions, indexDefinitionsEqual} from '../index-defs.ts';\nimport {\n type ClientGroupID,\n type ClientID,\n clientGroupIDSchema,\n} from '../sync/ids.ts';\nimport {withWrite} from '../with-transactions.ts';\nimport {\n type ClientGroup,\n getClientGroup,\n getClientGroups,\n mutatorNamesEqual,\n setClientGroup,\n} from './client-groups.ts';\nimport {makeClientID} from './make-client-id.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport type ClientMap = ReadonlyMap<ClientID, ClientV5 | ClientV6>;\n\nconst clientV5Schema = 
valita.readonlyObject({\n heartbeatTimestampMs: valita.number(),\n\n headHash: hashSchema,\n\n /**\n * The hash of a commit we are in the middle of refreshing into this client's\n * memdag.\n */\n tempRefreshHash: hashSchema.nullable(),\n\n /**\n * ID of this client's perdag client group. This needs to be sent in pull\n * request (to enable syncing all last mutation ids in the client group).\n */\n clientGroupID: clientGroupIDSchema,\n});\n\nexport type ClientV5 = valita.Infer<typeof clientV5Schema>;\n\nconst clientV6Schema = valita.readonlyObject({\n heartbeatTimestampMs: valita.number(),\n\n /**\n * A set of hashes, which contains:\n * 1. The hash of the last commit this client refreshed from its client group\n * (this is the commit it bootstrapped from until it completes its first\n * refresh).\n * 2. One or more hashes that were added to retain chunks of a commit while it\n * was being refreshed into this client's memdag. (This can be one or more\n * because refresh's cleanup step is a separate transaction and can fail).\n * Upon refresh completing and successfully running its clean up step, this\n * set will contain a single hash: the hash of the last commit this client\n * refreshed.\n */\n refreshHashes: valita.readonlyArray(hashSchema),\n\n /**\n * The hash of the last snapshot commit persisted by this client to this\n * client's client group, or null if has never persisted a snapshot.\n */\n persistHash: hashSchema.nullable(),\n\n /**\n * ID of this client's perdag client group. This needs to be sent in pull\n * request (to enable syncing all last mutation ids in the client group).\n */\n clientGroupID: clientGroupIDSchema,\n});\n\nexport type ClientV6 = valita.Infer<typeof clientV6Schema>;\n\nexport type Client = ClientV5 | ClientV6;\n\nfunction isClientV6(client: Client): client is ClientV6 {\n return (client as ClientV6).refreshHashes !== undefined;\n}\n\nexport const CLIENTS_HEAD_NAME = 'clients';\n\nconst clientSchema = valita.union(clientV5Schema, clientV6Schema);\n\nfunction assertClient(value: unknown): asserts value is Client {\n valita.assert(value, clientSchema);\n}\n\nexport function assertClientV6(value: unknown): asserts value is ClientV6 {\n valita.assert(value, clientV6Schema);\n}\n\nfunction chunkDataToClientMap(chunkData: unknown): ClientMap {\n assertObject(chunkData);\n const clients = new Map();\n for (const key in chunkData) {\n if (hasOwn(chunkData, key)) {\n const value = chunkData[key];\n if (value !== undefined) {\n assertClient(value);\n clients.set(key, value);\n }\n }\n }\n return clients;\n}\n\nfunction clientMapToChunkData(\n clients: ClientMap,\n dagWrite: Write,\n): FrozenJSONValue {\n for (const client of clients.values()) {\n if (isClientV6(client)) {\n client.refreshHashes.forEach(dagWrite.assertValidHash);\n if (client.persistHash) {\n dagWrite.assertValidHash(client.persistHash);\n }\n } else {\n dagWrite.assertValidHash(client.headHash);\n if (client.tempRefreshHash) {\n dagWrite.assertValidHash(client.tempRefreshHash);\n }\n }\n }\n return deepFreeze(Object.fromEntries(clients));\n}\n\nexport async function getClients(dagRead: Read): Promise<ClientMap> {\n const hash = await dagRead.getHead(CLIENTS_HEAD_NAME);\n return getClientsAtHash(hash, dagRead);\n}\n\nasync function getClientsAtHash(\n hash: Hash | undefined,\n dagRead: Read,\n): Promise<ClientMap> {\n if (!hash) {\n return new Map();\n }\n const chunk = await dagRead.getChunk(hash);\n return chunkDataToClientMap(chunk?.data);\n}\n\n/**\n * Used to signal that a client does not exist. 
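isClientV6 above discriminates record versions purely by the presence of a V6-only field, so previously persisted V5 data keeps validating unchanged. The same idea reduced to its core:

type V5 = {readonly headHash: string};
type V6 = {readonly refreshHashes: readonly string[]};
function isV6(client: V5 | V6): client is V6 {
  // presence of the V6-only field is the version marker
  return (client as V6).refreshHashes !== undefined;
}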
Maybe it was garbage collected?\n */\nexport class ClientStateNotFoundError extends Error {\n name = 'ClientStateNotFoundError';\n readonly id: string;\n constructor(id: ClientID) {\n super(`Client state not found, id: ${id}`);\n this.id = id;\n }\n}\n\n/**\n * Throws a `ClientStateNotFoundError` if the client does not exist.\n */\nexport async function assertHasClientState(\n id: ClientID,\n dagRead: Read,\n): Promise<void> {\n if (!(await hasClientState(id, dagRead))) {\n throw new ClientStateNotFoundError(id);\n }\n}\n\nexport async function hasClientState(\n id: ClientID,\n dagRead: Read,\n): Promise<boolean> {\n return !!(await getClient(id, dagRead));\n}\n\nexport async function getClient(\n id: ClientID,\n dagRead: Read,\n): Promise<Client | undefined> {\n const clients = await getClients(dagRead);\n return clients.get(id);\n}\n\nexport async function mustGetClient(\n id: ClientID,\n dagRead: Read,\n): Promise<Client> {\n const client = await getClient(id, dagRead);\n if (!client) {\n throw new ClientStateNotFoundError(id);\n }\n return client;\n}\n\ntype InitClientV6Result = [\n client: ClientV6,\n hash: Hash,\n clientMap: ClientMap,\n newClientGroup: boolean,\n];\n\nexport function initClientV6(\n newClientID: ClientID,\n lc: LogContext,\n perdag: Store,\n mutatorNames: string[],\n indexes: IndexDefinitions,\n formatVersion: FormatVersion,\n enableClientGroupForking: boolean,\n): Promise<InitClientV6Result> {\n return withWrite(perdag, async dagWrite => {\n async function setClientsAndClientGroupAndCommit(\n basisHash: Hash | null,\n cookieJSON: FrozenCookie,\n valueHash: Hash,\n indexRecords: readonly IndexRecord[],\n ): Promise<InitClientV6Result> {\n const newSnapshotData = newSnapshotCommitDataDD31(\n basisHash,\n {},\n cookieJSON,\n valueHash,\n indexRecords,\n );\n const chunk = dagWrite.createChunk(\n newSnapshotData,\n getRefs(newSnapshotData),\n );\n\n const newClientGroupID = makeClientID();\n\n const newClient: ClientV6 = {\n heartbeatTimestampMs: Date.now(),\n refreshHashes: [chunk.hash],\n persistHash: null,\n clientGroupID: newClientGroupID,\n };\n\n const newClients = new Map(clients).set(newClientID, newClient);\n\n const clientGroup: ClientGroup = {\n headHash: chunk.hash,\n mutatorNames,\n indexes,\n mutationIDs: {},\n lastServerAckdMutationIDs: {},\n disabled: false,\n };\n\n await Promise.all([\n dagWrite.putChunk(chunk),\n setClients(newClients, dagWrite),\n setClientGroup(newClientGroupID, clientGroup, dagWrite),\n ]);\n\n return [newClient, chunk.hash, newClients, true];\n }\n\n const clients = await getClients(dagWrite);\n\n const res = await findMatchingClient(dagWrite, mutatorNames, indexes);\n if (res.type === FIND_MATCHING_CLIENT_TYPE_HEAD) {\n // We found a client group with matching mutators and indexes. We can\n // reuse it.\n const {clientGroupID, headHash} = res;\n\n const newClient: ClientV6 = {\n clientGroupID,\n refreshHashes: [headHash],\n heartbeatTimestampMs: Date.now(),\n persistHash: null,\n };\n const newClients = new Map(clients).set(newClientID, newClient);\n await setClients(newClients, dagWrite);\n\n return [newClient, headHash, newClients, false];\n }\n\n if (\n !enableClientGroupForking ||\n res.type === FIND_MATCHING_CLIENT_TYPE_NEW\n ) {\n // No client group to fork from. 
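mustGetClient above is the usual resolve-or-throw wrapper around a lookup that may miss. Generalized (mustGet is a hypothetical helper; the error class is the one defined above), the shape is:

async function mustGet<T>(
  id: string,
  get: (id: string) => Promise<T | undefined>,
): Promise<T> {
  const value = await get(id);
  if (value === undefined) {
    // surface the missing id so callers can recover, e.g. by re-initializing
    throw new ClientStateNotFoundError(id);
  }
  return value;
}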
Create empty snapshot.\n const emptyBTreeChunk = dagWrite.createChunk(emptyDataNode, []);\n await dagWrite.putChunk(emptyBTreeChunk);\n\n // Create indexes\n const indexRecords: IndexRecord[] = [];\n\n // At this point the value of replicache is the empty tree so all index\n // maps will also be the empty tree.\n for (const [name, indexDefinition] of Object.entries(indexes)) {\n const chunkIndexDefinition = toChunkIndexDefinition(\n name,\n indexDefinition,\n );\n indexRecords.push({\n definition: chunkIndexDefinition,\n valueHash: emptyBTreeChunk.hash,\n });\n }\n\n return setClientsAndClientGroupAndCommit(\n null,\n null,\n emptyBTreeChunk.hash,\n indexRecords,\n );\n }\n\n // Now we create a new client and client group that we fork from the found\n // snapshot.\n assert(res.type === FIND_MATCHING_CLIENT_TYPE_FORK);\n\n const {snapshot} = res;\n\n // Create indexes\n const indexRecords: IndexRecord[] = [];\n const {valueHash, indexes: oldIndexes} = snapshot;\n const map = new BTreeRead(dagWrite, formatVersion, valueHash);\n\n for (const [name, indexDefinition] of Object.entries(indexes)) {\n const {prefix = '', jsonPointer, allowEmpty = false} = indexDefinition;\n const chunkIndexDefinition: ChunkIndexDefinition = {\n name,\n keyPrefix: prefix,\n jsonPointer,\n allowEmpty,\n };\n\n const oldIndex = findMatchingOldIndex(oldIndexes, chunkIndexDefinition);\n if (oldIndex) {\n indexRecords.push({\n definition: chunkIndexDefinition,\n valueHash: oldIndex.valueHash,\n });\n } else {\n const indexBTree = await createIndexBTree(\n lc,\n dagWrite,\n map,\n prefix,\n jsonPointer,\n allowEmpty,\n formatVersion,\n );\n indexRecords.push({\n definition: chunkIndexDefinition,\n valueHash: await indexBTree.flush(),\n });\n }\n }\n\n return setClientsAndClientGroupAndCommit(\n snapshot.meta.basisHash,\n snapshot.meta.cookieJSON,\n snapshot.valueHash,\n indexRecords,\n );\n });\n}\n\nfunction findMatchingOldIndex(\n oldIndexes: readonly IndexRecord[],\n chunkIndexDefinition: ChunkIndexDefinition,\n) {\n return oldIndexes.find(index =>\n chunkIndexDefinitionEqualIgnoreName(index.definition, chunkIndexDefinition),\n );\n}\n\nexport const FIND_MATCHING_CLIENT_TYPE_NEW = 0;\nexport const FIND_MATCHING_CLIENT_TYPE_FORK = 1;\nexport const FIND_MATCHING_CLIENT_TYPE_HEAD = 2;\n\nexport type FindMatchingClientResult =\n | {\n type: typeof FIND_MATCHING_CLIENT_TYPE_NEW;\n }\n | {\n type: typeof FIND_MATCHING_CLIENT_TYPE_FORK;\n snapshot: Commit<SnapshotMetaDD31>;\n }\n | {\n type: typeof FIND_MATCHING_CLIENT_TYPE_HEAD;\n clientGroupID: ClientGroupID;\n headHash: Hash;\n };\n\nexport async function findMatchingClient(\n dagRead: Read,\n mutatorNames: string[],\n indexes: IndexDefinitions,\n): Promise<FindMatchingClientResult> {\n let newestCookie: FrozenCookie | undefined;\n let bestSnapshot: Commit<SnapshotMetaDD31> | undefined;\n const mutatorNamesSet = new Set(mutatorNames);\n\n const clientGroups = await getClientGroups(dagRead);\n for (const [clientGroupID, clientGroup] of clientGroups) {\n if (\n !clientGroup.disabled &&\n mutatorNamesEqual(mutatorNamesSet, clientGroup.mutatorNames) &&\n indexDefinitionsEqual(indexes, clientGroup.indexes)\n ) {\n // exact match\n return {\n type: FIND_MATCHING_CLIENT_TYPE_HEAD,\n clientGroupID,\n headHash: clientGroup.headHash,\n };\n }\n\n const clientGroupSnapshotCommit = await baseSnapshotFromHash(\n clientGroup.headHash,\n dagRead,\n );\n assertSnapshotCommitDD31(clientGroupSnapshotCommit);\n\n const {cookieJSON} = clientGroupSnapshotCommit.meta;\n if (\n newestCookie === 
undefined ||\n compareCookies(cookieJSON, newestCookie) > 0\n ) {\n newestCookie = cookieJSON;\n bestSnapshot = clientGroupSnapshotCommit;\n }\n }\n\n if (bestSnapshot) {\n return {\n type: FIND_MATCHING_CLIENT_TYPE_FORK,\n snapshot: bestSnapshot,\n };\n }\n\n return {type: FIND_MATCHING_CLIENT_TYPE_NEW};\n}\n\nfunction getRefsForClients(clients: ClientMap): Refs {\n const refs: Set<Hash> = new Set();\n for (const client of clients.values()) {\n if (isClientV6(client)) {\n for (const hash of client.refreshHashes) {\n refs.add(hash);\n }\n if (client.persistHash) {\n refs.add(client.persistHash);\n }\n } else {\n refs.add(client.headHash);\n if (client.tempRefreshHash) {\n refs.add(client.tempRefreshHash);\n }\n }\n }\n return toRefs(refs);\n}\n\nexport async function getClientGroupForClient(\n clientID: ClientID,\n read: Read,\n): Promise<ClientGroup | undefined> {\n const clientGroupID = await getClientGroupIDForClient(clientID, read);\n if (!clientGroupID) {\n return undefined;\n }\n return getClientGroup(clientGroupID, read);\n}\n\nexport async function getClientGroupIDForClient(\n clientID: ClientID,\n read: Read,\n): Promise<ClientGroupID | undefined> {\n const client = await getClient(clientID, read);\n return client?.clientGroupID;\n}\n\n/**\n * Adds a Client to the ClientMap and updates the 'clients' head to point at\n * the updated clients.\n */\nexport async function setClient(\n clientID: ClientID,\n client: Client,\n dagWrite: Write,\n): Promise<Hash> {\n const clients = await getClients(dagWrite);\n const newClients = new Map(clients).set(clientID, client);\n return setClients(newClients, dagWrite);\n}\n\n/**\n * Sets the ClientMap and updates the 'clients' head top point at the new\n * clients.\n */\nexport async function setClients(\n clients: ClientMap,\n dagWrite: Write,\n): Promise<Hash> {\n const chunkData = clientMapToChunkData(clients, dagWrite);\n const chunk = dagWrite.createChunk(chunkData, getRefsForClients(clients));\n await dagWrite.putChunk(chunk);\n await dagWrite.setHead(CLIENTS_HEAD_NAME, chunk.hash);\n return chunk.hash;\n}\n\n/**\n * Callback function for when Replicache has deleted one or more clients.\n */\nexport type OnClientsDeleted = (\n deletedClients: DeletedClients,\n) => Promise<void>;\n", "export function mapValues<T extends Record<string, unknown>, U>(\n input: T,\n mapper: (value: T[keyof T]) => U,\n): {[K in keyof T]: U} {\n return mapEntries(input, (k, v) => [k, mapper(v as T[keyof T])]) as {\n [K in keyof T]: U;\n };\n}\n\nexport function mapEntries<T, U>(\n input: Record<string, T>,\n mapper: (key: string, val: T) => [key: string, val: U],\n): Record<string, U> {\n // Direct assignment is faster than Object.fromEntries()\n // https://github.com/rocicorp/mono/pull/3927#issuecomment-2706059475\n const output: Record<string, U> = {};\n\n // In chrome Object.entries is faster than for-in (13x) or Object.keys (15x)\n // https://gist.github.com/arv/1b4e113724f6a14e2d4742bcc760d1fa\n for (const entry of Object.entries(input)) {\n const mapped = mapper(entry[0], entry[1]);\n output[mapped[0]] = mapped[1];\n }\n return output;\n}\n\nexport function mapAllEntries<T, U>(\n input: Record<string, T>,\n mapper: (entries: [key: string, val: T][]) => [key: string, val: U][],\n): Record<string, U> {\n // Direct assignment is faster than Object.fromEntries()\n // https://github.com/rocicorp/mono/pull/3927#issuecomment-2706059475\n const output: Record<string, U> = {};\n for (const mapped of mapper(Object.entries(input))) {\n output[mapped[0]] = mapped[1];\n 
}\n return output;\n}\n", "export function must<T>(v: T | undefined | null, msg?: string): T {\n // eslint-disable-next-line eqeqeq\n if (v == null) {\n throw new Error(msg ?? `Unexpected ${v} value`);\n }\n return v;\n}\n", "import {compareUTF8} from 'compare-utf8';\nimport {\n assertBoolean,\n assertNumber,\n assertString,\n} from '../../../shared/src/asserts.ts';\nimport type {Ordering} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * A row flowing through the pipeline, plus its relationships.\n * Relationships are generated lazily as read.\n */\nexport type Node = {\n row: Row;\n relationships: Record<string, () => Stream<Node>>;\n};\n\n/**\n * Compare two values. The values must be of the same type. This function\n * throws at runtime if the types differ.\n *\n * Note, this function considers `null === null` and\n * `undefined === undefined`. This is different than SQL. In join code,\n * null must be treated separately.\n *\n * See: https://github.com/rocicorp/mono/pull/2116/files#r1704811479\n *\n * @returns < 0 if a < b, 0 if a === b, > 0 if a > b\n */\nexport function compareValues(a: Value, b: Value): number {\n a = normalizeUndefined(a);\n b = normalizeUndefined(b);\n\n if (a === b) {\n return 0;\n }\n if (a === null) {\n return -1;\n }\n if (b === null) {\n return 1;\n }\n if (typeof a === 'boolean') {\n assertBoolean(b);\n return a ? 1 : -1;\n }\n if (typeof a === 'number') {\n assertNumber(b);\n return a - b;\n }\n if (typeof a === 'string') {\n assertString(b);\n // We compare all strings in Zero as UTF-8. This is the default on SQLite\n // and we need to match it. See:\n // https://blog.replicache.dev/blog/replicache-11-adventures-in-text-encoding.\n //\n // TODO: We could change this since SQLite supports UTF-16. Microbenchmark\n // to see if there's a big win.\n //\n // https://www.sqlite.org/c3ref/create_collation.html\n return compareUTF8(a, b);\n }\n throw new Error(`Unsupported type: ${a}`);\n}\n\nexport type NormalizedValue = Exclude<Value, undefined>;\n\n/**\n * We allow undefined to be passed for the convenience of developers, but we\n * treat it equivalently to null. It's better for perf to not create an copy\n * of input values, so we just normalize at use when necessary.\n */\nexport function normalizeUndefined(v: Value): NormalizedValue {\n return v ?? null;\n}\n\nexport type Comparator = (r1: Row, r2: Row) => number;\n\nexport function makeComparator(\n order: Ordering,\n reverse?: boolean | undefined,\n): Comparator {\n return (a, b) => {\n // Skip destructuring here since it is hot code.\n for (const ord of order) {\n const field = ord[0];\n const comp = compareValues(a[field], b[field]);\n if (comp !== 0) {\n const result = ord[1] === 'asc' ? comp : -comp;\n return reverse ? -result : result;\n }\n }\n return 0;\n };\n}\n\n/**\n * Determine if two values are equal. 
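A usage sketch for makeComparator above, sorting sample rows by name ascending and then id descending:

const cmp = makeComparator([
  ['name', 'asc'],
  ['id', 'desc'],
]);
const rows = [
  {id: 1, name: 'b'},
  {id: 2, name: 'a'},
  {id: 3, name: 'a'},
];
// Ties on name fall through to id, which sorts descending:
rows.sort(cmp); // → {id: 3, name: 'a'}, {id: 2, name: 'a'}, {id: 1, name: 'b'}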
Note that unlike compareValues() above,\n * this function treats `null` as unequal to itself (and same for `undefined`).\n * This is required to make joins work correctly, but may not be the right\n * semantic for your application.\n */\nexport function valuesEqual(a: Value, b: Value): boolean {\n // eslint-disable-next-line eqeqeq\n if (a == null || b == null) {\n return false;\n }\n return a === b;\n}\n\nexport function drainStreams(node: Node) {\n for (const stream of Object.values(node.relationships)) {\n for (const node of stream()) {\n drainStreams(node);\n }\n }\n}\n", "import {\n assert,\n assertArray,\n assertNumber,\n unreachable,\n} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport {drainStreams, type Comparator, type Node} from './data.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Entry, Format} from './view.ts';\n\nexport const refCountSymbol = Symbol('rc');\nexport const idSymbol = Symbol('id');\n\ntype MetaEntry = Writable<Entry> & {\n [refCountSymbol]: number;\n [idSymbol]?: string | undefined;\n};\ntype MetaEntryList = MetaEntry[];\n\n/**\n * `applyChange` does not consume the `relationships` of `ChildChange#node`,\n * `EditChange#node` and `EditChange#oldNode`. The `ViewChange` type\n * documents and enforces this via the type system.\n */\nexport type ViewChange =\n | AddViewChange\n | RemoveViewChange\n | ChildViewChange\n | EditViewChange;\n\nexport type RowOnlyNode = {row: Row};\n\nexport type AddViewChange = {\n type: 'add';\n node: Node;\n};\n\nexport type RemoveViewChange = {\n type: 'remove';\n node: Node;\n};\n\ntype ChildViewChange = {\n type: 'child';\n node: RowOnlyNode;\n child: {\n relationshipName: string;\n change: ViewChange;\n };\n};\n\ntype EditViewChange = {\n type: 'edit';\n node: RowOnlyNode;\n oldNode: RowOnlyNode;\n};\n\n/**\n * This is a subset of WeakMap but restricted to what we need.\n * @deprecated Not used anymore. This will be removed in the future.\n */\nexport interface RefCountMap {\n get(entry: Entry): number | undefined;\n set(entry: Entry, refCount: number): void;\n delete(entry: Entry): boolean;\n}\n\nexport function applyChange(\n parentEntry: Entry,\n change: ViewChange,\n schema: SourceSchema,\n relationship: string,\n format: Format,\n withIDs = false,\n): void {\n if (schema.isHidden) {\n switch (change.type) {\n case 'add':\n case 'remove':\n for (const [relationship, children] of Object.entries(\n change.node.relationships,\n )) {\n const childSchema = must(schema.relationships[relationship]);\n for (const node of children()) {\n applyChange(\n parentEntry,\n {type: change.type, node},\n childSchema,\n relationship,\n format,\n withIDs,\n );\n }\n }\n return;\n case 'edit':\n // If hidden at this level it means that the hidden row was changed. 
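For reference, the two comparison helpers from data.ts above encode deliberately different null semantics:

compareValues(null, null); // 0 — sort/identity semantics treat nulls as equal
valuesEqual(null, null);   // false — join semantics: null matches nothing
valuesEqual(1, 1);         // true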
If\n // the row was changed in such a way that it would change the\n // relationships then the edit would have been split into remove and\n // add.\n return;\n case 'child': {\n const childSchema = must(\n schema.relationships[change.child.relationshipName],\n );\n applyChange(\n parentEntry,\n change.child.change,\n childSchema,\n relationship,\n format,\n withIDs,\n );\n return;\n }\n default:\n unreachable(change);\n }\n }\n\n const {singular, relationships: childFormats} = format;\n switch (change.type) {\n case 'add': {\n let newEntry: MetaEntry | undefined;\n\n if (singular) {\n const oldEntry = parentEntry[relationship] as MetaEntry | undefined;\n if (oldEntry !== undefined) {\n assert(\n schema.compareRows(oldEntry, change.node.row) === 0,\n `Singular relationship '${relationship}' should not have multiple rows. You may need to declare this relationship with the \\`many\\` helper instead of the \\`one\\` helper in your schema.`,\n );\n // adding same again.\n oldEntry[refCountSymbol]++;\n } else {\n newEntry = makeNewMetaEntry(change.node.row, schema, withIDs, 1);\n\n (parentEntry as Writable<Entry>)[relationship] = newEntry;\n }\n } else {\n newEntry = add(\n change.node.row,\n getChildEntryList(parentEntry, relationship),\n schema,\n withIDs,\n );\n }\n\n if (newEntry) {\n for (const [relationship, children] of Object.entries(\n change.node.relationships,\n )) {\n // TODO: Is there a flag to make TypeScript complain that dictionary access might be undefined?\n const childSchema = must(schema.relationships[relationship]);\n const childFormat = childFormats[relationship];\n if (childFormat === undefined) {\n continue;\n }\n\n const newView = childFormat.singular\n ? undefined\n : ([] as MetaEntryList);\n newEntry[relationship] = newView;\n\n for (const node of children()) {\n applyChange(\n newEntry,\n {type: 'add', node},\n childSchema,\n relationship,\n childFormat,\n withIDs,\n );\n }\n }\n }\n break;\n }\n case 'remove': {\n if (singular) {\n const oldEntry = parentEntry[relationship] as MetaEntry | undefined;\n assert(oldEntry !== undefined, 'node does not exist');\n const rc = oldEntry[refCountSymbol];\n if (rc === 1) {\n (parentEntry as Writable<Entry>)[relationship] = undefined;\n }\n oldEntry[refCountSymbol]--;\n } else {\n removeAndUpdateRefCount(\n getChildEntryList(parentEntry, relationship),\n change.node.row,\n schema.compareRows,\n );\n }\n // Needed to ensure cleanup of operator state is fully done.\n drainStreams(change.node);\n break;\n }\n case 'child': {\n let existing: MetaEntry;\n if (singular) {\n existing = getSingularEntry(parentEntry, relationship);\n } else {\n const view = getChildEntryList(parentEntry, relationship);\n const {pos, found} = binarySearch(\n view,\n change.node.row,\n schema.compareRows,\n );\n assert(found, 'node does not exist');\n existing = view[pos];\n }\n\n const childSchema = must(\n schema.relationships[change.child.relationshipName],\n );\n const childFormat = format.relationships[change.child.relationshipName];\n if (childFormat !== undefined) {\n applyChange(\n existing,\n change.child.change,\n childSchema,\n change.child.relationshipName,\n childFormat,\n withIDs,\n );\n }\n break;\n }\n case 'edit': {\n if (singular) {\n const existing = parentEntry[relationship];\n assertMetaEntry(existing);\n applyEdit(existing, change, schema, withIDs);\n } else {\n const view = getChildEntryList(parentEntry, relationship);\n // The position of the row in the list may have changed due to the edit.\n if 
(schema.compareRows(change.oldNode.row, change.node.row) !== 0) {\n const {pos: oldPos, found: oldFound} = binarySearch(\n view,\n change.oldNode.row,\n schema.compareRows,\n );\n assert(oldFound, 'old node does not exist');\n const oldEntry = view[oldPos];\n const {pos, found} = binarySearch(\n view,\n change.node.row,\n schema.compareRows,\n );\n // A special case:\n // when refCount is 1 (so the row is being moved\n // without leaving a placeholder behind), and the new pos is\n // the same as the old, or directly after the old (so after the remove\n // of the old it would be in the same pos):\n // the row does not need to be moved, it can just be edited in place.\n if (\n oldEntry[refCountSymbol] === 1 &&\n (pos === oldPos || pos - 1 === oldPos)\n ) {\n applyEdit(oldEntry, change, schema, withIDs);\n } else {\n // Move the row. If the row has > 1 ref count, an edit should\n // be received for each ref count. On the first edit, the original\n // row is moved, the edit is applied to it and its ref count is set\n // to 1. A shallow copy of the row is left at the old pos for\n // processing of the remaining edit, and the copy's ref count\n // is decremented. As each edit is received the ref count of the\n // copy is decrement, and the ref count of the row at the new\n // position is incremented. When the copy's ref count goes to 0,\n // it is removed.\n oldEntry[refCountSymbol]--;\n let adjustedPos = pos;\n if (oldEntry[refCountSymbol] === 0) {\n view.splice(oldPos, 1);\n adjustedPos = oldPos < pos ? pos - 1 : pos;\n }\n\n let entryToEdit;\n if (found) {\n entryToEdit = view[adjustedPos];\n } else {\n view.splice(adjustedPos, 0, oldEntry);\n entryToEdit = oldEntry;\n if (oldEntry[refCountSymbol] > 0) {\n const oldEntryCopy = {...oldEntry};\n view[oldPos] = oldEntryCopy;\n }\n }\n entryToEdit[refCountSymbol]++;\n applyEdit(entryToEdit, change, schema, withIDs);\n }\n } else {\n // Position could not have changed, so simply edit in place.\n const {pos, found} = binarySearch(\n view,\n change.oldNode.row,\n schema.compareRows,\n );\n assert(found, 'node does not exist');\n applyEdit(view[pos], change, schema, withIDs);\n }\n }\n\n break;\n }\n default:\n unreachable(change);\n }\n}\n\nfunction applyEdit(\n existing: MetaEntry,\n change: EditViewChange,\n schema: SourceSchema,\n withIDs: boolean,\n) {\n Object.assign(existing, change.node.row);\n if (withIDs) {\n existing[idSymbol] = makeID(change.node.row, schema);\n }\n}\n\nfunction add(\n row: Row,\n view: MetaEntryList,\n schema: SourceSchema,\n withIDs: boolean,\n): MetaEntry | undefined {\n const {pos, found} = binarySearch(view, row, schema.compareRows);\n\n if (found) {\n view[pos][refCountSymbol]++;\n return undefined;\n }\n const newEntry = makeNewMetaEntry(row, schema, withIDs, 1);\n view.splice(pos, 0, newEntry);\n return newEntry;\n}\n\nfunction removeAndUpdateRefCount(\n view: MetaEntryList,\n row: Row,\n compareRows: Comparator,\n): MetaEntry {\n const {pos, found} = binarySearch(view, row, compareRows);\n assert(found, 'node does not exist');\n const oldEntry = view[pos];\n const rc = oldEntry[refCountSymbol];\n if (rc === 1) {\n view.splice(pos, 1);\n }\n oldEntry[refCountSymbol]--;\n\n return oldEntry;\n}\n\n// TODO: Do not return an object. 
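A worked instance of the in-place fast path above: with a ref count of 1, if the edited row's insertion point equals its old position, or sits directly after it, then removing and reinserting would land the row back where it started, so it is edited in place instead:

const oldPos = 4;
const pos = 5; // insertion point for the edited row, computed before removal
const editInPlace = pos === oldPos || pos - 1 === oldPos;
console.log(editInPlace); // true — no splice needed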
It puts unnecessary pressure on the GC.\nfunction binarySearch(\n view: MetaEntryList,\n target: Row,\n comparator: Comparator,\n) {\n let low = 0;\n let high = view.length - 1;\n while (low <= high) {\n const mid = (low + high) >>> 1;\n const comparison = comparator(view[mid] as Row, target as Row);\n if (comparison < 0) {\n low = mid + 1;\n } else if (comparison > 0) {\n high = mid - 1;\n } else {\n return {pos: mid, found: true};\n }\n }\n return {pos: low, found: false};\n}\n\nfunction getChildEntryList(\n parentEntry: Entry,\n relationship: string,\n): MetaEntryList {\n const view = parentEntry[relationship];\n assertArray(view);\n return view as MetaEntryList;\n}\n\nfunction assertMetaEntry(v: unknown): asserts v is MetaEntry {\n assertNumber((v as Partial<MetaEntry>)[refCountSymbol]);\n}\n\nfunction getSingularEntry(parentEntry: Entry, relationship: string): MetaEntry {\n const e = parentEntry[relationship];\n assertNumber((e as Partial<MetaEntry>)[refCountSymbol]);\n return e as MetaEntry;\n}\n\nfunction makeNewMetaEntry(\n row: Row,\n schema: SourceSchema,\n withIDs: boolean,\n rc: number,\n): MetaEntry {\n if (withIDs) {\n return {...row, [refCountSymbol]: rc, [idSymbol]: makeID(row, schema)};\n }\n return {...row, [refCountSymbol]: rc};\n}\nfunction makeID(row: Row, schema: SourceSchema) {\n // optimization for case of non-compound primary key\n if (schema.primaryKey.length === 1) {\n return JSON.stringify(row[schema.primaryKey[0]]);\n }\n return JSON.stringify(schema.primaryKey.map(k => row[k]));\n}\n", "import * as valita from '@badrap/valita';\nimport {skipAssertJSONValue} from './config.ts';\nimport type {ReadonlyJSONObject, ReadonlyJSONValue} from './json.ts';\nimport {isJSONObject, isJSONValue} from './json.ts';\nimport * as v from './valita.ts';\n\nconst path: (string | number)[] = [];\n\nexport const jsonSchema: valita.Type<ReadonlyJSONValue> = v\n .unknown()\n .chain(v => {\n if (skipAssertJSONValue) {\n return valita.ok(v as ReadonlyJSONValue);\n }\n const rv = isJSONValue(v, path)\n ? valita.ok(v)\n : valita.err({\n message: `Not a JSON value`,\n path: path.slice(),\n });\n path.length = 0;\n return rv;\n });\n\nexport const jsonObjectSchema: valita.Type<ReadonlyJSONObject> = v\n .unknown()\n .chain(v => {\n if (skipAssertJSONValue) {\n return valita.ok(v as ReadonlyJSONObject);\n }\n const rv = isJSONObject(v, path)\n ? valita.ok(v)\n : valita.err({\n message: `Not a JSON object`,\n path: path.slice(),\n });\n path.length = 0;\n return rv;\n });\n", "import * as v from './valita.ts';\n\n/**\n * Valita schema for TDigest JSON representation.\n * Matches the structure returned by TDigest.toJSON().\n */\nexport const tdigestSchema = v.tuple([v.number()]).concat(v.array(v.number()));\n\nexport type TDigestJSON = v.Infer<typeof tdigestSchema>;\n", "import {jsonSchema} from '../../shared/src/json-schema.ts';\nimport * as v from '../../shared/src/valita.ts';\n\nexport const valueSchema = v.union(jsonSchema, v.undefined());\n\nexport const rowSchema = v.readonlyRecord(valueSchema);\n\n/**\n * The data types that Zero can represent are limited by two things:\n *\n * 1. The underlying Replicache sync layer currently can only represent JSON\n * types. This could possibly be expanded in the future, but we do want to be\n * careful of adding encoding overhead. By using JSON, we are taking\n * advantage of IndexedDB\u2019s fast native JSValue [de]serialization which has\n * historically been a perf advantage for us.\n *\n * 2. 
IDs in Zero need to be comparable because we use them for sorting and row\n * identity. We could expand the set of allowed value types (to include,\n * i.e., Objects) but we would then need to restrict IDs to only comparable\n * types.\n *\n * These two facts leave us with the following allowed types. Zero's replication\n * layer must convert other types into these for tables to be used with Zero.\n *\n * For developer convenience we also allow `undefined`, which we treat\n * equivalently to `null`.\n */\nexport type Value = v.Infer<typeof valueSchema>;\n\n/**\n * A Row is represented as a JS Object.\n *\n * We do everything in IVM as loosely typed values because these pipelines are\n * going to be constructed at runtime by other code, so type-safety can't buy us\n * anything.\n *\n * Also since the calling code on the client ultimately wants objects to work\n * with we end up with a lot less copies by using objects throughout.\n */\nexport type Row = v.Infer<typeof rowSchema>;\n", "import * as v from '../../shared/src/valita.ts';\n\nimport {rowSchema} from './data.ts';\n\nexport const rowCountsByQuerySchema = v.record(v.number());\nexport type RowCountsByQuery = v.Infer<typeof rowCountsByQuerySchema>;\n\nexport const rowCountsBySourceSchema = v.record(rowCountsByQuerySchema);\nexport type RowCountsBySource = v.Infer<typeof rowCountsBySourceSchema>;\n\nexport const rowsByQuerySchema = v.record(v.array(rowSchema));\nexport type RowsByQuery = v.Infer<typeof rowsByQuerySchema>;\n\nexport const rowsBySourceSchema = v.record(rowsByQuerySchema);\nexport type RowsBySource = v.Infer<typeof rowsBySourceSchema>;\n\nexport const analyzeQueryResultSchema = v.object({\n warnings: v.array(v.string()),\n syncedRows: v.record(v.array(rowSchema)).optional(),\n syncedRowCount: v.number(),\n start: v.number(),\n end: v.number(),\n afterPermissions: v.string().optional(),\n vendedRowCounts: rowCountsBySourceSchema.optional(),\n vendedRows: rowsBySourceSchema.optional(),\n plans: v.record(v.array(v.string())).optional(),\n});\n\nexport type AnalyzeQueryResult = v.Infer<typeof analyzeQueryResultSchema>;\n", "/**\n * Wire-format representation of the zql AST interface.\n *\n * `v.Type<...>` types are explicitly declared to facilitate Typescript verification\n * that the schemas satisfy the zql type definitions. 
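A row exercising each allowed Value type from the list above (undefined is also accepted for convenience and treated as null):

const row: Row = {
  id: 'a1',        // string
  count: 3,        // number
  archived: false, // boolean
  deletedAt: null, // null
};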
(Incidentally, explicit types\n * are also required for recursive schema definitions.)\n */\n\nimport {compareUTF8} from 'compare-utf8';\nimport {defined} from '../../shared/src/arrays.ts';\nimport {assert} from '../../shared/src/asserts.ts';\nimport {must} from '../../shared/src/must.ts';\nimport * as v from '../../shared/src/valita.ts';\nimport type {NameMapper} from '../../zero-schema/src/name-mapper.ts';\nimport {rowSchema, type Row} from './data.ts';\n\nexport const selectorSchema = v.string();\nexport const toStaticParam = Symbol();\n\nconst orderingElementSchema = v.readonly(\n v.tuple([selectorSchema, v.literalUnion('asc', 'desc')]),\n);\n\nexport const orderingSchema = v.readonlyArray(orderingElementSchema);\nexport type System = 'permissions' | 'client' | 'test';\n\nexport const primitiveSchema = v.union(\n v.string(),\n v.number(),\n v.boolean(),\n v.null(),\n);\n\nexport const equalityOpsSchema = v.literalUnion('=', '!=', 'IS', 'IS NOT');\n\nexport const orderOpsSchema = v.literalUnion('<', '>', '<=', '>=');\n\nexport const likeOpsSchema = v.literalUnion(\n 'LIKE',\n 'NOT LIKE',\n 'ILIKE',\n 'NOT ILIKE',\n);\n\nexport const inOpsSchema = v.literalUnion('IN', 'NOT IN');\n\nexport const simpleOperatorSchema = v.union(\n equalityOpsSchema,\n orderOpsSchema,\n likeOpsSchema,\n inOpsSchema,\n);\n\nconst literalReferenceSchema: v.Type<LiteralReference> = v.readonlyObject({\n type: v.literal('literal'),\n value: v.union(\n v.string(),\n v.number(),\n v.boolean(),\n v.null(),\n v.readonlyArray(v.union(v.string(), v.number(), v.boolean())),\n ),\n});\nconst columnReferenceSchema: v.Type<ColumnReference> = v.readonlyObject({\n type: v.literal('column'),\n name: v.string(),\n});\n\n/**\n * A parameter is a value that is not known at the time the query is written\n * and is resolved at runtime.\n *\n * Static parameters refer to something provided by the caller.\n * Static parameters are injected when the query pipeline is built from the AST\n * and do not change for the life of that pipeline.\n *\n * An example static parameter is the current authentication data.\n * When a user is authenticated, queries on the server have access\n * to the user's authentication data in order to evaluate authorization rules.\n * Authentication data doesn't change over the life of a query as a change\n * in auth data would represent a log-in / log-out of the user.\n *\n * AncestorParameters refer to rows encountered while running the query.\n * They are used by subqueries to refer to rows emitted by parent queries.\n */\nconst parameterReferenceSchema = v.readonlyObject({\n type: v.literal('static'),\n // The \"namespace\" of the injected parameter.\n // Write authorization will send the value of a row\n // prior to the mutation being run (preMutationRow).\n // Read and write authorization will both send the\n // current authentication data (authData).\n anchor: v.literalUnion('authData', 'preMutationRow'),\n field: v.union(v.string(), v.array(v.string())),\n});\n\nconst conditionValueSchema = v.union(\n literalReferenceSchema,\n columnReferenceSchema,\n parameterReferenceSchema,\n);\n\nexport type Parameter = v.Infer<typeof parameterReferenceSchema>;\n\nexport const simpleConditionSchema: v.Type<SimpleCondition> = v.readonlyObject({\n type: v.literal('simple'),\n op: simpleOperatorSchema,\n left: conditionValueSchema,\n right: v.union(parameterReferenceSchema, literalReferenceSchema),\n});\n\ntype ConditionValue = v.Infer<typeof conditionValueSchema>;\n\nexport const 
correlatedSubqueryConditionOperatorSchema: v.Type<CorrelatedSubqueryConditionOperator> =\n v.literalUnion('EXISTS', 'NOT EXISTS');\n\nexport const correlatedSubqueryConditionSchema: v.Type<CorrelatedSubqueryCondition> =\n v.readonlyObject({\n type: v.literal('correlatedSubquery'),\n related: v.lazy(() => correlatedSubquerySchema),\n op: correlatedSubqueryConditionOperatorSchema,\n flip: v.boolean().optional(),\n });\n\nexport const conditionSchema: v.Type<Condition> = v.union(\n simpleConditionSchema,\n v.lazy(() => conjunctionSchema),\n v.lazy(() => disjunctionSchema),\n correlatedSubqueryConditionSchema,\n);\n\nconst conjunctionSchema: v.Type<Conjunction> = v.readonlyObject({\n type: v.literal('and'),\n conditions: v.readonlyArray(conditionSchema),\n});\n\nconst disjunctionSchema: v.Type<Disjunction> = v.readonlyObject({\n type: v.literal('or'),\n conditions: v.readonlyArray(conditionSchema),\n});\n\nexport type CompoundKey = readonly [string, ...string[]];\n\nfunction mustCompoundKey(field: readonly string[]): CompoundKey {\n assert(Array.isArray(field) && field.length >= 1);\n return field as unknown as CompoundKey;\n}\n\nexport const compoundKeySchema: v.Type<CompoundKey> = v.readonly(\n v.tuple([v.string()]).concat(v.array(v.string())),\n);\n\nconst correlationSchema = v.readonlyObject({\n parentField: compoundKeySchema,\n childField: compoundKeySchema,\n});\n\n// Split out so that its inferred type can be checked against\n// Omit<CorrelatedSubquery, 'correlation'> in ast-type-test.ts.\n// The mutually-recursive reference of the 'other' field to astSchema\n// is the only thing added in v.lazy. The v.lazy is necessary due to the\n// mutually-recursive types, but v.lazy prevents inference of the resulting\n// type.\nexport const correlatedSubquerySchemaOmitSubquery = v.readonlyObject({\n correlation: correlationSchema,\n hidden: v.boolean().optional(),\n system: v.literalUnion('permissions', 'client', 'test').optional(),\n});\n\nexport const correlatedSubquerySchema: v.Type<CorrelatedSubquery> =\n correlatedSubquerySchemaOmitSubquery.extend({\n subquery: v.lazy(() => astSchema),\n });\n\nexport const astSchema: v.Type<AST> = v.readonlyObject({\n schema: v.string().optional(),\n table: v.string(),\n alias: v.string().optional(),\n where: conditionSchema.optional(),\n related: v.readonlyArray(correlatedSubquerySchema).optional(),\n limit: v.number().optional(),\n orderBy: orderingSchema.optional(),\n start: v\n .object({\n row: rowSchema,\n exclusive: v.boolean(),\n })\n .optional(),\n});\n\nexport type Bound = {\n row: Row;\n exclusive: boolean;\n};\n\n/**\n * As in SQL you can have multiple orderings. 
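The v.lazy thunks above exist because conditionSchema and conjunctionSchema reference each other before either is initialized. The same pattern on a self-recursive type, using only combinators that appear in this file (note the explicit v.Type annotation, which the recursion makes mandatory):

type Tree = {readonly value: number; readonly children: readonly Tree[]};
const treeSchema: v.Type<Tree> = v.readonlyObject({
  value: v.number(),
  // the thunk defers evaluation until treeSchema is initialized
  children: v.readonlyArray(v.lazy(() => treeSchema)),
});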
We don't currently\n * support ordering on anything other than the root query.\n */\nexport type OrderPart = readonly [field: string, direction: 'asc' | 'desc'];\nexport type Ordering = readonly OrderPart[];\n\nexport type SimpleOperator = EqualityOps | OrderOps | LikeOps | InOps;\nexport type EqualityOps = '=' | '!=' | 'IS' | 'IS NOT';\nexport type OrderOps = '<' | '>' | '<=' | '>=';\nexport type LikeOps = 'LIKE' | 'NOT LIKE' | 'ILIKE' | 'NOT ILIKE';\nexport type InOps = 'IN' | 'NOT IN';\n\nexport type AST = {\n readonly schema?: string | undefined;\n readonly table: string;\n\n // A query would be aliased if the AST is a subquery.\n // e.g., when two subqueries select from the same table\n // they need an alias to differentiate them.\n // `SELECT\n // [SELECT * FROM issue WHERE issue.id = outer.parentId] AS parent\n // [SELECT * FROM issue WHERE issue.parentId = outer.id] AS children\n // FROM issue as outer`\n readonly alias?: string | undefined;\n\n // `select` is missing given we return all columns for now.\n\n // The PipelineBuilder will pick what to use to correlate\n // a subquery with a parent query. It can choose something from the\n // where conditions or choose the _first_ `related` entry.\n // Choosing the first `related` entry is almost always the best choice if\n // one exists.\n readonly where?: Condition | undefined;\n\n readonly related?: readonly CorrelatedSubquery[] | undefined;\n readonly start?: Bound | undefined;\n readonly limit?: number | undefined;\n readonly orderBy?: Ordering | undefined;\n};\n\nexport type Correlation = {\n readonly parentField: CompoundKey;\n readonly childField: CompoundKey;\n};\n\nexport type CorrelatedSubquery = {\n /**\n * Only equality correlation are supported for now.\n * E.g., direct foreign key relationships.\n */\n readonly correlation: Correlation;\n readonly subquery: AST;\n readonly system?: System | undefined;\n // If a hop in the subquery chain should be hidden from the output view.\n // A common example is junction edges. The query API provides the illusion\n // that they don't exist: `issue.related('labels')` instead of `issue.related('issue_labels').related('labels')`.\n // To maintain this illusion, the junction edge should be hidden.\n // When `hidden` is set to true, this hop will not be included in the output view\n // but its children will be.\n readonly hidden?: boolean | undefined;\n};\n\nexport type ValuePosition = LiteralReference | Parameter | ColumnReference;\n\nexport type ColumnReference = {\n readonly type: 'column';\n /**\n * Not a path yet as we're currently not allowing\n * comparisons across tables. 
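A hand-written AST exercising the fields defined above, roughly `issue WHERE open = true ORDER BY modified DESC LIMIT 10` (illustrative only; real ASTs are produced by the query builder):

const exampleAST: AST = {
  table: 'issue',
  where: {
    type: 'simple',
    op: '=',
    left: {type: 'column', name: 'open'},
    right: {type: 'literal', value: true},
  },
  orderBy: [['modified', 'desc']],
  limit: 10,
};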
This will need to\n * be a path through the tree in the near future.\n */\n readonly name: string;\n};\n\nexport type LiteralReference = {\n readonly type: 'literal';\n readonly value: LiteralValue;\n};\n\nexport type LiteralValue =\n | string\n | number\n | boolean\n | null\n | ReadonlyArray<string | number | boolean>;\n\n/**\n * Starting only with SimpleCondition for now.\n * ivm1 supports Conjunctions and Disjunctions.\n * We'll support them in the future.\n */\nexport type Condition =\n | SimpleCondition\n | Conjunction\n | Disjunction\n | CorrelatedSubqueryCondition;\n\nexport type SimpleCondition = {\n readonly type: 'simple';\n readonly op: SimpleOperator;\n readonly left: ValuePosition;\n\n /**\n * `null` is absent since we do not have an `IS` or `IS NOT`\n * operator defined and `null != null` in SQL.\n */\n readonly right: Exclude<ValuePosition, ColumnReference>;\n};\n\nexport type Conjunction = {\n type: 'and';\n conditions: readonly Condition[];\n};\n\nexport type Disjunction = {\n type: 'or';\n conditions: readonly Condition[];\n};\n\nexport type CorrelatedSubqueryCondition = {\n type: 'correlatedSubquery';\n related: CorrelatedSubquery;\n op: CorrelatedSubqueryConditionOperator;\n flip?: boolean | undefined;\n};\n\nexport type CorrelatedSubqueryConditionOperator = 'EXISTS' | 'NOT EXISTS';\n\ninterface ASTTransform {\n tableName(orig: string): string;\n columnName(origTable: string, origColumn: string): string;\n related(subqueries: CorrelatedSubquery[]): readonly CorrelatedSubquery[];\n where(cond: Condition): Condition | undefined;\n // conjunction or disjunction, called when traversing the return value of where()\n conditions(conds: Condition[]): readonly Condition[];\n}\n\nfunction transformAST(ast: AST, transform: ASTTransform): Required<AST> {\n // Name mapping functions (e.g. to server names)\n const {tableName, columnName} = transform;\n const colName = (c: string) => columnName(ast.table, c);\n const key = (table: string, k: CompoundKey) => {\n const serverKey = k.map(col => columnName(table, col));\n return mustCompoundKey(serverKey);\n };\n\n const where = ast.where ? transform.where(ast.where) : undefined;\n const transformed = {\n schema: ast.schema,\n table: tableName(ast.table),\n alias: ast.alias,\n where: where ? transformWhere(where, ast.table, transform) : undefined,\n related: ast.related\n ? transform.related(\n ast.related.map(\n r =>\n ({\n correlation: {\n parentField: key(ast.table, r.correlation.parentField),\n childField: key(r.subquery.table, r.correlation.childField),\n },\n hidden: r.hidden,\n subquery: transformAST(r.subquery, transform),\n system: r.system,\n }) satisfies Required<CorrelatedSubquery>,\n ),\n )\n : undefined,\n start: ast.start\n ? {\n ...ast.start,\n row: Object.fromEntries(\n Object.entries(ast.start.row).map(([col, val]) => [\n colName(col),\n val,\n ]),\n ),\n }\n : undefined,\n limit: ast.limit,\n orderBy: ast.orderBy?.map(([col, dir]) => [colName(col), dir] as const),\n };\n\n return transformed;\n}\n\nfunction transformWhere(\n where: Condition,\n table: string,\n transform: ASTTransform,\n): Condition {\n // Name mapping functions (e.g. to server names)\n const {columnName} = transform;\n const condValue = (c: ConditionValue) =>\n c.type !== 'column' ? 
c : {...c, name: columnName(table, c.name)};\n const key = (table: string, k: CompoundKey) => {\n const serverKey = k.map(col => columnName(table, col));\n return mustCompoundKey(serverKey);\n };\n\n if (where.type === 'simple') {\n return {...where, left: condValue(where.left)};\n } else if (where.type === 'correlatedSubquery') {\n const {correlation, subquery} = where.related;\n return {\n ...where,\n related: {\n ...where.related,\n correlation: {\n parentField: key(table, correlation.parentField),\n childField: key(subquery.table, correlation.childField),\n },\n subquery: transformAST(subquery, transform),\n },\n };\n }\n\n return {\n type: where.type,\n conditions: transform.conditions(\n where.conditions.map(c => transformWhere(c, table, transform)),\n ),\n };\n}\n\nconst normalizeCache = new WeakMap<AST, Required<AST>>();\n\nconst NORMALIZE_TRANSFORM: ASTTransform = {\n tableName: t => t,\n columnName: (_, c) => c,\n related: sortedRelated,\n where: flattened,\n conditions: c => c.sort(cmpCondition),\n};\n\nexport function normalizeAST(ast: AST): Required<AST> {\n let normalized = normalizeCache.get(ast);\n if (!normalized) {\n normalized = transformAST(ast, NORMALIZE_TRANSFORM);\n normalizeCache.set(ast, normalized);\n }\n return normalized;\n}\n\nexport function mapAST(ast: AST, mapper: NameMapper) {\n return transformAST(ast, {\n tableName: table => mapper.tableName(table),\n columnName: (table, col) => mapper.columnName(table, col),\n related: r => r,\n where: w => w,\n conditions: c => c,\n });\n}\n\nexport function mapCondition(\n cond: Condition,\n table: string,\n mapper: NameMapper,\n) {\n return transformWhere(cond, table, {\n tableName: table => mapper.tableName(table),\n columnName: (table, col) => mapper.columnName(table, col),\n related: r => r,\n where: w => w,\n conditions: c => c,\n });\n}\n\nfunction sortedRelated(\n related: CorrelatedSubquery[],\n): readonly CorrelatedSubquery[] {\n return related.sort(cmpRelated);\n}\n\nfunction cmpCondition(a: Condition, b: Condition): number {\n if (a.type === 'simple') {\n if (b.type !== 'simple') {\n return -1; // Order SimpleConditions first\n }\n\n return (\n compareValuePosition(a.left, b.left) ||\n compareUTF8MaybeNull(a.op, b.op) ||\n compareValuePosition(a.right, b.right)\n );\n }\n\n if (b.type === 'simple') {\n return 1; // Order SimpleConditions first\n }\n\n if (a.type === 'correlatedSubquery') {\n if (b.type !== 'correlatedSubquery') {\n return -1; // Order subquery before conjuctions/disjuctions\n }\n return cmpRelated(a.related, b.related) || compareUTF8MaybeNull(a.op, b.op);\n }\n if (b.type === 'correlatedSubquery') {\n return -1; // Order correlatedSubquery before conjuctions/disjuctions\n }\n\n const val = compareUTF8MaybeNull(a.type, b.type);\n if (val !== 0) {\n return val;\n }\n for (\n let l = 0, r = 0;\n l < a.conditions.length && r < b.conditions.length;\n l++, r++\n ) {\n const val = cmpCondition(a.conditions[l], b.conditions[r]);\n if (val !== 0) {\n return val;\n }\n }\n // prefixes first\n return a.conditions.length - b.conditions.length;\n}\n\nfunction compareValuePosition(a: ValuePosition, b: ValuePosition): number {\n if (a.type !== b.type) {\n return compareUTF8(a.type, b.type);\n }\n switch (a.type) {\n case 'literal':\n assert(b.type === 'literal');\n return compareUTF8(String(a.value), String(b.value));\n case 'column':\n assert(b.type === 'column');\n return compareUTF8(a.name, b.name);\n case 'static':\n throw new Error(\n 'Static parameters should be resolved before normalization',\n );\n 
}\n}\n\nfunction cmpRelated(a: CorrelatedSubquery, b: CorrelatedSubquery): number {\n return compareUTF8(must(a.subquery.alias), must(b.subquery.alias));\n}\n\n/**\n * Returns a flattened version of the Conditions in which nested Conjunctions with\n * the same operation ('AND' or 'OR') are flattened to the same level. e.g.\n *\n * ```\n * ((a AND b) AND (c AND (d OR (e OR f)))) -> (a AND b AND c AND (d OR e OR f))\n * ```\n *\n * Also flattens singleton Conjunctions regardless of operator, and removes\n * empty Conjunctions.\n */\nfunction flattened(cond: Condition): Condition | undefined {\n if (cond.type === 'simple' || cond.type === 'correlatedSubquery') {\n return cond;\n }\n const conditions = defined(\n cond.conditions.flatMap(c =>\n c.type === cond.type ? c.conditions.map(c => flattened(c)) : flattened(c),\n ),\n );\n\n switch (conditions.length) {\n case 0:\n return undefined;\n case 1:\n return conditions[0];\n default:\n return {\n type: cond.type,\n conditions,\n };\n }\n}\n\nfunction compareUTF8MaybeNull(a: string | null, b: string | null): number {\n if (a !== null && b !== null) {\n return compareUTF8(a, b);\n }\n if (b !== null) {\n return -1;\n }\n if (a !== null) {\n return 1;\n }\n return 0;\n}\n", "import {assert} from './asserts.ts';\n\n/**\n * Returns `arr` as is if none of the elements are `undefined`.\n * Otherwise returns a new array with only defined elements in `arr`.\n */\nexport function defined<T>(arr: (T | undefined)[]): T[] {\n // avoid an array copy if possible\n let i = arr.findIndex(x => x === undefined);\n if (i < 0) {\n return arr as T[];\n }\n const defined: T[] = arr.slice(0, i) as T[];\n for (i++; i < arr.length; i++) {\n const x = arr[i];\n if (x !== undefined) {\n defined.push(x);\n }\n }\n return defined;\n}\n\nexport function areEqual<T>(arr1: readonly T[], arr2: readonly T[]): boolean {\n return arr1.length === arr2.length && arr1.every((e, i) => e === arr2[i]);\n}\n\nexport function zip<T1, T2>(a1: readonly T1[], a2: readonly T2[]): [T1, T2][] {\n assert(a1.length === a2.length);\n const result: [T1, T2][] = [];\n for (let i = 0; i < a1.length; i++) {\n result.push([a1[i], a2[i]]);\n }\n return result;\n}\n\nexport function last<T>(arr: T[]): T | undefined {\n if (arr.length === 0) {\n return undefined;\n }\n return arr[arr.length - 1];\n}\n\nexport function groupBy<T, K>(\n arr: readonly T[],\n keyFn: (el: T) => K,\n): Map<K, T[]> {\n const groups = new Map<K, T[]>();\n for (const el of arr) {\n const key = keyFn(el);\n let group = groups.get(key);\n if (group === undefined) {\n group = [];\n groups.set(key, group);\n }\n group.push(el);\n }\n return groups;\n}\n", "import {jsonSchema} from '../../shared/src/json-schema.ts';\nimport {tdigestSchema} from '../../shared/src/tdigest-schema.ts';\nimport * as v from '../../shared/src/valita.ts';\nimport {analyzeQueryResultSchema} from './analyze-query-result.ts';\nimport {astSchema} from './ast.ts';\n\nconst serverMetricsSchema = v.object({\n 'query-materialization-server': tdigestSchema,\n 'query-update-server': tdigestSchema,\n});\n\nexport type ServerMetrics = v.Infer<typeof serverMetricsSchema>;\n\nconst inspectQueryRowSchema = v.object({\n clientID: v.string(),\n queryID: v.string(),\n // This is the server return AST for custom queries\n // TODO: Return server generated AST\n ast: astSchema.nullable(),\n // not null for custom queries\n name: v.string().nullable(),\n // not null for custom queries\n args: v.readonlyArray(jsonSchema).nullable(),\n got: v.boolean(),\n deleted: v.boolean(),\n ttl: 
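// Usage sketch (not from the package source) for the array helpers above;
// the import path matches the sources listed in this map.
import {defined, groupBy, zip} from '../../shared/src/arrays.ts';

defined([1, undefined, 2, undefined]); // -> [1, 2] (a new, filtered array)
defined([1, 2]); // -> the same array instance; no copy is made
zip(['a', 'b'], [1, 2]); // -> [['a', 1], ['b', 2]]
groupBy([1, 2, 3, 4], n => n % 2); // -> Map {1 => [1, 3], 0 => [2, 4]}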
v.number(),\n inactivatedAt: v.number().nullable(),\n rowCount: v.number(),\n metrics: serverMetricsSchema.nullable().optional(),\n});\n\nexport type InspectQueryRow = v.Infer<typeof inspectQueryRowSchema>;\n\nconst inspectBaseDownSchema = v.object({\n id: v.string(),\n});\n\nexport const inspectQueriesDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('queries'),\n value: v.array(inspectQueryRowSchema),\n});\n\nexport type InspectQueriesDown = v.Infer<typeof inspectQueriesDownSchema>;\n\nexport const inspectMetricsDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('metrics'),\n value: serverMetricsSchema,\n});\n\nexport type InspectMetricsDown = v.Infer<typeof inspectMetricsDownSchema>;\n\nexport const inspectVersionDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('version'),\n value: v.string(),\n});\n\nexport const inspectAuthenticatedDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('authenticated'),\n value: v.boolean(),\n});\n\nexport type InspectAuthenticatedDown = v.Infer<\n typeof inspectAuthenticatedDownSchema\n>;\n\nexport const inspectAnalyzeQueryDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('analyze-query'),\n value: analyzeQueryResultSchema,\n});\n\nexport type InspectAnalyzeQueryDown = v.Infer<\n typeof inspectAnalyzeQueryDownSchema\n>;\n\nexport const inspectDownBodySchema = v.union(\n inspectQueriesDownSchema,\n inspectMetricsDownSchema,\n inspectVersionDownSchema,\n inspectAuthenticatedDownSchema,\n inspectAnalyzeQueryDownSchema,\n);\n\nexport const inspectDownMessageSchema = v.tuple([\n v.literal('inspect'),\n inspectDownBodySchema,\n]);\n\nexport type InspectDownMessage = v.Infer<typeof inspectDownMessageSchema>;\n\nexport type InspectDownBody = v.Infer<typeof inspectDownBodySchema>;\n", "export function getNonCryptoRandomValues(array: Uint8Array) {\n if (array === null) {\n throw new TypeError('array cannot be null');\n }\n\n // Fill the array with random values\n for (let i = 0; i < array.length; i++) {\n array[i] = Math.floor(Math.random() * 256); // Random byte (0-255)\n }\n\n return array;\n}\n\nexport function randomCharacters(length: number) {\n let result = '';\n const characters =\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';\n const charactersLength = characters.length;\n let counter = 0;\n while (counter < length) {\n result += characters.charAt(Math.floor(Math.random() * charactersLength));\n counter += 1;\n }\n return result;\n}\n", "// This is taken from https://github.com/ai/nanoid/blob/main/index.browser.js We\n// copy this because we want to use `--platform=neutral` which doesn't work with\n// the npm package.\n// Also we changed the random number generator to use Math.random() for compat\n// with React Native.\n\nimport {getNonCryptoRandomValues} from '../../../shared/src/random-values.ts';\n\nexport function nanoid(size = 21): string {\n // Use our custom getRandomValues function to fill a Uint8Array with random values.\n const randomBytes = getNonCryptoRandomValues(new Uint8Array(size));\n\n return randomBytes.reduce((id, byte) => {\n // It is incorrect to use bytes exceeding the alphabet size.\n // The following mask reduces the random byte in the 0-255 value\n // range to the 0-63 value range. 
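// A hedged sketch (not from the package source) of validating an incoming
// inspect message against the schemas above. The payload values are
// hypothetical; the import paths assume a sibling module in zero-protocol.
import * as v from '../../shared/src/valita.ts';
import {inspectDownMessageSchema} from './inspect-down.ts';

const raw: unknown = ['inspect', {id: 'req-1', op: 'version', value: '0.24'}];
const msg = v.parse(raw, inspectDownMessageSchema);
if (msg[1].op === 'version') {
  // The union narrows on `op`, so `value` is a string here.
  console.log('zero-cache version:', msg[1].value);
}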
Therefore, adding hacks, such\n    // as empty string fallback or magic numbers, is unnecessary because\n    // the bitmask trims bytes down to the alphabet size.\n    byte &= 63;\n    if (byte < 36) {\n      // `0-9a-z`\n      id += byte.toString(36);\n    } else if (byte < 62) {\n      // `A-Z`\n      id += (byte - 26).toString(36).toUpperCase();\n    } else if (byte > 62) {\n      id += '-';\n    } else {\n      id += '_';\n    }\n    return id;\n  }, '');\n}\n", "import {xxHash32} from 'js-xxhash';\n\nexport const h32 = (s: string) => xxHash32(s, 0);\nexport const h64 = (s: string) => hash(s, 2);\nexport const h128 = (s: string) => hash(s, 4);\n\n/**\n * xxHash32 only computes 32-bit values. Run it n times with different seeds to\n * get a larger hash with better collision resistance.\n */\nfunction hash(str: string, words: number): bigint {\n  let hash = 0n;\n  for (let i = 0; i < words; i++) {\n    hash = (hash << 32n) + BigInt(xxHash32(str, i));\n  }\n  return hash;\n}\n", "import * as v from '../../shared/src/valita.ts';\n\nexport const primaryKeySchema = v.readonly(\n  v.tuple([v.string()]).concat(v.array(v.string())),\n);\n\nexport type PrimaryKey = v.Infer<typeof primaryKeySchema>;\n\nexport const primaryKeyValueSchema = v.union(\n  v.string(),\n  v.number(),\n  v.boolean(),\n);\n\nexport type PrimaryKeyValue = v.Infer<typeof primaryKeyValueSchema>;\n\nexport const primaryKeyValueRecordSchema = v.readonlyRecord(\n  primaryKeyValueSchema,\n);\n\nexport type PrimaryKeyValueRecord = v.Infer<typeof primaryKeyValueRecordSchema>;\n", "import {h128} from '../../../shared/src/hash.ts';\nimport * as v from '../../../shared/src/valita.ts';\nimport type {CompoundKey} from '../../../zero-protocol/src/ast.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport {primaryKeyValueSchema} from '../../../zero-protocol/src/primary-key.ts';\nimport type {MutationID} from '../../../zero-protocol/src/push.ts';\n\nexport const DESIRED_QUERIES_KEY_PREFIX = 'd/';\nexport const GOT_QUERIES_KEY_PREFIX = 'g/';\nexport const ENTITIES_KEY_PREFIX = 'e/';\nexport const MUTATIONS_KEY_PREFIX = 'm/';\n\nexport function toDesiredQueriesKey(clientID: string, hash: string): string {\n  return DESIRED_QUERIES_KEY_PREFIX + clientID + '/' + hash;\n}\n\nexport function desiredQueriesPrefixForClient(clientID: string): string {\n  return DESIRED_QUERIES_KEY_PREFIX + clientID + '/';\n}\n\nexport function toGotQueriesKey(hash: string): string {\n  return GOT_QUERIES_KEY_PREFIX + hash;\n}\n\nexport function toMutationResponseKey(mid: MutationID): string {\n  return MUTATIONS_KEY_PREFIX + mid.clientID + '/' + mid.id;\n}\n\nexport function toPrimaryKeyString(\n  tableName: string,\n  primaryKey: CompoundKey,\n  value: Row,\n): string {\n  if (primaryKey.length === 1) {\n    return (\n      ENTITIES_KEY_PREFIX +\n      tableName +\n      '/' +\n      v.parse(value[primaryKey[0]], primaryKeyValueSchema)\n    );\n  }\n\n  const values = primaryKey.map(k => v.parse(value[k], primaryKeyValueSchema));\n  const str = JSON.stringify(values);\n\n  const idSegment = h128(str);\n  return ENTITIES_KEY_PREFIX + tableName + '/' + idSegment;\n}\n\nexport function sourceNameFromKey(key: string): string {\n  const slash = key.indexOf('/', ENTITIES_KEY_PREFIX.length);\n  return key.slice(ENTITIES_KEY_PREFIX.length, slash);\n}\n", "import type {Client} from './client.ts';\nimport type {ExtendedInspectorDelegate} from './lazy-inspector.ts';\nimport type {Query} from './query.ts';\n\nexport class ClientGroup {\n  readonly #delegate: ExtendedInspectorDelegate;\n  readonly id: Promise<string> | string;\n\n  constructor(\n    delegate: 
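// A sketch (not from the package source) of the key layout produced by
// toPrimaryKeyString above; the table names and row values are hypothetical.
import {toPrimaryKeyString} from './keys.ts';

// Single-column primary key: the value is embedded directly.
toPrimaryKeyString('issue', ['id'], {id: 'abc'});
// -> 'e/issue/abc'

// Compound primary key: the values are JSON-encoded and reduced with h128
// so the key segment has a fixed, bounded length.
toPrimaryKeyString('issueLabel', ['issueID', 'labelID'], {
  issueID: 'abc',
  labelID: 'def',
});
// -> 'e/issueLabel/<decimal digits of a 128-bit xxHash>'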
ExtendedInspectorDelegate,\n clientGroupID: Promise<string> | string,\n ) {\n this.#delegate = delegate;\n this.id = clientGroupID;\n }\n\n async clients(): Promise<Client[]> {\n return (await this.#delegate.lazy).clientGroupClients(\n this.#delegate,\n this.id,\n );\n }\n\n async clientsWithQueries(): Promise<Client[]> {\n return (await this.#delegate.lazy).clientGroupClientsWithQueries(\n this.#delegate,\n this.id,\n );\n }\n\n async queries(): Promise<Query[]> {\n return (await this.#delegate.lazy).clientGroupQueries(this.#delegate);\n }\n}\n", "import type {ReadonlyJSONValue} from '../../../../shared/src/json.ts';\nimport type {Row} from '../../../../zero-protocol/src/data.ts';\nimport {ClientGroup} from './client-group.ts';\nimport type {ExtendedInspectorDelegate} from './lazy-inspector.ts';\nimport type {Query} from './query.ts';\n\nexport class Client {\n readonly #delegate: ExtendedInspectorDelegate;\n readonly id: string;\n readonly clientGroup: ClientGroup;\n\n constructor(\n delegate: ExtendedInspectorDelegate,\n clientID: string,\n clientGroupID: Promise<string> | string,\n ) {\n this.#delegate = delegate;\n this.id = clientID;\n\n this.clientGroup = new ClientGroup(this.#delegate, clientGroupID);\n }\n\n async queries(): Promise<Query[]> {\n return (await this.#delegate.lazy).clientQueries(this.#delegate, this.id);\n }\n\n async map(): Promise<Map<string, ReadonlyJSONValue>> {\n return (await this.#delegate.lazy).clientMap(this.#delegate, this.id);\n }\n\n async rows(tableName: string): Promise<Row[]> {\n return (await this.#delegate.lazy).clientRows(\n this.#delegate,\n this.id,\n tableName,\n );\n }\n}\n", "// Apache License 2.0\n// https://github.com/influxdata/tdigest\n\n// Centroid average position of all points in a shape\nexport class Centroid {\n mean: number;\n weight: number;\n\n constructor(mean: number, weight: number) {\n this.mean = mean;\n this.weight = weight;\n }\n\n add(r: Centroid): void {\n if (r.weight < 0) {\n throw new Error('centroid weight cannot be less than zero');\n }\n if (this.weight !== 0) {\n this.weight += r.weight;\n this.mean += (r.weight * (r.mean - this.mean)) / this.weight;\n } else {\n this.weight = r.weight;\n this.mean = r.mean;\n }\n }\n}\n\n/** CentroidList is sorted by the mean of the centroid, ascending. 
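// Worked example (not from the package source) of Centroid.add above:
// merging two centroids keeps the weighted mean of all points they
// summarize. The import path assumes a module alongside shared/src.
import {Centroid} from './centroid.ts';

const c = new Centroid(10, 1);
c.add(new Centroid(20, 1));
// weight = 2, mean = 10 + (1 * (20 - 10)) / 2 = 15
c.add(new Centroid(15, 2));
// weight = 4, mean = 15 + (2 * (15 - 15)) / 4 = 15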
*/\nexport type CentroidList = Centroid[];\n\nexport function sortCentroidList(centroids: CentroidList): void {\n  centroids.sort((a, b) => a.mean - b.mean);\n}\n", "// Apache License 2.0\n// https://github.com/influxdata/tdigest\n\nimport {binarySearch} from './binary-search.ts';\nimport {Centroid, sortCentroidList, type CentroidList} from './centroid.ts';\nimport type {TDigestJSON} from './tdigest-schema.ts';\n\nexport interface ReadonlyTDigest {\n  readonly count: () => number;\n  readonly quantile: (q: number) => number;\n  readonly cdf: (x: number) => number;\n}\n\n// TDigest is a data structure for accurate on-line accumulation of\n// rank-based statistics such as quantiles and trimmed means.\nexport class TDigest {\n  readonly compression: number;\n\n  #maxProcessed: number;\n  #maxUnprocessed: number;\n  #processed!: CentroidList;\n  #unprocessed!: CentroidList;\n  #cumulative!: number[];\n  #processedWeight!: number;\n  #unprocessedWeight!: number;\n  #min!: number;\n  #max!: number;\n\n  constructor(compression: number = 1000) {\n    this.compression = compression;\n    this.#maxProcessed = processedSize(0, this.compression);\n    this.#maxUnprocessed = unprocessedSize(0, this.compression);\n    this.reset();\n  }\n\n  /**\n   * fromJSON creates a TDigest from a JSON-serializable representation.\n   * The data is a flat array: the compression value followed by\n   * interleaved (mean, weight) pairs, one pair per centroid.\n   */\n  static fromJSON(data: Readonly<TDigestJSON>): TDigest {\n    const digest = new TDigest(data[0]);\n    if (data.length % 2 !== 1) {\n      throw new Error('Invalid centroids array');\n    }\n    for (let i = 1; i < data.length; i += 2) {\n      digest.add(data[i], data[i + 1]);\n    }\n    return digest;\n  }\n\n  reset(): void {\n    this.#processed = [];\n    this.#unprocessed = [];\n    this.#cumulative = [];\n    this.#processedWeight = 0;\n    this.#unprocessedWeight = 0;\n    this.#min = Number.MAX_VALUE;\n    this.#max = -Number.MAX_VALUE;\n  }\n\n  add(mean: number, weight: number = 1) {\n    this.addCentroid(new Centroid(mean, weight));\n  }\n\n  /** AddCentroidList can quickly add multiple centroids. */\n  addCentroidList(centroidList: CentroidList) {\n    for (const c of centroidList) {\n      this.addCentroid(c);\n    }\n  }\n\n  /**\n   * AddCentroid adds a single centroid.\n   * Weights which are not a number or are <= 0 are ignored, as are NaN means.\n   */\n  addCentroid(c: Centroid): void {\n    if (\n      Number.isNaN(c.mean) ||\n      c.weight <= 0 ||\n      Number.isNaN(c.weight) ||\n      !Number.isFinite(c.weight)\n    ) {\n      return;\n    }\n\n    this.#unprocessed.push(new Centroid(c.mean, c.weight));\n    this.#unprocessedWeight += c.weight;\n\n    if (\n      this.#processed.length > this.#maxProcessed ||\n      this.#unprocessed.length > this.#maxUnprocessed\n    ) {\n      this.#process();\n    }\n  }\n\n  /**\n   * Merges the supplied digest into this digest. 
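// A sketch (not from the package source) of the flat JSON layout consumed
// by fromJSON above: [compression, mean1, weight1, mean2, weight2, ...].
import {TDigest} from './tdigest.ts';

const digest = TDigest.fromJSON([1000, 5, 2, 10, 1]);
// compression 1000, centroids (mean 5, weight 2) and (mean 10, weight 1)
digest.count(); // -> 3, the total weight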
Functionally equivalent to\n   * calling t.addCentroidList(t2.centroids()), but avoids making an extra\n   * copy of the CentroidList.\n   **/\n  merge(t2: TDigest) {\n    t2.#process();\n    this.addCentroidList(t2.#processed);\n  }\n\n  #process() {\n    if (\n      this.#unprocessed.length > 0 ||\n      this.#processed.length > this.#maxProcessed\n    ) {\n      // Append all processed centroids to the unprocessed list and sort\n      this.#unprocessed.push(...this.#processed);\n      sortCentroidList(this.#unprocessed);\n\n      // Reset processed list with first centroid\n      this.#processed.length = 0;\n      this.#processed.push(this.#unprocessed[0]);\n\n      this.#processedWeight += this.#unprocessedWeight;\n      this.#unprocessedWeight = 0;\n      let soFar = this.#unprocessed[0].weight;\n      let limit = this.#processedWeight * this.#integratedQ(1);\n      for (let i = 1; i < this.#unprocessed.length; i++) {\n        const centroid = this.#unprocessed[i];\n        const projected = soFar + centroid.weight;\n        if (projected <= limit) {\n          soFar = projected;\n          this.#processed[this.#processed.length - 1].add(centroid);\n        } else {\n          const k1 = this.#integratedLocation(soFar / this.#processedWeight);\n          limit = this.#processedWeight * this.#integratedQ(k1 + 1);\n          soFar += centroid.weight;\n          this.#processed.push(centroid);\n        }\n      }\n      this.#min = Math.min(this.#min, this.#processed[0].mean);\n      this.#max = Math.max(\n        this.#max,\n        this.#processed[this.#processed.length - 1].mean,\n      );\n      this.#unprocessed.length = 0;\n    }\n  }\n\n  /**\n   * Centroids returns a copy of processed centroids.\n   * Useful when aggregating multiple t-digests.\n   *\n   * The processed centroids are concatenated onto the passed CentroidList;\n   * when re-using a buffer, pass one that has been emptied first.\n   */\n  centroids(cl: CentroidList = []): CentroidList {\n    this.#process();\n    return cl.concat(this.#processed);\n  }\n\n  count(): number {\n    this.#process();\n\n    // this.process always updates this.processedWeight to the total count of all\n    // centroids, so we don't need to re-count here.\n    return this.#processedWeight;\n  }\n\n  /**\n   * toJSON returns a JSON-serializable representation of the digest.\n   * This processes the digest and returns a flat array of the compression\n   * followed by each processed centroid's (mean, weight) pair.\n   */\n  toJSON(): TDigestJSON {\n    this.#process();\n    const data: TDigestJSON = [this.compression];\n    for (const centroid of this.#processed) {\n      data.push(centroid.mean, centroid.weight);\n    }\n    return data;\n  }\n\n  #updateCumulative() {\n    // Weight can only increase, so the final cumulative value will always be\n    // either equal to, or less than, the total weight. If they are the same,\n    // then nothing has changed since the last update.\n    if (\n      this.#cumulative.length > 0 &&\n      this.#cumulative[this.#cumulative.length - 1] === this.#processedWeight\n    ) {\n      return;\n    }\n    const n = this.#processed.length + 1;\n    if (this.#cumulative.length > n) {\n      this.#cumulative.length = n;\n    }\n\n    let prev = 0;\n    for (let i = 0; i < this.#processed.length; i++) {\n      const centroid = this.#processed[i];\n      const cur = centroid.weight;\n      this.#cumulative[i] = prev + cur / 2;\n      prev += cur;\n    }\n    this.#cumulative[this.#processed.length] = prev;\n  }\n\n  // Quantile returns the (approximate) quantile of\n  // the distribution. 
Accepted values for q are between 0 and 1.\n // Returns NaN if Count is zero or bad inputs.\n quantile(q: number): number {\n this.#process();\n this.#updateCumulative();\n if (q < 0 || q > 1 || this.#processed.length === 0) {\n return NaN;\n }\n if (this.#processed.length === 1) {\n return this.#processed[0].mean;\n }\n const index = q * this.#processedWeight;\n if (index <= this.#processed[0].weight / 2) {\n return (\n this.#min +\n ((2 * index) / this.#processed[0].weight) *\n (this.#processed[0].mean - this.#min)\n );\n }\n\n const lower = binarySearch(\n this.#cumulative.length,\n (i: number) => -this.#cumulative[i] + index,\n );\n\n if (lower + 1 !== this.#cumulative.length) {\n const z1 = index - this.#cumulative[lower - 1];\n const z2 = this.#cumulative[lower] - index;\n return weightedAverage(\n this.#processed[lower - 1].mean,\n z2,\n this.#processed[lower].mean,\n z1,\n );\n }\n\n const z1 =\n index - this.#processedWeight - this.#processed[lower - 1].weight / 2;\n const z2 = this.#processed[lower - 1].weight / 2 - z1;\n return weightedAverage(\n this.#processed[this.#processed.length - 1].mean,\n z1,\n this.#max,\n z2,\n );\n }\n\n /**\n * CDF returns the cumulative distribution function for a given value x.\n */\n cdf(x: number): number {\n this.#process();\n this.#updateCumulative();\n switch (this.#processed.length) {\n case 0:\n return 0;\n case 1: {\n const width = this.#max - this.#min;\n if (x <= this.#min) {\n return 0;\n }\n if (x >= this.#max) {\n return 1;\n }\n if (x - this.#min <= width) {\n // min and max are too close together to do any viable interpolation\n return 0.5;\n }\n return (x - this.#min) / width;\n }\n }\n\n if (x <= this.#min) {\n return 0;\n }\n if (x >= this.#max) {\n return 1;\n }\n const m0 = this.#processed[0].mean;\n // Left Tail\n if (x <= m0) {\n if (m0 - this.#min > 0) {\n return (\n (((x - this.#min) / (m0 - this.#min)) * this.#processed[0].weight) /\n this.#processedWeight /\n 2\n );\n }\n return 0;\n }\n // Right Tail\n const mn = this.#processed[this.#processed.length - 1].mean;\n if (x >= mn) {\n if (this.#max - mn > 0) {\n return (\n 1 -\n (((this.#max - x) / (this.#max - mn)) *\n this.#processed[this.#processed.length - 1].weight) /\n this.#processedWeight /\n 2\n );\n }\n return 1;\n }\n\n const upper = binarySearch(\n this.#processed.length,\n // Treat equals as greater than, so we can use the upper index\n // This is equivalent to:\n // i => this.#processed[i].mean > x ? 
-1 : 1,\n i => x - this.#processed[i].mean || 1,\n );\n\n const z1 = x - this.#processed[upper - 1].mean;\n const z2 = this.#processed[upper].mean - x;\n return (\n weightedAverage(\n this.#cumulative[upper - 1],\n z2,\n this.#cumulative[upper],\n z1,\n ) / this.#processedWeight\n );\n }\n\n #integratedQ(k: number): number {\n return (\n (Math.sin(\n (Math.min(k, this.compression) * Math.PI) / this.compression -\n Math.PI / 2,\n ) +\n 1) /\n 2\n );\n }\n\n #integratedLocation(q: number): number {\n return (this.compression * (Math.asin(2 * q - 1) + Math.PI / 2)) / Math.PI;\n }\n}\n\n// Calculate number of bytes needed for a tdigest of size c,\n// where c is the compression value\nexport function byteSizeForCompression(comp: number): number {\n const c = comp | 0;\n // // A centroid is 2 float64s, so we need 16 bytes for each centroid\n // float_size := 8\n // centroid_size := 2 * float_size\n\n // // Unprocessed and processed can grow up to length c\n // unprocessed_size := centroid_size * c\n // processed_size := unprocessed_size\n\n // // the cumulative field can also be of length c, but each item is a single float64\n // cumulative_size := float_size * c // <- this could also be unprocessed_size / 2\n\n // return unprocessed_size + processed_size + cumulative_size\n\n // // or, more succinctly:\n // return float_size * c * 5\n\n // or even more succinctly\n return c * 40;\n}\n\nfunction weightedAverage(\n x1: number,\n w1: number,\n x2: number,\n w2: number,\n): number {\n if (x1 <= x2) {\n return weightedAverageSorted(x1, w1, x2, w2);\n }\n return weightedAverageSorted(x2, w2, x1, w1);\n}\n\nfunction weightedAverageSorted(\n x1: number,\n w1: number,\n x2: number,\n w2: number,\n): number {\n const x = (x1 * w1 + x2 * w2) / (w1 + w2);\n return Math.max(x1, Math.min(x, x2));\n}\n\nfunction processedSize(size: number, compression: number): number {\n if (size === 0) {\n return Math.ceil(compression) * 2;\n }\n return size;\n}\n\nfunction unprocessedSize(size: number, compression: number): number {\n if (size === 0) {\n return Math.ceil(compression) * 8;\n }\n return size;\n}\n", "import type {ClientGroup} from './client-group.ts';\nimport {Client} from './client.ts';\nimport type {\n ExtendedInspectorDelegate,\n InspectorDelegate,\n Metrics,\n Rep,\n} from './lazy-inspector.ts';\n\nexport type {InspectorDelegate};\n\nexport type Lazy = typeof import('./lazy-inspector.ts');\n\nexport class Inspector {\n readonly #delegate: ExtendedInspectorDelegate;\n readonly client: Client;\n readonly clientGroup: ClientGroup;\n\n constructor(\n rep: Rep,\n delegate: InspectorDelegate,\n getSocket: () => Promise<WebSocket>,\n ) {\n this.#delegate = {\n getQueryMetrics: delegate.getQueryMetrics.bind(delegate),\n getAST: delegate.getAST.bind(delegate),\n get metrics() {\n return delegate.metrics;\n },\n rep,\n getSocket,\n lazy: import('./lazy-inspector.ts'),\n };\n\n this.client = new Client(this.#delegate, rep.clientID, rep.clientGroupID);\n this.clientGroup = this.client.clientGroup;\n }\n\n async metrics(): Promise<Metrics> {\n return (await this.#delegate.lazy).inspectorMetrics(this.#delegate);\n }\n\n async clients(): Promise<Client[]> {\n return (await this.#delegate.lazy).inspectorClients(this.#delegate);\n }\n\n async clientsWithQueries(): Promise<Client[]> {\n return (await this.#delegate.lazy).inspectorClientsWithQueries(\n this.#delegate,\n );\n }\n\n async serverVersion(): Promise<string> {\n return (await this.#delegate.lazy).serverVersion(this.#delegate);\n }\n}\n", "import type {ValueType} 
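// End-to-end usage sketch (not from the package source) of the TDigest
// implemented above; the quantile and cdf results are approximations.
import {TDigest} from './tdigest.ts';

const latencies = new TDigest(); // default compression of 1000
for (let i = 1; i <= 1000; i++) {
  latencies.add(i); // weight defaults to 1
}
latencies.quantile(0.5); // ≈ 500 (approximate median)
latencies.cdf(900); // ≈ 0.9
// Digests round-trip through JSON and merge cheaply, e.g. when
// aggregating per-client metrics:
const aggregate = TDigest.fromJSON(latencies.toJSON());
aggregate.merge(latencies);
aggregate.count(); // -> 2000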
from '../../zero-protocol/src/client-schema.ts';\nimport type {PrimaryKey} from '../../zero-protocol/src/primary-key.ts';\n\nexport type {ValueType} from '../../zero-protocol/src/client-schema.ts';\n\n/**\n * `related` calls need to know what the available relationships are.\n * The `schema` type encodes this information.\n */\nexport type SchemaValue<T = unknown> =\n | {\n type: ValueType;\n serverName?: string | undefined;\n optional?: boolean | undefined;\n }\n | SchemaValueWithCustomType<T>;\n\nexport type SchemaValueWithCustomType<T> = {\n type: ValueType;\n serverName?: string | undefined;\n optional?: boolean;\n customType: T;\n};\n\nexport type TableSchema = {\n readonly name: string;\n readonly serverName?: string | undefined;\n readonly columns: Record<string, SchemaValue>;\n readonly primaryKey: PrimaryKey;\n};\n\nexport type RelationshipsSchema = {\n readonly [name: string]: Relationship;\n};\n\nexport type TypeNameToTypeMap = {\n string: string;\n number: number;\n boolean: boolean;\n null: null;\n\n // In schema-v2, the user will be able to specify the TS type that\n // the JSON should match and `any`` will no\n // longer be used here.\n // ReadOnlyJSONValue is not used as it causes\n // infinite depth errors to pop up for users of our APIs.\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n json: any;\n};\n\nexport type ColumnTypeName<T extends SchemaValue | ValueType> =\n T extends SchemaValue ? T['type'] : T;\n\n/**\n * Given a schema value, return the TypeScript type.\n *\n * This allows us to create the correct return type for a\n * query that has a selection.\n */\nexport type SchemaValueToTSType<T extends SchemaValue | ValueType> =\n T extends ValueType\n ? TypeNameToTypeMap[T]\n : T extends {\n optional: true;\n }\n ?\n | (T extends SchemaValueWithCustomType<infer V>\n ? V\n : TypeNameToTypeMap[ColumnTypeName<T>])\n | null\n : T extends SchemaValueWithCustomType<infer V>\n ? V\n : TypeNameToTypeMap[ColumnTypeName<T>];\n\ntype Connection = {\n readonly sourceField: readonly string[];\n readonly destField: readonly string[];\n readonly destSchema: string;\n readonly cardinality: Cardinality;\n};\n\nexport type Cardinality = 'one' | 'many';\n\nexport type Relationship =\n | readonly [Connection]\n | readonly [Connection, Connection];\n// | readonly [Connection, Connection, Connection];\n\nexport type LastInTuple<T extends Relationship> = T extends readonly [infer L]\n ? L\n : T extends readonly [unknown, infer L]\n ? L\n : T extends readonly [unknown, unknown, infer L]\n ? L\n : never;\n\nexport type AtLeastOne<T> = readonly [T, ...T[]];\n\nexport function atLeastOne<T>(arr: readonly T[]): AtLeastOne<T> {\n if (arr.length === 0) {\n throw new Error('Expected at least one element');\n }\n return arr as AtLeastOne<T>;\n}\n\nexport function isOneHop(r: Relationship): r is readonly [Connection] {\n return r.length === 1;\n}\n\nexport function isTwoHop(\n r: Relationship,\n): r is readonly [Connection, Connection] {\n return r.length === 2;\n}\n\nexport type Opaque<BaseType, BrandType = unknown> = BaseType & {\n readonly [base]: BaseType;\n readonly [brand]: BrandType;\n};\n\ndeclare const base: unique symbol;\ndeclare const brand: unique symbol;\n\nexport type IsOpaque<T> = T extends {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n readonly [brand]: any;\n}\n ? true\n : false;\n\nexport type ExpandRecursiveSkipOpaque<T> =\n IsOpaque<T> extends true\n ? T\n : T extends object\n ? T extends infer O\n ? 
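// Type-level sketch (not from the package source) of SchemaValueToTSType
// above, checked with plain type aliases rather than a test framework.
import type {SchemaValueToTSType} from './table-schema.ts';

type A = SchemaValueToTSType<'string'>; // string
type B = SchemaValueToTSType<{type: 'number'; optional: true}>; // number | null
type C = SchemaValueToTSType<{type: 'json'; customType: {tags: string[]}}>;
// C is {tags: string[]}: a customType wins over TypeNameToTypeMap.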
{[K in keyof O]: ExpandRecursiveSkipOpaque<O[K]>}\n : never\n : T;\n", "export function emptyFunction() {}\nexport const emptyObject = Object.freeze({});\nexport const emptyArray = Object.freeze([]);\nexport function identity<T>(x: T): T {\n return x;\n}\n", "/* eslint-disable @typescript-eslint/no-explicit-any */\nimport type {Expand, ExpandRecursive} from '../../../shared/src/expand.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {type AST, type SimpleOperator} from '../../../zero-protocol/src/ast.ts';\nimport type {Schema as ZeroSchema} from '../../../zero-schema/src/builder/schema-builder.ts';\nimport type {\n LastInTuple,\n SchemaValueToTSType,\n SchemaValueWithCustomType,\n TableSchema,\n} from '../../../zero-schema/src/table-schema.ts';\nimport type {Format, ViewFactory} from '../ivm/view.ts';\nimport type {ExpressionFactory, ParameterReference} from './expression.ts';\nimport type {CustomQueryID} from './named.ts';\nimport type {QueryDelegate} from './query-delegate.ts';\nimport type {TTL} from './ttl.ts';\nimport type {TypedView} from './typed-view.ts';\n\ntype Selector<E extends TableSchema> = keyof E['columns'];\nexport type NoCompoundTypeSelector<T extends TableSchema> = Exclude<\n Selector<T>,\n JsonSelectors<T> | ArraySelectors<T>\n>;\n\ntype JsonSelectors<E extends TableSchema> = {\n [K in keyof E['columns']]: E['columns'][K] extends {type: 'json'} ? K : never;\n}[keyof E['columns']];\n\ntype ArraySelectors<E extends TableSchema> = {\n [K in keyof E['columns']]: E['columns'][K] extends SchemaValueWithCustomType<\n any[]\n >\n ? K\n : never;\n}[keyof E['columns']];\n\nexport type QueryReturn<Q> = Q extends Query<any, any, infer R> ? R : never;\nexport type QueryTable<Q> = Q extends Query<any, infer T, any> ? T : never;\nexport const delegateSymbol = Symbol('delegate');\n\nexport type ExistsOptions = {flip: boolean};\n\nexport type GetFilterType<\n TSchema extends TableSchema,\n TColumn extends keyof TSchema['columns'],\n TOperator extends SimpleOperator,\n> = TOperator extends 'IS' | 'IS NOT'\n ? // SchemaValueToTSType adds null if the type is optional, but we add null\n // no matter what for dx reasons. See:\n // https://github.com/rocicorp/mono/pull/3576#discussion_r1925792608\n SchemaValueToTSType<TSchema['columns'][TColumn]> | null\n : TOperator extends 'IN' | 'NOT IN'\n ? // We don't want to compare to null in where clauses because it causes\n // confusing results:\n // https://zero.rocicorp.dev/docs/reading-data#comparing-to-null\n readonly Exclude<SchemaValueToTSType<TSchema['columns'][TColumn]>, null>[]\n : Exclude<SchemaValueToTSType<TSchema['columns'][TColumn]>, null>;\n\nexport type AvailableRelationships<\n TTable extends string,\n TSchema extends ZeroSchema,\n> = keyof TSchema['relationships'][TTable] & string;\n\nexport type DestTableName<\n TTable extends string,\n TSchema extends ZeroSchema,\n TRelationship extends string,\n> = LastInTuple<TSchema['relationships'][TTable][TRelationship]>['destSchema'];\n\ntype DestRow<\n TTable extends string,\n TSchema extends ZeroSchema,\n TRelationship extends string,\n> = TSchema['relationships'][TTable][TRelationship][0]['cardinality'] extends 'many'\n ? PullRow<DestTableName<TTable, TSchema, TRelationship>, TSchema>\n : PullRow<DestTableName<TTable, TSchema, TRelationship>, TSchema> | undefined;\n\ntype AddSubreturn<TExistingReturn, TSubselectReturn, TAs extends string> = {\n readonly [K in TAs]: undefined extends TSubselectReturn\n ? 
TSubselectReturn\n : readonly TSubselectReturn[];\n} extends infer TNewRelationship\n ? undefined extends TExistingReturn\n ? (Exclude<TExistingReturn, undefined> & TNewRelationship) | undefined\n : TExistingReturn & TNewRelationship\n : never;\n\nexport type PullTableSchema<\n TTable extends string,\n TSchemas extends ZeroSchema,\n> = TSchemas['tables'][TTable];\n\nexport type PullRow<TTable extends string, TSchema extends ZeroSchema> = {\n readonly [K in keyof PullTableSchema<\n TTable,\n TSchema\n >['columns']]: SchemaValueToTSType<\n PullTableSchema<TTable, TSchema>['columns'][K]\n >;\n};\n\nexport type Row<T extends TableSchema | Query<ZeroSchema, string, any>> =\n T extends TableSchema\n ? {\n readonly [K in keyof T['columns']]: SchemaValueToTSType<\n T['columns'][K]\n >;\n }\n : T extends Query<ZeroSchema, string, infer TReturn>\n ? TReturn\n : never;\n\n/**\n * A hybrid query that runs on both client and server.\n * Results are returned immediately from the client followed by authoritative\n * results from the server.\n *\n * Queries are transactional in that all queries update at once when a new transaction\n * has been committed on the client or server. No query results will reflect stale state.\n *\n * A query can be:\n * - {@linkcode materialize | materialize}\n * - awaited (`then`/{@linkcode run})\n * - {@linkcode preload | preloaded}\n *\n * The normal way to use a query would be through your UI framework's bindings (e.g., useQuery(q))\n * or within a custom mutator.\n *\n * `materialize` and `run/then` are provided for more advanced use cases.\n * Remember that any `view` returned by `materialize` must be destroyed.\n *\n * A query can be run as a 1-shot query by awaiting it. E.g.,\n *\n * ```ts\n * const result = await z.query.issue.limit(10);\n * ```\n *\n * For more information on how to use queries, see the documentation:\n * https://zero.rocicorp.dev/docs/reading-data\n *\n * @typeParam TSchema The database schema type extending ZeroSchema\n * @typeParam TTable The name of the table being queried, must be a key of TSchema['tables']\n * @typeParam TReturn The return type of the query, defaults to PullRow<TTable, TSchema>\n */\nexport interface Query<\n TSchema extends ZeroSchema,\n TTable extends keyof TSchema['tables'] & string,\n TReturn = PullRow<TTable, TSchema>,\n> {\n /**\n * Format is used to specify the shape of the query results. This is used by\n * {@linkcode one} and it also describes the shape when using\n * {@linkcode related}.\n */\n readonly format: Format;\n\n /**\n * A string that uniquely identifies this query. 
This can be used to determine\n * if two queries are the same.\n *\n * The hash of a custom query, on the client, is the hash of its AST.\n * The hash of a custom query, on the server, is the hash of its name and args.\n *\n * The first allows many client-side queries to be pinned to the same backend query.\n * The second ensures we do not invoke a named query on the backend more than once for the same `name:arg` pairing.\n *\n * If the query.hash was of `name:args` then `useQuery` would de-dupe\n * queries with divergent ASTs.\n *\n * QueryManager will hash based on `name:args` since it is speaking with\n * the server which tracks queries by `name:args`.\n */\n hash(): string;\n readonly ast: AST;\n readonly customQueryID: CustomQueryID | undefined;\n\n nameAndArgs(\n name: string,\n args: ReadonlyArray<ReadonlyJSONValue>,\n ): Query<TSchema, TTable, TReturn>;\n [delegateSymbol](delegate: QueryDelegate): Query<TSchema, TTable, TReturn>;\n\n /**\n * Related is used to add a related query to the current query. This is used\n * for subqueries and joins. These relationships are defined in the\n * relationships section of the schema. The result of the query will\n * include the related rows in the result set as a sub object of the row.\n *\n * ```typescript\n * const row = await z.query.users\n * .related('posts');\n * // {\n * // id: '1',\n * // posts: [\n * // ...\n * // ]\n * // }\n * ```\n * If you want to add a subquery to the related query, you can do so by\n * providing a callback function that receives the related query as an argument.\n *\n * ```typescript\n * const row = await z.query.users\n * .related('posts', q => q.where('published', true));\n * // {\n * // id: '1',\n * // posts: [\n * // {published: true, ...},\n * // ...\n * // ]\n * // }\n * ```\n *\n * @param relationship The name of the relationship\n */\n related<TRelationship extends AvailableRelationships<TTable, TSchema>>(\n relationship: TRelationship,\n ): Query<\n TSchema,\n TTable,\n AddSubreturn<\n TReturn,\n DestRow<TTable, TSchema, TRelationship>,\n TRelationship\n >\n >;\n related<\n TRelationship extends AvailableRelationships<TTable, TSchema>,\n TSub extends Query<TSchema, string, any>,\n >(\n relationship: TRelationship,\n cb: (\n q: Query<\n TSchema,\n DestTableName<TTable, TSchema, TRelationship>,\n DestRow<TTable, TSchema, TRelationship>\n >,\n ) => TSub,\n ): Query<\n TSchema,\n TTable,\n AddSubreturn<\n TReturn,\n TSub extends Query<TSchema, string, infer TSubReturn>\n ? 
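// A hedged sketch (not from the package source) of the hash() semantics
// documented above: two queries built the same way normalize to the same
// AST and therefore the same hash. `z.query.issue` stands in for a Zero
// client's query root, and the schema is left as `any`; both are
// hypothetical here.
import type {Query} from './query.ts';

declare const z: {query: {issue: Query<any, 'issue'>}};

const a = z.query.issue.where('open', true).limit(10);
const b = z.query.issue.where('open', true).limit(10);
a.hash() === b.hash(); // true: same normalized AST
const c = z.query.issue.limit(10);
a.hash() === c.hash(); // false: different AST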
TSubReturn\n : never,\n TRelationship\n >\n >;\n\n /**\n * Represents a condition to filter the query results.\n *\n * @param field The column name to filter on.\n * @param op The operator to use for filtering.\n * @param value The value to compare against.\n *\n * @returns A new query instance with the applied filter.\n *\n * @example\n *\n * ```typescript\n * const query = db.query('users')\n * .where('age', '>', 18)\n * .where('name', 'LIKE', '%John%');\n * ```\n */\n where<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n TOperator extends SimpleOperator,\n >(\n field: TSelector,\n op: TOperator,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, TOperator>\n | ParameterReference,\n ): Query<TSchema, TTable, TReturn>;\n /**\n * Represents a condition to filter the query results.\n *\n * This overload is used when the operator is '='.\n *\n * @param field The column name to filter on.\n * @param value The value to compare against.\n *\n * @returns A new query instance with the applied filter.\n *\n * @example\n * ```typescript\n * const query = db.query('users')\n * .where('age', 18)\n * ```\n */\n where<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n >(\n field: TSelector,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, '='>\n | ParameterReference,\n ): Query<TSchema, TTable, TReturn>;\n\n /**\n * Represents a condition to filter the query results.\n *\n * @param expressionFactory A function that takes a query builder and returns an expression.\n *\n * @returns A new query instance with the applied filter.\n *\n * @example\n * ```typescript\n * const query = db.query('users')\n * .where(({cmp, or}) => or(cmp('age', '>', 18), cmp('name', 'LIKE', '%John%')));\n * ```\n */\n where(\n expressionFactory: ExpressionFactory<TSchema, TTable>,\n ): Query<TSchema, TTable, TReturn>;\n\n whereExists(\n relationship: AvailableRelationships<TTable, TSchema>,\n options?: ExistsOptions | undefined,\n ): Query<TSchema, TTable, TReturn>;\n whereExists<TRelationship extends AvailableRelationships<TTable, TSchema>>(\n relationship: TRelationship,\n cb: (\n q: Query<TSchema, DestTableName<TTable, TSchema, TRelationship>>,\n ) => Query<TSchema, string>,\n options?: ExistsOptions | undefined,\n ): Query<TSchema, TTable, TReturn>;\n\n /**\n * Skips the rows of the query until row matches the given row. If opts is\n * provided, it determines whether the match is inclusive.\n *\n * @param row The row to start from. This is a partial row object and only the provided\n * fields will be used for the comparison.\n * @param opts Optional options object that specifies whether the match is inclusive.\n * If `inclusive` is true, the row will be included in the result.\n * If `inclusive` is false, the row will be excluded from the result and the result\n * will start from the next row.\n *\n * @returns A new query instance with the applied start condition.\n */\n start(\n row: Partial<PullRow<TTable, TSchema>>,\n opts?: {inclusive: boolean} | undefined,\n ): Query<TSchema, TTable, TReturn>;\n\n /**\n * Limits the number of rows returned by the query.\n * @param limit The maximum number of rows to return.\n *\n * @returns A new query instance with the applied limit.\n */\n limit(limit: number): Query<TSchema, TTable, TReturn>;\n\n /**\n * Orders the results by a specified column. 
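// Sketches (not from the package source) for whereExists and start, which
// have no inline examples above. The table, relationship, and column names
// are hypothetical, and `z` again stands in for a Zero client.
import type {Query} from './query.ts';

declare const z: {query: {issue: Query<any, 'issue'>}};

// Issues that have at least one label named 'bug'.
const bugs = z.query.issue.whereExists('labels', q => q.where('name', 'bug'));

// Resume an ordered scan after a known row (exclusive by default)...
const after42 = z.query.issue.orderBy('id', 'asc').start({id: 'issue-42'});
// ...or from that row onward, inclusively.
const from42 = z.query.issue
  .orderBy('id', 'asc')
  .start({id: 'issue-42'}, {inclusive: true});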
If multiple orderings are\n   * specified, the results will be ordered by the first column, then the\n   * second column, and so on.\n   *\n   * @param field The column name to order by.\n   * @param direction The direction to order the results (ascending or descending).\n   *\n   * @returns A new query instance with the applied order.\n   */\n  orderBy<TSelector extends Selector<PullTableSchema<TTable, TSchema>>>(\n    field: TSelector,\n    direction: 'asc' | 'desc',\n  ): Query<TSchema, TTable, TReturn>;\n\n  /**\n   * Limits the number of rows returned by the query to a single row and then\n   * unpacks the result so that you do not get an array of rows but a single\n   * row. This is useful when you expect only one row to be returned and want to\n   * work with the row directly.\n   *\n   * If the query returns no rows, the result will be `undefined`.\n   *\n   * @returns A new query instance with the applied limit to one row.\n   */\n  one(): Query<TSchema, TTable, TReturn | undefined>;\n\n  /**\n   * Creates a materialized view of the query. This is a view that will be kept\n   * in memory and updated as the query results change.\n   *\n   * Most of the time you will want to use the `useQuery` hook or the\n   * `run`/`then` method to get the results of a query. This method is only\n   * needed if you want access to the lower-level APIs of the view.\n   *\n   * @param ttl Time To Live. This is the amount of time to keep the rows\n   *            associated with this query after `TypedView.destroy`\n   *            has been called.\n   */\n  materialize(ttl?: TTL): TypedView<HumanReadable<TReturn>>;\n  /**\n   * Creates a custom materialized view using a provided factory function. This\n   * allows framework-specific bindings (like SolidJS, Vue, etc.) to create\n   * optimized views.\n   *\n   * @param factory A function that creates a custom view implementation\n   * @param ttl Optional Time To Live for the view's data after destruction\n   * @returns A custom view instance of type {@linkcode T}\n   *\n   * @example\n   * ```ts\n   * const view = query.materialize(createSolidViewFactory, '1m');\n   * ```\n   */\n  materialize<T>(\n    factory: ViewFactory<TSchema, TTable, TReturn, T>,\n    ttl?: TTL,\n  ): T;\n\n  /**\n   * Executes the query and returns the result once. The `options` parameter\n   * specifies whether to wait for complete results or return immediately,\n   * and the time to live for the query.\n   *\n   * - `{type: 'unknown'}`: Returns a snapshot of the data immediately.\n   * - `{type: 'complete'}`: Waits for the latest, complete results from the server.\n   *\n   * By default, `run` uses `{type: 'unknown'}` to avoid waiting for the server.\n   *\n   * `Query` implements `PromiseLike`, and calling `then` on it will invoke `run`\n   * with the default behavior (`unknown`).\n   *\n   * @param options Options to control the result type.\n   * @param options.type The type of result to return.\n   * @param options.ttl Time To Live. This is the amount of time to keep the rows\n   *                    associated with this query after the returned promise has\n   *                    resolved.\n   * @returns A promise resolving to the query result.\n   *\n   * @example\n   * ```js\n   * const result = await query.run({type: 'complete', ttl: '1m'});\n   * ```\n   */\n  run(options?: RunOptions): Promise<HumanReadable<TReturn>>;\n\n  /**\n   * Preload loads the data into the client's cache without keeping it in memory.\n   * This is useful for preloading data that will be used later.\n   *\n   * @param options Options for preloading the query.\n   * @param options.ttl Time To Live. 
This is the amount of time to keep the rows\n * associated with this query after {@linkcode cleanup} has\n * been called.\n */\n preload(options?: PreloadOptions): {\n cleanup: () => void;\n complete: Promise<void>;\n };\n}\n\nexport type PreloadOptions = {\n /**\n * Time To Live. This is the amount of time to keep the rows associated with\n * this query after {@linkcode cleanup} has been called.\n */\n ttl?: TTL | undefined;\n};\n\nexport type MaterializeOptions = PreloadOptions;\n\n/**\n * A helper type that tries to make the type more readable.\n */\nexport type HumanReadable<T> = undefined extends T ? Expand<T> : Expand<T>[];\n\n/**\n * A helper type that tries to make the type more readable.\n */\n// Note: opaque types expand incorrectly.\nexport type HumanReadableRecursive<T> = undefined extends T\n ? ExpandRecursive<T>\n : ExpandRecursive<T>[];\n\n/**\n * The kind of results we want to wait for when using {@linkcode run} on {@linkcode Query}.\n *\n * `unknown` means we don't want to wait for the server to return results. The result is a\n * snapshot of the data at the time the query was run.\n *\n * `complete` means we want to ensure that we have the latest result from the server. The\n * result is a complete and up-to-date view of the data. In some cases this means that we\n * have to wait for the server to return results. To ensure that we have the result for\n * this query you can preload it before calling run. See {@link preload}.\n *\n * By default, `run` uses `{type: 'unknown'}` to avoid waiting for the server.\n *\n * The `ttl` option is used to specify the time to live for the query. This is the amount of\n * time to keep the rows associated with this query after the promise has resolved.\n */\nexport type RunOptions = {\n type: 'unknown' | 'complete';\n ttl?: TTL;\n};\n\nexport const DEFAULT_RUN_OPTIONS_UNKNOWN = {\n type: 'unknown',\n} as const;\n\nexport const DEFAULT_RUN_OPTIONS_COMPLETE = {\n type: 'complete',\n} as const;\n", "import type {LogContext} from '@rocicorp/logger';\n\nexport type TimeUnit = 's' | 'm' | 'h' | 'd' | 'y';\n\n/**\n * Time To Live. This is used for query expiration.\n * - `forever` means the query will never expire.\n * - `none` means the query will expire immediately.\n * - A number means the query will expire after that many milliseconds.\n * - A negative number means the query will never expire, this is same as 'forever'.\n * - A string like `1s` means the query will expire after that many seconds.\n * - A string like `1m` means the query will expire after that many minutes.\n * - A string like `1h` means the query will expire after that many hours.\n * - A string like `1d` means the query will expire after that many days.\n * - A string like `1y` means the query will expire after that many years.\n */\nexport type TTL = `${number}${TimeUnit}` | 'forever' | 'none' | number;\n\nexport const DEFAULT_TTL: TTL = '5m';\nexport const DEFAULT_TTL_MS = 1_000 * 60 * 5;\n\nexport const DEFAULT_PRELOAD_TTL: TTL = 'none';\nexport const DEFAULT_PRELOAD_TTL_MS = 0;\n\nexport const MAX_TTL: TTL = '10m';\nexport const MAX_TTL_MS = 1_000 * 60 * 10;\n\nconst multiplier = {\n s: 1000,\n m: 60 * 1000,\n h: 60 * 60 * 1000,\n d: 24 * 60 * 60 * 1000,\n y: 365 * 24 * 60 * 60 * 1000,\n} as const;\n\nexport function parseTTL(ttl: TTL): number {\n if (typeof ttl === 'number') {\n return Number.isNaN(ttl) ? 0 : !Number.isFinite(ttl) || ttl < 0 ? 
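// Usage sketch (not from the package source) for preload() documented
// above: warm the client cache without materializing rows in memory.
// `z` is again a hypothetical Zero client.
import type {Query} from './query.ts';

declare const z: {query: {issue: Query<any, 'issue'>}};

async function warmCache() {
  const {cleanup, complete} = z.query.issue.limit(1000).preload({ttl: '5m'});
  await complete; // resolves once the server has synced the rows
  // ...later, when the rows no longer need to stay synced:
  cleanup(); // rows are retained for the ttl, then eligible for eviction
}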
-1 : ttl;\n }\n if (ttl === 'none') {\n return 0;\n }\n if (ttl === 'forever') {\n return -1;\n }\n const multi = multiplier[ttl[ttl.length - 1] as TimeUnit];\n return Number(ttl.slice(0, -1)) * multi;\n}\n\nexport function compareTTL(a: TTL, b: TTL): number {\n const ap = parseTTL(a);\n const bp = parseTTL(b);\n if (ap === -1 && bp !== -1) {\n return 1;\n }\n if (ap !== -1 && bp === -1) {\n return -1;\n }\n return ap - bp;\n}\n\nexport function normalizeTTL(ttl: TTL): TTL {\n if (typeof ttl === 'string') {\n return ttl;\n }\n\n if (ttl < 0) {\n return 'forever';\n }\n\n if (ttl === 0) {\n return 'none';\n }\n\n let shortest = ttl.toString();\n const lengthOfNumber = shortest.length;\n for (const unit of ['y', 'd', 'h', 'm', 's'] as const) {\n const multi = multiplier[unit];\n const value = ttl / multi;\n const candidate = `${value}${unit}`;\n if (candidate.length < shortest.length) {\n shortest = candidate;\n }\n }\n\n return (shortest.length < lengthOfNumber ? shortest : ttl) as TTL;\n}\n\nexport function clampTTL(ttl: TTL, lc?: Pick<LogContext, 'warn'>): number {\n const parsedTTL = parseTTL(ttl);\n if (parsedTTL === -1 || parsedTTL > 10 * 60 * 1000) {\n // 10 minutes in milliseconds\n lc?.warn?.(`TTL (${ttl}) is too high, clamping to ${MAX_TTL}`);\n return parseTTL(MAX_TTL);\n }\n return parsedTTL;\n}\n", "/* eslint-disable @typescript-eslint/naming-convention */\n/* eslint-disable @typescript-eslint/no-explicit-any */\nimport {resolver} from '@rocicorp/resolver';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {\n AST,\n CompoundKey,\n Condition,\n Ordering,\n Parameter,\n SimpleOperator,\n System,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row as IVMRow} from '../../../zero-protocol/src/data.ts';\nimport {\n hashOfAST,\n hashOfNameAndArgs,\n} from '../../../zero-protocol/src/query-hash.ts';\nimport type {Schema} from '../../../zero-schema/src/builder/schema-builder.ts';\nimport {\n isOneHop,\n isTwoHop,\n type TableSchema,\n} from '../../../zero-schema/src/table-schema.ts';\nimport {buildPipeline} from '../builder/builder.ts';\nimport {NotImplementedError} from '../error.ts';\nimport {ArrayView} from '../ivm/array-view.ts';\nimport type {Input} from '../ivm/operator.ts';\nimport type {Format, ViewFactory} from '../ivm/view.ts';\nimport {assertNoNotExists} from './assert-no-not-exists.ts';\nimport {\n and,\n cmp,\n ExpressionBuilder,\n simplifyCondition,\n type ExpressionFactory,\n} from './expression.ts';\nimport type {CustomQueryID} from './named.ts';\nimport type {GotCallback, QueryDelegate} from './query-delegate.ts';\nimport {\n delegateSymbol,\n type ExistsOptions,\n type GetFilterType,\n type HumanReadable,\n type MaterializeOptions,\n type PreloadOptions,\n type PullRow,\n type Query,\n type QueryReturn,\n type QueryTable,\n type RunOptions,\n} from './query.ts';\nimport {DEFAULT_PRELOAD_TTL_MS, DEFAULT_TTL_MS, type TTL} from './ttl.ts';\nimport type {TypedView} from './typed-view.ts';\nimport type {ErroredQuery} from '../../../zero-protocol/src/custom-queries.ts';\n\nexport type AnyQuery = Query<Schema, string, any>;\n\nexport function materialize<S extends Schema, T, Q>(\n query: Q,\n delegate: QueryDelegate,\n factoryOrOptions?:\n | ViewFactory<S, QueryTable<Q>, QueryReturn<Q>, T>\n | MaterializeOptions\n | undefined,\n maybeOptions?: MaterializeOptions | 
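// Worked examples (not from the package source) for the TTL helpers above.
import {clampTTL, normalizeTTL, parseTTL} from './ttl.ts';

parseTTL('5m'); // -> 300_000 ms
parseTTL('forever'); // -> -1
parseTTL(-10); // -> -1; negative numbers also mean 'forever'
normalizeTTL(300_000); // -> '5m', the shortest equivalent spelling
clampTTL('1h'); // -> 600_000, clamped to MAX_TTL ('10m'), with a warning
// if a LogContext is supplied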
undefined,\n) {\n if (typeof factoryOrOptions === 'function') {\n return (\n (query as AnyQuery)\n // eslint-disable-next-line no-unexpected-multiline\n [delegateSymbol](delegate)\n .materialize(factoryOrOptions, maybeOptions?.ttl)\n );\n }\n return (\n (query as AnyQuery)\n // eslint-disable-next-line no-unexpected-multiline\n [delegateSymbol](delegate)\n .materialize(factoryOrOptions?.ttl)\n );\n}\n\nconst astSymbol = Symbol();\n\nexport function ast(query: AnyQuery): AST {\n return (query as AbstractQuery<Schema, string>)[astSymbol];\n}\n\nexport function newQuery<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n>(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n table: TTable,\n): Query<TSchema, TTable> {\n return new QueryImpl(\n delegate,\n schema,\n table,\n {table},\n defaultFormat,\n undefined,\n );\n}\n\nexport function staticParam(\n anchorClass: 'authData' | 'preMutationRow',\n field: string | string[],\n): Parameter {\n return {\n type: 'static',\n anchor: anchorClass,\n // for backwards compatibility\n field: field.length === 1 ? field[0] : field,\n };\n}\n\nexport const SUBQ_PREFIX = 'zsubq_';\n\nexport const defaultFormat = {singular: false, relationships: {}} as const;\n\nexport const newQuerySymbol = Symbol();\n\nexport abstract class AbstractQuery<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n TReturn = PullRow<TTable, TSchema>,\n> implements Query<TSchema, TTable, TReturn>\n{\n readonly #schema: TSchema;\n protected readonly _delegate: QueryDelegate | undefined;\n readonly #tableName: TTable;\n readonly _ast: AST;\n readonly format: Format;\n #hash: string = '';\n readonly #system: System;\n readonly #currentJunction: string | undefined;\n readonly customQueryID: CustomQueryID | undefined;\n\n constructor(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n tableName: TTable,\n ast: AST,\n format: Format,\n system: System,\n customQueryID: CustomQueryID | undefined,\n currentJunction?: string | undefined,\n ) {\n this.#schema = schema;\n this._delegate = delegate;\n this.#tableName = tableName;\n this._ast = ast;\n this.format = format;\n this.#system = system;\n this.#currentJunction = currentJunction;\n this.customQueryID = customQueryID;\n }\n\n [delegateSymbol](delegate: QueryDelegate): Query<TSchema, TTable, TReturn> {\n return this[newQuerySymbol](\n delegate,\n this.#schema,\n this.#tableName,\n this._ast,\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n }\n\n nameAndArgs(\n name: string,\n args: ReadonlyArray<ReadonlyJSONValue>,\n ): Query<TSchema, TTable, TReturn> {\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n this._ast,\n this.format,\n {\n name,\n args: args as ReadonlyArray<ReadonlyJSONValue>,\n },\n this.#currentJunction,\n );\n }\n\n get [astSymbol](): AST {\n return this._ast;\n }\n\n get ast() {\n return this._completeAst();\n }\n\n hash(): string {\n if (!this.#hash) {\n this.#hash = hashOfAST(this._completeAst());\n }\n return this.#hash;\n }\n\n // TODO(arv): Put this in the delegate?\n protected abstract [newQuerySymbol]<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n TReturn,\n >(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n table: TTable,\n ast: AST,\n format: Format,\n customQueryID: CustomQueryID | undefined,\n currentJunction: string | undefined,\n ): AbstractQuery<TSchema, TTable, TReturn>;\n\n one = (): Query<TSchema, TTable, TReturn | undefined> =>\n 
this[newQuerySymbol](\n      this._delegate,\n      this.#schema,\n      this.#tableName,\n      {\n        ...this._ast,\n        limit: 1,\n      },\n      {\n        ...this.format,\n        singular: true,\n      },\n      this.customQueryID,\n      this.#currentJunction,\n    );\n\n  whereExists = (\n    relationship: string,\n    cbOrOptions?: ((q: AnyQuery) => AnyQuery) | ExistsOptions | undefined,\n    options?: ExistsOptions | undefined,\n  ): Query<TSchema, TTable, TReturn> => {\n    const cb = typeof cbOrOptions === 'function' ? cbOrOptions : undefined;\n    const opts = typeof cbOrOptions === 'function' ? options : cbOrOptions;\n    const flipped = opts?.flip ?? false;\n    return this.where(({exists}) => exists(relationship, cb, {flip: flipped}));\n  };\n\n  related = (\n    relationship: string,\n    cb?: (q: AnyQuery) => AnyQuery,\n  ): AnyQuery => {\n    if (relationship.startsWith(SUBQ_PREFIX)) {\n      throw new Error(\n        `Relationship names may not start with \"${SUBQ_PREFIX}\". That is a reserved prefix.`,\n      );\n    }\n    cb = cb ?? (q => q);\n\n    const related = this.#schema.relationships[this.#tableName][relationship];\n    assert(related, 'Invalid relationship');\n    if (isOneHop(related)) {\n      const {destSchema, destField, sourceField, cardinality} = related[0];\n      const q: AnyQuery = this[newQuerySymbol](\n        this._delegate,\n        this.#schema,\n        destSchema,\n        {\n          table: destSchema,\n          alias: relationship,\n        },\n        {\n          relationships: {},\n          singular: cardinality === 'one',\n        },\n        this.customQueryID,\n        undefined,\n      ) as AnyQuery;\n      // Intentionally not setting to `one` as it is a perf degradation\n      // and the user should not be making the mistake of setting cardinality to\n      // `one` when it is actually not.\n      // if (cardinality === 'one') {\n      //   q = q.one();\n      // }\n      const sq = cb(q) as AbstractQuery<Schema, string>;\n      assert(\n        isCompoundKey(sourceField),\n        'The source of a relationship must specify at least 1 field',\n      );\n      assert(\n        isCompoundKey(destField),\n        'The destination of a relationship must specify at least 1 field',\n      );\n      assert(\n        sourceField.length === destField.length,\n        'The source and destination of a relationship must have the same number of fields',\n      );\n\n      return this[newQuerySymbol](\n        this._delegate,\n        this.#schema,\n        this.#tableName,\n        {\n          ...this._ast,\n          related: [\n            ...(this._ast.related ?? 
[]),\n {\n system: this.#system,\n correlation: {\n parentField: sourceField,\n childField: destField,\n },\n subquery: addPrimaryKeysToAst(\n this.#schema.tables[destSchema],\n sq._ast,\n ),\n },\n ],\n },\n {\n ...this.format,\n relationships: {\n ...this.format.relationships,\n [relationship]: sq.format,\n },\n },\n this.customQueryID,\n this.#currentJunction,\n );\n }\n\n if (isTwoHop(related)) {\n const [firstRelation, secondRelation] = related;\n const {destSchema} = secondRelation;\n const junctionSchema = firstRelation.destSchema;\n const sq = cb(\n this[newQuerySymbol](\n this._delegate,\n this.#schema,\n destSchema,\n {\n table: destSchema,\n alias: relationship,\n },\n {\n relationships: {},\n singular: secondRelation.cardinality === 'one',\n },\n this.customQueryID,\n relationship,\n ) as unknown as QueryImpl<Schema, string>,\n ) as unknown as QueryImpl<Schema, string>;\n\n assert(isCompoundKey(firstRelation.sourceField), 'Invalid relationship');\n assert(isCompoundKey(firstRelation.destField), 'Invalid relationship');\n assert(isCompoundKey(secondRelation.sourceField), 'Invalid relationship');\n assert(isCompoundKey(secondRelation.destField), 'Invalid relationship');\n\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n related: [\n ...(this._ast.related ?? []),\n {\n system: this.#system,\n correlation: {\n parentField: firstRelation.sourceField,\n childField: firstRelation.destField,\n },\n hidden: true,\n subquery: {\n table: junctionSchema,\n alias: relationship,\n orderBy: addPrimaryKeys(\n this.#schema.tables[junctionSchema],\n undefined,\n ),\n related: [\n {\n system: this.#system,\n correlation: {\n parentField: secondRelation.sourceField,\n childField: secondRelation.destField,\n },\n subquery: addPrimaryKeysToAst(\n this.#schema.tables[destSchema],\n sq._ast,\n ),\n },\n ],\n },\n },\n ],\n },\n {\n ...this.format,\n relationships: {\n ...this.format.relationships,\n [relationship]: sq.format,\n },\n },\n this.customQueryID,\n this.#currentJunction,\n );\n }\n\n throw new Error(`Invalid relationship ${relationship}`);\n };\n\n where = (\n fieldOrExpressionFactory: string | ExpressionFactory<TSchema, TTable>,\n opOrValue?: SimpleOperator | GetFilterType<any, any, any> | Parameter,\n value?: GetFilterType<any, any, any> | Parameter,\n ): Query<TSchema, TTable, TReturn> => {\n let cond: Condition;\n\n if (typeof fieldOrExpressionFactory === 'function') {\n cond = fieldOrExpressionFactory(\n new ExpressionBuilder(this._exists) as ExpressionBuilder<\n TSchema,\n TTable\n >,\n );\n } else {\n assert(opOrValue !== undefined, 'Invalid condition');\n cond = cmp(fieldOrExpressionFactory, opOrValue, value);\n }\n\n const existingWhere = this._ast.where;\n if (existingWhere) {\n cond = and(existingWhere, cond);\n }\n\n const where = simplifyCondition(cond);\n\n if (this.#system === 'client') {\n // We need to do this after the DNF since the DNF conversion might change\n // an EXISTS to a NOT EXISTS condition (and vice versa).\n assertNoNotExists(where);\n }\n\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n where,\n },\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n };\n\n start = (\n row: Partial<PullRow<TTable, TSchema>>,\n opts?: {inclusive: boolean} | undefined,\n ): Query<TSchema, TTable, TReturn> =>\n this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n start: {\n row,\n exclusive: 
!opts?.inclusive,\n },\n },\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n\n limit = (limit: number): Query<TSchema, TTable, TReturn> => {\n if (limit < 0) {\n throw new Error('Limit must be non-negative');\n }\n if ((limit | 0) !== limit) {\n throw new Error('Limit must be an integer');\n }\n if (this.#currentJunction) {\n throw new NotImplementedError(\n 'Limit is not supported in junction relationships yet. Junction relationship being limited: ' +\n this.#currentJunction,\n );\n }\n\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n limit,\n },\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n };\n\n orderBy = <TSelector extends keyof TSchema['tables'][TTable]['columns']>(\n field: TSelector,\n direction: 'asc' | 'desc',\n ): Query<TSchema, TTable, TReturn> => {\n if (this.#currentJunction) {\n throw new NotImplementedError(\n 'Order by is not supported in junction relationships yet. Junction relationship being ordered: ' +\n this.#currentJunction,\n );\n }\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n orderBy: [...(this._ast.orderBy ?? []), [field as string, direction]],\n },\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n };\n\n protected _exists = (\n relationship: string,\n cb: ((query: AnyQuery) => AnyQuery) | undefined,\n options?: ExistsOptions | undefined,\n ): Condition => {\n cb = cb ?? (q => q);\n const flip = options?.flip ?? false;\n const related = this.#schema.relationships[this.#tableName][relationship];\n assert(related, 'Invalid relationship');\n\n if (isOneHop(related)) {\n const {destSchema, sourceField, destField} = related[0];\n assert(isCompoundKey(sourceField), 'Invalid relationship');\n assert(isCompoundKey(destField), 'Invalid relationship');\n\n const sq = cb(\n this[newQuerySymbol](\n this._delegate,\n this.#schema,\n destSchema,\n {\n table: destSchema,\n alias: `${SUBQ_PREFIX}${relationship}`,\n },\n defaultFormat,\n this.customQueryID,\n undefined,\n ) as AnyQuery,\n ) as unknown as QueryImpl<any, any>;\n return {\n type: 'correlatedSubquery',\n related: {\n system: this.#system,\n correlation: {\n parentField: sourceField,\n childField: destField,\n },\n subquery: addPrimaryKeysToAst(\n this.#schema.tables[destSchema],\n sq._ast,\n ),\n },\n op: 'EXISTS',\n flip,\n };\n }\n\n if (isTwoHop(related)) {\n const [firstRelation, secondRelation] = related;\n assert(isCompoundKey(firstRelation.sourceField), 'Invalid relationship');\n assert(isCompoundKey(firstRelation.destField), 'Invalid relationship');\n assert(isCompoundKey(secondRelation.sourceField), 'Invalid relationship');\n assert(isCompoundKey(secondRelation.destField), 'Invalid relationship');\n const {destSchema} = secondRelation;\n const junctionSchema = firstRelation.destSchema;\n const queryToDest = cb(\n this[newQuerySymbol](\n this._delegate,\n this.#schema,\n destSchema,\n {\n table: destSchema,\n alias: `${SUBQ_PREFIX}zhidden_${relationship}`,\n },\n defaultFormat,\n this.customQueryID,\n relationship,\n ) as AnyQuery,\n );\n\n return {\n type: 'correlatedSubquery',\n related: {\n system: this.#system,\n correlation: {\n parentField: firstRelation.sourceField,\n childField: firstRelation.destField,\n },\n subquery: {\n table: junctionSchema,\n alias: `${SUBQ_PREFIX}${relationship}`,\n orderBy: addPrimaryKeys(\n this.#schema.tables[junctionSchema],\n undefined,\n ),\n where: {\n type: 'correlatedSubquery',\n related: {\n 
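
The builder methods above (`where`, `whereExists`, `related`, `start`, `limit`, `orderBy`) are immutable: each returns a fresh query whose AST extends the receiver's. A minimal usage sketch, assuming a hypothetical `issue` table with `status` and `modified` columns, a `comments` relationship, and a `z` client exposing `z.query`:

```ts
// Hypothetical schema and client; every call returns a new Query,
// the receiving query is left unchanged.
const openIssues = z.query.issue
  .where('status', '=', 'open')             // cmp() condition, AND-ed into ast.where
  .whereExists('comments', q => q.limit(1)) // sugar for where(({exists}) => ...)
  .related('comments', q => q.orderBy('created', 'desc')) // subquery in ast.related
  .orderBy('modified', 'desc')              // appended to ast.orderBy
  .limit(50);                               // must be a non-negative integer
```
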
system: this.#system,\n correlation: {\n parentField: secondRelation.sourceField,\n childField: secondRelation.destField,\n },\n\n subquery: addPrimaryKeysToAst(\n this.#schema.tables[destSchema],\n (queryToDest as QueryImpl<any, any>)._ast,\n ),\n },\n op: 'EXISTS',\n flip,\n },\n },\n },\n op: 'EXISTS',\n flip,\n };\n }\n\n throw new Error(`Invalid relationship ${relationship}`);\n };\n\n #completedAST: AST | undefined;\n\n protected _completeAst(): AST {\n if (!this.#completedAST) {\n const finalOrderBy = addPrimaryKeys(\n this.#schema.tables[this.#tableName],\n this._ast.orderBy,\n );\n if (this._ast.start) {\n const {row} = this._ast.start;\n const narrowedRow: Writable<IVMRow> = {};\n for (const [field] of finalOrderBy) {\n narrowedRow[field] = row[field];\n }\n this.#completedAST = {\n ...this._ast,\n start: {\n ...this._ast.start,\n row: narrowedRow,\n },\n orderBy: finalOrderBy,\n };\n } else {\n this.#completedAST = {\n ...this._ast,\n orderBy: addPrimaryKeys(\n this.#schema.tables[this.#tableName],\n this._ast.orderBy,\n ),\n };\n }\n }\n return this.#completedAST;\n }\n\n abstract materialize(\n ttl?: TTL | undefined,\n ): TypedView<HumanReadable<TReturn>>;\n abstract materialize<T>(\n factory: ViewFactory<TSchema, TTable, TReturn, T>,\n ttl?: TTL | undefined,\n ): T;\n\n abstract run(options?: RunOptions): Promise<HumanReadable<TReturn>>;\n\n abstract preload(): {\n cleanup: () => void;\n complete: Promise<void>;\n };\n}\n\nconst completedAstSymbol = Symbol();\n\nexport function completedAST(q: Query<Schema, string, any>) {\n return (q as QueryImpl<Schema, string>)[completedAstSymbol];\n}\n\nexport class QueryImpl<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n TReturn = PullRow<TTable, TSchema>,\n> extends AbstractQuery<TSchema, TTable, TReturn> {\n readonly #system: System;\n\n constructor(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n tableName: TTable,\n ast: AST = {table: tableName},\n format: Format = defaultFormat,\n system: System = 'client',\n customQueryID?: CustomQueryID | undefined,\n currentJunction?: string | undefined,\n ) {\n super(\n delegate,\n schema,\n tableName,\n ast,\n format,\n system,\n customQueryID,\n currentJunction,\n );\n this.#system = system;\n }\n\n get [completedAstSymbol](): AST {\n return this._completeAst();\n }\n\n protected [newQuerySymbol]<\n TSchema extends Schema,\n TTable extends string,\n TReturn,\n >(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n tableName: TTable,\n ast: AST,\n format: Format,\n customQueryID: CustomQueryID | undefined,\n currentJunction: string | undefined,\n ): QueryImpl<TSchema, TTable, TReturn> {\n return new QueryImpl(\n delegate,\n schema,\n tableName,\n ast,\n format,\n this.#system,\n customQueryID,\n currentJunction,\n );\n }\n\n materialize<T>(\n factoryOrTTL?: ViewFactory<TSchema, TTable, TReturn, T> | TTL,\n ttl: TTL = DEFAULT_TTL_MS,\n ): T {\n const delegate = must(\n this._delegate,\n 'materialize requires a query delegate to be set',\n );\n let factory: ViewFactory<TSchema, TTable, TReturn, T> | undefined;\n if (typeof factoryOrTTL === 'function') {\n factory = factoryOrTTL;\n } else {\n ttl = factoryOrTTL ?? DEFAULT_TTL_MS;\n }\n const ast = this._completeAst();\n const queryID = this.customQueryID\n ? 
hashOfNameAndArgs(this.customQueryID.name, this.customQueryID.args)\n : this.hash();\n const queryCompleteResolver = resolver<true>();\n let queryComplete: boolean | ErroredQuery = delegate.defaultQueryComplete;\n const updateTTL = (newTTL: TTL) => {\n this.customQueryID\n ? delegate.updateCustomQuery(this.customQueryID, newTTL)\n : delegate.updateServerQuery(ast, newTTL);\n };\n\n const gotCallback: GotCallback = (got, error) => {\n if (error) {\n queryCompleteResolver.reject(error);\n queryComplete = error;\n return;\n }\n\n if (got) {\n delegate.addMetric(\n 'query-materialization-end-to-end',\n performance.now() - t0,\n queryID,\n ast,\n );\n queryComplete = true;\n queryCompleteResolver.resolve(true);\n }\n };\n\n let removeCommitObserver: (() => void) | undefined;\n const onDestroy = () => {\n input.destroy();\n removeCommitObserver?.();\n removeAddedQuery();\n };\n\n const t0 = performance.now();\n\n const removeAddedQuery = this.customQueryID\n ? delegate.addCustomQuery(ast, this.customQueryID, ttl, gotCallback)\n : delegate.addServerQuery(ast, ttl, gotCallback);\n\n const input = buildPipeline(ast, delegate, queryID);\n\n const view = delegate.batchViewUpdates(() =>\n (factory ?? arrayViewFactory)(\n this,\n input,\n this.format,\n onDestroy,\n cb => {\n removeCommitObserver = delegate.onTransactionCommit(cb);\n },\n queryComplete || queryCompleteResolver.promise,\n updateTTL,\n ),\n );\n\n delegate.addMetric(\n 'query-materialization-client',\n performance.now() - t0,\n queryID,\n );\n\n return view as T;\n }\n\n run(options?: RunOptions): Promise<HumanReadable<TReturn>> {\n const delegate = must(\n this._delegate,\n 'run requires a query delegate to be set',\n );\n delegate.assertValidRunOptions(options);\n const v: TypedView<HumanReadable<TReturn>> = this.materialize(options?.ttl);\n if (options?.type === 'complete') {\n return new Promise(resolve => {\n v.addListener((data, type) => {\n if (type === 'complete') {\n v.destroy();\n resolve(data as HumanReadable<TReturn>);\n } else if (type === 'error') {\n v.destroy();\n resolve(Promise.reject(data));\n }\n });\n });\n }\n\n options?.type satisfies 'unknown' | undefined;\n\n const ret = v.data;\n v.destroy();\n return Promise.resolve(ret);\n }\n\n preload(options?: PreloadOptions): {\n cleanup: () => void;\n complete: Promise<void>;\n } {\n const delegate = must(\n this._delegate,\n 'preload requires a query delegate to be set',\n );\n const ttl = options?.ttl ?? DEFAULT_PRELOAD_TTL_MS;\n const ast = this._completeAst();\n const {resolve, promise: complete} = resolver<void>();\n if (this.customQueryID) {\n const cleanup = delegate.addCustomQuery(\n ast,\n this.customQueryID,\n ttl,\n got => {\n if (got) {\n resolve();\n }\n },\n );\n return {\n cleanup,\n complete,\n };\n }\n\n const cleanup = delegate.addServerQuery(ast, ttl, got => {\n if (got) {\n resolve();\n }\n });\n return {\n cleanup,\n complete,\n };\n }\n}\n\nfunction addPrimaryKeys(\n schema: TableSchema,\n orderBy: Ordering | undefined,\n): Ordering {\n orderBy = orderBy ?? 
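
`materialize`, `run`, and `preload` above are the three ways a query is consumed. A sketch of each path, reusing the hypothetical `openIssues` query from earlier (a query delegate is assumed to be set):

```ts
// 1. Incremental view: stays up to date until destroyed.
const view = openIssues.materialize();
view.addListener(rows => console.log('changed', rows));

// 2. One-shot: with {type: 'complete'} the promise resolves only once a
//    server-confirmed ('complete') result arrives, then the view is destroyed.
const rows = await openIssues.run({type: 'complete'});

// 3. Cache warming: registers the query (and its TTL) without building a
//    client-side view; `complete` resolves once the server reports it as got.
const {cleanup, complete} = openIssues.preload();
await complete;
cleanup();
```
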
[];\n const {primaryKey} = schema;\n const primaryKeysToAdd = new Set(primaryKey);\n\n for (const [field] of orderBy) {\n primaryKeysToAdd.delete(field);\n }\n\n if (primaryKeysToAdd.size === 0) {\n return orderBy;\n }\n\n return [\n ...orderBy,\n ...[...primaryKeysToAdd].map(key => [key, 'asc'] as [string, 'asc']),\n ];\n}\n\nfunction addPrimaryKeysToAst(schema: TableSchema, ast: AST): AST {\n return {\n ...ast,\n orderBy: addPrimaryKeys(schema, ast.orderBy),\n };\n}\n\nfunction arrayViewFactory<\n TSchema extends Schema,\n TTable extends string,\n TReturn,\n>(\n _query: AbstractQuery<TSchema, TTable, TReturn>,\n input: Input,\n format: Format,\n onDestroy: () => void,\n onTransactionCommit: (cb: () => void) => void,\n queryComplete: true | ErroredQuery | Promise<true>,\n updateTTL: (ttl: TTL) => void,\n): TypedView<HumanReadable<TReturn>> {\n const v = new ArrayView<HumanReadable<TReturn>>(\n input,\n format,\n queryComplete,\n updateTTL,\n );\n v.onDestroy = onDestroy;\n onTransactionCommit(() => {\n v.flush();\n });\n return v;\n}\n\nfunction isCompoundKey(field: readonly string[]): field is CompoundKey {\n return Array.isArray(field) && field.length >= 1;\n}\n", "import {h64} from '../../shared/src/hash.ts';\nimport {normalizeAST, type AST} from './ast.ts';\n\nconst hashCache = new WeakMap<AST, string>();\n\nexport function hashOfAST(ast: AST): string {\n const normalized = normalizeAST(ast);\n const cached = hashCache.get(normalized);\n if (cached) {\n return cached;\n }\n const hash = h64(JSON.stringify(normalized)).toString(36);\n hashCache.set(normalized, hash);\n return hash;\n}\n\nexport function hashOfNameAndArgs(\n name: string,\n args: readonly unknown[],\n): string {\n const argsString = JSON.stringify(args);\n return h64(`${name}:${argsString}`).toString(36);\n}\n", "import type {FetchRequest, Input, InputBase, Output} from './operator.ts';\nimport {drainStreams, type Node} from './data.ts';\nimport type {Change} from './change.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\nimport type {BuilderDelegate} from '../builder/builder.ts';\n\n/**\n * The `where` clause of a ZQL query is implemented using a sub-graph of\n * `FilterOperators`. This sub-graph starts with a `FilterStart` operator\n * that adapts from the normal `Operator` `Output` to the\n * `FilterOperator` `FilterInput`, and ends with a `FilterEnd` operator that\n * adapts from a `FilterOperator` `FilterOutput` to a normal `Operator` `Input`.\n * `FilterOperator`s do not have `fetch` or `cleanup`; instead they have a\n * `filter(node: Node, cleanup: boolean): boolean` method.\n * They also have `push`, which is just like normal `Operator` push.\n * Not having a `fetch` means these `FilterOperator`s cannot modify\n * `Node` `row`s or `relationship`s, but they shouldn't; they should just\n * filter.\n *\n * This `FilterOperator` abstraction enables much more efficient processing of\n * `fetch` for `where` clauses containing OR conditions.\n *\n * See https://github.com/rocicorp/mono/pull/4339\n */\n\nexport interface FilterInput extends InputBase {\n /** Tell the input where to send its output. 
*/\n setFilterOutput(output: FilterOutput): void;\n}\n\nexport interface FilterOutput extends Output {\n filter(node: Node, cleanup: boolean): boolean;\n}\n\nexport interface FilterOperator extends FilterInput, FilterOutput {}\n\n/**\n * An implementation of FilterOutput that throws if push or filter is called.\n * It is used as the initial value for an operator's output before it is\n * set.\n */\nexport const throwFilterOutput: FilterOutput = {\n push(_change: Change): void {\n throw new Error('Output not set');\n },\n\n filter(_node: Node, _cleanup): boolean {\n throw new Error('Output not set');\n },\n};\n\nexport class FilterStart implements FilterInput, Output {\n readonly #input: Input;\n #output: FilterOutput = throwFilterOutput;\n\n constructor(input: Input) {\n this.#input = input;\n input.setOutput(this);\n }\n\n setFilterOutput(output: FilterOutput) {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n push(change: Change) {\n this.#output.push(change, this);\n }\n\n *fetch(req: FetchRequest): Stream<Node> {\n for (const node of this.#input.fetch(req)) {\n if (this.#output.filter(node, false)) {\n yield node;\n }\n }\n }\n\n *cleanup(req: FetchRequest): Stream<Node> {\n for (const node of this.#input.cleanup(req)) {\n if (this.#output.filter(node, true)) {\n yield node;\n } else {\n drainStreams(node);\n }\n }\n }\n}\n\nexport class FilterEnd implements Input, FilterOutput {\n readonly #start: FilterStart;\n readonly #input: FilterInput;\n\n #output: Output = throwFilterOutput;\n\n constructor(start: FilterStart, input: FilterInput) {\n this.#start = start;\n this.#input = input;\n input.setFilterOutput(this);\n }\n\n *fetch(req: FetchRequest): Stream<Node> {\n for (const node of this.#start.fetch(req)) {\n yield node;\n }\n }\n\n *cleanup(req: FetchRequest): Stream<Node> {\n for (const node of this.#start.cleanup(req)) {\n yield node;\n }\n }\n\n filter(_node: Node, _cleanup: boolean) {\n return true;\n }\n\n setOutput(output: Output) {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n push(change: Change) {\n this.#output.push(change, this);\n }\n}\n\nexport function buildFilterPipeline(\n input: Input,\n delegate: BuilderDelegate,\n pipeline: (filterInput: FilterInput) => FilterInput,\n): Input {\n const filterStart = new FilterStart(input);\n delegate.addEdge(input, filterStart);\n const middle = pipeline(filterStart);\n delegate.addEdge(filterStart, middle);\n const filterEnd = new FilterEnd(filterStart, middle);\n delegate.addEdge(middle, filterEnd);\n return filterEnd;\n}\n", "import type {JSONValue} from '../../../shared/src/json.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * Input to an operator.\n */\nexport interface InputBase {\n /** The schema of the data this input returns. */\n getSchema(): SourceSchema;\n\n /**\n * Completely destroy the input. Destroying an input\n * causes it to call destroy on its upstreams, fully\n * cleaning up a pipeline.\n */\n destroy(): void;\n}\n\nexport interface Input extends InputBase {\n /** Tell the input where to send its output. 
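
`buildFilterPipeline` above is the seam between the two worlds: `FilterStart` adapts a normal `Input` into the filter sub-graph and `FilterEnd` adapts it back. A sketch of wiring a single predicate through it (the `source` input and `delegate` are assumed to exist; `Filter` is the predicate operator defined later in this bundle):

```ts
// The pipeline callback receives FilterStart (a FilterInput) and returns the
// last FilterOperator in the chain; FilterEnd is appended automatically.
const filtered = buildFilterPipeline(source, delegate, filterInput =>
  new Filter(filterInput, row => row.status === 'open'),
);
// filtered.fetch(req) now yields only nodes for which every
// FilterOperator in the sub-graph returned true from filter().
```
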
*/\n setOutput(output: Output): void;\n\n /**\n * Fetch data. May modify the data in place.\n * Returns nodes sorted in order of `SourceSchema.compareRows`.\n */\n fetch(req: FetchRequest): Stream<Node>;\n\n /**\n * Cleanup maintained state. This is called when `output` will no longer need\n * the data returned by {@linkcode fetch}. The receiving operator should clean up any\n * resources it has allocated to service such requests.\n *\n * This is different from {@linkcode destroy}, which means this input will no longer\n * be called at all, for any input.\n *\n * Returns the same thing as {@linkcode fetch}. This allows callers to properly\n * propagate the cleanup message through the graph.\n */\n cleanup(req: FetchRequest): Stream<Node>;\n}\n\nexport type FetchRequest = {\n readonly constraint?: Constraint | undefined;\n /** If supplied, `start.row` must have previously been output by fetch or push. */\n readonly start?: Start | undefined;\n\n /** Whether to fetch in reverse order of the SourceSchema's sort. */\n readonly reverse?: boolean | undefined;\n};\n\nexport type Start = {\n readonly row: Row;\n readonly basis: 'at' | 'after';\n};\n\n/**\n * An output for an operator. Typically another Operator but can also be\n * the code running the pipeline.\n */\nexport interface Output {\n /**\n * Push incremental changes to data previously received with fetch().\n * Consumers must apply all pushed changes or the incremental result will\n * be incorrect.\n * Callers must maintain some invariants for correct operation:\n * - Only add rows which do not already exist (by deep equality).\n * - Only remove rows which do exist (by deep equality).\n */\n push(change: Change, pusher: InputBase): void;\n}\n\n/**\n * An implementation of Output that throws if pushed to. It is used as the\n * initial value for an operator's output before it is set.\n */\nexport const throwOutput: Output = {\n push(_change: Change): void {\n throw new Error('Output not set');\n },\n};\n\n/**\n * Operators are arranged into pipelines.\n * They are stateful.\n * Each operator is an input to the next operator in the chain and an output\n * to the previous.\n */\nexport interface Operator extends Input, Output {}\n\n/**\n * Operators get access to storage that they can store their internal\n * state in.\n */\nexport interface Storage {\n set(key: string, value: JSONValue): void;\n get(key: string, def?: JSONValue): JSONValue | undefined;\n /**\n * If options is not specified, defaults to scanning all entries.\n */\n scan(options?: {prefix: string}): Stream<[string, JSONValue]>;\n del(key: string): void;\n}\n", "/**\n * Streams are lazy, forward-only iterables.\n * Once a stream reaches the end it can't be restarted.\n * They are iterables, not iterators, so that they can be used in for-each,\n * and so that we know when the consumer has stopped iterating the stream. 
This allows us\n * to clean up resources like SQL statements.\n */\nexport type Stream<T> = Iterable<T>;\n\nexport function* take<T>(stream: Stream<T>, limit: number): Stream<T> {\n if (limit < 1) {\n return;\n }\n let count = 0;\n for (const v of stream) {\n yield v;\n if (++count === limit) {\n break;\n }\n }\n}\n\nexport function first<T>(stream: Stream<T>): T | undefined {\n const it = stream[Symbol.iterator]();\n const {value} = it.next();\n it.return?.();\n return value;\n}\n", "import {areEqual} from '../../../shared/src/arrays.ts';\nimport {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport type {CompoundKey} from '../../../zero-protocol/src/ast.ts';\nimport {type Change} from './change.ts';\nimport {normalizeUndefined, type Node, type NormalizedValue} from './data.ts';\nimport {\n throwFilterOutput,\n type FilterInput,\n type FilterOperator,\n type FilterOutput,\n} from './filter-operators.ts';\nimport {type Storage} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first} from './stream.ts';\n\ntype SizeStorageKeyPrefix = `row/${string}/`;\n/**\n * Key is of format\n * `row/${JSON.stringify(parentJoinKeyValues)}/${JSON.stringify(primaryKeyValues)}`\n * This format allows us to look up an existing cached size for a given set of\n * `parentJoinKeyValues` by scanning for prefix\n * `row/${JSON.stringify(parentJoinKeyValues)}/` and using the first result, and\n * to look up the cached size for a specific row by the full key.\n * If the parent join and primary key are the same, then the format is changed to\n * `row//${JSON.stringify(primaryKeyValues)}` to shorten the key, since there\n * is no point in looking up an existing cached size by\n * `parentJoinKeyValues` if the specific row's cached size is missing.\n */\ntype SizeStorageKey = `${SizeStorageKeyPrefix}${string}`;\n\ninterface ExistsStorage {\n get(key: SizeStorageKey): number | undefined;\n set(key: SizeStorageKey, value: number): void;\n del(key: SizeStorageKey): void;\n scan({prefix}: {prefix: SizeStorageKeyPrefix}): Iterable<[string, number]>;\n}\n\n/**\n * The Exists operator filters data based on whether or not a relationship is\n * non-empty.\n */\nexport class Exists implements FilterOperator {\n readonly #input: FilterInput;\n readonly #relationshipName: string;\n readonly #storage: ExistsStorage;\n readonly #not: boolean;\n readonly #parentJoinKey: CompoundKey;\n readonly #noSizeReuse: boolean;\n\n #output: FilterOutput = throwFilterOutput;\n\n /**\n * This instance variable is `true` when this operator is processing a `push`,\n * and is used to disable reuse of cached sizes across rows with the\n * same parent join key value.\n * This is necessary because, during a push, relationships can be inconsistent\n * due to push communicating changes (which may change multiple Nodes) one\n * Node at a time.\n */\n #inPush = false;\n\n constructor(\n input: FilterInput,\n storage: Storage,\n relationshipName: string,\n parentJoinKey: CompoundKey,\n type: 'EXISTS' | 'NOT EXISTS',\n ) {\n this.#input = input;\n this.#relationshipName = relationshipName;\n this.#input.setFilterOutput(this);\n this.#storage = storage as ExistsStorage;\n assert(\n this.#input.getSchema().relationships[relationshipName],\n `Input schema missing ${relationshipName}`,\n );\n this.#not = type === 'NOT EXISTS';\n this.#parentJoinKey = parentJoinKey;\n\n // If the parentJoinKey is the primary key, no sense in trying to reuse.\n this.#noSizeReuse = areEqual(\n parentJoinKey,\n this.#input.getSchema().primaryKey,\n 
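
The `take` and `first` helpers above illustrate the forward-only contract: `first` pulls a single value and then calls `return()` on the iterator so the producer can release resources (e.g., a prepared SQL statement). A tiny sketch:

```ts
// An infinite, lazily produced stream.
function* numbers(): Stream<number> {
  for (let i = 0; ; i++) yield i;
}

[...take(numbers(), 3)]; // [0, 1, 2] — stops pulling after the limit
first(numbers());        // 0 — closes the iterator via return()
```
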
);\n }\n\n setFilterOutput(output: FilterOutput): void {\n this.#output = output;\n }\n\n filter(node: Node, cleanup: boolean): boolean {\n const result = this.#filter(node) && this.#output.filter(node, cleanup);\n if (cleanup) {\n this.#delSize(node);\n }\n return result;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n push(change: Change) {\n assert(!this.#inPush, 'Unexpected re-entrancy');\n this.#inPush = true;\n try {\n switch (change.type) {\n // add, remove and edit cannot change the size of the\n // this.#relationshipName relationship, so simply #pushWithFilter\n case 'add':\n case 'edit': {\n this.#pushWithFilter(change);\n return;\n }\n case 'remove': {\n const size = this.#getSize(change.node);\n // If size is undefined, this operator has not output\n // this row before and so it is unnecessary to output a remove for\n // it.\n if (size === undefined) {\n return;\n }\n this.#pushWithFilter(change, size);\n this.#delSize(change.node);\n return;\n }\n case 'child':\n // Only add and remove child changes for the\n // this.#relationshipName relationship can change the size\n // of the this.#relationshipName relationship; for other\n // child changes simply #pushWithFilter\n if (\n change.child.relationshipName !== this.#relationshipName ||\n change.child.change.type === 'edit' ||\n change.child.change.type === 'child'\n ) {\n this.#pushWithFilter(change);\n return;\n }\n switch (change.child.change.type) {\n case 'add': {\n let size = this.#getSize(change.node);\n if (size !== undefined) {\n size++;\n this.#setSize(change.node, size);\n } else {\n size = this.#fetchSize(change.node);\n }\n if (size === 1) {\n if (this.#not) {\n // Since the add child change currently being processed is not\n // pushed to output, the added child needs to be excluded from\n // the remove being pushed to output (since the child has\n // never been added to the output).\n this.#output.push(\n {\n type: 'remove',\n node: {\n row: change.node.row,\n relationships: {\n ...change.node.relationships,\n [this.#relationshipName]: () => [],\n },\n },\n },\n this,\n );\n } else {\n this.#output.push(\n {\n type: 'add',\n node: change.node,\n },\n this,\n );\n }\n } else {\n this.#pushWithFilter(change, size);\n }\n return;\n }\n case 'remove': {\n let size = this.#getSize(change.node);\n if (size !== undefined) {\n assert(size > 0);\n size--;\n this.#setSize(change.node, size);\n } else {\n size = this.#fetchSize(change.node);\n }\n if (size === 0) {\n if (this.#not) {\n this.#output.push(\n {\n type: 'add',\n node: change.node,\n },\n this,\n );\n } else {\n // Since the remove child change currently being processed is\n // not pushed to output, the removed child needs to be added to\n // the remove being pushed to output.\n this.#output.push(\n {\n type: 'remove',\n node: {\n row: change.node.row,\n relationships: {\n ...change.node.relationships,\n [this.#relationshipName]: () => [\n change.child.change.node,\n ],\n },\n },\n },\n this,\n );\n }\n } else {\n this.#pushWithFilter(change, size);\n }\n return;\n }\n }\n return;\n default:\n unreachable(change);\n }\n } finally {\n this.#inPush = false;\n }\n }\n\n /**\n * Returns whether or not the node's this.#relationshipName\n * relationship passes the exists/not exists filter condition.\n * If the optional `size` is passed it is used.\n * Otherwise, if there is a stored size for the row it is used.\n * Otherwise the size is computed by streaming the node's\n * relationship with 
this.#relationshipName (this computed size is also\n * stored).\n */\n #filter(node: Node, size?: number): boolean {\n const exists = (size ?? this.#getOrFetchSize(node)) > 0;\n return this.#not ? !exists : exists;\n }\n\n /**\n * Pushes a change if this.#filter is true for its row.\n */\n #pushWithFilter(change: Change, size?: number): void {\n if (this.#filter(change.node, size)) {\n this.#output.push(change, this);\n }\n }\n\n #getSize(node: Node): number | undefined {\n return this.#storage.get(this.#makeSizeStorageKey(node));\n }\n\n #setSize(node: Node, size: number) {\n this.#storage.set(this.#makeSizeStorageKey(node), size);\n }\n\n #delSize(node: Node) {\n this.#storage.del(this.#makeSizeStorageKey(node));\n }\n\n #getOrFetchSize(node: Node): number {\n const size = this.#getSize(node);\n if (size !== undefined) {\n return size;\n }\n return this.#fetchSize(node);\n }\n\n #fetchSize(node: Node): number {\n if (!this.#noSizeReuse && !this.#inPush) {\n const cachedSizeEntry = first(\n this.#storage.scan({\n prefix: this.#makeSizeStorageKeyPrefix(node),\n }),\n );\n if (cachedSizeEntry !== undefined) {\n this.#setSize(node, cachedSizeEntry[1]);\n return cachedSizeEntry[1];\n }\n }\n\n const relationship = node.relationships[this.#relationshipName];\n assert(relationship);\n let size = 0;\n for (const _relatedNode of relationship()) {\n size++;\n }\n\n this.#setSize(node, size);\n return size;\n }\n\n #makeSizeStorageKeyPrefix(node: Node): SizeStorageKeyPrefix {\n return `row/${\n this.#noSizeReuse\n ? ''\n : JSON.stringify(this.#getKeyValues(node, this.#parentJoinKey))\n }/`;\n }\n\n #makeSizeStorageKey(node: Node): SizeStorageKey {\n return `${this.#makeSizeStorageKeyPrefix(node)}${JSON.stringify(\n this.#getKeyValues(node, this.#input.getSchema().primaryKey),\n )}`;\n }\n\n #getKeyValues(node: Node, def: CompoundKey): NormalizedValue[] {\n const values: NormalizedValue[] = [];\n for (const key of def) {\n values.push(normalizeUndefined(node.row[key]));\n }\n return values;\n }\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport {emptyArray} from '../../../shared/src/sentinels.ts';\nimport type {Change} from './change.ts';\nimport type {Node} from './data.ts';\nimport type {InputBase, Output} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * # pushAccumulatedChanges\n *\n * Pushes the changes that were accumulated by\n * [fan-out, fan-in] or [ufo, ufi] sub-graphs.\n *\n * This function is called at the end of the sub-graph.\n *\n * The sub-graphs represent `OR`s.\n *\n * Changes that can enter the sub-graphs:\n * 1. child (due to exists joins being above the sub-graph)\n * 2. add\n * 3. remove\n * 4. 
edit\n *\n * # Changes that can exit into `pushAccumulatedChanges`:\n *\n * ## Child\n * If a `child` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `child` change\n * - stop the `child` change (e.g., filter)\n * - convert it to an `add` or `remove` (e.g., exists filter)\n *\n * ## Add\n * If an `add` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `add` change\n * - hide the change (e.g., filter)\n *\n * ## Remove\n * If a `remove` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `remove` change\n * - hide the change (e.g., filter)\n *\n * ## Edit\n * If an `edit` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `edit` change\n * - convert it to an `add` (e.g., filter where old didn't match but new does)\n * - convert it to a `remove` (e.g., filter where old matched but new doesn't)\n *\n * This results in some invariants:\n * - an add coming in will only create adds coming out\n * - a remove coming in will only create removes coming out\n * - an edit coming in can create adds, removes, and edits coming out\n * - a child coming in can create adds, removes, and children coming out\n *\n * # Return of `pushAccumulatedChanges`\n *\n * This function will only push a single change.\n * Given the above invariants, how is this possible?\n *\n * An add that becomes many `adds` results in a single add\n * as the `add` is the same row across all adds. Branches do not change the row.\n *\n * A remove that becomes many `removes` results in a single remove\n * for the same reason.\n *\n * If a child enters and exits, it takes precedence over all other changes.\n * If a child enters and is converted only to add and remove it exits as an edit.\n * If a child enters and is converted to only add or only remove, it exits as that change.\n *\n * If an edit enters and is converted to add and remove it exits as an edit.\n * If an edit enters and is converted to only add or only remove, it exits as that change.\n * If an edit enters and exits as edits only, it exits as a single edit.\n */\nexport function pushAccumulatedChanges(\n accumulatedPushes: Change[],\n output: Output,\n pusher: InputBase,\n fanOutChangeType: Change['type'],\n mergeRelationships: (existing: Change, incoming: Change) => Change,\n addEmptyRelationships: (change: Change) => Change,\n) {\n if (accumulatedPushes.length === 0) {\n // It is possible for no forks to pass along the push.\n // E.g., if no filters match in any fork.\n return;\n }\n\n // collapse down to a single change per type\n const candidatesToPush = new Map<Change['type'], Change>();\n for (const change of accumulatedPushes) {\n if (fanOutChangeType === 'child' && change.type !== 'child') {\n assert(\n candidatesToPush.has(change.type) === false,\n () =>\n `Fan-in:child expected at most one ${change.type} when fan-out is of type child`,\n );\n }\n\n const existing = candidatesToPush.get(change.type);\n let mergedChange = change;\n if (existing) {\n // merge in relationships\n mergedChange = mergeRelationships(existing, change);\n }\n candidatesToPush.set(change.type, mergedChange);\n }\n\n accumulatedPushes.length = 0;\n\n const types = [...candidatesToPush.keys()];\n /**\n * Based on the received `fanOutChangeType` only certain output types are valid.\n *\n * - remove must result in all removes\n * - add must result in all adds\n * - edit must result 
in adds, removes, or edits\n * - child must result in a single add or single remove or many child changes\n * - Single add or remove because the relationship will be unique to one exists check within the fan-out, fan-in sub-graph\n * - Many child changes because other operators may preserve the child change\n */\n switch (fanOutChangeType) {\n case 'remove':\n assert(\n types.length === 1 && types[0] === 'remove',\n 'Fan-in:remove expected all removes',\n );\n output.push(\n addEmptyRelationships(must(candidatesToPush.get('remove'))),\n pusher,\n );\n return;\n case 'add':\n assert(\n types.length === 1 && types[0] === 'add',\n 'Fan-in:add expected all adds',\n );\n output.push(\n addEmptyRelationships(must(candidatesToPush.get('add'))),\n pusher,\n );\n return;\n case 'edit': {\n assert(\n types.every(\n type => type === 'add' || type === 'remove' || type === 'edit',\n ),\n 'Fan-in:edit expected all adds, removes, or edits',\n );\n const addChange = candidatesToPush.get('add');\n const removeChange = candidatesToPush.get('remove');\n let editChange = candidatesToPush.get('edit');\n\n // If an `edit` is present, it supersedes `add` and `remove`\n // as it semantically represents both.\n if (editChange) {\n if (addChange) {\n editChange = mergeRelationships(editChange, addChange);\n }\n if (removeChange) {\n editChange = mergeRelationships(editChange, removeChange);\n }\n output.push(addEmptyRelationships(editChange), pusher);\n return;\n }\n\n // If `edit` didn't make it through but both `add` and `remove` did,\n // convert back to an edit.\n //\n // When can this happen?\n //\n // EDIT old: a=1, new: a=2\n // |\n // FanOut\n // / \\\n // a=1 a=2\n // | |\n // remove add\n // \\ /\n // FanIn\n //\n // The left filter converts the edit into a remove.\n // The right filter converts the edit into an add.\n if (addChange && removeChange) {\n output.push(\n addEmptyRelationships({\n type: 'edit',\n node: addChange.node,\n oldNode: removeChange.node,\n } as const),\n pusher,\n );\n return;\n }\n\n output.push(\n addEmptyRelationships(must(addChange ?? removeChange)),\n pusher,\n );\n return;\n }\n case 'child': {\n assert(\n types.every(\n type =>\n type === 'add' || // exists can change child to add or remove\n type === 'remove' || // exists can change child to add or remove\n type === 'child', // other operators may preserve the child change\n ),\n 'Fan-in:child expected all adds, removes, or children',\n );\n assert(\n types.length <= 2,\n 'Fan-in:child expected at most 2 types on a child change from fan-out',\n );\n\n // If any branch preserved the original child change, that takes precedence over all other changes.\n const childChange = candidatesToPush.get('child');\n if (childChange) {\n output.push(childChange, pusher);\n return;\n }\n\n const addChange = candidatesToPush.get('add');\n const removeChange = candidatesToPush.get('remove');\n\n assert(\n addChange === undefined || removeChange === undefined,\n 'Fan-in:child expected either add or remove, not both',\n );\n\n output.push(\n addEmptyRelationships(must(addChange ?? 
removeChange)),\n pusher,\n );\n return;\n }\n default:\n fanOutChangeType satisfies never;\n }\n}\n\n/**\n * Puts relationships from `right` into `left` if they don't already exist in `left`.\n */\nexport function mergeRelationships(left: Change, right: Change): Change {\n // change types will always match\n // unless we have an edit on the left\n // then the right could be edit, add, or remove\n if (left.type === right.type) {\n switch (left.type) {\n case 'add': {\n return {\n type: 'add',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n };\n }\n case 'remove': {\n return {\n type: 'remove',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n };\n }\n case 'edit': {\n assert(right.type === 'edit');\n // merge edits into a single edit\n return {\n type: 'edit',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n oldNode: {\n row: left.oldNode.row,\n relationships: {\n ...right.oldNode.relationships,\n ...left.oldNode.relationships,\n },\n },\n };\n }\n }\n }\n\n // left is always an edit here\n assert(left.type === 'edit');\n switch (right.type) {\n case 'add': {\n return {\n type: 'edit',\n node: {\n ...left.node,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n oldNode: left.oldNode,\n };\n }\n case 'remove': {\n return {\n type: 'edit',\n node: left.node,\n oldNode: {\n ...left.oldNode,\n relationships: {\n ...right.node.relationships,\n ...left.oldNode.relationships,\n },\n },\n };\n }\n }\n\n unreachable();\n}\n\nexport function makeAddEmptyRelationships(\n schema: SourceSchema,\n): (change: Change) => Change {\n return (change: Change): Change => {\n if (Object.keys(schema.relationships).length === 0) {\n return change;\n }\n\n switch (change.type) {\n case 'add':\n case 'remove': {\n const ret = {\n ...change,\n node: {\n ...change.node,\n relationships: {\n ...change.node.relationships,\n },\n },\n };\n\n mergeEmpty(ret.node.relationships, Object.keys(schema.relationships));\n\n return ret;\n }\n case 'edit': {\n const ret = {\n ...change,\n node: {\n ...change.node,\n relationships: {\n ...change.node.relationships,\n },\n },\n oldNode: {\n ...change.oldNode,\n relationships: {\n ...change.oldNode.relationships,\n },\n },\n };\n\n mergeEmpty(ret.node.relationships, Object.keys(schema.relationships));\n mergeEmpty(\n ret.oldNode.relationships,\n Object.keys(schema.relationships),\n );\n\n return ret;\n }\n case 'child':\n return change; // children only have relationships along the path to the change\n }\n };\n}\n\n/**\n * For each relationship in `schema` that does not exist\n * in `relationships`, add it with an empty stream.\n *\n * This modifies the `relationships` object in place.\n */\nexport function mergeEmpty(\n relationships: Record<string, () => Stream<Node>>,\n relationshipNames: string[],\n) {\n for (const relName of relationshipNames) {\n if (relationships[relName] === undefined) {\n relationships[relName] = () => emptyArray;\n }\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport {identity} from '../../../shared/src/sentinels.ts';\nimport type {Change} from './change.ts';\nimport {type Node} from './data.ts';\nimport type {FanOut} from './fan-out.ts';\nimport {\n throwFilterOutput,\n type FilterInput,\n type FilterOperator,\n type FilterOutput,\n} from './filter-operators.ts';\nimport 
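
In `mergeRelationships` above, the spread order (`{...right..., ...left...}`) means the left change's relationships win and the right's only fill in the gaps. A sketch with hypothetical streams (`streamA`, `streamB`, `streamC`; both changes must carry the same row):

```ts
const row = {id: 1};
const merged = mergeRelationships(
  {type: 'add', node: {row, relationships: {comments: streamA}}},
  {type: 'add', node: {row, relationships: {comments: streamB, labels: streamC}}},
);
// merged.node.relationships -> {comments: streamA, labels: streamC}
// left's `comments` wins; right contributes only the missing `labels`.
```
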
{pushAccumulatedChanges} from './push-accumulated.ts';\nimport type {SourceSchema} from './schema.ts';\n\n/**\n * The FanIn operator merges multiple streams into one.\n * It eliminates duplicates and must be paired with a fan-out operator\n * somewhere upstream of the fan-in.\n *\n * issue\n * |\n * fan-out\n * / \\\n * a b\n * \\ /\n * fan-in\n * |\n */\nexport class FanIn implements FilterOperator {\n readonly #inputs: readonly FilterInput[];\n readonly #schema: SourceSchema;\n #output: FilterOutput = throwFilterOutput;\n #accumulatedPushes: Change[] = [];\n\n constructor(fanOut: FanOut, inputs: FilterInput[]) {\n this.#inputs = inputs;\n this.#schema = fanOut.getSchema();\n for (const input of inputs) {\n input.setFilterOutput(this);\n assert(this.#schema === input.getSchema(), `Schema mismatch in fan-in`);\n }\n }\n\n setFilterOutput(output: FilterOutput): void {\n this.#output = output;\n }\n\n destroy(): void {\n for (const input of this.#inputs) {\n input.destroy();\n }\n }\n\n getSchema() {\n return this.#schema;\n }\n\n filter(node: Node, cleanup: boolean): boolean {\n return this.#output.filter(node, cleanup);\n }\n\n push(change: Change) {\n this.#accumulatedPushes.push(change);\n }\n\n fanOutDonePushingToAllBranches(fanOutChangeType: Change['type']) {\n if (this.#inputs.length === 0) {\n assert(\n this.#accumulatedPushes.length === 0,\n 'If there are no inputs then fan-in should not receive any pushes.',\n );\n return;\n }\n\n pushAccumulatedChanges(\n this.#accumulatedPushes,\n this.#output,\n this,\n fanOutChangeType,\n identity,\n identity,\n );\n }\n}\n", "import {must} from '../../../shared/src/must.ts';\nimport type {Change} from './change.ts';\nimport type {FanIn} from './fan-in.ts';\nimport type {Node} from './data.ts';\nimport type {\n FilterInput,\n FilterOperator,\n FilterOutput,\n} from './filter-operators.ts';\n\n/**\n * Forks a stream into multiple streams.\n * Is meant to be paired with a `FanIn` operator which will\n * later merge the forks back together.\n */\nexport class FanOut implements FilterOperator {\n readonly #input: FilterInput;\n readonly #outputs: FilterOutput[] = [];\n #fanIn: FanIn | undefined;\n #destroyCount: number = 0;\n\n constructor(input: FilterInput) {\n this.#input = input;\n input.setFilterOutput(this);\n }\n\n setFanIn(fanIn: FanIn) {\n this.#fanIn = fanIn;\n }\n\n setFilterOutput(output: FilterOutput): void {\n this.#outputs.push(output);\n }\n\n destroy(): void {\n if (this.#destroyCount < this.#outputs.length) {\n ++this.#destroyCount;\n if (this.#destroyCount === this.#outputs.length) {\n this.#input.destroy();\n }\n } else {\n throw new Error('FanOut already destroyed once for each output');\n }\n }\n\n getSchema() {\n return this.#input.getSchema();\n }\n\n filter(node: Node, cleanup: boolean): boolean {\n let result = false;\n for (const output of this.#outputs) {\n result = output.filter(node, cleanup) || result;\n // Cleanup needs to be forwarded to all outputs, don't short circuit\n // cleanup. 
For non-cleanup we can short-circuit on first true.\n if (!cleanup && result) {\n return true;\n }\n }\n return result;\n }\n\n push(change: Change) {\n for (const out of this.#outputs) {\n out.push(change, this);\n }\n must(\n this.#fanIn,\n 'fan-out must have a corresponding fan-in set!',\n ).fanOutDonePushingToAllBranches(change.type);\n }\n}\n", "import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {EditChange} from './change.ts';\nimport type {InputBase, Output} from './operator.ts';\n\n/**\n * This takes an {@linkcode EditChange} and a predicate that determines if a row\n * should be present based on the row's data. It then splits the change and\n * pushes the appropriate changes to the output based on the predicate.\n */\nexport function maybeSplitAndPushEditChange(\n change: EditChange,\n predicate: (row: Row) => boolean,\n output: Output,\n pusher: InputBase,\n) {\n const oldWasPresent = predicate(change.oldNode.row);\n const newIsPresent = predicate(change.node.row);\n\n if (oldWasPresent && newIsPresent) {\n output.push(change, pusher);\n } else if (oldWasPresent && !newIsPresent) {\n output.push(\n {\n type: 'remove',\n node: change.oldNode,\n },\n pusher,\n );\n } else if (!oldWasPresent && newIsPresent) {\n output.push(\n {\n type: 'add',\n node: change.node,\n },\n pusher,\n );\n }\n}\n", "import {unreachable} from '../../../shared/src/asserts.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport {maybeSplitAndPushEditChange} from './maybe-split-and-push-edit-change.ts';\nimport type {InputBase, Output} from './operator.ts';\n\nexport function filterPush(\n change: Change,\n output: Output,\n pusher: InputBase,\n predicate?: ((row: Row) => boolean) | undefined,\n) {\n if (!predicate) {\n output.push(change, pusher);\n return;\n }\n switch (change.type) {\n case 'add':\n case 'remove':\n if (predicate(change.node.row)) {\n output.push(change, pusher);\n }\n break;\n case 'child':\n if (predicate(change.node.row)) {\n output.push(change, pusher);\n }\n break;\n case 'edit':\n maybeSplitAndPushEditChange(change, predicate, output, pusher);\n break;\n default:\n unreachable(change);\n }\n}\n", "import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport {\n throwFilterOutput,\n type FilterInput,\n type FilterOperator,\n type FilterOutput,\n} from './filter-operators.ts';\nimport {filterPush} from './filter-push.ts';\nimport {type Node} from './data.ts';\nimport type {SourceSchema} from './schema.ts';\n\n/**\n * The Filter operator filters data through a predicate. 
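
`maybeSplitAndPushEditChange` above enumerates the four predicate outcomes for an edit, and `filterPush` routes every change type through the predicate. Summarized as a sketch (`change`, `output`, and `pusher` are assumed to exist):

```ts
// old row matches? | new row matches? | what is pushed downstream
//       yes        |       yes        | the edit, unchanged
//       yes        |       no         | remove(oldNode)
//       no         |       yes        | add(node)
//       no         |       no         | nothing
filterPush(change, output, pusher, row => row.status === 'open');
```
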
It is stateless.\n *\n * The predicate must be pure.\n */\nexport class Filter implements FilterOperator {\n readonly #input: FilterInput;\n readonly #predicate: (row: Row) => boolean;\n\n #output: FilterOutput = throwFilterOutput;\n\n constructor(input: FilterInput, predicate: (row: Row) => boolean) {\n this.#input = input;\n this.#predicate = predicate;\n input.setFilterOutput(this);\n }\n\n filter(node: Node, cleanup: boolean): boolean {\n return this.#predicate(node.row) && this.#output.filter(node, cleanup);\n }\n\n setFilterOutput(output: FilterOutput) {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n push(change: Change) {\n filterPush(change, this.#output, this, this.#predicate);\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport {stringCompare} from '../../../shared/src/string-compare.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {\n Condition,\n SimpleCondition,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport {valuesEqual} from './data.ts';\n\nexport type Constraint = {\n readonly [key: string]: Value;\n};\n\nexport function constraintMatchesRow(\n constraint: Constraint,\n row: Row,\n): boolean {\n for (const key in constraint) {\n if (!valuesEqual(row[key], constraint[key])) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Constraints are compatible if:\n * 1. They do not have any keys in common\n * 2. They have keys in common, but the values for those keys are equal\n */\nexport function constraintsAreCompatible(\n left: Constraint,\n right: Constraint,\n): boolean {\n for (const key in left) {\n if (key in right && !valuesEqual(left[key], right[key])) {\n return false;\n }\n }\n return true;\n}\n\nexport function constraintMatchesPrimaryKey(\n constraint: Constraint,\n primary: PrimaryKey,\n): boolean {\n const constraintKeys = Object.keys(constraint);\n\n if (constraintKeys.length !== primary.length) {\n return false;\n }\n\n // Primary key is always sorted\n // Constraint does not have to be sorted\n constraintKeys.sort(stringCompare);\n\n for (let i = 0; i < constraintKeys.length; i++) {\n if (constraintKeys[i] !== primary[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Pulls top level `and` components out of a condition tree.\n * The resulting array of simple conditions would match a superset of\n * values that the original condition would match.\n *\n * Examples:\n * a AND b OR c\n *\n * In this case we cannot pull anything because the `or` is at the top level.\n *\n * a AND b AND c\n * We can pull all three.\n *\n * a AND (b OR c)\n * We can only pull `a`.\n */\nexport function pullSimpleAndComponents(\n condition: Condition,\n): SimpleCondition[] {\n if (condition.type === 'and') {\n return condition.conditions.flatMap(pullSimpleAndComponents);\n }\n\n if (condition.type === 'simple') {\n return [condition];\n }\n\n if (condition.type === 'or' && condition.conditions.length === 1) {\n return pullSimpleAndComponents(condition.conditions[0]);\n }\n\n return [];\n}\n\n/**\n * Checks if the supplied filters constitute a primary key lookup.\n * If so, returns the constraint that would be used to look up the primary key.\n * If not, returns undefined.\n */\nexport function primaryKeyConstraintFromFilters(\n condition: Condition | undefined,\n 
primary: PrimaryKey,\n): Constraint | undefined {\n if (condition === undefined) {\n return undefined;\n }\n\n const conditions = pullSimpleAndComponents(condition);\n if (conditions.length === 0) {\n return undefined;\n }\n\n const ret: Writable<Constraint> = {};\n for (const subCondition of conditions) {\n if (subCondition.op === '=') {\n const column = extractColumn(subCondition);\n if (column !== undefined) {\n if (!primary.includes(column.name)) {\n continue;\n }\n ret[column.name] = column.value;\n }\n }\n }\n\n if (Object.keys(ret).length !== primary.length) {\n return undefined;\n }\n\n return ret;\n}\n\nfunction extractColumn(\n condition: SimpleCondition,\n): {name: string; value: Value} | undefined {\n if (condition.left.type === 'column') {\n assert(condition.right.type === 'literal');\n return {name: condition.left.name, value: condition.right.value};\n }\n\n return undefined;\n}\n\ndeclare const TESTING: boolean;\n\nexport class SetOfConstraint {\n #data: Constraint[] = [];\n\n constructor() {\n // Only used in testing\n assert(TESTING);\n }\n\n #indexOf(value: Constraint): number {\n return this.#data.findIndex(v => constraintEquals(v, value));\n }\n\n has(value: Constraint): boolean {\n return this.#indexOf(value) !== -1;\n }\n\n add(value: Constraint): this {\n if (!this.has(value)) {\n this.#data.push(value);\n }\n return this;\n }\n}\n\nfunction constraintEquals(a: Constraint, b: Constraint): boolean {\n const aEntries = Object.entries(a);\n const bEntries = Object.entries(b);\n if (aEntries.length !== bEntries.length) {\n return false;\n }\n for (let i = 0; i < aEntries.length; i++) {\n if (\n aEntries[i][0] !== bEntries[i][0] ||\n !valuesEqual(aEntries[i][1], bEntries[i][1])\n ) {\n return false;\n }\n }\n return true;\n}\n", "import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\nimport {compareValues, valuesEqual, type Node} from './data.ts';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {CompoundKey} from '../../../zero-protocol/src/ast.ts';\n\nexport type JoinChangeOverlay = {\n change: Change;\n position: Row | undefined;\n};\n\nexport function* generateWithOverlay(\n stream: Stream<Node>,\n overlay: Change,\n schema: SourceSchema,\n): Stream<Node> {\n let applied = false;\n let editOldApplied = false;\n let editNewApplied = false;\n for (const node of stream) {\n let yieldNode = true;\n if (!applied) {\n switch (overlay.type) {\n case 'add': {\n if (schema.compareRows(overlay.node.row, node.row) === 0) {\n applied = true;\n yieldNode = false;\n }\n break;\n }\n case 'remove': {\n if (schema.compareRows(overlay.node.row, node.row) < 0) {\n applied = true;\n yield overlay.node;\n }\n break;\n }\n case 'edit': {\n if (\n !editOldApplied &&\n schema.compareRows(overlay.oldNode.row, node.row) < 0\n ) {\n editOldApplied = true;\n if (editNewApplied) {\n applied = true;\n }\n yield overlay.oldNode;\n }\n if (\n !editNewApplied &&\n schema.compareRows(overlay.node.row, node.row) === 0\n ) {\n editNewApplied = true;\n if (editOldApplied) {\n applied = true;\n }\n yieldNode = false;\n }\n break;\n }\n case 'child': {\n if (schema.compareRows(overlay.node.row, node.row) === 0) {\n applied = true;\n yield {\n row: node.row,\n relationships: {\n ...node.relationships,\n [overlay.child.relationshipName]: () =>\n generateWithOverlay(\n node.relationships[overlay.child.relationshipName](),\n overlay.child.change,\n 
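
`primaryKeyConstraintFromFilters` above only succeeds when top-level AND-ed `=` conditions cover every primary-key column. A sketch using the condition shape implied by `extractColumn` (hypothetical data; a top-level OR with more than one branch yields no simple components and therefore `undefined`):

```ts
const cond = {
  type: 'and',
  conditions: [
    {type: 'simple', op: '=', left: {type: 'column', name: 'id'}, right: {type: 'literal', value: 42}},
    {type: 'simple', op: '=', left: {type: 'column', name: 'status'}, right: {type: 'literal', value: 'open'}},
  ],
} as Condition;

primaryKeyConstraintFromFilters(cond, ['id']); // {id: 42} — pk fully covered
```
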
schema.relationships[overlay.child.relationshipName],\n ),\n },\n };\n yieldNode = false;\n }\n break;\n }\n }\n }\n if (yieldNode) {\n yield node;\n }\n }\n if (!applied) {\n if (overlay.type === 'remove') {\n applied = true;\n yield overlay.node;\n } else if (overlay.type === 'edit') {\n assert(editNewApplied);\n editOldApplied = true;\n applied = true;\n yield overlay.oldNode;\n }\n }\n\n assert(applied);\n}\n\nexport function rowEqualsForCompoundKey(\n a: Row,\n b: Row,\n key: CompoundKey,\n): boolean {\n for (let i = 0; i < key.length; i++) {\n if (compareValues(a[key[i]], b[key[i]]) !== 0) {\n return false;\n }\n }\n return true;\n}\n\nexport function isJoinMatch(\n parent: Row,\n parentKey: CompoundKey,\n child: Row,\n childKey: CompoundKey,\n) {\n for (let i = 0; i < parentKey.length; i++) {\n if (!valuesEqual(parent[parentKey[i]], child[childKey[i]])) {\n return false;\n }\n }\n return true;\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {binarySearch} from '../../../shared/src/binary-search.ts';\nimport {emptyArray} from '../../../shared/src/sentinels.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {CompoundKey, System} from '../../../zero-protocol/src/ast.ts';\nimport type {Change} from './change.ts';\nimport {constraintsAreCompatible, type Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport {\n generateWithOverlay,\n isJoinMatch,\n rowEqualsForCompoundKey,\n type JoinChangeOverlay,\n} from './join-utils.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Output,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first, type Stream} from './stream.ts';\n\ntype Args = {\n parent: Input;\n child: Input;\n // The nth key in childKey corresponds to the nth key in parentKey.\n parentKey: CompoundKey;\n childKey: CompoundKey;\n\n relationshipName: string;\n hidden: boolean;\n system: System;\n};\n\n/**\n * An *inner* join which fetches nodes from its child input first and then\n * fetches their related nodes from its parent input. Output nodes are the\n * nodes from parent input (in parent input order), which have at least one\n * related child. These output nodes have a new relationship added to them,\n * which has the name `relationshipName`. 
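
`rowEqualsForCompoundKey` and `isJoinMatch` above pair columns positionally: the nth `parentKey` column is compared against the nth `childKey` column. A sketch with hypothetical rows:

```ts
isJoinMatch(
  {id: 1, org: 'a'}, ['id', 'org'],                     // parent row, parentKey
  {issueId: 1, issueOrg: 'a'}, ['issueId', 'issueOrg'], // child row, childKey
); // true — 1 matches 1 and 'a' matches 'a' under valuesEqual
```
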
The value of the relationship is a\n * stream of related nodes from the child input (in child input order).\n */\nexport class FlippedJoin implements Input {\n readonly #parent: Input;\n readonly #child: Input;\n readonly #parentKey: CompoundKey;\n readonly #childKey: CompoundKey;\n readonly #relationshipName: string;\n readonly #schema: SourceSchema;\n\n #output: Output = throwOutput;\n\n #inprogressChildChange: JoinChangeOverlay | undefined;\n\n constructor({\n parent,\n child,\n parentKey,\n childKey,\n relationshipName,\n hidden,\n system,\n }: Args) {\n assert(parent !== child, 'Parent and child must be different operators');\n assert(\n parentKey.length === childKey.length,\n 'The parentKey and childKey keys must have same length',\n );\n this.#parent = parent;\n this.#child = child;\n this.#parentKey = parentKey;\n this.#childKey = childKey;\n this.#relationshipName = relationshipName;\n\n const parentSchema = parent.getSchema();\n const childSchema = child.getSchema();\n this.#schema = {\n ...parentSchema,\n relationships: {\n ...parentSchema.relationships,\n [relationshipName]: {\n ...childSchema,\n isHidden: hidden,\n system,\n },\n },\n };\n\n parent.setOutput({\n push: (change: Change) => this.#pushParent(change),\n });\n child.setOutput({\n push: (change: Change) => this.#pushChild(change),\n });\n }\n\n destroy(): void {\n this.#child.destroy();\n this.#parent.destroy();\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n getSchema(): SourceSchema {\n return this.#schema;\n }\n\n // TODO: When parentKey is the parent's primary key (or more\n // generally when the parent cardinality is expected to be small) a different\n // algorithm should be used: For each child node, fetch all parent nodes\n // eagerly and then sort using quicksort.\n *fetch(req: FetchRequest): Stream<Node> {\n const childNodes = [...this.#child.fetch({})];\n // FlippedJoin's split-push change overlay logic is largely\n // the same as Join's with the exception of remove. For remove,\n // the change is undone here, and then re-applied to parents with order\n // less than or equal to change.position below. 
This is necessary\n // because if the removed node was the last related child, the\n // related parents with position greater than change.position\n // (which should not yet have the node removed) would not even\n // be fetched here, and would be absent from the output altogether.\n if (this.#inprogressChildChange?.change.type === 'remove') {\n const removedNode = this.#inprogressChildChange.change.node;\n const compare = this.#child.getSchema().compareRows;\n const insertPos = binarySearch(childNodes.length, i =>\n compare(removedNode.row, childNodes[i].row),\n );\n childNodes.splice(insertPos, 0, removedNode);\n }\n const parentIterators: Iterator<Node>[] = [];\n let threw = false;\n try {\n for (const childNode of childNodes) {\n // TODO: consider adding the ability to pass a set of\n // ids to fetch, and have them applied to sqlite using IN.\n const constraintFromChild: Writable<Constraint> = {};\n for (let i = 0; i < this.#parentKey.length; i++) {\n constraintFromChild[this.#parentKey[i]] =\n childNode.row[this.#childKey[i]];\n }\n if (\n req.constraint &&\n !constraintsAreCompatible(constraintFromChild, req.constraint)\n ) {\n parentIterators.push(emptyArray[Symbol.iterator]());\n } else {\n const stream = this.#parent.fetch({\n ...req,\n constraint: {\n ...req.constraint,\n ...constraintFromChild,\n },\n });\n const iterator = stream[Symbol.iterator]();\n parentIterators.push(iterator);\n }\n }\n const nextParentNodes: (Node | null)[] = [];\n for (let i = 0; i < parentIterators.length; i++) {\n const iter = parentIterators[i];\n const result = iter.next();\n nextParentNodes[i] = result.done ? null : result.value;\n }\n\n while (true) {\n let minParentNode = null;\n let minParentNodeChildIndexes: number[] = [];\n for (let i = 0; i < nextParentNodes.length; i++) {\n const parentNode = nextParentNodes[i];\n if (parentNode === null) {\n continue;\n }\n if (minParentNode === null) {\n minParentNode = parentNode;\n minParentNodeChildIndexes.push(i);\n } else {\n const compareResult =\n this.#schema.compareRows(parentNode.row, minParentNode.row) *\n (req.reverse ? -1 : 1);\n if (compareResult === 0) {\n minParentNodeChildIndexes.push(i);\n } else if (compareResult < 0) {\n minParentNode = parentNode;\n minParentNodeChildIndexes = [i];\n }\n }\n }\n if (minParentNode === null) {\n return;\n }\n const relatedChildNodes: Node[] = [];\n for (const minParentNodeChildIndex of minParentNodeChildIndexes) {\n relatedChildNodes.push(childNodes[minParentNodeChildIndex]);\n const iter = parentIterators[minParentNodeChildIndex];\n const result = iter.next();\n nextParentNodes[minParentNodeChildIndex] = result.done\n ? 
null\n            : result.value;\n        }\n        let overlaidRelatedChildNodes = relatedChildNodes;\n        if (\n          this.#inprogressChildChange &&\n          this.#inprogressChildChange.position &&\n          isJoinMatch(\n            this.#inprogressChildChange.change.node.row,\n            this.#childKey,\n            minParentNode.row,\n            this.#parentKey,\n          )\n        ) {\n          const hasInprogressChildChangeBeenPushedForMinParentNode =\n            this.#parent\n              .getSchema()\n              .compareRows(\n                minParentNode.row,\n                this.#inprogressChildChange.position,\n              ) <= 0;\n          if (this.#inprogressChildChange.change.type === 'remove') {\n            if (hasInprogressChildChangeBeenPushedForMinParentNode) {\n              // Remove from relatedChildNodes since the removed child\n              // was inserted into childNodes above.\n              overlaidRelatedChildNodes = relatedChildNodes.filter(\n                n => n !== this.#inprogressChildChange?.change.node,\n              );\n            }\n          } else if (!hasInprogressChildChangeBeenPushedForMinParentNode) {\n            overlaidRelatedChildNodes = [\n              ...generateWithOverlay(\n                relatedChildNodes,\n                this.#inprogressChildChange.change,\n                this.#child.getSchema(),\n              ),\n            ];\n          }\n        }\n\n        // Yield the node only if it still has relationship nodes after the overlay.\n        if (overlaidRelatedChildNodes.length > 0) {\n          yield {\n            ...minParentNode,\n            relationships: {\n              ...minParentNode.relationships,\n              [this.#relationshipName]: () => overlaidRelatedChildNodes,\n            },\n          };\n        }\n      }\n    } catch (e) {\n      threw = true;\n      for (const iter of parentIterators) {\n        try {\n          iter.throw?.(e);\n        } catch (_cleanupError) {\n          // error in the iter.throw cleanup,\n          // catch so other iterators are cleaned up\n        }\n      }\n      throw e;\n    } finally {\n      if (!threw) {\n        for (const iter of parentIterators) {\n          try {\n            iter.return?.();\n          } catch (_cleanupError) {\n            // error in the iter.return cleanup,\n            // catch so other iterators are cleaned up\n          }\n        }\n      }\n    }\n  }\n\n  *cleanup(_req: FetchRequest): Stream<Node> {}\n\n  #pushChild(change: Change): void {\n    const pushChildChange = (exists?: boolean) => {\n      this.#inprogressChildChange = {\n        change,\n        position: undefined,\n      };\n      try {\n        const parentNodeStream = this.#parent.fetch({\n          constraint: Object.fromEntries(\n            this.#parentKey.map((key, i) => [\n              key,\n              change.node.row[this.#childKey[i]],\n            ]),\n          ),\n        });\n        for (const parentNode of parentNodeStream) {\n          this.#inprogressChildChange = {\n            change,\n            position: parentNode.row,\n          };\n          const childNodeStream = () =>\n            this.#child.fetch({\n              constraint: Object.fromEntries(\n                this.#childKey.map((key, i) => [\n                  key,\n                  parentNode.row[this.#parentKey[i]],\n                ]),\n              ),\n            });\n          if (!exists) {\n            for (const childNode of childNodeStream()) {\n              if (\n                this.#child\n                  .getSchema()\n                  .compareRows(childNode.row, change.node.row) !== 0\n              ) {\n                exists = true;\n                break;\n              }\n            }\n          }\n          if (exists) {\n            this.#output.push(\n              {\n                type: 'child',\n                node: {\n                  ...parentNode,\n                  relationships: {\n                    ...parentNode.relationships,\n                    [this.#relationshipName]: childNodeStream,\n                  },\n                },\n                child: {\n                  relationshipName: this.#relationshipName,\n                  change,\n                },\n              },\n              this,\n            );\n          } else {\n            this.#output.push(\n              {\n                ...change,\n                node: {\n                  ...parentNode,\n                  relationships: {\n                    ...parentNode.relationships,\n                    [this.#relationshipName]: () => [change.node],\n                  },\n                },\n              },\n              this,\n            );\n          }\n        }\n      } finally {\n        this.#inprogressChildChange = undefined;\n      }\n    };\n\n    switch (change.type) {\n      case 'add':\n      case 'remove':\n        pushChildChange();\n        break;\n      case 'edit': {\n        assert(\n          rowEqualsForCompoundKey(\n            change.oldNode.row,\n            change.node.row,\n            this.#childKey,\n          ),\n          `Child edit must not change relationship.`,\n        );\n        pushChildChange(true);\n        break;\n      }\n      case 'child':\n        
pushChildChange(true);\n break;\n }\n }\n\n #pushParent(change: Change): void {\n const childNodeStream = (node: Node) => () =>\n this.#child.fetch({\n constraint: Object.fromEntries(\n this.#childKey.map((key, i) => [key, node.row[this.#parentKey[i]]]),\n ),\n });\n\n const flip = (node: Node) => ({\n ...node,\n relationships: {\n ...node.relationships,\n [this.#relationshipName]: childNodeStream(node),\n },\n });\n\n // If no related child don't push as this is an inner join.\n if (first(childNodeStream(change.node)()) === undefined) {\n return;\n }\n\n switch (change.type) {\n case 'add':\n case 'remove':\n case 'child': {\n this.#output.push(\n {\n ...change,\n node: flip(change.node),\n },\n this,\n );\n break;\n }\n case 'edit': {\n assert(\n rowEqualsForCompoundKey(\n change.oldNode.row,\n change.node.row,\n this.#parentKey,\n ),\n `Parent edit must not change relationship.`,\n );\n this.#output.push(\n {\n type: 'edit',\n oldNode: flip(change.oldNode),\n node: flip(change.node),\n },\n this,\n );\n break;\n }\n default:\n unreachable(change);\n }\n }\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport type {CompoundKey, System} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport type {Change, ChildChange} from './change.ts';\nimport type {Node} from './data.ts';\nimport {\n generateWithOverlay,\n isJoinMatch,\n rowEqualsForCompoundKey,\n type JoinChangeOverlay,\n} from './join-utils.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Output,\n type Storage,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {take, type Stream} from './stream.ts';\n\ntype Args = {\n parent: Input;\n child: Input;\n storage: Storage;\n // The nth key in parentKey corresponds to the nth key in childKey.\n parentKey: CompoundKey;\n childKey: CompoundKey;\n relationshipName: string;\n hidden: boolean;\n system: System;\n};\n\n/**\n * The Join operator joins the output from two upstream inputs. Zero's join\n * is a little different from SQL's join in that we output hierarchical data,\n * not a flat table. This makes it a lot more useful for UI programming and\n * avoids duplicating tons of data like left join would.\n *\n * The Nodes output from Join have a new relationship added to them, which has\n * the name #relationshipName. 
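To make the hierarchical output concrete, here is a minimal sketch of the Node shape Join produces, under a hypothetical issue/comment schema (the names `issueNode`, `comments`, and the row fields are illustrative, not from this package):

```ts
// A parent Node flowing out of Join: the original row plus one extra,
// lazily evaluated relationship stream keyed by #relationshipName.
// The shape follows the Node type used throughout these sources:
//   {row: Row, relationships: Record<string, () => Stream<Node>>}
const issueNode = {
  row: {id: 'i1', title: 'Fix crash'}, // hypothetical parent row
  relationships: {
    // added by Join when relationshipName === 'comments' (illustrative):
    comments: () => [
      {row: {id: 'c1', issueId: 'i1'}, relationships: {}},
      {row: {id: 'c2', issueId: 'i1'}, relationships: {}},
    ],
  },
};
```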
The value of the relationship is a stream of\n * child nodes which are the corresponding values from the child source.\n */\nexport class Join implements Input {\n readonly #parent: Input;\n readonly #child: Input;\n readonly #storage: Storage;\n readonly #parentKey: CompoundKey;\n readonly #childKey: CompoundKey;\n readonly #relationshipName: string;\n readonly #schema: SourceSchema;\n\n #output: Output = throwOutput;\n\n #inprogressChildChange: JoinChangeOverlay | undefined;\n\n constructor({\n parent,\n child,\n storage,\n parentKey,\n childKey,\n relationshipName,\n hidden,\n system,\n }: Args) {\n assert(parent !== child, 'Parent and child must be different operators');\n assert(\n parentKey.length === childKey.length,\n 'The parentKey and childKey keys must have same length',\n );\n this.#parent = parent;\n this.#child = child;\n this.#storage = storage;\n this.#parentKey = parentKey;\n this.#childKey = childKey;\n this.#relationshipName = relationshipName;\n\n const parentSchema = parent.getSchema();\n const childSchema = child.getSchema();\n this.#schema = {\n ...parentSchema,\n relationships: {\n ...parentSchema.relationships,\n [relationshipName]: {\n ...childSchema,\n isHidden: hidden,\n system,\n },\n },\n };\n\n parent.setOutput({\n push: (change: Change) => this.#pushParent(change),\n });\n child.setOutput({\n push: (change: Change) => this.#pushChild(change),\n });\n }\n\n destroy(): void {\n this.#parent.destroy();\n this.#child.destroy();\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n getSchema(): SourceSchema {\n return this.#schema;\n }\n\n *fetch(req: FetchRequest): Stream<Node> {\n for (const parentNode of this.#parent.fetch(req)) {\n yield this.#processParentNode(\n parentNode.row,\n parentNode.relationships,\n 'fetch',\n );\n }\n }\n\n *cleanup(req: FetchRequest): Stream<Node> {\n for (const parentNode of this.#parent.cleanup(req)) {\n yield this.#processParentNode(\n parentNode.row,\n parentNode.relationships,\n 'cleanup',\n );\n }\n }\n\n #pushParent(change: Change): void {\n switch (change.type) {\n case 'add':\n this.#output.push(\n {\n type: 'add',\n node: this.#processParentNode(\n change.node.row,\n change.node.relationships,\n 'fetch',\n ),\n },\n this,\n );\n break;\n case 'remove':\n this.#output.push(\n {\n type: 'remove',\n node: this.#processParentNode(\n change.node.row,\n change.node.relationships,\n 'cleanup',\n ),\n },\n this,\n );\n break;\n case 'child':\n this.#output.push(\n {\n type: 'child',\n node: this.#processParentNode(\n change.node.row,\n change.node.relationships,\n 'fetch',\n ),\n child: change.child,\n },\n this,\n );\n break;\n case 'edit': {\n // Assert the edit could not change the relationship.\n assert(\n rowEqualsForCompoundKey(\n change.oldNode.row,\n change.node.row,\n this.#parentKey,\n ),\n `Parent edit must not change relationship.`,\n );\n this.#output.push(\n {\n type: 'edit',\n oldNode: this.#processParentNode(\n change.oldNode.row,\n change.oldNode.relationships,\n 'cleanup',\n ),\n node: this.#processParentNode(\n change.node.row,\n change.node.relationships,\n 'fetch',\n ),\n },\n this,\n );\n break;\n }\n default:\n unreachable(change);\n }\n }\n\n #pushChild(change: Change): void {\n const pushChildChange = (childRow: Row, change: Change) => {\n this.#inprogressChildChange = {\n change,\n position: undefined,\n };\n try {\n const parentNodes = this.#parent.fetch({\n constraint: Object.fromEntries(\n this.#parentKey.map((key, i) => [key, childRow[this.#childKey[i]]]),\n ),\n });\n\n for 
(const parentNode of parentNodes) {\n this.#inprogressChildChange.position = parentNode.row;\n const childChange: ChildChange = {\n type: 'child',\n node: this.#processParentNode(\n parentNode.row,\n parentNode.relationships,\n 'fetch',\n ),\n child: {\n relationshipName: this.#relationshipName,\n change,\n },\n };\n this.#output.push(childChange, this);\n }\n } finally {\n this.#inprogressChildChange = undefined;\n }\n };\n\n switch (change.type) {\n case 'add':\n case 'remove':\n pushChildChange(change.node.row, change);\n break;\n case 'child':\n pushChildChange(change.node.row, change);\n break;\n case 'edit': {\n const childRow = change.node.row;\n const oldChildRow = change.oldNode.row;\n // Assert the edit could not change the relationship.\n assert(\n rowEqualsForCompoundKey(oldChildRow, childRow, this.#childKey),\n 'Child edit must not change relationship.',\n );\n pushChildChange(childRow, change);\n break;\n }\n\n default:\n unreachable(change);\n }\n }\n\n #processParentNode(\n parentNodeRow: Row,\n parentNodeRelations: Record<string, () => Stream<Node>>,\n mode: ProcessParentMode,\n ): Node {\n let method: ProcessParentMode = mode;\n let storageUpdated = false;\n const childStream = () => {\n if (!storageUpdated) {\n if (mode === 'cleanup') {\n this.#storage.del(\n makeStorageKey(\n this.#parentKey,\n this.#parent.getSchema().primaryKey,\n parentNodeRow,\n ),\n );\n const empty =\n [\n ...take(\n this.#storage.scan({\n prefix: makeStorageKeyPrefix(parentNodeRow, this.#parentKey),\n }),\n 1,\n ),\n ].length === 0;\n method = empty ? 'cleanup' : 'fetch';\n }\n\n storageUpdated = true;\n // Defer the work to update storage until the child stream\n // is actually accessed\n if (mode === 'fetch') {\n this.#storage.set(\n makeStorageKey(\n this.#parentKey,\n this.#parent.getSchema().primaryKey,\n parentNodeRow,\n ),\n true,\n );\n }\n }\n\n const stream = this.#child[method]({\n constraint: Object.fromEntries(\n this.#childKey.map((key, i) => [\n key,\n parentNodeRow[this.#parentKey[i]],\n ]),\n ),\n });\n\n if (\n this.#inprogressChildChange &&\n isJoinMatch(\n parentNodeRow,\n this.#parentKey,\n this.#inprogressChildChange.change.node.row,\n this.#childKey,\n ) &&\n this.#inprogressChildChange.position &&\n this.#schema.compareRows(\n parentNodeRow,\n this.#inprogressChildChange.position,\n ) > 0\n ) {\n return generateWithOverlay(\n stream,\n this.#inprogressChildChange.change,\n this.#child.getSchema(),\n );\n }\n return stream;\n };\n\n return {\n row: parentNodeRow,\n relationships: {\n ...parentNodeRelations,\n [this.#relationshipName]: childStream,\n },\n };\n }\n}\n\ntype ProcessParentMode = 'fetch' | 'cleanup';\n\n/** Exported for testing. */\nexport function makeStorageKeyForValues(values: readonly Value[]): string {\n const json = JSON.stringify(['pKeySet', ...values]);\n return json.substring(1, json.length - 1) + ',';\n}\n\n/** Exported for testing. */\nexport function makeStorageKeyPrefix(row: Row, key: CompoundKey): string {\n return makeStorageKeyForValues(key.map(k => row[k]));\n}\n\n/** Exported for testing.\n * This storage key tracks the primary keys seen for each unique\n * value joined on. 
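A worked example of the key layout these helpers produce (the values 'u1' and 'i1' are illustrative):

```ts
// JSON.stringify(['pKeySet', 'u1', 'i1']) === '["pKeySet","u1","i1"]';
// makeStorageKeyForValues strips the surrounding brackets and appends a
// trailing comma:
makeStorageKeyForValues(['u1', 'i1']);
// => '"pKeySet","u1","i1",'

// So for join key ['ownerId'], primary key ['id'], and row
// {ownerId: 'u1', id: 'i1'}:
makeStorageKey(['ownerId'], ['id'], {ownerId: 'u1', id: 'i1'});
// => '"pKeySet","u1","i1",'
makeStorageKeyPrefix({ownerId: 'u1'}, ['ownerId']);
// => '"pKeySet","u1",'  (a prefix of the full key, enabling prefix scans)
```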
This is used to know when to cleanup a child's state.\n */\nexport function makeStorageKey(\n key: CompoundKey,\n primaryKey: PrimaryKey,\n row: Row,\n): string {\n const values: Value[] = key.map(k => row[k]);\n for (const key of primaryKey) {\n values.push(row[key]);\n }\n return makeStorageKeyForValues(values);\n}\n", "import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {AddChange, Change, ChildChange, RemoveChange} from './change.ts';\nimport type {Comparator, Node} from './data.ts';\nimport {maybeSplitAndPushEditChange} from './maybe-split-and-push-edit-change.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Operator,\n type Output,\n type Start,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\nexport type Bound = {\n row: Row;\n exclusive: boolean;\n};\n\n/**\n * Skip sets the start position for the pipeline. No rows before the bound will\n * be output.\n */\nexport class Skip implements Operator {\n readonly #input: Input;\n readonly #bound: Bound;\n readonly #comparator: Comparator;\n\n #output: Output = throwOutput;\n\n constructor(input: Input, bound: Bound) {\n this.#input = input;\n this.#bound = bound;\n this.#comparator = input.getSchema().compareRows;\n input.setOutput(this);\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n fetch(req: FetchRequest): Stream<Node> {\n return this.#fetchOrCleanup('fetch', req);\n }\n\n cleanup(req: FetchRequest): Stream<Node> {\n return this.#fetchOrCleanup('fetch', req);\n }\n\n *#fetchOrCleanup(method: 'fetch' | 'cleanup', req: FetchRequest) {\n const start = this.#getStart(req);\n if (start === 'empty') {\n return;\n }\n const nodes = this.#input[method]({...req, start});\n if (!req.reverse) {\n yield* nodes;\n return;\n }\n for (const node of nodes) {\n if (!this.#shouldBePresent(node.row)) {\n return;\n }\n yield node;\n }\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n #shouldBePresent(row: Row): boolean {\n const cmp = this.#comparator(this.#bound.row, row);\n return cmp < 0 || (cmp === 0 && !this.#bound.exclusive);\n }\n\n push(change: Change): void {\n const shouldBePresent = (row: Row) => this.#shouldBePresent(row);\n if (change.type === 'edit') {\n maybeSplitAndPushEditChange(change, shouldBePresent, this.#output, this);\n return;\n }\n\n change satisfies AddChange | RemoveChange | ChildChange;\n\n if (shouldBePresent(change.node.row)) {\n this.#output.push(change, this);\n }\n }\n\n #getStart(req: FetchRequest): Start | undefined | 'empty' {\n const boundStart = {\n row: this.#bound.row,\n basis: this.#bound.exclusive ? 'after' : 'at',\n } as const;\n\n if (!req.start) {\n if (req.reverse) {\n return undefined;\n }\n return boundStart;\n }\n\n const cmp = this.#comparator(this.#bound.row, req.start.row);\n\n if (!req.reverse) {\n // The skip bound is after the requested bound. The requested bound cannot\n // be relevant because even if it was basis: 'after', the skip bound is\n // itself after the requested bound. Return the skip bound.\n if (cmp > 0) {\n return boundStart;\n }\n\n // The skip bound and requested bound are equal. If either is exclusive,\n // return that bound with exclusive. 
Otherwise, return the skip bound.\n if (cmp === 0) {\n if (this.#bound.exclusive || req.start.basis === 'after') {\n return {\n row: this.#bound.row,\n basis: 'after',\n };\n }\n return boundStart;\n }\n\n return req.start;\n }\n\n req.reverse satisfies true;\n\n // bound is after the start, but request is for reverse so results\n // must be empty\n if (cmp > 0) {\n return 'empty';\n }\n\n if (cmp === 0) {\n // if both are inclusive, the result can be the single row at bound\n // return it as start\n if (!this.#bound.exclusive && req.start.basis === 'at') {\n return boundStart;\n }\n // otherwise the results must be empty, one or both are exclusive\n // in opposite directions\n return 'empty';\n }\n\n // bound is before the start, return start\n return req.start;\n }\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {hasOwn} from '../../../shared/src/has-own.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport {assertOrderingIncludesPK} from '../builder/builder.ts';\nimport {type Change, type EditChange, type RemoveChange} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport {compareValues, type Comparator, type Node} from './data.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Operator,\n type Output,\n type Storage,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first, take, type Stream} from './stream.ts';\n\nconst MAX_BOUND_KEY = 'maxBound';\n\ntype TakeState = {\n size: number;\n bound: Row | undefined;\n};\n\ninterface TakeStorage {\n get(key: typeof MAX_BOUND_KEY): Row | undefined;\n get(key: string): TakeState | undefined;\n set(key: typeof MAX_BOUND_KEY, value: Row): void;\n set(key: string, value: TakeState): void;\n del(key: string): void;\n}\n\nexport type PartitionKey = PrimaryKey;\n\n/**\n * The Take operator is for implementing limit queries. It takes the first n\n * nodes of its input as determined by the input\u2019s comparator. 
It then keeps\n * a *bound* of the last item it has accepted so that it can evaluate whether\n * new incoming pushes should be accepted or rejected.\n *\n * Take can count rows globally or by unique value of some field.\n *\n * Maintains the invariant that its output size is always <= limit, even\n * mid processing of a push.\n */\nexport class Take implements Operator {\n readonly #input: Input;\n readonly #storage: TakeStorage;\n readonly #limit: number;\n readonly #partitionKey: PartitionKey | undefined;\n readonly #partitionKeyComparator: Comparator | undefined;\n // Fetch overlay needed for some split push cases.\n #rowHiddenFromFetch: Row | undefined;\n\n #output: Output = throwOutput;\n\n constructor(\n input: Input,\n storage: Storage,\n limit: number,\n partitionKey?: PartitionKey | undefined,\n ) {\n assert(limit >= 0);\n assertOrderingIncludesPK(\n input.getSchema().sort,\n input.getSchema().primaryKey,\n );\n input.setOutput(this);\n this.#input = input;\n this.#storage = storage as TakeStorage;\n this.#limit = limit;\n this.#partitionKey = partitionKey;\n this.#partitionKeyComparator =\n partitionKey && makePartitionKeyComparator(partitionKey);\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n *fetch(req: FetchRequest): Stream<Node> {\n if (\n !this.#partitionKey ||\n (req.constraint &&\n constraintMatchesPartitionKey(req.constraint, this.#partitionKey))\n ) {\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n const takeState = this.#storage.get(takeStateKey);\n if (!takeState) {\n yield* this.#initialFetch(req);\n return;\n }\n if (takeState.bound === undefined) {\n return;\n }\n for (const inputNode of this.#input.fetch(req)) {\n if (this.getSchema().compareRows(takeState.bound, inputNode.row) < 0) {\n return;\n }\n if (\n this.#rowHiddenFromFetch &&\n this.getSchema().compareRows(\n this.#rowHiddenFromFetch,\n inputNode.row,\n ) === 0\n ) {\n continue;\n }\n yield inputNode;\n }\n return;\n }\n // There is a partition key, but the fetch is not constrained or constrained\n // on a different key. Thus we don't have a single take state to bound by.\n // This currently only happens with nested sub-queries\n // e.g. issues include issuelabels include label. 
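A concrete reading of the bound bookkeeping described above (the limit and the rank rows are illustrative):

```ts
// With limit 3 over ascending rank, hydration accepts ranks 1, 2, 3 and
// records bound = {rank: 3}. On later pushes, while size === limit:
//  - add {rank: 5}: sorts at/after the bound, so it is rejected outright;
//  - add {rank: 2}: sorts before the bound, so the current bound row is
//    removed first (keeping output size <= limit), the new row is added,
//    and the bound moves down to the new last row of the window.
```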
We could remove this\n // case if we added a translation layer (powered by some state) in join.\n // Specifically we need joinKeyValue => parent constraint key\n const maxBound = this.#storage.get(MAX_BOUND_KEY);\n if (maxBound === undefined) {\n return;\n }\n for (const inputNode of this.#input.fetch(req)) {\n if (this.getSchema().compareRows(inputNode.row, maxBound) > 0) {\n return;\n }\n const takeStateKey = getTakeStateKey(this.#partitionKey, inputNode.row);\n const takeState = this.#storage.get(takeStateKey);\n if (\n takeState?.bound !== undefined &&\n this.getSchema().compareRows(takeState.bound, inputNode.row) >= 0\n ) {\n yield inputNode;\n }\n }\n }\n\n *#initialFetch(req: FetchRequest): Stream<Node> {\n assert(req.start === undefined);\n assert(!req.reverse);\n assert(constraintMatchesPartitionKey(req.constraint, this.#partitionKey));\n\n if (this.#limit === 0) {\n return;\n }\n\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n assert(this.#storage.get(takeStateKey) === undefined);\n\n let size = 0;\n let bound: Row | undefined;\n let downstreamEarlyReturn = true;\n let exceptionThrown = false;\n try {\n for (const inputNode of this.#input.fetch(req)) {\n yield inputNode;\n bound = inputNode.row;\n size++;\n if (size === this.#limit) {\n break;\n }\n }\n downstreamEarlyReturn = false;\n } catch (e) {\n exceptionThrown = true;\n throw e;\n } finally {\n if (!exceptionThrown) {\n this.#setTakeState(\n takeStateKey,\n size,\n bound,\n this.#storage.get(MAX_BOUND_KEY),\n );\n // If it becomes necessary to support downstream early return, this\n // assert should be removed, and replaced with code that consumes\n // the input stream until limit is reached or the input stream is\n // exhausted so that takeState is properly hydrated.\n assert(\n !downstreamEarlyReturn,\n 'Unexpected early return prevented full hydration',\n );\n }\n }\n }\n\n *cleanup(req: FetchRequest): Stream<Node> {\n assert(req.start === undefined);\n assert(constraintMatchesPartitionKey(req.constraint, this.#partitionKey));\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n this.#storage.del(takeStateKey);\n let size = 0;\n for (const inputNode of this.#input.cleanup(req)) {\n if (size === this.#limit) {\n return;\n }\n size++;\n yield inputNode;\n }\n }\n\n #getStateAndConstraint(row: Row) {\n const takeStateKey = getTakeStateKey(this.#partitionKey, row);\n const takeState = this.#storage.get(takeStateKey);\n let maxBound: Row | undefined;\n let constraint: Constraint | undefined;\n if (takeState) {\n maxBound = this.#storage.get(MAX_BOUND_KEY);\n constraint =\n this.#partitionKey &&\n Object.fromEntries(\n this.#partitionKey.map(key => [key, row[key]] as const),\n );\n }\n\n return {takeState, takeStateKey, maxBound, constraint} as\n | {\n takeState: undefined;\n takeStateKey: string;\n maxBound: undefined;\n constraint: undefined;\n }\n | {\n takeState: TakeState;\n takeStateKey: string;\n maxBound: Row | undefined;\n constraint: Constraint | undefined;\n };\n }\n\n push(change: Change): void {\n if (change.type === 'edit') {\n this.#pushEditChange(change);\n return;\n }\n\n const {takeState, takeStateKey, maxBound, constraint} =\n this.#getStateAndConstraint(change.node.row);\n if (!takeState) {\n return;\n }\n\n const {compareRows} = this.getSchema();\n\n if (change.type === 'add') {\n if (takeState.size < this.#limit) {\n this.#setTakeState(\n takeStateKey,\n takeState.size + 1,\n takeState.bound === undefined ||\n compareRows(takeState.bound, 
change.node.row) < 0\n ? change.node.row\n : takeState.bound,\n maxBound,\n );\n this.#output.push(change, this);\n return;\n }\n // size === limit\n if (\n takeState.bound === undefined ||\n compareRows(change.node.row, takeState.bound) >= 0\n ) {\n return;\n }\n // added row < bound\n let beforeBoundNode: Node | undefined;\n let boundNode: Node;\n if (this.#limit === 1) {\n boundNode = must(\n first(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n }),\n ),\n );\n } else {\n [boundNode, beforeBoundNode] = take(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n reverse: true,\n }),\n 2,\n );\n }\n const removeChange: RemoveChange = {\n type: 'remove',\n node: boundNode,\n };\n // Remove before add to maintain invariant that\n // output size <= limit.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n beforeBoundNode === undefined ||\n compareRows(change.node.row, beforeBoundNode.row) > 0\n ? change.node.row\n : beforeBoundNode.row,\n maxBound,\n );\n this.#withRowHiddenFromFetch(change.node.row, () => {\n this.#output.push(removeChange, this);\n });\n this.#output.push(change, this);\n } else if (change.type === 'remove') {\n if (takeState.bound === undefined) {\n // change is after bound\n return;\n }\n const compToBound = compareRows(change.node.row, takeState.bound);\n if (compToBound > 0) {\n // change is after bound\n return;\n }\n const [beforeBoundNode] = take(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n reverse: true,\n }),\n 1,\n );\n\n let newBound: {node: Node; push: boolean} | undefined;\n if (beforeBoundNode) {\n const push = compareRows(beforeBoundNode.row, takeState.bound) > 0;\n newBound = {\n node: beforeBoundNode,\n push,\n };\n }\n if (!newBound?.push) {\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n })) {\n const push = compareRows(node.row, takeState.bound) > 0;\n newBound = {\n node,\n push,\n };\n if (push) {\n break;\n }\n }\n }\n\n if (newBound?.push) {\n this.#output.push(change, this);\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBound.node.row,\n maxBound,\n );\n this.#output.push(\n {\n type: 'add',\n node: newBound.node,\n },\n this,\n );\n return;\n }\n this.#setTakeState(\n takeStateKey,\n takeState.size - 1,\n newBound?.node.row,\n maxBound,\n );\n this.#output.push(change, this);\n } else if (change.type === 'child') {\n // A 'child' change should be pushed to output if its row\n // is <= bound.\n if (\n takeState.bound &&\n compareRows(change.node.row, takeState.bound) <= 0\n ) {\n this.#output.push(change, this);\n }\n }\n }\n\n #pushEditChange(change: EditChange): void {\n assert(\n !this.#partitionKeyComparator ||\n this.#partitionKeyComparator(change.oldNode.row, change.node.row) === 0,\n 'Unexpected change of partition key',\n );\n\n const {takeState, takeStateKey, maxBound, constraint} =\n this.#getStateAndConstraint(change.oldNode.row);\n if (!takeState) {\n return;\n }\n\n assert(takeState.bound, 'Bound should be set');\n const {compareRows} = this.getSchema();\n const oldCmp = compareRows(change.oldNode.row, takeState.bound);\n const newCmp = compareRows(change.node.row, takeState.bound);\n\n const replaceBoundAndForwardChange = () => {\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n change.node.row,\n maxBound,\n );\n this.#output.push(change, this);\n };\n\n // The bounds row was changed.\n if (oldCmp === 0) 
{\n // The new row is the new bound.\n if (newCmp === 0) {\n // no need to update the state since we are keeping the bounds\n this.#output.push(change, this);\n return;\n }\n\n if (newCmp < 0) {\n if (this.#limit === 1) {\n replaceBoundAndForwardChange();\n return;\n }\n\n // New row will be in the result but it might not be the bounds any\n // more. We need to find the row before the bounds to determine the new\n // bounds.\n\n const beforeBoundNode = must(\n first(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n reverse: true,\n }),\n ),\n );\n\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n beforeBoundNode.row,\n maxBound,\n );\n this.#output.push(change, this);\n return;\n }\n\n assert(newCmp > 0);\n // Find the first item at the old bounds. This will be the new bounds.\n const newBoundNode = must(\n first(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n }),\n ),\n );\n\n // The next row is the new row. We can replace the bounds and keep the\n // edit change.\n if (compareRows(newBoundNode.row, change.node.row) === 0) {\n replaceBoundAndForwardChange();\n return;\n }\n\n // The new row is now outside the bounds, so we need to remove the old\n // row and add the new bounds row.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBoundNode.row,\n maxBound,\n );\n this.#withRowHiddenFromFetch(newBoundNode.row, () => {\n this.#output.push(\n {\n type: 'remove',\n node: change.oldNode,\n },\n this,\n );\n });\n this.#output.push(\n {\n type: 'add',\n node: newBoundNode,\n },\n this,\n );\n return;\n }\n\n if (oldCmp > 0) {\n assert(newCmp !== 0, 'Invalid state. Row has duplicate primary key');\n\n // Both old and new outside of bounds\n if (newCmp > 0) {\n return;\n }\n\n // old was outside, new is inside. Pushing out the old bounds\n assert(newCmp < 0);\n\n const [oldBoundNode, newBoundNode] = take(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n reverse: true,\n }),\n 2,\n );\n // Remove before add to maintain invariant that\n // output size <= limit.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBoundNode.row,\n maxBound,\n );\n this.#withRowHiddenFromFetch(change.node.row, () => {\n this.#output.push(\n {\n type: 'remove',\n node: oldBoundNode,\n },\n this,\n );\n });\n this.#output.push(\n {\n type: 'add',\n node: change.node,\n },\n this,\n );\n\n return;\n }\n\n if (oldCmp < 0) {\n assert(newCmp !== 0, 'Invalid state. Row has duplicate primary key');\n\n // Both old and new inside of bounds\n if (newCmp < 0) {\n this.#output.push(change, this);\n return;\n }\n\n // old was inside, new is larger than old bound\n\n assert(newCmp > 0);\n\n // at this point we need to find the row after the bound and use that or\n // the newRow as the new bound.\n const afterBoundNode = must(\n first(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n }),\n ),\n );\n\n // The new row is the new bound. 
Use an edit change.\n if (compareRows(afterBoundNode.row, change.node.row) === 0) {\n replaceBoundAndForwardChange();\n return;\n }\n\n this.#output.push(\n {\n type: 'remove',\n node: change.oldNode,\n },\n this,\n );\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n afterBoundNode.row,\n maxBound,\n );\n this.#output.push(\n {\n type: 'add',\n node: afterBoundNode,\n },\n this,\n );\n return;\n }\n\n unreachable();\n }\n\n #withRowHiddenFromFetch(row: Row, fn: () => void) {\n this.#rowHiddenFromFetch = row;\n try {\n fn();\n } finally {\n this.#rowHiddenFromFetch = undefined;\n }\n }\n\n #setTakeState(\n takeStateKey: string,\n size: number,\n bound: Row | undefined,\n maxBound: Row | undefined,\n ) {\n this.#storage.set(takeStateKey, {\n size,\n bound,\n });\n if (\n bound !== undefined &&\n (maxBound === undefined ||\n this.getSchema().compareRows(bound, maxBound) > 0)\n ) {\n this.#storage.set(MAX_BOUND_KEY, bound);\n }\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n}\n\nfunction getTakeStateKey(\n partitionKey: PartitionKey | undefined,\n rowOrConstraint: Row | Constraint | undefined,\n): string {\n // The order must be consistent. We always use the order as defined by the\n // partition key.\n const partitionValues: Value[] = [];\n\n if (partitionKey && rowOrConstraint) {\n for (const key of partitionKey) {\n partitionValues.push(rowOrConstraint[key]);\n }\n }\n\n return JSON.stringify(['take', ...partitionValues]);\n}\n\nfunction constraintMatchesPartitionKey(\n constraint: Constraint | undefined,\n partitionKey: PartitionKey | undefined,\n): boolean {\n if (constraint === undefined || partitionKey === undefined) {\n return constraint === partitionKey;\n }\n if (partitionKey.length !== Object.keys(constraint).length) {\n return false;\n }\n for (const key of partitionKey) {\n if (!hasOwn(constraint, key)) {\n return false;\n }\n }\n return true;\n}\n\nfunction makePartitionKeyComparator(partitionKey: PartitionKey): Comparator {\n return (a, b) => {\n for (const key of partitionKey) {\n const cmp = compareValues(a[key], b[key]);\n if (cmp !== 0) {\n return cmp;\n }\n }\n return 0;\n };\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport {mergeIterables} from '../../../shared/src/iterables.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {Change} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type InputBase,\n type Operator,\n type Output,\n} from './operator.ts';\nimport {\n makeAddEmptyRelationships,\n mergeRelationships,\n pushAccumulatedChanges,\n} from './push-accumulated.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first, type Stream} from './stream.ts';\nimport type {UnionFanOut} from './union-fan-out.ts';\n\nexport class UnionFanIn implements Operator {\n readonly #inputs: readonly Input[];\n readonly #schema: SourceSchema;\n #fanOutPushStarted: boolean = false;\n #output: Output = throwOutput;\n #accumulatedPushes: Change[] = [];\n\n constructor(fanOut: UnionFanOut, inputs: Input[]) {\n this.#inputs = inputs;\n const fanOutSchema = fanOut.getSchema();\n fanOut.setFanIn(this);\n\n const schema: Writable<SourceSchema> = {\n tableName: fanOutSchema.tableName,\n columns: fanOutSchema.columns,\n primaryKey: fanOutSchema.primaryKey,\n relationships: {\n ...fanOutSchema.relationships,\n },\n isHidden: fanOutSchema.isHidden,\n system: fanOutSchema.system,\n compareRows: 
fanOutSchema.compareRows,\n      sort: fanOutSchema.sort,\n    };\n\n    // now go through inputs and merge relationships\n    const relationshipsFromBranches: Set<string> = new Set();\n    for (const input of inputs) {\n      const inputSchema = input.getSchema();\n      assert(\n        schema.tableName === inputSchema.tableName,\n        `Table name mismatch in union fan-in: ${schema.tableName} !== ${inputSchema.tableName}`,\n      );\n      assert(\n        schema.primaryKey === inputSchema.primaryKey,\n        `Primary key mismatch in union fan-in`,\n      );\n      assert(\n        schema.system === inputSchema.system,\n        `System mismatch in union fan-in: ${schema.system} !== ${inputSchema.system}`,\n      );\n      assert(\n        schema.compareRows === inputSchema.compareRows,\n        `compareRows mismatch in union fan-in`,\n      );\n      assert(schema.sort === inputSchema.sort, `Sort mismatch in union fan-in`);\n\n      for (const [relName, relSchema] of Object.entries(\n        inputSchema.relationships,\n      )) {\n        if (relName in fanOutSchema.relationships) {\n          continue;\n        }\n\n        // All branches will have unique relationship names except for relationships\n        // that come in from `fanOut`.\n        assert(\n          !relationshipsFromBranches.has(relName),\n          `Relationship ${relName} exists in multiple upstream inputs to union fan-in`,\n        );\n        schema.relationships[relName] = relSchema;\n        relationshipsFromBranches.add(relName);\n      }\n\n      input.setOutput(this);\n    }\n\n    this.#schema = schema;\n    this.#inputs = inputs;\n  }\n\n  cleanup(_req: FetchRequest): Stream<Node> {\n    // Cleanup is going away. Not implemented.\n    return [];\n  }\n\n  destroy(): void {\n    for (const input of this.#inputs) {\n      input.destroy();\n    }\n  }\n\n  fetch(req: FetchRequest): Stream<Node> {\n    const iterables = this.#inputs.map(input => input.fetch(req));\n    return mergeIterables(\n      iterables,\n      (l, r) => this.#schema.compareRows(l.row, r.row),\n      true,\n    );\n  }\n\n  getSchema(): SourceSchema {\n    return this.#schema;\n  }\n\n  push(change: Change, pusher: InputBase): void {\n    if (!this.#fanOutPushStarted) {\n      this.#pushInternalChange(change, pusher);\n    } else {\n      this.#accumulatedPushes.push(change);\n    }\n  }\n\n  /**\n   * An internal change means that a change was received inside the fan-out/fan-in sub-graph.\n   *\n   * These changes always come from children of a flip-join as no other push-generating operators\n   * currently exist between union-fan-in and union-fan-out. All other pushes\n   * enter into union-fan-out before reaching union-fan-in.\n   *\n   * - normal joins for `exists` come before `union-fan-out`\n   * - joins for `related` come after `union-fan-out`\n   * - take comes after `union-fan-out`\n   *\n   * The algorithm for deciding whether or not to forward a push that came from inside the ufo/ufi sub-graph:\n   * 1. If the change is a `child` change we can forward it. This is because all child branches in the ufo/ufi sub-graph are unique.\n   * 2. If the change is `add` we can forward it iff no `fetches` for the row return any results.\n   *    If another branch has it, the add was already emitted in the past.\n   * 3. If the change is `remove` we can forward it iff no `fetches` for the row return any results.\n   *    If no other branches have the change, the remove can be sent as the value is no longer present.\n   *    If other branches have it, the last branch that processes the remove will send the remove.\n   * 4. 
Edits will always come through as child changes as flip join will flip them into children.\n * An edit that would result in a remove or add will have been split into an add/remove pair rather than being an edit.\n */\n #pushInternalChange(change: Change, pusher: InputBase): void {\n if (change.type === 'child') {\n this.#output.push(change, this);\n return;\n }\n\n assert(change.type === 'add' || change.type === 'remove');\n\n let hadMatch = false;\n for (const input of this.#inputs) {\n if (input === pusher) {\n hadMatch = true;\n continue;\n }\n\n const constraint: Writable<Constraint> = {};\n for (const key of this.#schema.primaryKey) {\n constraint[key] = change.node.row[key];\n }\n const fetchResult = input.fetch({\n constraint,\n });\n\n if (first(fetchResult) !== undefined) {\n // Another branch has the row, so the add/remove is not needed.\n return;\n }\n }\n\n assert(hadMatch, 'Pusher was not one of the inputs to union-fan-in!');\n\n // No other branches have the row, so we can push the change.\n this.#output.push(change, this);\n }\n\n fanOutStartedPushing() {\n assert(this.#fanOutPushStarted === false);\n this.#fanOutPushStarted = true;\n }\n\n fanOutDonePushing(fanOutChangeType: Change['type']) {\n assert(this.#fanOutPushStarted);\n this.#fanOutPushStarted = false;\n if (this.#inputs.length === 0) {\n return;\n }\n\n if (this.#accumulatedPushes.length === 0) {\n // It is possible for no forks to pass along the push.\n // E.g., if no filters match in any fork.\n return;\n }\n\n pushAccumulatedChanges(\n this.#accumulatedPushes,\n this.#output,\n this,\n fanOutChangeType,\n mergeRelationships,\n makeAddEmptyRelationships(this.#schema),\n );\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Change} from './change.ts';\nimport type {Node} from './data.ts';\nimport type {FetchRequest, Input, Operator, Output} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\nimport type {UnionFanIn} from './union-fan-in.ts';\n\nexport class UnionFanOut implements Operator {\n #destroyCount: number = 0;\n #unionFanIn?: UnionFanIn;\n readonly #input: Input;\n readonly #outputs: Output[] = [];\n\n constructor(input: Input) {\n this.#input = input;\n input.setOutput(this);\n }\n\n setFanIn(fanIn: UnionFanIn) {\n assert(!this.#unionFanIn, 'FanIn already set for this FanOut');\n this.#unionFanIn = fanIn;\n }\n\n push(change: Change): void {\n must(this.#unionFanIn).fanOutStartedPushing();\n for (const output of this.#outputs) {\n output.push(change, this);\n }\n must(this.#unionFanIn).fanOutDonePushing(change.type);\n }\n\n setOutput(output: Output): void {\n this.#outputs.push(output);\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n fetch(req: FetchRequest): Stream<Node> {\n return this.#input.fetch(req);\n }\n\n cleanup(_req: FetchRequest): Stream<Node> {\n // Cleanup is going away. 
Not implemented.\n return [];\n }\n\n destroy(): void {\n if (this.#destroyCount < this.#outputs.length) {\n ++this.#destroyCount;\n if (this.#destroyCount === this.#outputs.length) {\n this.#input.destroy();\n }\n } else {\n throw new Error('FanOut already destroyed once for each output');\n }\n }\n}\n", "/* eslint-disable @typescript-eslint/no-explicit-any */\nimport {must} from '../../../shared/src/must.ts';\nimport {\n toStaticParam,\n type Condition,\n type LiteralValue,\n type Parameter,\n type SimpleOperator,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Schema} from '../../../zero-schema/src/builder/schema-builder.ts';\nimport type {\n AvailableRelationships,\n DestTableName,\n ExistsOptions,\n GetFilterType,\n NoCompoundTypeSelector,\n PullTableSchema,\n Query,\n} from './query.ts';\n\nexport type ParameterReference = {\n [toStaticParam](): Parameter;\n};\n\n/**\n * A factory function that creates a condition. This is used to create\n * complex conditions that can be passed to the `where` method of a query.\n *\n * @example\n *\n * ```ts\n * const condition: ExpressionFactory<User> = ({and, cmp, or}) =>\n * and(\n * cmp('name', '=', 'Alice'),\n * or(cmp('age', '>', 18), cmp('isStudent', '=', true)),\n * );\n *\n * const query = z.query.user.where(condition);\n * ```\n */\nexport interface ExpressionFactory<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n> {\n (eb: ExpressionBuilder<TSchema, TTable>): Condition;\n}\n\nexport class ExpressionBuilder<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n> {\n readonly #exists: (\n relationship: string,\n cb?: ((query: Query<TSchema, TTable>) => Query<TSchema, any>) | undefined,\n options?: ExistsOptions | undefined,\n ) => Condition;\n\n constructor(\n exists: (\n relationship: string,\n cb?: ((query: Query<TSchema, TTable>) => Query<TSchema, any>) | undefined,\n options?: ExistsOptions | undefined,\n ) => Condition,\n ) {\n this.#exists = exists;\n this.exists = this.exists.bind(this);\n }\n\n get eb() {\n return this;\n }\n\n cmp<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n TOperator extends SimpleOperator,\n >(\n field: TSelector,\n op: TOperator,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, TOperator>\n | ParameterReference,\n ): Condition;\n cmp<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n >(\n field: TSelector,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, '='>\n | ParameterReference,\n ): Condition;\n cmp(\n field: string,\n opOrValue: SimpleOperator | ParameterReference | LiteralValue,\n value?: ParameterReference | LiteralValue,\n ): Condition {\n return cmp(field, opOrValue, value);\n }\n\n cmpLit(\n left: ParameterReference | LiteralValue,\n op: SimpleOperator,\n right: ParameterReference | LiteralValue,\n ): Condition {\n return {\n type: 'simple',\n left: isParameterReference(left)\n ? left[toStaticParam]()\n : {type: 'literal', value: left},\n right: isParameterReference(right)\n ? 
right[toStaticParam]()\n : {type: 'literal', value: right},\n op,\n };\n }\n\n and = and;\n or = or;\n not = not;\n\n exists = <TRelationship extends AvailableRelationships<TTable, TSchema>>(\n relationship: TRelationship,\n cb?:\n | ((\n query: Query<TSchema, DestTableName<TTable, TSchema, TRelationship>>,\n ) => Query<TSchema, any>)\n | undefined,\n options?: ExistsOptions | undefined,\n ): Condition => this.#exists(relationship, cb, options);\n}\n\nexport function and(...conditions: (Condition | undefined)[]): Condition {\n const expressions = filterTrue(filterUndefined(conditions));\n\n if (expressions.length === 1) {\n return expressions[0];\n }\n\n if (expressions.some(isAlwaysFalse)) {\n return FALSE;\n }\n\n return {type: 'and', conditions: expressions};\n}\n\nexport function or(...conditions: (Condition | undefined)[]): Condition {\n const expressions = filterFalse(filterUndefined(conditions));\n\n if (expressions.length === 1) {\n return expressions[0];\n }\n\n if (expressions.some(isAlwaysTrue)) {\n return TRUE;\n }\n\n return {type: 'or', conditions: expressions};\n}\n\nexport function not(expression: Condition): Condition {\n switch (expression.type) {\n case 'and':\n return {\n type: 'or',\n conditions: expression.conditions.map(not),\n };\n case 'or':\n return {\n type: 'and',\n conditions: expression.conditions.map(not),\n };\n case 'correlatedSubquery':\n return {\n type: 'correlatedSubquery',\n related: expression.related,\n op: negateOperator(expression.op),\n };\n case 'simple':\n return {\n type: 'simple',\n op: negateOperator(expression.op),\n left: expression.left,\n right: expression.right,\n };\n }\n}\n\nexport function cmp(\n field: string,\n opOrValue: SimpleOperator | ParameterReference | LiteralValue,\n value?: ParameterReference | LiteralValue,\n): Condition {\n let op: SimpleOperator;\n if (value === undefined) {\n value = opOrValue;\n op = '=';\n } else {\n op = opOrValue as SimpleOperator;\n }\n\n return {\n type: 'simple',\n left: {type: 'column', name: field},\n right: isParameterReference(value)\n ? 
value[toStaticParam]()\n : {type: 'literal', value},\n op,\n };\n}\n\nfunction isParameterReference(\n value: ParameterReference | LiteralValue | null,\n): value is ParameterReference {\n return (\n value !== null && typeof value === 'object' && (value as any)[toStaticParam]\n );\n}\n\nexport const TRUE: Condition = {\n type: 'and',\n conditions: [],\n};\n\nconst FALSE: Condition = {\n type: 'or',\n conditions: [],\n};\n\nfunction isAlwaysTrue(condition: Condition): boolean {\n return condition.type === 'and' && condition.conditions.length === 0;\n}\n\nfunction isAlwaysFalse(condition: Condition): boolean {\n return condition.type === 'or' && condition.conditions.length === 0;\n}\n\nexport function simplifyCondition(c: Condition): Condition {\n if (c.type === 'simple' || c.type === 'correlatedSubquery') {\n return c;\n }\n if (c.conditions.length === 1) {\n return simplifyCondition(c.conditions[0]);\n }\n const conditions = flatten(c.type, c.conditions.map(simplifyCondition));\n if (c.type === 'and' && conditions.some(isAlwaysFalse)) {\n return FALSE;\n }\n if (c.type === 'or' && conditions.some(isAlwaysTrue)) {\n return TRUE;\n }\n return {\n type: c.type,\n conditions,\n };\n}\n\nexport function flatten(\n type: 'and' | 'or',\n conditions: readonly Condition[],\n): Condition[] {\n const flattened: Condition[] = [];\n for (const c of conditions) {\n if (c.type === type) {\n flattened.push(...c.conditions);\n } else {\n flattened.push(c);\n }\n }\n\n return flattened;\n}\n\nconst negateSimpleOperatorMap = {\n ['=']: '!=',\n ['!=']: '=',\n ['<']: '>=',\n ['>']: '<=',\n ['>=']: '<',\n ['<=']: '>',\n ['IN']: 'NOT IN',\n ['NOT IN']: 'IN',\n ['LIKE']: 'NOT LIKE',\n ['NOT LIKE']: 'LIKE',\n ['ILIKE']: 'NOT ILIKE',\n ['NOT ILIKE']: 'ILIKE',\n ['IS']: 'IS NOT',\n ['IS NOT']: 'IS',\n} as const;\n\nconst negateOperatorMap = {\n ...negateSimpleOperatorMap,\n ['EXISTS']: 'NOT EXISTS',\n ['NOT EXISTS']: 'EXISTS',\n} as const;\n\nexport function negateOperator<OP extends keyof typeof negateOperatorMap>(\n op: OP,\n): (typeof negateOperatorMap)[OP] {\n return must(negateOperatorMap[op]);\n}\n\nfunction filterUndefined<T>(array: (T | undefined)[]): T[] {\n return array.filter(e => e !== undefined);\n}\n\nfunction filterTrue(conditions: Condition[]): Condition[] {\n return conditions.filter(c => !isAlwaysTrue(c));\n}\n\nfunction filterFalse(conditions: Condition[]): Condition[] {\n return conditions.filter(c => !isAlwaysFalse(c));\n}\n", "import {assertString} from '../../../shared/src/asserts.ts';\nimport type {NonNullValue, SimplePredicateNoNull} from './filter.ts';\n\nexport function getLikePredicate(\n pattern: NonNullValue,\n flags: 'i' | '',\n): SimplePredicateNoNull {\n const op = getLikeOp(String(pattern), flags);\n return (lhs: NonNullValue) => {\n assertString(lhs);\n return op(String(lhs));\n };\n}\n\nfunction getLikeOp(pattern: string, flags: 'i' | ''): (lhs: string) => boolean {\n // if lhs does not contain '%' or '_' then it is a simple string comparison.\n // if it does contain '%' or '_' then it is a regex comparison.\n // '%' is a wildcard for any number of characters\n // '_' is a wildcard for a single character\n // Postgres SQL allows escaping using `\\`.\n\n if (!/_|%|\\\\/.test(pattern)) {\n if (flags === 'i') {\n const rhsLower = pattern.toLowerCase();\n return (lhs: string) => lhs.toLowerCase() === rhsLower;\n }\n return (lhs: string) => lhs === pattern;\n }\n const re = patternToRegExp(pattern, flags);\n return (lhs: string) => re.test(lhs);\n}\n\nconst specialCharsRe = 
/[$()*+.?[\\]\\\\^{|}]/;\n\nfunction patternToRegExp(source: string, flags: '' | 'i' = ''): RegExp {\n // There are a few cases:\n // % => .*\n // _ => .\n // \\x => \\x for any x except special regexp chars\n // special regexp chars => \\special regexp chars\n let pattern = '^';\n for (let i = 0; i < source.length; i++) {\n let c = source[i];\n switch (c) {\n case '%':\n pattern += '.*';\n break;\n case '_':\n pattern += '.';\n break;\n\n // @ts-expect-error fallthrough\n case '\\\\':\n if (i === source.length - 1) {\n throw new Error('LIKE pattern must not end with escape character');\n }\n i++;\n c = source[i];\n\n // fall through\n default:\n if (specialCharsRe.test(c)) {\n pattern += '\\\\';\n }\n pattern += c;\n\n break;\n }\n }\n return new RegExp(pattern + '$', flags + 'm');\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport type {\n Condition,\n SimpleCondition,\n SimpleOperator,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport {simplifyCondition} from '../query/expression.ts';\nimport {getLikePredicate} from './like.ts';\n\nexport type NonNullValue = Exclude<Value, null | undefined>;\nexport type SimplePredicate = (rhs: Value) => boolean;\nexport type SimplePredicateNoNull = (rhs: NonNullValue) => boolean;\n\nexport type NoSubqueryCondition =\n | SimpleCondition\n | {\n type: 'and';\n conditions: readonly NoSubqueryCondition[];\n }\n | {\n type: 'or';\n conditions: readonly NoSubqueryCondition[];\n };\n\nexport function createPredicate(\n condition: NoSubqueryCondition,\n): (row: Row) => boolean {\n if (condition.type !== 'simple') {\n const predicates = condition.conditions.map(c => createPredicate(c));\n return condition.type === 'and'\n ? 
(row: Row) => {\n // and\n for (const predicate of predicates) {\n if (!predicate(row)) {\n return false;\n }\n }\n return true;\n }\n : (row: Row) => {\n // or\n for (const predicate of predicates) {\n if (predicate(row)) {\n return true;\n }\n }\n return false;\n };\n }\n const {left} = condition;\n const {right} = condition;\n assert(\n right.type !== 'static',\n 'static values should be resolved before creating predicates',\n );\n assert(\n left.type !== 'static',\n 'static values should be resolved before creating predicates',\n );\n\n switch (condition.op) {\n case 'IS':\n case 'IS NOT': {\n const impl = createIsPredicate(right.value, condition.op);\n if (left.type === 'literal') {\n const result = impl(left.value);\n return () => result;\n }\n return (row: Row) => impl(row[left.name]);\n }\n }\n\n if (right.value === null || right.value === undefined) {\n return (_row: Row) => false;\n }\n\n const impl = createPredicateImpl(right.value, condition.op);\n if (left.type === 'literal') {\n if (left.value === null || left.value === undefined) {\n return (_row: Row) => false;\n }\n const result = impl(left.value);\n return () => result;\n }\n\n return (row: Row) => {\n const lhs = row[left.name];\n if (lhs === null || lhs === undefined) {\n return false;\n }\n return impl(lhs);\n };\n}\n\nfunction createIsPredicate(\n rhs: Value | readonly Value[],\n operator: 'IS' | 'IS NOT',\n): SimplePredicate {\n switch (operator) {\n case 'IS':\n return lhs => lhs === rhs;\n case 'IS NOT':\n return lhs => lhs !== rhs;\n }\n}\n\nfunction createPredicateImpl(\n rhs: NonNullValue | readonly NonNullValue[],\n operator: Exclude<SimpleOperator, 'IS' | 'IS NOT'>,\n): SimplePredicateNoNull {\n switch (operator) {\n case '=':\n return lhs => lhs === rhs;\n case '!=':\n return lhs => lhs !== rhs;\n case '<':\n return lhs => lhs < rhs;\n case '<=':\n return lhs => lhs <= rhs;\n case '>':\n return lhs => lhs > rhs;\n case '>=':\n return lhs => lhs >= rhs;\n case 'LIKE':\n return getLikePredicate(rhs, '');\n case 'NOT LIKE':\n return not(getLikePredicate(rhs, ''));\n case 'ILIKE':\n return getLikePredicate(rhs, 'i');\n case 'NOT ILIKE':\n return not(getLikePredicate(rhs, 'i'));\n case 'IN': {\n assert(Array.isArray(rhs));\n const set = new Set(rhs);\n return lhs => set.has(lhs);\n }\n case 'NOT IN': {\n assert(Array.isArray(rhs));\n const set = new Set(rhs);\n return lhs => !set.has(lhs);\n }\n default:\n operator satisfies never;\n throw new Error(`Unexpected operator: ${operator}`);\n }\n}\n\nfunction not<T>(f: (lhs: T) => boolean) {\n return (lhs: T) => !f(lhs);\n}\n\n/**\n * If the condition contains any CorrelatedSubqueryConditions, returns a\n * transformed condition which contains no CorrelatedSubqueryCondition(s) but\n * which will filter a subset of the rows that would be filtered by the original\n * condition, or undefined if no such transformation exists.\n *\n * If the condition does not contain any CorrelatedSubqueryConditions\n * returns the condition unmodified and `conditionsRemoved: false`.\n */\nexport function transformFilters(filters: Condition | undefined): {\n filters: NoSubqueryCondition | undefined;\n conditionsRemoved: boolean;\n} {\n if (!filters) {\n return {filters: undefined, conditionsRemoved: false};\n }\n switch (filters.type) {\n case 'simple':\n return {filters, conditionsRemoved: false};\n case 'correlatedSubquery':\n return {filters: undefined, conditionsRemoved: true};\n case 'and':\n case 'or': {\n const transformedConditions: NoSubqueryCondition[] = [];\n let 
conditionsRemoved = false;\n for (const cond of filters.conditions) {\n const transformed = transformFilters(cond);\n // If any branch of the OR ends up empty, the entire OR needs\n // to be removed.\n if (transformed.filters === undefined && filters.type === 'or') {\n return {filters: undefined, conditionsRemoved: true};\n }\n conditionsRemoved = conditionsRemoved || transformed.conditionsRemoved;\n if (transformed.filters) {\n transformedConditions.push(transformed.filters);\n }\n }\n return {\n filters: simplifyCondition({\n type: filters.type,\n conditions: transformedConditions,\n }) as NoSubqueryCondition,\n conditionsRemoved,\n };\n }\n default:\n unreachable(filters);\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport type {JSONValue} from '../../../shared/src/json.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {\n AST,\n ColumnReference,\n CompoundKey,\n Condition,\n Conjunction,\n CorrelatedSubquery,\n CorrelatedSubqueryCondition,\n Disjunction,\n LiteralValue,\n Ordering,\n Parameter,\n SimpleCondition,\n ValuePosition,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport {Exists} from '../ivm/exists.ts';\nimport {FanIn} from '../ivm/fan-in.ts';\nimport {FanOut} from '../ivm/fan-out.ts';\nimport {\n buildFilterPipeline,\n type FilterInput,\n} from '../ivm/filter-operators.ts';\nimport {Filter} from '../ivm/filter.ts';\nimport {FlippedJoin} from '../ivm/flipped-join.ts';\nimport {Join} from '../ivm/join.ts';\nimport type {Input, InputBase, Storage} from '../ivm/operator.ts';\nimport {Skip} from '../ivm/skip.ts';\nimport type {Source, SourceInput} from '../ivm/source.ts';\nimport {Take} from '../ivm/take.ts';\nimport {UnionFanIn} from '../ivm/union-fan-in.ts';\nimport {UnionFanOut} from '../ivm/union-fan-out.ts';\nimport type {DebugDelegate} from './debug-delegate.ts';\nimport {createPredicate, type NoSubqueryCondition} from './filter.ts';\n\nexport type StaticQueryParameters = {\n authData: Record<string, JSONValue>;\n preMutationRow?: Row | undefined;\n};\n\n/**\n * Interface required of caller to buildPipeline. Connects to constructed\n * pipeline to delegate environment to provide sources and storage.\n */\nexport interface BuilderDelegate {\n readonly applyFiltersAnyway?: boolean | undefined;\n readonly debug?: DebugDelegate | undefined;\n\n /**\n * Called once for each source needed by the AST.\n * Might be called multiple times with same tableName. It is OK to return\n * same storage instance in that case.\n */\n getSource(tableName: string): Source | undefined;\n\n /**\n * Called once for each operator that requires storage. Should return a new\n * unique storage object for each call.\n */\n createStorage(name: string): Storage;\n\n decorateInput(input: Input, name: string): Input;\n\n addEdge(source: InputBase, dest: InputBase): void;\n\n decorateFilterInput(input: FilterInput, name: string): FilterInput;\n\n decorateSourceInput(input: SourceInput, queryID: string): Input;\n\n /**\n * The AST is mapped on-the-wire between client and server names.\n *\n * There is no \"wire\" for zqlite tests so this function is provided\n * to allow tests to remap the AST.\n */\n mapAst?: ((ast: AST) => AST) | undefined;\n}\n\n/**\n * Builds a pipeline from an AST. 
Caller must provide a delegate to create source\n * and storage interfaces as necessary.\n *\n * Usage:\n *\n * ```ts\n * class MySink implements Output {\n * readonly #input: Input;\n *\n * constructor(input: Input) {\n * this.#input = input;\n * input.setOutput(this);\n * }\n *\n * push(change: Change, _: Operator) {\n * console.log(change);\n * }\n * }\n *\n * const input = buildPipeline(ast, myDelegate, hash(ast));\n * const sink = new MySink(input);\n * ```\n */\nexport function buildPipeline(\n ast: AST,\n delegate: BuilderDelegate,\n queryID: string,\n): Input {\n ast = delegate.mapAst ? delegate.mapAst(ast) : ast;\n return buildPipelineInternal(ast, delegate, queryID, '');\n}\n\nexport function bindStaticParameters(\n ast: AST,\n staticQueryParameters: StaticQueryParameters | undefined,\n) {\n const visit = (node: AST): AST => ({\n ...node,\n where: node.where ? bindCondition(node.where) : undefined,\n related: node.related?.map(sq => ({\n ...sq,\n subquery: visit(sq.subquery),\n })),\n });\n\n function bindCondition(condition: Condition): Condition {\n if (condition.type === 'simple') {\n return {\n ...condition,\n left: bindValue(condition.left),\n right: bindValue(condition.right) as Exclude<\n ValuePosition,\n ColumnReference\n >,\n };\n }\n if (condition.type === 'correlatedSubquery') {\n return {\n ...condition,\n related: {\n ...condition.related,\n subquery: visit(condition.related.subquery),\n },\n };\n }\n return {\n ...condition,\n conditions: condition.conditions.map(bindCondition),\n };\n }\n\n const bindValue = (value: ValuePosition): ValuePosition => {\n if (isParameter(value)) {\n const anchor = must(\n staticQueryParameters,\n 'Static query params do not exist',\n )[value.anchor];\n const resolvedValue = resolveField(anchor, value.field);\n return {\n type: 'literal',\n value: resolvedValue as LiteralValue,\n };\n }\n return value;\n };\n\n return visit(ast);\n}\n\nfunction resolveField(\n anchor: Record<string, JSONValue> | Row | undefined,\n field: string | string[],\n): unknown {\n if (anchor === undefined) {\n return null;\n }\n\n if (Array.isArray(field)) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return field.reduce((acc, f) => (acc as any)?.[f], anchor) ?? null;\n }\n\n return anchor[field] ?? null;\n}\n\nfunction isParameter(value: ValuePosition): value is Parameter {\n return value.type === 'static';\n}\n\nconst EXISTS_LIMIT = 3;\nconst PERMISSIONS_EXISTS_LIMIT = 1;\n\nfunction buildPipelineInternal(\n ast: AST,\n delegate: BuilderDelegate,\n queryID: string,\n name: string,\n partitionKey?: CompoundKey | undefined,\n): Input {\n const source = delegate.getSource(ast.table);\n if (!source) {\n throw new Error(`Source not found: ${ast.table}`);\n }\n ast = uniquifyCorrelatedSubqueryConditionAliases(ast);\n\n const csqConditions = gatherCorrelatedSubqueryQueryConditions(ast.where);\n const splitEditKeys: Set<string> = partitionKey\n ? 
new Set(partitionKey)\n : new Set();\n const aliases = new Set<string>();\n for (const csq of csqConditions) {\n aliases.add(csq.related.subquery.alias || '');\n for (const key of csq.related.correlation.parentField) {\n splitEditKeys.add(key);\n }\n }\n if (ast.related) {\n for (const csq of ast.related) {\n for (const key of csq.correlation.parentField) {\n splitEditKeys.add(key);\n }\n }\n }\n const conn = source.connect(\n must(ast.orderBy),\n ast.where,\n splitEditKeys,\n delegate.debug,\n );\n\n let end: Input = delegate.decorateSourceInput(conn, queryID);\n end = delegate.decorateInput(end, `${name}:source(${ast.table})`);\n const {fullyAppliedFilters} = conn;\n\n if (ast.start) {\n const skip = new Skip(end, ast.start);\n delegate.addEdge(end, skip);\n end = delegate.decorateInput(skip, `${name}:skip)`);\n }\n\n for (const csqCondition of csqConditions) {\n // flipped EXISTS are handled in applyWhere\n if (!csqCondition.flip) {\n end = applyCorrelatedSubQuery(\n {\n ...csqCondition.related,\n subquery: {\n ...csqCondition.related.subquery,\n limit:\n csqCondition.related.system === 'permissions'\n ? PERMISSIONS_EXISTS_LIMIT\n : EXISTS_LIMIT,\n },\n },\n delegate,\n queryID,\n end,\n name,\n true,\n );\n }\n }\n\n if (ast.where && (!fullyAppliedFilters || delegate.applyFiltersAnyway)) {\n end = applyWhere(end, ast.where, delegate, name);\n }\n\n if (ast.limit !== undefined) {\n const takeName = `${name}:take`;\n const take = new Take(\n end,\n delegate.createStorage(takeName),\n ast.limit,\n partitionKey,\n );\n delegate.addEdge(end, take);\n end = delegate.decorateInput(take, takeName);\n }\n\n if (ast.related) {\n for (const csq of ast.related) {\n end = applyCorrelatedSubQuery(csq, delegate, queryID, end, name, false);\n }\n }\n\n return end;\n}\n\nfunction applyWhere(\n input: Input,\n condition: Condition,\n delegate: BuilderDelegate,\n name: string,\n): Input {\n if (!conditionIncludesFlippedSubqueryAtAnyLevel(condition)) {\n return buildFilterPipeline(input, delegate, filterInput =>\n applyFilter(filterInput, condition, delegate, name),\n );\n }\n\n return applyFilterWithFlips(input, condition, delegate, name);\n}\n\nfunction applyFilterWithFlips(\n input: Input,\n condition: Condition,\n delegate: BuilderDelegate,\n name: string,\n): Input {\n let end = input;\n assert(condition.type !== 'simple', 'Simple conditions cannot have flips');\n\n switch (condition.type) {\n case 'and': {\n const [withFlipped, withoutFlipped] = partitionBranches(\n condition.conditions,\n conditionIncludesFlippedSubqueryAtAnyLevel,\n );\n if (withoutFlipped.length > 0) {\n end = buildFilterPipeline(input, delegate, filterInput =>\n applyAnd(\n filterInput,\n {\n type: 'and',\n conditions: withoutFlipped,\n },\n delegate,\n name,\n ),\n );\n }\n assert(withFlipped.length > 0, 'Impossible to have no flips here');\n for (const cond of withFlipped) {\n end = applyFilterWithFlips(end, cond, delegate, name);\n }\n break;\n }\n case 'or': {\n const [withFlipped, withoutFlipped] = partitionBranches(\n condition.conditions,\n conditionIncludesFlippedSubqueryAtAnyLevel,\n );\n assert(withFlipped.length > 0, 'Impossible to have no flips here');\n\n const ufo = new UnionFanOut(end);\n delegate.addEdge(end, ufo);\n end = delegate.decorateInput(ufo, `${name}:ufo`);\n\n const branches: Input[] = [];\n if (withoutFlipped.length > 0) {\n branches.push(\n buildFilterPipeline(end, delegate, filterInput =>\n applyOr(\n filterInput,\n {\n type: 'or',\n conditions: withoutFlipped,\n },\n delegate,\n name,\n ),\n ),\n 
);\n }\n\n for (const cond of withFlipped) {\n branches.push(applyFilterWithFlips(end, cond, delegate, name));\n }\n\n const ufi = new UnionFanIn(ufo, branches);\n for (const branch of branches) {\n delegate.addEdge(branch, ufi);\n }\n end = delegate.decorateInput(ufi, `${name}:ufi`);\n\n break;\n }\n case 'correlatedSubquery': {\n const sq = condition.related;\n const child = buildPipelineInternal(\n sq.subquery,\n delegate,\n '',\n `${name}.${sq.subquery.alias}`,\n sq.correlation.childField,\n );\n const flippedJoin = new FlippedJoin({\n parent: end,\n child,\n parentKey: sq.correlation.parentField,\n childKey: sq.correlation.childField,\n relationshipName: must(\n sq.subquery.alias,\n 'Subquery must have an alias',\n ),\n hidden: sq.hidden ?? false,\n system: sq.system ?? 'client',\n });\n delegate.addEdge(end, flippedJoin);\n delegate.addEdge(child, flippedJoin);\n end = delegate.decorateInput(\n flippedJoin,\n `${name}:flipped-join(${sq.subquery.alias})`,\n );\n break;\n }\n }\n\n return end;\n}\n\nfunction applyFilter(\n input: FilterInput,\n condition: Condition,\n delegate: BuilderDelegate,\n name: string,\n) {\n switch (condition.type) {\n case 'and':\n return applyAnd(input, condition, delegate, name);\n case 'or':\n return applyOr(input, condition, delegate, name);\n case 'correlatedSubquery':\n return applyCorrelatedSubqueryCondition(input, condition, delegate, name);\n case 'simple':\n return applySimpleCondition(input, delegate, condition);\n }\n}\n\nfunction applyAnd(\n input: FilterInput,\n condition: Conjunction,\n delegate: BuilderDelegate,\n name: string,\n): FilterInput {\n for (const subCondition of condition.conditions) {\n input = applyFilter(input, subCondition, delegate, name);\n }\n return input;\n}\n\nexport function applyOr(\n input: FilterInput,\n condition: Disjunction,\n delegate: BuilderDelegate,\n name: string,\n): FilterInput {\n const [subqueryConditions, otherConditions] =\n groupSubqueryConditions(condition);\n // if there are no subquery conditions, no fan-in / fan-out is needed\n if (subqueryConditions.length === 0) {\n const filter = new Filter(\n input,\n createPredicate({\n type: 'or',\n conditions: otherConditions,\n }),\n );\n delegate.addEdge(input, filter);\n return filter;\n }\n\n const fanOut = new FanOut(input);\n delegate.addEdge(input, fanOut);\n const branches = subqueryConditions.map(subCondition =>\n applyFilter(fanOut, subCondition, delegate, name),\n );\n if (otherConditions.length > 0) {\n const filter = new Filter(\n fanOut,\n createPredicate({\n type: 'or',\n conditions: otherConditions,\n }),\n );\n delegate.addEdge(fanOut, filter);\n branches.push(filter);\n }\n const ret = new FanIn(fanOut, branches);\n for (const branch of branches) {\n delegate.addEdge(branch, ret);\n }\n fanOut.setFanIn(ret);\n return ret;\n}\n\nexport function groupSubqueryConditions(condition: Disjunction) {\n const partitioned: [\n subqueryConditions: Condition[],\n otherConditions: NoSubqueryCondition[],\n ] = [[], []];\n for (const subCondition of condition.conditions) {\n if (isNotAndDoesNotContainSubquery(subCondition)) {\n partitioned[1].push(subCondition);\n } else {\n partitioned[0].push(subCondition);\n }\n }\n return partitioned;\n}\n\nexport function isNotAndDoesNotContainSubquery(\n condition: Condition,\n): condition is NoSubqueryCondition {\n if (condition.type === 'correlatedSubquery') {\n return false;\n }\n if (condition.type === 'simple') {\n return true;\n }\n return 
condition.conditions.every(isNotAndDoesNotContainSubquery);\n}\n\nfunction applySimpleCondition(\n input: FilterInput,\n delegate: BuilderDelegate,\n condition: SimpleCondition,\n): FilterInput {\n const filter = new Filter(input, createPredicate(condition));\n delegate.decorateFilterInput(\n filter,\n `${valuePosName(condition.left)}:${condition.op}:${valuePosName(condition.right)}`,\n );\n delegate.addEdge(input, filter);\n return filter;\n}\n\nfunction valuePosName(left: ValuePosition) {\n switch (left.type) {\n case 'static':\n return left.field;\n case 'literal':\n return left.value;\n case 'column':\n return left.name;\n }\n}\n\nfunction applyCorrelatedSubQuery(\n sq: CorrelatedSubquery,\n delegate: BuilderDelegate,\n queryID: string,\n end: Input,\n name: string,\n fromCondition: boolean,\n) {\n // TODO: we only omit the join if the CSQ if from a condition since\n // we want to create an empty array for `related` fields that are `limit(0)`\n if (sq.subquery.limit === 0 && fromCondition) {\n return end;\n }\n\n assert(sq.subquery.alias, 'Subquery must have an alias');\n const child = buildPipelineInternal(\n sq.subquery,\n delegate,\n queryID,\n `${name}.${sq.subquery.alias}`,\n sq.correlation.childField,\n );\n\n const joinName = `${name}:join(${sq.subquery.alias})`;\n const join = new Join({\n parent: end,\n child,\n storage: delegate.createStorage(joinName),\n parentKey: sq.correlation.parentField,\n childKey: sq.correlation.childField,\n relationshipName: sq.subquery.alias,\n hidden: sq.hidden ?? false,\n system: sq.system ?? 'client',\n });\n delegate.addEdge(end, join);\n delegate.addEdge(child, join);\n return delegate.decorateInput(join, joinName);\n}\n\nfunction applyCorrelatedSubqueryCondition(\n input: FilterInput,\n condition: CorrelatedSubqueryCondition,\n delegate: BuilderDelegate,\n name: string,\n): FilterInput {\n assert(condition.op === 'EXISTS' || condition.op === 'NOT EXISTS');\n if (condition.related.subquery.limit === 0) {\n if (condition.op === 'EXISTS') {\n const filter = new Filter(input, () => false);\n delegate.addEdge(input, filter);\n return filter;\n }\n const filter = new Filter(input, () => true);\n delegate.addEdge(input, filter);\n return filter;\n }\n const existsName = `${name}:exists(${condition.related.subquery.alias})`;\n const exists = new Exists(\n input,\n delegate.createStorage(existsName),\n must(condition.related.subquery.alias),\n condition.related.correlation.parentField,\n condition.op,\n );\n delegate.addEdge(input, exists);\n return delegate.decorateFilterInput(exists, existsName);\n}\n\nfunction gatherCorrelatedSubqueryQueryConditions(\n condition: Condition | undefined,\n) {\n const csqs: CorrelatedSubqueryCondition[] = [];\n const gather = (condition: Condition) => {\n if (condition.type === 'correlatedSubquery') {\n csqs.push(condition);\n return;\n }\n if (condition.type === 'and' || condition.type === 'or') {\n for (const c of condition.conditions) {\n gather(c);\n }\n return;\n }\n };\n if (condition) {\n gather(condition);\n }\n return csqs;\n}\n\nexport function assertOrderingIncludesPK(\n ordering: Ordering,\n pk: PrimaryKey,\n): void {\n // eslint-disable-next-line unicorn/prefer-set-has -- Array is more appropriate here for small collections\n const orderingFields = ordering.map(([field]) => field);\n const missingFields = pk.filter(pkField => !orderingFields.includes(pkField));\n\n if (missingFields.length > 0) {\n throw new Error(\n `Ordering must include all primary key fields. 
Missing: ${missingFields.join(\n ', ',\n )}. ZQL automatically appends primary key fields to the ordering if they are missing \n so a common cause of this error is a casing mismatch between Postgres and ZQL.\n E.g., \"userid\" vs \"userID\".\n You may want to add double-quotes around your Postgres column names to prevent Postgres from lower-casing them:\n https://www.postgresql.org/docs/current/sql-syntax-lexical.htm`,\n );\n }\n}\n\nfunction uniquifyCorrelatedSubqueryConditionAliases(ast: AST): AST {\n if (!ast.where) {\n return ast;\n }\n const {where} = ast;\n if (where.type !== 'and' && where.type !== 'or') {\n return ast;\n }\n\n let count = 0;\n const uniquifyCorrelatedSubquery = (csqc: CorrelatedSubqueryCondition) => ({\n ...csqc,\n related: {\n ...csqc.related,\n subquery: {\n ...csqc.related.subquery,\n alias: (csqc.related.subquery.alias ?? '') + '_' + count++,\n },\n },\n });\n\n const uniquify = (cond: Condition): Condition => {\n if (cond.type === 'simple') {\n return cond;\n } else if (cond.type === 'correlatedSubquery') {\n return uniquifyCorrelatedSubquery(cond);\n }\n const conditions = [];\n for (const c of cond.conditions) {\n conditions.push(uniquify(c));\n }\n return {\n type: cond.type,\n conditions,\n };\n };\n\n const result = {\n ...ast,\n where: uniquify(where),\n };\n return result;\n}\n\nexport function conditionIncludesFlippedSubqueryAtAnyLevel(\n cond: Condition,\n): boolean {\n if (cond.type === 'correlatedSubquery') {\n return !!cond.flip;\n }\n if (cond.type === 'and' || cond.type === 'or') {\n return cond.conditions.some(c =>\n conditionIncludesFlippedSubqueryAtAnyLevel(c),\n );\n }\n return false;\n}\n\nexport function partitionBranches(\n conditions: readonly Condition[],\n predicate: (c: Condition) => boolean,\n) {\n const matched: Condition[] = [];\n const notMatched: Condition[] = [];\n for (const c of conditions) {\n if (predicate(c)) {\n matched.push(c);\n } else {\n notMatched.push(c);\n }\n }\n return [matched, notMatched] as const;\n}\n", "export class NotImplementedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'NotImplementedError';\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport type {Immutable} from '../../../shared/src/immutable.ts';\nimport type {ErroredQuery} from '../../../zero-protocol/src/custom-queries.ts';\nimport type {TTL} from '../query/ttl.ts';\nimport type {Listener, ResultType, TypedView} from '../query/typed-view.ts';\nimport type {Change} from './change.ts';\nimport type {Input, Output} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {applyChange} from './view-apply-change.ts';\nimport type {Entry, Format, View} from './view.ts';\n\n/**\n * Implements a materialized view of the output of an operator.\n *\n * It might seem more efficient to use an immutable b-tree for the\n * materialization, but it's not so clear. 
Inserts in the middle are\n * asymptotically slower in an array, but can often be done with zero\n * allocations, where changes to the b-tree will often require several allocs.\n *\n * Also the plain array view is more convenient for consumers since you can dump\n * it into console to see what it is, rather than having to iterate it.\n */\nexport class ArrayView<V extends View> implements Output, TypedView<V> {\n readonly #input: Input;\n readonly #listeners = new Set<Listener<V>>();\n readonly #schema: SourceSchema;\n readonly #format: Format;\n\n // Synthetic \"root\" entry that has a single \"\" relationship, so that we can\n // treat all changes, including the root change, generically.\n readonly #root: Entry;\n\n onDestroy: (() => void) | undefined;\n\n #dirty = false;\n #resultType: ResultType = 'unknown';\n #error: ErroredQuery | undefined;\n readonly #updateTTL: (ttl: TTL) => void;\n\n constructor(\n input: Input,\n format: Format,\n queryComplete: true | ErroredQuery | Promise<true>,\n updateTTL: (ttl: TTL) => void,\n ) {\n this.#input = input;\n this.#schema = input.getSchema();\n this.#format = format;\n this.#updateTTL = updateTTL;\n this.#root = {'': format.singular ? undefined : []};\n input.setOutput(this);\n\n if (queryComplete === true) {\n this.#resultType = 'complete';\n } else if ('error' in queryComplete) {\n this.#resultType = 'error';\n this.#error = queryComplete;\n } else {\n void queryComplete\n .then(() => {\n this.#resultType = 'complete';\n this.#fireListeners();\n })\n .catch(e => {\n this.#resultType = 'error';\n this.#error = e;\n this.#fireListeners();\n });\n }\n this.#hydrate();\n }\n\n get data() {\n return this.#root[''] as V;\n }\n\n addListener(listener: Listener<V>) {\n assert(!this.#listeners.has(listener), 'Listener already registered');\n this.#listeners.add(listener);\n\n this.#fireListener(listener);\n\n return () => {\n this.#listeners.delete(listener);\n };\n }\n\n #fireListeners() {\n for (const listener of this.#listeners) {\n this.#fireListener(listener);\n }\n }\n\n #fireListener(listener: Listener<V>) {\n listener(this.data as Immutable<V>, this.#resultType, this.#error);\n }\n\n destroy() {\n this.onDestroy?.();\n }\n\n #hydrate() {\n this.#dirty = true;\n for (const node of this.#input.fetch({})) {\n applyChange(\n this.#root,\n {type: 'add', node},\n this.#schema,\n '',\n this.#format,\n );\n }\n this.flush();\n }\n\n push(change: Change): void {\n this.#dirty = true;\n applyChange(this.#root, change, this.#schema, '', this.#format);\n }\n\n flush() {\n if (!this.#dirty) {\n return;\n }\n this.#dirty = false;\n this.#fireListeners();\n }\n\n updateTTL(ttl: TTL) {\n this.#updateTTL(ttl);\n }\n}\n", "import {unreachable} from '../../../shared/src/asserts.ts';\nimport type {Condition} from '../../../zero-protocol/src/ast.ts';\n\n/**\n * Checks if a condition contains any NOT EXISTS operations.\n *\n * The client-side query engine cannot support NOT EXISTS operations because:\n *\n * 1. Zero only syncs a subset of data to the client, defined by the queries you use\n * 2. On the client, we can't distinguish between a row not existing at all vs.\n * a row not being synced to the client\n * 3. 
For NOT EXISTS to work correctly, we would need complete knowledge of what\n * doesn't exist, which is not reasonable with the partial sync model\n *\n * @param condition The condition to check\n * @throws Error if the condition uses NOT EXISTS operator\n */\nexport function assertNoNotExists(condition: Condition): void {\n switch (condition.type) {\n case 'simple':\n // Simple conditions don't use EXISTS/NOT EXISTS\n return;\n\n case 'correlatedSubquery':\n if (condition.op === 'NOT EXISTS') {\n throw new Error(\n 'not(exists()) is not supported on the client - see https://bugs.rocicorp.dev/issue/3438',\n );\n }\n // Check if the subquery has a where condition\n if (condition.related.subquery.where) {\n assertNoNotExists(condition.related.subquery.where);\n }\n return;\n\n case 'and':\n case 'or':\n for (const c of condition.conditions) {\n assertNoNotExists(c);\n }\n return;\n default:\n unreachable(condition);\n }\n}\n"],
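The removed `sourcesContent` entry above ends with the bundled `zql` builder sources. One detail worth calling out for readers skimming that embedded source: `createPredicate` in `zql/src/builder/filter.ts` compiles a condition tree into nested closures — `and`/`or` nodes loop over child predicates with early exit, literal left-hand sides are evaluated once at build time, and comparisons short-circuit to `false` when either operand is `null`/`undefined` (SQL-style semantics). A minimal standalone sketch of that pattern follows; `Row`, `RowPredicate`, and the combinator names are illustrative stand-ins, not the package's actual exports.

```ts
// Sketch only — illustrative names, not @rocicorp/zero's API.
type Row = Record<string, unknown>;
type RowPredicate = (row: Row) => boolean;

// 'and'/'or' compile to closures over child predicates with early exit,
// mirroring the for-loops over `predicates` in the diffed filter.ts source.
const and = (...preds: RowPredicate[]): RowPredicate =>
  row => preds.every(p => p(row));
const or = (...preds: RowPredicate[]): RowPredicate =>
  row => preds.some(p => p(row));

// Comparisons return false whenever either side is null/undefined,
// matching the null short-circuits around createPredicateImpl.
const gt = (column: string, rhs: number): RowPredicate =>
  row => {
    const lhs = row[column];
    if (lhs === null || lhs === undefined) {
      return false; // SQL-style: NULL never satisfies a comparison
    }
    return (lhs as number) > rhs;
  };

// Usage: WHERE (age > 18 AND age > 21) OR age > 65
const pred = or(and(gt('age', 18), gt('age', 21)), gt('age', 65));
console.log(pred({age: 30}));   // true
console.log(pred({age: null})); // false
```

Compiling once to closures keeps per-row evaluation allocation-free, which is presumably why the source also pre-evaluates literal operands (`const result = impl(left.value); return () => result;`) rather than re-checking them on every pushed row.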
4
+ "sourcesContent": ["/* eslint-disable @typescript-eslint/naming-convention */\n\nexport const SDD = 4;\nexport const DD31 = 5;\n// V6 added refreshHashes and persistHash to Client to fix ChunkNotFound errors\nexport const V6 = 6;\n// V7 added sizeOfEntry to the BTree chunk data.\nexport const V7 = 7;\nexport const Latest = V7;\n\nexport type SDD = typeof SDD;\nexport type DD31 = typeof DD31;\nexport type V6 = typeof V6;\nexport type V7 = typeof V7;\nexport type Latest = typeof Latest;\n", "import * as v from '@badrap/valita';\n\nexport * from '@badrap/valita';\n\nfunction toDisplay(value: unknown): string {\n switch (typeof value) {\n case 'string':\n case 'number':\n case 'boolean':\n return JSON.stringify(value);\n case 'undefined':\n return 'undefined';\n case 'bigint':\n return value.toString() + 'n';\n default:\n if (value === null) {\n return 'null';\n }\n if (Array.isArray(value)) {\n return 'array';\n }\n return typeof value;\n }\n}\n\ntype Key = string | number;\n\nfunction toDisplayAtPath(v: unknown, path: Key[] | undefined): string {\n if (!path?.length) {\n return toDisplay(v);\n }\n\n let cur = v;\n for (const p of path) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n cur = (cur as any)[p];\n }\n return toDisplay(cur);\n}\n\nfunction displayList<T>(\n word: string,\n expected: T[],\n toDisplay: (x: T) => string | number = x => String(x),\n): string | number {\n if (expected.length === 1) {\n return toDisplay(expected[0]);\n }\n\n const suffix = `${toDisplay(\n expected[expected.length - 2],\n )} ${word} ${toDisplay(expected[expected.length - 1])}`;\n if (expected.length === 2) {\n return suffix;\n }\n return `${expected.slice(0, -2).map(toDisplay).join(', ')}, ${suffix}`;\n}\n\nfunction getMessage(\n err: v.Err | v.ValitaError,\n v: unknown,\n schema: v.Type | v.Optional,\n mode: ParseOptionsMode | undefined,\n): string {\n const firstIssue = err.issues[0];\n const {path} = firstIssue;\n const atPath = path?.length ? ` at ${path.join('.')}` : '';\n\n switch (firstIssue.code) {\n case 'invalid_type':\n return `Expected ${displayList(\n 'or',\n firstIssue.expected,\n )}${atPath}. Got ${toDisplayAtPath(v, path)}`;\n case 'missing_value': {\n const atPath =\n path && path.length > 1 ? ` at ${path.slice(0, -1).join('.')}` : '';\n\n if (firstIssue.path?.length) {\n return `Missing property ${firstIssue.path.at(-1)}${atPath}`;\n }\n return `TODO Unknown missing property${atPath}`;\n }\n\n case 'invalid_literal':\n return `Expected literal value ${displayList(\n 'or',\n firstIssue.expected,\n toDisplay,\n )}${atPath} Got ${toDisplayAtPath(v, path)}`;\n\n case 'invalid_length': {\n return `Expected array with length ${\n firstIssue.minLength === firstIssue.maxLength\n ? firstIssue.minLength\n : `between ${firstIssue.minLength} and ${firstIssue.maxLength}`\n }${atPath}. Got array with length ${(v as {length: number}).length}`;\n }\n\n case 'unrecognized_keys':\n if (firstIssue.keys.length === 1) {\n return `Unexpected property ${firstIssue.keys[0]}${atPath}`;\n }\n return `Unexpected properties ${displayList(\n 'and',\n firstIssue.keys,\n )}${atPath}`;\n\n case 'invalid_union':\n return schema.name === 'union'\n ? getDeepestUnionParseError(v, schema as v.UnionType, mode ?? 'strict')\n : `Invalid union value${atPath}`;\n\n case 'custom_error': {\n const {error} = firstIssue;\n const message = !error\n ? 'unknown'\n : typeof error === 'string'\n ? error\n : (error.message ?? 'unknown');\n return `${message}${atPath}. 
Got ${toDisplayAtPath(v, path)}`;\n }\n }\n}\n\ntype FailedType = {type: v.Type; err: v.Err};\n\nfunction getDeepestUnionParseError(\n value: unknown,\n schema: v.UnionType,\n mode: ParseOptionsMode,\n): string {\n const failures: FailedType[] = [];\n for (const type of schema.options) {\n const r = type.try(value, {mode});\n if (!r.ok) {\n failures.push({type, err: r});\n }\n }\n if (failures.length) {\n // compare the first and second longest-path errors\n failures.sort(pathCmp);\n if (failures.length === 1 || pathCmp(failures[0], failures[1]) < 0) {\n return getMessage(failures[0].err, value, failures[0].type, mode);\n }\n }\n // paths are equivalent\n try {\n const str = JSON.stringify(value);\n return `Invalid union value: ${str}`;\n } catch {\n // fallback if the value could not be stringified\n return `Invalid union value`;\n }\n}\n\n// Descending-order comparison of Issue paths.\n// * [1, 'a'] sorts before [1]\n// * [1] sorts before [0] (i.e. errors later in the tuple sort before earlier errors)\nfunction pathCmp(a: FailedType, b: FailedType) {\n const aPath = a.err.issues[0].path;\n const bPath = b.err.issues[0].path;\n if (aPath.length !== bPath.length) {\n return bPath.length - aPath.length;\n }\n for (let i = 0; i < aPath.length; i++) {\n if (bPath[i] > aPath[i]) {\n return -1;\n }\n if (bPath[i] < aPath[i]) {\n return 1;\n }\n }\n return 0;\n}\n\n/**\n * 'strip' allows unknown properties and removes unknown properties.\n * 'strict' errors if there are unknown properties.\n * 'passthrough' allows unknown properties.\n */\nexport type ParseOptionsMode = 'passthrough' | 'strict' | 'strip';\n\nexport function parse<T>(\n value: unknown,\n schema: v.Type<T>,\n mode?: ParseOptionsMode,\n): T {\n const res = test(value, schema, mode);\n if (!res.ok) {\n throw new TypeError(res.error);\n }\n return res.value;\n}\n\nexport function is<T>(\n value: unknown,\n schema: v.Type<T>,\n mode?: ParseOptionsMode,\n): value is T {\n return test(value, schema, mode).ok;\n}\n\nexport function assert<T>(\n value: unknown,\n schema: v.Type<T>,\n mode?: ParseOptionsMode,\n): asserts value is T {\n parse(value, schema, mode);\n}\n\ntype Result<T> = {ok: true; value: T} | {ok: false; error: string};\n\nexport function test<T>(\n value: unknown,\n schema: v.Type<T>,\n mode?: ParseOptionsMode,\n): Result<T> {\n const res = schema.try(value, mode ? {mode} : undefined);\n if (!res.ok) {\n return {\n ok: false,\n error: getMessage(res, value, schema, mode),\n };\n }\n return res;\n}\n\n/**\n * Similar to {@link test} but works for AbstractTypes such as Optional.\n * This is for advanced usage. 
Prefer {@link test} unless you really need\n * to operate directly on an Optional field.\n */\nexport function testOptional<T>(\n value: unknown,\n schema: v.Type<T> | v.Optional<T>,\n mode?: ParseOptionsMode,\n): Result<T | undefined> {\n let flags = 0x1; // FLAG_FORBID_EXTRA_KEYS;\n if (mode === 'passthrough') {\n flags = 0;\n } else if (mode === 'strip') {\n flags = 0x2; // FLAG_STRIP_EXTRA_KEYS;\n }\n const res = schema.func(value, flags);\n if (res === undefined) {\n return {ok: true, value} as Result<T>;\n } else if (res.ok) {\n return res;\n }\n const err = new v.ValitaError(res);\n return {ok: false, error: getMessage(err, value, schema, mode)};\n}\n\n/**\n * Shallowly marks the schema as readonly.\n */\nexport function readonly<T extends v.Type>(t: T): v.Type<Readonly<v.Infer<T>>> {\n return t as v.Type<Readonly<v.Infer<T>>>;\n}\n\nexport function readonlyObject<T extends Record<string, v.Type | v.Optional>>(\n t: T,\n): v.ObjectType<Readonly<T>, undefined> {\n return v.object(t);\n}\n\nexport function readonlyArray<T extends v.Type>(\n t: T,\n): v.Type<readonly v.Infer<T>[]> {\n return v.array(t);\n}\n\nexport function readonlyRecord<T extends v.Type>(\n t: T,\n): v.Type<Readonly<Record<string, v.Infer<T>>>> {\n return v.record(t);\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nconst AbstractType = Object.getPrototypeOf(\n Object.getPrototypeOf(v.string().optional()),\n).constructor;\n\nexport function instanceOfAbstractType<T = unknown>(\n obj: unknown,\n): obj is v.Type<T> | v.Optional<T> {\n return obj instanceof AbstractType;\n}\n\ntype ObjectShape = Record<string, typeof AbstractType>;\n\n/**\n * Similar to `ObjectType.partial()` except it recurses into nested objects.\n * Rest types are not supported.\n */\nexport function deepPartial<Shape extends ObjectShape>(\n s: v.ObjectType<Shape, undefined>,\n) {\n const shape = {} as Record<string, unknown>;\n for (const [key, type] of Object.entries(s.shape)) {\n if (type.name === 'object') {\n shape[key] = deepPartial(type as v.ObjectType).optional();\n } else {\n shape[key] = type.optional();\n }\n }\n return v.object(shape as {[K in keyof Shape]: v.Optional<v.Infer<Shape[K]>>});\n}\n\ntype Literal = string | number | bigint | boolean;\n\nexport function literalUnion<T extends [...Literal[]]>(\n ...literals: T\n): v.Type<T[number]> {\n return v.union(...literals.map(v.literal));\n}\n", "import {assertObject, throwInvalidType} from './asserts.ts';\nimport {skipAssertJSONValue} from './config.ts';\nimport {hasOwn} from './has-own.ts';\n\n/** The values that can be represented in JSON */\nexport type JSONValue =\n | null\n | string\n | boolean\n | number\n | Array<JSONValue>\n | JSONObject;\n\n/**\n * A JSON object. This is a map from strings to JSON values or `undefined`. We\n * allow `undefined` values as a convenience... but beware that the `undefined`\n * values do not round trip to the server. 
For example:\n *\n * ```\n * // Time t1\n * await tx.set('a', {a: undefined});\n *\n * // time passes, in a new transaction\n * const v = await tx.get('a');\n * console.log(v); // either {a: undefined} or {}\n * ```\n */\nexport type JSONObject = {[key: string]: JSONValue | undefined};\n\n/** Like {@link JSONValue} but deeply readonly */\nexport type ReadonlyJSONValue =\n | null\n | string\n | boolean\n | number\n | ReadonlyArray<ReadonlyJSONValue>\n | ReadonlyJSONObject;\n\n/** Like {@link JSONObject} but deeply readonly */\nexport type ReadonlyJSONObject = {\n readonly [key: string]: ReadonlyJSONValue | undefined;\n};\n\n/**\n * Checks deep equality of two JSON value with (almost) same semantics as\n * `JSON.stringify`. The only difference is that with `JSON.stringify` the\n * ordering of the properties in an object/map/dictionary matters. In\n * {@link deepEqual} the following two values are consider equal, even though the\n * strings JSON.stringify would produce is different:\n *\n * ```js\n * assert(deepEqual(t({a: 1, b: 2}, {b: 2, a: 1}))\n * ```\n */\nexport function deepEqual(\n a: ReadonlyJSONValue | undefined,\n b: ReadonlyJSONValue | undefined,\n): boolean {\n if (a === b) {\n return true;\n }\n\n if (typeof a !== typeof b) {\n return false;\n }\n\n switch (typeof a) {\n case 'boolean':\n case 'number':\n case 'string':\n return false;\n }\n\n // a cannot be undefined here because either a and b are undefined or their\n // types are different.\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n a = a!;\n\n // 'object'\n if (Array.isArray(a)) {\n if (!Array.isArray(b)) {\n return false;\n }\n if (a.length !== b.length) {\n return false;\n }\n for (let i = 0; i < a.length; i++) {\n if (!deepEqual(a[i], b[i])) {\n return false;\n }\n }\n return true;\n }\n\n if (a === null || b === null) {\n return false;\n }\n\n if (Array.isArray(b)) {\n return false;\n }\n\n // We know a and b are objects here but type inference is not smart enough.\n a = a as ReadonlyJSONObject;\n b = b as ReadonlyJSONObject;\n\n // We use for-in loops instead of for of Object.keys() to make sure deepEquals\n // does not allocate any objects.\n\n let aSize = 0;\n for (const key in a) {\n if (hasOwn(a, key)) {\n if (!deepEqual(a[key], b[key])) {\n return false;\n }\n aSize++;\n }\n }\n\n let bSize = 0;\n for (const key in b) {\n if (hasOwn(b, key)) {\n bSize++;\n }\n }\n\n return aSize === bSize;\n}\n\nexport function assertJSONValue(v: unknown): asserts v is JSONValue {\n if (skipAssertJSONValue) {\n return;\n }\n switch (typeof v) {\n case 'boolean':\n case 'number':\n case 'string':\n return;\n case 'object':\n if (v === null) {\n return;\n }\n if (Array.isArray(v)) {\n return assertJSONArray(v);\n }\n return assertObjectIsJSONObject(v as Record<string, unknown>);\n }\n throwInvalidType(v, 'JSON value');\n}\n\nexport function assertJSONObject(v: unknown): asserts v is JSONObject {\n assertObject(v);\n assertObjectIsJSONObject(v);\n}\n\nfunction assertObjectIsJSONObject(\n v: Record<string, unknown>,\n): asserts v is JSONObject {\n for (const k in v) {\n if (hasOwn(v, k)) {\n const value = v[k];\n if (value !== undefined) {\n assertJSONValue(value);\n }\n }\n }\n}\n\nfunction assertJSONArray(v: unknown[]): asserts v is JSONValue[] {\n for (const item of v) {\n assertJSONValue(item);\n }\n}\n\ninterface Path {\n push(key: string | number): void;\n pop(): void;\n}\n\n/**\n * Checks if a value is a JSON value. 
If there is a value that is not a JSON\n * value, the path parameter is updated to the path of the invalid value.\n */\nexport function isJSONValue(v: unknown, path: Path): v is JSONValue {\n switch (typeof v) {\n case 'boolean':\n case 'number':\n case 'string':\n return true;\n case 'object':\n if (v === null) {\n return true;\n }\n if (Array.isArray(v)) {\n return isJSONArray(v, path);\n }\n return objectIsJSONObject(v as Record<string, unknown>, path);\n }\n return false;\n}\n\nexport function isJSONObject(v: unknown, path: Path): v is JSONObject {\n if (typeof v !== 'object' || v === null) {\n return false;\n }\n return objectIsJSONObject(v as Record<string, unknown>, path);\n}\n\nfunction objectIsJSONObject(\n v: Record<string, unknown>,\n path: Path,\n): v is JSONObject {\n for (const k in v) {\n if (hasOwn(v, k)) {\n path.push(k);\n const value = v[k];\n if (value !== undefined && !isJSONValue(value, path)) {\n return false;\n }\n path.pop();\n }\n }\n return true;\n}\n\nfunction isJSONArray(v: unknown[], path: Path): v is JSONValue[] {\n for (let i = 0; i < v.length; i++) {\n path.push(i);\n if (!isJSONValue(v[i], path)) {\n return false;\n }\n path.pop();\n }\n return true;\n}\n\n/** Basic deep readonly type. It works for {@link JSONValue} types. */\nexport type DeepReadonly<T> = T extends\n | null\n | boolean\n | string\n | number\n | undefined\n ? T\n : {readonly [K in keyof T]: DeepReadonly<T[K]>};\n", "export function randomUint64(): bigint {\n // Generate two random 32-bit unsigned integers using Math.random()\n const high = Math.floor(Math.random() * 0xffffffff); // High 32 bits\n const low = Math.floor(Math.random() * 0xffffffff); // Low 32 bits\n\n // Combine the high and low parts to form a 64-bit unsigned integer\n return (BigInt(high) << 32n) | BigInt(low);\n}\n", "import {assert} from '../../shared/src/asserts.ts';\nimport {randomUint64} from '../../shared/src/random-uint64.ts';\nimport * as valita from '../../shared/src/valita.ts';\n\nexport const STRING_LENGTH = 22;\n\n// We use an opaque type so that we can make sure that a hash is always a hash.\n// TypeScript does not have direct support but we can use a trick described\n// here:\n//\n// https://evertpot.com/opaque-ts-types/\n//\n// The basic idea is to declare a type that cannot be created. We then use\n// functions that cast a string to this type.\n//\n\n// By using declare we tell the type system that there is a unique symbol.\n// However, there is no such symbol but the type system does not care.\ndeclare const hashTag: unique symbol;\n\n/**\n * Opaque type representing a hash. The only way to create one is using `parse`\n * or `hashOf` (except for static unsafe cast of course).\n */\nexport type Hash = string & {[hashTag]: true};\n\n// We are no longer using hashes but due to legacy reason we still refer to\n// them as hashes. 
We use UUID and counters instead.\nconst hashRe = /^[0-9a-v-]+$/;\n\nexport function parse(s: string): Hash {\n assertHash(s);\n return s;\n}\n\nconst emptyUUID = '0'.repeat(STRING_LENGTH);\nexport const emptyHash = emptyUUID as unknown as Hash;\n\n/**\n * Creates a function that generates random hashes.\n */\nexport const newRandomHash = makeNewRandomHashFunctionInternal();\n\n/**\n * Creates a function that generates UUID hashes for tests.\n */\nexport function makeNewFakeHashFunction(hashPrefix = 'fake'): () => Hash {\n assert(\n /^[0-9a-v]{0,8}$/.test(hashPrefix),\n `Invalid hash prefix: ${hashPrefix}`,\n );\n let i = 0;\n return () => {\n const count = String(i++);\n return (hashPrefix +\n '0'.repeat(STRING_LENGTH - hashPrefix.length - count.length) +\n count) as Hash;\n };\n}\n\nfunction toStringAndSlice(n: number | bigint, len: number): string {\n return n.toString(32).slice(-len).padStart(len, '0');\n}\n\n/**\n * This creates an ID that looks like `<RANDOM><COUNTER>`. The random part is\n * a random number encoded with base 32 and the length is 12 characters. The\n * is 10 characters long and encoded as base 32. The total length is 22 characters.\n *\n * Do the math: https://devina.io/collision-calculator\n */\nfunction makeNewRandomHashFunctionInternal(): () => Hash {\n let base = '';\n let i = 0;\n\n return () => {\n if (!base) {\n // This needs to be lazy because the cloudflare worker environment will\n // throw an error if crypto.getRandomValues is used statically. Specifically:\n // Error: Some functionality, such as asynchronous I/O, timeouts, and\n // generating random values, can only be performed while handling a\n // request.\n base = toStringAndSlice(randomUint64(), 12);\n }\n const tail = toStringAndSlice(i++, 10);\n return (base + tail) as Hash;\n };\n}\n\n/**\n * Generates a fake hash useful for testing.\n */\nexport function fakeHash(word: string | number): Hash {\n if (typeof word === 'number') {\n word = String(word);\n }\n return ('fake' + '0'.repeat(STRING_LENGTH - 4 - word.length) + word) as Hash;\n}\n\nexport function isHash(value: unknown): value is Hash {\n return typeof value === 'string' && hashRe.test(value);\n}\n\nexport function assertHash(value: unknown): asserts value is Hash {\n valita.assert(value, hashSchema);\n}\n\nexport const hashSchema = valita.string().assert(isHash, 'Invalid hash');\n", "import {hasOwn} from '../../shared/src/has-own.ts';\nimport type {ReadonlyJSONObject} from '../../shared/src/json.ts';\n\nconst SIZE_TAG = 1;\nconst SIZE_INT32 = 4;\nconst SIZE_SMI = 5;\nconst SIZE_DOUBLE = 8;\n\n/**\n * Gives a size of a value. The size is modelled after the size used by\n * Chromium/V8's structuredClone algorithm. It does not match exactly so the\n * size is just an approximation.\n * https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/value-serializer.cc;l=102;drc=f0b6f7d12ea47ad7c08fb554f678c1e73801ca36;bpv=1;bpt=1\n * For example we follow JSC/Mozilla for ints and skip the varint encoding.\n *\n * Mozilla does things similarly. 
Main difference is that there is no varint\n * encoding and every value uses multiples of 64bits\n * https://searchfox.org/mozilla-central/source/js/src/vm/StructuredClone.cpp#94\n *\n * And JSC:\n * https://github.com/WebKit/WebKit/blob/main/Source/WebCore/bindings/js/SerializedScriptValue.cpp#L356\n * - Use 1 byte tag\n * - Numbers are either stored as Int32 or Float64\n */\nexport function getSizeOfValue(value: unknown): number {\n switch (typeof value) {\n case 'string':\n // Assumes all strings are one byte strings. V8 writes OneByteString and\n // TwoByteString. We could check the string but it would require iterating\n // over all the characters.\n return SIZE_TAG + SIZE_INT32 + value.length;\n case 'number':\n if (isSmi(value)) {\n if (value <= -(2 ** 30) || value >= 2 ** 30 - 1) {\n return SIZE_TAG + SIZE_SMI;\n }\n return SIZE_TAG + SIZE_INT32;\n }\n return SIZE_TAG + SIZE_DOUBLE;\n case 'boolean':\n return SIZE_TAG;\n case 'object':\n if (value === null) {\n return SIZE_TAG;\n }\n\n if (Array.isArray(value)) {\n let sum = 2 * SIZE_TAG + SIZE_INT32;\n for (const element of value) {\n sum += getSizeOfValue(element);\n }\n return sum;\n }\n\n {\n const val = value as ReadonlyJSONObject;\n let sum: number = 2 * SIZE_TAG + SIZE_INT32;\n for (const k in val) {\n if (hasOwn(val, k)) {\n // Skip undefined values. undefined values in an object gets\n // stripped if we round trip through JSON.stringif which is what we\n // use when syncing.\n const propertyValue = val[k];\n if (propertyValue !== undefined) {\n sum += getSizeOfValue(k) + getSizeOfValue(propertyValue);\n }\n }\n }\n return sum;\n }\n }\n\n throw new Error(`Invalid value. type: ${typeof value}, value: ${value}`);\n}\n\nfunction isSmi(value: number): boolean {\n return value === (value | 0);\n}\n\nconst entryFixed = 2 * SIZE_TAG + SIZE_INT32 + SIZE_TAG + SIZE_INT32;\n\nexport function getSizeOfEntry<K, V>(key: K, value: V): number {\n // Entries are stored as [key, value, sizeOfEntry]\n return entryFixed + getSizeOfValue(key) + getSizeOfValue(value);\n}\n", "import {compareUTF8} from 'compare-utf8';\nimport {\n assert,\n assertArray,\n assertNumber,\n assertString,\n} from '../../../shared/src/asserts.ts';\nimport {binarySearch as binarySearchWithFunc} from '../../../shared/src/binary-search.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {joinIterables} from '../../../shared/src/iterables.ts';\nimport {\n type JSONValue,\n type ReadonlyJSONValue,\n assertJSONValue,\n} from '../../../shared/src/json.ts';\nimport {skipBTreeNodeAsserts} from '../config.ts';\nimport type {IndexKey} from '../db/index.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport {\n type FrozenJSONValue,\n type FrozenTag,\n assertDeepFrozen,\n deepFreeze,\n} from '../frozen-json.ts';\nimport {type Hash, emptyHash, newRandomHash} from '../hash.ts';\nimport type {BTreeRead} from './read.ts';\nimport type {BTreeWrite} from './write.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport type Entry<V> = readonly [key: string, value: V, sizeOfEntry: number];\n\nexport const NODE_LEVEL = 0;\nexport const NODE_ENTRIES = 1;\n\n/**\n * The type of B+Tree node chunk data\n */\ntype BaseNode<V> = FrozenTag<\n readonly [level: number, entries: ReadonlyArray<Entry<V>>]\n>;\nexport type InternalNode = BaseNode<Hash>;\n\nexport type DataNode = BaseNode<FrozenJSONValue>;\n\nexport function makeNodeChunkData<V>(\n level: number,\n entries: ReadonlyArray<Entry<V>>,\n formatVersion: FormatVersion,\n): BaseNode<V> {\n 
return deepFreeze([\n level,\n (formatVersion >= FormatVersion.V7\n ? entries\n : entries.map(e => e.slice(0, 2))) as readonly ReadonlyJSONValue[],\n ]) as BaseNode<V>;\n}\n\nexport type Node = DataNode | InternalNode;\n\n/**\n * Describes the changes that happened to Replicache after a\n * {@link WriteTransaction} was committed.\n *\n * @experimental This type is experimental and may change in the future.\n */\nexport type Diff = IndexDiff | NoIndexDiff;\n\n/**\n * @experimental This type is experimental and may change in the future.\n */\nexport type IndexDiff = readonly DiffOperation<IndexKey>[];\n\n/**\n * @experimental This type is experimental and may change in the future.\n */\nexport type NoIndexDiff = readonly DiffOperation<string>[];\n\n/**\n * InternalDiff uses string keys even for the secondary index maps.\n */\nexport type InternalDiff = readonly InternalDiffOperation[];\n\nexport type DiffOperationAdd<Key, Value = ReadonlyJSONValue> = {\n readonly op: 'add';\n readonly key: Key;\n readonly newValue: Value;\n};\n\nexport type DiffOperationDel<Key, Value = ReadonlyJSONValue> = {\n readonly op: 'del';\n readonly key: Key;\n readonly oldValue: Value;\n};\n\nexport type DiffOperationChange<Key, Value = ReadonlyJSONValue> = {\n readonly op: 'change';\n readonly key: Key;\n readonly oldValue: Value;\n readonly newValue: Value;\n};\n\n/**\n * The individual parts describing the changes that happened to the Replicache\n * data. There are three different kinds of operations:\n * - `add`: A new entry was added.\n * - `del`: An entry was deleted.\n * - `change`: An entry was changed.\n *\n * @experimental This type is experimental and may change in the future.\n */\nexport type DiffOperation<Key> =\n | DiffOperationAdd<Key>\n | DiffOperationDel<Key>\n | DiffOperationChange<Key>;\n\n// Duplicated with DiffOperation to make the docs less confusing.\nexport type InternalDiffOperation<Key = string, Value = FrozenJSONValue> =\n | DiffOperationAdd<Key, Value>\n | DiffOperationDel<Key, Value>\n | DiffOperationChange<Key, Value>;\n\n/**\n * Finds the leaf where a key is (if present) or where it should go if not\n * present.\n */\nexport async function findLeaf(\n key: string,\n hash: Hash,\n source: BTreeRead,\n expectedRootHash: Hash,\n): Promise<DataNodeImpl> {\n const node = await source.getNode(hash);\n // The root changed. 
Try again\n if (expectedRootHash !== source.rootHash) {\n return findLeaf(key, source.rootHash, source, source.rootHash);\n }\n if (isDataNodeImpl(node)) {\n return node;\n }\n const {entries} = node;\n let i = binarySearch(key, entries);\n if (i === entries.length) {\n i--;\n }\n const entry = entries[i];\n return findLeaf(key, entry[1], source, expectedRootHash);\n}\n\ntype BinarySearchEntries = readonly Entry<unknown>[];\n\n/**\n * Does a binary search over entries\n *\n * If the key found then the return value is the index it was found at.\n *\n * If the key was *not* found then the return value is the index where it should\n * be inserted at\n */\nexport function binarySearch(\n key: string,\n entries: BinarySearchEntries,\n): number {\n return binarySearchWithFunc(entries.length, i =>\n compareUTF8(key, entries[i][0]),\n );\n}\n\nexport function binarySearchFound(\n i: number,\n entries: BinarySearchEntries,\n key: string,\n): boolean {\n return i !== entries.length && entries[i][0] === key;\n}\n\nexport function parseBTreeNode(\n v: unknown,\n formatVersion: FormatVersion,\n getSizeOfEntry: <K, V>(key: K, value: V) => number,\n): InternalNode | DataNode {\n if (skipBTreeNodeAsserts && formatVersion >= FormatVersion.V7) {\n return v as InternalNode | DataNode;\n }\n\n assertArray(v);\n assertDeepFrozen(v);\n // Be relaxed about what we accept.\n assert(v.length >= 2);\n const [level, entries] = v;\n assertNumber(level);\n assertArray(entries);\n\n const f = level > 0 ? assertString : assertJSONValue;\n\n // For V7 we do not need to change the entries. Just assert that they are correct.\n if (formatVersion >= FormatVersion.V7) {\n for (const e of entries) {\n assertEntry(e, f);\n }\n return v as unknown as InternalNode | DataNode;\n }\n\n const newEntries = entries.map(e => convertNonV7Entry(e, f, getSizeOfEntry));\n return [level, newEntries] as unknown as InternalNode | DataNode;\n}\n\nfunction assertEntry(\n entry: unknown,\n f:\n | ((v: unknown) => asserts v is Hash)\n | ((v: unknown) => asserts v is JSONValue),\n): asserts entry is Entry<Hash | JSONValue> {\n assertArray(entry);\n // Be relaxed about what we accept.\n assert(entry.length >= 3);\n assertString(entry[0]);\n f(entry[1]);\n assertNumber(entry[2]);\n}\n\n/**\n * Converts an entry that was from a format version before V7 to the format\n * wanted by V7.\n */\nfunction convertNonV7Entry(\n entry: unknown,\n f:\n | ((v: unknown) => asserts v is Hash)\n | ((v: unknown) => asserts v is JSONValue),\n getSizeOfEntry: <K, V>(key: K, value: V) => number,\n): Entry<Hash | JSONValue> {\n assertArray(entry);\n assert(entry.length >= 2);\n assertString(entry[0]);\n f(entry[1]);\n const entrySize = getSizeOfEntry(entry[0], entry[1]);\n return [entry[0], entry[1], entrySize] as Entry<Hash | JSONValue>;\n}\n\nexport function isInternalNode(node: Node): node is InternalNode {\n return node[NODE_LEVEL] > 0;\n}\n\nabstract class NodeImpl<Value> {\n entries: Array<Entry<Value>>;\n hash: Hash;\n abstract readonly level: number;\n readonly isMutable: boolean;\n\n #childNodeSize = -1;\n\n constructor(entries: Array<Entry<Value>>, hash: Hash, isMutable: boolean) {\n this.entries = entries;\n this.hash = hash;\n this.isMutable = isMutable;\n }\n\n abstract set(\n key: string,\n value: FrozenJSONValue,\n entrySize: number,\n tree: BTreeWrite,\n ): Promise<NodeImpl<Value>>;\n\n abstract del(\n key: string,\n tree: BTreeWrite,\n ): Promise<NodeImpl<Value> | DataNodeImpl>;\n\n maxKey(): string {\n return this.entries[this.entries.length - 1][0];\n 
}\n\n getChildNodeSize(tree: BTreeRead): number {\n if (this.#childNodeSize !== -1) {\n return this.#childNodeSize;\n }\n\n let sum = tree.chunkHeaderSize;\n for (const entry of this.entries) {\n sum += entry[2];\n }\n return (this.#childNodeSize = sum);\n }\n\n protected _updateNode(tree: BTreeWrite) {\n this.#childNodeSize = -1;\n tree.updateNode(\n this as NodeImpl<unknown> as DataNodeImpl | InternalNodeImpl,\n );\n }\n}\n\nexport function toChunkData<V>(\n node: NodeImpl<V>,\n formatVersion: FormatVersion,\n): BaseNode<V> {\n return makeNodeChunkData(node.level, node.entries, formatVersion);\n}\n\nexport class DataNodeImpl extends NodeImpl<FrozenJSONValue> {\n readonly level = 0;\n\n set(\n key: string,\n value: FrozenJSONValue,\n entrySize: number,\n tree: BTreeWrite,\n ): Promise<DataNodeImpl> {\n let deleteCount: number;\n const i = binarySearch(key, this.entries);\n if (!binarySearchFound(i, this.entries, key)) {\n // Not found, insert.\n deleteCount = 0;\n } else {\n deleteCount = 1;\n }\n\n return Promise.resolve(\n this.#splice(tree, i, deleteCount, [key, value, entrySize]),\n );\n }\n\n #splice(\n tree: BTreeWrite,\n start: number,\n deleteCount: number,\n ...items: Entry<FrozenJSONValue>[]\n ): DataNodeImpl {\n if (this.isMutable) {\n this.entries.splice(start, deleteCount, ...items);\n this._updateNode(tree);\n return this;\n }\n\n const entries = readonlySplice(this.entries, start, deleteCount, ...items);\n return tree.newDataNodeImpl(entries);\n }\n\n del(key: string, tree: BTreeWrite): Promise<DataNodeImpl> {\n const i = binarySearch(key, this.entries);\n if (!binarySearchFound(i, this.entries, key)) {\n // Not found. Return this without changes.\n return Promise.resolve(this);\n }\n\n // Found. Create new node or mutate existing one.\n return Promise.resolve(this.#splice(tree, i, 1));\n }\n\n async *keys(_tree: BTreeRead): AsyncGenerator<string, void> {\n for (const entry of this.entries) {\n yield entry[0];\n }\n }\n\n async *entriesIter(\n _tree: BTreeRead,\n ): AsyncGenerator<Entry<FrozenJSONValue>, void> {\n for (const entry of this.entries) {\n yield entry;\n }\n }\n}\n\nfunction readonlySplice<T>(\n array: ReadonlyArray<T>,\n start: number,\n deleteCount: number,\n ...items: T[]\n): T[] {\n const arr = array.slice(0, start);\n for (let i = 0; i < items.length; i++) {\n arr.push(items[i]);\n }\n for (let i = start + deleteCount; i < array.length; i++) {\n arr.push(array[i]);\n }\n return arr;\n}\n\nexport class InternalNodeImpl extends NodeImpl<Hash> {\n readonly level: number;\n\n constructor(\n entries: Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n ) {\n super(entries, hash, isMutable);\n this.level = level;\n }\n\n async set(\n key: string,\n value: FrozenJSONValue,\n entrySize: number,\n tree: BTreeWrite,\n ): Promise<InternalNodeImpl> {\n let i = binarySearch(key, this.entries);\n if (i === this.entries.length) {\n // We are going to insert into last (right most) leaf.\n i--;\n }\n\n const childHash = this.entries[i][1];\n const oldChildNode = await tree.getNode(childHash);\n\n const childNode = await oldChildNode.set(key, value, entrySize, tree);\n\n const childNodeSize = childNode.getChildNodeSize(tree);\n if (childNodeSize > tree.maxSize || childNodeSize < tree.minSize) {\n return this.#mergeAndPartition(tree, i, childNode);\n }\n\n const newEntry = createNewInternalEntryForNode(\n childNode,\n tree.getEntrySize,\n );\n return this.#replaceChild(tree, i, newEntry);\n }\n\n /**\n * This merges the child node entries with previous 
or next sibling and then\n * partitions the merged entries.\n */\n async #mergeAndPartition(\n tree: BTreeWrite,\n i: number,\n childNode: DataNodeImpl | InternalNodeImpl,\n ): Promise<InternalNodeImpl> {\n const level = this.level - 1;\n const thisEntries = this.entries;\n\n type IterableHashEntries = Iterable<Entry<Hash>>;\n\n let values: IterableHashEntries;\n let startIndex: number;\n let removeCount: number;\n if (i > 0) {\n const hash = thisEntries[i - 1][1];\n const previousSibling = await tree.getNode(hash);\n values = joinIterables(\n previousSibling.entries as IterableHashEntries,\n childNode.entries as IterableHashEntries,\n );\n startIndex = i - 1;\n removeCount = 2;\n } else if (i < thisEntries.length - 1) {\n const hash = thisEntries[i + 1][1];\n const nextSibling = await tree.getNode(hash);\n values = joinIterables(\n childNode.entries as IterableHashEntries,\n nextSibling.entries as IterableHashEntries,\n );\n startIndex = i;\n removeCount = 2;\n } else {\n values = childNode.entries as IterableHashEntries;\n startIndex = i;\n removeCount = 1;\n }\n\n const partitions = partition(\n values,\n value => value[2],\n tree.minSize - tree.chunkHeaderSize,\n tree.maxSize - tree.chunkHeaderSize,\n );\n\n // TODO: There are cases where we can reuse the old nodes. Creating new ones\n // means more memory churn but also more writes to the underlying KV store.\n const newEntries: Entry<Hash>[] = [];\n for (const entries of partitions) {\n const node = tree.newNodeImpl(entries, level);\n const newHashEntry = createNewInternalEntryForNode(\n node,\n tree.getEntrySize,\n );\n newEntries.push(newHashEntry);\n }\n\n if (this.isMutable) {\n this.entries.splice(startIndex, removeCount, ...newEntries);\n this._updateNode(tree);\n return this;\n }\n\n const entries = readonlySplice(\n thisEntries,\n startIndex,\n removeCount,\n ...newEntries,\n );\n\n return tree.newInternalNodeImpl(entries, this.level);\n }\n\n #replaceChild(\n tree: BTreeWrite,\n index: number,\n newEntry: Entry<Hash>,\n ): InternalNodeImpl {\n if (this.isMutable) {\n this.entries.splice(index, 1, newEntry);\n this._updateNode(tree);\n return this;\n }\n const entries = readonlySplice(this.entries, index, 1, newEntry);\n return tree.newInternalNodeImpl(entries, this.level);\n }\n\n async del(\n key: string,\n tree: BTreeWrite,\n ): Promise<InternalNodeImpl | DataNodeImpl> {\n const i = binarySearch(key, this.entries);\n if (i === this.entries.length) {\n // Key is larger than maxKey of rightmost entry so it is not present.\n return this;\n }\n\n const childHash = this.entries[i][1];\n const oldChildNode = await tree.getNode(childHash);\n const oldHash = oldChildNode.hash;\n\n const childNode = await oldChildNode.del(key, tree);\n if (childNode.hash === oldHash) {\n // Not changed so not found.\n return this;\n }\n\n if (childNode.entries.length === 0) {\n // Subtree is now empty. Remove internal node.\n const entries = readonlySplice(this.entries, i, 1);\n return tree.newInternalNodeImpl(entries, this.level);\n }\n\n if (i === 0 && this.entries.length === 1) {\n // There was only one node at this level and it was removed. 
We can return\n // the modified subtree.\n return childNode;\n }\n\n // The child node is still a good size.\n if (childNode.getChildNodeSize(tree) > tree.minSize) {\n // No merging needed.\n const entry = createNewInternalEntryForNode(childNode, tree.getEntrySize);\n return this.#replaceChild(tree, i, entry);\n }\n\n // Child node size is too small.\n return this.#mergeAndPartition(tree, i, childNode);\n }\n\n async *keys(tree: BTreeRead): AsyncGenerator<string, void> {\n for (const entry of this.entries) {\n const childNode = await tree.getNode(entry[1]);\n yield* childNode.keys(tree);\n }\n }\n\n async *entriesIter(\n tree: BTreeRead,\n ): AsyncGenerator<Entry<FrozenJSONValue>, void> {\n for (const entry of this.entries) {\n const childNode = await tree.getNode(entry[1]);\n yield* childNode.entriesIter(tree);\n }\n }\n\n getChildren(\n start: number,\n length: number,\n tree: BTreeRead,\n ): Promise<Array<InternalNodeImpl | DataNodeImpl>> {\n const ps: Promise<DataNodeImpl | InternalNodeImpl>[] = [];\n for (let i = start; i < length && i < this.entries.length; i++) {\n ps.push(tree.getNode(this.entries[i][1]));\n }\n return Promise.all(ps);\n }\n\n async getCompositeChildren(\n start: number,\n length: number,\n tree: BTreeRead,\n ): Promise<InternalNodeImpl | DataNodeImpl> {\n const {level} = this;\n\n if (length === 0) {\n return new InternalNodeImpl([], newRandomHash(), level - 1, true);\n }\n\n const output = await this.getChildren(start, start + length, tree);\n\n if (level > 1) {\n const entries: Entry<Hash>[] = [];\n for (const child of output as InternalNodeImpl[]) {\n entries.push(...child.entries);\n }\n return new InternalNodeImpl(entries, newRandomHash(), level - 1, true);\n }\n\n assert(level === 1);\n const entries: Entry<FrozenJSONValue>[] = [];\n for (const child of output as DataNodeImpl[]) {\n entries.push(...child.entries);\n }\n return new DataNodeImpl(entries, newRandomHash(), true);\n }\n}\n\nexport function newNodeImpl(\n entries: Array<Entry<FrozenJSONValue>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): DataNodeImpl;\nexport function newNodeImpl(\n entries: Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): InternalNodeImpl;\nexport function newNodeImpl(\n entries: Array<Entry<FrozenJSONValue>> | Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): DataNodeImpl | InternalNodeImpl;\nexport function newNodeImpl(\n entries: Array<Entry<FrozenJSONValue>> | Array<Entry<Hash>>,\n hash: Hash,\n level: number,\n isMutable: boolean,\n): DataNodeImpl | InternalNodeImpl {\n if (level === 0) {\n return new DataNodeImpl(\n entries as Entry<FrozenJSONValue>[],\n hash,\n isMutable,\n );\n }\n return new InternalNodeImpl(entries as Entry<Hash>[], hash, level, isMutable);\n}\n\nexport function isDataNodeImpl(\n node: DataNodeImpl | InternalNodeImpl,\n): node is DataNodeImpl {\n return node.level === 0;\n}\n\nexport function partition<T>(\n values: Iterable<T>,\n // This is the size of each Entry\n getSizeOfEntry: (v: T) => number,\n min: number,\n max: number,\n): T[][] {\n const partitions: T[][] = [];\n const sizes: number[] = [];\n let sum = 0;\n let accum: T[] = [];\n for (const value of values) {\n const size = getSizeOfEntry(value);\n if (size >= max) {\n if (accum.length > 0) {\n partitions.push(accum);\n sizes.push(sum);\n }\n partitions.push([value]);\n sizes.push(size);\n sum = 0;\n accum = [];\n } else if (sum + size >= min) {\n accum.push(value);\n partitions.push(accum);\n sizes.push(sum + size);\n sum = 
0;\n accum = [];\n } else {\n sum += size;\n accum.push(value);\n }\n }\n\n if (sum > 0) {\n if (sizes.length > 0 && sum + sizes[sizes.length - 1] <= max) {\n partitions[partitions.length - 1].push(...accum);\n } else {\n partitions.push(accum);\n }\n }\n\n return partitions;\n}\n\nexport const emptyDataNode = makeNodeChunkData<ReadonlyJSONValue>(\n 0,\n [],\n FormatVersion.Latest,\n);\nexport const emptyDataNodeImpl = new DataNodeImpl([], emptyHash, false);\n\nexport function createNewInternalEntryForNode(\n node: NodeImpl<unknown>,\n getSizeOfEntry: <K, V>(k: K, v: V) => number,\n): [string, Hash, number] {\n const key = node.maxKey();\n const value = node.hash;\n const size = getSizeOfEntry(key, value);\n return [key, value, size];\n}\n", "/**\n * This is a binary search that returns the index of the first element in the\n * array that is greater than or equal to the given value.\n *\n * Typical usage:\n *\n * ```\n * const haystack = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];\n * const needle = 3;\n * const index = binarySearch(haystack.length, i => needle - haystack[i]);\n * const found = index < haystack.length && haystack[index] === needle;\n * ```\n */\nexport function binarySearch(high: number, compare: (i: number) => number) {\n let low = 0;\n while (low < high) {\n const mid = low + ((high - low) >> 1);\n const i = compare(mid);\n if (i === 0) {\n return mid;\n }\n if (i > 0) {\n low = mid + 1;\n } else {\n high = mid;\n }\n }\n return low;\n}\n", "import {assert} from './asserts.ts';\n\nexport function* joinIterables<T>(...iters: Iterable<T>[]) {\n for (const iter of iters) {\n yield* iter;\n }\n}\n\nfunction* filterIter<T>(\n iter: Iterable<T>,\n p: (t: T, index: number) => boolean,\n): Iterable<T> {\n let index = 0;\n for (const t of iter) {\n if (p(t, index++)) {\n yield t;\n }\n }\n}\n\nfunction* mapIter<T, U>(\n iter: Iterable<T>,\n f: (t: T, index: number) => U,\n): Iterable<U> {\n let index = 0;\n for (const t of iter) {\n yield f(t, index++);\n }\n}\n\nexport function first<T>(stream: Iterable<T>): T | undefined {\n const it = stream[Symbol.iterator]();\n const {value} = it.next();\n it.return?.();\n return value;\n}\n\nexport function* once<T>(stream: Iterable<T>): Iterable<T> {\n const it = stream[Symbol.iterator]();\n const {value} = it.next();\n if (value !== undefined) {\n yield value;\n }\n it.return?.();\n}\n\n// TODO(arv): Use ES2024 Iterable.from when available\n// https://github.com/tc39/proposal-iterator-helpers\n\nclass IterWrapper<T> implements Iterable<T> {\n iter: Iterable<T>;\n constructor(iter: Iterable<T>) {\n this.iter = iter;\n }\n\n [Symbol.iterator]() {\n return this.iter[Symbol.iterator]();\n }\n\n map<U>(f: (t: T, index: number) => U): IterWrapper<U> {\n return new IterWrapper(mapIter(this.iter, f));\n }\n\n filter(p: (t: T, index: number) => boolean): IterWrapper<T> {\n return new IterWrapper(filterIter(this.iter, p));\n }\n}\n\nexport function wrapIterable<T>(iter: Iterable<T>): IterWrapper<T> {\n return new IterWrapper(iter);\n}\n\nexport function* mergeIterables<T>(\n iterables: Iterable<T>[],\n comparator: (l: T, r: T) => number,\n distinct = false,\n): IterableIterator<T> {\n const iterators = iterables.map(i => i[Symbol.iterator]());\n try {\n const current = iterators.map(i => i.next());\n let lastYielded: T | undefined;\n while (current.some(c => !c.done)) {\n const min = current.reduce(\n (acc: [T, number] | undefined, c, i): [T, number] | undefined => {\n if (c.done) {\n return acc;\n }\n if (acc === undefined || comparator(c.value, acc[0]) < 0) 
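
The `partition` helper above is what keeps B-tree chunks within their size budget: it greedily accumulates entries until a group crosses `min`, gives any entry of at least `max` bytes a group of its own, and folds a small trailing remainder into the previous group when the result still fits under `max`. A minimal sketch of the resulting grouping, with an illustrative import path and plain numbers standing in for sized entries:

```ts
// Illustrative path; inside the package this lives in replicache's btree module.
import {partition} from './replicache/src/btree/node.ts';

// Each "entry" is just a number, and its size is its own value.
const chunks = partition([2, 2, 2, 9, 4, 1], n => n, /* min */ 5, /* max */ 10);

// 2 + 2 + 2 = 6 crosses min -> [2, 2, 2]
// 9 alone crosses min       -> [9]
// 4 + 1 = 5 crosses min     -> [4, 1]
console.log(chunks); // [[2, 2, 2], [9], [4, 1]]
```
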
{\n return [c.value, i];\n }\n return acc;\n },\n undefined,\n );\n\n assert(min !== undefined, 'min is undefined');\n current[min[1]] = iterators[min[1]].next();\n if (\n lastYielded !== undefined &&\n distinct &&\n comparator(lastYielded, min[0]) === 0\n ) {\n continue;\n }\n lastYielded = min[0];\n yield min[0];\n }\n } finally {\n for (const it of iterators) {\n it.return?.();\n }\n }\n}\n", "import {deepEqual, type ReadonlyJSONValue} from '../../../shared/src/json.ts';\n\nexport type Splice = [at: number, removed: number, added: number, from: number];\n\nconst SPLICE_UNASSIGNED = -1;\nexport const SPLICE_AT = 0;\nexport const SPLICE_REMOVED = 1;\nexport const SPLICE_ADDED = 2;\nexport const SPLICE_FROM = 3;\n\nconst KEY = 0;\nconst VALUE = 1;\n\ntype Entry<V> = readonly [key: string, value: V, ...rest: unknown[]];\n\nexport function* computeSplices<T>(\n previous: readonly Entry<T>[],\n current: readonly Entry<T>[],\n): Generator<Splice, void> {\n let previousIndex = 0;\n let currentIndex = 0;\n let splice: Splice | undefined;\n\n function ensureAssigned(splice: Splice, index: number): void {\n if (splice[SPLICE_FROM] === SPLICE_UNASSIGNED) {\n splice[SPLICE_FROM] = index;\n }\n }\n\n function newSplice(): Splice {\n return [previousIndex, 0, 0, SPLICE_UNASSIGNED];\n }\n\n while (previousIndex < previous.length && currentIndex < current.length) {\n if (previous[previousIndex][KEY] === current[currentIndex][KEY]) {\n if (\n deepEqual(\n // These are really Hash | InternalValue\n previous[previousIndex][VALUE] as ReadonlyJSONValue,\n current[currentIndex][VALUE] as ReadonlyJSONValue,\n )\n ) {\n if (splice) {\n ensureAssigned(splice, 0);\n yield splice;\n splice = undefined;\n }\n } else {\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_ADDED]++;\n splice[SPLICE_REMOVED]++;\n ensureAssigned(splice, currentIndex);\n }\n previousIndex++;\n currentIndex++;\n } else if (previous[previousIndex][KEY] < current[currentIndex][KEY]) {\n // previous was removed\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_REMOVED]++;\n\n previousIndex++;\n } else {\n // current was added\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_ADDED]++;\n ensureAssigned(splice, currentIndex);\n\n currentIndex++;\n }\n }\n\n if (currentIndex < current.length) {\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_ADDED] += current.length - currentIndex;\n ensureAssigned(splice, currentIndex);\n }\n\n if (previousIndex < previous.length) {\n if (!splice) {\n splice = newSplice();\n }\n splice[SPLICE_REMOVED] += previous.length - previousIndex;\n }\n\n if (splice) {\n ensureAssigned(splice, 0);\n yield splice;\n }\n}\n", "import type {Enum} from '../../../shared/src/enum.ts';\nimport {deepEqual} from '../../../shared/src/json.ts';\nimport type {Read} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash} from '../hash.ts';\nimport {getSizeOfEntry} from '../size-of-value.ts';\nimport {\n DataNodeImpl,\n type Entry,\n type InternalDiff,\n type InternalDiffOperation,\n InternalNodeImpl,\n NODE_ENTRIES,\n NODE_LEVEL,\n binarySearch,\n binarySearchFound,\n emptyDataNodeImpl,\n findLeaf,\n isDataNodeImpl,\n newNodeImpl,\n parseBTreeNode,\n} from './node.ts';\nimport {\n SPLICE_ADDED,\n SPLICE_AT,\n SPLICE_FROM,\n SPLICE_REMOVED,\n computeSplices,\n} from './splice.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\n/**\n * The size of the header of a node. 
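
`mergeIterables` above is a k-way merge over already-sorted inputs: each step yields the comparator-minimum head, advances only that iterator, and with `distinct` set skips values equal to the last one yielded; the `finally` block closes every input iterator even on early exit. A small usage sketch (import path is illustrative):

```ts
import {mergeIterables} from './shared/src/iterables.ts'; // illustrative path

const merged = [
  ...mergeIterables(
    [
      [1, 3, 5],
      [2, 3, 6],
    ],
    (l, r) => l - r,
    true, // distinct: drop the duplicate 3
  ),
];
console.log(merged); // [1, 2, 3, 5, 6] (without distinct: [1, 2, 3, 3, 5, 6])
```
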
(If we had compile time\n * constants we would have used that).\n *\n * There is a test ensuring this is correct.\n */\nexport const NODE_HEADER_SIZE = 11;\n\nexport class BTreeRead implements AsyncIterable<Entry<FrozenJSONValue>> {\n protected readonly _cache: Map<Hash, DataNodeImpl | InternalNodeImpl> =\n new Map();\n\n protected readonly _dagRead: Read;\n protected readonly _formatVersion: FormatVersion;\n rootHash: Hash;\n readonly getEntrySize: <K, V>(k: K, v: V) => number;\n readonly chunkHeaderSize: number;\n\n constructor(\n dagRead: Read,\n formatVersion: FormatVersion,\n root: Hash = emptyHash,\n getEntrySize: <K, V>(k: K, v: V) => number = getSizeOfEntry,\n chunkHeaderSize = NODE_HEADER_SIZE,\n ) {\n this._dagRead = dagRead;\n this._formatVersion = formatVersion;\n this.rootHash = root;\n this.getEntrySize = getEntrySize;\n this.chunkHeaderSize = chunkHeaderSize;\n }\n\n async getNode(hash: Hash): Promise<DataNodeImpl | InternalNodeImpl> {\n if (hash === emptyHash) {\n return emptyDataNodeImpl;\n }\n\n const cached = this._cache.get(hash);\n if (cached) {\n return cached;\n }\n\n const chunk = await this._dagRead.mustGetChunk(hash);\n const data = parseBTreeNode(\n chunk.data,\n this._formatVersion,\n this.getEntrySize,\n );\n const impl = newNodeImpl(\n data[NODE_ENTRIES] as Entry<FrozenJSONValue>[],\n hash,\n data[NODE_LEVEL],\n false,\n );\n this._cache.set(hash, impl);\n return impl;\n }\n\n async get(key: string): Promise<FrozenJSONValue | undefined> {\n const leaf = await findLeaf(key, this.rootHash, this, this.rootHash);\n const index = binarySearch(key, leaf.entries);\n if (!binarySearchFound(index, leaf.entries, key)) {\n return undefined;\n }\n return leaf.entries[index][1];\n }\n\n async has(key: string): Promise<boolean> {\n const leaf = await findLeaf(key, this.rootHash, this, this.rootHash);\n const index = binarySearch(key, leaf.entries);\n return binarySearchFound(index, leaf.entries, key);\n }\n\n async isEmpty(): Promise<boolean> {\n const {rootHash} = this;\n const node = await this.getNode(this.rootHash);\n // The root hash has changed, so the tree has been modified.\n if (this.rootHash !== rootHash) {\n return this.isEmpty();\n }\n return node.entries.length === 0;\n }\n\n // We don't do any encoding of the key in the map, so we have no way of\n // determining from an entry.key alone whether it is a regular key or an\n // encoded IndexKey in an index map. Without encoding regular map keys the\n // caller has to deal with encoding and decoding the keys for the index map.\n scan(fromKey: string): AsyncIterableIterator<Entry<FrozenJSONValue>> {\n return scanForHash(\n this.rootHash,\n () => this.rootHash,\n this.rootHash,\n fromKey,\n async hash => {\n const cached = await this.getNode(hash);\n if (cached) {\n return [\n cached.level,\n cached.isMutable ? 
cached.entries.slice() : cached.entries,\n ];\n }\n const chunk = await this._dagRead.mustGetChunk(hash);\n return parseBTreeNode(\n chunk.data,\n this._formatVersion,\n this.getEntrySize,\n );\n },\n );\n }\n\n async *keys(): AsyncIterableIterator<string> {\n const node = await this.getNode(this.rootHash);\n yield* node.keys(this);\n }\n\n async *entries(): AsyncIterableIterator<Entry<FrozenJSONValue>> {\n const node = await this.getNode(this.rootHash);\n yield* node.entriesIter(this);\n }\n\n [Symbol.asyncIterator](): AsyncIterableIterator<Entry<FrozenJSONValue>> {\n return this.entries();\n }\n\n async *diff(last: BTreeRead): AsyncIterableIterator<InternalDiffOperation> {\n const [currentNode, lastNode] = await Promise.all([\n this.getNode(this.rootHash),\n last.getNode(last.rootHash),\n ]);\n yield* diffNodes(lastNode, currentNode, last, this);\n }\n}\n\nasync function* diffNodes(\n last: InternalNodeImpl | DataNodeImpl,\n current: InternalNodeImpl | DataNodeImpl,\n lastTree: BTreeRead,\n currentTree: BTreeRead,\n): AsyncIterableIterator<InternalDiffOperation> {\n if (last.level > current.level) {\n // merge all of last's children into a new node\n // We know last is an internal node because level > 0.\n const lastChild = (await (last as InternalNodeImpl).getCompositeChildren(\n 0,\n last.entries.length,\n lastTree,\n )) as InternalNodeImpl;\n yield* diffNodes(lastChild, current, lastTree, currentTree);\n return;\n }\n\n if (current.level > last.level) {\n // We know current is an internal node because level > 0.\n const currentChild = (await (\n current as InternalNodeImpl\n ).getCompositeChildren(\n 0,\n current.entries.length,\n currentTree,\n )) as InternalNodeImpl;\n yield* diffNodes(last, currentChild, lastTree, currentTree);\n return;\n }\n\n if (isDataNodeImpl(last) && isDataNodeImpl(current)) {\n yield* diffEntries(\n (last as DataNodeImpl).entries,\n (current as DataNodeImpl).entries,\n );\n return;\n }\n\n // Now we have two internal nodes with the same level. We compute the diff as\n // splices for the internal node entries. 
We then flatten these and call diff\n // recursively.\n const initialSplices = computeSplices(\n (last as InternalNodeImpl).entries,\n (current as InternalNodeImpl).entries,\n );\n for (const splice of initialSplices) {\n const [lastChild, currentChild] = await Promise.all([\n (last as InternalNodeImpl).getCompositeChildren(\n splice[SPLICE_AT],\n splice[SPLICE_REMOVED],\n lastTree,\n ),\n (current as InternalNodeImpl).getCompositeChildren(\n splice[SPLICE_FROM],\n splice[SPLICE_ADDED],\n currentTree,\n ),\n ]);\n yield* diffNodes(lastChild, currentChild, lastTree, currentTree);\n }\n}\n\nfunction* diffEntries(\n lastEntries: readonly Entry<FrozenJSONValue>[],\n currentEntries: readonly Entry<FrozenJSONValue>[],\n): IterableIterator<InternalDiffOperation> {\n const lastLength = lastEntries.length;\n const currentLength = currentEntries.length;\n let i = 0;\n let j = 0;\n while (i < lastLength && j < currentLength) {\n const lastKey = lastEntries[i][0];\n const currentKey = currentEntries[j][0];\n if (lastKey === currentKey) {\n if (!deepEqual(lastEntries[i][1], currentEntries[j][1])) {\n yield {\n op: 'change',\n key: lastKey,\n oldValue: lastEntries[i][1],\n newValue: currentEntries[j][1],\n };\n }\n i++;\n j++;\n } else if (lastKey < currentKey) {\n yield {\n op: 'del',\n key: lastKey,\n oldValue: lastEntries[i][1],\n };\n i++;\n } else {\n yield {\n op: 'add',\n key: currentKey,\n newValue: currentEntries[j][1],\n };\n j++;\n }\n }\n for (; i < lastLength; i++) {\n yield {\n op: 'del',\n key: lastEntries[i][0],\n oldValue: lastEntries[i][1],\n };\n }\n for (; j < currentLength; j++) {\n yield {\n op: 'add',\n key: currentEntries[j][0],\n newValue: currentEntries[j][1],\n };\n }\n}\n\n// Redefine the type here to allow the optional size in the tuple.\ntype ReadNodeResult = readonly [\n level: number,\n data: readonly Entry<FrozenJSONValue>[] | readonly Entry<Hash>[],\n];\n\ntype ReadNode = (hash: Hash) => Promise<ReadNodeResult>;\n\nasync function* scanForHash(\n expectedRootHash: Hash,\n getRootHash: () => Hash,\n hash: Hash,\n fromKey: string,\n readNode: ReadNode,\n): AsyncIterableIterator<Entry<FrozenJSONValue>> {\n if (hash === emptyHash) {\n return;\n }\n\n const data = await readNode(hash);\n const entries = data[NODE_ENTRIES];\n let i = 0;\n if (fromKey) {\n i = binarySearch(fromKey, entries);\n }\n if (data[NODE_LEVEL] > 0) {\n for (; i < entries.length; i++) {\n yield* scanForHash(\n expectedRootHash,\n getRootHash,\n (entries[i] as Entry<Hash>)[1],\n fromKey,\n readNode,\n );\n fromKey = '';\n }\n } else {\n for (; i < entries.length; i++) {\n const rootHash = getRootHash();\n // If rootHash changed then we start a new iterator from the key.\n if (expectedRootHash !== rootHash) {\n yield* scanForHash(\n rootHash,\n getRootHash,\n rootHash,\n entries[i][0],\n readNode,\n );\n return;\n }\n yield entries[i] as Entry<FrozenJSONValue>;\n }\n }\n}\n\nexport async function allEntriesAsDiff(\n map: BTreeRead,\n op: 'add' | 'del',\n): Promise<InternalDiff> {\n const diff: InternalDiffOperation[] = [];\n const make: (entry: Entry<FrozenJSONValue>) => InternalDiffOperation =\n op === 'add'\n ? 
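
`diffEntries` above is the leaf-level core of `BTreeRead.diff`: a lockstep walk over two key-sorted entry arrays that emits `change` for equal keys with different values, `del` for keys only in `last`, and `add` for keys only in `current`. Since the function is module-private, here is the same walk re-stated over plain entries (a sketch, with `===` standing in for `deepEqual`):

```ts
type Entry = readonly [key: string, value: unknown];
type DiffOp =
  | {op: 'add'; key: string; newValue: unknown}
  | {op: 'del'; key: string; oldValue: unknown}
  | {op: 'change'; key: string; oldValue: unknown; newValue: unknown};

// Lockstep merge over two key-sorted arrays, mirroring diffEntries.
function* diff(last: Entry[], current: Entry[]): Generator<DiffOp> {
  let i = 0;
  let j = 0;
  while (i < last.length && j < current.length) {
    const [lk, lv] = last[i];
    const [ck, cv] = current[j];
    if (lk === ck) {
      if (lv !== cv) {
        yield {op: 'change', key: lk, oldValue: lv, newValue: cv};
      }
      i++;
      j++;
    } else if (lk < ck) {
      yield {op: 'del', key: lk, oldValue: lv};
      i++;
    } else {
      yield {op: 'add', key: ck, newValue: cv};
      j++;
    }
  }
  for (; i < last.length; i++) {
    yield {op: 'del', key: last[i][0], oldValue: last[i][1]};
  }
  for (; j < current.length; j++) {
    yield {op: 'add', key: current[j][0], newValue: current[j][1]};
  }
}

const last: Entry[] = [['a', 1], ['b', 2], ['d', 4]];
const current: Entry[] = [['a', 1], ['b', 3], ['c', 9]];
console.log([...diff(last, current)]);
// [{op: 'change', key: 'b', ...}, {op: 'add', key: 'c', ...}, {op: 'del', key: 'd', ...}]
```
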
entry => ({\n          op: 'add',\n          key: entry[0],\n          newValue: entry[1],\n        })\n      : entry => ({\n          op: 'del',\n          key: entry[0],\n          oldValue: entry[1],\n        });\n\n  for await (const entry of map.entries()) {\n    diff.push(make(entry));\n  }\n  return diff;\n}\n", "export function stringCompare(a: string, b: string): number {\n  if (a === b) {\n    return 0;\n  }\n  if (a < b) {\n    return -1;\n  }\n  return 1;\n}\n", "import {\n  assertJSONObject,\n  type ReadonlyJSONValue,\n} from '../../shared/src/json.ts';\nimport {stringCompare} from '../../shared/src/string-compare.ts';\nimport type {FrozenJSONValue} from './frozen-json.ts';\n\n/**\n * A cookie is a value that is used to determine the order of snapshots. It\n * needs to be comparable. This can be a `string`, `number` or if you want to\n * use a more complex value, you can use an object with an `order` property. The\n * value `null` is considered to be less than any other cookie and it is used\n * for the first pull when no cookie has been set.\n *\n * The order is the natural order of numbers and strings. If one of the cookies\n * is an object then the value of the `order` property is treated as the cookie\n * when doing comparison.\n *\n * If one of the cookies is a string and the other is a number, the number is\n * first converted to a string (using `toString()`).\n */\nexport type Cookie =\n  | null\n  | string\n  | number\n  | (ReadonlyJSONValue & {readonly order: number | string});\n\nexport type FrozenCookie =\n  | null\n  | string\n  | number\n  | (FrozenJSONValue & {readonly order: number | string});\n\n/**\n * Compare two cookies.\n * `null` is considered to be less than any other cookie.\n */\nexport function compareCookies(a: Cookie, b: Cookie): number {\n  if (a === b) {\n    return 0;\n  }\n  if (a === null) {\n    return -1;\n  }\n  if (b === null) {\n    return 1;\n  }\n\n  const cva = getCompareValue(a);\n  const cvb = getCompareValue(b);\n\n  // If either a or b is a string. Compare by string.\n  if (typeof cva === 'string' || typeof cvb === 'string') {\n    return stringCompare(String(cva), String(cvb));\n  }\n\n  return cva - cvb;\n}\n\ntype NonNull<T> = T extends null ? never : T;\n\nfunction getCompareValue(cookie: NonNull<Cookie>): string | number {\n  if (typeof cookie === 'string' || typeof cookie === 'number') {\n    return cookie;\n  }\n  return cookie.order;\n}\n\nexport function assertCookie(v: unknown): asserts v is Cookie {\n  if (v === null || typeof v === 'string' || typeof v === 'number') {\n    return;\n  }\n\n  assertJSONObject(v);\n  if (typeof v.order === 'string' || typeof v.order === 'number') {\n    return;\n  }\n\n  throw new Error('Invalid cookie');\n}\n", "import {assert, assertString} from '../../../shared/src/asserts.ts';\nimport {assertDeepFrozen} from '../frozen-json.ts';\nimport type {Hash} from '../hash.ts';\n\n// By using declare we tell the type system that there is a unique symbol.\n// However, there is no such symbol but the type system does not care.\ndeclare const refsTag: unique symbol;\n\n/**\n * Opaque type representing a Refs. The reason to use an opaque type here is to\n * make sure that Refs are always sorted and have no duplicates.\n */\nexport type Refs = [] | readonly [Hash] | (readonly Hash[] & {[refsTag]: true});\n\n/**\n * Convert to a Refs when we already know it is sorted and has no duplicates.\n */\nexport function asRefs(sortedRefs: Hash[]): Refs {\n  return sortedRefs as unknown as Refs;\n}\n\n/**\n * Sorts and tags as Refs. If an Array is passed in the array is sorted in\n * place, otherwise a copy of the iterable is created. 
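
`compareCookies` above defines a total order on cookies: `null` sorts before everything, object cookies compare by their `order` property, and a mixed string/number pair falls back to string comparison, so a numeric `10` sorts before the string `'9'`. A sketch (import path is illustrative):

```ts
import {compareCookies, type Cookie} from './replicache/src/cookies.ts'; // illustrative path

const cookies: Cookie[] = ['9', 10, null, {order: 3}];
cookies.sort(compareCookies);
// null first; {order: 3} compares as 3 < 10; 10 vs '9' compares as
// strings, and '10' < '9' lexicographically.
console.log(cookies); // [null, {order: 3}, 10, '9']
```
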
This checks for duplicates.\n */\nexport function toRefs(refs: Hash[] | Set<Hash>): Refs {\n if (Array.isArray(refs)) {\n refs.sort();\n for (let i = 1; i < refs.length; i++) {\n assert(refs[i - 1] !== refs[i], 'Refs must not have duplicates');\n }\n return asRefs(refs);\n }\n\n const refsArray = [...refs];\n refsArray.sort();\n // no need to check for duplicates as Set cannot have duplicates.\n return asRefs(refsArray);\n}\n\nexport class Chunk<V = unknown> {\n readonly hash: Hash;\n readonly data: V;\n\n /**\n * Meta is an array of refs. If there are no refs we do not write a meta\n * chunk.\n */\n readonly meta: Refs;\n\n constructor(hash: Hash, data: V, refs: Refs) {\n assert(\n !(refs as unknown[]).includes(hash),\n 'Chunk cannot reference itself',\n );\n assertDeepFrozen(data);\n this.hash = hash;\n this.data = data;\n this.meta = refs;\n }\n}\n\nexport function assertRefs(v: unknown): asserts v is Refs {\n if (!Array.isArray(v)) {\n throw new Error('Refs must be an array');\n }\n if (v.length > 0) {\n assertString(v[0]);\n for (let i = 1; i < v.length; i++) {\n assertString(v[i]);\n }\n }\n}\n\nexport function createChunk<V>(\n data: V,\n refs: Refs,\n chunkHasher: ChunkHasher,\n): Chunk<V> {\n const hash = chunkHasher();\n return new Chunk(hash, data, refs);\n}\n\nexport type CreateChunk = <V>(data: V, refs: Refs) => Chunk<V>;\n\nexport type ChunkHasher = () => Hash;\n\nexport function throwChunkHasher(): Hash {\n throw new Error('unexpected call to compute chunk hash');\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport type {Hash} from '../hash.ts';\nimport type {Release} from '../with-transactions.ts';\nimport type {Chunk, Refs} from './chunk.ts';\n\nexport interface Store {\n read(): Promise<Read>;\n write(): Promise<Write>;\n close(): Promise<void>;\n}\n\ninterface GetChunk {\n getChunk(hash: Hash): Promise<Chunk | undefined>;\n}\n\nexport interface MustGetChunk {\n mustGetChunk(hash: Hash): Promise<Chunk>;\n}\n\nexport interface Read extends GetChunk, MustGetChunk, Release {\n hasChunk(hash: Hash): Promise<boolean>;\n getHead(name: string): Promise<Hash | undefined>;\n get closed(): boolean;\n}\n\nexport interface Write extends Read {\n createChunk<V>(data: V, refs: Refs): Chunk<V>;\n putChunk<V>(c: Chunk<V>): Promise<void>;\n setHead(name: string, hash: Hash): Promise<void>;\n removeHead(name: string): Promise<void>;\n assertValidHash(hash: Hash): void;\n commit(): Promise<void>;\n}\n\nexport class ChunkNotFoundError extends Error {\n name = 'ChunkNotFoundError';\n readonly hash: Hash;\n constructor(hash: Hash) {\n super(`Chunk not found ${hash}`);\n this.hash = hash;\n }\n}\n\nexport async function mustGetChunk(\n store: GetChunk,\n hash: Hash,\n): Promise<Chunk> {\n const chunk = await store.getChunk(hash);\n if (chunk) {\n return chunk;\n }\n throw new ChunkNotFoundError(hash);\n}\n\nexport async function mustGetHeadHash(\n name: string,\n store: Read,\n): Promise<Hash> {\n const hash = await store.getHead(name);\n assert(hash, `Missing head ${name}`);\n return hash;\n}\n", "/* eslint-disable @typescript-eslint/naming-convention */\n\n// These three were used before...\n// IndexChangeSDD = 1;\n// LocalSDD = 2;\n// SnapshotSDD = 3;\nexport const LocalDD31 = 4;\nexport const SnapshotDD31 = 5;\n\nexport type LocalDD31 = typeof LocalDD31;\nexport type SnapshotDD31 = typeof SnapshotDD31;\n", "import {\n assert,\n assertArray,\n assertBoolean,\n assertNumber,\n assertObject,\n assertString,\n unreachable,\n} from '../../../shared/src/asserts.ts';\nimport 
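
`toRefs` above is the gatekeeper for the opaque `Refs` type: arrays are sorted in place and asserted duplicate-free, while sets are copied and sorted. A sketch; the import paths and the `Hash` cast are illustrative (real hashes come from the hash module):

```ts
import {toRefs} from './replicache/src/dag/chunk.ts'; // illustrative path
import type {Hash} from './replicache/src/hash.ts'; // illustrative path

const h = (s: string) => s as unknown as Hash; // stand-in hashes for this sketch

// A Set cannot contain duplicates, so it is just copied and sorted.
console.log(toRefs(new Set([h('b'), h('a')]))); // ['a', 'b']

// An array is sorted in place and checked pairwise for duplicates.
console.log(toRefs([h('c'), h('a'), h('b')])); // ['a', 'b', 'c']
// toRefs([h('a'), h('a')]) would throw: 'Refs must not have duplicates'.
```
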
{assertJSONValue} from '../../../shared/src/json.ts';\nimport {skipCommitDataAsserts} from '../config.ts';\nimport {type FrozenCookie, compareCookies} from '../cookies.ts';\nimport {type Chunk, type CreateChunk, type Refs, toRefs} from '../dag/chunk.ts';\nimport {type MustGetChunk, type Read, mustGetHeadHash} from '../dag/store.ts';\nimport {\n type FrozenJSONValue,\n type FrozenTag,\n assertDeepFrozen,\n deepFreeze,\n} from '../frozen-json.ts';\nimport {type Hash, assertHash} from '../hash.ts';\nimport type {IndexDefinition} from '../index-defs.ts';\nimport type {ClientID} from '../sync/ids.ts';\nimport * as MetaType from './meta-type-enum.ts';\n\nexport const DEFAULT_HEAD_NAME = 'main';\n\nexport function commitIsLocalDD31(\n commit: Commit<Meta>,\n): commit is Commit<LocalMetaDD31> {\n return isLocalMetaDD31(commit.meta);\n}\n\nexport function commitIsLocal(\n commit: Commit<Meta>,\n): commit is Commit<LocalMetaDD31> {\n return commitIsLocalDD31(commit);\n}\n\nexport function commitIsSnapshot(\n commit: Commit<Meta>,\n): commit is Commit<SnapshotMetaDD31> {\n return isSnapshotMetaDD31(commit.meta);\n}\n\nexport class Commit<M extends Meta> {\n readonly chunk: Chunk<CommitData<M>>;\n\n constructor(chunk: Chunk<CommitData<M>>) {\n this.chunk = chunk;\n }\n\n get meta(): M {\n return this.chunk.data.meta;\n }\n\n get valueHash(): Hash {\n // Already validated!\n return this.chunk.data.valueHash;\n }\n\n getMutationID(clientID: ClientID, dagRead: MustGetChunk): Promise<number> {\n return getMutationID(clientID, dagRead, this.meta);\n }\n\n async getNextMutationID(\n clientID: ClientID,\n dagRead: MustGetChunk,\n ): Promise<number> {\n return (await this.getMutationID(clientID, dagRead)) + 1;\n }\n\n get indexes(): readonly IndexRecord[] {\n // Already validated!\n return this.chunk.data.indexes;\n }\n}\n\nexport async function getMutationID(\n clientID: ClientID,\n dagRead: MustGetChunk,\n meta: Meta,\n): Promise<number> {\n switch (meta.type) {\n case MetaType.SnapshotDD31:\n return meta.lastMutationIDs[clientID] ?? 0;\n\n case MetaType.LocalDD31: {\n if (meta.clientID === clientID) {\n return meta.mutationID;\n }\n const {basisHash} = meta;\n const basisCommit = await commitFromHash(basisHash, dagRead);\n return getMutationID(clientID, dagRead, basisCommit.meta);\n }\n\n default:\n unreachable(meta);\n }\n}\n\n/**\n * Returns the set of local commits from the given `fromCommitHash` back to but not\n * including its base snapshot. If `fromCommitHash` is a snapshot, the returned vector\n * will be empty. 
When, as typical, `fromCommitHash` is the head of the default chain\n * then the returned commits are the set of pending commits, ie the set of local commits\n * that have not yet been pushed to the data layer.\n *\n * The vector of commits is returned in reverse chain order, that is, starting\n * with the commit with hash `fromCommitHash` and walking backwards.\n */\nexport async function localMutations(\n fromCommitHash: Hash,\n dagRead: Read,\n): Promise<Commit<LocalMetaDD31>[]> {\n const commits = await commitChain(fromCommitHash, dagRead);\n // Filter does not deal with type narrowing.\n return commits.filter(c => commitIsLocal(c)) as Commit<LocalMetaDD31>[];\n}\n\nexport async function localMutationsDD31(\n fromCommitHash: Hash,\n dagRead: Read,\n): Promise<Commit<LocalMetaDD31>[]> {\n const commits = await commitChain(fromCommitHash, dagRead);\n // Filter does not deal with type narrowing.\n return commits.filter(c => commitIsLocalDD31(c)) as Commit<LocalMetaDD31>[];\n}\n\nexport async function localMutationsGreaterThan(\n commit: Commit<Meta>,\n mutationIDLimits: Record<ClientID, number>,\n dagRead: Read,\n): Promise<Commit<LocalMetaDD31>[]> {\n const commits: Commit<LocalMetaDD31>[] = [];\n const remainingMutationIDLimits = new Map(Object.entries(mutationIDLimits));\n while (!commitIsSnapshot(commit) && remainingMutationIDLimits.size > 0) {\n if (commitIsLocalDD31(commit)) {\n const {meta} = commit;\n const mutationIDLowerLimit = remainingMutationIDLimits.get(meta.clientID);\n if (mutationIDLowerLimit !== undefined) {\n if (meta.mutationID <= mutationIDLowerLimit) {\n remainingMutationIDLimits.delete(meta.clientID);\n } else {\n commits.push(commit as Commit<LocalMetaDD31>);\n }\n }\n }\n const {basisHash} = commit.meta;\n if (basisHash === null) {\n throw new Error(`Commit ${commit.chunk.hash} has no basis`);\n }\n commit = await commitFromHash(basisHash, dagRead);\n }\n return commits;\n}\n\nexport async function baseSnapshotFromHead(\n name: string,\n dagRead: Read,\n): Promise<Commit<SnapshotMetaDD31>> {\n const hash = await dagRead.getHead(name);\n assert(hash, `Missing head ${name}`);\n return baseSnapshotFromHash(hash, dagRead);\n}\n\nexport async function baseSnapshotHashFromHash(\n hash: Hash,\n dagRead: Read,\n): Promise<Hash> {\n return (await baseSnapshotFromHash(hash, dagRead)).chunk.hash;\n}\n\nexport async function baseSnapshotFromHash(\n hash: Hash,\n dagRead: Read,\n): Promise<Commit<SnapshotMetaDD31>> {\n const commit = await commitFromHash(hash, dagRead);\n return baseSnapshotFromCommit(commit, dagRead);\n}\n\nexport async function baseSnapshotFromCommit(\n commit: Commit<Meta>,\n dagRead: Read,\n): Promise<Commit<SnapshotMetaDD31>> {\n while (!commitIsSnapshot(commit)) {\n const {meta} = commit;\n if (isLocalMetaDD31(meta)) {\n commit = await commitFromHash(meta.baseSnapshotHash, dagRead);\n } else {\n const {basisHash} = meta;\n if (basisHash === null) {\n throw new Error(`Commit ${commit.chunk.hash} has no basis`);\n }\n commit = await commitFromHash(basisHash, dagRead);\n }\n }\n return commit;\n}\n\nexport function snapshotMetaParts(\n c: Commit<SnapshotMetaDD31>,\n clientID: ClientID,\n): [lastMutationID: number, cookie: FrozenCookie | FrozenJSONValue] {\n const m = c.meta;\n const lmid = m.lastMutationIDs[clientID] ?? 
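
`localMutations` above yields the pending mutations: the local commits between a head and its base snapshot, newest first. A usage sketch that lists their mutator names, assuming illustrative import paths and an open dag `Store` (the `withRead` helper appears later in these sources):

```ts
import {withRead} from './replicache/src/with-transactions.ts'; // illustrative path
import {DEFAULT_HEAD_NAME, localMutations} from './replicache/src/db/commit.ts'; // illustrative
import {mustGetHeadHash, type Store} from './replicache/src/dag/store.ts'; // illustrative

// Names of local commits not yet folded into a snapshot, newest first.
async function pendingMutatorNames(store: Store): Promise<string[]> {
  return withRead(store, async dagRead => {
    const head = await mustGetHeadHash(DEFAULT_HEAD_NAME, dagRead);
    const pending = await localMutations(head, dagRead);
    return pending.map(c => c.meta.mutatorName);
  });
}
```
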
0;\n return [lmid, m.cookieJSON];\n}\n\nexport function compareCookiesForSnapshots(\n a: Commit<SnapshotMetaDD31>,\n b: Commit<SnapshotMetaDD31>,\n): number {\n return compareCookies(a.meta.cookieJSON, b.meta.cookieJSON);\n}\n\n/**\n * Returns all commits from the commit with fromCommitHash to its base snapshot,\n * inclusive of both. Resulting vector is in chain-head-first order (so snapshot\n * comes last).\n */\nexport async function commitChain(\n fromCommitHash: Hash,\n dagRead: Read,\n): Promise<Commit<Meta>[]> {\n let commit = await commitFromHash(fromCommitHash, dagRead);\n const commits = [];\n while (!commitIsSnapshot(commit)) {\n const {meta} = commit;\n const {basisHash} = meta;\n if (basisHash === null) {\n throw new Error(`Commit ${commit.chunk.hash} has no basis`);\n }\n commits.push(commit);\n commit = await commitFromHash(basisHash, dagRead);\n }\n commits.push(commit);\n return commits;\n}\n\nexport async function commitFromHash(\n hash: Hash,\n dagRead: MustGetChunk,\n): Promise<Commit<Meta>> {\n const chunk = await dagRead.mustGetChunk(hash);\n return fromChunk(chunk);\n}\n\nexport async function commitFromHead(\n name: string,\n dagRead: Read,\n): Promise<Commit<Meta>> {\n const hash = await mustGetHeadHash(name, dagRead);\n return commitFromHash(hash, dagRead);\n}\n\nexport type LocalMetaDD31 = {\n readonly type: MetaType.LocalDD31;\n readonly basisHash: Hash;\n readonly mutationID: number;\n readonly mutatorName: string;\n readonly mutatorArgsJSON: FrozenJSONValue;\n readonly originalHash: Hash | null;\n readonly timestamp: number;\n readonly clientID: ClientID;\n readonly baseSnapshotHash: Hash;\n};\n\nexport type LocalMeta = LocalMetaDD31;\n\nexport function assertLocalMetaDD31(\n v: Record<string, unknown>,\n): asserts v is LocalMetaDD31 {\n // type already asserted\n assertString(v.clientID);\n assertNumber(v.mutationID);\n assertString(v.mutatorName);\n if (!v.mutatorName) {\n throw new Error('Missing mutator name');\n }\n assertJSONValue(v.mutatorArgsJSON);\n if (v.originalHash !== null) {\n assertHash(v.originalHash);\n }\n assertNumber(v.timestamp);\n}\n\nexport function isLocalMetaDD31(meta: Meta): meta is LocalMetaDD31 {\n return meta.type === MetaType.LocalDD31;\n}\n\nexport function assertLocalCommitDD31(\n c: Commit<Meta>,\n): asserts c is Commit<LocalMetaDD31> {\n assertLocalMetaDD31(c.meta);\n}\n\nexport type SnapshotMetaDD31 = {\n readonly type: MetaType.SnapshotDD31;\n readonly basisHash: Hash | null;\n readonly lastMutationIDs: Record<ClientID, number>;\n readonly cookieJSON: FrozenCookie;\n};\n\nexport type SnapshotMeta = SnapshotMetaDD31;\n\nexport function assertSnapshotMetaDD31(\n v: Record<string, unknown>,\n): asserts v is SnapshotMetaDD31 {\n // type already asserted\n if (v.basisHash !== null) {\n assertHash(v.basisHash);\n }\n assertJSONValue(v.cookieJSON);\n assertLastMutationIDs(v.lastMutationIDs);\n}\n\nfunction assertLastMutationIDs(\n v: unknown,\n): asserts v is Record<ClientID, number> {\n assertObject(v);\n for (const e of Object.values(v)) {\n assertNumber(e);\n }\n}\n\nexport type Meta = LocalMetaDD31 | SnapshotMetaDD31;\n\nexport function assertSnapshotCommitDD31(\n c: Commit<Meta>,\n): asserts c is Commit<SnapshotMetaDD31> {\n assertSnapshotMetaDD31(c.meta);\n}\n\nfunction isSnapshotMetaDD31(meta: Meta): meta is SnapshotMetaDD31 {\n return meta.type === MetaType.SnapshotDD31;\n}\n\nfunction assertMeta(v: unknown): asserts v is Meta {\n assertObject(v);\n assertDeepFrozen(v);\n if (v.basisHash !== null) {\n 
assertString(v.basisHash);\n }\n\n assertNumber(v.type);\n switch (v.type) {\n case MetaType.LocalDD31:\n assertLocalMetaDD31(v);\n break;\n case MetaType.SnapshotDD31:\n assertSnapshotMetaDD31(v);\n break;\n default:\n throw new Error(`Invalid enum value ${v.type}`);\n }\n}\n\n/**\n * This is the type used for index definitions as defined in the Commit chunk data.\n *\n * Changing this requires a REPLICACHE_FORMAT_VERSION bump.\n */\nexport type ChunkIndexDefinition = {\n readonly name: string;\n readonly keyPrefix: string;\n readonly jsonPointer: string;\n // Used to not exist\n readonly allowEmpty?: boolean;\n};\n\nexport function chunkIndexDefinitionEqualIgnoreName(\n a: ChunkIndexDefinition,\n b: ChunkIndexDefinition,\n): boolean {\n return (\n a.jsonPointer === b.jsonPointer &&\n (a.allowEmpty ?? false) === (b.allowEmpty ?? false) &&\n a.keyPrefix === b.keyPrefix\n );\n}\n\nfunction assertChunkIndexDefinition(\n v: unknown,\n): asserts v is ChunkIndexDefinition {\n assertObject(v);\n assertDeepFrozen(v);\n assertString(v.name);\n assertString(v.keyPrefix);\n assertString(v.jsonPointer);\n if (v.allowEmpty !== undefined) {\n assertBoolean(v.allowEmpty);\n }\n}\n\nexport function toChunkIndexDefinition(\n name: string,\n indexDefinition: IndexDefinition,\n): Required<ChunkIndexDefinition> {\n return {\n name,\n keyPrefix: indexDefinition.prefix ?? '',\n jsonPointer: indexDefinition.jsonPointer,\n allowEmpty: indexDefinition.allowEmpty ?? false,\n };\n}\n\nexport type IndexRecord = {\n readonly definition: ChunkIndexDefinition;\n readonly valueHash: Hash;\n};\n\nfunction assertIndexRecord(v: unknown): asserts v is IndexRecord {\n assertObject(v);\n assertDeepFrozen(v);\n assertChunkIndexDefinition(v.definition);\n assertString(v.valueHash);\n}\n\nfunction assertIndexRecords(v: unknown): asserts v is IndexRecord[] {\n assertArray(v);\n assertDeepFrozen(v);\n for (const ir of v) {\n assertIndexRecord(ir);\n }\n}\n\nexport function newLocalDD31(\n createChunk: CreateChunk,\n basisHash: Hash,\n baseSnapshotHash: Hash,\n mutationID: number,\n mutatorName: string,\n mutatorArgsJSON: FrozenJSONValue,\n originalHash: Hash | null,\n valueHash: Hash,\n indexes: readonly IndexRecord[],\n timestamp: number,\n clientID: ClientID,\n): Commit<LocalMetaDD31> {\n const meta: LocalMetaDD31 = {\n type: MetaType.LocalDD31,\n basisHash,\n baseSnapshotHash,\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n timestamp,\n clientID,\n };\n return commitFromCommitData(\n createChunk,\n makeCommitData(meta, valueHash, indexes),\n );\n}\n\nexport function newSnapshotDD31(\n createChunk: CreateChunk,\n basisHash: Hash | null,\n lastMutationIDs: Record<ClientID, number>,\n cookieJSON: FrozenCookie,\n valueHash: Hash,\n indexes: readonly IndexRecord[],\n): Commit<SnapshotMetaDD31> {\n return commitFromCommitData(\n createChunk,\n newSnapshotCommitDataDD31(\n basisHash,\n lastMutationIDs,\n cookieJSON,\n valueHash,\n indexes,\n ),\n );\n}\n\nexport function newSnapshotCommitDataDD31(\n basisHash: Hash | null,\n lastMutationIDs: Record<ClientID, number>,\n cookieJSON: FrozenCookie,\n valueHash: Hash,\n indexes: readonly IndexRecord[],\n): CommitData<SnapshotMetaDD31> {\n const meta: SnapshotMetaDD31 = {\n type: MetaType.SnapshotDD31,\n basisHash,\n lastMutationIDs,\n cookieJSON,\n };\n return makeCommitData(meta, valueHash, indexes);\n}\n\nexport function fromChunk(chunk: Chunk): Commit<Meta> {\n validateChunk(chunk);\n return new Commit(chunk);\n}\n\nfunction commitFromCommitData<M extends Meta>(\n 
createChunk: CreateChunk,\n  data: CommitData<M>,\n): Commit<M> {\n  return new Commit(createChunk(data, getRefs(data)));\n}\n\nexport function getRefs(data: CommitData<Meta>): Refs {\n  const refs: Set<Hash> = new Set();\n  refs.add(data.valueHash);\n  const {meta} = data;\n  switch (meta.type) {\n    case MetaType.LocalDD31:\n      meta.basisHash && refs.add(meta.basisHash);\n      // Local has weak originalHash\n      break;\n    case MetaType.SnapshotDD31:\n      // Snapshot has weak basisHash\n      break;\n    default:\n      unreachable(meta);\n  }\n\n  for (const index of data.indexes) {\n    refs.add(index.valueHash);\n  }\n\n  return toRefs(refs);\n}\n\nexport type CommitData<M extends Meta> = FrozenTag<{\n  readonly meta: M;\n  readonly valueHash: Hash;\n  readonly indexes: readonly IndexRecord[];\n}>;\n\nexport function makeCommitData<M extends Meta>(\n  meta: M,\n  valueHash: Hash,\n  indexes: readonly IndexRecord[],\n): CommitData<M> {\n  return deepFreeze({\n    meta,\n    valueHash,\n    indexes,\n  }) as unknown as CommitData<M>;\n}\n\nexport function assertCommitData(v: unknown): asserts v is CommitData<Meta> {\n  if (skipCommitDataAsserts) {\n    return;\n  }\n\n  assertObject(v);\n  assertDeepFrozen(v);\n  assertMeta(v.meta);\n  assertString(v.valueHash);\n  assertIndexRecords(v.indexes);\n}\n\nfunction validateChunk(chunk: Chunk): asserts chunk is Chunk<CommitData<Meta>> {\n  const {data} = chunk;\n  assertCommitData(data);\n\n  const seen = new Set();\n  for (const index of data.indexes) {\n    const {name} = index.definition;\n    if (seen.has(name)) {\n      throw new Error(`Duplicate index ${name}`);\n    }\n    seen.add(name);\n  }\n}\n", "/* eslint-disable @typescript-eslint/naming-convention */\n\nexport const Add = 0;\nexport const Remove = 1;\n\nexport type Add = typeof Add;\nexport type Remove = typeof Remove;\n", "import type {LogContext} from '@rocicorp/logger';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport type {BTreeRead} from '../btree/read.ts';\nimport type {BTreeWrite} from '../btree/write.ts';\nimport type {FrozenJSONObject, FrozenJSONValue} from '../frozen-json.ts';\nimport type {Hash} from '../hash.ts';\nimport type {IndexRecord} from './commit.ts';\nimport * as IndexOperation from './index-operation-enum.ts';\n\ntype IndexOperation = Enum<typeof IndexOperation>;\n\nexport class IndexRead<BTree = BTreeRead> {\n  readonly meta: IndexRecord;\n  readonly map: BTree;\n\n  constructor(meta: IndexRecord, map: BTree) {\n    this.meta = meta;\n    this.map = map;\n  }\n}\n\nexport class IndexWrite extends IndexRead<BTreeWrite> {\n  // Note: does not update self.meta.valueHash (doesn't need to at this point as flush\n  // is only called during commit.)\n  flush(): Promise<Hash> {\n    return this.map.flush();\n  }\n\n  clear(): Promise<void> {\n    return this.map.clear();\n  }\n}\n\n// Index or de-index a single primary entry.\nexport async function indexValue(\n  lc: LogContext,\n  index: BTreeWrite,\n  op: IndexOperation,\n  key: string,\n  val: FrozenJSONValue,\n  jsonPointer: string,\n  allowEmpty: boolean,\n): Promise<void> {\n  try {\n    for (const entry of getIndexKeys(key, val, jsonPointer, allowEmpty)) {\n      switch (op) {\n        case IndexOperation.Add:\n          await index.put(entry, val);\n          break;\n        case IndexOperation.Remove:\n          await index.del(entry);\n          break;\n      }\n    }\n  } catch (e) {\n    // Right now all the errors that index_value() returns are customers' dev\n    // problems: either the value is not json, the pointer is into nowhere, etc.\n    // So we ignore them.\n    lc.info?.('Not indexing value', val, ':', e);\n  }\n}\n\n// Gets the set of index keys for a given primary key and 
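
`getRefs` above decides which hashes a commit chunk holds strongly: always `valueHash` and every index `valueHash`, plus `basisHash` for local commits; a snapshot's `basisHash` and a local commit's `originalHash` are deliberately left out (weak) so superseded history can be garbage-collected. A sketch of the refs computed for a local commit (paths and stand-in hashes are illustrative):

```ts
import {getRefs, makeCommitData, type LocalMetaDD31} from './replicache/src/db/commit.ts'; // illustrative
import * as MetaType from './replicache/src/db/meta-type-enum.ts'; // illustrative path
import {deepFreeze} from './replicache/src/frozen-json.ts'; // illustrative path
import type {Hash} from './replicache/src/hash.ts'; // illustrative path

const h = (s: string) => s as unknown as Hash; // stand-in hashes for this sketch

const meta: LocalMetaDD31 = {
  type: MetaType.LocalDD31,
  basisHash: h('basis'),
  baseSnapshotHash: h('snap'),
  mutationID: 7,
  mutatorName: 'addTodo',
  mutatorArgsJSON: deepFreeze({title: 'buy milk'}),
  originalHash: null,
  timestamp: 0,
  clientID: 'c1',
};

// valueHash and basisHash are strong refs; baseSnapshotHash and
// originalHash are not, so they do not keep old chunks alive.
console.log(getRefs(makeCommitData(meta, h('value'), []))); // ['basis', 'value']
```
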
value.\nexport function getIndexKeys(\n  primary: string,\n  value: FrozenJSONValue,\n  jsonPointer: string,\n  allowEmpty: boolean,\n): string[] {\n  const target = evaluateJSONPointer(value, jsonPointer);\n  if (target === undefined) {\n    if (allowEmpty) {\n      return [];\n    }\n    throw new Error(`No value at path: ${jsonPointer}`);\n  }\n\n  const values = Array.isArray(target) ? target : [target];\n\n  const indexKeys: string[] = [];\n  for (const value of values) {\n    if (typeof value === 'string') {\n      indexKeys.push(encodeIndexKey([value, primary]));\n    } else {\n      throw new Error('Unsupported target type');\n    }\n  }\n\n  return indexKeys;\n}\n\nexport const KEY_VERSION_0 = '\\u0000';\nexport const KEY_SEPARATOR = '\\u0000';\n\n/**\n * When using indexes the key is a tuple of the secondary key and the primary\n * key.\n */\nexport type IndexKey = readonly [secondary: string, primary: string];\n\n// An index key is encoded to vec of bytes in the following order:\n// - key version byte(s), followed by\n// - the secondary key bytes (which for now is a UTF8 encoded string), followed by\n// - the key separator, a null byte, followed by\n// - the primary key bytes\n//\n// The null separator byte ensures that if a secondary key A is longer than B then\n// A always sorts after B. Appending the primary key ensures index keys with\n// identical secondary keys sort in primary key order. Secondary keys must not\n// contain a zero (null) byte.\nexport function encodeIndexKey(indexKey: IndexKey): string {\n  const secondary = indexKey[0];\n  const primary = indexKey[1];\n\n  if (secondary.includes('\\u0000')) {\n    throw new Error('Secondary key cannot contain null byte');\n  }\n  return KEY_VERSION_0 + secondary + KEY_SEPARATOR + primary;\n}\n\n// Returns bytes that can be used to scan for the given secondary index value.\n//\n// Consider a scan for start_secondary_key=\"a\" (97). We want to scan with scan\n// key [0, 97]. We could also scan with [0, 97, 0], but then we couldn't use\n// this function for prefix scans, so we lop off the null byte. If we want\n// the scan to be exclusive, we scan with the next greater value, [0, 97, 1]\n// (we disallow zero bytes in secondary keys).\n//\n// Now it gets a little tricky. We also want to be able to scan using the\n// primary key, start_key. When we do this we have to encode the scan key\n// a little differently. We essentially have to fix the value of the\n// secondary key so we can vary the start_key. That is, the match on\n// start_secondary_key becomes an exact match.\n//\n// Consider the scan for start_secondary_key=\"a\" and start_key=[2]. We want\n// to scan with [0, 97, 0, 2]. If we want exclusive we want to scan with\n// the next highest value, [0, 97, 0, 2, 0] (zero bytes are allowed in primary\n// keys). So far so good. It is important to notice that we need to\n// be able to distinguish between not wanting to use start_key and wanting to\n// use start_key=[]. 
In the latter case we want to scan by the secondary\n// key value, followed by the zero byte, followed by the primary key value\n// and another zero if it is exclusive ([0, 97, 0] or [0, 97, 0, 0]).\n// This explains why we need the Option around start_key.\nexport function encodeIndexScanKey(\n secondary: string,\n primary: string | undefined,\n): string {\n const k = encodeIndexKey([secondary, primary || '']);\n if (primary === undefined) {\n return k.slice(0, k.length - 1);\n }\n return k;\n}\n\n// Decodes an IndexKey encoded by encode_index_key.\nexport function decodeIndexKey(encodedIndexKey: string): IndexKey {\n if (encodedIndexKey[0] !== KEY_VERSION_0) {\n throw new Error('Invalid version');\n }\n\n const versionLen = KEY_VERSION_0.length;\n const separatorLen = KEY_SEPARATOR.length;\n const separatorOffset = encodedIndexKey.indexOf(KEY_SEPARATOR, versionLen);\n if (separatorOffset === -1) {\n throw new Error('Invalid formatting');\n }\n\n const secondary = encodedIndexKey.slice(versionLen, separatorOffset);\n const primary = encodedIndexKey.slice(separatorOffset + separatorLen);\n return [secondary, primary];\n}\n\nexport function evaluateJSONPointer(\n value: FrozenJSONValue,\n pointer: string,\n): FrozenJSONValue | undefined {\n function parseIndex(s: string): number | undefined {\n if (s.startsWith('+') || (s.startsWith('0') && s.length !== 1)) {\n return undefined;\n }\n return parseInt(s, 10);\n }\n\n if (pointer === '') {\n return value;\n }\n if (!pointer.startsWith('/')) {\n throw new Error(`Invalid JSON pointer: ${pointer}`);\n }\n\n const tokens = pointer\n .split('/')\n .slice(1)\n .map(x => x.replace(/~1/g, '/').replace(/~0/g, '~'));\n\n let target = value;\n for (const token of tokens) {\n let targetOpt;\n if (Array.isArray(target)) {\n const i = parseIndex(token);\n if (i === undefined) {\n return undefined;\n }\n targetOpt = target[i];\n } else if (target === null) {\n return undefined;\n } else if (typeof target === 'object') {\n target = target as FrozenJSONObject;\n targetOpt = target[token];\n }\n if (targetOpt === undefined) {\n return undefined;\n }\n target = targetOpt;\n }\n return target;\n}\n", "import type {Enum} from '../../../shared/src/enum.ts';\nimport {BTreeRead} from '../btree/read.ts';\nimport type {Read as DagRead} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport type {Hash} from '../hash.ts';\nimport {\n Commit,\n DEFAULT_HEAD_NAME,\n type Meta,\n commitFromHash,\n commitFromHead,\n} from './commit.ts';\nimport {IndexRead} from './index.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class Read {\n readonly #dagRead: DagRead;\n map: BTreeRead;\n readonly indexes: Map<string, IndexRead>;\n\n constructor(\n dagRead: DagRead,\n map: BTreeRead,\n indexes: Map<string, IndexRead>,\n ) {\n this.#dagRead = dagRead;\n this.map = map;\n this.indexes = indexes;\n }\n\n has(key: string): Promise<boolean> {\n return this.map.has(key);\n }\n\n get(key: string): Promise<FrozenJSONValue | undefined> {\n return this.map.get(key);\n }\n\n isEmpty(): Promise<boolean> {\n return this.map.isEmpty();\n }\n\n getMapForIndex(indexName: string): BTreeRead {\n const idx = this.indexes.get(indexName);\n if (idx === undefined) {\n throw new Error(`Unknown index name: ${indexName}`);\n }\n return idx.map;\n }\n\n get closed(): boolean {\n return this.#dagRead.closed;\n }\n\n close(): void {\n this.#dagRead.release();\n }\n}\n\nexport function 
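
The encoding above makes an index key `'\u0000' + secondary + '\u0000' + primary`: the version byte leads, the null separator guarantees prefix-ordering of secondaries, and the primary key breaks ties; `encodeIndexScanKey` drops the trailing separator when no primary is given so the result doubles as a prefix-scan key. A worked round trip (import paths illustrative):

```ts
import {
  decodeIndexKey,
  encodeIndexKey,
  encodeIndexScanKey,
  getIndexKeys,
} from './replicache/src/db/index.ts'; // illustrative path
import {deepFreeze} from './replicache/src/frozen-json.ts'; // illustrative path

const k = encodeIndexKey(['sam', 'u1']); // '\u0000sam\u0000u1'
console.log(decodeIndexKey(k)); // ['sam', 'u1']

// No primary given: the trailing separator is lopped off, so this also
// prefix-matches secondaries that merely start with 'sam'.
console.log(encodeIndexScanKey('sam', undefined)); // '\u0000sam'

// Index keys derived from a value via JSON pointer:
console.log(getIndexKeys('u1', deepFreeze({name: 'sam'}), '/name', false));
// ['\u0000sam\u0000u1']
```
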
readFromDefaultHead(\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Promise<Read> {\n return readFromHead(DEFAULT_HEAD_NAME, dagRead, formatVersion);\n}\n\nexport async function readFromHead(\n name: string,\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Promise<Read> {\n const commit = await commitFromHead(name, dagRead);\n return readFromCommit(commit, dagRead, formatVersion);\n}\n\nexport async function readFromHash(\n hash: Hash,\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Promise<Read> {\n const commit = await commitFromHash(hash, dagRead);\n return readFromCommit(commit, dagRead, formatVersion);\n}\n\nfunction readFromCommit(\n commit: Commit<Meta>,\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Read {\n const indexes = readIndexesForRead(commit, dagRead, formatVersion);\n const map = new BTreeRead(dagRead, formatVersion, commit.valueHash);\n return new Read(dagRead, map, indexes);\n}\n\nexport function readIndexesForRead(\n commit: Commit<Meta>,\n dagRead: DagRead,\n formatVersion: FormatVersion,\n): Map<string, IndexRead> {\n const m = new Map();\n for (const index of commit.indexes) {\n m.set(\n index.definition.name,\n new IndexRead(\n index,\n new BTreeRead(dagRead, formatVersion, index.valueHash),\n ),\n );\n }\n return m;\n}\n", "export interface Release {\n release(): void;\n}\n\nexport interface Commit {\n commit(): Promise<void>;\n}\n\ninterface ReadStore<Read extends Release> {\n read(): Promise<Read>;\n}\n\ninterface WriteStore<Write extends Release> {\n write(): Promise<Write>;\n}\n\nexport function withRead<Read extends Release, Return>(\n store: ReadStore<Read>,\n fn: (read: Read) => Return | Promise<Return>,\n): Promise<Return> {\n return using(store.read(), fn);\n}\n\nexport function withWriteNoImplicitCommit<Write extends Release, Return>(\n store: WriteStore<Write>,\n fn: (write: Write) => Return | Promise<Return>,\n): Promise<Return> {\n return using(store.write(), fn);\n}\n\nexport function withWrite<Write extends Release & Commit, Return>(\n store: WriteStore<Write>,\n fn: (write: Write) => Return | Promise<Return>,\n): Promise<Return> {\n return using(store.write(), async write => {\n const result = await fn(write);\n await write.commit();\n return result;\n });\n}\n\n/**\n * This function takes a promise for a resource and a function that uses that\n * resource. It will release the resource after the function returns by calling\n * the `release` function\n */\nexport async function using<TX extends Release, Return>(\n x: Promise<TX>,\n fn: (tx: TX) => Return | Promise<Return>,\n): Promise<Return> {\n const write = await x;\n try {\n return await fn(write);\n } finally {\n write.release();\n }\n}\n", "import * as valita from '../../shared/src/valita.ts';\n\n/**\n * The definition of a single index.\n */\nexport type IndexDefinition = {\n /**\n * The prefix, if any, to limit the index over. If not provided the values of\n * all keys are indexed.\n */\n readonly prefix?: string | undefined;\n\n /**\n * A [JSON Pointer](https://tools.ietf.org/html/rfc6901) pointing at the sub\n * value inside each value to index over.\n *\n * For example, one might index over users' ages like so:\n * `{prefix: '/user/', jsonPointer: '/age'}`\n */\n readonly jsonPointer: string;\n\n /**\n * If `true`, indexing empty values will not emit a warning. 
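
The `with*` helpers above are small scope guards: `withRead` releases the transaction no matter how the callback exits, and `withWrite` additionally commits when it resolves. A usage sketch against the dag `Store` interface (paths illustrative):

```ts
import {withRead, withWrite} from './replicache/src/with-transactions.ts'; // illustrative path
import type {Store} from './replicache/src/dag/store.ts'; // illustrative path

async function copyHead(store: Store, from: string, to: string): Promise<void> {
  // Read txn: released even if the callback throws.
  const hash = await withRead(store, read => read.getHead(from));
  if (hash === undefined) {
    return;
  }
  // Write txn: committed after the callback resolves, then released.
  await withWrite(store, write => write.setHead(to, hash));
}
```
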
Defaults to `false`.\n */\n readonly allowEmpty?: boolean | undefined;\n};\n\nexport const indexDefinitionSchema: valita.Type<IndexDefinition> =\n valita.readonlyObject({\n prefix: valita.string().optional(),\n jsonPointer: valita.string(),\n allowEmpty: valita.boolean().optional(),\n });\n\n/**\n * An object as a map defining the indexes. The keys are the index names and the\n * values are the index definitions.\n */\nexport type IndexDefinitions = {readonly [name: string]: IndexDefinition};\n\nexport const indexDefinitionsSchema = valita.readonlyRecord(\n indexDefinitionSchema,\n);\n\nexport function indexDefinitionEqual(\n a: IndexDefinition,\n b: IndexDefinition,\n): boolean {\n return (\n a.jsonPointer === b.jsonPointer &&\n (a.allowEmpty ?? false) === (b.allowEmpty ?? false) &&\n (a.prefix ?? '') === (b.prefix ?? '')\n );\n}\n\nexport function indexDefinitionsEqual(\n a: IndexDefinitions,\n b: IndexDefinitions,\n): boolean {\n if (Object.keys(a).length !== Object.keys(b).length) {\n return false;\n }\n for (const [aKey, aValue] of Object.entries(a)) {\n const bValue = b[aKey];\n if (!bValue || !indexDefinitionEqual(aValue, bValue)) {\n return false;\n }\n }\n return true;\n}\n\nexport function assertIndexDefinitions(\n value: unknown,\n): asserts value is IndexDefinitions {\n valita.assert(value, indexDefinitionsSchema);\n}\n", "import {assert, assertObject} from '../../../shared/src/asserts.ts';\nimport * as valita from '../../../shared/src/valita.ts';\nimport {toRefs} from '../dag/chunk.ts';\nimport type {Read, Write} from '../dag/store.ts';\nimport {deepFreeze, type FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, hashSchema} from '../hash.ts';\nimport {indexDefinitionsEqual, indexDefinitionsSchema} from '../index-defs.ts';\nimport type {ClientGroupID} from '../sync/ids.ts';\n\nexport type ClientGroupMap = ReadonlyMap<ClientGroupID, ClientGroup>;\n\nconst clientGroupSchema = valita.readonlyObject({\n /**\n * The hash of the commit in the perdag last persisted to this client group.\n * Should only be updated by clients assigned to this client group.\n */\n headHash: hashSchema,\n\n /**\n * Set of mutator names common to all clients assigned to this client group.\n */\n mutatorNames: valita.readonlyArray(valita.string()),\n\n /**\n * Index definitions common to all clients assigned to this client group.\n */\n indexes: indexDefinitionsSchema,\n\n /**\n * The highest mutation ID of every client assigned to this client group.\n * Should only be updated by clients assigned to this client group. Read by\n * other clients to determine if there are unacknowledged pending mutations\n * for them to try to recover. This is redundant with information in the\n * commit graph at `headHash`, but allows other clients to determine if there\n * are unacknowledged pending mutations without having to load the commit\n * graph.\n */\n mutationIDs: valita.readonlyRecord(valita.number()),\n\n /**\n * The highest lastMutationID received from the server for every client\n * assigned to this client group.\n *\n * Should be updated by the clients assigned to this client group whenever\n * they persist to this client group. 
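
Index definitions above are plain config objects keyed by index name, and the equality helpers normalize the optional fields (`prefix` defaults to `''`, `allowEmpty` to `false`). A sketch (path illustrative):

```ts
import {
  assertIndexDefinitions,
  indexDefinitionEqual,
  type IndexDefinitions,
} from './replicache/src/index-defs.ts'; // illustrative path

const indexes: IndexDefinitions = {
  // Index the email field of every value whose key starts with 'user/'.
  byEmail: {prefix: 'user/', jsonPointer: '/email'},
};
assertIndexDefinitions(indexes); // validates against the valita schema

// Omitted optionals compare equal to their defaults.
console.log(
  indexDefinitionEqual(
    {jsonPointer: '/email'},
    {jsonPointer: '/email', prefix: '', allowEmpty: false},
  ),
); // true
```
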
Read by other clients to determine if\n * there are unacknowledged pending mutations for them to recover and\n * *updated* by other clients upon successfully recovering pending mutations\n * to avoid redundant pushes of pending mutations.\n *\n * Note: This will be the same as the `lastMutationIDs` of the base snapshot\n * of the client group's commit graph when written by clients assigned to this\n * client group. However, when written by another client recovering mutations\n * it may be different because the other client does not update the commit\n * graph.\n */\n lastServerAckdMutationIDs: valita.record(valita.number()),\n\n /**\n * If the server deletes this client group it can signal that the client group\n * was deleted. If that happens we mark this client group as disabled so that\n * we do not use it again when creating new clients.\n */\n disabled: valita.boolean(),\n});\n\nexport type ClientGroup = valita.Infer<typeof clientGroupSchema>;\n\nexport const CLIENT_GROUPS_HEAD_NAME = 'client-groups';\n\nfunction assertClientGroup(value: unknown): asserts value is ClientGroup {\n valita.assert(value, clientGroupSchema);\n}\n\nfunction chunkDataToClientGroupMap(chunkData: unknown): ClientGroupMap {\n assertObject(chunkData);\n const clientGroups = new Map<ClientGroupID, ClientGroup>();\n for (const [key, value] of Object.entries(chunkData)) {\n if (value !== undefined) {\n assertClientGroup(value);\n clientGroups.set(key, value);\n }\n }\n return clientGroups;\n}\n\nfunction clientGroupMapToChunkData(\n clientGroups: ClientGroupMap,\n dagWrite: Write,\n): FrozenJSONValue {\n const chunkData: {[id: ClientGroupID]: ClientGroup} = {};\n for (const [clientGroupID, clientGroup] of clientGroups.entries()) {\n dagWrite.assertValidHash(clientGroup.headHash);\n chunkData[clientGroupID] = {\n ...clientGroup,\n mutatorNames: [...clientGroup.mutatorNames.values()],\n };\n }\n return deepFreeze(chunkData);\n}\n\nasync function getClientGroupsAtHash(\n hash: Hash,\n dagRead: Read,\n): Promise<ClientGroupMap> {\n const chunk = await dagRead.getChunk(hash);\n return chunkDataToClientGroupMap(chunk?.data);\n}\n\nexport async function getClientGroups(dagRead: Read): Promise<ClientGroupMap> {\n const hash = await dagRead.getHead(CLIENT_GROUPS_HEAD_NAME);\n if (!hash) {\n return new Map();\n }\n return getClientGroupsAtHash(hash, dagRead);\n}\n\nexport async function setClientGroups(\n clientGroups: ClientGroupMap,\n dagWrite: Write,\n): Promise<ClientGroupMap> {\n const currClientGroups = await getClientGroups(dagWrite);\n for (const [clientGroupID, clientGroup] of clientGroups) {\n const currClientGroup = currClientGroups.get(clientGroupID);\n validateClientGroupUpdate(clientGroup, currClientGroup);\n }\n return setValidatedClientGroups(clientGroups, dagWrite);\n}\n\nexport async function setClientGroup(\n clientGroupID: ClientGroupID,\n clientGroup: ClientGroup,\n dagWrite: Write,\n): Promise<ClientGroupMap> {\n const currClientGroups = await getClientGroups(dagWrite);\n const currClientGroup = currClientGroups.get(clientGroupID);\n validateClientGroupUpdate(clientGroup, currClientGroup);\n const newClientGroups = new Map(currClientGroups);\n newClientGroups.set(clientGroupID, clientGroup);\n return setValidatedClientGroups(newClientGroups, dagWrite);\n}\n\nexport async function deleteClientGroup(\n clientGroupID: ClientGroupID,\n dagWrite: Write,\n): Promise<ClientGroupMap> {\n const currClientGroups = await getClientGroups(dagWrite);\n if (!currClientGroups.has(clientGroupID)) {\n return 
currClientGroups;\n }\n const newClientGroups = new Map(currClientGroups.entries());\n newClientGroups.delete(clientGroupID);\n return setValidatedClientGroups(newClientGroups, dagWrite);\n}\n\nfunction validateClientGroupUpdate(\n clientGroup: ClientGroup,\n currClientGroup: ClientGroup | undefined,\n) {\n const mutatorNamesSet = new Set(clientGroup.mutatorNames);\n assert(\n mutatorNamesSet.size === clientGroup.mutatorNames.length,\n \"A client group's mutatorNames must be a set.\",\n );\n if (currClientGroup !== undefined) {\n assert(\n indexDefinitionsEqual(currClientGroup.indexes, clientGroup.indexes),\n \"A client group's index definitions must never change.\",\n );\n assert(\n mutatorNamesEqual(mutatorNamesSet, currClientGroup.mutatorNames),\n \"A client group's mutatorNames must never change.\",\n );\n }\n}\n\nasync function setValidatedClientGroups(\n clientGroups: ClientGroupMap,\n dagWrite: Write,\n): Promise<ClientGroupMap> {\n const chunkData = clientGroupMapToChunkData(clientGroups, dagWrite);\n const refs: Set<Hash> = new Set();\n for (const clientGroup of clientGroups.values()) {\n refs.add(clientGroup.headHash);\n }\n const chunk = dagWrite.createChunk(chunkData, toRefs(refs));\n await dagWrite.putChunk(chunk);\n await dagWrite.setHead(CLIENT_GROUPS_HEAD_NAME, chunk.hash);\n return clientGroups;\n}\n\nexport function mutatorNamesEqual(\n mutatorNamesSet: ReadonlySet<string>,\n mutatorNames: readonly string[],\n): boolean {\n if (mutatorNames.length !== mutatorNamesSet.size) {\n return false;\n }\n for (const mutatorName of mutatorNames) {\n if (!mutatorNamesSet.has(mutatorName)) {\n return false;\n }\n }\n return true;\n}\n\nexport async function getClientGroup(\n id: ClientGroupID,\n dagRead: Read,\n): Promise<ClientGroup | undefined> {\n const clientGroups = await getClientGroups(dagRead);\n return clientGroups.get(id);\n}\n\nexport function clientGroupHasPendingMutations(clientGroup: ClientGroup) {\n for (const [clientID, mutationID] of Object.entries(\n clientGroup.mutationIDs,\n )) {\n const lastServerAckdMutationID =\n clientGroup.lastServerAckdMutationIDs[clientID];\n if (\n (lastServerAckdMutationID === undefined && mutationID !== 0) ||\n lastServerAckdMutationID < mutationID\n ) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Marks a client group as disabled. 
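
`clientGroupHasPendingMutations` above reports pending work whenever some client's mutation counter is ahead of the server-acknowledged one, with a missing ack treated as 0. Building a full `ClientGroup` needs hashes and the whole schema, so here is the predicate re-stated over just the two counter maps (a sketch, not the exported API):

```ts
// Re-statement of the pending-mutations check over plain counter maps.
function hasPending(
  mutationIDs: Record<string, number>,
  lastServerAckdMutationIDs: Record<string, number>,
): boolean {
  for (const [clientID, mutationID] of Object.entries(mutationIDs)) {
    const ackd: number | undefined = lastServerAckdMutationIDs[clientID];
    if (ackd === undefined ? mutationID !== 0 : ackd < mutationID) {
      return true;
    }
  }
  return false;
}

console.log(hasPending({c1: 3}, {c1: 3})); // false: fully acknowledged
console.log(hasPending({c1: 4}, {c1: 3})); // true: mutation 4 unacked
console.log(hasPending({c1: 0}, {}));      // false: client never mutated
```
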
This can happen if the server deletes the\n * client group (servers should not delete clients or client groups but it often\n * happens in practice when developing).\n *\n * A disabled client group prevents pulls and pushes from happening.\n */\nexport async function disableClientGroup(\n clientGroupID: string,\n dagWrite: Write,\n): Promise<void> {\n const clientGroup = await getClientGroup(clientGroupID, dagWrite);\n if (!clientGroup) {\n // No client group matching in the database, so nothing to do.\n return;\n }\n const disabledClientGroup = {\n ...clientGroup,\n disabled: true,\n };\n await setClientGroup(clientGroupID, disabledClientGroup, dagWrite);\n}\n", "export async function asyncIterableToArray<T>(\n it: AsyncIterable<T>,\n): Promise<T[]> {\n const arr: T[] = [];\n for await (const v of it) {\n arr.push(v);\n }\n return arr;\n}\n", "import {asyncIterableToArray} from '../async-iterable-to-array.ts';\nimport type {InternalDiff} from './node.ts';\nimport type {BTreeRead} from './read.ts';\n\nexport function diff(\n oldMap: BTreeRead,\n newMap: BTreeRead,\n): Promise<InternalDiff> {\n // Return an array to ensure we do not compute the diff more than once.\n return asyncIterableToArray(newMap.diff(oldMap));\n}\n", "import {Lock} from '@rocicorp/lock';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {type Chunk, type CreateChunk, toRefs} from '../dag/chunk.ts';\nimport type {Write} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash, newRandomHash} from '../hash.ts';\nimport {getSizeOfEntry} from '../size-of-value.ts';\nimport {\n DataNodeImpl,\n type Entry,\n InternalNodeImpl,\n createNewInternalEntryForNode,\n emptyDataNode,\n isDataNodeImpl,\n newNodeImpl,\n partition,\n toChunkData,\n} from './node.ts';\nimport {BTreeRead} from './read.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class BTreeWrite extends BTreeRead {\n /**\n * This rw lock is used to ensure we do not mutate the btree in parallel. 
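
The `asyncIterableToArray` helper above simply drains an async iterable into an array (the btree `diff` wrapper uses it so a diff is computed at most once). A quick usage sketch, assuming the helper is in scope:

```ts
// Hypothetical async generator standing in for an async diff stream.
async function* numbers() {
  yield 1;
  yield 2;
  yield 3;
}

async function demo(): Promise<void> {
  const arr = await asyncIterableToArray(numbers());
  console.log(arr); // [1, 2, 3]
}
```
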
It\n * would be a problem if we didn't have the lock in cases like this:\n *\n * ```ts\n * const p1 = tree.put('a', 0);\n * const p2 = tree.put('b', 1);\n * await p1;\n * await p2;\n * ```\n *\n * because both `p1` and `p2` would start from the old root hash but a put\n * changes the root hash so the two concurrent puts would lead to only one of\n * them actually working, and it is not deterministic which one would finish\n * last.\n */\n readonly #lock = new Lock();\n readonly #modified: Map<Hash, DataNodeImpl | InternalNodeImpl> = new Map();\n\n declare protected _dagRead: Write;\n\n readonly minSize: number;\n readonly maxSize: number;\n\n constructor(\n dagWrite: Write,\n formatVersion: FormatVersion,\n root: Hash = emptyHash,\n minSize = 8 * 1024,\n maxSize = 16 * 1024,\n getEntrySize: <K, V>(k: K, v: V) => number = getSizeOfEntry,\n chunkHeaderSize?: number,\n ) {\n super(dagWrite, formatVersion, root, getEntrySize, chunkHeaderSize);\n\n this.minSize = minSize;\n this.maxSize = maxSize;\n }\n\n #addToModified(node: DataNodeImpl | InternalNodeImpl): void {\n assert(node.isMutable);\n this.#modified.set(node.hash, node);\n this._cache.set(node.hash, node);\n }\n\n updateNode(node: DataNodeImpl | InternalNodeImpl): void {\n assert(node.isMutable);\n this.#modified.delete(node.hash);\n node.hash = newRandomHash();\n this.#addToModified(node);\n }\n\n newInternalNodeImpl(\n entries: Array<Entry<Hash>>,\n level: number,\n ): InternalNodeImpl {\n const n = new InternalNodeImpl(entries, newRandomHash(), level, true);\n this.#addToModified(n);\n return n;\n }\n\n newDataNodeImpl(entries: Entry<FrozenJSONValue>[]): DataNodeImpl {\n const n = new DataNodeImpl(entries, newRandomHash(), true);\n this.#addToModified(n);\n return n;\n }\n\n newNodeImpl(entries: Entry<FrozenJSONValue>[], level: number): DataNodeImpl;\n newNodeImpl(entries: Entry<Hash>[], level: number): InternalNodeImpl;\n newNodeImpl(\n entries: Entry<Hash>[] | Entry<FrozenJSONValue>[],\n level: number,\n ): InternalNodeImpl | DataNodeImpl;\n newNodeImpl(\n entries: Entry<Hash>[] | Entry<FrozenJSONValue>[],\n level: number,\n ): InternalNodeImpl | DataNodeImpl {\n const n = newNodeImpl(entries, newRandomHash(), level, true);\n this.#addToModified(n);\n return n;\n }\n\n put(key: string, value: FrozenJSONValue): Promise<void> {\n return this.#lock.withLock(async () => {\n const oldRootNode = await this.getNode(this.rootHash);\n const entrySize = this.getEntrySize(key, value);\n const rootNode = await oldRootNode.set(key, value, entrySize, this);\n\n // We do the rebalancing in the parent so we need to do it here as well.\n if (rootNode.getChildNodeSize(this) > this.maxSize) {\n const headerSize = this.chunkHeaderSize;\n const partitions = partition(\n rootNode.entries,\n value => value[2],\n this.minSize - headerSize,\n this.maxSize - headerSize,\n );\n const {level} = rootNode;\n const entries: Entry<Hash>[] = partitions.map(entries => {\n const node = this.newNodeImpl(entries, level);\n return createNewInternalEntryForNode(node, this.getEntrySize);\n });\n const newRoot = this.newInternalNodeImpl(entries, level + 1);\n this.rootHash = newRoot.hash;\n return;\n }\n\n this.rootHash = rootNode.hash;\n });\n }\n\n del(key: string): Promise<boolean> {\n return this.#lock.withLock(async () => {\n const oldRootNode = await this.getNode(this.rootHash);\n const newRootNode = await oldRootNode.del(key, this);\n\n // No need to rebalance here since if root gets too small there is nothing\n // we can do about that.\n const found = 
this.rootHash !== newRootNode.hash;\n    if (found) {\n      // Flatten one layer.\n      if (newRootNode.level > 0 && newRootNode.entries.length === 1) {\n        this.rootHash = (newRootNode as InternalNodeImpl).entries[0][1];\n      } else {\n        this.rootHash = newRootNode.hash;\n      }\n      }\n\n      return found;\n    });\n  }\n\n  clear(): Promise<void> {\n    return this.#lock.withLock(() => {\n      this.#modified.clear();\n      this.rootHash = emptyHash;\n    });\n  }\n\n  flush(): Promise<Hash> {\n    return this.#lock.withLock(async () => {\n      const dagWrite = this._dagRead;\n\n      if (this.rootHash === emptyHash) {\n        // Write a chunk for the empty tree.\n        const chunk = dagWrite.createChunk(emptyDataNode, []);\n        await dagWrite.putChunk(chunk as Chunk<ReadonlyJSONValue>);\n        return chunk.hash;\n      }\n\n      const newChunks: Chunk[] = [];\n      const newRoot = gatherNewChunks(\n        this.rootHash,\n        newChunks,\n        dagWrite.createChunk,\n        this.#modified,\n        this._formatVersion,\n      );\n      await Promise.all(newChunks.map(chunk => dagWrite.putChunk(chunk)));\n      this.#modified.clear();\n      this.rootHash = newRoot;\n      return newRoot;\n    });\n  }\n}\n\nfunction gatherNewChunks(\n  hash: Hash,\n  newChunks: Chunk[],\n  createChunk: CreateChunk,\n  modified: Map<Hash, DataNodeImpl | InternalNodeImpl>,\n  formatVersion: FormatVersion,\n): Hash {\n  const node = modified.get(hash);\n  if (node === undefined) {\n    // Not modified, use the original.\n    return hash;\n  }\n\n  if (isDataNodeImpl(node)) {\n    const chunk = createChunk(toChunkData(node, formatVersion), []);\n    newChunks.push(chunk);\n    return chunk.hash;\n  }\n\n  // The BTree cannot have duplicate keys so the child entry hashes are unique.\n  // No need for a set to dedupe here.\n  const refs: Hash[] = [];\n  const {entries} = node;\n  for (let i = 0; i < entries.length; i++) {\n    const entry = entries[i];\n    const childHash = entry[1];\n    const newChildHash = gatherNewChunks(\n      childHash,\n      newChunks,\n      createChunk,\n      modified,\n      formatVersion,\n    );\n    if (newChildHash !== childHash) {\n      // MUTATES the entries!\n      // Hashes do not change the size of the entry because all hashes have the same length\n      entries[i] = [entry[0], newChildHash, entry[2]];\n    }\n    refs.push(newChildHash);\n  }\n  const chunk = createChunk(toChunkData(node, formatVersion), toRefs(refs));\n  newChunks.push(chunk);\n  return chunk.hash;\n}\n", "export function lazy<T>(factory: () => T): () => T {\n  let value: T | undefined;\n  return () => {\n    if (value === undefined) {\n      value = factory();\n    }\n    return value;\n  };\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {diff as btreeDiff} from '../btree/diff.ts';\nimport type {InternalDiff} from '../btree/node.ts';\nimport {allEntriesAsDiff, BTreeRead} from '../btree/read.ts';\nimport type {Read} from '../dag/store.ts';\nimport {Commit, commitFromHash, type Meta} from '../db/commit.ts';\nimport {readIndexesForRead} from '../db/read.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {Hash} from '../hash.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\n/**\n * Interface allowing different diff functions to skip costly diff computations.\n */\nexport interface DiffComputationConfig {\n  shouldComputeDiffs(): boolean;\n  shouldComputeDiffsForIndex(name: string): boolean;\n}\n\n/**\n * The diffs in different indexes. 
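
The `lazy` helper above memoizes a factory behind a thunk. A small sketch of its behavior, assuming the helper is in scope:

```ts
let calls = 0;
const answer = lazy(() => {
  calls++;
  return 42;
});

answer(); // runs the factory
answer(); // returns the cached 42
console.log(calls); // 1

// Caveat: undefined doubles as the "not yet computed" sentinel, so a
// factory that legitimately returns undefined runs on every call.
```
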
The key of the map is the index name.\n * \"\" is used for the primary index.\n */\nexport class DiffsMap extends Map<string, InternalDiff> {\n override set(key: string, value: InternalDiff): this {\n if (value.length === 0) {\n return this;\n }\n return super.set(key, value);\n }\n}\n\n/**\n * Diffs the state of the db at two different hashes.\n * It will include the primary indexes as well as all the secondary indexes.\n */\nexport async function diff(\n oldHash: Hash,\n newHash: Hash,\n read: Read,\n diffConfig: DiffComputationConfig,\n formatVersion: FormatVersion,\n): Promise<DiffsMap> {\n const [oldCommit, newCommit] = await Promise.all([\n commitFromHash(oldHash, read),\n commitFromHash(newHash, read),\n ]);\n\n return diffCommits(oldCommit, newCommit, read, diffConfig, formatVersion);\n}\n\n/**\n * Diffs the state of the db at two different commits.\n * It will include the primary indexes as well as all the secondary indexes.\n */\n// TODO: this should probably move to db/\nexport async function diffCommits(\n oldCommit: Commit<Meta>,\n newCommit: Commit<Meta>,\n read: Read,\n diffConfig: DiffComputationConfig,\n formatVersion: FormatVersion,\n): Promise<DiffsMap> {\n const diffsMap = new DiffsMap();\n if (!diffConfig.shouldComputeDiffs()) {\n return diffsMap;\n }\n\n const oldMap = new BTreeRead(read, formatVersion, oldCommit.valueHash);\n const newMap = new BTreeRead(read, formatVersion, newCommit.valueHash);\n const valueDiff = await btreeDiff(oldMap, newMap);\n diffsMap.set('', valueDiff);\n\n await addDiffsForIndexes(\n oldCommit,\n newCommit,\n read,\n diffsMap,\n diffConfig,\n formatVersion,\n );\n\n return diffsMap;\n}\n\nexport async function addDiffsForIndexes(\n mainCommit: Commit<Meta>,\n syncCommit: Commit<Meta>,\n read: Read,\n diffsMap: DiffsMap,\n diffConfig: DiffComputationConfig,\n formatVersion: FormatVersion,\n) {\n const oldIndexes = readIndexesForRead(mainCommit, read, formatVersion);\n const newIndexes = readIndexesForRead(syncCommit, read, formatVersion);\n\n for (const [oldIndexName, oldIndex] of oldIndexes) {\n if (!diffConfig.shouldComputeDiffsForIndex(oldIndexName)) {\n continue;\n }\n\n const newIndex = newIndexes.get(oldIndexName);\n if (newIndex !== undefined) {\n assert(newIndex !== oldIndex);\n const diffs = await btreeDiff(oldIndex.map, newIndex.map);\n newIndexes.delete(oldIndexName);\n diffsMap.set(oldIndexName, diffs);\n } else {\n // old index name is not in the new indexes. All entries removed!\n const diffs = await allEntriesAsDiff(oldIndex.map, 'del');\n diffsMap.set(oldIndexName, diffs);\n }\n }\n\n for (const [newIndexName, newIndex] of newIndexes) {\n if (!diffConfig.shouldComputeDiffsForIndex(newIndexName)) {\n continue;\n }\n // new index name is not in the old indexes. 
All keys added!\n const diffs = await allEntriesAsDiff(newIndex.map, 'add');\n diffsMap.set(newIndexName, diffs);\n }\n}\n", "import type {LogContext} from '@rocicorp/logger';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {diff} from '../btree/diff.ts';\nimport type {InternalDiff} from '../btree/node.ts';\nimport {BTreeRead, allEntriesAsDiff} from '../btree/read.ts';\nimport {BTreeWrite} from '../btree/write.ts';\nimport type {FrozenCookie} from '../cookies.ts';\nimport type {Write as DagWrite} from '../dag/store.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport type {FrozenJSONValue} from '../frozen-json.ts';\nimport {type Hash, emptyHash} from '../hash.ts';\nimport {lazy} from '../lazy.ts';\nimport type {DiffComputationConfig} from '../sync/diff.ts';\nimport {DiffsMap} from '../sync/diff.ts';\nimport type {ClientID} from '../sync/ids.ts';\nimport {\n Commit,\n type Meta as CommitMeta,\n type IndexRecord,\n type Meta,\n baseSnapshotHashFromHash,\n commitFromHash,\n newLocalDD31 as commitNewLocalDD31,\n newSnapshotDD31 as commitNewSnapshotDD31,\n getMutationID,\n} from './commit.ts';\nimport * as IndexOperation from './index-operation-enum.ts';\nimport {IndexRead, IndexWrite, indexValue} from './index.ts';\nimport * as MetaType from './meta-type-enum.ts';\nimport {Read, readIndexesForRead} from './read.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport class Write extends Read {\n readonly #dagWrite: DagWrite;\n readonly #basis: Commit<CommitMeta> | undefined;\n readonly #meta: CommitMeta;\n\n declare map: BTreeWrite;\n\n declare readonly indexes: Map<string, IndexWrite>;\n readonly #clientID: ClientID;\n readonly #formatVersion: FormatVersion;\n\n constructor(\n dagWrite: DagWrite,\n map: BTreeWrite,\n basis: Commit<CommitMeta> | undefined,\n meta: CommitMeta,\n indexes: Map<string, IndexWrite>,\n clientID: ClientID,\n formatVersion: FormatVersion,\n ) {\n // TypeScript has trouble\n super(dagWrite, map, indexes);\n this.#dagWrite = dagWrite;\n this.#basis = basis;\n this.#meta = meta;\n this.#clientID = clientID;\n this.#formatVersion = formatVersion;\n\n // TODO(arv): if (DEBUG) { ...\n if (basis === undefined) {\n assert(meta.basisHash === emptyHash);\n } else {\n assert(meta.basisHash === basis.chunk.hash);\n }\n }\n\n /**\n * The value needs to be frozen since it is kept in memory and used later for\n * comparison as well as returned in `get`.\n */\n async put(\n lc: LogContext,\n key: string,\n value: FrozenJSONValue,\n ): Promise<void> {\n const oldVal = lazy(() => this.map.get(key));\n await updateIndexes(lc, this.indexes, key, oldVal, value);\n\n await this.map.put(key, value);\n }\n\n getMutationID(): Promise<number> {\n return getMutationID(this.#clientID, this.#dagWrite, this.#meta);\n }\n\n async del(lc: LogContext, key: string): Promise<boolean> {\n // TODO(arv): This does the binary search twice. 
We can do better.\n const oldVal = lazy(() => this.map.get(key));\n if (oldVal !== undefined) {\n await updateIndexes(lc, this.indexes, key, oldVal, undefined);\n }\n return this.map.del(key);\n }\n\n async clear(): Promise<void> {\n await this.map.clear();\n const ps = [];\n for (const idx of this.indexes.values()) {\n ps.push(idx.clear());\n }\n await Promise.all(ps);\n }\n\n async putCommit(): Promise<Commit<CommitMeta>> {\n const valueHash = await this.map.flush();\n const indexRecords: IndexRecord[] = [];\n\n for (const index of this.indexes.values()) {\n const valueHash = await index.flush();\n const indexRecord: IndexRecord = {\n definition: index.meta.definition,\n valueHash,\n };\n indexRecords.push(indexRecord);\n }\n\n let commit: Commit<Meta>;\n const meta = this.#meta;\n switch (meta.type) {\n case MetaType.LocalDD31: {\n assert(this.#formatVersion >= FormatVersion.DD31);\n const {\n basisHash,\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n timestamp,\n } = meta;\n commit = commitNewLocalDD31(\n this.#dagWrite.createChunk,\n basisHash,\n await baseSnapshotHashFromHash(basisHash, this.#dagWrite),\n mutationID,\n mutatorName,\n mutatorArgsJSON,\n originalHash,\n valueHash,\n indexRecords,\n timestamp,\n this.#clientID,\n );\n break;\n }\n\n case MetaType.SnapshotDD31: {\n assert(this.#formatVersion > FormatVersion.DD31);\n const {basisHash, lastMutationIDs, cookieJSON} = meta;\n commit = commitNewSnapshotDD31(\n this.#dagWrite.createChunk,\n basisHash,\n lastMutationIDs,\n cookieJSON,\n valueHash,\n indexRecords,\n );\n break;\n }\n }\n await this.#dagWrite.putChunk(commit.chunk);\n return commit;\n }\n\n // Return value is the hash of the new commit.\n async commit(headName: string): Promise<Hash> {\n const commit = await this.putCommit();\n const commitHash = commit.chunk.hash;\n await this.#dagWrite.setHead(headName, commitHash);\n await this.#dagWrite.commit();\n return commitHash;\n }\n\n async commitWithDiffs(\n headName: string,\n diffConfig: DiffComputationConfig,\n ): Promise<[Hash, DiffsMap]> {\n const commit = this.putCommit();\n const diffMap = await this.#generateDiffs(diffConfig);\n const commitHash = (await commit).chunk.hash;\n await this.#dagWrite.setHead(headName, commitHash);\n await this.#dagWrite.commit();\n return [commitHash, diffMap];\n }\n\n async #generateDiffs(diffConfig: DiffComputationConfig): Promise<DiffsMap> {\n const diffsMap = new DiffsMap();\n if (!diffConfig.shouldComputeDiffs()) {\n return diffsMap;\n }\n\n let valueDiff: InternalDiff = [];\n if (this.#basis) {\n const basisMap = new BTreeRead(\n this.#dagWrite,\n this.#formatVersion,\n this.#basis.valueHash,\n );\n valueDiff = await diff(basisMap, this.map);\n }\n diffsMap.set('', valueDiff);\n let basisIndexes: Map<string, IndexRead>;\n if (this.#basis) {\n basisIndexes = readIndexesForRead(\n this.#basis,\n this.#dagWrite,\n this.#formatVersion,\n );\n } else {\n basisIndexes = new Map();\n }\n\n for (const [name, index] of this.indexes) {\n if (!diffConfig.shouldComputeDiffsForIndex(name)) {\n continue;\n }\n const basisIndex = basisIndexes.get(name);\n assert(index !== basisIndex);\n\n const indexDiffResult = await (basisIndex\n ? diff(basisIndex.map, index.map)\n : // No basis. All keys are new.\n allEntriesAsDiff(index.map, 'add'));\n diffsMap.set(name, indexDiffResult);\n }\n\n // Handle indexes in basisIndex but not in this.indexes. 
All keys are\n // deleted.\n for (const [name, basisIndex] of basisIndexes) {\n if (\n !this.indexes.has(name) &&\n diffConfig.shouldComputeDiffsForIndex(name)\n ) {\n const indexDiffResult = await allEntriesAsDiff(basisIndex.map, 'del');\n diffsMap.set(name, indexDiffResult);\n }\n }\n return diffsMap;\n }\n\n close(): void {\n this.#dagWrite.release();\n }\n}\n\nexport async function newWriteLocal(\n basisHash: Hash,\n mutatorName: string,\n mutatorArgsJSON: FrozenJSONValue,\n originalHash: Hash | null,\n dagWrite: DagWrite,\n timestamp: number,\n clientID: ClientID,\n formatVersion: FormatVersion,\n): Promise<Write> {\n const basis = await commitFromHash(basisHash, dagWrite);\n const bTreeWrite = new BTreeWrite(dagWrite, formatVersion, basis.valueHash);\n const mutationID = await basis.getNextMutationID(clientID, dagWrite);\n const indexes = readIndexesForWrite(basis, dagWrite, formatVersion);\n assert(formatVersion >= FormatVersion.DD31);\n return new Write(\n dagWrite,\n bTreeWrite,\n basis,\n\n {\n type: MetaType.LocalDD31,\n basisHash,\n baseSnapshotHash: await baseSnapshotHashFromHash(basisHash, dagWrite),\n mutatorName,\n mutatorArgsJSON,\n mutationID,\n originalHash,\n timestamp,\n clientID,\n },\n indexes,\n clientID,\n formatVersion,\n );\n}\n\nexport async function newWriteSnapshotDD31(\n basisHash: Hash,\n lastMutationIDs: Record<ClientID, number>,\n cookieJSON: FrozenCookie,\n dagWrite: DagWrite,\n clientID: ClientID,\n formatVersion: FormatVersion,\n): Promise<Write> {\n const basis = await commitFromHash(basisHash, dagWrite);\n const bTreeWrite = new BTreeWrite(dagWrite, formatVersion, basis.valueHash);\n return new Write(\n dagWrite,\n bTreeWrite,\n basis,\n {basisHash, type: MetaType.SnapshotDD31, lastMutationIDs, cookieJSON},\n readIndexesForWrite(basis, dagWrite, formatVersion),\n clientID,\n formatVersion,\n );\n}\n\nexport async function updateIndexes(\n lc: LogContext,\n indexes: Map<string, IndexWrite>,\n key: string,\n oldValGetter: () => Promise<FrozenJSONValue | undefined>,\n newVal: FrozenJSONValue | undefined,\n): Promise<void> {\n const ps: Promise<void>[] = [];\n for (const idx of indexes.values()) {\n const {keyPrefix} = idx.meta.definition;\n if (!keyPrefix || key.startsWith(keyPrefix)) {\n const oldVal = await oldValGetter();\n if (oldVal !== undefined) {\n ps.push(\n indexValue(\n lc,\n idx.map,\n IndexOperation.Remove,\n key,\n oldVal,\n idx.meta.definition.jsonPointer,\n idx.meta.definition.allowEmpty ?? false,\n ),\n );\n }\n if (newVal !== undefined) {\n ps.push(\n indexValue(\n lc,\n idx.map,\n IndexOperation.Add,\n key,\n newVal,\n idx.meta.definition.jsonPointer,\n idx.meta.definition.allowEmpty ?? 
false,\n ),\n );\n }\n }\n }\n await Promise.all(ps);\n}\n\nexport function readIndexesForWrite(\n commit: Commit<CommitMeta>,\n dagWrite: DagWrite,\n formatVersion: FormatVersion,\n): Map<string, IndexWrite> {\n const m = new Map();\n for (const index of commit.indexes) {\n m.set(\n index.definition.name,\n new IndexWrite(\n index,\n new BTreeWrite(dagWrite, formatVersion, index.valueHash),\n ),\n );\n }\n return m;\n}\n\nexport async function createIndexBTree(\n lc: LogContext,\n dagWrite: DagWrite,\n valueMap: BTreeRead,\n prefix: string,\n jsonPointer: string,\n allowEmpty: boolean,\n formatVersion: FormatVersion,\n): Promise<BTreeWrite> {\n const indexMap = new BTreeWrite(dagWrite, formatVersion);\n for await (const entry of valueMap.scan(prefix)) {\n const key = entry[0];\n if (!key.startsWith(prefix)) {\n break;\n }\n await indexValue(\n lc,\n indexMap,\n IndexOperation.Add,\n key,\n entry[1],\n jsonPointer,\n allowEmpty,\n );\n }\n return indexMap;\n}\n", "import * as valita from '../../../shared/src/valita.ts';\n\n/**\n * The ID describing a group of clients. All clients in the same group share a\n * persistent storage (IDB).\n */\nexport type ClientGroupID = string;\n\nexport const clientGroupIDSchema: valita.Type<ClientGroupID> = valita.string();\n\n/**\n * The ID describing a client.\n */\nexport type ClientID = string;\n\nexport const clientIDSchema: valita.Type<ClientID> = valita.string();\n", "import {randomUint64} from '../../../shared/src/random-uint64.ts';\n\n/**\n * Returns a random 18 character string encoded in base32 suitable as a client\n * ID.\n */\nexport function makeClientID(): string {\n const length = 18;\n const high = randomUint64();\n const low = randomUint64();\n const combined = (high << 64n) | low;\n return combined.toString(32).slice(-length).padStart(length, '0');\n}\n", "import type {LogContext} from '@rocicorp/logger';\nimport {assert, assertObject} from '../../../shared/src/asserts.ts';\nimport type {Enum} from '../../../shared/src/enum.ts';\nimport {hasOwn} from '../../../shared/src/has-own.ts';\nimport * as valita from '../../../shared/src/valita.ts';\nimport {emptyDataNode} from '../btree/node.ts';\nimport {BTreeRead} from '../btree/read.ts';\nimport {type FrozenCookie, compareCookies} from '../cookies.ts';\nimport {type Refs, toRefs} from '../dag/chunk.ts';\nimport type {Read, Store, Write} from '../dag/store.ts';\nimport {\n type ChunkIndexDefinition,\n Commit,\n type IndexRecord,\n type SnapshotMetaDD31,\n assertSnapshotCommitDD31,\n baseSnapshotFromHash,\n chunkIndexDefinitionEqualIgnoreName,\n getRefs,\n newSnapshotCommitDataDD31,\n toChunkIndexDefinition,\n} from '../db/commit.ts';\nimport {createIndexBTree} from '../db/write.ts';\nimport type {DeletedClients} from '../deleted-clients.ts';\nimport * as FormatVersion from '../format-version-enum.ts';\nimport {type FrozenJSONValue, deepFreeze} from '../frozen-json.ts';\nimport {type Hash, hashSchema} from '../hash.ts';\nimport {type IndexDefinitions, indexDefinitionsEqual} from '../index-defs.ts';\nimport {\n type ClientGroupID,\n type ClientID,\n clientGroupIDSchema,\n} from '../sync/ids.ts';\nimport {withWrite} from '../with-transactions.ts';\nimport {\n type ClientGroup,\n getClientGroup,\n getClientGroups,\n mutatorNamesEqual,\n setClientGroup,\n} from './client-groups.ts';\nimport {makeClientID} from './make-client-id.ts';\n\ntype FormatVersion = Enum<typeof FormatVersion>;\n\nexport type ClientMap = ReadonlyMap<ClientID, ClientV5 | ClientV6>;\n\nconst clientV5Schema = 
valita.readonlyObject({\n  heartbeatTimestampMs: valita.number(),\n\n  headHash: hashSchema,\n\n  /**\n   * The hash of a commit we are in the middle of refreshing into this client's\n   * memdag.\n   */\n  tempRefreshHash: hashSchema.nullable(),\n\n  /**\n   * ID of this client's perdag client group. This needs to be sent in the pull\n   * request (to enable syncing all last mutation ids in the client group).\n   */\n  clientGroupID: clientGroupIDSchema,\n});\n\nexport type ClientV5 = valita.Infer<typeof clientV5Schema>;\n\nconst clientV6Schema = valita.readonlyObject({\n  heartbeatTimestampMs: valita.number(),\n\n  /**\n   * A set of hashes, which contains:\n   * 1. The hash of the last commit this client refreshed from its client group\n   *    (this is the commit it bootstrapped from until it completes its first\n   *    refresh).\n   * 2. One or more hashes that were added to retain chunks of a commit while it\n   *    was being refreshed into this client's memdag. (This can be one or more\n   *    because refresh's cleanup step is a separate transaction and can fail).\n   * Upon refresh completing and successfully running its clean up step, this\n   * set will contain a single hash: the hash of the last commit this client\n   * refreshed.\n   */\n  refreshHashes: valita.readonlyArray(hashSchema),\n\n  /**\n   * The hash of the last snapshot commit persisted by this client to this\n   * client's client group, or null if it has never persisted a snapshot.\n   */\n  persistHash: hashSchema.nullable(),\n\n  /**\n   * ID of this client's perdag client group. This needs to be sent in the pull\n   * request (to enable syncing all last mutation ids in the client group).\n   */\n  clientGroupID: clientGroupIDSchema,\n});\n\nexport type ClientV6 = valita.Infer<typeof clientV6Schema>;\n\nexport type Client = ClientV5 | ClientV6;\n\nfunction isClientV6(client: Client): client is ClientV6 {\n  return (client as ClientV6).refreshHashes !== undefined;\n}\n\nexport const CLIENTS_HEAD_NAME = 'clients';\n\nconst clientSchema = valita.union(clientV5Schema, clientV6Schema);\n\nfunction assertClient(value: unknown): asserts value is Client {\n  valita.assert(value, clientSchema);\n}\n\nexport function assertClientV6(value: unknown): asserts value is ClientV6 {\n  valita.assert(value, clientV6Schema);\n}\n\nfunction chunkDataToClientMap(chunkData: unknown): ClientMap {\n  assertObject(chunkData);\n  const clients = new Map();\n  for (const key in chunkData) {\n    if (hasOwn(chunkData, key)) {\n      const value = chunkData[key];\n      if (value !== undefined) {\n        assertClient(value);\n        clients.set(key, value);\n      }\n    }\n  }\n  return clients;\n}\n\nfunction clientMapToChunkData(\n  clients: ClientMap,\n  dagWrite: Write,\n): FrozenJSONValue {\n  for (const client of clients.values()) {\n    if (isClientV6(client)) {\n      client.refreshHashes.forEach(dagWrite.assertValidHash);\n      if (client.persistHash) {\n        dagWrite.assertValidHash(client.persistHash);\n      }\n    } else {\n      dagWrite.assertValidHash(client.headHash);\n      if (client.tempRefreshHash) {\n        dagWrite.assertValidHash(client.tempRefreshHash);\n      }\n    }\n  }\n  return deepFreeze(Object.fromEntries(clients));\n}\n\nexport async function getClients(dagRead: Read): Promise<ClientMap> {\n  const hash = await dagRead.getHead(CLIENTS_HEAD_NAME);\n  return getClientsAtHash(hash, dagRead);\n}\n\nasync function getClientsAtHash(\n  hash: Hash | undefined,\n  dagRead: Read,\n): Promise<ClientMap> {\n  if (!hash) {\n    return new Map();\n  }\n  const chunk = await dagRead.getChunk(hash);\n  return chunkDataToClientMap(chunk?.data);\n}\n\n/**\n * Used to signal that a client does not exist. 
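
`isClientV6` above discriminates the two record shapes purely by the presence of `refreshHashes`. A sketch with hypothetical literals (`'fake-hash'` stands in for a real `Hash`):

```ts
const v5Like = {
  heartbeatTimestampMs: Date.now(),
  headHash: 'fake-hash',
  tempRefreshHash: null,
  clientGroupID: 'group-1',
};
const v6Like = {
  heartbeatTimestampMs: Date.now(),
  refreshHashes: ['fake-hash'],
  persistHash: null,
  clientGroupID: 'group-1',
};

// Same shape test as isClientV6 above.
const looksLikeV6 = (c: object) => 'refreshHashes' in c;
console.log(looksLikeV6(v5Like), looksLikeV6(v6Like)); // false true
```
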
Maybe it was garbage collected?\n */\nexport class ClientStateNotFoundError extends Error {\n name = 'ClientStateNotFoundError';\n readonly id: string;\n constructor(id: ClientID) {\n super(`Client state not found, id: ${id}`);\n this.id = id;\n }\n}\n\n/**\n * Throws a `ClientStateNotFoundError` if the client does not exist.\n */\nexport async function assertHasClientState(\n id: ClientID,\n dagRead: Read,\n): Promise<void> {\n if (!(await hasClientState(id, dagRead))) {\n throw new ClientStateNotFoundError(id);\n }\n}\n\nexport async function hasClientState(\n id: ClientID,\n dagRead: Read,\n): Promise<boolean> {\n return !!(await getClient(id, dagRead));\n}\n\nexport async function getClient(\n id: ClientID,\n dagRead: Read,\n): Promise<Client | undefined> {\n const clients = await getClients(dagRead);\n return clients.get(id);\n}\n\nexport async function mustGetClient(\n id: ClientID,\n dagRead: Read,\n): Promise<Client> {\n const client = await getClient(id, dagRead);\n if (!client) {\n throw new ClientStateNotFoundError(id);\n }\n return client;\n}\n\ntype InitClientV6Result = [\n client: ClientV6,\n hash: Hash,\n clientMap: ClientMap,\n newClientGroup: boolean,\n];\n\nexport function initClientV6(\n newClientID: ClientID,\n lc: LogContext,\n perdag: Store,\n mutatorNames: string[],\n indexes: IndexDefinitions,\n formatVersion: FormatVersion,\n enableClientGroupForking: boolean,\n): Promise<InitClientV6Result> {\n return withWrite(perdag, async dagWrite => {\n async function setClientsAndClientGroupAndCommit(\n basisHash: Hash | null,\n cookieJSON: FrozenCookie,\n valueHash: Hash,\n indexRecords: readonly IndexRecord[],\n ): Promise<InitClientV6Result> {\n const newSnapshotData = newSnapshotCommitDataDD31(\n basisHash,\n {},\n cookieJSON,\n valueHash,\n indexRecords,\n );\n const chunk = dagWrite.createChunk(\n newSnapshotData,\n getRefs(newSnapshotData),\n );\n\n const newClientGroupID = makeClientID();\n\n const newClient: ClientV6 = {\n heartbeatTimestampMs: Date.now(),\n refreshHashes: [chunk.hash],\n persistHash: null,\n clientGroupID: newClientGroupID,\n };\n\n const newClients = new Map(clients).set(newClientID, newClient);\n\n const clientGroup: ClientGroup = {\n headHash: chunk.hash,\n mutatorNames,\n indexes,\n mutationIDs: {},\n lastServerAckdMutationIDs: {},\n disabled: false,\n };\n\n await Promise.all([\n dagWrite.putChunk(chunk),\n setClients(newClients, dagWrite),\n setClientGroup(newClientGroupID, clientGroup, dagWrite),\n ]);\n\n return [newClient, chunk.hash, newClients, true];\n }\n\n const clients = await getClients(dagWrite);\n\n const res = await findMatchingClient(dagWrite, mutatorNames, indexes);\n if (res.type === FIND_MATCHING_CLIENT_TYPE_HEAD) {\n // We found a client group with matching mutators and indexes. We can\n // reuse it.\n const {clientGroupID, headHash} = res;\n\n const newClient: ClientV6 = {\n clientGroupID,\n refreshHashes: [headHash],\n heartbeatTimestampMs: Date.now(),\n persistHash: null,\n };\n const newClients = new Map(clients).set(newClientID, newClient);\n await setClients(newClients, dagWrite);\n\n return [newClient, headHash, newClients, false];\n }\n\n if (\n !enableClientGroupForking ||\n res.type === FIND_MATCHING_CLIENT_TYPE_NEW\n ) {\n // No client group to fork from. 
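
`mustGetClient` above converts a missing client record into a typed `ClientStateNotFoundError`, so callers can distinguish "the client was collected" from other failures. A hedged usage sketch; `Read` is the dag-store read type imported above:

```ts
async function loadClientOrUndefined(clientID: string, dagRead: Read) {
  try {
    return await mustGetClient(clientID, dagRead);
  } catch (e) {
    if (e instanceof ClientStateNotFoundError) {
      // Likely garbage collected; e.id names the missing client.
      return undefined;
    }
    throw e;
  }
}
```
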
Create empty snapshot.\n const emptyBTreeChunk = dagWrite.createChunk(emptyDataNode, []);\n await dagWrite.putChunk(emptyBTreeChunk);\n\n // Create indexes\n const indexRecords: IndexRecord[] = [];\n\n // At this point the value of replicache is the empty tree so all index\n // maps will also be the empty tree.\n for (const [name, indexDefinition] of Object.entries(indexes)) {\n const chunkIndexDefinition = toChunkIndexDefinition(\n name,\n indexDefinition,\n );\n indexRecords.push({\n definition: chunkIndexDefinition,\n valueHash: emptyBTreeChunk.hash,\n });\n }\n\n return setClientsAndClientGroupAndCommit(\n null,\n null,\n emptyBTreeChunk.hash,\n indexRecords,\n );\n }\n\n // Now we create a new client and client group that we fork from the found\n // snapshot.\n assert(res.type === FIND_MATCHING_CLIENT_TYPE_FORK);\n\n const {snapshot} = res;\n\n // Create indexes\n const indexRecords: IndexRecord[] = [];\n const {valueHash, indexes: oldIndexes} = snapshot;\n const map = new BTreeRead(dagWrite, formatVersion, valueHash);\n\n for (const [name, indexDefinition] of Object.entries(indexes)) {\n const {prefix = '', jsonPointer, allowEmpty = false} = indexDefinition;\n const chunkIndexDefinition: ChunkIndexDefinition = {\n name,\n keyPrefix: prefix,\n jsonPointer,\n allowEmpty,\n };\n\n const oldIndex = findMatchingOldIndex(oldIndexes, chunkIndexDefinition);\n if (oldIndex) {\n indexRecords.push({\n definition: chunkIndexDefinition,\n valueHash: oldIndex.valueHash,\n });\n } else {\n const indexBTree = await createIndexBTree(\n lc,\n dagWrite,\n map,\n prefix,\n jsonPointer,\n allowEmpty,\n formatVersion,\n );\n indexRecords.push({\n definition: chunkIndexDefinition,\n valueHash: await indexBTree.flush(),\n });\n }\n }\n\n return setClientsAndClientGroupAndCommit(\n snapshot.meta.basisHash,\n snapshot.meta.cookieJSON,\n snapshot.valueHash,\n indexRecords,\n );\n });\n}\n\nfunction findMatchingOldIndex(\n oldIndexes: readonly IndexRecord[],\n chunkIndexDefinition: ChunkIndexDefinition,\n) {\n return oldIndexes.find(index =>\n chunkIndexDefinitionEqualIgnoreName(index.definition, chunkIndexDefinition),\n );\n}\n\nexport const FIND_MATCHING_CLIENT_TYPE_NEW = 0;\nexport const FIND_MATCHING_CLIENT_TYPE_FORK = 1;\nexport const FIND_MATCHING_CLIENT_TYPE_HEAD = 2;\n\nexport type FindMatchingClientResult =\n | {\n type: typeof FIND_MATCHING_CLIENT_TYPE_NEW;\n }\n | {\n type: typeof FIND_MATCHING_CLIENT_TYPE_FORK;\n snapshot: Commit<SnapshotMetaDD31>;\n }\n | {\n type: typeof FIND_MATCHING_CLIENT_TYPE_HEAD;\n clientGroupID: ClientGroupID;\n headHash: Hash;\n };\n\nexport async function findMatchingClient(\n dagRead: Read,\n mutatorNames: string[],\n indexes: IndexDefinitions,\n): Promise<FindMatchingClientResult> {\n let newestCookie: FrozenCookie | undefined;\n let bestSnapshot: Commit<SnapshotMetaDD31> | undefined;\n const mutatorNamesSet = new Set(mutatorNames);\n\n const clientGroups = await getClientGroups(dagRead);\n for (const [clientGroupID, clientGroup] of clientGroups) {\n if (\n !clientGroup.disabled &&\n mutatorNamesEqual(mutatorNamesSet, clientGroup.mutatorNames) &&\n indexDefinitionsEqual(indexes, clientGroup.indexes)\n ) {\n // exact match\n return {\n type: FIND_MATCHING_CLIENT_TYPE_HEAD,\n clientGroupID,\n headHash: clientGroup.headHash,\n };\n }\n\n const clientGroupSnapshotCommit = await baseSnapshotFromHash(\n clientGroup.headHash,\n dagRead,\n );\n assertSnapshotCommitDD31(clientGroupSnapshotCommit);\n\n const {cookieJSON} = clientGroupSnapshotCommit.meta;\n if (\n newestCookie === 
undefined ||\n      compareCookies(cookieJSON, newestCookie) > 0\n    ) {\n      newestCookie = cookieJSON;\n      bestSnapshot = clientGroupSnapshotCommit;\n    }\n  }\n\n  if (bestSnapshot) {\n    return {\n      type: FIND_MATCHING_CLIENT_TYPE_FORK,\n      snapshot: bestSnapshot,\n    };\n  }\n\n  return {type: FIND_MATCHING_CLIENT_TYPE_NEW};\n}\n\nfunction getRefsForClients(clients: ClientMap): Refs {\n  const refs: Set<Hash> = new Set();\n  for (const client of clients.values()) {\n    if (isClientV6(client)) {\n      for (const hash of client.refreshHashes) {\n        refs.add(hash);\n      }\n      if (client.persistHash) {\n        refs.add(client.persistHash);\n      }\n    } else {\n      refs.add(client.headHash);\n      if (client.tempRefreshHash) {\n        refs.add(client.tempRefreshHash);\n      }\n    }\n  }\n  return toRefs(refs);\n}\n\nexport async function getClientGroupForClient(\n  clientID: ClientID,\n  read: Read,\n): Promise<ClientGroup | undefined> {\n  const clientGroupID = await getClientGroupIDForClient(clientID, read);\n  if (!clientGroupID) {\n    return undefined;\n  }\n  return getClientGroup(clientGroupID, read);\n}\n\nexport async function getClientGroupIDForClient(\n  clientID: ClientID,\n  read: Read,\n): Promise<ClientGroupID | undefined> {\n  const client = await getClient(clientID, read);\n  return client?.clientGroupID;\n}\n\n/**\n * Adds a Client to the ClientMap and updates the 'clients' head to point at\n * the updated clients.\n */\nexport async function setClient(\n  clientID: ClientID,\n  client: Client,\n  dagWrite: Write,\n): Promise<Hash> {\n  const clients = await getClients(dagWrite);\n  const newClients = new Map(clients).set(clientID, client);\n  return setClients(newClients, dagWrite);\n}\n\n/**\n * Sets the ClientMap and updates the 'clients' head to point at the new\n * clients.\n */\nexport async function setClients(\n  clients: ClientMap,\n  dagWrite: Write,\n): Promise<Hash> {\n  const chunkData = clientMapToChunkData(clients, dagWrite);\n  const chunk = dagWrite.createChunk(chunkData, getRefsForClients(clients));\n  await dagWrite.putChunk(chunk);\n  await dagWrite.setHead(CLIENTS_HEAD_NAME, chunk.hash);\n  return chunk.hash;\n}\n\n/**\n * Callback function for when Replicache has deleted one or more clients.\n */\nexport type OnClientsDeleted = (\n  deletedClients: DeletedClients,\n) => Promise<void>;\n", "export function mapValues<T extends Record<string, unknown>, U>(\n  input: T,\n  mapper: (value: T[keyof T]) => U,\n): {[K in keyof T]: U} {\n  return mapEntries(input, (k, v) => [k, mapper(v as T[keyof T])]) as {\n    [K in keyof T]: U;\n  };\n}\n\nexport function mapEntries<T, U>(\n  input: Record<string, T>,\n  mapper: (key: string, val: T) => [key: string, val: U],\n): Record<string, U> {\n  // Direct assignment is faster than Object.fromEntries()\n  // https://github.com/rocicorp/mono/pull/3927#issuecomment-2706059475\n  const output: Record<string, U> = {};\n\n  // In Chrome Object.entries is faster than for-in (13x) or Object.keys (15x)\n  // https://gist.github.com/arv/1b4e113724f6a14e2d4742bcc760d1fa\n  for (const entry of Object.entries(input)) {\n    const mapped = mapper(entry[0], entry[1]);\n    output[mapped[0]] = mapped[1];\n  }\n  return output;\n}\n\nexport function mapAllEntries<T, U>(\n  input: Record<string, T>,\n  mapper: (entries: [key: string, val: T][]) => [key: string, val: U][],\n): Record<string, U> {\n  // Direct assignment is faster than Object.fromEntries()\n  // https://github.com/rocicorp/mono/pull/3927#issuecomment-2706059475\n  const output: Record<string, U> = {};\n  for (const mapped of mapper(Object.entries(input))) {\n    output[mapped[0]] = mapped[1];\n 
}\n  return output;\n}\n", "export function must<T>(v: T | undefined | null, msg?: string): T {\n  // eslint-disable-next-line eqeqeq\n  if (v == null) {\n    throw new Error(msg ?? `Unexpected ${v} value`);\n  }\n  return v;\n}\n", "import {compareUTF8} from 'compare-utf8';\nimport {\n  assertBoolean,\n  assertNumber,\n  assertString,\n} from '../../../shared/src/asserts.ts';\nimport type {Ordering} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * A row flowing through the pipeline, plus its relationships.\n * Relationships are generated lazily as read.\n */\nexport type Node = {\n  row: Row;\n  relationships: Record<string, () => Stream<Node>>;\n};\n\n/**\n * Compare two values. The values must be of the same type. This function\n * throws at runtime if the types differ.\n *\n * Note, this function considers `null === null` and\n * `undefined === undefined`. This is different than SQL. In join code,\n * null must be treated separately.\n *\n * See: https://github.com/rocicorp/mono/pull/2116/files#r1704811479\n *\n * @returns < 0 if a < b, 0 if a === b, > 0 if a > b\n */\nexport function compareValues(a: Value, b: Value): number {\n  a = normalizeUndefined(a);\n  b = normalizeUndefined(b);\n\n  if (a === b) {\n    return 0;\n  }\n  if (a === null) {\n    return -1;\n  }\n  if (b === null) {\n    return 1;\n  }\n  if (typeof a === 'boolean') {\n    assertBoolean(b);\n    return a ? 1 : -1;\n  }\n  if (typeof a === 'number') {\n    assertNumber(b);\n    return a - b;\n  }\n  if (typeof a === 'string') {\n    assertString(b);\n    // We compare all strings in Zero as UTF-8. This is the default on SQLite\n    // and we need to match it. See:\n    // https://blog.replicache.dev/blog/replicache-11-adventures-in-text-encoding.\n    //\n    // TODO: We could change this since SQLite supports UTF-16. Microbenchmark\n    // to see if there's a big win.\n    //\n    // https://www.sqlite.org/c3ref/create_collation.html\n    return compareUTF8(a, b);\n  }\n  throw new Error(`Unsupported type: ${a}`);\n}\n\nexport type NormalizedValue = Exclude<Value, undefined>;\n\n/**\n * We allow undefined to be passed for the convenience of developers, but we\n * treat it equivalently to null. It's better for perf to not create a copy\n * of input values, so we just normalize at use when necessary.\n */\nexport function normalizeUndefined(v: Value): NormalizedValue {\n  return v ?? null;\n}\n\nexport type Comparator = (r1: Row, r2: Row) => number;\n\nexport function makeComparator(\n  order: Ordering,\n  reverse?: boolean | undefined,\n): Comparator {\n  return (a, b) => {\n    // Skip destructuring here since it is hot code.\n    for (const ord of order) {\n      const field = ord[0];\n      const comp = compareValues(a[field], b[field]);\n      if (comp !== 0) {\n        const result = ord[1] === 'asc' ? comp : -comp;\n        return reverse ? -result : result;\n      }\n    }\n    return 0;\n  };\n}\n\n/**\n * Determine if two values are equal. 
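
`makeComparator` above turns an `Ordering` (a list of `[column, direction]` pairs) into a row comparator, applying later pairs only to break ties. A short sketch, assuming `makeComparator` is in scope:

```ts
const cmp = makeComparator([
  ['name', 'asc'],
  ['id', 'desc'],
]);

const rows = [
  {name: 'b', id: 1},
  {name: 'a', id: 1},
  {name: 'a', id: 2},
];
rows.sort(cmp);
// → {a, 2}, {a, 1}, {b, 1}: name ascending, ties broken by id descending.
```
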
Note that unlike compareValues() above,\n * this function treats `null` as unequal to itself (and same for `undefined`).\n * This is required to make joins work correctly, but may not be the right\n * semantic for your application.\n */\nexport function valuesEqual(a: Value, b: Value): boolean {\n // eslint-disable-next-line eqeqeq\n if (a == null || b == null) {\n return false;\n }\n return a === b;\n}\n\nexport function drainStreams(node: Node) {\n for (const stream of Object.values(node.relationships)) {\n for (const node of stream()) {\n drainStreams(node);\n }\n }\n}\n", "import {\n assert,\n assertArray,\n assertNumber,\n unreachable,\n} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport {drainStreams, type Comparator, type Node} from './data.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Entry, Format} from './view.ts';\n\nexport const refCountSymbol = Symbol('rc');\nexport const idSymbol = Symbol('id');\n\ntype MetaEntry = Writable<Entry> & {\n [refCountSymbol]: number;\n [idSymbol]?: string | undefined;\n};\ntype MetaEntryList = MetaEntry[];\n\n/**\n * `applyChange` does not consume the `relationships` of `ChildChange#node`,\n * `EditChange#node` and `EditChange#oldNode`. The `ViewChange` type\n * documents and enforces this via the type system.\n */\nexport type ViewChange =\n | AddViewChange\n | RemoveViewChange\n | ChildViewChange\n | EditViewChange;\n\nexport type RowOnlyNode = {row: Row};\n\nexport type AddViewChange = {\n type: 'add';\n node: Node;\n};\n\nexport type RemoveViewChange = {\n type: 'remove';\n node: Node;\n};\n\ntype ChildViewChange = {\n type: 'child';\n node: RowOnlyNode;\n child: {\n relationshipName: string;\n change: ViewChange;\n };\n};\n\ntype EditViewChange = {\n type: 'edit';\n node: RowOnlyNode;\n oldNode: RowOnlyNode;\n};\n\n/**\n * This is a subset of WeakMap but restricted to what we need.\n * @deprecated Not used anymore. This will be removed in the future.\n */\nexport interface RefCountMap {\n get(entry: Entry): number | undefined;\n set(entry: Entry, refCount: number): void;\n delete(entry: Entry): boolean;\n}\n\nexport function applyChange(\n parentEntry: Entry,\n change: ViewChange,\n schema: SourceSchema,\n relationship: string,\n format: Format,\n withIDs = false,\n): void {\n if (schema.isHidden) {\n switch (change.type) {\n case 'add':\n case 'remove':\n for (const [relationship, children] of Object.entries(\n change.node.relationships,\n )) {\n const childSchema = must(schema.relationships[relationship]);\n for (const node of children()) {\n applyChange(\n parentEntry,\n {type: change.type, node},\n childSchema,\n relationship,\n format,\n withIDs,\n );\n }\n }\n return;\n case 'edit':\n // If hidden at this level it means that the hidden row was changed. 
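
The contrast between `compareValues` and `valuesEqual` above matters for joins. A sketch of the differing `null` semantics, assuming both functions are in scope:

```ts
compareValues(null, null);    // 0: null sorts as equal to null
valuesEqual(null, null);      // false: join semantics, null matches nothing
valuesEqual(undefined, null); // false: undefined is treated like null
valuesEqual(1, 1);            // true
```
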
If\n // the row was changed in such a way that it would change the\n // relationships then the edit would have been split into remove and\n // add.\n return;\n case 'child': {\n const childSchema = must(\n schema.relationships[change.child.relationshipName],\n );\n applyChange(\n parentEntry,\n change.child.change,\n childSchema,\n relationship,\n format,\n withIDs,\n );\n return;\n }\n default:\n unreachable(change);\n }\n }\n\n const {singular, relationships: childFormats} = format;\n switch (change.type) {\n case 'add': {\n let newEntry: MetaEntry | undefined;\n\n if (singular) {\n const oldEntry = parentEntry[relationship] as MetaEntry | undefined;\n if (oldEntry !== undefined) {\n assert(\n schema.compareRows(oldEntry, change.node.row) === 0,\n `Singular relationship '${relationship}' should not have multiple rows. You may need to declare this relationship with the \\`many\\` helper instead of the \\`one\\` helper in your schema.`,\n );\n // adding same again.\n oldEntry[refCountSymbol]++;\n } else {\n newEntry = makeNewMetaEntry(change.node.row, schema, withIDs, 1);\n\n (parentEntry as Writable<Entry>)[relationship] = newEntry;\n }\n } else {\n newEntry = add(\n change.node.row,\n getChildEntryList(parentEntry, relationship),\n schema,\n withIDs,\n );\n }\n\n if (newEntry) {\n for (const [relationship, children] of Object.entries(\n change.node.relationships,\n )) {\n // TODO: Is there a flag to make TypeScript complain that dictionary access might be undefined?\n const childSchema = must(schema.relationships[relationship]);\n const childFormat = childFormats[relationship];\n if (childFormat === undefined) {\n continue;\n }\n\n const newView = childFormat.singular\n ? undefined\n : ([] as MetaEntryList);\n newEntry[relationship] = newView;\n\n for (const node of children()) {\n applyChange(\n newEntry,\n {type: 'add', node},\n childSchema,\n relationship,\n childFormat,\n withIDs,\n );\n }\n }\n }\n break;\n }\n case 'remove': {\n if (singular) {\n const oldEntry = parentEntry[relationship] as MetaEntry | undefined;\n assert(oldEntry !== undefined, 'node does not exist');\n const rc = oldEntry[refCountSymbol];\n if (rc === 1) {\n (parentEntry as Writable<Entry>)[relationship] = undefined;\n }\n oldEntry[refCountSymbol]--;\n } else {\n removeAndUpdateRefCount(\n getChildEntryList(parentEntry, relationship),\n change.node.row,\n schema.compareRows,\n );\n }\n // Needed to ensure cleanup of operator state is fully done.\n drainStreams(change.node);\n break;\n }\n case 'child': {\n let existing: MetaEntry;\n if (singular) {\n existing = getSingularEntry(parentEntry, relationship);\n } else {\n const view = getChildEntryList(parentEntry, relationship);\n const {pos, found} = binarySearch(\n view,\n change.node.row,\n schema.compareRows,\n );\n assert(found, 'node does not exist');\n existing = view[pos];\n }\n\n const childSchema = must(\n schema.relationships[change.child.relationshipName],\n );\n const childFormat = format.relationships[change.child.relationshipName];\n if (childFormat !== undefined) {\n applyChange(\n existing,\n change.child.change,\n childSchema,\n change.child.relationshipName,\n childFormat,\n withIDs,\n );\n }\n break;\n }\n case 'edit': {\n if (singular) {\n const existing = parentEntry[relationship];\n assertMetaEntry(existing);\n applyEdit(existing, change, schema, withIDs);\n } else {\n const view = getChildEntryList(parentEntry, relationship);\n // The position of the row in the list may have changed due to the edit.\n if 
(schema.compareRows(change.oldNode.row, change.node.row) !== 0) {\n          const {pos: oldPos, found: oldFound} = binarySearch(\n            view,\n            change.oldNode.row,\n            schema.compareRows,\n          );\n          assert(oldFound, 'old node does not exist');\n          const oldEntry = view[oldPos];\n          const {pos, found} = binarySearch(\n            view,\n            change.node.row,\n            schema.compareRows,\n          );\n          // A special case:\n          // when refCount is 1 (so the row is being moved\n          // without leaving a placeholder behind), and the new pos is\n          // the same as the old, or directly after the old (so after the remove\n          // of the old it would be in the same pos):\n          // the row does not need to be moved, it can just be edited in place.\n          if (\n            oldEntry[refCountSymbol] === 1 &&\n            (pos === oldPos || pos - 1 === oldPos)\n          ) {\n            applyEdit(oldEntry, change, schema, withIDs);\n          } else {\n            // Move the row. If the row has > 1 ref count, an edit should\n            // be received for each ref count. On the first edit, the original\n            // row is moved, the edit is applied to it and its ref count is set\n            // to 1. A shallow copy of the row is left at the old pos for\n            // processing of the remaining edits, and the copy's ref count\n            // is decremented. As each edit is received the ref count of the\n            // copy is decremented, and the ref count of the row at the new\n            // position is incremented. When the copy's ref count goes to 0,\n            // it is removed.\n            oldEntry[refCountSymbol]--;\n            let adjustedPos = pos;\n            if (oldEntry[refCountSymbol] === 0) {\n              view.splice(oldPos, 1);\n              adjustedPos = oldPos < pos ? pos - 1 : pos;\n            }\n\n            let entryToEdit;\n            if (found) {\n              entryToEdit = view[adjustedPos];\n            } else {\n              view.splice(adjustedPos, 0, oldEntry);\n              entryToEdit = oldEntry;\n              if (oldEntry[refCountSymbol] > 0) {\n                const oldEntryCopy = {...oldEntry};\n                view[oldPos] = oldEntryCopy;\n              }\n            }\n            entryToEdit[refCountSymbol]++;\n            applyEdit(entryToEdit, change, schema, withIDs);\n          }\n        } else {\n          // Position could not have changed, so simply edit in place.\n          const {pos, found} = binarySearch(\n            view,\n            change.oldNode.row,\n            schema.compareRows,\n          );\n          assert(found, 'node does not exist');\n          applyEdit(view[pos], change, schema, withIDs);\n        }\n      }\n\n      break;\n    }\n    default:\n      unreachable(change);\n  }\n}\n\nfunction applyEdit(\n  existing: MetaEntry,\n  change: EditViewChange,\n  schema: SourceSchema,\n  withIDs: boolean,\n) {\n  Object.assign(existing, change.node.row);\n  if (withIDs) {\n    existing[idSymbol] = makeID(change.node.row, schema);\n  }\n}\n\nfunction add(\n  row: Row,\n  view: MetaEntryList,\n  schema: SourceSchema,\n  withIDs: boolean,\n): MetaEntry | undefined {\n  const {pos, found} = binarySearch(view, row, schema.compareRows);\n\n  if (found) {\n    view[pos][refCountSymbol]++;\n    return undefined;\n  }\n  const newEntry = makeNewMetaEntry(row, schema, withIDs, 1);\n  view.splice(pos, 0, newEntry);\n  return newEntry;\n}\n\nfunction removeAndUpdateRefCount(\n  view: MetaEntryList,\n  row: Row,\n  compareRows: Comparator,\n): MetaEntry {\n  const {pos, found} = binarySearch(view, row, compareRows);\n  assert(found, 'node does not exist');\n  const oldEntry = view[pos];\n  const rc = oldEntry[refCountSymbol];\n  if (rc === 1) {\n    view.splice(pos, 1);\n  }\n  oldEntry[refCountSymbol]--;\n\n  return oldEntry;\n}\n\n// TODO: Do not return an object. 
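
`add` and `removeAndUpdateRefCount` above keep the view list ref-counted: adding a row that is already present bumps its count instead of duplicating it, and a remove only splices when the count drops from 1. A simplified, linear-scan stand-in (the real code binary-searches a sorted `MetaEntryList`):

```ts
const rc = Symbol('rc');
type DemoEntry = {id: number; [rc]: number};
const view: DemoEntry[] = [];

function addRow(id: number): void {
  const hit = view.find(e => e.id === id);
  if (hit) {
    hit[rc]++; // duplicate add: bump the count, keep a single entry
  } else {
    view.push({id, [rc]: 1});
  }
}

function removeRow(id: number): void {
  // Assumes the row is present, as the asserts above guarantee.
  const i = view.findIndex(e => e.id === id);
  if (view[i][rc] === 1) {
    view.splice(i, 1); // last reference: drop the entry
  } else {
    view[i][rc]--;
  }
}

addRow(7);
addRow(7);    // count → 2, still one entry
removeRow(7); // count → 1, entry stays
removeRow(7); // entry removed
console.log(view.length); // 0
```
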
It puts unnecessary pressure on the GC.\nfunction binarySearch(\n view: MetaEntryList,\n target: Row,\n comparator: Comparator,\n) {\n let low = 0;\n let high = view.length - 1;\n while (low <= high) {\n const mid = (low + high) >>> 1;\n const comparison = comparator(view[mid] as Row, target as Row);\n if (comparison < 0) {\n low = mid + 1;\n } else if (comparison > 0) {\n high = mid - 1;\n } else {\n return {pos: mid, found: true};\n }\n }\n return {pos: low, found: false};\n}\n\nfunction getChildEntryList(\n parentEntry: Entry,\n relationship: string,\n): MetaEntryList {\n const view = parentEntry[relationship];\n assertArray(view);\n return view as MetaEntryList;\n}\n\nfunction assertMetaEntry(v: unknown): asserts v is MetaEntry {\n assertNumber((v as Partial<MetaEntry>)[refCountSymbol]);\n}\n\nfunction getSingularEntry(parentEntry: Entry, relationship: string): MetaEntry {\n const e = parentEntry[relationship];\n assertNumber((e as Partial<MetaEntry>)[refCountSymbol]);\n return e as MetaEntry;\n}\n\nfunction makeNewMetaEntry(\n row: Row,\n schema: SourceSchema,\n withIDs: boolean,\n rc: number,\n): MetaEntry {\n if (withIDs) {\n return {...row, [refCountSymbol]: rc, [idSymbol]: makeID(row, schema)};\n }\n return {...row, [refCountSymbol]: rc};\n}\nfunction makeID(row: Row, schema: SourceSchema) {\n // optimization for case of non-compound primary key\n if (schema.primaryKey.length === 1) {\n return JSON.stringify(row[schema.primaryKey[0]]);\n }\n return JSON.stringify(schema.primaryKey.map(k => row[k]));\n}\n", "import * as valita from '@badrap/valita';\nimport {skipAssertJSONValue} from './config.ts';\nimport type {ReadonlyJSONObject, ReadonlyJSONValue} from './json.ts';\nimport {isJSONObject, isJSONValue} from './json.ts';\nimport * as v from './valita.ts';\n\nconst path: (string | number)[] = [];\n\nexport const jsonSchema: valita.Type<ReadonlyJSONValue> = v\n .unknown()\n .chain(v => {\n if (skipAssertJSONValue) {\n return valita.ok(v as ReadonlyJSONValue);\n }\n const rv = isJSONValue(v, path)\n ? valita.ok(v)\n : valita.err({\n message: `Not a JSON value`,\n path: path.slice(),\n });\n path.length = 0;\n return rv;\n });\n\nexport const jsonObjectSchema: valita.Type<ReadonlyJSONObject> = v\n .unknown()\n .chain(v => {\n if (skipAssertJSONValue) {\n return valita.ok(v as ReadonlyJSONObject);\n }\n const rv = isJSONObject(v, path)\n ? valita.ok(v)\n : valita.err({\n message: `Not a JSON object`,\n path: path.slice(),\n });\n path.length = 0;\n return rv;\n });\n", "import * as v from './valita.ts';\n\n/**\n * Valita schema for TDigest JSON representation.\n * Matches the structure returned by TDigest.toJSON().\n */\nexport const tdigestSchema = v.tuple([v.number()]).concat(v.array(v.number()));\n\nexport type TDigestJSON = v.Infer<typeof tdigestSchema>;\n", "import {jsonSchema} from '../../shared/src/json-schema.ts';\nimport * as v from '../../shared/src/valita.ts';\n\nexport const valueSchema = v.union(jsonSchema, v.undefined());\n\nexport const rowSchema = v.readonlyRecord(valueSchema);\n\n/**\n * The data types that Zero can represent are limited by two things:\n *\n * 1. The underlying Replicache sync layer currently can only represent JSON\n * types. This could possibly be expanded in the future, but we do want to be\n * careful of adding encoding overhead. By using JSON, we are taking\n * advantage of IndexedDB\u2019s fast native JSValue [de]serialization which has\n * historically been a perf advantage for us.\n *\n * 2. 
IDs in Zero need to be comparable because we use them for sorting and row\n *    identity. We could expand the set of allowed value types (to include,\n *    e.g., Objects) but we would then need to restrict IDs to only comparable\n *    types.\n *\n * These two facts leave us with the following allowed types. Zero's replication\n * layer must convert other types into these for tables to be used with Zero.\n *\n * For developer convenience we also allow `undefined`, which we treat\n * equivalently to `null`.\n */\nexport type Value = v.Infer<typeof valueSchema>;\n\n/**\n * A Row is represented as a JS Object.\n *\n * We do everything in IVM as loosely typed values because these pipelines are\n * going to be constructed at runtime by other code, so type-safety can't buy us\n * anything.\n *\n * Also since the calling code on the client ultimately wants objects to work\n * with we end up with a lot fewer copies by using objects throughout.\n */\nexport type Row = v.Infer<typeof rowSchema>;\n", "import * as v from '../../shared/src/valita.ts';\n\nimport {rowSchema} from './data.ts';\n\nexport const rowCountsByQuerySchema = v.record(v.number());\nexport type RowCountsByQuery = v.Infer<typeof rowCountsByQuerySchema>;\n\nexport const rowCountsBySourceSchema = v.record(rowCountsByQuerySchema);\nexport type RowCountsBySource = v.Infer<typeof rowCountsBySourceSchema>;\n\nexport const rowsByQuerySchema = v.record(v.array(rowSchema));\nexport type RowsByQuery = v.Infer<typeof rowsByQuerySchema>;\n\nexport const rowsBySourceSchema = v.record(rowsByQuerySchema);\nexport type RowsBySource = v.Infer<typeof rowsBySourceSchema>;\n\nexport const analyzeQueryResultSchema = v.object({\n  warnings: v.array(v.string()),\n  syncedRows: v.record(v.array(rowSchema)).optional(),\n  syncedRowCount: v.number(),\n  start: v.number(),\n  end: v.number(),\n  afterPermissions: v.string().optional(),\n  vendedRowCounts: rowCountsBySourceSchema.optional(),\n  vendedRows: rowsBySourceSchema.optional(),\n  plans: v.record(v.array(v.string())).optional(),\n});\n\nexport type AnalyzeQueryResult = v.Infer<typeof analyzeQueryResultSchema>;\n", "/**\n * Wire-format representation of the zql AST interface.\n *\n * `v.Type<...>` types are explicitly declared to facilitate TypeScript verification\n * that the schemas satisfy the zql type definitions. 
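
Per the `Value` contract above, a `Row` is a flat object of comparable primitives, with `undefined` allowed as a convenience and normalized to `null`. A hypothetical example row:

```ts
const row = {
  id: 'a1',            // string
  count: 3,            // number
  active: true,        // boolean
  deletedAt: null,     // null
  nickname: undefined, // allowed for convenience; treated as null
};
```
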
(Incidentally, explicit types\n * are also required for recursive schema definitions.)\n */\n\nimport {compareUTF8} from 'compare-utf8';\nimport {defined} from '../../shared/src/arrays.ts';\nimport {assert} from '../../shared/src/asserts.ts';\nimport {must} from '../../shared/src/must.ts';\nimport * as v from '../../shared/src/valita.ts';\nimport type {NameMapper} from '../../zero-schema/src/name-mapper.ts';\nimport {rowSchema, type Row} from './data.ts';\n\nexport const selectorSchema = v.string();\nexport const toStaticParam = Symbol();\n\nconst orderingElementSchema = v.readonly(\n v.tuple([selectorSchema, v.literalUnion('asc', 'desc')]),\n);\n\nexport const orderingSchema = v.readonlyArray(orderingElementSchema);\nexport type System = 'permissions' | 'client' | 'test';\n\nexport const primitiveSchema = v.union(\n v.string(),\n v.number(),\n v.boolean(),\n v.null(),\n);\n\nexport const equalityOpsSchema = v.literalUnion('=', '!=', 'IS', 'IS NOT');\n\nexport const orderOpsSchema = v.literalUnion('<', '>', '<=', '>=');\n\nexport const likeOpsSchema = v.literalUnion(\n 'LIKE',\n 'NOT LIKE',\n 'ILIKE',\n 'NOT ILIKE',\n);\n\nexport const inOpsSchema = v.literalUnion('IN', 'NOT IN');\n\nexport const simpleOperatorSchema = v.union(\n equalityOpsSchema,\n orderOpsSchema,\n likeOpsSchema,\n inOpsSchema,\n);\n\nconst literalReferenceSchema: v.Type<LiteralReference> = v.readonlyObject({\n type: v.literal('literal'),\n value: v.union(\n v.string(),\n v.number(),\n v.boolean(),\n v.null(),\n v.readonlyArray(v.union(v.string(), v.number(), v.boolean())),\n ),\n});\nconst columnReferenceSchema: v.Type<ColumnReference> = v.readonlyObject({\n type: v.literal('column'),\n name: v.string(),\n});\n\n/**\n * A parameter is a value that is not known at the time the query is written\n * and is resolved at runtime.\n *\n * Static parameters refer to something provided by the caller.\n * Static parameters are injected when the query pipeline is built from the AST\n * and do not change for the life of that pipeline.\n *\n * An example static parameter is the current authentication data.\n * When a user is authenticated, queries on the server have access\n * to the user's authentication data in order to evaluate authorization rules.\n * Authentication data doesn't change over the life of a query as a change\n * in auth data would represent a log-in / log-out of the user.\n *\n * AncestorParameters refer to rows encountered while running the query.\n * They are used by subqueries to refer to rows emitted by parent queries.\n */\nconst parameterReferenceSchema = v.readonlyObject({\n type: v.literal('static'),\n // The \"namespace\" of the injected parameter.\n // Write authorization will send the value of a row\n // prior to the mutation being run (preMutationRow).\n // Read and write authorization will both send the\n // current authentication data (authData).\n anchor: v.literalUnion('authData', 'preMutationRow'),\n field: v.union(v.string(), v.array(v.string())),\n});\n\nconst conditionValueSchema = v.union(\n literalReferenceSchema,\n columnReferenceSchema,\n parameterReferenceSchema,\n);\n\nexport type Parameter = v.Infer<typeof parameterReferenceSchema>;\n\nexport const simpleConditionSchema: v.Type<SimpleCondition> = v.readonlyObject({\n type: v.literal('simple'),\n op: simpleOperatorSchema,\n left: conditionValueSchema,\n right: v.union(parameterReferenceSchema, literalReferenceSchema),\n});\n\ntype ConditionValue = v.Infer<typeof conditionValueSchema>;\n\nexport const 
correlatedSubqueryConditionOperatorSchema: v.Type<CorrelatedSubqueryConditionOperator> =\n v.literalUnion('EXISTS', 'NOT EXISTS');\n\nexport const correlatedSubqueryConditionSchema: v.Type<CorrelatedSubqueryCondition> =\n v.readonlyObject({\n type: v.literal('correlatedSubquery'),\n related: v.lazy(() => correlatedSubquerySchema),\n op: correlatedSubqueryConditionOperatorSchema,\n flip: v.boolean().optional(),\n });\n\nexport const conditionSchema: v.Type<Condition> = v.union(\n simpleConditionSchema,\n v.lazy(() => conjunctionSchema),\n v.lazy(() => disjunctionSchema),\n correlatedSubqueryConditionSchema,\n);\n\nconst conjunctionSchema: v.Type<Conjunction> = v.readonlyObject({\n type: v.literal('and'),\n conditions: v.readonlyArray(conditionSchema),\n});\n\nconst disjunctionSchema: v.Type<Disjunction> = v.readonlyObject({\n type: v.literal('or'),\n conditions: v.readonlyArray(conditionSchema),\n});\n\nexport type CompoundKey = readonly [string, ...string[]];\n\nfunction mustCompoundKey(field: readonly string[]): CompoundKey {\n assert(Array.isArray(field) && field.length >= 1);\n return field as unknown as CompoundKey;\n}\n\nexport const compoundKeySchema: v.Type<CompoundKey> = v.readonly(\n v.tuple([v.string()]).concat(v.array(v.string())),\n);\n\nconst correlationSchema = v.readonlyObject({\n parentField: compoundKeySchema,\n childField: compoundKeySchema,\n});\n\n// Split out so that its inferred type can be checked against\n// Omit<CorrelatedSubquery, 'correlation'> in ast-type-test.ts.\n// The mutually-recursive reference of the 'other' field to astSchema\n// is the only thing added in v.lazy. The v.lazy is necessary due to the\n// mutually-recursive types, but v.lazy prevents inference of the resulting\n// type.\nexport const correlatedSubquerySchemaOmitSubquery = v.readonlyObject({\n correlation: correlationSchema,\n hidden: v.boolean().optional(),\n system: v.literalUnion('permissions', 'client', 'test').optional(),\n});\n\nexport const correlatedSubquerySchema: v.Type<CorrelatedSubquery> =\n correlatedSubquerySchemaOmitSubquery.extend({\n subquery: v.lazy(() => astSchema),\n });\n\nexport const astSchema: v.Type<AST> = v.readonlyObject({\n schema: v.string().optional(),\n table: v.string(),\n alias: v.string().optional(),\n where: conditionSchema.optional(),\n related: v.readonlyArray(correlatedSubquerySchema).optional(),\n limit: v.number().optional(),\n orderBy: orderingSchema.optional(),\n start: v\n .object({\n row: rowSchema,\n exclusive: v.boolean(),\n })\n .optional(),\n});\n\nexport type Bound = {\n row: Row;\n exclusive: boolean;\n};\n\n/**\n * As in SQL you can have multiple orderings. 
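With `astSchema` now fully defined, here is a hedged sketch of a hand-written AST literal the schema should accept. The import path is illustrative; the query shown is roughly `SELECT * FROM issue WHERE ownerId = 'u1' ORDER BY id ASC LIMIT 10`:

```ts
// Illustrative only: a hand-rolled AST matching the shape of `astSchema`.
import {astSchema, type AST} from './ast.ts'; // illustrative path

const ast: AST = {
  table: 'issue',
  where: {
    type: 'simple',
    op: '=',
    left: {type: 'column', name: 'ownerId'},
    right: {type: 'literal', value: 'u1'},
  },
  orderBy: [['id', 'asc']],
  limit: 10,
};

// Parsing validates the wire format at runtime (valita Type#parse).
astSchema.parse(ast);
```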
We don't currently\n * support ordering on anything other than the root query.\n */\nexport type OrderPart = readonly [field: string, direction: 'asc' | 'desc'];\nexport type Ordering = readonly OrderPart[];\n\nexport type SimpleOperator = EqualityOps | OrderOps | LikeOps | InOps;\nexport type EqualityOps = '=' | '!=' | 'IS' | 'IS NOT';\nexport type OrderOps = '<' | '>' | '<=' | '>=';\nexport type LikeOps = 'LIKE' | 'NOT LIKE' | 'ILIKE' | 'NOT ILIKE';\nexport type InOps = 'IN' | 'NOT IN';\n\nexport type AST = {\n readonly schema?: string | undefined;\n readonly table: string;\n\n // A query would be aliased if the AST is a subquery.\n // e.g., when two subqueries select from the same table\n // they need an alias to differentiate them.\n // `SELECT\n // [SELECT * FROM issue WHERE issue.id = outer.parentId] AS parent\n // [SELECT * FROM issue WHERE issue.parentId = outer.id] AS children\n // FROM issue as outer`\n readonly alias?: string | undefined;\n\n // `select` is missing given we return all columns for now.\n\n // The PipelineBuilder will pick what to use to correlate\n // a subquery with a parent query. It can choose something from the\n // where conditions or choose the _first_ `related` entry.\n // Choosing the first `related` entry is almost always the best choice if\n // one exists.\n readonly where?: Condition | undefined;\n\n readonly related?: readonly CorrelatedSubquery[] | undefined;\n readonly start?: Bound | undefined;\n readonly limit?: number | undefined;\n readonly orderBy?: Ordering | undefined;\n};\n\nexport type Correlation = {\n readonly parentField: CompoundKey;\n readonly childField: CompoundKey;\n};\n\nexport type CorrelatedSubquery = {\n /**\n * Only equality correlation are supported for now.\n * E.g., direct foreign key relationships.\n */\n readonly correlation: Correlation;\n readonly subquery: AST;\n readonly system?: System | undefined;\n // If a hop in the subquery chain should be hidden from the output view.\n // A common example is junction edges. The query API provides the illusion\n // that they don't exist: `issue.related('labels')` instead of `issue.related('issue_labels').related('labels')`.\n // To maintain this illusion, the junction edge should be hidden.\n // When `hidden` is set to true, this hop will not be included in the output view\n // but its children will be.\n readonly hidden?: boolean | undefined;\n};\n\nexport type ValuePosition = LiteralReference | Parameter | ColumnReference;\n\nexport type ColumnReference = {\n readonly type: 'column';\n /**\n * Not a path yet as we're currently not allowing\n * comparisons across tables. 
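To make the `CorrelatedSubquery` shape and the hidden junction-edge behavior described above concrete, a sketch assuming a hypothetical issue/comment/label schema (all table and column names are illustrative):

```ts
import type {CorrelatedSubquery} from './ast.ts'; // illustrative path

// issue.related('comments'): correlate parent issue.id with child comment.issueId.
const comments: CorrelatedSubquery = {
  correlation: {parentField: ['id'], childField: ['issueId']},
  subquery: {table: 'comment', alias: 'comments'},
};

// A hidden junction hop, as in issue.related('labels') going through a
// join table: the hop is traversed but omitted from the output view.
const viaJunction: CorrelatedSubquery = {
  correlation: {parentField: ['id'], childField: ['issueId']},
  hidden: true,
  subquery: {
    table: 'issueLabel',
    alias: 'labels',
    related: [
      {
        correlation: {parentField: ['labelId'], childField: ['id']},
        subquery: {table: 'label', alias: 'labels'},
      },
    ],
  },
};
```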
This will need to\n * be a path through the tree in the near future.\n */\n readonly name: string;\n};\n\nexport type LiteralReference = {\n readonly type: 'literal';\n readonly value: LiteralValue;\n};\n\nexport type LiteralValue =\n | string\n | number\n | boolean\n | null\n | ReadonlyArray<string | number | boolean>;\n\n/**\n * Starting only with SimpleCondition for now.\n * ivm1 supports Conjunctions and Disjunctions.\n * We'll support them in the future.\n */\nexport type Condition =\n | SimpleCondition\n | Conjunction\n | Disjunction\n | CorrelatedSubqueryCondition;\n\nexport type SimpleCondition = {\n readonly type: 'simple';\n readonly op: SimpleOperator;\n readonly left: ValuePosition;\n\n /**\n * `null` is absent since we do not have an `IS` or `IS NOT`\n * operator defined and `null != null` in SQL.\n */\n readonly right: Exclude<ValuePosition, ColumnReference>;\n};\n\nexport type Conjunction = {\n type: 'and';\n conditions: readonly Condition[];\n};\n\nexport type Disjunction = {\n type: 'or';\n conditions: readonly Condition[];\n};\n\nexport type CorrelatedSubqueryCondition = {\n type: 'correlatedSubquery';\n related: CorrelatedSubquery;\n op: CorrelatedSubqueryConditionOperator;\n flip?: boolean | undefined;\n};\n\nexport type CorrelatedSubqueryConditionOperator = 'EXISTS' | 'NOT EXISTS';\n\ninterface ASTTransform {\n tableName(orig: string): string;\n columnName(origTable: string, origColumn: string): string;\n related(subqueries: CorrelatedSubquery[]): readonly CorrelatedSubquery[];\n where(cond: Condition): Condition | undefined;\n // conjunction or disjunction, called when traversing the return value of where()\n conditions(conds: Condition[]): readonly Condition[];\n}\n\nfunction transformAST(ast: AST, transform: ASTTransform): Required<AST> {\n // Name mapping functions (e.g. to server names)\n const {tableName, columnName} = transform;\n const colName = (c: string) => columnName(ast.table, c);\n const key = (table: string, k: CompoundKey) => {\n const serverKey = k.map(col => columnName(table, col));\n return mustCompoundKey(serverKey);\n };\n\n const where = ast.where ? transform.where(ast.where) : undefined;\n const transformed = {\n schema: ast.schema,\n table: tableName(ast.table),\n alias: ast.alias,\n where: where ? transformWhere(where, ast.table, transform) : undefined,\n related: ast.related\n ? transform.related(\n ast.related.map(\n r =>\n ({\n correlation: {\n parentField: key(ast.table, r.correlation.parentField),\n childField: key(r.subquery.table, r.correlation.childField),\n },\n hidden: r.hidden,\n subquery: transformAST(r.subquery, transform),\n system: r.system,\n }) satisfies Required<CorrelatedSubquery>,\n ),\n )\n : undefined,\n start: ast.start\n ? {\n ...ast.start,\n row: Object.fromEntries(\n Object.entries(ast.start.row).map(([col, val]) => [\n colName(col),\n val,\n ]),\n ),\n }\n : undefined,\n limit: ast.limit,\n orderBy: ast.orderBy?.map(([col, dir]) => [colName(col), dir] as const),\n };\n\n return transformed;\n}\n\nfunction transformWhere(\n where: Condition,\n table: string,\n transform: ASTTransform,\n): Condition {\n // Name mapping functions (e.g. to server names)\n const {columnName} = transform;\n const condValue = (c: ConditionValue) =>\n c.type !== 'column' ? 
c : {...c, name: columnName(table, c.name)};\n const key = (table: string, k: CompoundKey) => {\n const serverKey = k.map(col => columnName(table, col));\n return mustCompoundKey(serverKey);\n };\n\n if (where.type === 'simple') {\n return {...where, left: condValue(where.left)};\n } else if (where.type === 'correlatedSubquery') {\n const {correlation, subquery} = where.related;\n return {\n ...where,\n related: {\n ...where.related,\n correlation: {\n parentField: key(table, correlation.parentField),\n childField: key(subquery.table, correlation.childField),\n },\n subquery: transformAST(subquery, transform),\n },\n };\n }\n\n return {\n type: where.type,\n conditions: transform.conditions(\n where.conditions.map(c => transformWhere(c, table, transform)),\n ),\n };\n}\n\nconst normalizeCache = new WeakMap<AST, Required<AST>>();\n\nconst NORMALIZE_TRANSFORM: ASTTransform = {\n tableName: t => t,\n columnName: (_, c) => c,\n related: sortedRelated,\n where: flattened,\n conditions: c => c.sort(cmpCondition),\n};\n\nexport function normalizeAST(ast: AST): Required<AST> {\n let normalized = normalizeCache.get(ast);\n if (!normalized) {\n normalized = transformAST(ast, NORMALIZE_TRANSFORM);\n normalizeCache.set(ast, normalized);\n }\n return normalized;\n}\n\nexport function mapAST(ast: AST, mapper: NameMapper) {\n return transformAST(ast, {\n tableName: table => mapper.tableName(table),\n columnName: (table, col) => mapper.columnName(table, col),\n related: r => r,\n where: w => w,\n conditions: c => c,\n });\n}\n\nexport function mapCondition(\n cond: Condition,\n table: string,\n mapper: NameMapper,\n) {\n return transformWhere(cond, table, {\n tableName: table => mapper.tableName(table),\n columnName: (table, col) => mapper.columnName(table, col),\n related: r => r,\n where: w => w,\n conditions: c => c,\n });\n}\n\nfunction sortedRelated(\n related: CorrelatedSubquery[],\n): readonly CorrelatedSubquery[] {\n return related.sort(cmpRelated);\n}\n\nfunction cmpCondition(a: Condition, b: Condition): number {\n if (a.type === 'simple') {\n if (b.type !== 'simple') {\n return -1; // Order SimpleConditions first\n }\n\n return (\n compareValuePosition(a.left, b.left) ||\n compareUTF8MaybeNull(a.op, b.op) ||\n compareValuePosition(a.right, b.right)\n );\n }\n\n if (b.type === 'simple') {\n return 1; // Order SimpleConditions first\n }\n\n if (a.type === 'correlatedSubquery') {\n if (b.type !== 'correlatedSubquery') {\n return -1; // Order subquery before conjuctions/disjuctions\n }\n return cmpRelated(a.related, b.related) || compareUTF8MaybeNull(a.op, b.op);\n }\n if (b.type === 'correlatedSubquery') {\n return -1; // Order correlatedSubquery before conjuctions/disjuctions\n }\n\n const val = compareUTF8MaybeNull(a.type, b.type);\n if (val !== 0) {\n return val;\n }\n for (\n let l = 0, r = 0;\n l < a.conditions.length && r < b.conditions.length;\n l++, r++\n ) {\n const val = cmpCondition(a.conditions[l], b.conditions[r]);\n if (val !== 0) {\n return val;\n }\n }\n // prefixes first\n return a.conditions.length - b.conditions.length;\n}\n\nfunction compareValuePosition(a: ValuePosition, b: ValuePosition): number {\n if (a.type !== b.type) {\n return compareUTF8(a.type, b.type);\n }\n switch (a.type) {\n case 'literal':\n assert(b.type === 'literal');\n return compareUTF8(String(a.value), String(b.value));\n case 'column':\n assert(b.type === 'column');\n return compareUTF8(a.name, b.name);\n case 'static':\n throw new Error(\n 'Static parameters should be resolved before normalization',\n );\n 
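`normalizeAST` above combines two ideas: a canonical transform (sorted `related` entries and conditions, flattened conjunctions) and a `WeakMap` cache keyed on AST object identity. A sketch of the observable behavior, with an illustrative import path:

```ts
import {normalizeAST} from './ast.ts'; // illustrative path

const a = {
  table: 'issue',
  where: {
    type: 'and',
    conditions: [
      {type: 'simple', op: '=', left: {type: 'column', name: 'b'}, right: {type: 'literal', value: 1}},
      {type: 'simple', op: '=', left: {type: 'column', name: 'a'}, right: {type: 'literal', value: 1}},
    ],
  },
} as const;

const n1 = normalizeAST(a);
const n2 = normalizeAST(a);
console.log(n1 === n2); // true: cached per AST object identity (WeakMap)

// Conditions come back sorted (column 'a' before 'b'), so two semantically
// equal ASTs normalize to deeply-equal values and hash identically.
```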
}\n}\n\nfunction cmpRelated(a: CorrelatedSubquery, b: CorrelatedSubquery): number {\n return compareUTF8(must(a.subquery.alias), must(b.subquery.alias));\n}\n\n/**\n * Returns a flattened version of the Conditions in which nested Conjunctions with\n * the same operation ('AND' or 'OR') are flattened to the same level. e.g.\n *\n * ```\n * ((a AND b) AND (c AND (d OR (e OR f)))) -> (a AND b AND c AND (d OR e OR f))\n * ```\n *\n * Also flattens singleton Conjunctions regardless of operator, and removes\n * empty Conjunctions.\n */\nfunction flattened(cond: Condition): Condition | undefined {\n if (cond.type === 'simple' || cond.type === 'correlatedSubquery') {\n return cond;\n }\n const conditions = defined(\n cond.conditions.flatMap(c =>\n c.type === cond.type ? c.conditions.map(c => flattened(c)) : flattened(c),\n ),\n );\n\n switch (conditions.length) {\n case 0:\n return undefined;\n case 1:\n return conditions[0];\n default:\n return {\n type: cond.type,\n conditions,\n };\n }\n}\n\nfunction compareUTF8MaybeNull(a: string | null, b: string | null): number {\n if (a !== null && b !== null) {\n return compareUTF8(a, b);\n }\n if (b !== null) {\n return -1;\n }\n if (a !== null) {\n return 1;\n }\n return 0;\n}\n", "import {assert} from './asserts.ts';\n\n/**\n * Returns `arr` as is if none of the elements are `undefined`.\n * Otherwise returns a new array with only defined elements in `arr`.\n */\nexport function defined<T>(arr: (T | undefined)[]): T[] {\n // avoid an array copy if possible\n let i = arr.findIndex(x => x === undefined);\n if (i < 0) {\n return arr as T[];\n }\n const defined: T[] = arr.slice(0, i) as T[];\n for (i++; i < arr.length; i++) {\n const x = arr[i];\n if (x !== undefined) {\n defined.push(x);\n }\n }\n return defined;\n}\n\nexport function areEqual<T>(arr1: readonly T[], arr2: readonly T[]): boolean {\n return arr1.length === arr2.length && arr1.every((e, i) => e === arr2[i]);\n}\n\nexport function zip<T1, T2>(a1: readonly T1[], a2: readonly T2[]): [T1, T2][] {\n assert(a1.length === a2.length);\n const result: [T1, T2][] = [];\n for (let i = 0; i < a1.length; i++) {\n result.push([a1[i], a2[i]]);\n }\n return result;\n}\n\nexport function last<T>(arr: T[]): T | undefined {\n if (arr.length === 0) {\n return undefined;\n }\n return arr[arr.length - 1];\n}\n\nexport function groupBy<T, K>(\n arr: readonly T[],\n keyFn: (el: T) => K,\n): Map<K, T[]> {\n const groups = new Map<K, T[]>();\n for (const el of arr) {\n const key = keyFn(el);\n let group = groups.get(key);\n if (group === undefined) {\n group = [];\n groups.set(key, group);\n }\n group.push(el);\n }\n return groups;\n}\n", "import {jsonSchema} from '../../shared/src/json-schema.ts';\nimport {tdigestSchema} from '../../shared/src/tdigest-schema.ts';\nimport * as v from '../../shared/src/valita.ts';\nimport {analyzeQueryResultSchema} from './analyze-query-result.ts';\nimport {astSchema} from './ast.ts';\n\nconst serverMetricsSchema = v.object({\n 'query-materialization-server': tdigestSchema,\n 'query-update-server': tdigestSchema,\n});\n\nexport type ServerMetrics = v.Infer<typeof serverMetricsSchema>;\n\nconst inspectQueryRowSchema = v.object({\n clientID: v.string(),\n queryID: v.string(),\n // This is the server return AST for custom queries\n // TODO: Return server generated AST\n ast: astSchema.nullable(),\n // not null for custom queries\n name: v.string().nullable(),\n // not null for custom queries\n args: v.readonlyArray(jsonSchema).nullable(),\n got: v.boolean(),\n deleted: v.boolean(),\n ttl: 
v.number(),\n inactivatedAt: v.number().nullable(),\n rowCount: v.number(),\n metrics: serverMetricsSchema.nullable().optional(),\n});\n\nexport type InspectQueryRow = v.Infer<typeof inspectQueryRowSchema>;\n\nconst inspectBaseDownSchema = v.object({\n id: v.string(),\n});\n\nexport const inspectQueriesDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('queries'),\n value: v.array(inspectQueryRowSchema),\n});\n\nexport type InspectQueriesDown = v.Infer<typeof inspectQueriesDownSchema>;\n\nexport const inspectMetricsDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('metrics'),\n value: serverMetricsSchema,\n});\n\nexport type InspectMetricsDown = v.Infer<typeof inspectMetricsDownSchema>;\n\nexport const inspectVersionDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('version'),\n value: v.string(),\n});\n\nexport const inspectAuthenticatedDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('authenticated'),\n value: v.boolean(),\n});\n\nexport type InspectAuthenticatedDown = v.Infer<\n typeof inspectAuthenticatedDownSchema\n>;\n\nexport const inspectAnalyzeQueryDownSchema = inspectBaseDownSchema.extend({\n op: v.literal('analyze-query'),\n value: analyzeQueryResultSchema,\n});\n\nexport type InspectAnalyzeQueryDown = v.Infer<\n typeof inspectAnalyzeQueryDownSchema\n>;\n\nexport const inspectDownBodySchema = v.union(\n inspectQueriesDownSchema,\n inspectMetricsDownSchema,\n inspectVersionDownSchema,\n inspectAuthenticatedDownSchema,\n inspectAnalyzeQueryDownSchema,\n);\n\nexport const inspectDownMessageSchema = v.tuple([\n v.literal('inspect'),\n inspectDownBodySchema,\n]);\n\nexport type InspectDownMessage = v.Infer<typeof inspectDownMessageSchema>;\n\nexport type InspectDownBody = v.Infer<typeof inspectDownBodySchema>;\n", "export function getNonCryptoRandomValues(array: Uint8Array) {\n if (array === null) {\n throw new TypeError('array cannot be null');\n }\n\n // Fill the array with random values\n for (let i = 0; i < array.length; i++) {\n array[i] = Math.floor(Math.random() * 256); // Random byte (0-255)\n }\n\n return array;\n}\n\nexport function randomCharacters(length: number) {\n let result = '';\n const characters =\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';\n const charactersLength = characters.length;\n let counter = 0;\n while (counter < length) {\n result += characters.charAt(Math.floor(Math.random() * charactersLength));\n counter += 1;\n }\n return result;\n}\n", "// This is taken from https://github.com/ai/nanoid/blob/main/index.browser.js We\n// copy this because we want to use `--platform=neutral` which doesn't work with\n// the npm package.\n// Also we changed the random number generator to use Math.random() for compat\n// with React Native.\n\nimport {getNonCryptoRandomValues} from '../../../shared/src/random-values.ts';\n\nexport function nanoid(size = 21): string {\n // Use our custom getRandomValues function to fill a Uint8Array with random values.\n const randomBytes = getNonCryptoRandomValues(new Uint8Array(size));\n\n return randomBytes.reduce((id, byte) => {\n // It is incorrect to use bytes exceeding the alphabet size.\n // The following mask reduces the random byte in the 0-255 value\n // range to the 0-63 value range. 
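The bitmask branch in the nanoid port above maps each masked byte onto one of 64 URL-safe characters. A worked expansion of that mapping, restated as a standalone function so the cases are easy to check:

```ts
// Worked example of the byte -> character mapping described above.
// `byte & 63` keeps the low 6 bits, so every byte lands in 0..63:
//   0..35  -> '0'-'9' then 'a'-'z'  (base-36 digits)
//   36..61 -> 'A'-'Z'               ((byte - 26) in base 36, uppercased)
//   62     -> '_'
//   63     -> '-'
function charFor(byteIn: number): string {
  const byte = byteIn & 63;
  if (byte < 36) return byte.toString(36);
  if (byte < 62) return (byte - 26).toString(36).toUpperCase();
  return byte > 62 ? '-' : '_';
}

console.log(charFor(0));   // '0'
console.log(charFor(35));  // 'z'
console.log(charFor(36));  // 'A'  ((36 - 26).toString(36) === 'a', uppercased)
console.log(charFor(255)); // '-'  (255 & 63 === 63)
```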
Therefore, adding hacks, such\n // as empty string fallback or magic numbers, is unneccessary because\n // the bitmask trims bytes down to the alphabet size.\n byte &= 63;\n if (byte < 36) {\n // `0-9a-z`\n id += byte.toString(36);\n } else if (byte < 62) {\n // `A-Z`\n id += (byte - 26).toString(36).toUpperCase();\n } else if (byte > 62) {\n id += '-';\n } else {\n id += '_';\n }\n return id;\n }, '');\n}\n", "import {xxHash32} from 'js-xxhash';\n\nexport const h32 = (s: string) => xxHash32(s, 0);\nexport const h64 = (s: string) => hash(s, 2);\nexport const h128 = (s: string) => hash(s, 4);\n\n/**\n * xxHash32 only computes 32-bit values. Run it n times with different seeds to\n * get a larger hash with better collision resistance.\n */\nfunction hash(str: string, words: number): bigint {\n let hash = 0n;\n for (let i = 0; i < words; i++) {\n hash = (hash << 32n) + BigInt(xxHash32(str, i));\n }\n return hash;\n}\n", "import * as v from '../../shared/src/valita.ts';\n\nexport const primaryKeySchema = v.readonly(\n v.tuple([v.string()]).concat(v.array(v.string())),\n);\n\nexport type PrimaryKey = v.Infer<typeof primaryKeySchema>;\n\nexport const primaryKeyValueSchema = v.union(\n v.string(),\n v.number(),\n v.boolean(),\n);\n\nexport type PrimaryKeyValue = v.Infer<typeof primaryKeyValueSchema>;\n\nexport const primaryKeyValueRecordSchema = v.readonlyRecord(\n primaryKeyValueSchema,\n);\n\nexport type PrimaryKeyValueRecord = v.Infer<typeof primaryKeyValueRecordSchema>;\n", "import {h128} from '../../../shared/src/hash.ts';\nimport * as v from '../../../shared/src/valita.ts';\nimport type {CompoundKey} from '../../../zero-protocol/src/ast.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport {primaryKeyValueSchema} from '../../../zero-protocol/src/primary-key.ts';\nimport type {MutationID} from '../../../zero-protocol/src/push.ts';\n\nexport const DESIRED_QUERIES_KEY_PREFIX = 'd/';\nexport const GOT_QUERIES_KEY_PREFIX = 'g/';\nexport const ENTITIES_KEY_PREFIX = 'e/';\nexport const MUTATIONS_KEY_PREFIX = 'm/';\n\nexport function toDesiredQueriesKey(clientID: string, hash: string): string {\n return DESIRED_QUERIES_KEY_PREFIX + clientID + '/' + hash;\n}\n\nexport function desiredQueriesPrefixForClient(clientID: string): string {\n return DESIRED_QUERIES_KEY_PREFIX + clientID + '/';\n}\n\nexport function toGotQueriesKey(hash: string): string {\n return GOT_QUERIES_KEY_PREFIX + hash;\n}\n\nexport function toMutationResponseKey(mid: MutationID): string {\n return MUTATIONS_KEY_PREFIX + mid.clientID + '/' + mid.id;\n}\n\nexport function toPrimaryKeyString(\n tableName: string,\n primaryKey: CompoundKey,\n value: Row,\n): string {\n if (primaryKey.length === 1) {\n return (\n ENTITIES_KEY_PREFIX +\n tableName +\n '/' +\n v.parse(value[primaryKey[0]], primaryKeyValueSchema)\n );\n }\n\n const values = primaryKey.map(k => v.parse(value[k], primaryKeyValueSchema));\n const str = JSON.stringify(values);\n\n const idSegment = h128(str);\n return ENTITIES_KEY_PREFIX + tableName + '/' + idSegment;\n}\n\nexport function sourceNameFromKey(key: string): string {\n const slash = key.indexOf('/', ENTITIES_KEY_PREFIX.length);\n return key.slice(ENTITIES_KEY_PREFIX.length, slash);\n}\n", "import type {Client} from './client.ts';\nimport type {ExtendedInspectorDelegate} from './lazy-inspector.ts';\nimport type {Query} from './query.ts';\n\nexport class ClientGroup {\n readonly #delegate: ExtendedInspectorDelegate;\n readonly id: Promise<string> | string;\n\n constructor(\n delegate: 
ExtendedInspectorDelegate,\n clientGroupID: Promise<string> | string,\n ) {\n this.#delegate = delegate;\n this.id = clientGroupID;\n }\n\n async clients(): Promise<Client[]> {\n return (await this.#delegate.lazy).clientGroupClients(\n this.#delegate,\n this.id,\n );\n }\n\n async clientsWithQueries(): Promise<Client[]> {\n return (await this.#delegate.lazy).clientGroupClientsWithQueries(\n this.#delegate,\n this.id,\n );\n }\n\n async queries(): Promise<Query[]> {\n return (await this.#delegate.lazy).clientGroupQueries(this.#delegate);\n }\n}\n", "import type {ReadonlyJSONValue} from '../../../../shared/src/json.ts';\nimport type {Row} from '../../../../zero-protocol/src/data.ts';\nimport {ClientGroup} from './client-group.ts';\nimport type {ExtendedInspectorDelegate} from './lazy-inspector.ts';\nimport type {Query} from './query.ts';\n\nexport class Client {\n readonly #delegate: ExtendedInspectorDelegate;\n readonly id: string;\n readonly clientGroup: ClientGroup;\n\n constructor(\n delegate: ExtendedInspectorDelegate,\n clientID: string,\n clientGroupID: Promise<string> | string,\n ) {\n this.#delegate = delegate;\n this.id = clientID;\n\n this.clientGroup = new ClientGroup(this.#delegate, clientGroupID);\n }\n\n async queries(): Promise<Query[]> {\n return (await this.#delegate.lazy).clientQueries(this.#delegate, this.id);\n }\n\n async map(): Promise<Map<string, ReadonlyJSONValue>> {\n return (await this.#delegate.lazy).clientMap(this.#delegate, this.id);\n }\n\n async rows(tableName: string): Promise<Row[]> {\n return (await this.#delegate.lazy).clientRows(\n this.#delegate,\n this.id,\n tableName,\n );\n }\n}\n", "// Apache License 2.0\n// https://github.com/influxdata/tdigest\n\n// Centroid average position of all points in a shape\nexport class Centroid {\n mean: number;\n weight: number;\n\n constructor(mean: number, weight: number) {\n this.mean = mean;\n this.weight = weight;\n }\n\n add(r: Centroid): void {\n if (r.weight < 0) {\n throw new Error('centroid weight cannot be less than zero');\n }\n if (this.weight !== 0) {\n this.weight += r.weight;\n this.mean += (r.weight * (r.mean - this.mean)) / this.weight;\n } else {\n this.weight = r.weight;\n this.mean = r.mean;\n }\n }\n}\n\n/** CentroidList is sorted by the mean of the centroid, ascending. 
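`Centroid.add` above maintains a running weighted mean without keeping the underlying samples. Restating the class inline so the arithmetic can be checked with concrete numbers:

```ts
// Merging (mean=10, weight=1) into (mean=4, weight=3):
//   weight' = 3 + 1 = 4
//   mean'   = 4 + (1 * (10 - 4)) / 4 = 4 + 1.5 = 5.5
// which equals the exact weighted mean (3*4 + 1*10) / 4 = 22 / 4 = 5.5.
class Centroid {
  constructor(public mean: number, public weight: number) {}
  add(r: Centroid): void {
    if (r.weight < 0) throw new Error('centroid weight cannot be less than zero');
    if (this.weight !== 0) {
      this.weight += r.weight;
      this.mean += (r.weight * (r.mean - this.mean)) / this.weight;
    } else {
      this.weight = r.weight;
      this.mean = r.mean;
    }
  }
}

const c = new Centroid(4, 3);
c.add(new Centroid(10, 1));
console.log(c.mean, c.weight); // 5.5 4
```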
*/\nexport type CentroidList = Centroid[];\n\nexport function sortCentroidList(centroids: CentroidList): void {\n centroids.sort((a, b) => a.mean - b.mean);\n}\n", "// Apache License 2.0\n// https://github.com/influxdata/tdigest\n\nimport {binarySearch} from './binary-search.ts';\nimport {Centroid, sortCentroidList, type CentroidList} from './centroid.ts';\nimport type {TDigestJSON} from './tdigest-schema.ts';\n\nexport interface ReadonlyTDigest {\n readonly count: () => number;\n readonly quantile: (q: number) => number;\n readonly cdf: (x: number) => number;\n}\n\n// TDigest is a data structure for accurate on-line accumulation of\n// rank-based statistics such as quantiles and trimmed means.\nexport class TDigest {\n readonly compression: number;\n\n #maxProcessed: number;\n #maxUnprocessed: number;\n #processed!: CentroidList;\n #unprocessed!: CentroidList;\n #cumulative!: number[];\n #processedWeight!: number;\n #unprocessedWeight!: number;\n #min!: number;\n #max!: number;\n\n constructor(compression: number = 1000) {\n this.compression = compression;\n this.#maxProcessed = processedSize(0, this.compression);\n this.#maxUnprocessed = unprocessedSize(0, this.compression);\n this.reset();\n }\n\n /**\n * fromJSON creates a TDigest from a JSON-serializable representation.\n * The data should be an object with compression and centroids array.\n */\n static fromJSON(data: Readonly<TDigestJSON>): TDigest {\n const digest = new TDigest(data[0]);\n if (data.length % 2 !== 1) {\n throw new Error('Invalid centroids array');\n }\n for (let i = 1; i < data.length; i += 2) {\n digest.add(data[i], data[i + 1]);\n }\n return digest;\n }\n\n reset(): void {\n this.#processed = [];\n this.#unprocessed = [];\n this.#cumulative = [];\n this.#processedWeight = 0;\n this.#unprocessedWeight = 0;\n this.#min = Number.MAX_VALUE;\n this.#max = -Number.MAX_VALUE;\n }\n\n add(mean: number, weight: number = 1) {\n this.addCentroid(new Centroid(mean, weight));\n }\n\n /** AddCentroidList can quickly add multiple centroids. */\n addCentroidList(centroidList: CentroidList) {\n for (const c of centroidList) {\n this.addCentroid(c);\n }\n }\n\n /**\n * AddCentroid adds a single centroid.\n * Weights which are not a number or are <= 0 are ignored, as are NaN means.\n */\n addCentroid(c: Centroid): void {\n if (\n Number.isNaN(c.mean) ||\n c.weight <= 0 ||\n Number.isNaN(c.weight) ||\n !Number.isFinite(c.weight)\n ) {\n return;\n }\n\n this.#unprocessed.push(new Centroid(c.mean, c.weight));\n this.#unprocessedWeight += c.weight;\n\n if (\n this.#processed.length > this.#maxProcessed ||\n this.#unprocessed.length > this.#maxUnprocessed\n ) {\n this.#process();\n }\n }\n\n /**\n * Merges the supplied digest into this digest. 
Functionally equivalent to\n * calling t.AddCentroidList(t2.Centroids(nil)), but avoids making an extra\n * copy of the CentroidList.\n **/\n merge(t2: TDigest) {\n t2.#process();\n this.addCentroidList(t2.#processed);\n }\n\n #process() {\n if (\n this.#unprocessed.length > 0 ||\n this.#processed.length > this.#maxProcessed\n ) {\n // Append all processed centroids to the unprocessed list and sort\n this.#unprocessed.push(...this.#processed);\n sortCentroidList(this.#unprocessed);\n\n // Reset processed list with first centroid\n this.#processed.length = 0;\n this.#processed.push(this.#unprocessed[0]);\n\n this.#processedWeight += this.#unprocessedWeight;\n this.#unprocessedWeight = 0;\n let soFar = this.#unprocessed[0].weight;\n let limit = this.#processedWeight * this.#integratedQ(1);\n for (let i = 1; i < this.#unprocessed.length; i++) {\n const centroid = this.#unprocessed[i];\n const projected = soFar + centroid.weight;\n if (projected <= limit) {\n soFar = projected;\n this.#processed[this.#processed.length - 1].add(centroid);\n } else {\n const k1 = this.#integratedLocation(soFar / this.#processedWeight);\n limit = this.#processedWeight * this.#integratedQ(k1 + 1);\n soFar += centroid.weight;\n this.#processed.push(centroid);\n }\n }\n this.#min = Math.min(this.#min, this.#processed[0].mean);\n this.#max = Math.max(\n this.#max,\n this.#processed[this.#processed.length - 1].mean,\n );\n this.#unprocessed.length = 0;\n }\n }\n\n /**\n * Centroids returns a copy of processed centroids.\n * Useful when aggregating multiple t-digests.\n *\n * Centroids are appended to the passed CentroidList; if you're re-using a\n * buffer, be sure to pass cl[:0].\n */\n centroids(cl: CentroidList = []): CentroidList {\n this.#process();\n return cl.concat(this.#processed);\n }\n\n count(): number {\n this.#process();\n\n // this.process always updates this.processedWeight to the total count of all\n // centroids, so we don't need to re-count here.\n return this.#processedWeight;\n }\n\n /**\n * toJSON returns a JSON-serializable representation of the digest.\n * This processes the digest and returns an object with compression and centroid data.\n */\n toJSON(): TDigestJSON {\n this.#process();\n const data: TDigestJSON = [this.compression];\n for (const centroid of this.#processed) {\n data.push(centroid.mean, centroid.weight);\n }\n return data;\n }\n\n #updateCumulative() {\n // Weight can only increase, so the final cumulative value will always be\n // either equal to, or less than, the total weight. If they are the same,\n // then nothing has changed since the last update.\n if (\n this.#cumulative.length > 0 &&\n this.#cumulative[this.#cumulative.length - 1] === this.#processedWeight\n ) {\n return;\n }\n const n = this.#processed.length + 1;\n if (this.#cumulative.length > n) {\n this.#cumulative.length = n;\n }\n\n let prev = 0;\n for (let i = 0; i < this.#processed.length; i++) {\n const centroid = this.#processed[i];\n const cur = centroid.weight;\n this.#cumulative[i] = prev + cur / 2;\n prev += cur;\n }\n this.#cumulative[this.#processed.length] = prev;\n }\n\n // Quantile returns the (approximate) quantile of\n // the distribution. 
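A usage sketch for the `TDigest` above, assuming an illustrative import path; the quantile and cdf values are approximate by design:

```ts
import {TDigest} from './tdigest.ts'; // illustrative path

const d = new TDigest(100); // compression = 100

// Stream in 1..1000; the digest keeps O(compression) centroids, not raw data.
for (let i = 1; i <= 1000; i++) {
  d.add(i);
}

console.log(d.count());       // 1000
console.log(d.quantile(0.5)); // ~500 (approximate median)
console.log(d.cdf(250));      // ~0.25

// Round-trip through the flat [compression, mean1, weight1, ...] JSON form.
const copy = TDigest.fromJSON(d.toJSON());
console.log(copy.quantile(0.99)); // ~990
```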
Accepted values for q are between 0 and 1.\n // Returns NaN if Count is zero or bad inputs.\n quantile(q: number): number {\n this.#process();\n this.#updateCumulative();\n if (q < 0 || q > 1 || this.#processed.length === 0) {\n return NaN;\n }\n if (this.#processed.length === 1) {\n return this.#processed[0].mean;\n }\n const index = q * this.#processedWeight;\n if (index <= this.#processed[0].weight / 2) {\n return (\n this.#min +\n ((2 * index) / this.#processed[0].weight) *\n (this.#processed[0].mean - this.#min)\n );\n }\n\n const lower = binarySearch(\n this.#cumulative.length,\n (i: number) => -this.#cumulative[i] + index,\n );\n\n if (lower + 1 !== this.#cumulative.length) {\n const z1 = index - this.#cumulative[lower - 1];\n const z2 = this.#cumulative[lower] - index;\n return weightedAverage(\n this.#processed[lower - 1].mean,\n z2,\n this.#processed[lower].mean,\n z1,\n );\n }\n\n const z1 =\n index - this.#processedWeight - this.#processed[lower - 1].weight / 2;\n const z2 = this.#processed[lower - 1].weight / 2 - z1;\n return weightedAverage(\n this.#processed[this.#processed.length - 1].mean,\n z1,\n this.#max,\n z2,\n );\n }\n\n /**\n * CDF returns the cumulative distribution function for a given value x.\n */\n cdf(x: number): number {\n this.#process();\n this.#updateCumulative();\n switch (this.#processed.length) {\n case 0:\n return 0;\n case 1: {\n const width = this.#max - this.#min;\n if (x <= this.#min) {\n return 0;\n }\n if (x >= this.#max) {\n return 1;\n }\n if (x - this.#min <= width) {\n // min and max are too close together to do any viable interpolation\n return 0.5;\n }\n return (x - this.#min) / width;\n }\n }\n\n if (x <= this.#min) {\n return 0;\n }\n if (x >= this.#max) {\n return 1;\n }\n const m0 = this.#processed[0].mean;\n // Left Tail\n if (x <= m0) {\n if (m0 - this.#min > 0) {\n return (\n (((x - this.#min) / (m0 - this.#min)) * this.#processed[0].weight) /\n this.#processedWeight /\n 2\n );\n }\n return 0;\n }\n // Right Tail\n const mn = this.#processed[this.#processed.length - 1].mean;\n if (x >= mn) {\n if (this.#max - mn > 0) {\n return (\n 1 -\n (((this.#max - x) / (this.#max - mn)) *\n this.#processed[this.#processed.length - 1].weight) /\n this.#processedWeight /\n 2\n );\n }\n return 1;\n }\n\n const upper = binarySearch(\n this.#processed.length,\n // Treat equals as greater than, so we can use the upper index\n // This is equivalent to:\n // i => this.#processed[i].mean > x ? 
-1 : 1,\n i => x - this.#processed[i].mean || 1,\n );\n\n const z1 = x - this.#processed[upper - 1].mean;\n const z2 = this.#processed[upper].mean - x;\n return (\n weightedAverage(\n this.#cumulative[upper - 1],\n z2,\n this.#cumulative[upper],\n z1,\n ) / this.#processedWeight\n );\n }\n\n #integratedQ(k: number): number {\n return (\n (Math.sin(\n (Math.min(k, this.compression) * Math.PI) / this.compression -\n Math.PI / 2,\n ) +\n 1) /\n 2\n );\n }\n\n #integratedLocation(q: number): number {\n return (this.compression * (Math.asin(2 * q - 1) + Math.PI / 2)) / Math.PI;\n }\n}\n\n// Calculate number of bytes needed for a tdigest of size c,\n// where c is the compression value\nexport function byteSizeForCompression(comp: number): number {\n const c = comp | 0;\n // // A centroid is 2 float64s, so we need 16 bytes for each centroid\n // float_size := 8\n // centroid_size := 2 * float_size\n\n // // Unprocessed and processed can grow up to length c\n // unprocessed_size := centroid_size * c\n // processed_size := unprocessed_size\n\n // // the cumulative field can also be of length c, but each item is a single float64\n // cumulative_size := float_size * c // <- this could also be unprocessed_size / 2\n\n // return unprocessed_size + processed_size + cumulative_size\n\n // // or, more succinctly:\n // return float_size * c * 5\n\n // or even more succinctly\n return c * 40;\n}\n\nfunction weightedAverage(\n x1: number,\n w1: number,\n x2: number,\n w2: number,\n): number {\n if (x1 <= x2) {\n return weightedAverageSorted(x1, w1, x2, w2);\n }\n return weightedAverageSorted(x2, w2, x1, w1);\n}\n\nfunction weightedAverageSorted(\n x1: number,\n w1: number,\n x2: number,\n w2: number,\n): number {\n const x = (x1 * w1 + x2 * w2) / (w1 + w2);\n return Math.max(x1, Math.min(x, x2));\n}\n\nfunction processedSize(size: number, compression: number): number {\n if (size === 0) {\n return Math.ceil(compression) * 2;\n }\n return size;\n}\n\nfunction unprocessedSize(size: number, compression: number): number {\n if (size === 0) {\n return Math.ceil(compression) * 8;\n }\n return size;\n}\n", "import type {ClientGroup} from './client-group.ts';\nimport {Client} from './client.ts';\nimport type {\n ExtendedInspectorDelegate,\n InspectorDelegate,\n Metrics,\n Rep,\n} from './lazy-inspector.ts';\n\nexport type {InspectorDelegate};\n\nexport type Lazy = typeof import('./lazy-inspector.ts');\n\nexport class Inspector {\n readonly #delegate: ExtendedInspectorDelegate;\n readonly client: Client;\n readonly clientGroup: ClientGroup;\n\n constructor(\n rep: Rep,\n delegate: InspectorDelegate,\n getSocket: () => Promise<WebSocket>,\n ) {\n this.#delegate = {\n getQueryMetrics: delegate.getQueryMetrics.bind(delegate),\n getAST: delegate.getAST.bind(delegate),\n get metrics() {\n return delegate.metrics;\n },\n rep,\n getSocket,\n lazy: import('./lazy-inspector.ts'),\n };\n\n this.client = new Client(this.#delegate, rep.clientID, rep.clientGroupID);\n this.clientGroup = this.client.clientGroup;\n }\n\n async metrics(): Promise<Metrics> {\n return (await this.#delegate.lazy).inspectorMetrics(this.#delegate);\n }\n\n async clients(): Promise<Client[]> {\n return (await this.#delegate.lazy).inspectorClients(this.#delegate);\n }\n\n async clientsWithQueries(): Promise<Client[]> {\n return (await this.#delegate.lazy).inspectorClientsWithQueries(\n this.#delegate,\n );\n }\n\n async serverVersion(): Promise<string> {\n return (await this.#delegate.lazy).serverVersion(this.#delegate);\n }\n}\n", "import type {ValueType} 
from '../../zero-protocol/src/client-schema.ts';\nimport type {PrimaryKey} from '../../zero-protocol/src/primary-key.ts';\n\nexport type {ValueType} from '../../zero-protocol/src/client-schema.ts';\n\n/**\n * `related` calls need to know what the available relationships are.\n * The `schema` type encodes this information.\n */\nexport type SchemaValue<T = unknown> =\n | {\n type: ValueType;\n serverName?: string | undefined;\n optional?: boolean | undefined;\n }\n | SchemaValueWithCustomType<T>;\n\nexport type SchemaValueWithCustomType<T> = {\n type: ValueType;\n serverName?: string | undefined;\n optional?: boolean;\n customType: T;\n};\n\nexport type TableSchema = {\n readonly name: string;\n readonly serverName?: string | undefined;\n readonly columns: Record<string, SchemaValue>;\n readonly primaryKey: PrimaryKey;\n};\n\nexport type RelationshipsSchema = {\n readonly [name: string]: Relationship;\n};\n\nexport type TypeNameToTypeMap = {\n string: string;\n number: number;\n boolean: boolean;\n null: null;\n\n // In schema-v2, the user will be able to specify the TS type that\n // the JSON should match and `any`` will no\n // longer be used here.\n // ReadOnlyJSONValue is not used as it causes\n // infinite depth errors to pop up for users of our APIs.\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n json: any;\n};\n\nexport type ColumnTypeName<T extends SchemaValue | ValueType> =\n T extends SchemaValue ? T['type'] : T;\n\n/**\n * Given a schema value, return the TypeScript type.\n *\n * This allows us to create the correct return type for a\n * query that has a selection.\n */\nexport type SchemaValueToTSType<T extends SchemaValue | ValueType> =\n T extends ValueType\n ? TypeNameToTypeMap[T]\n : T extends {\n optional: true;\n }\n ?\n | (T extends SchemaValueWithCustomType<infer V>\n ? V\n : TypeNameToTypeMap[ColumnTypeName<T>])\n | null\n : T extends SchemaValueWithCustomType<infer V>\n ? V\n : TypeNameToTypeMap[ColumnTypeName<T>];\n\ntype Connection = {\n readonly sourceField: readonly string[];\n readonly destField: readonly string[];\n readonly destSchema: string;\n readonly cardinality: Cardinality;\n};\n\nexport type Cardinality = 'one' | 'many';\n\nexport type Relationship =\n | readonly [Connection]\n | readonly [Connection, Connection];\n// | readonly [Connection, Connection, Connection];\n\nexport type LastInTuple<T extends Relationship> = T extends readonly [infer L]\n ? L\n : T extends readonly [unknown, infer L]\n ? L\n : T extends readonly [unknown, unknown, infer L]\n ? L\n : never;\n\nexport type AtLeastOne<T> = readonly [T, ...T[]];\n\nexport function atLeastOne<T>(arr: readonly T[]): AtLeastOne<T> {\n if (arr.length === 0) {\n throw new Error('Expected at least one element');\n }\n return arr as AtLeastOne<T>;\n}\n\nexport function isOneHop(r: Relationship): r is readonly [Connection] {\n return r.length === 1;\n}\n\nexport function isTwoHop(\n r: Relationship,\n): r is readonly [Connection, Connection] {\n return r.length === 2;\n}\n\nexport type Opaque<BaseType, BrandType = unknown> = BaseType & {\n readonly [base]: BaseType;\n readonly [brand]: BrandType;\n};\n\ndeclare const base: unique symbol;\ndeclare const brand: unique symbol;\n\nexport type IsOpaque<T> = T extends {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n readonly [brand]: any;\n}\n ? true\n : false;\n\nexport type ExpandRecursiveSkipOpaque<T> =\n IsOpaque<T> extends true\n ? T\n : T extends object\n ? T extends infer O\n ? 
{[K in keyof O]: ExpandRecursiveSkipOpaque<O[K]>}\n : never\n : T;\n", "export function emptyFunction() {}\nexport const emptyObject = Object.freeze({});\nexport const emptyArray = Object.freeze([]);\nexport function identity<T>(x: T): T {\n return x;\n}\n", "/* eslint-disable @typescript-eslint/no-explicit-any */\nimport type {Expand, ExpandRecursive} from '../../../shared/src/expand.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {type AST, type SimpleOperator} from '../../../zero-protocol/src/ast.ts';\nimport type {Schema as ZeroSchema} from '../../../zero-schema/src/builder/schema-builder.ts';\nimport type {\n LastInTuple,\n SchemaValueToTSType,\n SchemaValueWithCustomType,\n TableSchema,\n} from '../../../zero-schema/src/table-schema.ts';\nimport type {Format, ViewFactory} from '../ivm/view.ts';\nimport type {ExpressionFactory, ParameterReference} from './expression.ts';\nimport type {CustomQueryID} from './named.ts';\nimport type {QueryDelegate} from './query-delegate.ts';\nimport type {TTL} from './ttl.ts';\nimport type {TypedView} from './typed-view.ts';\n\ntype Selector<E extends TableSchema> = keyof E['columns'];\nexport type NoCompoundTypeSelector<T extends TableSchema> = Exclude<\n Selector<T>,\n JsonSelectors<T> | ArraySelectors<T>\n>;\n\ntype JsonSelectors<E extends TableSchema> = {\n [K in keyof E['columns']]: E['columns'][K] extends {type: 'json'} ? K : never;\n}[keyof E['columns']];\n\ntype ArraySelectors<E extends TableSchema> = {\n [K in keyof E['columns']]: E['columns'][K] extends SchemaValueWithCustomType<\n any[]\n >\n ? K\n : never;\n}[keyof E['columns']];\n\nexport type QueryReturn<Q> = Q extends Query<any, any, infer R> ? R : never;\nexport type QueryTable<Q> = Q extends Query<any, infer T, any> ? T : never;\nexport const delegateSymbol = Symbol('delegate');\n\nexport type ExistsOptions = {flip: boolean};\n\nexport type GetFilterType<\n TSchema extends TableSchema,\n TColumn extends keyof TSchema['columns'],\n TOperator extends SimpleOperator,\n> = TOperator extends 'IS' | 'IS NOT'\n ? // SchemaValueToTSType adds null if the type is optional, but we add null\n // no matter what for dx reasons. See:\n // https://github.com/rocicorp/mono/pull/3576#discussion_r1925792608\n SchemaValueToTSType<TSchema['columns'][TColumn]> | null\n : TOperator extends 'IN' | 'NOT IN'\n ? // We don't want to compare to null in where clauses because it causes\n // confusing results:\n // https://zero.rocicorp.dev/docs/reading-data#comparing-to-null\n readonly Exclude<SchemaValueToTSType<TSchema['columns'][TColumn]>, null>[]\n : Exclude<SchemaValueToTSType<TSchema['columns'][TColumn]>, null>;\n\nexport type AvailableRelationships<\n TTable extends string,\n TSchema extends ZeroSchema,\n> = keyof TSchema['relationships'][TTable] & string;\n\nexport type DestTableName<\n TTable extends string,\n TSchema extends ZeroSchema,\n TRelationship extends string,\n> = LastInTuple<TSchema['relationships'][TTable][TRelationship]>['destSchema'];\n\ntype DestRow<\n TTable extends string,\n TSchema extends ZeroSchema,\n TRelationship extends string,\n> = TSchema['relationships'][TTable][TRelationship][0]['cardinality'] extends 'many'\n ? PullRow<DestTableName<TTable, TSchema, TRelationship>, TSchema>\n : PullRow<DestTableName<TTable, TSchema, TRelationship>, TSchema> | undefined;\n\ntype AddSubreturn<TExistingReturn, TSubselectReturn, TAs extends string> = {\n readonly [K in TAs]: undefined extends TSubselectReturn\n ? 
TSubselectReturn\n : readonly TSubselectReturn[];\n} extends infer TNewRelationship\n ? undefined extends TExistingReturn\n ? (Exclude<TExistingReturn, undefined> & TNewRelationship) | undefined\n : TExistingReturn & TNewRelationship\n : never;\n\nexport type PullTableSchema<\n TTable extends string,\n TSchemas extends ZeroSchema,\n> = TSchemas['tables'][TTable];\n\nexport type PullRow<TTable extends string, TSchema extends ZeroSchema> = {\n readonly [K in keyof PullTableSchema<\n TTable,\n TSchema\n >['columns']]: SchemaValueToTSType<\n PullTableSchema<TTable, TSchema>['columns'][K]\n >;\n} & {};\n\nexport type Row<T extends TableSchema | Query<ZeroSchema, string, any>> =\n T extends TableSchema\n ? {\n readonly [K in keyof T['columns']]: SchemaValueToTSType<\n T['columns'][K]\n >;\n }\n : T extends Query<ZeroSchema, string, infer TReturn>\n ? TReturn\n : never;\n\n/**\n * A hybrid query that runs on both client and server.\n * Results are returned immediately from the client followed by authoritative\n * results from the server.\n *\n * Queries are transactional in that all queries update at once when a new transaction\n * has been committed on the client or server. No query results will reflect stale state.\n *\n * A query can be:\n * - {@linkcode materialize | materialize}\n * - awaited (`then`/{@linkcode run})\n * - {@linkcode preload | preloaded}\n *\n * The normal way to use a query would be through your UI framework's bindings (e.g., useQuery(q))\n * or within a custom mutator.\n *\n * `materialize` and `run/then` are provided for more advanced use cases.\n * Remember that any `view` returned by `materialize` must be destroyed.\n *\n * A query can be run as a 1-shot query by awaiting it. E.g.,\n *\n * ```ts\n * const result = await z.query.issue.limit(10);\n * ```\n *\n * For more information on how to use queries, see the documentation:\n * https://zero.rocicorp.dev/docs/reading-data\n *\n * @typeParam TSchema The database schema type extending ZeroSchema\n * @typeParam TTable The name of the table being queried, must be a key of TSchema['tables']\n * @typeParam TReturn The return type of the query, defaults to PullRow<TTable, TSchema>\n */\nexport interface Query<\n TSchema extends ZeroSchema,\n TTable extends keyof TSchema['tables'] & string,\n TReturn = PullRow<TTable, TSchema>,\n> {\n /**\n * Format is used to specify the shape of the query results. This is used by\n * {@linkcode one} and it also describes the shape when using\n * {@linkcode related}.\n */\n readonly format: Format;\n\n /**\n * A string that uniquely identifies this query. 
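The `PullRow` and `Row` helpers above turn a runtime table schema into its TypeScript row type. A sketch with a hand-written, hypothetical `TableSchema` (real schemas come from the schema builder):

```ts
import type {TableSchema} from './table-schema.ts'; // illustrative path
import type {Row} from './query.ts';                // illustrative path

const issue = {
  name: 'issue',
  columns: {
    id: {type: 'string'},
    title: {type: 'string'},
    estimate: {type: 'number', optional: true},
  },
  primaryKey: ['id'],
} as const satisfies TableSchema;

// Resolves to:
// {readonly id: string; readonly title: string; readonly estimate: number | null}
// (optional columns pick up `| null`, per SchemaValueToTSType above).
type IssueRow = Row<typeof issue>;
```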
This can be used to determine\n * if two queries are the same.\n *\n * The hash of a custom query, on the client, is the hash of its AST.\n * The hash of a custom query, on the server, is the hash of its name and args.\n *\n * The first allows many client-side queries to be pinned to the same backend query.\n * The second ensures we do not invoke a named query on the backend more than once for the same `name:arg` pairing.\n *\n * If the query.hash was of `name:args` then `useQuery` would de-dupe\n * queries with divergent ASTs.\n *\n * QueryManager will hash based on `name:args` since it is speaking with\n * the server which tracks queries by `name:args`.\n */\n hash(): string;\n readonly ast: AST;\n readonly customQueryID: CustomQueryID | undefined;\n\n nameAndArgs(\n name: string,\n args: ReadonlyArray<ReadonlyJSONValue>,\n ): Query<TSchema, TTable, TReturn>;\n [delegateSymbol](delegate: QueryDelegate): Query<TSchema, TTable, TReturn>;\n\n /**\n * Related is used to add a related query to the current query. This is used\n * for subqueries and joins. These relationships are defined in the\n * relationships section of the schema. The result of the query will\n * include the related rows in the result set as a sub object of the row.\n *\n * ```typescript\n * const row = await z.query.users\n * .related('posts');\n * // {\n * // id: '1',\n * // posts: [\n * // ...\n * // ]\n * // }\n * ```\n * If you want to add a subquery to the related query, you can do so by\n * providing a callback function that receives the related query as an argument.\n *\n * ```typescript\n * const row = await z.query.users\n * .related('posts', q => q.where('published', true));\n * // {\n * // id: '1',\n * // posts: [\n * // {published: true, ...},\n * // ...\n * // ]\n * // }\n * ```\n *\n * @param relationship The name of the relationship\n */\n related<TRelationship extends AvailableRelationships<TTable, TSchema>>(\n relationship: TRelationship,\n ): Query<\n TSchema,\n TTable,\n AddSubreturn<\n TReturn,\n DestRow<TTable, TSchema, TRelationship>,\n TRelationship\n >\n >;\n related<\n TRelationship extends AvailableRelationships<TTable, TSchema>,\n TSub extends Query<TSchema, string, any>,\n >(\n relationship: TRelationship,\n cb: (\n q: Query<\n TSchema,\n DestTableName<TTable, TSchema, TRelationship>,\n DestRow<TTable, TSchema, TRelationship>\n >,\n ) => TSub,\n ): Query<\n TSchema,\n TTable,\n AddSubreturn<\n TReturn,\n TSub extends Query<TSchema, string, infer TSubReturn>\n ? 
TSubReturn\n : never,\n TRelationship\n >\n >;\n\n /**\n * Represents a condition to filter the query results.\n *\n * @param field The column name to filter on.\n * @param op The operator to use for filtering.\n * @param value The value to compare against.\n *\n * @returns A new query instance with the applied filter.\n *\n * @example\n *\n * ```typescript\n * const query = db.query('users')\n * .where('age', '>', 18)\n * .where('name', 'LIKE', '%John%');\n * ```\n */\n where<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n TOperator extends SimpleOperator,\n >(\n field: TSelector,\n op: TOperator,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, TOperator>\n | ParameterReference,\n ): Query<TSchema, TTable, TReturn>;\n /**\n * Represents a condition to filter the query results.\n *\n * This overload is used when the operator is '='.\n *\n * @param field The column name to filter on.\n * @param value The value to compare against.\n *\n * @returns A new query instance with the applied filter.\n *\n * @example\n * ```typescript\n * const query = db.query('users')\n * .where('age', 18)\n * ```\n */\n where<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n >(\n field: TSelector,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, '='>\n | ParameterReference,\n ): Query<TSchema, TTable, TReturn>;\n\n /**\n * Represents a condition to filter the query results.\n *\n * @param expressionFactory A function that takes a query builder and returns an expression.\n *\n * @returns A new query instance with the applied filter.\n *\n * @example\n * ```typescript\n * const query = db.query('users')\n * .where(({cmp, or}) => or(cmp('age', '>', 18), cmp('name', 'LIKE', '%John%')));\n * ```\n */\n where(\n expressionFactory: ExpressionFactory<TSchema, TTable>,\n ): Query<TSchema, TTable, TReturn>;\n\n whereExists(\n relationship: AvailableRelationships<TTable, TSchema>,\n options?: ExistsOptions | undefined,\n ): Query<TSchema, TTable, TReturn>;\n whereExists<TRelationship extends AvailableRelationships<TTable, TSchema>>(\n relationship: TRelationship,\n cb: (\n q: Query<TSchema, DestTableName<TTable, TSchema, TRelationship>>,\n ) => Query<TSchema, string>,\n options?: ExistsOptions | undefined,\n ): Query<TSchema, TTable, TReturn>;\n\n /**\n * Skips the rows of the query until row matches the given row. If opts is\n * provided, it determines whether the match is inclusive.\n *\n * @param row The row to start from. This is a partial row object and only the provided\n * fields will be used for the comparison.\n * @param opts Optional options object that specifies whether the match is inclusive.\n * If `inclusive` is true, the row will be included in the result.\n * If `inclusive` is false, the row will be excluded from the result and the result\n * will start from the next row.\n *\n * @returns A new query instance with the applied start condition.\n */\n start(\n row: Partial<PullRow<TTable, TSchema>>,\n opts?: {inclusive: boolean} | undefined,\n ): Query<TSchema, TTable, TReturn>;\n\n /**\n * Limits the number of rows returned by the query.\n * @param limit The maximum number of rows to return.\n *\n * @returns A new query instance with the applied limit.\n */\n limit(limit: number): Query<TSchema, TTable, TReturn>;\n\n /**\n * Orders the results by a specified column. 
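`start`, `limit`, and `orderBy` combine naturally into keyset pagination. A hedged sketch using the `z.query.issue` handle from the doc examples above; the schema is hypothetical, and the boundary behavior is pinned explicitly via the `inclusive` option:

```ts
// Keyset pagination: order, take a page, resume after the last row.
const pageSize = 20;

const firstPage = await z.query.issue
  .orderBy('modified', 'desc')
  .limit(pageSize);

// Resume from the last row of the previous page. Per the `start` docs above,
// {inclusive: false} excludes the boundary row itself.
const last = firstPage[firstPage.length - 1];
const secondPage = await z.query.issue
  .orderBy('modified', 'desc')
  .start(last, {inclusive: false})
  .limit(pageSize);
```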
If multiple orderings are\n * specified, the results will be ordered by the first column, then the\n * second column, and so on.\n *\n * @param field The column name to order by.\n * @param direction The direction to order the results (ascending or descending).\n *\n * @returns A new query instance with the applied order.\n */\n orderBy<TSelector extends Selector<PullTableSchema<TTable, TSchema>>>(\n field: TSelector,\n direction: 'asc' | 'desc',\n ): Query<TSchema, TTable, TReturn>;\n\n /**\n * Limits the number of rows returned by the query to a single row and then\n * unpacks the result so that you do not get an array of rows but a single\n * row. This is useful when you expect only one row to be returned and want to\n * work with the row directly.\n *\n * If the query returns no rows, the result will be `undefined`.\n *\n * @returns A new query instance with the applied limit to one row.\n */\n one(): Query<TSchema, TTable, TReturn | undefined>;\n\n /**\n * Creates a materialized view of the query. This is a view that will be kept\n * in memory and updated as the query results change.\n *\n * Most of the time you will want to use the `useQuery` hook or the\n * `run`/`then` method to get the results of a query. This method is only\n * needed if you want to access to lower level APIs of the view.\n *\n * @param ttl Time To Live. This is the amount of time to keep the rows\n * associated with this query after `TypedView.destroy`\n * has been called.\n */\n materialize(ttl?: TTL): TypedView<HumanReadable<TReturn>>;\n /**\n * Creates a custom materialized view using a provided factory function. This\n * allows framework-specific bindings (like SolidJS, Vue, etc.) to create\n * optimized views.\n *\n * @param factory A function that creates a custom view implementation\n * @param ttl Optional Time To Live for the view's data after destruction\n * @returns A custom view instance of type {@linkcode T}\n *\n * @example\n * ```ts\n * const view = query.materialize(createSolidViewFactory, '1m');\n * ```\n */\n materialize<T>(\n factory: ViewFactory<TSchema, TTable, TReturn, T>,\n ttl?: TTL,\n ): T;\n\n /**\n * Executes the query and returns the result once. The `options` parameter\n * specifies whether to wait for complete results or return immediately,\n * and the time to live for the query.\n *\n * - `{type: 'unknown'}`: Returns a snapshot of the data immediately.\n * - `{type: 'complete'}`: Waits for the latest, complete results from the server.\n *\n * By default, `run` uses `{type: 'unknown'}` to avoid waiting for the server.\n *\n * `Query` implements `PromiseLike`, and calling `then` on it will invoke `run`\n * with the default behavior (`unknown`).\n *\n * @param options Options to control the result type.\n * @param options.type The type of result to return.\n * @param options.ttl Time To Live. This is the amount of time to keep the rows\n * associated with this query after the returned promise has\n * resolved.\n * @returns A promise resolving to the query result.\n *\n * @example\n * ```js\n * const result = await query.run({type: 'complete', ttl: '1m'});\n * ```\n */\n run(options?: RunOptions): Promise<HumanReadable<TReturn>>;\n\n /**\n * Preload loads the data into the clients cache without keeping it in memory.\n * This is useful for preloading data that will be used later.\n *\n * @param options Options for preloading the query.\n * @param options.ttl Time To Live. 
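Pulling the `run`/`materialize`/`preload` surface together, a consumption sketch; `z` is assumed to be a Zero instance as in the earlier examples, and the listener call follows `TypedView` as used elsewhere in the package rather than a verified signature:

```ts
// The main ways to consume a query.

// 1. One-shot: awaiting invokes run() with {type: 'unknown'} (local snapshot).
const snapshot = await z.query.issue.limit(10);

// 2. Wait for authoritative server results instead of a local snapshot.
const authoritative = await z.query.issue.limit(10).run({type: 'complete'});

// 3. Materialized view: kept up to date until destroyed.
const view = z.query.issue.limit(10).materialize('1m');
view.addListener(data => console.log('issues now:', data)); // sketched API
view.destroy(); // views from materialize() must always be destroyed

// 4. Preload: populate the client cache without holding rows in memory.
const {cleanup, complete} = z.query.issue.preload({ttl: '5m'});
await complete;
cleanup();
```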
This is the amount of time to keep the rows\n * associated with this query after {@linkcode cleanup} has\n * been called.\n */\n preload(options?: PreloadOptions): {\n cleanup: () => void;\n complete: Promise<void>;\n };\n}\n\nexport type PreloadOptions = {\n /**\n * Time To Live. This is the amount of time to keep the rows associated with\n * this query after {@linkcode cleanup} has been called.\n */\n ttl?: TTL | undefined;\n};\n\nexport type MaterializeOptions = PreloadOptions;\n\n/**\n * A helper type that tries to make the type more readable.\n */\nexport type HumanReadable<T> = undefined extends T ? Expand<T> : Expand<T>[];\n\n/**\n * A helper type that tries to make the type more readable.\n */\n// Note: opaque types expand incorrectly.\nexport type HumanReadableRecursive<T> = undefined extends T\n ? ExpandRecursive<T>\n : ExpandRecursive<T>[];\n\n/**\n * The kind of results we want to wait for when using {@linkcode run} on {@linkcode Query}.\n *\n * `unknown` means we don't want to wait for the server to return results. The result is a\n * snapshot of the data at the time the query was run.\n *\n * `complete` means we want to ensure that we have the latest result from the server. The\n * result is a complete and up-to-date view of the data. In some cases this means that we\n * have to wait for the server to return results. To ensure that we have the result for\n * this query you can preload it before calling run. See {@link preload}.\n *\n * By default, `run` uses `{type: 'unknown'}` to avoid waiting for the server.\n *\n * The `ttl` option is used to specify the time to live for the query. This is the amount of\n * time to keep the rows associated with this query after the promise has resolved.\n */\nexport type RunOptions = {\n type: 'unknown' | 'complete';\n ttl?: TTL;\n};\n\nexport const DEFAULT_RUN_OPTIONS_UNKNOWN = {\n type: 'unknown',\n} as const;\n\nexport const DEFAULT_RUN_OPTIONS_COMPLETE = {\n type: 'complete',\n} as const;\n", "import type {LogContext} from '@rocicorp/logger';\n\nexport type TimeUnit = 's' | 'm' | 'h' | 'd' | 'y';\n\n/**\n * Time To Live. This is used for query expiration.\n * - `forever` means the query will never expire.\n * - `none` means the query will expire immediately.\n * - A number means the query will expire after that many milliseconds.\n * - A negative number means the query will never expire, this is same as 'forever'.\n * - A string like `1s` means the query will expire after that many seconds.\n * - A string like `1m` means the query will expire after that many minutes.\n * - A string like `1h` means the query will expire after that many hours.\n * - A string like `1d` means the query will expire after that many days.\n * - A string like `1y` means the query will expire after that many years.\n */\nexport type TTL = `${number}${TimeUnit}` | 'forever' | 'none' | number;\n\nexport const DEFAULT_TTL: TTL = '5m';\nexport const DEFAULT_TTL_MS = 1_000 * 60 * 5;\n\nexport const DEFAULT_PRELOAD_TTL: TTL = 'none';\nexport const DEFAULT_PRELOAD_TTL_MS = 0;\n\nexport const MAX_TTL: TTL = '10m';\nexport const MAX_TTL_MS = 1_000 * 60 * 10;\n\nconst multiplier = {\n s: 1000,\n m: 60 * 1000,\n h: 60 * 60 * 1000,\n d: 24 * 60 * 60 * 1000,\n y: 365 * 24 * 60 * 60 * 1000,\n} as const;\n\nexport function parseTTL(ttl: TTL): number {\n if (typeof ttl === 'number') {\n return Number.isNaN(ttl) ? 0 : !Number.isFinite(ttl) || ttl < 0 ? 
-1 : ttl;\n }\n if (ttl === 'none') {\n return 0;\n }\n if (ttl === 'forever') {\n return -1;\n }\n const multi = multiplier[ttl[ttl.length - 1] as TimeUnit];\n return Number(ttl.slice(0, -1)) * multi;\n}\n\nexport function compareTTL(a: TTL, b: TTL): number {\n const ap = parseTTL(a);\n const bp = parseTTL(b);\n if (ap === -1 && bp !== -1) {\n return 1;\n }\n if (ap !== -1 && bp === -1) {\n return -1;\n }\n return ap - bp;\n}\n\nexport function normalizeTTL(ttl: TTL): TTL {\n if (typeof ttl === 'string') {\n return ttl;\n }\n\n if (ttl < 0) {\n return 'forever';\n }\n\n if (ttl === 0) {\n return 'none';\n }\n\n let shortest = ttl.toString();\n const lengthOfNumber = shortest.length;\n for (const unit of ['y', 'd', 'h', 'm', 's'] as const) {\n const multi = multiplier[unit];\n const value = ttl / multi;\n const candidate = `${value}${unit}`;\n if (candidate.length < shortest.length) {\n shortest = candidate;\n }\n }\n\n return (shortest.length < lengthOfNumber ? shortest : ttl) as TTL;\n}\n\nexport function clampTTL(ttl: TTL, lc?: Pick<LogContext, 'warn'>): number {\n const parsedTTL = parseTTL(ttl);\n if (parsedTTL === -1 || parsedTTL > 10 * 60 * 1000) {\n // 10 minutes in milliseconds\n lc?.warn?.(`TTL (${ttl}) is too high, clamping to ${MAX_TTL}`);\n return parseTTL(MAX_TTL);\n }\n return parsedTTL;\n}\n", "/* eslint-disable @typescript-eslint/naming-convention */\n/* eslint-disable @typescript-eslint/no-explicit-any */\nimport {resolver} from '@rocicorp/resolver';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {\n AST,\n CompoundKey,\n Condition,\n Ordering,\n Parameter,\n SimpleOperator,\n System,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row as IVMRow} from '../../../zero-protocol/src/data.ts';\nimport {\n hashOfAST,\n hashOfNameAndArgs,\n} from '../../../zero-protocol/src/query-hash.ts';\nimport type {Schema} from '../../../zero-schema/src/builder/schema-builder.ts';\nimport {\n isOneHop,\n isTwoHop,\n type TableSchema,\n} from '../../../zero-schema/src/table-schema.ts';\nimport {buildPipeline} from '../builder/builder.ts';\nimport {NotImplementedError} from '../error.ts';\nimport {ArrayView} from '../ivm/array-view.ts';\nimport type {Input} from '../ivm/operator.ts';\nimport type {Format, ViewFactory} from '../ivm/view.ts';\nimport {assertNoNotExists} from './assert-no-not-exists.ts';\nimport {\n and,\n cmp,\n ExpressionBuilder,\n simplifyCondition,\n type ExpressionFactory,\n} from './expression.ts';\nimport type {CustomQueryID} from './named.ts';\nimport type {GotCallback, QueryDelegate} from './query-delegate.ts';\nimport {\n delegateSymbol,\n type ExistsOptions,\n type GetFilterType,\n type HumanReadable,\n type MaterializeOptions,\n type PreloadOptions,\n type PullRow,\n type Query,\n type QueryReturn,\n type QueryTable,\n type RunOptions,\n} from './query.ts';\nimport {DEFAULT_PRELOAD_TTL_MS, DEFAULT_TTL_MS, type TTL} from './ttl.ts';\nimport type {TypedView} from './typed-view.ts';\nimport type {ErroredQuery} from '../../../zero-protocol/src/custom-queries.ts';\n\nexport type AnyQuery = Query<Schema, string, any>;\n\nexport function materialize<S extends Schema, T, Q>(\n query: Q,\n delegate: QueryDelegate,\n factoryOrOptions?:\n | ViewFactory<S, QueryTable<Q>, QueryReturn<Q>, T>\n | MaterializeOptions\n | undefined,\n maybeOptions?: MaterializeOptions | 
undefined,\n) {\n if (typeof factoryOrOptions === 'function') {\n return (\n (query as AnyQuery)\n // eslint-disable-next-line no-unexpected-multiline\n [delegateSymbol](delegate)\n .materialize(factoryOrOptions, maybeOptions?.ttl)\n );\n }\n return (\n (query as AnyQuery)\n // eslint-disable-next-line no-unexpected-multiline\n [delegateSymbol](delegate)\n .materialize(factoryOrOptions?.ttl)\n );\n}\n\nconst astSymbol = Symbol();\n\nexport function ast(query: AnyQuery): AST {\n return (query as AbstractQuery<Schema, string>)[astSymbol];\n}\n\nexport function newQuery<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n>(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n table: TTable,\n): Query<TSchema, TTable> {\n return new QueryImpl(\n delegate,\n schema,\n table,\n {table},\n defaultFormat,\n undefined,\n );\n}\n\nexport function staticParam(\n anchorClass: 'authData' | 'preMutationRow',\n field: string | string[],\n): Parameter {\n return {\n type: 'static',\n anchor: anchorClass,\n // for backwards compatibility\n field: field.length === 1 ? field[0] : field,\n };\n}\n\nexport const SUBQ_PREFIX = 'zsubq_';\n\nexport const defaultFormat = {singular: false, relationships: {}} as const;\n\nexport const newQuerySymbol = Symbol();\n\nexport abstract class AbstractQuery<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n TReturn = PullRow<TTable, TSchema>,\n> implements Query<TSchema, TTable, TReturn>\n{\n readonly #schema: TSchema;\n protected readonly _delegate: QueryDelegate | undefined;\n readonly #tableName: TTable;\n readonly _ast: AST;\n readonly format: Format;\n #hash: string = '';\n readonly #system: System;\n readonly #currentJunction: string | undefined;\n readonly customQueryID: CustomQueryID | undefined;\n\n constructor(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n tableName: TTable,\n ast: AST,\n format: Format,\n system: System,\n customQueryID: CustomQueryID | undefined,\n currentJunction?: string | undefined,\n ) {\n this.#schema = schema;\n this._delegate = delegate;\n this.#tableName = tableName;\n this._ast = ast;\n this.format = format;\n this.#system = system;\n this.#currentJunction = currentJunction;\n this.customQueryID = customQueryID;\n }\n\n [delegateSymbol](delegate: QueryDelegate): Query<TSchema, TTable, TReturn> {\n return this[newQuerySymbol](\n delegate,\n this.#schema,\n this.#tableName,\n this._ast,\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n }\n\n nameAndArgs(\n name: string,\n args: ReadonlyArray<ReadonlyJSONValue>,\n ): Query<TSchema, TTable, TReturn> {\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n this._ast,\n this.format,\n {\n name,\n args: args as ReadonlyArray<ReadonlyJSONValue>,\n },\n this.#currentJunction,\n );\n }\n\n get [astSymbol](): AST {\n return this._ast;\n }\n\n get ast() {\n return this._completeAst();\n }\n\n hash(): string {\n if (!this.#hash) {\n this.#hash = hashOfAST(this._completeAst());\n }\n return this.#hash;\n }\n\n // TODO(arv): Put this in the delegate?\n protected abstract [newQuerySymbol]<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n TReturn,\n >(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n table: TTable,\n ast: AST,\n format: Format,\n customQueryID: CustomQueryID | undefined,\n currentJunction: string | undefined,\n ): AbstractQuery<TSchema, TTable, TReturn>;\n\n one = (): Query<TSchema, TTable, TReturn | undefined> =>\n 
this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n limit: 1,\n },\n {\n ...this.format,\n singular: true,\n },\n this.customQueryID,\n this.#currentJunction,\n );\n\n whereExists = (\n relationship: string,\n cbOrOptions?: ((q: AnyQuery) => AnyQuery) | ExistsOptions | undefined,\n options?: ExistsOptions | undefined,\n ): Query<TSchema, TTable, TReturn> => {\n const cb = typeof cbOrOptions === 'function' ? cbOrOptions : undefined;\n const opts = typeof cbOrOptions === 'function' ? options : cbOrOptions;\n const flipped = opts?.flip ?? false;\n return this.where(({exists}) => exists(relationship, cb, {flip: flipped}));\n };\n\n related = (\n relationship: string,\n cb?: (q: AnyQuery) => AnyQuery,\n ): AnyQuery => {\n if (relationship.startsWith(SUBQ_PREFIX)) {\n throw new Error(\n `Relationship names may not start with \"${SUBQ_PREFIX}\". That is a reserved prefix.`,\n );\n }\n cb = cb ?? (q => q);\n\n const related = this.#schema.relationships[this.#tableName][relationship];\n assert(related, 'Invalid relationship');\n if (isOneHop(related)) {\n const {destSchema, destField, sourceField, cardinality} = related[0];\n const q: AnyQuery = this[newQuerySymbol](\n this._delegate,\n this.#schema,\n destSchema,\n {\n table: destSchema,\n alias: relationship,\n },\n {\n relationships: {},\n singular: cardinality === 'one',\n },\n this.customQueryID,\n undefined,\n ) as AnyQuery;\n // Intentionally not setting to `one` as it is a perf degradation\n // and the user should not be making the mistake of setting cardinality to\n // `one` when it is actually not.\n // if (cardinality === 'one') {\n // q = q.one();\n // }\n const sq = cb(q) as AbstractQuery<Schema, string>;\n assert(\n isCompoundKey(sourceField),\n 'The source of a relationship must specify at last 1 field',\n );\n assert(\n isCompoundKey(destField),\n 'The destination of a relationship must specify at last 1 field',\n );\n assert(\n sourceField.length === destField.length,\n 'The source and destination of a relationship must have the same number of fields',\n );\n\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n related: [\n ...(this._ast.related ?? 
[]),\n {\n system: this.#system,\n correlation: {\n parentField: sourceField,\n childField: destField,\n },\n subquery: addPrimaryKeysToAst(\n this.#schema.tables[destSchema],\n sq._ast,\n ),\n },\n ],\n },\n {\n ...this.format,\n relationships: {\n ...this.format.relationships,\n [relationship]: sq.format,\n },\n },\n this.customQueryID,\n this.#currentJunction,\n );\n }\n\n if (isTwoHop(related)) {\n const [firstRelation, secondRelation] = related;\n const {destSchema} = secondRelation;\n const junctionSchema = firstRelation.destSchema;\n const sq = cb(\n this[newQuerySymbol](\n this._delegate,\n this.#schema,\n destSchema,\n {\n table: destSchema,\n alias: relationship,\n },\n {\n relationships: {},\n singular: secondRelation.cardinality === 'one',\n },\n this.customQueryID,\n relationship,\n ) as unknown as QueryImpl<Schema, string>,\n ) as unknown as QueryImpl<Schema, string>;\n\n assert(isCompoundKey(firstRelation.sourceField), 'Invalid relationship');\n assert(isCompoundKey(firstRelation.destField), 'Invalid relationship');\n assert(isCompoundKey(secondRelation.sourceField), 'Invalid relationship');\n assert(isCompoundKey(secondRelation.destField), 'Invalid relationship');\n\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n related: [\n ...(this._ast.related ?? []),\n {\n system: this.#system,\n correlation: {\n parentField: firstRelation.sourceField,\n childField: firstRelation.destField,\n },\n hidden: true,\n subquery: {\n table: junctionSchema,\n alias: relationship,\n orderBy: addPrimaryKeys(\n this.#schema.tables[junctionSchema],\n undefined,\n ),\n related: [\n {\n system: this.#system,\n correlation: {\n parentField: secondRelation.sourceField,\n childField: secondRelation.destField,\n },\n subquery: addPrimaryKeysToAst(\n this.#schema.tables[destSchema],\n sq._ast,\n ),\n },\n ],\n },\n },\n ],\n },\n {\n ...this.format,\n relationships: {\n ...this.format.relationships,\n [relationship]: sq.format,\n },\n },\n this.customQueryID,\n this.#currentJunction,\n );\n }\n\n throw new Error(`Invalid relationship ${relationship}`);\n };\n\n where = (\n fieldOrExpressionFactory: string | ExpressionFactory<TSchema, TTable>,\n opOrValue?: SimpleOperator | GetFilterType<any, any, any> | Parameter,\n value?: GetFilterType<any, any, any> | Parameter,\n ): Query<TSchema, TTable, TReturn> => {\n let cond: Condition;\n\n if (typeof fieldOrExpressionFactory === 'function') {\n cond = fieldOrExpressionFactory(\n new ExpressionBuilder(this._exists) as ExpressionBuilder<\n TSchema,\n TTable\n >,\n );\n } else {\n assert(opOrValue !== undefined, 'Invalid condition');\n cond = cmp(fieldOrExpressionFactory, opOrValue, value);\n }\n\n const existingWhere = this._ast.where;\n if (existingWhere) {\n cond = and(existingWhere, cond);\n }\n\n const where = simplifyCondition(cond);\n\n if (this.#system === 'client') {\n // We need to do this after the DNF since the DNF conversion might change\n // an EXISTS to a NOT EXISTS condition (and vice versa).\n assertNoNotExists(where);\n }\n\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n where,\n },\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n };\n\n start = (\n row: Partial<PullRow<TTable, TSchema>>,\n opts?: {inclusive: boolean} | undefined,\n ): Query<TSchema, TTable, TReturn> =>\n this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n start: {\n row,\n exclusive: 
!opts?.inclusive,\n },\n },\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n\n limit = (limit: number): Query<TSchema, TTable, TReturn> => {\n if (limit < 0) {\n throw new Error('Limit must be non-negative');\n }\n if ((limit | 0) !== limit) {\n throw new Error('Limit must be an integer');\n }\n if (this.#currentJunction) {\n throw new NotImplementedError(\n 'Limit is not supported in junction relationships yet. Junction relationship being limited: ' +\n this.#currentJunction,\n );\n }\n\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n limit,\n },\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n };\n\n orderBy = <TSelector extends keyof TSchema['tables'][TTable]['columns']>(\n field: TSelector,\n direction: 'asc' | 'desc',\n ): Query<TSchema, TTable, TReturn> => {\n if (this.#currentJunction) {\n throw new NotImplementedError(\n 'Order by is not supported in junction relationships yet. Junction relationship being ordered: ' +\n this.#currentJunction,\n );\n }\n return this[newQuerySymbol](\n this._delegate,\n this.#schema,\n this.#tableName,\n {\n ...this._ast,\n orderBy: [...(this._ast.orderBy ?? []), [field as string, direction]],\n },\n this.format,\n this.customQueryID,\n this.#currentJunction,\n );\n };\n\n protected _exists = (\n relationship: string,\n cb: ((query: AnyQuery) => AnyQuery) | undefined,\n options?: ExistsOptions | undefined,\n ): Condition => {\n cb = cb ?? (q => q);\n const flip = options?.flip ?? false;\n const related = this.#schema.relationships[this.#tableName][relationship];\n assert(related, 'Invalid relationship');\n\n if (isOneHop(related)) {\n const {destSchema, sourceField, destField} = related[0];\n assert(isCompoundKey(sourceField), 'Invalid relationship');\n assert(isCompoundKey(destField), 'Invalid relationship');\n\n const sq = cb(\n this[newQuerySymbol](\n this._delegate,\n this.#schema,\n destSchema,\n {\n table: destSchema,\n alias: `${SUBQ_PREFIX}${relationship}`,\n },\n defaultFormat,\n this.customQueryID,\n undefined,\n ) as AnyQuery,\n ) as unknown as QueryImpl<any, any>;\n return {\n type: 'correlatedSubquery',\n related: {\n system: this.#system,\n correlation: {\n parentField: sourceField,\n childField: destField,\n },\n subquery: addPrimaryKeysToAst(\n this.#schema.tables[destSchema],\n sq._ast,\n ),\n },\n op: 'EXISTS',\n flip,\n };\n }\n\n if (isTwoHop(related)) {\n const [firstRelation, secondRelation] = related;\n assert(isCompoundKey(firstRelation.sourceField), 'Invalid relationship');\n assert(isCompoundKey(firstRelation.destField), 'Invalid relationship');\n assert(isCompoundKey(secondRelation.sourceField), 'Invalid relationship');\n assert(isCompoundKey(secondRelation.destField), 'Invalid relationship');\n const {destSchema} = secondRelation;\n const junctionSchema = firstRelation.destSchema;\n const queryToDest = cb(\n this[newQuerySymbol](\n this._delegate,\n this.#schema,\n destSchema,\n {\n table: destSchema,\n alias: `${SUBQ_PREFIX}zhidden_${relationship}`,\n },\n defaultFormat,\n this.customQueryID,\n relationship,\n ) as AnyQuery,\n );\n\n return {\n type: 'correlatedSubquery',\n related: {\n system: this.#system,\n correlation: {\n parentField: firstRelation.sourceField,\n childField: firstRelation.destField,\n },\n subquery: {\n table: junctionSchema,\n alias: `${SUBQ_PREFIX}${relationship}`,\n orderBy: addPrimaryKeys(\n this.#schema.tables[junctionSchema],\n undefined,\n ),\n where: {\n type: 'correlatedSubquery',\n related: {\n 
system: this.#system,\n correlation: {\n parentField: secondRelation.sourceField,\n childField: secondRelation.destField,\n },\n\n subquery: addPrimaryKeysToAst(\n this.#schema.tables[destSchema],\n (queryToDest as QueryImpl<any, any>)._ast,\n ),\n },\n op: 'EXISTS',\n flip,\n },\n },\n },\n op: 'EXISTS',\n flip,\n };\n }\n\n throw new Error(`Invalid relationship ${relationship}`);\n };\n\n #completedAST: AST | undefined;\n\n protected _completeAst(): AST {\n if (!this.#completedAST) {\n const finalOrderBy = addPrimaryKeys(\n this.#schema.tables[this.#tableName],\n this._ast.orderBy,\n );\n if (this._ast.start) {\n const {row} = this._ast.start;\n const narrowedRow: Writable<IVMRow> = {};\n for (const [field] of finalOrderBy) {\n narrowedRow[field] = row[field];\n }\n this.#completedAST = {\n ...this._ast,\n start: {\n ...this._ast.start,\n row: narrowedRow,\n },\n orderBy: finalOrderBy,\n };\n } else {\n this.#completedAST = {\n ...this._ast,\n orderBy: addPrimaryKeys(\n this.#schema.tables[this.#tableName],\n this._ast.orderBy,\n ),\n };\n }\n }\n return this.#completedAST;\n }\n\n abstract materialize(\n ttl?: TTL | undefined,\n ): TypedView<HumanReadable<TReturn>>;\n abstract materialize<T>(\n factory: ViewFactory<TSchema, TTable, TReturn, T>,\n ttl?: TTL | undefined,\n ): T;\n\n abstract run(options?: RunOptions): Promise<HumanReadable<TReturn>>;\n\n abstract preload(): {\n cleanup: () => void;\n complete: Promise<void>;\n };\n}\n\nconst completedAstSymbol = Symbol();\n\nexport function completedAST(q: Query<Schema, string, any>) {\n return (q as QueryImpl<Schema, string>)[completedAstSymbol];\n}\n\nexport class QueryImpl<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n TReturn = PullRow<TTable, TSchema>,\n> extends AbstractQuery<TSchema, TTable, TReturn> {\n readonly #system: System;\n\n constructor(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n tableName: TTable,\n ast: AST = {table: tableName},\n format: Format = defaultFormat,\n system: System = 'client',\n customQueryID?: CustomQueryID | undefined,\n currentJunction?: string | undefined,\n ) {\n super(\n delegate,\n schema,\n tableName,\n ast,\n format,\n system,\n customQueryID,\n currentJunction,\n );\n this.#system = system;\n }\n\n get [completedAstSymbol](): AST {\n return this._completeAst();\n }\n\n protected [newQuerySymbol]<\n TSchema extends Schema,\n TTable extends string,\n TReturn,\n >(\n delegate: QueryDelegate | undefined,\n schema: TSchema,\n tableName: TTable,\n ast: AST,\n format: Format,\n customQueryID: CustomQueryID | undefined,\n currentJunction: string | undefined,\n ): QueryImpl<TSchema, TTable, TReturn> {\n return new QueryImpl(\n delegate,\n schema,\n tableName,\n ast,\n format,\n this.#system,\n customQueryID,\n currentJunction,\n );\n }\n\n materialize<T>(\n factoryOrTTL?: ViewFactory<TSchema, TTable, TReturn, T> | TTL,\n ttl: TTL = DEFAULT_TTL_MS,\n ): T {\n const delegate = must(\n this._delegate,\n 'materialize requires a query delegate to be set',\n );\n let factory: ViewFactory<TSchema, TTable, TReturn, T> | undefined;\n if (typeof factoryOrTTL === 'function') {\n factory = factoryOrTTL;\n } else {\n ttl = factoryOrTTL ?? DEFAULT_TTL_MS;\n }\n const ast = this._completeAst();\n const queryID = this.customQueryID\n ? 
hashOfNameAndArgs(this.customQueryID.name, this.customQueryID.args)\n : this.hash();\n const queryCompleteResolver = resolver<true>();\n let queryComplete: boolean | ErroredQuery = delegate.defaultQueryComplete;\n const updateTTL = (newTTL: TTL) => {\n this.customQueryID\n ? delegate.updateCustomQuery(this.customQueryID, newTTL)\n : delegate.updateServerQuery(ast, newTTL);\n };\n\n const gotCallback: GotCallback = (got, error) => {\n if (error) {\n queryCompleteResolver.reject(error);\n queryComplete = error;\n return;\n }\n\n if (got) {\n delegate.addMetric(\n 'query-materialization-end-to-end',\n performance.now() - t0,\n queryID,\n ast,\n );\n queryComplete = true;\n queryCompleteResolver.resolve(true);\n }\n };\n\n let removeCommitObserver: (() => void) | undefined;\n const onDestroy = () => {\n input.destroy();\n removeCommitObserver?.();\n removeAddedQuery();\n };\n\n const t0 = performance.now();\n\n const removeAddedQuery = this.customQueryID\n ? delegate.addCustomQuery(ast, this.customQueryID, ttl, gotCallback)\n : delegate.addServerQuery(ast, ttl, gotCallback);\n\n const input = buildPipeline(ast, delegate, queryID);\n\n const view = delegate.batchViewUpdates(() =>\n (factory ?? arrayViewFactory)(\n this,\n input,\n this.format,\n onDestroy,\n cb => {\n removeCommitObserver = delegate.onTransactionCommit(cb);\n },\n queryComplete || queryCompleteResolver.promise,\n updateTTL,\n ),\n );\n\n delegate.addMetric(\n 'query-materialization-client',\n performance.now() - t0,\n queryID,\n );\n\n return view as T;\n }\n\n run(options?: RunOptions): Promise<HumanReadable<TReturn>> {\n const delegate = must(\n this._delegate,\n 'run requires a query delegate to be set',\n );\n delegate.assertValidRunOptions(options);\n const v: TypedView<HumanReadable<TReturn>> = this.materialize(options?.ttl);\n if (options?.type === 'complete') {\n return new Promise(resolve => {\n v.addListener((data, type) => {\n if (type === 'complete') {\n v.destroy();\n resolve(data as HumanReadable<TReturn>);\n } else if (type === 'error') {\n v.destroy();\n resolve(Promise.reject(data));\n }\n });\n });\n }\n\n options?.type satisfies 'unknown' | undefined;\n\n const ret = v.data;\n v.destroy();\n return Promise.resolve(ret);\n }\n\n preload(options?: PreloadOptions): {\n cleanup: () => void;\n complete: Promise<void>;\n } {\n const delegate = must(\n this._delegate,\n 'preload requires a query delegate to be set',\n );\n const ttl = options?.ttl ?? DEFAULT_PRELOAD_TTL_MS;\n const ast = this._completeAst();\n const {resolve, promise: complete} = resolver<void>();\n if (this.customQueryID) {\n const cleanup = delegate.addCustomQuery(\n ast,\n this.customQueryID,\n ttl,\n got => {\n if (got) {\n resolve();\n }\n },\n );\n return {\n cleanup,\n complete,\n };\n }\n\n const cleanup = delegate.addServerQuery(ast, ttl, got => {\n if (got) {\n resolve();\n }\n });\n return {\n cleanup,\n complete,\n };\n }\n}\n\nfunction addPrimaryKeys(\n schema: TableSchema,\n orderBy: Ordering | undefined,\n): Ordering {\n orderBy = orderBy ?? 
[];\n const {primaryKey} = schema;\n const primaryKeysToAdd = new Set(primaryKey);\n\n for (const [field] of orderBy) {\n primaryKeysToAdd.delete(field);\n }\n\n if (primaryKeysToAdd.size === 0) {\n return orderBy;\n }\n\n return [\n ...orderBy,\n ...[...primaryKeysToAdd].map(key => [key, 'asc'] as [string, 'asc']),\n ];\n}\n\nfunction addPrimaryKeysToAst(schema: TableSchema, ast: AST): AST {\n return {\n ...ast,\n orderBy: addPrimaryKeys(schema, ast.orderBy),\n };\n}\n\nfunction arrayViewFactory<\n TSchema extends Schema,\n TTable extends string,\n TReturn,\n>(\n _query: AbstractQuery<TSchema, TTable, TReturn>,\n input: Input,\n format: Format,\n onDestroy: () => void,\n onTransactionCommit: (cb: () => void) => void,\n queryComplete: true | ErroredQuery | Promise<true>,\n updateTTL: (ttl: TTL) => void,\n): TypedView<HumanReadable<TReturn>> {\n const v = new ArrayView<HumanReadable<TReturn>>(\n input,\n format,\n queryComplete,\n updateTTL,\n );\n v.onDestroy = onDestroy;\n onTransactionCommit(() => {\n v.flush();\n });\n return v;\n}\n\nfunction isCompoundKey(field: readonly string[]): field is CompoundKey {\n return Array.isArray(field) && field.length >= 1;\n}\n", "import {h64} from '../../shared/src/hash.ts';\nimport {normalizeAST, type AST} from './ast.ts';\n\nconst hashCache = new WeakMap<AST, string>();\n\nexport function hashOfAST(ast: AST): string {\n const normalized = normalizeAST(ast);\n const cached = hashCache.get(normalized);\n if (cached) {\n return cached;\n }\n const hash = h64(JSON.stringify(normalized)).toString(36);\n hashCache.set(normalized, hash);\n return hash;\n}\n\nexport function hashOfNameAndArgs(\n name: string,\n args: readonly unknown[],\n): string {\n const argsString = JSON.stringify(args);\n return h64(`${name}:${argsString}`).toString(36);\n}\n", "import type {FetchRequest, Input, InputBase, Output} from './operator.ts';\nimport {drainStreams, type Node} from './data.ts';\nimport type {Change} from './change.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\nimport type {BuilderDelegate} from '../builder/builder.ts';\n\n/**\n * The `where` clause of a ZQL query is implemented using a sub-graph of\n * `FilterOperators`. This sub-graph starts with a `FilterStart` operator,\n * that adapts from the normal `Operator` `Output`, to the\n * `FilterOperator` `FilterInput`, and ends with a `FilterEnd` operator that\n * adapts from a `FilterOperator` `FilterOutput` to a normal `Operator` `Input`.\n * `FilterOperator'`s do not have `fetch` or `cleanup` instead they have a\n * `filter(node: Node, cleanup: boolean): boolean` method.\n * They also have `push` which is just like normal `Operator` push.\n * Not having a `fetch` means these `FilterOperator`'s cannot modify\n * `Node` `row`s or `relationship`s, but they shouldn't, they should just\n * filter.\n *\n * This `FilterOperator` abstraction enables much more efficient processing of\n * `fetch` for `where` clauses containing OR conditions.\n *\n * See https://github.com/rocicorp/mono/pull/4339\n */\n\nexport interface FilterInput extends InputBase {\n /** Tell the input where to send its output. 
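The `addPrimaryKeys` helper just above is what makes every ordering total: any primary-key column the caller's `orderBy` does not already name is appended ascending. A standalone restatement with a concrete input/output pair (sketch; the table shape is illustrative):

```ts
// Sketch mirroring addPrimaryKeys above: append missing primary-key
// columns, ascending, so the resulting ordering is total and stable.
type Ordering = readonly (readonly [string, 'asc' | 'desc'])[];

function withPrimaryKeys(
  primaryKey: readonly string[],
  orderBy: Ordering = [],
): Ordering {
  const toAdd = new Set(primaryKey);
  for (const [field] of orderBy) {
    toAdd.delete(field);
  }
  if (toAdd.size === 0) return orderBy;
  return [...orderBy, ...[...toAdd].map(k => [k, 'asc'] as const)];
}

// e.g. ordering a table keyed by `id` on createdAt:
console.log(withPrimaryKeys(['id'], [['createdAt', 'desc']]));
// => [['createdAt', 'desc'], ['id', 'asc']]
```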
*/\n setFilterOutput(output: FilterOutput): void;\n}\n\nexport interface FilterOutput extends Output {\n filter(node: Node, cleanup: boolean): boolean;\n}\n\nexport interface FilterOperator extends FilterInput, FilterOutput {}\n\n/**\n * An implementation of FilterOutput that throws if push or filter is called.\n * It is used as the initial value for for an operator's output before it is\n * set.\n */\nexport const throwFilterOutput: FilterOutput = {\n push(_change: Change): void {\n throw new Error('Output not set');\n },\n\n filter(_node: Node, _cleanup): boolean {\n throw new Error('Output not set');\n },\n};\n\nexport class FilterStart implements FilterInput, Output {\n readonly #input: Input;\n #output: FilterOutput = throwFilterOutput;\n\n constructor(input: Input) {\n this.#input = input;\n input.setOutput(this);\n }\n\n setFilterOutput(output: FilterOutput) {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n push(change: Change) {\n this.#output.push(change, this);\n }\n\n *fetch(req: FetchRequest): Stream<Node> {\n for (const node of this.#input.fetch(req)) {\n if (this.#output.filter(node, false)) {\n yield node;\n }\n }\n }\n\n *cleanup(req: FetchRequest): Stream<Node> {\n for (const node of this.#input.cleanup(req)) {\n if (this.#output.filter(node, true)) {\n yield node;\n } else {\n drainStreams(node);\n }\n }\n }\n}\n\nexport class FilterEnd implements Input, FilterOutput {\n readonly #start: FilterStart;\n readonly #input: FilterInput;\n\n #output: Output = throwFilterOutput;\n\n constructor(start: FilterStart, input: FilterInput) {\n this.#start = start;\n this.#input = input;\n input.setFilterOutput(this);\n }\n\n *fetch(req: FetchRequest): Stream<Node> {\n for (const node of this.#start.fetch(req)) {\n yield node;\n }\n }\n\n *cleanup(req: FetchRequest): Stream<Node> {\n for (const node of this.#start.cleanup(req)) {\n yield node;\n }\n }\n\n filter(_node: Node, _cleanup: boolean) {\n return true;\n }\n\n setOutput(output: Output) {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n push(change: Change) {\n this.#output.push(change, this);\n }\n}\n\nexport function buildFilterPipeline(\n input: Input,\n delegate: BuilderDelegate,\n pipeline: (filterInput: FilterInput) => FilterInput,\n): Input {\n const filterStart = new FilterStart(input);\n delegate.addEdge(input, filterStart);\n const middle = pipeline(filterStart);\n delegate.addEdge(filterStart, middle);\n const filterEnd = new FilterEnd(filterStart, middle);\n delegate.addEdge(middle, filterEnd);\n return filterEnd;\n}\n", "import type {JSONValue} from '../../../shared/src/json.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * Input to an operator.\n */\nexport interface InputBase {\n /** The schema of the data this input returns. */\n getSchema(): SourceSchema;\n\n /**\n * Completely destroy the input. Destroying an input\n * causes it to call destroy on its upstreams, fully\n * cleaning up a pipeline.\n */\n destroy(): void;\n}\n\nexport interface Input extends InputBase {\n /** Tell the input where to send its output. 
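To make the `FilterInput`/`FilterOutput` contract above concrete, here is a minimal predicate operator in the same shape, written against simplified stand-in types (a sketch, not the package's interfaces; the real `Filter` operator appears later in this hunk):

```ts
// Simplified stand-ins for the interfaces above (sketch, not the real types).
interface RowNode {
  row: Record<string, unknown>;
}
interface MiniFilterOutput {
  filter(node: RowNode, cleanup: boolean): boolean;
}

// A minimal predicate FilterOperator body: consult downstream only if
// the local predicate accepts the node.
class MiniFilter {
  #out: MiniFilterOutput = {
    filter() {
      throw new Error('Output not set'); // same guard as throwFilterOutput
    },
  };
  constructor(readonly predicate: (row: RowNode['row']) => boolean) {}
  setFilterOutput(out: MiniFilterOutput): void {
    this.#out = out;
  }
  filter(node: RowNode, cleanup: boolean): boolean {
    return this.predicate(node.row) && this.#out.filter(node, cleanup);
  }
}

const adults = new MiniFilter(row => (row.age as number) > 18);
adults.setFilterOutput({filter: () => true}); // terminal output: accept all
console.log(adults.filter({row: {age: 30}}, false)); // true
console.log(adults.filter({row: {age: 10}}, false)); // false
```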
*/\n setOutput(output: Output): void;\n\n /**\n * Fetch data. May modify the data in place.\n * Returns nodes sorted in order of `SourceSchema.compareRows`.\n */\n fetch(req: FetchRequest): Stream<Node>;\n\n /**\n * Cleanup maintained state. This is called when `output` will no longer need\n * the data returned by {@linkcode fetch}. The receiving operator should clean up any\n * resources it has allocated to service such requests.\n *\n * This is different from {@linkcode destroy} which means this input will no longer\n * be called at all, for any input.\n *\n * Returns the same thing as {@linkcode fetch}. This allows callers to properly\n * propagate the cleanup message through the graph.\n */\n cleanup(req: FetchRequest): Stream<Node>;\n}\n\nexport type FetchRequest = {\n readonly constraint?: Constraint | undefined;\n /** If supplied, `start.row` must have previously been output by fetch or push. */\n readonly start?: Start | undefined;\n\n /** Whether to fetch in reverse order of the SourceSchema's sort. */\n readonly reverse?: boolean | undefined;\n};\n\nexport type Start = {\n readonly row: Row;\n readonly basis: 'at' | 'after';\n};\n\n/**\n * An output for an operator. Typically another Operator but can also be\n * the code running the pipeline.\n */\nexport interface Output {\n /**\n * Push incremental changes to data previously received with fetch().\n * Consumers must apply all pushed changes or incremental result will\n * be incorrect.\n * Callers must maintain some invariants for correct operation:\n * - Only add rows which do not already exist (by deep equality).\n * - Only remove rows which do exist (by deep equality).\n */\n push(change: Change, pusher: InputBase): void;\n}\n\n/**\n * An implementation of Output that throws if pushed to. It is used as the\n * initial value for for an operator's output before it is set.\n */\nexport const throwOutput: Output = {\n push(_change: Change): void {\n throw new Error('Output not set');\n },\n};\n\n/**\n * Operators are arranged into pipelines.\n * They are stateful.\n * Each operator is an input to the next operator in the chain and an output\n * to the previous.\n */\nexport interface Operator extends Input, Output {}\n\n/**\n * Operators get access to storage that they can store their internal\n * state in.\n */\nexport interface Storage {\n set(key: string, value: JSONValue): void;\n get(key: string, def?: JSONValue): JSONValue | undefined;\n /**\n * If options is not specified, defaults to scanning all entries.\n */\n scan(options?: {prefix: string}): Stream<[string, JSONValue]>;\n del(key: string): void;\n}\n", "/**\n * streams are lazy forward-only iterables.\n * Once a stream reaches the end it can't be restarted.\n * They are iterable, not iterator, so that they can be used in for-each,\n * and so that we know when consumer has stopped iterating the stream. 
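The cleanup guarantee described in the stream comment here rests on standard generator semantics, shown below in isolation (plain TypeScript, independent of this package):

```ts
// Sketch: why streams are consumed as iterables — breaking out of a
// for..of calls the generator's return(), which runs `finally`, the
// natural place for a source to release resources such as a prepared
// SQL statement.
function* numbers() {
  try {
    for (let i = 0; ; i++) {
      yield i;
    }
  } finally {
    console.log('consumer stopped early; release resources here');
  }
}

for (const n of numbers()) {
  if (n === 2) break; // triggers the finally block above
}
```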
This allows us\n * to clean up resources like sql statements.\n */\nexport type Stream<T> = Iterable<T>;\n\nexport function* take<T>(stream: Stream<T>, limit: number): Stream<T> {\n if (limit < 1) {\n return;\n }\n let count = 0;\n for (const v of stream) {\n yield v;\n if (++count === limit) {\n break;\n }\n }\n}\n\nexport function first<T>(stream: Stream<T>): T | undefined {\n const it = stream[Symbol.iterator]();\n const {value} = it.next();\n it.return?.();\n return value;\n}\n", "import {areEqual} from '../../../shared/src/arrays.ts';\nimport {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport type {CompoundKey} from '../../../zero-protocol/src/ast.ts';\nimport {type Change} from './change.ts';\nimport {normalizeUndefined, type Node, type NormalizedValue} from './data.ts';\nimport {\n throwFilterOutput,\n type FilterInput,\n type FilterOperator,\n type FilterOutput,\n} from './filter-operators.ts';\nimport {type Storage} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first} from './stream.ts';\n\ntype SizeStorageKeyPrefix = `row/${string}/`;\n/**\n * Key is of format\n * `row/${JSON.stringify(parentJoinKeyValues)}/${JSON.stringify(primaryKeyValues)}`\n * This format allows us to look up an existing cached size for a given set of\n * `parentJoinKeyValues` by scanning for prefix\n * `row/${JSON.stringify(parentJoinKeyValues)}/` and using the first result, and\n * to look up the cached size for a specific row by the full key.\n * If the parent join and primary key are the same, then format is changed to\n * `row//${JSON.stringify(primaryKeyValues)}` to shorten the key, since there\n * is no point in looking up an existing cached size by\n * `parentJoinKeyValues` if the specific rows cached size is missing.\n */\ntype SizeStorageKey = `${SizeStorageKeyPrefix}${string}`;\n\ninterface ExistsStorage {\n get(key: SizeStorageKey): number | undefined;\n set(key: SizeStorageKey, value: number): void;\n del(key: SizeStorageKey): void;\n scan({prefix}: {prefix: SizeStorageKeyPrefix}): Iterable<[string, number]>;\n}\n\n/**\n * The Exists operator filters data based on whether or not a relationship is\n * non-empty.\n */\nexport class Exists implements FilterOperator {\n readonly #input: FilterInput;\n readonly #relationshipName: string;\n readonly #storage: ExistsStorage;\n readonly #not: boolean;\n readonly #parentJoinKey: CompoundKey;\n readonly #noSizeReuse: boolean;\n\n #output: FilterOutput = throwFilterOutput;\n\n /**\n * This instance variable is `true` when this operator is processing a `push`,\n * and is used to disable reuse of cached sizes across rows with the\n * same parent join key value.\n * This is necessary because during a push relationships can be inconsistent\n * due to push communicating changes (which may change multiple Nodes) one\n * Node at a time.\n */\n #inPush = false;\n\n constructor(\n input: FilterInput,\n storage: Storage,\n relationshipName: string,\n parentJoinKey: CompoundKey,\n type: 'EXISTS' | 'NOT EXISTS',\n ) {\n this.#input = input;\n this.#relationshipName = relationshipName;\n this.#input.setFilterOutput(this);\n this.#storage = storage as ExistsStorage;\n assert(\n this.#input.getSchema().relationships[relationshipName],\n `Input schema missing ${relationshipName}`,\n );\n this.#not = type === 'NOT EXISTS';\n this.#parentJoinKey = parentJoinKey;\n\n // If the parentJoinKey is the primary key, no sense in trying to reuse.\n this.#noSizeReuse = areEqual(\n parentJoinKey,\n this.#input.getSchema().primaryKey,\n 
);\n }\n\n setFilterOutput(output: FilterOutput): void {\n this.#output = output;\n }\n\n filter(node: Node, cleanup: boolean): boolean {\n const result = this.#filter(node) && this.#output.filter(node, cleanup);\n if (cleanup) {\n this.#delSize(node);\n }\n return result;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n push(change: Change) {\n assert(!this.#inPush, 'Unexpected re-entrancy');\n this.#inPush = true;\n try {\n switch (change.type) {\n // add, remove and edit cannot change the size of the\n // this.#relationshipName relationship, so simply #pushWithFilter\n case 'add':\n case 'edit': {\n this.#pushWithFilter(change);\n return;\n }\n case 'remove': {\n const size = this.#getSize(change.node);\n // If size is undefined, this operator has not output\n // this row before and so it is unnecessary to output a remove for\n // it.\n if (size === undefined) {\n return;\n }\n this.#pushWithFilter(change, size);\n this.#delSize(change.node);\n return;\n }\n case 'child':\n // Only add and remove child changes for the\n // this.#relationshipName relationship, can change the size\n // of the this.#relationshipName relationship, for other\n // child changes simply #pushWithFilter\n if (\n change.child.relationshipName !== this.#relationshipName ||\n change.child.change.type === 'edit' ||\n change.child.change.type === 'child'\n ) {\n this.#pushWithFilter(change);\n return;\n }\n switch (change.child.change.type) {\n case 'add': {\n let size = this.#getSize(change.node);\n if (size !== undefined) {\n size++;\n this.#setSize(change.node, size);\n } else {\n size = this.#fetchSize(change.node);\n }\n if (size === 1) {\n if (this.#not) {\n // Since the add child change currently being processed is not\n // pushed to output, the added child needs to be excluded from\n // the remove being pushed to output (since the child has\n // never been added to the output).\n this.#output.push(\n {\n type: 'remove',\n node: {\n row: change.node.row,\n relationships: {\n ...change.node.relationships,\n [this.#relationshipName]: () => [],\n },\n },\n },\n this,\n );\n } else {\n this.#output.push(\n {\n type: 'add',\n node: change.node,\n },\n this,\n );\n }\n } else {\n this.#pushWithFilter(change, size);\n }\n return;\n }\n case 'remove': {\n let size = this.#getSize(change.node);\n if (size !== undefined) {\n assert(size > 0);\n size--;\n this.#setSize(change.node, size);\n } else {\n size = this.#fetchSize(change.node);\n }\n if (size === 0) {\n if (this.#not) {\n this.#output.push(\n {\n type: 'add',\n node: change.node,\n },\n this,\n );\n } else {\n // Since the remove child change currently being processed is\n // not pushed to output, the removed child needs to be added to\n // the remove being pushed to output.\n this.#output.push(\n {\n type: 'remove',\n node: {\n row: change.node.row,\n relationships: {\n ...change.node.relationships,\n [this.#relationshipName]: () => [\n change.child.change.node,\n ],\n },\n },\n },\n this,\n );\n }\n } else {\n this.#pushWithFilter(change, size);\n }\n return;\n }\n }\n return;\n default:\n unreachable(change);\n }\n } finally {\n this.#inPush = false;\n }\n }\n\n /**\n * Returns whether or not the node's this.#relationshipName\n * relationship passes the exist/not exists filter condition.\n * If the optional `size` is passed it is used.\n * Otherwise, if there is a stored size for the row it is used.\n * Otherwise the size is computed by streaming the node's\n * relationship with 
this.#relationshipName (this computed size is also\n * stored).\n */\n #filter(node: Node, size?: number): boolean {\n const exists = (size ?? this.#getOrFetchSize(node)) > 0;\n return this.#not ? !exists : exists;\n }\n\n /**\n * Pushes a change if this.#filter is true for its row.\n */\n #pushWithFilter(change: Change, size?: number): void {\n if (this.#filter(change.node, size)) {\n this.#output.push(change, this);\n }\n }\n\n #getSize(node: Node): number | undefined {\n return this.#storage.get(this.#makeSizeStorageKey(node));\n }\n\n #setSize(node: Node, size: number) {\n this.#storage.set(this.#makeSizeStorageKey(node), size);\n }\n\n #delSize(node: Node) {\n this.#storage.del(this.#makeSizeStorageKey(node));\n }\n\n #getOrFetchSize(node: Node): number {\n const size = this.#getSize(node);\n if (size !== undefined) {\n return size;\n }\n return this.#fetchSize(node);\n }\n\n #fetchSize(node: Node): number {\n if (!this.#noSizeReuse && !this.#inPush) {\n const cachedSizeEntry = first(\n this.#storage.scan({\n prefix: this.#makeSizeStorageKeyPrefix(node),\n }),\n );\n if (cachedSizeEntry !== undefined) {\n this.#setSize(node, cachedSizeEntry[1]);\n return cachedSizeEntry[1];\n }\n }\n\n const relationship = node.relationships[this.#relationshipName];\n assert(relationship);\n let size = 0;\n for (const _relatedNode of relationship()) {\n size++;\n }\n\n this.#setSize(node, size);\n return size;\n }\n\n #makeSizeStorageKeyPrefix(node: Node): SizeStorageKeyPrefix {\n return `row/${\n this.#noSizeReuse\n ? ''\n : JSON.stringify(this.#getKeyValues(node, this.#parentJoinKey))\n }/`;\n }\n\n #makeSizeStorageKey(node: Node): SizeStorageKey {\n return `${this.#makeSizeStorageKeyPrefix(node)}${JSON.stringify(\n this.#getKeyValues(node, this.#input.getSchema().primaryKey),\n )}`;\n }\n\n #getKeyValues(node: Node, def: CompoundKey): NormalizedValue[] {\n const values: NormalizedValue[] = [];\n for (const key of def) {\n values.push(normalizeUndefined(node.row[key]));\n }\n return values;\n }\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport {emptyArray} from '../../../shared/src/sentinels.ts';\nimport type {Change} from './change.ts';\nimport type {Node} from './data.ts';\nimport type {InputBase, Output} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * # pushAccumulatedChanges\n *\n * Pushes the changes that were accumulated by\n * [fan-out, fan-in] or [ufo, ufi] sub-graphs.\n *\n * This function is called at the end of the sub-graph.\n *\n * The sub-graphs represents `OR`s.\n *\n * Changes that can enter the subgraphs:\n * 1. child (due to exist joins being above the sub-graph)\n * 2. add\n * 3. remove\n * 4. 
edit\n *\n * # Changes that can exit into `pushAccumulatedChanges`:\n *\n * ## Child\n * If a `child` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `child` change\n * - stop the `child` change (e.g., filter)\n * - convert it to an `add` or `remove` (e.g., exists filter)\n *\n * ## Add\n * If an `add` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `add` change\n * - hide the change (e.g., filter)\n *\n * ## Remove\n * If a `remove` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `remove` change\n * - hide the change (e.g., filter)\n *\n * ## Edit\n * If an `edit` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `edit` change\n * - convert it to an `add` (e.g., filter where old didn't match but new does)\n * - convert it to a `remove` (e.g., filter where old matched but new doesn't)\n *\n * This results in some invariants:\n * - an add coming in will only create adds coming out\n * - a remove coming in will only create removes coming out\n * - an edit coming in can create adds, removes, and edits coming out\n * - a child coming in can create adds, removes, and children coming out\n *\n * # Return of `pushAccumulatedChanges`\n *\n * This function will only push a single change.\n * Given the above invariants, how is this possible?\n *\n * An add that becomes many `adds` results in a single add\n * as the `add` is the same row across all adds. Branches do not change the row.\n *\n * A remove that becomes many `removes` results in a single remove\n * for the same reason.\n *\n * If a child enters and exits, it takes precedence over all other changes.\n * If a child enters and is converted only to add and remove it exits as an edit.\n * If a child enters and is converted to only add or only remove, it exits as that change.\n *\n * If an edit enters and is converted to add and remove it exits as an edit.\n * If an edit enters and is converted to only add or only remove, it exits as that change.\n * If an edit enters and exits as edits only, it exits as a single edit.\n */\nexport function pushAccumulatedChanges(\n accumulatedPushes: Change[],\n output: Output,\n pusher: InputBase,\n fanOutChangeType: Change['type'],\n mergeRelationships: (existing: Change, incoming: Change) => Change,\n addEmptyRelationships: (change: Change) => Change,\n) {\n if (accumulatedPushes.length === 0) {\n // It is possible for no forks to pass along the push.\n // E.g., if no filters match in any fork.\n return;\n }\n\n // collapse down to a single change per type\n const candidatesToPush = new Map<Change['type'], Change>();\n for (const change of accumulatedPushes) {\n if (fanOutChangeType === 'child' && change.type !== 'child') {\n assert(\n candidatesToPush.has(change.type) === false,\n () =>\n `Fan-in:child expected at most one ${change.type} when fan-out is of type child`,\n );\n }\n\n const existing = candidatesToPush.get(change.type);\n let mergedChange = change;\n if (existing) {\n // merge in relationships\n mergedChange = mergeRelationships(existing, change);\n }\n candidatesToPush.set(change.type, mergedChange);\n }\n\n accumulatedPushes.length = 0;\n\n const types = [...candidatesToPush.keys()];\n /**\n * Based on the received `fanOutChangeType` only certain output types are valid.\n *\n * - remove must result in all removes\n * - add must result in all adds\n * - edit must result 
in add or removes or edits\n * - child must result in a single add or single remove or many child changes\n * - Single add or remove because the relationship will be unique to one exist check within the fan-out,fan-in sub-graph\n * - Many child changes because other operators may preserve the child change\n */\n switch (fanOutChangeType) {\n case 'remove':\n assert(\n types.length === 1 && types[0] === 'remove',\n 'Fan-in:remove expected all removes',\n );\n output.push(\n addEmptyRelationships(must(candidatesToPush.get('remove'))),\n pusher,\n );\n return;\n case 'add':\n assert(\n types.length === 1 && types[0] === 'add',\n 'Fan-in:add expected all adds',\n );\n output.push(\n addEmptyRelationships(must(candidatesToPush.get('add'))),\n pusher,\n );\n return;\n case 'edit': {\n assert(\n types.every(\n type => type === 'add' || type === 'remove' || type === 'edit',\n ),\n 'Fan-in:edit expected all adds, removes, or edits',\n );\n const addChange = candidatesToPush.get('add');\n const removeChange = candidatesToPush.get('remove');\n let editChange = candidatesToPush.get('edit');\n\n // If an `edit` is present, it supersedes `add` and `remove`\n // as it semantically represents both.\n if (editChange) {\n if (addChange) {\n editChange = mergeRelationships(editChange, addChange);\n }\n if (removeChange) {\n editChange = mergeRelationships(editChange, removeChange);\n }\n output.push(addEmptyRelationships(editChange), pusher);\n return;\n }\n\n // If `edit` didn't make it through but both `add` and `remove` did,\n // convert back to an edit.\n //\n // When can this happen?\n //\n // EDIT old: a=1, new: a=2\n // |\n // FanOut\n // / \\\n // a=1 a=2\n // | |\n // remove add\n // \\ /\n // FanIn\n //\n // The left filter converts the edit into a remove.\n // The right filter converts the edit into an add.\n if (addChange && removeChange) {\n output.push(\n addEmptyRelationships({\n type: 'edit',\n node: addChange.node,\n oldNode: removeChange.node,\n } as const),\n pusher,\n );\n return;\n }\n\n output.push(\n addEmptyRelationships(must(addChange ?? removeChange)),\n pusher,\n );\n return;\n }\n case 'child': {\n assert(\n types.every(\n type =>\n type === 'add' || // exists can change child to add or remove\n type === 'remove' || // exists can change child to add or remove\n type === 'child', // other operators may preserve the child change\n ),\n 'Fan-in:child expected all adds, removes, or children',\n );\n assert(\n types.length <= 2,\n 'Fan-in:child expected at most 2 types on a child change from fan-out',\n );\n\n // If any branch preserved the original child change, that takes precedence over all other changes.\n const childChange = candidatesToPush.get('child');\n if (childChange) {\n output.push(childChange, pusher);\n return;\n }\n\n const addChange = candidatesToPush.get('add');\n const removeChange = candidatesToPush.get('remove');\n\n assert(\n addChange === undefined || removeChange === undefined,\n 'Fan-in:child expected either add or remove, not both',\n );\n\n output.push(\n addEmptyRelationships(must(addChange ?? 
removeChange)),\n pusher,\n );\n return;\n }\n default:\n fanOutChangeType satisfies never;\n }\n}\n\n/**\n * Puts relationships from `right` into `left` if they don't already exist in `left`.\n */\nexport function mergeRelationships(left: Change, right: Change): Change {\n // change types will always match\n // unless we have an edit on the left\n // then the right could be edit, add, or remove\n if (left.type === right.type) {\n switch (left.type) {\n case 'add': {\n return {\n type: 'add',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n };\n }\n case 'remove': {\n return {\n type: 'remove',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n };\n }\n case 'edit': {\n assert(right.type === 'edit');\n // merge edits into a single edit\n return {\n type: 'edit',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n oldNode: {\n row: left.oldNode.row,\n relationships: {\n ...right.oldNode.relationships,\n ...left.oldNode.relationships,\n },\n },\n };\n }\n }\n }\n\n // left is always an edit here\n assert(left.type === 'edit');\n switch (right.type) {\n case 'add': {\n return {\n type: 'edit',\n node: {\n ...left.node,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n oldNode: left.oldNode,\n };\n }\n case 'remove': {\n return {\n type: 'edit',\n node: left.node,\n oldNode: {\n ...left.oldNode,\n relationships: {\n ...right.node.relationships,\n ...left.oldNode.relationships,\n },\n },\n };\n }\n }\n\n unreachable();\n}\n\nexport function makeAddEmptyRelationships(\n schema: SourceSchema,\n): (change: Change) => Change {\n return (change: Change): Change => {\n if (Object.keys(schema.relationships).length === 0) {\n return change;\n }\n\n switch (change.type) {\n case 'add':\n case 'remove': {\n const ret = {\n ...change,\n node: {\n ...change.node,\n relationships: {\n ...change.node.relationships,\n },\n },\n };\n\n mergeEmpty(ret.node.relationships, Object.keys(schema.relationships));\n\n return ret;\n }\n case 'edit': {\n const ret = {\n ...change,\n node: {\n ...change.node,\n relationships: {\n ...change.node.relationships,\n },\n },\n oldNode: {\n ...change.oldNode,\n relationships: {\n ...change.oldNode.relationships,\n },\n },\n };\n\n mergeEmpty(ret.node.relationships, Object.keys(schema.relationships));\n mergeEmpty(\n ret.oldNode.relationships,\n Object.keys(schema.relationships),\n );\n\n return ret;\n }\n case 'child':\n return change; // children only have relationships along the path to the change\n }\n };\n}\n\n/**\n * For each relationship in `schema` that does not exist\n * in `relationships`, add it with an empty stream.\n *\n * This modifies the `relationships` object in place.\n */\nexport function mergeEmpty(\n relationships: Record<string, () => Stream<Node>>,\n relationshipNames: string[],\n) {\n for (const relName of relationshipNames) {\n if (relationships[relName] === undefined) {\n relationships[relName] = () => emptyArray;\n }\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport {identity} from '../../../shared/src/sentinels.ts';\nimport type {Change} from './change.ts';\nimport {type Node} from './data.ts';\nimport type {FanOut} from './fan-out.ts';\nimport {\n throwFilterOutput,\n type FilterInput,\n type FilterOperator,\n type FilterOutput,\n} from './filter-operators.ts';\nimport 
{pushAccumulatedChanges} from './push-accumulated.ts';\nimport type {SourceSchema} from './schema.ts';\n\n/**\n * The FanIn operator merges multiple streams into one.\n * It eliminates duplicates and must be paired with a fan-out operator\n * somewhere upstream of the fan-in.\n *\n * issue\n * |\n * fan-out\n * / \\\n * a b\n * \\ /\n * fan-in\n * |\n */\nexport class FanIn implements FilterOperator {\n readonly #inputs: readonly FilterInput[];\n readonly #schema: SourceSchema;\n #output: FilterOutput = throwFilterOutput;\n #accumulatedPushes: Change[] = [];\n\n constructor(fanOut: FanOut, inputs: FilterInput[]) {\n this.#inputs = inputs;\n this.#schema = fanOut.getSchema();\n for (const input of inputs) {\n input.setFilterOutput(this);\n assert(this.#schema === input.getSchema(), `Schema mismatch in fan-in`);\n }\n }\n\n setFilterOutput(output: FilterOutput): void {\n this.#output = output;\n }\n\n destroy(): void {\n for (const input of this.#inputs) {\n input.destroy();\n }\n }\n\n getSchema() {\n return this.#schema;\n }\n\n filter(node: Node, cleanup: boolean): boolean {\n return this.#output.filter(node, cleanup);\n }\n\n push(change: Change) {\n this.#accumulatedPushes.push(change);\n }\n\n fanOutDonePushingToAllBranches(fanOutChangeType: Change['type']) {\n if (this.#inputs.length === 0) {\n assert(\n this.#accumulatedPushes.length === 0,\n 'If there are no inputs then fan-in should not receive any pushes.',\n );\n return;\n }\n\n pushAccumulatedChanges(\n this.#accumulatedPushes,\n this.#output,\n this,\n fanOutChangeType,\n identity,\n identity,\n );\n }\n}\n", "import {must} from '../../../shared/src/must.ts';\nimport type {Change} from './change.ts';\nimport type {FanIn} from './fan-in.ts';\nimport type {Node} from './data.ts';\nimport type {\n FilterInput,\n FilterOperator,\n FilterOutput,\n} from './filter-operators.ts';\n\n/**\n * Forks a stream into multiple streams.\n * Is meant to be paired with a `FanIn` operator which will\n * later merge the forks back together.\n */\nexport class FanOut implements FilterOperator {\n readonly #input: FilterInput;\n readonly #outputs: FilterOutput[] = [];\n #fanIn: FanIn | undefined;\n #destroyCount: number = 0;\n\n constructor(input: FilterInput) {\n this.#input = input;\n input.setFilterOutput(this);\n }\n\n setFanIn(fanIn: FanIn) {\n this.#fanIn = fanIn;\n }\n\n setFilterOutput(output: FilterOutput): void {\n this.#outputs.push(output);\n }\n\n destroy(): void {\n if (this.#destroyCount < this.#outputs.length) {\n ++this.#destroyCount;\n if (this.#destroyCount === this.#outputs.length) {\n this.#input.destroy();\n }\n } else {\n throw new Error('FanOut already destroyed once for each output');\n }\n }\n\n getSchema() {\n return this.#input.getSchema();\n }\n\n filter(node: Node, cleanup: boolean): boolean {\n let result = false;\n for (const output of this.#outputs) {\n result = output.filter(node, cleanup) || result;\n // Cleanup needs to be forwarded to all outputs, don't short circuit\n // cleanup. 
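`FanOut` pushes each change to every branch and then tells `FanIn` to flush; the classic case being reassembled is the `EDIT old: a=1, new: a=2` diagram earlier in this hunk, where one branch emits a remove and the other an add. A toy recombination over simplified change shapes (a sketch, not the package's `Change` type):

```ts
// Sketch of the fan-in recombination rule for an 'edit' entering fan-out.
type MiniChange =
  | {type: 'add'; row: unknown}
  | {type: 'remove'; row: unknown}
  | {type: 'edit'; row: unknown; oldRow: unknown};

function recombineEdit(branchOutputs: MiniChange[]): MiniChange {
  const add = branchOutputs.find(c => c.type === 'add');
  const remove = branchOutputs.find(c => c.type === 'remove');
  const edit = branchOutputs.find(c => c.type === 'edit');
  if (edit) return edit; // an edit supersedes add/remove: it represents both
  if (add && remove) {
    // One branch saw the old row leave, the other saw the new row
    // arrive: together that is still an edit.
    return {type: 'edit', row: add.row, oldRow: remove.row};
  }
  const only = add ?? remove;
  if (!only) throw new Error('no branch passed the change along');
  return only;
}

console.log(
  recombineEdit([
    {type: 'remove', row: {a: 1}},
    {type: 'add', row: {a: 2}},
  ]),
); // => {type: 'edit', row: {a: 2}, oldRow: {a: 1}}
```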
For non-cleanup we can short-circuit on first true.\n if (!cleanup && result) {\n return true;\n }\n }\n return result;\n }\n\n push(change: Change) {\n for (const out of this.#outputs) {\n out.push(change, this);\n }\n must(\n this.#fanIn,\n 'fan-out must have a corresponding fan-in set!',\n ).fanOutDonePushingToAllBranches(change.type);\n }\n}\n", "import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {EditChange} from './change.ts';\nimport type {InputBase, Output} from './operator.ts';\n\n/**\n * This takes an {@linkcode EditChange} and a predicate that determines if a row\n * should be present based on the row's data. It then splits the change and\n * pushes the appropriate changes to the output based on the predicate.\n */\nexport function maybeSplitAndPushEditChange(\n change: EditChange,\n predicate: (row: Row) => boolean,\n output: Output,\n pusher: InputBase,\n) {\n const oldWasPresent = predicate(change.oldNode.row);\n const newIsPresent = predicate(change.node.row);\n\n if (oldWasPresent && newIsPresent) {\n output.push(change, pusher);\n } else if (oldWasPresent && !newIsPresent) {\n output.push(\n {\n type: 'remove',\n node: change.oldNode,\n },\n pusher,\n );\n } else if (!oldWasPresent && newIsPresent) {\n output.push(\n {\n type: 'add',\n node: change.node,\n },\n pusher,\n );\n }\n}\n", "import {unreachable} from '../../../shared/src/asserts.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport {maybeSplitAndPushEditChange} from './maybe-split-and-push-edit-change.ts';\nimport type {InputBase, Output} from './operator.ts';\n\nexport function filterPush(\n change: Change,\n output: Output,\n pusher: InputBase,\n predicate?: ((row: Row) => boolean) | undefined,\n) {\n if (!predicate) {\n output.push(change, pusher);\n return;\n }\n switch (change.type) {\n case 'add':\n case 'remove':\n if (predicate(change.node.row)) {\n output.push(change, pusher);\n }\n break;\n case 'child':\n if (predicate(change.node.row)) {\n output.push(change, pusher);\n }\n break;\n case 'edit':\n maybeSplitAndPushEditChange(change, predicate, output, pusher);\n break;\n default:\n unreachable(change);\n }\n}\n", "import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport {\n throwFilterOutput,\n type FilterInput,\n type FilterOperator,\n type FilterOutput,\n} from './filter-operators.ts';\nimport {filterPush} from './filter-push.ts';\nimport {type Node} from './data.ts';\nimport type {SourceSchema} from './schema.ts';\n\n/**\n * The Filter operator filters data through a predicate. 
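(Pushes are routed through filterPush, which splits an edit into a plain add or remove when the old and new rows disagree on the predicate.)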
It is stateless.\n *\n * The predicate must be pure.\n */\nexport class Filter implements FilterOperator {\n readonly #input: FilterInput;\n readonly #predicate: (row: Row) => boolean;\n\n #output: FilterOutput = throwFilterOutput;\n\n constructor(input: FilterInput, predicate: (row: Row) => boolean) {\n this.#input = input;\n this.#predicate = predicate;\n input.setFilterOutput(this);\n }\n\n filter(node: Node, cleanup: boolean): boolean {\n return this.#predicate(node.row) && this.#output.filter(node, cleanup);\n }\n\n setFilterOutput(output: FilterOutput) {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n push(change: Change) {\n filterPush(change, this.#output, this, this.#predicate);\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport {stringCompare} from '../../../shared/src/string-compare.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {\n Condition,\n SimpleCondition,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport {valuesEqual} from './data.ts';\n\nexport type Constraint = {\n readonly [key: string]: Value;\n};\n\nexport function constraintMatchesRow(\n constraint: Constraint,\n row: Row,\n): boolean {\n for (const key in constraint) {\n if (!valuesEqual(row[key], constraint[key])) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Constraints are compatible if:\n * 1. They do not have any keys in common\n * 2. They have keys in common, but the values for those keys are equal\n */\nexport function constraintsAreCompatible(\n left: Constraint,\n right: Constraint,\n): boolean {\n for (const key in left) {\n if (key in right && !valuesEqual(left[key], right[key])) {\n return false;\n }\n }\n return true;\n}\n\nexport function constraintMatchesPrimaryKey(\n constraint: Constraint,\n primary: PrimaryKey,\n): boolean {\n const constraintKeys = Object.keys(constraint);\n\n if (constraintKeys.length !== primary.length) {\n return false;\n }\n\n // Primary key is always sorted\n // Constraint does not have to be sorted\n constraintKeys.sort(stringCompare);\n\n for (let i = 0; i < constraintKeys.length; i++) {\n if (constraintKeys[i] !== primary[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Pulls top level `and` components out of a condition tree.\n * The resulting array of simple conditions would match a superset of\n * values that the original condition would match.\n *\n * Examples:\n * a AND b OR c\n *\n * In this case we cannot pull anything because the `or` is at the top level.\n *\n * a AND b AND c\n * We can pull all three.\n *\n * a AND (b OR c)\n * We can only pull `a`.\n */\nexport function pullSimpleAndComponents(\n condition: Condition,\n): SimpleCondition[] {\n if (condition.type === 'and') {\n return condition.conditions.flatMap(pullSimpleAndComponents);\n }\n\n if (condition.type === 'simple') {\n return [condition];\n }\n\n if (condition.type === 'or' && condition.conditions.length === 1) {\n return pullSimpleAndComponents(condition.conditions[0]);\n }\n\n return [];\n}\n\n/**\n * Checks if the supplied filters constitute a primary key lookup.\n * If so, returns the constraint that would be used to look up the primary key.\n * If not, returns undefined.\n */\nexport function primaryKeyConstraintFromFilters(\n condition: Condition | undefined,\n 
primary: PrimaryKey,\n): Constraint | undefined {\n if (condition === undefined) {\n return undefined;\n }\n\n const conditions = pullSimpleAndComponents(condition);\n if (conditions.length === 0) {\n return undefined;\n }\n\n const ret: Writable<Constraint> = {};\n for (const subCondition of conditions) {\n if (subCondition.op === '=') {\n const column = extractColumn(subCondition);\n if (column !== undefined) {\n if (!primary.includes(column.name)) {\n continue;\n }\n ret[column.name] = column.value;\n }\n }\n }\n\n if (Object.keys(ret).length !== primary.length) {\n return undefined;\n }\n\n return ret;\n}\n\nfunction extractColumn(\n condition: SimpleCondition,\n): {name: string; value: Value} | undefined {\n if (condition.left.type === 'column') {\n assert(condition.right.type === 'literal');\n return {name: condition.left.name, value: condition.right.value};\n }\n\n return undefined;\n}\n\ndeclare const TESTING: boolean;\n\nexport class SetOfConstraint {\n #data: Constraint[] = [];\n\n constructor() {\n // Only used in testing\n assert(TESTING);\n }\n\n #indexOf(value: Constraint): number {\n return this.#data.findIndex(v => constraintEquals(v, value));\n }\n\n has(value: Constraint): boolean {\n return this.#indexOf(value) !== -1;\n }\n\n add(value: Constraint): this {\n if (!this.has(value)) {\n this.#data.push(value);\n }\n return this;\n }\n}\n\nfunction constraintEquals(a: Constraint, b: Constraint): boolean {\n const aEntries = Object.entries(a);\n const bEntries = Object.entries(b);\n if (aEntries.length !== bEntries.length) {\n return false;\n }\n for (let i = 0; i < aEntries.length; i++) {\n if (\n aEntries[i][0] !== bEntries[i][0] ||\n !valuesEqual(aEntries[i][1], bEntries[i][1])\n ) {\n return false;\n }\n }\n return true;\n}\n", "import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\nimport {compareValues, valuesEqual, type Node} from './data.ts';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport type {CompoundKey} from '../../../zero-protocol/src/ast.ts';\n\nexport type JoinChangeOverlay = {\n change: Change;\n position: Row | undefined;\n};\n\nexport function* generateWithOverlay(\n stream: Stream<Node>,\n overlay: Change,\n schema: SourceSchema,\n): Stream<Node> {\n let applied = false;\n let editOldApplied = false;\n let editNewApplied = false;\n for (const node of stream) {\n let yieldNode = true;\n if (!applied) {\n switch (overlay.type) {\n case 'add': {\n if (schema.compareRows(overlay.node.row, node.row) === 0) {\n applied = true;\n yieldNode = false;\n }\n break;\n }\n case 'remove': {\n if (schema.compareRows(overlay.node.row, node.row) < 0) {\n applied = true;\n yield overlay.node;\n }\n break;\n }\n case 'edit': {\n if (\n !editOldApplied &&\n schema.compareRows(overlay.oldNode.row, node.row) < 0\n ) {\n editOldApplied = true;\n if (editNewApplied) {\n applied = true;\n }\n yield overlay.oldNode;\n }\n if (\n !editNewApplied &&\n schema.compareRows(overlay.node.row, node.row) === 0\n ) {\n editNewApplied = true;\n if (editOldApplied) {\n applied = true;\n }\n yieldNode = false;\n }\n break;\n }\n case 'child': {\n if (schema.compareRows(overlay.node.row, node.row) === 0) {\n applied = true;\n yield {\n row: node.row,\n relationships: {\n ...node.relationships,\n [overlay.child.relationshipName]: () =>\n generateWithOverlay(\n node.relationships[overlay.child.relationshipName](),\n overlay.child.change,\n 
schema.relationships[overlay.child.relationshipName],\n ),\n },\n };\n yieldNode = false;\n }\n break;\n }\n }\n }\n if (yieldNode) {\n yield node;\n }\n }\n if (!applied) {\n if (overlay.type === 'remove') {\n applied = true;\n yield overlay.node;\n } else if (overlay.type === 'edit') {\n assert(editNewApplied);\n editOldApplied = true;\n applied = true;\n yield overlay.oldNode;\n }\n }\n\n assert(applied);\n}\n\nexport function rowEqualsForCompoundKey(\n a: Row,\n b: Row,\n key: CompoundKey,\n): boolean {\n for (let i = 0; i < key.length; i++) {\n if (compareValues(a[key[i]], b[key[i]]) !== 0) {\n return false;\n }\n }\n return true;\n}\n\nexport function isJoinMatch(\n parent: Row,\n parentKey: CompoundKey,\n child: Row,\n childKey: CompoundKey,\n) {\n for (let i = 0; i < parentKey.length; i++) {\n if (!valuesEqual(parent[parentKey[i]], child[childKey[i]])) {\n return false;\n }\n }\n return true;\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {binarySearch} from '../../../shared/src/binary-search.ts';\nimport {emptyArray} from '../../../shared/src/sentinels.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {CompoundKey, System} from '../../../zero-protocol/src/ast.ts';\nimport type {Change} from './change.ts';\nimport {constraintsAreCompatible, type Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport {\n generateWithOverlay,\n isJoinMatch,\n rowEqualsForCompoundKey,\n type JoinChangeOverlay,\n} from './join-utils.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Output,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first, type Stream} from './stream.ts';\n\ntype Args = {\n parent: Input;\n child: Input;\n // The nth key in childKey corresponds to the nth key in parentKey.\n parentKey: CompoundKey;\n childKey: CompoundKey;\n\n relationshipName: string;\n hidden: boolean;\n system: System;\n};\n\n/**\n * An *inner* join which fetches nodes from its child input first and then\n * fetches their related nodes from its parent input. Output nodes are the\n * nodes from parent input (in parent input order), which have at least one\n * related child. These output nodes have a new relationship added to them,\n * which has the name `relationshipName`. 
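Parents with no related children are omitted, which is what makes this an inner join.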
The value of the relationship is a\n * stream of related nodes from the child input (in child input order).\n */\nexport class FlippedJoin implements Input {\n readonly #parent: Input;\n readonly #child: Input;\n readonly #parentKey: CompoundKey;\n readonly #childKey: CompoundKey;\n readonly #relationshipName: string;\n readonly #schema: SourceSchema;\n\n #output: Output = throwOutput;\n\n #inprogressChildChange: JoinChangeOverlay | undefined;\n\n constructor({\n parent,\n child,\n parentKey,\n childKey,\n relationshipName,\n hidden,\n system,\n }: Args) {\n assert(parent !== child, 'Parent and child must be different operators');\n assert(\n parentKey.length === childKey.length,\n 'The parentKey and childKey keys must have same length',\n );\n this.#parent = parent;\n this.#child = child;\n this.#parentKey = parentKey;\n this.#childKey = childKey;\n this.#relationshipName = relationshipName;\n\n const parentSchema = parent.getSchema();\n const childSchema = child.getSchema();\n this.#schema = {\n ...parentSchema,\n relationships: {\n ...parentSchema.relationships,\n [relationshipName]: {\n ...childSchema,\n isHidden: hidden,\n system,\n },\n },\n };\n\n parent.setOutput({\n push: (change: Change) => this.#pushParent(change),\n });\n child.setOutput({\n push: (change: Change) => this.#pushChild(change),\n });\n }\n\n destroy(): void {\n this.#child.destroy();\n this.#parent.destroy();\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n getSchema(): SourceSchema {\n return this.#schema;\n }\n\n // TODO: When parentKey is the parent's primary key (or more\n // generally when the parent cardinality is expected to be small) a different\n // algorithm should be used: For each child node, fetch all parent nodes\n // eagerly and then sort using quicksort.\n *fetch(req: FetchRequest): Stream<Node> {\n const childNodes = [...this.#child.fetch({})];\n // FlippedJoin's split-push change overlay logic is largely\n // the same as Join's with the exception of remove. For remove,\n // the change is undone here, and then re-applied to parents with order\n // less than or equal to change.position below. 
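(change.position tracks the last parent row to which the in-progress child change has already been pushed.)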
This is necessary\n // because if the removed node was the last related child, the\n // related parents with position greater than change.position\n // (which should not yet have the node removed), would not even\n // be fetched here, and would be absent from the output altogether.\n if (this.#inprogressChildChange?.change.type === 'remove') {\n const removedNode = this.#inprogressChildChange.change.node;\n const compare = this.#child.getSchema().compareRows;\n const insertPos = binarySearch(childNodes.length, i =>\n compare(removedNode.row, childNodes[i].row),\n );\n childNodes.splice(insertPos, 0, removedNode);\n }\n const parentIterators: Iterator<Node>[] = [];\n let threw = false;\n try {\n for (const childNode of childNodes) {\n // TODO: consider adding the ability to pass a set of\n // ids to fetch, and have them applied to sqlite using IN.\n const constraintFromChild: Writable<Constraint> = {};\n for (let i = 0; i < this.#parentKey.length; i++) {\n constraintFromChild[this.#parentKey[i]] =\n childNode.row[this.#childKey[i]];\n }\n if (\n req.constraint &&\n !constraintsAreCompatible(constraintFromChild, req.constraint)\n ) {\n parentIterators.push(emptyArray[Symbol.iterator]());\n } else {\n const stream = this.#parent.fetch({\n ...req,\n constraint: {\n ...req.constraint,\n ...constraintFromChild,\n },\n });\n const iterator = stream[Symbol.iterator]();\n parentIterators.push(iterator);\n }\n }\n const nextParentNodes: (Node | null)[] = [];\n for (let i = 0; i < parentIterators.length; i++) {\n const iter = parentIterators[i];\n const result = iter.next();\n nextParentNodes[i] = result.done ? null : result.value;\n }\n\n while (true) {\n let minParentNode = null;\n let minParentNodeChildIndexes: number[] = [];\n for (let i = 0; i < nextParentNodes.length; i++) {\n const parentNode = nextParentNodes[i];\n if (parentNode === null) {\n continue;\n }\n if (minParentNode === null) {\n minParentNode = parentNode;\n minParentNodeChildIndexes.push(i);\n } else {\n const compareResult =\n this.#schema.compareRows(parentNode.row, minParentNode.row) *\n (req.reverse ? -1 : 1);\n if (compareResult === 0) {\n minParentNodeChildIndexes.push(i);\n } else if (compareResult < 0) {\n minParentNode = parentNode;\n minParentNodeChildIndexes = [i];\n }\n }\n }\n if (minParentNode === null) {\n return;\n }\n const relatedChildNodes: Node[] = [];\n for (const minParentNodeChildIndex of minParentNodeChildIndexes) {\n relatedChildNodes.push(childNodes[minParentNodeChildIndex]);\n const iter = parentIterators[minParentNodeChildIndex];\n const result = iter.next();\n nextParentNodes[minParentNodeChildIndex] = result.done\n ? 
null\n : result.value;\n }\n let overlaidRelatedChildNodes = relatedChildNodes;\n if (\n this.#inprogressChildChange &&\n this.#inprogressChildChange.position &&\n isJoinMatch(\n this.#inprogressChildChange.change.node.row,\n this.#childKey,\n minParentNode.row,\n this.#parentKey,\n )\n ) {\n const hasInprogressChildChangeBeenPushedForMinParentNode =\n this.#parent\n .getSchema()\n .compareRows(\n minParentNode.row,\n this.#inprogressChildChange.position,\n ) <= 0;\n if (this.#inprogressChildChange.change.type === 'remove') {\n if (hasInprogressChildChangeBeenPushedForMinParentNode) {\n // Remove from relatedChildNodes since the removed child\n // was inserted into childNodes above.\n overlaidRelatedChildNodes = relatedChildNodes.filter(\n n => n !== this.#inprogressChildChange?.change.node,\n );\n }\n } else if (!hasInprogressChildChangeBeenPushedForMinParentNode) {\n overlaidRelatedChildNodes = [\n ...generateWithOverlay(\n relatedChildNodes,\n this.#inprogressChildChange.change,\n this.#child.getSchema(),\n ),\n ];\n }\n }\n\n // Yield the node only if, after the overlay, it still has related child nodes.\n if (overlaidRelatedChildNodes.length > 0) {\n yield {\n ...minParentNode,\n relationships: {\n ...minParentNode.relationships,\n [this.#relationshipName]: () => overlaidRelatedChildNodes,\n },\n };\n }\n }\n } catch (e) {\n threw = true;\n for (const iter of parentIterators) {\n try {\n iter.throw?.(e);\n } catch (_cleanupError) {\n // error in the iter.throw cleanup,\n // catch so other iterators are cleaned up\n }\n }\n throw e;\n } finally {\n if (!threw) {\n for (const iter of parentIterators) {\n try {\n iter.return?.();\n } catch (_cleanupError) {\n // error in the iter.return cleanup,\n // catch so other iterators are cleaned up\n }\n }\n }\n }\n }\n\n *cleanup(_req: FetchRequest): Stream<Node> {}\n\n #pushChild(change: Change): void {\n const pushChildChange = (exists?: boolean) => {\n this.#inprogressChildChange = {\n change,\n position: undefined,\n };\n try {\n const parentNodeStream = this.#parent.fetch({\n constraint: Object.fromEntries(\n this.#parentKey.map((key, i) => [\n key,\n change.node.row[this.#childKey[i]],\n ]),\n ),\n });\n for (const parentNode of parentNodeStream) {\n this.#inprogressChildChange = {\n change,\n position: parentNode.row,\n };\n const childNodeStream = () =>\n this.#child.fetch({\n constraint: Object.fromEntries(\n this.#childKey.map((key, i) => [\n key,\n parentNode.row[this.#parentKey[i]],\n ]),\n ),\n });\n if (!exists) {\n for (const childNode of childNodeStream()) {\n if (\n this.#child\n .getSchema()\n .compareRows(childNode.row, change.node.row) !== 0\n ) {\n exists = true;\n break;\n }\n }\n }\n if (exists) {\n this.#output.push(\n {\n type: 'child',\n node: {\n ...parentNode,\n relationships: {\n ...parentNode.relationships,\n [this.#relationshipName]: childNodeStream,\n },\n },\n child: {\n relationshipName: this.#relationshipName,\n change,\n },\n },\n this,\n );\n } else {\n this.#output.push(\n {\n ...change,\n node: {\n ...parentNode,\n relationships: {\n ...parentNode.relationships,\n [this.#relationshipName]: () => [change.node],\n },\n },\n },\n this,\n );\n }\n }\n } finally {\n this.#inprogressChildChange = undefined;\n }\n };\n\n switch (change.type) {\n case 'add':\n case 'remove':\n pushChildChange();\n break;\n case 'edit': {\n assert(\n rowEqualsForCompoundKey(\n change.oldNode.row,\n change.node.row,\n this.#childKey,\n ),\n `Child edit must not change relationship.`,\n );\n pushChildChange(true);\n break;\n }\n case 'child':\n 
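// The child row was already present, so its related parents are already in the output; always push a child change.\n 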
pushChildChange(true);\n break;\n }\n }\n\n #pushParent(change: Change): void {\n const childNodeStream = (node: Node) => () =>\n this.#child.fetch({\n constraint: Object.fromEntries(\n this.#childKey.map((key, i) => [key, node.row[this.#parentKey[i]]]),\n ),\n });\n\n const flip = (node: Node) => ({\n ...node,\n relationships: {\n ...node.relationships,\n [this.#relationshipName]: childNodeStream(node),\n },\n });\n\n // If there is no related child, don't push, since this is an inner join.\n if (first(childNodeStream(change.node)()) === undefined) {\n return;\n }\n\n switch (change.type) {\n case 'add':\n case 'remove':\n case 'child': {\n this.#output.push(\n {\n ...change,\n node: flip(change.node),\n },\n this,\n );\n break;\n }\n case 'edit': {\n assert(\n rowEqualsForCompoundKey(\n change.oldNode.row,\n change.node.row,\n this.#parentKey,\n ),\n `Parent edit must not change relationship.`,\n );\n this.#output.push(\n {\n type: 'edit',\n oldNode: flip(change.oldNode),\n node: flip(change.node),\n },\n this,\n );\n break;\n }\n default:\n unreachable(change);\n }\n }\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport type {CompoundKey, System} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport type {Change, ChildChange} from './change.ts';\nimport type {Node} from './data.ts';\nimport {\n generateWithOverlay,\n isJoinMatch,\n rowEqualsForCompoundKey,\n type JoinChangeOverlay,\n} from './join-utils.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Output,\n type Storage,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {take, type Stream} from './stream.ts';\n\ntype Args = {\n parent: Input;\n child: Input;\n storage: Storage;\n // The nth key in parentKey corresponds to the nth key in childKey.\n parentKey: CompoundKey;\n childKey: CompoundKey;\n relationshipName: string;\n hidden: boolean;\n system: System;\n};\n\n/**\n * The Join operator joins the output from two upstream inputs. Zero's join\n * is a little different from SQL's join in that we output hierarchical data,\n * not a flat table. This makes it a lot more useful for UI programming and\n * avoids duplicating tons of data like left join would.\n *\n * The Nodes output from Join have a new relationship added to them, which has\n * the name #relationshipName. 
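Unlike FlippedJoin, Join iterates its parent input and lazily fetches the related child stream for each parent node.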
The value of the relationship is a stream of\n * child nodes which are the corresponding values from the child source.\n */\nexport class Join implements Input {\n readonly #parent: Input;\n readonly #child: Input;\n readonly #storage: Storage;\n readonly #parentKey: CompoundKey;\n readonly #childKey: CompoundKey;\n readonly #relationshipName: string;\n readonly #schema: SourceSchema;\n\n #output: Output = throwOutput;\n\n #inprogressChildChange: JoinChangeOverlay | undefined;\n\n constructor({\n parent,\n child,\n storage,\n parentKey,\n childKey,\n relationshipName,\n hidden,\n system,\n }: Args) {\n assert(parent !== child, 'Parent and child must be different operators');\n assert(\n parentKey.length === childKey.length,\n 'The parentKey and childKey keys must have same length',\n );\n this.#parent = parent;\n this.#child = child;\n this.#storage = storage;\n this.#parentKey = parentKey;\n this.#childKey = childKey;\n this.#relationshipName = relationshipName;\n\n const parentSchema = parent.getSchema();\n const childSchema = child.getSchema();\n this.#schema = {\n ...parentSchema,\n relationships: {\n ...parentSchema.relationships,\n [relationshipName]: {\n ...childSchema,\n isHidden: hidden,\n system,\n },\n },\n };\n\n parent.setOutput({\n push: (change: Change) => this.#pushParent(change),\n });\n child.setOutput({\n push: (change: Change) => this.#pushChild(change),\n });\n }\n\n destroy(): void {\n this.#parent.destroy();\n this.#child.destroy();\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n getSchema(): SourceSchema {\n return this.#schema;\n }\n\n *fetch(req: FetchRequest): Stream<Node> {\n for (const parentNode of this.#parent.fetch(req)) {\n yield this.#processParentNode(\n parentNode.row,\n parentNode.relationships,\n 'fetch',\n );\n }\n }\n\n *cleanup(req: FetchRequest): Stream<Node> {\n for (const parentNode of this.#parent.cleanup(req)) {\n yield this.#processParentNode(\n parentNode.row,\n parentNode.relationships,\n 'cleanup',\n );\n }\n }\n\n #pushParent(change: Change): void {\n switch (change.type) {\n case 'add':\n this.#output.push(\n {\n type: 'add',\n node: this.#processParentNode(\n change.node.row,\n change.node.relationships,\n 'fetch',\n ),\n },\n this,\n );\n break;\n case 'remove':\n this.#output.push(\n {\n type: 'remove',\n node: this.#processParentNode(\n change.node.row,\n change.node.relationships,\n 'cleanup',\n ),\n },\n this,\n );\n break;\n case 'child':\n this.#output.push(\n {\n type: 'child',\n node: this.#processParentNode(\n change.node.row,\n change.node.relationships,\n 'fetch',\n ),\n child: change.child,\n },\n this,\n );\n break;\n case 'edit': {\n // Assert the edit could not change the relationship.\n assert(\n rowEqualsForCompoundKey(\n change.oldNode.row,\n change.node.row,\n this.#parentKey,\n ),\n `Parent edit must not change relationship.`,\n );\n this.#output.push(\n {\n type: 'edit',\n oldNode: this.#processParentNode(\n change.oldNode.row,\n change.oldNode.relationships,\n 'cleanup',\n ),\n node: this.#processParentNode(\n change.node.row,\n change.node.relationships,\n 'fetch',\n ),\n },\n this,\n );\n break;\n }\n default:\n unreachable(change);\n }\n }\n\n #pushChild(change: Change): void {\n const pushChildChange = (childRow: Row, change: Change) => {\n this.#inprogressChildChange = {\n change,\n position: undefined,\n };\n try {\n const parentNodes = this.#parent.fetch({\n constraint: Object.fromEntries(\n this.#parentKey.map((key, i) => [key, childRow[this.#childKey[i]]]),\n ),\n });\n\n for 
(const parentNode of parentNodes) {\n this.#inprogressChildChange.position = parentNode.row;\n const childChange: ChildChange = {\n type: 'child',\n node: this.#processParentNode(\n parentNode.row,\n parentNode.relationships,\n 'fetch',\n ),\n child: {\n relationshipName: this.#relationshipName,\n change,\n },\n };\n this.#output.push(childChange, this);\n }\n } finally {\n this.#inprogressChildChange = undefined;\n }\n };\n\n switch (change.type) {\n case 'add':\n case 'remove':\n pushChildChange(change.node.row, change);\n break;\n case 'child':\n pushChildChange(change.node.row, change);\n break;\n case 'edit': {\n const childRow = change.node.row;\n const oldChildRow = change.oldNode.row;\n // Assert the edit could not change the relationship.\n assert(\n rowEqualsForCompoundKey(oldChildRow, childRow, this.#childKey),\n 'Child edit must not change relationship.',\n );\n pushChildChange(childRow, change);\n break;\n }\n\n default:\n unreachable(change);\n }\n }\n\n #processParentNode(\n parentNodeRow: Row,\n parentNodeRelations: Record<string, () => Stream<Node>>,\n mode: ProcessParentMode,\n ): Node {\n let method: ProcessParentMode = mode;\n let storageUpdated = false;\n const childStream = () => {\n if (!storageUpdated) {\n if (mode === 'cleanup') {\n this.#storage.del(\n makeStorageKey(\n this.#parentKey,\n this.#parent.getSchema().primaryKey,\n parentNodeRow,\n ),\n );\n const empty =\n [\n ...take(\n this.#storage.scan({\n prefix: makeStorageKeyPrefix(parentNodeRow, this.#parentKey),\n }),\n 1,\n ),\n ].length === 0;\n method = empty ? 'cleanup' : 'fetch';\n }\n\n storageUpdated = true;\n // Defer the work to update storage until the child stream\n // is actually accessed\n if (mode === 'fetch') {\n this.#storage.set(\n makeStorageKey(\n this.#parentKey,\n this.#parent.getSchema().primaryKey,\n parentNodeRow,\n ),\n true,\n );\n }\n }\n\n const stream = this.#child[method]({\n constraint: Object.fromEntries(\n this.#childKey.map((key, i) => [\n key,\n parentNodeRow[this.#parentKey[i]],\n ]),\n ),\n });\n\n if (\n this.#inprogressChildChange &&\n isJoinMatch(\n parentNodeRow,\n this.#parentKey,\n this.#inprogressChildChange.change.node.row,\n this.#childKey,\n ) &&\n this.#inprogressChildChange.position &&\n this.#schema.compareRows(\n parentNodeRow,\n this.#inprogressChildChange.position,\n ) > 0\n ) {\n return generateWithOverlay(\n stream,\n this.#inprogressChildChange.change,\n this.#child.getSchema(),\n );\n }\n return stream;\n };\n\n return {\n row: parentNodeRow,\n relationships: {\n ...parentNodeRelations,\n [this.#relationshipName]: childStream,\n },\n };\n }\n}\n\ntype ProcessParentMode = 'fetch' | 'cleanup';\n\n/** Exported for testing. */\nexport function makeStorageKeyForValues(values: readonly Value[]): string {\n const json = JSON.stringify(['pKeySet', ...values]);\n return json.substring(1, json.length - 1) + ',';\n}\n\n/** Exported for testing. */\nexport function makeStorageKeyPrefix(row: Row, key: CompoundKey): string {\n return makeStorageKeyForValues(key.map(k => row[k]));\n}\n\n/** Exported for testing.\n * This storage key tracks the primary keys seen for each unique\n * value joined on. 
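(Keys are JSON-encoded under a 'pKeySet' prefix so that storage can be scanned by join value.)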
This is used to know when to cleanup a child's state.\n */\nexport function makeStorageKey(\n key: CompoundKey,\n primaryKey: PrimaryKey,\n row: Row,\n): string {\n const values: Value[] = key.map(k => row[k]);\n for (const key of primaryKey) {\n values.push(row[key]);\n }\n return makeStorageKeyForValues(values);\n}\n", "import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {AddChange, Change, ChildChange, RemoveChange} from './change.ts';\nimport type {Comparator, Node} from './data.ts';\nimport {maybeSplitAndPushEditChange} from './maybe-split-and-push-edit-change.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Operator,\n type Output,\n type Start,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\nexport type Bound = {\n row: Row;\n exclusive: boolean;\n};\n\n/**\n * Skip sets the start position for the pipeline. No rows before the bound will\n * be output.\n */\nexport class Skip implements Operator {\n readonly #input: Input;\n readonly #bound: Bound;\n readonly #comparator: Comparator;\n\n #output: Output = throwOutput;\n\n constructor(input: Input, bound: Bound) {\n this.#input = input;\n this.#bound = bound;\n this.#comparator = input.getSchema().compareRows;\n input.setOutput(this);\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n fetch(req: FetchRequest): Stream<Node> {\n return this.#fetchOrCleanup('fetch', req);\n }\n\n cleanup(req: FetchRequest): Stream<Node> {\n return this.#fetchOrCleanup('fetch', req);\n }\n\n *#fetchOrCleanup(method: 'fetch' | 'cleanup', req: FetchRequest) {\n const start = this.#getStart(req);\n if (start === 'empty') {\n return;\n }\n const nodes = this.#input[method]({...req, start});\n if (!req.reverse) {\n yield* nodes;\n return;\n }\n for (const node of nodes) {\n if (!this.#shouldBePresent(node.row)) {\n return;\n }\n yield node;\n }\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n #shouldBePresent(row: Row): boolean {\n const cmp = this.#comparator(this.#bound.row, row);\n return cmp < 0 || (cmp === 0 && !this.#bound.exclusive);\n }\n\n push(change: Change): void {\n const shouldBePresent = (row: Row) => this.#shouldBePresent(row);\n if (change.type === 'edit') {\n maybeSplitAndPushEditChange(change, shouldBePresent, this.#output, this);\n return;\n }\n\n change satisfies AddChange | RemoveChange | ChildChange;\n\n if (shouldBePresent(change.node.row)) {\n this.#output.push(change, this);\n }\n }\n\n #getStart(req: FetchRequest): Start | undefined | 'empty' {\n const boundStart = {\n row: this.#bound.row,\n basis: this.#bound.exclusive ? 'after' : 'at',\n } as const;\n\n if (!req.start) {\n if (req.reverse) {\n return undefined;\n }\n return boundStart;\n }\n\n const cmp = this.#comparator(this.#bound.row, req.start.row);\n\n if (!req.reverse) {\n // The skip bound is after the requested bound. The requested bound cannot\n // be relevant because even if it was basis: 'after', the skip bound is\n // itself after the requested bound. Return the skip bound.\n if (cmp > 0) {\n return boundStart;\n }\n\n // The skip bound and requested bound are equal. If either is exclusive,\n // return that bound with exclusive. 
Otherwise, return the skip bound.\n if (cmp === 0) {\n if (this.#bound.exclusive || req.start.basis === 'after') {\n return {\n row: this.#bound.row,\n basis: 'after',\n };\n }\n return boundStart;\n }\n\n return req.start;\n }\n\n req.reverse satisfies true;\n\n // bound is after the start, but request is for reverse so results\n // must be empty\n if (cmp > 0) {\n return 'empty';\n }\n\n if (cmp === 0) {\n // if both are inclusive, the result can be the single row at bound\n // return it as start\n if (!this.#bound.exclusive && req.start.basis === 'at') {\n return boundStart;\n }\n // otherwise the results must be empty, one or both are exclusive\n // in opposite directions\n return 'empty';\n }\n\n // bound is before the start, return start\n return req.start;\n }\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {hasOwn} from '../../../shared/src/has-own.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport {assertOrderingIncludesPK} from '../builder/builder.ts';\nimport {type Change, type EditChange, type RemoveChange} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport {compareValues, type Comparator, type Node} from './data.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Operator,\n type Output,\n type Storage,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first, take, type Stream} from './stream.ts';\n\nconst MAX_BOUND_KEY = 'maxBound';\n\ntype TakeState = {\n size: number;\n bound: Row | undefined;\n};\n\ninterface TakeStorage {\n get(key: typeof MAX_BOUND_KEY): Row | undefined;\n get(key: string): TakeState | undefined;\n set(key: typeof MAX_BOUND_KEY, value: Row): void;\n set(key: string, value: TakeState): void;\n del(key: string): void;\n}\n\nexport type PartitionKey = PrimaryKey;\n\n/**\n * The Take operator is for implementing limit queries. It takes the first n\n * nodes of its input as determined by the input\u2019s comparator. 
It then keeps\n * a *bound* of the last item it has accepted so that it can evaluate whether\n * new incoming pushes should be accepted or rejected.\n *\n * Take can count rows globally or by unique value of some field.\n *\n * Maintains the invariant that its output size is always <= limit, even\n * mid-processing of a push.\n */\nexport class Take implements Operator {\n readonly #input: Input;\n readonly #storage: TakeStorage;\n readonly #limit: number;\n readonly #partitionKey: PartitionKey | undefined;\n readonly #partitionKeyComparator: Comparator | undefined;\n // Fetch overlay needed for some split push cases.\n #rowHiddenFromFetch: Row | undefined;\n\n #output: Output = throwOutput;\n\n constructor(\n input: Input,\n storage: Storage,\n limit: number,\n partitionKey?: PartitionKey | undefined,\n ) {\n assert(limit >= 0);\n assertOrderingIncludesPK(\n input.getSchema().sort,\n input.getSchema().primaryKey,\n );\n input.setOutput(this);\n this.#input = input;\n this.#storage = storage as TakeStorage;\n this.#limit = limit;\n this.#partitionKey = partitionKey;\n this.#partitionKeyComparator =\n partitionKey && makePartitionKeyComparator(partitionKey);\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n *fetch(req: FetchRequest): Stream<Node> {\n if (\n !this.#partitionKey ||\n (req.constraint &&\n constraintMatchesPartitionKey(req.constraint, this.#partitionKey))\n ) {\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n const takeState = this.#storage.get(takeStateKey);\n if (!takeState) {\n yield* this.#initialFetch(req);\n return;\n }\n if (takeState.bound === undefined) {\n return;\n }\n for (const inputNode of this.#input.fetch(req)) {\n if (this.getSchema().compareRows(takeState.bound, inputNode.row) < 0) {\n return;\n }\n if (\n this.#rowHiddenFromFetch &&\n this.getSchema().compareRows(\n this.#rowHiddenFromFetch,\n inputNode.row,\n ) === 0\n ) {\n continue;\n }\n yield inputNode;\n }\n return;\n }\n // There is a partition key, but the fetch is either unconstrained or\n // constrained on a different key. Thus we don't have a single take state\n // to bound by.\n // This currently only happens with nested sub-queries\n // e.g. issues include issuelabels include label. 
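(For now we scan up to maxBound and check each row against its own partition's take state, below.)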
We could remove this\n // case if we added a translation layer (powered by some state) in join.\n // Specifically we need joinKeyValue => parent constraint key\n const maxBound = this.#storage.get(MAX_BOUND_KEY);\n if (maxBound === undefined) {\n return;\n }\n for (const inputNode of this.#input.fetch(req)) {\n if (this.getSchema().compareRows(inputNode.row, maxBound) > 0) {\n return;\n }\n const takeStateKey = getTakeStateKey(this.#partitionKey, inputNode.row);\n const takeState = this.#storage.get(takeStateKey);\n if (\n takeState?.bound !== undefined &&\n this.getSchema().compareRows(takeState.bound, inputNode.row) >= 0\n ) {\n yield inputNode;\n }\n }\n }\n\n *#initialFetch(req: FetchRequest): Stream<Node> {\n assert(req.start === undefined);\n assert(!req.reverse);\n assert(constraintMatchesPartitionKey(req.constraint, this.#partitionKey));\n\n if (this.#limit === 0) {\n return;\n }\n\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n assert(this.#storage.get(takeStateKey) === undefined);\n\n let size = 0;\n let bound: Row | undefined;\n let downstreamEarlyReturn = true;\n let exceptionThrown = false;\n try {\n for (const inputNode of this.#input.fetch(req)) {\n yield inputNode;\n bound = inputNode.row;\n size++;\n if (size === this.#limit) {\n break;\n }\n }\n downstreamEarlyReturn = false;\n } catch (e) {\n exceptionThrown = true;\n throw e;\n } finally {\n if (!exceptionThrown) {\n this.#setTakeState(\n takeStateKey,\n size,\n bound,\n this.#storage.get(MAX_BOUND_KEY),\n );\n // If it becomes necessary to support downstream early return, this\n // assert should be removed, and replaced with code that consumes\n // the input stream until limit is reached or the input stream is\n // exhausted so that takeState is properly hydrated.\n assert(\n !downstreamEarlyReturn,\n 'Unexpected early return prevented full hydration',\n );\n }\n }\n }\n\n *cleanup(req: FetchRequest): Stream<Node> {\n assert(req.start === undefined);\n assert(constraintMatchesPartitionKey(req.constraint, this.#partitionKey));\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n this.#storage.del(takeStateKey);\n let size = 0;\n for (const inputNode of this.#input.cleanup(req)) {\n if (size === this.#limit) {\n return;\n }\n size++;\n yield inputNode;\n }\n }\n\n #getStateAndConstraint(row: Row) {\n const takeStateKey = getTakeStateKey(this.#partitionKey, row);\n const takeState = this.#storage.get(takeStateKey);\n let maxBound: Row | undefined;\n let constraint: Constraint | undefined;\n if (takeState) {\n maxBound = this.#storage.get(MAX_BOUND_KEY);\n constraint =\n this.#partitionKey &&\n Object.fromEntries(\n this.#partitionKey.map(key => [key, row[key]] as const),\n );\n }\n\n return {takeState, takeStateKey, maxBound, constraint} as\n | {\n takeState: undefined;\n takeStateKey: string;\n maxBound: undefined;\n constraint: undefined;\n }\n | {\n takeState: TakeState;\n takeStateKey: string;\n maxBound: Row | undefined;\n constraint: Constraint | undefined;\n };\n }\n\n push(change: Change): void {\n if (change.type === 'edit') {\n this.#pushEditChange(change);\n return;\n }\n\n const {takeState, takeStateKey, maxBound, constraint} =\n this.#getStateAndConstraint(change.node.row);\n if (!takeState) {\n return;\n }\n\n const {compareRows} = this.getSchema();\n\n if (change.type === 'add') {\n if (takeState.size < this.#limit) {\n this.#setTakeState(\n takeStateKey,\n takeState.size + 1,\n takeState.bound === undefined ||\n compareRows(takeState.bound, 
change.node.row) < 0\n ? change.node.row\n : takeState.bound,\n maxBound,\n );\n this.#output.push(change, this);\n return;\n }\n // size === limit\n if (\n takeState.bound === undefined ||\n compareRows(change.node.row, takeState.bound) >= 0\n ) {\n return;\n }\n // added row < bound\n let beforeBoundNode: Node | undefined;\n let boundNode: Node;\n if (this.#limit === 1) {\n boundNode = must(\n first(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n }),\n ),\n );\n } else {\n [boundNode, beforeBoundNode] = take(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n reverse: true,\n }),\n 2,\n );\n }\n const removeChange: RemoveChange = {\n type: 'remove',\n node: boundNode,\n };\n // Remove before add to maintain invariant that\n // output size <= limit.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n beforeBoundNode === undefined ||\n compareRows(change.node.row, beforeBoundNode.row) > 0\n ? change.node.row\n : beforeBoundNode.row,\n maxBound,\n );\n this.#withRowHiddenFromFetch(change.node.row, () => {\n this.#output.push(removeChange, this);\n });\n this.#output.push(change, this);\n } else if (change.type === 'remove') {\n if (takeState.bound === undefined) {\n // change is after bound\n return;\n }\n const compToBound = compareRows(change.node.row, takeState.bound);\n if (compToBound > 0) {\n // change is after bound\n return;\n }\n const [beforeBoundNode] = take(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n reverse: true,\n }),\n 1,\n );\n\n let newBound: {node: Node; push: boolean} | undefined;\n if (beforeBoundNode) {\n const push = compareRows(beforeBoundNode.row, takeState.bound) > 0;\n newBound = {\n node: beforeBoundNode,\n push,\n };\n }\n if (!newBound?.push) {\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n })) {\n const push = compareRows(node.row, takeState.bound) > 0;\n newBound = {\n node,\n push,\n };\n if (push) {\n break;\n }\n }\n }\n\n if (newBound?.push) {\n this.#output.push(change, this);\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBound.node.row,\n maxBound,\n );\n this.#output.push(\n {\n type: 'add',\n node: newBound.node,\n },\n this,\n );\n return;\n }\n this.#setTakeState(\n takeStateKey,\n takeState.size - 1,\n newBound?.node.row,\n maxBound,\n );\n this.#output.push(change, this);\n } else if (change.type === 'child') {\n // A 'child' change should be pushed to output if its row\n // is <= bound.\n if (\n takeState.bound &&\n compareRows(change.node.row, takeState.bound) <= 0\n ) {\n this.#output.push(change, this);\n }\n }\n }\n\n #pushEditChange(change: EditChange): void {\n assert(\n !this.#partitionKeyComparator ||\n this.#partitionKeyComparator(change.oldNode.row, change.node.row) === 0,\n 'Unexpected change of partition key',\n );\n\n const {takeState, takeStateKey, maxBound, constraint} =\n this.#getStateAndConstraint(change.oldNode.row);\n if (!takeState) {\n return;\n }\n\n assert(takeState.bound, 'Bound should be set');\n const {compareRows} = this.getSchema();\n const oldCmp = compareRows(change.oldNode.row, takeState.bound);\n const newCmp = compareRows(change.node.row, takeState.bound);\n\n const replaceBoundAndForwardChange = () => {\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n change.node.row,\n maxBound,\n );\n this.#output.push(change, this);\n };\n\n // The bounds row was changed.\n if (oldCmp === 0) 
{\n // The new row is the new bound.\n if (newCmp === 0) {\n // no need to update the state since we are keeping the bounds\n this.#output.push(change, this);\n return;\n }\n\n if (newCmp < 0) {\n if (this.#limit === 1) {\n replaceBoundAndForwardChange();\n return;\n }\n\n // New row will be in the result but it might not be the bounds any\n // more. We need to find the row before the bounds to determine the new\n // bounds.\n\n const beforeBoundNode = must(\n first(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n reverse: true,\n }),\n ),\n );\n\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n beforeBoundNode.row,\n maxBound,\n );\n this.#output.push(change, this);\n return;\n }\n\n assert(newCmp > 0);\n // Find the first item at the old bounds. This will be the new bounds.\n const newBoundNode = must(\n first(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n }),\n ),\n );\n\n // The next row is the new row. We can replace the bounds and keep the\n // edit change.\n if (compareRows(newBoundNode.row, change.node.row) === 0) {\n replaceBoundAndForwardChange();\n return;\n }\n\n // The new row is now outside the bounds, so we need to remove the old\n // row and add the new bounds row.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBoundNode.row,\n maxBound,\n );\n this.#withRowHiddenFromFetch(newBoundNode.row, () => {\n this.#output.push(\n {\n type: 'remove',\n node: change.oldNode,\n },\n this,\n );\n });\n this.#output.push(\n {\n type: 'add',\n node: newBoundNode,\n },\n this,\n );\n return;\n }\n\n if (oldCmp > 0) {\n assert(newCmp !== 0, 'Invalid state. Row has duplicate primary key');\n\n // Both old and new outside of bounds\n if (newCmp > 0) {\n return;\n }\n\n // old was outside, new is inside. Pushing out the old bounds\n assert(newCmp < 0);\n\n const [oldBoundNode, newBoundNode] = take(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n reverse: true,\n }),\n 2,\n );\n // Remove before add to maintain invariant that\n // output size <= limit.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBoundNode.row,\n maxBound,\n );\n this.#withRowHiddenFromFetch(change.node.row, () => {\n this.#output.push(\n {\n type: 'remove',\n node: oldBoundNode,\n },\n this,\n );\n });\n this.#output.push(\n {\n type: 'add',\n node: change.node,\n },\n this,\n );\n\n return;\n }\n\n if (oldCmp < 0) {\n assert(newCmp !== 0, 'Invalid state. Row has duplicate primary key');\n\n // Both old and new inside of bounds\n if (newCmp < 0) {\n this.#output.push(change, this);\n return;\n }\n\n // old was inside, new is larger than old bound\n\n assert(newCmp > 0);\n\n // at this point we need to find the row after the bound and use that or\n // the newRow as the new bound.\n const afterBoundNode = must(\n first(\n this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n }),\n ),\n );\n\n // The new row is the new bound. 
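(The edited row moved exactly into the slot just past the old bound, so it remains within the window.)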
Use an edit change.\n if (compareRows(afterBoundNode.row, change.node.row) === 0) {\n replaceBoundAndForwardChange();\n return;\n }\n\n this.#output.push(\n {\n type: 'remove',\n node: change.oldNode,\n },\n this,\n );\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n afterBoundNode.row,\n maxBound,\n );\n this.#output.push(\n {\n type: 'add',\n node: afterBoundNode,\n },\n this,\n );\n return;\n }\n\n unreachable();\n }\n\n #withRowHiddenFromFetch(row: Row, fn: () => void) {\n this.#rowHiddenFromFetch = row;\n try {\n fn();\n } finally {\n this.#rowHiddenFromFetch = undefined;\n }\n }\n\n #setTakeState(\n takeStateKey: string,\n size: number,\n bound: Row | undefined,\n maxBound: Row | undefined,\n ) {\n this.#storage.set(takeStateKey, {\n size,\n bound,\n });\n if (\n bound !== undefined &&\n (maxBound === undefined ||\n this.getSchema().compareRows(bound, maxBound) > 0)\n ) {\n this.#storage.set(MAX_BOUND_KEY, bound);\n }\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n}\n\nfunction getTakeStateKey(\n partitionKey: PartitionKey | undefined,\n rowOrConstraint: Row | Constraint | undefined,\n): string {\n // The order must be consistent. We always use the order as defined by the\n // partition key.\n const partitionValues: Value[] = [];\n\n if (partitionKey && rowOrConstraint) {\n for (const key of partitionKey) {\n partitionValues.push(rowOrConstraint[key]);\n }\n }\n\n return JSON.stringify(['take', ...partitionValues]);\n}\n\nfunction constraintMatchesPartitionKey(\n constraint: Constraint | undefined,\n partitionKey: PartitionKey | undefined,\n): boolean {\n if (constraint === undefined || partitionKey === undefined) {\n return constraint === partitionKey;\n }\n if (partitionKey.length !== Object.keys(constraint).length) {\n return false;\n }\n for (const key of partitionKey) {\n if (!hasOwn(constraint, key)) {\n return false;\n }\n }\n return true;\n}\n\nfunction makePartitionKeyComparator(partitionKey: PartitionKey): Comparator {\n return (a, b) => {\n for (const key of partitionKey) {\n const cmp = compareValues(a[key], b[key]);\n if (cmp !== 0) {\n return cmp;\n }\n }\n return 0;\n };\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport {mergeIterables} from '../../../shared/src/iterables.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {Change} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type InputBase,\n type Operator,\n type Output,\n} from './operator.ts';\nimport {\n makeAddEmptyRelationships,\n mergeRelationships,\n pushAccumulatedChanges,\n} from './push-accumulated.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first, type Stream} from './stream.ts';\nimport type {UnionFanOut} from './union-fan-out.ts';\n\nexport class UnionFanIn implements Operator {\n readonly #inputs: readonly Input[];\n readonly #schema: SourceSchema;\n #fanOutPushStarted: boolean = false;\n #output: Output = throwOutput;\n #accumulatedPushes: Change[] = [];\n\n constructor(fanOut: UnionFanOut, inputs: Input[]) {\n this.#inputs = inputs;\n const fanOutSchema = fanOut.getSchema();\n fanOut.setFanIn(this);\n\n const schema: Writable<SourceSchema> = {\n tableName: fanOutSchema.tableName,\n columns: fanOutSchema.columns,\n primaryKey: fanOutSchema.primaryKey,\n relationships: {\n ...fanOutSchema.relationships,\n },\n isHidden: fanOutSchema.isHidden,\n system: fanOutSchema.system,\n compareRows: 
fanOutSchema.compareRows,\n sort: fanOutSchema.sort,\n };\n\n // now go through inputs and merge relationships\n const relationshipsFromBranches: Set<string> = new Set();\n for (const input of inputs) {\n const inputSchema = input.getSchema();\n assert(\n schema.tableName === inputSchema.tableName,\n `Table name mismatch in union fan-in: ${schema.tableName} !== ${inputSchema.tableName}`,\n );\n assert(\n schema.primaryKey === inputSchema.primaryKey,\n `Primary key mismatch in union fan-in`,\n );\n assert(\n schema.system === inputSchema.system,\n `System mismatch in union fan-in: ${schema.system} !== ${inputSchema.system}`,\n );\n assert(\n schema.compareRows === inputSchema.compareRows,\n `compareRows mismatch in union fan-in`,\n );\n assert(schema.sort === inputSchema.sort, `Sort mismatch in union fan-in`);\n\n for (const [relName, relSchema] of Object.entries(\n inputSchema.relationships,\n )) {\n if (relName in fanOutSchema.relationships) {\n continue;\n }\n\n // All branches will have unique relationship names except for relationships\n // that come in from `fanOut`.\n assert(\n !relationshipsFromBranches.has(relName),\n `Relationship ${relName} exists in multiple upstream inputs to union fan-in`,\n );\n schema.relationships[relName] = relSchema;\n relationshipsFromBranches.add(relName);\n }\n\n input.setOutput(this);\n }\n\n this.#schema = schema;\n this.#inputs = inputs;\n }\n\n cleanup(_req: FetchRequest): Stream<Node> {\n // Cleanup is going away. Not implemented.\n return [];\n }\n\n destroy(): void {\n for (const input of this.#inputs) {\n input.destroy();\n }\n }\n\n fetch(req: FetchRequest): Stream<Node> {\n const iterables = this.#inputs.map(input => input.fetch(req));\n return mergeIterables(\n iterables,\n (l, r) => this.#schema.compareRows(l.row, r.row),\n true,\n );\n }\n\n getSchema(): SourceSchema {\n return this.#schema;\n }\n\n push(change: Change, pusher: InputBase): void {\n if (!this.#fanOutPushStarted) {\n this.#pushInternalChange(change, pusher);\n } else {\n this.#accumulatedPushes.push(change);\n }\n }\n\n /**\n * An internal change means that a change was received inside the fan-out/fan-in sub-graph.\n *\n * These changes always come from children of a flip-join since no other push-generating operators\n * currently exist between union-fan-in and union-fan-out. All other pushes\n * enter into union-fan-out before reaching union-fan-in.\n *\n * - normal joins for `exists` come before `union-fan-out`\n * - joins for `related` come after `union-fan-out`\n * - take comes after `union-fan-out`\n *\n * The algorithm for deciding whether or not to forward a push that came from inside the ufo/ufi sub-graph:\n * 1. If the change is a `child` change we can forward it. This is because all child branches in the ufo/ufi sub-graph are unique.\n * 2. If the change is `add` we can forward it iff no `fetches` for the row return any results.\n * If another branch has it, the add was already emitted in the past.\n * 3. If the change is `remove` we can forward it iff no `fetches` for the row return any results.\n * If no other branches have the change, the remove can be sent as the value is no longer present.\n * If other branches have it, the last branch that processes the remove will send the remove.\n * 4. 
Edits will always come through as child changes as flip join will flip them into children.\n * An edit that would result in a remove or add will have been split into an add/remove pair rather than being an edit.\n */\n #pushInternalChange(change: Change, pusher: InputBase): void {\n if (change.type === 'child') {\n this.#output.push(change, this);\n return;\n }\n\n assert(change.type === 'add' || change.type === 'remove');\n\n let hadMatch = false;\n for (const input of this.#inputs) {\n if (input === pusher) {\n hadMatch = true;\n continue;\n }\n\n const constraint: Writable<Constraint> = {};\n for (const key of this.#schema.primaryKey) {\n constraint[key] = change.node.row[key];\n }\n const fetchResult = input.fetch({\n constraint,\n });\n\n if (first(fetchResult) !== undefined) {\n // Another branch has the row, so the add/remove is not needed.\n return;\n }\n }\n\n assert(hadMatch, 'Pusher was not one of the inputs to union-fan-in!');\n\n // No other branches have the row, so we can push the change.\n this.#output.push(change, this);\n }\n\n fanOutStartedPushing() {\n assert(this.#fanOutPushStarted === false);\n this.#fanOutPushStarted = true;\n }\n\n fanOutDonePushing(fanOutChangeType: Change['type']) {\n assert(this.#fanOutPushStarted);\n this.#fanOutPushStarted = false;\n if (this.#inputs.length === 0) {\n return;\n }\n\n if (this.#accumulatedPushes.length === 0) {\n // It is possible for no forks to pass along the push.\n // E.g., if no filters match in any fork.\n return;\n }\n\n pushAccumulatedChanges(\n this.#accumulatedPushes,\n this.#output,\n this,\n fanOutChangeType,\n mergeRelationships,\n makeAddEmptyRelationships(this.#schema),\n );\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Change} from './change.ts';\nimport type {Node} from './data.ts';\nimport type {FetchRequest, Input, Operator, Output} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\nimport type {UnionFanIn} from './union-fan-in.ts';\n\nexport class UnionFanOut implements Operator {\n #destroyCount: number = 0;\n #unionFanIn?: UnionFanIn;\n readonly #input: Input;\n readonly #outputs: Output[] = [];\n\n constructor(input: Input) {\n this.#input = input;\n input.setOutput(this);\n }\n\n setFanIn(fanIn: UnionFanIn) {\n assert(!this.#unionFanIn, 'FanIn already set for this FanOut');\n this.#unionFanIn = fanIn;\n }\n\n push(change: Change): void {\n must(this.#unionFanIn).fanOutStartedPushing();\n for (const output of this.#outputs) {\n output.push(change, this);\n }\n must(this.#unionFanIn).fanOutDonePushing(change.type);\n }\n\n setOutput(output: Output): void {\n this.#outputs.push(output);\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n fetch(req: FetchRequest): Stream<Node> {\n return this.#input.fetch(req);\n }\n\n cleanup(_req: FetchRequest): Stream<Node> {\n // Cleanup is going away. 
Not implemented.\n return [];\n }\n\n destroy(): void {\n if (this.#destroyCount < this.#outputs.length) {\n ++this.#destroyCount;\n if (this.#destroyCount === this.#outputs.length) {\n this.#input.destroy();\n }\n } else {\n throw new Error('FanOut already destroyed once for each output');\n }\n }\n}\n", "/* eslint-disable @typescript-eslint/no-explicit-any */\nimport {must} from '../../../shared/src/must.ts';\nimport {\n toStaticParam,\n type Condition,\n type LiteralValue,\n type Parameter,\n type SimpleOperator,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Schema} from '../../../zero-schema/src/builder/schema-builder.ts';\nimport type {\n AvailableRelationships,\n DestTableName,\n ExistsOptions,\n GetFilterType,\n NoCompoundTypeSelector,\n PullTableSchema,\n Query,\n} from './query.ts';\n\nexport type ParameterReference = {\n [toStaticParam](): Parameter;\n};\n\n/**\n * A factory function that creates a condition. This is used to create\n * complex conditions that can be passed to the `where` method of a query.\n *\n * @example\n *\n * ```ts\n * const condition: ExpressionFactory<User> = ({and, cmp, or}) =>\n * and(\n * cmp('name', '=', 'Alice'),\n * or(cmp('age', '>', 18), cmp('isStudent', '=', true)),\n * );\n *\n * const query = z.query.user.where(condition);\n * ```\n */\nexport interface ExpressionFactory<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n> {\n (eb: ExpressionBuilder<TSchema, TTable>): Condition;\n}\n\nexport class ExpressionBuilder<\n TSchema extends Schema,\n TTable extends keyof TSchema['tables'] & string,\n> {\n readonly #exists: (\n relationship: string,\n cb?: ((query: Query<TSchema, TTable>) => Query<TSchema, any>) | undefined,\n options?: ExistsOptions | undefined,\n ) => Condition;\n\n constructor(\n exists: (\n relationship: string,\n cb?: ((query: Query<TSchema, TTable>) => Query<TSchema, any>) | undefined,\n options?: ExistsOptions | undefined,\n ) => Condition,\n ) {\n this.#exists = exists;\n this.exists = this.exists.bind(this);\n }\n\n get eb() {\n return this;\n }\n\n cmp<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n TOperator extends SimpleOperator,\n >(\n field: TSelector,\n op: TOperator,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, TOperator>\n | ParameterReference,\n ): Condition;\n cmp<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n >(\n field: TSelector,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, '='>\n | ParameterReference,\n ): Condition;\n cmp(\n field: string,\n opOrValue: SimpleOperator | ParameterReference | LiteralValue,\n value?: ParameterReference | LiteralValue,\n ): Condition {\n return cmp(field, opOrValue, value);\n }\n\n cmpLit(\n left: ParameterReference | LiteralValue,\n op: SimpleOperator,\n right: ParameterReference | LiteralValue,\n ): Condition {\n return {\n type: 'simple',\n left: isParameterReference(left)\n ? left[toStaticParam]()\n : {type: 'literal', value: left},\n right: isParameterReference(right)\n ? 
right[toStaticParam]()\n : {type: 'literal', value: right},\n op,\n };\n }\n\n and = and;\n or = or;\n not = not;\n\n exists = <TRelationship extends AvailableRelationships<TTable, TSchema>>(\n relationship: TRelationship,\n cb?:\n | ((\n query: Query<TSchema, DestTableName<TTable, TSchema, TRelationship>>,\n ) => Query<TSchema, any>)\n | undefined,\n options?: ExistsOptions | undefined,\n ): Condition => this.#exists(relationship, cb, options);\n}\n\nexport function and(...conditions: (Condition | undefined)[]): Condition {\n const expressions = filterTrue(filterUndefined(conditions));\n\n if (expressions.length === 1) {\n return expressions[0];\n }\n\n if (expressions.some(isAlwaysFalse)) {\n return FALSE;\n }\n\n return {type: 'and', conditions: expressions};\n}\n\nexport function or(...conditions: (Condition | undefined)[]): Condition {\n const expressions = filterFalse(filterUndefined(conditions));\n\n if (expressions.length === 1) {\n return expressions[0];\n }\n\n if (expressions.some(isAlwaysTrue)) {\n return TRUE;\n }\n\n return {type: 'or', conditions: expressions};\n}\n\nexport function not(expression: Condition): Condition {\n switch (expression.type) {\n case 'and':\n return {\n type: 'or',\n conditions: expression.conditions.map(not),\n };\n case 'or':\n return {\n type: 'and',\n conditions: expression.conditions.map(not),\n };\n case 'correlatedSubquery':\n return {\n type: 'correlatedSubquery',\n related: expression.related,\n op: negateOperator(expression.op),\n };\n case 'simple':\n return {\n type: 'simple',\n op: negateOperator(expression.op),\n left: expression.left,\n right: expression.right,\n };\n }\n}\n\nexport function cmp(\n field: string,\n opOrValue: SimpleOperator | ParameterReference | LiteralValue,\n value?: ParameterReference | LiteralValue,\n): Condition {\n let op: SimpleOperator;\n if (value === undefined) {\n value = opOrValue;\n op = '=';\n } else {\n op = opOrValue as SimpleOperator;\n }\n\n return {\n type: 'simple',\n left: {type: 'column', name: field},\n right: isParameterReference(value)\n ? 
value[toStaticParam]()\n : {type: 'literal', value},\n op,\n };\n}\n\nfunction isParameterReference(\n value: ParameterReference | LiteralValue | null,\n): value is ParameterReference {\n return (\n value !== null && typeof value === 'object' && (value as any)[toStaticParam]\n );\n}\n\nexport const TRUE: Condition = {\n type: 'and',\n conditions: [],\n};\n\nconst FALSE: Condition = {\n type: 'or',\n conditions: [],\n};\n\nfunction isAlwaysTrue(condition: Condition): boolean {\n return condition.type === 'and' && condition.conditions.length === 0;\n}\n\nfunction isAlwaysFalse(condition: Condition): boolean {\n return condition.type === 'or' && condition.conditions.length === 0;\n}\n\nexport function simplifyCondition(c: Condition): Condition {\n if (c.type === 'simple' || c.type === 'correlatedSubquery') {\n return c;\n }\n if (c.conditions.length === 1) {\n return simplifyCondition(c.conditions[0]);\n }\n const conditions = flatten(c.type, c.conditions.map(simplifyCondition));\n if (c.type === 'and' && conditions.some(isAlwaysFalse)) {\n return FALSE;\n }\n if (c.type === 'or' && conditions.some(isAlwaysTrue)) {\n return TRUE;\n }\n return {\n type: c.type,\n conditions,\n };\n}\n\nexport function flatten(\n type: 'and' | 'or',\n conditions: readonly Condition[],\n): Condition[] {\n const flattened: Condition[] = [];\n for (const c of conditions) {\n if (c.type === type) {\n flattened.push(...c.conditions);\n } else {\n flattened.push(c);\n }\n }\n\n return flattened;\n}\n\nconst negateSimpleOperatorMap = {\n ['=']: '!=',\n ['!=']: '=',\n ['<']: '>=',\n ['>']: '<=',\n ['>=']: '<',\n ['<=']: '>',\n ['IN']: 'NOT IN',\n ['NOT IN']: 'IN',\n ['LIKE']: 'NOT LIKE',\n ['NOT LIKE']: 'LIKE',\n ['ILIKE']: 'NOT ILIKE',\n ['NOT ILIKE']: 'ILIKE',\n ['IS']: 'IS NOT',\n ['IS NOT']: 'IS',\n} as const;\n\nconst negateOperatorMap = {\n ...negateSimpleOperatorMap,\n ['EXISTS']: 'NOT EXISTS',\n ['NOT EXISTS']: 'EXISTS',\n} as const;\n\nexport function negateOperator<OP extends keyof typeof negateOperatorMap>(\n op: OP,\n): (typeof negateOperatorMap)[OP] {\n return must(negateOperatorMap[op]);\n}\n\nfunction filterUndefined<T>(array: (T | undefined)[]): T[] {\n return array.filter(e => e !== undefined);\n}\n\nfunction filterTrue(conditions: Condition[]): Condition[] {\n return conditions.filter(c => !isAlwaysTrue(c));\n}\n\nfunction filterFalse(conditions: Condition[]): Condition[] {\n return conditions.filter(c => !isAlwaysFalse(c));\n}\n", "import {assertString} from '../../../shared/src/asserts.ts';\nimport type {NonNullValue, SimplePredicateNoNull} from './filter.ts';\n\nexport function getLikePredicate(\n pattern: NonNullValue,\n flags: 'i' | '',\n): SimplePredicateNoNull {\n const op = getLikeOp(String(pattern), flags);\n return (lhs: NonNullValue) => {\n assertString(lhs);\n return op(String(lhs));\n };\n}\n\nfunction getLikeOp(pattern: string, flags: 'i' | ''): (lhs: string) => boolean {\n // if lhs does not contain '%' or '_' then it is a simple string comparison.\n // if it does contain '%' or '_' then it is a regex comparison.\n // '%' is a wildcard for any number of characters\n // '_' is a wildcard for a single character\n // Postgres SQL allows escaping using `\\`.\n\n if (!/_|%|\\\\/.test(pattern)) {\n if (flags === 'i') {\n const rhsLower = pattern.toLowerCase();\n return (lhs: string) => lhs.toLowerCase() === rhsLower;\n }\n return (lhs: string) => lhs === pattern;\n }\n const re = patternToRegExp(pattern, flags);\n return (lhs: string) => re.test(lhs);\n}\n\nconst specialCharsRe = 
/[$()*+.?[\\]\\\\^{|}]/;\n\nfunction patternToRegExp(source: string, flags: '' | 'i' = ''): RegExp {\n // There are a few cases:\n // % => .*\n // _ => .\n // \\x => \\x for any x except special regexp chars\n // special regexp chars => \\special regexp chars\n let pattern = '^';\n for (let i = 0; i < source.length; i++) {\n let c = source[i];\n switch (c) {\n case '%':\n pattern += '.*';\n break;\n case '_':\n pattern += '.';\n break;\n\n // @ts-expect-error fallthrough\n case '\\\\':\n if (i === source.length - 1) {\n throw new Error('LIKE pattern must not end with escape character');\n }\n i++;\n c = source[i];\n\n // fall through\n default:\n if (specialCharsRe.test(c)) {\n pattern += '\\\\';\n }\n pattern += c;\n\n break;\n }\n }\n return new RegExp(pattern + '$', flags + 'm');\n}\n", "import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport type {\n Condition,\n SimpleCondition,\n SimpleOperator,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport {simplifyCondition} from '../query/expression.ts';\nimport {getLikePredicate} from './like.ts';\n\nexport type NonNullValue = Exclude<Value, null | undefined>;\nexport type SimplePredicate = (rhs: Value) => boolean;\nexport type SimplePredicateNoNull = (rhs: NonNullValue) => boolean;\n\nexport type NoSubqueryCondition =\n | SimpleCondition\n | {\n type: 'and';\n conditions: readonly NoSubqueryCondition[];\n }\n | {\n type: 'or';\n conditions: readonly NoSubqueryCondition[];\n };\n\nexport function createPredicate(\n condition: NoSubqueryCondition,\n): (row: Row) => boolean {\n if (condition.type !== 'simple') {\n const predicates = condition.conditions.map(c => createPredicate(c));\n return condition.type === 'and'\n ? 
(row: Row) => {\n // and\n for (const predicate of predicates) {\n if (!predicate(row)) {\n return false;\n }\n }\n return true;\n }\n : (row: Row) => {\n // or\n for (const predicate of predicates) {\n if (predicate(row)) {\n return true;\n }\n }\n return false;\n };\n }\n const {left} = condition;\n const {right} = condition;\n assert(\n right.type !== 'static',\n 'static values should be resolved before creating predicates',\n );\n assert(\n left.type !== 'static',\n 'static values should be resolved before creating predicates',\n );\n\n switch (condition.op) {\n case 'IS':\n case 'IS NOT': {\n const impl = createIsPredicate(right.value, condition.op);\n if (left.type === 'literal') {\n const result = impl(left.value);\n return () => result;\n }\n return (row: Row) => impl(row[left.name]);\n }\n }\n\n if (right.value === null || right.value === undefined) {\n return (_row: Row) => false;\n }\n\n const impl = createPredicateImpl(right.value, condition.op);\n if (left.type === 'literal') {\n if (left.value === null || left.value === undefined) {\n return (_row: Row) => false;\n }\n const result = impl(left.value);\n return () => result;\n }\n\n return (row: Row) => {\n const lhs = row[left.name];\n if (lhs === null || lhs === undefined) {\n return false;\n }\n return impl(lhs);\n };\n}\n\nfunction createIsPredicate(\n rhs: Value | readonly Value[],\n operator: 'IS' | 'IS NOT',\n): SimplePredicate {\n switch (operator) {\n case 'IS':\n return lhs => lhs === rhs;\n case 'IS NOT':\n return lhs => lhs !== rhs;\n }\n}\n\nfunction createPredicateImpl(\n rhs: NonNullValue | readonly NonNullValue[],\n operator: Exclude<SimpleOperator, 'IS' | 'IS NOT'>,\n): SimplePredicateNoNull {\n switch (operator) {\n case '=':\n return lhs => lhs === rhs;\n case '!=':\n return lhs => lhs !== rhs;\n case '<':\n return lhs => lhs < rhs;\n case '<=':\n return lhs => lhs <= rhs;\n case '>':\n return lhs => lhs > rhs;\n case '>=':\n return lhs => lhs >= rhs;\n case 'LIKE':\n return getLikePredicate(rhs, '');\n case 'NOT LIKE':\n return not(getLikePredicate(rhs, ''));\n case 'ILIKE':\n return getLikePredicate(rhs, 'i');\n case 'NOT ILIKE':\n return not(getLikePredicate(rhs, 'i'));\n case 'IN': {\n assert(Array.isArray(rhs));\n const set = new Set(rhs);\n return lhs => set.has(lhs);\n }\n case 'NOT IN': {\n assert(Array.isArray(rhs));\n const set = new Set(rhs);\n return lhs => !set.has(lhs);\n }\n default:\n operator satisfies never;\n throw new Error(`Unexpected operator: ${operator}`);\n }\n}\n\nfunction not<T>(f: (lhs: T) => boolean) {\n return (lhs: T) => !f(lhs);\n}\n\n/**\n * If the condition contains any CorrelatedSubqueryConditions, returns a\n * transformed condition which contains no CorrelatedSubqueryCondition(s) but\n * which will filter a subset of the rows that would be filtered by the original\n * condition, or undefined if no such transformation exists.\n *\n * If the condition does not contain any CorrelatedSubqueryConditions\n * returns the condition unmodified and `conditionsRemoved: false`.\n */\nexport function transformFilters(filters: Condition | undefined): {\n filters: NoSubqueryCondition | undefined;\n conditionsRemoved: boolean;\n} {\n if (!filters) {\n return {filters: undefined, conditionsRemoved: false};\n }\n switch (filters.type) {\n case 'simple':\n return {filters, conditionsRemoved: false};\n case 'correlatedSubquery':\n return {filters: undefined, conditionsRemoved: true};\n case 'and':\n case 'or': {\n const transformedConditions: NoSubqueryCondition[] = [];\n let 
conditionsRemoved = false;\n for (const cond of filters.conditions) {\n const transformed = transformFilters(cond);\n // If any branch of the OR ends up empty, the entire OR needs\n // to be removed.\n if (transformed.filters === undefined && filters.type === 'or') {\n return {filters: undefined, conditionsRemoved: true};\n }\n conditionsRemoved = conditionsRemoved || transformed.conditionsRemoved;\n if (transformed.filters) {\n transformedConditions.push(transformed.filters);\n }\n }\n return {\n filters: simplifyCondition({\n type: filters.type,\n conditions: transformedConditions,\n }) as NoSubqueryCondition,\n conditionsRemoved,\n };\n }\n default:\n unreachable(filters);\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport type {JSONValue} from '../../../shared/src/json.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {\n AST,\n ColumnReference,\n CompoundKey,\n Condition,\n Conjunction,\n CorrelatedSubquery,\n CorrelatedSubqueryCondition,\n Disjunction,\n LiteralValue,\n Ordering,\n Parameter,\n SimpleCondition,\n ValuePosition,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport {Exists} from '../ivm/exists.ts';\nimport {FanIn} from '../ivm/fan-in.ts';\nimport {FanOut} from '../ivm/fan-out.ts';\nimport {\n buildFilterPipeline,\n type FilterInput,\n} from '../ivm/filter-operators.ts';\nimport {Filter} from '../ivm/filter.ts';\nimport {FlippedJoin} from '../ivm/flipped-join.ts';\nimport {Join} from '../ivm/join.ts';\nimport type {Input, InputBase, Storage} from '../ivm/operator.ts';\nimport {Skip} from '../ivm/skip.ts';\nimport type {Source, SourceInput} from '../ivm/source.ts';\nimport {Take} from '../ivm/take.ts';\nimport {UnionFanIn} from '../ivm/union-fan-in.ts';\nimport {UnionFanOut} from '../ivm/union-fan-out.ts';\nimport type {DebugDelegate} from './debug-delegate.ts';\nimport {createPredicate, type NoSubqueryCondition} from './filter.ts';\n\nexport type StaticQueryParameters = {\n authData: Record<string, JSONValue>;\n preMutationRow?: Row | undefined;\n};\n\n/**\n * Interface required of caller to buildPipeline. Connects to constructed\n * pipeline to delegate environment to provide sources and storage.\n */\nexport interface BuilderDelegate {\n readonly applyFiltersAnyway?: boolean | undefined;\n readonly debug?: DebugDelegate | undefined;\n\n /**\n * Called once for each source needed by the AST.\n * Might be called multiple times with same tableName. It is OK to return\n * same storage instance in that case.\n */\n getSource(tableName: string): Source | undefined;\n\n /**\n * Called once for each operator that requires storage. Should return a new\n * unique storage object for each call.\n */\n createStorage(name: string): Storage;\n\n decorateInput(input: Input, name: string): Input;\n\n addEdge(source: InputBase, dest: InputBase): void;\n\n decorateFilterInput(input: FilterInput, name: string): FilterInput;\n\n decorateSourceInput(input: SourceInput, queryID: string): Input;\n\n /**\n * The AST is mapped on-the-wire between client and server names.\n *\n * There is no \"wire\" for zqlite tests so this function is provided\n * to allow tests to remap the AST.\n */\n mapAst?: ((ast: AST) => AST) | undefined;\n}\n\n/**\n * Builds a pipeline from an AST. 
Caller must provide a delegate to create source\n * and storage interfaces as necessary.\n *\n * Usage:\n *\n * ```ts\n * class MySink implements Output {\n * readonly #input: Input;\n *\n * constructor(input: Input) {\n * this.#input = input;\n * input.setOutput(this);\n * }\n *\n * push(change: Change, _: Operator) {\n * console.log(change);\n * }\n * }\n *\n * const input = buildPipeline(ast, myDelegate, hash(ast));\n * const sink = new MySink(input);\n * ```\n */\nexport function buildPipeline(\n ast: AST,\n delegate: BuilderDelegate,\n queryID: string,\n): Input {\n ast = delegate.mapAst ? delegate.mapAst(ast) : ast;\n return buildPipelineInternal(ast, delegate, queryID, '');\n}\n\nexport function bindStaticParameters(\n ast: AST,\n staticQueryParameters: StaticQueryParameters | undefined,\n) {\n const visit = (node: AST): AST => ({\n ...node,\n where: node.where ? bindCondition(node.where) : undefined,\n related: node.related?.map(sq => ({\n ...sq,\n subquery: visit(sq.subquery),\n })),\n });\n\n function bindCondition(condition: Condition): Condition {\n if (condition.type === 'simple') {\n return {\n ...condition,\n left: bindValue(condition.left),\n right: bindValue(condition.right) as Exclude<\n ValuePosition,\n ColumnReference\n >,\n };\n }\n if (condition.type === 'correlatedSubquery') {\n return {\n ...condition,\n related: {\n ...condition.related,\n subquery: visit(condition.related.subquery),\n },\n };\n }\n return {\n ...condition,\n conditions: condition.conditions.map(bindCondition),\n };\n }\n\n const bindValue = (value: ValuePosition): ValuePosition => {\n if (isParameter(value)) {\n const anchor = must(\n staticQueryParameters,\n 'Static query params do not exist',\n )[value.anchor];\n const resolvedValue = resolveField(anchor, value.field);\n return {\n type: 'literal',\n value: resolvedValue as LiteralValue,\n };\n }\n return value;\n };\n\n return visit(ast);\n}\n\nfunction resolveField(\n anchor: Record<string, JSONValue> | Row | undefined,\n field: string | string[],\n): unknown {\n if (anchor === undefined) {\n return null;\n }\n\n if (Array.isArray(field)) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return field.reduce((acc, f) => (acc as any)?.[f], anchor) ?? null;\n }\n\n return anchor[field] ?? null;\n}\n\nfunction isParameter(value: ValuePosition): value is Parameter {\n return value.type === 'static';\n}\n\nconst EXISTS_LIMIT = 3;\nconst PERMISSIONS_EXISTS_LIMIT = 1;\n\nfunction buildPipelineInternal(\n ast: AST,\n delegate: BuilderDelegate,\n queryID: string,\n name: string,\n partitionKey?: CompoundKey | undefined,\n): Input {\n const source = delegate.getSource(ast.table);\n if (!source) {\n throw new Error(`Source not found: ${ast.table}`);\n }\n ast = uniquifyCorrelatedSubqueryConditionAliases(ast);\n\n const csqConditions = gatherCorrelatedSubqueryQueryConditions(ast.where);\n const splitEditKeys: Set<string> = partitionKey\n ? 
new Set(partitionKey)\n : new Set();\n const aliases = new Set<string>();\n for (const csq of csqConditions) {\n aliases.add(csq.related.subquery.alias || '');\n for (const key of csq.related.correlation.parentField) {\n splitEditKeys.add(key);\n }\n }\n if (ast.related) {\n for (const csq of ast.related) {\n for (const key of csq.correlation.parentField) {\n splitEditKeys.add(key);\n }\n }\n }\n const conn = source.connect(\n must(ast.orderBy),\n ast.where,\n splitEditKeys,\n delegate.debug,\n );\n\n let end: Input = delegate.decorateSourceInput(conn, queryID);\n end = delegate.decorateInput(end, `${name}:source(${ast.table})`);\n const {fullyAppliedFilters} = conn;\n\n if (ast.start) {\n const skip = new Skip(end, ast.start);\n delegate.addEdge(end, skip);\n end = delegate.decorateInput(skip, `${name}:skip)`);\n }\n\n for (const csqCondition of csqConditions) {\n // flipped EXISTS are handled in applyWhere\n if (!csqCondition.flip) {\n end = applyCorrelatedSubQuery(\n {\n ...csqCondition.related,\n subquery: {\n ...csqCondition.related.subquery,\n limit:\n csqCondition.related.system === 'permissions'\n ? PERMISSIONS_EXISTS_LIMIT\n : EXISTS_LIMIT,\n },\n },\n delegate,\n queryID,\n end,\n name,\n true,\n );\n }\n }\n\n if (ast.where && (!fullyAppliedFilters || delegate.applyFiltersAnyway)) {\n end = applyWhere(end, ast.where, delegate, name);\n }\n\n if (ast.limit !== undefined) {\n const takeName = `${name}:take`;\n const take = new Take(\n end,\n delegate.createStorage(takeName),\n ast.limit,\n partitionKey,\n );\n delegate.addEdge(end, take);\n end = delegate.decorateInput(take, takeName);\n }\n\n if (ast.related) {\n for (const csq of ast.related) {\n end = applyCorrelatedSubQuery(csq, delegate, queryID, end, name, false);\n }\n }\n\n return end;\n}\n\nfunction applyWhere(\n input: Input,\n condition: Condition,\n delegate: BuilderDelegate,\n name: string,\n): Input {\n if (!conditionIncludesFlippedSubqueryAtAnyLevel(condition)) {\n return buildFilterPipeline(input, delegate, filterInput =>\n applyFilter(filterInput, condition, delegate, name),\n );\n }\n\n return applyFilterWithFlips(input, condition, delegate, name);\n}\n\nfunction applyFilterWithFlips(\n input: Input,\n condition: Condition,\n delegate: BuilderDelegate,\n name: string,\n): Input {\n let end = input;\n assert(condition.type !== 'simple', 'Simple conditions cannot have flips');\n\n switch (condition.type) {\n case 'and': {\n const [withFlipped, withoutFlipped] = partitionBranches(\n condition.conditions,\n conditionIncludesFlippedSubqueryAtAnyLevel,\n );\n if (withoutFlipped.length > 0) {\n end = buildFilterPipeline(input, delegate, filterInput =>\n applyAnd(\n filterInput,\n {\n type: 'and',\n conditions: withoutFlipped,\n },\n delegate,\n name,\n ),\n );\n }\n assert(withFlipped.length > 0, 'Impossible to have no flips here');\n for (const cond of withFlipped) {\n end = applyFilterWithFlips(end, cond, delegate, name);\n }\n break;\n }\n case 'or': {\n const [withFlipped, withoutFlipped] = partitionBranches(\n condition.conditions,\n conditionIncludesFlippedSubqueryAtAnyLevel,\n );\n assert(withFlipped.length > 0, 'Impossible to have no flips here');\n\n const ufo = new UnionFanOut(end);\n delegate.addEdge(end, ufo);\n end = delegate.decorateInput(ufo, `${name}:ufo`);\n\n const branches: Input[] = [];\n if (withoutFlipped.length > 0) {\n branches.push(\n buildFilterPipeline(end, delegate, filterInput =>\n applyOr(\n filterInput,\n {\n type: 'or',\n conditions: withoutFlipped,\n },\n delegate,\n name,\n ),\n ),\n 
);\n }\n\n for (const cond of withFlipped) {\n branches.push(applyFilterWithFlips(end, cond, delegate, name));\n }\n\n const ufi = new UnionFanIn(ufo, branches);\n for (const branch of branches) {\n delegate.addEdge(branch, ufi);\n }\n end = delegate.decorateInput(ufi, `${name}:ufi`);\n\n break;\n }\n case 'correlatedSubquery': {\n const sq = condition.related;\n const child = buildPipelineInternal(\n sq.subquery,\n delegate,\n '',\n `${name}.${sq.subquery.alias}`,\n sq.correlation.childField,\n );\n const flippedJoin = new FlippedJoin({\n parent: end,\n child,\n parentKey: sq.correlation.parentField,\n childKey: sq.correlation.childField,\n relationshipName: must(\n sq.subquery.alias,\n 'Subquery must have an alias',\n ),\n hidden: sq.hidden ?? false,\n system: sq.system ?? 'client',\n });\n delegate.addEdge(end, flippedJoin);\n delegate.addEdge(child, flippedJoin);\n end = delegate.decorateInput(\n flippedJoin,\n `${name}:flipped-join(${sq.subquery.alias})`,\n );\n break;\n }\n }\n\n return end;\n}\n\nfunction applyFilter(\n input: FilterInput,\n condition: Condition,\n delegate: BuilderDelegate,\n name: string,\n) {\n switch (condition.type) {\n case 'and':\n return applyAnd(input, condition, delegate, name);\n case 'or':\n return applyOr(input, condition, delegate, name);\n case 'correlatedSubquery':\n return applyCorrelatedSubqueryCondition(input, condition, delegate, name);\n case 'simple':\n return applySimpleCondition(input, delegate, condition);\n }\n}\n\nfunction applyAnd(\n input: FilterInput,\n condition: Conjunction,\n delegate: BuilderDelegate,\n name: string,\n): FilterInput {\n for (const subCondition of condition.conditions) {\n input = applyFilter(input, subCondition, delegate, name);\n }\n return input;\n}\n\nexport function applyOr(\n input: FilterInput,\n condition: Disjunction,\n delegate: BuilderDelegate,\n name: string,\n): FilterInput {\n const [subqueryConditions, otherConditions] =\n groupSubqueryConditions(condition);\n // if there are no subquery conditions, no fan-in / fan-out is needed\n if (subqueryConditions.length === 0) {\n const filter = new Filter(\n input,\n createPredicate({\n type: 'or',\n conditions: otherConditions,\n }),\n );\n delegate.addEdge(input, filter);\n return filter;\n }\n\n const fanOut = new FanOut(input);\n delegate.addEdge(input, fanOut);\n const branches = subqueryConditions.map(subCondition =>\n applyFilter(fanOut, subCondition, delegate, name),\n );\n if (otherConditions.length > 0) {\n const filter = new Filter(\n fanOut,\n createPredicate({\n type: 'or',\n conditions: otherConditions,\n }),\n );\n delegate.addEdge(fanOut, filter);\n branches.push(filter);\n }\n const ret = new FanIn(fanOut, branches);\n for (const branch of branches) {\n delegate.addEdge(branch, ret);\n }\n fanOut.setFanIn(ret);\n return ret;\n}\n\nexport function groupSubqueryConditions(condition: Disjunction) {\n const partitioned: [\n subqueryConditions: Condition[],\n otherConditions: NoSubqueryCondition[],\n ] = [[], []];\n for (const subCondition of condition.conditions) {\n if (isNotAndDoesNotContainSubquery(subCondition)) {\n partitioned[1].push(subCondition);\n } else {\n partitioned[0].push(subCondition);\n }\n }\n return partitioned;\n}\n\nexport function isNotAndDoesNotContainSubquery(\n condition: Condition,\n): condition is NoSubqueryCondition {\n if (condition.type === 'correlatedSubquery') {\n return false;\n }\n if (condition.type === 'simple') {\n return true;\n }\n return 
condition.conditions.every(isNotAndDoesNotContainSubquery);\n}\n\nfunction applySimpleCondition(\n input: FilterInput,\n delegate: BuilderDelegate,\n condition: SimpleCondition,\n): FilterInput {\n const filter = new Filter(input, createPredicate(condition));\n delegate.decorateFilterInput(\n filter,\n `${valuePosName(condition.left)}:${condition.op}:${valuePosName(condition.right)}`,\n );\n delegate.addEdge(input, filter);\n return filter;\n}\n\nfunction valuePosName(left: ValuePosition) {\n switch (left.type) {\n case 'static':\n return left.field;\n case 'literal':\n return left.value;\n case 'column':\n return left.name;\n }\n}\n\nfunction applyCorrelatedSubQuery(\n sq: CorrelatedSubquery,\n delegate: BuilderDelegate,\n queryID: string,\n end: Input,\n name: string,\n fromCondition: boolean,\n) {\n // TODO: we only omit the join if the CSQ if from a condition since\n // we want to create an empty array for `related` fields that are `limit(0)`\n if (sq.subquery.limit === 0 && fromCondition) {\n return end;\n }\n\n assert(sq.subquery.alias, 'Subquery must have an alias');\n const child = buildPipelineInternal(\n sq.subquery,\n delegate,\n queryID,\n `${name}.${sq.subquery.alias}`,\n sq.correlation.childField,\n );\n\n const joinName = `${name}:join(${sq.subquery.alias})`;\n const join = new Join({\n parent: end,\n child,\n storage: delegate.createStorage(joinName),\n parentKey: sq.correlation.parentField,\n childKey: sq.correlation.childField,\n relationshipName: sq.subquery.alias,\n hidden: sq.hidden ?? false,\n system: sq.system ?? 'client',\n });\n delegate.addEdge(end, join);\n delegate.addEdge(child, join);\n return delegate.decorateInput(join, joinName);\n}\n\nfunction applyCorrelatedSubqueryCondition(\n input: FilterInput,\n condition: CorrelatedSubqueryCondition,\n delegate: BuilderDelegate,\n name: string,\n): FilterInput {\n assert(condition.op === 'EXISTS' || condition.op === 'NOT EXISTS');\n if (condition.related.subquery.limit === 0) {\n if (condition.op === 'EXISTS') {\n const filter = new Filter(input, () => false);\n delegate.addEdge(input, filter);\n return filter;\n }\n const filter = new Filter(input, () => true);\n delegate.addEdge(input, filter);\n return filter;\n }\n const existsName = `${name}:exists(${condition.related.subquery.alias})`;\n const exists = new Exists(\n input,\n delegate.createStorage(existsName),\n must(condition.related.subquery.alias),\n condition.related.correlation.parentField,\n condition.op,\n );\n delegate.addEdge(input, exists);\n return delegate.decorateFilterInput(exists, existsName);\n}\n\nfunction gatherCorrelatedSubqueryQueryConditions(\n condition: Condition | undefined,\n) {\n const csqs: CorrelatedSubqueryCondition[] = [];\n const gather = (condition: Condition) => {\n if (condition.type === 'correlatedSubquery') {\n csqs.push(condition);\n return;\n }\n if (condition.type === 'and' || condition.type === 'or') {\n for (const c of condition.conditions) {\n gather(c);\n }\n return;\n }\n };\n if (condition) {\n gather(condition);\n }\n return csqs;\n}\n\nexport function assertOrderingIncludesPK(\n ordering: Ordering,\n pk: PrimaryKey,\n): void {\n // eslint-disable-next-line unicorn/prefer-set-has -- Array is more appropriate here for small collections\n const orderingFields = ordering.map(([field]) => field);\n const missingFields = pk.filter(pkField => !orderingFields.includes(pkField));\n\n if (missingFields.length > 0) {\n throw new Error(\n `Ordering must include all primary key fields. 
Missing: ${missingFields.join(\n ', ',\n )}. ZQL automatically appends primary key fields to the ordering if they are missing \n so a common cause of this error is a casing mismatch between Postgres and ZQL.\n E.g., \"userid\" vs \"userID\".\n You may want to add double-quotes around your Postgres column names to prevent Postgres from lower-casing them:\n https://www.postgresql.org/docs/current/sql-syntax-lexical.htm`,\n );\n }\n}\n\nfunction uniquifyCorrelatedSubqueryConditionAliases(ast: AST): AST {\n if (!ast.where) {\n return ast;\n }\n const {where} = ast;\n if (where.type !== 'and' && where.type !== 'or') {\n return ast;\n }\n\n let count = 0;\n const uniquifyCorrelatedSubquery = (csqc: CorrelatedSubqueryCondition) => ({\n ...csqc,\n related: {\n ...csqc.related,\n subquery: {\n ...csqc.related.subquery,\n alias: (csqc.related.subquery.alias ?? '') + '_' + count++,\n },\n },\n });\n\n const uniquify = (cond: Condition): Condition => {\n if (cond.type === 'simple') {\n return cond;\n } else if (cond.type === 'correlatedSubquery') {\n return uniquifyCorrelatedSubquery(cond);\n }\n const conditions = [];\n for (const c of cond.conditions) {\n conditions.push(uniquify(c));\n }\n return {\n type: cond.type,\n conditions,\n };\n };\n\n const result = {\n ...ast,\n where: uniquify(where),\n };\n return result;\n}\n\nexport function conditionIncludesFlippedSubqueryAtAnyLevel(\n cond: Condition,\n): boolean {\n if (cond.type === 'correlatedSubquery') {\n return !!cond.flip;\n }\n if (cond.type === 'and' || cond.type === 'or') {\n return cond.conditions.some(c =>\n conditionIncludesFlippedSubqueryAtAnyLevel(c),\n );\n }\n return false;\n}\n\nexport function partitionBranches(\n conditions: readonly Condition[],\n predicate: (c: Condition) => boolean,\n) {\n const matched: Condition[] = [];\n const notMatched: Condition[] = [];\n for (const c of conditions) {\n if (predicate(c)) {\n matched.push(c);\n } else {\n notMatched.push(c);\n }\n }\n return [matched, notMatched] as const;\n}\n", "export class NotImplementedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'NotImplementedError';\n }\n}\n", "import {assert} from '../../../shared/src/asserts.ts';\nimport type {Immutable} from '../../../shared/src/immutable.ts';\nimport type {ErroredQuery} from '../../../zero-protocol/src/custom-queries.ts';\nimport type {TTL} from '../query/ttl.ts';\nimport type {Listener, ResultType, TypedView} from '../query/typed-view.ts';\nimport type {Change} from './change.ts';\nimport type {Input, Output} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {applyChange} from './view-apply-change.ts';\nimport type {Entry, Format, View} from './view.ts';\n\n/**\n * Implements a materialized view of the output of an operator.\n *\n * It might seem more efficient to use an immutable b-tree for the\n * materialization, but it's not so clear. 
Inserts in the middle are\n * asymptotically slower in an array, but can often be done with zero\n * allocations, where changes to the b-tree will often require several allocs.\n *\n * Also the plain array view is more convenient for consumers since you can dump\n * it into console to see what it is, rather than having to iterate it.\n */\nexport class ArrayView<V extends View> implements Output, TypedView<V> {\n readonly #input: Input;\n readonly #listeners = new Set<Listener<V>>();\n readonly #schema: SourceSchema;\n readonly #format: Format;\n\n // Synthetic \"root\" entry that has a single \"\" relationship, so that we can\n // treat all changes, including the root change, generically.\n readonly #root: Entry;\n\n onDestroy: (() => void) | undefined;\n\n #dirty = false;\n #resultType: ResultType = 'unknown';\n #error: ErroredQuery | undefined;\n readonly #updateTTL: (ttl: TTL) => void;\n\n constructor(\n input: Input,\n format: Format,\n queryComplete: true | ErroredQuery | Promise<true>,\n updateTTL: (ttl: TTL) => void,\n ) {\n this.#input = input;\n this.#schema = input.getSchema();\n this.#format = format;\n this.#updateTTL = updateTTL;\n this.#root = {'': format.singular ? undefined : []};\n input.setOutput(this);\n\n if (queryComplete === true) {\n this.#resultType = 'complete';\n } else if ('error' in queryComplete) {\n this.#resultType = 'error';\n this.#error = queryComplete;\n } else {\n void queryComplete\n .then(() => {\n this.#resultType = 'complete';\n this.#fireListeners();\n })\n .catch(e => {\n this.#resultType = 'error';\n this.#error = e;\n this.#fireListeners();\n });\n }\n this.#hydrate();\n }\n\n get data() {\n return this.#root[''] as V;\n }\n\n addListener(listener: Listener<V>) {\n assert(!this.#listeners.has(listener), 'Listener already registered');\n this.#listeners.add(listener);\n\n this.#fireListener(listener);\n\n return () => {\n this.#listeners.delete(listener);\n };\n }\n\n #fireListeners() {\n for (const listener of this.#listeners) {\n this.#fireListener(listener);\n }\n }\n\n #fireListener(listener: Listener<V>) {\n listener(this.data as Immutable<V>, this.#resultType, this.#error);\n }\n\n destroy() {\n this.onDestroy?.();\n }\n\n #hydrate() {\n this.#dirty = true;\n for (const node of this.#input.fetch({})) {\n applyChange(\n this.#root,\n {type: 'add', node},\n this.#schema,\n '',\n this.#format,\n );\n }\n this.flush();\n }\n\n push(change: Change): void {\n this.#dirty = true;\n applyChange(this.#root, change, this.#schema, '', this.#format);\n }\n\n flush() {\n if (!this.#dirty) {\n return;\n }\n this.#dirty = false;\n this.#fireListeners();\n }\n\n updateTTL(ttl: TTL) {\n this.#updateTTL(ttl);\n }\n}\n", "import {unreachable} from '../../../shared/src/asserts.ts';\nimport type {Condition} from '../../../zero-protocol/src/ast.ts';\n\n/**\n * Checks if a condition contains any NOT EXISTS operations.\n *\n * The client-side query engine cannot support NOT EXISTS operations because:\n *\n * 1. Zero only syncs a subset of data to the client, defined by the queries you use\n * 2. On the client, we can't distinguish between a row not existing at all vs.\n * a row not being synced to the client\n * 3. 
For NOT EXISTS to work correctly, we would need complete knowledge of what\n * doesn't exist, which is not reasonable with the partial sync model\n *\n * @param condition The condition to check\n * @throws Error if the condition uses NOT EXISTS operator\n */\nexport function assertNoNotExists(condition: Condition): void {\n switch (condition.type) {\n case 'simple':\n // Simple conditions don't use EXISTS/NOT EXISTS\n return;\n\n case 'correlatedSubquery':\n if (condition.op === 'NOT EXISTS') {\n throw new Error(\n 'not(exists()) is not supported on the client - see https://bugs.rocicorp.dev/issue/3438',\n );\n }\n // Check if the subquery has a where condition\n if (condition.related.subquery.where) {\n assertNoNotExists(condition.related.subquery.where);\n }\n return;\n\n case 'and':\n case 'or':\n for (const c of condition.conditions) {\n assertNoNotExists(c);\n }\n return;\n default:\n unreachable(condition);\n }\n}\n"],
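The `UnionFanIn.fetch` shown in the embedded sources above merges the already-sorted node streams from each branch with `mergeIterables`. The following is a minimal two-way sketch of that kind of sorted merge, not the library's implementation; the real helper in shared/src/iterables.ts merges N streams, and its boolean argument is assumed here to mean that equal rows arriving from several branches are emitted once.

function* merge2<T>(
  a: Iterable<T>,
  b: Iterable<T>,
  cmp: (l: T, r: T) => number,
): IterableIterator<T> {
  const ia = a[Symbol.iterator]();
  const ib = b[Symbol.iterator]();
  let ra = ia.next();
  let rb = ib.next();
  while (!ra.done && !rb.done) {
    const c = cmp(ra.value, rb.value);
    if (c === 0) {
      // The same row can be present in several branches; emit it once.
      yield ra.value;
      ra = ia.next();
      rb = ib.next();
    } else if (c < 0) {
      yield ra.value;
      ra = ia.next();
    } else {
      yield rb.value;
      rb = ib.next();
    }
  }
  for (; !ra.done; ra = ia.next()) yield ra.value;
  for (; !rb.done; rb = ib.next()) yield rb.value;
}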
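The add/remove rule documented on `#pushInternalChange` can be restated outside the operator graph. A sketch under simplified types: `Branch` and `shouldForward` are hypothetical names, standing in for an input operator and the probe it performs via `fetch({constraint})` on the primary-key columns.

type Row = Record<string, unknown>;

interface Branch {
  // Rows this branch currently produces for the given primary-key values.
  fetchByKey(key: Row): Row[];
}

// Forward an add/remove only if no *other* branch has the row: if a
// sibling has it, the add was already emitted earlier, and a remove
// must wait for the last branch that still holds the row.
function shouldForward(branches: Branch[], pusher: Branch, key: Row): boolean {
  for (const branch of branches) {
    if (branch === pusher) continue;
    if (branch.fetchByKey(key).length > 0) {
      return false; // another branch still has the row
    }
  }
  return true;
}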
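`not` in expression.ts does no evaluation; it rewrites the condition tree, swapping `and`/`or` per De Morgan's laws and negating each leaf operator through `negateOperatorMap` ('=' ↔ '!=', '<' ↔ '>=', 'EXISTS' ↔ 'NOT EXISTS', and so on). A reduced sketch with toy types:

type Cond =
  | {type: 'simple'; left: string; op: string; right: unknown}
  | {type: 'and' | 'or'; conditions: Cond[]};

const negate: Record<string, string> = {
  '=': '!=', '!=': '=', '<': '>=', '>': '<=', '>=': '<', '<=': '>',
};

function notCond(c: Cond): Cond {
  if (c.type === 'simple') {
    return {...c, op: negate[c.op]};
  }
  // NOT (a AND b) => (NOT a) OR (NOT b), and vice versa.
  return {
    type: c.type === 'and' ? 'or' : 'and',
    conditions: c.conditions.map(notCond),
  };
}

// notCond(and(age > 18, name = 'Alice'))
// => or(age <= 18, name != 'Alice')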
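A compact restatement of `patternToRegExp` from like.ts: `%` becomes `.*`, `_` becomes `.`, a backslash takes the next character literally, regex metacharacters are escaped, and the pattern is anchored on both ends. This sketch drops the original's trailing `m` flag for brevity.

function likeToRegExp(pattern: string, flags: '' | 'i' = ''): RegExp {
  let out = '^';
  for (let i = 0; i < pattern.length; i++) {
    let c = pattern[i];
    if (c === '%') { out += '.*'; continue; }
    if (c === '_') { out += '.'; continue; }
    if (c === '\\') {
      if (i === pattern.length - 1) {
        throw new Error('LIKE pattern must not end with escape character');
      }
      c = pattern[++i]; // take the escaped character literally
    }
    // Escape regex metacharacters so they match themselves.
    out += /[$()*+.?[\]\\^{|}]/.test(c) ? '\\' + c : c;
  }
  return new RegExp(out + '$', flags);
}

// likeToRegExp('a%_z') matches 'abcz' and 'a_z' but not 'az'.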
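The predicate construction in filter.ts follows SQL-style three-valued logic: an ordinary comparison where either side is `null`/`undefined` never matches, while `IS` and `IS NOT` compare `null` as a plain value. A reduced sketch covering only `=` and `!=`:

type Value = string | number | boolean | null | undefined;

function cmpPredicate(op: '=' | '!=' | 'IS' | 'IS NOT', rhs: Value) {
  if (op === 'IS') return (lhs: Value) => lhs === rhs;
  if (op === 'IS NOT') return (lhs: Value) => lhs !== rhs;
  // Any ordinary comparison against null/undefined is unknown => false.
  if (rhs === null || rhs === undefined) return (_lhs: Value) => false;
  return (lhs: Value) => {
    if (lhs === null || lhs === undefined) return false;
    return op === '=' ? lhs === rhs : lhs !== rhs;
  };
}

// cmpPredicate('=', null)(null)  => false  (null = null is unknown in SQL)
// cmpPredicate('IS', null)(null) => true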
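The asymmetry in `transformFilters` is worth spelling out: dropping a subquery conjunct from an AND only lets more rows through, which is acceptable for this pre-filtering use, but dropping one branch of an OR would block rows that branch would have passed, so the whole OR must be removed. A toy sketch (`strip` is a hypothetical name; the `conditionsRemoved` bookkeeping is omitted):

type C =
  | {type: 'simple'}
  | {type: 'subquery'}
  | {type: 'and' | 'or'; conditions: C[]};

function strip(c: C): C | undefined {
  switch (c.type) {
    case 'simple':
      return c;
    case 'subquery':
      return undefined; // cannot be evaluated by a plain row filter
    case 'and': {
      // Widening an AND is safe: it can only let extra rows through.
      const kept = c.conditions
        .map(strip)
        .filter((x): x is C => x !== undefined);
      return {type: 'and', conditions: kept};
    }
    case 'or': {
      const kept = c.conditions.map(strip);
      // A dropped OR branch would wrongly exclude the rows it accepts,
      // so the whole OR has to go.
      if (kept.some(x => x === undefined)) {
        return undefined;
      }
      return {type: 'or', conditions: kept as C[]};
    }
  }
}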
5
5
  "mappings": ";;;;;;;;;;;;;;;;;;;;AAGO,IAAM,OAAO;AAEb,IAAM,KAAK;AAEX,IAAM,KAAK;AACX,IAAM,SAAS;;;ACRtB;AAAA;AAAA,gBAAAA;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA;AAFA,YAAY,OAAO;AAEnB,6BAAc;AAEd,SAAS,UAAU,OAAwB;AACzC,UAAQ,OAAO,OAAO;AAAA,IACpB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,KAAK,UAAU,KAAK;AAAA,IAC7B,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO,MAAM,SAAS,IAAI;AAAA,IAC5B;AACE,UAAI,UAAU,MAAM;AAClB,eAAO;AAAA,MACT;AACA,UAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,eAAO;AAAA,MACT;AACA,aAAO,OAAO;AAAA,EAClB;AACF;AAIA,SAAS,gBAAgBC,IAAYC,OAAiC;AACpE,MAAI,CAACA,OAAM,QAAQ;AACjB,WAAO,UAAUD,EAAC;AAAA,EACpB;AAEA,MAAI,MAAMA;AACV,aAAW,KAAKC,OAAM;AAEpB,UAAO,IAAY,CAAC;AAAA,EACtB;AACA,SAAO,UAAU,GAAG;AACtB;AAEA,SAAS,YACP,MACA,UACAC,aAAuC,OAAK,OAAO,CAAC,GACnC;AACjB,MAAI,SAAS,WAAW,GAAG;AACzB,WAAOA,WAAU,SAAS,CAAC,CAAC;AAAA,EAC9B;AAEA,QAAM,SAAS,GAAGA;AAAA,IAChB,SAAS,SAAS,SAAS,CAAC;AAAA,EAC9B,CAAC,IAAI,IAAI,IAAIA,WAAU,SAAS,SAAS,SAAS,CAAC,CAAC,CAAC;AACrD,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO;AAAA,EACT;AACA,SAAO,GAAG,SAAS,MAAM,GAAG,EAAE,EAAE,IAAIA,UAAS,EAAE,KAAK,IAAI,CAAC,KAAK,MAAM;AACtE;AAEA,SAAS,WACPC,MACAH,IACA,QACA,MACQ;AACR,QAAM,aAAaG,KAAI,OAAO,CAAC;AAC/B,QAAM,EAAC,MAAAF,MAAI,IAAI;AACf,QAAM,SAASA,OAAM,SAAS,OAAOA,MAAK,KAAK,GAAG,CAAC,KAAK;AAExD,UAAQ,WAAW,MAAM;AAAA,IACvB,KAAK;AACH,aAAO,YAAY;AAAA,QACjB;AAAA,QACA,WAAW;AAAA,MACb,CAAC,GAAG,MAAM,SAAS,gBAAgBD,IAAGC,KAAI,CAAC;AAAA,IAC7C,KAAK,iBAAiB;AACpB,YAAMG,UACJH,SAAQA,MAAK,SAAS,IAAI,OAAOA,MAAK,MAAM,GAAG,EAAE,EAAE,KAAK,GAAG,CAAC,KAAK;AAEnE,UAAI,WAAW,MAAM,QAAQ;AAC3B,eAAO,oBAAoB,WAAW,KAAK,GAAG,EAAE,CAAC,GAAGG,OAAM;AAAA,MAC5D;AACA,aAAO,gCAAgCA,OAAM;AAAA,IAC/C;AAAA,IAEA,KAAK;AACH,aAAO,0BAA0B;AAAA,QAC/B;AAAA,QACA,WAAW;AAAA,QACX;AAAA,MACF,CAAC,GAAG,MAAM,QAAQ,gBAAgBJ,IAAGC,KAAI,CAAC;AAAA,IAE5C,KAAK,kBAAkB;AACrB,aAAO,8BACL,WAAW,cAAc,WAAW,YAChC,WAAW,YACX,WAAW,WAAW,SAAS,QAAQ,WAAW,SAAS,EACjE,GAAG,MAAM,2BAA4BD,GAAuB,MAAM;AAAA,IACpE;AAAA,IAEA,KAAK;AACH,UAAI,WAAW,KAAK,WAAW,GAAG;AAChC,eAAO,uBAAuB,WAAW,KAAK,CAAC,CAAC,GAAG,MAAM;AAAA,MAC3D;AACA,aAAO,yBAAyB;AAAA,QAC9B;AAAA,QACA,WAAW;AAAA,MACb,CAAC,GAAG,MAAM;AAAA,IAEZ,KAAK;AACH,aAAO,OAAO,SAAS,UACnB,0BAA0BA,IAAG,QAAuB,QAAQ,QAAQ,IACpE,sBAAsB,MAAM;AAAA,IAElC,KAAK,gBAAgB;AACnB,YAAM,EAAC,MAAK,IAAI;AAChB,YAAM,UAAU,CAAC,QACb,YACA,OAAO,UAAU,WACf,QACC,MAAM,WAAW;AACxB,aAAO,GAAG,OAAO,GAAG,MAAM,SAAS,gBAAgBA,IAAGC,KAAI,CAAC;AAAA,IAC7D;AAAA,EACF;AACF;AAIA,SAAS,0BACP,OACA,QACA,MACQ;AACR,QAAM,WAAyB,CAAC;AAChC,aAAW,QAAQ,OAAO,SAAS;AACjC,UAAM,IAAI,KAAK,IAAI,OAAO,EAAC,KAAI,CAAC;AAChC,QAAI,CAAC,EAAE,IAAI;AACT,eAAS,KAAK,EAAC,MAAM,KAAK,EAAC,CAAC;AAAA,IAC9B;AAAA,EACF;AACA,MAAI,SAAS,QAAQ;AAEnB,aAAS,KAAK,OAAO;AACrB,QAAI,SAAS,WAAW,KAAK,QAAQ,SAAS,CAAC,GAAG,SAAS,CAAC,CAAC,IAAI,GAAG;AAClE,aAAO,WAAW,SAAS,CAAC,EAAE,KAAK,OAAO,SAAS,CAAC,EAAE,MAAM,IAAI;AAAA,IAClE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,MAAM,KAAK,UAAU,KAAK;AAChC,WAAO,wBAAwB,GAAG;AAAA,EACpC,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,QAAQ,GAAe,GAAe;AAC7C,QAAM,QAAQ,EAAE,IAAI,OAAO,CAAC,EAAE;AAC9B,QAAM,QAAQ,EAAE,IAAI,OAAO,CAAC,EAAE;AAC9B,MAAI,MAAM,WAAW,MAAM,QAAQ;AACjC,WAAO,MAAM,SAAS,MAAM;AAAA,EAC9B;AACA,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,QAAI,MAAM,CAAC,IAAI,MAAM,CAAC,GAAG;AACvB,aAAO;AAAA,IACT;AACA,QAAI,MAAM,CAAC,IAAI,MAAM,CAAC,GAAG;AACvB,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AASO,SAAS,MACd,OACA,QACA,MACG;AACH,QAAM,MAAM,KAAK,OAAO,QAAQ,IAAI;AACpC,MAAI,CAAC,IAAI,IAAI;AACX,UAAM,IAAI,UAAU,IAAI,KAAK;AAAA,EAC/B;AACA,SAAO,IAAI;AACb;AAEO,SAAS,GACd,OACA,QACA,MACY;AACZ,SAAO,KAAK,OAAO,QAAQ,IAAI,EAAE;AACnC;AAEO,SAASF,QACd,OACA,QACA,MACoB;AACpB,QAAM,OAAO,QAAQ,IAAI;AAC3B;AAIO,SAAS,KACd,OACA,QACA,MACW;AACX,QAAM,MAAM,OAAO
,IAAI,OAAO,OAAO,EAAC,KAAI,IAAI,MAAS;AACvD,MAAI,CAAC,IAAI,IAAI;AACX,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,OAAO,WAAW,KAAK,OAAO,QAAQ,IAAI;AAAA,IAC5C;AAAA,EACF;AACA,SAAO;AACT;AAOO,SAAS,aACd,OACA,QACA,MACuB;AACvB,MAAI,QAAQ;AACZ,MAAI,SAAS,eAAe;AAC1B,YAAQ;AAAA,EACV,WAAW,SAAS,SAAS;AAC3B,YAAQ;AAAA,EACV;AACA,QAAM,MAAM,OAAO,KAAK,OAAO,KAAK;AACpC,MAAI,QAAQ,QAAW;AACrB,WAAO,EAAC,IAAI,MAAM,MAAK;AAAA,EACzB,WAAW,IAAI,IAAI;AACjB,WAAO;AAAA,EACT;AACA,QAAMI,OAAM,IAAM,cAAY,GAAG;AACjC,SAAO,EAAC,IAAI,OAAO,OAAO,WAAWA,MAAK,OAAO,QAAQ,IAAI,EAAC;AAChE;AAKO,SAAS,SAA2B,GAAoC;AAC7E,SAAO;AACT;AAEO,SAAS,eACd,GACsC;AACtC,SAAS,SAAO,CAAC;AACnB;AAEO,SAAS,cACd,GAC+B;AAC/B,SAAS,QAAM,CAAC;AAClB;AAEO,SAAS,eACd,GAC8C;AAC9C,SAAS,SAAO,CAAC;AACnB;AAGA,IAAM,eAAe,OAAO;AAAA,EAC1B,OAAO,eAAiB,SAAO,EAAE,SAAS,CAAC;AAC7C,EAAE;AAEK,SAAS,uBACd,KACkC;AAClC,SAAO,eAAe;AACxB;AAQO,SAAS,YACd,GACA;AACA,QAAM,QAAQ,CAAC;AACf,aAAW,CAAC,KAAK,IAAI,KAAK,OAAO,QAAQ,EAAE,KAAK,GAAG;AACjD,QAAI,KAAK,SAAS,UAAU;AAC1B,YAAM,GAAG,IAAI,YAAY,IAAoB,EAAE,SAAS;AAAA,IAC1D,OAAO;AACL,YAAM,GAAG,IAAI,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF;AACA,SAAS,SAAO,KAA4D;AAC9E;AAIO,SAAS,gBACX,UACgB;AACnB,SAAS,QAAM,GAAG,SAAS,IAAM,SAAO,CAAC;AAC3C;;;ACtQO,SAAS,UACd,GACA,GACS;AACT,MAAI,MAAM,GAAG;AACX,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,MAAM,OAAO,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,UAAQ,OAAO,GAAG;AAAA,IAChB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,EACX;AAKA,MAAI;AAGJ,MAAI,MAAM,QAAQ,CAAC,GAAG;AACpB,QAAI,CAAC,MAAM,QAAQ,CAAC,GAAG;AACrB,aAAO;AAAA,IACT;AACA,QAAI,EAAE,WAAW,EAAE,QAAQ;AACzB,aAAO;AAAA,IACT;AACA,aAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,UAAI,CAAC,UAAU,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG;AAC1B,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,QAAQ,MAAM,MAAM;AAC5B,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,QAAQ,CAAC,GAAG;AACpB,WAAO;AAAA,EACT;AAGA,MAAI;AACJ,MAAI;AAKJ,MAAI,QAAQ;AACZ,aAAW,OAAO,GAAG;AACnB,QAAI,OAAO,GAAG,GAAG,GAAG;AAClB,UAAI,CAAC,UAAU,EAAE,GAAG,GAAG,EAAE,GAAG,CAAC,GAAG;AAC9B,eAAO;AAAA,MACT;AACA;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ;AACZ,aAAW,OAAO,GAAG;AACnB,QAAI,OAAO,GAAG,GAAG,GAAG;AAClB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,UAAU;AACnB;AAEO,SAAS,gBAAgBE,IAAoC;AAClE,MAAI,QAAqB;AACvB;AAAA,EACF;AACA,UAAQ,OAAOA,IAAG;AAAA,IAChB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH;AAAA,IACF,KAAK;AACH,UAAIA,OAAM,MAAM;AACd;AAAA,MACF;AACA,UAAI,MAAM,QAAQA,EAAC,GAAG;AACpB,eAAO,gBAAgBA,EAAC;AAAA,MAC1B;AACA,aAAO,yBAAyBA,EAA4B;AAAA,EAChE;AACA,mBAAiBA,IAAG,YAAY;AAClC;AAEO,SAAS,iBAAiBA,IAAqC;AACpE,eAAaA,EAAC;AACd,2BAAyBA,EAAC;AAC5B;AAEA,SAAS,yBACPA,IACyB;AACzB,aAAW,KAAKA,IAAG;AACjB,QAAI,OAAOA,IAAG,CAAC,GAAG;AAChB,YAAM,QAAQA,GAAE,CAAC;AACjB,UAAI,UAAU,QAAW;AACvB,wBAAgB,KAAK;AAAA,MACvB;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,gBAAgBA,IAAwC;AAC/D,aAAW,QAAQA,IAAG;AACpB,oBAAgB,IAAI;AAAA,EACtB;AACF;AAWO,SAAS,YAAYA,IAAYC,OAA4B;AAClE,UAAQ,OAAOD,IAAG;AAAA,IAChB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,UAAIA,OAAM,MAAM;AACd,eAAO;AAAA,MACT;AACA,UAAI,MAAM,QAAQA,EAAC,GAAG;AACpB,eAAO,YAAYA,IAAGC,KAAI;AAAA,MAC5B;AACA,aAAO,mBAAmBD,IAA8BC,KAAI;AAAA,EAChE;AACA,SAAO;AACT;AAEO,SAAS,aAAaD,IAAYC,OAA6B;AACpE,MAAI,OAAOD,OAAM,YAAYA,OAAM,MAAM;AACvC,WAAO;AAAA,EACT;AACA,SAAO,mBAAmBA,IAA8BC,KAAI;AAC9D;AAEA,SAAS,mBACPD,IACAC,OACiB;AACjB,aAAW,KAAKD,IAAG;AACjB,QAAI,OAAOA,IAAG,CAAC,GAAG;AAChB,MAAAC,MAAK,KAAK,CAAC;AACX,YAAM,QAAQD,GAAE,CAAC;AACjB,UAAI,UAAU,UAAa,CAAC,YAAY,OAAOC,KAAI,GAAG;AACpD,eAAO;AAAA,MACT;AACA,MAAAA,MAAK,IAAI;AAAA,IACX;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,YAAYD,IAAcC,OAA8B;AAC/D,WAAS,IAAI,GAAG,IAAID,GAAE,QAAQ,KAAK;AACjC,IAAAC,MAAK,KAAK,CAAC;AACX,QAAI,CAAC,YAAYD,GAAE,CAAC,GAAGC,KAAI,GAAG;AAC5B,aAAO;AAAA,IACT;AACA,IAAAA,MAAK,IAAI;AAAA,EACX;AACA,S
AAO;AACT;;;AC1OO,SAAS,eAAuB;AAErC,QAAM,OAAO,KAAK,MAAM,KAAK,OAAO,IAAI,UAAU;AAClD,QAAM,MAAM,KAAK,MAAM,KAAK,OAAO,IAAI,UAAU;AAGjD,SAAQ,OAAO,IAAI,KAAK,MAAO,OAAO,GAAG;AAC3C;;;ACHO,IAAM,gBAAgB;AAwB7B,IAAM,SAAS;AAOf,IAAM,YAAY,IAAI,OAAO,aAAa;AACnC,IAAM,YAAY;AAKlB,IAAM,gBAAgB,kCAAkC;AAmB/D,SAAS,iBAAiB,GAAoB,KAAqB;AACjE,SAAO,EAAE,SAAS,EAAE,EAAE,MAAM,CAAC,GAAG,EAAE,SAAS,KAAK,GAAG;AACrD;AASA,SAAS,oCAAgD;AACvD,MAAI,OAAO;AACX,MAAI,IAAI;AAER,SAAO,MAAM;AACX,QAAI,CAAC,MAAM;AAMT,aAAO,iBAAiB,aAAa,GAAG,EAAE;AAAA,IAC5C;AACA,UAAM,OAAO,iBAAiB,KAAK,EAAE;AACrC,WAAQ,OAAO;AAAA,EACjB;AACF;AAYO,SAAS,OAAO,OAA+B;AACpD,SAAO,OAAO,UAAU,YAAY,OAAO,KAAK,KAAK;AACvD;AAEO,SAAS,WAAW,OAAuC;AAChE,EAAOC,QAAO,OAAO,UAAU;AACjC;AAEO,IAAM,aAAoB,sBAAO,EAAE,OAAO,QAAQ,cAAc;;;ACxGvE,IAAM,WAAW;AACjB,IAAM,aAAa;AACnB,IAAM,WAAW;AACjB,IAAM,cAAc;AAkBb,SAAS,eAAe,OAAwB;AACrD,UAAQ,OAAO,OAAO;AAAA,IACpB,KAAK;AAIH,aAAO,WAAW,aAAa,MAAM;AAAA,IACvC,KAAK;AACH,UAAI,MAAM,KAAK,GAAG;AAChB,YAAI,SAAS,EAAE,KAAK,OAAO,SAAS,KAAK,KAAK,GAAG;AAC/C,iBAAO,WAAW;AAAA,QACpB;AACA,eAAO,WAAW;AAAA,MACpB;AACA,aAAO,WAAW;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,UAAI,UAAU,MAAM;AAClB,eAAO;AAAA,MACT;AAEA,UAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,YAAI,MAAM,IAAI,WAAW;AACzB,mBAAW,WAAW,OAAO;AAC3B,iBAAO,eAAe,OAAO;AAAA,QAC/B;AACA,eAAO;AAAA,MACT;AAEA;AACE,cAAM,MAAM;AACZ,YAAI,MAAc,IAAI,WAAW;AACjC,mBAAW,KAAK,KAAK;AACnB,cAAI,OAAO,KAAK,CAAC,GAAG;AAIlB,kBAAM,gBAAgB,IAAI,CAAC;AAC3B,gBAAI,kBAAkB,QAAW;AAC/B,qBAAO,eAAe,CAAC,IAAI,eAAe,aAAa;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA,EACJ;AAEA,QAAM,IAAI,MAAM,wBAAwB,OAAO,KAAK,YAAY,KAAK,EAAE;AACzE;AAEA,SAAS,MAAM,OAAwB;AACrC,SAAO,WAAW,QAAQ;AAC5B;AAEA,IAAM,aAAa,IAAI,WAAW,aAAa,WAAW;AAEnD,SAAS,eAAqB,KAAQ,OAAkB;AAE7D,SAAO,aAAa,eAAe,GAAG,IAAI,eAAe,KAAK;AAChE;;;ACpFA,SAAQ,mBAAkB;;;ACanB,SAAS,aAAa,MAAc,SAAgC;AACzE,MAAI,MAAM;AACV,SAAO,MAAM,MAAM;AACjB,UAAM,MAAM,OAAQ,OAAO,OAAQ;AACnC,UAAM,IAAI,QAAQ,GAAG;AACrB,QAAI,MAAM,GAAG;AACX,aAAO;AAAA,IACT;AACA,QAAI,IAAI,GAAG;AACT,YAAM,MAAM;AAAA,IACd,OAAO;AACL,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;;;AC1BO,UAAU,iBAAoB,OAAsB;AACzD,aAAW,QAAQ,OAAO;AACxB,WAAO;AAAA,EACT;AACF;AAEA,UAAU,WACR,MACA,GACa;AACb,MAAI,QAAQ;AACZ,aAAW,KAAK,MAAM;AACpB,QAAI,EAAE,GAAG,OAAO,GAAG;AACjB,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,UAAU,QACR,MACA,GACa;AACb,MAAI,QAAQ;AACZ,aAAW,KAAK,MAAM;AACpB,UAAM,EAAE,GAAG,OAAO;AAAA,EACpB;AACF;AASO,UAAU,KAAQ,QAAkC;AACzD,QAAM,KAAK,OAAO,OAAO,QAAQ,EAAE;AACnC,QAAM,EAAC,MAAK,IAAI,GAAG,KAAK;AACxB,MAAI,UAAU,QAAW;AACvB,UAAM;AAAA,EACR;AACA,KAAG,SAAS;AACd;AAKA,IAAM,cAAN,MAAM,aAAsC;AAAA,EAC1C;AAAA,EACA,YAAY,MAAmB;AAC7B,SAAK,OAAO;AAAA,EACd;AAAA,EAEA,CAAC,OAAO,QAAQ,IAAI;AAClB,WAAO,KAAK,KAAK,OAAO,QAAQ,EAAE;AAAA,EACpC;AAAA,EAEA,IAAO,GAA+C;AACpD,WAAO,IAAI,aAAY,QAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,EAC9C;AAAA,EAEA,OAAO,GAAqD;AAC1D,WAAO,IAAI,aAAY,WAAW,KAAK,MAAM,CAAC,CAAC;AAAA,EACjD;AACF;AAEO,SAAS,aAAgB,MAAmC;AACjE,SAAO,IAAI,YAAY,IAAI;AAC7B;AAEO,UAAU,eACf,WACA,YACA,WAAW,OACU;AACrB,QAAM,YAAY,UAAU,IAAI,OAAK,EAAE,OAAO,QAAQ,EAAE,CAAC;AACzD,MAAI;AACF,UAAM,UAAU,UAAU,IAAI,OAAK,EAAE,KAAK,CAAC;AAC3C,QAAI;AACJ,WAAO,QAAQ,KAAK,OAAK,CAAC,EAAE,IAAI,GAAG;AACjC,YAAM,MAAM,QAAQ;AAAA,QAClB,CAAC,KAA8B,GAAG,MAA+B;AAC/D,cAAI,EAAE,MAAM;AACV,mBAAO;AAAA,UACT;AACA,cAAI,QAAQ,UAAa,WAAW,EAAE,OAAO,IAAI,CAAC,CAAC,IAAI,GAAG;AACxD,mBAAO,CAAC,EAAE,OAAO,CAAC;AAAA,UACpB;AACA,iBAAO;AAAA,QACT;AAAA,QACA;AAAA,MACF;AAEA,aAAO,QAAQ,QAAW,kBAAkB;AAC5C,cAAQ,IAAI,CAAC,CAAC,IAAI,UAAU,IAAI,CAAC,CAAC,EAAE,KAAK;AACzC,UACE,gBAAgB,UAChB,YACA,WAAW,aAAa,IAAI,CAAC,CAAC,MAAM,GACpC;AACA;AAAA,MACF;AACA,oBAAc,IAAI,CAAC;AACnB,YAAM,IAAI,CAAC;AAAA,IACb;AAAA,EACF,UAAE;AACA,eAAW,MAAM,WAAW;AAC1B,SAAG,SAAS;AAAA,IACd
;AAAA,EACF;AACF;;;AFhFO,IAAM,aAAa;AACnB,IAAM,eAAe;AAYrB,SAAS,kBACd,OACA,SACA,eACa;AACb,SAAO,WAAW;AAAA,IAChB;AAAA,IACC,iBAA+B,KAC5B,UACA,QAAQ,IAAI,OAAK,EAAE,MAAM,GAAG,CAAC,CAAC;AAAA,EACpC,CAAC;AACH;AAsEA,eAAsB,SACpB,KACAC,OACA,QACA,kBACuB;AACvB,QAAM,OAAO,MAAM,OAAO,QAAQA,KAAI;AAEtC,MAAI,qBAAqB,OAAO,UAAU;AACxC,WAAO,SAAS,KAAK,OAAO,UAAU,QAAQ,OAAO,QAAQ;AAAA,EAC/D;AACA,MAAI,eAAe,IAAI,GAAG;AACxB,WAAO;AAAA,EACT;AACA,QAAM,EAAC,QAAO,IAAI;AAClB,MAAI,IAAIC,cAAa,KAAK,OAAO;AACjC,MAAI,MAAM,QAAQ,QAAQ;AACxB;AAAA,EACF;AACA,QAAM,QAAQ,QAAQ,CAAC;AACvB,SAAO,SAAS,KAAK,MAAM,CAAC,GAAG,QAAQ,gBAAgB;AACzD;AAYO,SAASA,cACd,KACA,SACQ;AACR,SAAO;AAAA,IAAqB,QAAQ;AAAA,IAAQ,OAC1C,YAAY,KAAK,QAAQ,CAAC,EAAE,CAAC,CAAC;AAAA,EAChC;AACF;AAEO,SAAS,kBACd,GACA,SACA,KACS;AACT,SAAO,MAAM,QAAQ,UAAU,QAAQ,CAAC,EAAE,CAAC,MAAM;AACnD;AAEO,SAAS,eACdC,IACA,eACAC,iBACyB;AACzB,MAAI,UAAwB,iBAA+B,IAAI;AAC7D,WAAOD;AAAA,EACT;AAEA,cAAYA,EAAC;AACb,mBAAiBA,EAAC;AAElB,SAAOA,GAAE,UAAU,CAAC;AACpB,QAAM,CAAC,OAAO,OAAO,IAAIA;AACzB,eAAa,KAAK;AAClB,cAAY,OAAO;AAEnB,QAAM,IAAI,QAAQ,IAAI,eAAe;AAGrC,MAAI,iBAA+B,IAAI;AACrC,eAAW,KAAK,SAAS;AACvB,kBAAY,GAAG,CAAC;AAAA,IAClB;AACA,WAAOA;AAAA,EACT;AAEA,QAAM,aAAa,QAAQ,IAAI,OAAK,kBAAkB,GAAG,GAAGC,eAAc,CAAC;AAC3E,SAAO,CAAC,OAAO,UAAU;AAC3B;AAEA,SAAS,YACP,OACA,GAG0C;AAC1C,cAAY,KAAK;AAEjB,SAAO,MAAM,UAAU,CAAC;AACxB,eAAa,MAAM,CAAC,CAAC;AACrB,IAAE,MAAM,CAAC,CAAC;AACV,eAAa,MAAM,CAAC,CAAC;AACvB;AAMA,SAAS,kBACP,OACA,GAGAA,iBACyB;AACzB,cAAY,KAAK;AACjB,SAAO,MAAM,UAAU,CAAC;AACxB,eAAa,MAAM,CAAC,CAAC;AACrB,IAAE,MAAM,CAAC,CAAC;AACV,QAAM,YAAYA,gBAAe,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC;AACnD,SAAO,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,GAAG,SAAS;AACvC;AAMA,IAAe,WAAf,MAA+B;AAAA,EAC7B;AAAA,EACA;AAAA,EAES;AAAA,EAET,iBAAiB;AAAA,EAEjB,YAAY,SAA8BC,OAAY,WAAoB;AACxE,SAAK,UAAU;AACf,SAAK,OAAOA;AACZ,SAAK,YAAY;AAAA,EACnB;AAAA,EAcA,SAAiB;AACf,WAAO,KAAK,QAAQ,KAAK,QAAQ,SAAS,CAAC,EAAE,CAAC;AAAA,EAChD;AAAA,EAEA,iBAAiB,MAAyB;AACxC,QAAI,KAAK,mBAAmB,IAAI;AAC9B,aAAO,KAAK;AAAA,IACd;AAEA,QAAI,MAAM,KAAK;AACf,eAAW,SAAS,KAAK,SAAS;AAChC,aAAO,MAAM,CAAC;AAAA,IAChB;AACA,WAAQ,KAAK,iBAAiB;AAAA,EAChC;AAAA,EAEU,YAAY,MAAkB;AACtC,SAAK,iBAAiB;AACtB,SAAK;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,YACd,MACA,eACa;AACb,SAAO,kBAAkB,KAAK,OAAO,KAAK,SAAS,aAAa;AAClE;AAEO,IAAM,eAAN,cAA2B,SAA0B;AAAA,EACjD,QAAQ;AAAA,EAEjB,IACE,KACA,OACA,WACA,MACuB;AACvB,QAAI;AACJ,UAAM,IAAIC,cAAa,KAAK,KAAK,OAAO;AACxC,QAAI,CAAC,kBAAkB,GAAG,KAAK,SAAS,GAAG,GAAG;AAE5C,oBAAc;AAAA,IAChB,OAAO;AACL,oBAAc;AAAA,IAChB;AAEA,WAAO,QAAQ;AAAA,MACb,KAAK,QAAQ,MAAM,GAAG,aAAa,CAAC,KAAK,OAAO,SAAS,CAAC;AAAA,IAC5D;AAAA,EACF;AAAA,EAEA,QACE,MACA,OACA,gBACG,OACW;AACd,QAAI,KAAK,WAAW;AAClB,WAAK,QAAQ,OAAO,OAAO,aAAa,GAAG,KAAK;AAChD,WAAK,YAAY,IAAI;AACrB,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,eAAe,KAAK,SAAS,OAAO,aAAa,GAAG,KAAK;AACzE,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA,EAEA,IAAI,KAAa,MAAyC;AACxD,UAAM,IAAIA,cAAa,KAAK,KAAK,OAAO;AACxC,QAAI,CAAC,kBAAkB,GAAG,KAAK,SAAS,GAAG,GAAG;AAE5C,aAAO,QAAQ,QAAQ,IAAI;AAAA,IAC7B;AAGA,WAAO,QAAQ,QAAQ,KAAK,QAAQ,MAAM,GAAG,CAAC,CAAC;AAAA,EACjD;AAAA,EAEA,OAAO,KAAK,OAAgD;AAC1D,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM,MAAM,CAAC;AAAA,IACf;AAAA,EACF;AAAA,EAEA,OAAO,YACL,OAC8C;AAC9C,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,SAAS,eACPC,QACA,OACA,gBACG,OACE;AACL,QAAM,MAAMA,OAAM,MAAM,GAAG,KAAK;AAChC,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,QAAI,KAAK,MAAM,CAAC,CAAC;AAAA,EACnB;AACA,WAAS,IAAI,QAAQ,aAAa,IAAIA,OAAM,QAAQ,KAAK;AACvD,QAAI,KAAKA,OAAM,CAAC,CAAC;AAAA,EACnB;AACA,SAAO;AACT;AAEO,IAAM,mBAAN,MAAM,0BAAyB,SAAe;AAAA,EAC1C;AAAA,EAET,YACE,SACAF,OACA,OACA,WACA;AACA,UAAM,SAASA,OAAM,SAAS;AAC9B,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,MAAM,IACJ,KACA,OACA,WACA
,MAC2B;AAC3B,QAAI,IAAIC,cAAa,KAAK,KAAK,OAAO;AACtC,QAAI,MAAM,KAAK,QAAQ,QAAQ;AAE7B;AAAA,IACF;AAEA,UAAM,YAAY,KAAK,QAAQ,CAAC,EAAE,CAAC;AACnC,UAAM,eAAe,MAAM,KAAK,QAAQ,SAAS;AAEjD,UAAM,YAAY,MAAM,aAAa,IAAI,KAAK,OAAO,WAAW,IAAI;AAEpE,UAAM,gBAAgB,UAAU,iBAAiB,IAAI;AACrD,QAAI,gBAAgB,KAAK,WAAW,gBAAgB,KAAK,SAAS;AAChE,aAAO,KAAK,mBAAmB,MAAM,GAAG,SAAS;AAAA,IACnD;AAEA,UAAM,WAAW;AAAA,MACf;AAAA,MACA,KAAK;AAAA,IACP;AACA,WAAO,KAAK,cAAc,MAAM,GAAG,QAAQ;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,mBACJ,MACA,GACA,WAC2B;AAC3B,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,cAAc,KAAK;AAIzB,QAAI;AACJ,QAAI;AACJ,QAAI;AACJ,QAAI,IAAI,GAAG;AACT,YAAMD,QAAO,YAAY,IAAI,CAAC,EAAE,CAAC;AACjC,YAAM,kBAAkB,MAAM,KAAK,QAAQA,KAAI;AAC/C,eAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,UAAU;AAAA,MACZ;AACA,mBAAa,IAAI;AACjB,oBAAc;AAAA,IAChB,WAAW,IAAI,YAAY,SAAS,GAAG;AACrC,YAAMA,QAAO,YAAY,IAAI,CAAC,EAAE,CAAC;AACjC,YAAM,cAAc,MAAM,KAAK,QAAQA,KAAI;AAC3C,eAAS;AAAA,QACP,UAAU;AAAA,QACV,YAAY;AAAA,MACd;AACA,mBAAa;AACb,oBAAc;AAAA,IAChB,OAAO;AACL,eAAS,UAAU;AACnB,mBAAa;AACb,oBAAc;AAAA,IAChB;AAEA,UAAM,aAAa;AAAA,MACjB;AAAA,MACA,WAAS,MAAM,CAAC;AAAA,MAChB,KAAK,UAAU,KAAK;AAAA,MACpB,KAAK,UAAU,KAAK;AAAA,IACtB;AAIA,UAAM,aAA4B,CAAC;AACnC,eAAWG,YAAW,YAAY;AAChC,YAAM,OAAO,KAAK,YAAYA,UAAS,KAAK;AAC5C,YAAM,eAAe;AAAA,QACnB;AAAA,QACA,KAAK;AAAA,MACP;AACA,iBAAW,KAAK,YAAY;AAAA,IAC9B;AAEA,QAAI,KAAK,WAAW;AAClB,WAAK,QAAQ,OAAO,YAAY,aAAa,GAAG,UAAU;AAC1D,WAAK,YAAY,IAAI;AACrB,aAAO;AAAA,IACT;AAEA,UAAM,UAAU;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IACL;AAEA,WAAO,KAAK,oBAAoB,SAAS,KAAK,KAAK;AAAA,EACrD;AAAA,EAEA,cACE,MACA,OACA,UACkB;AAClB,QAAI,KAAK,WAAW;AAClB,WAAK,QAAQ,OAAO,OAAO,GAAG,QAAQ;AACtC,WAAK,YAAY,IAAI;AACrB,aAAO;AAAA,IACT;AACA,UAAM,UAAU,eAAe,KAAK,SAAS,OAAO,GAAG,QAAQ;AAC/D,WAAO,KAAK,oBAAoB,SAAS,KAAK,KAAK;AAAA,EACrD;AAAA,EAEA,MAAM,IACJ,KACA,MAC0C;AAC1C,UAAM,IAAIF,cAAa,KAAK,KAAK,OAAO;AACxC,QAAI,MAAM,KAAK,QAAQ,QAAQ;AAE7B,aAAO;AAAA,IACT;AAEA,UAAM,YAAY,KAAK,QAAQ,CAAC,EAAE,CAAC;AACnC,UAAM,eAAe,MAAM,KAAK,QAAQ,SAAS;AACjD,UAAM,UAAU,aAAa;AAE7B,UAAM,YAAY,MAAM,aAAa,IAAI,KAAK,IAAI;AAClD,QAAI,UAAU,SAAS,SAAS;AAE9B,aAAO;AAAA,IACT;AAEA,QAAI,UAAU,QAAQ,WAAW,GAAG;AAElC,YAAM,UAAU,eAAe,KAAK,SAAS,GAAG,CAAC;AACjD,aAAO,KAAK,oBAAoB,SAAS,KAAK,KAAK;AAAA,IACrD;AAEA,QAAI,MAAM,KAAK,KAAK,QAAQ,WAAW,GAAG;AAGxC,aAAO;AAAA,IACT;AAGA,QAAI,UAAU,iBAAiB,IAAI,IAAI,KAAK,SAAS;AAEnD,YAAM,QAAQ,8BAA8B,WAAW,KAAK,YAAY;AACxE,aAAO,KAAK,cAAc,MAAM,GAAG,KAAK;AAAA,IAC1C;AAGA,WAAO,KAAK,mBAAmB,MAAM,GAAG,SAAS;AAAA,EACnD;AAAA,EAEA,OAAO,KAAK,MAA+C;AACzD,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM,YAAY,MAAM,KAAK,QAAQ,MAAM,CAAC,CAAC;AAC7C,aAAO,UAAU,KAAK,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA,EAEA,OAAO,YACL,MAC8C;AAC9C,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM,YAAY,MAAM,KAAK,QAAQ,MAAM,CAAC,CAAC;AAC7C,aAAO,UAAU,YAAY,IAAI;AAAA,IACnC;AAAA,EACF;AAAA,EAEA,YACE,OACA,QACA,MACiD;AACjD,UAAM,KAAiD,CAAC;AACxD,aAAS,IAAI,OAAO,IAAI,UAAU,IAAI,KAAK,QAAQ,QAAQ,KAAK;AAC9D,SAAG,KAAK,KAAK,QAAQ,KAAK,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;AAAA,IAC1C;AACA,WAAO,QAAQ,IAAI,EAAE;AAAA,EACvB;AAAA,EAEA,MAAM,qBACJ,OACA,QACA,MAC0C;AAC1C,UAAM,EAAC,MAAK,IAAI;AAEhB,QAAI,WAAW,GAAG;AAChB,aAAO,IAAI,kBAAiB,CAAC,GAAG,cAAc,GAAG,QAAQ,GAAG,IAAI;AAAA,IAClE;AAEA,UAAM,SAAS,MAAM,KAAK,YAAY,OAAO,QAAQ,QAAQ,IAAI;AAEjE,QAAI,QAAQ,GAAG;AACb,YAAME,WAAyB,CAAC;AAChC,iBAAW,SAAS,QAA8B;AAChD,QAAAA,SAAQ,KAAK,GAAG,MAAM,OAAO;AAAA,MAC/B;AACA,aAAO,IAAI,kBAAiBA,UAAS,cAAc,GAAG,QAAQ,GAAG,IAAI;AAAA,IACvE;AAEA,WAAO,UAAU,CAAC;AAClB,UAAM,UAAoC,CAAC;AAC3C,eAAW,SAAS,QAA0B;AAC5C,cAAQ,KAAK,GAAG,MAAM,OAAO;AAAA,IAC/B;AACA,WAAO,IAAI,aAAa,SAAS,cAAc,GAAG,IAAI;AAAA,EACxD;AACF;AAoBO,SAAS,YACd,SACAH,OACA,OACA,WACiC;AACjC,MAAI,UAAU,GAAG;AACf,WAAO,IAAI;AAAA,MACT;AAAA,MACAA;AAAA,MACA;AAA
A,IACF;AAAA,EACF;AACA,SAAO,IAAI,iBAAiB,SAA0BA,OAAM,OAAO,SAAS;AAC9E;AAEO,SAAS,eACd,MACsB;AACtB,SAAO,KAAK,UAAU;AACxB;AAEO,SAAS,UACd,QAEAI,iBACA,KACA,KACO;AACP,QAAM,aAAoB,CAAC;AAC3B,QAAM,QAAkB,CAAC;AACzB,MAAI,MAAM;AACV,MAAI,QAAa,CAAC;AAClB,aAAW,SAAS,QAAQ;AAC1B,UAAM,OAAOA,gBAAe,KAAK;AACjC,QAAI,QAAQ,KAAK;AACf,UAAI,MAAM,SAAS,GAAG;AACpB,mBAAW,KAAK,KAAK;AACrB,cAAM,KAAK,GAAG;AAAA,MAChB;AACA,iBAAW,KAAK,CAAC,KAAK,CAAC;AACvB,YAAM,KAAK,IAAI;AACf,YAAM;AACN,cAAQ,CAAC;AAAA,IACX,WAAW,MAAM,QAAQ,KAAK;AAC5B,YAAM,KAAK,KAAK;AAChB,iBAAW,KAAK,KAAK;AACrB,YAAM,KAAK,MAAM,IAAI;AACrB,YAAM;AACN,cAAQ,CAAC;AAAA,IACX,OAAO;AACL,aAAO;AACP,YAAM,KAAK,KAAK;AAAA,IAClB;AAAA,EACF;AAEA,MAAI,MAAM,GAAG;AACX,QAAI,MAAM,SAAS,KAAK,MAAM,MAAM,MAAM,SAAS,CAAC,KAAK,KAAK;AAC5D,iBAAW,WAAW,SAAS,CAAC,EAAE,KAAK,GAAG,KAAK;AAAA,IACjD,OAAO;AACL,iBAAW,KAAK,KAAK;AAAA,IACvB;AAAA,EACF;AAEA,SAAO;AACT;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA,CAAC;AAAA,EACa;AAChB;AACO,IAAM,oBAAoB,IAAI,aAAa,CAAC,GAAG,WAAW,KAAK;AAE/D,SAAS,8BACd,MACAA,iBACwB;AACxB,QAAM,MAAM,KAAK,OAAO;AACxB,QAAM,QAAQ,KAAK;AACnB,QAAM,OAAOA,gBAAe,KAAK,KAAK;AACtC,SAAO,CAAC,KAAK,OAAO,IAAI;AAC1B;;;AGvsBA,IAAM,oBAAoB;AACnB,IAAM,YAAY;AAClB,IAAM,iBAAiB;AACvB,IAAM,eAAe;AACrB,IAAM,cAAc;AAE3B,IAAM,MAAM;AACZ,IAAM,QAAQ;AAIP,UAAU,eACf,UACA,SACyB;AACzB,MAAI,gBAAgB;AACpB,MAAI,eAAe;AACnB,MAAI;AAEJ,WAAS,eAAeC,SAAgB,OAAqB;AAC3D,QAAIA,QAAO,WAAW,MAAM,mBAAmB;AAC7C,MAAAA,QAAO,WAAW,IAAI;AAAA,IACxB;AAAA,EACF;AAEA,WAAS,YAAoB;AAC3B,WAAO,CAAC,eAAe,GAAG,GAAG,iBAAiB;AAAA,EAChD;AAEA,SAAO,gBAAgB,SAAS,UAAU,eAAe,QAAQ,QAAQ;AACvE,QAAI,SAAS,aAAa,EAAE,GAAG,MAAM,QAAQ,YAAY,EAAE,GAAG,GAAG;AAC/D,UACE;AAAA;AAAA,QAEE,SAAS,aAAa,EAAE,KAAK;AAAA,QAC7B,QAAQ,YAAY,EAAE,KAAK;AAAA,MAC7B,GACA;AACA,YAAI,QAAQ;AACV,yBAAe,QAAQ,CAAC;AACxB,gBAAM;AACN,mBAAS;AAAA,QACX;AAAA,MACF,OAAO;AACL,YAAI,CAAC,QAAQ;AACX,mBAAS,UAAU;AAAA,QACrB;AACA,eAAO,YAAY;AACnB,eAAO,cAAc;AACrB,uBAAe,QAAQ,YAAY;AAAA,MACrC;AACA;AACA;AAAA,IACF,WAAW,SAAS,aAAa,EAAE,GAAG,IAAI,QAAQ,YAAY,EAAE,GAAG,GAAG;AAEpE,UAAI,CAAC,QAAQ;AACX,iBAAS,UAAU;AAAA,MACrB;AACA,aAAO,cAAc;AAErB;AAAA,IACF,OAAO;AAEL,UAAI,CAAC,QAAQ;AACX,iBAAS,UAAU;AAAA,MACrB;AACA,aAAO,YAAY;AACnB,qBAAe,QAAQ,YAAY;AAEnC;AAAA,IACF;AAAA,EACF;AAEA,MAAI,eAAe,QAAQ,QAAQ;AACjC,QAAI,CAAC,QAAQ;AACX,eAAS,UAAU;AAAA,IACrB;AACA,WAAO,YAAY,KAAK,QAAQ,SAAS;AACzC,mBAAe,QAAQ,YAAY;AAAA,EACrC;AAEA,MAAI,gBAAgB,SAAS,QAAQ;AACnC,QAAI,CAAC,QAAQ;AACX,eAAS,UAAU;AAAA,IACrB;AACA,WAAO,cAAc,KAAK,SAAS,SAAS;AAAA,EAC9C;AAEA,MAAI,QAAQ;AACV,mBAAe,QAAQ,CAAC;AACxB,UAAM;AAAA,EACR;AACF;;;ACzDO,IAAM,mBAAmB;AAEzB,IAAM,YAAN,MAAiE;AAAA,EACnD,SACjB,oBAAI,IAAI;AAAA,EAES;AAAA,EACA;AAAA,EACnB;AAAA,EACS;AAAA,EACA;AAAA,EAET,YACE,SACA,eACA,OAAa,WACb,eAA6C,gBAC7C,kBAAkB,kBAClB;AACA,SAAK,WAAW;AAChB,SAAK,iBAAiB;AACtB,SAAK,WAAW;AAChB,SAAK,eAAe;AACpB,SAAK,kBAAkB;AAAA,EACzB;AAAA,EAEA,MAAM,QAAQC,OAAsD;AAClE,QAAIA,UAAS,WAAW;AACtB,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,KAAK,OAAO,IAAIA,KAAI;AACnC,QAAI,QAAQ;AACV,aAAO;AAAA,IACT;AAEA,UAAM,QAAQ,MAAM,KAAK,SAAS,aAAaA,KAAI;AACnD,UAAM,OAAO;AAAA,MACX,MAAM;AAAA,MACN,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AACA,UAAM,OAAO;AAAA,MACX,KAAK,YAAY;AAAA,MACjBA;AAAA,MACA,KAAK,UAAU;AAAA,MACf;AAAA,IACF;AACA,SAAK,OAAO,IAAIA,OAAM,IAAI;AAC1B,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,IAAI,KAAmD;AAC3D,UAAM,OAAO,MAAM,SAAS,KAAK,KAAK,UAAU,MAAM,KAAK,QAAQ;AACnE,UAAM,QAAQC,cAAa,KAAK,KAAK,OAAO;AAC5C,QAAI,CAAC,kBAAkB,OAAO,KAAK,SAAS,GAAG,GAAG;AAChD,aAAO;AAAA,IACT;AACA,WAAO,KAAK,QAAQ,KAAK,EAAE,CAAC;AAAA,EAC9B;AAAA,EAEA,MAAM,IAAI,KAA+B;AACvC,UAAM,OAAO,MAAM,SAAS,KAAK,KAAK,UAAU,MAAM,KAAK,QAAQ;AACnE,UAAM,QAAQA,cAAa,KAAK,KAAK,OAAO;AAC5C,WAAO,kBAAkB,OAAO,KAAK,SAAS,GAAG;AAAA,EACnD;AAAA,EAEA,MAAM,UAA4B;AAChC,UAAM,EAAC,SAAQ,IAAI
;AACnB,UAAM,OAAO,MAAM,KAAK,QAAQ,KAAK,QAAQ;AAE7C,QAAI,KAAK,aAAa,UAAU;AAC9B,aAAO,KAAK,QAAQ;AAAA,IACtB;AACA,WAAO,KAAK,QAAQ,WAAW;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,KAAK,SAAgE;AACnE,WAAO;AAAA,MACL,KAAK;AAAA,MACL,MAAM,KAAK;AAAA,MACX,KAAK;AAAA,MACL;AAAA,MACA,OAAMD,UAAQ;AACZ,cAAM,SAAS,MAAM,KAAK,QAAQA,KAAI;AACtC,YAAI,QAAQ;AACV,iBAAO;AAAA,YACL,OAAO;AAAA,YACP,OAAO,YAAY,OAAO,QAAQ,MAAM,IAAI,OAAO;AAAA,UACrD;AAAA,QACF;AACA,cAAM,QAAQ,MAAM,KAAK,SAAS,aAAaA,KAAI;AACnD,eAAO;AAAA,UACL,MAAM;AAAA,UACN,KAAK;AAAA,UACL,KAAK;AAAA,QACP;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,OAAO,OAAsC;AAC3C,UAAM,OAAO,MAAM,KAAK,QAAQ,KAAK,QAAQ;AAC7C,WAAO,KAAK,KAAK,IAAI;AAAA,EACvB;AAAA,EAEA,OAAO,UAAyD;AAC9D,UAAM,OAAO,MAAM,KAAK,QAAQ,KAAK,QAAQ;AAC7C,WAAO,KAAK,YAAY,IAAI;AAAA,EAC9B;AAAA,EAEA,CAAC,OAAO,aAAa,IAAmD;AACtE,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA,EAEA,OAAO,KAAK,MAA+D;AACzE,UAAM,CAAC,aAAa,QAAQ,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChD,KAAK,QAAQ,KAAK,QAAQ;AAAA,MAC1B,KAAK,QAAQ,KAAK,QAAQ;AAAA,IAC5B,CAAC;AACD,WAAO,UAAU,UAAU,aAAa,MAAM,IAAI;AAAA,EACpD;AACF;AAEA,gBAAgB,UACd,MACA,SACA,UACA,aAC8C;AAC9C,MAAI,KAAK,QAAQ,QAAQ,OAAO;AAG9B,UAAM,YAAa,MAAO,KAA0B;AAAA,MAClD;AAAA,MACA,KAAK,QAAQ;AAAA,MACb;AAAA,IACF;AACA,WAAO,UAAU,WAAW,SAAS,UAAU,WAAW;AAC1D;AAAA,EACF;AAEA,MAAI,QAAQ,QAAQ,KAAK,OAAO;AAE9B,UAAM,eAAgB,MACpB,QACA;AAAA,MACA;AAAA,MACA,QAAQ,QAAQ;AAAA,MAChB;AAAA,IACF;AACA,WAAO,UAAU,MAAM,cAAc,UAAU,WAAW;AAC1D;AAAA,EACF;AAEA,MAAI,eAAe,IAAI,KAAK,eAAe,OAAO,GAAG;AACnD,WAAO;AAAA,MACJ,KAAsB;AAAA,MACtB,QAAyB;AAAA,IAC5B;AACA;AAAA,EACF;AAKA,QAAM,iBAAiB;AAAA,IACpB,KAA0B;AAAA,IAC1B,QAA6B;AAAA,EAChC;AACA,aAAW,UAAU,gBAAgB;AACnC,UAAM,CAAC,WAAW,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MACjD,KAA0B;AAAA,QACzB,OAAO,SAAS;AAAA,QAChB,OAAO,cAAc;AAAA,QACrB;AAAA,MACF;AAAA,MACC,QAA6B;AAAA,QAC5B,OAAO,WAAW;AAAA,QAClB,OAAO,YAAY;AAAA,QACnB;AAAA,MACF;AAAA,IACF,CAAC;AACD,WAAO,UAAU,WAAW,cAAc,UAAU,WAAW;AAAA,EACjE;AACF;AAEA,UAAU,YACR,aACA,gBACyC;AACzC,QAAM,aAAa,YAAY;AAC/B,QAAM,gBAAgB,eAAe;AACrC,MAAI,IAAI;AACR,MAAI,IAAI;AACR,SAAO,IAAI,cAAc,IAAI,eAAe;AAC1C,UAAM,UAAU,YAAY,CAAC,EAAE,CAAC;AAChC,UAAM,aAAa,eAAe,CAAC,EAAE,CAAC;AACtC,QAAI,YAAY,YAAY;AAC1B,UAAI,CAAC,UAAU,YAAY,CAAC,EAAE,CAAC,GAAG,eAAe,CAAC,EAAE,CAAC,CAAC,GAAG;AACvD,cAAM;AAAA,UACJ,IAAI;AAAA,UACJ,KAAK;AAAA,UACL,UAAU,YAAY,CAAC,EAAE,CAAC;AAAA,UAC1B,UAAU,eAAe,CAAC,EAAE,CAAC;AAAA,QAC/B;AAAA,MACF;AACA;AACA;AAAA,IACF,WAAW,UAAU,YAAY;AAC/B,YAAM;AAAA,QACJ,IAAI;AAAA,QACJ,KAAK;AAAA,QACL,UAAU,YAAY,CAAC,EAAE,CAAC;AAAA,MAC5B;AACA;AAAA,IACF,OAAO;AACL,YAAM;AAAA,QACJ,IAAI;AAAA,QACJ,KAAK;AAAA,QACL,UAAU,eAAe,CAAC,EAAE,CAAC;AAAA,MAC/B;AACA;AAAA,IACF;AAAA,EACF;AACA,SAAO,IAAI,YAAY,KAAK;AAC1B,UAAM;AAAA,MACJ,IAAI;AAAA,MACJ,KAAK,YAAY,CAAC,EAAE,CAAC;AAAA,MACrB,UAAU,YAAY,CAAC,EAAE,CAAC;AAAA,IAC5B;AAAA,EACF;AACA,SAAO,IAAI,eAAe,KAAK;AAC7B,UAAM;AAAA,MACJ,IAAI;AAAA,MACJ,KAAK,eAAe,CAAC,EAAE,CAAC;AAAA,MACxB,UAAU,eAAe,CAAC,EAAE,CAAC;AAAA,IAC/B;AAAA,EACF;AACF;AAUA,gBAAgB,YACd,kBACA,aACAA,OACA,SACA,UAC+C;AAC/C,MAAIA,UAAS,WAAW;AACtB;AAAA,EACF;AAEA,QAAM,OAAO,MAAM,SAASA,KAAI;AAChC,QAAM,UAAU,KAAK,YAAY;AACjC,MAAI,IAAI;AACR,MAAI,SAAS;AACX,QAAIC,cAAa,SAAS,OAAO;AAAA,EACnC;AACA,MAAI,KAAK,UAAU,IAAI,GAAG;AACxB,WAAO,IAAI,QAAQ,QAAQ,KAAK;AAC9B,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACC,QAAQ,CAAC,EAAkB,CAAC;AAAA,QAC7B;AAAA,QACA;AAAA,MACF;AACA,gBAAU;AAAA,IACZ;AAAA,EACF,OAAO;AACL,WAAO,IAAI,QAAQ,QAAQ,KAAK;AAC9B,YAAM,WAAW,YAAY;AAE7B,UAAI,qBAAqB,UAAU;AACjC,eAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA,QAAQ,CAAC,EAAE,CAAC;AAAA,UACZ;AAAA,QACF;AACA;AAAA,MACF;AACA,YAAM,QAAQ,CAAC;AAAA,IACjB;AAAA,EACF;AACF;AAEA,eAAsB,iBACpB,KACA,IACuB;AACvB,QAAMC,QAAgC,CAAC;AACvC,QAAM,OACJ,OAAO,QACH,YAAU;AAAA,IACR,IAAI;AAAA,IACJ,KAAK,MAAM,
CAAC;AAAA,IACZ,UAAU,MAAM,CAAC;AAAA,EACnB,KACA,YAAU;AAAA,IACR,IAAI;AAAA,IACJ,KAAK,MAAM,CAAC;AAAA,IACZ,UAAU,MAAM,CAAC;AAAA,EACnB;AAEN,mBAAiB,SAAS,IAAI,QAAQ,GAAG;AACvC,IAAAA,MAAK,KAAK,KAAK,KAAK,CAAC;AAAA,EACvB;AACA,SAAOA;AACT;;;ACzWO,SAAS,cAAc,GAAW,GAAmB;AAC1D,MAAI,MAAM,GAAG;AACX,WAAO;AAAA,EACT;AACA,MAAI,IAAI,GAAG;AACT,WAAO;AAAA,EACT;AACA,SAAO;AACT;;;AC6BO,SAAS,eAAe,GAAW,GAAmB;AAC3D,MAAI,MAAM,GAAG;AACX,WAAO;AAAA,EACT;AACA,MAAI,MAAM,MAAM;AACd,WAAO;AAAA,EACT;AACA,MAAI,MAAM,MAAM;AACd,WAAO;AAAA,EACT;AAEA,QAAM,MAAM,gBAAgB,CAAC;AAC7B,QAAM,MAAM,gBAAgB,CAAC;AAG7B,MAAI,OAAO,QAAQ,YAAY,OAAO,QAAQ,UAAU;AACtD,WAAO,cAAc,OAAO,GAAG,GAAG,OAAO,GAAG,CAAC;AAAA,EAC/C;AAEA,SAAO,MAAM;AACf;AAIA,SAAS,gBAAgB,QAA0C;AACjE,MAAI,OAAO,WAAW,YAAY,OAAO,WAAW,UAAU;AAC5D,WAAO;AAAA,EACT;AACA,SAAO,OAAO;AAChB;AAEO,SAAS,aAAaC,IAAiC;AAC5D,MAAIA,OAAM,QAAQ,OAAOA,OAAM,YAAY,OAAOA,OAAM,UAAU;AAChE;AAAA,EACF;AAEA,mBAAiBA,EAAC;AAClB,MAAI,OAAOA,GAAE,UAAU,YAAY,OAAOA,GAAE,UAAU,UAAU;AAC9D;AAAA,EACF;AAEA,QAAM,IAAI,MAAM,gBAAgB;AAClC;;;AC9DO,SAAS,OAAO,YAA0B;AAC/C,SAAO;AACT;AAMO,SAAS,OAAO,MAAgC;AACrD,MAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,SAAK,KAAK;AACV,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,aAAO,KAAK,IAAI,CAAC,MAAM,KAAK,CAAC,GAAG,+BAA+B;AAAA,IACjE;AACA,WAAO,OAAO,IAAI;AAAA,EACpB;AAEA,QAAM,YAAY,CAAC,GAAG,IAAI;AAC1B,YAAU,KAAK;AAEf,SAAO,OAAO,SAAS;AACzB;AAEO,IAAM,QAAN,MAAyB;AAAA,EACrB;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA;AAAA,EAET,YAAYC,OAAY,MAAS,MAAY;AAC3C;AAAA,MACE,CAAE,KAAmB,SAASA,KAAI;AAAA,MAClC;AAAA,IACF;AACA,qBAAiB,IAAI;AACrB,SAAK,OAAOA;AACZ,SAAK,OAAO;AACZ,SAAK,OAAO;AAAA,EACd;AACF;AAEO,SAAS,WAAWC,IAA+B;AACxD,MAAI,CAAC,MAAM,QAAQA,EAAC,GAAG;AACrB,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACzC;AACA,MAAIA,GAAE,SAAS,GAAG;AAChB,iBAAaA,GAAE,CAAC,CAAC;AACjB,aAAS,IAAI,GAAG,IAAIA,GAAE,QAAQ,KAAK;AACjC,mBAAaA,GAAE,CAAC,CAAC;AAAA,IACnB;AAAA,EACF;AACF;AAEO,SAAS,YACd,MACA,MACA,aACU;AACV,QAAMD,QAAO,YAAY;AACzB,SAAO,IAAI,MAAMA,OAAM,MAAM,IAAI;AACnC;;;AC/CO,IAAM,qBAAN,cAAiC,MAAM;AAAA,EAC5C,OAAO;AAAA,EACE;AAAA,EACT,YAAYE,OAAY;AACtB,UAAM,mBAAmBA,KAAI,EAAE;AAC/B,SAAK,OAAOA;AAAA,EACd;AACF;AAEA,eAAsB,aACpB,OACAA,OACgB;AAChB,QAAM,QAAQ,MAAM,MAAM,SAASA,KAAI;AACvC,MAAI,OAAO;AACT,WAAO;AAAA,EACT;AACA,QAAM,IAAI,mBAAmBA,KAAI;AACnC;AAEA,eAAsB,gBACpB,MACA,OACe;AACf,QAAMA,QAAO,MAAM,MAAM,QAAQ,IAAI;AACrC,SAAOA,OAAM,gBAAgB,IAAI,EAAE;AACnC,SAAOA;AACT;;;ACvDO,IAAM,YAAY;AAClB,IAAM,eAAe;;;ACkBrB,IAAM,oBAAoB;AAE1B,SAAS,kBACd,QACiC;AACjC,SAAO,gBAAgB,OAAO,IAAI;AACpC;AAEO,SAAS,cACd,QACiC;AACjC,SAAO,kBAAkB,MAAM;AACjC;AAEO,SAAS,iBACd,QACoC;AACpC,SAAO,mBAAmB,OAAO,IAAI;AACvC;AAEO,IAAM,SAAN,MAA6B;AAAA,EACzB;AAAA,EAET,YAAY,OAA6B;AACvC,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,IAAI,OAAU;AACZ,WAAO,KAAK,MAAM,KAAK;AAAA,EACzB;AAAA,EAEA,IAAI,YAAkB;AAEpB,WAAO,KAAK,MAAM,KAAK;AAAA,EACzB;AAAA,EAEA,cAAc,UAAoB,SAAwC;AACxE,WAAO,cAAc,UAAU,SAAS,KAAK,IAAI;AAAA,EACnD;AAAA,EAEA,MAAM,kBACJ,UACA,SACiB;AACjB,WAAQ,MAAM,KAAK,cAAc,UAAU,OAAO,IAAK;AAAA,EACzD;AAAA,EAEA,IAAI,UAAkC;AAEpC,WAAO,KAAK,MAAM,KAAK;AAAA,EACzB;AACF;AAEA,eAAsB,cACpB,UACA,SACA,MACiB;AACjB,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAc;AACZ,aAAO,KAAK,gBAAgB,QAAQ,KAAK;AAAA,IAE3C,KAAc,WAAW;AACvB,UAAI,KAAK,aAAa,UAAU;AAC9B,eAAO,KAAK;AAAA,MACd;AACA,YAAM,EAAC,UAAS,IAAI;AACpB,YAAM,cAAc,MAAM,eAAe,WAAW,OAAO;AAC3D,aAAO,cAAc,UAAU,SAAS,YAAY,IAAI;AAAA,IAC1D;AAAA,IAEA;AACE,kBAAY,IAAI;AAAA,EACpB;AACF;AAYA,eAAsB,eACpB,gBACA,SACkC;AAClC,QAAM,UAAU,MAAM,YAAY,gBAAgB,OAAO;AAEzD,SAAO,QAAQ,OAAO,OAAK,cAAc,CAAC,CAAC;AAC7C;AAEA,eAAsB,mBACpB,gBACA,SACkC;AAClC,QAAM,UAAU,MAAM,YAAY,gBAAgB,OAAO;AAEzD,SAAO,QAAQ,OAAO,OAAK,kBAAkB,CAAC,CAAC;AACjD;AAEA,eAAsB,0BACpB,QACA,kBACA,SACkC;AAClC,QAAM,UAAmC,CAAC;AAC1C,QAAM,4BAA4B,IAAI,IAAI,OAAO,Q
AAQ,gBAAgB,CAAC;AAC1E,SAAO,CAAC,iBAAiB,MAAM,KAAK,0BAA0B,OAAO,GAAG;AACtE,QAAI,kBAAkB,MAAM,GAAG;AAC7B,YAAM,EAAC,KAAI,IAAI;AACf,YAAM,uBAAuB,0BAA0B,IAAI,KAAK,QAAQ;AACxE,UAAI,yBAAyB,QAAW;AACtC,YAAI,KAAK,cAAc,sBAAsB;AAC3C,oCAA0B,OAAO,KAAK,QAAQ;AAAA,QAChD,OAAO;AACL,kBAAQ,KAAK,MAA+B;AAAA,QAC9C;AAAA,MACF;AAAA,IACF;AACA,UAAM,EAAC,UAAS,IAAI,OAAO;AAC3B,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,MAAM,UAAU,OAAO,MAAM,IAAI,eAAe;AAAA,IAC5D;AACA,aAAS,MAAM,eAAe,WAAW,OAAO;AAAA,EAClD;AACA,SAAO;AACT;AAEA,eAAsB,qBACpB,MACA,SACmC;AACnC,QAAMC,QAAO,MAAM,QAAQ,QAAQ,IAAI;AACvC,SAAOA,OAAM,gBAAgB,IAAI,EAAE;AACnC,SAAO,qBAAqBA,OAAM,OAAO;AAC3C;AAEA,eAAsB,yBACpBA,OACA,SACe;AACf,UAAQ,MAAM,qBAAqBA,OAAM,OAAO,GAAG,MAAM;AAC3D;AAEA,eAAsB,qBACpBA,OACA,SACmC;AACnC,QAAM,SAAS,MAAM,eAAeA,OAAM,OAAO;AACjD,SAAO,uBAAuB,QAAQ,OAAO;AAC/C;AAEA,eAAsB,uBACpB,QACA,SACmC;AACnC,SAAO,CAAC,iBAAiB,MAAM,GAAG;AAChC,UAAM,EAAC,KAAI,IAAI;AACf,QAAI,gBAAgB,IAAI,GAAG;AACzB,eAAS,MAAM,eAAe,KAAK,kBAAkB,OAAO;AAAA,IAC9D,OAAO;AACL,YAAM,EAAC,UAAS,IAAI;AACpB,UAAI,cAAc,MAAM;AACtB,cAAM,IAAI,MAAM,UAAU,OAAO,MAAM,IAAI,eAAe;AAAA,MAC5D;AACA,eAAS,MAAM,eAAe,WAAW,OAAO;AAAA,IAClD;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,kBACd,GACA,UACkE;AAClE,QAAM,IAAI,EAAE;AACZ,QAAM,OAAO,EAAE,gBAAgB,QAAQ,KAAK;AAC5C,SAAO,CAAC,MAAM,EAAE,UAAU;AAC5B;AAEO,SAAS,2BACd,GACA,GACQ;AACR,SAAO,eAAe,EAAE,KAAK,YAAY,EAAE,KAAK,UAAU;AAC5D;AAOA,eAAsB,YACpB,gBACA,SACyB;AACzB,MAAI,SAAS,MAAM,eAAe,gBAAgB,OAAO;AACzD,QAAM,UAAU,CAAC;AACjB,SAAO,CAAC,iBAAiB,MAAM,GAAG;AAChC,UAAM,EAAC,KAAI,IAAI;AACf,UAAM,EAAC,UAAS,IAAI;AACpB,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,MAAM,UAAU,OAAO,MAAM,IAAI,eAAe;AAAA,IAC5D;AACA,YAAQ,KAAK,MAAM;AACnB,aAAS,MAAM,eAAe,WAAW,OAAO;AAAA,EAClD;AACA,UAAQ,KAAK,MAAM;AACnB,SAAO;AACT;AAEA,eAAsB,eACpBA,OACA,SACuB;AACvB,QAAM,QAAQ,MAAM,QAAQ,aAAaA,KAAI;AAC7C,SAAO,UAAU,KAAK;AACxB;AAEA,eAAsB,eACpB,MACA,SACuB;AACvB,QAAMA,QAAO,MAAM,gBAAgB,MAAM,OAAO;AAChD,SAAO,eAAeA,OAAM,OAAO;AACrC;AAgBO,SAAS,oBACdC,IAC4B;AAE5B,eAAaA,GAAE,QAAQ;AACvB,eAAaA,GAAE,UAAU;AACzB,eAAaA,GAAE,WAAW;AAC1B,MAAI,CAACA,GAAE,aAAa;AAClB,UAAM,IAAI,MAAM,sBAAsB;AAAA,EACxC;AACA,kBAAgBA,GAAE,eAAe;AACjC,MAAIA,GAAE,iBAAiB,MAAM;AAC3B,eAAWA,GAAE,YAAY;AAAA,EAC3B;AACA,eAAaA,GAAE,SAAS;AAC1B;AAEO,SAAS,gBAAgB,MAAmC;AACjE,SAAO,KAAK,SAAkB;AAChC;AAiBO,SAAS,uBACdC,IAC+B;AAE/B,MAAIA,GAAE,cAAc,MAAM;AACxB,eAAWA,GAAE,SAAS;AAAA,EACxB;AACA,kBAAgBA,GAAE,UAAU;AAC5B,wBAAsBA,GAAE,eAAe;AACzC;AAEA,SAAS,sBACPA,IACuC;AACvC,eAAaA,EAAC;AACd,aAAW,KAAK,OAAO,OAAOA,EAAC,GAAG;AAChC,iBAAa,CAAC;AAAA,EAChB;AACF;AAIO,SAAS,yBACd,GACuC;AACvC,yBAAuB,EAAE,IAAI;AAC/B;AAEA,SAAS,mBAAmB,MAAsC;AAChE,SAAO,KAAK,SAAkB;AAChC;AAEA,SAAS,WAAWA,IAA+B;AACjD,eAAaA,EAAC;AACd,mBAAiBA,EAAC;AAClB,MAAIA,GAAE,cAAc,MAAM;AACxB,iBAAaA,GAAE,SAAS;AAAA,EAC1B;AAEA,eAAaA,GAAE,IAAI;AACnB,UAAQA,GAAE,MAAM;AAAA,IACd,KAAc;AACZ,0BAAoBA,EAAC;AACrB;AAAA,IACF,KAAc;AACZ,6BAAuBA,EAAC;AACxB;AAAA,IACF;AACE,YAAM,IAAI,MAAM,sBAAsBA,GAAE,IAAI,EAAE;AAAA,EAClD;AACF;AAeO,SAAS,oCACd,GACA,GACS;AACT,SACE,EAAE,gBAAgB,EAAE,gBACnB,EAAE,cAAc,YAAY,EAAE,cAAc,UAC7C,EAAE,cAAc,EAAE;AAEtB;AAEA,SAAS,2BACPA,IACmC;AACnC,eAAaA,EAAC;AACd,mBAAiBA,EAAC;AAClB,eAAaA,GAAE,IAAI;AACnB,eAAaA,GAAE,SAAS;AACxB,eAAaA,GAAE,WAAW;AAC1B,MAAIA,GAAE,eAAe,QAAW;AAC9B,kBAAcA,GAAE,UAAU;AAAA,EAC5B;AACF;AAEO,SAAS,uBACd,MACA,iBACgC;AAChC,SAAO;AAAA,IACL;AAAA,IACA,WAAW,gBAAgB,UAAU;AAAA,IACrC,aAAa,gBAAgB;AAAA,IAC7B,YAAY,gBAAgB,cAAc;AAAA,EAC5C;AACF;AAOA,SAAS,kBAAkBA,IAAsC;AAC/D,eAAaA,EAAC;AACd,mBAAiBA,EAAC;AAClB,6BAA2BA,GAAE,UAAU;AACvC,eAAaA,GAAE,SAAS;AAC1B;AAEA,SAAS,mBAAmBA,IAAwC;AAClE,cAAYA,EAAC;AACb,mBAAiBA,EAAC;AAClB,aAAW,MAAMA,IAAG;AAClB,sBAAkB,EAAE;AAAA,EACtB;AACF;AAEO,SAAS,aACdC,cACA,WACA,kBACA,YACA,aACA,iBAC
A,cACA,WACA,SACA,WACA,UACuB;AACvB,QAAM,OAAsB;AAAA,IAC1B,MAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACLA;AAAA,IACA,eAAe,MAAM,WAAW,OAAO;AAAA,EACzC;AACF;AAEO,SAAS,gBACdA,cACA,WACA,iBACA,YACA,WACA,SAC0B;AAC1B,SAAO;AAAA,IACLA;AAAA,IACA;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,0BACd,WACA,iBACA,YACA,WACA,SAC8B;AAC9B,QAAM,OAAyB;AAAA,IAC7B,MAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,SAAO,eAAe,MAAM,WAAW,OAAO;AAChD;AAEO,SAAS,UAAU,OAA4B;AACpD,gBAAc,KAAK;AACnB,SAAO,IAAI,OAAO,KAAK;AACzB;AAEA,SAAS,qBACPA,cACA,MACW;AACX,SAAO,IAAI,OAAOA,aAAY,MAAM,QAAQ,IAAI,CAAC,CAAC;AACpD;AAEO,SAAS,QAAQ,MAA8B;AACpD,QAAM,OAAkB,oBAAI,IAAI;AAChC,OAAK,IAAI,KAAK,SAAS;AACvB,QAAM,EAAC,KAAI,IAAI;AACf,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAc;AACZ,WAAK,aAAa,KAAK,IAAI,KAAK,SAAS;AAEzC;AAAA,IACF,KAAc;AAEZ;AAAA,IACF;AACE,kBAAY,IAAI;AAAA,EACpB;AAEA,aAAW,SAAS,KAAK,SAAS;AAChC,SAAK,IAAI,MAAM,SAAS;AAAA,EAC1B;AAEA,SAAO,OAAO,IAAI;AACpB;AAQO,SAAS,eACd,MACA,WACA,SACe;AACf,SAAO,WAAW;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEO,SAAS,iBAAiBD,IAA2C;AAC1E,MAAI,QAAuB;AACzB;AAAA,EACF;AAEA,eAAaA,EAAC;AACd,mBAAiBA,EAAC;AAClB,aAAWA,GAAE,IAAI;AACjB,eAAaA,GAAE,SAAS;AACxB,qBAAmBA,GAAE,OAAO;AAC9B;AAEA,SAAS,cAAc,OAAwD;AAC7E,QAAM,EAAC,KAAI,IAAI;AACf,mBAAiB,IAAI;AAErB,QAAM,OAAO,oBAAI,IAAI;AACrB,aAAW,SAAS,KAAK,SAAS;AAChC,UAAM,EAAC,KAAI,IAAI,MAAM;AACrB,QAAI,KAAK,IAAI,IAAI,GAAG;AAClB,YAAM,IAAI,MAAM,mBAAmB,IAAI,EAAE;AAAA,IAC3C;AACA,SAAK,IAAI,IAAI;AAAA,EACf;AACF;;;ACxjBO,IAAM,MAAM;AACZ,IAAM,SAAS;;;ACQf,IAAM,YAAN,MAAmC;AAAA,EAC/B;AAAA,EACA;AAAA,EAET,YAAY,MAAmB,KAAY;AACzC,SAAK,OAAO;AACZ,SAAK,MAAM;AAAA,EACb;AACF;AAEO,IAAM,aAAN,cAAyB,UAAsB;AAAA;AAAA;AAAA,EAGpD,QAAuB;AACrB,WAAO,KAAK,IAAI,MAAM;AAAA,EACxB;AAAA,EAEA,QAAuB;AACrB,WAAO,KAAK,IAAI,MAAM;AAAA,EACxB;AACF;AAGA,eAAsB,WACpB,IACA,OACA,IACA,KACA,KACA,aACA,YACe;AACf,MAAI;AACF,eAAW,SAAS,aAAa,KAAK,KAAK,aAAa,UAAU,GAAG;AACnE,cAAQ,IAAI;AAAA,QACV,KAAoB;AAClB,gBAAM,MAAM,IAAI,OAAO,GAAG;AAC1B;AAAA,QACF,KAAoB;AAClB,gBAAM,MAAM,IAAI,KAAK;AACrB;AAAA,MACJ;AAAA,IACF;AAAA,EACF,SAAS,GAAG;AAIV,OAAG,OAAO,sBAAsB,KAAK,KAAK,CAAC;AAAA,EAC7C;AACF;AAGO,SAAS,aACd,SACA,OACA,aACA,YACU;AACV,QAAM,SAAS,oBAAoB,OAAO,WAAW;AACrD,MAAI,WAAW,QAAW;AACxB,QAAI,YAAY;AACd,aAAO,CAAC;AAAA,IACV;AACA,UAAM,IAAI,MAAM,qBAAqB,WAAW,EAAE;AAAA,EACpD;AAEA,QAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAEvD,QAAM,YAAsB,CAAC;AAC7B,aAAWE,UAAS,QAAQ;AAC1B,QAAI,OAAOA,WAAU,UAAU;AAC7B,gBAAU,KAAK,eAAe,CAACA,QAAO,OAAO,CAAC,CAAC;AAAA,IACjD,OAAO;AACL,YAAM,IAAI,MAAM,yBAAyB;AAAA,IAC3C;AAAA,EACF;AAEA,SAAO;AACT;AAEO,IAAM,gBAAgB;AACtB,IAAM,gBAAgB;AAkBtB,SAAS,eAAe,UAA4B;AACzD,QAAM,YAAY,SAAS,CAAC;AAC5B,QAAM,UAAU,SAAS,CAAC;AAE1B,MAAI,UAAU,SAAS,IAAQ,GAAG;AAChC,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AACA,SAAO,gBAAgB,YAAY,gBAAgB;AACrD;AA2BO,SAAS,mBACd,WACA,SACQ;AACR,QAAM,IAAI,eAAe,CAAC,WAAW,WAAW,EAAE,CAAC;AACnD,MAAI,YAAY,QAAW;AACzB,WAAO,EAAE,MAAM,GAAG,EAAE,SAAS,CAAC;AAAA,EAChC;AACA,SAAO;AACT;AAGO,SAAS,eAAe,iBAAmC;AAChE,MAAI,gBAAgB,CAAC,MAAM,eAAe;AACxC,UAAM,IAAI,MAAM,iBAAiB;AAAA,EACnC;AAEA,QAAM,aAAa,cAAc;AACjC,QAAM,eAAe,cAAc;AACnC,QAAM,kBAAkB,gBAAgB,QAAQ,eAAe,UAAU;AACzE,MAAI,oBAAoB,IAAI;AAC1B,UAAM,IAAI,MAAM,oBAAoB;AAAA,EACtC;AAEA,QAAM,YAAY,gBAAgB,MAAM,YAAY,eAAe;AACnE,QAAM,UAAU,gBAAgB,MAAM,kBAAkB,YAAY;AACpE,SAAO,CAAC,WAAW,OAAO;AAC5B;AAEO,SAAS,oBACd,OACA,SAC6B;AAC7B,WAAS,WAAW,GAA+B;AACjD,QAAI,EAAE,WAAW,GAAG,KAAM,EAAE,WAAW,GAAG,KAAK,EAAE,WAAW,GAAI;AAC9D,aAAO;AAAA,IACT;AACA,WAAO,SAAS,GAAG,EAAE;AAAA,EACvB;AAEA,MAAI,YAAY,IAAI;AAClB,WAAO;AAAA,EACT;AACA,MAAI,CAAC,QAAQ,WAAW,GAAG,GAAG;AAC5B,UAAM,IAAI,
MAAM,yBAAyB,OAAO,EAAE;AAAA,EACpD;AAEA,QAAM,SAAS,QACZ,MAAM,GAAG,EACT,MAAM,CAAC,EACP,IAAI,OAAK,EAAE,QAAQ,OAAO,GAAG,EAAE,QAAQ,OAAO,GAAG,CAAC;AAErD,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC1B,QAAI;AACJ,QAAI,MAAM,QAAQ,MAAM,GAAG;AACzB,YAAM,IAAI,WAAW,KAAK;AAC1B,UAAI,MAAM,QAAW;AACnB,eAAO;AAAA,MACT;AACA,kBAAY,OAAO,CAAC;AAAA,IACtB,WAAW,WAAW,MAAM;AAC1B,aAAO;AAAA,IACT,WAAW,OAAO,WAAW,UAAU;AACrC,eAAS;AACT,kBAAY,OAAO,KAAK;AAAA,IAC1B;AACA,QAAI,cAAc,QAAW;AAC3B,aAAO;AAAA,IACT;AACA,aAAS;AAAA,EACX;AACA,SAAO;AACT;;;ACzMO,IAAM,OAAN,MAAW;AAAA,EACP;AAAA,EACT;AAAA,EACS;AAAA,EAET,YACE,SACA,KACA,SACA;AACA,SAAK,WAAW;AAChB,SAAK,MAAM;AACX,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,IAAI,KAA+B;AACjC,WAAO,KAAK,IAAI,IAAI,GAAG;AAAA,EACzB;AAAA,EAEA,IAAI,KAAmD;AACrD,WAAO,KAAK,IAAI,IAAI,GAAG;AAAA,EACzB;AAAA,EAEA,UAA4B;AAC1B,WAAO,KAAK,IAAI,QAAQ;AAAA,EAC1B;AAAA,EAEA,eAAe,WAA8B;AAC3C,UAAM,MAAM,KAAK,QAAQ,IAAI,SAAS;AACtC,QAAI,QAAQ,QAAW;AACrB,YAAM,IAAI,MAAM,uBAAuB,SAAS,EAAE;AAAA,IACpD;AACA,WAAO,IAAI;AAAA,EACb;AAAA,EAEA,IAAI,SAAkB;AACpB,WAAO,KAAK,SAAS;AAAA,EACvB;AAAA,EAEA,QAAc;AACZ,SAAK,SAAS,QAAQ;AAAA,EACxB;AACF;AAEO,SAAS,oBACd,SACA,eACe;AACf,SAAO,aAAa,mBAAmB,SAAS,aAAa;AAC/D;AAEA,eAAsB,aACpB,MACA,SACA,eACe;AACf,QAAM,SAAS,MAAM,eAAe,MAAM,OAAO;AACjD,SAAO,eAAe,QAAQ,SAAS,aAAa;AACtD;AAEA,eAAsB,aACpBC,OACA,SACA,eACe;AACf,QAAM,SAAS,MAAM,eAAeA,OAAM,OAAO;AACjD,SAAO,eAAe,QAAQ,SAAS,aAAa;AACtD;AAEA,SAAS,eACP,QACA,SACA,eACM;AACN,QAAM,UAAU,mBAAmB,QAAQ,SAAS,aAAa;AACjE,QAAM,MAAM,IAAI,UAAU,SAAS,eAAe,OAAO,SAAS;AAClE,SAAO,IAAI,KAAK,SAAS,KAAK,OAAO;AACvC;AAEO,SAAS,mBACd,QACA,SACA,eACwB;AACxB,QAAM,IAAI,oBAAI,IAAI;AAClB,aAAW,SAAS,OAAO,SAAS;AAClC,MAAE;AAAA,MACA,MAAM,WAAW;AAAA,MACjB,IAAI;AAAA,QACF;AAAA,QACA,IAAI,UAAU,SAAS,eAAe,MAAM,SAAS;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;;;AChGO,SAAS,SACd,OACA,IACiB;AACjB,SAAO,MAAM,MAAM,KAAK,GAAG,EAAE;AAC/B;AAEO,SAAS,0BACd,OACA,IACiB;AACjB,SAAO,MAAM,MAAM,MAAM,GAAG,EAAE;AAChC;AAEO,SAAS,UACd,OACA,IACiB;AACjB,SAAO,MAAM,MAAM,MAAM,GAAG,OAAM,UAAS;AACzC,UAAM,SAAS,MAAM,GAAG,KAAK;AAC7B,UAAM,MAAM,OAAO;AACnB,WAAO;AAAA,EACT,CAAC;AACH;AAOA,eAAsB,MACpB,GACA,IACiB;AACjB,QAAM,QAAQ,MAAM;AACpB,MAAI;AACF,WAAO,MAAM,GAAG,KAAK;AAAA,EACvB,UAAE;AACA,UAAM,QAAQ;AAAA,EAChB;AACF;;;AC7BO,IAAM,wBACJ,eAAe;AAAA,EACpB,QAAe,sBAAO,EAAE,SAAS;AAAA,EACjC,aAAoB,sBAAO;AAAA,EAC3B,YAAmB,uBAAQ,EAAE,SAAS;AACxC,CAAC;AAQI,IAAM,yBAAgC;AAAA,EAC3C;AACF;AAEO,SAAS,qBACd,GACA,GACS;AACT,SACE,EAAE,gBAAgB,EAAE,gBACnB,EAAE,cAAc,YAAY,EAAE,cAAc,WAC5C,EAAE,UAAU,SAAS,EAAE,UAAU;AAEtC;AAEO,SAAS,sBACd,GACA,GACS;AACT,MAAI,OAAO,KAAK,CAAC,EAAE,WAAW,OAAO,KAAK,CAAC,EAAE,QAAQ;AACnD,WAAO;AAAA,EACT;AACA,aAAW,CAAC,MAAM,MAAM,KAAK,OAAO,QAAQ,CAAC,GAAG;AAC9C,UAAM,SAAS,EAAE,IAAI;AACrB,QAAI,CAAC,UAAU,CAAC,qBAAqB,QAAQ,MAAM,GAAG;AACpD,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;;;AC1DA,IAAM,oBAA2B,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA,EAK9C,UAAU;AAAA;AAAA;AAAA;AAAA,EAKV,cAAqB,cAAqB,sBAAO,CAAC;AAAA;AAAA;AAAA;AAAA,EAKlD,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWT,aAAoB,eAAsB,sBAAO,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBlD,2BAAkC,sBAAc,sBAAO,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOxD,UAAiB,uBAAQ;AAC3B,CAAC;AAIM,IAAM,0BAA0B;AAEvC,SAAS,kBAAkB,OAA8C;AACvE,EAAOC,QAAO,OAAO,iBAAiB;AACxC;AAEA,SAAS,0BAA0B,WAAoC;AACrE,eAAa,SAAS;AACtB,QAAM,eAAe,oBAAI,IAAgC;AACzD,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,SAAS,GAAG;AACpD,QAAI,UAAU,QAAW;AACvB,wBAAkB,KAAK;AACvB,mBAAa,IAAI,KAAK,KAAK;AAAA,IAC7B;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,0BACP,cACA,UACiB;AACjB,QAAM,YAAgD,CAAC;AACvD,aAAW,CAAC,eAAe,WAAW,KAAK,aAAa,QAAQ,GAAG;AACjE,aAAS,gBAAgB,YAAY,QAAQ;AAC7C,cAAU,aAAa,IAAI;AAAA,MACzB,GAAG;AAAA,MA
CH,cAAc,CAAC,GAAG,YAAY,aAAa,OAAO,CAAC;AAAA,IACrD;AAAA,EACF;AACA,SAAO,WAAW,SAAS;AAC7B;AAEA,eAAe,sBACbC,OACA,SACyB;AACzB,QAAM,QAAQ,MAAM,QAAQ,SAASA,KAAI;AACzC,SAAO,0BAA0B,OAAO,IAAI;AAC9C;AAEA,eAAsB,gBAAgB,SAAwC;AAC5E,QAAMA,QAAO,MAAM,QAAQ,QAAQ,uBAAuB;AAC1D,MAAI,CAACA,OAAM;AACT,WAAO,oBAAI,IAAI;AAAA,EACjB;AACA,SAAO,sBAAsBA,OAAM,OAAO;AAC5C;AAEA,eAAsB,gBACpB,cACA,UACyB;AACzB,QAAM,mBAAmB,MAAM,gBAAgB,QAAQ;AACvD,aAAW,CAAC,eAAe,WAAW,KAAK,cAAc;AACvD,UAAM,kBAAkB,iBAAiB,IAAI,aAAa;AAC1D,8BAA0B,aAAa,eAAe;AAAA,EACxD;AACA,SAAO,yBAAyB,cAAc,QAAQ;AACxD;AAEA,eAAsB,eACpB,eACA,aACA,UACyB;AACzB,QAAM,mBAAmB,MAAM,gBAAgB,QAAQ;AACvD,QAAM,kBAAkB,iBAAiB,IAAI,aAAa;AAC1D,4BAA0B,aAAa,eAAe;AACtD,QAAM,kBAAkB,IAAI,IAAI,gBAAgB;AAChD,kBAAgB,IAAI,eAAe,WAAW;AAC9C,SAAO,yBAAyB,iBAAiB,QAAQ;AAC3D;AAeA,SAAS,0BACP,aACA,iBACA;AACA,QAAM,kBAAkB,IAAI,IAAI,YAAY,YAAY;AACxD;AAAA,IACE,gBAAgB,SAAS,YAAY,aAAa;AAAA,IAClD;AAAA,EACF;AACA,MAAI,oBAAoB,QAAW;AACjC;AAAA,MACE,sBAAsB,gBAAgB,SAAS,YAAY,OAAO;AAAA,MAClE;AAAA,IACF;AACA;AAAA,MACE,kBAAkB,iBAAiB,gBAAgB,YAAY;AAAA,MAC/D;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAe,yBACb,cACA,UACyB;AACzB,QAAM,YAAY,0BAA0B,cAAc,QAAQ;AAClE,QAAM,OAAkB,oBAAI,IAAI;AAChC,aAAW,eAAe,aAAa,OAAO,GAAG;AAC/C,SAAK,IAAI,YAAY,QAAQ;AAAA,EAC/B;AACA,QAAM,QAAQ,SAAS,YAAY,WAAW,OAAO,IAAI,CAAC;AAC1D,QAAM,SAAS,SAAS,KAAK;AAC7B,QAAM,SAAS,QAAQ,yBAAyB,MAAM,IAAI;AAC1D,SAAO;AACT;AAEO,SAAS,kBACd,iBACA,cACS;AACT,MAAI,aAAa,WAAW,gBAAgB,MAAM;AAChD,WAAO;AAAA,EACT;AACA,aAAW,eAAe,cAAc;AACtC,QAAI,CAAC,gBAAgB,IAAI,WAAW,GAAG;AACrC,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEA,eAAsB,eACpB,IACA,SACkC;AAClC,QAAM,eAAe,MAAM,gBAAgB,OAAO;AAClD,SAAO,aAAa,IAAI,EAAE;AAC5B;AAEO,SAAS,+BAA+B,aAA0B;AACvE,aAAW,CAAC,UAAU,UAAU,KAAK,OAAO;AAAA,IAC1C,YAAY;AAAA,EACd,GAAG;AACD,UAAM,2BACJ,YAAY,0BAA0B,QAAQ;AAChD,QACG,6BAA6B,UAAa,eAAe,KAC1D,2BAA2B,YAC3B;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AASA,eAAsB,mBACpB,eACA,UACe;AACf,QAAM,cAAc,MAAM,eAAe,eAAe,QAAQ;AAChE,MAAI,CAAC,aAAa;AAEhB;AAAA,EACF;AACA,QAAM,sBAAsB;AAAA,IAC1B,GAAG;AAAA,IACH,UAAU;AAAA,EACZ;AACA,QAAM,eAAe,eAAe,qBAAqB,QAAQ;AACnE;;;AC1PA,eAAsB,qBACpB,IACc;AACd,QAAM,MAAW,CAAC;AAClB,mBAAiBC,MAAK,IAAI;AACxB,QAAI,KAAKA,EAAC;AAAA,EACZ;AACA,SAAO;AACT;;;ACJO,SAAS,KACd,QACA,QACuB;AAEvB,SAAO,qBAAqB,OAAO,KAAK,MAAM,CAAC;AACjD;;;ACVA,SAAQ,YAAW;AAyBZ,IAAM,aAAN,cAAyB,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiB/B,QAAQ,IAAI,KAAK;AAAA,EACjB,YAAwD,oBAAI,IAAI;AAAA,EAIhE;AAAA,EACA;AAAA,EAET,YACE,UACA,eACA,OAAa,WACb,UAAU,IAAI,MACd,UAAU,KAAK,MACf,eAA6C,gBAC7C,iBACA;AACA,UAAM,UAAU,eAAe,MAAM,cAAc,eAAe;AAElE,SAAK,UAAU;AACf,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,eAAe,MAA6C;AAC1D,WAAO,KAAK,SAAS;AACrB,SAAK,UAAU,IAAI,KAAK,MAAM,IAAI;AAClC,SAAK,OAAO,IAAI,KAAK,MAAM,IAAI;AAAA,EACjC;AAAA,EAEA,WAAW,MAA6C;AACtD,WAAO,KAAK,SAAS;AACrB,SAAK,UAAU,OAAO,KAAK,IAAI;AAC/B,SAAK,OAAO,cAAc;AAC1B,SAAK,eAAe,IAAI;AAAA,EAC1B;AAAA,EAEA,oBACE,SACA,OACkB;AAClB,UAAM,IAAI,IAAI,iBAAiB,SAAS,cAAc,GAAG,OAAO,IAAI;AACpE,SAAK,eAAe,CAAC;AACrB,WAAO;AAAA,EACT;AAAA,EAEA,gBAAgB,SAAiD;AAC/D,UAAM,IAAI,IAAI,aAAa,SAAS,cAAc,GAAG,IAAI;AACzD,SAAK,eAAe,CAAC;AACrB,WAAO;AAAA,EACT;AAAA,EAQA,YACE,SACA,OACiC;AACjC,UAAM,IAAI,YAAY,SAAS,cAAc,GAAG,OAAO,IAAI;AAC3D,SAAK,eAAe,CAAC;AACrB,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,KAAa,OAAuC;AACtD,WAAO,KAAK,MAAM,SAAS,YAAY;AACrC,YAAM,cAAc,MAAM,KAAK,QAAQ,KAAK,QAAQ;AACpD,YAAM,YAAY,KAAK,aAAa,KAAK,KAAK;AAC9C,YAAM,WAAW,MAAM,YAAY,IAAI,KAAK,OAAO,WAAW,IAAI;AAGlE,UAAI,SAAS,iBAAiB,IAAI,IAAI,KAAK,SAAS;AAClD,cAAM,aAAa,KAAK;AACxB,cAAM,aAAa;AAAA,UACjB,SAAS;AAAA,UACT,CAAAC,WAASA,OAAM,CAAC;AAAA,UAChB,KAAK,UAAU;AAAA,UACf,KAAK,UAAU;AAAA,QACjB;AACA,cAAM,EAAC,MAAK,IAA
I;AAChB,cAAM,UAAyB,WAAW,IAAI,CAAAC,aAAW;AACvD,gBAAM,OAAO,KAAK,YAAYA,UAAS,KAAK;AAC5C,iBAAO,8BAA8B,MAAM,KAAK,YAAY;AAAA,QAC9D,CAAC;AACD,cAAM,UAAU,KAAK,oBAAoB,SAAS,QAAQ,CAAC;AAC3D,aAAK,WAAW,QAAQ;AACxB;AAAA,MACF;AAEA,WAAK,WAAW,SAAS;AAAA,IAC3B,CAAC;AAAA,EACH;AAAA,EAEA,IAAI,KAA+B;AACjC,WAAO,KAAK,MAAM,SAAS,YAAY;AACrC,YAAM,cAAc,MAAM,KAAK,QAAQ,KAAK,QAAQ;AACpD,YAAM,cAAc,MAAM,YAAY,IAAI,KAAK,IAAI;AAInD,YAAM,QAAQ,KAAK,aAAa,YAAY;AAC5C,UAAI,OAAO;AAET,YAAI,YAAY,QAAQ,KAAK,YAAY,QAAQ,WAAW,GAAG;AAC7D,eAAK,WAAY,YAAiC,QAAQ,CAAC,EAAE,CAAC;AAAA,QAChE,OAAO;AACL,eAAK,WAAW,YAAY;AAAA,QAC9B;AAAA,MACF;AAEA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEA,QAAuB;AACrB,WAAO,KAAK,MAAM,SAAS,MAAM;AAC/B,WAAK,UAAU,MAAM;AACrB,WAAK,WAAW;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EAEA,QAAuB;AACrB,WAAO,KAAK,MAAM,SAAS,YAAY;AACrC,YAAM,WAAW,KAAK;AAEtB,UAAI,KAAK,aAAa,WAAW;AAE/B,cAAM,QAAQ,SAAS,YAAY,eAAe,CAAC,CAAC;AACpD,cAAM,SAAS,SAAS,KAAiC;AACzD,eAAO,MAAM;AAAA,MACf;AAEA,YAAM,YAAqB,CAAC;AAC5B,YAAM,UAAU;AAAA,QACd,KAAK;AAAA,QACL;AAAA,QACA,SAAS;AAAA,QACT,KAAK;AAAA,QACL,KAAK;AAAA,MACP;AACA,YAAM,QAAQ,IAAI,UAAU,IAAI,WAAS,SAAS,SAAS,KAAK,CAAC,CAAC;AAClE,WAAK,UAAU,MAAM;AACrB,WAAK,WAAW;AAChB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAEA,SAAS,gBACPC,OACA,WACAC,cACA,UACA,eACM;AACN,QAAM,OAAO,SAAS,IAAID,KAAI;AAC9B,MAAI,SAAS,QAAW;AAEtB,WAAOA;AAAA,EACT;AAEA,MAAI,eAAe,IAAI,GAAG;AACxB,UAAME,SAAQD,aAAY,YAAY,MAAM,aAAa,GAAG,CAAC,CAAC;AAC9D,cAAU,KAAKC,MAAK;AACpB,WAAOA,OAAM;AAAA,EACf;AAIA,QAAM,OAAe,CAAC;AACtB,QAAM,EAAC,QAAO,IAAI;AAClB,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,UAAM,QAAQ,QAAQ,CAAC;AACvB,UAAM,YAAY,MAAM,CAAC;AACzB,UAAM,eAAe;AAAA,MACnB;AAAA,MACA;AAAA,MACAD;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,QAAI,iBAAiB,WAAW;AAG9B,cAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,cAAc,MAAM,CAAC,CAAC;AAAA,IAChD;AACA,SAAK,KAAK,YAAY;AAAA,EACxB;AACA,QAAM,QAAQA,aAAY,YAAY,MAAM,aAAa,GAAG,OAAO,IAAI,CAAC;AACxE,YAAU,KAAK,KAAK;AACpB,SAAO,MAAM;AACf;;;AC3OO,SAAS,KAAQ,SAA2B;AACjD,MAAI;AACJ,SAAO,MAAM;AACX,QAAI,UAAU,QAAW;AACvB,cAAQ,QAAQ;AAAA,IAClB;AACA,WAAO;AAAA,EACT;AACF;;;ACiBO,IAAM,WAAN,cAAuB,IAA0B;AAAA,EAC7C,IAAI,KAAa,OAA2B;AACnD,QAAI,MAAM,WAAW,GAAG;AACtB,aAAO;AAAA,IACT;AACA,WAAO,MAAM,IAAI,KAAK,KAAK;AAAA,EAC7B;AACF;AAMA,eAAsBE,MACpB,SACA,SACA,MACA,YACA,eACmB;AACnB,QAAM,CAAC,WAAW,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,IAC/C,eAAe,SAAS,IAAI;AAAA,IAC5B,eAAe,SAAS,IAAI;AAAA,EAC9B,CAAC;AAED,SAAO,YAAY,WAAW,WAAW,MAAM,YAAY,aAAa;AAC1E;AAOA,eAAsB,YACpB,WACA,WACA,MACA,YACA,eACmB;AACnB,QAAM,WAAW,IAAI,SAAS;AAC9B,MAAI,CAAC,WAAW,mBAAmB,GAAG;AACpC,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,IAAI,UAAU,MAAM,eAAe,UAAU,SAAS;AACrE,QAAM,SAAS,IAAI,UAAU,MAAM,eAAe,UAAU,SAAS;AACrE,QAAM,YAAY,MAAM,KAAU,QAAQ,MAAM;AAChD,WAAS,IAAI,IAAI,SAAS;AAE1B,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AACT;AAEA,eAAsB,mBACpB,YACA,YACA,MACA,UACA,YACA,eACA;AACA,QAAM,aAAa,mBAAmB,YAAY,MAAM,aAAa;AACrE,QAAM,aAAa,mBAAmB,YAAY,MAAM,aAAa;AAErE,aAAW,CAAC,cAAc,QAAQ,KAAK,YAAY;AACjD,QAAI,CAAC,WAAW,2BAA2B,YAAY,GAAG;AACxD;AAAA,IACF;AAEA,UAAM,WAAW,WAAW,IAAI,YAAY;AAC5C,QAAI,aAAa,QAAW;AAC1B,aAAO,aAAa,QAAQ;AAC5B,YAAM,QAAQ,MAAM,KAAU,SAAS,KAAK,SAAS,GAAG;AACxD,iBAAW,OAAO,YAAY;AAC9B,eAAS,IAAI,cAAc,KAAK;AAAA,IAClC,OAAO;AAEL,YAAM,QAAQ,MAAM,iBAAiB,SAAS,KAAK,KAAK;AACxD,eAAS,IAAI,cAAc,KAAK;AAAA,IAClC;AAAA,EACF;AAEA,aAAW,CAAC,cAAc,QAAQ,KAAK,YAAY;AACjD,QAAI,CAAC,WAAW,2BAA2B,YAAY,GAAG;AACxD;AAAA,IACF;AAEA,UAAM,QAAQ,MAAM,iBAAiB,SAAS,KAAK,KAAK;AACxD,aAAS,IAAI,cAAc,KAAK;AAAA,EAClC;AACF;;;AC1FO,IAAM,QAAN,cAAoB,KAAK;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EAKA;AAAA,EACA;AAAA,EAET,YACE,UACA,KACA,OACA,MACA,SACA,UACA,eACA;AAEA,UAAM,UAAU,KAAK,OAAO;AAC5B,SAAK,YAAY;AACjB,SAAK,SAAS;AACd,SAAK,QAA
Q;AACb,SAAK,YAAY;AACjB,SAAK,iBAAiB;AAGtB,QAAI,UAAU,QAAW;AACvB,aAAO,KAAK,cAAc,SAAS;AAAA,IACrC,OAAO;AACL,aAAO,KAAK,cAAc,MAAM,MAAM,IAAI;AAAA,IAC5C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,IACJ,IACA,KACA,OACe;AACf,UAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,CAAC;AAC3C,UAAM,cAAc,IAAI,KAAK,SAAS,KAAK,QAAQ,KAAK;AAExD,UAAM,KAAK,IAAI,IAAI,KAAK,KAAK;AAAA,EAC/B;AAAA,EAEA,gBAAiC;AAC/B,WAAO,cAAc,KAAK,WAAW,KAAK,WAAW,KAAK,KAAK;AAAA,EACjE;AAAA,EAEA,MAAM,IAAI,IAAgB,KAA+B;AAEvD,UAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,CAAC;AAC3C,QAAI,WAAW,QAAW;AACxB,YAAM,cAAc,IAAI,KAAK,SAAS,KAAK,QAAQ,MAAS;AAAA,IAC9D;AACA,WAAO,KAAK,IAAI,IAAI,GAAG;AAAA,EACzB;AAAA,EAEA,MAAM,QAAuB;AAC3B,UAAM,KAAK,IAAI,MAAM;AACrB,UAAM,KAAK,CAAC;AACZ,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,SAAG,KAAK,IAAI,MAAM,CAAC;AAAA,IACrB;AACA,UAAM,QAAQ,IAAI,EAAE;AAAA,EACtB;AAAA,EAEA,MAAM,YAAyC;AAC7C,UAAM,YAAY,MAAM,KAAK,IAAI,MAAM;AACvC,UAAM,eAA8B,CAAC;AAErC,eAAW,SAAS,KAAK,QAAQ,OAAO,GAAG;AACzC,YAAMC,aAAY,MAAM,MAAM,MAAM;AACpC,YAAM,cAA2B;AAAA,QAC/B,YAAY,MAAM,KAAK;AAAA,QACvB,WAAAA;AAAA,MACF;AACA,mBAAa,KAAK,WAAW;AAAA,IAC/B;AAEA,QAAI;AACJ,UAAM,OAAO,KAAK;AAClB,YAAQ,KAAK,MAAM;AAAA,MACjB,KAAc,WAAW;AACvB,eAAO,KAAK,kBAAgC,IAAI;AAChD,cAAM;AAAA,UACJ;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF,IAAI;AACJ,iBAAS;AAAA,UACP,KAAK,UAAU;AAAA,UACf;AAAA,UACA,MAAM,yBAAyB,WAAW,KAAK,SAAS;AAAA,UACxD;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,KAAK;AAAA,QACP;AACA;AAAA,MACF;AAAA,MAEA,KAAc,cAAc;AAC1B,eAAO,KAAK,iBAA+B,IAAI;AAC/C,cAAM,EAAC,WAAW,iBAAiB,WAAU,IAAI;AACjD,iBAAS;AAAA,UACP,KAAK,UAAU;AAAA,UACf;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF;AAAA,IACF;AACA,UAAM,KAAK,UAAU,SAAS,OAAO,KAAK;AAC1C,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAO,UAAiC;AAC5C,UAAM,SAAS,MAAM,KAAK,UAAU;AACpC,UAAM,aAAa,OAAO,MAAM;AAChC,UAAM,KAAK,UAAU,QAAQ,UAAU,UAAU;AACjD,UAAM,KAAK,UAAU,OAAO;AAC5B,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,gBACJ,UACA,YAC2B;AAC3B,UAAM,SAAS,KAAK,UAAU;AAC9B,UAAM,UAAU,MAAM,KAAK,eAAe,UAAU;AACpD,UAAM,cAAc,MAAM,QAAQ,MAAM;AACxC,UAAM,KAAK,UAAU,QAAQ,UAAU,UAAU;AACjD,UAAM,KAAK,UAAU,OAAO;AAC5B,WAAO,CAAC,YAAY,OAAO;AAAA,EAC7B;AAAA,EAEA,MAAM,eAAe,YAAsD;AACzE,UAAM,WAAW,IAAI,SAAS;AAC9B,QAAI,CAAC,WAAW,mBAAmB,GAAG;AACpC,aAAO;AAAA,IACT;AAEA,QAAI,YAA0B,CAAC;AAC/B,QAAI,KAAK,QAAQ;AACf,YAAM,WAAW,IAAI;AAAA,QACnB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK,OAAO;AAAA,MACd;AACA,kBAAY,MAAM,KAAK,UAAU,KAAK,GAAG;AAAA,IAC3C;AACA,aAAS,IAAI,IAAI,SAAS;AAC1B,QAAI;AACJ,QAAI,KAAK,QAAQ;AACf,qBAAe;AAAA,QACb,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,MACP;AAAA,IACF,OAAO;AACL,qBAAe,oBAAI,IAAI;AAAA,IACzB;AAEA,eAAW,CAAC,MAAM,KAAK,KAAK,KAAK,SAAS;AACxC,UAAI,CAAC,WAAW,2BAA2B,IAAI,GAAG;AAChD;AAAA,MACF;AACA,YAAM,aAAa,aAAa,IAAI,IAAI;AACxC,aAAO,UAAU,UAAU;AAE3B,YAAM,kBAAkB,OAAO,aAC3B,KAAK,WAAW,KAAK,MAAM,GAAG;AAAA;AAAA,QAE9B,iBAAiB,MAAM,KAAK,KAAK;AAAA;AACrC,eAAS,IAAI,MAAM,eAAe;AAAA,IACpC;AAIA,eAAW,CAAC,MAAM,UAAU,KAAK,cAAc;AAC7C,UACE,CAAC,KAAK,QAAQ,IAAI,IAAI,KACtB,WAAW,2BAA2B,IAAI,GAC1C;AACA,cAAM,kBAAkB,MAAM,iBAAiB,WAAW,KAAK,KAAK;AACpE,iBAAS,IAAI,MAAM,eAAe;AAAA,MACpC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,QAAc;AACZ,SAAK,UAAU,QAAQ;AAAA,EACzB;AACF;AAEA,eAAsB,cACpB,WACA,aACA,iBACA,cACA,UACA,WACA,UACA,eACgB;AAChB,QAAM,QAAQ,MAAM,eAAe,WAAW,QAAQ;AACtD,QAAM,aAAa,IAAI,WAAW,UAAU,eAAe,MAAM,SAAS;AAC1E,QAAM,aAAa,MAAM,MAAM,kBAAkB,UAAU,QAAQ;AACnE,QAAM,UAAU,oBAAoB,OAAO,UAAU,aAAa;AAClE,SAAO,iBAA+B,IAAI;AAC1C,SAAO,IAAI;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IAEA;AAAA,MACE,MAAe;AAAA,MACf;AAAA,MACA,kBAAkB,MAAM,yBAAyB,WAAW,QAAQ;AAAA,MACpE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF
;AAEA,eAAsB,qBACpB,WACA,iBACA,YACA,UACA,UACA,eACgB;AAChB,QAAM,QAAQ,MAAM,eAAe,WAAW,QAAQ;AACtD,QAAM,aAAa,IAAI,WAAW,UAAU,eAAe,MAAM,SAAS;AAC1E,SAAO,IAAI;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA,EAAC,WAAW,MAAe,cAAc,iBAAiB,WAAU;AAAA,IACpE,oBAAoB,OAAO,UAAU,aAAa;AAAA,IAClD;AAAA,IACA;AAAA,EACF;AACF;AAEA,eAAsB,cACpB,IACA,SACA,KACA,cACA,QACe;AACf,QAAM,KAAsB,CAAC;AAC7B,aAAW,OAAO,QAAQ,OAAO,GAAG;AAClC,UAAM,EAAC,UAAS,IAAI,IAAI,KAAK;AAC7B,QAAI,CAAC,aAAa,IAAI,WAAW,SAAS,GAAG;AAC3C,YAAM,SAAS,MAAM,aAAa;AAClC,UAAI,WAAW,QAAW;AACxB,WAAG;AAAA,UACD;AAAA,YACE;AAAA,YACA,IAAI;AAAA,YACW;AAAA,YACf;AAAA,YACA;AAAA,YACA,IAAI,KAAK,WAAW;AAAA,YACpB,IAAI,KAAK,WAAW,cAAc;AAAA,UACpC;AAAA,QACF;AAAA,MACF;AACA,UAAI,WAAW,QAAW;AACxB,WAAG;AAAA,UACD;AAAA,YACE;AAAA,YACA,IAAI;AAAA,YACW;AAAA,YACf;AAAA,YACA;AAAA,YACA,IAAI,KAAK,WAAW;AAAA,YACpB,IAAI,KAAK,WAAW,cAAc;AAAA,UACpC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,EAAE;AACtB;AAEO,SAAS,oBACd,QACA,UACA,eACyB;AACzB,QAAM,IAAI,oBAAI,IAAI;AAClB,aAAW,SAAS,OAAO,SAAS;AAClC,MAAE;AAAA,MACA,MAAM,WAAW;AAAA,MACjB,IAAI;AAAA,QACF;AAAA,QACA,IAAI,WAAW,UAAU,eAAe,MAAM,SAAS;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEA,eAAsB,iBACpB,IACA,UACA,UACA,QACA,aACA,YACA,eACqB;AACrB,QAAM,WAAW,IAAI,WAAW,UAAU,aAAa;AACvD,mBAAiB,SAAS,SAAS,KAAK,MAAM,GAAG;AAC/C,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,CAAC,IAAI,WAAW,MAAM,GAAG;AAC3B;AAAA,IACF;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACe;AAAA,MACf;AAAA,MACA,MAAM,CAAC;AAAA,MACP;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;;;ACjYO,IAAM,sBAAyD,sBAAO;AAOtE,IAAM,iBAA+C,sBAAO;;;ACT5D,SAAS,eAAuB;AACrC,QAAM,SAAS;AACf,QAAM,OAAO,aAAa;AAC1B,QAAM,MAAM,aAAa;AACzB,QAAM,WAAY,QAAQ,MAAO;AACjC,SAAO,SAAS,SAAS,EAAE,EAAE,MAAM,CAAC,MAAM,EAAE,SAAS,QAAQ,GAAG;AAClE;;;ACmCA,IAAM,iBAAwB,eAAe;AAAA,EAC3C,sBAA6B,sBAAO;AAAA,EAEpC,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,EAMV,iBAAiB,WAAW,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMrC,eAAe;AACjB,CAAC;AAID,IAAM,iBAAwB,eAAe;AAAA,EAC3C,sBAA6B,sBAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcpC,eAAsB,cAAc,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,EAM9C,aAAa,WAAW,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMjC,eAAe;AACjB,CAAC;AAMD,SAAS,WAAW,QAAoC;AACtD,SAAQ,OAAoB,kBAAkB;AAChD;AAEO,IAAM,oBAAoB;AAEjC,IAAM,eAAsB,qBAAM,gBAAgB,cAAc;AAEhE,SAAS,aAAa,OAAyC;AAC7D,EAAOC,QAAO,OAAO,YAAY;AACnC;AAEO,SAAS,eAAe,OAA2C;AACxE,EAAOA,QAAO,OAAO,cAAc;AACrC;AAEA,SAAS,qBAAqB,WAA+B;AAC3D,eAAa,SAAS;AACtB,QAAM,UAAU,oBAAI,IAAI;AACxB,aAAW,OAAO,WAAW;AAC3B,QAAI,OAAO,WAAW,GAAG,GAAG;AAC1B,YAAM,QAAQ,UAAU,GAAG;AAC3B,UAAI,UAAU,QAAW;AACvB,qBAAa,KAAK;AAClB,gBAAQ,IAAI,KAAK,KAAK;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,qBACP,SACA,UACiB;AACjB,aAAW,UAAU,QAAQ,OAAO,GAAG;AACrC,QAAI,WAAW,MAAM,GAAG;AACtB,aAAO,cAAc,QAAQ,SAAS,eAAe;AACrD,UAAI,OAAO,aAAa;AACtB,iBAAS,gBAAgB,OAAO,WAAW;AAAA,MAC7C;AAAA,IACF,OAAO;AACL,eAAS,gBAAgB,OAAO,QAAQ;AACxC,UAAI,OAAO,iBAAiB;AAC1B,iBAAS,gBAAgB,OAAO,eAAe;AAAA,MACjD;AAAA,IACF;AAAA,EACF;AACA,SAAO,WAAW,OAAO,YAAY,OAAO,CAAC;AAC/C;AAEA,eAAsB,WAAW,SAAmC;AAClE,QAAMC,QAAO,MAAM,QAAQ,QAAQ,iBAAiB;AACpD,SAAO,iBAAiBA,OAAM,OAAO;AACvC;AAEA,eAAe,iBACbA,OACA,SACoB;AACpB,MAAI,CAACA,OAAM;AACT,WAAO,oBAAI,IAAI;AAAA,EACjB;AACA,QAAM,QAAQ,MAAM,QAAQ,SAASA,KAAI;AACzC,SAAO,qBAAqB,OAAO,IAAI;AACzC;AAKO,IAAM,2BAAN,cAAuC,MAAM;AAAA,EAClD,OAAO;AAAA,EACE;AAAA,EACT,YAAY,IAAc;AACxB,UAAM,+BAA+B,EAAE,EAAE;AACzC,SAAK,KAAK;AAAA,EACZ;AACF;AAKA,eAAsB,qBACpB,IACA,SACe;AACf,MAAI,CAAE,MAAM,eAAe,IAAI,OAAO,GAAI;AACxC,UAAM,IAAI,yBAAyB,EAAE;AAAA,EACvC;AACF;AAEA,eAAsB,eACpB,IACA,SACkB;AAClB,SAAO,CAAC,CAAE,MAAM,UAAU,IAAI,OAAO;AACvC;AAEA,eAAsB,UACpB,IACA,SAC6B;AAC7B,QAAM,UAAU,MAAM,WAAW,OAAO;AACxC,SAAO,QAAQ,IAAI,EAAE;AACvB;AAEA,eAAsB,cACp
B,IACA,SACiB;AACjB,QAAM,SAAS,MAAM,UAAU,IAAI,OAAO;AAC1C,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,yBAAyB,EAAE;AAAA,EACvC;AACA,SAAO;AACT;AASO,SAAS,aACd,aACA,IACA,QACA,cACA,SACA,eACA,0BAC6B;AAC7B,SAAO,UAAU,QAAQ,OAAM,aAAY;AACzC,mBAAe,kCACb,WACA,YACAC,YACAC,eAC6B;AAC7B,YAAM,kBAAkB;AAAA,QACtB;AAAA,QACA,CAAC;AAAA,QACD;AAAA,QACAD;AAAA,QACAC;AAAA,MACF;AACA,YAAM,QAAQ,SAAS;AAAA,QACrB;AAAA,QACA,QAAQ,eAAe;AAAA,MACzB;AAEA,YAAM,mBAAmB,aAAa;AAEtC,YAAM,YAAsB;AAAA,QAC1B,sBAAsB,KAAK,IAAI;AAAA,QAC/B,eAAe,CAAC,MAAM,IAAI;AAAA,QAC1B,aAAa;AAAA,QACb,eAAe;AAAA,MACjB;AAEA,YAAM,aAAa,IAAI,IAAI,OAAO,EAAE,IAAI,aAAa,SAAS;AAE9D,YAAM,cAA2B;AAAA,QAC/B,UAAU,MAAM;AAAA,QAChB;AAAA,QACA;AAAA,QACA,aAAa,CAAC;AAAA,QACd,2BAA2B,CAAC;AAAA,QAC5B,UAAU;AAAA,MACZ;AAEA,YAAM,QAAQ,IAAI;AAAA,QAChB,SAAS,SAAS,KAAK;AAAA,QACvB,WAAW,YAAY,QAAQ;AAAA,QAC/B,eAAe,kBAAkB,aAAa,QAAQ;AAAA,MACxD,CAAC;AAED,aAAO,CAAC,WAAW,MAAM,MAAM,YAAY,IAAI;AAAA,IACjD;AAEA,UAAM,UAAU,MAAM,WAAW,QAAQ;AAEzC,UAAM,MAAM,MAAM,mBAAmB,UAAU,cAAc,OAAO;AACpE,QAAI,IAAI,SAAS,gCAAgC;AAG/C,YAAM,EAAC,eAAe,SAAQ,IAAI;AAElC,YAAM,YAAsB;AAAA,QAC1B;AAAA,QACA,eAAe,CAAC,QAAQ;AAAA,QACxB,sBAAsB,KAAK,IAAI;AAAA,QAC/B,aAAa;AAAA,MACf;AACA,YAAM,aAAa,IAAI,IAAI,OAAO,EAAE,IAAI,aAAa,SAAS;AAC9D,YAAM,WAAW,YAAY,QAAQ;AAErC,aAAO,CAAC,WAAW,UAAU,YAAY,KAAK;AAAA,IAChD;AAEA,QACE,CAAC,4BACD,IAAI,SAAS,+BACb;AAEA,YAAM,kBAAkB,SAAS,YAAY,eAAe,CAAC,CAAC;AAC9D,YAAM,SAAS,SAAS,eAAe;AAGvC,YAAMA,gBAA8B,CAAC;AAIrC,iBAAW,CAAC,MAAM,eAAe,KAAK,OAAO,QAAQ,OAAO,GAAG;AAC7D,cAAM,uBAAuB;AAAA,UAC3B;AAAA,UACA;AAAA,QACF;AACA,QAAAA,cAAa,KAAK;AAAA,UAChB,YAAY;AAAA,UACZ,WAAW,gBAAgB;AAAA,QAC7B,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA,gBAAgB;AAAA,QAChBA;AAAA,MACF;AAAA,IACF;AAIA,WAAO,IAAI,SAAS,8BAA8B;AAElD,UAAM,EAAC,SAAQ,IAAI;AAGnB,UAAM,eAA8B,CAAC;AACrC,UAAM,EAAC,WAAW,SAAS,WAAU,IAAI;AACzC,UAAM,MAAM,IAAI,UAAU,UAAU,eAAe,SAAS;AAE5D,eAAW,CAAC,MAAM,eAAe,KAAK,OAAO,QAAQ,OAAO,GAAG;AAC7D,YAAM,EAAC,SAAS,IAAI,aAAa,aAAa,MAAK,IAAI;AACvD,YAAM,uBAA6C;AAAA,QACjD;AAAA,QACA,WAAW;AAAA,QACX;AAAA,QACA;AAAA,MACF;AAEA,YAAM,WAAW,qBAAqB,YAAY,oBAAoB;AACtE,UAAI,UAAU;AACZ,qBAAa,KAAK;AAAA,UAChB,YAAY;AAAA,UACZ,WAAW,SAAS;AAAA,QACtB,CAAC;AAAA,MACH,OAAO;AACL,cAAM,aAAa,MAAM;AAAA,UACvB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,qBAAa,KAAK;AAAA,UAChB,YAAY;AAAA,UACZ,WAAW,MAAM,WAAW,MAAM;AAAA,QACpC,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,SAAS,KAAK;AAAA,MACd,SAAS;AAAA,MACT;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEA,SAAS,qBACP,YACA,sBACA;AACA,SAAO,WAAW;AAAA,IAAK,WACrB,oCAAoC,MAAM,YAAY,oBAAoB;AAAA,EAC5E;AACF;AAEO,IAAM,gCAAgC;AACtC,IAAM,iCAAiC;AACvC,IAAM,iCAAiC;AAgB9C,eAAsB,mBACpB,SACA,cACA,SACmC;AACnC,MAAI;AACJ,MAAI;AACJ,QAAM,kBAAkB,IAAI,IAAI,YAAY;AAE5C,QAAM,eAAe,MAAM,gBAAgB,OAAO;AAClD,aAAW,CAAC,eAAe,WAAW,KAAK,cAAc;AACvD,QACE,CAAC,YAAY,YACb,kBAAkB,iBAAiB,YAAY,YAAY,KAC3D,sBAAsB,SAAS,YAAY,OAAO,GAClD;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN;AAAA,QACA,UAAU,YAAY;AAAA,MACxB;AAAA,IACF;AAEA,UAAM,4BAA4B,MAAM;AAAA,MACtC,YAAY;AAAA,MACZ;AAAA,IACF;AACA,6BAAyB,yBAAyB;AAElD,UAAM,EAAC,WAAU,IAAI,0BAA0B;AAC/C,QACE,iBAAiB,UACjB,eAAe,YAAY,YAAY,IAAI,GAC3C;AACA,qBAAe;AACf,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,MAAI,cAAc;AAChB,WAAO;AAAA,MACL,MAAM;AAAA,MACN,UAAU;AAAA,IACZ;AAAA,EACF;AAEA,SAAO,EAAC,MAAM,8BAA6B;AAC7C;AAEA,SAAS,kBAAkB,SAA0B;AACnD,QAAM,OAAkB,oBAAI,IAAI;AAChC,aAAW,UAAU,QAAQ,OAAO,GAAG;AACrC,QAAI,WAAW,MAAM,GAAG;AACtB,iBAAWF,SAAQ,OAAO,eAAe;AACvC,aAAK,IAAIA,KAAI;AAAA,MACf;AACA,UAAI,OAAO,aAAa;AACtB,aAAK,IAAI,OAAO,WAAW;AAAA,MAC7B;AAAA,IACF,OAAO;AACL,WAAK,IAAI,OAAO,QAAQ;AACxB,UAAI,OAAO,iBAAiB;AAC1B,aAAK,IAAI,OAAO,eAAe;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AACA,SAAO,OAAO,IAAI;AACpB;AAE
A,eAAsB,wBACpB,UACA,MACkC;AAClC,QAAM,gBAAgB,MAAM,0BAA0B,UAAU,IAAI;AACpE,MAAI,CAAC,eAAe;AAClB,WAAO;AAAA,EACT;AACA,SAAO,eAAe,eAAe,IAAI;AAC3C;AAEA,eAAsB,0BACpB,UACA,MACoC;AACpC,QAAM,SAAS,MAAM,UAAU,UAAU,IAAI;AAC7C,SAAO,QAAQ;AACjB;AAMA,eAAsB,UACpB,UACA,QACA,UACe;AACf,QAAM,UAAU,MAAM,WAAW,QAAQ;AACzC,QAAM,aAAa,IAAI,IAAI,OAAO,EAAE,IAAI,UAAU,MAAM;AACxD,SAAO,WAAW,YAAY,QAAQ;AACxC;AAMA,eAAsB,WACpB,SACA,UACe;AACf,QAAM,YAAY,qBAAqB,SAAS,QAAQ;AACxD,QAAM,QAAQ,SAAS,YAAY,WAAW,kBAAkB,OAAO,CAAC;AACxE,QAAM,SAAS,SAAS,KAAK;AAC7B,QAAM,SAAS,QAAQ,mBAAmB,MAAM,IAAI;AACpD,SAAO,MAAM;AACf;;;ACjhBO,SAAS,UACd,OACA,QACqB;AACrB,SAAO,WAAW,OAAO,CAAC,GAAGG,OAAM,CAAC,GAAG,OAAOA,EAAe,CAAC,CAAC;AAGjE;AAEO,SAAS,WACd,OACA,QACmB;AAGnB,QAAM,SAA4B,CAAC;AAInC,aAAW,SAAS,OAAO,QAAQ,KAAK,GAAG;AACzC,UAAM,SAAS,OAAO,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC;AACxC,WAAO,OAAO,CAAC,CAAC,IAAI,OAAO,CAAC;AAAA,EAC9B;AACA,SAAO;AACT;AAEO,SAAS,cACd,OACA,QACmB;AAGnB,QAAM,SAA4B,CAAC;AACnC,aAAW,UAAU,OAAO,OAAO,QAAQ,KAAK,CAAC,GAAG;AAClD,WAAO,OAAO,CAAC,CAAC,IAAI,OAAO,CAAC;AAAA,EAC9B;AACA,SAAO;AACT;;;ACrCO,SAAS,KAAQC,IAAyB,KAAiB;AAEhE,MAAIA,MAAK,MAAM;AACb,UAAM,IAAI,MAAM,OAAO,cAAcA,EAAC,QAAQ;AAAA,EAChD;AACA,SAAOA;AACT;;;ACNA,SAAQ,eAAAC,oBAAkB;AA+BnB,SAAS,cAAc,GAAU,GAAkB;AACxD,MAAI,mBAAmB,CAAC;AACxB,MAAI,mBAAmB,CAAC;AAExB,MAAI,MAAM,GAAG;AACX,WAAO;AAAA,EACT;AACA,MAAI,MAAM,MAAM;AACd,WAAO;AAAA,EACT;AACA,MAAI,MAAM,MAAM;AACd,WAAO;AAAA,EACT;AACA,MAAI,OAAO,MAAM,WAAW;AAC1B,kBAAc,CAAC;AACf,WAAO,IAAI,IAAI;AAAA,EACjB;AACA,MAAI,OAAO,MAAM,UAAU;AACzB,iBAAa,CAAC;AACd,WAAO,IAAI;AAAA,EACb;AACA,MAAI,OAAO,MAAM,UAAU;AACzB,iBAAa,CAAC;AASd,WAAOC,aAAY,GAAG,CAAC;AAAA,EACzB;AACA,QAAM,IAAI,MAAM,qBAAqB,CAAC,EAAE;AAC1C;AASO,SAAS,mBAAmBC,IAA2B;AAC5D,SAAOA,MAAK;AACd;AAIO,SAAS,eACd,OACA,SACY;AACZ,SAAO,CAAC,GAAG,MAAM;AAEf,eAAW,OAAO,OAAO;AACvB,YAAM,QAAQ,IAAI,CAAC;AACnB,YAAM,OAAO,cAAc,EAAE,KAAK,GAAG,EAAE,KAAK,CAAC;AAC7C,UAAI,SAAS,GAAG;AACd,cAAM,SAAS,IAAI,CAAC,MAAM,QAAQ,OAAO,CAAC;AAC1C,eAAO,UAAU,CAAC,SAAS;AAAA,MAC7B;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;AAQO,SAAS,YAAY,GAAU,GAAmB;AAEvD,MAAI,KAAK,QAAQ,KAAK,MAAM;AAC1B,WAAO;AAAA,EACT;AACA,SAAO,MAAM;AACf;AAEO,SAAS,aAAa,MAAY;AACvC,aAAW,UAAU,OAAO,OAAO,KAAK,aAAa,GAAG;AACtD,eAAWC,SAAQ,OAAO,GAAG;AAC3B,mBAAaA,KAAI;AAAA,IACnB;AAAA,EACF;AACF;;;ACzGO,IAAM,iBAAiB,OAAO,IAAI;AAClC,IAAM,WAAW,OAAO,IAAI;AAwD5B,SAAS,YACd,aACA,QACA,QACA,cACA,QACA,UAAU,OACJ;AACN,MAAI,OAAO,UAAU;AACnB,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK;AAAA,MACL,KAAK;AACH,mBAAW,CAACC,eAAc,QAAQ,KAAK,OAAO;AAAA,UAC5C,OAAO,KAAK;AAAA,QACd,GAAG;AACD,gBAAM,cAAc,KAAK,OAAO,cAAcA,aAAY,CAAC;AAC3D,qBAAW,QAAQ,SAAS,GAAG;AAC7B;AAAA,cACE;AAAA,cACA,EAAC,MAAM,OAAO,MAAM,KAAI;AAAA,cACxB;AAAA,cACAA;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF,KAAK;AAKH;AAAA,MACF,KAAK,SAAS;AACZ,cAAM,cAAc;AAAA,UAClB,OAAO,cAAc,OAAO,MAAM,gBAAgB;AAAA,QACpD;AACA;AAAA,UACE;AAAA,UACA,OAAO,MAAM;AAAA,UACb;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF;AAAA,MACA;AACE,oBAAY,MAAM;AAAA,IACtB;AAAA,EACF;AAEA,QAAM,EAAC,UAAU,eAAe,aAAY,IAAI;AAChD,UAAQ,OAAO,MAAM;AAAA,IACnB,KAAK,OAAO;AACV,UAAI;AAEJ,UAAI,UAAU;AACZ,cAAM,WAAW,YAAY,YAAY;AACzC,YAAI,aAAa,QAAW;AAC1B;AAAA,YACE,OAAO,YAAY,UAAU,OAAO,KAAK,GAAG,MAAM;AAAA,YAClD,0BAA0B,YAAY;AAAA,UACxC;AAEA,mBAAS,cAAc;AAAA,QACzB,OAAO;AACL,qBAAW,iBAAiB,OAAO,KAAK,KAAK,QAAQ,SAAS,CAAC;AAE/D,UAAC,YAAgC,YAAY,IAAI;AAAA,QACnD;AAAA,MACF,OAAO;AACL,mBAAW;AAAA,UACT,OAAO,KAAK;AAAA,UACZ,kBAAkB,aAAa,YAAY;AAAA,UAC3C;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAEA,UAAI,UAAU;AACZ,mBAAW,CAACA,eAAc,QAAQ,KAAK,OAAO;AAAA,UAC5C,OAAO,KAAK;AAAA,QACd,GAAG;AAED,gBAAM,cAAc,KAAK,OAAO,cAAcA,aAAY,CAAC;AAC3D,gBAAM,cAAc,aAAaA,aAAY;AAC7C,cAAI,gBAAgB,QAAW;AAC7B;AAAA,UAC
F;AAEA,gBAAM,UAAU,YAAY,WACxB,SACC,CAAC;AACN,mBAASA,aAAY,IAAI;AAEzB,qBAAW,QAAQ,SAAS,GAAG;AAC7B;AAAA,cACE;AAAA,cACA,EAAC,MAAM,OAAO,KAAI;AAAA,cAClB;AAAA,cACAA;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAAA,IACA,KAAK,UAAU;AACb,UAAI,UAAU;AACZ,cAAM,WAAW,YAAY,YAAY;AACzC,eAAO,aAAa,QAAW,qBAAqB;AACpD,cAAM,KAAK,SAAS,cAAc;AAClC,YAAI,OAAO,GAAG;AACZ,UAAC,YAAgC,YAAY,IAAI;AAAA,QACnD;AACA,iBAAS,cAAc;AAAA,MACzB,OAAO;AACL;AAAA,UACE,kBAAkB,aAAa,YAAY;AAAA,UAC3C,OAAO,KAAK;AAAA,UACZ,OAAO;AAAA,QACT;AAAA,MACF;AAEA,mBAAa,OAAO,IAAI;AACxB;AAAA,IACF;AAAA,IACA,KAAK,SAAS;AACZ,UAAI;AACJ,UAAI,UAAU;AACZ,mBAAW,iBAAiB,aAAa,YAAY;AAAA,MACvD,OAAO;AACL,cAAM,OAAO,kBAAkB,aAAa,YAAY;AACxD,cAAM,EAAC,KAAK,MAAK,IAAIC;AAAA,UACnB;AAAA,UACA,OAAO,KAAK;AAAA,UACZ,OAAO;AAAA,QACT;AACA,eAAO,OAAO,qBAAqB;AACnC,mBAAW,KAAK,GAAG;AAAA,MACrB;AAEA,YAAM,cAAc;AAAA,QAClB,OAAO,cAAc,OAAO,MAAM,gBAAgB;AAAA,MACpD;AACA,YAAM,cAAc,OAAO,cAAc,OAAO,MAAM,gBAAgB;AACtE,UAAI,gBAAgB,QAAW;AAC7B;AAAA,UACE;AAAA,UACA,OAAO,MAAM;AAAA,UACb;AAAA,UACA,OAAO,MAAM;AAAA,UACb;AAAA,UACA;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAAA,IACA,KAAK,QAAQ;AACX,UAAI,UAAU;AACZ,cAAM,WAAW,YAAY,YAAY;AACzC,wBAAgB,QAAQ;AACxB,kBAAU,UAAU,QAAQ,QAAQ,OAAO;AAAA,MAC7C,OAAO;AACL,cAAM,OAAO,kBAAkB,aAAa,YAAY;AAExD,YAAI,OAAO,YAAY,OAAO,QAAQ,KAAK,OAAO,KAAK,GAAG,MAAM,GAAG;AACjE,gBAAM,EAAC,KAAK,QAAQ,OAAO,SAAQ,IAAIA;AAAA,YACrC;AAAA,YACA,OAAO,QAAQ;AAAA,YACf,OAAO;AAAA,UACT;AACA,iBAAO,UAAU,yBAAyB;AAC1C,gBAAM,WAAW,KAAK,MAAM;AAC5B,gBAAM,EAAC,KAAK,MAAK,IAAIA;AAAA,YACnB;AAAA,YACA,OAAO,KAAK;AAAA,YACZ,OAAO;AAAA,UACT;AAOA,cACE,SAAS,cAAc,MAAM,MAC5B,QAAQ,UAAU,MAAM,MAAM,SAC/B;AACA,sBAAU,UAAU,QAAQ,QAAQ,OAAO;AAAA,UAC7C,OAAO;AAUL,qBAAS,cAAc;AACvB,gBAAI,cAAc;AAClB,gBAAI,SAAS,cAAc,MAAM,GAAG;AAClC,mBAAK,OAAO,QAAQ,CAAC;AACrB,4BAAc,SAAS,MAAM,MAAM,IAAI;AAAA,YACzC;AAEA,gBAAI;AACJ,gBAAI,OAAO;AACT,4BAAc,KAAK,WAAW;AAAA,YAChC,OAAO;AACL,mBAAK,OAAO,aAAa,GAAG,QAAQ;AACpC,4BAAc;AACd,kBAAI,SAAS,cAAc,IAAI,GAAG;AAChC,sBAAM,eAAe,EAAC,GAAG,SAAQ;AACjC,qBAAK,MAAM,IAAI;AAAA,cACjB;AAAA,YACF;AACA,wBAAY,cAAc;AAC1B,sBAAU,aAAa,QAAQ,QAAQ,OAAO;AAAA,UAChD;AAAA,QACF,OAAO;AAEL,gBAAM,EAAC,KAAK,MAAK,IAAIA;AAAA,YACnB;AAAA,YACA,OAAO,QAAQ;AAAA,YACf,OAAO;AAAA,UACT;AACA,iBAAO,OAAO,qBAAqB;AACnC,oBAAU,KAAK,GAAG,GAAG,QAAQ,QAAQ,OAAO;AAAA,QAC9C;AAAA,MACF;AAEA;AAAA,IACF;AAAA,IACA;AACE,kBAAY,MAAM;AAAA,EACtB;AACF;AAEA,SAAS,UACP,UACA,QACA,QACA,SACA;AACA,SAAO,OAAO,UAAU,OAAO,KAAK,GAAG;AACvC,MAAI,SAAS;AACX,aAAS,QAAQ,IAAI,OAAO,OAAO,KAAK,KAAK,MAAM;AAAA,EACrD;AACF;AAEA,SAAS,IACP,KACA,MACA,QACA,SACuB;AACvB,QAAM,EAAC,KAAK,MAAK,IAAIA,cAAa,MAAM,KAAK,OAAO,WAAW;AAE/D,MAAI,OAAO;AACT,SAAK,GAAG,EAAE,cAAc;AACxB,WAAO;AAAA,EACT;AACA,QAAM,WAAW,iBAAiB,KAAK,QAAQ,SAAS,CAAC;AACzD,OAAK,OAAO,KAAK,GAAG,QAAQ;AAC5B,SAAO;AACT;AAEA,SAAS,wBACP,MACA,KACA,aACW;AACX,QAAM,EAAC,KAAK,MAAK,IAAIA,cAAa,MAAM,KAAK,WAAW;AACxD,SAAO,OAAO,qBAAqB;AACnC,QAAM,WAAW,KAAK,GAAG;AACzB,QAAM,KAAK,SAAS,cAAc;AAClC,MAAI,OAAO,GAAG;AACZ,SAAK,OAAO,KAAK,CAAC;AAAA,EACpB;AACA,WAAS,cAAc;AAEvB,SAAO;AACT;AAGA,SAASA,cACP,MACA,QACA,YACA;AACA,MAAI,MAAM;AACV,MAAI,OAAO,KAAK,SAAS;AACzB,SAAO,OAAO,MAAM;AAClB,UAAM,MAAO,MAAM,SAAU;AAC7B,UAAM,aAAa,WAAW,KAAK,GAAG,GAAU,MAAa;AAC7D,QAAI,aAAa,GAAG;AAClB,YAAM,MAAM;AAAA,IACd,WAAW,aAAa,GAAG;AACzB,aAAO,MAAM;AAAA,IACf,OAAO;AACL,aAAO,EAAC,KAAK,KAAK,OAAO,KAAI;AAAA,IAC/B;AAAA,EACF;AACA,SAAO,EAAC,KAAK,KAAK,OAAO,MAAK;AAChC;AAEA,SAAS,kBACP,aACA,cACe;AACf,QAAM,OAAO,YAAY,YAAY;AACrC,cAAY,IAAI;AAChB,SAAO;AACT;AAEA,SAAS,gBAAgBC,IAAoC;AAC3D,eAAcA,GAAyB,cAAc,CAAC;AACxD;AAEA,SAAS,iBAAiB,aAAoB,cAAiC;AAC7E,QAAM,IAAI,YAAY,YAAY;AAClC,eAAc,EAAyB,cAAc,CAAC;AACtD,SAAO;AACT;AAEA,SAAS,iBACP,KACA,QACA,SACA,IACW;AACX,MA
AI,SAAS;AACX,WAAO,EAAC,GAAG,KAAK,CAAC,cAAc,GAAG,IAAI,CAAC,QAAQ,GAAG,OAAO,KAAK,MAAM,EAAC;AAAA,EACvE;AACA,SAAO,EAAC,GAAG,KAAK,CAAC,cAAc,GAAG,GAAE;AACtC;AACA,SAAS,OAAO,KAAU,QAAsB;AAE9C,MAAI,OAAO,WAAW,WAAW,GAAG;AAClC,WAAO,KAAK,UAAU,IAAI,OAAO,WAAW,CAAC,CAAC,CAAC;AAAA,EACjD;AACA,SAAO,KAAK,UAAU,OAAO,WAAW,IAAI,OAAK,IAAI,CAAC,CAAC,CAAC;AAC1D;;;AClaA,YAAY,YAAY;AAMxB,IAAM,OAA4B,CAAC;AAE5B,IAAM,aACV,uBAAQ,EACR,MAAM,CAAAC,OAAK;AACV,MAAI,QAAqB;AACvB,WAAc,UAAGA,EAAsB;AAAA,EACzC;AACA,QAAM,KAAK,YAAYA,IAAG,IAAI,IACnB,UAAGA,EAAC,IACJ,WAAI;AAAA,IACT,SAAS;AAAA,IACT,MAAM,KAAK,MAAM;AAAA,EACnB,CAAC;AACL,OAAK,SAAS;AACd,SAAO;AACT,CAAC;AAEI,IAAM,mBACV,uBAAQ,EACR,MAAM,CAAAA,OAAK;AACV,MAAI,QAAqB;AACvB,WAAc,UAAGA,EAAuB;AAAA,EAC1C;AACA,QAAM,KAAK,aAAaA,IAAG,IAAI,IACpB,UAAGA,EAAC,IACJ,WAAI;AAAA,IACT,SAAS;AAAA,IACT,MAAM,KAAK,MAAM;AAAA,EACnB,CAAC;AACL,OAAK,SAAS;AACd,SAAO;AACT,CAAC;;;AChCI,IAAM,gBAAkB,qBAAM,CAAG,sBAAO,CAAC,CAAC,EAAE,OAAS,qBAAQ,sBAAO,CAAC,CAAC;;;ACHtE,IAAM,cAAgB,qBAAM,YAAc,yBAAU,CAAC;AAErD,IAAM,YAAc,eAAe,WAAW;;;ACD9C,IAAM,yBAA2B,sBAAS,sBAAO,CAAC;AAGlD,IAAM,0BAA4B,sBAAO,sBAAsB;AAG/D,IAAM,oBAAsB,sBAAS,qBAAM,SAAS,CAAC;AAGrD,IAAM,qBAAuB,sBAAO,iBAAiB;AAGrD,IAAM,2BAA6B,sBAAO;AAAA,EAC/C,UAAY,qBAAQ,sBAAO,CAAC;AAAA,EAC5B,YAAc,sBAAS,qBAAM,SAAS,CAAC,EAAE,SAAS;AAAA,EAClD,gBAAkB,sBAAO;AAAA,EACzB,OAAS,sBAAO;AAAA,EAChB,KAAO,sBAAO;AAAA,EACd,kBAAoB,sBAAO,EAAE,SAAS;AAAA,EACtC,iBAAiB,wBAAwB,SAAS;AAAA,EAClD,YAAY,mBAAmB,SAAS;AAAA,EACxC,OAAS,sBAAS,qBAAQ,sBAAO,CAAC,CAAC,EAAE,SAAS;AAChD,CAAC;;;AClBD,SAAQ,eAAAC,oBAAkB;;;ACFnB,SAAS,QAAW,KAA6B;AAEtD,MAAI,IAAI,IAAI,UAAU,OAAK,MAAM,MAAS;AAC1C,MAAI,IAAI,GAAG;AACT,WAAO;AAAA,EACT;AACA,QAAMC,WAAe,IAAI,MAAM,GAAG,CAAC;AACnC,OAAK,KAAK,IAAI,IAAI,QAAQ,KAAK;AAC7B,UAAM,IAAI,IAAI,CAAC;AACf,QAAI,MAAM,QAAW;AACnB,MAAAA,SAAQ,KAAK,CAAC;AAAA,IAChB;AAAA,EACF;AACA,SAAOA;AACT;AAEO,SAAS,SAAY,MAAoB,MAA6B;AAC3E,SAAO,KAAK,WAAW,KAAK,UAAU,KAAK,MAAM,CAAC,GAAG,MAAM,MAAM,KAAK,CAAC,CAAC;AAC1E;;;ADRO,IAAM,iBAAmB,sBAAO;AAChC,IAAM,gBAAgB,OAAO;AAEpC,IAAM,wBAA0B;AAAA,EAC5B,qBAAM,CAAC,gBAAkB,aAAa,OAAO,MAAM,CAAC,CAAC;AACzD;AAEO,IAAM,iBAAmB,cAAc,qBAAqB;AAG5D,IAAM,kBAAoB;AAAA,EAC7B,sBAAO;AAAA,EACP,sBAAO;AAAA,EACP,uBAAQ;AAAA,EACR,oBAAK;AACT;AAEO,IAAM,oBAAsB,aAAa,KAAK,MAAM,MAAM,QAAQ;AAElE,IAAM,iBAAmB,aAAa,KAAK,KAAK,MAAM,IAAI;AAE1D,IAAM,gBAAkB;AAAA,EAC7B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,IAAM,cAAgB,aAAa,MAAM,QAAQ;AAEjD,IAAM,uBAAyB;AAAA,EACpC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,yBAAqD,eAAe;AAAA,EACxE,MAAQ,uBAAQ,SAAS;AAAA,EACzB,OAAS;AAAA,IACL,sBAAO;AAAA,IACP,sBAAO;AAAA,IACP,uBAAQ;AAAA,IACR,oBAAK;AAAA,IACL,cAAgB,qBAAQ,sBAAO,GAAK,sBAAO,GAAK,uBAAQ,CAAC,CAAC;AAAA,EAC9D;AACF,CAAC;AACD,IAAM,wBAAmD,eAAe;AAAA,EACtE,MAAQ,uBAAQ,QAAQ;AAAA,EACxB,MAAQ,sBAAO;AACjB,CAAC;AAmBD,IAAM,2BAA6B,eAAe;AAAA,EAChD,MAAQ,uBAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMxB,QAAU,aAAa,YAAY,gBAAgB;AAAA,EACnD,OAAS,qBAAQ,sBAAO,GAAK,qBAAQ,sBAAO,CAAC,CAAC;AAChD,CAAC;AAED,IAAM,uBAAyB;AAAA,EAC7B;AAAA,EACA;AAAA,EACA;AACF;AAIO,IAAM,wBAAmD,eAAe;AAAA,EAC7E,MAAQ,uBAAQ,QAAQ;AAAA,EACxB,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,OAAS,qBAAM,0BAA0B,sBAAsB;AACjE,CAAC;AAIM,IAAM,4CACT,aAAa,UAAU,YAAY;AAEhC,IAAM,oCACT,eAAe;AAAA,EACf,MAAQ,uBAAQ,oBAAoB;AAAA,EACpC,SAAW,oBAAK,MAAM,wBAAwB;AAAA,EAC9C,IAAI;AAAA,EACJ,MAAQ,uBAAQ,EAAE,SAAS;AAC7B,CAAC;AAEI,IAAM,kBAAuC;AAAA,EAClD;AAAA,EACE,oBAAK,MAAM,iBAAiB;AAAA,EAC5B,oBAAK,MAAM,iBAAiB;AAAA,EAC9B;AACF;AAEA,IAAM,oBAA2C,eAAe;AAAA,EAC9D,MAAQ,uBAAQ,KAAK;AAAA,EACrB,YAAc,cAAc,eAAe;AAC7C,CAAC;AAED,IAAM,oBAA2C,eAAe;AAAA,EAC9D,MAAQ,uBAAQ,IAAI;AAAA,EACpB,YAAc,cAAc,eAAe;AAC7C,CAAC;AAID,SAAS,gBAAgB,OAAuC;AAC9D,SAAO,MAAM,QAAQ,KAAK,KAAK,MAAM,UAAU,CAAC;AAChD,SAAO;A
ACT;AAEO,IAAM,oBAA2C;AAAA,EACpD,qBAAM,CAAG,sBAAO,CAAC,CAAC,EAAE,OAAS,qBAAQ,sBAAO,CAAC,CAAC;AAClD;AAEA,IAAM,oBAAsB,eAAe;AAAA,EACzC,aAAa;AAAA,EACb,YAAY;AACd,CAAC;AAQM,IAAM,uCAAyC,eAAe;AAAA,EACnE,aAAa;AAAA,EACb,QAAU,uBAAQ,EAAE,SAAS;AAAA,EAC7B,QAAU,aAAa,eAAe,UAAU,MAAM,EAAE,SAAS;AACnE,CAAC;AAEM,IAAM,2BACX,qCAAqC,OAAO;AAAA,EAC1C,UAAY,oBAAK,MAAM,SAAS;AAClC,CAAC;AAEI,IAAM,YAA2B,eAAe;AAAA,EACrD,QAAU,sBAAO,EAAE,SAAS;AAAA,EAC5B,OAAS,sBAAO;AAAA,EAChB,OAAS,sBAAO,EAAE,SAAS;AAAA,EAC3B,OAAO,gBAAgB,SAAS;AAAA,EAChC,SAAW,cAAc,wBAAwB,EAAE,SAAS;AAAA,EAC5D,OAAS,sBAAO,EAAE,SAAS;AAAA,EAC3B,SAAS,eAAe,SAAS;AAAA,EACjC,OACG,sBAAO;AAAA,IACN,KAAK;AAAA,IACL,WAAa,uBAAQ;AAAA,EACvB,CAAC,EACA,SAAS;AACd,CAAC;AAiJD,SAAS,aAAa,KAAU,WAAwC;AAEtE,QAAM,EAAC,WAAW,WAAU,IAAI;AAChC,QAAM,UAAU,CAAC,MAAc,WAAW,IAAI,OAAO,CAAC;AACtD,QAAM,MAAM,CAAC,OAAe,MAAmB;AAC7C,UAAM,YAAY,EAAE,IAAI,SAAO,WAAW,OAAO,GAAG,CAAC;AACrD,WAAO,gBAAgB,SAAS;AAAA,EAClC;AAEA,QAAM,QAAQ,IAAI,QAAQ,UAAU,MAAM,IAAI,KAAK,IAAI;AACvD,QAAM,cAAc;AAAA,IAClB,QAAQ,IAAI;AAAA,IACZ,OAAO,UAAU,IAAI,KAAK;AAAA,IAC1B,OAAO,IAAI;AAAA,IACX,OAAO,QAAQ,eAAe,OAAO,IAAI,OAAO,SAAS,IAAI;AAAA,IAC7D,SAAS,IAAI,UACT,UAAU;AAAA,MACR,IAAI,QAAQ;AAAA,QACV,QACG;AAAA,UACC,aAAa;AAAA,YACX,aAAa,IAAI,IAAI,OAAO,EAAE,YAAY,WAAW;AAAA,YACrD,YAAY,IAAI,EAAE,SAAS,OAAO,EAAE,YAAY,UAAU;AAAA,UAC5D;AAAA,UACA,QAAQ,EAAE;AAAA,UACV,UAAU,aAAa,EAAE,UAAU,SAAS;AAAA,UAC5C,QAAQ,EAAE;AAAA,QACZ;AAAA,MACJ;AAAA,IACF,IACA;AAAA,IACJ,OAAO,IAAI,QACP;AAAA,MACE,GAAG,IAAI;AAAA,MACP,KAAK,OAAO;AAAA,QACV,OAAO,QAAQ,IAAI,MAAM,GAAG,EAAE,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAAA,UAChD,QAAQ,GAAG;AAAA,UACX;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,IACA;AAAA,IACJ,OAAO,IAAI;AAAA,IACX,SAAS,IAAI,SAAS,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,QAAQ,GAAG,GAAG,GAAG,CAAU;AAAA,EACxE;AAEA,SAAO;AACT;AAEA,SAAS,eACP,OACA,OACA,WACW;AAEX,QAAM,EAAC,WAAU,IAAI;AACrB,QAAM,YAAY,CAAC,MACjB,EAAE,SAAS,WAAW,IAAI,EAAC,GAAG,GAAG,MAAM,WAAW,OAAO,EAAE,IAAI,EAAC;AAClE,QAAM,MAAM,CAACC,QAAe,MAAmB;AAC7C,UAAM,YAAY,EAAE,IAAI,SAAO,WAAWA,QAAO,GAAG,CAAC;AACrD,WAAO,gBAAgB,SAAS;AAAA,EAClC;AAEA,MAAI,MAAM,SAAS,UAAU;AAC3B,WAAO,EAAC,GAAG,OAAO,MAAM,UAAU,MAAM,IAAI,EAAC;AAAA,EAC/C,WAAW,MAAM,SAAS,sBAAsB;AAC9C,UAAM,EAAC,aAAa,SAAQ,IAAI,MAAM;AACtC,WAAO;AAAA,MACL,GAAG;AAAA,MACH,SAAS;AAAA,QACP,GAAG,MAAM;AAAA,QACT,aAAa;AAAA,UACX,aAAa,IAAI,OAAO,YAAY,WAAW;AAAA,UAC/C,YAAY,IAAI,SAAS,OAAO,YAAY,UAAU;AAAA,QACxD;AAAA,QACA,UAAU,aAAa,UAAU,SAAS;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,MAAM,MAAM;AAAA,IACZ,YAAY,UAAU;AAAA,MACpB,MAAM,WAAW,IAAI,OAAK,eAAe,GAAG,OAAO,SAAS,CAAC;AAAA,IAC/D;AAAA,EACF;AACF;AAEA,IAAM,iBAAiB,oBAAI,QAA4B;AAEvD,IAAM,sBAAoC;AAAA,EACxC,WAAW,OAAK;AAAA,EAChB,YAAY,CAAC,GAAG,MAAM;AAAA,EACtB,SAAS;AAAA,EACT,OAAO;AAAA,EACP,YAAY,OAAK,EAAE,KAAK,YAAY;AACtC;AAEO,SAAS,aAAa,KAAyB;AACpD,MAAI,aAAa,eAAe,IAAI,GAAG;AACvC,MAAI,CAAC,YAAY;AACf,iBAAa,aAAa,KAAK,mBAAmB;AAClD,mBAAe,IAAI,KAAK,UAAU;AAAA,EACpC;AACA,SAAO;AACT;AAEO,SAAS,OAAO,KAAU,QAAoB;AACnD,SAAO,aAAa,KAAK;AAAA,IACvB,WAAW,WAAS,OAAO,UAAU,KAAK;AAAA,IAC1C,YAAY,CAAC,OAAO,QAAQ,OAAO,WAAW,OAAO,GAAG;AAAA,IACxD,SAAS,OAAK;AAAA,IACd,OAAO,OAAK;AAAA,IACZ,YAAY,OAAK;AAAA,EACnB,CAAC;AACH;AAEO,SAAS,aACd,MACA,OACA,QACA;AACA,SAAO,eAAe,MAAM,OAAO;AAAA,IACjC,WAAW,CAAAA,WAAS,OAAO,UAAUA,MAAK;AAAA,IAC1C,YAAY,CAACA,QAAO,QAAQ,OAAO,WAAWA,QAAO,GAAG;AAAA,IACxD,SAAS,OAAK;AAAA,IACd,OAAO,OAAK;AAAA,IACZ,YAAY,OAAK;AAAA,EACnB,CAAC;AACH;AAEA,SAAS,cACP,SAC+B;AAC/B,SAAO,QAAQ,KAAK,UAAU;AAChC;AAEA,SAAS,aAAa,GAAc,GAAsB;AACxD,MAAI,EAAE,SAAS,UAAU;AACvB,QAAI,EAAE,SAAS,UAAU;AACvB,aAAO;AAAA,IACT;AAEA,WACE,qBAAqB,EAAE,MAAM,EAAE,IAAI,KACnC,qBAAqB,EAAE,IAAI,EAAE,EAAE,KAC/B,qBAAqB,EAAE,OAAO,EAAE,KAAK;AAAA,EAEzC;AAEA,MAAI,EAAE,SAAS,U
AAU;AACvB,WAAO;AAAA,EACT;AAEA,MAAI,EAAE,SAAS,sBAAsB;AACnC,QAAI,EAAE,SAAS,sBAAsB;AACnC,aAAO;AAAA,IACT;AACA,WAAO,WAAW,EAAE,SAAS,EAAE,OAAO,KAAK,qBAAqB,EAAE,IAAI,EAAE,EAAE;AAAA,EAC5E;AACA,MAAI,EAAE,SAAS,sBAAsB;AACnC,WAAO;AAAA,EACT;AAEA,QAAM,MAAM,qBAAqB,EAAE,MAAM,EAAE,IAAI;AAC/C,MAAI,QAAQ,GAAG;AACb,WAAO;AAAA,EACT;AACA,WACM,IAAI,GAAG,IAAI,GACf,IAAI,EAAE,WAAW,UAAU,IAAI,EAAE,WAAW,QAC5C,KAAK,KACL;AACA,UAAMC,OAAM,aAAa,EAAE,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;AACzD,QAAIA,SAAQ,GAAG;AACb,aAAOA;AAAA,IACT;AAAA,EACF;AAEA,SAAO,EAAE,WAAW,SAAS,EAAE,WAAW;AAC5C;AAEA,SAAS,qBAAqB,GAAkB,GAA0B;AACxE,MAAI,EAAE,SAAS,EAAE,MAAM;AACrB,WAAOC,aAAY,EAAE,MAAM,EAAE,IAAI;AAAA,EACnC;AACA,UAAQ,EAAE,MAAM;AAAA,IACd,KAAK;AACH,aAAO,EAAE,SAAS,SAAS;AAC3B,aAAOA,aAAY,OAAO,EAAE,KAAK,GAAG,OAAO,EAAE,KAAK,CAAC;AAAA,IACrD,KAAK;AACH,aAAO,EAAE,SAAS,QAAQ;AAC1B,aAAOA,aAAY,EAAE,MAAM,EAAE,IAAI;AAAA,IACnC,KAAK;AACH,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,EACJ;AACF;AAEA,SAAS,WAAW,GAAuB,GAA+B;AACxE,SAAOA,aAAY,KAAK,EAAE,SAAS,KAAK,GAAG,KAAK,EAAE,SAAS,KAAK,CAAC;AACnE;AAaA,SAAS,UAAU,MAAwC;AACzD,MAAI,KAAK,SAAS,YAAY,KAAK,SAAS,sBAAsB;AAChE,WAAO;AAAA,EACT;AACA,QAAM,aAAa;AAAA,IACjB,KAAK,WAAW;AAAA,MAAQ,OACtB,EAAE,SAAS,KAAK,OAAO,EAAE,WAAW,IAAI,CAAAC,OAAK,UAAUA,EAAC,CAAC,IAAI,UAAU,CAAC;AAAA,IAC1E;AAAA,EACF;AAEA,UAAQ,WAAW,QAAQ;AAAA,IACzB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO,WAAW,CAAC;AAAA,IACrB;AACE,aAAO;AAAA,QACL,MAAM,KAAK;AAAA,QACX;AAAA,MACF;AAAA,EACJ;AACF;AAEA,SAAS,qBAAqB,GAAkB,GAA0B;AACxE,MAAI,MAAM,QAAQ,MAAM,MAAM;AAC5B,WAAOD,aAAY,GAAG,CAAC;AAAA,EACzB;AACA,MAAI,MAAM,MAAM;AACd,WAAO;AAAA,EACT;AACA,MAAI,MAAM,MAAM;AACd,WAAO;AAAA,EACT;AACA,SAAO;AACT;;;AEhkBA,IAAM,sBAAwB,sBAAO;AAAA,EACnC,gCAAgC;AAAA,EAChC,uBAAuB;AACzB,CAAC;AAID,IAAM,wBAA0B,sBAAO;AAAA,EACrC,UAAY,sBAAO;AAAA,EACnB,SAAW,sBAAO;AAAA;AAAA;AAAA,EAGlB,KAAK,UAAU,SAAS;AAAA;AAAA,EAExB,MAAQ,sBAAO,EAAE,SAAS;AAAA;AAAA,EAE1B,MAAQ,cAAc,UAAU,EAAE,SAAS;AAAA,EAC3C,KAAO,uBAAQ;AAAA,EACf,SAAW,uBAAQ;AAAA,EACnB,KAAO,sBAAO;AAAA,EACd,eAAiB,sBAAO,EAAE,SAAS;AAAA,EACnC,UAAY,sBAAO;AAAA,EACnB,SAAS,oBAAoB,SAAS,EAAE,SAAS;AACnD,CAAC;AAID,IAAM,wBAA0B,sBAAO;AAAA,EACrC,IAAM,sBAAO;AACf,CAAC;AAEM,IAAM,2BAA2B,sBAAsB,OAAO;AAAA,EACnE,IAAM,uBAAQ,SAAS;AAAA,EACvB,OAAS,qBAAM,qBAAqB;AACtC,CAAC;AAIM,IAAM,2BAA2B,sBAAsB,OAAO;AAAA,EACnE,IAAM,uBAAQ,SAAS;AAAA,EACvB,OAAO;AACT,CAAC;AAIM,IAAM,2BAA2B,sBAAsB,OAAO;AAAA,EACnE,IAAM,uBAAQ,SAAS;AAAA,EACvB,OAAS,sBAAO;AAClB,CAAC;AAEM,IAAM,iCAAiC,sBAAsB,OAAO;AAAA,EACzE,IAAM,uBAAQ,eAAe;AAAA,EAC7B,OAAS,uBAAQ;AACnB,CAAC;AAMM,IAAM,gCAAgC,sBAAsB,OAAO;AAAA,EACxE,IAAM,uBAAQ,eAAe;AAAA,EAC7B,OAAO;AACT,CAAC;AAMM,IAAM,wBAA0B;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,IAAM,2BAA6B,qBAAM;AAAA,EAC5C,uBAAQ,SAAS;AAAA,EACnB;AACF,CAAC;;;ACrFM,SAAS,yBAAyBE,QAAmB;AAC1D,MAAIA,WAAU,MAAM;AAClB,UAAM,IAAI,UAAU,sBAAsB;AAAA,EAC5C;AAGA,WAAS,IAAI,GAAG,IAAIA,OAAM,QAAQ,KAAK;AACrC,IAAAA,OAAM,CAAC,IAAI,KAAK,MAAM,KAAK,OAAO,IAAI,GAAG;AAAA,EAC3C;AAEA,SAAOA;AACT;;;ACHO,SAAS,OAAO,OAAO,IAAY;AAExC,QAAM,cAAc,yBAAyB,IAAI,WAAW,IAAI,CAAC;AAEjE,SAAO,YAAY,OAAO,CAAC,IAAI,SAAS;AAMtC,YAAQ;AACR,QAAI,OAAO,IAAI;AAEb,YAAM,KAAK,SAAS,EAAE;AAAA,IACxB,WAAW,OAAO,IAAI;AAEpB,aAAO,OAAO,IAAI,SAAS,EAAE,EAAE,YAAY;AAAA,IAC7C,WAAW,OAAO,IAAI;AACpB,YAAM;AAAA,IACR,OAAO;AACL,YAAM;AAAA,IACR;AACA,WAAO;AAAA,EACT,GAAG,EAAE;AACP;;;AChCA,SAAQ,gBAAe;AAGhB,IAAM,MAAM,CAAC,MAAc,KAAK,GAAG,CAAC;AACpC,IAAM,OAAO,CAAC,MAAc,KAAK,GAAG,CAAC;AAM5C,SAAS,KAAK,KAAa,OAAuB;AAChD,MAAIC,QAAO;AACX,WAAS,IAAI,GAAG,IAAI,OAAO,KAAK;AAC9B,IAAAA,SAAQA,SAAQ,OAAO,OAAO,SAAS,KAAK,CAAC,CAAC;AAAA,EAChD;AACA,SAAOA;AACT;;;ACdO,IAAM,mBAAqB;AAAA,EAC9B,qBAAM,CAAG,sBAAO,CAAC,CAAC,EAAE,OAAS,qBAAQ,sBAAO,CAAC,CAAC;AA
ClD;AAIO,IAAM,wBAA0B;AAAA,EACnC,sBAAO;AAAA,EACP,sBAAO;AAAA,EACP,uBAAQ;AACZ;AAIO,IAAM,8BAAgC;AAAA,EAC3C;AACF;;;ACXO,IAAM,6BAA6B;AACnC,IAAM,yBAAyB;AAC/B,IAAM,sBAAsB;AAC5B,IAAM,uBAAuB;AAE7B,SAAS,oBAAoB,UAAkBC,OAAsB;AAC1E,SAAO,6BAA6B,WAAW,MAAMA;AACvD;AAEO,SAAS,8BAA8B,UAA0B;AACtE,SAAO,6BAA6B,WAAW;AACjD;AAEO,SAAS,gBAAgBA,OAAsB;AACpD,SAAO,yBAAyBA;AAClC;AAEO,SAAS,sBAAsB,KAAyB;AAC7D,SAAO,uBAAuB,IAAI,WAAW,MAAM,IAAI;AACzD;AAEO,SAAS,mBACd,WACA,YACA,OACQ;AACR,MAAI,WAAW,WAAW,GAAG;AAC3B,WACE,sBACA,YACA,MACE,MAAM,MAAM,WAAW,CAAC,CAAC,GAAG,qBAAqB;AAAA,EAEvD;AAEA,QAAM,SAAS,WAAW,IAAI,OAAO,MAAM,MAAM,CAAC,GAAG,qBAAqB,CAAC;AAC3E,QAAM,MAAM,KAAK,UAAU,MAAM;AAEjC,QAAM,YAAY,KAAK,GAAG;AAC1B,SAAO,sBAAsB,YAAY,MAAM;AACjD;AAEO,SAAS,kBAAkB,KAAqB;AACrD,QAAM,QAAQ,IAAI,QAAQ,KAAK,oBAAoB,MAAM;AACzD,SAAO,IAAI,MAAM,oBAAoB,QAAQ,KAAK;AACpD;;;AChDO,IAAM,cAAN,MAAkB;AAAA,EACd;AAAA,EACA;AAAA,EAET,YACE,UACA,eACA;AACA,SAAK,YAAY;AACjB,SAAK,KAAK;AAAA,EACZ;AAAA,EAEA,MAAM,UAA6B;AACjC,YAAQ,MAAM,KAAK,UAAU,MAAM;AAAA,MACjC,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA,EAEA,MAAM,qBAAwC;AAC5C,YAAQ,MAAM,KAAK,UAAU,MAAM;AAAA,MACjC,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA,EAEA,MAAM,UAA4B;AAChC,YAAQ,MAAM,KAAK,UAAU,MAAM,mBAAmB,KAAK,SAAS;AAAA,EACtE;AACF;;;AC3BO,IAAM,SAAN,MAAa;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EAET,YACE,UACA,UACA,eACA;AACA,SAAK,YAAY;AACjB,SAAK,KAAK;AAEV,SAAK,cAAc,IAAI,YAAY,KAAK,WAAW,aAAa;AAAA,EAClE;AAAA,EAEA,MAAM,UAA4B;AAChC,YAAQ,MAAM,KAAK,UAAU,MAAM,cAAc,KAAK,WAAW,KAAK,EAAE;AAAA,EAC1E;AAAA,EAEA,MAAM,MAA+C;AACnD,YAAQ,MAAM,KAAK,UAAU,MAAM,UAAU,KAAK,WAAW,KAAK,EAAE;AAAA,EACtE;AAAA,EAEA,MAAM,KAAK,WAAmC;AAC5C,YAAQ,MAAM,KAAK,UAAU,MAAM;AAAA,MACjC,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,IACF;AAAA,EACF;AACF;;;ACjCO,IAAM,WAAN,MAAe;AAAA,EACpB;AAAA,EACA;AAAA,EAEA,YAAY,MAAc,QAAgB;AACxC,SAAK,OAAO;AACZ,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,GAAmB;AACrB,QAAI,EAAE,SAAS,GAAG;AAChB,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AACA,QAAI,KAAK,WAAW,GAAG;AACrB,WAAK,UAAU,EAAE;AACjB,WAAK,QAAS,EAAE,UAAU,EAAE,OAAO,KAAK,QAAS,KAAK;AAAA,IACxD,OAAO;AACL,WAAK,SAAS,EAAE;AAChB,WAAK,OAAO,EAAE;AAAA,IAChB;AAAA,EACF;AACF;AAKO,SAAS,iBAAiB,WAA+B;AAC9D,YAAU,KAAK,CAAC,GAAG,MAAM,EAAE,OAAO,EAAE,IAAI;AAC1C;;;ACjBO,IAAM,UAAN,MAAM,SAAQ;AAAA,EACV;AAAA,EAET;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,cAAsB,KAAM;AACtC,SAAK,cAAc;AACnB,SAAK,gBAAgB,cAAc,GAAG,KAAK,WAAW;AACtD,SAAK,kBAAkB,gBAAgB,GAAG,KAAK,WAAW;AAC1D,SAAK,MAAM;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,SAAS,MAAsC;AACpD,UAAM,SAAS,IAAI,SAAQ,KAAK,CAAC,CAAC;AAClC,QAAI,KAAK,SAAS,MAAM,GAAG;AACzB,YAAM,IAAI,MAAM,yBAAyB;AAAA,IAC3C;AACA,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK,GAAG;AACvC,aAAO,IAAI,KAAK,CAAC,GAAG,KAAK,IAAI,CAAC,CAAC;AAAA,IACjC;AACA,WAAO;AAAA,EACT;AAAA,EAEA,QAAc;AACZ,SAAK,aAAa,CAAC;AACnB,SAAK,eAAe,CAAC;AACrB,SAAK,cAAc,CAAC;AACpB,SAAK,mBAAmB;AACxB,SAAK,qBAAqB;AAC1B,SAAK,OAAO,OAAO;AACnB,SAAK,OAAO,CAAC,OAAO;AAAA,EACtB;AAAA,EAEA,IAAI,MAAc,SAAiB,GAAG;AACpC,SAAK,YAAY,IAAI,SAAS,MAAM,MAAM,CAAC;AAAA,EAC7C;AAAA;AAAA,EAGA,gBAAgB,cAA4B;AAC1C,eAAW,KAAK,cAAc;AAC5B,WAAK,YAAY,CAAC;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,YAAY,GAAmB;AAC7B,QACE,OAAO,MAAM,EAAE,IAAI,KACnB,EAAE,UAAU,KACZ,OAAO,MAAM,EAAE,MAAM,KACrB,CAAC,OAAO,SAAS,EAAE,MAAM,GACzB;AACA;AAAA,IACF;AAEA,SAAK,aAAa,KAAK,IAAI,SAAS,EAAE,MAAM,EAAE,MAAM,CAAC;AACrD,SAAK,sBAAsB,EAAE;AAE7B,QACE,KAAK,WAAW,SAAS,KAAK,iBAC9B,KAAK,aAAa,SAAS,KAAK,iBAChC;AACA,WAAK,SAAS;AAAA,IAChB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,IAAa;AACjB,OAAG,SAAS;AACZ,SAAK,gBAAgB,GAAG,UAAU;AAAA,EACpC;AAAA,EAEA,WAAW;AACT,QACE,KAAK,aAAa,SAAS,KAC3B,KAAK,WAAW,SAAS,KAAK,eAC9B;AAEA
,WAAK,aAAa,KAAK,GAAG,KAAK,UAAU;AACzC,uBAAiB,KAAK,YAAY;AAGlC,WAAK,WAAW,SAAS;AACzB,WAAK,WAAW,KAAK,KAAK,aAAa,CAAC,CAAC;AAEzC,WAAK,oBAAoB,KAAK;AAC9B,WAAK,qBAAqB;AAC1B,UAAI,QAAQ,KAAK,aAAa,CAAC,EAAE;AACjC,UAAI,QAAQ,KAAK,mBAAmB,KAAK,aAAa,CAAC;AACvD,eAAS,IAAI,GAAG,IAAI,KAAK,aAAa,QAAQ,KAAK;AACjD,cAAM,WAAW,KAAK,aAAa,CAAC;AACpC,cAAM,YAAY,QAAQ,SAAS;AACnC,YAAI,aAAa,OAAO;AACtB,kBAAQ;AACR,eAAK,WAAW,KAAK,WAAW,SAAS,CAAC,EAAE,IAAI,QAAQ;AAAA,QAC1D,OAAO;AACL,gBAAM,KAAK,KAAK,oBAAoB,QAAQ,KAAK,gBAAgB;AACjE,kBAAQ,KAAK,mBAAmB,KAAK,aAAa,KAAK,CAAC;AACxD,mBAAS,SAAS;AAClB,eAAK,WAAW,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF;AACA,WAAK,OAAO,KAAK,IAAI,KAAK,MAAM,KAAK,WAAW,CAAC,EAAE,IAAI;AACvD,WAAK,OAAO,KAAK;AAAA,QACf,KAAK;AAAA,QACL,KAAK,WAAW,KAAK,WAAW,SAAS,CAAC,EAAE;AAAA,MAC9C;AACA,WAAK,aAAa,SAAS;AAAA,IAC7B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,UAAU,KAAmB,CAAC,GAAiB;AAC7C,SAAK,SAAS;AACd,WAAO,GAAG,OAAO,KAAK,UAAU;AAAA,EAClC;AAAA,EAEA,QAAgB;AACd,SAAK,SAAS;AAId,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAAsB;AACpB,SAAK,SAAS;AACd,UAAM,OAAoB,CAAC,KAAK,WAAW;AAC3C,eAAW,YAAY,KAAK,YAAY;AACtC,WAAK,KAAK,SAAS,MAAM,SAAS,MAAM;AAAA,IAC1C;AACA,WAAO;AAAA,EACT;AAAA,EAEA,oBAAoB;AAIlB,QACE,KAAK,YAAY,SAAS,KAC1B,KAAK,YAAY,KAAK,YAAY,SAAS,CAAC,MAAM,KAAK,kBACvD;AACA;AAAA,IACF;AACA,UAAM,IAAI,KAAK,WAAW,SAAS;AACnC,QAAI,KAAK,YAAY,SAAS,GAAG;AAC/B,WAAK,YAAY,SAAS;AAAA,IAC5B;AAEA,QAAI,OAAO;AACX,aAAS,IAAI,GAAG,IAAI,KAAK,WAAW,QAAQ,KAAK;AAC/C,YAAM,WAAW,KAAK,WAAW,CAAC;AAClC,YAAM,MAAM,SAAS;AACrB,WAAK,YAAY,CAAC,IAAI,OAAO,MAAM;AACnC,cAAQ;AAAA,IACV;AACA,SAAK,YAAY,KAAK,WAAW,MAAM,IAAI;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,GAAmB;AAC1B,SAAK,SAAS;AACd,SAAK,kBAAkB;AACvB,QAAI,IAAI,KAAK,IAAI,KAAK,KAAK,WAAW,WAAW,GAAG;AAClD,aAAO;AAAA,IACT;AACA,QAAI,KAAK,WAAW,WAAW,GAAG;AAChC,aAAO,KAAK,WAAW,CAAC,EAAE;AAAA,IAC5B;AACA,UAAM,QAAQ,IAAI,KAAK;AACvB,QAAI,SAAS,KAAK,WAAW,CAAC,EAAE,SAAS,GAAG;AAC1C,aACE,KAAK,OACH,IAAI,QAAS,KAAK,WAAW,CAAC,EAAE,UAC/B,KAAK,WAAW,CAAC,EAAE,OAAO,KAAK;AAAA,IAEtC;AAEA,UAAM,QAAQ;AAAA,MACZ,KAAK,YAAY;AAAA,MACjB,CAAC,MAAc,CAAC,KAAK,YAAY,CAAC,IAAI;AAAA,IACxC;AAEA,QAAI,QAAQ,MAAM,KAAK,YAAY,QAAQ;AACzC,YAAMC,MAAK,QAAQ,KAAK,YAAY,QAAQ,CAAC;AAC7C,YAAMC,MAAK,KAAK,YAAY,KAAK,IAAI;AACrC,aAAO;AAAA,QACL,KAAK,WAAW,QAAQ,CAAC,EAAE;AAAA,QAC3BA;AAAA,QACA,KAAK,WAAW,KAAK,EAAE;AAAA,QACvBD;AAAA,MACF;AAAA,IACF;AAEA,UAAM,KACJ,QAAQ,KAAK,mBAAmB,KAAK,WAAW,QAAQ,CAAC,EAAE,SAAS;AACtE,UAAM,KAAK,KAAK,WAAW,QAAQ,CAAC,EAAE,SAAS,IAAI;AACnD,WAAO;AAAA,MACL,KAAK,WAAW,KAAK,WAAW,SAAS,CAAC,EAAE;AAAA,MAC5C;AAAA,MACA,KAAK;AAAA,MACL;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,GAAmB;AACrB,SAAK,SAAS;AACd,SAAK,kBAAkB;AACvB,YAAQ,KAAK,WAAW,QAAQ;AAAA,MAC9B,KAAK;AACH,eAAO;AAAA,MACT,KAAK,GAAG;AACN,cAAM,QAAQ,KAAK,OAAO,KAAK;AAC/B,YAAI,KAAK,KAAK,MAAM;AAClB,iBAAO;AAAA,QACT;AACA,YAAI,KAAK,KAAK,MAAM;AAClB,iBAAO;AAAA,QACT;AACA,YAAI,IAAI,KAAK,QAAQ,OAAO;AAE1B,iBAAO;AAAA,QACT;AACA,gBAAQ,IAAI,KAAK,QAAQ;AAAA,MAC3B;AAAA,IACF;AAEA,QAAI,KAAK,KAAK,MAAM;AAClB,aAAO;AAAA,IACT;AACA,QAAI,KAAK,KAAK,MAAM;AAClB,aAAO;AAAA,IACT;AACA,UAAM,KAAK,KAAK,WAAW,CAAC,EAAE;AAE9B,QAAI,KAAK,IAAI;AACX,UAAI,KAAK,KAAK,OAAO,GAAG;AACtB,gBACK,IAAI,KAAK,SAAS,KAAK,KAAK,QAAS,KAAK,WAAW,CAAC,EAAE,SAC3D,KAAK,mBACL;AAAA,MAEJ;AACA,aAAO;AAAA,IACT;AAEA,UAAM,KAAK,KAAK,WAAW,KAAK,WAAW,SAAS,CAAC,EAAE;AACvD,QAAI,KAAK,IAAI;AACX,UAAI,KAAK,OAAO,KAAK,GAAG;AACtB,eACE,KACG,KAAK,OAAO,MAAM,KAAK,OAAO,MAC/B,KAAK,WAAW,KAAK,WAAW,SAAS,CAAC,EAAE,SAC5C,KAAK,mBACL;AAAA,MAEN;AACA,aAAO;AAAA,IACT;AAEA,UAAM,QAAQ;AAAA,MACZ,KAAK,WAAW;AAAA;AAAA;AAAA;AAAA,MAIhB,OAAK,IAAI,KAAK,WAAW,CAAC,EAAE,QAAQ;AAAA,IACtC;AAEA,UAAM,KAAK,IAAI,KAAK,WAAW,QAAQ,CAAC,EAAE;AAC1C,UAAM,KAAK,KAAK,WAAW,KAA
K,EAAE,OAAO;AACzC,WACE;AAAA,MACE,KAAK,YAAY,QAAQ,CAAC;AAAA,MAC1B;AAAA,MACA,KAAK,YAAY,KAAK;AAAA,MACtB;AAAA,IACF,IAAI,KAAK;AAAA,EAEb;AAAA,EAEA,aAAa,GAAmB;AAC9B,YACG,KAAK;AAAA,MACH,KAAK,IAAI,GAAG,KAAK,WAAW,IAAI,KAAK,KAAM,KAAK,cAC/C,KAAK,KAAK;AAAA,IACd,IACE,KACF;AAAA,EAEJ;AAAA,EAEA,oBAAoB,GAAmB;AACrC,WAAQ,KAAK,eAAe,KAAK,KAAK,IAAI,IAAI,CAAC,IAAI,KAAK,KAAK,KAAM,KAAK;AAAA,EAC1E;AACF;AA0BA,SAAS,gBACP,IACA,IACA,IACA,IACQ;AACR,MAAI,MAAM,IAAI;AACZ,WAAO,sBAAsB,IAAI,IAAI,IAAI,EAAE;AAAA,EAC7C;AACA,SAAO,sBAAsB,IAAI,IAAI,IAAI,EAAE;AAC7C;AAEA,SAAS,sBACP,IACA,IACA,IACA,IACQ;AACR,QAAM,KAAK,KAAK,KAAK,KAAK,OAAO,KAAK;AACtC,SAAO,KAAK,IAAI,IAAI,KAAK,IAAI,GAAG,EAAE,CAAC;AACrC;AAEA,SAAS,cAAc,MAAc,aAA6B;AAChE,MAAI,SAAS,GAAG;AACd,WAAO,KAAK,KAAK,WAAW,IAAI;AAAA,EAClC;AACA,SAAO;AACT;AAEA,SAAS,gBAAgB,MAAc,aAA6B;AAClE,MAAI,SAAS,GAAG;AACd,WAAO,KAAK,KAAK,WAAW,IAAI;AAAA,EAClC;AACA,SAAO;AACT;;;ACtYO,IAAM,YAAN,MAAgB;AAAA,EACZ;AAAA,EACA;AAAA,EACA;AAAA,EAET,YACE,KACA,UACA,WACA;AACA,SAAK,YAAY;AAAA,MACf,iBAAiB,SAAS,gBAAgB,KAAK,QAAQ;AAAA,MACvD,QAAQ,SAAS,OAAO,KAAK,QAAQ;AAAA,MACrC,IAAI,UAAU;AACZ,eAAO,SAAS;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,OAAO,8BAAqB;AAAA,IACpC;AAEA,SAAK,SAAS,IAAI,OAAO,KAAK,WAAW,IAAI,UAAU,IAAI,aAAa;AACxE,SAAK,cAAc,KAAK,OAAO;AAAA,EACjC;AAAA,EAEA,MAAM,UAA4B;AAChC,YAAQ,MAAM,KAAK,UAAU,MAAM,iBAAiB,KAAK,SAAS;AAAA,EACpE;AAAA,EAEA,MAAM,UAA6B;AACjC,YAAQ,MAAM,KAAK,UAAU,MAAM,iBAAiB,KAAK,SAAS;AAAA,EACpE;AAAA,EAEA,MAAM,qBAAwC;AAC5C,YAAQ,MAAM,KAAK,UAAU,MAAM;AAAA,MACjC,KAAK;AAAA,IACP;AAAA,EACF;AAAA,EAEA,MAAM,gBAAiC;AACrC,YAAQ,MAAM,KAAK,UAAU,MAAM,cAAc,KAAK,SAAS;AAAA,EACjE;AACF;;;ACmDO,SAAS,SAAS,GAA6C;AACpE,SAAO,EAAE,WAAW;AACtB;AAEO,SAAS,SACd,GACwC;AACxC,SAAO,EAAE,WAAW;AACtB;;;AClHO,SAAS,gBAAgB;AAAC;AAC1B,IAAM,cAAc,OAAO,OAAO,CAAC,CAAC;AACpC,IAAM,aAAa,OAAO,OAAO,CAAC,CAAC;AACnC,SAAS,SAAY,GAAS;AACnC,SAAO;AACT;;;ACiCO,IAAM,iBAAiB,OAAO,UAAU;;;ACnBxC,IAAM,iBAAiB,MAAQ,KAAK;AAGpC,IAAM,yBAAyB;AAE/B,IAAM,UAAe;AACrB,IAAM,aAAa,MAAQ,KAAK;AAEvC,IAAM,aAAa;AAAA,EACjB,GAAG;AAAA,EACH,GAAG,KAAK;AAAA,EACR,GAAG,KAAK,KAAK;AAAA,EACb,GAAG,KAAK,KAAK,KAAK;AAAA,EAClB,GAAG,MAAM,KAAK,KAAK,KAAK;AAC1B;AAEO,SAAS,SAAS,KAAkB;AACzC,MAAI,OAAO,QAAQ,UAAU;AAC3B,WAAO,OAAO,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,SAAS,GAAG,KAAK,MAAM,IAAI,KAAK;AAAA,EACzE;AACA,MAAI,QAAQ,QAAQ;AAClB,WAAO;AAAA,EACT;AACA,MAAI,QAAQ,WAAW;AACrB,WAAO;AAAA,EACT;AACA,QAAM,QAAQ,WAAW,IAAI,IAAI,SAAS,CAAC,CAAa;AACxD,SAAO,OAAO,IAAI,MAAM,GAAG,EAAE,CAAC,IAAI;AACpC;AAEO,SAAS,WAAW,GAAQ,GAAgB;AACjD,QAAM,KAAK,SAAS,CAAC;AACrB,QAAM,KAAK,SAAS,CAAC;AACrB,MAAI,OAAO,MAAM,OAAO,IAAI;AAC1B,WAAO;AAAA,EACT;AACA,MAAI,OAAO,MAAM,OAAO,IAAI;AAC1B,WAAO;AAAA,EACT;AACA,SAAO,KAAK;AACd;AAEO,SAAS,aAAa,KAAe;AAC1C,MAAI,OAAO,QAAQ,UAAU;AAC3B,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,GAAG;AACX,WAAO;AAAA,EACT;AAEA,MAAI,QAAQ,GAAG;AACb,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,IAAI,SAAS;AAC5B,QAAM,iBAAiB,SAAS;AAChC,aAAW,QAAQ,CAAC,KAAK,KAAK,KAAK,KAAK,GAAG,GAAY;AACrD,UAAM,QAAQ,WAAW,IAAI;AAC7B,UAAM,QAAQ,MAAM;AACpB,UAAM,YAAY,GAAG,KAAK,GAAG,IAAI;AACjC,QAAI,UAAU,SAAS,SAAS,QAAQ;AACtC,iBAAW;AAAA,IACb;AAAA,EACF;AAEA,SAAQ,SAAS,SAAS,iBAAiB,WAAW;AACxD;AAEO,SAAS,SAAS,KAAU,IAAuC;AACxE,QAAM,YAAY,SAAS,GAAG;AAC9B,MAAI,cAAc,MAAM,YAAY,KAAK,KAAK,KAAM;AAElD,QAAI,OAAO,QAAQ,GAAG,8BAA8B,OAAO,EAAE;AAC7D,WAAO,SAAS,OAAO;AAAA,EACzB;AACA,SAAO;AACT;;;AC9FA,SAAQ,gBAAe;;;ACCvB,IAAM,YAAY,oBAAI,QAAqB;AAEpC,SAAS,UAAU,KAAkB;AAC1C,QAAM,aAAa,aAAa,GAAG;AACnC,QAAM,SAAS,UAAU,IAAI,UAAU;AACvC,MAAI,QAAQ;AACV,WAAO;AAAA,EACT;AACA,QAAME,QAAO,IAAI,KAAK,UAAU,UAAU,CAAC,EAAE,SAAS,EAAE;AACxD,YAAU,IAAI,YAAYA,KAAI;AAC9B,SAAOA;AACT;AAEO,SAAS,kBACd,MACA,MACQ;AACR,QAAM,aAAa,KAAK,UAAU,IAAI;AACtC,SAAO,IAAI,GAAG,IAAI,IAAI,UAA
U,EAAE,EAAE,SAAS,EAAE;AACjD;;;ACoBO,IAAM,oBAAkC;AAAA,EAC7C,KAAK,SAAuB;AAC1B,UAAM,IAAI,MAAM,gBAAgB;AAAA,EAClC;AAAA,EAEA,OAAO,OAAa,UAAmB;AACrC,UAAM,IAAI,MAAM,gBAAgB;AAAA,EAClC;AACF;AAEO,IAAM,cAAN,MAAiD;AAAA,EAC7C;AAAA,EACT,UAAwB;AAAA,EAExB,YAAY,OAAc;AACxB,SAAK,SAAS;AACd,UAAM,UAAU,IAAI;AAAA,EACtB;AAAA,EAEA,gBAAgB,QAAsB;AACpC,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,UAAgB;AACd,SAAK,OAAO,QAAQ;AAAA,EACtB;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK,OAAO,UAAU;AAAA,EAC/B;AAAA,EAEA,KAAK,QAAgB;AACnB,SAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,EAChC;AAAA,EAEA,CAAC,MAAM,KAAiC;AACtC,eAAW,QAAQ,KAAK,OAAO,MAAM,GAAG,GAAG;AACzC,UAAI,KAAK,QAAQ,OAAO,MAAM,KAAK,GAAG;AACpC,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,CAAC,QAAQ,KAAiC;AACxC,eAAW,QAAQ,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC3C,UAAI,KAAK,QAAQ,OAAO,MAAM,IAAI,GAAG;AACnC,cAAM;AAAA,MACR,OAAO;AACL,qBAAa,IAAI;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AACF;AAEO,IAAM,YAAN,MAA+C;AAAA,EAC3C;AAAA,EACA;AAAA,EAET,UAAkB;AAAA,EAElB,YAAY,OAAoB,OAAoB;AAClD,SAAK,SAAS;AACd,SAAK,SAAS;AACd,UAAM,gBAAgB,IAAI;AAAA,EAC5B;AAAA,EAEA,CAAC,MAAM,KAAiC;AACtC,eAAW,QAAQ,KAAK,OAAO,MAAM,GAAG,GAAG;AACzC,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,CAAC,QAAQ,KAAiC;AACxC,eAAW,QAAQ,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC3C,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,OAAO,OAAa,UAAmB;AACrC,WAAO;AAAA,EACT;AAAA,EAEA,UAAU,QAAgB;AACxB,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,UAAgB;AACd,SAAK,OAAO,QAAQ;AAAA,EACtB;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK,OAAO,UAAU;AAAA,EAC/B;AAAA,EAEA,KAAK,QAAgB;AACnB,SAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,EAChC;AACF;AAEO,SAAS,oBACd,OACA,UACA,UACO;AACP,QAAM,cAAc,IAAI,YAAY,KAAK;AACzC,WAAS,QAAQ,OAAO,WAAW;AACnC,QAAM,SAAS,SAAS,WAAW;AACnC,WAAS,QAAQ,aAAa,MAAM;AACpC,QAAM,YAAY,IAAI,UAAU,aAAa,MAAM;AACnD,WAAS,QAAQ,QAAQ,SAAS;AAClC,SAAO;AACT;;;ACxEO,IAAM,cAAsB;AAAA,EACjC,KAAK,SAAuB;AAC1B,UAAM,IAAI,MAAM,gBAAgB;AAAA,EAClC;AACF;;;AC5EO,UAAU,KAAQ,QAAmB,OAA0B;AACpE,MAAI,QAAQ,GAAG;AACb;AAAA,EACF;AACA,MAAI,QAAQ;AACZ,aAAWC,MAAK,QAAQ;AACtB,UAAMA;AACN,QAAI,EAAE,UAAU,OAAO;AACrB;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,MAAS,QAAkC;AACzD,QAAM,KAAK,OAAO,OAAO,QAAQ,EAAE;AACnC,QAAM,EAAC,MAAK,IAAI,GAAG,KAAK;AACxB,KAAG,SAAS;AACZ,SAAO;AACT;;;ACcO,IAAM,SAAN,MAAuC;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,UAAwB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUxB,UAAU;AAAA,EAEV,YACE,OACA,SACA,kBACA,eACA,MACA;AACA,SAAK,SAAS;AACd,SAAK,oBAAoB;AACzB,SAAK,OAAO,gBAAgB,IAAI;AAChC,SAAK,WAAW;AAChB;AAAA,MACE,KAAK,OAAO,UAAU,EAAE,cAAc,gBAAgB;AAAA,MACtD,wBAAwB,gBAAgB;AAAA,IAC1C;AACA,SAAK,OAAO,SAAS;AACrB,SAAK,iBAAiB;AAGtB,SAAK,eAAe;AAAA,MAClB;AAAA,MACA,KAAK,OAAO,UAAU,EAAE;AAAA,IAC1B;AAAA,EACF;AAAA,EAEA,gBAAgB,QAA4B;AAC1C,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,OAAO,MAAY,SAA2B;AAC5C,UAAM,SAAS,KAAK,QAAQ,IAAI,KAAK,KAAK,QAAQ,OAAO,MAAM,OAAO;AACtE,QAAI,SAAS;AACX,WAAK,SAAS,IAAI;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,UAAgB;AACd,SAAK,OAAO,QAAQ;AAAA,EACtB;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK,OAAO,UAAU;AAAA,EAC/B;AAAA,EAEA,KAAK,QAAgB;AACnB,WAAO,CAAC,KAAK,SAAS,wBAAwB;AAC9C,SAAK,UAAU;AACf,QAAI;AACF,cAAQ,OAAO,MAAM;AAAA;AAAA;AAAA,QAGnB,KAAK;AAAA,QACL,KAAK,QAAQ;AACX,eAAK,gBAAgB,MAAM;AAC3B;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,gBAAM,OAAO,KAAK,SAAS,OAAO,IAAI;AAItC,cAAI,SAAS,QAAW;AACtB;AAAA,UACF;AACA,eAAK,gBAAgB,QAAQ,IAAI;AACjC,eAAK,SAAS,OAAO,IAAI;AACzB;AAAA,QACF;AAAA,QACA,KAAK;AAKH,cACE,OAAO,MAAM,qBAAqB,KAAK,qBACvC,OAAO,MAAM,OAAO,SAAS,UAC7B,OAAO,MAAM,OAAO,SAAS,SAC7B;AACA,iBAAK,gBAAgB,MAAM;AAC3B;AAAA,UACF;AACA,kBAAQ,OAAO,MAAM,OAAO,MAAM;AAAA,YAChC,KAAK,OAAO;AACV,kBAAI,OAAO,KAAK,SAAS,OAAO,IAAI;AACpC,kBAAI,SAAS,QAAW;AACtB;AACA,qBAAK,SAAS,OAAO,MAAM,IAAI;AAAA,cACjC,OAAO;AACL,uBAAO,KAAK,WAAW,OAAO,IAAI;AAAA,cACpC;AACA,kBAAI,SAAS,GAAG;AACd,oBAAI,KA
AK,MAAM;AAKb,uBAAK,QAAQ;AAAA,oBACX;AAAA,sBACE,MAAM;AAAA,sBACN,MAAM;AAAA,wBACJ,KAAK,OAAO,KAAK;AAAA,wBACjB,eAAe;AAAA,0BACb,GAAG,OAAO,KAAK;AAAA,0BACf,CAAC,KAAK,iBAAiB,GAAG,MAAM,CAAC;AAAA,wBACnC;AAAA,sBACF;AAAA,oBACF;AAAA,oBACA;AAAA,kBACF;AAAA,gBACF,OAAO;AACL,uBAAK,QAAQ;AAAA,oBACX;AAAA,sBACE,MAAM;AAAA,sBACN,MAAM,OAAO;AAAA,oBACf;AAAA,oBACA;AAAA,kBACF;AAAA,gBACF;AAAA,cACF,OAAO;AACL,qBAAK,gBAAgB,QAAQ,IAAI;AAAA,cACnC;AACA;AAAA,YACF;AAAA,YACA,KAAK,UAAU;AACb,kBAAI,OAAO,KAAK,SAAS,OAAO,IAAI;AACpC,kBAAI,SAAS,QAAW;AACtB,uBAAO,OAAO,CAAC;AACf;AACA,qBAAK,SAAS,OAAO,MAAM,IAAI;AAAA,cACjC,OAAO;AACL,uBAAO,KAAK,WAAW,OAAO,IAAI;AAAA,cACpC;AACA,kBAAI,SAAS,GAAG;AACd,oBAAI,KAAK,MAAM;AACb,uBAAK,QAAQ;AAAA,oBACX;AAAA,sBACE,MAAM;AAAA,sBACN,MAAM,OAAO;AAAA,oBACf;AAAA,oBACA;AAAA,kBACF;AAAA,gBACF,OAAO;AAIL,uBAAK,QAAQ;AAAA,oBACX;AAAA,sBACE,MAAM;AAAA,sBACN,MAAM;AAAA,wBACJ,KAAK,OAAO,KAAK;AAAA,wBACjB,eAAe;AAAA,0BACb,GAAG,OAAO,KAAK;AAAA,0BACf,CAAC,KAAK,iBAAiB,GAAG,MAAM;AAAA,4BAC9B,OAAO,MAAM,OAAO;AAAA,0BACtB;AAAA,wBACF;AAAA,sBACF;AAAA,oBACF;AAAA,oBACA;AAAA,kBACF;AAAA,gBACF;AAAA,cACF,OAAO;AACL,qBAAK,gBAAgB,QAAQ,IAAI;AAAA,cACnC;AACA;AAAA,YACF;AAAA,UACF;AACA;AAAA,QACF;AACE,sBAAY,MAAM;AAAA,MACtB;AAAA,IACF,UAAE;AACA,WAAK,UAAU;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAQ,MAAY,MAAwB;AAC1C,UAAM,UAAU,QAAQ,KAAK,gBAAgB,IAAI,KAAK;AACtD,WAAO,KAAK,OAAO,CAAC,SAAS;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAgB,QAAgB,MAAqB;AACnD,QAAI,KAAK,QAAQ,OAAO,MAAM,IAAI,GAAG;AACnC,WAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,IAChC;AAAA,EACF;AAAA,EAEA,SAAS,MAAgC;AACvC,WAAO,KAAK,SAAS,IAAI,KAAK,oBAAoB,IAAI,CAAC;AAAA,EACzD;AAAA,EAEA,SAAS,MAAY,MAAc;AACjC,SAAK,SAAS,IAAI,KAAK,oBAAoB,IAAI,GAAG,IAAI;AAAA,EACxD;AAAA,EAEA,SAAS,MAAY;AACnB,SAAK,SAAS,IAAI,KAAK,oBAAoB,IAAI,CAAC;AAAA,EAClD;AAAA,EAEA,gBAAgB,MAAoB;AAClC,UAAM,OAAO,KAAK,SAAS,IAAI;AAC/B,QAAI,SAAS,QAAW;AACtB,aAAO;AAAA,IACT;AACA,WAAO,KAAK,WAAW,IAAI;AAAA,EAC7B;AAAA,EAEA,WAAW,MAAoB;AAC7B,QAAI,CAAC,KAAK,gBAAgB,CAAC,KAAK,SAAS;AACvC,YAAM,kBAAkB;AAAA,QACtB,KAAK,SAAS,KAAK;AAAA,UACjB,QAAQ,KAAK,0BAA0B,IAAI;AAAA,QAC7C,CAAC;AAAA,MACH;AACA,UAAI,oBAAoB,QAAW;AACjC,aAAK,SAAS,MAAM,gBAAgB,CAAC,CAAC;AACtC,eAAO,gBAAgB,CAAC;AAAA,MAC1B;AAAA,IACF;AAEA,UAAM,eAAe,KAAK,cAAc,KAAK,iBAAiB;AAC9D,WAAO,YAAY;AACnB,QAAI,OAAO;AACX,eAAW,gBAAgB,aAAa,GAAG;AACzC;AAAA,IACF;AAEA,SAAK,SAAS,MAAM,IAAI;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,0BAA0B,MAAkC;AAC1D,WAAO,OACL,KAAK,eACD,KACA,KAAK,UAAU,KAAK,cAAc,MAAM,KAAK,cAAc,CAAC,CAClE;AAAA,EACF;AAAA,EAEA,oBAAoB,MAA4B;AAC9C,WAAO,GAAG,KAAK,0BAA0B,IAAI,CAAC,GAAG,KAAK;AAAA,MACpD,KAAK,cAAc,MAAM,KAAK,OAAO,UAAU,EAAE,UAAU;AAAA,IAC7D,CAAC;AAAA,EACH;AAAA,EAEA,cAAc,MAAY,KAAqC;AAC7D,UAAM,SAA4B,CAAC;AACnC,eAAW,OAAO,KAAK;AACrB,aAAO,KAAK,mBAAmB,KAAK,IAAI,GAAG,CAAC,CAAC;AAAA,IAC/C;AACA,WAAO;AAAA,EACT;AACF;;;ACxPO,SAAS,uBACd,mBACA,QACA,QACA,kBACAC,qBACA,uBACA;AACA,MAAI,kBAAkB,WAAW,GAAG;AAGlC;AAAA,EACF;AAGA,QAAM,mBAAmB,oBAAI,IAA4B;AACzD,aAAW,UAAU,mBAAmB;AACtC,QAAI,qBAAqB,WAAW,OAAO,SAAS,SAAS;AAC3D;AAAA,QACE,iBAAiB,IAAI,OAAO,IAAI,MAAM;AAAA,QACtC,MACE,qCAAqC,OAAO,IAAI;AAAA,MACpD;AAAA,IACF;AAEA,UAAM,WAAW,iBAAiB,IAAI,OAAO,IAAI;AACjD,QAAI,eAAe;AACnB,QAAI,UAAU;AAEZ,qBAAeA,oBAAmB,UAAU,MAAM;AAAA,IACpD;AACA,qBAAiB,IAAI,OAAO,MAAM,YAAY;AAAA,EAChD;AAEA,oBAAkB,SAAS;AAE3B,QAAM,QAAQ,CAAC,GAAG,iBAAiB,KAAK,CAAC;AAWzC,UAAQ,kBAAkB;AAAA,IACxB,KAAK;AACH;AAAA,QACE,MAAM,WAAW,KAAK,MAAM,CAAC,MAAM;AAAA,QACnC;AAAA,MACF;AACA,aAAO;AAAA,QACL,sBAAsB,KAAK,iBAAiB,IAAI,QAAQ,CAAC,CAAC;AAAA,QAC1D;AAAA,MACF;AACA;AAAA,IACF,KAAK;AACH;AAAA,QACE,MAAM,WAAW,KAAK,MAAM,CAAC,MAAM;AAAA,QACnC;AAAA,MACF;AACA,aAAO;AAAA,QACL,sBAAsB,KAAK,iBAAiB,IAAI,KAAK,CAAC,CAAC;AAAA,QACvD;
AAAA,MACF;AACA;AAAA,IACF,KAAK,QAAQ;AACX;AAAA,QACE,MAAM;AAAA,UACJ,UAAQ,SAAS,SAAS,SAAS,YAAY,SAAS;AAAA,QAC1D;AAAA,QACA;AAAA,MACF;AACA,YAAM,YAAY,iBAAiB,IAAI,KAAK;AAC5C,YAAM,eAAe,iBAAiB,IAAI,QAAQ;AAClD,UAAI,aAAa,iBAAiB,IAAI,MAAM;AAI5C,UAAI,YAAY;AACd,YAAI,WAAW;AACb,uBAAaA,oBAAmB,YAAY,SAAS;AAAA,QACvD;AACA,YAAI,cAAc;AAChB,uBAAaA,oBAAmB,YAAY,YAAY;AAAA,QAC1D;AACA,eAAO,KAAK,sBAAsB,UAAU,GAAG,MAAM;AACrD;AAAA,MACF;AAmBA,UAAI,aAAa,cAAc;AAC7B,eAAO;AAAA,UACL,sBAAsB;AAAA,YACpB,MAAM;AAAA,YACN,MAAM,UAAU;AAAA,YAChB,SAAS,aAAa;AAAA,UACxB,CAAU;AAAA,UACV;AAAA,QACF;AACA;AAAA,MACF;AAEA,aAAO;AAAA,QACL,sBAAsB,KAAK,aAAa,YAAY,CAAC;AAAA,QACrD;AAAA,MACF;AACA;AAAA,IACF;AAAA,IACA,KAAK,SAAS;AACZ;AAAA,QACE,MAAM;AAAA,UACJ,UACE,SAAS;AAAA,UACT,SAAS;AAAA,UACT,SAAS;AAAA;AAAA,QACb;AAAA,QACA;AAAA,MACF;AACA;AAAA,QACE,MAAM,UAAU;AAAA,QAChB;AAAA,MACF;AAGA,YAAM,cAAc,iBAAiB,IAAI,OAAO;AAChD,UAAI,aAAa;AACf,eAAO,KAAK,aAAa,MAAM;AAC/B;AAAA,MACF;AAEA,YAAM,YAAY,iBAAiB,IAAI,KAAK;AAC5C,YAAM,eAAe,iBAAiB,IAAI,QAAQ;AAElD;AAAA,QACE,cAAc,UAAa,iBAAiB;AAAA,QAC5C;AAAA,MACF;AAEA,aAAO;AAAA,QACL,sBAAsB,KAAK,aAAa,YAAY,CAAC;AAAA,QACrD;AAAA,MACF;AACA;AAAA,IACF;AAAA,IACA;AACE;AAAA,EACJ;AACF;AAKO,SAAS,mBAAmB,MAAc,OAAuB;AAItE,MAAI,KAAK,SAAS,MAAM,MAAM;AAC5B,YAAQ,KAAK,MAAM;AAAA,MACjB,KAAK,OAAO;AACV,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,YACJ,KAAK,KAAK,KAAK;AAAA,YACf,eAAe;AAAA,cACb,GAAG,MAAM,KAAK;AAAA,cACd,GAAG,KAAK,KAAK;AAAA,YACf;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,YACJ,KAAK,KAAK,KAAK;AAAA,YACf,eAAe;AAAA,cACb,GAAG,MAAM,KAAK;AAAA,cACd,GAAG,KAAK,KAAK;AAAA,YACf;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,eAAO,MAAM,SAAS,MAAM;AAE5B,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,YACJ,KAAK,KAAK,KAAK;AAAA,YACf,eAAe;AAAA,cACb,GAAG,MAAM,KAAK;AAAA,cACd,GAAG,KAAK,KAAK;AAAA,YACf;AAAA,UACF;AAAA,UACA,SAAS;AAAA,YACP,KAAK,KAAK,QAAQ;AAAA,YAClB,eAAe;AAAA,cACb,GAAG,MAAM,QAAQ;AAAA,cACjB,GAAG,KAAK,QAAQ;AAAA,YAClB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,SAAO,KAAK,SAAS,MAAM;AAC3B,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK,OAAO;AACV,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MAAM;AAAA,UACJ,GAAG,KAAK;AAAA,UACR,eAAe;AAAA,YACb,GAAG,MAAM,KAAK;AAAA,YACd,GAAG,KAAK,KAAK;AAAA,UACf;AAAA,QACF;AAAA,QACA,SAAS,KAAK;AAAA,MAChB;AAAA,IACF;AAAA,IACA,KAAK,UAAU;AACb,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MAAM,KAAK;AAAA,QACX,SAAS;AAAA,UACP,GAAG,KAAK;AAAA,UACR,eAAe;AAAA,YACb,GAAG,MAAM,KAAK;AAAA,YACd,GAAG,KAAK,QAAQ;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,cAAY;AACd;AAEO,SAAS,0BACd,QAC4B;AAC5B,SAAO,CAAC,WAA2B;AACjC,QAAI,OAAO,KAAK,OAAO,aAAa,EAAE,WAAW,GAAG;AAClD,aAAO;AAAA,IACT;AAEA,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK;AAAA,MACL,KAAK,UAAU;AACb,cAAM,MAAM;AAAA,UACV,GAAG;AAAA,UACH,MAAM;AAAA,YACJ,GAAG,OAAO;AAAA,YACV,eAAe;AAAA,cACb,GAAG,OAAO,KAAK;AAAA,YACjB;AAAA,UACF;AAAA,QACF;AAEA,mBAAW,IAAI,KAAK,eAAe,OAAO,KAAK,OAAO,aAAa,CAAC;AAEpE,eAAO;AAAA,MACT;AAAA,MACA,KAAK,QAAQ;AACX,cAAM,MAAM;AAAA,UACV,GAAG;AAAA,UACH,MAAM;AAAA,YACJ,GAAG,OAAO;AAAA,YACV,eAAe;AAAA,cACb,GAAG,OAAO,KAAK;AAAA,YACjB;AAAA,UACF;AAAA,UACA,SAAS;AAAA,YACP,GAAG,OAAO;AAAA,YACV,eAAe;AAAA,cACb,GAAG,OAAO,QAAQ;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAEA,mBAAW,IAAI,KAAK,eAAe,OAAO,KAAK,OAAO,aAAa,CAAC;AACpE;AAAA,UACE,IAAI,QAAQ;AAAA,UACZ,OAAO,KAAK,OAAO,aAAa;AAAA,QAClC;AAEA,eAAO;AAAA,MACT;AAAA,MACA,KAAK;AACH,eAAO;AAAA,IACX;AAAA,EACF;AACF;AAQO,SAAS,WACd,eACA,mBACA;AACA,aAAW,WAAW,mBAAmB;AACvC,QAAI,cAAc,OAAO,MAAM,QAAW;AACxC,oBAAc,OAAO,IAAI,MAAM;AAAA,IACjC;AAAA,EACF;AACF;;;AC5XO,IAAM,QAAN,MAAsC;AAAA,EAClC;AAAA,EACA;AAAA,EACT,UAAwB;AAAA,EACxB,qBAA+B,CAAC;AAAA,EAEhC,YAAY,QAAgB,QAAuB;AACjD,SAAK,UAAU;AACf,SAAK,UAAU,OAAO,UAAU;AAChC,eAAW
,SAAS,QAAQ;AAC1B,YAAM,gBAAgB,IAAI;AAC1B,aAAO,KAAK,YAAY,MAAM,UAAU,GAAG,2BAA2B;AAAA,IACxE;AAAA,EACF;AAAA,EAEA,gBAAgB,QAA4B;AAC1C,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,UAAgB;AACd,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM,QAAQ;AAAA,IAChB;AAAA,EACF;AAAA,EAEA,YAAY;AACV,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAAO,MAAY,SAA2B;AAC5C,WAAO,KAAK,QAAQ,OAAO,MAAM,OAAO;AAAA,EAC1C;AAAA,EAEA,KAAK,QAAgB;AACnB,SAAK,mBAAmB,KAAK,MAAM;AAAA,EACrC;AAAA,EAEA,+BAA+B,kBAAkC;AAC/D,QAAI,KAAK,QAAQ,WAAW,GAAG;AAC7B;AAAA,QACE,KAAK,mBAAmB,WAAW;AAAA,QACnC;AAAA,MACF;AACA;AAAA,IACF;AAEA;AAAA,MACE,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;ACpEO,IAAM,SAAN,MAAuC;AAAA,EACnC;AAAA,EACA,WAA2B,CAAC;AAAA,EACrC;AAAA,EACA,gBAAwB;AAAA,EAExB,YAAY,OAAoB;AAC9B,SAAK,SAAS;AACd,UAAM,gBAAgB,IAAI;AAAA,EAC5B;AAAA,EAEA,SAAS,OAAc;AACrB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,gBAAgB,QAA4B;AAC1C,SAAK,SAAS,KAAK,MAAM;AAAA,EAC3B;AAAA,EAEA,UAAgB;AACd,QAAI,KAAK,gBAAgB,KAAK,SAAS,QAAQ;AAC7C,QAAE,KAAK;AACP,UAAI,KAAK,kBAAkB,KAAK,SAAS,QAAQ;AAC/C,aAAK,OAAO,QAAQ;AAAA,MACtB;AAAA,IACF,OAAO;AACL,YAAM,IAAI,MAAM,+CAA+C;AAAA,IACjE;AAAA,EACF;AAAA,EAEA,YAAY;AACV,WAAO,KAAK,OAAO,UAAU;AAAA,EAC/B;AAAA,EAEA,OAAO,MAAY,SAA2B;AAC5C,QAAI,SAAS;AACb,eAAW,UAAU,KAAK,UAAU;AAClC,eAAS,OAAO,OAAO,MAAM,OAAO,KAAK;AAGzC,UAAI,CAAC,WAAW,QAAQ;AACtB,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,KAAK,QAAgB;AACnB,eAAW,OAAO,KAAK,UAAU;AAC/B,UAAI,KAAK,QAAQ,IAAI;AAAA,IACvB;AACA;AAAA,MACE,KAAK;AAAA,MACL;AAAA,IACF,EAAE,+BAA+B,OAAO,IAAI;AAAA,EAC9C;AACF;;;AC9DO,SAAS,4BACd,QACA,WACA,QACA,QACA;AACA,QAAM,gBAAgB,UAAU,OAAO,QAAQ,GAAG;AAClD,QAAM,eAAe,UAAU,OAAO,KAAK,GAAG;AAE9C,MAAI,iBAAiB,cAAc;AACjC,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B,WAAW,iBAAiB,CAAC,cAAc;AACzC,WAAO;AAAA,MACL;AAAA,QACE,MAAM;AAAA,QACN,MAAM,OAAO;AAAA,MACf;AAAA,MACA;AAAA,IACF;AAAA,EACF,WAAW,CAAC,iBAAiB,cAAc;AACzC,WAAO;AAAA,MACL;AAAA,QACE,MAAM;AAAA,QACN,MAAM,OAAO;AAAA,MACf;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;AC/BO,SAAS,WACd,QACA,QACA,QACA,WACA;AACA,MAAI,CAAC,WAAW;AACd,WAAO,KAAK,QAAQ,MAAM;AAC1B;AAAA,EACF;AACA,UAAQ,OAAO,MAAM;AAAA,IACnB,KAAK;AAAA,IACL,KAAK;AACH,UAAI,UAAU,OAAO,KAAK,GAAG,GAAG;AAC9B,eAAO,KAAK,QAAQ,MAAM;AAAA,MAC5B;AACA;AAAA,IACF,KAAK;AACH,UAAI,UAAU,OAAO,KAAK,GAAG,GAAG;AAC9B,eAAO,KAAK,QAAQ,MAAM;AAAA,MAC5B;AACA;AAAA,IACF,KAAK;AACH,kCAA4B,QAAQ,WAAW,QAAQ,MAAM;AAC7D;AAAA,IACF;AACE,kBAAY,MAAM;AAAA,EACtB;AACF;;;ACjBO,IAAM,SAAN,MAAuC;AAAA,EACnC;AAAA,EACA;AAAA,EAET,UAAwB;AAAA,EAExB,YAAY,OAAoB,WAAkC;AAChE,SAAK,SAAS;AACd,SAAK,aAAa;AAClB,UAAM,gBAAgB,IAAI;AAAA,EAC5B;AAAA,EAEA,OAAO,MAAY,SAA2B;AAC5C,WAAO,KAAK,WAAW,KAAK,GAAG,KAAK,KAAK,QAAQ,OAAO,MAAM,OAAO;AAAA,EACvE;AAAA,EAEA,gBAAgB,QAAsB;AACpC,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,UAAgB;AACd,SAAK,OAAO,QAAQ;AAAA,EACtB;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK,OAAO,UAAU;AAAA,EAC/B;AAAA,EAEA,KAAK,QAAgB;AACnB,eAAW,QAAQ,KAAK,SAAS,MAAM,KAAK,UAAU;AAAA,EACxD;AACF;;;ACjCO,SAAS,qBACd,YACA,KACS;AACT,aAAW,OAAO,YAAY;AAC5B,QAAI,CAAC,YAAY,IAAI,GAAG,GAAG,WAAW,GAAG,CAAC,GAAG;AAC3C,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAOO,SAAS,yBACd,MACA,OACS;AACT,aAAW,OAAO,MAAM;AACtB,QAAI,OAAO,SAAS,CAAC,YAAY,KAAK,GAAG,GAAG,MAAM,GAAG,CAAC,GAAG;AACvD,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,4BACd,YACA,SACS;AACT,QAAM,iBAAiB,OAAO,KAAK,UAAU;AAE7C,MAAI,eAAe,WAAW,QAAQ,QAAQ;AAC5C,WAAO;AAAA,EACT;AAIA,iBAAe,KAAK,aAAa;AAEjC,WAAS,IAAI,GAAG,IAAI,eAAe,QAAQ,KAAK;AAC9C,QAAI,eAAe,CAAC,MAAM,QAAQ,CAAC,GAAG;AACpC,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAkBO,SAAS,wBACd,WACmB;AACnB,MAAI,UAAU,SAAS,OAAO;AAC5B,WAAO,UAAU,WAAW,QAAQ,uBAAuB;AAAA,EAC7D;AAEA,MAAI,UAAU,SAAS,UAAU;AAC/B,WAAO,CAAC,SAAS;AAAA,EACnB;AAEA,MAAI,UAAU,SAAS,QAAQ,UAAU,WAAW,WAAW,GAAG;AAChE,
WAAO,wBAAwB,UAAU,WAAW,CAAC,CAAC;AAAA,EACxD;AAEA,SAAO,CAAC;AACV;AAOO,SAAS,gCACd,WACA,SACwB;AACxB,MAAI,cAAc,QAAW;AAC3B,WAAO;AAAA,EACT;AAEA,QAAM,aAAa,wBAAwB,SAAS;AACpD,MAAI,WAAW,WAAW,GAAG;AAC3B,WAAO;AAAA,EACT;AAEA,QAAM,MAA4B,CAAC;AACnC,aAAW,gBAAgB,YAAY;AACrC,QAAI,aAAa,OAAO,KAAK;AAC3B,YAAM,SAAS,cAAc,YAAY;AACzC,UAAI,WAAW,QAAW;AACxB,YAAI,CAAC,QAAQ,SAAS,OAAO,IAAI,GAAG;AAClC;AAAA,QACF;AACA,YAAI,OAAO,IAAI,IAAI,OAAO;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAEA,MAAI,OAAO,KAAK,GAAG,EAAE,WAAW,QAAQ,QAAQ;AAC9C,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEA,SAAS,cACP,WAC0C;AAC1C,MAAI,UAAU,KAAK,SAAS,UAAU;AACpC,WAAO,UAAU,MAAM,SAAS,SAAS;AACzC,WAAO,EAAC,MAAM,UAAU,KAAK,MAAM,OAAO,UAAU,MAAM,MAAK;AAAA,EACjE;AAEA,SAAO;AACT;;;ACtIO,UAAU,oBACf,QACA,SACA,QACc;AACd,MAAI,UAAU;AACd,MAAI,iBAAiB;AACrB,MAAI,iBAAiB;AACrB,aAAW,QAAQ,QAAQ;AACzB,QAAI,YAAY;AAChB,QAAI,CAAC,SAAS;AACZ,cAAQ,QAAQ,MAAM;AAAA,QACpB,KAAK,OAAO;AACV,cAAI,OAAO,YAAY,QAAQ,KAAK,KAAK,KAAK,GAAG,MAAM,GAAG;AACxD,sBAAU;AACV,wBAAY;AAAA,UACd;AACA;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,cAAI,OAAO,YAAY,QAAQ,KAAK,KAAK,KAAK,GAAG,IAAI,GAAG;AACtD,sBAAU;AACV,kBAAM,QAAQ;AAAA,UAChB;AACA;AAAA,QACF;AAAA,QACA,KAAK,QAAQ;AACX,cACE,CAAC,kBACD,OAAO,YAAY,QAAQ,QAAQ,KAAK,KAAK,GAAG,IAAI,GACpD;AACA,6BAAiB;AACjB,gBAAI,gBAAgB;AAClB,wBAAU;AAAA,YACZ;AACA,kBAAM,QAAQ;AAAA,UAChB;AACA,cACE,CAAC,kBACD,OAAO,YAAY,QAAQ,KAAK,KAAK,KAAK,GAAG,MAAM,GACnD;AACA,6BAAiB;AACjB,gBAAI,gBAAgB;AAClB,wBAAU;AAAA,YACZ;AACA,wBAAY;AAAA,UACd;AACA;AAAA,QACF;AAAA,QACA,KAAK,SAAS;AACZ,cAAI,OAAO,YAAY,QAAQ,KAAK,KAAK,KAAK,GAAG,MAAM,GAAG;AACxD,sBAAU;AACV,kBAAM;AAAA,cACJ,KAAK,KAAK;AAAA,cACV,eAAe;AAAA,gBACb,GAAG,KAAK;AAAA,gBACR,CAAC,QAAQ,MAAM,gBAAgB,GAAG,MAChC;AAAA,kBACE,KAAK,cAAc,QAAQ,MAAM,gBAAgB,EAAE;AAAA,kBACnD,QAAQ,MAAM;AAAA,kBACd,OAAO,cAAc,QAAQ,MAAM,gBAAgB;AAAA,gBACrD;AAAA,cACJ;AAAA,YACF;AACA,wBAAY;AAAA,UACd;AACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,QAAI,WAAW;AACb,YAAM;AAAA,IACR;AAAA,EACF;AACA,MAAI,CAAC,SAAS;AACZ,QAAI,QAAQ,SAAS,UAAU;AAC7B,gBAAU;AACV,YAAM,QAAQ;AAAA,IAChB,WAAW,QAAQ,SAAS,QAAQ;AAClC,aAAO,cAAc;AACrB,uBAAiB;AACjB,gBAAU;AACV,YAAM,QAAQ;AAAA,IAChB;AAAA,EACF;AAEA,SAAO,OAAO;AAChB;AAEO,SAAS,wBACd,GACA,GACA,KACS;AACT,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACnC,QAAI,cAAc,EAAE,IAAI,CAAC,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,GAAG;AAC7C,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,YACd,QACA,WACA,OACA,UACA;AACA,WAAS,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK;AACzC,QAAI,CAAC,YAAY,OAAO,UAAU,CAAC,CAAC,GAAG,MAAM,SAAS,CAAC,CAAC,CAAC,GAAG;AAC1D,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;;;ACpFO,IAAM,cAAN,MAAmC;AAAA,EAC/B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,UAAkB;AAAA,EAElB;AAAA,EAEA,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAS;AACP,WAAO,WAAW,OAAO,8CAA8C;AACvE;AAAA,MACE,UAAU,WAAW,SAAS;AAAA,MAC9B;AAAA,IACF;AACA,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,aAAa;AAClB,SAAK,YAAY;AACjB,SAAK,oBAAoB;AAEzB,UAAM,eAAe,OAAO,UAAU;AACtC,UAAM,cAAc,MAAM,UAAU;AACpC,SAAK,UAAU;AAAA,MACb,GAAG;AAAA,MACH,eAAe;AAAA,QACb,GAAG,aAAa;AAAA,QAChB,CAAC,gBAAgB,GAAG;AAAA,UAClB,GAAG;AAAA,UACH,UAAU;AAAA,UACV;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO,UAAU;AAAA,MACf,MAAM,CAAC,WAAmB,KAAK,YAAY,MAAM;AAAA,IACnD,CAAC;AACD,UAAM,UAAU;AAAA,MACd,MAAM,CAAC,WAAmB,KAAK,WAAW,MAAM;AAAA,IAClD,CAAC;AAAA,EACH;AAAA,EAEA,UAAgB;AACd,SAAK,OAAO,QAAQ;AACpB,SAAK,QAAQ,QAAQ;AAAA,EACvB;AAAA,EAEA,UAAU,QAAsB;AAC9B,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,CAAC,MAAM,KAAiC;AACtC,UAAM,aAAa,CAAC,GAAG,KAAK,OAAO,MAAM,CAAC,CAAC,CAAC;AAS5C,QAAI,KAAK,wBAAwB,OAAO,SAAS,UAAU;AACzD,YAAM,cAAc,KAAK,uBAAuB,OAAO;AACvD,YAAM,UAAU,KAAK,OAAO,UAAU,EA
AE;AACxC,YAAM,YAAY;AAAA,QAAa,WAAW;AAAA,QAAQ,OAChD,QAAQ,YAAY,KAAK,WAAW,CAAC,EAAE,GAAG;AAAA,MAC5C;AACA,iBAAW,OAAO,WAAW,GAAG,WAAW;AAAA,IAC7C;AACA,UAAM,kBAAoC,CAAC;AAC3C,QAAI,QAAQ;AACZ,QAAI;AACF,iBAAW,aAAa,YAAY;AAGlC,cAAM,sBAA4C,CAAC;AACnD,iBAAS,IAAI,GAAG,IAAI,KAAK,WAAW,QAAQ,KAAK;AAC/C,8BAAoB,KAAK,WAAW,CAAC,CAAC,IACpC,UAAU,IAAI,KAAK,UAAU,CAAC,CAAC;AAAA,QACnC;AACA,YACE,IAAI,cACJ,CAAC,yBAAyB,qBAAqB,IAAI,UAAU,GAC7D;AACA,0BAAgB,KAAK,WAAW,OAAO,QAAQ,EAAE,CAAC;AAAA,QACpD,OAAO;AACL,gBAAM,SAAS,KAAK,QAAQ,MAAM;AAAA,YAChC,GAAG;AAAA,YACH,YAAY;AAAA,cACV,GAAG,IAAI;AAAA,cACP,GAAG;AAAA,YACL;AAAA,UACF,CAAC;AACD,gBAAM,WAAW,OAAO,OAAO,QAAQ,EAAE;AACzC,0BAAgB,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,kBAAmC,CAAC;AAC1C,eAAS,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK;AAC/C,cAAM,OAAO,gBAAgB,CAAC;AAC9B,cAAM,SAAS,KAAK,KAAK;AACzB,wBAAgB,CAAC,IAAI,OAAO,OAAO,OAAO,OAAO;AAAA,MACnD;AAEA,aAAO,MAAM;AACX,YAAI,gBAAgB;AACpB,YAAI,4BAAsC,CAAC;AAC3C,iBAAS,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK;AAC/C,gBAAM,aAAa,gBAAgB,CAAC;AACpC,cAAI,eAAe,MAAM;AACvB;AAAA,UACF;AACA,cAAI,kBAAkB,MAAM;AAC1B,4BAAgB;AAChB,sCAA0B,KAAK,CAAC;AAAA,UAClC,OAAO;AACL,kBAAM,gBACJ,KAAK,QAAQ,YAAY,WAAW,KAAK,cAAc,GAAG,KACzD,IAAI,UAAU,KAAK;AACtB,gBAAI,kBAAkB,GAAG;AACvB,wCAA0B,KAAK,CAAC;AAAA,YAClC,WAAW,gBAAgB,GAAG;AAC5B,8BAAgB;AAChB,0CAA4B,CAAC,CAAC;AAAA,YAChC;AAAA,UACF;AAAA,QACF;AACA,YAAI,kBAAkB,MAAM;AAC1B;AAAA,QACF;AACA,cAAM,oBAA4B,CAAC;AACnC,mBAAW,2BAA2B,2BAA2B;AAC/D,4BAAkB,KAAK,WAAW,uBAAuB,CAAC;AAC1D,gBAAM,OAAO,gBAAgB,uBAAuB;AACpD,gBAAM,SAAS,KAAK,KAAK;AACzB,0BAAgB,uBAAuB,IAAI,OAAO,OAC9C,OACA,OAAO;AAAA,QACb;AACA,YAAI,4BAA4B;AAChC,YACE,KAAK,0BACL,KAAK,uBAAuB,YAC5B;AAAA,UACE,KAAK,uBAAuB,OAAO,KAAK;AAAA,UACxC,KAAK;AAAA,UACL,cAAc;AAAA,UACd,KAAK;AAAA,QACP,GACA;AACA,gBAAM,qDACJ,KAAK,QACF,UAAU,EACV;AAAA,YACC,cAAc;AAAA,YACd,KAAK,uBAAuB;AAAA,UAC9B,KAAK;AACT,cAAI,KAAK,uBAAuB,OAAO,SAAS,UAAU;AACxD,gBAAI,oDAAoD;AAGtD,0CAA4B,kBAAkB;AAAA,gBAC5C,OAAK,MAAM,KAAK,wBAAwB,OAAO;AAAA,cACjD;AAAA,YACF;AAAA,UACF,WAAW,CAAC,oDAAoD;AAC9D,wCAA4B;AAAA,cAC1B,GAAG;AAAA,gBACD;AAAA,gBACA,KAAK,uBAAuB;AAAA,gBAC5B,KAAK,OAAO,UAAU;AAAA,cACxB;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAGA,YAAI,0BAA0B,SAAS,GAAG;AACxC,gBAAM;AAAA,YACJ,GAAG;AAAA,YACH,eAAe;AAAA,cACb,GAAG,cAAc;AAAA,cACjB,CAAC,KAAK,iBAAiB,GAAG,MAAM;AAAA,YAClC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,GAAG;AACV,cAAQ;AACR,iBAAW,QAAQ,iBAAiB;AAClC,YAAI;AACF,eAAK,QAAQ,CAAC;AAAA,QAChB,SAAS,eAAe;AAAA,QAGxB;AAAA,MACF;AACA,YAAM;AAAA,IACR,UAAE;AACA,UAAI,CAAC,OAAO;AACV,mBAAW,QAAQ,iBAAiB;AAClC,cAAI;AACF,iBAAK,SAAS;AAAA,UAChB,SAAS,eAAe;AAAA,UAGxB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,CAAC,QAAQ,MAAkC;AAAA,EAAC;AAAA,EAE5C,WAAW,QAAsB;AAC/B,UAAM,kBAAkB,CAAC,WAAqB;AAC5C,WAAK,yBAAyB;AAAA,QAC5B;AAAA,QACA,UAAU;AAAA,MACZ;AACA,UAAI;AACF,cAAM,mBAAmB,KAAK,QAAQ,MAAM;AAAA,UAC1C,YAAY,OAAO;AAAA,YACjB,KAAK,WAAW,IAAI,CAAC,KAAK,MAAM;AAAA,cAC9B;AAAA,cACA,OAAO,KAAK,IAAI,KAAK,UAAU,CAAC,CAAC;AAAA,YACnC,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AACD,mBAAW,cAAc,kBAAkB;AACzC,eAAK,yBAAyB;AAAA,YAC5B;AAAA,YACA,UAAU,WAAW;AAAA,UACvB;AACA,gBAAM,kBAAkB,MACtB,KAAK,OAAO,MAAM;AAAA,YAChB,YAAY,OAAO;AAAA,cACjB,KAAK,UAAU,IAAI,CAAC,KAAK,MAAM;AAAA,gBAC7B;AAAA,gBACA,WAAW,IAAI,KAAK,WAAW,CAAC,CAAC;AAAA,cACnC,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AACH,cAAI,CAAC,QAAQ;AACX,uBAAW,aAAa,gBAAgB,GAAG;AACzC,kBACE,KAAK,OACF,UAAU,EACV,YAAY,UAAU,KAAK,OAAO,KAAK,GAAG,MAAM,GACnD;AACA,yBAAS;AACT;AAAA,cACF;AAAA,YACF;AAAA,UACF;AACA,cAAI,QAAQ;AACV,iBAAK,QAAQ;AAAA,cACX;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM;AAAA,kBACJ,GAAG;AAAA,kBACH,eAAe;AAAA,oBACb,GAAG,WAAW;AAAA,oBACd,CAAC,KAAK,iBAAiB,GAAG;AAAA,kBAC5B;AAAA,gBACF;AAAA,gBACA,OAAO;AAAA,kBACL,kBAAkB,KAAK;AAAA,kBACvB;AAAA,g
BACF;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF,OAAO;AACL,iBAAK,QAAQ;AAAA,cACX;AAAA,gBACE,GAAG;AAAA,gBACH,MAAM;AAAA,kBACJ,GAAG;AAAA,kBACH,eAAe;AAAA,oBACb,GAAG,WAAW;AAAA,oBACd,CAAC,KAAK,iBAAiB,GAAG,MAAM,CAAC,OAAO,IAAI;AAAA,kBAC9C;AAAA,gBACF;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,UAAE;AACA,aAAK,yBAAyB;AAAA,MAChC;AAAA,IACF;AAEA,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK;AAAA,MACL,KAAK;AACH,wBAAgB;AAChB;AAAA,MACF,KAAK,QAAQ;AACX;AAAA,UACE;AAAA,YACE,OAAO,QAAQ;AAAA,YACf,OAAO,KAAK;AAAA,YACZ,KAAK;AAAA,UACP;AAAA,UACA;AAAA,QACF;AACA,wBAAgB,IAAI;AACpB;AAAA,MACF;AAAA,MACA,KAAK;AACH,wBAAgB,IAAI;AACpB;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,YAAY,QAAsB;AAChC,UAAM,kBAAkB,CAAC,SAAe,MACtC,KAAK,OAAO,MAAM;AAAA,MAChB,YAAY,OAAO;AAAA,QACjB,KAAK,UAAU,IAAI,CAAC,KAAK,MAAM,CAAC,KAAK,KAAK,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,CAAC;AAAA,MACpE;AAAA,IACF,CAAC;AAEH,UAAM,OAAO,CAAC,UAAgB;AAAA,MAC5B,GAAG;AAAA,MACH,eAAe;AAAA,QACb,GAAG,KAAK;AAAA,QACR,CAAC,KAAK,iBAAiB,GAAG,gBAAgB,IAAI;AAAA,MAChD;AAAA,IACF;AAGA,QAAI,MAAM,gBAAgB,OAAO,IAAI,EAAE,CAAC,MAAM,QAAW;AACvD;AAAA,IACF;AAEA,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK,SAAS;AACZ,aAAK,QAAQ;AAAA,UACX;AAAA,YACE,GAAG;AAAA,YACH,MAAM,KAAK,OAAO,IAAI;AAAA,UACxB;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX;AAAA,UACE;AAAA,YACE,OAAO,QAAQ;AAAA,YACf,OAAO,KAAK;AAAA,YACZ,KAAK;AAAA,UACP;AAAA,UACA;AAAA,QACF;AACA,aAAK,QAAQ;AAAA,UACX;AAAA,YACE,MAAM;AAAA,YACN,SAAS,KAAK,OAAO,OAAO;AAAA,YAC5B,MAAM,KAAK,OAAO,IAAI;AAAA,UACxB;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF;AAAA,MACA;AACE,oBAAY,MAAM;AAAA,IACtB;AAAA,EACF;AACF;;;ACzYO,IAAM,OAAN,MAA4B;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,UAAkB;AAAA,EAElB;AAAA,EAEA,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAS;AACP,WAAO,WAAW,OAAO,8CAA8C;AACvE;AAAA,MACE,UAAU,WAAW,SAAS;AAAA,MAC9B;AAAA,IACF;AACA,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,WAAW;AAChB,SAAK,aAAa;AAClB,SAAK,YAAY;AACjB,SAAK,oBAAoB;AAEzB,UAAM,eAAe,OAAO,UAAU;AACtC,UAAM,cAAc,MAAM,UAAU;AACpC,SAAK,UAAU;AAAA,MACb,GAAG;AAAA,MACH,eAAe;AAAA,QACb,GAAG,aAAa;AAAA,QAChB,CAAC,gBAAgB,GAAG;AAAA,UAClB,GAAG;AAAA,UACH,UAAU;AAAA,UACV;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO,UAAU;AAAA,MACf,MAAM,CAAC,WAAmB,KAAK,YAAY,MAAM;AAAA,IACnD,CAAC;AACD,UAAM,UAAU;AAAA,MACd,MAAM,CAAC,WAAmB,KAAK,WAAW,MAAM;AAAA,IAClD,CAAC;AAAA,EACH;AAAA,EAEA,UAAgB;AACd,SAAK,QAAQ,QAAQ;AACrB,SAAK,OAAO,QAAQ;AAAA,EACtB;AAAA,EAEA,UAAU,QAAsB;AAC9B,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,CAAC,MAAM,KAAiC;AACtC,eAAW,cAAc,KAAK,QAAQ,MAAM,GAAG,GAAG;AAChD,YAAM,KAAK;AAAA,QACT,WAAW;AAAA,QACX,WAAW;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,CAAC,QAAQ,KAAiC;AACxC,eAAW,cAAc,KAAK,QAAQ,QAAQ,GAAG,GAAG;AAClD,YAAM,KAAK;AAAA,QACT,WAAW;AAAA,QACX,WAAW;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,YAAY,QAAsB;AAChC,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK;AACH,aAAK,QAAQ;AAAA,UACX;AAAA,YACE,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,cACT,OAAO,KAAK;AAAA,cACZ,OAAO,KAAK;AAAA,cACZ;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF,KAAK;AACH,aAAK,QAAQ;AAAA,UACX;AAAA,YACE,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,cACT,OAAO,KAAK;AAAA,cACZ,OAAO,KAAK;AAAA,cACZ;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF,KAAK;AACH,aAAK,QAAQ;AAAA,UACX;AAAA,YACE,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,cACT,OAAO,KAAK;AAAA,cACZ,OAAO,KAAK;AAAA,cACZ;AAAA,YACF;AAAA,YACA,OAAO,OAAO;AAAA,UAChB;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF,KAAK,QAAQ;AAEX;AAAA,UACE;AAAA,YACE,OAAO,QAAQ;AAAA,YACf,OAAO,KAAK;AAAA,YACZ,KAAK;AAAA,UACP;AAAA,UACA;AAAA,QACF;AACA,aAAK,QAAQ;AAAA,UACX;AAAA,YACE,MAAM;AAAA,YACN,SAAS,KAAK;AAAA,cACZ,OAAO,Q
AAQ;AAAA,cACf,OAAO,QAAQ;AAAA,cACf;AAAA,YACF;AAAA,YACA,MAAM,KAAK;AAAA,cACT,OAAO,KAAK;AAAA,cACZ,OAAO,KAAK;AAAA,cACZ;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF;AAAA,MACA;AACE,oBAAY,MAAM;AAAA,IACtB;AAAA,EACF;AAAA,EAEA,WAAW,QAAsB;AAC/B,UAAM,kBAAkB,CAAC,UAAeC,YAAmB;AACzD,WAAK,yBAAyB;AAAA,QAC5B,QAAAA;AAAA,QACA,UAAU;AAAA,MACZ;AACA,UAAI;AACF,cAAM,cAAc,KAAK,QAAQ,MAAM;AAAA,UACrC,YAAY,OAAO;AAAA,YACjB,KAAK,WAAW,IAAI,CAAC,KAAK,MAAM,CAAC,KAAK,SAAS,KAAK,UAAU,CAAC,CAAC,CAAC,CAAC;AAAA,UACpE;AAAA,QACF,CAAC;AAED,mBAAW,cAAc,aAAa;AACpC,eAAK,uBAAuB,WAAW,WAAW;AAClD,gBAAM,cAA2B;AAAA,YAC/B,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,cACT,WAAW;AAAA,cACX,WAAW;AAAA,cACX;AAAA,YACF;AAAA,YACA,OAAO;AAAA,cACL,kBAAkB,KAAK;AAAA,cACvB,QAAAA;AAAA,YACF;AAAA,UACF;AACA,eAAK,QAAQ,KAAK,aAAa,IAAI;AAAA,QACrC;AAAA,MACF,UAAE;AACA,aAAK,yBAAyB;AAAA,MAChC;AAAA,IACF;AAEA,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK;AAAA,MACL,KAAK;AACH,wBAAgB,OAAO,KAAK,KAAK,MAAM;AACvC;AAAA,MACF,KAAK;AACH,wBAAgB,OAAO,KAAK,KAAK,MAAM;AACvC;AAAA,MACF,KAAK,QAAQ;AACX,cAAM,WAAW,OAAO,KAAK;AAC7B,cAAM,cAAc,OAAO,QAAQ;AAEnC;AAAA,UACE,wBAAwB,aAAa,UAAU,KAAK,SAAS;AAAA,UAC7D;AAAA,QACF;AACA,wBAAgB,UAAU,MAAM;AAChC;AAAA,MACF;AAAA,MAEA;AACE,oBAAY,MAAM;AAAA,IACtB;AAAA,EACF;AAAA,EAEA,mBACE,eACA,qBACA,MACM;AACN,QAAI,SAA4B;AAChC,QAAI,iBAAiB;AACrB,UAAM,cAAc,MAAM;AACxB,UAAI,CAAC,gBAAgB;AACnB,YAAI,SAAS,WAAW;AACtB,eAAK,SAAS;AAAA,YACZ;AAAA,cACE,KAAK;AAAA,cACL,KAAK,QAAQ,UAAU,EAAE;AAAA,cACzB;AAAA,YACF;AAAA,UACF;AACA,gBAAM,QACJ;AAAA,YACE,GAAG;AAAA,cACD,KAAK,SAAS,KAAK;AAAA,gBACjB,QAAQ,qBAAqB,eAAe,KAAK,UAAU;AAAA,cAC7D,CAAC;AAAA,cACD;AAAA,YACF;AAAA,UACF,EAAE,WAAW;AACf,mBAAS,QAAQ,YAAY;AAAA,QAC/B;AAEA,yBAAiB;AAGjB,YAAI,SAAS,SAAS;AACpB,eAAK,SAAS;AAAA,YACZ;AAAA,cACE,KAAK;AAAA,cACL,KAAK,QAAQ,UAAU,EAAE;AAAA,cACzB;AAAA,YACF;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,YAAM,SAAS,KAAK,OAAO,MAAM,EAAE;AAAA,QACjC,YAAY,OAAO;AAAA,UACjB,KAAK,UAAU,IAAI,CAAC,KAAK,MAAM;AAAA,YAC7B;AAAA,YACA,cAAc,KAAK,WAAW,CAAC,CAAC;AAAA,UAClC,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAED,UACE,KAAK,0BACL;AAAA,QACE;AAAA,QACA,KAAK;AAAA,QACL,KAAK,uBAAuB,OAAO,KAAK;AAAA,QACxC,KAAK;AAAA,MACP,KACA,KAAK,uBAAuB,YAC5B,KAAK,QAAQ;AAAA,QACX;AAAA,QACA,KAAK,uBAAuB;AAAA,MAC9B,IAAI,GACJ;AACA,eAAO;AAAA,UACL;AAAA,UACA,KAAK,uBAAuB;AAAA,UAC5B,KAAK,OAAO,UAAU;AAAA,QACxB;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL,KAAK;AAAA,MACL,eAAe;AAAA,QACb,GAAG;AAAA,QACH,CAAC,KAAK,iBAAiB,GAAG;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,wBAAwB,QAAkC;AACxE,QAAM,OAAO,KAAK,UAAU,CAAC,WAAW,GAAG,MAAM,CAAC;AAClD,SAAO,KAAK,UAAU,GAAG,KAAK,SAAS,CAAC,IAAI;AAC9C;AAGO,SAAS,qBAAqB,KAAU,KAA0B;AACvE,SAAO,wBAAwB,IAAI,IAAI,OAAK,IAAI,CAAC,CAAC,CAAC;AACrD;AAMO,SAAS,eACd,KACA,YACA,KACQ;AACR,QAAM,SAAkB,IAAI,IAAI,OAAK,IAAI,CAAC,CAAC;AAC3C,aAAWC,QAAO,YAAY;AAC5B,WAAO,KAAK,IAAIA,IAAG,CAAC;AAAA,EACtB;AACA,SAAO,wBAAwB,MAAM;AACvC;;;ACrWO,IAAM,OAAN,MAA+B;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EAET,UAAkB;AAAA,EAElB,YAAY,OAAc,OAAc;AACtC,SAAK,SAAS;AACd,SAAK,SAAS;AACd,SAAK,cAAc,MAAM,UAAU,EAAE;AACrC,UAAM,UAAU,IAAI;AAAA,EACtB;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK,OAAO,UAAU;AAAA,EAC/B;AAAA,EAEA,MAAM,KAAiC;AACrC,WAAO,KAAK,gBAAgB,SAAS,GAAG;AAAA,EAC1C;AAAA,EAEA,QAAQ,KAAiC;AACvC,WAAO,KAAK,gBAAgB,SAAS,GAAG;AAAA,EAC1C;AAAA,EAEA,CAAC,gBAAgB,QAA6B,KAAmB;AAC/D,UAAM,QAAQ,KAAK,UAAU,GAAG;AAChC,QAAI,UAAU,SAAS;AACrB;AAAA,IACF;AACA,UAAM,QAAQ,KAAK,OAAO,MAAM,EAAE,EAAC,GAAG,KAAK,MAAK,CAAC;AACjD,QAAI,CAAC,IAAI,SAAS;AAChB,aAAO;AACP;AAAA,IACF;AACA,eAAW,QAAQ,OAAO;AACxB,UAAI,CAAC,KAAK,iBAAiB,KAAK,GAAG,GAAG;AACpC;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,UAAU,QAAsB;AAC9B,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,UAAgB;AACd,SAAK,OAAO,QAAQ;AAAA,EACtB;AAAA,EA
EA,iBAAiB,KAAmB;AAClC,UAAMC,OAAM,KAAK,YAAY,KAAK,OAAO,KAAK,GAAG;AACjD,WAAOA,OAAM,KAAMA,SAAQ,KAAK,CAAC,KAAK,OAAO;AAAA,EAC/C;AAAA,EAEA,KAAK,QAAsB;AACzB,UAAM,kBAAkB,CAAC,QAAa,KAAK,iBAAiB,GAAG;AAC/D,QAAI,OAAO,SAAS,QAAQ;AAC1B,kCAA4B,QAAQ,iBAAiB,KAAK,SAAS,IAAI;AACvE;AAAA,IACF;AAEA;AAEA,QAAI,gBAAgB,OAAO,KAAK,GAAG,GAAG;AACpC,WAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,IAChC;AAAA,EACF;AAAA,EAEA,UAAU,KAAgD;AACxD,UAAM,aAAa;AAAA,MACjB,KAAK,KAAK,OAAO;AAAA,MACjB,OAAO,KAAK,OAAO,YAAY,UAAU;AAAA,IAC3C;AAEA,QAAI,CAAC,IAAI,OAAO;AACd,UAAI,IAAI,SAAS;AACf,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAEA,UAAMA,OAAM,KAAK,YAAY,KAAK,OAAO,KAAK,IAAI,MAAM,GAAG;AAE3D,QAAI,CAAC,IAAI,SAAS;AAIhB,UAAIA,OAAM,GAAG;AACX,eAAO;AAAA,MACT;AAIA,UAAIA,SAAQ,GAAG;AACb,YAAI,KAAK,OAAO,aAAa,IAAI,MAAM,UAAU,SAAS;AACxD,iBAAO;AAAA,YACL,KAAK,KAAK,OAAO;AAAA,YACjB,OAAO;AAAA,UACT;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAEA,aAAO,IAAI;AAAA,IACb;AAEA,QAAI;AAIJ,QAAIA,OAAM,GAAG;AACX,aAAO;AAAA,IACT;AAEA,QAAIA,SAAQ,GAAG;AAGb,UAAI,CAAC,KAAK,OAAO,aAAa,IAAI,MAAM,UAAU,MAAM;AACtD,eAAO;AAAA,MACT;AAGA,aAAO;AAAA,IACT;AAGA,WAAO,IAAI;AAAA,EACb;AACF;;;ACvIA,IAAM,gBAAgB;AA4Bf,IAAM,OAAN,MAA+B;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAET;AAAA,EAEA,UAAkB;AAAA,EAElB,YACE,OACA,SACA,OACA,cACA;AACA,WAAO,SAAS,CAAC;AACjB;AAAA,MACE,MAAM,UAAU,EAAE;AAAA,MAClB,MAAM,UAAU,EAAE;AAAA,IACpB;AACA,UAAM,UAAU,IAAI;AACpB,SAAK,SAAS;AACd,SAAK,WAAW;AAChB,SAAK,SAAS;AACd,SAAK,gBAAgB;AACrB,SAAK,0BACH,gBAAgB,2BAA2B,YAAY;AAAA,EAC3D;AAAA,EAEA,UAAU,QAAsB;AAC9B,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK,OAAO,UAAU;AAAA,EAC/B;AAAA,EAEA,CAAC,MAAM,KAAiC;AACtC,QACE,CAAC,KAAK,iBACL,IAAI,cACH,8BAA8B,IAAI,YAAY,KAAK,aAAa,GAClE;AACA,YAAM,eAAe,gBAAgB,KAAK,eAAe,IAAI,UAAU;AACvE,YAAM,YAAY,KAAK,SAAS,IAAI,YAAY;AAChD,UAAI,CAAC,WAAW;AACd,eAAO,KAAK,cAAc,GAAG;AAC7B;AAAA,MACF;AACA,UAAI,UAAU,UAAU,QAAW;AACjC;AAAA,MACF;AACA,iBAAW,aAAa,KAAK,OAAO,MAAM,GAAG,GAAG;AAC9C,YAAI,KAAK,UAAU,EAAE,YAAY,UAAU,OAAO,UAAU,GAAG,IAAI,GAAG;AACpE;AAAA,QACF;AACA,YACE,KAAK,uBACL,KAAK,UAAU,EAAE;AAAA,UACf,KAAK;AAAA,UACL,UAAU;AAAA,QACZ,MAAM,GACN;AACA;AAAA,QACF;AACA,cAAM;AAAA,MACR;AACA;AAAA,IACF;AAOA,UAAM,WAAW,KAAK,SAAS,IAAI,aAAa;AAChD,QAAI,aAAa,QAAW;AAC1B;AAAA,IACF;AACA,eAAW,aAAa,KAAK,OAAO,MAAM,GAAG,GAAG;AAC9C,UAAI,KAAK,UAAU,EAAE,YAAY,UAAU,KAAK,QAAQ,IAAI,GAAG;AAC7D;AAAA,MACF;AACA,YAAM,eAAe,gBAAgB,KAAK,eAAe,UAAU,GAAG;AACtE,YAAM,YAAY,KAAK,SAAS,IAAI,YAAY;AAChD,UACE,WAAW,UAAU,UACrB,KAAK,UAAU,EAAE,YAAY,UAAU,OAAO,UAAU,GAAG,KAAK,GAChE;AACA,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,CAAC,cAAc,KAAiC;AAC9C,WAAO,IAAI,UAAU,MAAS;AAC9B,WAAO,CAAC,IAAI,OAAO;AACnB,WAAO,8BAA8B,IAAI,YAAY,KAAK,aAAa,CAAC;AAExE,QAAI,KAAK,WAAW,GAAG;AACrB;AAAA,IACF;AAEA,UAAM,eAAe,gBAAgB,KAAK,eAAe,IAAI,UAAU;AACvE,WAAO,KAAK,SAAS,IAAI,YAAY,MAAM,MAAS;AAEpD,QAAI,OAAO;AACX,QAAI;AACJ,QAAI,wBAAwB;AAC5B,QAAI,kBAAkB;AACtB,QAAI;AACF,iBAAW,aAAa,KAAK,OAAO,MAAM,GAAG,GAAG;AAC9C,cAAM;AACN,gBAAQ,UAAU;AAClB;AACA,YAAI,SAAS,KAAK,QAAQ;AACxB;AAAA,QACF;AAAA,MACF;AACA,8BAAwB;AAAA,IAC1B,SAAS,GAAG;AACV,wBAAkB;AAClB,YAAM;AAAA,IACR,UAAE;AACA,UAAI,CAAC,iBAAiB;AACpB,aAAK;AAAA,UACH;AAAA,UACA;AAAA,UACA;AAAA,UACA,KAAK,SAAS,IAAI,aAAa;AAAA,QACjC;AAKA;AAAA,UACE,CAAC;AAAA,UACD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,CAAC,QAAQ,KAAiC;AACxC,WAAO,IAAI,UAAU,MAAS;AAC9B,WAAO,8BAA8B,IAAI,YAAY,KAAK,aAAa,CAAC;AACxE,UAAM,eAAe,gBAAgB,KAAK,eAAe,IAAI,UAAU;AACvE,SAAK,SAAS,IAAI,YAAY;AAC9B,QAAI,OAAO;AACX,eAAW,aAAa,KAAK,OAAO,QAAQ,GAAG,GAAG;AAChD,UAAI,SAAS,KAAK,QAAQ;AACxB;AAAA,MACF;AACA;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,uBAAuB,KAAU;AAC/B,UAAM,eAAe,gBAAgB,KAAK,eAAe,GAAG;AAC5D,UAAM,YAAY,KAAK,SAAS,IAAI,YAAY;AAChD,QAAI;AACJ,QAAI;AA
CJ,QAAI,WAAW;AACb,iBAAW,KAAK,SAAS,IAAI,aAAa;AAC1C,mBACE,KAAK,iBACL,OAAO;AAAA,QACL,KAAK,cAAc,IAAI,SAAO,CAAC,KAAK,IAAI,GAAG,CAAC,CAAU;AAAA,MACxD;AAAA,IACJ;AAEA,WAAO,EAAC,WAAW,cAAc,UAAU,WAAU;AAAA,EAavD;AAAA,EAEA,KAAK,QAAsB;AACzB,QAAI,OAAO,SAAS,QAAQ;AAC1B,WAAK,gBAAgB,MAAM;AAC3B;AAAA,IACF;AAEA,UAAM,EAAC,WAAW,cAAc,UAAU,WAAU,IAClD,KAAK,uBAAuB,OAAO,KAAK,GAAG;AAC7C,QAAI,CAAC,WAAW;AACd;AAAA,IACF;AAEA,UAAM,EAAC,YAAW,IAAI,KAAK,UAAU;AAErC,QAAI,OAAO,SAAS,OAAO;AACzB,UAAI,UAAU,OAAO,KAAK,QAAQ;AAChC,aAAK;AAAA,UACH;AAAA,UACA,UAAU,OAAO;AAAA,UACjB,UAAU,UAAU,UAClB,YAAY,UAAU,OAAO,OAAO,KAAK,GAAG,IAAI,IAC9C,OAAO,KAAK,MACZ,UAAU;AAAA,UACd;AAAA,QACF;AACA,aAAK,QAAQ,KAAK,QAAQ,IAAI;AAC9B;AAAA,MACF;AAEA,UACE,UAAU,UAAU,UACpB,YAAY,OAAO,KAAK,KAAK,UAAU,KAAK,KAAK,GACjD;AACA;AAAA,MACF;AAEA,UAAI;AACJ,UAAI;AACJ,UAAI,KAAK,WAAW,GAAG;AACrB,oBAAY;AAAA,UACV;AAAA,YACE,KAAK,OAAO,MAAM;AAAA,cAChB,OAAO;AAAA,gBACL,KAAK,UAAU;AAAA,gBACf,OAAO;AAAA,cACT;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF,OAAO;AACL,SAAC,WAAW,eAAe,IAAI;AAAA,UAC7B,KAAK,OAAO,MAAM;AAAA,YAChB,OAAO;AAAA,cACL,KAAK,UAAU;AAAA,cACf,OAAO;AAAA,YACT;AAAA,YACA;AAAA,YACA,SAAS;AAAA,UACX,CAAC;AAAA,UACD;AAAA,QACF;AAAA,MACF;AACA,YAAM,eAA6B;AAAA,QACjC,MAAM;AAAA,QACN,MAAM;AAAA,MACR;AAGA,WAAK;AAAA,QACH;AAAA,QACA,UAAU;AAAA,QACV,oBAAoB,UAClB,YAAY,OAAO,KAAK,KAAK,gBAAgB,GAAG,IAAI,IAClD,OAAO,KAAK,MACZ,gBAAgB;AAAA,QACpB;AAAA,MACF;AACA,WAAK,wBAAwB,OAAO,KAAK,KAAK,MAAM;AAClD,aAAK,QAAQ,KAAK,cAAc,IAAI;AAAA,MACtC,CAAC;AACD,WAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,IAChC,WAAW,OAAO,SAAS,UAAU;AACnC,UAAI,UAAU,UAAU,QAAW;AAEjC;AAAA,MACF;AACA,YAAM,cAAc,YAAY,OAAO,KAAK,KAAK,UAAU,KAAK;AAChE,UAAI,cAAc,GAAG;AAEnB;AAAA,MACF;AACA,YAAM,CAAC,eAAe,IAAI;AAAA,QACxB,KAAK,OAAO,MAAM;AAAA,UAChB,OAAO;AAAA,YACL,KAAK,UAAU;AAAA,YACf,OAAO;AAAA,UACT;AAAA,UACA;AAAA,UACA,SAAS;AAAA,QACX,CAAC;AAAA,QACD;AAAA,MACF;AAEA,UAAI;AACJ,UAAI,iBAAiB;AACnB,cAAM,OAAO,YAAY,gBAAgB,KAAK,UAAU,KAAK,IAAI;AACjE,mBAAW;AAAA,UACT,MAAM;AAAA,UACN;AAAA,QACF;AAAA,MACF;AACA,UAAI,CAAC,UAAU,MAAM;AACnB,mBAAW,QAAQ,KAAK,OAAO,MAAM;AAAA,UACnC,OAAO;AAAA,YACL,KAAK,UAAU;AAAA,YACf,OAAO;AAAA,UACT;AAAA,UACA;AAAA,QACF,CAAC,GAAG;AACF,gBAAM,OAAO,YAAY,KAAK,KAAK,UAAU,KAAK,IAAI;AACtD,qBAAW;AAAA,YACT;AAAA,YACA;AAAA,UACF;AACA,cAAI,MAAM;AACR;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,UAAI,UAAU,MAAM;AAClB,aAAK,QAAQ,KAAK,QAAQ,IAAI;AAC9B,aAAK;AAAA,UACH;AAAA,UACA,UAAU;AAAA,UACV,SAAS,KAAK;AAAA,UACd;AAAA,QACF;AACA,aAAK,QAAQ;AAAA,UACX;AAAA,YACE,MAAM;AAAA,YACN,MAAM,SAAS;AAAA,UACjB;AAAA,UACA;AAAA,QACF;AACA;AAAA,MACF;AACA,WAAK;AAAA,QACH;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,UAAU,KAAK;AAAA,QACf;AAAA,MACF;AACA,WAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,IAChC,WAAW,OAAO,SAAS,SAAS;AAGlC,UACE,UAAU,SACV,YAAY,OAAO,KAAK,KAAK,UAAU,KAAK,KAAK,GACjD;AACA,aAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,MAChC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,gBAAgB,QAA0B;AACxC;AAAA,MACE,CAAC,KAAK,2BACJ,KAAK,wBAAwB,OAAO,QAAQ,KAAK,OAAO,KAAK,GAAG,MAAM;AAAA,MACxE;AAAA,IACF;AAEA,UAAM,EAAC,WAAW,cAAc,UAAU,WAAU,IAClD,KAAK,uBAAuB,OAAO,QAAQ,GAAG;AAChD,QAAI,CAAC,WAAW;AACd;AAAA,IACF;AAEA,WAAO,UAAU,OAAO,qBAAqB;AAC7C,UAAM,EAAC,YAAW,IAAI,KAAK,UAAU;AACrC,UAAM,SAAS,YAAY,OAAO,QAAQ,KAAK,UAAU,KAAK;AAC9D,UAAM,SAAS,YAAY,OAAO,KAAK,KAAK,UAAU,KAAK;AAE3D,UAAM,+BAA+B,MAAM;AACzC,WAAK;AAAA,QACH;AAAA,QACA,UAAU;AAAA,QACV,OAAO,KAAK;AAAA,QACZ;AAAA,MACF;AACA,WAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,IAChC;AAGA,QAAI,WAAW,GAAG;AAEhB,UAAI,WAAW,GAAG;AAEhB,aAAK,QAAQ,KAAK,QAAQ,IAAI;AAC9B;AAAA,MACF;AAEA,UAAI,SAAS,GAAG;AACd,YAAI,KAAK,WAAW,GAAG;AACrB,uCAA6B;AAC7B;AAAA,QACF;AAMA,cAAM,kBAAkB;AAAA,UACtB;AAAA,YACE,KAAK,OAAO,MAAM;AAAA,cAChB,OAAO;AAAA,gBACL,KAAK,UAAU;AAAA,gBACf,OAAO;AAAA,cACT;AAAA,cACA;AAAA,cACA,SAAS;AAAA,YACX,CAAC;AA
AA,UACH;AAAA,QACF;AAEA,aAAK;AAAA,UACH;AAAA,UACA,UAAU;AAAA,UACV,gBAAgB;AAAA,UAChB;AAAA,QACF;AACA,aAAK,QAAQ,KAAK,QAAQ,IAAI;AAC9B;AAAA,MACF;AAEA,aAAO,SAAS,CAAC;AAEjB,YAAM,eAAe;AAAA,QACnB;AAAA,UACE,KAAK,OAAO,MAAM;AAAA,YAChB,OAAO;AAAA,cACL,KAAK,UAAU;AAAA,cACf,OAAO;AAAA,YACT;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAIA,UAAI,YAAY,aAAa,KAAK,OAAO,KAAK,GAAG,MAAM,GAAG;AACxD,qCAA6B;AAC7B;AAAA,MACF;AAIA,WAAK;AAAA,QACH;AAAA,QACA,UAAU;AAAA,QACV,aAAa;AAAA,QACb;AAAA,MACF;AACA,WAAK,wBAAwB,aAAa,KAAK,MAAM;AACnD,aAAK,QAAQ;AAAA,UACX;AAAA,YACE,MAAM;AAAA,YACN,MAAM,OAAO;AAAA,UACf;AAAA,UACA;AAAA,QACF;AAAA,MACF,CAAC;AACD,WAAK,QAAQ;AAAA,QACX;AAAA,UACE,MAAM;AAAA,UACN,MAAM;AAAA,QACR;AAAA,QACA;AAAA,MACF;AACA;AAAA,IACF;AAEA,QAAI,SAAS,GAAG;AACd,aAAO,WAAW,GAAG,8CAA8C;AAGnE,UAAI,SAAS,GAAG;AACd;AAAA,MACF;AAGA,aAAO,SAAS,CAAC;AAEjB,YAAM,CAAC,cAAc,YAAY,IAAI;AAAA,QACnC,KAAK,OAAO,MAAM;AAAA,UAChB,OAAO;AAAA,YACL,KAAK,UAAU;AAAA,YACf,OAAO;AAAA,UACT;AAAA,UACA;AAAA,UACA,SAAS;AAAA,QACX,CAAC;AAAA,QACD;AAAA,MACF;AAGA,WAAK;AAAA,QACH;AAAA,QACA,UAAU;AAAA,QACV,aAAa;AAAA,QACb;AAAA,MACF;AACA,WAAK,wBAAwB,OAAO,KAAK,KAAK,MAAM;AAClD,aAAK,QAAQ;AAAA,UACX;AAAA,YACE,MAAM;AAAA,YACN,MAAM;AAAA,UACR;AAAA,UACA;AAAA,QACF;AAAA,MACF,CAAC;AACD,WAAK,QAAQ;AAAA,QACX;AAAA,UACE,MAAM;AAAA,UACN,MAAM,OAAO;AAAA,QACf;AAAA,QACA;AAAA,MACF;AAEA;AAAA,IACF;AAEA,QAAI,SAAS,GAAG;AACd,aAAO,WAAW,GAAG,8CAA8C;AAGnE,UAAI,SAAS,GAAG;AACd,aAAK,QAAQ,KAAK,QAAQ,IAAI;AAC9B;AAAA,MACF;AAIA,aAAO,SAAS,CAAC;AAIjB,YAAM,iBAAiB;AAAA,QACrB;AAAA,UACE,KAAK,OAAO,MAAM;AAAA,YAChB,OAAO;AAAA,cACL,KAAK,UAAU;AAAA,cACf,OAAO;AAAA,YACT;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAGA,UAAI,YAAY,eAAe,KAAK,OAAO,KAAK,GAAG,MAAM,GAAG;AAC1D,qCAA6B;AAC7B;AAAA,MACF;AAEA,WAAK,QAAQ;AAAA,QACX;AAAA,UACE,MAAM;AAAA,UACN,MAAM,OAAO;AAAA,QACf;AAAA,QACA;AAAA,MACF;AACA,WAAK;AAAA,QACH;AAAA,QACA,UAAU;AAAA,QACV,eAAe;AAAA,QACf;AAAA,MACF;AACA,WAAK,QAAQ;AAAA,QACX;AAAA,UACE,MAAM;AAAA,UACN,MAAM;AAAA,QACR;AAAA,QACA;AAAA,MACF;AACA;AAAA,IACF;AAEA,gBAAY;AAAA,EACd;AAAA,EAEA,wBAAwB,KAAU,IAAgB;AAChD,SAAK,sBAAsB;AAC3B,QAAI;AACF,SAAG;AAAA,IACL,UAAE;AACA,WAAK,sBAAsB;AAAA,IAC7B;AAAA,EACF;AAAA,EAEA,cACE,cACA,MACA,OACA,UACA;AACA,SAAK,SAAS,IAAI,cAAc;AAAA,MAC9B;AAAA,MACA;AAAA,IACF,CAAC;AACD,QACE,UAAU,WACT,aAAa,UACZ,KAAK,UAAU,EAAE,YAAY,OAAO,QAAQ,IAAI,IAClD;AACA,WAAK,SAAS,IAAI,eAAe,KAAK;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,UAAgB;AACd,SAAK,OAAO,QAAQ;AAAA,EACtB;AACF;AAEA,SAAS,gBACP,cACA,iBACQ;AAGR,QAAM,kBAA2B,CAAC;AAElC,MAAI,gBAAgB,iBAAiB;AACnC,eAAW,OAAO,cAAc;AAC9B,sBAAgB,KAAK,gBAAgB,GAAG,CAAC;AAAA,IAC3C;AAAA,EACF;AAEA,SAAO,KAAK,UAAU,CAAC,QAAQ,GAAG,eAAe,CAAC;AACpD;AAEA,SAAS,8BACP,YACA,cACS;AACT,MAAI,eAAe,UAAa,iBAAiB,QAAW;AAC1D,WAAO,eAAe;AAAA,EACxB;AACA,MAAI,aAAa,WAAW,OAAO,KAAK,UAAU,EAAE,QAAQ;AAC1D,WAAO;AAAA,EACT;AACA,aAAW,OAAO,cAAc;AAC9B,QAAI,CAAC,OAAO,YAAY,GAAG,GAAG;AAC5B,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,2BAA2B,cAAwC;AAC1E,SAAO,CAAC,GAAG,MAAM;AACf,eAAW,OAAO,cAAc;AAC9B,YAAMC,OAAM,cAAc,EAAE,GAAG,GAAG,EAAE,GAAG,CAAC;AACxC,UAAIA,SAAQ,GAAG;AACb,eAAOA;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;;;AC9qBO,IAAM,aAAN,MAAqC;AAAA,EACjC;AAAA,EACA;AAAA,EACT,qBAA8B;AAAA,EAC9B,UAAkB;AAAA,EAClB,qBAA+B,CAAC;AAAA,EAEhC,YAAY,QAAqB,QAAiB;AAChD,SAAK,UAAU;AACf,UAAM,eAAe,OAAO,UAAU;AACtC,WAAO,SAAS,IAAI;AAEpB,UAAM,SAAiC;AAAA,MACrC,WAAW,aAAa;AAAA,MACxB,SAAS,aAAa;AAAA,MACtB,YAAY,aAAa;AAAA,MACzB,eAAe;AAAA,QACb,GAAG,aAAa;AAAA,MAClB;AAAA,MACA,UAAU,aAAa;AAAA,MACvB,QAAQ,aAAa;AAAA,MACrB,aAAa,aAAa;AAAA,MAC1B,MAAM,aAAa;AAAA,IACrB;AAGA,UAAM,4BAAyC,oBAAI,IAAI;AACvD,eAAW,SAAS,QAAQ;AAC1B,YAAM,cAAc,MAAM,UAAU;AACpC;AAAA,QACE,OAAO,cAAc,YAAY;AAAA,QACjC,wCAAwC,OAAO,SAAS,QAAQ,YAAY,SAA
S;AAAA,MACvF;AACA;AAAA,QACE,OAAO,eAAe,YAAY;AAAA,QAClC;AAAA,MACF;AACA;AAAA,QACE,OAAO,WAAW,YAAY;AAAA,QAC9B,oCAAoC,OAAO,MAAM,QAAQ,YAAY,MAAM;AAAA,MAC7E;AACA;AAAA,QACE,OAAO,gBAAgB,YAAY;AAAA,QACnC;AAAA,MACF;AACA,aAAO,OAAO,SAAS,YAAY,MAAM,+BAA+B;AAExE,iBAAW,CAAC,SAAS,SAAS,KAAK,OAAO;AAAA,QACxC,YAAY;AAAA,MACd,GAAG;AACD,YAAI,WAAW,aAAa,eAAe;AACzC;AAAA,QACF;AAIA;AAAA,UACE,CAAC,0BAA0B,IAAI,OAAO;AAAA,UACtC,gBAAgB,OAAO;AAAA,QACzB;AACA,eAAO,cAAc,OAAO,IAAI;AAChC,kCAA0B,IAAI,OAAO;AAAA,MACvC;AAEA,YAAM,UAAU,IAAI;AAAA,IACtB;AAEA,SAAK,UAAU;AACf,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,QAAQ,MAAkC;AAExC,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,UAAgB;AACd,eAAW,SAAS,KAAK,SAAS;AAChC,YAAM,QAAQ;AAAA,IAChB;AAAA,EACF;AAAA,EAEA,MAAM,KAAiC;AACrC,UAAM,YAAY,KAAK,QAAQ,IAAI,WAAS,MAAM,MAAM,GAAG,CAAC;AAC5D,WAAO;AAAA,MACL;AAAA,MACA,CAAC,GAAG,MAAM,KAAK,QAAQ,YAAY,EAAE,KAAK,EAAE,GAAG;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,KAAK,QAAgB,QAAyB;AAC5C,QAAI,CAAC,KAAK,oBAAoB;AAC5B,WAAK,oBAAoB,QAAQ,MAAM;AAAA,IACzC,OAAO;AACL,WAAK,mBAAmB,KAAK,MAAM;AAAA,IACrC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,oBAAoB,QAAgB,QAAyB;AAC3D,QAAI,OAAO,SAAS,SAAS;AAC3B,WAAK,QAAQ,KAAK,QAAQ,IAAI;AAC9B;AAAA,IACF;AAEA,WAAO,OAAO,SAAS,SAAS,OAAO,SAAS,QAAQ;AAExD,QAAI,WAAW;AACf,eAAW,SAAS,KAAK,SAAS;AAChC,UAAI,UAAU,QAAQ;AACpB,mBAAW;AACX;AAAA,MACF;AAEA,YAAM,aAAmC,CAAC;AAC1C,iBAAW,OAAO,KAAK,QAAQ,YAAY;AACzC,mBAAW,GAAG,IAAI,OAAO,KAAK,IAAI,GAAG;AAAA,MACvC;AACA,YAAM,cAAc,MAAM,MAAM;AAAA,QAC9B;AAAA,MACF,CAAC;AAED,UAAI,MAAM,WAAW,MAAM,QAAW;AAEpC;AAAA,MACF;AAAA,IACF;AAEA,WAAO,UAAU,mDAAmD;AAGpE,SAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,EAChC;AAAA,EAEA,uBAAuB;AACrB,WAAO,KAAK,uBAAuB,KAAK;AACxC,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEA,kBAAkB,kBAAkC;AAClD,WAAO,KAAK,kBAAkB;AAC9B,SAAK,qBAAqB;AAC1B,QAAI,KAAK,QAAQ,WAAW,GAAG;AAC7B;AAAA,IACF;AAEA,QAAI,KAAK,mBAAmB,WAAW,GAAG;AAGxC;AAAA,IACF;AAEA;AAAA,MACE,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,0BAA0B,KAAK,OAAO;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,UAAU,QAAsB;AAC9B,SAAK,UAAU;AAAA,EACjB;AACF;;;AC5MO,IAAM,cAAN,MAAsC;AAAA,EAC3C,gBAAwB;AAAA,EACxB;AAAA,EACS;AAAA,EACA,WAAqB,CAAC;AAAA,EAE/B,YAAY,OAAc;AACxB,SAAK,SAAS;AACd,UAAM,UAAU,IAAI;AAAA,EACtB;AAAA,EAEA,SAAS,OAAmB;AAC1B,WAAO,CAAC,KAAK,aAAa,mCAAmC;AAC7D,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,KAAK,QAAsB;AACzB,SAAK,KAAK,WAAW,EAAE,qBAAqB;AAC5C,eAAW,UAAU,KAAK,UAAU;AAClC,aAAO,KAAK,QAAQ,IAAI;AAAA,IAC1B;AACA,SAAK,KAAK,WAAW,EAAE,kBAAkB,OAAO,IAAI;AAAA,EACtD;AAAA,EAEA,UAAU,QAAsB;AAC9B,SAAK,SAAS,KAAK,MAAM;AAAA,EAC3B;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK,OAAO,UAAU;AAAA,EAC/B;AAAA,EAEA,MAAM,KAAiC;AACrC,WAAO,KAAK,OAAO,MAAM,GAAG;AAAA,EAC9B;AAAA,EAEA,QAAQ,MAAkC;AAExC,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,UAAgB;AACd,QAAI,KAAK,gBAAgB,KAAK,SAAS,QAAQ;AAC7C,QAAE,KAAK;AACP,UAAI,KAAK,kBAAkB,KAAK,SAAS,QAAQ;AAC/C,aAAK,OAAO,QAAQ;AAAA,MACtB;AAAA,IACF,OAAO;AACL,YAAM,IAAI,MAAM,+CAA+C;AAAA,IACjE;AAAA,EACF;AACF;;;ACbO,IAAM,oBAAN,MAGL;AAAA,EACS;AAAA,EAMT,YACE,QAKA;AACA,SAAK,UAAU;AACf,SAAK,SAAS,KAAK,OAAO,KAAK,IAAI;AAAA,EACrC;AAAA,EAEA,IAAI,KAAK;AACP,WAAO;AAAA,EACT;AAAA,EAoBA,IACE,OACA,WACA,OACW;AACX,WAAO,IAAI,OAAO,WAAW,KAAK;AAAA,EACpC;AAAA,EAEA,OACE,MACA,IACA,OACW;AACX,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAM,qBAAqB,IAAI,IAC3B,KAAK,aAAa,EAAE,IACpB,EAAC,MAAM,WAAW,OAAO,KAAI;AAAA,MACjC,OAAO,qBAAqB,KAAK,IAC7B,MAAM,aAAa,EAAE,IACrB,EAAC,MAAM,WAAW,OAAO,MAAK;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM;AAAA,EACN,KAAK;AAAA,EACL,MAAM;AAAA,EAEN,SAAS,CACP,cACA,IAKA,YACc,KAAK,QAAQ,cAAc,IAAI,OAAO;AACxD;AAEO,SAAS,OAAO,YAAkD;AACvE,QAAM,cAAc,WAAW,gBAAgB,UAAU,CAAC;AAE1D,MAAI,YAAY,WA
AW,GAAG;AAC5B,WAAO,YAAY,CAAC;AAAA,EACtB;AAEA,MAAI,YAAY,KAAK,aAAa,GAAG;AACnC,WAAO;AAAA,EACT;AAEA,SAAO,EAAC,MAAM,OAAO,YAAY,YAAW;AAC9C;AAEO,SAAS,MAAM,YAAkD;AACtE,QAAM,cAAc,YAAY,gBAAgB,UAAU,CAAC;AAE3D,MAAI,YAAY,WAAW,GAAG;AAC5B,WAAO,YAAY,CAAC;AAAA,EACtB;AAEA,MAAI,YAAY,KAAK,YAAY,GAAG;AAClC,WAAO;AAAA,EACT;AAEA,SAAO,EAAC,MAAM,MAAM,YAAY,YAAW;AAC7C;AAEO,SAAS,IAAI,YAAkC;AACpD,UAAQ,WAAW,MAAM;AAAA,IACvB,KAAK;AACH,aAAO;AAAA,QACL,MAAM;AAAA,QACN,YAAY,WAAW,WAAW,IAAI,GAAG;AAAA,MAC3C;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,MAAM;AAAA,QACN,YAAY,WAAW,WAAW,IAAI,GAAG;AAAA,MAC3C;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,WAAW;AAAA,QACpB,IAAI,eAAe,WAAW,EAAE;AAAA,MAClC;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,MAAM;AAAA,QACN,IAAI,eAAe,WAAW,EAAE;AAAA,QAChC,MAAM,WAAW;AAAA,QACjB,OAAO,WAAW;AAAA,MACpB;AAAA,EACJ;AACF;AAEO,SAAS,IACd,OACA,WACA,OACW;AACX,MAAI;AACJ,MAAI,UAAU,QAAW;AACvB,YAAQ;AACR,SAAK;AAAA,EACP,OAAO;AACL,SAAK;AAAA,EACP;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM,EAAC,MAAM,UAAU,MAAM,MAAK;AAAA,IAClC,OAAO,qBAAqB,KAAK,IAC7B,MAAM,aAAa,EAAE,IACrB,EAAC,MAAM,WAAW,MAAK;AAAA,IAC3B;AAAA,EACF;AACF;AAEA,SAAS,qBACP,OAC6B;AAC7B,SACE,UAAU,QAAQ,OAAO,UAAU,YAAa,MAAc,aAAa;AAE/E;AAEO,IAAM,OAAkB;AAAA,EAC7B,MAAM;AAAA,EACN,YAAY,CAAC;AACf;AAEA,IAAM,QAAmB;AAAA,EACvB,MAAM;AAAA,EACN,YAAY,CAAC;AACf;AAEA,SAAS,aAAa,WAA+B;AACnD,SAAO,UAAU,SAAS,SAAS,UAAU,WAAW,WAAW;AACrE;AAEA,SAAS,cAAc,WAA+B;AACpD,SAAO,UAAU,SAAS,QAAQ,UAAU,WAAW,WAAW;AACpE;AAEO,SAAS,kBAAkB,GAAyB;AACzD,MAAI,EAAE,SAAS,YAAY,EAAE,SAAS,sBAAsB;AAC1D,WAAO;AAAA,EACT;AACA,MAAI,EAAE,WAAW,WAAW,GAAG;AAC7B,WAAO,kBAAkB,EAAE,WAAW,CAAC,CAAC;AAAA,EAC1C;AACA,QAAM,aAAa,QAAQ,EAAE,MAAM,EAAE,WAAW,IAAI,iBAAiB,CAAC;AACtE,MAAI,EAAE,SAAS,SAAS,WAAW,KAAK,aAAa,GAAG;AACtD,WAAO;AAAA,EACT;AACA,MAAI,EAAE,SAAS,QAAQ,WAAW,KAAK,YAAY,GAAG;AACpD,WAAO;AAAA,EACT;AACA,SAAO;AAAA,IACL,MAAM,EAAE;AAAA,IACR;AAAA,EACF;AACF;AAEO,SAAS,QACd,MACA,YACa;AACb,QAAMC,aAAyB,CAAC;AAChC,aAAW,KAAK,YAAY;AAC1B,QAAI,EAAE,SAAS,MAAM;AACnB,MAAAA,WAAU,KAAK,GAAG,EAAE,UAAU;AAAA,IAChC,OAAO;AACL,MAAAA,WAAU,KAAK,CAAC;AAAA,IAClB;AAAA,EACF;AAEA,SAAOA;AACT;AAEA,IAAM,0BAA0B;AAAA,EAC9B,CAAC,GAAG,GAAG;AAAA,EACP,CAAC,IAAI,GAAG;AAAA,EACR,CAAC,GAAG,GAAG;AAAA,EACP,CAAC,GAAG,GAAG;AAAA,EACP,CAAC,IAAI,GAAG;AAAA,EACR,CAAC,IAAI,GAAG;AAAA,EACR,CAAC,IAAI,GAAG;AAAA,EACR,CAAC,QAAQ,GAAG;AAAA,EACZ,CAAC,MAAM,GAAG;AAAA,EACV,CAAC,UAAU,GAAG;AAAA,EACd,CAAC,OAAO,GAAG;AAAA,EACX,CAAC,WAAW,GAAG;AAAA,EACf,CAAC,IAAI,GAAG;AAAA,EACR,CAAC,QAAQ,GAAG;AACd;AAEA,IAAM,oBAAoB;AAAA,EACxB,GAAG;AAAA,EACH,CAAC,QAAQ,GAAG;AAAA,EACZ,CAAC,YAAY,GAAG;AAClB;AAEO,SAAS,eACd,IACgC;AAChC,SAAO,KAAK,kBAAkB,EAAE,CAAC;AACnC;AAEA,SAAS,gBAAmBC,QAA+B;AACzD,SAAOA,OAAM,OAAO,OAAK,MAAM,MAAS;AAC1C;AAEA,SAAS,WAAW,YAAsC;AACxD,SAAO,WAAW,OAAO,OAAK,CAAC,aAAa,CAAC,CAAC;AAChD;AAEA,SAAS,YAAY,YAAsC;AACzD,SAAO,WAAW,OAAO,OAAK,CAAC,cAAc,CAAC,CAAC;AACjD;;;ACnTO,SAAS,iBACd,SACA,OACuB;AACvB,QAAM,KAAK,UAAU,OAAO,OAAO,GAAG,KAAK;AAC3C,SAAO,CAAC,QAAsB;AAC5B,iBAAa,GAAG;AAChB,WAAO,GAAG,OAAO,GAAG,CAAC;AAAA,EACvB;AACF;AAEA,SAAS,UAAU,SAAiB,OAA2C;AAO7E,MAAI,CAAC,SAAS,KAAK,OAAO,GAAG;AAC3B,QAAI,UAAU,KAAK;AACjB,YAAM,WAAW,QAAQ,YAAY;AACrC,aAAO,CAAC,QAAgB,IAAI,YAAY,MAAM;AAAA,IAChD;AACA,WAAO,CAAC,QAAgB,QAAQ;AAAA,EAClC;AACA,QAAM,KAAK,gBAAgB,SAAS,KAAK;AACzC,SAAO,CAAC,QAAgB,GAAG,KAAK,GAAG;AACrC;AAEA,IAAM,iBAAiB;AAEvB,SAAS,gBAAgB,QAAgB,QAAkB,IAAY;AAMrE,MAAI,UAAU;AACd,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,QAAI,IAAI,OAAO,CAAC;AAChB,YAAQ,GAAG;AAAA,MACT,KAAK;AACH,mBAAW;AACX;AAAA,MACF,KAAK;AACH,mBAAW;AACX;AAAA;AAAA,MAGF,KAAK;AACH,YAAI,MAAM,OAAO,SAAS,GAAG;AAC3B,gBAAM,IAAI,MAAM,iDAAiD;AAAA,QACnE;AACA;AACA,YAAI,OAAO,CAAC;AAAA;AAAA,MAGd;AACE,YAAI,eAAe,KAAK,CAAC,GAA
G;AAC1B,qBAAW;AAAA,QACb;AACA,mBAAW;AAEX;AAAA,IACJ;AAAA,EACF;AACA,SAAO,IAAI,OAAO,UAAU,KAAK,QAAQ,GAAG;AAC9C;;;AC7CO,SAAS,gBACd,WACuB;AACvB,MAAI,UAAU,SAAS,UAAU;AAC/B,UAAM,aAAa,UAAU,WAAW,IAAI,OAAK,gBAAgB,CAAC,CAAC;AACnE,WAAO,UAAU,SAAS,QACtB,CAAC,QAAa;AAEZ,iBAAW,aAAa,YAAY;AAClC,YAAI,CAAC,UAAU,GAAG,GAAG;AACnB,iBAAO;AAAA,QACT;AAAA,MACF;AACA,aAAO;AAAA,IACT,IACA,CAAC,QAAa;AAEZ,iBAAW,aAAa,YAAY;AAClC,YAAI,UAAU,GAAG,GAAG;AAClB,iBAAO;AAAA,QACT;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,EACN;AACA,QAAM,EAAC,KAAI,IAAI;AACf,QAAM,EAAC,MAAK,IAAI;AAChB;AAAA,IACE,MAAM,SAAS;AAAA,IACf;AAAA,EACF;AACA;AAAA,IACE,KAAK,SAAS;AAAA,IACd;AAAA,EACF;AAEA,UAAQ,UAAU,IAAI;AAAA,IACpB,KAAK;AAAA,IACL,KAAK,UAAU;AACb,YAAMC,QAAO,kBAAkB,MAAM,OAAO,UAAU,EAAE;AACxD,UAAI,KAAK,SAAS,WAAW;AAC3B,cAAM,SAASA,MAAK,KAAK,KAAK;AAC9B,eAAO,MAAM;AAAA,MACf;AACA,aAAO,CAAC,QAAaA,MAAK,IAAI,KAAK,IAAI,CAAC;AAAA,IAC1C;AAAA,EACF;AAEA,MAAI,MAAM,UAAU,QAAQ,MAAM,UAAU,QAAW;AACrD,WAAO,CAAC,SAAc;AAAA,EACxB;AAEA,QAAM,OAAO,oBAAoB,MAAM,OAAO,UAAU,EAAE;AAC1D,MAAI,KAAK,SAAS,WAAW;AAC3B,QAAI,KAAK,UAAU,QAAQ,KAAK,UAAU,QAAW;AACnD,aAAO,CAAC,SAAc;AAAA,IACxB;AACA,UAAM,SAAS,KAAK,KAAK,KAAK;AAC9B,WAAO,MAAM;AAAA,EACf;AAEA,SAAO,CAAC,QAAa;AACnB,UAAM,MAAM,IAAI,KAAK,IAAI;AACzB,QAAI,QAAQ,QAAQ,QAAQ,QAAW;AACrC,aAAO;AAAA,IACT;AACA,WAAO,KAAK,GAAG;AAAA,EACjB;AACF;AAEA,SAAS,kBACP,KACA,UACiB;AACjB,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,aAAO,SAAO,QAAQ;AAAA,IACxB,KAAK;AACH,aAAO,SAAO,QAAQ;AAAA,EAC1B;AACF;AAEA,SAAS,oBACP,KACA,UACuB;AACvB,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,aAAO,SAAO,QAAQ;AAAA,IACxB,KAAK;AACH,aAAO,SAAO,QAAQ;AAAA,IACxB,KAAK;AACH,aAAO,SAAO,MAAM;AAAA,IACtB,KAAK;AACH,aAAO,SAAO,OAAO;AAAA,IACvB,KAAK;AACH,aAAO,SAAO,MAAM;AAAA,IACtB,KAAK;AACH,aAAO,SAAO,OAAO;AAAA,IACvB,KAAK;AACH,aAAO,iBAAiB,KAAK,EAAE;AAAA,IACjC,KAAK;AACH,aAAOC,KAAI,iBAAiB,KAAK,EAAE,CAAC;AAAA,IACtC,KAAK;AACH,aAAO,iBAAiB,KAAK,GAAG;AAAA,IAClC,KAAK;AACH,aAAOA,KAAI,iBAAiB,KAAK,GAAG,CAAC;AAAA,IACvC,KAAK,MAAM;AACT,aAAO,MAAM,QAAQ,GAAG,CAAC;AACzB,YAAM,MAAM,IAAI,IAAI,GAAG;AACvB,aAAO,SAAO,IAAI,IAAI,GAAG;AAAA,IAC3B;AAAA,IACA,KAAK,UAAU;AACb,aAAO,MAAM,QAAQ,GAAG,CAAC;AACzB,YAAM,MAAM,IAAI,IAAI,GAAG;AACvB,aAAO,SAAO,CAAC,IAAI,IAAI,GAAG;AAAA,IAC5B;AAAA,IACA;AACE;AACA,YAAM,IAAI,MAAM,wBAAwB,QAAQ,EAAE;AAAA,EACtD;AACF;AAEA,SAASA,KAAO,GAAwB;AACtC,SAAO,CAAC,QAAW,CAAC,EAAE,GAAG;AAC3B;AAWO,SAAS,iBAAiB,SAG/B;AACA,MAAI,CAAC,SAAS;AACZ,WAAO,EAAC,SAAS,QAAW,mBAAmB,MAAK;AAAA,EACtD;AACA,UAAQ,QAAQ,MAAM;AAAA,IACpB,KAAK;AACH,aAAO,EAAC,SAAS,mBAAmB,MAAK;AAAA,IAC3C,KAAK;AACH,aAAO,EAAC,SAAS,QAAW,mBAAmB,KAAI;AAAA,IACrD,KAAK;AAAA,IACL,KAAK,MAAM;AACT,YAAM,wBAA+C,CAAC;AACtD,UAAI,oBAAoB;AACxB,iBAAW,QAAQ,QAAQ,YAAY;AACrC,cAAM,cAAc,iBAAiB,IAAI;AAGzC,YAAI,YAAY,YAAY,UAAa,QAAQ,SAAS,MAAM;AAC9D,iBAAO,EAAC,SAAS,QAAW,mBAAmB,KAAI;AAAA,QACrD;AACA,4BAAoB,qBAAqB,YAAY;AACrD,YAAI,YAAY,SAAS;AACvB,gCAAsB,KAAK,YAAY,OAAO;AAAA,QAChD;AAAA,MACF;AACA,aAAO;AAAA,QACL,SAAS,kBAAkB;AAAA,UACzB,MAAM,QAAQ;AAAA,UACd,YAAY;AAAA,QACd,CAAC;AAAA,QACD;AAAA,MACF;AAAA,IACF;AAAA,IACA;AACE,kBAAY,OAAO;AAAA,EACvB;AACF;;;AC9FO,SAAS,cACd,KACA,UACA,SACO;AACP,QAAM,SAAS,SAAS,SAAS,OAAO,GAAG,IAAI;AAC/C,SAAO,sBAAsB,KAAK,UAAU,SAAS,EAAE;AACzD;AA+EA,IAAM,eAAe;AACrB,IAAM,2BAA2B;AAEjC,SAAS,sBACP,KACA,UACA,SACA,MACA,cACO;AACP,QAAM,SAAS,SAAS,UAAU,IAAI,KAAK;AAC3C,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,qBAAqB,IAAI,KAAK,EAAE;AAAA,EAClD;AACA,QAAM,2CAA2C,GAAG;AAEpD,QAAM,gBAAgB,wCAAwC,IAAI,KAAK;AACvE,QAAM,gBAA6B,eAC/B,IAAI,IAAI,YAAY,IACpB,oBAAI,IAAI;AACZ,QAAM,UAAU,oBAAI,IAAY;AAChC,aAAW,OAAO,eAAe;AAC/B,YAAQ,IAAI,IAAI,QAAQ,SAAS,SAAS,EAAE;AAC5C,eAAW,OAAO,IAAI,QAAQ,YAAY,aAAa;AACrD,oBAAc,IAAI,GAAG;AAAA,IACvB;AAAA,EACF;AACA,MAAI,IAAI,SAAS;AACf,eAAW,OAAO,IAAI,SAAS;AAC
7B,iBAAW,OAAO,IAAI,YAAY,aAAa;AAC7C,sBAAc,IAAI,GAAG;AAAA,MACvB;AAAA,IACF;AAAA,EACF;AACA,QAAM,OAAO,OAAO;AAAA,IAClB,KAAK,IAAI,OAAO;AAAA,IAChB,IAAI;AAAA,IACJ;AAAA,IACA,SAAS;AAAA,EACX;AAEA,MAAI,MAAa,SAAS,oBAAoB,MAAM,OAAO;AAC3D,QAAM,SAAS,cAAc,KAAK,GAAG,IAAI,WAAW,IAAI,KAAK,GAAG;AAChE,QAAM,EAAC,oBAAmB,IAAI;AAE9B,MAAI,IAAI,OAAO;AACb,UAAM,OAAO,IAAI,KAAK,KAAK,IAAI,KAAK;AACpC,aAAS,QAAQ,KAAK,IAAI;AAC1B,UAAM,SAAS,cAAc,MAAM,GAAG,IAAI,QAAQ;AAAA,EACpD;AAEA,aAAW,gBAAgB,eAAe;AAExC,QAAI,CAAC,aAAa,MAAM;AACtB,YAAM;AAAA,QACJ;AAAA,UACE,GAAG,aAAa;AAAA,UAChB,UAAU;AAAA,YACR,GAAG,aAAa,QAAQ;AAAA,YACxB,OACE,aAAa,QAAQ,WAAW,gBAC5B,2BACA;AAAA,UACR;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,IAAI,UAAU,CAAC,uBAAuB,SAAS,qBAAqB;AACtE,UAAM,WAAW,KAAK,IAAI,OAAO,UAAU,IAAI;AAAA,EACjD;AAEA,MAAI,IAAI,UAAU,QAAW;AAC3B,UAAM,WAAW,GAAG,IAAI;AACxB,UAAMC,QAAO,IAAI;AAAA,MACf;AAAA,MACA,SAAS,cAAc,QAAQ;AAAA,MAC/B,IAAI;AAAA,MACJ;AAAA,IACF;AACA,aAAS,QAAQ,KAAKA,KAAI;AAC1B,UAAM,SAAS,cAAcA,OAAM,QAAQ;AAAA,EAC7C;AAEA,MAAI,IAAI,SAAS;AACf,eAAW,OAAO,IAAI,SAAS;AAC7B,YAAM,wBAAwB,KAAK,UAAU,SAAS,KAAK,MAAM,KAAK;AAAA,IACxE;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,WACP,OACA,WACA,UACA,MACO;AACP,MAAI,CAAC,2CAA2C,SAAS,GAAG;AAC1D,WAAO;AAAA,MAAoB;AAAA,MAAO;AAAA,MAAU,iBAC1C,YAAY,aAAa,WAAW,UAAU,IAAI;AAAA,IACpD;AAAA,EACF;AAEA,SAAO,qBAAqB,OAAO,WAAW,UAAU,IAAI;AAC9D;AAEA,SAAS,qBACP,OACA,WACA,UACA,MACO;AACP,MAAI,MAAM;AACV,SAAO,UAAU,SAAS,UAAU,qCAAqC;AAEzE,UAAQ,UAAU,MAAM;AAAA,IACtB,KAAK,OAAO;AACV,YAAM,CAAC,aAAa,cAAc,IAAI;AAAA,QACpC,UAAU;AAAA,QACV;AAAA,MACF;AACA,UAAI,eAAe,SAAS,GAAG;AAC7B,cAAM;AAAA,UAAoB;AAAA,UAAO;AAAA,UAAU,iBACzC;AAAA,YACE;AAAA,YACA;AAAA,cACE,MAAM;AAAA,cACN,YAAY;AAAA,YACd;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,aAAO,YAAY,SAAS,GAAG,kCAAkC;AACjE,iBAAW,QAAQ,aAAa;AAC9B,cAAM,qBAAqB,KAAK,MAAM,UAAU,IAAI;AAAA,MACtD;AACA;AAAA,IACF;AAAA,IACA,KAAK,MAAM;AACT,YAAM,CAAC,aAAa,cAAc,IAAI;AAAA,QACpC,UAAU;AAAA,QACV;AAAA,MACF;AACA,aAAO,YAAY,SAAS,GAAG,kCAAkC;AAEjE,YAAM,MAAM,IAAI,YAAY,GAAG;AAC/B,eAAS,QAAQ,KAAK,GAAG;AACzB,YAAM,SAAS,cAAc,KAAK,GAAG,IAAI,MAAM;AAE/C,YAAM,WAAoB,CAAC;AAC3B,UAAI,eAAe,SAAS,GAAG;AAC7B,iBAAS;AAAA,UACP;AAAA,YAAoB;AAAA,YAAK;AAAA,YAAU,iBACjC;AAAA,cACE;AAAA,cACA;AAAA,gBACE,MAAM;AAAA,gBACN,YAAY;AAAA,cACd;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,iBAAW,QAAQ,aAAa;AAC9B,iBAAS,KAAK,qBAAqB,KAAK,MAAM,UAAU,IAAI,CAAC;AAAA,MAC/D;AAEA,YAAM,MAAM,IAAI,WAAW,KAAK,QAAQ;AACxC,iBAAW,UAAU,UAAU;AAC7B,iBAAS,QAAQ,QAAQ,GAAG;AAAA,MAC9B;AACA,YAAM,SAAS,cAAc,KAAK,GAAG,IAAI,MAAM;AAE/C;AAAA,IACF;AAAA,IACA,KAAK,sBAAsB;AACzB,YAAM,KAAK,UAAU;AACrB,YAAM,QAAQ;AAAA,QACZ,GAAG;AAAA,QACH;AAAA,QACA;AAAA,QACA,GAAG,IAAI,IAAI,GAAG,SAAS,KAAK;AAAA,QAC5B,GAAG,YAAY;AAAA,MACjB;AACA,YAAM,cAAc,IAAI,YAAY;AAAA,QAClC,QAAQ;AAAA,QACR;AAAA,QACA,WAAW,GAAG,YAAY;AAAA,QAC1B,UAAU,GAAG,YAAY;AAAA,QACzB,kBAAkB;AAAA,UAChB,GAAG,SAAS;AAAA,UACZ;AAAA,QACF;AAAA,QACA,QAAQ,GAAG,UAAU;AAAA,QACrB,QAAQ,GAAG,UAAU;AAAA,MACvB,CAAC;AACD,eAAS,QAAQ,KAAK,WAAW;AACjC,eAAS,QAAQ,OAAO,WAAW;AACnC,YAAM,SAAS;AAAA,QACb;AAAA,QACA,GAAG,IAAI,iBAAiB,GAAG,SAAS,KAAK;AAAA,MAC3C;AACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,YACP,OACA,WACA,UACA,MACA;AACA,UAAQ,UAAU,MAAM;AAAA,IACtB,KAAK;AACH,aAAO,SAAS,OAAO,WAAW,UAAU,IAAI;AAAA,IAClD,KAAK;AACH,aAAO,QAAQ,OAAO,WAAW,UAAU,IAAI;AAAA,IACjD,KAAK;AACH,aAAO,iCAAiC,OAAO,WAAW,UAAU,IAAI;AAAA,IAC1E,KAAK;AACH,aAAO,qBAAqB,OAAO,UAAU,SAAS;AAAA,EAC1D;AACF;AAEA,SAAS,SACP,OACA,WACA,UACA,MACa;AACb,aAAW,gBAAgB,UAAU,YAAY;AAC/C,YAAQ,YAAY,OAAO,cAAc,UAAU,IAAI;AAAA,EACzD;AACA,SAAO;AACT;AAEO,SAAS,QACd,OACA,WACA,UACA,MACa;AACb,QAAM,CAAC,oBAAoB,eAAe,IACx
C,wBAAwB,SAAS;AAEnC,MAAI,mBAAmB,WAAW,GAAG;AACnC,UAAM,SAAS,IAAI;AAAA,MACjB;AAAA,MACA,gBAAgB;AAAA,QACd,MAAM;AAAA,QACN,YAAY;AAAA,MACd,CAAC;AAAA,IACH;AACA,aAAS,QAAQ,OAAO,MAAM;AAC9B,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,IAAI,OAAO,KAAK;AAC/B,WAAS,QAAQ,OAAO,MAAM;AAC9B,QAAM,WAAW,mBAAmB;AAAA,IAAI,kBACtC,YAAY,QAAQ,cAAc,UAAU,IAAI;AAAA,EAClD;AACA,MAAI,gBAAgB,SAAS,GAAG;AAC9B,UAAM,SAAS,IAAI;AAAA,MACjB;AAAA,MACA,gBAAgB;AAAA,QACd,MAAM;AAAA,QACN,YAAY;AAAA,MACd,CAAC;AAAA,IACH;AACA,aAAS,QAAQ,QAAQ,MAAM;AAC/B,aAAS,KAAK,MAAM;AAAA,EACtB;AACA,QAAM,MAAM,IAAI,MAAM,QAAQ,QAAQ;AACtC,aAAW,UAAU,UAAU;AAC7B,aAAS,QAAQ,QAAQ,GAAG;AAAA,EAC9B;AACA,SAAO,SAAS,GAAG;AACnB,SAAO;AACT;AAEO,SAAS,wBAAwB,WAAwB;AAC9D,QAAM,cAGF,CAAC,CAAC,GAAG,CAAC,CAAC;AACX,aAAW,gBAAgB,UAAU,YAAY;AAC/C,QAAI,+BAA+B,YAAY,GAAG;AAChD,kBAAY,CAAC,EAAE,KAAK,YAAY;AAAA,IAClC,OAAO;AACL,kBAAY,CAAC,EAAE,KAAK,YAAY;AAAA,IAClC;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,+BACd,WACkC;AAClC,MAAI,UAAU,SAAS,sBAAsB;AAC3C,WAAO;AAAA,EACT;AACA,MAAI,UAAU,SAAS,UAAU;AAC/B,WAAO;AAAA,EACT;AACA,SAAO,UAAU,WAAW,MAAM,8BAA8B;AAClE;AAEA,SAAS,qBACP,OACA,UACA,WACa;AACb,QAAM,SAAS,IAAI,OAAO,OAAO,gBAAgB,SAAS,CAAC;AAC3D,WAAS;AAAA,IACP;AAAA,IACA,GAAG,aAAa,UAAU,IAAI,CAAC,IAAI,UAAU,EAAE,IAAI,aAAa,UAAU,KAAK,CAAC;AAAA,EAClF;AACA,WAAS,QAAQ,OAAO,MAAM;AAC9B,SAAO;AACT;AAEA,SAAS,aAAa,MAAqB;AACzC,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAK;AACH,aAAO,KAAK;AAAA,IACd,KAAK;AACH,aAAO,KAAK;AAAA,IACd,KAAK;AACH,aAAO,KAAK;AAAA,EAChB;AACF;AAEA,SAAS,wBACP,IACA,UACA,SACA,KACA,MACA,eACA;AAGA,MAAI,GAAG,SAAS,UAAU,KAAK,eAAe;AAC5C,WAAO;AAAA,EACT;AAEA,SAAO,GAAG,SAAS,OAAO,6BAA6B;AACvD,QAAM,QAAQ;AAAA,IACZ,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,GAAG,IAAI,IAAI,GAAG,SAAS,KAAK;AAAA,IAC5B,GAAG,YAAY;AAAA,EACjB;AAEA,QAAM,WAAW,GAAG,IAAI,SAAS,GAAG,SAAS,KAAK;AAClD,QAAM,OAAO,IAAI,KAAK;AAAA,IACpB,QAAQ;AAAA,IACR;AAAA,IACA,SAAS,SAAS,cAAc,QAAQ;AAAA,IACxC,WAAW,GAAG,YAAY;AAAA,IAC1B,UAAU,GAAG,YAAY;AAAA,IACzB,kBAAkB,GAAG,SAAS;AAAA,IAC9B,QAAQ,GAAG,UAAU;AAAA,IACrB,QAAQ,GAAG,UAAU;AAAA,EACvB,CAAC;AACD,WAAS,QAAQ,KAAK,IAAI;AAC1B,WAAS,QAAQ,OAAO,IAAI;AAC5B,SAAO,SAAS,cAAc,MAAM,QAAQ;AAC9C;AAEA,SAAS,iCACP,OACA,WACA,UACA,MACa;AACb,SAAO,UAAU,OAAO,YAAY,UAAU,OAAO,YAAY;AACjE,MAAI,UAAU,QAAQ,SAAS,UAAU,GAAG;AAC1C,QAAI,UAAU,OAAO,UAAU;AAC7B,YAAMC,UAAS,IAAI,OAAO,OAAO,MAAM,KAAK;AAC5C,eAAS,QAAQ,OAAOA,OAAM;AAC9B,aAAOA;AAAA,IACT;AACA,UAAM,SAAS,IAAI,OAAO,OAAO,MAAM,IAAI;AAC3C,aAAS,QAAQ,OAAO,MAAM;AAC9B,WAAO;AAAA,EACT;AACA,QAAM,aAAa,GAAG,IAAI,WAAW,UAAU,QAAQ,SAAS,KAAK;AACrE,QAAM,SAAS,IAAI;AAAA,IACjB;AAAA,IACA,SAAS,cAAc,UAAU;AAAA,IACjC,KAAK,UAAU,QAAQ,SAAS,KAAK;AAAA,IACrC,UAAU,QAAQ,YAAY;AAAA,IAC9B,UAAU;AAAA,EACZ;AACA,WAAS,QAAQ,OAAO,MAAM;AAC9B,SAAO,SAAS,oBAAoB,QAAQ,UAAU;AACxD;AAEA,SAAS,wCACP,WACA;AACA,QAAM,OAAsC,CAAC;AAC7C,QAAM,SAAS,CAACC,eAAyB;AACvC,QAAIA,WAAU,SAAS,sBAAsB;AAC3C,WAAK,KAAKA,UAAS;AACnB;AAAA,IACF;AACA,QAAIA,WAAU,SAAS,SAASA,WAAU,SAAS,MAAM;AACvD,iBAAW,KAAKA,WAAU,YAAY;AACpC,eAAO,CAAC;AAAA,MACV;AACA;AAAA,IACF;AAAA,EACF;AACA,MAAI,WAAW;AACb,WAAO,SAAS;AAAA,EAClB;AACA,SAAO;AACT;AAEO,SAAS,yBACd,UACA,IACM;AAEN,QAAM,iBAAiB,SAAS,IAAI,CAAC,CAAC,KAAK,MAAM,KAAK;AACtD,QAAM,gBAAgB,GAAG,OAAO,aAAW,CAAC,eAAe,SAAS,OAAO,CAAC;AAE5E,MAAI,cAAc,SAAS,GAAG;AAC5B,UAAM,IAAI;AAAA,MACR,0DAA0D,cAAc;AAAA,QACtE;AAAA,MACF,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,IAKH;AAAA,EACF;AACF;AAEA,SAAS,2CAA2C,KAAe;AACjE,MAAI,CAAC,IAAI,OAAO;AACd,WAAO;AAAA,EACT;AACA,QAAM,EAAC,MAAK,IAAI;AAChB,MAAI,MAAM,SAAS,SAAS,MAAM,SAAS,MAAM;AAC/C,WAAO;AAAA,EACT;AAEA,MAAI,QAAQ;AACZ,QAAM,6BAA6B,CAAC,UAAuC;AAAA,IACzE,GAAG;AAAA,IACH,SAAS;AAAA,MACP,GAAG,KAAK;AAAA,MACR,UAAU;AAAA,QACR,GAAG,KAAK,QAAQ;AAAA,QAChB,QAAQ,KAAK,QAAQ,SAAS,SAAS,MAAM,MAAM;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAEA
,QAAM,WAAW,CAAC,SAA+B;AAC/C,QAAI,KAAK,SAAS,UAAU;AAC1B,aAAO;AAAA,IACT,WAAW,KAAK,SAAS,sBAAsB;AAC7C,aAAO,2BAA2B,IAAI;AAAA,IACxC;AACA,UAAM,aAAa,CAAC;AACpB,eAAW,KAAK,KAAK,YAAY;AAC/B,iBAAW,KAAK,SAAS,CAAC,CAAC;AAAA,IAC7B;AACA,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,GAAG;AAAA,IACH,OAAO,SAAS,KAAK;AAAA,EACvB;AACA,SAAO;AACT;AAEO,SAAS,2CACd,MACS;AACT,MAAI,KAAK,SAAS,sBAAsB;AACtC,WAAO,CAAC,CAAC,KAAK;AAAA,EAChB;AACA,MAAI,KAAK,SAAS,SAAS,KAAK,SAAS,MAAM;AAC7C,WAAO,KAAK,WAAW;AAAA,MAAK,OAC1B,2CAA2C,CAAC;AAAA,IAC9C;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,kBACd,YACA,WACA;AACA,QAAM,UAAuB,CAAC;AAC9B,QAAM,aAA0B,CAAC;AACjC,aAAW,KAAK,YAAY;AAC1B,QAAI,UAAU,CAAC,GAAG;AAChB,cAAQ,KAAK,CAAC;AAAA,IAChB,OAAO;AACL,iBAAW,KAAK,CAAC;AAAA,IACnB;AAAA,EACF;AACA,SAAO,CAAC,SAAS,UAAU;AAC7B;;;ACptBO,IAAM,sBAAN,cAAkC,MAAM;AAAA,EAC7C,YAAY,SAAiB;AAC3B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AACF;;;ACiBO,IAAM,YAAN,MAAgE;AAAA,EAC5D;AAAA,EACA,aAAa,oBAAI,IAAiB;AAAA,EAClC;AAAA,EACA;AAAA;AAAA;AAAA,EAIA;AAAA,EAET;AAAA,EAEA,SAAS;AAAA,EACT,cAA0B;AAAA,EAC1B;AAAA,EACS;AAAA,EAET,YACE,OACA,QACA,eACA,WACA;AACA,SAAK,SAAS;AACd,SAAK,UAAU,MAAM,UAAU;AAC/B,SAAK,UAAU;AACf,SAAK,aAAa;AAClB,SAAK,QAAQ,EAAC,IAAI,OAAO,WAAW,SAAY,CAAC,EAAC;AAClD,UAAM,UAAU,IAAI;AAEpB,QAAI,kBAAkB,MAAM;AAC1B,WAAK,cAAc;AAAA,IACrB,WAAW,WAAW,eAAe;AACnC,WAAK,cAAc;AACnB,WAAK,SAAS;AAAA,IAChB,OAAO;AACL,WAAK,cACF,KAAK,MAAM;AACV,aAAK,cAAc;AACnB,aAAK,eAAe;AAAA,MACtB,CAAC,EACA,MAAM,OAAK;AACV,aAAK,cAAc;AACnB,aAAK,SAAS;AACd,aAAK,eAAe;AAAA,MACtB,CAAC;AAAA,IACL;AACA,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,OAAO;AACT,WAAO,KAAK,MAAM,EAAE;AAAA,EACtB;AAAA,EAEA,YAAY,UAAuB;AACjC,WAAO,CAAC,KAAK,WAAW,IAAI,QAAQ,GAAG,6BAA6B;AACpE,SAAK,WAAW,IAAI,QAAQ;AAE5B,SAAK,cAAc,QAAQ;AAE3B,WAAO,MAAM;AACX,WAAK,WAAW,OAAO,QAAQ;AAAA,IACjC;AAAA,EACF;AAAA,EAEA,iBAAiB;AACf,eAAW,YAAY,KAAK,YAAY;AACtC,WAAK,cAAc,QAAQ;AAAA,IAC7B;AAAA,EACF;AAAA,EAEA,cAAc,UAAuB;AACnC,aAAS,KAAK,MAAsB,KAAK,aAAa,KAAK,MAAM;AAAA,EACnE;AAAA,EAEA,UAAU;AACR,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,WAAW;AACT,SAAK,SAAS;AACd,eAAW,QAAQ,KAAK,OAAO,MAAM,CAAC,CAAC,GAAG;AACxC;AAAA,QACE,KAAK;AAAA,QACL,EAAC,MAAM,OAAO,KAAI;AAAA,QAClB,KAAK;AAAA,QACL;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AACA,SAAK,MAAM;AAAA,EACb;AAAA,EAEA,KAAK,QAAsB;AACzB,SAAK,SAAS;AACd,gBAAY,KAAK,OAAO,QAAQ,KAAK,SAAS,IAAI,KAAK,OAAO;AAAA,EAChE;AAAA,EAEA,QAAQ;AACN,QAAI,CAAC,KAAK,QAAQ;AAChB;AAAA,IACF;AACA,SAAK,SAAS;AACd,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,UAAU,KAAU;AAClB,SAAK,WAAW,GAAG;AAAA,EACrB;AACF;;;AClHO,SAAS,kBAAkB,WAA4B;AAC5D,UAAQ,UAAU,MAAM;AAAA,IACtB,KAAK;AAEH;AAAA,IAEF,KAAK;AACH,UAAI,UAAU,OAAO,cAAc;AACjC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAEA,UAAI,UAAU,QAAQ,SAAS,OAAO;AACpC,0BAAkB,UAAU,QAAQ,SAAS,KAAK;AAAA,MACpD;AACA;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,iBAAW,KAAK,UAAU,YAAY;AACpC,0BAAkB,CAAC;AAAA,MACrB;AACA;AAAA,IACF;AACE,kBAAY,SAAS;AAAA,EACzB;AACF;;;A1BiBO,SAAS,YACd,OACA,UACA,kBAIA,cACA;AACA,MAAI,OAAO,qBAAqB,YAAY;AAC1C,WACG,MAEE,cAAc,EAAE,QAAQ,EACxB,YAAY,kBAAkB,cAAc,GAAG;AAAA,EAEtD;AACA,SACG,MAEE,cAAc,EAAE,QAAQ,EACxB,YAAY,kBAAkB,GAAG;AAExC;AAEA,IAAM,YAAY,OAAO;AAMlB,SAAS,SAId,UACA,QACA,OACwB;AACxB,SAAO,IAAI;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA,EAAC,MAAK;AAAA,IACN;AAAA,IACA;AAAA,EACF;AACF;AAEO,SAAS,YACd,aACA,OACW;AACX,SAAO;AAAA,IACL,MAAM;AAAA,IACN,QAAQ;AAAA;AAAA,IAER,OAAO,MAAM,WAAW,IAAI,MAAM,CAAC,IAAI;AAAA,EACzC;AACF;AAEO,IAAM,cAAc;AAEpB,IAAM,gBAAgB,EAAC,UAAU,OAAO,eAAe,CAAC,EAAC;AAEzD,IAAM,iBAAiB,OAAO;AAE9B,IAAe,gBAAf,MAKP;AAAA,EACW;AAAA,EACU;AAAA,EACV;AAAA,EACA;AAAA,EACA;AAAA,EACT,QAAgB;AAAA,EACP;AAAA,EACA;AAAA,EACA;AAAA,EAET,YACE,UACA,QACA,WACA,KACA,QACA,QACA,eACA,iBACA;AACA,SAAK,UAAU;AACf,S
AAK,YAAY;AACjB,SAAK,aAAa;AAClB,SAAK,OAAO;AACZ,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,mBAAmB;AACxB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,CAAC,cAAc,EAAE,UAA0D;AACzE,WAAO,KAAK,cAAc;AAAA,MACxB;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA,EAEA,YACE,MACA,MACiC;AACjC,WAAO,KAAK,cAAc;AAAA,MACxB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAAA,EAEA,KAAK,SAAS,IAAS;AACrB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,MAAM;AACR,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA,EAEA,OAAe;AACb,QAAI,CAAC,KAAK,OAAO;AACf,WAAK,QAAQ,UAAU,KAAK,aAAa,CAAC;AAAA,IAC5C;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAiBA,MAAM,MACJ,KAAK,cAAc;AAAA,IACjB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL;AAAA,MACE,GAAG,KAAK;AAAA,MACR,OAAO;AAAA,IACT;AAAA,IACA;AAAA,MACE,GAAG,KAAK;AAAA,MACR,UAAU;AAAA,IACZ;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,EACP;AAAA,EAEF,cAAc,CACZ,cACA,aACA,YACoC;AACpC,UAAM,KAAK,OAAO,gBAAgB,aAAa,cAAc;AAC7D,UAAM,OAAO,OAAO,gBAAgB,aAAa,UAAU;AAC3D,UAAM,UAAU,MAAM,QAAQ;AAC9B,WAAO,KAAK,MAAM,CAAC,EAAC,OAAM,MAAM,OAAO,cAAc,IAAI,EAAC,MAAM,QAAO,CAAC,CAAC;AAAA,EAC3E;AAAA,EAEA,UAAU,CACR,cACA,OACa;AACb,QAAI,aAAa,WAAW,WAAW,GAAG;AACxC,YAAM,IAAI;AAAA,QACR,0CAA0C,WAAW;AAAA,MACvD;AAAA,IACF;AACA,SAAK,OAAO,OAAK;AAEjB,UAAM,UAAU,KAAK,QAAQ,cAAc,KAAK,UAAU,EAAE,YAAY;AACxE,WAAO,SAAS,sBAAsB;AACtC,QAAI,SAAS,OAAO,GAAG;AACrB,YAAM,EAAC,YAAY,WAAW,aAAa,YAAW,IAAI,QAAQ,CAAC;AACnE,YAAM,IAAc,KAAK,cAAc;AAAA,QACrC,KAAK;AAAA,QACL,KAAK;AAAA,QACL;AAAA,QACA;AAAA,UACE,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAAA,QACA;AAAA,UACE,eAAe,CAAC;AAAA,UAChB,UAAU,gBAAgB;AAAA,QAC5B;AAAA,QACA,KAAK;AAAA,QACL;AAAA,MACF;AAOA,YAAM,KAAK,GAAG,CAAC;AACf;AAAA,QACE,cAAc,WAAW;AAAA,QACzB;AAAA,MACF;AACA;AAAA,QACE,cAAc,SAAS;AAAA,QACvB;AAAA,MACF;AACA;AAAA,QACE,YAAY,WAAW,UAAU;AAAA,QACjC;AAAA,MACF;AAEA,aAAO,KAAK,cAAc;AAAA,QACxB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL;AAAA,UACE,GAAG,KAAK;AAAA,UACR,SAAS;AAAA,YACP,GAAI,KAAK,KAAK,WAAW,CAAC;AAAA,YAC1B;AAAA,cACE,QAAQ,KAAK;AAAA,cACb,aAAa;AAAA,gBACX,aAAa;AAAA,gBACb,YAAY;AAAA,cACd;AAAA,cACA,UAAU;AAAA,gBACR,KAAK,QAAQ,OAAO,UAAU;AAAA,gBAC9B,GAAG;AAAA,cACL;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,GAAG,KAAK;AAAA,UACR,eAAe;AAAA,YACb,GAAG,KAAK,OAAO;AAAA,YACf,CAAC,YAAY,GAAG,GAAG;AAAA,UACrB;AAAA,QACF;AAAA,QACA,KAAK;AAAA,QACL,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,SAAS,OAAO,GAAG;AACrB,YAAM,CAAC,eAAe,cAAc,IAAI;AACxC,YAAM,EAAC,WAAU,IAAI;AACrB,YAAM,iBAAiB,cAAc;AACrC,YAAM,KAAK;AAAA,QACT,KAAK,cAAc;AAAA,UACjB,KAAK;AAAA,UACL,KAAK;AAAA,UACL;AAAA,UACA;AAAA,YACE,OAAO;AAAA,YACP,OAAO;AAAA,UACT;AAAA,UACA;AAAA,YACE,eAAe,CAAC;AAAA,YAChB,UAAU,eAAe,gBAAgB;AAAA,UAC3C;AAAA,UACA,KAAK;AAAA,UACL;AAAA,QACF;AAAA,MACF;AAEA,aAAO,cAAc,cAAc,WAAW,GAAG,sBAAsB;AACvE,aAAO,cAAc,cAAc,SAAS,GAAG,sBAAsB;AACrE,aAAO,cAAc,eAAe,WAAW,GAAG,sBAAsB;AACxE,aAAO,cAAc,eAAe,SAAS,GAAG,sBAAsB;AAEtE,aAAO,KAAK,cAAc;AAAA,QACxB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL;AAAA,UACE,GAAG,KAAK;AAAA,UACR,SAAS;AAAA,YACP,GAAI,KAAK,KAAK,WAAW,CAAC;AAAA,YAC1B;AAAA,cACE,QAAQ,KAAK;AAAA,cACb,aAAa;AAAA,gBACX,aAAa,cAAc;AAAA,gBAC3B,YAAY,cAAc;AAAA,cAC5B;AAAA,cACA,QAAQ;AAAA,cACR,UAAU;AAAA,gBACR,OAAO;AAAA,gBACP,OAAO;AAAA,gBACP,SAAS;AAAA,kBACP,KAAK,QAAQ,OAAO,cAAc;AAAA,kBAClC;AAAA,gBACF;AAAA,gBACA,SAAS;AAAA,kBACP;AAAA,oBACE,QAAQ,KAAK;AAAA,oBACb,aAAa;AAAA,sBACX,aAAa,eAAe;AAAA,sBAC5B,YAAY,eAAe;AAAA,oBAC7B;AAAA,oBACA,UAAU;AAAA,sBACR,KAAK,QAAQ,OAAO,UAAU;AAAA,sBAC9B,GAAG;AAAA,oBACL;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,GAAG,KAAK;AAAA,UACR,eAAe;AAAA,YACb,GAAG,KAAK,OAAO;AAAA,YACf,CAA
C,YAAY,GAAG,GAAG;AAAA,UACrB;AAAA,QACF;AAAA,QACA,KAAK;AAAA,QACL,KAAK;AAAA,MACP;AAAA,IACF;AAEA,UAAM,IAAI,MAAM,wBAAwB,YAAY,EAAE;AAAA,EACxD;AAAA,EAEA,QAAQ,CACN,0BACA,WACA,UACoC;AACpC,QAAI;AAEJ,QAAI,OAAO,6BAA6B,YAAY;AAClD,aAAO;AAAA,QACL,IAAI,kBAAkB,KAAK,OAAO;AAAA,MAIpC;AAAA,IACF,OAAO;AACL,aAAO,cAAc,QAAW,mBAAmB;AACnD,aAAO,IAAI,0BAA0B,WAAW,KAAK;AAAA,IACvD;AAEA,UAAM,gBAAgB,KAAK,KAAK;AAChC,QAAI,eAAe;AACjB,aAAO,IAAI,eAAe,IAAI;AAAA,IAChC;AAEA,UAAM,QAAQ,kBAAkB,IAAI;AAEpC,QAAI,KAAK,YAAY,UAAU;AAG7B,wBAAkB,KAAK;AAAA,IACzB;AAEA,WAAO,KAAK,cAAc;AAAA,MACxB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,QACE,GAAG,KAAK;AAAA,QACR;AAAA,MACF;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA,EAEA,QAAQ,CACN,KACA,SAEA,KAAK,cAAc;AAAA,IACjB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL;AAAA,MACE,GAAG,KAAK;AAAA,MACR,OAAO;AAAA,QACL;AAAA,QACA,WAAW,CAAC,MAAM;AAAA,MACpB;AAAA,IACF;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,EACP;AAAA,EAEF,QAAQ,CAAC,UAAmD;AAC1D,QAAI,QAAQ,GAAG;AACb,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AACA,SAAK,QAAQ,OAAO,OAAO;AACzB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AACA,QAAI,KAAK,kBAAkB;AACzB,YAAM,IAAI;AAAA,QACR,gGACE,KAAK;AAAA,MACT;AAAA,IACF;AAEA,WAAO,KAAK,cAAc;AAAA,MACxB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,QACE,GAAG,KAAK;AAAA,QACR;AAAA,MACF;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA,EAEA,UAAU,CACR,OACA,cACoC;AACpC,QAAI,KAAK,kBAAkB;AACzB,YAAM,IAAI;AAAA,QACR,mGACE,KAAK;AAAA,MACT;AAAA,IACF;AACA,WAAO,KAAK,cAAc;AAAA,MACxB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,QACE,GAAG,KAAK;AAAA,QACR,SAAS,CAAC,GAAI,KAAK,KAAK,WAAW,CAAC,GAAI,CAAC,OAAiB,SAAS,CAAC;AAAA,MACtE;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA,EAEU,UAAU,CAClB,cACA,IACA,YACc;AACd,SAAK,OAAO,OAAK;AACjB,UAAM,OAAO,SAAS,QAAQ;AAC9B,UAAM,UAAU,KAAK,QAAQ,cAAc,KAAK,UAAU,EAAE,YAAY;AACxE,WAAO,SAAS,sBAAsB;AAEtC,QAAI,SAAS,OAAO,GAAG;AACrB,YAAM,EAAC,YAAY,aAAa,UAAS,IAAI,QAAQ,CAAC;AACtD,aAAO,cAAc,WAAW,GAAG,sBAAsB;AACzD,aAAO,cAAc,SAAS,GAAG,sBAAsB;AAEvD,YAAM,KAAK;AAAA,QACT,KAAK,cAAc;AAAA,UACjB,KAAK;AAAA,UACL,KAAK;AAAA,UACL;AAAA,UACA;AAAA,YACE,OAAO;AAAA,YACP,OAAO,GAAG,WAAW,GAAG,YAAY;AAAA,UACtC;AAAA,UACA;AAAA,UACA,KAAK;AAAA,UACL;AAAA,QACF;AAAA,MACF;AACA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS;AAAA,UACP,QAAQ,KAAK;AAAA,UACb,aAAa;AAAA,YACX,aAAa;AAAA,YACb,YAAY;AAAA,UACd;AAAA,UACA,UAAU;AAAA,YACR,KAAK,QAAQ,OAAO,UAAU;AAAA,YAC9B,GAAG;AAAA,UACL;AAAA,QACF;AAAA,QACA,IAAI;AAAA,QACJ;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,OAAO,GAAG;AACrB,YAAM,CAAC,eAAe,cAAc,IAAI;AACxC,aAAO,cAAc,cAAc,WAAW,GAAG,sBAAsB;AACvE,aAAO,cAAc,cAAc,SAAS,GAAG,sBAAsB;AACrE,aAAO,cAAc,eAAe,WAAW,GAAG,sBAAsB;AACxE,aAAO,cAAc,eAAe,SAAS,GAAG,sBAAsB;AACtE,YAAM,EAAC,WAAU,IAAI;AACrB,YAAM,iBAAiB,cAAc;AACrC,YAAM,cAAc;AAAA,QAClB,KAAK,cAAc;AAAA,UACjB,KAAK;AAAA,UACL,KAAK;AAAA,UACL;AAAA,UACA;AAAA,YACE,OAAO;AAAA,YACP,OAAO,GAAG,WAAW,WAAW,YAAY;AAAA,UAC9C;AAAA,UACA;AAAA,UACA,KAAK;AAAA,UACL;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS;AAAA,UACP,QAAQ,KAAK;AAAA,UACb,aAAa;AAAA,YACX,aAAa,cAAc;AAAA,YAC3B,YAAY,cAAc;AAAA,UAC5B;AAAA,UACA,UAAU;AAAA,YACR,OAAO;AAAA,YACP,OAAO,GAAG,WAAW,GAAG,YAAY;AAAA,YACpC,SAAS;AAAA,cACP,KAAK,QAAQ,OAAO,cAAc;AAAA,cAClC;AAAA,YACF;AAAA,YACA,OAAO;AAAA,cACL,MAAM;AAAA,cACN,SAAS;AAAA,gBACP,QAAQ,KAAK;AAAA,gBACb,aAAa;AAAA,kBACX,aAAa,eAAe;AAAA,kBAC5B,YAAY,eAAe;AAAA,gBAC7B;AAAA,gBAEA,UAAU;AAAA,kBACR,KAAK,QAAQ,OAAO,UAAU;AAAA,kBAC7B,YAAoC;AAAA,gBACvC;AAAA,cACF;AAAA,cACA,IAAI;AAAA,cACJ;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA,IAAI;AAAA,QACJ;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI,MAAM,wBAAwB,YAAY,EAAE;AAAA,EACxD;AAAA,EAEA;AAAA,EAEU,eAAoB;
AAC5B,QAAI,CAAC,KAAK,eAAe;AACvB,YAAM,eAAe;AAAA,QACnB,KAAK,QAAQ,OAAO,KAAK,UAAU;AAAA,QACnC,KAAK,KAAK;AAAA,MACZ;AACA,UAAI,KAAK,KAAK,OAAO;AACnB,cAAM,EAAC,IAAG,IAAI,KAAK,KAAK;AACxB,cAAM,cAAgC,CAAC;AACvC,mBAAW,CAAC,KAAK,KAAK,cAAc;AAClC,sBAAY,KAAK,IAAI,IAAI,KAAK;AAAA,QAChC;AACA,aAAK,gBAAgB;AAAA,UACnB,GAAG,KAAK;AAAA,UACR,OAAO;AAAA,YACL,GAAG,KAAK,KAAK;AAAA,YACb,KAAK;AAAA,UACP;AAAA,UACA,SAAS;AAAA,QACX;AAAA,MACF,OAAO;AACL,aAAK,gBAAgB;AAAA,UACnB,GAAG,KAAK;AAAA,UACR,SAAS;AAAA,YACP,KAAK,QAAQ,OAAO,KAAK,UAAU;AAAA,YACnC,KAAK,KAAK;AAAA,UACZ;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AAgBF;AAEA,IAAM,qBAAqB,OAAO;AAM3B,IAAM,YAAN,MAAM,mBAIH,cAAwC;AAAA,EACvC;AAAA,EAET,YACE,UACA,QACA,WACA,MAAW,EAAC,OAAO,UAAS,GAC5B,SAAiB,eACjB,SAAiB,UACjB,eACA,iBACA;AACA;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,KAAK,kBAAkB,IAAS;AAC9B,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA,EAEA,CAAW,cAAc,EAKvB,UACA,QACA,WACA,KACA,QACA,eACA,iBACqC;AACrC,WAAO,IAAI;AAAA,MACT;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,YACE,cACA,MAAW,gBACR;AACH,UAAM,WAAW;AAAA,MACf,KAAK;AAAA,MACL;AAAA,IACF;AACA,QAAI;AACJ,QAAI,OAAO,iBAAiB,YAAY;AACtC,gBAAU;AAAA,IACZ,OAAO;AACL,YAAM,gBAAgB;AAAA,IACxB;AACA,UAAM,MAAM,KAAK,aAAa;AAC9B,UAAM,UAAU,KAAK,gBACjB,kBAAkB,KAAK,cAAc,MAAM,KAAK,cAAc,IAAI,IAClE,KAAK,KAAK;AACd,UAAM,wBAAwB,SAAe;AAC7C,QAAI,gBAAwC,SAAS;AACrD,UAAM,YAAY,CAAC,WAAgB;AACjC,WAAK,gBACD,SAAS,kBAAkB,KAAK,eAAe,MAAM,IACrD,SAAS,kBAAkB,KAAK,MAAM;AAAA,IAC5C;AAEA,UAAM,cAA2B,CAAC,KAAK,UAAU;AAC/C,UAAI,OAAO;AACT,8BAAsB,OAAO,KAAK;AAClC,wBAAgB;AAChB;AAAA,MACF;AAEA,UAAI,KAAK;AACP,iBAAS;AAAA,UACP;AAAA,UACA,YAAY,IAAI,IAAI;AAAA,UACpB;AAAA,UACA;AAAA,QACF;AACA,wBAAgB;AAChB,8BAAsB,QAAQ,IAAI;AAAA,MACpC;AAAA,IACF;AAEA,QAAI;AACJ,UAAM,YAAY,MAAM;AACtB,YAAM,QAAQ;AACd,6BAAuB;AACvB,uBAAiB;AAAA,IACnB;AAEA,UAAM,KAAK,YAAY,IAAI;AAE3B,UAAM,mBAAmB,KAAK,gBAC1B,SAAS,eAAe,KAAK,KAAK,eAAe,KAAK,WAAW,IACjE,SAAS,eAAe,KAAK,KAAK,WAAW;AAEjD,UAAM,QAAQ,cAAc,KAAK,UAAU,OAAO;AAElD,UAAM,OAAO,SAAS;AAAA,MAAiB,OACpC,WAAW;AAAA,QACV;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA,QAAM;AACJ,iCAAuB,SAAS,oBAAoB,EAAE;AAAA,QACxD;AAAA,QACA,iBAAiB,sBAAsB;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,aAAS;AAAA,MACP;AAAA,MACA,YAAY,IAAI,IAAI;AAAA,MACpB;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,SAAuD;AACzD,UAAM,WAAW;AAAA,MACf,KAAK;AAAA,MACL;AAAA,IACF;AACA,aAAS,sBAAsB,OAAO;AACtC,UAAMC,KAAuC,KAAK,YAAY,SAAS,GAAG;AAC1E,QAAI,SAAS,SAAS,YAAY;AAChC,aAAO,IAAI,QAAQ,aAAW;AAC5B,QAAAA,GAAE,YAAY,CAAC,MAAM,SAAS;AAC5B,cAAI,SAAS,YAAY;AACvB,YAAAA,GAAE,QAAQ;AACV,oBAAQ,IAA8B;AAAA,UACxC,WAAW,SAAS,SAAS;AAC3B,YAAAA,GAAE,QAAQ;AACV,oBAAQ,QAAQ,OAAO,IAAI,CAAC;AAAA,UAC9B;AAAA,QACF,CAAC;AAAA,MACH,CAAC;AAAA,IACH;AAEA,aAAS;AAET,UAAM,MAAMA,GAAE;AACd,IAAAA,GAAE,QAAQ;AACV,WAAO,QAAQ,QAAQ,GAAG;AAAA,EAC5B;AAAA,EAEA,QAAQ,SAGN;AACA,UAAM,WAAW;AAAA,MACf,KAAK;AAAA,MACL;AAAA,IACF;AACA,UAAM,MAAM,SAAS,OAAO;AAC5B,UAAM,MAAM,KAAK,aAAa;AAC9B,UAAM,EAAC,SAAS,SAAS,SAAQ,IAAI,SAAe;AACpD,QAAI,KAAK,eAAe;AACtB,YAAMC,WAAU,SAAS;AAAA,QACvB;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA,SAAO;AACL,cAAI,KAAK;AACP,oBAAQ;AAAA,UACV;AAAA,QACF;AAAA,MACF;AACA,aAAO;AAAA,QACL,SAAAA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAAU,SAAS,eAAe,KAAK,KAAK,SAAO;AACvD,UAAI,KAAK;AACP,gBAAQ;AAAA,MACV;AAAA,IACF,CAAC;AACD,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,eACP,QACA,SACU;AACV,YAAU,WAAW,CAAC;AACtB,QAAM,EAAC,WAAU,IAAI;AACrB,QAAM,mBAAmB,IAAI,IAAI,UAAU;AAE3C,aAAW,CAAC,KAAK,KAAK,SAAS;AAC7B,qBAAiB,OAAO,KAAK;AAAA,EAC/B;AAEA,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO;AAAA,EACT;AAEA,SAAO;
AAAA,IACL,GAAG;AAAA,IACH,GAAG,CAAC,GAAG,gBAAgB,EAAE,IAAI,SAAO,CAAC,KAAK,KAAK,CAAoB;AAAA,EACrE;AACF;AAEA,SAAS,oBAAoB,QAAqB,KAAe;AAC/D,SAAO;AAAA,IACL,GAAG;AAAA,IACH,SAAS,eAAe,QAAQ,IAAI,OAAO;AAAA,EAC7C;AACF;AAEA,SAAS,iBAKP,QACA,OACA,QACA,WACA,qBACA,eACA,WACmC;AACnC,QAAMD,KAAI,IAAI;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,EAAAA,GAAE,YAAY;AACd,sBAAoB,MAAM;AACxB,IAAAA,GAAE,MAAM;AAAA,EACV,CAAC;AACD,SAAOA;AACT;AAEA,SAAS,cAAc,OAAgD;AACrE,SAAO,MAAM,QAAQ,KAAK,KAAK,MAAM,UAAU;AACjD;",
6
6
  "names": ["assert", "v", "path", "toDisplay", "err", "atPath", "v", "path", "assert", "hash", "binarySearch", "v", "getSizeOfEntry", "hash", "binarySearch", "array", "entries", "getSizeOfEntry", "splice", "hash", "binarySearch", "diff", "v", "hash", "v", "hash", "hash", "v", "v", "createChunk", "value", "hash", "assert", "hash", "v", "value", "entries", "hash", "createChunk", "chunk", "diff", "valueHash", "assert", "hash", "valueHash", "indexRecords", "v", "v", "compareUTF8", "compareUTF8", "v", "node", "relationship", "binarySearch", "v", "v", "compareUTF8", "defined", "table", "val", "compareUTF8", "c", "array", "hash", "hash", "z1", "z2", "hash", "v", "mergeRelationships", "change", "key", "cmp", "cmp", "flattened", "array", "impl", "not", "take", "filter", "condition", "v", "cleanup"]
7
7
  }