@korajs/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-ZMUY7AVV.js +421 -0
- package/dist/chunk-ZMUY7AVV.js.map +1 -0
- package/dist/events-D_kDPDC9.d.cts +324 -0
- package/dist/events-D_kDPDC9.d.ts +324 -0
- package/dist/index.cjs +1153 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +401 -0
- package/dist/index.d.ts +401 -0
- package/dist/index.js +707 -0
- package/dist/index.js.map +1 -0
- package/dist/internal.cjs +397 -0
- package/dist/internal.cjs.map +1 -0
- package/dist/internal.d.cts +58 -0
- package/dist/internal.d.ts +58 -0
- package/dist/internal.js +55 -0
- package/dist/internal.js.map +1 -0
- package/package.json +49 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/types.ts","../src/identifiers/uuid-v7.ts","../src/schema/define.ts","../src/schema/sql-gen.ts","../src/schema/types.ts","../src/schema/validation.ts","../src/version-vector/version-vector.ts"],"sourcesContent":["/**\n * Hybrid Logical Clock timestamp.\n * Provides a total order that respects causality without requiring synchronized clocks.\n */\nexport interface HLCTimestamp {\n\t/** Physical wall-clock time in milliseconds */\n\twallTime: number\n\t/** Logical counter. Increments when wallTime hasn't changed since last event. */\n\tlogical: number\n\t/** Node ID for tie-breaking. Ensures total order even with identical wall+logical. */\n\tnodeId: string\n}\n\n/** The three mutation types an operation can represent */\nexport type OperationType = 'insert' | 'update' | 'delete'\n\n/**\n * The atomic unit of the entire system. Every mutation produces an Operation.\n * Operations are IMMUTABLE and CONTENT-ADDRESSED.\n */\nexport interface Operation {\n\t/** SHA-256 hash of (type + collection + recordId + data + timestamp + nodeId). Content-addressed. */\n\tid: string\n\t/** UUID v7 of the originating device. Time-sortable. */\n\tnodeId: string\n\t/** What happened */\n\ttype: OperationType\n\t/** Which collection (from schema) */\n\tcollection: string\n\t/** ID of the affected record. UUID v7 for inserts, existing ID for update/delete. */\n\trecordId: string\n\t/** Field values. null for delete. For updates, contains ONLY changed fields. */\n\tdata: Record<string, unknown> | null\n\t/** For updates: previous values of changed fields (enables 3-way merge). null for insert/delete. */\n\tpreviousData: Record<string, unknown> | null\n\t/** Hybrid Logical Clock timestamp. Used for causal ordering. */\n\ttimestamp: HLCTimestamp\n\t/** Monotonically increasing per node. Used in version vectors. */\n\tsequenceNumber: number\n\t/** Operation IDs this operation causally depends on (direct parents in the DAG). 
*/\n\tcausalDeps: string[]\n\t/** Schema version at time of creation. Used for migration transforms. */\n\tschemaVersion: number\n}\n\n/**\n * Input for creating an operation (before id and timestamp are assigned).\n */\nexport interface OperationInput {\n\tnodeId: string\n\ttype: OperationType\n\tcollection: string\n\trecordId: string\n\tdata: Record<string, unknown> | null\n\tpreviousData: Record<string, unknown> | null\n\tsequenceNumber: number\n\tcausalDeps: string[]\n\tschemaVersion: number\n}\n\n/** Version vector: maps nodeId to the max sequence number seen from that node */\nexport type VersionVector = Map<string, number>\n\n/** Field kinds supported by the schema system */\nexport type FieldKind =\n\t| 'string'\n\t| 'number'\n\t| 'boolean'\n\t| 'timestamp'\n\t| 'richtext'\n\t| 'enum'\n\t| 'array'\n\n/**\n * Descriptor produced by the type builder (t.string(), t.number(), etc.).\n * Represents a fully configured field definition.\n */\nexport interface FieldDescriptor {\n\tkind: FieldKind\n\trequired: boolean\n\tdefaultValue: unknown\n\tauto: boolean\n\tenumValues: readonly string[] | null\n\titemKind: FieldKind | null\n}\n\n/**\n * Definition of a collection within the schema.\n */\nexport interface CollectionDefinition {\n\tfields: Record<string, FieldDescriptor>\n\tindexes: string[]\n\tconstraints: Constraint[]\n\tresolvers: Record<string, CustomResolver>\n}\n\n/** Custom resolver function for tier 3 merge resolution */\nexport type CustomResolver = (local: unknown, remote: unknown, base: unknown) => unknown\n\n/**\n * Constraint for tier 2 conflict resolution.\n */\nexport interface Constraint {\n\ttype: 'unique' | 'capacity' | 'referential'\n\tfields: string[]\n\twhere?: Record<string, unknown>\n\tonConflict:\n\t\t| 'first-write-wins'\n\t\t| 'last-write-wins'\n\t\t| 'priority-field'\n\t\t| 'server-decides'\n\t\t| 'custom'\n\tpriorityField?: string\n\tresolve?: (local: unknown, remote: unknown, base: unknown) => unknown\n}\n\n/** Relation type between 
collections */\nexport type RelationType = 'one-to-one' | 'one-to-many' | 'many-to-one' | 'many-to-many'\n\n/** On-delete behavior for relations */\nexport type OnDeleteAction = 'cascade' | 'set-null' | 'restrict' | 'no-action'\n\n/**\n * Definition of a relation between two collections.\n */\nexport interface RelationDefinition {\n\tfrom: string\n\tto: string\n\ttype: RelationType\n\tfield: string\n\tonDelete: OnDeleteAction\n}\n\n/**\n * The complete schema definition produced by defineSchema().\n */\nexport interface SchemaDefinition {\n\tversion: number\n\tcollections: Record<string, CollectionDefinition>\n\trelations: Record<string, RelationDefinition>\n}\n\n/**\n * Merge strategies available for auto-merge and constraints.\n */\nexport const MERGE_STRATEGIES = [\n\t'auto-merge',\n\t'lww',\n\t'first-write-wins',\n\t'server-decides',\n\t'custom',\n] as const\nexport type MergeStrategy = (typeof MERGE_STRATEGIES)[number]\n\n/**\n * Connection quality levels for adaptive sync.\n */\nexport const CONNECTION_QUALITIES = ['excellent', 'good', 'fair', 'poor', 'offline'] as const\nexport type ConnectionQuality = (typeof CONNECTION_QUALITIES)[number]\n\n/**\n * Injectable time source for deterministic testing of clocks.\n */\nexport interface TimeSource {\n\tnow(): number\n}\n\n/**\n * Injectable random source for deterministic testing of UUID generation.\n */\nexport interface RandomSource {\n\tgetRandomValues<T extends ArrayBufferView>(array: T): T\n}\n","import type { RandomSource } from '../types'\n\nconst defaultRandom: RandomSource = globalThis.crypto as RandomSource\n\n/**\n * Generates a UUID v7 per RFC 9562.\n * UUID v7 encodes a Unix timestamp in milliseconds in the most significant 48 bits,\n * making UUIDs time-sortable while remaining globally unique.\n *\n * @param timestamp - Unix timestamp in milliseconds (defaults to Date.now())\n * @param randomSource - Injectable random source for deterministic testing\n * @returns A UUID v7 string in standard 
8-4-4-4-12 format\n *\n * @example\n * ```typescript\n * const id = generateUUIDv7()\n * // \"018f3a5c-7e00-7123-abcd-1234567890ab\"\n * ```\n */\nexport function generateUUIDv7(\n\ttimestamp: number = Date.now(),\n\trandomSource: RandomSource = defaultRandom,\n): string {\n\tconst bytes = new Uint8Array(16)\n\trandomSource.getRandomValues(bytes)\n\n\t// Encode 48-bit timestamp in bytes 0-5\n\tconst ms = Math.max(0, Math.floor(timestamp))\n\tbytes[0] = (ms / 2 ** 40) & 0xff\n\tbytes[1] = (ms / 2 ** 32) & 0xff\n\tbytes[2] = (ms / 2 ** 24) & 0xff\n\tbytes[3] = (ms / 2 ** 16) & 0xff\n\tbytes[4] = (ms / 2 ** 8) & 0xff\n\tbytes[5] = ms & 0xff\n\n\t// Set version 7 (0111) in bits 48-51\n\tbytes[6] = ((bytes[6] ?? 0) & 0x0f) | 0x70\n\n\t// Set variant 10 in bits 64-65\n\tbytes[8] = ((bytes[8] ?? 0) & 0x3f) | 0x80\n\n\treturn formatUUID(bytes)\n}\n\n/**\n * Extracts the Unix timestamp in milliseconds from a UUID v7.\n *\n * @param uuid - A UUID v7 string\n * @returns The encoded Unix timestamp in milliseconds\n */\nexport function extractTimestamp(uuid: string): number {\n\tconst hex = uuid.replace(/-/g, '')\n\t// First 12 hex chars = 48 bits of timestamp\n\tconst high = Number.parseInt(hex.slice(0, 8), 16)\n\tconst low = Number.parseInt(hex.slice(8, 12), 16)\n\treturn high * 2 ** 16 + low\n}\n\n/**\n * Validates whether a string is a valid UUID v7.\n * Checks format, version (7), and variant (10xx).\n *\n * @param uuid - String to validate\n * @returns true if the string is a valid UUID v7\n */\nexport function isValidUUIDv7(uuid: string): boolean {\n\tif (!/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(uuid)) {\n\t\treturn false\n\t}\n\tconst hex = uuid.replace(/-/g, '')\n\t// Version must be 7 (nibble at position 12)\n\tif (hex[12] !== '7') return false\n\t// Variant must be 10xx (nibble at position 16 must be 8, 9, a, or b)\n\tconst variantNibble = Number.parseInt(hex[16] ?? 
'0', 16)\n\treturn variantNibble >= 0x8 && variantNibble <= 0xb\n}\n\nfunction formatUUID(bytes: Uint8Array): string {\n\tconst hex = Array.from(bytes, (b) => b.toString(16).padStart(2, '0')).join('')\n\treturn `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20, 32)}`\n}\n","import { SchemaValidationError } from '../errors/errors'\nimport type {\n\tCollectionDefinition,\n\tConstraint,\n\tCustomResolver,\n\tFieldDescriptor,\n\tRelationDefinition,\n\tSchemaDefinition,\n} from '../types'\nimport type { FieldBuilder } from './types'\n\n/** Valid collection name pattern: lowercase, alphanumeric + underscore, starting with letter */\nconst COLLECTION_NAME_RE = /^[a-z][a-z0-9_]*$/\n\n/** Valid field name pattern: same as collection name */\nconst FIELD_NAME_RE = /^[a-z][a-z0-9_]*$/\n\n/** Reserved field names that cannot be used in schemas */\nconst RESERVED_FIELDS = new Set(['id', '_created_at', '_updated_at', '_deleted'])\n\n/**\n * Input shape for defineSchema() — what the developer writes.\n */\nexport interface SchemaInput {\n\tversion: number\n\tcollections: Record<string, CollectionInput>\n\trelations?: Record<string, RelationInput>\n}\n\nexport interface CollectionInput {\n\tfields: Record<string, FieldBuilder<any, any, any>>\n\tindexes?: string[]\n\tconstraints?: ConstraintInput[]\n\tresolve?: Record<string, CustomResolver>\n}\n\nexport interface ConstraintInput {\n\ttype: 'unique' | 'capacity' | 'referential'\n\tfields: string[]\n\twhere?: Record<string, unknown>\n\tonConflict:\n\t\t| 'first-write-wins'\n\t\t| 'last-write-wins'\n\t\t| 'priority-field'\n\t\t| 'server-decides'\n\t\t| 'custom'\n\tpriorityField?: string\n\tresolve?: (local: unknown, remote: unknown, base: unknown) => unknown\n}\n\nexport interface RelationInput {\n\tfrom: string\n\tto: string\n\ttype: 'one-to-one' | 'one-to-many' | 'many-to-one' | 'many-to-many'\n\tfield: string\n\tonDelete: 'cascade' | 'set-null' | 'restrict' | 'no-action'\n}\n\n/**\n * 
Validates and builds a SchemaDefinition from developer input.\n * This is the primary developer-facing function for defining a schema.\n *\n * @param input - The schema definition using type builders\n * @returns A validated SchemaDefinition ready for use by the framework\n * @throws {SchemaValidationError} If the schema is invalid\n *\n * @example\n * ```typescript\n * import { defineSchema, t } from '@korajs/core'\n *\n * const schema = defineSchema({\n * version: 1,\n * collections: {\n * todos: {\n * fields: {\n * title: t.string(),\n * completed: t.boolean().default(false),\n * }\n * }\n * }\n * })\n * ```\n */\n/**\n * Schema definition with a phantom type brand preserving the original input shape.\n * The `__input` property exists only at the type level for inference — no runtime cost.\n */\nexport type TypedSchemaDefinition<T extends SchemaInput = SchemaInput> = SchemaDefinition & {\n\treadonly __input: T\n}\n\nexport function defineSchema<const T extends SchemaInput>(input: T): TypedSchemaDefinition<T> {\n\tvalidateVersion(input.version)\n\n\tconst collections: Record<string, CollectionDefinition> = {}\n\n\tfor (const [name, collectionInput] of Object.entries(input.collections)) {\n\t\tvalidateCollectionName(name)\n\t\tcollections[name] = buildCollection(name, collectionInput)\n\t}\n\n\tif (Object.keys(collections).length === 0) {\n\t\tthrow new SchemaValidationError('Schema must define at least one collection')\n\t}\n\n\tconst relations: Record<string, RelationDefinition> = {}\n\tif (input.relations) {\n\t\tfor (const [name, relationInput] of Object.entries(input.relations)) {\n\t\t\tvalidateRelation(name, relationInput, collections)\n\t\t\trelations[name] = { ...relationInput }\n\t\t}\n\t}\n\n\treturn { version: input.version, collections, relations } as TypedSchemaDefinition<T>\n}\n\nfunction validateVersion(version: number): void {\n\tif (typeof version !== 'number' || !Number.isInteger(version) || version < 1) {\n\t\tthrow new 
SchemaValidationError('Schema version must be a positive integer', {\n\t\t\treceived: version,\n\t\t})\n\t}\n}\n\nfunction validateCollectionName(name: string): void {\n\tif (!COLLECTION_NAME_RE.test(name)) {\n\t\tthrow new SchemaValidationError(\n\t\t\t`Collection name \"${name}\" is invalid. Must be lowercase, start with a letter, and contain only letters, numbers, and underscores.`,\n\t\t\t{ collection: name },\n\t\t)\n\t}\n}\n\nfunction buildCollection(name: string, input: CollectionInput): CollectionDefinition {\n\tconst fields: Record<string, FieldDescriptor> = {}\n\n\tif (!input.fields || Object.keys(input.fields).length === 0) {\n\t\tthrow new SchemaValidationError(`Collection \"${name}\" must define at least one field`, {\n\t\t\tcollection: name,\n\t\t})\n\t}\n\n\tfor (const [fieldName, builder] of Object.entries(input.fields)) {\n\t\tvalidateFieldName(name, fieldName)\n\t\tfields[fieldName] = builder._build()\n\t}\n\n\tconst indexes = input.indexes ?? []\n\tfor (const indexField of indexes) {\n\t\tif (!(indexField in fields)) {\n\t\t\tthrow new SchemaValidationError(\n\t\t\t\t`Index field \"${indexField}\" does not exist in collection \"${name}\". Available fields: ${Object.keys(fields).join(', ')}`,\n\t\t\t\t{ collection: name, field: indexField },\n\t\t\t)\n\t\t}\n\t}\n\n\tconst constraints: Constraint[] = []\n\tif (input.constraints) {\n\t\tfor (const constraintInput of input.constraints) {\n\t\t\tvalidateConstraint(name, constraintInput, fields)\n\t\t\tconstraints.push({ ...constraintInput })\n\t\t}\n\t}\n\n\tconst resolvers: Record<string, CustomResolver> = {}\n\tif (input.resolve) {\n\t\tfor (const [fieldName, resolver] of Object.entries(input.resolve)) {\n\t\t\tif (!(fieldName in fields)) {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Resolver for field \"${fieldName}\" does not exist in collection \"${name}\". 
Available fields: ${Object.keys(fields).join(', ')}`,\n\t\t\t\t\t{ collection: name, field: fieldName },\n\t\t\t\t)\n\t\t\t}\n\t\t\tif (typeof resolver !== 'function') {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Resolver for field \"${fieldName}\" in collection \"${name}\" must be a function`,\n\t\t\t\t\t{ collection: name, field: fieldName },\n\t\t\t\t)\n\t\t\t}\n\t\t\tresolvers[fieldName] = resolver\n\t\t}\n\t}\n\n\treturn { fields, indexes, constraints, resolvers }\n}\n\nfunction validateFieldName(collection: string, fieldName: string): void {\n\tif (RESERVED_FIELDS.has(fieldName)) {\n\t\tthrow new SchemaValidationError(\n\t\t\t`Field name \"${fieldName}\" is reserved in collection \"${collection}\". Reserved fields: ${[...RESERVED_FIELDS].join(', ')}`,\n\t\t\t{ collection, field: fieldName },\n\t\t)\n\t}\n\tif (!FIELD_NAME_RE.test(fieldName)) {\n\t\tthrow new SchemaValidationError(\n\t\t\t`Field name \"${fieldName}\" in collection \"${collection}\" is invalid. Must be lowercase, start with a letter, and contain only letters, numbers, and underscores.`,\n\t\t\t{ collection, field: fieldName },\n\t\t)\n\t}\n}\n\nfunction validateConstraint(\n\tcollection: string,\n\tconstraint: ConstraintInput,\n\tfields: Record<string, FieldDescriptor>,\n): void {\n\tfor (const field of constraint.fields) {\n\t\tif (!(field in fields)) {\n\t\t\tthrow new SchemaValidationError(\n\t\t\t\t`Constraint references field \"${field}\" which does not exist in collection \"${collection}\". 
Available fields: ${Object.keys(fields).join(', ')}`,\n\t\t\t\t{ collection, field },\n\t\t\t)\n\t\t}\n\t}\n\n\tif (constraint.onConflict === 'priority-field' && !constraint.priorityField) {\n\t\tthrow new SchemaValidationError(\n\t\t\t`Constraint with \"priority-field\" onConflict strategy in collection \"${collection}\" requires a priorityField`,\n\t\t\t{ collection },\n\t\t)\n\t}\n\n\tif (constraint.onConflict === 'priority-field' && constraint.priorityField) {\n\t\tif (!(constraint.priorityField in fields)) {\n\t\t\tthrow new SchemaValidationError(\n\t\t\t\t`Constraint priorityField \"${constraint.priorityField}\" does not exist in collection \"${collection}\"`,\n\t\t\t\t{ collection, field: constraint.priorityField },\n\t\t\t)\n\t\t}\n\t}\n\n\tif (constraint.onConflict === 'custom' && typeof constraint.resolve !== 'function') {\n\t\tthrow new SchemaValidationError(\n\t\t\t`Constraint with \"custom\" onConflict strategy in collection \"${collection}\" requires a resolve function`,\n\t\t\t{ collection },\n\t\t)\n\t}\n}\n\nfunction validateRelation(\n\tname: string,\n\trelation: RelationInput,\n\tcollections: Record<string, CollectionDefinition>,\n): void {\n\tif (!(relation.from in collections)) {\n\t\tthrow new SchemaValidationError(\n\t\t\t`Relation \"${name}\" references source collection \"${relation.from}\" which does not exist. Available collections: ${Object.keys(collections).join(', ')}`,\n\t\t\t{ relation: name, collection: relation.from },\n\t\t)\n\t}\n\n\tif (!(relation.to in collections)) {\n\t\tthrow new SchemaValidationError(\n\t\t\t`Relation \"${name}\" references target collection \"${relation.to}\" which does not exist. 
Available collections: ${Object.keys(collections).join(', ')}`,\n\t\t\t{ relation: name, collection: relation.to },\n\t\t)\n\t}\n\n\tconst fromCollection = collections[relation.from]\n\tif (fromCollection && !(relation.field in fromCollection.fields)) {\n\t\tthrow new SchemaValidationError(\n\t\t\t`Relation \"${name}\" references field \"${relation.field}\" which does not exist in collection \"${relation.from}\". Available fields: ${Object.keys(fromCollection.fields).join(', ')}`,\n\t\t\t{ relation: name, collection: relation.from, field: relation.field },\n\t\t)\n\t}\n}\n","import type {\n\tCollectionDefinition,\n\tFieldDescriptor,\n\tRelationDefinition,\n\tSchemaDefinition,\n} from '../types'\n\n/**\n * Generate CREATE TABLE and CREATE INDEX SQL for a single collection.\n *\n * @param collectionName - The collection name\n * @param collection - The collection definition\n * @param relations - Optional relations for FK references\n * @returns An array of SQL statements (CREATE TABLE + CREATE INDEX)\n */\nexport function generateSQL(\n\tcollectionName: string,\n\tcollection: CollectionDefinition,\n\trelations?: Record<string, RelationDefinition>,\n): string[] {\n\tconst statements: string[] = []\n\tconst columns: string[] = ['id TEXT PRIMARY KEY NOT NULL']\n\n\t// Track which fields already have indexes\n\tconst indexedFields = new Set(collection.indexes)\n\n\t// Collect FK fields for auto-indexing\n\tconst fkFields: string[] = []\n\n\tfor (const [fieldName, descriptor] of Object.entries(collection.fields)) {\n\t\tlet colDef = columnDefinition(fieldName, descriptor)\n\n\t\t// Add FK reference if a relation exists for this field\n\t\tif (relations) {\n\t\t\tfor (const rel of Object.values(relations)) {\n\t\t\t\tif (rel.from === collectionName && rel.field === fieldName) {\n\t\t\t\t\tcolDef += ` REFERENCES ${rel.to}(id)`\n\t\t\t\t\tfkFields.push(fieldName)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tcolumns.push(colDef)\n\t}\n\n\t// Auto metadata 
columns\n\tcolumns.push('_created_at INTEGER NOT NULL')\n\tcolumns.push('_updated_at INTEGER NOT NULL')\n\tcolumns.push('_deleted INTEGER NOT NULL DEFAULT 0')\n\n\tstatements.push(`CREATE TABLE IF NOT EXISTS ${collectionName} (\\n ${columns.join(',\\n ')}\\n)`)\n\n\t// Create indexes\n\tfor (const indexField of collection.indexes) {\n\t\tstatements.push(\n\t\t\t`CREATE INDEX IF NOT EXISTS idx_${collectionName}_${indexField} ON ${collectionName} (${indexField})`,\n\t\t)\n\t}\n\n\t// Auto-create indexes on FK columns not already indexed\n\tfor (const fkField of fkFields) {\n\t\tif (!indexedFields.has(fkField)) {\n\t\t\tstatements.push(\n\t\t\t\t`CREATE INDEX IF NOT EXISTS idx_${collectionName}_${fkField} ON ${collectionName} (${fkField})`,\n\t\t\t)\n\t\t}\n\t}\n\n\t// Per-collection operations log table\n\tstatements.push(\n\t\t`CREATE TABLE IF NOT EXISTS _kora_ops_${collectionName} (\n id TEXT PRIMARY KEY NOT NULL,\n node_id TEXT NOT NULL,\n type TEXT NOT NULL,\n record_id TEXT NOT NULL,\n data TEXT,\n previous_data TEXT,\n timestamp TEXT NOT NULL,\n sequence_number INTEGER NOT NULL,\n causal_deps TEXT NOT NULL,\n schema_version INTEGER NOT NULL\n)`,\n\t)\n\n\treturn statements\n}\n\n/**\n * Generate the full DDL for all collections plus metadata tables.\n *\n * @param schema - The complete schema definition\n * @returns An array of all SQL statements needed to initialize the database\n */\nexport function generateFullDDL(schema: SchemaDefinition): string[] {\n\tconst statements: string[] = []\n\n\t// Metadata table\n\tstatements.push(\n\t\t'CREATE TABLE IF NOT EXISTS _kora_meta (\\n' +\n\t\t\t' key TEXT PRIMARY KEY NOT NULL,\\n' +\n\t\t\t' value TEXT NOT NULL\\n' +\n\t\t\t')',\n\t)\n\n\t// Version vector table\n\tstatements.push(\n\t\t'CREATE TABLE IF NOT EXISTS _kora_version_vector (\\n' +\n\t\t\t' node_id TEXT PRIMARY KEY NOT NULL,\\n' +\n\t\t\t' sequence_number INTEGER NOT NULL\\n' +\n\t\t\t')',\n\t)\n\n\tfor (const [name, collection] of 
Object.entries(schema.collections)) {\n\t\tstatements.push(...generateSQL(name, collection, schema.relations))\n\t}\n\n\treturn statements\n}\n\nfunction columnDefinition(fieldName: string, descriptor: FieldDescriptor): string {\n\tconst sqlType = mapFieldType(descriptor)\n\tconst parts = [fieldName, sqlType]\n\n\tif (descriptor.required && descriptor.defaultValue === undefined && !descriptor.auto) {\n\t\tparts.push('NOT NULL')\n\t}\n\n\tif (descriptor.defaultValue !== undefined) {\n\t\tparts.push(`DEFAULT ${sqlDefault(descriptor.defaultValue)}`)\n\t}\n\n\t// CHECK constraint for enum fields\n\tif (descriptor.kind === 'enum' && descriptor.enumValues) {\n\t\tconst values = descriptor.enumValues.map((v) => `'${v}'`).join(', ')\n\t\tparts.push(`CHECK (${fieldName} IN (${values}))`)\n\t}\n\n\treturn parts.join(' ')\n}\n\nfunction mapFieldType(descriptor: FieldDescriptor): string {\n\tswitch (descriptor.kind) {\n\t\tcase 'string':\n\t\t\treturn 'TEXT'\n\t\tcase 'number':\n\t\t\treturn 'REAL'\n\t\tcase 'boolean':\n\t\t\treturn 'INTEGER'\n\t\tcase 'enum':\n\t\t\treturn 'TEXT'\n\t\tcase 'timestamp':\n\t\t\treturn 'INTEGER'\n\t\tcase 'array':\n\t\t\treturn 'TEXT' // JSON-serialized\n\t\tcase 'richtext':\n\t\t\treturn 'BLOB' // Yjs state\n\t}\n}\n\nfunction sqlDefault(value: unknown): string {\n\tif (value === null) return 'NULL'\n\tif (typeof value === 'string') return `'${value}'`\n\tif (typeof value === 'number') return String(value)\n\tif (typeof value === 'boolean') return value ? 
'1' : '0'\n\t// Arrays and objects are stored as JSON strings\n\treturn `'${JSON.stringify(value)}'`\n}\n","import type { FieldDescriptor, FieldKind } from '../types'\n\n/**\n * Base field builder implementing the builder pattern for schema field definitions.\n * Each builder is immutable — modifier methods return new builder instances.\n *\n * Type parameters track field metadata at the type level for inference:\n * - Kind: the field kind ('string', 'number', etc.)\n * - Req: whether the field is required (true = required on insert)\n * - Auto: whether the field is auto-populated (true = excluded from insert input)\n *\n * @example\n * ```typescript\n * t.string() // required string field\n * t.string().optional() // optional string field\n * t.string().default('hello') // string with default value\n * t.timestamp().auto() // auto-populated timestamp\n * ```\n */\nexport class FieldBuilder<\n\tKind extends FieldKind = FieldKind,\n\tReq extends boolean = true,\n\tAuto extends boolean = false,\n> {\n\tprotected readonly _kind: Kind\n\tprotected readonly _required: boolean\n\tprotected readonly _defaultValue: unknown\n\tprotected readonly _auto: boolean\n\n\tconstructor(kind: Kind, required = true as unknown as Req, defaultValue: unknown = undefined, auto = false as unknown as Auto) {\n\t\tthis._kind = kind\n\t\tthis._required = required as unknown as boolean\n\t\tthis._defaultValue = defaultValue\n\t\tthis._auto = auto as unknown as boolean\n\t}\n\n\t/** Mark this field as optional (not required on insert) */\n\toptional(): FieldBuilder<Kind, false, Auto> {\n\t\treturn new FieldBuilder(this._kind, false, this._defaultValue, this._auto)\n\t}\n\n\t/** Set a default value for this field. Implicitly makes the field optional. */\n\tdefault(value: unknown): FieldBuilder<Kind, false, Auto> {\n\t\treturn new FieldBuilder(this._kind, false, value, this._auto)\n\t}\n\n\t/** Mark this field as auto-populated (e.g., createdAt timestamps). Developers cannot set auto fields. 
*/\n\tauto(): FieldBuilder<Kind, false, true> {\n\t\treturn new FieldBuilder(this._kind, false, undefined, true)\n\t}\n\n\t/** @internal Build the final FieldDescriptor. Used by defineSchema(). */\n\t_build(): FieldDescriptor {\n\t\treturn {\n\t\t\tkind: this._kind,\n\t\t\trequired: this._required as unknown as boolean,\n\t\t\tdefaultValue: this._defaultValue,\n\t\t\tauto: this._auto as unknown as boolean,\n\t\t\tenumValues: null,\n\t\t\titemKind: null,\n\t\t}\n\t}\n}\n\n/**\n * Field builder for enum fields with constrained string values.\n * Preserves the literal enum tuple type for inference.\n */\nexport class EnumFieldBuilder<\n\tValues extends readonly string[] = readonly string[],\n\tReq extends boolean = true,\n\tAuto extends boolean = false,\n> extends FieldBuilder<'enum', Req, Auto> {\n\tprivate readonly _enumValues: Values\n\n\tconstructor(\n\t\tvalues: Values,\n\t\trequired = true as unknown as Req,\n\t\tdefaultValue: unknown = undefined,\n\t\tauto = false as unknown as Auto,\n\t) {\n\t\tsuper('enum', required, defaultValue, auto)\n\t\tthis._enumValues = values\n\t}\n\n\toverride optional(): EnumFieldBuilder<Values, false, Auto> {\n\t\treturn new EnumFieldBuilder(this._enumValues, false, this._defaultValue, this._auto)\n\t}\n\n\toverride default(value: Values[number]): EnumFieldBuilder<Values, false, Auto> {\n\t\treturn new EnumFieldBuilder(this._enumValues, false, value, this._auto)\n\t}\n\n\toverride auto(): EnumFieldBuilder<Values, false, true> {\n\t\treturn new EnumFieldBuilder(this._enumValues, false, undefined, true)\n\t}\n\n\toverride _build(): FieldDescriptor {\n\t\treturn {\n\t\t\tkind: 'enum',\n\t\t\trequired: this._required as unknown as boolean,\n\t\t\tdefaultValue: this._defaultValue,\n\t\t\tauto: this._auto as unknown as boolean,\n\t\t\tenumValues: this._enumValues,\n\t\t\titemKind: null,\n\t\t}\n\t}\n}\n\n/**\n * Field builder for array fields with a typed item kind.\n * Preserves the item kind type parameter for inference.\n */\nexport 
class ArrayFieldBuilder<\n\tItemKind extends FieldKind = FieldKind,\n\tReq extends boolean = true,\n\tAuto extends boolean = false,\n> extends FieldBuilder<'array', Req, Auto> {\n\tprivate readonly _itemKind: ItemKind\n\n\tconstructor(\n\t\titemBuilder: FieldBuilder<ItemKind>,\n\t\trequired = true as unknown as Req,\n\t\tdefaultValue: unknown = undefined,\n\t\tauto = false as unknown as Auto,\n\t) {\n\t\tsuper('array', required, defaultValue, auto)\n\t\tthis._itemKind = itemBuilder._build().kind as ItemKind\n\t}\n\n\toverride optional(): ArrayFieldBuilder<ItemKind, false, Auto> {\n\t\treturn new ArrayFieldBuilder(\n\t\t\tnew FieldBuilder(this._itemKind),\n\t\t\tfalse,\n\t\t\tthis._defaultValue,\n\t\t\tthis._auto,\n\t\t)\n\t}\n\n\toverride default(value: unknown[]): ArrayFieldBuilder<ItemKind, false, Auto> {\n\t\treturn new ArrayFieldBuilder(new FieldBuilder(this._itemKind), false, value, this._auto)\n\t}\n\n\toverride auto(): ArrayFieldBuilder<ItemKind, false, true> {\n\t\treturn new ArrayFieldBuilder(new FieldBuilder(this._itemKind), false, undefined, true)\n\t}\n\n\toverride _build(): FieldDescriptor {\n\t\treturn {\n\t\t\tkind: 'array',\n\t\t\trequired: this._required as unknown as boolean,\n\t\t\tdefaultValue: this._defaultValue,\n\t\t\tauto: this._auto as unknown as boolean,\n\t\t\tenumValues: null,\n\t\t\titemKind: this._itemKind,\n\t\t}\n\t}\n}\n\n/**\n * Type builder namespace. 
The developer's primary interface for defining field types.\n *\n * @example\n * ```typescript\n * import { t } from '@korajs/core'\n *\n * const fields = {\n * title: t.string(),\n * count: t.number(),\n * active: t.boolean().default(true),\n * notes: t.richtext(),\n * tags: t.array(t.string()).default([]),\n * priority: t.enum(['low', 'medium', 'high']).default('medium'),\n * createdAt: t.timestamp().auto(),\n * }\n * ```\n */\nexport const t = {\n\tstring(): FieldBuilder<'string', true, false> {\n\t\treturn new FieldBuilder('string', true, undefined, false)\n\t},\n\n\tnumber(): FieldBuilder<'number', true, false> {\n\t\treturn new FieldBuilder('number', true, undefined, false)\n\t},\n\n\tboolean(): FieldBuilder<'boolean', true, false> {\n\t\treturn new FieldBuilder('boolean', true, undefined, false)\n\t},\n\n\ttimestamp(): FieldBuilder<'timestamp', true, false> {\n\t\treturn new FieldBuilder('timestamp', true, undefined, false)\n\t},\n\n\trichtext(): FieldBuilder<'richtext', true, false> {\n\t\treturn new FieldBuilder('richtext', true, undefined, false)\n\t},\n\n\tenum<const V extends readonly string[]>(values: V): EnumFieldBuilder<V, true, false> {\n\t\treturn new EnumFieldBuilder(values, true, undefined, false)\n\t},\n\n\tarray<K extends FieldKind>(itemBuilder: FieldBuilder<K>): ArrayFieldBuilder<K, true, false> {\n\t\treturn new ArrayFieldBuilder(itemBuilder, true, undefined, false)\n\t},\n}\n","import { SchemaValidationError } from '../errors/errors'\nimport type { CollectionDefinition, FieldDescriptor, OperationType } from '../types'\n\n/**\n * Validates a record's data against a collection's field definitions.\n * Applies defaults, rejects auto fields, and type-checks each value.\n *\n * @param collection - The collection name (for error messages)\n * @param collectionDef - The collection definition from the schema\n * @param data - The record data to validate\n * @param operationType - The operation type ('insert', 'update', 'delete')\n * @returns The 
validated and normalized data (with defaults applied)\n * @throws {SchemaValidationError} If validation fails\n */\nexport function validateRecord(\n\tcollection: string,\n\tcollectionDef: CollectionDefinition,\n\tdata: Record<string, unknown>,\n\toperationType: OperationType,\n): Record<string, unknown> {\n\tif (operationType === 'delete') {\n\t\treturn {}\n\t}\n\n\tconst result: Record<string, unknown> = {}\n\tconst fieldNames = Object.keys(collectionDef.fields)\n\n\t// Check for extra fields not in the schema\n\tfor (const key of Object.keys(data)) {\n\t\tif (!(key in collectionDef.fields)) {\n\t\t\tthrow new SchemaValidationError(\n\t\t\t\t`Unknown field \"${key}\" in collection \"${collection}\". Available fields: ${fieldNames.join(', ')}`,\n\t\t\t\t{ collection, field: key },\n\t\t\t)\n\t\t}\n\t}\n\n\tfor (const [fieldName, descriptor] of Object.entries(collectionDef.fields)) {\n\t\tconst value = data[fieldName]\n\t\tconst hasValue = fieldName in data\n\n\t\t// Auto fields cannot be set by the developer\n\t\tif (descriptor.auto && hasValue) {\n\t\t\tthrow new SchemaValidationError(\n\t\t\t\t`Field \"${fieldName}\" in collection \"${collection}\" is auto-populated and cannot be set manually`,\n\t\t\t\t{ collection, field: fieldName },\n\t\t\t)\n\t\t}\n\n\t\t// For updates, only validate fields that are present (partial updates)\n\t\tif (operationType === 'update') {\n\t\t\tif (hasValue) {\n\t\t\t\tif (value !== undefined && value !== null) {\n\t\t\t\t\tvalidateFieldValue(collection, fieldName, descriptor, value)\n\t\t\t\t}\n\t\t\t\tresult[fieldName] = value\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\t// For inserts, apply defaults and check required fields\n\t\tif (descriptor.auto) {\n\t\t\t// Skip auto fields — they are populated by the framework\n\t\t\tcontinue\n\t\t}\n\n\t\tif (!hasValue || value === undefined) {\n\t\t\tif (descriptor.defaultValue !== undefined) {\n\t\t\t\t// Deep-copy default arrays/objects to prevent shared mutations\n\t\t\t\tresult[fieldName] 
=\n\t\t\t\t\ttypeof descriptor.defaultValue === 'object' && descriptor.defaultValue !== null\n\t\t\t\t\t\t? JSON.parse(JSON.stringify(descriptor.defaultValue))\n\t\t\t\t\t\t: descriptor.defaultValue\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif (descriptor.required) {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Required field \"${fieldName}\" is missing in collection \"${collection}\"`,\n\t\t\t\t\t{ collection, field: fieldName },\n\t\t\t\t)\n\t\t\t}\n\n\t\t\t// Optional field with no default — omit from result\n\t\t\tcontinue\n\t\t}\n\n\t\tvalidateFieldValue(collection, fieldName, descriptor, value)\n\t\tresult[fieldName] = value\n\t}\n\n\treturn result\n}\n\nfunction validateFieldValue(\n\tcollection: string,\n\tfieldName: string,\n\tdescriptor: FieldDescriptor,\n\tvalue: unknown,\n): void {\n\tswitch (descriptor.kind) {\n\t\tcase 'string': {\n\t\t\tif (typeof value !== 'string') {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Field \"${fieldName}\" in collection \"${collection}\" must be a string, got ${typeof value}`,\n\t\t\t\t\t{ collection, field: fieldName, expectedType: 'string', receivedType: typeof value },\n\t\t\t\t)\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\n\t\tcase 'number': {\n\t\t\tif (typeof value !== 'number' || Number.isNaN(value)) {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Field \"${fieldName}\" in collection \"${collection}\" must be a number, got ${typeof value}`,\n\t\t\t\t\t{ collection, field: fieldName, expectedType: 'number', receivedType: typeof value },\n\t\t\t\t)\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\n\t\tcase 'boolean': {\n\t\t\tif (typeof value !== 'boolean') {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Field \"${fieldName}\" in collection \"${collection}\" must be a boolean, got ${typeof value}`,\n\t\t\t\t\t{ collection, field: fieldName, expectedType: 'boolean', receivedType: typeof value },\n\t\t\t\t)\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\n\t\tcase 'timestamp': {\n\t\t\tif (typeof value !== 'number' || 
!Number.isFinite(value)) {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Field \"${fieldName}\" in collection \"${collection}\" must be a timestamp (number), got ${typeof value}`,\n\t\t\t\t\t{\n\t\t\t\t\t\tcollection,\n\t\t\t\t\t\tfield: fieldName,\n\t\t\t\t\t\texpectedType: 'timestamp',\n\t\t\t\t\t\treceivedType: typeof value,\n\t\t\t\t\t},\n\t\t\t\t)\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\n\t\tcase 'enum': {\n\t\t\tif (typeof value !== 'string') {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Field \"${fieldName}\" in collection \"${collection}\" must be a string (enum), got ${typeof value}`,\n\t\t\t\t\t{ collection, field: fieldName, expectedType: 'enum', receivedType: typeof value },\n\t\t\t\t)\n\t\t\t}\n\t\t\tif (descriptor.enumValues && !descriptor.enumValues.includes(value)) {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Field \"${fieldName}\" in collection \"${collection}\" must be one of: ${descriptor.enumValues.join(', ')}. Got \"${value}\"`,\n\t\t\t\t\t{\n\t\t\t\t\t\tcollection,\n\t\t\t\t\t\tfield: fieldName,\n\t\t\t\t\t\tallowedValues: [...descriptor.enumValues],\n\t\t\t\t\t\treceived: value,\n\t\t\t\t\t},\n\t\t\t\t)\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\n\t\tcase 'array': {\n\t\t\tif (!Array.isArray(value)) {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Field \"${fieldName}\" in collection \"${collection}\" must be an array, got ${typeof value}`,\n\t\t\t\t\t{ collection, field: fieldName, expectedType: 'array', receivedType: typeof value },\n\t\t\t\t)\n\t\t\t}\n\t\t\tif (descriptor.itemKind) {\n\t\t\t\tconst expectedType = jsTypeForKind(descriptor.itemKind)\n\t\t\t\tfor (let i = 0; i < value.length; i++) {\n\t\t\t\t\tconst item = value[i]\n\t\t\t\t\tif (!matchesJsType(item, expectedType)) {\n\t\t\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t\t\t`Field \"${fieldName}[${i}]\" in collection \"${collection}\" must be a ${descriptor.itemKind}, got ${typeof item}`,\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tcollection,\n\t\t\t\t\t\t\t\tfield: 
`${fieldName}[${i}]`,\n\t\t\t\t\t\t\t\texpectedType: descriptor.itemKind,\n\t\t\t\t\t\t\t\treceivedType: typeof item,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\n\t\tcase 'richtext': {\n\t\t\t// Richtext fields accept Uint8Array (Yjs state) or string (plain text initial value)\n\t\t\tif (!(value instanceof Uint8Array) && typeof value !== 'string') {\n\t\t\t\tthrow new SchemaValidationError(\n\t\t\t\t\t`Field \"${fieldName}\" in collection \"${collection}\" must be a Uint8Array or string for richtext, got ${typeof value}`,\n\t\t\t\t\t{\n\t\t\t\t\t\tcollection,\n\t\t\t\t\t\tfield: fieldName,\n\t\t\t\t\t\texpectedType: 'richtext',\n\t\t\t\t\t\treceivedType: typeof value,\n\t\t\t\t\t},\n\t\t\t\t)\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t}\n}\n\nfunction jsTypeForKind(kind: string): string {\n\tswitch (kind) {\n\t\tcase 'string':\n\t\tcase 'enum':\n\t\t\treturn 'string'\n\t\tcase 'number':\n\t\tcase 'timestamp':\n\t\t\treturn 'number'\n\t\tcase 'boolean':\n\t\t\treturn 'boolean'\n\t\tdefault:\n\t\t\treturn 'object'\n\t}\n}\n\nfunction matchesJsType(value: unknown, expected: string): boolean {\n\t// Using explicit comparisons to satisfy Biome's useValidTypeof rule,\n\t// which requires typeof to be compared against string literals.\n\tswitch (expected) {\n\t\tcase 'string':\n\t\t\treturn typeof value === 'string'\n\t\tcase 'number':\n\t\t\treturn typeof value === 'number'\n\t\tcase 'boolean':\n\t\t\treturn typeof value === 'boolean'\n\t\tcase 'object':\n\t\t\treturn typeof value === 'object'\n\t\tdefault:\n\t\t\treturn false\n\t}\n}\n","import type { Operation, VersionVector } from '../types'\nimport { topologicalSort } from './topological-sort'\n\n/**\n * Create an empty version vector.\n */\nexport function createVersionVector(): VersionVector {\n\treturn new Map()\n}\n\n/**\n * Merge two version vectors by taking the max sequence number for each node.\n * This is commutative, associative, and idempotent.\n */\nexport function 
mergeVectors(a: VersionVector, b: VersionVector): VersionVector {\n\tconst merged = new Map(a)\n\tfor (const [nodeId, seq] of b) {\n\t\tmerged.set(nodeId, Math.max(merged.get(nodeId) ?? 0, seq))\n\t}\n\treturn merged\n}\n\n/**\n * Advance a version vector for a specific node to a new sequence number.\n * Only advances forward — if the current value is higher, no change is made.\n */\nexport function advanceVector(vector: VersionVector, nodeId: string, seq: number): VersionVector {\n\tconst updated = new Map(vector)\n\tupdated.set(nodeId, Math.max(updated.get(nodeId) ?? 0, seq))\n\treturn updated\n}\n\n/**\n * Returns true if vector `a` dominates vector `b` — meaning `a` has seen\n * everything `b` has seen. Formally: for every nodeId in b, a[nodeId] >= b[nodeId].\n */\nexport function dominates(a: VersionVector, b: VersionVector): boolean {\n\tfor (const [nodeId, bSeq] of b) {\n\t\tif ((a.get(nodeId) ?? 0) < bSeq) return false\n\t}\n\treturn true\n}\n\n/**\n * Returns true if two version vectors are exactly equal.\n */\nexport function vectorsEqual(a: VersionVector, b: VersionVector): boolean {\n\tif (a.size !== b.size) return false\n\tfor (const [nodeId, aSeq] of a) {\n\t\tif (b.get(nodeId) !== aSeq) return false\n\t}\n\treturn true\n}\n\n/**\n * Operation log interface for computing deltas.\n */\nexport interface OperationLog {\n\tgetRange(nodeId: string, fromSeq: number, toSeq: number): Operation[]\n}\n\n/**\n * Compute the operations that `local` has but `remote` does not.\n * Returns operations in causal (topological) order.\n *\n * @param localVector - The local version vector\n * @param remoteVector - The remote version vector\n * @param operationLog - The operation log to fetch operations from\n * @returns Operations sorted in causal order\n */\nexport function computeDelta(\n\tlocalVector: VersionVector,\n\tremoteVector: VersionVector,\n\toperationLog: OperationLog,\n): Operation[] {\n\tconst missing: Operation[] = []\n\tfor (const [nodeId, localSeq] of 
localVector) {\n\t\tconst remoteSeq = remoteVector.get(nodeId) ?? 0\n\t\tif (localSeq > remoteSeq) {\n\t\t\tmissing.push(...operationLog.getRange(nodeId, remoteSeq + 1, localSeq))\n\t\t}\n\t}\n\treturn topologicalSort(missing)\n}\n\n/**\n * Serialize a version vector to a JSON-compatible string.\n */\nexport function serializeVector(vector: VersionVector): string {\n\tconst entries = [...vector.entries()].sort(([a], [b]) => (a < b ? -1 : a > b ? 1 : 0))\n\treturn JSON.stringify(entries)\n}\n\n/**\n * Deserialize a version vector from its serialized string form.\n */\nexport function deserializeVector(s: string): VersionVector {\n\tconst entries = JSON.parse(s) as [string, number][]\n\treturn new Map(entries)\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAiJO,IAAM,mBAAmB;AAAA,EAC/B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD;AAMO,IAAM,uBAAuB,CAAC,aAAa,QAAQ,QAAQ,QAAQ,SAAS;;;AC3JnF,IAAM,gBAA8B,WAAW;AAiBxC,SAAS,eACf,YAAoB,KAAK,IAAI,GAC7B,eAA6B,eACpB;AACT,QAAM,QAAQ,IAAI,WAAW,EAAE;AAC/B,eAAa,gBAAgB,KAAK;AAGlC,QAAM,KAAK,KAAK,IAAI,GAAG,KAAK,MAAM,SAAS,CAAC;AAC5C,QAAM,CAAC,IAAK,KAAK,KAAK,KAAM;AAC5B,QAAM,CAAC,IAAK,KAAK,KAAK,KAAM;AAC5B,QAAM,CAAC,IAAK,KAAK,KAAK,KAAM;AAC5B,QAAM,CAAC,IAAK,KAAK,KAAK,KAAM;AAC5B,QAAM,CAAC,IAAK,KAAK,KAAK,IAAK;AAC3B,QAAM,CAAC,IAAI,KAAK;AAGhB,QAAM,CAAC,KAAM,MAAM,CAAC,KAAK,KAAK,KAAQ;AAGtC,QAAM,CAAC,KAAM,MAAM,CAAC,KAAK,KAAK,KAAQ;AAEtC,SAAO,WAAW,KAAK;AACxB;AAQO,SAAS,iBAAiB,MAAsB;AACtD,QAAM,MAAM,KAAK,QAAQ,MAAM,EAAE;AAEjC,QAAM,OAAO,OAAO,SAAS,IAAI,MAAM,GAAG,CAAC,GAAG,EAAE;AAChD,QAAM,MAAM,OAAO,SAAS,IAAI,MAAM,GAAG,EAAE,GAAG,EAAE;AAChD,SAAO,OAAO,KAAK,KAAK;AACzB;AASO,SAAS,cAAc,MAAuB;AACpD,MAAI,CAAC,kEAAkE,KAAK,IAAI,GAAG;AAClF,WAAO;AAAA,EACR;AACA,QAAM,MAAM,KAAK,QAAQ,MAAM,EAAE;AAEjC,MAAI,IAAI,EAAE,MAAM,IAAK,QAAO;AAE5B,QAAM,gBAAgB,OAAO,SAAS,IAAI,EAAE,KAAK,KAAK,EAAE;AACxD,SAAO,iBAAiB,KAAO,iBAAiB;AACjD;AAEA,SAAS,WAAW,OAA2B;AAC9C,QAAM,MAAM,MAAM,KAAK,OAAO,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE,KAAK,EAAE;AAC7E,SAAO,GAAG,IAAI,MAAM,GAAG,CAAC,CAAC,IAAI,IAAI,MAAM,GAAG
,EAAE,CAAC,IAAI,IAAI,MAAM,IAAI,EAAE,CAAC,IAAI,IAAI,MAAM,IAAI,EAAE,CAAC,IAAI,IAAI,MAAM,IAAI,EAAE,CAAC;AAC7G;;;ACpEA,IAAM,qBAAqB;AAG3B,IAAM,gBAAgB;AAGtB,IAAM,kBAAkB,oBAAI,IAAI,CAAC,MAAM,eAAe,eAAe,UAAU,CAAC;AAyEzE,SAAS,aAA0C,OAAoC;AAC7F,kBAAgB,MAAM,OAAO;AAE7B,QAAM,cAAoD,CAAC;AAE3D,aAAW,CAAC,MAAM,eAAe,KAAK,OAAO,QAAQ,MAAM,WAAW,GAAG;AACxE,2BAAuB,IAAI;AAC3B,gBAAY,IAAI,IAAI,gBAAgB,MAAM,eAAe;AAAA,EAC1D;AAEA,MAAI,OAAO,KAAK,WAAW,EAAE,WAAW,GAAG;AAC1C,UAAM,IAAI,sBAAsB,4CAA4C;AAAA,EAC7E;AAEA,QAAM,YAAgD,CAAC;AACvD,MAAI,MAAM,WAAW;AACpB,eAAW,CAAC,MAAM,aAAa,KAAK,OAAO,QAAQ,MAAM,SAAS,GAAG;AACpE,uBAAiB,MAAM,eAAe,WAAW;AACjD,gBAAU,IAAI,IAAI,EAAE,GAAG,cAAc;AAAA,IACtC;AAAA,EACD;AAEA,SAAO,EAAE,SAAS,MAAM,SAAS,aAAa,UAAU;AACzD;AAEA,SAAS,gBAAgB,SAAuB;AAC/C,MAAI,OAAO,YAAY,YAAY,CAAC,OAAO,UAAU,OAAO,KAAK,UAAU,GAAG;AAC7E,UAAM,IAAI,sBAAsB,6CAA6C;AAAA,MAC5E,UAAU;AAAA,IACX,CAAC;AAAA,EACF;AACD;AAEA,SAAS,uBAAuB,MAAoB;AACnD,MAAI,CAAC,mBAAmB,KAAK,IAAI,GAAG;AACnC,UAAM,IAAI;AAAA,MACT,oBAAoB,IAAI;AAAA,MACxB,EAAE,YAAY,KAAK;AAAA,IACpB;AAAA,EACD;AACD;AAEA,SAAS,gBAAgB,MAAc,OAA8C;AACpF,QAAM,SAA0C,CAAC;AAEjD,MAAI,CAAC,MAAM,UAAU,OAAO,KAAK,MAAM,MAAM,EAAE,WAAW,GAAG;AAC5D,UAAM,IAAI,sBAAsB,eAAe,IAAI,oCAAoC;AAAA,MACtF,YAAY;AAAA,IACb,CAAC;AAAA,EACF;AAEA,aAAW,CAAC,WAAW,OAAO,KAAK,OAAO,QAAQ,MAAM,MAAM,GAAG;AAChE,sBAAkB,MAAM,SAAS;AACjC,WAAO,SAAS,IAAI,QAAQ,OAAO;AAAA,EACpC;AAEA,QAAM,UAAU,MAAM,WAAW,CAAC;AAClC,aAAW,cAAc,SAAS;AACjC,QAAI,EAAE,cAAc,SAAS;AAC5B,YAAM,IAAI;AAAA,QACT,gBAAgB,UAAU,mCAAmC,IAAI,wBAAwB,OAAO,KAAK,MAAM,EAAE,KAAK,IAAI,CAAC;AAAA,QACvH,EAAE,YAAY,MAAM,OAAO,WAAW;AAAA,MACvC;AAAA,IACD;AAAA,EACD;AAEA,QAAM,cAA4B,CAAC;AACnC,MAAI,MAAM,aAAa;AACtB,eAAW,mBAAmB,MAAM,aAAa;AAChD,yBAAmB,MAAM,iBAAiB,MAAM;AAChD,kBAAY,KAAK,EAAE,GAAG,gBAAgB,CAAC;AAAA,IACxC;AAAA,EACD;AAEA,QAAM,YAA4C,CAAC;AACnD,MAAI,MAAM,SAAS;AAClB,eAAW,CAAC,WAAW,QAAQ,KAAK,OAAO,QAAQ,MAAM,OAAO,GAAG;AAClE,UAAI,EAAE,aAAa,SAAS;AAC3B,cAAM,IAAI;AAAA,UACT,uBAAuB,SAAS,mCAAmC,IAAI,wBAAwB,OAAO,KAAK,MAAM,EAAE,KAAK,IAAI,CAAC;AAAA,UAC7H,EAAE,YAAY,MAAM,OAAO,UAAU;AAAA,QACtC;AAAA,MACD;
AACA,UAAI,OAAO,aAAa,YAAY;AACnC,cAAM,IAAI;AAAA,UACT,uBAAuB,SAAS,oBAAoB,IAAI;AAAA,UACxD,EAAE,YAAY,MAAM,OAAO,UAAU;AAAA,QACtC;AAAA,MACD;AACA,gBAAU,SAAS,IAAI;AAAA,IACxB;AAAA,EACD;AAEA,SAAO,EAAE,QAAQ,SAAS,aAAa,UAAU;AAClD;AAEA,SAAS,kBAAkB,YAAoB,WAAyB;AACvE,MAAI,gBAAgB,IAAI,SAAS,GAAG;AACnC,UAAM,IAAI;AAAA,MACT,eAAe,SAAS,gCAAgC,UAAU,uBAAuB,CAAC,GAAG,eAAe,EAAE,KAAK,IAAI,CAAC;AAAA,MACxH,EAAE,YAAY,OAAO,UAAU;AAAA,IAChC;AAAA,EACD;AACA,MAAI,CAAC,cAAc,KAAK,SAAS,GAAG;AACnC,UAAM,IAAI;AAAA,MACT,eAAe,SAAS,oBAAoB,UAAU;AAAA,MACtD,EAAE,YAAY,OAAO,UAAU;AAAA,IAChC;AAAA,EACD;AACD;AAEA,SAAS,mBACR,YACA,YACA,QACO;AACP,aAAW,SAAS,WAAW,QAAQ;AACtC,QAAI,EAAE,SAAS,SAAS;AACvB,YAAM,IAAI;AAAA,QACT,gCAAgC,KAAK,yCAAyC,UAAU,wBAAwB,OAAO,KAAK,MAAM,EAAE,KAAK,IAAI,CAAC;AAAA,QAC9I,EAAE,YAAY,MAAM;AAAA,MACrB;AAAA,IACD;AAAA,EACD;AAEA,MAAI,WAAW,eAAe,oBAAoB,CAAC,WAAW,eAAe;AAC5E,UAAM,IAAI;AAAA,MACT,uEAAuE,UAAU;AAAA,MACjF,EAAE,WAAW;AAAA,IACd;AAAA,EACD;AAEA,MAAI,WAAW,eAAe,oBAAoB,WAAW,eAAe;AAC3E,QAAI,EAAE,WAAW,iBAAiB,SAAS;AAC1C,YAAM,IAAI;AAAA,QACT,6BAA6B,WAAW,aAAa,mCAAmC,UAAU;AAAA,QAClG,EAAE,YAAY,OAAO,WAAW,cAAc;AAAA,MAC/C;AAAA,IACD;AAAA,EACD;AAEA,MAAI,WAAW,eAAe,YAAY,OAAO,WAAW,YAAY,YAAY;AACnF,UAAM,IAAI;AAAA,MACT,+DAA+D,UAAU;AAAA,MACzE,EAAE,WAAW;AAAA,IACd;AAAA,EACD;AACD;AAEA,SAAS,iBACR,MACA,UACA,aACO;AACP,MAAI,EAAE,SAAS,QAAQ,cAAc;AACpC,UAAM,IAAI;AAAA,MACT,aAAa,IAAI,mCAAmC,SAAS,IAAI,kDAAkD,OAAO,KAAK,WAAW,EAAE,KAAK,IAAI,CAAC;AAAA,MACtJ,EAAE,UAAU,MAAM,YAAY,SAAS,KAAK;AAAA,IAC7C;AAAA,EACD;AAEA,MAAI,EAAE,SAAS,MAAM,cAAc;AAClC,UAAM,IAAI;AAAA,MACT,aAAa,IAAI,mCAAmC,SAAS,EAAE,kDAAkD,OAAO,KAAK,WAAW,EAAE,KAAK,IAAI,CAAC;AAAA,MACpJ,EAAE,UAAU,MAAM,YAAY,SAAS,GAAG;AAAA,IAC3C;AAAA,EACD;AAEA,QAAM,iBAAiB,YAAY,SAAS,IAAI;AAChD,MAAI,kBAAkB,EAAE,SAAS,SAAS,eAAe,SAAS;AACjE,UAAM,IAAI;AAAA,MACT,aAAa,IAAI,uBAAuB,SAAS,KAAK,yCAAyC,SAAS,IAAI,wBAAwB,OAAO,KAAK,eAAe,MAAM,EAAE,KAAK,IAAI,CAAC;AAAA,MACjL,EAAE,UAAU,MAAM,YAAY,SAAS,MAAM,OAAO,SAAS,MAAM;AAAA,IACpE;AAAA,EACD;AACD;;;AC3PO,SAAS,YACf,gBACA,YACA,WACW;AACX,QAAM,aAAuB,CAAC;AAC9B,QAAM,UAAoB,CAAC,8BAA8B;AAGzD
,QAAM,gBAAgB,IAAI,IAAI,WAAW,OAAO;AAGhD,QAAM,WAAqB,CAAC;AAE5B,aAAW,CAAC,WAAW,UAAU,KAAK,OAAO,QAAQ,WAAW,MAAM,GAAG;AACxE,QAAI,SAAS,iBAAiB,WAAW,UAAU;AAGnD,QAAI,WAAW;AACd,iBAAW,OAAO,OAAO,OAAO,SAAS,GAAG;AAC3C,YAAI,IAAI,SAAS,kBAAkB,IAAI,UAAU,WAAW;AAC3D,oBAAU,eAAe,IAAI,EAAE;AAC/B,mBAAS,KAAK,SAAS;AACvB;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAEA,YAAQ,KAAK,MAAM;AAAA,EACpB;AAGA,UAAQ,KAAK,8BAA8B;AAC3C,UAAQ,KAAK,8BAA8B;AAC3C,UAAQ,KAAK,qCAAqC;AAElD,aAAW,KAAK,8BAA8B,cAAc;AAAA,IAAS,QAAQ,KAAK,OAAO,CAAC;AAAA,EAAK;AAG/F,aAAW,cAAc,WAAW,SAAS;AAC5C,eAAW;AAAA,MACV,kCAAkC,cAAc,IAAI,UAAU,OAAO,cAAc,KAAK,UAAU;AAAA,IACnG;AAAA,EACD;AAGA,aAAW,WAAW,UAAU;AAC/B,QAAI,CAAC,cAAc,IAAI,OAAO,GAAG;AAChC,iBAAW;AAAA,QACV,kCAAkC,cAAc,IAAI,OAAO,OAAO,cAAc,KAAK,OAAO;AAAA,MAC7F;AAAA,IACD;AAAA,EACD;AAGA,aAAW;AAAA,IACV,wCAAwC,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYvD;AAEA,SAAO;AACR;AAQO,SAAS,gBAAgB,QAAoC;AACnE,QAAM,aAAuB,CAAC;AAG9B,aAAW;AAAA,IACV;AAAA,EAID;AAGA,aAAW;AAAA,IACV;AAAA,EAID;AAEA,aAAW,CAAC,MAAM,UAAU,KAAK,OAAO,QAAQ,OAAO,WAAW,GAAG;AACpE,eAAW,KAAK,GAAG,YAAY,MAAM,YAAY,OAAO,SAAS,CAAC;AAAA,EACnE;AAEA,SAAO;AACR;AAEA,SAAS,iBAAiB,WAAmB,YAAqC;AACjF,QAAM,UAAU,aAAa,UAAU;AACvC,QAAM,QAAQ,CAAC,WAAW,OAAO;AAEjC,MAAI,WAAW,YAAY,WAAW,iBAAiB,UAAa,CAAC,WAAW,MAAM;AACrF,UAAM,KAAK,UAAU;AAAA,EACtB;AAEA,MAAI,WAAW,iBAAiB,QAAW;AAC1C,UAAM,KAAK,WAAW,WAAW,WAAW,YAAY,CAAC,EAAE;AAAA,EAC5D;AAGA,MAAI,WAAW,SAAS,UAAU,WAAW,YAAY;AACxD,UAAM,SAAS,WAAW,WAAW,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI;AACnE,UAAM,KAAK,UAAU,SAAS,QAAQ,MAAM,IAAI;AAAA,EACjD;AAEA,SAAO,MAAM,KAAK,GAAG;AACtB;AAEA,SAAS,aAAa,YAAqC;AAC1D,UAAQ,WAAW,MAAM;AAAA,IACxB,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,EACT;AACD;AAEA,SAAS,WAAW,OAAwB;AAC3C,MAAI,UAAU,KAAM,QAAO;AAC3B,MAAI,OAAO,UAAU,SAAU,QAAO,IAAI,KAAK;AAC/C,MAAI,OAAO,UAAU,SAAU,QAAO,OAAO,KAAK;AAClD,MAAI,OAAO,UAAU,UAAW,QAAO,QAAQ,MAAM;AAErD,SAAO,IAAI,KAAK,UAAU,KAAK,CAAC;AACjC;;;A
CpJO,IAAM,eAAN,MAAM,cAIX;AAAA,EACkB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEnB,YAAY,MAAY,WAAW,MAAwB,eAAwB,QAAW,OAAO,OAA0B;AAC9H,SAAK,QAAQ;AACb,SAAK,YAAY;AACjB,SAAK,gBAAgB;AACrB,SAAK,QAAQ;AAAA,EACd;AAAA;AAAA,EAGA,WAA4C;AAC3C,WAAO,IAAI,cAAa,KAAK,OAAO,OAAO,KAAK,eAAe,KAAK,KAAK;AAAA,EAC1E;AAAA;AAAA,EAGA,QAAQ,OAAiD;AACxD,WAAO,IAAI,cAAa,KAAK,OAAO,OAAO,OAAO,KAAK,KAAK;AAAA,EAC7D;AAAA;AAAA,EAGA,OAAwC;AACvC,WAAO,IAAI,cAAa,KAAK,OAAO,OAAO,QAAW,IAAI;AAAA,EAC3D;AAAA;AAAA,EAGA,SAA0B;AACzB,WAAO;AAAA,MACN,MAAM,KAAK;AAAA,MACX,UAAU,KAAK;AAAA,MACf,cAAc,KAAK;AAAA,MACnB,MAAM,KAAK;AAAA,MACX,YAAY;AAAA,MACZ,UAAU;AAAA,IACX;AAAA,EACD;AACD;AAMO,IAAM,mBAAN,MAAM,0BAIH,aAAgC;AAAA,EACxB;AAAA,EAEjB,YACC,QACA,WAAW,MACX,eAAwB,QACxB,OAAO,OACN;AACD,UAAM,QAAQ,UAAU,cAAc,IAAI;AAC1C,SAAK,cAAc;AAAA,EACpB;AAAA,EAES,WAAkD;AAC1D,WAAO,IAAI,kBAAiB,KAAK,aAAa,OAAO,KAAK,eAAe,KAAK,KAAK;AAAA,EACpF;AAAA,EAES,QAAQ,OAA8D;AAC9E,WAAO,IAAI,kBAAiB,KAAK,aAAa,OAAO,OAAO,KAAK,KAAK;AAAA,EACvE;AAAA,EAES,OAA8C;AACtD,WAAO,IAAI,kBAAiB,KAAK,aAAa,OAAO,QAAW,IAAI;AAAA,EACrE;AAAA,EAES,SAA0B;AAClC,WAAO;AAAA,MACN,MAAM;AAAA,MACN,UAAU,KAAK;AAAA,MACf,cAAc,KAAK;AAAA,MACnB,MAAM,KAAK;AAAA,MACX,YAAY,KAAK;AAAA,MACjB,UAAU;AAAA,IACX;AAAA,EACD;AACD;AAMO,IAAM,oBAAN,MAAM,2BAIH,aAAiC;AAAA,EACzB;AAAA,EAEjB,YACC,aACA,WAAW,MACX,eAAwB,QACxB,OAAO,OACN;AACD,UAAM,SAAS,UAAU,cAAc,IAAI;AAC3C,SAAK,YAAY,YAAY,OAAO,EAAE;AAAA,EACvC;AAAA,EAES,WAAqD;AAC7D,WAAO,IAAI;AAAA,MACV,IAAI,aAAa,KAAK,SAAS;AAAA,MAC/B;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,IACN;AAAA,EACD;AAAA,EAES,QAAQ,OAA4D;AAC5E,WAAO,IAAI,mBAAkB,IAAI,aAAa,KAAK,SAAS,GAAG,OAAO,OAAO,KAAK,KAAK;AAAA,EACxF;AAAA,EAES,OAAiD;AACzD,WAAO,IAAI,mBAAkB,IAAI,aAAa,KAAK,SAAS,GAAG,OAAO,QAAW,IAAI;AAAA,EACtF;AAAA,EAES,SAA0B;AAClC,WAAO;AAAA,MACN,MAAM;AAAA,MACN,UAAU,KAAK;AAAA,MACf,cAAc,KAAK;AAAA,MACnB,MAAM,KAAK;AAAA,MACX,YAAY;AAAA,MACZ,UAAU,KAAK;AAAA,IAChB;AAAA,EACD;AACD;AAoBO,IAAM,IAAI;AAAA,EAChB,SAA8C;AAC7C,WAAO,IAAI,aAAa,UAAU,MAAM,QAAW,KAAK;AAAA,EACzD;AAAA,EAEA,SAA8C;AAC7C,WAAO,IAAI,aAAa,UAAU,MAAM,QAAW,KAAK;AAAA,EACzD;AAAA,EAEA,UAAgD;AAC/C
,WAAO,IAAI,aAAa,WAAW,MAAM,QAAW,KAAK;AAAA,EAC1D;AAAA,EAEA,YAAoD;AACnD,WAAO,IAAI,aAAa,aAAa,MAAM,QAAW,KAAK;AAAA,EAC5D;AAAA,EAEA,WAAkD;AACjD,WAAO,IAAI,aAAa,YAAY,MAAM,QAAW,KAAK;AAAA,EAC3D;AAAA,EAEA,KAAwC,QAA6C;AACpF,WAAO,IAAI,iBAAiB,QAAQ,MAAM,QAAW,KAAK;AAAA,EAC3D;AAAA,EAEA,MAA2B,aAAiE;AAC3F,WAAO,IAAI,kBAAkB,aAAa,MAAM,QAAW,KAAK;AAAA,EACjE;AACD;;;AC/LO,SAAS,eACf,YACA,eACA,MACA,eAC0B;AAC1B,MAAI,kBAAkB,UAAU;AAC/B,WAAO,CAAC;AAAA,EACT;AAEA,QAAM,SAAkC,CAAC;AACzC,QAAM,aAAa,OAAO,KAAK,cAAc,MAAM;AAGnD,aAAW,OAAO,OAAO,KAAK,IAAI,GAAG;AACpC,QAAI,EAAE,OAAO,cAAc,SAAS;AACnC,YAAM,IAAI;AAAA,QACT,kBAAkB,GAAG,oBAAoB,UAAU,wBAAwB,WAAW,KAAK,IAAI,CAAC;AAAA,QAChG,EAAE,YAAY,OAAO,IAAI;AAAA,MAC1B;AAAA,IACD;AAAA,EACD;AAEA,aAAW,CAAC,WAAW,UAAU,KAAK,OAAO,QAAQ,cAAc,MAAM,GAAG;AAC3E,UAAM,QAAQ,KAAK,SAAS;AAC5B,UAAM,WAAW,aAAa;AAG9B,QAAI,WAAW,QAAQ,UAAU;AAChC,YAAM,IAAI;AAAA,QACT,UAAU,SAAS,oBAAoB,UAAU;AAAA,QACjD,EAAE,YAAY,OAAO,UAAU;AAAA,MAChC;AAAA,IACD;AAGA,QAAI,kBAAkB,UAAU;AAC/B,UAAI,UAAU;AACb,YAAI,UAAU,UAAa,UAAU,MAAM;AAC1C,6BAAmB,YAAY,WAAW,YAAY,KAAK;AAAA,QAC5D;AACA,eAAO,SAAS,IAAI;AAAA,MACrB;AACA;AAAA,IACD;AAGA,QAAI,WAAW,MAAM;AAEpB;AAAA,IACD;AAEA,QAAI,CAAC,YAAY,UAAU,QAAW;AACrC,UAAI,WAAW,iBAAiB,QAAW;AAE1C,eAAO,SAAS,IACf,OAAO,WAAW,iBAAiB,YAAY,WAAW,iBAAiB,OACxE,KAAK,MAAM,KAAK,UAAU,WAAW,YAAY,CAAC,IAClD,WAAW;AACf;AAAA,MACD;AAEA,UAAI,WAAW,UAAU;AACxB,cAAM,IAAI;AAAA,UACT,mBAAmB,SAAS,+BAA+B,UAAU;AAAA,UACrE,EAAE,YAAY,OAAO,UAAU;AAAA,QAChC;AAAA,MACD;AAGA;AAAA,IACD;AAEA,uBAAmB,YAAY,WAAW,YAAY,KAAK;AAC3D,WAAO,SAAS,IAAI;AAAA,EACrB;AAEA,SAAO;AACR;AAEA,SAAS,mBACR,YACA,WACA,YACA,OACO;AACP,UAAQ,WAAW,MAAM;AAAA,IACxB,KAAK,UAAU;AACd,UAAI,OAAO,UAAU,UAAU;AAC9B,cAAM,IAAI;AAAA,UACT,UAAU,SAAS,oBAAoB,UAAU,2BAA2B,OAAO,KAAK;AAAA,UACxF,EAAE,YAAY,OAAO,WAAW,cAAc,UAAU,cAAc,OAAO,MAAM;AAAA,QACpF;AAAA,MACD;AACA;AAAA,IACD;AAAA,IAEA,KAAK,UAAU;AACd,UAAI,OAAO,UAAU,YAAY,OAAO,MAAM,KAAK,GAAG;AACrD,cAAM,IAAI;AAAA,UACT,UAAU,SAAS,oBAAoB,UAAU,2BAA2B,OAAO,KAAK;AAAA,UACxF,EAAE,YAAY,OAAO,WAAW,cAAc,UAAU,cAAc,OAAO,MAAM;AAAA,QACpF;AAAA,MACD;AACA;AAAA,IACD;AAAA,IAEA,
KAAK,WAAW;AACf,UAAI,OAAO,UAAU,WAAW;AAC/B,cAAM,IAAI;AAAA,UACT,UAAU,SAAS,oBAAoB,UAAU,4BAA4B,OAAO,KAAK;AAAA,UACzF,EAAE,YAAY,OAAO,WAAW,cAAc,WAAW,cAAc,OAAO,MAAM;AAAA,QACrF;AAAA,MACD;AACA;AAAA,IACD;AAAA,IAEA,KAAK,aAAa;AACjB,UAAI,OAAO,UAAU,YAAY,CAAC,OAAO,SAAS,KAAK,GAAG;AACzD,cAAM,IAAI;AAAA,UACT,UAAU,SAAS,oBAAoB,UAAU,uCAAuC,OAAO,KAAK;AAAA,UACpG;AAAA,YACC;AAAA,YACA,OAAO;AAAA,YACP,cAAc;AAAA,YACd,cAAc,OAAO;AAAA,UACtB;AAAA,QACD;AAAA,MACD;AACA;AAAA,IACD;AAAA,IAEA,KAAK,QAAQ;AACZ,UAAI,OAAO,UAAU,UAAU;AAC9B,cAAM,IAAI;AAAA,UACT,UAAU,SAAS,oBAAoB,UAAU,kCAAkC,OAAO,KAAK;AAAA,UAC/F,EAAE,YAAY,OAAO,WAAW,cAAc,QAAQ,cAAc,OAAO,MAAM;AAAA,QAClF;AAAA,MACD;AACA,UAAI,WAAW,cAAc,CAAC,WAAW,WAAW,SAAS,KAAK,GAAG;AACpE,cAAM,IAAI;AAAA,UACT,UAAU,SAAS,oBAAoB,UAAU,qBAAqB,WAAW,WAAW,KAAK,IAAI,CAAC,UAAU,KAAK;AAAA,UACrH;AAAA,YACC;AAAA,YACA,OAAO;AAAA,YACP,eAAe,CAAC,GAAG,WAAW,UAAU;AAAA,YACxC,UAAU;AAAA,UACX;AAAA,QACD;AAAA,MACD;AACA;AAAA,IACD;AAAA,IAEA,KAAK,SAAS;AACb,UAAI,CAAC,MAAM,QAAQ,KAAK,GAAG;AAC1B,cAAM,IAAI;AAAA,UACT,UAAU,SAAS,oBAAoB,UAAU,2BAA2B,OAAO,KAAK;AAAA,UACxF,EAAE,YAAY,OAAO,WAAW,cAAc,SAAS,cAAc,OAAO,MAAM;AAAA,QACnF;AAAA,MACD;AACA,UAAI,WAAW,UAAU;AACxB,cAAM,eAAe,cAAc,WAAW,QAAQ;AACtD,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACtC,gBAAM,OAAO,MAAM,CAAC;AACpB,cAAI,CAAC,cAAc,MAAM,YAAY,GAAG;AACvC,kBAAM,IAAI;AAAA,cACT,UAAU,SAAS,IAAI,CAAC,qBAAqB,UAAU,eAAe,WAAW,QAAQ,SAAS,OAAO,IAAI;AAAA,cAC7G;AAAA,gBACC;AAAA,gBACA,OAAO,GAAG,SAAS,IAAI,CAAC;AAAA,gBACxB,cAAc,WAAW;AAAA,gBACzB,cAAc,OAAO;AAAA,cACtB;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAAA,MACD;AACA;AAAA,IACD;AAAA,IAEA,KAAK,YAAY;AAEhB,UAAI,EAAE,iBAAiB,eAAe,OAAO,UAAU,UAAU;AAChE,cAAM,IAAI;AAAA,UACT,UAAU,SAAS,oBAAoB,UAAU,sDAAsD,OAAO,KAAK;AAAA,UACnH;AAAA,YACC;AAAA,YACA,OAAO;AAAA,YACP,cAAc;AAAA,YACd,cAAc,OAAO;AAAA,UACtB;AAAA,QACD;AAAA,MACD;AACA;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,cAAc,MAAsB;AAC5C,UAAQ,MAAM;AAAA,IACb,KAAK;AAAA,IACL,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AAAA,IACL,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR;AACC,aAAO;AAAA,EACT;AACD;AAEA,SAAS,cAAc,OAAgB,UAA2B;AAGjE,UAAQ,UAAU;AAAA,I
ACjB,KAAK;AACJ,aAAO,OAAO,UAAU;AAAA,IACzB,KAAK;AACJ,aAAO,OAAO,UAAU;AAAA,IACzB,KAAK;AACJ,aAAO,OAAO,UAAU;AAAA,IACzB,KAAK;AACJ,aAAO,OAAO,UAAU;AAAA,IACzB;AACC,aAAO;AAAA,EACT;AACD;;;AC5OO,SAAS,sBAAqC;AACpD,SAAO,oBAAI,IAAI;AAChB;AAMO,SAAS,aAAa,GAAkB,GAAiC;AAC/E,QAAM,SAAS,IAAI,IAAI,CAAC;AACxB,aAAW,CAAC,QAAQ,GAAG,KAAK,GAAG;AAC9B,WAAO,IAAI,QAAQ,KAAK,IAAI,OAAO,IAAI,MAAM,KAAK,GAAG,GAAG,CAAC;AAAA,EAC1D;AACA,SAAO;AACR;AAMO,SAAS,cAAc,QAAuB,QAAgB,KAA4B;AAChG,QAAM,UAAU,IAAI,IAAI,MAAM;AAC9B,UAAQ,IAAI,QAAQ,KAAK,IAAI,QAAQ,IAAI,MAAM,KAAK,GAAG,GAAG,CAAC;AAC3D,SAAO;AACR;AAMO,SAAS,UAAU,GAAkB,GAA2B;AACtE,aAAW,CAAC,QAAQ,IAAI,KAAK,GAAG;AAC/B,SAAK,EAAE,IAAI,MAAM,KAAK,KAAK,KAAM,QAAO;AAAA,EACzC;AACA,SAAO;AACR;AAKO,SAAS,aAAa,GAAkB,GAA2B;AACzE,MAAI,EAAE,SAAS,EAAE,KAAM,QAAO;AAC9B,aAAW,CAAC,QAAQ,IAAI,KAAK,GAAG;AAC/B,QAAI,EAAE,IAAI,MAAM,MAAM,KAAM,QAAO;AAAA,EACpC;AACA,SAAO;AACR;AAkBO,SAAS,aACf,aACA,cACA,cACc;AACd,QAAM,UAAuB,CAAC;AAC9B,aAAW,CAAC,QAAQ,QAAQ,KAAK,aAAa;AAC7C,UAAM,YAAY,aAAa,IAAI,MAAM,KAAK;AAC9C,QAAI,WAAW,WAAW;AACzB,cAAQ,KAAK,GAAG,aAAa,SAAS,QAAQ,YAAY,GAAG,QAAQ,CAAC;AAAA,IACvE;AAAA,EACD;AACA,SAAO,gBAAgB,OAAO;AAC/B;AAKO,SAAS,gBAAgB,QAA+B;AAC9D,QAAM,UAAU,CAAC,GAAG,OAAO,QAAQ,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAO,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI,CAAE;AACrF,SAAO,KAAK,UAAU,OAAO;AAC9B;AAKO,SAAS,kBAAkB,GAA0B;AAC3D,QAAM,UAAU,KAAK,MAAM,CAAC;AAC5B,SAAO,IAAI,IAAI,OAAO;AACvB;","names":[]}
|
|
@@ -0,0 +1,397 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/internal.ts
|
|
21
|
+
var internal_exports = {};
|
|
22
|
+
__export(internal_exports, {
|
|
23
|
+
SimpleEventEmitter: () => SimpleEventEmitter,
|
|
24
|
+
canonicalize: () => canonicalize,
|
|
25
|
+
computeOperationId: () => computeOperationId,
|
|
26
|
+
topologicalSort: () => topologicalSort,
|
|
27
|
+
validateOperationParams: () => validateOperationParams
|
|
28
|
+
});
|
|
29
|
+
module.exports = __toCommonJS(internal_exports);
|
|
30
|
+
|
|
31
|
+
// src/operations/content-hash.ts
|
|
32
|
+
async function computeOperationId(input, timestamp) {
|
|
33
|
+
const canonical = canonicalize({
|
|
34
|
+
type: input.type,
|
|
35
|
+
collection: input.collection,
|
|
36
|
+
recordId: input.recordId,
|
|
37
|
+
data: input.data,
|
|
38
|
+
timestamp,
|
|
39
|
+
nodeId: input.nodeId
|
|
40
|
+
});
|
|
41
|
+
const encoded = new TextEncoder().encode(canonical);
|
|
42
|
+
const hashBuffer = await globalThis.crypto.subtle.digest("SHA-256", encoded);
|
|
43
|
+
return bufferToHex(hashBuffer);
|
|
44
|
+
}
|
|
45
|
+
function canonicalize(obj) {
|
|
46
|
+
if (obj === null || obj === void 0) {
|
|
47
|
+
return JSON.stringify(obj);
|
|
48
|
+
}
|
|
49
|
+
if (typeof obj !== "object") {
|
|
50
|
+
return JSON.stringify(obj);
|
|
51
|
+
}
|
|
52
|
+
if (Array.isArray(obj)) {
|
|
53
|
+
const items = obj.map((item) => canonicalize(item));
|
|
54
|
+
return `[${items.join(",")}]`;
|
|
55
|
+
}
|
|
56
|
+
const keys = Object.keys(obj).sort();
|
|
57
|
+
const pairs = keys.map((key) => {
|
|
58
|
+
const value = obj[key];
|
|
59
|
+
return `${JSON.stringify(key)}:${canonicalize(value)}`;
|
|
60
|
+
});
|
|
61
|
+
return `{${pairs.join(",")}}`;
|
|
62
|
+
}
|
|
63
|
+
function bufferToHex(buffer) {
|
|
64
|
+
const bytes = new Uint8Array(buffer);
|
|
65
|
+
return Array.from(bytes, (b) => b.toString(16).padStart(2, "0")).join("");
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
// src/events/event-emitter.ts
|
|
69
|
+
var SimpleEventEmitter = class {
|
|
70
|
+
listeners = /* @__PURE__ */ new Map();
|
|
71
|
+
on(type, listener) {
|
|
72
|
+
let set = this.listeners.get(type);
|
|
73
|
+
if (!set) {
|
|
74
|
+
set = /* @__PURE__ */ new Set();
|
|
75
|
+
this.listeners.set(type, set);
|
|
76
|
+
}
|
|
77
|
+
set.add(listener);
|
|
78
|
+
return () => {
|
|
79
|
+
this.off(type, listener);
|
|
80
|
+
};
|
|
81
|
+
}
|
|
82
|
+
off(type, listener) {
|
|
83
|
+
const set = this.listeners.get(type);
|
|
84
|
+
if (set) {
|
|
85
|
+
set.delete(listener);
|
|
86
|
+
if (set.size === 0) {
|
|
87
|
+
this.listeners.delete(type);
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
emit(event) {
|
|
92
|
+
const set = this.listeners.get(event.type);
|
|
93
|
+
if (!set) return;
|
|
94
|
+
for (const listener of set) {
|
|
95
|
+
;
|
|
96
|
+
listener(event);
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
/** Remove all listeners for all event types. */
|
|
100
|
+
clear() {
|
|
101
|
+
this.listeners.clear();
|
|
102
|
+
}
|
|
103
|
+
/** Get the number of listeners for a specific event type. */
|
|
104
|
+
listenerCount(type) {
|
|
105
|
+
return this.listeners.get(type)?.size ?? 0;
|
|
106
|
+
}
|
|
107
|
+
};
|
|
108
|
+
|
|
109
|
+
// src/errors/errors.ts

/**
 * Base class for every Kora error. Besides the standard Error message it
 * carries a machine-readable `code` string and an optional `context` object
 * with debugging details.
 */
var KoraError = class extends Error {
  code;
  context;
  constructor(message, code, context) {
    super(message);
    this.code = code;
    this.context = context;
    this.name = "KoraError";
  }
};
|
|
120
|
+
/**
 * Raised for invalid or malformed operations.
 * Always carries the code "OPERATION_ERROR"; `context` holds the offending
 * field values supplied by the caller.
 */
var OperationError = class extends KoraError {
  constructor(message, context) {
    super(message, "OPERATION_ERROR", context);
    this.name = "OperationError";
  }
};
|
|
126
|
+
/**
 * Raised when physical time lags the HLC wallTime past the hard limit.
 * The message reports the drift rounded to whole seconds; the same values
 * are exposed both as fields and inside `context`.
 */
var ClockDriftError = class extends KoraError {
  currentHlcTime;
  physicalTime;
  constructor(currentHlcTime, physicalTime) {
    const driftSeconds = Math.round((currentHlcTime - physicalTime) / 1000);
    super(
      `Clock drift of ${driftSeconds}s detected. Physical time is behind HLC by more than 5 minutes. This indicates a severe clock issue.`,
      "CLOCK_DRIFT",
      { currentHlcTime, physicalTime, driftSeconds }
    );
    this.currentHlcTime = currentHlcTime;
    this.physicalTime = physicalTime;
    this.name = "ClockDriftError";
  }
};
|
|
141
|
+
|
|
142
|
+
// src/clock/hlc.ts

// Default time source: reads the system wall clock via Date.now().
var systemTimeSource = { now: () => Date.now() };

// Drift (HLC wallTime ahead of physical time) above 60s triggers the
// optional onDriftWarning callback in HybridLogicalClock.checkDrift.
var DRIFT_WARN_MS = 6e4;

// Drift above 5 minutes makes checkDrift throw ClockDriftError instead.
var DRIFT_ERROR_MS = 5 * 6e4;
|
|
146
|
+
/**
 * Hybrid Logical Clock: pairs physical wall-clock time with a logical
 * counter, using nodeId as the final tie-breaker so timestamps form a
 * total order. now() and receive() are monotonic — each call produces a
 * timestamp strictly greater than the previous one from this clock.
 */
var HybridLogicalClock = class {
  nodeId;
  timeSource;
  onDriftWarning;
  wallTime = 0;
  logical = 0;
  constructor(nodeId, timeSource = systemTimeSource, onDriftWarning) {
    this.nodeId = nodeId;
    this.timeSource = timeSource;
    this.onDriftWarning = onDriftWarning;
  }
  /**
   * Generate a new timestamp for a local event.
   * Guarantees monotonicity: each call returns a timestamp strictly greater than the previous.
   *
   * @throws {ClockDriftError} If physical time is more than 5 minutes behind the HLC wallTime
   */
  now() {
    const physical = this.timeSource.now();
    this.checkDrift(physical);
    if (physical > this.wallTime) {
      // Wall clock moved forward: adopt it and reset the logical counter.
      this.wallTime = physical;
      this.logical = 0;
    } else {
      // Same or older wall time: bump the counter to keep strict ordering.
      this.logical += 1;
    }
    return this.snapshot();
  }
  /**
   * Update clock on receiving a remote timestamp.
   * Merges the remote clock state with the local state to maintain causal ordering.
   *
   * @throws {ClockDriftError} If physical time is more than 5 minutes behind the resulting wallTime
   */
  receive(remote) {
    const physical = this.timeSource.now();
    if (physical > this.wallTime && physical > remote.wallTime) {
      // Physical clock is ahead of both sides: it wins, counter resets.
      this.wallTime = physical;
      this.logical = 0;
    } else if (remote.wallTime > this.wallTime) {
      // Remote wall time is ahead: adopt it, stepping past its counter.
      this.wallTime = remote.wallTime;
      this.logical = remote.logical + 1;
    } else if (this.wallTime === remote.wallTime) {
      // Equal wall times: advance past the larger of the two counters.
      this.logical = Math.max(this.logical, remote.logical) + 1;
    } else {
      // Local wall time is ahead of both remote and physical: just bump.
      this.logical += 1;
    }
    this.checkDrift(physical);
    return this.snapshot();
  }
  /**
   * Compare two timestamps. Returns negative if a < b, positive if a > b, zero if equal.
   * Total order: wallTime first, then logical, then nodeId (lexicographic).
   */
  static compare(a, b) {
    const wallDelta = a.wallTime - b.wallTime;
    if (wallDelta !== 0) return wallDelta;
    const logicalDelta = a.logical - b.logical;
    if (logicalDelta !== 0) return logicalDelta;
    if (a.nodeId === b.nodeId) return 0;
    return a.nodeId < b.nodeId ? -1 : 1;
  }
  /**
   * Serialize an HLC timestamp to a string that sorts lexicographically.
   * Format: zero-padded wallTime:logical:nodeId
   */
  static serialize(ts) {
    const wall = String(ts.wallTime).padStart(15, "0");
    const logical = String(ts.logical).padStart(5, "0");
    return `${wall}:${logical}:${ts.nodeId}`;
  }
  /**
   * Deserialize an HLC timestamp from its serialized string form.
   * Throws a plain Error when the string has fewer than three ":"-separated parts.
   */
  static deserialize(s) {
    const parts = s.split(":");
    if (parts.length < 3) {
      throw new Error(`Invalid HLC timestamp string: "${s}"`);
    }
    const [wall = "0", logical = "0", ...rest] = parts;
    return {
      wallTime: Number.parseInt(wall, 10),
      logical: Number.parseInt(logical, 10),
      // nodeId may itself contain ":", so re-join everything after the second colon.
      nodeId: rest.join(":")
    };
  }
  /** Current clock state as a fresh timestamp object. */
  snapshot() {
    return { wallTime: this.wallTime, logical: this.logical, nodeId: this.nodeId };
  }
  /** Warn above 60s of drift; refuse to operate (throw) above 5 minutes. */
  checkDrift(physical) {
    const drift = this.wallTime - physical;
    if (drift > DRIFT_ERROR_MS) {
      throw new ClockDriftError(this.wallTime, physical);
    }
    if (drift > DRIFT_WARN_MS) {
      this.onDriftWarning?.(drift);
    }
  }
};
|
|
241
|
+
|
|
242
|
+
// src/operations/operation.ts

/**
 * Validate raw operation-input fields before an Operation is built.
 * Checks run in a fixed order and stop at the first violation by throwing
 * an OperationError that carries the offending value(s) in its context.
 * Returns undefined when the input is well-formed.
 */
function validateOperationParams(input) {
  // Single exit point for every validation failure.
  const fail = (message, context) => {
    throw new OperationError(message, context);
  };
  if (!input.nodeId || typeof input.nodeId !== "string") {
    fail("nodeId is required and must be a non-empty string", { received: input.nodeId });
  }
  if (!input.type || !["insert", "update", "delete"].includes(input.type)) {
    fail('type must be "insert", "update", or "delete"', { received: input.type });
  }
  if (!input.collection || typeof input.collection !== "string") {
    fail("collection is required and must be a non-empty string", { received: input.collection });
  }
  if (!input.recordId || typeof input.recordId !== "string") {
    fail("recordId is required and must be a non-empty string", { received: input.recordId });
  }
  // Per-type data contract: inserts/updates carry data, deletes carry none,
  // and updates additionally need previousData for 3-way merging.
  if (input.type === "insert" && input.data === null) {
    fail("insert operations must include data", {
      type: input.type,
      collection: input.collection
    });
  }
  if (input.type === "update" && input.data === null) {
    fail("update operations must include data with changed fields", {
      type: input.type,
      collection: input.collection
    });
  }
  if (input.type === "update" && input.previousData === null) {
    fail("update operations must include previousData for 3-way merge support", {
      type: input.type,
      collection: input.collection
    });
  }
  if (input.type === "delete" && input.data !== null) {
    fail("delete operations must have null data", {
      type: input.type,
      collection: input.collection
    });
  }
  if (typeof input.sequenceNumber !== "number" || input.sequenceNumber < 0) {
    fail("sequenceNumber must be a non-negative number", { received: input.sequenceNumber });
  }
  if (!Array.isArray(input.causalDeps)) {
    fail("causalDeps must be an array of operation IDs", { received: typeof input.causalDeps });
  }
  if (typeof input.schemaVersion !== "number" || input.schemaVersion < 1) {
    fail("schemaVersion must be a positive number", { received: input.schemaVersion });
  }
}
|
|
307
|
+
|
|
308
|
+
// src/version-vector/topological-sort.ts
|
|
309
|
+
function topologicalSort(operations) {
|
|
310
|
+
if (operations.length <= 1) return [...operations];
|
|
311
|
+
const opMap = /* @__PURE__ */ new Map();
|
|
312
|
+
for (const op of operations) {
|
|
313
|
+
opMap.set(op.id, op);
|
|
314
|
+
}
|
|
315
|
+
const inDegree = /* @__PURE__ */ new Map();
|
|
316
|
+
const dependents = /* @__PURE__ */ new Map();
|
|
317
|
+
for (const op of operations) {
|
|
318
|
+
if (!inDegree.has(op.id)) {
|
|
319
|
+
inDegree.set(op.id, 0);
|
|
320
|
+
}
|
|
321
|
+
if (!dependents.has(op.id)) {
|
|
322
|
+
dependents.set(op.id, []);
|
|
323
|
+
}
|
|
324
|
+
for (const depId of op.causalDeps) {
|
|
325
|
+
if (opMap.has(depId)) {
|
|
326
|
+
inDegree.set(op.id, (inDegree.get(op.id) ?? 0) + 1);
|
|
327
|
+
const deps = dependents.get(depId);
|
|
328
|
+
if (deps) {
|
|
329
|
+
deps.push(op.id);
|
|
330
|
+
} else {
|
|
331
|
+
dependents.set(depId, [op.id]);
|
|
332
|
+
}
|
|
333
|
+
}
|
|
334
|
+
}
|
|
335
|
+
}
|
|
336
|
+
const queue = [];
|
|
337
|
+
for (const op of operations) {
|
|
338
|
+
if ((inDegree.get(op.id) ?? 0) === 0) {
|
|
339
|
+
queue.push(op);
|
|
340
|
+
}
|
|
341
|
+
}
|
|
342
|
+
queue.sort(compareByTimestamp);
|
|
343
|
+
const result = [];
|
|
344
|
+
while (queue.length > 0) {
|
|
345
|
+
const current = queue.shift();
|
|
346
|
+
if (!current) break;
|
|
347
|
+
result.push(current);
|
|
348
|
+
const deps = dependents.get(current.id) ?? [];
|
|
349
|
+
const newlyReady = [];
|
|
350
|
+
for (const depId of deps) {
|
|
351
|
+
const deg = (inDegree.get(depId) ?? 0) - 1;
|
|
352
|
+
inDegree.set(depId, deg);
|
|
353
|
+
if (deg === 0) {
|
|
354
|
+
const op = opMap.get(depId);
|
|
355
|
+
if (op) newlyReady.push(op);
|
|
356
|
+
}
|
|
357
|
+
}
|
|
358
|
+
if (newlyReady.length > 0) {
|
|
359
|
+
newlyReady.sort(compareByTimestamp);
|
|
360
|
+
mergeIntoSorted(queue, newlyReady);
|
|
361
|
+
}
|
|
362
|
+
}
|
|
363
|
+
if (result.length !== operations.length) {
|
|
364
|
+
throw new OperationError(
|
|
365
|
+
`Cycle detected in operation dependency graph. Sorted ${result.length} of ${operations.length} operations.`,
|
|
366
|
+
{
|
|
367
|
+
sortedCount: result.length,
|
|
368
|
+
totalCount: operations.length
|
|
369
|
+
}
|
|
370
|
+
);
|
|
371
|
+
}
|
|
372
|
+
return result;
|
|
373
|
+
}
|
|
374
|
+
/** Deterministic total order of operations via their HLC timestamps. */
function compareByTimestamp(a, b) {
  const left = a.timestamp;
  const right = b.timestamp;
  return HybridLogicalClock.compare(left, right);
}
|
|
377
|
+
function mergeIntoSorted(target, items) {
|
|
378
|
+
let insertIndex = 0;
|
|
379
|
+
for (const item of items) {
|
|
380
|
+
while (insertIndex < target.length) {
|
|
381
|
+
const existing = target[insertIndex];
|
|
382
|
+
if (existing && compareByTimestamp(item, existing) <= 0) break;
|
|
383
|
+
insertIndex++;
|
|
384
|
+
}
|
|
385
|
+
target.splice(insertIndex, 0, item);
|
|
386
|
+
insertIndex++;
|
|
387
|
+
}
|
|
388
|
+
}
|
|
389
|
+
// Annotate the CommonJS export names for ESM import in node:
// (intentionally dead code: the `0 &&` guard means this assignment never runs;
// it exists only so Node's static lexer can detect the named exports)
0 && (module.exports = {
  SimpleEventEmitter,
  canonicalize,
  computeOperationId,
  topologicalSort,
  validateOperationParams
});
//# sourceMappingURL=internal.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/internal.ts","../src/operations/content-hash.ts","../src/events/event-emitter.ts","../src/errors/errors.ts","../src/clock/hlc.ts","../src/operations/operation.ts","../src/version-vector/topological-sort.ts"],"sourcesContent":["// Internal exports — shared within @kora packages but NOT part of the public API.\n// Other @kora packages can import from '@korajs/core/internal' if needed.\n\nexport { canonicalize, computeOperationId } from './operations/content-hash'\nexport { SimpleEventEmitter } from './events/event-emitter'\nexport { validateOperationParams } from './operations/operation'\nexport { topologicalSort } from './version-vector/topological-sort'\n","import type { OperationInput } from '../types'\n\n/**\n * Compute the content-addressed ID for an operation using SHA-256.\n * The same operation content always produces the same hash, ensuring deduplication.\n *\n * @param input - The operation input (without id/timestamp, which are assigned separately)\n * @param timestamp - The HLC timestamp serialized as a string\n * @returns A hex-encoded SHA-256 hash\n */\nexport async function computeOperationId(\n\tinput: OperationInput,\n\ttimestamp: string,\n): Promise<string> {\n\tconst canonical = canonicalize({\n\t\ttype: input.type,\n\t\tcollection: input.collection,\n\t\trecordId: input.recordId,\n\t\tdata: input.data,\n\t\ttimestamp,\n\t\tnodeId: input.nodeId,\n\t})\n\tconst encoded = new TextEncoder().encode(canonical)\n\tconst hashBuffer = await globalThis.crypto.subtle.digest('SHA-256', encoded)\n\treturn bufferToHex(hashBuffer)\n}\n\n/**\n * Deterministic JSON serialization with sorted keys.\n * Ensures identical objects always produce identical strings regardless of property insertion order.\n *\n * @param obj - The value to serialize\n * @returns A deterministic JSON string\n */\nexport function canonicalize(obj: unknown): string {\n\tif (obj === null || obj === undefined) {\n\t\treturn JSON.stringify(obj)\n\t}\n\n\tif 
(typeof obj !== 'object') {\n\t\treturn JSON.stringify(obj)\n\t}\n\n\tif (Array.isArray(obj)) {\n\t\tconst items = obj.map((item) => canonicalize(item))\n\t\treturn `[${items.join(',')}]`\n\t}\n\n\tconst keys = Object.keys(obj as Record<string, unknown>).sort()\n\tconst pairs = keys.map((key) => {\n\t\tconst value = (obj as Record<string, unknown>)[key]\n\t\treturn `${JSON.stringify(key)}:${canonicalize(value)}`\n\t})\n\treturn `{${pairs.join(',')}}`\n}\n\nfunction bufferToHex(buffer: ArrayBuffer): string {\n\tconst bytes = new Uint8Array(buffer)\n\treturn Array.from(bytes, (b) => b.toString(16).padStart(2, '0')).join('')\n}\n","import type { KoraEventByType, KoraEventEmitter, KoraEventListener, KoraEventType } from './events'\n\ntype AnyListener = (event: never) => void\n\n/**\n * Concrete implementation of KoraEventEmitter.\n * Simple, synchronous event emitter for internal use across @kora packages.\n *\n * @example\n * ```typescript\n * const emitter = new SimpleEventEmitter()\n * const unsub = emitter.on('operation:created', (event) => {\n * console.log(event.operation.id)\n * })\n * emitter.emit({ type: 'operation:created', operation: someOp })\n * unsub() // unsubscribe\n * ```\n */\nexport class SimpleEventEmitter implements KoraEventEmitter {\n\tprivate listeners = new Map<string, Set<AnyListener>>()\n\n\ton<T extends KoraEventType>(type: T, listener: KoraEventListener<T>): () => void {\n\t\tlet set = this.listeners.get(type)\n\t\tif (!set) {\n\t\t\tset = new Set()\n\t\t\tthis.listeners.set(type, set)\n\t\t}\n\t\tset.add(listener as AnyListener)\n\n\t\treturn () => {\n\t\t\tthis.off(type, listener)\n\t\t}\n\t}\n\n\toff<T extends KoraEventType>(type: T, listener: KoraEventListener<T>): void {\n\t\tconst set = this.listeners.get(type)\n\t\tif (set) {\n\t\t\tset.delete(listener as AnyListener)\n\t\t\tif (set.size === 0) {\n\t\t\t\tthis.listeners.delete(type)\n\t\t\t}\n\t\t}\n\t}\n\n\temit<T extends KoraEventType>(event: KoraEventByType<T>): void {\n\t\tconst 
set = this.listeners.get(event.type)\n\t\tif (!set) return\n\t\tfor (const listener of set) {\n\t\t\t;(listener as (event: KoraEventByType<T>) => void)(event)\n\t\t}\n\t}\n\n\t/** Remove all listeners for all event types. */\n\tclear(): void {\n\t\tthis.listeners.clear()\n\t}\n\n\t/** Get the number of listeners for a specific event type. */\n\tlistenerCount(type: KoraEventType): number {\n\t\treturn this.listeners.get(type)?.size ?? 0\n\t}\n}\n","/**\n * Base error class for all Kora errors.\n * Every error includes a machine-readable code and optional context for debugging.\n */\nexport class KoraError extends Error {\n\tconstructor(\n\t\tmessage: string,\n\t\tpublic readonly code: string,\n\t\tpublic readonly context?: Record<string, unknown>,\n\t) {\n\t\tsuper(message)\n\t\tthis.name = 'KoraError'\n\t}\n}\n\n/**\n * Thrown when schema validation fails during defineSchema() or at app initialization.\n */\nexport class SchemaValidationError extends KoraError {\n\tconstructor(message: string, context?: Record<string, unknown>) {\n\t\tsuper(message, 'SCHEMA_VALIDATION', context)\n\t\tthis.name = 'SchemaValidationError'\n\t}\n}\n\n/**\n * Thrown when an operation is invalid or cannot be created.\n */\nexport class OperationError extends KoraError {\n\tconstructor(message: string, context?: Record<string, unknown>) {\n\t\tsuper(message, 'OPERATION_ERROR', context)\n\t\tthis.name = 'OperationError'\n\t}\n}\n\n/**\n * Thrown when a merge conflict cannot be automatically resolved.\n */\nexport class MergeConflictError extends KoraError {\n\tconstructor(\n\t\tpublic readonly operationA: { id: string; collection: string },\n\t\tpublic readonly operationB: { id: string; collection: string },\n\t\tpublic readonly field: string,\n\t) {\n\t\tsuper(\n\t\t\t`Merge conflict on field \"${field}\" in collection \"${operationA.collection}\"`,\n\t\t\t'MERGE_CONFLICT',\n\t\t\t{ operationA: operationA.id, operationB: operationB.id, field },\n\t\t)\n\t\tthis.name = 
'MergeConflictError'\n\t}\n}\n\n/**\n * Thrown when a sync error occurs.\n */\nexport class SyncError extends KoraError {\n\tconstructor(message: string, context?: Record<string, unknown>) {\n\t\tsuper(message, 'SYNC_ERROR', context)\n\t\tthis.name = 'SyncError'\n\t}\n}\n\n/**\n * Thrown when a storage operation fails.\n */\nexport class StorageError extends KoraError {\n\tconstructor(message: string, context?: Record<string, unknown>) {\n\t\tsuper(message, 'STORAGE_ERROR', context)\n\t\tthis.name = 'StorageError'\n\t}\n}\n\n/**\n * Thrown when the HLC detects excessive clock drift.\n * Drift > 60s: warning. Drift > 5min: this error is thrown, refusing to generate timestamps.\n */\nexport class ClockDriftError extends KoraError {\n\tconstructor(\n\t\tpublic readonly currentHlcTime: number,\n\t\tpublic readonly physicalTime: number,\n\t) {\n\t\tconst driftSeconds = Math.round((currentHlcTime - physicalTime) / 1000)\n\t\tsuper(\n\t\t\t`Clock drift of ${driftSeconds}s detected. Physical time is behind HLC by more than 5 minutes. 
This indicates a severe clock issue.`,\n\t\t\t'CLOCK_DRIFT',\n\t\t\t{ currentHlcTime, physicalTime, driftSeconds },\n\t\t)\n\t\tthis.name = 'ClockDriftError'\n\t}\n}\n","import { ClockDriftError } from '../errors/errors'\nimport type { HLCTimestamp, TimeSource } from '../types'\n\n/** Default time source using the system clock */\nconst systemTimeSource: TimeSource = { now: () => Date.now() }\n\n/** Maximum allowed drift before warning (60 seconds) */\nconst DRIFT_WARN_MS = 60_000\n\n/** Maximum allowed drift before refusing to generate timestamps (5 minutes) */\nconst DRIFT_ERROR_MS = 5 * 60_000\n\n/**\n * Hybrid Logical Clock implementation based on Kulkarni et al.\n *\n * Provides a total order that respects causality without requiring synchronized clocks.\n * Each call to now() returns a timestamp strictly greater than the previous one.\n *\n * @example\n * ```typescript\n * const clock = new HybridLogicalClock('node-1')\n * const ts1 = clock.now()\n * const ts2 = clock.now()\n * // HybridLogicalClock.compare(ts1, ts2) < 0 (ts1 is earlier)\n * ```\n */\nexport class HybridLogicalClock {\n\tprivate wallTime = 0\n\tprivate logical = 0\n\n\tconstructor(\n\t\tprivate readonly nodeId: string,\n\t\tprivate readonly timeSource: TimeSource = systemTimeSource,\n\t\tprivate readonly onDriftWarning?: (driftMs: number) => void,\n\t) {}\n\n\t/**\n\t * Generate a new timestamp for a local event.\n\t * Guarantees monotonicity: each call returns a timestamp strictly greater than the previous.\n\t *\n\t * @throws {ClockDriftError} If physical time is more than 5 minutes behind the HLC wallTime\n\t */\n\tnow(): HLCTimestamp {\n\t\tconst physicalTime = this.timeSource.now()\n\t\tthis.checkDrift(physicalTime)\n\n\t\tif (physicalTime > this.wallTime) {\n\t\t\tthis.wallTime = physicalTime\n\t\t\tthis.logical = 0\n\t\t} else {\n\t\t\tthis.logical++\n\t\t}\n\n\t\treturn { wallTime: this.wallTime, logical: this.logical, nodeId: this.nodeId }\n\t}\n\n\t/**\n\t * Update clock on 
receiving a remote timestamp.\n\t * Merges the remote clock state with the local state to maintain causal ordering.\n\t *\n\t * @throws {ClockDriftError} If physical time is more than 5 minutes behind the resulting wallTime\n\t */\n\treceive(remote: HLCTimestamp): HLCTimestamp {\n\t\tconst physicalTime = this.timeSource.now()\n\n\t\tif (physicalTime > this.wallTime && physicalTime > remote.wallTime) {\n\t\t\tthis.wallTime = physicalTime\n\t\t\tthis.logical = 0\n\t\t} else if (remote.wallTime > this.wallTime) {\n\t\t\tthis.wallTime = remote.wallTime\n\t\t\tthis.logical = remote.logical + 1\n\t\t} else if (this.wallTime === remote.wallTime) {\n\t\t\tthis.logical = Math.max(this.logical, remote.logical) + 1\n\t\t} else {\n\t\t\t// this.wallTime > remote.wallTime && this.wallTime >= physicalTime\n\t\t\tthis.logical++\n\t\t}\n\n\t\tthis.checkDrift(physicalTime)\n\n\t\treturn { wallTime: this.wallTime, logical: this.logical, nodeId: this.nodeId }\n\t}\n\n\t/**\n\t * Compare two timestamps. Returns negative if a < b, positive if a > b, zero if equal.\n\t * Total order: wallTime first, then logical, then nodeId (lexicographic).\n\t */\n\tstatic compare(a: HLCTimestamp, b: HLCTimestamp): number {\n\t\tif (a.wallTime !== b.wallTime) return a.wallTime - b.wallTime\n\t\tif (a.logical !== b.logical) return a.logical - b.logical\n\t\tif (a.nodeId < b.nodeId) return -1\n\t\tif (a.nodeId > b.nodeId) return 1\n\t\treturn 0\n\t}\n\n\t/**\n\t * Serialize an HLC timestamp to a string that sorts lexicographically.\n\t * Format: zero-padded wallTime:logical:nodeId\n\t */\n\tstatic serialize(ts: HLCTimestamp): string {\n\t\tconst wall = ts.wallTime.toString().padStart(15, '0')\n\t\tconst log = ts.logical.toString().padStart(5, '0')\n\t\treturn `${wall}:${log}:${ts.nodeId}`\n\t}\n\n\t/**\n\t * Deserialize an HLC timestamp from its serialized string form.\n\t */\n\tstatic deserialize(s: string): HLCTimestamp {\n\t\tconst parts = s.split(':')\n\t\tif (parts.length < 3) {\n\t\t\tthrow new 
Error(`Invalid HLC timestamp string: \"${s}\"`)\n\t\t}\n\t\treturn {\n\t\t\twallTime: Number.parseInt(parts[0] ?? '0', 10),\n\t\t\tlogical: Number.parseInt(parts[1] ?? '0', 10),\n\t\t\t// nodeId may contain colons, so rejoin remaining parts\n\t\t\tnodeId: parts.slice(2).join(':'),\n\t\t}\n\t}\n\n\tprivate checkDrift(physicalTime: number): void {\n\t\tconst drift = this.wallTime - physicalTime\n\t\tif (drift > DRIFT_ERROR_MS) {\n\t\t\tthrow new ClockDriftError(this.wallTime, physicalTime)\n\t\t}\n\t\tif (drift > DRIFT_WARN_MS) {\n\t\t\tthis.onDriftWarning?.(drift)\n\t\t}\n\t}\n}\n","import { HybridLogicalClock } from '../clock/hlc'\nimport { OperationError } from '../errors/errors'\nimport type { HLCTimestamp, Operation, OperationInput } from '../types'\nimport { computeOperationId } from './content-hash'\n\n/**\n * Creates an immutable, content-addressed Operation from the given parameters.\n * The operation is deep-frozen after creation — it cannot be modified.\n *\n * @param input - The operation parameters (without id, which is computed)\n * @param clock - The HLC clock to generate the timestamp\n * @returns A frozen Operation with a content-addressed id\n *\n * @example\n * ```typescript\n * const op = await createOperation({\n * nodeId: 'device-1',\n * type: 'insert',\n * collection: 'todos',\n * recordId: 'rec-1',\n * data: { title: 'Ship it' },\n * previousData: null,\n * sequenceNumber: 1,\n * causalDeps: [],\n * schemaVersion: 1,\n * }, clock)\n * ```\n */\nexport async function createOperation(\n\tinput: OperationInput,\n\tclock: HybridLogicalClock,\n): Promise<Operation> {\n\tvalidateOperationParams(input)\n\n\tconst timestamp = clock.now()\n\tconst serializedTs = HybridLogicalClock.serialize(timestamp)\n\tconst id = await computeOperationId(input, serializedTs)\n\n\tconst operation: Operation = {\n\t\tid,\n\t\tnodeId: input.nodeId,\n\t\ttype: input.type,\n\t\tcollection: input.collection,\n\t\trecordId: input.recordId,\n\t\tdata: input.data ? 
{ ...input.data } : null,\n\t\tpreviousData: input.previousData ? { ...input.previousData } : null,\n\t\ttimestamp,\n\t\tsequenceNumber: input.sequenceNumber,\n\t\tcausalDeps: [...input.causalDeps],\n\t\tschemaVersion: input.schemaVersion,\n\t}\n\n\treturn deepFreeze(operation)\n}\n\n/**\n * Validates operation input parameters. Throws OperationError with\n * contextual information on validation failure.\n */\nexport function validateOperationParams(input: OperationInput): void {\n\tif (!input.nodeId || typeof input.nodeId !== 'string') {\n\t\tthrow new OperationError('nodeId is required and must be a non-empty string', {\n\t\t\treceived: input.nodeId,\n\t\t})\n\t}\n\n\tif (!input.type || !['insert', 'update', 'delete'].includes(input.type)) {\n\t\tthrow new OperationError('type must be \"insert\", \"update\", or \"delete\"', {\n\t\t\treceived: input.type,\n\t\t})\n\t}\n\n\tif (!input.collection || typeof input.collection !== 'string') {\n\t\tthrow new OperationError('collection is required and must be a non-empty string', {\n\t\t\treceived: input.collection,\n\t\t})\n\t}\n\n\tif (!input.recordId || typeof input.recordId !== 'string') {\n\t\tthrow new OperationError('recordId is required and must be a non-empty string', {\n\t\t\treceived: input.recordId,\n\t\t})\n\t}\n\n\tif (input.type === 'insert' && input.data === null) {\n\t\tthrow new OperationError('insert operations must include data', {\n\t\t\ttype: input.type,\n\t\t\tcollection: input.collection,\n\t\t})\n\t}\n\n\tif (input.type === 'update' && input.data === null) {\n\t\tthrow new OperationError('update operations must include data with changed fields', {\n\t\t\ttype: input.type,\n\t\t\tcollection: input.collection,\n\t\t})\n\t}\n\n\tif (input.type === 'update' && input.previousData === null) {\n\t\tthrow new OperationError(\n\t\t\t'update operations must include previousData for 3-way merge support',\n\t\t\t{\n\t\t\t\ttype: input.type,\n\t\t\t\tcollection: input.collection,\n\t\t\t},\n\t\t)\n\t}\n\n\tif 
(input.type === 'delete' && input.data !== null) {\n\t\tthrow new OperationError('delete operations must have null data', {\n\t\t\ttype: input.type,\n\t\t\tcollection: input.collection,\n\t\t})\n\t}\n\n\tif (typeof input.sequenceNumber !== 'number' || input.sequenceNumber < 0) {\n\t\tthrow new OperationError('sequenceNumber must be a non-negative number', {\n\t\t\treceived: input.sequenceNumber,\n\t\t})\n\t}\n\n\tif (!Array.isArray(input.causalDeps)) {\n\t\tthrow new OperationError('causalDeps must be an array of operation IDs', {\n\t\t\treceived: typeof input.causalDeps,\n\t\t})\n\t}\n\n\tif (typeof input.schemaVersion !== 'number' || input.schemaVersion < 1) {\n\t\tthrow new OperationError('schemaVersion must be a positive number', {\n\t\t\treceived: input.schemaVersion,\n\t\t})\n\t}\n}\n\n/**\n * Verify the integrity of an operation by recomputing its content hash.\n * Returns true if the id matches the recomputed hash.\n */\nexport async function verifyOperationIntegrity(op: Operation): Promise<boolean> {\n\tconst input: OperationInput = {\n\t\tnodeId: op.nodeId,\n\t\ttype: op.type,\n\t\tcollection: op.collection,\n\t\trecordId: op.recordId,\n\t\tdata: op.data,\n\t\tpreviousData: op.previousData,\n\t\tsequenceNumber: op.sequenceNumber,\n\t\tcausalDeps: op.causalDeps,\n\t\tschemaVersion: op.schemaVersion,\n\t}\n\tconst serializedTs = HybridLogicalClock.serialize(op.timestamp)\n\tconst expectedId = await computeOperationId(input, serializedTs)\n\treturn op.id === expectedId\n}\n\n/**\n * Type guard for Operation interface.\n */\nexport function isValidOperation(value: unknown): value is Operation {\n\tif (typeof value !== 'object' || value === null) return false\n\tconst op = value as Record<string, unknown>\n\treturn (\n\t\ttypeof op.id === 'string' &&\n\t\ttypeof op.nodeId === 'string' &&\n\t\t(op.type === 'insert' || op.type === 'update' || op.type === 'delete') &&\n\t\ttypeof op.collection === 'string' &&\n\t\ttypeof op.recordId === 'string' &&\n\t\ttypeof 
op.sequenceNumber === 'number' &&\n\t\tArray.isArray(op.causalDeps) &&\n\t\ttypeof op.schemaVersion === 'number' &&\n\t\ttypeof op.timestamp === 'object' &&\n\t\top.timestamp !== null\n\t)\n}\n\nfunction deepFreeze<T>(obj: T): T {\n\tif (typeof obj !== 'object' || obj === null) return obj\n\tObject.freeze(obj)\n\tfor (const value of Object.values(obj)) {\n\t\tif (typeof value === 'object' && value !== null && !Object.isFrozen(value)) {\n\t\t\tdeepFreeze(value)\n\t\t}\n\t}\n\treturn obj\n}\n","import { HybridLogicalClock } from '../clock/hlc'\nimport { OperationError } from '../errors/errors'\nimport type { Operation } from '../types'\n\n/**\n * Topological sort of operations based on their causal dependency DAG.\n * Uses Kahn's algorithm with deterministic tie-breaking via HLC timestamp.\n * Time complexity: O(V + E) where V = operations, E = causal dependency edges.\n *\n * @param operations - The operations to sort\n * @returns Operations in causal order (dependencies before dependents)\n * @throws {OperationError} If a cycle is detected in the dependency graph\n */\nexport function topologicalSort(operations: Operation[]): Operation[] {\n\tif (operations.length <= 1) return [...operations]\n\n\t// Build adjacency list and in-degree map\n\tconst opMap = new Map<string, Operation>()\n\tfor (const op of operations) {\n\t\topMap.set(op.id, op)\n\t}\n\n\t// Only count edges where both ends are in the operation set\n\tconst inDegree = new Map<string, number>()\n\tconst dependents = new Map<string, string[]>()\n\n\tfor (const op of operations) {\n\t\tif (!inDegree.has(op.id)) {\n\t\t\tinDegree.set(op.id, 0)\n\t\t}\n\t\tif (!dependents.has(op.id)) {\n\t\t\tdependents.set(op.id, [])\n\t\t}\n\n\t\tfor (const depId of op.causalDeps) {\n\t\t\tif (opMap.has(depId)) {\n\t\t\t\t// depId -> op.id edge (depId must come before op.id)\n\t\t\t\tinDegree.set(op.id, (inDegree.get(op.id) ?? 
0) + 1)\n\t\t\t\tconst deps = dependents.get(depId)\n\t\t\t\tif (deps) {\n\t\t\t\t\tdeps.push(op.id)\n\t\t\t\t} else {\n\t\t\t\t\tdependents.set(depId, [op.id])\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Initialize queue with nodes that have no in-set dependencies\n\t// Use a sorted array for deterministic ordering (by HLC timestamp)\n\tconst queue: Operation[] = []\n\tfor (const op of operations) {\n\t\tif ((inDegree.get(op.id) ?? 0) === 0) {\n\t\t\tqueue.push(op)\n\t\t}\n\t}\n\tqueue.sort(compareByTimestamp)\n\n\tconst result: Operation[] = []\n\n\twhile (queue.length > 0) {\n\t\t// Take the earliest operation (deterministic tie-breaking by HLC)\n\t\tconst current = queue.shift()\n\t\tif (!current) break\n\t\tresult.push(current)\n\n\t\tconst deps = dependents.get(current.id) ?? []\n\t\tconst newlyReady: Operation[] = []\n\n\t\tfor (const depId of deps) {\n\t\t\tconst deg = (inDegree.get(depId) ?? 0) - 1\n\t\t\tinDegree.set(depId, deg)\n\t\t\tif (deg === 0) {\n\t\t\t\tconst op = opMap.get(depId)\n\t\t\t\tif (op) newlyReady.push(op)\n\t\t\t}\n\t\t}\n\n\t\t// Sort newly ready operations and merge into queue maintaining sort order\n\t\tif (newlyReady.length > 0) {\n\t\t\tnewlyReady.sort(compareByTimestamp)\n\t\t\tmergeIntoSorted(queue, newlyReady)\n\t\t}\n\t}\n\n\tif (result.length !== operations.length) {\n\t\tthrow new OperationError(\n\t\t\t`Cycle detected in operation dependency graph. Sorted ${result.length} of ${operations.length} operations.`,\n\t\t\t{\n\t\t\t\tsortedCount: result.length,\n\t\t\t\ttotalCount: operations.length,\n\t\t\t},\n\t\t)\n\t}\n\n\treturn result\n}\n\nfunction compareByTimestamp(a: Operation, b: Operation): number {\n\treturn HybridLogicalClock.compare(a.timestamp, b.timestamp)\n}\n\n/** Merge sorted `items` into an already-sorted `target` array, maintaining sort order. 
*/\nfunction mergeIntoSorted(target: Operation[], items: Operation[]): void {\n\tlet insertIndex = 0\n\tfor (const item of items) {\n\t\twhile (insertIndex < target.length) {\n\t\t\tconst existing = target[insertIndex]\n\t\t\tif (existing && compareByTimestamp(item, existing) <= 0) break\n\t\t\tinsertIndex++\n\t\t}\n\t\ttarget.splice(insertIndex, 0, item)\n\t\tinsertIndex++\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACUA,eAAsB,mBACrB,OACA,WACkB;AAClB,QAAM,YAAY,aAAa;AAAA,IAC9B,MAAM,MAAM;AAAA,IACZ,YAAY,MAAM;AAAA,IAClB,UAAU,MAAM;AAAA,IAChB,MAAM,MAAM;AAAA,IACZ;AAAA,IACA,QAAQ,MAAM;AAAA,EACf,CAAC;AACD,QAAM,UAAU,IAAI,YAAY,EAAE,OAAO,SAAS;AAClD,QAAM,aAAa,MAAM,WAAW,OAAO,OAAO,OAAO,WAAW,OAAO;AAC3E,SAAO,YAAY,UAAU;AAC9B;AASO,SAAS,aAAa,KAAsB;AAClD,MAAI,QAAQ,QAAQ,QAAQ,QAAW;AACtC,WAAO,KAAK,UAAU,GAAG;AAAA,EAC1B;AAEA,MAAI,OAAO,QAAQ,UAAU;AAC5B,WAAO,KAAK,UAAU,GAAG;AAAA,EAC1B;AAEA,MAAI,MAAM,QAAQ,GAAG,GAAG;AACvB,UAAM,QAAQ,IAAI,IAAI,CAAC,SAAS,aAAa,IAAI,CAAC;AAClD,WAAO,IAAI,MAAM,KAAK,GAAG,CAAC;AAAA,EAC3B;AAEA,QAAM,OAAO,OAAO,KAAK,GAA8B,EAAE,KAAK;AAC9D,QAAM,QAAQ,KAAK,IAAI,CAAC,QAAQ;AAC/B,UAAM,QAAS,IAAgC,GAAG;AAClD,WAAO,GAAG,KAAK,UAAU,GAAG,CAAC,IAAI,aAAa,KAAK,CAAC;AAAA,EACrD,CAAC;AACD,SAAO,IAAI,MAAM,KAAK,GAAG,CAAC;AAC3B;AAEA,SAAS,YAAY,QAA6B;AACjD,QAAM,QAAQ,IAAI,WAAW,MAAM;AACnC,SAAO,MAAM,KAAK,OAAO,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE,KAAK,EAAE;AACzE;;;ACzCO,IAAM,qBAAN,MAAqD;AAAA,EACnD,YAAY,oBAAI,IAA8B;AAAA,EAEtD,GAA4B,MAAS,UAA4C;AAChF,QAAI,MAAM,KAAK,UAAU,IAAI,IAAI;AACjC,QAAI,CAAC,KAAK;AACT,YAAM,oBAAI,IAAI;AACd,WAAK,UAAU,IAAI,MAAM,GAAG;AAAA,IAC7B;AACA,QAAI,IAAI,QAAuB;AAE/B,WAAO,MAAM;AACZ,WAAK,IAAI,MAAM,QAAQ;AAAA,IACxB;AAAA,EACD;AAAA,EAEA,IAA6B,MAAS,UAAsC;AAC3E,UAAM,MAAM,KAAK,UAAU,IAAI,IAAI;AACnC,QAAI,KAAK;AACR,UAAI,OAAO,QAAuB;AAClC,UAAI,IAAI,SAAS,GAAG;AACnB,aAAK,UAAU,OAAO,IAAI;AAAA,MAC3B;AAAA,IACD;AAAA,EACD;AAAA,EAEA,KAA8B,OAAiC;AAC9D,UAAM,MAAM,KAAK,UAAU,IAAI,MAAM,IAAI;AACzC,QAAI,CAAC,IAAK;AACV,eAAW,YAAY,KAAK;AAC3B;AAAC,MAAC,SAAiD
,KAAK;AAAA,IACzD;AAAA,EACD;AAAA;AAAA,EAGA,QAAc;AACb,SAAK,UAAU,MAAM;AAAA,EACtB;AAAA;AAAA,EAGA,cAAc,MAA6B;AAC1C,WAAO,KAAK,UAAU,IAAI,IAAI,GAAG,QAAQ;AAAA,EAC1C;AACD;;;ACzDO,IAAM,YAAN,cAAwB,MAAM;AAAA,EACpC,YACC,SACgB,MACA,SACf;AACD,UAAM,OAAO;AAHG;AACA;AAGhB,SAAK,OAAO;AAAA,EACb;AAAA,EALiB;AAAA,EACA;AAKlB;AAeO,IAAM,iBAAN,cAA6B,UAAU;AAAA,EAC7C,YAAY,SAAiB,SAAmC;AAC/D,UAAM,SAAS,mBAAmB,OAAO;AACzC,SAAK,OAAO;AAAA,EACb;AACD;AA4CO,IAAM,kBAAN,cAA8B,UAAU;AAAA,EAC9C,YACiB,gBACA,cACf;AACD,UAAM,eAAe,KAAK,OAAO,iBAAiB,gBAAgB,GAAI;AACtE;AAAA,MACC,kBAAkB,YAAY;AAAA,MAC9B;AAAA,MACA,EAAE,gBAAgB,cAAc,aAAa;AAAA,IAC9C;AARgB;AACA;AAQhB,SAAK,OAAO;AAAA,EACb;AAAA,EAViB;AAAA,EACA;AAUlB;;;ACtFA,IAAM,mBAA+B,EAAE,KAAK,MAAM,KAAK,IAAI,EAAE;AAG7D,IAAM,gBAAgB;AAGtB,IAAM,iBAAiB,IAAI;AAgBpB,IAAM,qBAAN,MAAyB;AAAA,EAI/B,YACkB,QACA,aAAyB,kBACzB,gBAChB;AAHgB;AACA;AACA;AAAA,EACf;AAAA,EAHe;AAAA,EACA;AAAA,EACA;AAAA,EANV,WAAW;AAAA,EACX,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAclB,MAAoB;AACnB,UAAM,eAAe,KAAK,WAAW,IAAI;AACzC,SAAK,WAAW,YAAY;AAE5B,QAAI,eAAe,KAAK,UAAU;AACjC,WAAK,WAAW;AAChB,WAAK,UAAU;AAAA,IAChB,OAAO;AACN,WAAK;AAAA,IACN;AAEA,WAAO,EAAE,UAAU,KAAK,UAAU,SAAS,KAAK,SAAS,QAAQ,KAAK,OAAO;AAAA,EAC9E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,QAAQ,QAAoC;AAC3C,UAAM,eAAe,KAAK,WAAW,IAAI;AAEzC,QAAI,eAAe,KAAK,YAAY,eAAe,OAAO,UAAU;AACnE,WAAK,WAAW;AAChB,WAAK,UAAU;AAAA,IAChB,WAAW,OAAO,WAAW,KAAK,UAAU;AAC3C,WAAK,WAAW,OAAO;AACvB,WAAK,UAAU,OAAO,UAAU;AAAA,IACjC,WAAW,KAAK,aAAa,OAAO,UAAU;AAC7C,WAAK,UAAU,KAAK,IAAI,KAAK,SAAS,OAAO,OAAO,IAAI;AAAA,IACzD,OAAO;AAEN,WAAK;AAAA,IACN;AAEA,SAAK,WAAW,YAAY;AAE5B,WAAO,EAAE,UAAU,KAAK,UAAU,SAAS,KAAK,SAAS,QAAQ,KAAK,OAAO;AAAA,EAC9E;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,QAAQ,GAAiB,GAAyB;AACxD,QAAI,EAAE,aAAa,EAAE,SAAU,QAAO,EAAE,WAAW,EAAE;AACrD,QAAI,EAAE,YAAY,EAAE,QAAS,QAAO,EAAE,UAAU,EAAE;AAClD,QAAI,EAAE,SAAS,EAAE,OAAQ,QAAO;AAChC,QAAI,EAAE,SAAS,EAAE,OAAQ,QAAO;AAChC,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,UAAU,IAA0B;AAC1C,UAAM,OAAO,GAAG,SAAS,SAAS,EAAE,SAAS,IAAI,GAAG;AACpD,UAAM,MAAM,GAAG,QAAQ,SAAS,EAAE,
SAAS,GAAG,GAAG;AACjD,WAAO,GAAG,IAAI,IAAI,GAAG,IAAI,GAAG,MAAM;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,YAAY,GAAyB;AAC3C,UAAM,QAAQ,EAAE,MAAM,GAAG;AACzB,QAAI,MAAM,SAAS,GAAG;AACrB,YAAM,IAAI,MAAM,kCAAkC,CAAC,GAAG;AAAA,IACvD;AACA,WAAO;AAAA,MACN,UAAU,OAAO,SAAS,MAAM,CAAC,KAAK,KAAK,EAAE;AAAA,MAC7C,SAAS,OAAO,SAAS,MAAM,CAAC,KAAK,KAAK,EAAE;AAAA;AAAA,MAE5C,QAAQ,MAAM,MAAM,CAAC,EAAE,KAAK,GAAG;AAAA,IAChC;AAAA,EACD;AAAA,EAEQ,WAAW,cAA4B;AAC9C,UAAM,QAAQ,KAAK,WAAW;AAC9B,QAAI,QAAQ,gBAAgB;AAC3B,YAAM,IAAI,gBAAgB,KAAK,UAAU,YAAY;AAAA,IACtD;AACA,QAAI,QAAQ,eAAe;AAC1B,WAAK,iBAAiB,KAAK;AAAA,IAC5B;AAAA,EACD;AACD;;;ACvEO,SAAS,wBAAwB,OAA6B;AACpE,MAAI,CAAC,MAAM,UAAU,OAAO,MAAM,WAAW,UAAU;AACtD,UAAM,IAAI,eAAe,qDAAqD;AAAA,MAC7E,UAAU,MAAM;AAAA,IACjB,CAAC;AAAA,EACF;AAEA,MAAI,CAAC,MAAM,QAAQ,CAAC,CAAC,UAAU,UAAU,QAAQ,EAAE,SAAS,MAAM,IAAI,GAAG;AACxE,UAAM,IAAI,eAAe,gDAAgD;AAAA,MACxE,UAAU,MAAM;AAAA,IACjB,CAAC;AAAA,EACF;AAEA,MAAI,CAAC,MAAM,cAAc,OAAO,MAAM,eAAe,UAAU;AAC9D,UAAM,IAAI,eAAe,yDAAyD;AAAA,MACjF,UAAU,MAAM;AAAA,IACjB,CAAC;AAAA,EACF;AAEA,MAAI,CAAC,MAAM,YAAY,OAAO,MAAM,aAAa,UAAU;AAC1D,UAAM,IAAI,eAAe,uDAAuD;AAAA,MAC/E,UAAU,MAAM;AAAA,IACjB,CAAC;AAAA,EACF;AAEA,MAAI,MAAM,SAAS,YAAY,MAAM,SAAS,MAAM;AACnD,UAAM,IAAI,eAAe,uCAAuC;AAAA,MAC/D,MAAM,MAAM;AAAA,MACZ,YAAY,MAAM;AAAA,IACnB,CAAC;AAAA,EACF;AAEA,MAAI,MAAM,SAAS,YAAY,MAAM,SAAS,MAAM;AACnD,UAAM,IAAI,eAAe,2DAA2D;AAAA,MACnF,MAAM,MAAM;AAAA,MACZ,YAAY,MAAM;AAAA,IACnB,CAAC;AAAA,EACF;AAEA,MAAI,MAAM,SAAS,YAAY,MAAM,iBAAiB,MAAM;AAC3D,UAAM,IAAI;AAAA,MACT;AAAA,MACA;AAAA,QACC,MAAM,MAAM;AAAA,QACZ,YAAY,MAAM;AAAA,MACnB;AAAA,IACD;AAAA,EACD;AAEA,MAAI,MAAM,SAAS,YAAY,MAAM,SAAS,MAAM;AACnD,UAAM,IAAI,eAAe,yCAAyC;AAAA,MACjE,MAAM,MAAM;AAAA,MACZ,YAAY,MAAM;AAAA,IACnB,CAAC;AAAA,EACF;AAEA,MAAI,OAAO,MAAM,mBAAmB,YAAY,MAAM,iBAAiB,GAAG;AACzE,UAAM,IAAI,eAAe,gDAAgD;AAAA,MACxE,UAAU,MAAM;AAAA,IACjB,CAAC;AAAA,EACF;AAEA,MAAI,CAAC,MAAM,QAAQ,MAAM,UAAU,GAAG;AACrC,UAAM,IAAI,eAAe,gDAAgD;AAAA,MACxE,UAAU,OAAO,MAAM;AAAA,IACxB,CAAC;AAAA,EACF;AAEA,MAAI,OAAO,MAAM,kBAAkB,YAAY,MAAM,gBAAgB,GAAG;AACvE,UAAM,IAAI,eAAe,2CAA2
C;AAAA,MACnE,UAAU,MAAM;AAAA,IACjB,CAAC;AAAA,EACF;AACD;;;ACvHO,SAAS,gBAAgB,YAAsC;AACrE,MAAI,WAAW,UAAU,EAAG,QAAO,CAAC,GAAG,UAAU;AAGjD,QAAM,QAAQ,oBAAI,IAAuB;AACzC,aAAW,MAAM,YAAY;AAC5B,UAAM,IAAI,GAAG,IAAI,EAAE;AAAA,EACpB;AAGA,QAAM,WAAW,oBAAI,IAAoB;AACzC,QAAM,aAAa,oBAAI,IAAsB;AAE7C,aAAW,MAAM,YAAY;AAC5B,QAAI,CAAC,SAAS,IAAI,GAAG,EAAE,GAAG;AACzB,eAAS,IAAI,GAAG,IAAI,CAAC;AAAA,IACtB;AACA,QAAI,CAAC,WAAW,IAAI,GAAG,EAAE,GAAG;AAC3B,iBAAW,IAAI,GAAG,IAAI,CAAC,CAAC;AAAA,IACzB;AAEA,eAAW,SAAS,GAAG,YAAY;AAClC,UAAI,MAAM,IAAI,KAAK,GAAG;AAErB,iBAAS,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,EAAE,KAAK,KAAK,CAAC;AAClD,cAAM,OAAO,WAAW,IAAI,KAAK;AACjC,YAAI,MAAM;AACT,eAAK,KAAK,GAAG,EAAE;AAAA,QAChB,OAAO;AACN,qBAAW,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;AAAA,QAC9B;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAIA,QAAM,QAAqB,CAAC;AAC5B,aAAW,MAAM,YAAY;AAC5B,SAAK,SAAS,IAAI,GAAG,EAAE,KAAK,OAAO,GAAG;AACrC,YAAM,KAAK,EAAE;AAAA,IACd;AAAA,EACD;AACA,QAAM,KAAK,kBAAkB;AAE7B,QAAM,SAAsB,CAAC;AAE7B,SAAO,MAAM,SAAS,GAAG;AAExB,UAAM,UAAU,MAAM,MAAM;AAC5B,QAAI,CAAC,QAAS;AACd,WAAO,KAAK,OAAO;AAEnB,UAAM,OAAO,WAAW,IAAI,QAAQ,EAAE,KAAK,CAAC;AAC5C,UAAM,aAA0B,CAAC;AAEjC,eAAW,SAAS,MAAM;AACzB,YAAM,OAAO,SAAS,IAAI,KAAK,KAAK,KAAK;AACzC,eAAS,IAAI,OAAO,GAAG;AACvB,UAAI,QAAQ,GAAG;AACd,cAAM,KAAK,MAAM,IAAI,KAAK;AAC1B,YAAI,GAAI,YAAW,KAAK,EAAE;AAAA,MAC3B;AAAA,IACD;AAGA,QAAI,WAAW,SAAS,GAAG;AAC1B,iBAAW,KAAK,kBAAkB;AAClC,sBAAgB,OAAO,UAAU;AAAA,IAClC;AAAA,EACD;AAEA,MAAI,OAAO,WAAW,WAAW,QAAQ;AACxC,UAAM,IAAI;AAAA,MACT,wDAAwD,OAAO,MAAM,OAAO,WAAW,MAAM;AAAA,MAC7F;AAAA,QACC,aAAa,OAAO;AAAA,QACpB,YAAY,WAAW;AAAA,MACxB;AAAA,IACD;AAAA,EACD;AAEA,SAAO;AACR;AAEA,SAAS,mBAAmB,GAAc,GAAsB;AAC/D,SAAO,mBAAmB,QAAQ,EAAE,WAAW,EAAE,SAAS;AAC3D;AAGA,SAAS,gBAAgB,QAAqB,OAA0B;AACvE,MAAI,cAAc;AAClB,aAAW,QAAQ,OAAO;AACzB,WAAO,cAAc,OAAO,QAAQ;AACnC,YAAM,WAAW,OAAO,WAAW;AACnC,UAAI,YAAY,mBAAmB,MAAM,QAAQ,KAAK,EAAG;AACzD;AAAA,IACD;AACA,WAAO,OAAO,aAAa,GAAG,IAAI;AAClC;AAAA,EACD;AACD;","names":[]}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { p as OperationInput, j as KoraEventEmitter, l as KoraEventType, k as KoraEventListener, i as KoraEventByType, d as Operation } from './events-D_kDPDC9.cjs';
|
|
2
|
+
export { t as validateOperationParams } from './events-D_kDPDC9.cjs';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Compute the content-addressed ID for an operation using SHA-256.
|
|
6
|
+
* The same operation content always produces the same hash, ensuring deduplication.
|
|
7
|
+
*
|
|
8
|
+
* @param input - The operation input (without id/timestamp, which are assigned separately)
|
|
9
|
+
* @param timestamp - The HLC timestamp serialized as a string
|
|
10
|
+
* @returns A hex-encoded SHA-256 hash
|
|
11
|
+
*/
|
|
12
|
+
declare function computeOperationId(input: OperationInput, timestamp: string): Promise<string>;
|
|
13
|
+
/**
|
|
14
|
+
* Deterministic JSON serialization with sorted keys.
|
|
15
|
+
* Ensures identical objects always produce identical strings regardless of property insertion order.
|
|
16
|
+
*
|
|
17
|
+
* @param obj - The value to serialize
|
|
18
|
+
* @returns A deterministic JSON string
|
|
19
|
+
*/
|
|
20
|
+
declare function canonicalize(obj: unknown): string;
|
|
21
|
+
|
|
22
|
+
/**
|
|
23
|
+
* Concrete implementation of KoraEventEmitter.
|
|
24
|
+
* Simple, synchronous event emitter for internal use across @kora packages.
|
|
25
|
+
*
|
|
26
|
+
* @example
|
|
27
|
+
* ```typescript
|
|
28
|
+
* const emitter = new SimpleEventEmitter()
|
|
29
|
+
* const unsub = emitter.on('operation:created', (event) => {
|
|
30
|
+
* console.log(event.operation.id)
|
|
31
|
+
* })
|
|
32
|
+
* emitter.emit({ type: 'operation:created', operation: someOp })
|
|
33
|
+
* unsub() // unsubscribe
|
|
34
|
+
* ```
|
|
35
|
+
*/
|
|
36
|
+
declare class SimpleEventEmitter implements KoraEventEmitter {
|
|
37
|
+
private listeners;
|
|
38
|
+
on<T extends KoraEventType>(type: T, listener: KoraEventListener<T>): () => void;
|
|
39
|
+
off<T extends KoraEventType>(type: T, listener: KoraEventListener<T>): void;
|
|
40
|
+
emit<T extends KoraEventType>(event: KoraEventByType<T>): void;
|
|
41
|
+
/** Remove all listeners for all event types. */
|
|
42
|
+
clear(): void;
|
|
43
|
+
/** Get the number of listeners for a specific event type. */
|
|
44
|
+
listenerCount(type: KoraEventType): number;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* Topological sort of operations based on their causal dependency DAG.
|
|
49
|
+
* Uses Kahn's algorithm with deterministic tie-breaking via HLC timestamp.
|
|
50
|
+
* Time complexity: O(V + E) where V = operations, E = causal dependency edges.
|
|
51
|
+
*
|
|
52
|
+
* @param operations - The operations to sort
|
|
53
|
+
* @returns Operations in causal order (dependencies before dependents)
|
|
54
|
+
* @throws {OperationError} If a cycle is detected in the dependency graph
|
|
55
|
+
*/
|
|
56
|
+
declare function topologicalSort(operations: Operation[]): Operation[];
|
|
57
|
+
|
|
58
|
+
export { SimpleEventEmitter, canonicalize, computeOperationId, topologicalSort };
|