@byearlybird/starling 0.12.0 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +274 -0
- package/dist/index.d.ts +73 -3
- package/dist/index.js +291 -410
- package/dist/index.js.map +1 -0
- package/package.json +36 -44
- package/dist/core-UUzgRHaU.js +0 -420
- package/dist/core.d.ts +0 -2
- package/dist/core.js +0 -3
- package/dist/db-DY3UcmfV.d.ts +0 -199
- package/dist/index-BIpu-1zO.d.ts +0 -265
- package/dist/plugin-http.d.ts +0 -139
- package/dist/plugin-http.js +0 -191
- package/dist/plugin-idb.d.ts +0 -59
- package/dist/plugin-idb.js +0 -169
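Read together, the summary says 0.13.0 collapses the build to a single entry point (dist/index.js plus its map and type declarations) and drops the separate core, plugin-http, and plugin-idb bundles. A minimal migration sketch, assuming the 0.12.0 subpath exports mapped onto those deleted dist files (the old exports map is not visible in this diff):

// Before (0.12.0), assumed subpath entry points matching the deleted dist files:
//   import { createMap, createClock } from "@byearlybird/starling/core";
//   import { /* ... */ } from "@byearlybird/starling/plugin-idb";
// After (0.13.0), only the root entry point ships:
import { createStore } from "@byearlybird/starling";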
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"index.js","names":["result: Record<string, R>","result: Record<string, unknown>","current: any","result: Document","result: Tombstones","mergeCollections","mergedDocuments: Record<DocumentId, Document>","mergeCollections","$state: ClockAtom","collectionSnapshotAtoms: ReadableAtom<Collection>[]","collectionsSnapshot: Record<string, Collection>"],"sources":["../lib/store/schema.ts","../lib/core/hex.ts","../lib/core/clock.ts","../lib/core/flatten.ts","../lib/core/document.ts","../lib/core/tombstone.ts","../lib/core/collection.ts","../lib/store/collection.ts","../lib/store/clock.ts","../lib/store/store.ts"],"sourcesContent":["import type { StandardSchemaV1 } from \"@standard-schema/spec\";\n\nexport function validate<T extends StandardSchemaV1>(\n schema: T,\n input: StandardSchemaV1.InferInput<T>,\n): StandardSchemaV1.InferOutput<T> {\n const result = schema[\"~standard\"].validate(input);\n if (result instanceof Promise) {\n throw new TypeError(\"Schema validation must be synchronous\");\n }\n\n if (result.issues) {\n throw new Error(JSON.stringify(result.issues, null, 2));\n }\n\n return result.value;\n}\n\n/**\n * Base type constraint for any standard schema object\n */\nexport type AnyObject = StandardSchemaV1<Record<string, any>>;\n\nexport type SchemaWithId<T extends AnyObject> =\n StandardSchemaV1.InferOutput<T> extends {\n id: any;\n }\n ? T\n : never;\n\nexport type Output<T extends AnyObject> = StandardSchemaV1.InferOutput<T>;\n\nexport type Input<T extends AnyObject> = StandardSchemaV1.InferInput<T>;\n","export function toHex(value: number, padLength: number): string {\n return value.toString(16).padStart(padLength, \"0\");\n}\n\nexport function nonce(length: number): string {\n const bytes = new Uint8Array(length / 2);\n crypto.getRandomValues(bytes);\n return Array.from(bytes)\n .map((b) => toHex(b, 2))\n .join(\"\");\n}\n","import { nonce, toHex } from \"./hex\";\n\nconst MS_LENGTH = 12;\nconst SEQ_LENGTH = 6;\nconst NONCE_LENGTH = 6;\n\nexport type Clock = {\n ms: number;\n seq: number;\n};\n\nexport function advanceClock(current: Clock, next: Clock): Clock {\n if (next.ms > current.ms) {\n return { ms: next.ms, seq: next.seq };\n } else if (next.ms === current.ms) {\n return { ms: current.ms, seq: Math.max(current.seq, next.seq) + 1 };\n } else {\n return { ms: current.ms, seq: current.seq + 1 };\n }\n}\n\nexport function makeStamp(ms: number, seq: number): string {\n return `${toHex(ms, MS_LENGTH)}${toHex(seq, SEQ_LENGTH)}${nonce(NONCE_LENGTH)}`;\n}\n\nexport function parseStamp(stamp: string): { ms: number; seq: number } {\n return {\n ms: parseInt(stamp.slice(0, MS_LENGTH), 16),\n seq: parseInt(stamp.slice(MS_LENGTH, MS_LENGTH + SEQ_LENGTH), 16),\n };\n}\n","/**\n * Flattens a nested object into a flat object with dot-notation keys\n * @param obj - The object to flatten\n * @param mapper - Optional callback to transform leaf values\n * @returns A flattened object with dot-notation keys\n */\nexport function flatten<T, R = unknown>(\n obj: T,\n mapper?: (value: unknown, path: string) => R,\n): Record<string, R> {\n const result: Record<string, R> = {};\n\n const addLeaf = (value: unknown, path: string) => {\n if (path) {\n result[path] = mapper ? mapper(value, path) : (value as R);\n }\n };\n\n function traverse(current: unknown, prefix: string = \"\"): void {\n if (!shouldTraverse(current)) {\n addLeaf(current, prefix);\n return;\n }\n\n for (const [key, value] of Object.entries(current)) {\n const newPath = prefix ? 
`${prefix}.${key}` : key;\n traverse(value, newPath);\n }\n }\n\n traverse(obj);\n return result;\n}\n\n/**\n * Unflattens a flat object with dot-notation keys into a nested object\n * @param obj - The flattened object to unflatten\n * @param mapper - Optional callback to transform leaf values before placing them\n * @returns A nested object\n */\nexport function unflatten<T = unknown, R = unknown>(\n obj: Record<string, T>,\n mapper?: (value: T, path: string) => R,\n): Record<string, unknown> {\n const result: Record<string, unknown> = {};\n\n for (const [path, value] of Object.entries(obj)) {\n const keys = path.split(\".\");\n const mappedValue = mapper ? mapper(value, path) : value;\n\n let current: any = result;\n for (let i = 0; i < keys.length - 1; i++) {\n const key = keys[i]!;\n if (!(key in current)) {\n current[key] = {};\n }\n current = current[key];\n }\n\n const finalKey = keys[keys.length - 1]!;\n current[finalKey] = mappedValue;\n }\n\n return result;\n}\n\nfunction isPlainObject(value: unknown): value is Record<string, unknown> {\n return (\n typeof value === \"object\" &&\n value !== null &&\n !Array.isArray(value) &&\n (value.constructor === Object || Object.getPrototypeOf(value) === null)\n );\n}\n\nfunction shouldTraverse(value: unknown): value is Record<string, unknown> {\n return isPlainObject(value) && Object.keys(value).length > 0;\n}\n","import { flatten, unflatten } from \"./flatten\";\n\ntype Field<T = unknown> = {\n \"~value\": T;\n \"~stamp\": string;\n};\n\nexport type Document = Record<string, Field>;\n\nexport function makeDocument(\n fields: Record<string, any>,\n stamp: string,\n): Document {\n return flatten(fields, (value) => ({ \"~value\": value, \"~stamp\": stamp }));\n}\n\nexport function parseDocument(document: Document): Record<string, any> {\n return unflatten(document, (field) => field[\"~value\"]);\n}\n\nexport function mergeDocuments(target: Document, source: Document): Document {\n const result: Document = {};\n const keys = new Set([...Object.keys(target), ...Object.keys(source)]);\n\n for (const key of keys) {\n const targetValue = target[key];\n const sourceValue = source[key];\n\n if (targetValue && sourceValue) {\n result[key] =\n targetValue[\"~stamp\"] > sourceValue[\"~stamp\"]\n ? targetValue\n : sourceValue;\n } else if (targetValue) {\n result[key] = targetValue;\n } else if (sourceValue) {\n result[key] = sourceValue;\n } else {\n throw new Error(`Key ${key} not found in either document`);\n }\n }\n\n return result;\n}\n","export type Tombstones = Record<string, string>;\n\nexport function mergeTombstones(\n target: Tombstones,\n source: Tombstones,\n): Tombstones {\n const result: Tombstones = {};\n const keys = new Set([...Object.keys(target), ...Object.keys(source)]);\n\n for (const key of keys) {\n const targetStamp = target[key];\n const sourceStamp = source[key];\n\n if (targetStamp && sourceStamp) {\n result[key] = targetStamp > sourceStamp ? 
targetStamp : sourceStamp;\n } else if (targetStamp) {\n result[key] = targetStamp;\n } else if (sourceStamp) {\n result[key] = sourceStamp;\n }\n }\n\n return result;\n}\n","import type { Document } from \"./document\";\nimport type { Tombstones } from \"./tombstone\";\nimport { mergeDocuments } from \"./document\";\nimport { mergeTombstones } from \"./tombstone\";\n\nexport type DocumentId = string;\n\nexport type Collection = {\n documents: Record<DocumentId, Document>;\n tombstones: Tombstones;\n};\n\nexport function mergeCollections(\n target: Collection,\n source: Collection,\n): Collection {\n const mergedTombstones = mergeTombstones(\n target.tombstones,\n source.tombstones,\n );\n\n const mergedDocuments: Record<DocumentId, Document> = {};\n const allDocumentIds = new Set([\n ...Object.keys(target.documents),\n ...Object.keys(source.documents),\n ]);\n\n for (const id of allDocumentIds) {\n const targetDoc = target.documents[id];\n const sourceDoc = source.documents[id];\n\n if (mergedTombstones[id]) {\n continue;\n }\n\n if (targetDoc && sourceDoc) {\n mergedDocuments[id] = mergeDocuments(targetDoc, sourceDoc);\n } else if (targetDoc) {\n mergedDocuments[id] = targetDoc;\n } else if (sourceDoc) {\n mergedDocuments[id] = sourceDoc;\n }\n }\n\n return {\n documents: mergedDocuments,\n tombstones: mergedTombstones,\n };\n}\nexport function mergeCollectionRecords(\n target: Record<string, Collection>,\n source: Record<string, Collection>,\n): Record<string, Collection> {\n const result: Record<string, Collection> = { ...target };\n\n for (const [collectionName, sourceCollection] of Object.entries(source)) {\n const targetCollection = result[collectionName];\n if (targetCollection) {\n result[collectionName] = mergeCollections(\n targetCollection,\n sourceCollection,\n );\n } else {\n result[collectionName] = sourceCollection;\n }\n }\n\n return result;\n}\n","import { batched, map, type ReadableAtom } from \"nanostores\";\nimport { validate } from \"./schema\";\nimport {\n makeDocument,\n parseDocument,\n mergeDocuments,\n mergeCollections,\n type Collection,\n type DocumentId,\n} from \"../core\";\nimport type { AnyObject, SchemaWithId, Output, Input } from \"./schema\";\nimport type { ClockAPI } from \"./clock\";\n\nexport type CollectionConfig<T extends AnyObject> =\n | {\n schema: T;\n getId: (data: Output<T>) => DocumentId;\n }\n | {\n schema: SchemaWithId<T>;\n };\n\nexport type CollectionApi<T extends AnyObject> = {\n $data: ReadableAtom<ReadonlyMap<DocumentId, Output<T>>>;\n $snapshot: ReadableAtom<Collection>;\n add(data: Input<T>): void;\n remove(id: DocumentId): void;\n update(id: DocumentId, document: Partial<Input<T>>): void;\n merge(snapshot: Collection): void;\n} & Pick<\n ReadonlyMap<DocumentId, Output<T>>,\n \"get\" | \"has\" | \"keys\" | \"values\" | \"entries\" | \"forEach\" | \"size\"\n>;\n\ntype TickFunction = () => string;\n\nexport function addDocument<T extends AnyObject>(\n $documents: ReturnType<typeof map<Collection[\"documents\"]>>,\n config: CollectionConfig<T>,\n tick: TickFunction,\n data: Input<T>,\n): void {\n const getId = defineGetId(config);\n const valid = validate(config.schema, data);\n const doc = makeDocument(valid, tick());\n const id = getId(valid);\n $documents.setKey(id, doc);\n}\n\nexport function removeDocument(\n $documents: ReturnType<typeof map<Collection[\"documents\"]>>,\n $tombstones: ReturnType<typeof map<Collection[\"tombstones\"]>>,\n tick: TickFunction,\n id: DocumentId,\n): void {\n $tombstones.setKey(id, tick());\n 
$documents.setKey(id, undefined);\n}\n\nexport function updateDocument<T extends AnyObject>(\n $documents: ReturnType<typeof map<Collection[\"documents\"]>>,\n config: CollectionConfig<T>,\n tick: TickFunction,\n id: DocumentId,\n document: Partial<Input<T>>,\n): void {\n const current = $documents.get()[id];\n if (!current) return;\n\n const newAttrs = makeDocument(document, tick());\n const doc = mergeDocuments(current, newAttrs);\n\n validate(config.schema, parseDocument(doc));\n\n $documents.setKey(id, doc);\n}\n\nexport function mergeCollectionSnapshot(\n $documents: ReturnType<typeof map<Collection[\"documents\"]>>,\n $tombstones: ReturnType<typeof map<Collection[\"tombstones\"]>>,\n currentSnapshot: Collection,\n incomingSnapshot: Collection,\n): void {\n const merged = mergeCollections(currentSnapshot, incomingSnapshot);\n $documents.set(merged.documents);\n $tombstones.set(merged.tombstones);\n}\n\nexport function createCollection<T extends AnyObject>(\n config: CollectionConfig<T>,\n clock: ClockAPI,\n): CollectionApi<T> {\n const { $data, $snapshot, $documents, $tombstones } =\n createCollectionState<T>();\n\n return {\n $data,\n $snapshot,\n get(key: DocumentId) {\n return $data.get().get(key);\n },\n has(key: DocumentId) {\n return $data.get().has(key);\n },\n keys() {\n return $data.get().keys();\n },\n values() {\n return $data.get().values();\n },\n entries() {\n return $data.get().entries();\n },\n forEach(\n callbackfn: (\n value: Output<T>,\n key: DocumentId,\n map: ReadonlyMap<DocumentId, Output<T>>,\n ) => void,\n thisArg?: any,\n ) {\n return $data.get().forEach(callbackfn, thisArg);\n },\n get size() {\n return $data.get().size;\n },\n add(data: Input<T>) {\n addDocument($documents, config, clock.tick, data);\n },\n remove(id: DocumentId) {\n removeDocument($documents, $tombstones, clock.tick, id);\n },\n update(id: DocumentId, document: Partial<Input<T>>) {\n updateDocument($documents, config, clock.tick, id, document);\n },\n merge(snapshot: Collection) {\n const currentSnapshot = $snapshot.get();\n mergeCollectionSnapshot(\n $documents,\n $tombstones,\n currentSnapshot,\n snapshot,\n );\n },\n };\n}\n\nfunction createCollectionState<T extends AnyObject>(): {\n $data: ReadableAtom<ReadonlyMap<DocumentId, Output<T>>>;\n $snapshot: ReadableAtom<Collection>;\n $documents: ReturnType<typeof map<Collection[\"documents\"]>>;\n $tombstones: ReturnType<typeof map<Collection[\"tombstones\"]>>;\n} {\n const $documents = map<Collection[\"documents\"]>({});\n const $tombstones = map<Collection[\"tombstones\"]>({});\n const $snapshot = batched([$documents, $tombstones], parseSnapshot);\n const $data = batched([$documents, $tombstones], parseCollection<T>);\n\n return {\n $data,\n $snapshot,\n $documents,\n $tombstones,\n };\n}\n\nfunction hasIdProperty<T extends AnyObject>(\n data: Output<T>,\n): data is { id: DocumentId } {\n return (\n typeof data === \"object\" &&\n data !== null &&\n \"id\" in data &&\n typeof (data as any).id === \"string\"\n );\n}\n\nfunction parseCollection<T extends AnyObject>(\n documents: Collection[\"documents\"],\n tombstones: Collection[\"tombstones\"],\n): ReadonlyMap<DocumentId, Output<T>> {\n const result = new Map<DocumentId, Output<T>>();\n for (const [id, doc] of Object.entries(documents)) {\n if (!tombstones[id] && doc) {\n result.set(id, parseDocument(doc));\n }\n }\n return result;\n}\n\nfunction parseSnapshot(\n documents: Collection[\"documents\"],\n tombstones: Collection[\"tombstones\"],\n): Collection {\n return {\n documents,\n 
tombstones,\n };\n}\n\nfunction hasGetId<T extends AnyObject>(\n config: CollectionConfig<T>,\n): config is {\n schema: T;\n getId: (data: Output<T>) => DocumentId;\n} {\n return \"getId\" in config && typeof config.getId === \"function\";\n}\n\nfunction defineGetId<T extends AnyObject>(\n config: CollectionConfig<T>,\n): (data: Output<T>) => DocumentId {\n return hasGetId(config) ? config.getId : defaultGetId;\n}\n\nfunction defaultGetId<T extends AnyObject>(data: Output<T>): DocumentId {\n if (hasIdProperty(data)) {\n return data.id;\n }\n throw new Error(\n \"Schema must have an 'id' property when getId is not provided\",\n );\n}\n","import { atom } from \"nanostores\";\nimport type { Clock } from \"../core/clock\";\nimport { advanceClock, makeStamp } from \"../core/clock\";\n\ntype ClockAtom = ReturnType<typeof atom<Clock>>;\n\nexport type ClockAPI = {\n $state: ClockAtom;\n tick: () => string;\n advance: (ms: number, seq: number) => void;\n};\n\nexport function createClock(): ClockAPI {\n const $state: ClockAtom = atom<Clock>(nowClock());\n\n const tick = () => {\n const next = advanceClock($state.get(), nowClock());\n $state.set(next);\n return makeStamp(next.ms, next.seq);\n };\n\n const advance = (ms: number, seq: number) => {\n const next = advanceClock($state.get(), { ms, seq });\n $state.set(next);\n };\n\n return {\n $state,\n tick,\n advance,\n };\n}\n\nfunction nowClock(): Clock {\n return { ms: Date.now(), seq: 0 };\n}\n","import { batched, computed, type ReadableAtom } from \"nanostores\";\nimport { createCollection } from \"./collection\";\nimport { createClock, type ClockAPI } from \"./clock\";\nimport type { CollectionConfig, CollectionApi } from \"./collection\";\nimport type { Clock } from \"../core/clock\";\nimport type { Collection } from \"../core/collection\";\n\nexport type StoreSnapshot = {\n clock: Clock;\n collections: Record<string, Collection>;\n};\n\nexport type StoreCollections<T extends Record<string, CollectionConfig<any>>> =\n {\n [K in keyof T]: T[K] extends CollectionConfig<infer S>\n ? CollectionApi<S>\n : never;\n };\n\nexport type QueryCollections<\n TCollections extends StoreCollections<any>,\n TKeys extends readonly (keyof TCollections)[],\n> = {\n [K in TKeys[number]]: TCollections[K] extends { $data: ReadableAtom<infer D> }\n ? 
D\n : never;\n};\n\nexport type StoreAPI<T extends Record<string, CollectionConfig<any>>> =\n StoreCollections<T> & {\n $snapshot: ReadableAtom<StoreSnapshot>;\n query<TKeys extends readonly (keyof StoreCollections<T>)[], TResult>(\n collections: TKeys,\n callback: (\n collections: QueryCollections<StoreCollections<T>, TKeys>,\n ) => TResult,\n ): ReadableAtom<TResult>;\n merge(snapshot: StoreSnapshot): void;\n };\n\nexport function createStore<\n T extends Record<string, CollectionConfig<any>>,\n>(config: { collections: T }): StoreAPI<T> {\n const clock = createClock();\n const collections = initCollections(config.collections, clock);\n const $snapshot = parseCollections(collections, clock.$state);\n\n function getCollectionDataStores(\n collectionNames: readonly (keyof StoreCollections<T>)[],\n ): ReadableAtom<any>[] {\n return collectionNames.map((name) => collections[name]!.$data);\n }\n\n return {\n ...collections,\n $snapshot,\n query: <TKeys extends readonly (keyof StoreCollections<T>)[], TResult>(\n collectionNames: TKeys,\n callback: (\n collections: QueryCollections<StoreCollections<T>, TKeys>,\n ) => TResult,\n ) => {\n const atoms = getCollectionDataStores(collectionNames);\n\n return computed(atoms, (...values) => {\n const entries = collectionNames.map((name, i) => [name, values[i]]);\n return callback(\n Object.fromEntries(entries) as QueryCollections<\n StoreCollections<T>,\n TKeys\n >,\n );\n });\n },\n merge: (snapshot) => {\n clock.advance(snapshot.clock.ms, snapshot.clock.seq);\n mergeCollections(collections, snapshot.collections);\n },\n };\n}\n\nfunction initCollections<T extends Record<string, CollectionConfig<any>>>(\n collectionsConfig: T,\n clock: ClockAPI,\n): StoreCollections<T> {\n return Object.fromEntries(\n Object.entries(collectionsConfig).map(([name, config]) => [\n name,\n createCollection(config, clock),\n ]),\n ) as StoreCollections<T>;\n}\n\nfunction parseCollections<T extends Record<string, CollectionConfig<any>>>(\n collections: StoreCollections<T>,\n clockState: ReadableAtom<Clock>,\n): ReadableAtom<StoreSnapshot> {\n const collectionNames = Object.keys(collections);\n const collectionSnapshotAtoms: ReadableAtom<Collection>[] = [];\n\n for (const name of collectionNames) {\n const collection = collections[name];\n if (collection) {\n collectionSnapshotAtoms.push(collection.$snapshot);\n }\n }\n\n return batched(\n [clockState, ...collectionSnapshotAtoms],\n (clock, ...snapshots) => {\n const collectionsSnapshot: Record<string, Collection> = {};\n for (let i = 0; i < collectionNames.length; i++) {\n const name = collectionNames[i];\n const snapshot = snapshots[i];\n if (name && snapshot !== undefined) {\n collectionsSnapshot[name] = snapshot;\n }\n }\n\n return {\n clock,\n collections: collectionsSnapshot,\n };\n },\n );\n}\n\nfunction mergeCollections(\n target: Record<string, CollectionApi<any>>,\n source: Record<string, Collection>,\n) {\n for (const [collectionName, collectionSnapshot] of Object.entries(source)) {\n const collection = target[collectionName];\n if (collection) {\n collection.merge(collectionSnapshot);\n }\n 
}\n}\n"],"mappings":";;;AAEA,SAAgB,SACd,QACA,OACiC;CACjC,MAAM,SAAS,OAAO,aAAa,SAAS,MAAM;AAClD,KAAI,kBAAkB,QACpB,OAAM,IAAI,UAAU,wCAAwC;AAG9D,KAAI,OAAO,OACT,OAAM,IAAI,MAAM,KAAK,UAAU,OAAO,QAAQ,MAAM,EAAE,CAAC;AAGzD,QAAO,OAAO;;;;;ACfhB,SAAgB,MAAM,OAAe,WAA2B;AAC9D,QAAO,MAAM,SAAS,GAAG,CAAC,SAAS,WAAW,IAAI;;AAGpD,SAAgB,MAAM,QAAwB;CAC5C,MAAM,QAAQ,IAAI,WAAW,SAAS,EAAE;AACxC,QAAO,gBAAgB,MAAM;AAC7B,QAAO,MAAM,KAAK,MAAM,CACrB,KAAK,MAAM,MAAM,GAAG,EAAE,CAAC,CACvB,KAAK,GAAG;;;;;ACPb,MAAM,YAAY;AAClB,MAAM,aAAa;AACnB,MAAM,eAAe;AAOrB,SAAgB,aAAa,SAAgB,MAAoB;AAC/D,KAAI,KAAK,KAAK,QAAQ,GACpB,QAAO;EAAE,IAAI,KAAK;EAAI,KAAK,KAAK;EAAK;UAC5B,KAAK,OAAO,QAAQ,GAC7B,QAAO;EAAE,IAAI,QAAQ;EAAI,KAAK,KAAK,IAAI,QAAQ,KAAK,KAAK,IAAI,GAAG;EAAG;KAEnE,QAAO;EAAE,IAAI,QAAQ;EAAI,KAAK,QAAQ,MAAM;EAAG;;AAInD,SAAgB,UAAU,IAAY,KAAqB;AACzD,QAAO,GAAG,MAAM,IAAI,UAAU,GAAG,MAAM,KAAK,WAAW,GAAG,MAAM,aAAa;;;;;;;;;;;AChB/E,SAAgB,QACd,KACA,QACmB;CACnB,MAAMA,SAA4B,EAAE;CAEpC,MAAM,WAAW,OAAgB,SAAiB;AAChD,MAAI,KACF,QAAO,QAAQ,SAAS,OAAO,OAAO,KAAK,GAAI;;CAInD,SAAS,SAAS,SAAkB,SAAiB,IAAU;AAC7D,MAAI,CAAC,eAAe,QAAQ,EAAE;AAC5B,WAAQ,SAAS,OAAO;AACxB;;AAGF,OAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,CAEhD,UAAS,OADO,SAAS,GAAG,OAAO,GAAG,QAAQ,IACtB;;AAI5B,UAAS,IAAI;AACb,QAAO;;;;;;;;AAST,SAAgB,UACd,KACA,QACyB;CACzB,MAAMC,SAAkC,EAAE;AAE1C,MAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,IAAI,EAAE;EAC/C,MAAM,OAAO,KAAK,MAAM,IAAI;EAC5B,MAAM,cAAc,SAAS,OAAO,OAAO,KAAK,GAAG;EAEnD,IAAIC,UAAe;AACnB,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;GACxC,MAAM,MAAM,KAAK;AACjB,OAAI,EAAE,OAAO,SACX,SAAQ,OAAO,EAAE;AAEnB,aAAU,QAAQ;;EAGpB,MAAM,WAAW,KAAK,KAAK,SAAS;AACpC,UAAQ,YAAY;;AAGtB,QAAO;;AAGT,SAAS,cAAc,OAAkD;AACvE,QACE,OAAO,UAAU,YACjB,UAAU,QACV,CAAC,MAAM,QAAQ,MAAM,KACpB,MAAM,gBAAgB,UAAU,OAAO,eAAe,MAAM,KAAK;;AAItE,SAAS,eAAe,OAAkD;AACxE,QAAO,cAAc,MAAM,IAAI,OAAO,KAAK,MAAM,CAAC,SAAS;;;;;ACnE7D,SAAgB,aACd,QACA,OACU;AACV,QAAO,QAAQ,SAAS,WAAW;EAAE,UAAU;EAAO,UAAU;EAAO,EAAE;;AAG3E,SAAgB,cAAc,UAAyC;AACrE,QAAO,UAAU,WAAW,UAAU,MAAM,UAAU;;AAGxD,SAAgB,eAAe,QAAkB,QAA4B;CAC3E,MAAMC,SAAmB,EAAE;CAC3B,MAAM,OAAO,IAAI,IAAI,CAAC,GAAG,OAAO,KAAK,OAAO,EAAE,GAAG,OAAO,KAAK,OAAO,CAAC,CAAC;AAEtE,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,cAAc,OAAO;EAC3B,MAAM,cAAc,OAAO;AAE3B,MAAI,eAAe,YACjB,QAAO,OACL,YAAY,YAAY,YAAY,YAChC,cACA;WACG,YACT,QAAO,OAAO;WACL,YACT,QAAO,OAAO;MAEd,OAAM,IAAI,MAAM,OAAO,IAAI,+BAA+B;;AAI9D,QAAO;;;;;ACxCT,SAAgB,gBACd,QACA,QACY;CACZ,MAAMC,SAAqB,EAAE;CAC7B,MAAM,OAAO,IAAI,IAAI,CAAC,GAAG,OAAO,KAAK,OAAO,EAAE,GAAG,OAAO,KAAK,OAAO,CAAC,CAAC;AAEtE,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,cAAc,OAAO;EAC3B,MAAM,cAAc,OAAO;AAE3B,MAAI,eAAe,YACjB,QAAO,OAAO,cAAc,cAAc,cAAc;WAC/C,YACT,QAAO,OAAO;WACL,YACT,QAAO,OAAO;;AAIlB,QAAO;;;;;ACVT,SAAgBC,mBACd,QACA,QACY;CACZ,MAAM,mBAAmB,gBACvB,OAAO,YACP,OAAO,WACR;CAED,MAAMC,kBAAgD,EAAE;CACxD,MAAM,iBAAiB,IAAI,IAAI,CAC7B,GAAG,OAAO,KAAK,OAAO,UAAU,EAChC,GAAG,OAAO,KAAK,OAAO,UAAU,CACjC,CAAC;AAEF,MAAK,MAAM,MAAM,gBAAgB;EAC/B,MAAM,YAAY,OAAO,UAAU;EACnC,MAAM,YAAY,OAAO,UAAU;AAEnC,MAAI,iBAAiB,IACnB;AAGF,MAAI,aAAa,UACf,iBAAgB,MAAM,eAAe,WAAW,UAAU;WACjD,UACT,iBAAgB,MAAM;WACb,UACT,iBAAgB,MAAM;;AAI1B,QAAO;EACL,WAAW;EACX,YAAY;EACb;;;;;ACXH,SAAgB,YACd,YACA,QACA,MACA,MACM;CACN,MAAM,QAAQ,YAAY,OAAO;CACjC,MAAM,QAAQ,SAAS,OAAO,QAAQ,KAAK;CAC3C,MAAM,MAAM,aAAa,OAAO,MAAM,CAAC;CACvC,MAAM,KAAK,MAAM,MAAM;AACvB,YAAW,OAAO,IAAI,IAAI;;AAG5B,SAAgB,eACd,YACA,aACA,MACA,IACM;AACN,aAAY,OAAO,IAAI,MAAM,CAAC;AAC9B,YAAW,OAAO,IAAI,OAAU;;AAGlC,SAAgB,eACd,YACA,QACA,MACA,IACA,UACM;CACN,MAAM,UAAU,WAAW,KAAK,CAAC;AACjC,KAAI,CAAC,QAAS;CAGd,MAAM,MAAM,eAAe,SADV,aAAa,UAAU,MAAM,CAAC,CACF;AAE7C,UAAS,OAAO,QAAQ,cAAc,IAAI,CAAC;AAE3C,YAAW,OAAO,IAAI,IAAI;;AAG5B,SAAgB,wBACd,YACA,aACA,iBACA,k
BACM;CACN,MAAM,SAASC,mBAAiB,iBAAiB,iBAAiB;AAClE,YAAW,IAAI,OAAO,UAAU;AAChC,aAAY,IAAI,OAAO,WAAW;;AAGpC,SAAgB,iBACd,QACA,OACkB;CAClB,MAAM,EAAE,OAAO,WAAW,YAAY,gBACpC,uBAA0B;AAE5B,QAAO;EACL;EACA;EACA,IAAI,KAAiB;AACnB,UAAO,MAAM,KAAK,CAAC,IAAI,IAAI;;EAE7B,IAAI,KAAiB;AACnB,UAAO,MAAM,KAAK,CAAC,IAAI,IAAI;;EAE7B,OAAO;AACL,UAAO,MAAM,KAAK,CAAC,MAAM;;EAE3B,SAAS;AACP,UAAO,MAAM,KAAK,CAAC,QAAQ;;EAE7B,UAAU;AACR,UAAO,MAAM,KAAK,CAAC,SAAS;;EAE9B,QACE,YAKA,SACA;AACA,UAAO,MAAM,KAAK,CAAC,QAAQ,YAAY,QAAQ;;EAEjD,IAAI,OAAO;AACT,UAAO,MAAM,KAAK,CAAC;;EAErB,IAAI,MAAgB;AAClB,eAAY,YAAY,QAAQ,MAAM,MAAM,KAAK;;EAEnD,OAAO,IAAgB;AACrB,kBAAe,YAAY,aAAa,MAAM,MAAM,GAAG;;EAEzD,OAAO,IAAgB,UAA6B;AAClD,kBAAe,YAAY,QAAQ,MAAM,MAAM,IAAI,SAAS;;EAE9D,MAAM,UAAsB;AAE1B,2BACE,YACA,aAHsB,UAAU,KAAK,EAKrC,SACD;;EAEJ;;AAGH,SAAS,wBAKP;CACA,MAAM,aAAa,IAA6B,EAAE,CAAC;CACnD,MAAM,cAAc,IAA8B,EAAE,CAAC;CACrD,MAAM,YAAY,QAAQ,CAAC,YAAY,YAAY,EAAE,cAAc;AAGnE,QAAO;EACL,OAHY,QAAQ,CAAC,YAAY,YAAY,EAAE,gBAAmB;EAIlE;EACA;EACA;EACD;;AAGH,SAAS,cACP,MAC4B;AAC5B,QACE,OAAO,SAAS,YAChB,SAAS,QACT,QAAQ,QACR,OAAQ,KAAa,OAAO;;AAIhC,SAAS,gBACP,WACA,YACoC;CACpC,MAAM,yBAAS,IAAI,KAA4B;AAC/C,MAAK,MAAM,CAAC,IAAI,QAAQ,OAAO,QAAQ,UAAU,CAC/C,KAAI,CAAC,WAAW,OAAO,IACrB,QAAO,IAAI,IAAI,cAAc,IAAI,CAAC;AAGtC,QAAO;;AAGT,SAAS,cACP,WACA,YACY;AACZ,QAAO;EACL;EACA;EACD;;AAGH,SAAS,SACP,QAIA;AACA,QAAO,WAAW,UAAU,OAAO,OAAO,UAAU;;AAGtD,SAAS,YACP,QACiC;AACjC,QAAO,SAAS,OAAO,GAAG,OAAO,QAAQ;;AAG3C,SAAS,aAAkC,MAA6B;AACtE,KAAI,cAAc,KAAK,CACrB,QAAO,KAAK;AAEd,OAAM,IAAI,MACR,+DACD;;;;;ACjNH,SAAgB,cAAwB;CACtC,MAAMC,SAAoB,KAAY,UAAU,CAAC;CAEjD,MAAM,aAAa;EACjB,MAAM,OAAO,aAAa,OAAO,KAAK,EAAE,UAAU,CAAC;AACnD,SAAO,IAAI,KAAK;AAChB,SAAO,UAAU,KAAK,IAAI,KAAK,IAAI;;CAGrC,MAAM,WAAW,IAAY,QAAgB;EAC3C,MAAM,OAAO,aAAa,OAAO,KAAK,EAAE;GAAE;GAAI;GAAK,CAAC;AACpD,SAAO,IAAI,KAAK;;AAGlB,QAAO;EACL;EACA;EACA;EACD;;AAGH,SAAS,WAAkB;AACzB,QAAO;EAAE,IAAI,KAAK,KAAK;EAAE,KAAK;EAAG;;;;;ACMnC,SAAgB,YAEd,QAAyC;CACzC,MAAM,QAAQ,aAAa;CAC3B,MAAM,cAAc,gBAAgB,OAAO,aAAa,MAAM;CAC9D,MAAM,YAAY,iBAAiB,aAAa,MAAM,OAAO;CAE7D,SAAS,wBACP,iBACqB;AACrB,SAAO,gBAAgB,KAAK,SAAS,YAAY,MAAO,MAAM;;AAGhE,QAAO;EACL,GAAG;EACH;EACA,QACE,iBACA,aAGG;AAGH,UAAO,SAFO,wBAAwB,gBAAgB,GAE9B,GAAG,WAAW;IACpC,MAAM,UAAU,gBAAgB,KAAK,MAAM,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC;AACnE,WAAO,SACL,OAAO,YAAY,QAAQ,CAI5B;KACD;;EAEJ,QAAQ,aAAa;AACnB,SAAM,QAAQ,SAAS,MAAM,IAAI,SAAS,MAAM,IAAI;AACpD,oBAAiB,aAAa,SAAS,YAAY;;EAEtD;;AAGH,SAAS,gBACP,mBACA,OACqB;AACrB,QAAO,OAAO,YACZ,OAAO,QAAQ,kBAAkB,CAAC,KAAK,CAAC,MAAM,YAAY,CACxD,MACA,iBAAiB,QAAQ,MAAM,CAChC,CAAC,CACH;;AAGH,SAAS,iBACP,aACA,YAC6B;CAC7B,MAAM,kBAAkB,OAAO,KAAK,YAAY;CAChD,MAAMC,0BAAsD,EAAE;AAE9D,MAAK,MAAM,QAAQ,iBAAiB;EAClC,MAAM,aAAa,YAAY;AAC/B,MAAI,WACF,yBAAwB,KAAK,WAAW,UAAU;;AAItD,QAAO,QACL,CAAC,YAAY,GAAG,wBAAwB,GACvC,OAAO,GAAG,cAAc;EACvB,MAAMC,sBAAkD,EAAE;AAC1D,OAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK;GAC/C,MAAM,OAAO,gBAAgB;GAC7B,MAAM,WAAW,UAAU;AAC3B,OAAI,QAAQ,aAAa,OACvB,qBAAoB,QAAQ;;AAIhC,SAAO;GACL;GACA,aAAa;GACd;GAEJ;;AAGH,SAAS,iBACP,QACA,QACA;AACA,MAAK,MAAM,CAAC,gBAAgB,uBAAuB,OAAO,QAAQ,OAAO,EAAE;EACzE,MAAM,aAAa,OAAO;AAC1B,MAAI,WACF,YAAW,MAAM,mBAAmB"}
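The sources embedded in the new map (lib/store/store.ts and lib/store/collection.ts) describe the rewritten store API: collections validated by any Standard Schema library, per-field stamps from a shared clock, and reactive nanostores atoms. The following is a minimal usage sketch reconstructed from those sources; the zod schema, the collection name, and the assumption that createStore is re-exported from the package root are illustrative rather than confirmed by this diff:

import { z } from "zod";
import { createStore } from "@byearlybird/starling"; // assumed root export

// Any Standard Schema validator works; schemas with an `id` field need no getId.
const todoSchema = z.object({ id: z.string(), title: z.string(), done: z.boolean() });

const store = createStore({
  collections: {
    todos: { schema: todoSchema },
  },
});

store.todos.add({ id: "t1", title: "Ship 0.13.0", done: false });
store.todos.update("t1", { done: true });

// query() derives a reactive atom from the named collections' $data maps.
const $open = store.query(["todos"] as const, ({ todos }) =>
  [...todos.values()].filter((t) => !t.done),
);

// $snapshot and merge() exchange CRDT state (documents, tombstones, clock).
const snapshot = store.$snapshot.get();
store.merge(snapshot);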
package/package.json
CHANGED
@@ -1,46 +1,38 @@
 {
  [old lines 2–37 removed; their content is not shown in this diff]
-  "publishConfig": {
-    "access": "public"
-  },
-  "devDependencies": {
-    "fake-indexeddb": "^6.0.0",
-    "happy-dom": "^20.0.10",
-    "zod": "^4.1.12"
-  }
+  "name": "@byearlybird/starling",
+  "version": "0.13.0",
+  "type": "module",
+  "main": "./dist/index.js",
+  "module": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.js"
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build": "tsdown",
+    "dev": "tsdown --watch",
+    "format": "prettier . --write",
+    "format:check": "prettier . --check",
+    "typecheck": "tsc --noEmit",
+    "prepublishOnly": "bun run build"
+  },
+  "devDependencies": {
+    "@types/bun": "latest",
+    "prettier": "3.7.4",
+    "tsdown": "^0.18.3",
+    "zod": "^4.2.1"
+  },
+  "peerDependencies": {
+    "typescript": "^5"
+  },
+  "dependencies": {
+    "@standard-schema/spec": "^1.1.0",
+    "nanostores": "^1.1.0"
+  }
 }
package/dist/core-UUzgRHaU.js
DELETED
@@ -1,420 +0,0 @@
-//#region src/core/clock/errors.ts
-var InvalidEventstampError = class extends Error {
-  constructor(eventstamp) {
-    super(`Invalid eventstamp: "${eventstamp}"`);
-    this.name = "InvalidEventstampError";
-  }
-};
-
-//#endregion
-//#region src/core/clock/eventstamp.ts
-function generateNonce() {
-  return Math.random().toString(16).slice(2, 6).padStart(4, "0");
-}
-function encodeEventstamp(timestampMs, counter, nonce) {
-  return `${new Date(timestampMs).toISOString()}|${counter.toString(16).padStart(4, "0")}|${nonce}`;
-}
-const EVENTSTAMP_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z\|[0-9a-f]{4,}\|[0-9a-f]{4}$/;
-/**
- * Validates whether a string is a properly formatted eventstamp.
- * Expected format: YYYY-MM-DDTHH:mm:ss.SSSZ|HHHH+|HHHH
- * where HHHH+ represents 4 or more hex characters for the counter,
- * and HHHH represents exactly 4 hex characters for the nonce.
- */
-function isValidEventstamp(stamp) {
-  return EVENTSTAMP_REGEX.test(stamp);
-}
-function decodeEventstamp(eventstamp) {
-  if (!isValidEventstamp(eventstamp)) throw new InvalidEventstampError(eventstamp);
-  const parts = eventstamp.split("|");
-  const isoString = parts[0];
-  const hexCounter = parts[1];
-  const nonce = parts[2];
-  return {
-    timestampMs: new Date(isoString).getTime(),
-    counter: parseInt(hexCounter, 16),
-    nonce
-  };
-}
-const MIN_EVENTSTAMP = encodeEventstamp(0, 0, "0000");
-/**
- * Find the maximum eventstamp from an array of eventstamps.
- * Returns MIN_EVENTSTAMP if the array is empty.
- * @param eventstamps - Array of eventstamp strings
- * @returns The maximum eventstamp
- */
-function maxEventstamp(eventstamps) {
-  if (eventstamps.length === 0) return MIN_EVENTSTAMP;
-  return eventstamps.reduce((max, stamp) => stamp > max ? stamp : max);
-}
-
-//#endregion
-//#region src/core/clock/clock.ts
-/**
- * Create a new Clock instance.
- * @param initialState - Optional initial state for the clock
- */
-function createClock(initialState) {
-  let counter = initialState?.counter ?? 0;
-  let lastMs = initialState?.lastMs ?? Date.now();
-  let lastNonce = initialState?.lastNonce ?? generateNonce();
-  const now = () => {
-    const wallMs = Date.now();
-    if (wallMs > lastMs) {
-      lastMs = wallMs;
-      counter = 0;
-      lastNonce = generateNonce();
-    } else {
-      counter++;
-      lastNonce = generateNonce();
-    }
-    return encodeEventstamp(lastMs, counter, lastNonce);
-  };
-  const latest = () => encodeEventstamp(lastMs, counter, lastNonce);
-  const forward = (eventstamp) => {
-    if (!isValidEventstamp(eventstamp)) throw new InvalidEventstampError(eventstamp);
-    if (eventstamp > latest()) {
-      const newer = decodeEventstamp(eventstamp);
-      lastMs = newer.timestampMs;
-      counter = newer.counter;
-      lastNonce = newer.nonce;
-    }
-  };
-  return {
-    now,
-    latest,
-    forward
-  };
-}
-/**
- * Create a Clock from an eventstamp string.
- * @param eventstamp - Eventstamp string to decode and initialize clock from
- * @throws Error if eventstamp is invalid
- */
-function createClockFromEventstamp(eventstamp) {
-  if (!isValidEventstamp(eventstamp)) throw new Error(`Invalid eventstamp: "${eventstamp}"`);
-  const decoded = decodeEventstamp(eventstamp);
-  return createClock({
-    counter: decoded.counter,
-    lastMs: decoded.timestampMs,
-    lastNonce: decoded.nonce
-  });
-}
-
-//#endregion
-//#region src/core/document/resource.ts
-function isObject(value) {
-  return value != null && typeof value === "object" && !Array.isArray(value) && Object.getPrototypeOf(value) === Object.prototype;
-}
-/**
- * Get a value from a nested object using a dot-separated path.
- * @internal
- */
-function getValueAtPath(obj, path) {
-  const parts = path.split(".");
-  let current = obj;
-  for (const part of parts) {
-    if (current == null) return void 0;
-    current = current[part];
-  }
-  return current;
-}
-/**
- * Set a value in a nested object using a dot-separated path.
- * Creates intermediate objects as needed.
- * @internal
- */
-function setValueAtPath(obj, path, value) {
-  const parts = path.split(".");
-  let current = obj;
-  for (let i = 0; i < parts.length - 1; i++) {
-    if (!current[parts[i]] || typeof current[parts[i]] !== "object") current[parts[i]] = {};
-    current = current[parts[i]];
-  }
-  current[parts[parts.length - 1]] = value;
-}
-/**
- * Compute the latest eventstamp for a resource from its field eventstamps and deletedAt.
- * Used internally and exported for testing/validation.
- * @internal
- */
-function computeResourceLatest(eventstamps, deletedAt, fallback) {
-  let max = fallback ?? MIN_EVENTSTAMP;
-  for (const stamp of Object.values(eventstamps)) if (stamp > max) max = stamp;
-  if (deletedAt && deletedAt > max) return deletedAt;
-  return max;
-}
-function makeResource(id, obj, eventstamp, deletedAt = null) {
-  const eventstamps = {};
-  const traverse = (input, path = "") => {
-    for (const key in input) {
-      if (!Object.hasOwn(input, key)) continue;
-      const value = input[key];
-      const fieldPath = path ? `${path}.${key}` : key;
-      if (isObject(value)) traverse(value, fieldPath);
-      else eventstamps[fieldPath] = eventstamp;
-    }
-  };
-  traverse(obj);
-  return {
-    id,
-    attributes: obj,
-    meta: {
-      eventstamps,
-      latest: computeResourceLatest(eventstamps, deletedAt, eventstamp),
-      deletedAt
-    }
-  };
-}
-function mergeResources(into, from) {
-  const resultAttributes = {};
-  const resultEventstamps = {};
-  const allPaths = new Set([...Object.keys(into.meta.eventstamps), ...Object.keys(from.meta.eventstamps)]);
-  for (const path of allPaths) {
-    const stamp1 = into.meta.eventstamps[path];
-    const stamp2 = from.meta.eventstamps[path];
-    if (stamp1 && stamp2) if (stamp1 > stamp2) {
-      setValueAtPath(resultAttributes, path, getValueAtPath(into.attributes, path));
-      resultEventstamps[path] = stamp1;
-    } else {
-      setValueAtPath(resultAttributes, path, getValueAtPath(from.attributes, path));
-      resultEventstamps[path] = stamp2;
-    }
-    else if (stamp1) {
-      setValueAtPath(resultAttributes, path, getValueAtPath(into.attributes, path));
-      resultEventstamps[path] = stamp1;
-    } else {
-      setValueAtPath(resultAttributes, path, getValueAtPath(from.attributes, path));
-      resultEventstamps[path] = stamp2;
-    }
-  }
-  const dataLatest = computeResourceLatest(resultEventstamps, null, into.meta.latest > from.meta.latest ? into.meta.latest : from.meta.latest);
-  const mergedDeletedAt = into.meta.deletedAt && from.meta.deletedAt ? into.meta.deletedAt > from.meta.deletedAt ? into.meta.deletedAt : from.meta.deletedAt : into.meta.deletedAt || from.meta.deletedAt || null;
-  const finalLatest = mergedDeletedAt && mergedDeletedAt > dataLatest ? mergedDeletedAt : dataLatest;
-  return {
-    id: into.id,
-    attributes: resultAttributes,
-    meta: {
-      eventstamps: resultEventstamps,
-      latest: finalLatest,
-      deletedAt: mergedDeletedAt
-    }
-  };
-}
-function deleteResource(resource, eventstamp) {
-  const dataLatest = resource.meta.deletedAt ? computeResourceLatest(resource.meta.eventstamps, null) : resource.meta.latest;
-  const latest = eventstamp > dataLatest ? eventstamp : dataLatest;
-  return {
-    id: resource.id,
-    attributes: resource.attributes,
-    meta: {
-      eventstamps: resource.meta.eventstamps,
-      latest,
-      deletedAt: eventstamp
-    }
-  };
-}
-
-//#endregion
-//#region src/core/document/document.ts
-/**
- * Merges two Starling documents using field-level Last-Write-Wins semantics.
- *
- * The merge operation:
- * 1. Forwards the clock to the newest eventstamp from either document
- * 2. Merges each resource pair using field-level LWW (via mergeResources)
- * 3. Tracks what changed for hook notifications (added/updated/deleted)
- *
- * Deletion is final: once a resource is deleted, updates to it are merged into
- * the resource's attributes but don't restore visibility. Only new resources or
- * transitions into the deleted state are tracked.
- *
- * @param into - The base document to merge into
- * @param from - The source document to merge from
- * @returns Merged document and categorized changes
- *
- * @example
- * ```typescript
- * const into = {
- *   type: "items",
- *   latest: "2025-01-01T00:00:00.000Z|0001|a1b2",
- *   resources: { "doc1": { id: "doc1", attributes: {...}, meta: {...} } }
- * };
- *
- * const from = {
- *   type: "items",
- *   latest: "2025-01-01T00:05:00.000Z|0001|c3d4",
- *   resources: {
- *     "doc1": { id: "doc1", attributes: {...}, meta: {...} }, // updated
- *     "doc2": { id: "doc2", attributes: {...}, meta: {...} } // new
- *   }
- * };
- *
- * const result = mergeDocuments(into, from);
- * // result.document.latest === "2025-01-01T00:05:00.000Z|0001|c3d4"
- * // result.changes.added has "doc2"
- * // result.changes.updated has "doc1"
- * ```
- */
-function mergeDocuments(into, from) {
-  const added = /* @__PURE__ */ new Map();
-  const updated = /* @__PURE__ */ new Map();
-  const deleted = /* @__PURE__ */ new Set();
-  const mergedResources = { ...into.resources };
-  let newestEventstamp = into.latest >= from.latest ? into.latest : from.latest;
-  for (const [id, fromDoc] of Object.entries(from.resources)) {
-    const intoDoc = into.resources[id];
-    if (!intoDoc) {
-      mergedResources[id] = fromDoc;
-      if (!fromDoc.meta.deletedAt) added.set(id, fromDoc);
-      if (fromDoc.meta.latest > newestEventstamp) newestEventstamp = fromDoc.meta.latest;
-    } else {
-      if (intoDoc === fromDoc) continue;
-      const mergedDoc = mergeResources(intoDoc, fromDoc);
-      mergedResources[id] = mergedDoc;
-      if (mergedDoc.meta.latest > newestEventstamp) newestEventstamp = mergedDoc.meta.latest;
-      const wasDeleted = intoDoc.meta.deletedAt !== null;
-      const isDeleted = mergedDoc.meta.deletedAt !== null;
-      if (!wasDeleted && isDeleted) deleted.add(id);
-      else if (!isDeleted) {
-        if (intoDoc.meta.latest !== mergedDoc.meta.latest) updated.set(id, mergedDoc);
-      }
-    }
-  }
-  return {
-    document: {
-      type: into.type,
-      latest: newestEventstamp,
-      resources: mergedResources
-    },
-    changes: {
-      added,
-      updated,
-      deleted
-    }
-  };
-}
-/**
- * Creates an empty Starling document with the given type and eventstamp.
- * Useful for initializing new stores or testing.
- *
- * @param type - Resource type identifier for this collection
- * @param eventstamp - Initial clock value for this document
- * @returns Empty document
- *
- * @example
- * ```typescript
- * const empty = makeDocument("tasks", "2025-01-01T00:00:00.000Z|0000|0000");
- * ```
- */
-function makeDocument(type, eventstamp) {
-  return {
-    type,
-    latest: eventstamp,
-    resources: {}
-  };
-}
-
-//#endregion
-//#region src/core/document/utils.ts
-/**
- * Convert a StarlingDocument's resources into a Map keyed by resource ID.
- * @param document - StarlingDocument containing resource data
- * @returns Map of resource ID to ResourceObject
- */
-function documentToMap(document) {
-  return new Map(Object.entries(document.resources));
-}
-/**
- * Convert a Map of resources into a StarlingDocument.
- * @param type - Resource type identifier for this collection
- * @param resources - Map of resource ID to ResourceObject
- * @param fallbackEventstamp - Eventstamp to include when computing the max (optional)
- * @returns StarlingDocument representation of the resources
- */
-function mapToDocument(type, resources, fallbackEventstamp) {
-  const eventstamps = Array.from(resources.values()).map((r) => r.meta.latest);
-  if (fallbackEventstamp) eventstamps.push(fallbackEventstamp);
-  const latest = maxEventstamp(eventstamps);
-  const resourcesRecord = {};
-  for (const [id, resource] of resources) resourcesRecord[id] = resource;
-  return {
-    type,
-    latest,
-    resources: resourcesRecord
-  };
-}
-
-//#endregion
-//#region src/core/resource-map/resource-map.ts
-/**
- * A ResourceMap container for storing and managing ResourceObjects.
- *
- * This factory function creates a ResourceMap with state-based replication
- * and automatic convergence via Last-Write-Wins conflict resolution.
- * It stores complete resource snapshots with encoded metadata, including deletion markers.
- *
- * ResourceMap does NOT filter based on deletion status—it stores and returns
- * all ResourceObjects including deleted ones. The Store class is responsible
- * for filtering what's visible to users.
- *
- * @example
- * ```typescript
- * const resourceMap = createMap("todos");
- * resourceMap.set("id1", { name: "Alice" });
- * const resource = resourceMap.get("id1"); // ResourceObject with metadata
- * ```
- */
-function createMap(resourceType, initialMap = /* @__PURE__ */ new Map(), eventstamp) {
-  let internalMap = initialMap;
-  const clock = createClock();
-  if (eventstamp) clock.forward(eventstamp);
-  return {
-    has(id) {
-      return internalMap.has(id);
-    },
-    get(id) {
-      return internalMap.get(id);
-    },
-    entries() {
-      return internalMap.entries();
-    },
-    set(id, object) {
-      const encoded = makeResource(id, object, clock.now());
-      const current = internalMap.get(id);
-      if (current) {
-        const merged = mergeResources(current, encoded);
-        internalMap.set(id, merged);
-      } else internalMap.set(id, encoded);
-    },
-    delete(id) {
-      const current = internalMap.get(id);
-      if (current) {
-        const doc = deleteResource(current, clock.now());
-        internalMap.set(id, doc);
-      }
-    },
-    cloneMap() {
-      return new Map(internalMap);
-    },
-    toDocument() {
-      return mapToDocument(resourceType, internalMap, clock.latest());
-    },
-    merge(document) {
-      const result = mergeDocuments(mapToDocument(resourceType, internalMap, clock.latest()), document);
-      clock.forward(result.document.latest);
-      internalMap = documentToMap(result.document);
-      return result;
-    }
-  };
-}
-/**
- * Create a ResourceMap from a StarlingDocument snapshot.
- * @param document - StarlingDocument containing resource data
- */
-function createMapFromDocument(document) {
-  return createMap(document.type, documentToMap(document), document.latest);
-}
-
-//#endregion
-export { makeDocument as a, makeResource as c, createClockFromEventstamp as d, MIN_EVENTSTAMP as f, InvalidEventstampError as h, mapToDocument as i, mergeResources as l, maxEventstamp as m, createMapFromDocument as n, mergeDocuments as o, isValidEventstamp as p, documentToMap as r, deleteResource as s, createMap as t, createClock as u };
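The deleted chunk shows the 0.12.0 eventstamp encoding (ISO-8601 timestamp, hex counter, and hex nonce joined by "|"), while lib/core/clock.ts in the new source map packs milliseconds, sequence, and nonce into a fixed-width hex string. A small illustrative comparison; the concrete values are made up:

// 0.12.0 eventstamp: "<ISO timestamp>|<hex counter>|<hex nonce>"
const oldStamp = `${new Date(1735689600000).toISOString()}|0001|a1b2`;
// => "2025-01-01T00:00:00.000Z|0001|a1b2"

// 0.13.0 stamp: 12 hex chars of ms + 6 hex chars of seq + 6 hex chars of nonce
const newStamp =
  (1735689600000).toString(16).padStart(12, "0") + // "01941f297c00"
  (1).toString(16).padStart(6, "0") +              // "000001"
  "a1b2c3";                                        // nonce, illustrative
// Both encodings are compared as plain strings for last-write-wins.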
package/dist/core.d.ts
DELETED
@@ -1,2 +0,0 @@
-import { _ as maxEventstamp, a as AnyObject, b as createClock, c as StarlingDocument, d as ResourceObject, f as deleteResource, g as isValidEventstamp, h as MIN_EVENTSTAMP, i as mapToDocument, l as makeDocument, m as mergeResources, n as createMapFromDocument, o as DocumentChanges, p as makeResource, r as documentToMap, s as MergeDocumentsResult, t as createMap, u as mergeDocuments, v as InvalidEventstampError, x as createClockFromEventstamp, y as Clock } from "./index-BIpu-1zO.js";
-export { AnyObject, Clock, DocumentChanges, InvalidEventstampError, MIN_EVENTSTAMP, MergeDocumentsResult, ResourceObject, StarlingDocument, createClock, createClockFromEventstamp, createMap, createMapFromDocument, deleteResource, documentToMap, isValidEventstamp, makeDocument, makeResource, mapToDocument, maxEventstamp, mergeDocuments, mergeResources };
package/dist/core.js
DELETED
@@ -1,3 +0,0 @@
-import { a as makeDocument, c as makeResource, d as createClockFromEventstamp, f as MIN_EVENTSTAMP, h as InvalidEventstampError, i as mapToDocument, l as mergeResources, m as maxEventstamp, n as createMapFromDocument, o as mergeDocuments, p as isValidEventstamp, r as documentToMap, s as deleteResource, t as createMap, u as createClock } from "./core-UUzgRHaU.js";
-
-export { InvalidEventstampError, MIN_EVENTSTAMP, createClock, createClockFromEventstamp, createMap, createMapFromDocument, deleteResource, documentToMap, isValidEventstamp, makeDocument, makeResource, mapToDocument, maxEventstamp, mergeDocuments, mergeResources };
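For contrast with the resource-level objects deleted above, the 0.13.0 sources keep a write stamp on every field of a document (lib/core/document.ts in index.js.map). A small illustrative sketch of the new shape and its last-write-wins merge; the stamps and values are made up, and mergeDocuments is an internal helper here, not necessarily part of the public export surface:

// Each field carries its own value and stamp; stamps compare as strings.
type Field = { "~value": unknown; "~stamp": string };
type Document = Record<string, Field>;

const replicaA: Document = {
  title: { "~value": "Draft", "~stamp": "01941f297c00000000aaaaaa" },
  done: { "~value": false, "~stamp": "01941f297c00000000aaaaaa" },
};
const replicaB: Document = {
  title: { "~value": "Final", "~stamp": "01941f297c00000001bbbbbb" },
};

// mergeDocuments(replicaA, replicaB) keeps replicaB.title (newer stamp) and
// replicaA.done (present on only one side); deletions live in a separate
// tombstones record keyed by document id.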