@tanstack/db 0.4.14 → 0.4.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.cjs +8 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +2 -0
- package/dist/cjs/indexes/auto-index.cjs +17 -8
- package/dist/cjs/indexes/auto-index.cjs.map +1 -1
- package/dist/cjs/local-storage.cjs +21 -18
- package/dist/cjs/local-storage.cjs.map +1 -1
- package/dist/cjs/local-storage.d.cts +9 -0
- package/dist/cjs/paced-mutations.cjs +52 -0
- package/dist/cjs/paced-mutations.cjs.map +1 -0
- package/dist/cjs/paced-mutations.d.cts +81 -0
- package/dist/cjs/query/optimizer.cjs +17 -2
- package/dist/cjs/query/optimizer.cjs.map +1 -1
- package/dist/cjs/strategies/debounceStrategy.cjs +21 -0
- package/dist/cjs/strategies/debounceStrategy.cjs.map +1 -0
- package/dist/cjs/strategies/debounceStrategy.d.cts +22 -0
- package/dist/cjs/strategies/index.d.cts +4 -0
- package/dist/cjs/strategies/queueStrategy.cjs +33 -0
- package/dist/cjs/strategies/queueStrategy.cjs.map +1 -0
- package/dist/cjs/strategies/queueStrategy.d.cts +43 -0
- package/dist/cjs/strategies/throttleStrategy.cjs +21 -0
- package/dist/cjs/strategies/throttleStrategy.cjs.map +1 -0
- package/dist/cjs/strategies/throttleStrategy.d.cts +39 -0
- package/dist/cjs/strategies/types.d.cts +103 -0
- package/dist/esm/index.d.ts +2 -0
- package/dist/esm/index.js +8 -0
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/indexes/auto-index.js +17 -8
- package/dist/esm/indexes/auto-index.js.map +1 -1
- package/dist/esm/local-storage.d.ts +9 -0
- package/dist/esm/local-storage.js +21 -18
- package/dist/esm/local-storage.js.map +1 -1
- package/dist/esm/paced-mutations.d.ts +81 -0
- package/dist/esm/paced-mutations.js +52 -0
- package/dist/esm/paced-mutations.js.map +1 -0
- package/dist/esm/query/optimizer.js +17 -2
- package/dist/esm/query/optimizer.js.map +1 -1
- package/dist/esm/strategies/debounceStrategy.d.ts +22 -0
- package/dist/esm/strategies/debounceStrategy.js +21 -0
- package/dist/esm/strategies/debounceStrategy.js.map +1 -0
- package/dist/esm/strategies/index.d.ts +4 -0
- package/dist/esm/strategies/queueStrategy.d.ts +43 -0
- package/dist/esm/strategies/queueStrategy.js +33 -0
- package/dist/esm/strategies/queueStrategy.js.map +1 -0
- package/dist/esm/strategies/throttleStrategy.d.ts +39 -0
- package/dist/esm/strategies/throttleStrategy.js +21 -0
- package/dist/esm/strategies/throttleStrategy.js.map +1 -0
- package/dist/esm/strategies/types.d.ts +103 -0
- package/package.json +4 -1
- package/src/index.ts +2 -0
- package/src/indexes/auto-index.ts +23 -10
- package/src/local-storage.ts +41 -17
- package/src/paced-mutations.ts +169 -0
- package/src/query/optimizer.ts +31 -4
- package/src/strategies/debounceStrategy.ts +45 -0
- package/src/strategies/index.ts +17 -0
- package/src/strategies/queueStrategy.ts +75 -0
- package/src/strategies/throttleStrategy.ts +62 -0
- package/src/strategies/types.ts +130 -0
package/dist/esm/index.js
CHANGED

@@ -7,6 +7,7 @@ import { createOptimisticAction } from "./optimistic-action.js";
 import { localOnlyCollectionOptions } from "./local-only.js";
 import { localStorageCollectionOptions } from "./local-storage.js";
 import { AggregateFunctionNotInSelectError, AggregateNotSupportedError, CannotCombineEmptyExpressionListError, CollectionConfigurationError, CollectionInErrorStateError, CollectionInputNotFoundError, CollectionIsInErrorStateError, CollectionOperationError, CollectionRequiresConfigError, CollectionRequiresSyncConfigError, CollectionStateError, DeleteKeyNotFoundError, DistinctRequiresSelectError, DuplicateKeyError, DuplicateKeySyncError, EmptyReferencePathError, GroupByError, HavingRequiresGroupByError, InvalidCollectionStatusTransitionError, InvalidJoinCondition, InvalidJoinConditionLeftSourceError, InvalidJoinConditionRightSourceError, InvalidJoinConditionSameSourceError, InvalidJoinConditionSourceMismatchError, InvalidSchemaError, InvalidSourceError, InvalidStorageDataFormatError, InvalidStorageObjectFormatError, JoinCollectionNotFoundError, JoinConditionMustBeEqualityError, JoinError, KeyUpdateNotAllowedError, LimitOffsetRequireOrderByError, LocalStorageCollectionError, MissingAliasInputsError, MissingDeleteHandlerError, MissingHandlerError, MissingInsertHandlerError, MissingMutationFunctionError, MissingUpdateArgumentError, MissingUpdateHandlerError, NegativeActiveSubscribersError, NoKeysPassedToDeleteError, NoKeysPassedToUpdateError, NoPendingSyncTransactionCommitError, NoPendingSyncTransactionWriteError, NonAggregateExpressionNotInGroupByError, NonRetriableError, OnlyOneSourceAllowedError, QueryBuilderError, QueryCompilationError, QueryMustHaveFromClauseError, QueryOptimizerError, SchemaMustBeSynchronousError, SchemaValidationError, SerializationError, SetWindowRequiresOrderByError, StorageError, StorageKeyRequiredError, SubQueryMustHaveFromClauseError, SubscriptionNotFoundError, SyncCleanupError, SyncTransactionAlreadyCommittedError, SyncTransactionAlreadyCommittedWriteError, TanStackDBError, TransactionAlreadyCompletedRollbackError, TransactionError, TransactionNotPendingCommitError, TransactionNotPendingMutateError, UndefinedKeyError, UnknownExpressionTypeError, UnknownFunctionError, UnknownHavingExpressionTypeError, UnsupportedAggregateFunctionError, UnsupportedFromTypeError, UnsupportedJoinSourceTypeError, UnsupportedJoinTypeError, UpdateKeyNotFoundError, WhereClauseConversionError } from "./errors.js";
+import { createPacedMutations } from "./paced-mutations.js";
 import { BaseIndex, IndexOperation } from "./indexes/base-index.js";
 import { BTreeIndex } from "./indexes/btree-index.js";
 import { IndexProxy, LazyIndexWrapper } from "./indexes/lazy-index.js";
@@ -14,6 +15,9 @@ import { BaseQueryBuilder, Query } from "./query/builder/index.js";
 import { add, and, avg, coalesce, concat, count, eq, gt, gte, ilike, inArray, isNull, isUndefined, length, like, lower, lt, lte, max, min, not, or, sum, upper } from "./query/builder/functions.js";
 import { compileQuery } from "./query/compiler/index.js";
 import { createLiveQueryCollection, liveQueryCollectionOptions } from "./query/live-query-collection.js";
+import { debounceStrategy } from "./strategies/debounceStrategy.js";
+import { queueStrategy } from "./strategies/queueStrategy.js";
+import { throttleStrategy } from "./strategies/throttleStrategy.js";
 export {
   AggregateFunctionNotInSelectError,
   AggregateNotSupportedError,
@@ -116,7 +120,9 @@ export {
   createCollection,
   createLiveQueryCollection,
   createOptimisticAction,
+  createPacedMutations,
   createTransaction,
+  debounceStrategy,
   eq,
   getActiveTransaction,
   gt,
@@ -137,7 +143,9 @@ export {
   min,
   not,
   or,
+  queueStrategy,
   sum,
+  throttleStrategy,
   upper,
   withArrayChangeTracking,
   withChangeTracking
package/dist/esm/index.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":"
+{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;"}
package/dist/esm/indexes/auto-index.js
CHANGED

@@ -17,14 +17,23 @@ function ensureIndexForField(fieldName, fieldPath, collection, compareOptions =
     return;
   }
   try {
-    collection.createIndex(
-
-
-
-
+    collection.createIndex(
+      (row) => {
+        let current = row;
+        for (const part of fieldPath) {
+          current = current[part];
+        }
+        return current;
+      },
+      {
+        name: `auto:${fieldPath.join(`.`)}`,
+        indexType: BTreeIndex,
+        options: compareFn ? { compareFn, compareOptions } : {}
+      }
+    );
   } catch (error) {
     console.warn(
-      `${collection.id ? `[${collection.id}] ` : ``}Failed to create auto-index for field "${
+      `${collection.id ? `[${collection.id}] ` : ``}Failed to create auto-index for field path "${fieldPath.join(`.`)}":`,
       error
     );
   }
@@ -60,10 +69,10 @@ function extractIndexableExpressions(expression) {
     }
     const fieldRef = func.args[0];
     const fieldPath = fieldRef.path;
-    if (fieldPath.length
+    if (fieldPath.length === 0) {
      return;
    }
-    const fieldName = fieldPath
+    const fieldName = fieldPath.join(`_`);
    results.push({ fieldName, fieldPath });
  }
  extractFromExpression(expression);
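In the new code, the auto-indexer creates the index with an accessor that walks the full field path, names the index `auto:` followed by the dot-joined path, and backs it with `BTreeIndex`. A minimal sketch of that accessor logic, using a hypothetical nested row shape:

```ts
// Hypothetical field path and row, to illustrate how the new accessor
// in ensureIndexForField resolves a nested field before indexing it.
const fieldPath = [`profile`, `address`, `city`]
const row = { profile: { address: { city: `Berlin` } } }

let current: any = row
for (const part of fieldPath) {
  current = current[part]
}
// current === "Berlin"; the index is registered under the name
// `auto:profile.address.city`.
```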
package/dist/esm/indexes/auto-index.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"auto-index.js","sources":["../../../src/indexes/auto-index.ts"],"sourcesContent":["import { DEFAULT_COMPARE_OPTIONS } from \"../utils\"\nimport { BTreeIndex } from \"./btree-index\"\nimport type { CompareOptions } from \"../query/builder/types\"\nimport type { BasicExpression } from \"../query/ir\"\nimport type { CollectionImpl } from \"../collection/index.js\"\n\nexport interface AutoIndexConfig {\n autoIndex?: `off` | `eager`\n}\n\nfunction shouldAutoIndex(collection: CollectionImpl<any, any, any, any, any>) {\n // Only proceed if auto-indexing is enabled\n if (collection.config.autoIndex !== `eager`) {\n return false\n }\n\n return true\n}\n\nexport function ensureIndexForField<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n fieldName: string,\n fieldPath: Array<string>,\n collection: CollectionImpl<T, TKey, any, any, any>,\n compareOptions: CompareOptions = DEFAULT_COMPARE_OPTIONS,\n compareFn?: (a: any, b: any) => number\n) {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Check if we already have an index for this field\n const existingIndex = Array.from(collection.indexes.values()).find(\n (index) =>\n index.matchesField(fieldPath) &&\n index.matchesCompareOptions(compareOptions)\n )\n\n if (existingIndex) {\n return // Index already exists\n }\n\n // Create a new index for this field using the collection's createIndex method\n try {\n collection.createIndex((row) =>
+
{"version":3,"file":"auto-index.js","sources":["../../../src/indexes/auto-index.ts"],"sourcesContent":["import { DEFAULT_COMPARE_OPTIONS } from \"../utils\"\nimport { BTreeIndex } from \"./btree-index\"\nimport type { CompareOptions } from \"../query/builder/types\"\nimport type { BasicExpression } from \"../query/ir\"\nimport type { CollectionImpl } from \"../collection/index.js\"\n\nexport interface AutoIndexConfig {\n autoIndex?: `off` | `eager`\n}\n\nfunction shouldAutoIndex(collection: CollectionImpl<any, any, any, any, any>) {\n // Only proceed if auto-indexing is enabled\n if (collection.config.autoIndex !== `eager`) {\n return false\n }\n\n return true\n}\n\nexport function ensureIndexForField<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n fieldName: string,\n fieldPath: Array<string>,\n collection: CollectionImpl<T, TKey, any, any, any>,\n compareOptions: CompareOptions = DEFAULT_COMPARE_OPTIONS,\n compareFn?: (a: any, b: any) => number\n) {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Check if we already have an index for this field\n const existingIndex = Array.from(collection.indexes.values()).find(\n (index) =>\n index.matchesField(fieldPath) &&\n index.matchesCompareOptions(compareOptions)\n )\n\n if (existingIndex) {\n return // Index already exists\n }\n\n // Create a new index for this field using the collection's createIndex method\n try {\n // Use the proxy-based approach to create the proper accessor for nested paths\n collection.createIndex(\n (row) => {\n // Navigate through the field path\n let current: any = row\n for (const part of fieldPath) {\n current = current[part]\n }\n return current\n },\n {\n name: `auto:${fieldPath.join(`.`)}`,\n indexType: BTreeIndex,\n options: compareFn ? { compareFn, compareOptions } : {},\n }\n )\n } catch (error) {\n console.warn(\n `${collection.id ? 
`[${collection.id}] ` : ``}Failed to create auto-index for field path \"${fieldPath.join(`.`)}\":`,\n error\n )\n }\n}\n\n/**\n * Analyzes a where expression and creates indexes for all simple operations on single fields\n */\nexport function ensureIndexForExpression<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n expression: BasicExpression,\n collection: CollectionImpl<T, TKey, any, any, any>\n): void {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Extract all indexable expressions and create indexes for them\n const indexableExpressions = extractIndexableExpressions(expression)\n\n for (const { fieldName, fieldPath } of indexableExpressions) {\n ensureIndexForField(fieldName, fieldPath, collection)\n }\n}\n\n/**\n * Extracts all indexable expressions from a where expression\n */\nfunction extractIndexableExpressions(\n expression: BasicExpression\n): Array<{ fieldName: string; fieldPath: Array<string> }> {\n const results: Array<{ fieldName: string; fieldPath: Array<string> }> = []\n\n function extractFromExpression(expr: BasicExpression): void {\n if (expr.type !== `func`) {\n return\n }\n\n const func = expr as any\n\n // Handle 'and' expressions by recursively processing all arguments\n if (func.name === `and`) {\n for (const arg of func.args) {\n extractFromExpression(arg)\n }\n return\n }\n\n // Check if this is a supported operation\n const supportedOperations = [`eq`, `gt`, `gte`, `lt`, `lte`, `in`]\n if (!supportedOperations.includes(func.name)) {\n return\n }\n\n // Check if the first argument is a property reference\n if (func.args.length < 1 || func.args[0].type !== `ref`) {\n return\n }\n\n const fieldRef = func.args[0]\n const fieldPath = fieldRef.path\n\n // Skip if the path is empty\n if (fieldPath.length === 0) {\n return\n }\n\n // For nested paths, use the full path joined with underscores as the field name\n // For simple paths, use the first (and only) element\n const fieldName = fieldPath.join(`_`)\n results.push({ fieldName, fieldPath })\n }\n\n extractFromExpression(expression)\n return 
results\n}\n"],"names":[],"mappings":";;AAUA,SAAS,gBAAgB,YAAqD;AAE5E,MAAI,WAAW,OAAO,cAAc,SAAS;AAC3C,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEO,SAAS,oBAId,WACA,WACA,YACA,iBAAiC,yBACjC,WACA;AACA,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,gBAAgB,MAAM,KAAK,WAAW,QAAQ,OAAA,CAAQ,EAAE;AAAA,IAC5D,CAAC,UACC,MAAM,aAAa,SAAS,KAC5B,MAAM,sBAAsB,cAAc;AAAA,EAAA;AAG9C,MAAI,eAAe;AACjB;AAAA,EACF;AAGA,MAAI;AAEF,eAAW;AAAA,MACT,CAAC,QAAQ;AAEP,YAAI,UAAe;AACnB,mBAAW,QAAQ,WAAW;AAC5B,oBAAU,QAAQ,IAAI;AAAA,QACxB;AACA,eAAO;AAAA,MACT;AAAA,MACA;AAAA,QACE,MAAM,QAAQ,UAAU,KAAK,GAAG,CAAC;AAAA,QACjC,WAAW;AAAA,QACX,SAAS,YAAY,EAAE,WAAW,eAAA,IAAmB,CAAA;AAAA,MAAC;AAAA,IACxD;AAAA,EAEJ,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,GAAG,WAAW,KAAK,IAAI,WAAW,EAAE,OAAO,EAAE,+CAA+C,UAAU,KAAK,GAAG,CAAC;AAAA,MAC/G;AAAA,IAAA;AAAA,EAEJ;AACF;AAKO,SAAS,yBAId,YACA,YACM;AACN,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,uBAAuB,4BAA4B,UAAU;AAEnE,aAAW,EAAE,WAAW,UAAA,KAAe,sBAAsB;AAC3D,wBAAoB,WAAW,WAAW,UAAU;AAAA,EACtD;AACF;AAKA,SAAS,4BACP,YACwD;AACxD,QAAM,UAAkE,CAAA;AAExE,WAAS,sBAAsB,MAA6B;AAC1D,QAAI,KAAK,SAAS,QAAQ;AACxB;AAAA,IACF;AAEA,UAAM,OAAO;AAGb,QAAI,KAAK,SAAS,OAAO;AACvB,iBAAW,OAAO,KAAK,MAAM;AAC3B,8BAAsB,GAAG;AAAA,MAC3B;AACA;AAAA,IACF;AAGA,UAAM,sBAAsB,CAAC,MAAM,MAAM,OAAO,MAAM,OAAO,IAAI;AACjE,QAAI,CAAC,oBAAoB,SAAS,KAAK,IAAI,GAAG;AAC5C;AAAA,IACF;AAGA,QAAI,KAAK,KAAK,SAAS,KAAK,KAAK,KAAK,CAAC,EAAE,SAAS,OAAO;AACvD;AAAA,IACF;AAEA,UAAM,WAAW,KAAK,KAAK,CAAC;AAC5B,UAAM,YAAY,SAAS;AAG3B,QAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,IACF;AAIA,UAAM,YAAY,UAAU,KAAK,GAAG;AACpC,YAAQ,KAAK,EAAE,WAAW,UAAA,CAAW;AAAA,EACvC;AAEA,wBAAsB,UAAU;AAChC,SAAO;AACT;"}
package/dist/esm/local-storage.d.ts
CHANGED

@@ -11,6 +11,10 @@ export type StorageEventApi = {
     addEventListener: (type: `storage`, listener: (event: StorageEvent) => void) => void;
     removeEventListener: (type: `storage`, listener: (event: StorageEvent) => void) => void;
 };
+export interface Parser {
+    parse: (data: string) => unknown;
+    stringify: (data: unknown) => string;
+}
 /**
  * Configuration interface for localStorage collection options
  * @template T - The type of items in the collection
@@ -32,6 +36,11 @@ export interface LocalStorageCollectionConfig<T extends object = object, TSchema
      * Can be any object that implements addEventListener/removeEventListener for storage events
      */
     storageEventApi?: StorageEventApi;
+    /**
+     * Parser to use for serializing and deserializing data to and from storage
+     * Defaults to JSON
+     */
+    parser?: Parser;
 }
 /**
  * Type for the clear utility function
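The new `parser` option lets any object with compatible `parse`/`stringify` methods replace the default `JSON` used for serializing and deserializing collection data. A minimal sketch, assuming a hypothetical item shape with a `Date` field and wrapping plain JSON with a date-reviving `parse`:

```ts
import { createCollection, localStorageCollectionOptions } from "@tanstack/db"

// Hypothetical parser satisfying the Parser interface above:
// stringify stays plain JSON, parse revives ISO date strings into Dates.
const dateAwareParser = {
  stringify: (data: unknown) => JSON.stringify(data),
  parse: (data: string) =>
    JSON.parse(data, (_key, value) =>
      typeof value === `string` && /^\d{4}-\d{2}-\d{2}T/.test(value)
        ? new Date(value)
        : value
    ),
}

const todos = createCollection(
  localStorageCollectionOptions({
    storageKey: `todos`,
    getKey: (item: { id: string; createdAt: Date }) => item.id,
    parser: dateAwareParser, // omitted -> defaults to JSON
  })
)
```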
package/dist/esm/local-storage.js
CHANGED

@@ -1,7 +1,7 @@
 import { StorageKeyRequiredError, SerializationError, InvalidStorageDataFormatError, InvalidStorageObjectFormatError } from "./errors.js";
-function validateJsonSerializable(value, operation) {
+function validateJsonSerializable(parser, value, operation) {
   try {
-    JSON.stringify(value);
+    parser.stringify(value);
   } catch (error) {
     throw new SerializationError(
       operation,
@@ -40,11 +40,13 @@ function localStorageCollectionOptions(config) {
   }
   const storage = config.storage || (typeof window !== `undefined` ? window.localStorage : null) || createInMemoryStorage();
   const storageEventApi = config.storageEventApi || (typeof window !== `undefined` ? window : null) || createNoOpStorageEventApi();
+  const parser = config.parser || JSON;
   const lastKnownData = /* @__PURE__ */ new Map();
   const sync = createLocalStorageSync(
     config.storageKey,
     storage,
     storageEventApi,
+    parser,
     config.getKey,
     lastKnownData
   );
@@ -59,7 +61,7 @@ function localStorageCollectionOptions(config) {
       dataMap.forEach((storedItem, key) => {
         objectData[String(key)] = storedItem;
       });
-      const serialized = JSON.stringify(objectData);
+      const serialized = parser.stringify(objectData);
       storage.setItem(config.storageKey, serialized);
     } catch (error) {
       console.error(
@@ -78,13 +80,13 @@ function localStorageCollectionOptions(config) {
   };
   const wrappedOnInsert = async (params) => {
     params.transaction.mutations.forEach((mutation) => {
-      validateJsonSerializable(mutation.modified, `insert`);
+      validateJsonSerializable(parser, mutation.modified, `insert`);
     });
     let handlerResult = {};
     if (config.onInsert) {
       handlerResult = await config.onInsert(params) ?? {};
     }
-    const currentData = loadFromStorage(config.storageKey, storage);
+    const currentData = loadFromStorage(config.storageKey, storage, parser);
     params.transaction.mutations.forEach((mutation) => {
       const key = config.getKey(mutation.modified);
       const storedItem = {
@@ -99,13 +101,13 @@ function localStorageCollectionOptions(config) {
   };
   const wrappedOnUpdate = async (params) => {
     params.transaction.mutations.forEach((mutation) => {
-      validateJsonSerializable(mutation.modified, `update`);
+      validateJsonSerializable(parser, mutation.modified, `update`);
     });
     let handlerResult = {};
     if (config.onUpdate) {
       handlerResult = await config.onUpdate(params) ?? {};
     }
-    const currentData = loadFromStorage(config.storageKey, storage);
+    const currentData = loadFromStorage(config.storageKey, storage, parser);
     params.transaction.mutations.forEach((mutation) => {
       const key = config.getKey(mutation.modified);
       const storedItem = {
@@ -123,7 +125,7 @@ function localStorageCollectionOptions(config) {
     if (config.onDelete) {
       handlerResult = await config.onDelete(params) ?? {};
     }
-    const currentData = loadFromStorage(config.storageKey, storage);
+    const currentData = loadFromStorage(config.storageKey, storage, parser);
     params.transaction.mutations.forEach((mutation) => {
       const key = config.getKey(mutation.original);
       currentData.delete(key);
@@ -157,16 +159,17 @@ function localStorageCollectionOptions(config) {
       switch (mutation.type) {
         case `insert`:
         case `update`:
-          validateJsonSerializable(mutation.modified, mutation.type);
+          validateJsonSerializable(parser, mutation.modified, mutation.type);
           break;
         case `delete`:
-          validateJsonSerializable(mutation.original, mutation.type);
+          validateJsonSerializable(parser, mutation.original, mutation.type);
           break;
       }
     }
     const currentData = loadFromStorage(
       config.storageKey,
-      storage
+      storage,
+      parser
     );
     for (const mutation of collectionMutations) {
       const key = mutation.key;
@@ -203,13 +206,13 @@ function localStorageCollectionOptions(config) {
     }
   };
 }
-function loadFromStorage(storageKey, storage) {
+function loadFromStorage(storageKey, storage, parser) {
   try {
     const rawData = storage.getItem(storageKey);
     if (!rawData) {
       return /* @__PURE__ */ new Map();
     }
-    const parsed = JSON.parse(rawData);
+    const parsed = parser.parse(rawData);
     const dataMap = /* @__PURE__ */ new Map();
     if (typeof parsed === `object` && parsed !== null && !Array.isArray(parsed)) {
       Object.entries(parsed).forEach(([key, value]) => {
@@ -232,7 +235,7 @@ function loadFromStorage(storageKey, storage) {
     return /* @__PURE__ */ new Map();
   }
 }
-function createLocalStorageSync(storageKey, storage, storageEventApi, _getKey, lastKnownData) {
+function createLocalStorageSync(storageKey, storage, storageEventApi, parser, _getKey, lastKnownData) {
   let syncParams = null;
   let collection = null;
   const findChanges = (oldData, newData) => {
@@ -255,13 +258,13 @@ function createLocalStorageSync(storageKey, storage, storageEventApi, _getKey, l
   const processStorageChanges = () => {
     if (!syncParams) return;
     const { begin, write, commit } = syncParams;
-    const newData = loadFromStorage(storageKey, storage);
+    const newData = loadFromStorage(storageKey, storage, parser);
     const changes = findChanges(lastKnownData, newData);
     if (changes.length > 0) {
       begin();
       changes.forEach(({ type, value }) => {
         if (value) {
-          validateJsonSerializable(value, type);
+          validateJsonSerializable(parser, value, type);
           write({ type, value });
         }
       });
@@ -277,11 +280,11 @@ function createLocalStorageSync(storageKey, storage, storageEventApi, _getKey, l
       const { begin, write, commit, markReady } = params;
       syncParams = params;
       collection = params.collection;
-      const initialData = loadFromStorage(storageKey, storage);
+      const initialData = loadFromStorage(storageKey, storage, parser);
       if (initialData.size > 0) {
         begin();
         initialData.forEach((storedItem) => {
-          validateJsonSerializable(storedItem.data, `load`);
+          validateJsonSerializable(parser, storedItem.data, `load`);
           write({ type: `insert`, value: storedItem.data });
         });
         commit();
package/dist/esm/local-storage.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"local-storage.js","sources":["../../src/local-storage.ts"],"sourcesContent":["import {\n InvalidStorageDataFormatError,\n InvalidStorageObjectFormatError,\n SerializationError,\n StorageKeyRequiredError,\n} from \"./errors\"\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n PendingMutation,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Storage API interface - subset of DOM Storage that we need\n */\nexport type StorageApi = Pick<Storage, `getItem` | `setItem` | `removeItem`>\n\n/**\n * Storage event API - subset of Window for 'storage' events only\n */\nexport type StorageEventApi = {\n addEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n removeEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n}\n\n/**\n * Internal storage format that includes version tracking\n */\ninterface StoredItem<T> {\n versionKey: string\n data: T\n}\n\n/**\n * Configuration interface for localStorage collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalStorageCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /**\n * The key to use for storing the collection data in localStorage/sessionStorage\n */\n storageKey: string\n\n /**\n * Storage API to use (defaults to window.localStorage)\n * Can be any object that implements the Storage interface (e.g., sessionStorage)\n */\n storage?: StorageApi\n\n /**\n * Storage event API to use for cross-tab synchronization (defaults to window)\n * Can be any object that implements addEventListener/removeEventListener for storage events\n */\n storageEventApi?: StorageEventApi\n}\n\n/**\n * Type for the clear utility function\n */\nexport type ClearStorageFn = () => void\n\n/**\n * Type for the getStorageSize utility function\n */\nexport type GetStorageSizeFn = () => number\n\n/**\n * LocalStorage collection utilities type\n */\nexport interface LocalStorageCollectionUtils extends UtilsRecord {\n clearStorage: ClearStorageFn\n getStorageSize: GetStorageSizeFn\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to localStorage.\n * This should be called in your transaction's mutationFn to persist local-storage data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localSettings = createCollection(localStorageCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-storage mutations after success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\n/**\n * Validates that a value can be JSON serialized\n * @param value - The value to validate for JSON serialization\n * @param operation - The operation type being performed (for error messages)\n * @throws Error if the value cannot be JSON serialized\n */\nfunction 
validateJsonSerializable(value: any, operation: string): void {\n try {\n JSON.stringify(value)\n } catch (error) {\n throw new SerializationError(\n operation,\n error instanceof Error ? error.message : String(error)\n )\n }\n}\n\n/**\n * Generate a UUID for version tracking\n * @returns A unique identifier string for tracking data versions\n */\nfunction generateUuid(): string {\n return crypto.randomUUID()\n}\n\n/**\n * Creates an in-memory storage implementation that mimics the StorageApi interface\n * Used as a fallback when localStorage is not available (e.g., server-side rendering)\n * @returns An object implementing the StorageApi interface using an in-memory Map\n */\nfunction createInMemoryStorage(): StorageApi {\n const storage = new Map<string, string>()\n\n return {\n getItem(key: string): string | null {\n return storage.get(key) ?? null\n },\n setItem(key: string, value: string): void {\n storage.set(key, value)\n },\n removeItem(key: string): void {\n storage.delete(key)\n },\n }\n}\n\n/**\n * Creates a no-op storage event API for environments without window (e.g., server-side)\n * This provides the required interface but doesn't actually listen to any events\n * since cross-tab synchronization is not possible in server environments\n * @returns An object implementing the StorageEventApi interface with no-op methods\n */\nfunction createNoOpStorageEventApi(): StorageEventApi {\n return {\n addEventListener: () => {\n // No-op: cannot listen to storage events without window\n },\n removeEventListener: () => {\n // No-op: cannot remove listeners without window\n },\n }\n}\n\n/**\n * Creates localStorage collection options for use with a standard Collection\n *\n * This function creates a collection that persists data to localStorage/sessionStorage\n * and synchronizes changes across browser tabs using storage events.\n *\n * **Fallback Behavior:**\n *\n * When localStorage is not available (e.g., in server-side rendering environments),\n * this function automatically falls back to an in-memory storage implementation.\n * This prevents errors during module initialization and allows the collection to\n * work in any environment, though data will not persist across page reloads or\n * be shared across tabs when using the in-memory fallback.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. 
This is necessary because local-storage collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template TExplicit - The explicit type of items in the collection (highest priority)\n * @template TSchema - The schema type for validation and type inference (second priority)\n * @template TFallback - The fallback type if no explicit or schema type is provided\n * @param config - Configuration options for the localStorage collection\n * @returns Collection options with utilities including clearStorage, getStorageSize, and acceptMutations\n *\n * @example\n * // Basic localStorage collection\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with custom storage\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * storage: window.sessionStorage, // Use sessionStorage instead\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with mutation handlers\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localSettings = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'user-settings',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use settings data in API call\n * const settingsMutations = transaction.mutations.filter(m => m.collection === localSettings)\n * await api.updateUserProfile({ settings: settingsMutations[0]?.modified })\n *\n * // Persist local-storage mutations after API success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localSettings.insert({ id: 'theme', value: 'dark' })\n * apiCollection.insert({ id: 2, data: 'profile data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localStorageCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localStorageCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: never // no schema in the result\n}\n\nexport function localStorageCollectionOptions(\n config: LocalStorageCollectionConfig<any, any, string | number>\n): Omit<CollectionConfig<any, string | number, any>, `id`> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: StandardSchemaV1\n} {\n // Validate required parameters\n if (!config.storageKey) {\n throw new StorageKeyRequiredError()\n }\n\n // Default to window.localStorage if no storage is provided\n // Fall 
back to in-memory storage if localStorage is not available (e.g., server-side rendering)\n const storage =\n config.storage ||\n (typeof window !== `undefined` ? window.localStorage : null) ||\n createInMemoryStorage()\n\n // Default to window for storage events if not provided\n // Fall back to no-op storage event API if window is not available (e.g., server-side rendering)\n const storageEventApi =\n config.storageEventApi ||\n (typeof window !== `undefined` ? window : null) ||\n createNoOpStorageEventApi()\n\n // Track the last known state to detect changes\n const lastKnownData = new Map<string | number, StoredItem<any>>()\n\n // Create the sync configuration\n const sync = createLocalStorageSync<any>(\n config.storageKey,\n storage,\n storageEventApi,\n config.getKey,\n lastKnownData\n )\n\n /**\n * Manual trigger function for local sync updates\n * Forces a check for storage changes and updates the collection if needed\n */\n const triggerLocalSync = () => {\n if (sync.manualTrigger) {\n sync.manualTrigger()\n }\n }\n\n /**\n * Save data to storage\n * @param dataMap - Map of items with version tracking to save to storage\n */\n const saveToStorage = (\n dataMap: Map<string | number, StoredItem<any>>\n ): void => {\n try {\n // Convert Map to object format for storage\n const objectData: Record<string, StoredItem<any>> = {}\n dataMap.forEach((storedItem, key) => {\n objectData[String(key)] = storedItem\n })\n const serialized = JSON.stringify(objectData)\n storage.setItem(config.storageKey, serialized)\n } catch (error) {\n console.error(\n `[LocalStorageCollection] Error saving data to storage key \"${config.storageKey}\":`,\n error\n )\n throw error\n }\n }\n\n /**\n * Removes all collection data from the configured storage\n */\n const clearStorage: ClearStorageFn = (): void => {\n storage.removeItem(config.storageKey)\n }\n\n /**\n * Get the size of the stored data in bytes (approximate)\n * @returns The approximate size in bytes of the stored collection data\n */\n const getStorageSize: GetStorageSizeFn = (): number => {\n const data = storage.getItem(config.storageKey)\n return data ? new Blob([data]).size : 0\n }\n\n /*\n * Create wrapper handlers for direct persistence operations that perform actual storage operations\n * Wraps the user's onInsert handler to also save changes to localStorage\n */\n const wrappedOnInsert = async (params: InsertMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(mutation.modified, `insert`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onInsert) {\n handlerResult = (await config.onInsert(params)) ?? 
{}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage)\n\n // Add new items with version keys\n params.transaction.mutations.forEach((mutation) => {\n const key = config.getKey(mutation.modified)\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n const wrappedOnUpdate = async (params: UpdateMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(mutation.modified, `update`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onUpdate) {\n handlerResult = (await config.onUpdate(params)) ?? {}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage)\n\n // Update items with new version keys\n params.transaction.mutations.forEach((mutation) => {\n const key = config.getKey(mutation.modified)\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n const wrappedOnDelete = async (params: DeleteMutationFnParams<any>) => {\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onDelete) {\n handlerResult = (await config.onDelete(params)) ?? {}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage)\n\n // Remove items\n params.transaction.mutations.forEach((mutation) => {\n // For delete operations, mutation.original contains the full object\n const key = config.getKey(mutation.original)\n currentData.delete(key)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n // Extract standard Collection config properties\n const {\n storageKey: _storageKey,\n storage: _storage,\n storageEventApi: _storageEventApi,\n onInsert: _onInsert,\n onUpdate: _onUpdate,\n onDelete: _onDelete,\n id,\n ...restConfig\n } = config\n\n // Default id to a pattern based on storage key if not provided\n const collectionId = id ?? 
`local-collection:${config.storageKey}`\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to storage\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n // Use collection ID for filtering if collection reference isn't available yet\n const collectionMutations = transaction.mutations.filter((m) => {\n // Try to match by collection reference first\n if (sync.collection && m.collection === sync.collection) {\n return true\n }\n // Fall back to matching by collection ID\n return m.collection.id === collectionId\n })\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Validate all mutations can be serialized before modifying storage\n for (const mutation of collectionMutations) {\n switch (mutation.type) {\n case `insert`:\n case `update`:\n validateJsonSerializable(mutation.modified, mutation.type)\n break\n case `delete`:\n validateJsonSerializable(mutation.original, mutation.type)\n break\n }\n }\n\n // Load current data from storage\n const currentData = loadFromStorage<Record<string, unknown>>(\n config.storageKey,\n storage\n )\n\n // Apply each mutation\n for (const mutation of collectionMutations) {\n // Use the engine's pre-computed key to avoid key derivation issues\n const key = mutation.key\n\n switch (mutation.type) {\n case `insert`:\n case `update`: {\n const storedItem: StoredItem<Record<string, unknown>> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n break\n }\n case `delete`: {\n currentData.delete(key)\n break\n }\n }\n }\n\n // Save to storage\n saveToStorage(currentData)\n\n // Confirm the mutations in the collection to move them from optimistic to synced state\n // This writes them through the sync interface to make them \"synced\" instead of \"optimistic\"\n sync.confirmOperationsSync(collectionMutations)\n }\n\n return {\n ...restConfig,\n id: collectionId,\n sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n clearStorage,\n getStorageSize,\n acceptMutations,\n },\n }\n}\n\n/**\n * Load data from storage and return as a Map\n * @param storageKey - The key used to store data in the storage API\n * @param storage - The storage API to load from (localStorage, sessionStorage, etc.)\n * @returns Map of stored items with version tracking, or empty Map if loading fails\n */\nfunction loadFromStorage<T extends object>(\n storageKey: string,\n storage: StorageApi\n): Map<string | number, StoredItem<T>> {\n try {\n const rawData = storage.getItem(storageKey)\n if (!rawData) {\n return new Map()\n }\n\n const parsed = JSON.parse(rawData)\n const dataMap = new Map<string | number, StoredItem<T>>()\n\n // Handle object format where keys map to StoredItem values\n if (\n typeof parsed === `object` &&\n parsed !== null &&\n !Array.isArray(parsed)\n ) {\n Object.entries(parsed).forEach(([key, value]) => {\n // Runtime check to ensure the value has the expected StoredItem structure\n if (\n value &&\n typeof value === `object` &&\n `versionKey` in value &&\n `data` in value\n ) {\n const storedItem = value as StoredItem<T>\n dataMap.set(key, storedItem)\n } else {\n throw new InvalidStorageDataFormatError(storageKey, key)\n }\n })\n } else {\n throw new InvalidStorageObjectFormatError(storageKey)\n }\n\n return dataMap\n } catch (error) {\n console.warn(\n `[LocalStorageCollection] Error loading data 
from storage key \"${storageKey}\":`,\n error\n )\n return new Map()\n }\n}\n\n/**\n * Internal function to create localStorage sync configuration\n * Creates a sync configuration that handles localStorage persistence and cross-tab synchronization\n * @param storageKey - The key used for storing data in localStorage\n * @param storage - The storage API to use (localStorage, sessionStorage, etc.)\n * @param storageEventApi - The event API for listening to storage changes\n * @param getKey - Function to extract the key from an item\n * @param lastKnownData - Map tracking the last known state for change detection\n * @returns Sync configuration with manual trigger capability\n */\nfunction createLocalStorageSync<T extends object>(\n storageKey: string,\n storage: StorageApi,\n storageEventApi: StorageEventApi,\n _getKey: (item: T) => string | number,\n lastKnownData: Map<string | number, StoredItem<T>>\n): SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n confirmOperationsSync: (mutations: Array<any>) => void\n} {\n let syncParams: Parameters<SyncConfig<T>[`sync`]>[0] | null = null\n let collection: any = null\n\n /**\n * Compare two Maps to find differences using version keys\n * @param oldData - The previous state of stored items\n * @param newData - The current state of stored items\n * @returns Array of changes with type, key, and value information\n */\n const findChanges = (\n oldData: Map<string | number, StoredItem<T>>,\n newData: Map<string | number, StoredItem<T>>\n ): Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> => {\n const changes: Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> = []\n\n // Check for deletions and updates\n oldData.forEach((oldStoredItem, key) => {\n const newStoredItem = newData.get(key)\n if (!newStoredItem) {\n changes.push({ type: `delete`, key, value: oldStoredItem.data })\n } else if (oldStoredItem.versionKey !== newStoredItem.versionKey) {\n changes.push({ type: `update`, key, value: newStoredItem.data })\n }\n })\n\n // Check for insertions\n newData.forEach((newStoredItem, key) => {\n if (!oldData.has(key)) {\n changes.push({ type: `insert`, key, value: newStoredItem.data })\n }\n })\n\n return changes\n }\n\n /**\n * Process storage changes and update collection\n * Loads new data from storage, compares with last known state, and applies changes\n */\n const processStorageChanges = () => {\n if (!syncParams) return\n\n const { begin, write, commit } = syncParams\n\n // Load the new data\n const newData = loadFromStorage<T>(storageKey, storage)\n\n // Find the specific changes\n const changes = findChanges(lastKnownData, newData)\n\n if (changes.length > 0) {\n begin()\n changes.forEach(({ type, value }) => {\n if (value) {\n validateJsonSerializable(value, type)\n write({ type, value })\n }\n })\n commit()\n\n // Update lastKnownData\n lastKnownData.clear()\n newData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n }\n }\n\n const syncConfig: SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n } = {\n sync: (params: Parameters<SyncConfig<T>[`sync`]>[0]) => {\n const { begin, write, commit, markReady } = params\n\n // Store sync params and collection for later use\n syncParams = params\n collection = params.collection\n\n // Initial load\n const initialData = loadFromStorage<T>(storageKey, storage)\n if (initialData.size > 0) {\n begin()\n initialData.forEach((storedItem) => {\n 
validateJsonSerializable(storedItem.data, `load`)\n write({ type: `insert`, value: storedItem.data })\n })\n commit()\n }\n\n // Update lastKnownData\n lastKnownData.clear()\n initialData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n\n // Mark collection as ready after initial load\n markReady()\n\n // Listen for storage events from other tabs\n const handleStorageEvent = (event: StorageEvent) => {\n // Only respond to changes to our specific key and from our storage\n if (event.key !== storageKey || event.storageArea !== storage) {\n return\n }\n\n processStorageChanges()\n }\n\n // Add storage event listener for cross-tab sync\n storageEventApi.addEventListener(`storage`, handleStorageEvent)\n\n // Note: Cleanup is handled automatically by the collection when it's disposed\n },\n\n /**\n * Get sync metadata - returns storage key information\n * @returns Object containing storage key and storage type metadata\n */\n getSyncMetadata: () => ({\n storageKey,\n storageType:\n storage === (typeof window !== `undefined` ? window.localStorage : null)\n ? `localStorage`\n : `custom`,\n }),\n\n // Manual trigger function for local updates\n manualTrigger: processStorageChanges,\n\n // Collection instance reference\n collection,\n }\n\n /**\n * Confirms mutations by writing them through the sync interface\n * This moves mutations from optimistic to synced state\n * @param mutations - Array of mutation objects to confirm\n */\n const confirmOperationsSync = (mutations: Array<any>) => {\n if (!syncParams) {\n // Sync not initialized yet, mutations will be handled on next sync\n return\n }\n\n const { begin, write, commit } = syncParams\n\n // Write the mutations through sync to confirm them\n begin()\n mutations.forEach((mutation: any) => {\n write({\n type: mutation.type,\n value:\n mutation.type === `delete` ? 
mutation.original : mutation.modified,\n })\n })\n commit()\n }\n\n return {\n ...syncConfig,\n confirmOperationsSync,\n }\n}\n"],"names":[],"mappings":";AAuHA,SAAS,yBAAyB,OAAY,WAAyB;AACrE,MAAI;AACF,SAAK,UAAU,KAAK;AAAA,EACtB,SAAS,OAAO;AACd,UAAM,IAAI;AAAA,MACR;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IAAA;AAAA,EAEzD;AACF;AAMA,SAAS,eAAuB;AAC9B,SAAO,OAAO,WAAA;AAChB;AAOA,SAAS,wBAAoC;AAC3C,QAAM,8BAAc,IAAA;AAEpB,SAAO;AAAA,IACL,QAAQ,KAA4B;AAClC,aAAO,QAAQ,IAAI,GAAG,KAAK;AAAA,IAC7B;AAAA,IACA,QAAQ,KAAa,OAAqB;AACxC,cAAQ,IAAI,KAAK,KAAK;AAAA,IACxB;AAAA,IACA,WAAW,KAAmB;AAC5B,cAAQ,OAAO,GAAG;AAAA,IACpB;AAAA,EAAA;AAEJ;AAQA,SAAS,4BAA6C;AACpD,SAAO;AAAA,IACL,kBAAkB,MAAM;AAAA,IAExB;AAAA,IACA,qBAAqB,MAAM;AAAA,IAE3B;AAAA,EAAA;AAEJ;AAoHO,SAAS,8BACd,QAKA;AAEA,MAAI,CAAC,OAAO,YAAY;AACtB,UAAM,IAAI,wBAAA;AAAA,EACZ;AAIA,QAAM,UACJ,OAAO,YACN,OAAO,WAAW,cAAc,OAAO,eAAe,SACvD,sBAAA;AAIF,QAAM,kBACJ,OAAO,oBACN,OAAO,WAAW,cAAc,SAAS,SAC1C,0BAAA;AAGF,QAAM,oCAAoB,IAAA;AAG1B,QAAM,OAAO;AAAA,IACX,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,EAAA;AAOF,QAAM,mBAAmB,MAAM;AAC7B,QAAI,KAAK,eAAe;AACtB,WAAK,cAAA;AAAA,IACP;AAAA,EACF;AAMA,QAAM,gBAAgB,CACpB,YACS;AACT,QAAI;AAEF,YAAM,aAA8C,CAAA;AACpD,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,mBAAW,OAAO,GAAG,CAAC,IAAI;AAAA,MAC5B,CAAC;AACD,YAAM,aAAa,KAAK,UAAU,UAAU;AAC5C,cAAQ,QAAQ,OAAO,YAAY,UAAU;AAAA,IAC/C,SAAS,OAAO;AACd,cAAQ;AAAA,QACN,8DAA8D,OAAO,UAAU;AAAA,QAC/E;AAAA,MAAA;AAEF,YAAM;AAAA,IACR;AAAA,EACF;AAKA,QAAM,eAA+B,MAAY;AAC/C,YAAQ,WAAW,OAAO,UAAU;AAAA,EACtC;AAMA,QAAM,iBAAmC,MAAc;AACrD,UAAM,OAAO,QAAQ,QAAQ,OAAO,UAAU;AAC9C,WAAO,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,OAAO;AAAA,EACxC;AAMA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,SAAS,UAAU,QAAQ;AAAA,IACtD,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,OAAO;AAGnE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,kBAAY,IAAI,KAAK,UAAU;AAAA,IACjC,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,SAAS,UAAU,QAAQ;AAAA,IACtD,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,OAAO;AAGnE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,kBAAY,IAAI,KAAK,UAAU;AAAA,IACjC,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,OAAO;AAGnE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,kBAAY,OAAO,GAAG;AAAA,IACxB,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAGA,QAAM;AAAA,IACJ,YAAY;AAAA,IACZ,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAGJ,QAAM,eAAe,MAAM,oBAAoB,OAAO,UAAU;AAKhE,QAAM,kBAAkB,CAAC,gBAEnB;AAGJ,UAAM,sBAAsB,YAAY,UAAU,OAAO,CAAC,MAAM;AAE9D,UAAI,KAAK,cAAc,EAAE,eAAe,KAAK,YAAY;AACvD,eAAO;AAAA,MACT;AAEA,aAAO,EAAE,WAAW,OAAO;AAAA,IAC7B,CAAC;AAED,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW,YAAY,qBAAqB;AAC1C,cAAQ,SAAS,MAAA;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AACH,mCAAyB,SAAS,UAAU,SAAS,IAAI;AACzD;AAAA,QACF,KAAK;AACH,mCAAyB,SAAS,UAAU,SAAS,IAAI;AACzD;AAAA,MAAA;AAAA,IAEN;AAGA,UAAM,cAAc;AAAA,MAClB,OAAO;AAAA,MACP;AAAA,IAAA;AAIF,eAAW,YAAY,qBAAqB;AAE1C,YAAM,MAAM,SAAS;AAErB,cAAQ,SAAS
,MAAA;AAAA,QACf,KAAK;AAAA,QACL,KAAK,UAAU;AACb,gBAAM,aAAkD;AAAA,YACtD,YAAY,aAAA;AAAA,YACZ,MAAM,SAAS;AAAA,UAAA;AAEjB,sBAAY,IAAI,KAAK,UAAU;AAC/B;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,sBAAY,OAAO,GAAG;AACtB;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,kBAAc,WAAW;AAIzB,SAAK,sBAAsB,mBAAmB;AAAA,EAChD;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,IAAI;AAAA,IACJ;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EACF;AAEJ;AAQA,SAAS,gBACP,YACA,SACqC;AACrC,MAAI;AACF,UAAM,UAAU,QAAQ,QAAQ,UAAU;AAC1C,QAAI,CAAC,SAAS;AACZ,iCAAW,IAAA;AAAA,IACb;AAEA,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,UAAM,8BAAc,IAAA;AAGpB,QACE,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,MAAM,GACrB;AACA,aAAO,QAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM;AAE/C,YACE,SACA,OAAO,UAAU,YACjB,gBAAgB,SAChB,UAAU,OACV;AACA,gBAAM,aAAa;AACnB,kBAAQ,IAAI,KAAK,UAAU;AAAA,QAC7B,OAAO;AACL,gBAAM,IAAI,8BAA8B,YAAY,GAAG;AAAA,QACzD;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,IAAI,gCAAgC,UAAU;AAAA,IACtD;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,iEAAiE,UAAU;AAAA,MAC3E;AAAA,IAAA;AAEF,+BAAW,IAAA;AAAA,EACb;AACF;AAYA,SAAS,uBACP,YACA,SACA,iBACA,SACA,eAKA;AACA,MAAI,aAA0D;AAC9D,MAAI,aAAkB;AAQtB,QAAM,cAAc,CAClB,SACA,YAKI;AACJ,UAAM,UAID,CAAA;AAGL,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,YAAM,gBAAgB,QAAQ,IAAI,GAAG;AACrC,UAAI,CAAC,eAAe;AAClB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE,WAAW,cAAc,eAAe,cAAc,YAAY;AAChE,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAGD,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,UAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAMA,QAAM,wBAAwB,MAAM;AAClC,QAAI,CAAC,WAAY;AAEjB,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAM,UAAU,gBAAmB,YAAY,OAAO;AAGtD,UAAM,UAAU,YAAY,eAAe,OAAO;AAElD,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAA;AACA,cAAQ,QAAQ,CAAC,EAAE,MAAM,YAAY;AACnC,YAAI,OAAO;AACT,mCAAyB,OAAO,IAAI;AACpC,gBAAM,EAAE,MAAM,OAAO;AAAA,QACvB;AAAA,MACF,CAAC;AACD,aAAA;AAGA,oBAAc,MAAA;AACd,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAGF;AAAA,IACF,MAAM,CAAC,WAAiD;AACtD,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,mBAAa;AACb,mBAAa,OAAO;AAGpB,YAAM,cAAc,gBAAmB,YAAY,OAAO;AAC1D,UAAI,YAAY,OAAO,GAAG;AACxB,cAAA;AACA,oBAAY,QAAQ,CAAC,eAAe;AAClC,mCAAyB,WAAW,MAAM,MAAM;AAChD,gBAAM,EAAE,MAAM,UAAU,OAAO,WAAW,MAAM;AAAA,QAClD,CAAC;AACD,eAAA;AAAA,MACF;AAGA,oBAAc,MAAA;AACd,kBAAY,QAAQ,CAAC,YAAY,QAAQ;AACvC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAGD,gBAAA;AAGA,YAAM,qBAAqB,CAAC,UAAwB;AAElD,YAAI,MAAM,QAAQ,cAAc,MAAM,gBAAgB,SAAS;AAC7D;AAAA,QACF;AAEA,8BAAA;AAAA,MACF;AAGA,sBAAgB,iBAAiB,WAAW,kBAAkB;AAAA,IAGhE;AAAA;AAAA;AAAA;AAAA;AAAA,IAMA,iBAAiB,OAAO;AAAA,MACtB;AAAA,MACA,aACE,aAAa,OAAO,WAAW,cAAc,OAAO,eAAe,QAC/D,iBACA;AAAA,IAAA;AAAA;AAAA,IAIR,eAAe;AAAA;AAAA,IAGf;AAAA,EAAA;AAQF,QAAM,wBAAwB,CAAC,cAA0B;AACvD,QAAI,CAAC,YAAY;AAEf;AAAA,IACF;AAEA,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAA;AACA,cAAU,QAAQ,CAAC,aAAkB;AACnC,YAAM;AAAA,QACJ,MAAM,SAAS;AAAA,QACf,OACE,SAAS,SAAS,WAAW,SAAS,WAAW,SAAS;AAAA,MAAA,CAC7D;AAAA,IACH,CAAC;AACD,WAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EAAA;AAEJ;"}
+
{"version":3,"file":"local-storage.js","sources":["../../src/local-storage.ts"],"sourcesContent":["import {\n InvalidStorageDataFormatError,\n InvalidStorageObjectFormatError,\n SerializationError,\n StorageKeyRequiredError,\n} from \"./errors\"\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n PendingMutation,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Storage API interface - subset of DOM Storage that we need\n */\nexport type StorageApi = Pick<Storage, `getItem` | `setItem` | `removeItem`>\n\n/**\n * Storage event API - subset of Window for 'storage' events only\n */\nexport type StorageEventApi = {\n addEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n removeEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n}\n\n/**\n * Internal storage format that includes version tracking\n */\ninterface StoredItem<T> {\n versionKey: string\n data: T\n}\n\nexport interface Parser {\n parse: (data: string) => unknown\n stringify: (data: unknown) => string\n}\n\n/**\n * Configuration interface for localStorage collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalStorageCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /**\n * The key to use for storing the collection data in localStorage/sessionStorage\n */\n storageKey: string\n\n /**\n * Storage API to use (defaults to window.localStorage)\n * Can be any object that implements the Storage interface (e.g., sessionStorage)\n */\n storage?: StorageApi\n\n /**\n * Storage event API to use for cross-tab synchronization (defaults to window)\n * Can be any object that implements addEventListener/removeEventListener for storage events\n */\n storageEventApi?: StorageEventApi\n\n /**\n * Parser to use for serializing and deserializing data to and from storage\n * Defaults to JSON\n */\n parser?: Parser\n}\n\n/**\n * Type for the clear utility function\n */\nexport type ClearStorageFn = () => void\n\n/**\n * Type for the getStorageSize utility function\n */\nexport type GetStorageSizeFn = () => number\n\n/**\n * LocalStorage collection utilities type\n */\nexport interface LocalStorageCollectionUtils extends UtilsRecord {\n clearStorage: ClearStorageFn\n getStorageSize: GetStorageSizeFn\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to localStorage.\n * This should be called in your transaction's mutationFn to persist local-storage data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localSettings = createCollection(localStorageCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-storage mutations after success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\n/**\n * Validates that a value can be JSON serialized\n * 
@param parser - The parser to use for serialization\n * @param value - The value to validate for JSON serialization\n * @param operation - The operation type being performed (for error messages)\n * @throws Error if the value cannot be JSON serialized\n */\nfunction validateJsonSerializable(\n parser: Parser,\n value: any,\n operation: string\n): void {\n try {\n parser.stringify(value)\n } catch (error) {\n throw new SerializationError(\n operation,\n error instanceof Error ? error.message : String(error)\n )\n }\n}\n\n/**\n * Generate a UUID for version tracking\n * @returns A unique identifier string for tracking data versions\n */\nfunction generateUuid(): string {\n return crypto.randomUUID()\n}\n\n/**\n * Creates an in-memory storage implementation that mimics the StorageApi interface\n * Used as a fallback when localStorage is not available (e.g., server-side rendering)\n * @returns An object implementing the StorageApi interface using an in-memory Map\n */\nfunction createInMemoryStorage(): StorageApi {\n const storage = new Map<string, string>()\n\n return {\n getItem(key: string): string | null {\n return storage.get(key) ?? null\n },\n setItem(key: string, value: string): void {\n storage.set(key, value)\n },\n removeItem(key: string): void {\n storage.delete(key)\n },\n }\n}\n\n/**\n * Creates a no-op storage event API for environments without window (e.g., server-side)\n * This provides the required interface but doesn't actually listen to any events\n * since cross-tab synchronization is not possible in server environments\n * @returns An object implementing the StorageEventApi interface with no-op methods\n */\nfunction createNoOpStorageEventApi(): StorageEventApi {\n return {\n addEventListener: () => {\n // No-op: cannot listen to storage events without window\n },\n removeEventListener: () => {\n // No-op: cannot remove listeners without window\n },\n }\n}\n\n/**\n * Creates localStorage collection options for use with a standard Collection\n *\n * This function creates a collection that persists data to localStorage/sessionStorage\n * and synchronizes changes across browser tabs using storage events.\n *\n * **Fallback Behavior:**\n *\n * When localStorage is not available (e.g., in server-side rendering environments),\n * this function automatically falls back to an in-memory storage implementation.\n * This prevents errors during module initialization and allows the collection to\n * work in any environment, though data will not persist across page reloads or\n * be shared across tabs when using the in-memory fallback.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. 
This is necessary because local-storage collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template TExplicit - The explicit type of items in the collection (highest priority)\n * @template TSchema - The schema type for validation and type inference (second priority)\n * @template TFallback - The fallback type if no explicit or schema type is provided\n * @param config - Configuration options for the localStorage collection\n * @returns Collection options with utilities including clearStorage, getStorageSize, and acceptMutations\n *\n * @example\n * // Basic localStorage collection\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with custom storage\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * storage: window.sessionStorage, // Use sessionStorage instead\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with mutation handlers\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localSettings = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'user-settings',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use settings data in API call\n * const settingsMutations = transaction.mutations.filter(m => m.collection === localSettings)\n * await api.updateUserProfile({ settings: settingsMutations[0]?.modified })\n *\n * // Persist local-storage mutations after API success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localSettings.insert({ id: 'theme', value: 'dark' })\n * apiCollection.insert({ id: 2, data: 'profile data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localStorageCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localStorageCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: never // no schema in the result\n}\n\nexport function localStorageCollectionOptions(\n config: LocalStorageCollectionConfig<any, any, string | number>\n): Omit<CollectionConfig<any, string | number, any>, `id`> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: StandardSchemaV1\n} {\n // Validate required parameters\n if (!config.storageKey) {\n throw new StorageKeyRequiredError()\n }\n\n // Default to window.localStorage if no storage is provided\n // Fall 
back to in-memory storage if localStorage is not available (e.g., server-side rendering)\n const storage =\n config.storage ||\n (typeof window !== `undefined` ? window.localStorage : null) ||\n createInMemoryStorage()\n\n // Default to window for storage events if not provided\n // Fall back to no-op storage event API if window is not available (e.g., server-side rendering)\n const storageEventApi =\n config.storageEventApi ||\n (typeof window !== `undefined` ? window : null) ||\n createNoOpStorageEventApi()\n\n // Default to JSON parser if no parser is provided\n const parser = config.parser || JSON\n\n // Track the last known state to detect changes\n const lastKnownData = new Map<string | number, StoredItem<any>>()\n\n // Create the sync configuration\n const sync = createLocalStorageSync<any>(\n config.storageKey,\n storage,\n storageEventApi,\n parser,\n config.getKey,\n lastKnownData\n )\n\n /**\n * Manual trigger function for local sync updates\n * Forces a check for storage changes and updates the collection if needed\n */\n const triggerLocalSync = () => {\n if (sync.manualTrigger) {\n sync.manualTrigger()\n }\n }\n\n /**\n * Save data to storage\n * @param dataMap - Map of items with version tracking to save to storage\n */\n const saveToStorage = (\n dataMap: Map<string | number, StoredItem<any>>\n ): void => {\n try {\n // Convert Map to object format for storage\n const objectData: Record<string, StoredItem<any>> = {}\n dataMap.forEach((storedItem, key) => {\n objectData[String(key)] = storedItem\n })\n const serialized = parser.stringify(objectData)\n storage.setItem(config.storageKey, serialized)\n } catch (error) {\n console.error(\n `[LocalStorageCollection] Error saving data to storage key \"${config.storageKey}\":`,\n error\n )\n throw error\n }\n }\n\n /**\n * Removes all collection data from the configured storage\n */\n const clearStorage: ClearStorageFn = (): void => {\n storage.removeItem(config.storageKey)\n }\n\n /**\n * Get the size of the stored data in bytes (approximate)\n * @returns The approximate size in bytes of the stored collection data\n */\n const getStorageSize: GetStorageSizeFn = (): number => {\n const data = storage.getItem(config.storageKey)\n return data ? new Blob([data]).size : 0\n }\n\n /*\n * Create wrapper handlers for direct persistence operations that perform actual storage operations\n * Wraps the user's onInsert handler to also save changes to localStorage\n */\n const wrappedOnInsert = async (params: InsertMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(parser, mutation.modified, `insert`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onInsert) {\n handlerResult = (await config.onInsert(params)) ?? 
{}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage, parser)\n\n // Add new items with version keys\n params.transaction.mutations.forEach((mutation) => {\n const key = config.getKey(mutation.modified)\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n const wrappedOnUpdate = async (params: UpdateMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(parser, mutation.modified, `update`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onUpdate) {\n handlerResult = (await config.onUpdate(params)) ?? {}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage, parser)\n\n // Update items with new version keys\n params.transaction.mutations.forEach((mutation) => {\n const key = config.getKey(mutation.modified)\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n const wrappedOnDelete = async (params: DeleteMutationFnParams<any>) => {\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onDelete) {\n handlerResult = (await config.onDelete(params)) ?? {}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage, parser)\n\n // Remove items\n params.transaction.mutations.forEach((mutation) => {\n // For delete operations, mutation.original contains the full object\n const key = config.getKey(mutation.original)\n currentData.delete(key)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n // Extract standard Collection config properties\n const {\n storageKey: _storageKey,\n storage: _storage,\n storageEventApi: _storageEventApi,\n onInsert: _onInsert,\n onUpdate: _onUpdate,\n onDelete: _onDelete,\n id,\n ...restConfig\n } = config\n\n // Default id to a pattern based on storage key if not provided\n const collectionId = id ?? 
`local-collection:${config.storageKey}`\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to storage\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n // Use collection ID for filtering if collection reference isn't available yet\n const collectionMutations = transaction.mutations.filter((m) => {\n // Try to match by collection reference first\n if (sync.collection && m.collection === sync.collection) {\n return true\n }\n // Fall back to matching by collection ID\n return m.collection.id === collectionId\n })\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Validate all mutations can be serialized before modifying storage\n for (const mutation of collectionMutations) {\n switch (mutation.type) {\n case `insert`:\n case `update`:\n validateJsonSerializable(parser, mutation.modified, mutation.type)\n break\n case `delete`:\n validateJsonSerializable(parser, mutation.original, mutation.type)\n break\n }\n }\n\n // Load current data from storage\n const currentData = loadFromStorage<Record<string, unknown>>(\n config.storageKey,\n storage,\n parser\n )\n\n // Apply each mutation\n for (const mutation of collectionMutations) {\n // Use the engine's pre-computed key to avoid key derivation issues\n const key = mutation.key\n\n switch (mutation.type) {\n case `insert`:\n case `update`: {\n const storedItem: StoredItem<Record<string, unknown>> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n break\n }\n case `delete`: {\n currentData.delete(key)\n break\n }\n }\n }\n\n // Save to storage\n saveToStorage(currentData)\n\n // Confirm the mutations in the collection to move them from optimistic to synced state\n // This writes them through the sync interface to make them \"synced\" instead of \"optimistic\"\n sync.confirmOperationsSync(collectionMutations)\n }\n\n return {\n ...restConfig,\n id: collectionId,\n sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n clearStorage,\n getStorageSize,\n acceptMutations,\n },\n }\n}\n\n/**\n * Load data from storage and return as a Map\n * @param parser - The parser to use for deserializing the data\n * @param storageKey - The key used to store data in the storage API\n * @param storage - The storage API to load from (localStorage, sessionStorage, etc.)\n * @returns Map of stored items with version tracking, or empty Map if loading fails\n */\nfunction loadFromStorage<T extends object>(\n storageKey: string,\n storage: StorageApi,\n parser: Parser\n): Map<string | number, StoredItem<T>> {\n try {\n const rawData = storage.getItem(storageKey)\n if (!rawData) {\n return new Map()\n }\n\n const parsed = parser.parse(rawData)\n const dataMap = new Map<string | number, StoredItem<T>>()\n\n // Handle object format where keys map to StoredItem values\n if (\n typeof parsed === `object` &&\n parsed !== null &&\n !Array.isArray(parsed)\n ) {\n Object.entries(parsed).forEach(([key, value]) => {\n // Runtime check to ensure the value has the expected StoredItem structure\n if (\n value &&\n typeof value === `object` &&\n `versionKey` in value &&\n `data` in value\n ) {\n const storedItem = value as StoredItem<T>\n dataMap.set(key, storedItem)\n } else {\n throw new InvalidStorageDataFormatError(storageKey, key)\n }\n })\n } else {\n throw new 
InvalidStorageObjectFormatError(storageKey)\n }\n\n return dataMap\n } catch (error) {\n console.warn(\n `[LocalStorageCollection] Error loading data from storage key \"${storageKey}\":`,\n error\n )\n return new Map()\n }\n}\n\n/**\n * Internal function to create localStorage sync configuration\n * Creates a sync configuration that handles localStorage persistence and cross-tab synchronization\n * @param storageKey - The key used for storing data in localStorage\n * @param storage - The storage API to use (localStorage, sessionStorage, etc.)\n * @param storageEventApi - The event API for listening to storage changes\n * @param getKey - Function to extract the key from an item\n * @param lastKnownData - Map tracking the last known state for change detection\n * @returns Sync configuration with manual trigger capability\n */\nfunction createLocalStorageSync<T extends object>(\n storageKey: string,\n storage: StorageApi,\n storageEventApi: StorageEventApi,\n parser: Parser,\n _getKey: (item: T) => string | number,\n lastKnownData: Map<string | number, StoredItem<T>>\n): SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n confirmOperationsSync: (mutations: Array<any>) => void\n} {\n let syncParams: Parameters<SyncConfig<T>[`sync`]>[0] | null = null\n let collection: any = null\n\n /**\n * Compare two Maps to find differences using version keys\n * @param oldData - The previous state of stored items\n * @param newData - The current state of stored items\n * @returns Array of changes with type, key, and value information\n */\n const findChanges = (\n oldData: Map<string | number, StoredItem<T>>,\n newData: Map<string | number, StoredItem<T>>\n ): Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> => {\n const changes: Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> = []\n\n // Check for deletions and updates\n oldData.forEach((oldStoredItem, key) => {\n const newStoredItem = newData.get(key)\n if (!newStoredItem) {\n changes.push({ type: `delete`, key, value: oldStoredItem.data })\n } else if (oldStoredItem.versionKey !== newStoredItem.versionKey) {\n changes.push({ type: `update`, key, value: newStoredItem.data })\n }\n })\n\n // Check for insertions\n newData.forEach((newStoredItem, key) => {\n if (!oldData.has(key)) {\n changes.push({ type: `insert`, key, value: newStoredItem.data })\n }\n })\n\n return changes\n }\n\n /**\n * Process storage changes and update collection\n * Loads new data from storage, compares with last known state, and applies changes\n */\n const processStorageChanges = () => {\n if (!syncParams) return\n\n const { begin, write, commit } = syncParams\n\n // Load the new data\n const newData = loadFromStorage<T>(storageKey, storage, parser)\n\n // Find the specific changes\n const changes = findChanges(lastKnownData, newData)\n\n if (changes.length > 0) {\n begin()\n changes.forEach(({ type, value }) => {\n if (value) {\n validateJsonSerializable(parser, value, type)\n write({ type, value })\n }\n })\n commit()\n\n // Update lastKnownData\n lastKnownData.clear()\n newData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n }\n }\n\n const syncConfig: SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n } = {\n sync: (params: Parameters<SyncConfig<T>[`sync`]>[0]) => {\n const { begin, write, commit, markReady } = params\n\n // Store sync params and collection for later use\n syncParams = params\n collection = params.collection\n\n // Initial 
load\n const initialData = loadFromStorage<T>(storageKey, storage, parser)\n if (initialData.size > 0) {\n begin()\n initialData.forEach((storedItem) => {\n validateJsonSerializable(parser, storedItem.data, `load`)\n write({ type: `insert`, value: storedItem.data })\n })\n commit()\n }\n\n // Update lastKnownData\n lastKnownData.clear()\n initialData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n\n // Mark collection as ready after initial load\n markReady()\n\n // Listen for storage events from other tabs\n const handleStorageEvent = (event: StorageEvent) => {\n // Only respond to changes to our specific key and from our storage\n if (event.key !== storageKey || event.storageArea !== storage) {\n return\n }\n\n processStorageChanges()\n }\n\n // Add storage event listener for cross-tab sync\n storageEventApi.addEventListener(`storage`, handleStorageEvent)\n\n // Note: Cleanup is handled automatically by the collection when it's disposed\n },\n\n /**\n * Get sync metadata - returns storage key information\n * @returns Object containing storage key and storage type metadata\n */\n getSyncMetadata: () => ({\n storageKey,\n storageType:\n storage === (typeof window !== `undefined` ? window.localStorage : null)\n ? `localStorage`\n : `custom`,\n }),\n\n // Manual trigger function for local updates\n manualTrigger: processStorageChanges,\n\n // Collection instance reference\n collection,\n }\n\n /**\n * Confirms mutations by writing them through the sync interface\n * This moves mutations from optimistic to synced state\n * @param mutations - Array of mutation objects to confirm\n */\n const confirmOperationsSync = (mutations: Array<any>) => {\n if (!syncParams) {\n // Sync not initialized yet, mutations will be handled on next sync\n return\n }\n\n const { begin, write, commit } = syncParams\n\n // Write the mutations through sync to confirm them\n begin()\n mutations.forEach((mutation: any) => {\n write({\n type: mutation.type,\n value:\n mutation.type === `delete` ? 
mutation.original : mutation.modified,\n })\n })\n commit()\n }\n\n return {\n ...syncConfig,\n confirmOperationsSync,\n }\n}\n"],"names":[],"mappings":";AAmIA,SAAS,yBACP,QACA,OACA,WACM;AACN,MAAI;AACF,WAAO,UAAU,KAAK;AAAA,EACxB,SAAS,OAAO;AACd,UAAM,IAAI;AAAA,MACR;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IAAA;AAAA,EAEzD;AACF;AAMA,SAAS,eAAuB;AAC9B,SAAO,OAAO,WAAA;AAChB;AAOA,SAAS,wBAAoC;AAC3C,QAAM,8BAAc,IAAA;AAEpB,SAAO;AAAA,IACL,QAAQ,KAA4B;AAClC,aAAO,QAAQ,IAAI,GAAG,KAAK;AAAA,IAC7B;AAAA,IACA,QAAQ,KAAa,OAAqB;AACxC,cAAQ,IAAI,KAAK,KAAK;AAAA,IACxB;AAAA,IACA,WAAW,KAAmB;AAC5B,cAAQ,OAAO,GAAG;AAAA,IACpB;AAAA,EAAA;AAEJ;AAQA,SAAS,4BAA6C;AACpD,SAAO;AAAA,IACL,kBAAkB,MAAM;AAAA,IAExB;AAAA,IACA,qBAAqB,MAAM;AAAA,IAE3B;AAAA,EAAA;AAEJ;AAoHO,SAAS,8BACd,QAKA;AAEA,MAAI,CAAC,OAAO,YAAY;AACtB,UAAM,IAAI,wBAAA;AAAA,EACZ;AAIA,QAAM,UACJ,OAAO,YACN,OAAO,WAAW,cAAc,OAAO,eAAe,SACvD,sBAAA;AAIF,QAAM,kBACJ,OAAO,oBACN,OAAO,WAAW,cAAc,SAAS,SAC1C,0BAAA;AAGF,QAAM,SAAS,OAAO,UAAU;AAGhC,QAAM,oCAAoB,IAAA;AAG1B,QAAM,OAAO;AAAA,IACX,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,EAAA;AAOF,QAAM,mBAAmB,MAAM;AAC7B,QAAI,KAAK,eAAe;AACtB,WAAK,cAAA;AAAA,IACP;AAAA,EACF;AAMA,QAAM,gBAAgB,CACpB,YACS;AACT,QAAI;AAEF,YAAM,aAA8C,CAAA;AACpD,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,mBAAW,OAAO,GAAG,CAAC,IAAI;AAAA,MAC5B,CAAC;AACD,YAAM,aAAa,OAAO,UAAU,UAAU;AAC9C,cAAQ,QAAQ,OAAO,YAAY,UAAU;AAAA,IAC/C,SAAS,OAAO;AACd,cAAQ;AAAA,QACN,8DAA8D,OAAO,UAAU;AAAA,QAC/E;AAAA,MAAA;AAEF,YAAM;AAAA,IACR;AAAA,EACF;AAKA,QAAM,eAA+B,MAAY;AAC/C,YAAQ,WAAW,OAAO,UAAU;AAAA,EACtC;AAMA,QAAM,iBAAmC,MAAc;AACrD,UAAM,OAAO,QAAQ,QAAQ,OAAO,UAAU;AAC9C,WAAO,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,OAAO;AAAA,EACxC;AAMA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,QAAQ,SAAS,UAAU,QAAQ;AAAA,IAC9D,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,SAAS,MAAM;AAG3E,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,kBAAY,IAAI,KAAK,UAAU;AAAA,IACjC,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,QAAQ,SAAS,UAAU,QAAQ;AAAA,IAC9D,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,SAAS,MAAM;AAG3E,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,kBAAY,IAAI,KAAK,UAAU;AAAA,IACjC,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,SAAS,MAAM;AAG3E,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,kBAAY,OAAO,GAAG;AAAA,IACxB,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAGA,QAAM;AAAA,IACJ,YAAY;AAAA,IACZ,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAGJ,QAAM,eAAe,MAAM,oBAAoB,OAAO,UAAU;AAKhE,QAAM,kBAAkB,CAAC,gBAEnB;AAGJ,UAAM,sBAAsB,YAAY,UAAU,OAAO,CAAC,MAAM;AAE9D,UAAI,KAAK,cAAc,EAAE,eAAe,KAAK,YAAY;AACvD,eAAO;AAAA,MACT;AAEA,aAAO,EAAE,WAAW,OAAO;AAAA,IAC7B,CAAC;AAED,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW,YAAY,qBAAqB;AAC1C,cAAQ,SAAS,MAAA;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AACH,mCAAyB,QAAQ,SAAS,UAAU,SAAS,IAAI;AACjE;AAAA,QACF,KAAK;AACH,mCAAyB,QAAQ,SAAS,UAAU,SAAS,IAAI;AACjE;AAAA,MAAA;AAAA,IAEN;AAGA,UAAM,cAAc;AAAA,MAClB,OAAO;AAAA,
MACP;AAAA,MACA;AAAA,IAAA;AAIF,eAAW,YAAY,qBAAqB;AAE1C,YAAM,MAAM,SAAS;AAErB,cAAQ,SAAS,MAAA;AAAA,QACf,KAAK;AAAA,QACL,KAAK,UAAU;AACb,gBAAM,aAAkD;AAAA,YACtD,YAAY,aAAA;AAAA,YACZ,MAAM,SAAS;AAAA,UAAA;AAEjB,sBAAY,IAAI,KAAK,UAAU;AAC/B;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,sBAAY,OAAO,GAAG;AACtB;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,kBAAc,WAAW;AAIzB,SAAK,sBAAsB,mBAAmB;AAAA,EAChD;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,IAAI;AAAA,IACJ;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EACF;AAEJ;AASA,SAAS,gBACP,YACA,SACA,QACqC;AACrC,MAAI;AACF,UAAM,UAAU,QAAQ,QAAQ,UAAU;AAC1C,QAAI,CAAC,SAAS;AACZ,iCAAW,IAAA;AAAA,IACb;AAEA,UAAM,SAAS,OAAO,MAAM,OAAO;AACnC,UAAM,8BAAc,IAAA;AAGpB,QACE,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,MAAM,GACrB;AACA,aAAO,QAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM;AAE/C,YACE,SACA,OAAO,UAAU,YACjB,gBAAgB,SAChB,UAAU,OACV;AACA,gBAAM,aAAa;AACnB,kBAAQ,IAAI,KAAK,UAAU;AAAA,QAC7B,OAAO;AACL,gBAAM,IAAI,8BAA8B,YAAY,GAAG;AAAA,QACzD;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,IAAI,gCAAgC,UAAU;AAAA,IACtD;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,iEAAiE,UAAU;AAAA,MAC3E;AAAA,IAAA;AAEF,+BAAW,IAAA;AAAA,EACb;AACF;AAYA,SAAS,uBACP,YACA,SACA,iBACA,QACA,SACA,eAKA;AACA,MAAI,aAA0D;AAC9D,MAAI,aAAkB;AAQtB,QAAM,cAAc,CAClB,SACA,YAKI;AACJ,UAAM,UAID,CAAA;AAGL,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,YAAM,gBAAgB,QAAQ,IAAI,GAAG;AACrC,UAAI,CAAC,eAAe;AAClB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE,WAAW,cAAc,eAAe,cAAc,YAAY;AAChE,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAGD,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,UAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAMA,QAAM,wBAAwB,MAAM;AAClC,QAAI,CAAC,WAAY;AAEjB,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAM,UAAU,gBAAmB,YAAY,SAAS,MAAM;AAG9D,UAAM,UAAU,YAAY,eAAe,OAAO;AAElD,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAA;AACA,cAAQ,QAAQ,CAAC,EAAE,MAAM,YAAY;AACnC,YAAI,OAAO;AACT,mCAAyB,QAAQ,OAAO,IAAI;AAC5C,gBAAM,EAAE,MAAM,OAAO;AAAA,QACvB;AAAA,MACF,CAAC;AACD,aAAA;AAGA,oBAAc,MAAA;AACd,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAGF;AAAA,IACF,MAAM,CAAC,WAAiD;AACtD,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,mBAAa;AACb,mBAAa,OAAO;AAGpB,YAAM,cAAc,gBAAmB,YAAY,SAAS,MAAM;AAClE,UAAI,YAAY,OAAO,GAAG;AACxB,cAAA;AACA,oBAAY,QAAQ,CAAC,eAAe;AAClC,mCAAyB,QAAQ,WAAW,MAAM,MAAM;AACxD,gBAAM,EAAE,MAAM,UAAU,OAAO,WAAW,MAAM;AAAA,QAClD,CAAC;AACD,eAAA;AAAA,MACF;AAGA,oBAAc,MAAA;AACd,kBAAY,QAAQ,CAAC,YAAY,QAAQ;AACvC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAGD,gBAAA;AAGA,YAAM,qBAAqB,CAAC,UAAwB;AAElD,YAAI,MAAM,QAAQ,cAAc,MAAM,gBAAgB,SAAS;AAC7D;AAAA,QACF;AAEA,8BAAA;AAAA,MACF;AAGA,sBAAgB,iBAAiB,WAAW,kBAAkB;AAAA,IAGhE;AAAA;AAAA;AAAA;AAAA;AAAA,IAMA,iBAAiB,OAAO;AAAA,MACtB;AAAA,MACA,aACE,aAAa,OAAO,WAAW,cAAc,OAAO,eAAe,QAC/D,iBACA;AAAA,IAAA;AAAA;AAAA,IAIR,eAAe;AAAA;AAAA,IAGf;AAAA,EAAA;AAQF,QAAM,wBAAwB,CAAC,cAA0B;AACvD,QAAI,CAAC,YAAY;AAEf;AAAA,IACF;AAEA,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAA;AACA,cAAU,QAAQ,CAAC,aAAkB;AACnC,YAAM;AAAA,QACJ,MAAM,SAAS;AAAA,QACf,OACE,SAAS,SAAS,WAAW,SAAS,WAAW,SAAS;AAAA,MAAA,CAC7D;AAAA,IACH,CAAC;AACD,WAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EAAA;AAEJ;"}
|
|
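The map above embeds the updated local-storage source, whose JSDoc documents the new `acceptMutations` utility for manual transactions. A minimal sketch of that flow, assuming `createCollection`, `createTransaction`, and `localStorageCollectionOptions` are imported from the package root as in the embedded examples; `api.updateUserProfile` is a hypothetical endpoint:

```ts
import {
  createCollection,
  createTransaction,
  localStorageCollectionOptions,
} from "@tanstack/db"

// Hypothetical backend client, used only for illustration.
const api = { updateUserProfile: async (_body: unknown) => {} }

const localSettings = createCollection(
  localStorageCollectionOptions({
    storageKey: `user-settings`,
    getKey: (item: { id: string; value: string }) => item.id,
  })
)

const tx = createTransaction({
  mutationFn: async ({ transaction }) => {
    // Persist to the backend first, then accept the local-storage mutations
    // so they move from optimistic to synced state.
    await api.updateUserProfile({ settings: transaction.mutations[0]?.modified })
    localSettings.utils.acceptMutations(transaction)
  },
})

tx.mutate(() => {
  localSettings.insert({ id: `theme`, value: `dark` })
})

await tx.commit()
```

package/dist/esm/paced-mutations.d.ts
ADDED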
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import { MutationFn, Transaction } from './types.js';
|
|
2
|
+
import { Strategy } from './strategies/types.js';
|
|
3
|
+
/**
|
|
4
|
+
* Configuration for creating a paced mutations manager
|
|
5
|
+
*/
|
|
6
|
+
export interface PacedMutationsConfig<TVariables = unknown, T extends object = Record<string, unknown>> {
|
|
7
|
+
/**
|
|
8
|
+
* Callback to apply optimistic updates immediately.
|
|
9
|
+
* Receives the variables passed to the mutate function.
|
|
10
|
+
*/
|
|
11
|
+
onMutate: (variables: TVariables) => void;
|
|
12
|
+
/**
|
|
13
|
+
* Function to execute the mutation on the server.
|
|
14
|
+
* Receives the transaction parameters containing all merged mutations.
|
|
15
|
+
*/
|
|
16
|
+
mutationFn: MutationFn<T>;
|
|
17
|
+
/**
|
|
18
|
+
* Strategy for controlling mutation execution timing
|
|
19
|
+
* Examples: debounceStrategy, queueStrategy, throttleStrategy
|
|
20
|
+
*/
|
|
21
|
+
strategy: Strategy;
|
|
22
|
+
/**
|
|
23
|
+
* Custom metadata to associate with transactions
|
|
24
|
+
*/
|
|
25
|
+
metadata?: Record<string, unknown>;
|
|
26
|
+
}
|
|
27
|
+
/**
|
|
28
|
+
* Creates a paced mutations manager with pluggable timing strategies.
|
|
29
|
+
*
|
|
30
|
+
* This function provides a way to control when and how optimistic mutations
|
|
31
|
+
* are persisted to the backend, using strategies like debouncing, queuing,
|
|
32
|
+
* or throttling. The optimistic updates are applied immediately via `onMutate`,
|
|
33
|
+
* and the actual persistence is controlled by the strategy.
|
|
34
|
+
*
|
|
35
|
+
* The returned function accepts variables of type TVariables and returns a
|
|
36
|
+
* Transaction object that can be awaited to know when persistence completes
|
|
37
|
+
* or to handle errors.
|
|
38
|
+
*
|
|
39
|
+
* @param config - Configuration including onMutate, mutationFn and strategy
|
|
40
|
+
* @returns A function that accepts variables and returns a Transaction
|
|
41
|
+
*
|
|
42
|
+
* @example
|
|
43
|
+
* ```ts
|
|
44
|
+
* // Debounced mutations for auto-save
|
|
45
|
+
* const updateTodo = createPacedMutations<string>({
|
|
46
|
+
* onMutate: (text) => {
|
|
47
|
+
* // Apply optimistic update immediately
|
|
48
|
+
* collection.update(id, draft => { draft.text = text })
|
|
49
|
+
* },
|
|
50
|
+
* mutationFn: async (text, { transaction }) => {
|
|
51
|
+
* await api.save(transaction.mutations)
|
|
52
|
+
* },
|
|
53
|
+
* strategy: debounceStrategy({ wait: 500 })
|
|
54
|
+
* })
|
|
55
|
+
*
|
|
56
|
+
* // Call with variables, returns a transaction
|
|
57
|
+
* const tx = updateTodo('New text')
|
|
58
|
+
*
|
|
59
|
+
* // Await persistence or handle errors
|
|
60
|
+
* await tx.isPersisted.promise
|
|
61
|
+
* ```
|
|
62
|
+
*
|
|
63
|
+
* @example
|
|
64
|
+
* ```ts
|
|
65
|
+
* // Queue strategy for sequential processing
|
|
66
|
+
* const addTodo = createPacedMutations<{ text: string }>({
|
|
67
|
+
* onMutate: ({ text }) => {
|
|
68
|
+
* collection.insert({ id: uuid(), text, completed: false })
|
|
69
|
+
* },
|
|
70
|
+
* mutationFn: async ({ text }, { transaction }) => {
|
|
71
|
+
* await api.save(transaction.mutations)
|
|
72
|
+
* },
|
|
73
|
+
* strategy: queueStrategy({
|
|
74
|
+
* wait: 200,
|
|
75
|
+
* addItemsTo: 'back',
|
|
76
|
+
* getItemsFrom: 'front'
|
|
77
|
+
* })
|
|
78
|
+
* })
|
|
79
|
+
* ```
|
|
80
|
+
*/
|
|
81
|
+
export declare function createPacedMutations<TVariables = unknown, T extends object = Record<string, unknown>>(config: PacedMutationsConfig<TVariables, T>): (variables: TVariables) => Transaction<T>;
|
|
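The declaration above describes the new `createPacedMutations` entry point: optimistic updates are applied synchronously in `onMutate`, while the strategy decides when the accumulated transaction is committed through `mutationFn`. A minimal auto-save sketch, assuming `createPacedMutations` and `debounceStrategy` are re-exported from the package root (this release adds new exports to `src/index.ts`); `todos` and `api.saveTodos` are hypothetical:

```ts
import { createPacedMutations, debounceStrategy } from "@tanstack/db"

// Hypothetical existing collection and backend client.
declare const todos: {
  update: (key: string, fn: (draft: { text: string }) => void) => void
}
const api = { saveTodos: async (_mutations: unknown) => {} }

const saveDraft = createPacedMutations<string>({
  onMutate: (text) => {
    // Applied immediately as an optimistic update.
    todos.update(`draft`, (draft) => {
      draft.text = text
    })
  },
  mutationFn: async ({ transaction }) => {
    // Runs once the debounce window elapses, with all merged mutations.
    await api.saveTodos(transaction.mutations)
  },
  strategy: debounceStrategy({ wait: 500 }),
})

const tx = saveDraft(`New text`)
// Resolves (or rejects) once the debounced commit has persisted.
await tx.isPersisted.promise
```

package/dist/esm/paced-mutations.js
ADDED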
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { createTransaction } from "./transactions.js";
|
|
2
|
+
function createPacedMutations(config) {
|
|
3
|
+
const { onMutate, mutationFn, strategy, ...transactionConfig } = config;
|
|
4
|
+
let activeTransaction = null;
|
|
5
|
+
const commitCallback = () => {
|
|
6
|
+
if (!activeTransaction) {
|
|
7
|
+
throw new Error(
|
|
8
|
+
`Strategy callback called but no active transaction exists. This indicates a bug in the strategy implementation.`
|
|
9
|
+
);
|
|
10
|
+
}
|
|
11
|
+
if (activeTransaction.state !== `pending`) {
|
|
12
|
+
throw new Error(
|
|
13
|
+
`Strategy callback called but active transaction is in state "${activeTransaction.state}". Expected "pending".`
|
|
14
|
+
);
|
|
15
|
+
}
|
|
16
|
+
const txToCommit = activeTransaction;
|
|
17
|
+
activeTransaction = null;
|
|
18
|
+
txToCommit.commit().catch(() => {
|
|
19
|
+
});
|
|
20
|
+
return txToCommit;
|
|
21
|
+
};
|
|
22
|
+
function mutate(variables) {
|
|
23
|
+
if (!activeTransaction || activeTransaction.state !== `pending`) {
|
|
24
|
+
activeTransaction = createTransaction({
|
|
25
|
+
...transactionConfig,
|
|
26
|
+
mutationFn,
|
|
27
|
+
autoCommit: false
|
|
28
|
+
});
|
|
29
|
+
}
|
|
30
|
+
activeTransaction.mutate(() => {
|
|
31
|
+
onMutate(variables);
|
|
32
|
+
});
|
|
33
|
+
const txToReturn = activeTransaction;
|
|
34
|
+
if (strategy._type === `queue`) {
|
|
35
|
+
const capturedTx = activeTransaction;
|
|
36
|
+
activeTransaction = null;
|
|
37
|
+
strategy.execute(() => {
|
|
38
|
+
capturedTx.commit().catch(() => {
|
|
39
|
+
});
|
|
40
|
+
return capturedTx;
|
|
41
|
+
});
|
|
42
|
+
} else {
|
|
43
|
+
strategy.execute(commitCallback);
|
|
44
|
+
}
|
|
45
|
+
return txToReturn;
|
|
46
|
+
}
|
|
47
|
+
return mutate;
|
|
48
|
+
}
|
|
49
|
+
export {
|
|
50
|
+
createPacedMutations
|
|
51
|
+
};
|
|
52
|
+
//# sourceMappingURL=paced-mutations.js.map
|
|
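The compiled implementation only relies on two members of a strategy: an `_type` discriminant (queue strategies get a transaction per call, everything else shares the active pending transaction) and an `execute(callback)` method that decides when the commit callback runs. A sketch of that minimal shape with a hypothetical debounce-like strategy; the real `Strategy` type in `./strategies/types` may carry additional fields:

```ts
// Only the members the compiled createPacedMutations touches.
type MinimalStrategy = {
  _type: string
  execute: (callback: () => unknown) => void
}

// Hypothetical debounce-like strategy: only the last callback scheduled within
// the wait window actually commits the pending transaction.
function simpleDebounceStrategy({ wait }: { wait: number }): MinimalStrategy {
  let timer: ReturnType<typeof setTimeout> | undefined
  return {
    _type: `debounce`,
    execute(callback) {
      if (timer !== undefined) clearTimeout(timer)
      timer = setTimeout(() => {
        timer = undefined
        callback()
      }, wait)
    },
  }
}
```

package/dist/esm/paced-mutations.js.map
ADDED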
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"paced-mutations.js","sources":["../../src/paced-mutations.ts"],"sourcesContent":["import { createTransaction } from \"./transactions\"\nimport type { MutationFn, Transaction } from \"./types\"\nimport type { Strategy } from \"./strategies/types\"\n\n/**\n * Configuration for creating a paced mutations manager\n */\nexport interface PacedMutationsConfig<\n TVariables = unknown,\n T extends object = Record<string, unknown>,\n> {\n /**\n * Callback to apply optimistic updates immediately.\n * Receives the variables passed to the mutate function.\n */\n onMutate: (variables: TVariables) => void\n /**\n * Function to execute the mutation on the server.\n * Receives the transaction parameters containing all merged mutations.\n */\n mutationFn: MutationFn<T>\n /**\n * Strategy for controlling mutation execution timing\n * Examples: debounceStrategy, queueStrategy, throttleStrategy\n */\n strategy: Strategy\n /**\n * Custom metadata to associate with transactions\n */\n metadata?: Record<string, unknown>\n}\n\n/**\n * Creates a paced mutations manager with pluggable timing strategies.\n *\n * This function provides a way to control when and how optimistic mutations\n * are persisted to the backend, using strategies like debouncing, queuing,\n * or throttling. The optimistic updates are applied immediately via `onMutate`,\n * and the actual persistence is controlled by the strategy.\n *\n * The returned function accepts variables of type TVariables and returns a\n * Transaction object that can be awaited to know when persistence completes\n * or to handle errors.\n *\n * @param config - Configuration including onMutate, mutationFn and strategy\n * @returns A function that accepts variables and returns a Transaction\n *\n * @example\n * ```ts\n * // Debounced mutations for auto-save\n * const updateTodo = createPacedMutations<string>({\n * onMutate: (text) => {\n * // Apply optimistic update immediately\n * collection.update(id, draft => { draft.text = text })\n * },\n * mutationFn: async (text, { transaction }) => {\n * await api.save(transaction.mutations)\n * },\n * strategy: debounceStrategy({ wait: 500 })\n * })\n *\n * // Call with variables, returns a transaction\n * const tx = updateTodo('New text')\n *\n * // Await persistence or handle errors\n * await tx.isPersisted.promise\n * ```\n *\n * @example\n * ```ts\n * // Queue strategy for sequential processing\n * const addTodo = createPacedMutations<{ text: string }>({\n * onMutate: ({ text }) => {\n * collection.insert({ id: uuid(), text, completed: false })\n * },\n * mutationFn: async ({ text }, { transaction }) => {\n * await api.save(transaction.mutations)\n * },\n * strategy: queueStrategy({\n * wait: 200,\n * addItemsTo: 'back',\n * getItemsFrom: 'front'\n * })\n * })\n * ```\n */\nexport function createPacedMutations<\n TVariables = unknown,\n T extends object = Record<string, unknown>,\n>(\n config: PacedMutationsConfig<TVariables, T>\n): (variables: TVariables) => Transaction<T> {\n const { onMutate, mutationFn, strategy, ...transactionConfig } = config\n\n // The currently active transaction (pending, not yet persisting)\n let activeTransaction: Transaction<T> | null = null\n\n // Commit callback that the strategy will call when it's time to persist\n const commitCallback = () => {\n if (!activeTransaction) {\n throw new Error(\n `Strategy callback called but no active transaction exists. 
This indicates a bug in the strategy implementation.`\n )\n }\n\n if (activeTransaction.state !== `pending`) {\n throw new Error(\n `Strategy callback called but active transaction is in state \"${activeTransaction.state}\". Expected \"pending\".`\n )\n }\n\n const txToCommit = activeTransaction\n\n // Clear active transaction reference before committing\n activeTransaction = null\n\n // Commit the transaction\n txToCommit.commit().catch(() => {\n // Errors are handled via transaction.isPersisted.promise\n // This catch prevents unhandled promise rejections\n })\n\n return txToCommit\n }\n\n /**\n * Executes a mutation with the given variables. Creates a new transaction if none is active,\n * or adds to the existing active transaction. The strategy controls when\n * the transaction is actually committed.\n */\n function mutate(variables: TVariables): Transaction<T> {\n // Create a new transaction if we don't have an active one\n if (!activeTransaction || activeTransaction.state !== `pending`) {\n activeTransaction = createTransaction<T>({\n ...transactionConfig,\n mutationFn,\n autoCommit: false,\n })\n }\n\n // Execute onMutate with variables to apply optimistic updates\n activeTransaction.mutate(() => {\n onMutate(variables)\n })\n\n // Save reference before calling strategy.execute\n const txToReturn = activeTransaction\n\n // For queue strategy, pass a function that commits the captured transaction\n // This prevents the error when commitCallback tries to access the cleared activeTransaction\n if (strategy._type === `queue`) {\n const capturedTx = activeTransaction\n activeTransaction = null // Clear so next mutation creates a new transaction\n strategy.execute(() => {\n capturedTx.commit().catch(() => {\n // Errors are handled via transaction.isPersisted.promise\n })\n return capturedTx\n })\n } else {\n // For debounce/throttle, use commitCallback which manages activeTransaction\n strategy.execute(commitCallback)\n }\n\n return txToReturn\n }\n\n return mutate\n}\n"],"names":[],"mappings":";AAsFO,SAAS,qBAId,QAC2C;AAC3C,QAAM,EAAE,UAAU,YAAY,UAAU,GAAG,sBAAsB;AAGjE,MAAI,oBAA2C;AAG/C,QAAM,iBAAiB,MAAM;AAC3B,QAAI,CAAC,mBAAmB;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAEA,QAAI,kBAAkB,UAAU,WAAW;AACzC,YAAM,IAAI;AAAA,QACR,gEAAgE,kBAAkB,KAAK;AAAA,MAAA;AAAA,IAE3F;AAEA,UAAM,aAAa;AAGnB,wBAAoB;AAGpB,eAAW,SAAS,MAAM,MAAM;AAAA,IAGhC,CAAC;AAED,WAAO;AAAA,EACT;AAOA,WAAS,OAAO,WAAuC;AAErD,QAAI,CAAC,qBAAqB,kBAAkB,UAAU,WAAW;AAC/D,0BAAoB,kBAAqB;AAAA,QACvC,GAAG;AAAA,QACH;AAAA,QACA,YAAY;AAAA,MAAA,CACb;AAAA,IACH;AAGA,sBAAkB,OAAO,MAAM;AAC7B,eAAS,SAAS;AAAA,IACpB,CAAC;AAGD,UAAM,aAAa;AAInB,QAAI,SAAS,UAAU,SAAS;AAC9B,YAAM,aAAa;AACnB,0BAAoB;AACpB,eAAS,QAAQ,MAAM;AACrB,mBAAW,SAAS,MAAM,MAAM;AAAA,QAEhC,CAAC;AACD,eAAO;AAAA,MACT,CAAC;AAAA,IACH,OAAO;AAEL,eAAS,QAAQ,cAAc;AAAA,IACjC;AAEA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;"}
|
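Note the queue branch in the implementation above: when `strategy._type` is `queue`, the active transaction is cleared after every call, so each invocation produces its own transaction that the strategy commits in order. A sketch of the caller's view, again with hypothetical `todos` and `api.saveTodos`, and assuming `queueStrategy` is exported alongside the other strategies:

```ts
import { createPacedMutations, queueStrategy } from "@tanstack/db"

// Hypothetical existing collection and backend client.
declare const todos: {
  insert: (item: { id: string; text: string; completed: boolean }) => void
}
const api = { saveTodos: async (_mutations: unknown) => {} }

const addTodo = createPacedMutations<{ text: string }>({
  onMutate: ({ text }) => {
    todos.insert({ id: crypto.randomUUID(), text, completed: false })
  },
  mutationFn: async ({ transaction }) => {
    await api.saveTodos(transaction.mutations)
  },
  strategy: queueStrategy({ wait: 200, addItemsTo: `back`, getItemsFrom: `front` }),
})

// Each call returns its own transaction; the queue commits them sequentially.
const first = addTodo({ text: `Write docs` })
const second = addTodo({ text: `Ship release` })
await Promise.all([first.isPersisted.promise, second.isPersisted.promise])
```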