react-smart-query 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +130 -0
- package/dist/cache.service-MR6EEYM4.mjs +4 -0
- package/dist/cache.service-MR6EEYM4.mjs.map +1 -0
- package/dist/chunk-KLJQATIV.mjs +170 -0
- package/dist/chunk-KLJQATIV.mjs.map +1 -0
- package/dist/chunk-KSLDOL27.mjs +133 -0
- package/dist/chunk-KSLDOL27.mjs.map +1 -0
- package/dist/chunk-QRCVY7UR.mjs +137 -0
- package/dist/chunk-QRCVY7UR.mjs.map +1 -0
- package/dist/index.d.mts +545 -0
- package/dist/index.d.ts +545 -0
- package/dist/index.js +1533 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +1018 -0
- package/dist/index.mjs.map +1 -0
- package/dist/storage.adapter-PJCVI4DE.mjs +3 -0
- package/dist/storage.adapter-PJCVI4DE.mjs.map +1 -0
- package/dist/testing.d.mts +89 -0
- package/dist/testing.d.ts +89 -0
- package/dist/testing.js +272 -0
- package/dist/testing.js.map +1 -0
- package/dist/testing.mjs +78 -0
- package/dist/testing.mjs.map +1 -0
- package/dist/types-XXiTKLnh.d.mts +134 -0
- package/dist/types-XXiTKLnh.d.ts +134 -0
- package/dist/utils/debug.d.mts +2 -0
- package/dist/utils/debug.d.ts +2 -0
- package/dist/utils/debug.js +208 -0
- package/dist/utils/debug.js.map +1 -0
- package/dist/utils/debug.mjs +40 -0
- package/dist/utils/debug.mjs.map +1 -0
- package/docs/API_REFERENCE.md +149 -0
- package/docs/GUIDELINES.md +23 -0
- package/docs/TESTING.md +61 -0
- package/package.json +136 -0
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* src/types.ts
|
|
3
|
+
*
|
|
4
|
+
* Single source of truth for all shared public types.
|
|
5
|
+
* No implementation logic — pure TypeScript interfaces and type aliases.
|
|
6
|
+
*/
|
|
7
|
+
/**
|
|
8
|
+
* Minimum shape required for list items.
|
|
9
|
+
* The actual id field name is configured via `getItemId` — this type
|
|
10
|
+
* only enforces that items are plain objects.
|
|
11
|
+
*/
|
|
12
|
+
type AnyItem = Record<string, unknown>;
|
|
13
|
+
/**
|
|
14
|
+
* Extracts the id of an item. Configurable so APIs using `_id`, `uuid`,
|
|
15
|
+
* numeric ids, or composite keys all work without data transformation.
|
|
16
|
+
*
|
|
17
|
+
* @example
|
|
18
|
+
* getItemId: (item) => item.id // "id: string"
|
|
19
|
+
* getItemId: (item) => String(item._id) // MongoDB ObjectId
|
|
20
|
+
* getItemId: (item) => String(item.id) // numeric id
|
|
21
|
+
*/
|
|
22
|
+
type GetItemId<T extends AnyItem> = (item: T) => string;
|
|
23
|
+
/**
|
|
24
|
+
* Comparator for sort order. Same contract as Array.sort comparator.
|
|
25
|
+
* Return negative if a < b, positive if a > b, 0 if equal.
|
|
26
|
+
*/
|
|
27
|
+
type SortComparator<T extends AnyItem> = (a: T, b: T) => number;
|
|
28
|
+
/**
|
|
29
|
+
* Extracts a version sticker (timestamp or counter) from an item.
|
|
30
|
+
* Used to prevent stale updates from overwriting fresher data.
|
|
31
|
+
*/
|
|
32
|
+
type GetItemVersion<T extends AnyItem> = (item: T) => number | string;
|
|
33
|
+
interface CacheEntry<T> {
|
|
34
|
+
readonly version: number;
|
|
35
|
+
readonly data: T;
|
|
36
|
+
readonly cachedAt: number;
|
|
37
|
+
readonly lastAccessedAt: number;
|
|
38
|
+
}
|
|
39
|
+
interface NormalizedList<T extends AnyItem> {
|
|
40
|
+
byId: Record<string, T>;
|
|
41
|
+
allIds: string[];
|
|
42
|
+
}
|
|
43
|
+
type PaginationMode = "normalized" | "pages";
|
|
44
|
+
interface UnifiedNormalizedInfiniteData<T extends AnyItem> {
|
|
45
|
+
data: NormalizedList<T>;
|
|
46
|
+
meta: {
|
|
47
|
+
nextCursor: unknown | null;
|
|
48
|
+
pageParams: unknown[];
|
|
49
|
+
lastFetchedAt?: number;
|
|
50
|
+
};
|
|
51
|
+
}
|
|
52
|
+
/** Legacy support - will be used internally or for migration if needed */
|
|
53
|
+
interface InfinitePagedData<T extends AnyItem> {
|
|
54
|
+
pages: NormalizedList<T>[];
|
|
55
|
+
pageParams: unknown[];
|
|
56
|
+
nextCursor: unknown | null;
|
|
57
|
+
}
|
|
58
|
+
type ObservabilityEvent = {
|
|
59
|
+
type: "cache_hit";
|
|
60
|
+
queryKey: readonly unknown[];
|
|
61
|
+
cachedAt: number;
|
|
62
|
+
} | {
|
|
63
|
+
type: "cache_miss";
|
|
64
|
+
queryKey: readonly unknown[];
|
|
65
|
+
} | {
|
|
66
|
+
type: "cache_write";
|
|
67
|
+
queryKey: readonly unknown[];
|
|
68
|
+
dataSize: number;
|
|
69
|
+
} | {
|
|
70
|
+
type: "fetch_start";
|
|
71
|
+
queryKey: readonly unknown[];
|
|
72
|
+
} | {
|
|
73
|
+
type: "fetch_success";
|
|
74
|
+
queryKey: readonly unknown[];
|
|
75
|
+
durationMs: number;
|
|
76
|
+
} | {
|
|
77
|
+
type: "fetch_error";
|
|
78
|
+
queryKey: readonly unknown[];
|
|
79
|
+
error: unknown;
|
|
80
|
+
} | {
|
|
81
|
+
type: "queue_enqueue";
|
|
82
|
+
mutationId: string;
|
|
83
|
+
mutationType: string;
|
|
84
|
+
} | {
|
|
85
|
+
type: "queue_success";
|
|
86
|
+
mutationId: string;
|
|
87
|
+
} | {
|
|
88
|
+
type: "queue_failure";
|
|
89
|
+
mutationId: string;
|
|
90
|
+
retryCount: number;
|
|
91
|
+
} | {
|
|
92
|
+
type: "queue_drained";
|
|
93
|
+
} | {
|
|
94
|
+
type: "sync_conflict";
|
|
95
|
+
queryKey: readonly unknown[];
|
|
96
|
+
localVersion: unknown;
|
|
97
|
+
serverVersion: unknown;
|
|
98
|
+
} | {
|
|
99
|
+
type: "storage_quota_exceeded";
|
|
100
|
+
key: string;
|
|
101
|
+
};
|
|
102
|
+
type ObserverFn = (event: ObservabilityEvent) => void;
|
|
103
|
+
type MutationType = "ADD_ITEM" | "UPDATE_ITEM" | "REMOVE_ITEM" | "CUSTOM";
|
|
104
|
+
interface QueuedMutation<TPayload = unknown> {
|
|
105
|
+
id: string;
|
|
106
|
+
type: MutationType | string;
|
|
107
|
+
/** Logical entity key for coalescing — e.g. "expense:exp_123" */
|
|
108
|
+
entityKey?: string;
|
|
109
|
+
queryKey: readonly unknown[];
|
|
110
|
+
payload: TPayload;
|
|
111
|
+
enqueuedAt: number;
|
|
112
|
+
retryCount: number;
|
|
113
|
+
maxRetries: number;
|
|
114
|
+
nextRetryAt: number;
|
|
115
|
+
}
|
|
116
|
+
interface SmartQueryError {
|
|
117
|
+
/** Original error from the API / network */
|
|
118
|
+
cause: unknown;
|
|
119
|
+
/** Human-readable message */
|
|
120
|
+
message: string;
|
|
121
|
+
/** Whether this error is retryable */
|
|
122
|
+
retryable: boolean;
|
|
123
|
+
/** HTTP status code if available */
|
|
124
|
+
statusCode?: number;
|
|
125
|
+
}
|
|
126
|
+
interface AsyncStorage {
|
|
127
|
+
get(key: string): Promise<string | undefined>;
|
|
128
|
+
set(key: string, value: string): Promise<void>;
|
|
129
|
+
delete(key: string): Promise<void>;
|
|
130
|
+
clearAll(): Promise<void>;
|
|
131
|
+
keys(): Promise<string[]>;
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
export type { AsyncStorage as A, CacheEntry as C, GetItemId as G, InfinitePagedData as I, MutationType as M, NormalizedList as N, ObserverFn as O, PaginationMode as P, QueuedMutation as Q, SortComparator as S, UnifiedNormalizedInfiniteData as U, AnyItem as a, GetItemVersion as b, SmartQueryError as c, ObservabilityEvent as d };
|
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var reactNative = require('react-native');
|
|
4
|
+
|
|
5
|
+
// Bundler-generated (tsup/esbuild) shim for dynamic require in an ESM-ish
// context. When a real CommonJS `require` exists it is used directly;
// otherwise property reads are proxied so they still reach `require` if it
// appears later, and actually *calling* the fallback throws a descriptive
// error instead of failing with an opaque ReferenceError.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  // Forward property access to the real `require` when available,
  // otherwise to the fallback function `x` below.
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined") return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});
|
|
11
|
+
function createMemoryStorage() {
|
|
12
|
+
const store = /* @__PURE__ */ new Map();
|
|
13
|
+
return {
|
|
14
|
+
get: (key) => Promise.resolve(store.get(key)),
|
|
15
|
+
set: (key, value) => {
|
|
16
|
+
store.set(key, value);
|
|
17
|
+
return Promise.resolve();
|
|
18
|
+
},
|
|
19
|
+
delete: (key) => {
|
|
20
|
+
store.delete(key);
|
|
21
|
+
return Promise.resolve();
|
|
22
|
+
},
|
|
23
|
+
clearAll: () => {
|
|
24
|
+
store.clear();
|
|
25
|
+
return Promise.resolve();
|
|
26
|
+
},
|
|
27
|
+
keys: () => Promise.resolve(Array.from(store.keys()))
|
|
28
|
+
};
|
|
29
|
+
}
|
|
30
|
+
// Native AsyncStorage adapter backed by react-native-mmkv (iOS/Android).
// MMKV is synchronous; results are wrapped in resolved promises to match
// the shared AsyncStorage contract.
function createNativeStorage() {
  const { MMKV } = __require("react-native-mmkv");
  const store = new MMKV({ id: "react-smart-query-v2" });
  return {
    // MMKV returns null for missing keys; normalize to undefined.
    get: (key) => Promise.resolve(store.getString(key) ?? void 0),
    set: (key, value) => Promise.resolve(void store.set(key, value)),
    delete: (key) => Promise.resolve(void store.delete(key)),
    clearAll: () => Promise.resolve(void store.clearAll()),
    keys: () => Promise.resolve(store.getAllKeys())
  };
}
|
|
41
|
+
// IndexedDB database / object-store identifiers for the web adapter.
var IDB_NAME = "SmartQueryV2";
var IDB_STORE = "entries";
var IDB_VERSION = 1;
// SSR guard: true only when a global indexedDB implementation exists.
function isIDBAvailable() {
  if (typeof globalThis === "undefined") return false;
  return typeof globalThis.indexedDB !== "undefined";
}
|
|
47
|
+
// Opens (and lazily creates) the SmartQuery IndexedDB database, resolving
// with the IDBDatabase handle. Rejects on open errors and when another tab
// holds the database open at an older version (onblocked).
function openIDB() {
  return new Promise((resolve, reject) => {
    const request = indexedDB.open(IDB_NAME, IDB_VERSION);
    // First open (or version bump): make sure our object store exists.
    request.onupgradeneeded = (event) => {
      const db = event.target.result;
      if (db.objectStoreNames.contains(IDB_STORE)) return;
      db.createObjectStore(IDB_STORE);
    };
    request.onsuccess = (event) => resolve(event.target.result);
    request.onerror = () => reject(request.error);
    request.onblocked = () => reject(new Error("IDB blocked by another tab"));
  });
}
|
|
61
|
+
// Memoized database-open promise — the DB is opened at most once per
// process; every caller shares the same pending/resolved promise.
var _idb = null;
var getIDB = () => {
  if (_idb === null) {
    _idb = openIDB();
  }
  return _idb;
};
|
|
66
|
+
function idbWrap(req) {
|
|
67
|
+
return new Promise((res, rej) => {
|
|
68
|
+
req.onsuccess = () => res(req.result);
|
|
69
|
+
req.onerror = () => rej(req.error);
|
|
70
|
+
});
|
|
71
|
+
}
|
|
72
|
+
// Web AsyncStorage adapter backed by IndexedDB. Falls back to the in-memory
// adapter when IndexedDB is unavailable (SSR / Node), so imports never
// crash during server rendering. Each operation runs in its own
// single-store transaction.
function createWebStorage() {
  if (!isIDBAvailable()) return createMemoryStorage();
  // Opens a fresh transaction in the given mode and returns its store.
  const withStore = async (mode) => {
    const db = await getIDB();
    return db.transaction(IDB_STORE, mode).objectStore(IDB_STORE);
  };
  return {
    async get(key) {
      const store = await withStore("readonly");
      return idbWrap(store.get(key));
    },
    async set(key, value) {
      const store = await withStore("readwrite");
      await idbWrap(store.put(value, key));
    },
    async delete(key) {
      const store = await withStore("readwrite");
      await idbWrap(store.delete(key));
    },
    async clearAll() {
      const store = await withStore("readwrite");
      await idbWrap(store.clear());
    },
    async keys() {
      const store = await withStore("readonly");
      const rawKeys = await idbWrap(store.getAllKeys());
      // IDBValidKey[] may contain non-strings; normalize for the contract.
      return rawKeys.map(String);
    }
  };
}
|
|
108
|
+
// Platform-selected storage singleton, chosen once at module load:
// web → IndexedDB adapter, everything else → MMKV adapter.
var storage = reactNative.Platform.OS === "web" ? createWebStorage() : createNativeStorage();
// Accessor used by the cache/queue services instead of the bare singleton.
// NOTE(review): the package's source apparently supports a test-time
// storage override here; this compiled build returns the singleton
// unconditionally — confirm against the testing entry point.
function getStorage() {
  return storage;
}
|
|
112
|
+
|
|
113
|
+
// src/services/observer.service.ts
|
|
114
|
+
// Registered observability callbacks (src/services/observer.service.ts).
var observers = /* @__PURE__ */ new Set();
// Fan an event out to every registered observer. A throwing observer is
// ignored so instrumentation can never break the data path; the size check
// is a fast path for the common "no observers" case.
function emit(event) {
  if (!observers.size) return;
  observers.forEach((notify) => {
    try {
      notify(event);
    } catch {
      /* observer failures are deliberately swallowed */
    }
  });
}
|
|
124
|
+
|
|
125
|
+
// src/services/cache.service.ts
|
|
126
|
+
// Cache schema version (src/services/cache.service.ts). Entries persisted
// under a different version are discarded on read.
var CURRENT_CACHE_VERSION = 2;
// Derives the storage key for a query key: a stable "sq2:" prefix (used
// elsewhere to filter this library's keys) plus the JSON-serialized key.
function cacheKeyFor(queryKey) {
  const serialized = JSON.stringify(queryKey);
  return "sq2:" + serialized;
}
|
|
130
|
+
// Reads a cache entry by storage key. Returns null on a miss, on a schema
// version mismatch (the stale entry is deleted), or on any storage/parse
// error. When a queryKey is supplied, cache_hit / cache_miss events are
// emitted for observability.
async function readCache(key, queryKey) {
  try {
    const store = getStorage();
    const raw = await store.get(key);
    if (!raw) {
      if (queryKey) emit({ type: "cache_miss", queryKey });
      return null;
    }
    const entry = JSON.parse(raw);
    if (entry.version !== CURRENT_CACHE_VERSION) {
      // Stale schema: drop the entry (fire-and-forget) and report a miss.
      void store.delete(key);
      if (queryKey) emit({ type: "cache_miss", queryKey });
      return null;
    }
    // Touch lastAccessedAt for LRU bookkeeping without blocking the read.
    const touched = { ...entry, lastAccessedAt: Date.now() };
    void store.set(key, JSON.stringify(touched));
    if (queryKey) emit({ type: "cache_hit", queryKey, cachedAt: entry.cachedAt });
    return entry;
  } catch {
    // A broken cache must never crash a read — treat as a miss.
    return null;
  }
}
|
|
154
|
+
|
|
155
|
+
// src/services/queue.service.ts
|
|
156
|
+
// Storage key under which the offline mutation queue is persisted
// (src/services/queue.service.ts).
var QUEUE_KEY = "sq2:mutation_queue";
// Loads the persisted mutation queue; any storage or parse failure yields
// an empty queue rather than an exception.
async function loadQueue() {
  try {
    const raw = await getStorage().get(QUEUE_KEY);
    return raw ? JSON.parse(raw) : [];
  } catch {
    return [];
  }
}
// Public aliases used by the debug tooling below.
var getQueue = loadQueue;
var getQueueLength = async () => {
  const queue = await loadQueue();
  return queue.length;
};
|
|
168
|
+
|
|
169
|
+
// src/services/requestLock.service.ts
|
|
170
|
+
// In-flight request registry (src/services/requestLock.service.ts), keyed
// by request identity. Only size/keys introspection is exposed here.
var inFlight = /* @__PURE__ */ new Map();
var inFlightCount = function () {
  return inFlight.size;
};
var inFlightKeys = function () {
  return [...inFlight.keys()];
};
|
|
173
|
+
|
|
174
|
+
// src/utils/debug.ts
|
|
175
|
+
// Dev-only debug console (src/utils/debug.ts): installs `smartQueryDebug`
// on globalThis so the cache, queue, and in-flight registry can be
// inspected from a REPL / DevTools. Gated on the React Native `__DEV__`
// global, so none of this ships active in production bundles.
if (__DEV__) {
  const debug = {
    // Read the cache entry for a query key (also emits hit/miss events,
    // since it goes through the normal readCache path).
    async inspectCache(queryKey) {
      return readCache(cacheKeyFor(queryKey), queryKey);
    },
    // All storage keys owned by this library (the "sq2:" prefix).
    async listCacheKeys() {
      const keys = await getStorage().keys();
      return keys.filter((k) => k.startsWith("sq2:"));
    },
    // Wipes the whole storage adapter — note this clears *all* keys,
    // not just "sq2:"-prefixed ones.
    async clearCache() {
      await getStorage().clearAll();
      console.log("[SmartQuery] Cache cleared");
    },
    getQueue,
    getQueueLength,
    inFlightKeys,
    inFlightCount,
    // One-shot console dump of cache keys, queued mutations, and
    // in-flight requests.
    async snapshot() {
      const [keys, queue] = await Promise.all([
        debug.listCacheKeys(),
        debug.getQueue()
      ]);
      console.group("[SmartQuery] Debug Snapshot");
      console.log("Cache entries:", keys.length, keys);
      console.log("Queued mutations:", queue.length, queue);
      console.log("In-flight requests:", inFlightKeys());
      console.groupEnd();
    }
  };
  globalThis.smartQueryDebug = debug;
  console.log("[SmartQuery] Debug tools ready \u2192 smartQueryDebug.snapshot()");
}
|
|
207
|
+
//# sourceMappingURL=debug.js.map
|
|
208
|
+
//# sourceMappingURL=debug.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/services/storage.adapter.ts","../../src/services/observer.service.ts","../../src/services/cache.service.ts","../../src/services/queue.service.ts","../../src/services/requestLock.service.ts","../../src/utils/debug.ts"],"names":["Platform","storage"],"mappings":";;;;;;;;;;AAqBA,SAAS,mBAAA,GAAoC;AAC3C,EAAA,MAAM,KAAA,uBAAY,GAAA,EAAoB;AACtC,EAAA,OAAO;AAAA,IACL,GAAA,EAAK,CAAC,GAAA,KAAQ,OAAA,CAAQ,QAAQ,KAAA,CAAM,GAAA,CAAI,GAAG,CAAC,CAAA;AAAA,IAC5C,GAAA,EAAK,CAAC,GAAA,EAAK,KAAA,KAAU;AAAE,MAAA,KAAA,CAAM,GAAA,CAAI,KAAK,KAAK,CAAA;AAAG,MAAA,OAAO,QAAQ,OAAA,EAAQ;AAAA,IAAG,CAAA;AAAA,IACxE,MAAA,EAAQ,CAAC,GAAA,KAAQ;AAAE,MAAA,KAAA,CAAM,OAAO,GAAG,CAAA;AAAG,MAAA,OAAO,QAAQ,OAAA,EAAQ;AAAA,IAAG,CAAA;AAAA,IAChE,UAAU,MAAM;AAAE,MAAA,KAAA,CAAM,KAAA,EAAM;AAAG,MAAA,OAAO,QAAQ,OAAA,EAAQ;AAAA,IAAG,CAAA;AAAA,IAC3D,IAAA,EAAM,MAAM,OAAA,CAAQ,OAAA,CAAQ,MAAM,IAAA,CAAK,KAAA,CAAM,IAAA,EAAM,CAAC;AAAA,GACtD;AACF;AAIA,SAAS,mBAAA,GAAoC;AAC3C,EAAA,MAAM,IAAA,GAAO,SAAA,CAAQ,mBAAmB,CAAA,CAAE,IAAA;AAC1C,EAAA,MAAM,OAAO,IAAI,IAAA,CAAK,EAAE,EAAA,EAAI,wBAAwB,CAAA;AACpD,EAAA,OAAO;AAAA,IACL,GAAA,EAAK,CAAC,GAAA,KAAQ,OAAA,CAAQ,QAAQ,IAAA,CAAK,SAAA,CAAU,GAAG,CAAA,IAAK,MAAS,CAAA;AAAA,IAC9D,GAAA,EAAK,CAAC,GAAA,EAAK,KAAA,KAAU,OAAA,CAAQ,OAAA,CAAQ,KAAK,IAAA,CAAK,GAAA,CAAI,GAAA,EAAK,KAAK,CAAC,CAAA;AAAA,IAC9D,MAAA,EAAQ,CAAC,GAAA,KAAQ,OAAA,CAAQ,QAAQ,KAAK,IAAA,CAAK,MAAA,CAAO,GAAG,CAAC,CAAA;AAAA,IACtD,UAAU,MAAM,OAAA,CAAQ,QAAQ,KAAK,IAAA,CAAK,UAAU,CAAA;AAAA,IACpD,MAAM,MAAM,OAAA,CAAQ,OAAA,CAAQ,IAAA,CAAK,YAAY;AAAA,GAC/C;AACF;AAIA,IAAM,QAAA,GAAW,cAAA;AACjB,IAAM,SAAA,GAAY,SAAA;AAClB,IAAM,WAAA,GAAc,CAAA;AAEpB,SAAS,cAAA,GAA0B;AACjC,EAAA,OAAO,OAAO,UAAA,KAAe,WAAA,IAC3B,OAAQ,WAAkD,SAAA,KAAc,WAAA;AAC5E;AAEA,SAAS,OAAA,GAAgC;AACvC,EAAA,OAAO,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,KAAW;AACtC,IAAA,MAAM,GAAA,GAAM,SAAA,CAAU,IAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAChD,IAAA,GAAA,CAAI,eAAA,GAAkB,CAAC,CAAA,KAAM;AAC3B,MAAA,MAAM,EAAA,GAAM,EAAE,MAAA,CAA4B,MAAA;AAC1C,MAAA,IAAI,CAAC,EAAA,CAAG,gBAAA,CAAiB,QAAA,CAAS,SAAS,CAAA,EAAG;AAC5C,QAAA
,EAAA,CAAG,kBAAkB,SAAS,CAAA;AAAA,MAChC;AAAA,IACF,CAAA;AACA,IAAA,GAAA,CAAI,YAAY,CAAC,CAAA,KAAM,OAAA,CAAS,CAAA,CAAE,OAA4B,MAAM,CAAA;AACpE,IAAA,GAAA,CAAI,OAAA,GAAU,MAAM,MAAA,CAAO,GAAA,CAAI,KAAK,CAAA;AACpC,IAAA,GAAA,CAAI,YAAY,MAAM,MAAA,CAAO,IAAI,KAAA,CAAM,4BAA4B,CAAC,CAAA;AAAA,EACtE,CAAC,CAAA;AACH;AAEA,IAAI,IAAA,GAAoC,IAAA;AACxC,IAAM,SAAS,MAAM;AAAE,EAAA,IAAA,KAAS,OAAA,EAAQ;AAAG,EAAA,OAAO,IAAA;AAAM,CAAA;AAExD,SAAS,QAAW,GAAA,EAAgC;AAClD,EAAA,OAAO,IAAI,OAAA,CAAQ,CAAC,GAAA,EAAK,GAAA,KAAQ;AAC/B,IAAA,GAAA,CAAI,SAAA,GAAY,MAAM,GAAA,CAAI,GAAA,CAAI,MAAM,CAAA;AACpC,IAAA,GAAA,CAAI,OAAA,GAAU,MAAM,GAAA,CAAI,GAAA,CAAI,KAAK,CAAA;AAAA,EACnC,CAAC,CAAA;AACH;AAEA,SAAS,gBAAA,GAAiC;AAExC,EAAA,IAAI,CAAC,cAAA,EAAe,EAAG,OAAO,mBAAA,EAAoB;AAElD,EAAA,OAAO;AAAA,IACL,MAAM,IAAI,GAAA,EAAK;AACb,MAAA,MAAM,EAAA,GAAK,MAAM,MAAA,EAAO;AACxB,MAAA,OAAO,OAAA;AAAA,QACL,EAAA,CAAG,YAAY,SAAA,EAAW,UAAU,EAAE,WAAA,CAAY,SAAS,CAAA,CAAE,GAAA,CAAI,GAAG;AAAA,OACtE;AAAA,IACF,CAAA;AAAA,IACA,MAAM,GAAA,CAAI,GAAA,EAAK,KAAA,EAAO;AACpB,MAAA,MAAM,EAAA,GAAK,MAAM,MAAA,EAAO;AACxB,MAAA,MAAM,OAAA;AAAA,QACJ,EAAA,CAAG,WAAA,CAAY,SAAA,EAAW,WAAW,CAAA,CAAE,YAAY,SAAS,CAAA,CAAE,GAAA,CAAI,KAAA,EAAO,GAAG;AAAA,OAC9E;AAAA,IACF,CAAA;AAAA,IACA,MAAM,OAAO,GAAA,EAAK;AAChB,MAAA,MAAM,EAAA,GAAK,MAAM,MAAA,EAAO;AACxB,MAAA,MAAM,OAAA;AAAA,QACJ,EAAA,CAAG,YAAY,SAAA,EAAW,WAAW,EAAE,WAAA,CAAY,SAAS,CAAA,CAAE,MAAA,CAAO,GAAG;AAAA,OAC1E;AAAA,IACF,CAAA;AAAA,IACA,MAAM,QAAA,GAAW;AACf,MAAA,MAAM,EAAA,GAAK,MAAM,MAAA,EAAO;AACxB,MAAA,MAAM,OAAA;AAAA,QACJ,EAAA,CAAG,YAAY,SAAA,EAAW,WAAW,EAAE,WAAA,CAAY,SAAS,EAAE,KAAA;AAAM,OACtE;AAAA,IACF,CAAA;AAAA,IACA,MAAM,IAAA,GAAO;AACX,MAAA,MAAM,EAAA,GAAK,MAAM,MAAA,EAAO;AACxB,MAAA,MAAM,SAAS,MAAM,OAAA;AAAA,QACnB,EAAA,CAAG,YAAY,SAAA,EAAW,UAAU,EAAE,WAAA,CAAY,SAAS,EAAE,UAAA;AAAW,OAC1E;AACA,MAAA,OAAO,MAAA,CAAO,IAAI,MAAM,CAAA;AAAA,IAC1B;AAAA,GACF;AACF;AAIO,IAAM,UACXA,oBAAA,CAAS,EAAA,KAAO,KAAA,GAAQ,gBAAA,KAAqB,mBAAA,EAAoB;AAK5D,SAAS,UAAA,GAA2B;AACzC,EAAA,OAA2B,OAAA;AAC7B;;;ACtGA,IAAM,SAAA,uBAAgB,GAAA,EAAgB;AA8B/B,SAAS,KAAK,KAAA,EAAiC;AACpD,EAAA,IAAI,S
AAA,CAAU,SAAS,CAAA,EAAG;AAC1B,EAAA,KAAA,MAAW,MAAM,SAAA,EAAW;AAC1B,IAAA,IAAI;AAAE,MAAA,EAAA,CAAG,KAAK,CAAA;AAAA,IAAG,CAAA,CAAA,MAAQ;AAAA,IAA8C;AAAA,EACzE;AACF;;;ACzCO,IAAM,qBAAA,GAAwB,CAAA;AAc9B,SAAS,YAAY,QAAA,EAAsC;AAChE,EAAA,OAAO,CAAA,IAAA,EAAO,IAAA,CAAK,SAAA,CAAU,QAAQ,CAAC,CAAA,CAAA;AACxC;AAIA,eAAsB,SAAA,CACpB,KACA,QAAA,EAC+B;AAC/B,EAAA,IAAI;AACF,IAAA,MAAMC,WAAU,UAAA,EAAW;AAC3B,IAAA,MAAM,GAAA,GAAM,MAAMA,QAAAA,CAAQ,GAAA,CAAI,GAAG,CAAA;AAEjC,IAAA,IAAI,CAAC,GAAA,EAAK;AACR,MAAA,IAAI,UAAU,IAAA,CAAK,EAAE,IAAA,EAAM,YAAA,EAAc,UAAU,CAAA;AACnD,MAAA,OAAO,IAAA;AAAA,IACT;AAEA,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA;AAE5B,IAAA,IAAI,KAAA,CAAM,YAAY,qBAAA,EAAuB;AAC3C,MAAA,KAAKA,QAAAA,CAAQ,OAAO,GAAG,CAAA;AACvB,MAAA,IAAI,UAAU,IAAA,CAAK,EAAE,IAAA,EAAM,YAAA,EAAc,UAAU,CAAA;AACnD,MAAA,OAAO,IAAA;AAAA,IACT;AAGA,IAAA,KAAKA,QAAAA,CAAQ,GAAA;AAAA,MACX,GAAA;AAAA,MACA,IAAA,CAAK,UAAU,EAAE,GAAG,OAAO,cAAA,EAAgB,IAAA,CAAK,GAAA,EAAI,EAAG;AAAA,KACzD;AAEA,IAAA,IAAI,QAAA,OAAe,EAAE,IAAA,EAAM,aAAa,QAAA,EAAU,QAAA,EAAU,KAAA,CAAM,QAAA,EAAU,CAAA;AAC5E,IAAA,OAAO,KAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;ACrDA,IAAM,SAAA,GAAY,oBAAA;AAyFlB,eAAe,SAAA,GAAuC;AACpD,EAAA,IAAI;AACF,IAAA,MAAM,GAAA,GAAM,MAAM,UAAA,EAAW,CAAE,IAAI,SAAS,CAAA;AAC5C,IAAA,IAAI,CAAC,GAAA,EAAK,OAAO,EAAC;AAClB,IAAA,OAAO,IAAA,CAAK,MAAM,GAAG,CAAA;AAAA,EACvB,CAAA,CAAA,MAAQ;AAAE,IAAA,OAAO,EAAC;AAAA,EAAG;AACvB;AAyHO,IAAM,QAAA,GAAW,SAAA;AACjB,IAAM,cAAA,GAAiB,YAAA,CAC3B,MAAM,SAAA,EAAU,EAAG,MAAA;;;ACzOtB,IAAM,QAAA,uBAAe,GAAA,EAA8B;AAW5C,IAAM,aAAA,GAAgB,MAAc,QAAA,CAAS,IAAA;AAC7C,IAAM,eAAe,MAAgB,KAAA,CAAM,IAAA,CAAK,QAAA,CAAS,MAAM,CAAA;;;ACCtE,IAAI,OAAA,EAAS;AACX,EAAA,MAAM,KAAA,GAAQ;AAAA,IACZ,MAAM,aAAa,QAAA,EAA8B;AAC/C,MAAA,OAAO,SAAA,CAAU,WAAA,CAAY,QAAQ,CAAA,EAAG,QAAQ,CAAA;AAAA,IAClD,CAAA;AAAA,IACA,MAAM,aAAA,GAAgB;AACpB,MAAA,MAAM,IAAA,GAAO,MAAM,UAAA,EAAW,CAAE,IAAA,EAAK;AACrC,MAAA,OAAO,KAAK,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,UAAA,CAAW,MAAM,CAAC,CAAA;AAAA,IAChD,CAAA;AAAA,IACA,MAAM,UAAA,GAAa;AACjB,MAAA,MAAM,UAAA,GAAa,QAAA,EAAS;AAC
5B,MAAA,OAAA,CAAQ,IAAI,4BAA4B,CAAA;AAAA,IAC1C,CAAA;AAAA,IACA,QAAA;AAAA,IACA,cAAA;AAAA,IACA,YAAA;AAAA,IACA,aAAA;AAAA,IACA,MAAM,QAAA,GAAW;AACf,MAAA,MAAM,CAAC,IAAA,EAAM,KAAK,CAAA,GAAI,MAAM,QAAQ,GAAA,CAAI;AAAA,QACtC,MAAM,aAAA,EAAc;AAAA,QACpB,MAAM,QAAA;AAAS,OAChB,CAAA;AACD,MAAA,OAAA,CAAQ,MAAM,6BAA6B,CAAA;AAC3C,MAAA,OAAA,CAAQ,GAAA,CAAI,gBAAA,EAAkB,IAAA,CAAK,MAAA,EAAQ,IAAI,CAAA;AAC/C,MAAA,OAAA,CAAQ,GAAA,CAAI,mBAAA,EAAqB,KAAA,CAAM,MAAA,EAAQ,KAAK,CAAA;AACpD,MAAA,OAAA,CAAQ,GAAA,CAAI,qBAAA,EAAuB,YAAA,EAAc,CAAA;AACjD,MAAA,OAAA,CAAQ,QAAA,EAAS;AAAA,IACnB;AAAA,GACF;AAEA,EAAC,WAAuC,eAAA,GAAkB,KAAA;AAC1D,EAAA,OAAA,CAAQ,IAAI,kEAA6D,CAAA;AAC3E","file":"debug.js","sourcesContent":["/**\n * src/services/storage.adapter.ts\n *\n * Platform-aware storage with SSR safety.\n *\n * iOS / Android → react-native-mmkv\n * Web (browser) → IndexedDB\n * SSR / Node.js → in-memory Map (safe no-op, hydrates on client)\n *\n * The SSR guard prevents \"indexedDB is not defined\" crashes in\n * Next.js / Expo Router SSR builds. 
Data written during SSR is discarded;\n * the client hydrates from real IDB on first mount.\n */\n\nimport { Platform } from \"react-native\";\nimport type { AsyncStorage } from \"../types\";\n\nexport type { AsyncStorage };\n\n// ─── SSR / Node.js in-memory adapter ─────────────────────────────────────────\n\nfunction createMemoryStorage(): AsyncStorage {\n const store = new Map<string, string>();\n return {\n get: (key) => Promise.resolve(store.get(key)),\n set: (key, value) => { store.set(key, value); return Promise.resolve(); },\n delete: (key) => { store.delete(key); return Promise.resolve(); },\n clearAll: () => { store.clear(); return Promise.resolve(); },\n keys: () => Promise.resolve(Array.from(store.keys())),\n };\n}\n\n// ─── Native adapter (iOS + Android) ──────────────────────────────────────────\n\nfunction createNativeStorage(): AsyncStorage {\n const MMKV = require(\"react-native-mmkv\").MMKV;\n const mmkv = new MMKV({ id: \"react-smart-query-v2\" });\n return {\n get: (key) => Promise.resolve(mmkv.getString(key) ?? 
undefined),\n set: (key, value) => Promise.resolve(void mmkv.set(key, value)),\n delete: (key) => Promise.resolve(void mmkv.delete(key)),\n clearAll: () => Promise.resolve(void mmkv.clearAll()),\n keys: () => Promise.resolve(mmkv.getAllKeys()),\n };\n}\n\n// ─── Web adapter (IndexedDB) with SSR guard ───────────────────────────────────\n\nconst IDB_NAME = \"SmartQueryV2\";\nconst IDB_STORE = \"entries\";\nconst IDB_VERSION = 1;\n\nfunction isIDBAvailable(): boolean {\n return typeof globalThis !== \"undefined\" &&\n typeof (globalThis as unknown as Record<string, unknown>).indexedDB !== \"undefined\";\n}\n\nfunction openIDB(): Promise<IDBDatabase> {\n return new Promise((resolve, reject) => {\n const req = indexedDB.open(IDB_NAME, IDB_VERSION);\n req.onupgradeneeded = (e) => {\n const db = (e.target as IDBOpenDBRequest).result;\n if (!db.objectStoreNames.contains(IDB_STORE)) {\n db.createObjectStore(IDB_STORE);\n }\n };\n req.onsuccess = (e) => resolve((e.target as IDBOpenDBRequest).result);\n req.onerror = () => reject(req.error);\n req.onblocked = () => reject(new Error(\"IDB blocked by another tab\"));\n });\n}\n\nlet _idb: Promise<IDBDatabase> | null = null;\nconst getIDB = () => { _idb ??= openIDB(); return _idb; };\n\nfunction idbWrap<T>(req: IDBRequest<T>): Promise<T> {\n return new Promise((res, rej) => {\n req.onsuccess = () => res(req.result);\n req.onerror = () => rej(req.error);\n });\n}\n\nfunction createWebStorage(): AsyncStorage {\n // SSR guard — return memory storage when IDB is unavailable\n if (!isIDBAvailable()) return createMemoryStorage();\n\n return {\n async get(key) {\n const db = await getIDB();\n return idbWrap<string | undefined>(\n db.transaction(IDB_STORE, \"readonly\").objectStore(IDB_STORE).get(key)\n );\n },\n async set(key, value) {\n const db = await getIDB();\n await idbWrap(\n db.transaction(IDB_STORE, \"readwrite\").objectStore(IDB_STORE).put(value, key)\n );\n },\n async delete(key) {\n const db = await getIDB();\n await 
idbWrap(\n db.transaction(IDB_STORE, \"readwrite\").objectStore(IDB_STORE).delete(key)\n );\n },\n async clearAll() {\n const db = await getIDB();\n await idbWrap(\n db.transaction(IDB_STORE, \"readwrite\").objectStore(IDB_STORE).clear()\n );\n },\n async keys() {\n const db = await getIDB();\n const result = await idbWrap<IDBValidKey[]>(\n db.transaction(IDB_STORE, \"readonly\").objectStore(IDB_STORE).getAllKeys()\n );\n return result.map(String);\n },\n };\n}\n\n// ─── Singleton ────────────────────────────────────────────────────────────────\n\nexport const storage: AsyncStorage =\n Platform.OS === \"web\" ? createWebStorage() : createNativeStorage();\n\n/** Exposed for test utilities to inject a custom adapter */\nlet _overrideStorage: AsyncStorage | null = null;\n\nexport function getStorage(): AsyncStorage {\n return _overrideStorage ?? storage;\n}\n\n/** @internal — used by SmartQueryTestProvider only */\nexport function _setStorageOverride(s: AsyncStorage | null): void {\n _overrideStorage = s;\n}\n","/**\n * src/services/observer.service.ts\n *\n * Pluggable observability — emit structured events to any analytics backend.\n *\n * Zero coupling: the library emits; you decide where it goes.\n * Attach observers at app startup; they receive every internal event.\n *\n * @example\n * // Sentry breadcrumbs\n * addObserver((event) => {\n * if (event.type === \"fetch_error\") {\n * Sentry.addBreadcrumb({ message: event.type, data: event });\n * }\n * });\n *\n * @example\n * // Datadog / Mixpanel\n * addObserver((event) => {\n * analytics.track(event.type, event);\n * });\n *\n * @example\n * // Simple console logger in dev\n * if (__DEV__) addObserver(console.log);\n */\n\nimport type { ObservabilityEvent, ObserverFn } from \"../types\";\n\nconst observers = new Set<ObserverFn>();\n\n/**\n * Register an observer. 
Returns an unsubscribe function.\n *\n * @example\n * const unsub = addObserver(myLogger);\n * // Later:\n * unsub();\n */\nexport function addObserver(fn: ObserverFn): () => void {\n observers.add(fn);\n return () => observers.delete(fn);\n}\n\n/** Remove a specific observer */\nexport function removeObserver(fn: ObserverFn): void {\n observers.delete(fn);\n}\n\n/** Remove all observers */\nexport function clearObservers(): void {\n observers.clear();\n}\n\n/**\n * @internal — emit an event to all registered observers.\n * Called by cache.service, queue.service, and useSmartQuery.\n * Never throws — observer errors are swallowed to protect the data path.\n */\nexport function emit(event: ObservabilityEvent): void {\n if (observers.size === 0) return; // fast path — no observers registered\n for (const fn of observers) {\n try { fn(event); } catch { /* observer error must not crash the app */ }\n }\n}\n","/**\n * src/services/cache.service.ts\n *\n * Versioned, LRU-aware cache layer.\n *\n * Features:\n * • Schema versioning — auto-invalidates stale entries on version bump\n * • lastAccessedAt tracking — enables LRU eviction\n * • Configurable max entries per prefix — prevents unbounded growth\n * • Partial hydration — read a subset of a NormalizedList by ids\n * • Observability events on hit / miss / write / quota exceeded\n */\n\nimport { getStorage } from \"./storage.adapter\";\nimport { emit } from \"./observer.service\";\nimport type { CacheEntry, NormalizedList, AnyItem } from \"../types\";\n\n// ─── Versioning ───────────────────────────────────────────────────────────────\n\n/**\n * Bump when CacheEntry shape or NormalizedList schema changes in a\n * breaking way. 
Any stored entry with a lower version is silently discarded.\n */\nexport const CURRENT_CACHE_VERSION = 2;\n\n// ─── LRU config ───────────────────────────────────────────────────────────────\n\nconst DEFAULT_MAX_ENTRIES = 200;\nlet _maxEntries = DEFAULT_MAX_ENTRIES;\n\n/** Override the global max entries limit (call before any reads/writes) */\nexport function setMaxCacheEntries(n: number): void {\n _maxEntries = n;\n}\n\n// ─── Key derivation ───────────────────────────────────────────────────────────\n\nexport function cacheKeyFor(queryKey: readonly unknown[]): string {\n return `sq2:${JSON.stringify(queryKey)}`;\n}\n\n// ─── Read ─────────────────────────────────────────────────────────────────────\n\nexport async function readCache<T>(\n key: string,\n queryKey?: readonly unknown[]\n): Promise<CacheEntry<T> | null> {\n try {\n const storage = getStorage();\n const raw = await storage.get(key);\n\n if (!raw) {\n if (queryKey) emit({ type: \"cache_miss\", queryKey });\n return null;\n }\n\n const entry = JSON.parse(raw) as CacheEntry<T>;\n\n if (entry.version !== CURRENT_CACHE_VERSION) {\n void storage.delete(key);\n if (queryKey) emit({ type: \"cache_miss\", queryKey });\n return null;\n }\n\n // Touch lastAccessedAt for LRU — fire-and-forget, non-blocking\n void storage.set(\n key,\n JSON.stringify({ ...entry, lastAccessedAt: Date.now() })\n );\n\n if (queryKey) emit({ type: \"cache_hit\", queryKey, cachedAt: entry.cachedAt });\n return entry;\n } catch {\n return null;\n }\n}\n\n// ─── Write ────────────────────────────────────────────────────────────────────\n\nexport async function writeCache<T>(\n key: string,\n data: T,\n queryKey?: readonly unknown[]\n): Promise<void> {\n try {\n const storage = getStorage();\n const now = Date.now();\n const entry: CacheEntry<T> = {\n version: CURRENT_CACHE_VERSION,\n data,\n cachedAt: now,\n lastAccessedAt: now,\n };\n\n const serialized = JSON.stringify(entry);\n\n try {\n await storage.set(key, serialized);\n if 
(queryKey) {\n emit({ type: \"cache_write\", queryKey, dataSize: serialized.length });\n }\n } catch (quotaErr) {\n emit({ type: \"storage_quota_exceeded\", key });\n // Attempt LRU eviction then retry once\n await evictLRUEntries();\n await storage.set(key, serialized);\n }\n\n // Async LRU check — doesn't block the write\n void checkAndEvict();\n } catch {\n // Fail silently — a cache write failure must never crash the app\n }\n}\n\n// ─── Delete ───────────────────────────────────────────────────────────────────\n\nexport async function deleteCache(key: string): Promise<void> {\n try {\n await getStorage().delete(key);\n } catch {}\n}\n\n// ─── LRU eviction ─────────────────────────────────────────────────────────────\n\ninterface LRUMeta {\n key: string;\n lastAccessedAt: number;\n}\n\nasync function checkAndEvict(): Promise<void> {\n const storage = getStorage();\n const allKeys = await storage.keys();\n const sqKeys = allKeys.filter((k) => k.startsWith(\"sq2:\"));\n\n if (sqKeys.length <= _maxEntries) return;\n\n await evictLRUEntries(sqKeys);\n}\n\nasync function evictLRUEntries(sqKeys?: string[]): Promise<void> {\n const storage = getStorage();\n const keys = sqKeys ?? (await storage.keys()).filter((k) => k.startsWith(\"sq2:\"));\n\n // Read lastAccessedAt for each entry — lightweight parse\n const metas: LRUMeta[] = [];\n await Promise.all(\n keys.map(async (key) => {\n try {\n const raw = await storage.get(key);\n if (!raw) return;\n const parsed = JSON.parse(raw) as Partial<CacheEntry<unknown>>;\n metas.push({ key, lastAccessedAt: parsed.lastAccessedAt ?? 
0 });\n } catch {}\n })\n );\n\n // Sort oldest-first and evict the bottom 20%\n metas.sort((a, b) => a.lastAccessedAt - b.lastAccessedAt);\n const evictCount = Math.max(1, Math.floor(metas.length * 0.2));\n const toEvict = metas.slice(0, evictCount);\n\n await Promise.all(toEvict.map(({ key }) => storage.delete(key)));\n}\n\n// ─── Partial hydration ────────────────────────────────────────────────────────\n\n/**\n * Read a subset of a NormalizedList cache entry by item ids.\n *\n * Use for pagination, lazy loading, or detail views that only need\n * a handful of items from a large cached list.\n *\n * @returns null if the cache entry doesn't exist.\n * Empty array if none of the requested ids are cached.\n *\n * @example\n * const items = await getPartialCache<Expense>(\n * cacheKeyFor([\"expenses\", tripId]),\n * [\"exp_1\", \"exp_2\"]\n * );\n */\nexport async function getPartialCache<T extends AnyItem>(\n key: string,\n ids: string[]\n): Promise<T[] | null> {\n const entry = await readCache<NormalizedList<T>>(key);\n if (!entry) return null;\n\n const { byId } = entry.data;\n const result: T[] = [];\n for (const id of ids) {\n if (id in byId) result.push(byId[id]);\n }\n return result;\n}\n\n// ─── TTL check ────────────────────────────────────────────────────────────────\n\nexport function isCacheStale(entry: CacheEntry<unknown>, ttlMs: number): boolean {\n return Date.now() - entry.cachedAt > ttlMs;\n}\n","/**\n * src/services/queue.service.ts\n *\n * Offline Mutation Queue — persist-first, retry-on-reconnect, with coalescing.\n *\n * Key design decisions:\n * • Sequential FIFO processing preserves causal ordering\n * • Coalescing: two mutations with the same entityKey are merged before send\n * (prevents stale optimistic updates from racing each other)\n * • Exponential backoff with full jitter — no thundering herd on reconnect\n * • Observability events on enqueue / success / failure / drain\n * • clearQueue() on logout prevents cross-user mutation 
leakage\n */\n\nimport { getStorage } from \"./storage.adapter\";\nimport { emit } from \"./observer.service\";\nimport type { QueuedMutation, MutationType } from \"../types\";\n\nexport type { QueuedMutation, MutationType };\n\n// ─── Constants ────────────────────────────────────────────────────────────────\n\nconst QUEUE_KEY = \"sq2:mutation_queue\";\nconst DEFAULT_MAX_RETRIES = 5;\nconst BACKOFF_BASE_MS = 1_000;\nconst BACKOFF_MAX_MS = 120_000;\n\n// ─── Executor registry ────────────────────────────────────────────────────────\n\ntype Executor<P = unknown> = (mutation: QueuedMutation<P>) => Promise<void>;\nconst executors = new Map<string, Executor>();\n\n/**\n * Register an executor for a mutation type.\n * Must be called at app startup before mutations are enqueued.\n *\n * @example\n * registerExecutor(\"ADD_ITEM\", async (m) => {\n * await api.post(\"/expenses\", m.payload);\n * });\n */\nexport function registerExecutor<TPayload>(\n type: string,\n fn: Executor<TPayload>\n): void {\n executors.set(type, fn as Executor);\n}\n\n// ─── Queue coalescing ─────────────────────────────────────────────────────────\n\n/**\n * Coalesce mutations with the same entityKey.\n *\n * Rules (applied in order for each entityKey group):\n * • REMOVE_ITEM after any other mutation → keep only REMOVE_ITEM\n * • Multiple UPDATE_ITEM → keep only the latest (highest enqueuedAt)\n * • Multiple ADD_ITEM → keep only the latest (shouldn't normally happen)\n * • ADD_ITEM then UPDATE_ITEM → merge into a single ADD_ITEM with latest payload\n *\n * Mutations without an entityKey are never coalesced.\n */\nfunction coalesceQueue(queue: QueuedMutation[]): QueuedMutation[] {\n // Separate coalesable (have entityKey) from non-coalesable\n const byEntityKey = new Map<string, QueuedMutation[]>();\n const standalone: QueuedMutation[] = [];\n\n for (const m of queue) {\n if (!m.entityKey) { standalone.push(m); continue; }\n const group = byEntityKey.get(m.entityKey) ?? 
[];\n group.push(m);\n byEntityKey.set(m.entityKey, group);\n }\n\n const coalesced: QueuedMutation[] = [];\n\n for (const group of byEntityKey.values()) {\n // Sort by enqueuedAt ascending within each group\n group.sort((a, b) => a.enqueuedAt - b.enqueuedAt);\n\n const hasRemove = group.some((m) => m.type === \"REMOVE_ITEM\");\n if (hasRemove) {\n // Keep only the last REMOVE_ITEM — all preceding mutations are superseded\n const removeOp = [...group].reverse().find((m) => m.type === \"REMOVE_ITEM\")!;\n coalesced.push(removeOp);\n continue;\n }\n\n const addOp = group.find((m) => m.type === \"ADD_ITEM\");\n const updateOps = group.filter((m) => m.type === \"UPDATE_ITEM\");\n\n if (addOp && updateOps.length > 0) {\n // ADD + UPDATE(s) → single ADD with the latest payload\n const latestUpdate = updateOps[updateOps.length - 1];\n coalesced.push({ ...addOp, payload: latestUpdate.payload });\n } else if (addOp) {\n coalesced.push(addOp);\n } else if (updateOps.length > 0) {\n // Multiple UPDATEs → keep latest\n coalesced.push(updateOps[updateOps.length - 1]);\n } else {\n // CUSTOM or mixed — keep all\n coalesced.push(...group);\n }\n }\n\n // Restore original ordering by enqueuedAt\n return [...standalone, ...coalesced].sort((a, b) => a.enqueuedAt - b.enqueuedAt);\n}\n\n// ─── Persistence ──────────────────────────────────────────────────────────────\n\nasync function loadQueue(): Promise<QueuedMutation[]> {\n try {\n const raw = await getStorage().get(QUEUE_KEY);\n if (!raw) return [];\n return JSON.parse(raw) as QueuedMutation[];\n } catch { return []; }\n}\n\nasync function saveQueue(queue: QueuedMutation[]): Promise<void> {\n try {\n await getStorage().set(QUEUE_KEY, JSON.stringify(queue));\n } catch {}\n}\n\n// ─── Backoff ──────────────────────────────────────────────────────────────────\n\nfunction backoffMs(retryCount: number): number {\n const exp = Math.min(BACKOFF_BASE_MS * 2 ** retryCount, BACKOFF_MAX_MS);\n return Math.random() * exp; // full 
jitter\n}\n\n// ─── Processing ───────────────────────────────────────────────────────────────\n\nlet isProcessing = false;\n\n/**\n * Process all pending mutations in FIFO order.\n * Coalesces before sending to minimize network calls.\n * Safe to call concurrently — guarded by isProcessing flag.\n */\nexport async function processQueue(): Promise<void> {\n if (isProcessing) return;\n isProcessing = true;\n\n try {\n const raw = await loadQueue();\n if (raw.length === 0) return;\n\n const queue = coalesceQueue(raw);\n const now = Date.now();\n const remaining: QueuedMutation[] = [];\n\n for (const mutation of queue) {\n if (mutation.nextRetryAt > now) { remaining.push(mutation); continue; }\n\n const executor = executors.get(mutation.type);\n if (!executor) {\n if (__DEV__) {\n console.warn(`[SmartQuery] No executor for \"${mutation.type}\"`);\n }\n remaining.push(mutation);\n continue;\n }\n\n try {\n await executor(mutation);\n emit({ type: \"queue_success\", mutationId: mutation.id });\n } catch (err) {\n const nextRetry = mutation.retryCount + 1;\n emit({ type: \"queue_failure\", mutationId: mutation.id, retryCount: nextRetry });\n\n if (nextRetry >= mutation.maxRetries) {\n if (__DEV__) {\n console.error(\n `[SmartQuery] Mutation ${mutation.id} dropped after ${mutation.maxRetries} retries`,\n err\n );\n }\n } else {\n remaining.push({\n ...mutation,\n retryCount: nextRetry,\n nextRetryAt: now + backoffMs(nextRetry),\n });\n }\n }\n }\n\n await saveQueue(remaining);\n if (remaining.length === 0) emit({ type: \"queue_drained\" });\n } finally {\n isProcessing = false;\n }\n}\n\n// ─── Public API ───────────────────────────────────────────────────────────────\n\n/**\n * Add a mutation to the persistent queue.\n *\n * @param entityKey Optional logical key for coalescing (e.g. 
\"expense:exp_123\").\n * Mutations with the same entityKey are merged before sending.\n */\nexport async function enqueueMutation<TPayload>(options: {\n type: MutationType | string;\n queryKey: readonly unknown[];\n payload: TPayload;\n entityKey?: string;\n maxRetries?: number;\n}): Promise<void> {\n const queue = await loadQueue();\n const mutation: QueuedMutation<TPayload> = {\n id: `mut_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`,\n type: options.type as MutationType,\n entityKey: options.entityKey,\n queryKey: options.queryKey,\n payload: options.payload,\n enqueuedAt: Date.now(),\n retryCount: 0,\n maxRetries: options.maxRetries ?? DEFAULT_MAX_RETRIES,\n nextRetryAt: 0,\n };\n queue.push(mutation as QueuedMutation);\n await saveQueue(queue);\n emit({ type: \"queue_enqueue\", mutationId: mutation.id, mutationType: mutation.type });\n}\n\n/** Process queue on app startup */\nexport async function initQueue(): Promise<void> {\n await processQueue();\n}\n\n/** Clear all pending mutations — call on logout */\nexport async function clearQueue(): Promise<void> {\n await saveQueue([]);\n}\n\nexport const getQueue = loadQueue;\nexport const getQueueLength = async (): Promise<number> =>\n (await loadQueue()).length;\n","/**\n * src/services/requestLock.service.ts\n *\n * In-flight request deduplication.\n * Concurrent calls with the same key share one Promise — only one fetch fires.\n */\n\nconst inFlight = new Map<string, Promise<unknown>>();\n\nexport function fetchWithLock<T>(key: string, fn: () => Promise<T>): Promise<T> {\n const existing = inFlight.get(key);\n if (existing) return existing as Promise<T>;\n\n const promise = fn().finally(() => inFlight.delete(key));\n inFlight.set(key, promise);\n return promise;\n}\n\nexport const inFlightCount = (): number => inFlight.size;\nexport const inFlightKeys = (): string[] => Array.from(inFlight.keys());\n","/**\n * src/utils/debug.ts\n *\n * Developer debug tools — zero production overhead.\n * Attach to 
globalThis only when __DEV__ is true.\n *\n * Import once as a side effect:\n * import \"react-smart-query/debug\"; // in App.tsx\n *\n * Then use in debugger / console:\n * await smartQueryDebug.snapshot()\n * await smartQueryDebug.inspectCache([\"expenses\", \"trip_1\"])\n * smartQueryDebug.inFlightKeys()\n */\n\nimport { cacheKeyFor, readCache } from \"../services/cache.service\";\nimport { getStorage } from \"../services/storage.adapter\";\nimport { getQueue, getQueueLength } from \"../services/queue.service\";\nimport { inFlightCount, inFlightKeys } from \"../services/requestLock.service\";\n\nif (__DEV__) {\n const debug = {\n async inspectCache(queryKey: readonly unknown[]) {\n return readCache(cacheKeyFor(queryKey), queryKey);\n },\n async listCacheKeys() {\n const keys = await getStorage().keys();\n return keys.filter((k) => k.startsWith(\"sq2:\"));\n },\n async clearCache() {\n await getStorage().clearAll();\n console.log(\"[SmartQuery] Cache cleared\");\n },\n getQueue,\n getQueueLength,\n inFlightKeys,\n inFlightCount,\n async snapshot() {\n const [keys, queue] = await Promise.all([\n debug.listCacheKeys(),\n debug.getQueue(),\n ]);\n console.group(\"[SmartQuery] Debug Snapshot\");\n console.log(\"Cache entries:\", keys.length, keys);\n console.log(\"Queued mutations:\", queue.length, queue);\n console.log(\"In-flight requests:\", inFlightKeys());\n console.groupEnd();\n },\n };\n\n (globalThis as Record<string, unknown>).smartQueryDebug = debug;\n console.log(\"[SmartQuery] Debug tools ready → smartQueryDebug.snapshot()\");\n}\n\nexport {};\n"]}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import { init_requestLock_service, inFlightCount, inFlightKeys, getQueueLength, getQueue } from '../chunk-KLJQATIV.mjs';
|
|
2
|
+
import { readCache, cacheKeyFor } from '../chunk-KSLDOL27.mjs';
|
|
3
|
+
import { getStorage } from '../chunk-QRCVY7UR.mjs';
|
|
4
|
+
|
|
5
|
+
// src/utils/debug.ts
|
|
6
|
+
init_requestLock_service();
|
|
7
|
+
if (__DEV__) {
|
|
8
|
+
const debug = {
|
|
9
|
+
async inspectCache(queryKey) {
|
|
10
|
+
return readCache(cacheKeyFor(queryKey), queryKey);
|
|
11
|
+
},
|
|
12
|
+
async listCacheKeys() {
|
|
13
|
+
const keys = await getStorage().keys();
|
|
14
|
+
return keys.filter((k) => k.startsWith("sq2:"));
|
|
15
|
+
},
|
|
16
|
+
async clearCache() {
|
|
17
|
+
await getStorage().clearAll();
|
|
18
|
+
console.log("[SmartQuery] Cache cleared");
|
|
19
|
+
},
|
|
20
|
+
getQueue,
|
|
21
|
+
getQueueLength,
|
|
22
|
+
inFlightKeys,
|
|
23
|
+
inFlightCount,
|
|
24
|
+
async snapshot() {
|
|
25
|
+
const [keys, queue] = await Promise.all([
|
|
26
|
+
debug.listCacheKeys(),
|
|
27
|
+
debug.getQueue()
|
|
28
|
+
]);
|
|
29
|
+
console.group("[SmartQuery] Debug Snapshot");
|
|
30
|
+
console.log("Cache entries:", keys.length, keys);
|
|
31
|
+
console.log("Queued mutations:", queue.length, queue);
|
|
32
|
+
console.log("In-flight requests:", inFlightKeys());
|
|
33
|
+
console.groupEnd();
|
|
34
|
+
}
|
|
35
|
+
};
|
|
36
|
+
globalThis.smartQueryDebug = debug;
|
|
37
|
+
console.log("[SmartQuery] Debug tools ready \u2192 smartQueryDebug.snapshot()");
|
|
38
|
+
}
|
|
39
|
+
//# sourceMappingURL=debug.mjs.map
|
|
40
|
+
//# sourceMappingURL=debug.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/utils/debug.ts"],"names":[],"mappings":";;;;;AAkBA,wBAAA,EAAA;AAEA,IAAI,OAAA,EAAS;AACX,EAAA,MAAM,KAAA,GAAQ;AAAA,IACZ,MAAM,aAAa,QAAA,EAA8B;AAC/C,MAAA,OAAO,SAAA,CAAU,WAAA,CAAY,QAAQ,CAAA,EAAG,QAAQ,CAAA;AAAA,IAClD,CAAA;AAAA,IACA,MAAM,aAAA,GAAgB;AACpB,MAAA,MAAM,IAAA,GAAO,MAAM,UAAA,EAAW,CAAE,IAAA,EAAK;AACrC,MAAA,OAAO,KAAK,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,UAAA,CAAW,MAAM,CAAC,CAAA;AAAA,IAChD,CAAA;AAAA,IACA,MAAM,UAAA,GAAa;AACjB,MAAA,MAAM,UAAA,GAAa,QAAA,EAAS;AAC5B,MAAA,OAAA,CAAQ,IAAI,4BAA4B,CAAA;AAAA,IAC1C,CAAA;AAAA,IACA,QAAA;AAAA,IACA,cAAA;AAAA,IACA,YAAA;AAAA,IACA,aAAA;AAAA,IACA,MAAM,QAAA,GAAW;AACf,MAAA,MAAM,CAAC,IAAA,EAAM,KAAK,CAAA,GAAI,MAAM,QAAQ,GAAA,CAAI;AAAA,QACtC,MAAM,aAAA,EAAc;AAAA,QACpB,MAAM,QAAA;AAAS,OAChB,CAAA;AACD,MAAA,OAAA,CAAQ,MAAM,6BAA6B,CAAA;AAC3C,MAAA,OAAA,CAAQ,GAAA,CAAI,gBAAA,EAAkB,IAAA,CAAK,MAAA,EAAQ,IAAI,CAAA;AAC/C,MAAA,OAAA,CAAQ,GAAA,CAAI,mBAAA,EAAqB,KAAA,CAAM,MAAA,EAAQ,KAAK,CAAA;AACpD,MAAA,OAAA,CAAQ,GAAA,CAAI,qBAAA,EAAuB,YAAA,EAAc,CAAA;AACjD,MAAA,OAAA,CAAQ,QAAA,EAAS;AAAA,IACnB;AAAA,GACF;AAEA,EAAC,WAAuC,eAAA,GAAkB,KAAA;AAC1D,EAAA,OAAA,CAAQ,IAAI,kEAA6D,CAAA;AAC3E","file":"debug.mjs","sourcesContent":["/**\n * src/utils/debug.ts\n *\n * Developer debug tools — zero production overhead.\n * Attach to globalThis only when __DEV__ is true.\n *\n * Import once as a side effect:\n * import \"react-smart-query/debug\"; // in App.tsx\n *\n * Then use in debugger / console:\n * await smartQueryDebug.snapshot()\n * await smartQueryDebug.inspectCache([\"expenses\", \"trip_1\"])\n * smartQueryDebug.inFlightKeys()\n */\n\nimport { cacheKeyFor, readCache } from \"../services/cache.service\";\nimport { getStorage } from \"../services/storage.adapter\";\nimport { getQueue, getQueueLength } from \"../services/queue.service\";\nimport { inFlightCount, inFlightKeys } from \"../services/requestLock.service\";\n\nif (__DEV__) {\n const debug = {\n async inspectCache(queryKey: readonly unknown[]) {\n return readCache(cacheKeyFor(queryKey), 
queryKey);\n },\n async listCacheKeys() {\n const keys = await getStorage().keys();\n return keys.filter((k) => k.startsWith(\"sq2:\"));\n },\n async clearCache() {\n await getStorage().clearAll();\n console.log(\"[SmartQuery] Cache cleared\");\n },\n getQueue,\n getQueueLength,\n inFlightKeys,\n inFlightCount,\n async snapshot() {\n const [keys, queue] = await Promise.all([\n debug.listCacheKeys(),\n debug.getQueue(),\n ]);\n console.group(\"[SmartQuery] Debug Snapshot\");\n console.log(\"Cache entries:\", keys.length, keys);\n console.log(\"Queued mutations:\", queue.length, queue);\n console.log(\"In-flight requests:\", inFlightKeys());\n console.groupEnd();\n },\n };\n\n (globalThis as Record<string, unknown>).smartQueryDebug = debug;\n console.log(\"[SmartQuery] Debug tools ready → smartQueryDebug.snapshot()\");\n}\n\nexport {};\n"]}
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
# React Smart Query Documentation
|
|
2
|
+
|
|
3
|
+
Welcome to the detailed documentation for `react-smart-query`. This library provides a robust, offline-first, normalized data orchestration layer tailored for React Native and Web applications, built on top of TanStack Query.
|
|
4
|
+
|
|
5
|
+
## Table of Contents
|
|
6
|
+
|
|
7
|
+
1. [Core Concepts](#core-concepts)
|
|
8
|
+
2. [Hooks API](#hooks-api)
|
|
9
|
+
- [`useSmartQuery`](#usesmartquery)
|
|
10
|
+
- [`useInfiniteSmartQuery`](#useinfinitesmartquery)
|
|
11
|
+
- [`useSmartMutation`](#usesmartmutation)
|
|
12
|
+
- [`useSmartQuerySelector`](#usesmartqueryselector)
|
|
13
|
+
3. [Factory API](#factory-api)
|
|
14
|
+
- [`createTypedQuery`](#createtypedquery)
|
|
15
|
+
4. [Advanced Features](#advanced-features)
|
|
16
|
+
- [Offline Queue](#offline-queue)
|
|
17
|
+
- [Normalization & Cache](#normalization--cache)
|
|
18
|
+
- [Debugging](#debugging)
|
|
19
|
+
|
|
20
|
+
---
|
|
21
|
+
|
|
22
|
+
## Core Concepts
|
|
23
|
+
|
|
24
|
+
### Normalization
|
|
25
|
+
Instead of keeping duplicate instances of data items in memory across different query responses (especially in paginated lists), `react-smart-query` stores data in a normalized cache (`byId` map and `allIds` array). This ensures that updates to an item in one place immediately reflect everywhere that item is rendered.
|
|
26
|
+
|
|
27
|
+
### Offline-First Architecture
|
|
28
|
+
Mutations made while the device is offline are pushed to a persistent queue. When the device regains connectivity, the queue is processed automatically, ensuring no data loss.
|
|
29
|
+
|
|
30
|
+
### Deterministic Sorting
|
|
31
|
+
For infinite lists, `react-smart-query` maintains a strictly ordered aggregate list (`allIds`). When new items are added, they are inserted using binary search (O(log n)), guaranteeing perfectly sorted lists without "flickering".
|
|
32
|
+
|
|
33
|
+
---
|
|
34
|
+
|
|
35
|
+
## Hooks API
|
|
36
|
+
|
|
37
|
+
### `useSmartQuery`
|
|
38
|
+
A drop-in enhancement for TanStack's `useQuery`. It seamlessly reads from and writes to the normalized store.
|
|
39
|
+
|
|
40
|
+
**Example Usage:**
|
|
41
|
+
```tsx
|
|
42
|
+
import { useSmartQuery } from 'react-smart-query';
|
|
43
|
+
|
|
44
|
+
const { data, isLoading, error } = useSmartQuery({
|
|
45
|
+
queryKey: ['userProfile', userId],
|
|
46
|
+
queryFn: () => fetchUserProfile(userId),
|
|
47
|
+
});
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
### `useInfiniteSmartQuery`
|
|
51
|
+
The flagship hook for handling paginated data. It abstracts away the complexity of infinite scrolling, normalization, and sorting.
|
|
52
|
+
|
|
53
|
+
**Key Props:**
|
|
54
|
+
- `queryKey`: Unique identifier for the query.
|
|
55
|
+
- `queryFn`: Function to fetch a page of data.
|
|
56
|
+
- `getNextCursor`: Function to extract the next page token from a response.
|
|
57
|
+
- `getItemId`: Function extracting a unique ID from an item.
|
|
58
|
+
- `sortComparator`: Function sorting items in the list.
|
|
59
|
+
|
|
60
|
+
**Example Usage:**
|
|
61
|
+
```tsx
|
|
62
|
+
import { useInfiniteSmartQuery } from 'react-smart-query';
|
|
63
|
+
|
|
64
|
+
const { data, addItem, removeItem, fetchNextPage } = useInfiniteSmartQuery({
|
|
65
|
+
queryKey: ['feed'],
|
|
66
|
+
queryFn: ({ pageParam }) => fetchFeed(pageParam),
|
|
67
|
+
getNextCursor: (res) => res.nextCursor,
|
|
68
|
+
select: (res) => res.items,
|
|
69
|
+
getItemId: (item) => item.id,
|
|
70
|
+
sortComparator: (a, b) => b.timestamp - a.timestamp,
|
|
71
|
+
});
|
|
72
|
+
|
|
73
|
+
// Mutating the list
|
|
74
|
+
const onPostCreated = (newPost) => addItem(newPost);
|
|
75
|
+
const onPostDeleted = (postId) => removeItem(postId);
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
### `useSmartMutation`
|
|
79
|
+
Handles creating, updating, or deleting items. It integrates with the offline queue and provides optimistic UI updates.
|
|
80
|
+
|
|
81
|
+
**Example Usage:**
|
|
82
|
+
```tsx
|
|
83
|
+
import { useSmartMutation } from 'react-smart-query';
|
|
84
|
+
|
|
85
|
+
const mutate = useSmartMutation({
|
|
86
|
+
mutationFn: (newExpense) => api.post('/expenses', newExpense),
|
|
87
|
+
onMutate: (newExpense) => {
|
|
88
|
+
// Optimistically update UI
|
|
89
|
+
},
|
|
90
|
+
});
|
|
91
|
+
```
|
|
92
|
+
|
|
93
|
+
### `useSmartQuerySelector`
|
|
94
|
+
Allows fine-grained subscriptions to a slice of the cached data, preventing unnecessary re-renders.
|
|
95
|
+
|
|
96
|
+
**Example Usage:**
|
|
97
|
+
```tsx
|
|
98
|
+
import { useSmartQuerySelector } from 'react-smart-query';
|
|
99
|
+
|
|
100
|
+
const specificItem = useSmartQuerySelector(['expenses'], (data) => data.find(i => i.id === '123'));
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
---
|
|
104
|
+
|
|
105
|
+
## Factory API
|
|
106
|
+
|
|
107
|
+
### `createTypedQuery`
|
|
108
|
+
Provides a way to create strongly-typed query configurations that can be reused across your application, ensuring type safety and consistency.
|
|
109
|
+
|
|
110
|
+
**Example Usage:**
|
|
111
|
+
```tsx
|
|
112
|
+
import { createTypedQuery } from 'react-smart-query';
|
|
113
|
+
|
|
114
|
+
export const userQuery = createTypedQuery({
|
|
115
|
+
queryKeyBase: ['users'],
|
|
116
|
+
queryFn: (id: string) => fetchUser(id),
|
|
117
|
+
});
|
|
118
|
+
|
|
119
|
+
// In component:
|
|
120
|
+
const { data } = useSmartQuery(userQuery.buildConfig('user-123'));
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
---
|
|
124
|
+
|
|
125
|
+
## Advanced Features
|
|
126
|
+
|
|
127
|
+
### Offline Queue
|
|
128
|
+
The library automatically handles queuing mutations when offline.
|
|
129
|
+
To interact with it manually (rarely needed):
|
|
130
|
+
```tsx
|
|
131
|
+
import { getQueue, clearQueue, processQueue } from 'react-smart-query';
|
|
132
|
+
```
|
|
133
|
+
|
|
134
|
+
### Normalization & Cache
|
|
135
|
+
While hooks handle normalization automatically, you can interact with the raw cache:
|
|
136
|
+
```tsx
|
|
137
|
+
import { readCache, writeCache, clearAllSmartCache } from 'react-smart-query';
|
|
138
|
+
```
|
|
139
|
+
|
|
140
|
+
### Debugging
|
|
141
|
+
For development, you can enable verbose logging and snapshot tools.
|
|
142
|
+
```tsx
|
|
143
|
+
// In your App entry point (DEV ONLY)
|
|
144
|
+
import "react-smart-query/debug";
|
|
145
|
+
|
|
146
|
+
// Access the debugger window or call globally (if attached)
|
|
147
|
+
import { smartQueryDebug } from 'react-smart-query';
|
|
148
|
+
smartQueryDebug.snapshot();
|
|
149
|
+
```
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
# Best Practices & Guidelines
|
|
2
|
+
|
|
3
|
+
To get the most out of `react-smart-query`, follow these architectural guidelines.
|
|
4
|
+
|
|
5
|
+
## 1. Always provide an ID and a Sort Comparator
|
|
6
|
+
When using `useInfiniteSmartQuery`, `getItemId` and `sortComparator` are critical. Even if your API returns items in perfect order, the sort comparator is how `react-smart-query` knows where to insert *optimistic or newly added offline items*.
|
|
7
|
+
|
|
8
|
+
```typescript
|
|
9
|
+
// Good
|
|
10
|
+
sortComparator: (a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime()
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## 2. Leverage Background Fetch Guards
|
|
14
|
+
`react-smart-query` protects local mutations. If you add an item locally, and a subsequent background refetch (from `react-query`) does *not* contain that item, it won't be deleted if it was created locally within the guard window (default 5 minutes).
|
|
15
|
+
|
|
16
|
+
## 3. Keep Cache Keys Consistent
|
|
17
|
+
Use strongly typed cache keys or leverage `createTypedQuery` to ensure your keys match exactly across your application. Normalization ties items to keys; a typo in a key implies an entirely separate data store.
|
|
18
|
+
|
|
19
|
+
## 4. Handle Offline Sync Gracefully
|
|
20
|
+
The queue processes mutations automatically when the device comes online. Ensure your `mutationFn` in `useSmartMutation` is robust enough to handle data that might be slightly stale.
|
|
21
|
+
|
|
22
|
+
## 5. Don't Store Enormous Lists Indefinitely
|
|
23
|
+
The library automatically applies a soft trim when lists exceed a certain threshold to prevent memory bloat, but avoid querying 10,000 items in a single non-paginated `useSmartQuery`.
|