supastash 0.1.43 → 0.1.45

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. package/README.md +10 -18
  2. package/dist/hooks/supastashData/fetchCalls.d.ts +1 -0
  3. package/dist/hooks/supastashData/fetchCalls.d.ts.map +1 -1
  4. package/dist/hooks/supastashData/fetchCalls.js +7 -1
  5. package/dist/hooks/supastashData/index.d.ts +2 -1
  6. package/dist/hooks/supastashData/index.d.ts.map +1 -1
  7. package/dist/hooks/supastashData/index.js +5 -3
  8. package/dist/hooks/syncEngine/pullFromRemote/index.d.ts.map +1 -1
  9. package/dist/hooks/syncEngine/pullFromRemote/index.js +38 -0
  10. package/dist/hooks/syncEngine/pushLocal/index.d.ts.map +1 -1
  11. package/dist/hooks/syncEngine/pushLocal/index.js +38 -0
  12. package/dist/hooks/syncStatus/index.d.ts +36 -12
  13. package/dist/hooks/syncStatus/index.d.ts.map +1 -1
  14. package/dist/hooks/syncStatus/index.js +61 -20
  15. package/dist/store/syncStatus.d.ts +3 -0
  16. package/dist/store/syncStatus.d.ts.map +1 -1
  17. package/dist/store/syncStatus.js +40 -0
  18. package/dist/types/realtimeData.types.d.ts +30 -0
  19. package/dist/types/syncEngine.types.d.ts +79 -0
  20. package/dist/utils/sync/pullFromRemote/pullData.d.ts.map +1 -1
  21. package/dist/utils/sync/pullFromRemote/pullData.js +14 -0
  22. package/dist/utils/sync/pullFromRemote/updateLocalDb.d.ts.map +1 -1
  23. package/dist/utils/sync/pullFromRemote/updateLocalDb.js +12 -0
  24. package/dist/utils/sync/pushLocal/sendUnsyncedToSupabase.d.ts.map +1 -1
  25. package/dist/utils/sync/pushLocal/sendUnsyncedToSupabase.js +12 -3
  26. package/dist/utils/sync/pushLocal/uploadChunk.d.ts.map +1 -1
  27. package/dist/utils/sync/pushLocal/uploadChunk.js +14 -1
  28. package/dist/utils/sync/queryStatus.d.ts +56 -0
  29. package/dist/utils/sync/queryStatus.d.ts.map +1 -1
  30. package/dist/utils/sync/queryStatus.js +147 -1
  31. package/package.json +1 -1
package/README.md CHANGED
@@ -8,7 +8,10 @@
8
8
 
9
9
  ---
10
10
 
11
- ## 📚 [Full Docs](https://0xzekea.github.io/supastash/)
11
+ ## 📚 Documentation
12
+
13
+ → [Full Docs](https://0xzekea.github.io/supastash/)
14
+ → [Getting Started Guide](./docs/getting-started.md)
12
15
 
13
16
  ---
14
17
 
@@ -21,7 +24,7 @@
21
24
 
22
25
  - `expo-sqlite`
23
26
  - `react-native-nitro-sqlite`
24
- - `react-native-sqlite-storage`
27
+ - `react-native-sqlite-storage` (beta)
25
28
 
26
29
  - 🧠 Built-in:
27
30
 
@@ -59,7 +62,7 @@ npm install expo-sqlite
59
62
  # Bare RN with better speed
60
63
  npm install react-native-nitro-sqlite
61
64
 
62
- # Classic RN SQLite
65
+ # Classic RN SQLite (beta)
63
66
  npm install react-native-sqlite-storage
64
67
  ```
65
68
 
@@ -134,12 +137,13 @@ Use this in a hook like `useHydrateStores()` to stay in sync without polling.
134
137
 
135
138
  ---
136
139
 
137
- ### 🔁 `useSupastashData` (Global, Realtime)
140
+ ### 🔁 `useSupastashData` (with Realtime)
138
141
 
139
142
  ```ts
140
143
  const { data, groupedBy } = useSupastashData("orders", {
141
144
  filter: { column: "user_id", operator: "eq", value: userId },
142
145
  extraMapKeys: ["status"],
146
+ realtime: true, // Default: true
143
147
  });
144
148
  ```
145
149
 
@@ -169,7 +173,7 @@ useSupastashFilters({
169
173
  - Your Supabase tables must have:
170
174
 
171
175
  - A primary key `id` (string or UUID)
172
- - `timestamptz` columns for `created_at`, `updated_at`, and optionally `deleted_at`
176
+ - `timestamptz` columns for `created_at`, `updated_at`, and `deleted_at`
173
177
 
174
178
  - Run this SQL in Supabase to allow schema reflection:
175
179
 
@@ -204,7 +208,7 @@ const { data: orders } = useSupatashData("orders", {
204
208
 
205
209
  - Tracks rows using `updated_at`, `deleted_at`, and `created_at`
206
210
  - Batches changes in background and retries failed ones
207
- - Keeps **local database as the source of truth**
211
+ - Local cache backed by Supabase
208
212
  - Runs pull/push jobs efficiently using staged task pipelines
209
213
 
210
214
  ---
@@ -223,18 +227,6 @@ await supastash
223
227
 
224
228
  ---
225
229
 
226
- ## 🗂 Recommended Project Structure
227
-
228
- ```
229
- src/
230
- ├─ core/ # Supastash config, Supabase client
231
- ├─ hooks/ # useSupatashData, useSupastashFilters etc.
232
- ├─ types/ # Zod schemas, DB types
233
- ├─ utils/ # Local helpers
234
- ```
235
-
236
- ---
237
-
238
230
  ## 🔧 API Docs
239
231
 
240
232
  - [`configureSupastash()`](https://0xzekea.github.io/supastash/docs/configuration)
@@ -4,5 +4,6 @@ export declare function fetchCalls<R = any>(table: string, options: RealtimeOpti
4
4
  trigger: () => void;
5
5
  cancel: () => void;
6
6
  initialFetchAndSync: () => Promise<void>;
7
+ isFetching: boolean;
7
8
  };
8
9
  //# sourceMappingURL=fetchCalls.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"fetchCalls.d.ts","sourceRoot":"","sources":["../../../src/hooks/supastashData/fetchCalls.ts"],"names":[],"mappings":"AAGA,OAAO,EAEL,eAAe,EAChB,MAAM,gCAAgC,CAAC;AAKxC,wBAAgB,UAAU,CAAC,CAAC,GAAG,GAAG,EAChC,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,eAAe,CAAC,CAAC,CAAC,EAC3B,WAAW,EAAE,KAAK,CAAC,SAAS,CAAC,OAAO,CAAC;;;;;EAuGtC"}
1
+ {"version":3,"file":"fetchCalls.d.ts","sourceRoot":"","sources":["../../../src/hooks/supastashData/fetchCalls.ts"],"names":[],"mappings":"AAGA,OAAO,EAEL,eAAe,EAChB,MAAM,gCAAgC,CAAC;AAKxC,wBAAgB,UAAU,CAAC,CAAC,GAAG,GAAG,EAChC,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,eAAe,CAAC,CAAC,CAAC,EAC3B,WAAW,EAAE,KAAK,CAAC,SAAS,CAAC,OAAO,CAAC;;;;;;EA4GtC"}
@@ -1,4 +1,4 @@
1
- import { useEffect, useRef } from "react";
1
+ import { useEffect, useRef, useState } from "react";
2
2
  import { syncCalls } from "../../store/syncCalls";
3
3
  import { tableFilters, tableFiltersUsed } from "../../store/tableFilters";
4
4
  import { fetchLocalData } from "../../utils/fetchData/fetchLocalData";
@@ -7,6 +7,7 @@ import { logError } from "../../utils/logs";
7
7
  export function fetchCalls(table, options, initialized) {
8
8
  const { shouldFetch = true, limit, filter, onPushToRemote, onInsertAndUpdate, useFilterWhileSyncing = true, extraMapKeys, daylength, onlyUseFilterForRealtime, orderBy, orderDesc, sqlFilter, } = options;
9
9
  const cancelled = useRef(false);
10
+ const [isFetching, setIsFetching] = useState(false);
10
11
  useEffect(() => {
11
12
  if (filter && useFilterWhileSyncing && !tableFiltersUsed.has(table)) {
12
13
  tableFilters.set(table, [filter]);
@@ -66,17 +67,22 @@ export function fetchCalls(table, options, initialized) {
66
67
  filters = filter ? [filter] : undefined;
67
68
  }
68
69
  try {
70
+ setIsFetching(true);
69
71
  await initialFetch(table, filters, onInsertAndUpdate, onPushToRemote);
70
72
  await fetch();
71
73
  }
72
74
  catch (error) {
73
75
  logError(`[Supastash] Error on initial fetch for ${table}`, error);
74
76
  }
77
+ finally {
78
+ setIsFetching(false);
79
+ }
75
80
  };
76
81
  return {
77
82
  triggerRefresh,
78
83
  trigger,
79
84
  cancel,
80
85
  initialFetchAndSync,
86
+ isFetching,
81
87
  };
82
88
  }
@@ -59,7 +59,8 @@ import { RealtimeOptions, SupastashDataResult } from "../../types/realtimeData.t
59
59
  * dataMap: Map of records by ID,
60
60
  * groupedBy: Optional maps grouped by field,
61
61
  * trigger: Manually trigger sync,
62
- * cancel: Cancel pending fetch or sync
62
+ * cancel: Cancel pending fetch or sync,
63
+ * isFetching: Whether the data is being fetched
63
64
  * }
64
65
  */
65
66
  export declare function useSupastashData<R = any>(table: string, options?: RealtimeOptions): SupastashDataResult<R>;
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/hooks/supastashData/index.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,eAAe,EACf,mBAAmB,EACpB,MAAM,gCAAgC,CAAC;AAWxC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8DG;AACH,wBAAgB,gBAAgB,CAAC,CAAC,GAAG,GAAG,EACtC,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,eAAoB,GAC5B,mBAAmB,CAAC,CAAC,CAAC,CA8FxB"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/hooks/supastashData/index.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,eAAe,EACf,mBAAmB,EACpB,MAAM,gCAAgC,CAAC;AAWxC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+DG;AACH,wBAAgB,gBAAgB,CAAC,CAAC,GAAG,GAAG,EACtC,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,eAAoB,GAC5B,mBAAmB,CAAC,CAAC,CAAC,CA4FxB"}
@@ -69,7 +69,8 @@ const tableSubscriptions = new Map();
69
69
  * dataMap: Map of records by ID,
70
70
  * groupedBy: Optional maps grouped by field,
71
71
  * trigger: Manually trigger sync,
72
- * cancel: Cancel pending fetch or sync
72
+ * cancel: Cancel pending fetch or sync,
73
+ * isFetching: Whether the data is being fetched
73
74
  * }
74
75
  */
75
76
  export function useSupastashData(table, options = {}) {
@@ -78,7 +79,7 @@ export function useSupastashData(table, options = {}) {
78
79
  const unsub = useRef(null);
79
80
  const { dataMap, data, groupedBy, } = useDataState(table);
80
81
  const queueHandler = useEventQueues(table, options, flushIntervalMs);
81
- const { triggerRefresh, trigger, cancel, initialFetchAndSync } = fetchCalls(table, options, hasTriggeredRef);
82
+ const { triggerRefresh, trigger, cancel, initialFetchAndSync, isFetching } = fetchCalls(table, options, hasTriggeredRef);
82
83
  const subKey = useMemo(() => `${table}:${buildFilterString(filter)}`, [table, filter]);
83
84
  const isAnyNullish = useMemo(() => {
84
85
  if (!options.sqlFilter)
@@ -124,5 +125,6 @@ export function useSupastashData(table, options = {}) {
124
125
  trigger,
125
126
  cancel,
126
127
  groupedBy,
127
- }), [data]);
128
+ isFetching,
129
+ }), [data, isFetching]);
128
130
  }
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/hooks/syncEngine/pullFromRemote/index.ts"],"names":[],"mappings":"AAQA;;GAEG;AACH,wBAAsB,cAAc,kBA8BnC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/hooks/syncEngine/pullFromRemote/index.ts"],"names":[],"mappings":"AASA;;GAEG;AACH,wBAAsB,cAAc,kBAoEnC"}
@@ -5,10 +5,13 @@ import log from "../../../utils/logs";
5
5
  import { getAllTables } from "../../../utils/sync/getAllTables";
6
6
  import { runLimitedConcurrency } from "../../../utils/sync/pullFromRemote/runLimitedConcurrency";
7
7
  import { updateLocalDb } from "../../../utils/sync/pullFromRemote/updateLocalDb";
8
+ import { SyncInfoUpdater } from "../../../utils/sync/queryStatus";
8
9
  /**
9
10
  * Pulls the data from the remote database to the local database
10
11
  */
11
12
  export async function pullFromRemote() {
13
+ let numberOfTables = 0;
14
+ let tablesCompleted = 0;
12
15
  try {
13
16
  const tables = await getAllTables();
14
17
  if (!tables) {
@@ -17,19 +20,54 @@ export async function pullFromRemote() {
17
20
  }
18
21
  const excludeTables = getSupastashConfig()?.excludeTables?.pull || [];
19
22
  const tablesToPull = tables.filter((table) => !excludeTables?.includes(table));
23
+ numberOfTables = tablesToPull.length;
24
+ SyncInfoUpdater.setInProgress({
25
+ action: "start",
26
+ type: "pull",
27
+ });
28
+ SyncInfoUpdater.setNumberOfTables({
29
+ amount: numberOfTables,
30
+ type: "pull",
31
+ });
20
32
  const toPull = tablesToPull.map((table) => async () => {
21
33
  try {
34
+ SyncInfoUpdater.markLogStart({
35
+ type: "pull",
36
+ table,
37
+ });
22
38
  const filter = tableFilters.get(table);
23
39
  const onReceiveRecord = syncCalls.get(table)?.pull;
24
40
  await updateLocalDb(table, filter, onReceiveRecord);
41
+ SyncInfoUpdater.markLogSuccess({
42
+ type: "pull",
43
+ table,
44
+ });
25
45
  }
26
46
  catch (e) {
47
+ SyncInfoUpdater.markLogError({
48
+ type: "pull",
49
+ table,
50
+ lastError: e,
51
+ errorCount: 1,
52
+ });
27
53
  log(`[Supastash] pull table failed: ${table} — ${e?.code ?? e?.name ?? e}`);
28
54
  }
55
+ finally {
56
+ tablesCompleted++;
57
+ SyncInfoUpdater.setTablesCompleted({
58
+ amount: tablesCompleted,
59
+ type: "pull",
60
+ });
61
+ }
29
62
  });
30
63
  await runLimitedConcurrency(toPull, 3);
31
64
  }
32
65
  catch (error) {
33
66
  log(`[Supastash] Error pulling from remote: ${error}`);
34
67
  }
68
+ finally {
69
+ SyncInfoUpdater.reset({
70
+ type: "pull",
71
+ });
72
+ }
35
73
  }
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/hooks/syncEngine/pushLocal/index.ts"],"names":[],"mappings":"AAaA;;GAEG;AACH,wBAAsB,aAAa,kBA4DlC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/hooks/syncEngine/pushLocal/index.ts"],"names":[],"mappings":"AAcA;;GAEG;AACH,wBAAsB,aAAa,kBAiGlC"}
@@ -5,6 +5,7 @@ import log from "../../../utils/logs";
5
5
  import { getAllTables } from "../../../utils/sync/getAllTables";
6
6
  import { runLimitedConcurrency } from "../../../utils/sync/pullFromRemote/runLimitedConcurrency";
7
7
  import { pushLocalDataToRemote } from "../../../utils/sync/pushLocal/sendUnsyncedToSupabase";
8
+ import { SyncInfoUpdater } from "../../../utils/sync/queryStatus";
8
9
  let emptyPassCount = 0;
9
10
  let lastEmptyPassAt = 0;
10
11
  const tablePushLock = new Map();
@@ -12,6 +13,8 @@ const tablePushLock = new Map();
12
13
  * Pushes the local data to the remote database
13
14
  */
14
15
  export async function pushLocalData() {
16
+ let tablesCompleted = 0;
17
+ let numberOfTables = 0;
15
18
  try {
16
19
  const tables = await getAllTables();
17
20
  if (!tables) {
@@ -22,8 +25,21 @@ export async function pushLocalData() {
22
25
  return;
23
26
  const excludeTables = getSupastashConfig()?.excludeTables?.push || [];
24
27
  const tablesToPush = tables.filter((table) => !excludeTables?.includes(table));
28
+ numberOfTables = tablesToPush.length;
29
+ SyncInfoUpdater.setInProgress({
30
+ action: "start",
31
+ type: "push",
32
+ });
33
+ SyncInfoUpdater.setNumberOfTables({
34
+ amount: numberOfTables,
35
+ type: "push",
36
+ });
25
37
  const results = [];
26
38
  const jobs = tablesToPush.map((table) => async () => {
39
+ SyncInfoUpdater.setCurrentTable({
40
+ table,
41
+ type: "push",
42
+ });
27
43
  if (tablePushLock.get(table)) {
28
44
  results.push({ table, hadWork: false });
29
45
  return;
@@ -31,15 +47,34 @@ export async function pushLocalData() {
31
47
  tablePushLock.set(table, true);
32
48
  try {
33
49
  const onPush = syncCalls.get(table)?.push;
50
+ SyncInfoUpdater.markLogStart({
51
+ type: "push",
52
+ table,
53
+ });
34
54
  const hadWork = await pushLocalDataToRemote(table, onPush);
35
55
  results.push({ table, hadWork: !!hadWork });
56
+ SyncInfoUpdater.markLogSuccess({
57
+ type: "push",
58
+ table,
59
+ });
36
60
  }
37
61
  catch (e) {
38
62
  const msg = e?.code ?? e?.name ?? String(e);
63
+ SyncInfoUpdater.markLogError({
64
+ type: "push",
65
+ table,
66
+ lastError: e,
67
+ errorCount: 1,
68
+ });
39
69
  results.push({ table, hadWork: false, error: msg });
40
70
  log(`[Supastash] Push table failed: ${table} — ${msg}`);
41
71
  }
42
72
  finally {
73
+ tablesCompleted++;
74
+ SyncInfoUpdater.setTablesCompleted({
75
+ amount: tablesCompleted,
76
+ type: "push",
77
+ });
43
78
  tablePushLock.set(table, false);
44
79
  }
45
80
  });
@@ -63,4 +98,7 @@ export async function pushLocalData() {
63
98
  catch (error) {
64
99
  log(`[Supastash] Error pushing local data to remote database: ${error}`);
65
100
  }
101
+ finally {
102
+ SyncInfoUpdater.reset({ type: "push" });
103
+ }
66
104
  }
@@ -1,18 +1,42 @@
1
+ import { SyncInfo } from "../../types/syncEngine.types";
1
2
  /**
2
- * React hook that returns the current global sync status across all tracked Supastash tables.
3
+ * React hook that provides a **live snapshot of Supastash sync state** across all tracked tables.
3
4
  *
4
- * - Listens for the "updateSyncStatus" event from the event bus.
5
- * - Recomputes sync status whenever the event is emitted (e.g. after CRUD operations).
6
- * - Status can be:
7
- * - "pending" → at least one table has pending sync rows
8
- * - "error" → at least one table has failed sync rows
9
- * - "synced" → all tracked tables are fully synced
5
+ * It listens for two global events:
6
+ * - `"updateSyncStatus"` high-level sync state (`pending`, `error`, or `synced`)
7
+ * - `"updateSyncInfo"` detailed sync progress (tables, logs, counts, etc.)
10
8
  *
11
- * @returns {"pending" | "error" | "synced"} The current sync status
9
+ * Debouncing is used internally to prevent rapid UI re-renders during
10
+ * frequent background sync updates.
12
11
  *
13
- * @example
14
- * const syncStatus = useSupastashSyncStatus();
15
- * if (syncStatus === "pending") showSyncingIndicator();
12
+ * ---
13
+ * **Returned Values**
14
+ * - `syncStatus` `"pending" | "error" | "synced"`
15
+ * - `syncInfo` → `SyncInfo` object containing `pull` and `push` progress
16
+ *
17
+ * ---
18
+ * **Example**
19
+ * ```ts
20
+ * const { syncStatus, syncInfo } = useSupastashSyncStatus(50); // 50ms debounce delay
21
+ *
22
+ * if (syncInfo.pull.inProgress) showPullingIndicator();
23
+ * if (syncInfo.push.inProgress) showPushingIndicator();
24
+ *
25
+ * if (syncStatus === "pending") showSyncingBadge();
26
+ * ```
27
+ *
28
+ * ---
29
+ * **SyncInfo Structure**
30
+ * - pull / push: `SyncInfoItem`
31
+ * - inProgress: boolean
32
+ * - numberOfTables: number
33
+ * - tablesCompleted: number
34
+ * - currentTable: { name, unsyncedDataCount, unsyncedDeletedCount }
35
+ * - lastSyncedAt: number
36
+ * - lastSyncLog: SyncLogEntry[]
16
37
  */
17
- export declare function useSupastashSyncStatus(): "error" | "pending" | "synced";
38
+ export declare function useSupastashSyncStatus(debounceDelay?: number): {
39
+ syncStatus: "error" | "pending" | "synced";
40
+ syncInfo: SyncInfo;
41
+ };
18
42
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/hooks/syncStatus/index.ts"],"names":[],"mappings":"AAIA;;;;;;;;;;;;;;;GAeG;AACH,wBAAgB,sBAAsB,mCAiBrC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/hooks/syncStatus/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,8BAA8B,CAAC;AAOxD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmCG;AACH,wBAAgB,sBAAsB,CAAC,aAAa,SAAK;;;EAqCxD"}
@@ -1,34 +1,75 @@
1
1
  import { useEffect, useState } from "react";
2
2
  import { supastashEventBus } from "../../utils/events/eventBus";
3
- import { getSupastashStatus } from "../../utils/sync/queryStatus";
3
+ import { getSupastashStatus, SyncInfoUpdater, } from "../../utils/sync/queryStatus";
4
4
  /**
5
- * React hook that returns the current global sync status across all tracked Supastash tables.
5
+ * React hook that provides a **live snapshot of Supastash sync state** across all tracked tables.
6
6
  *
7
- * - Listens for the "updateSyncStatus" event from the event bus.
8
- * - Recomputes sync status whenever the event is emitted (e.g. after CRUD operations).
9
- * - Status can be:
10
- * - "pending" → at least one table has pending sync rows
11
- * - "error" → at least one table has failed sync rows
12
- * - "synced" → all tracked tables are fully synced
7
+ * It listens for two global events:
8
+ * - `"updateSyncStatus"` high-level sync state (`pending`, `error`, or `synced`)
9
+ * - `"updateSyncInfo"` detailed sync progress (tables, logs, counts, etc.)
13
10
  *
14
- * @returns {"pending" | "error" | "synced"} The current sync status
11
+ * Debouncing is used internally to prevent rapid UI re-renders during
12
+ * frequent background sync updates.
15
13
  *
16
- * @example
17
- * const syncStatus = useSupastashSyncStatus();
18
- * if (syncStatus === "pending") showSyncingIndicator();
14
+ * ---
15
+ * **Returned Values**
16
+ * - `syncStatus` `"pending" | "error" | "synced"`
17
+ * - `syncInfo` → `SyncInfo` object containing `pull` and `push` progress
18
+ *
19
+ * ---
20
+ * **Example**
21
+ * ```ts
22
+ * const { syncStatus, syncInfo } = useSupastashSyncStatus(50); // 50ms debounce delay
23
+ *
24
+ * if (syncInfo.pull.inProgress) showPullingIndicator();
25
+ * if (syncInfo.push.inProgress) showPushingIndicator();
26
+ *
27
+ * if (syncStatus === "pending") showSyncingBadge();
28
+ * ```
29
+ *
30
+ * ---
31
+ * **SyncInfo Structure**
32
+ * - pull / push: `SyncInfoItem`
33
+ * - inProgress: boolean
34
+ * - numberOfTables: number
35
+ * - tablesCompleted: number
36
+ * - currentTable: { name, unsyncedDataCount, unsyncedDeletedCount }
37
+ * - lastSyncedAt: number
38
+ * - lastSyncLog: SyncLogEntry[]
19
39
  */
20
- export function useSupastashSyncStatus() {
40
+ export function useSupastashSyncStatus(debounceDelay = 40) {
21
41
  const [syncStatus, setSyncStatus] = useState("synced");
42
+ const [syncInfo, setSyncInfo] = useState(() => SyncInfoUpdater.getSnapshot());
22
43
  useEffect(() => {
23
- const refreshSyncStatus = () => {
24
- const status = getSupastashStatus();
25
- setSyncStatus(status);
44
+ const handleStatusUpdate = debounce(() => {
45
+ setSyncStatus(getSupastashStatus());
46
+ }, debounceDelay);
47
+ handleStatusUpdate();
48
+ supastashEventBus.on("updateSyncStatus", handleStatusUpdate);
49
+ return () => {
50
+ supastashEventBus.off("updateSyncStatus", handleStatusUpdate);
51
+ handleStatusUpdate.cancel();
26
52
  };
27
- refreshSyncStatus();
28
- supastashEventBus.on("updateSyncStatus", refreshSyncStatus);
53
+ }, []);
54
+ useEffect(() => {
55
+ const handleInfoUpdate = debounce((next) => {
56
+ setSyncInfo(next);
57
+ }, debounceDelay);
58
+ setSyncInfo(SyncInfoUpdater.getSnapshot());
59
+ supastashEventBus.on("updateSyncInfo", handleInfoUpdate);
29
60
  return () => {
30
- supastashEventBus.off("updateSyncStatus", refreshSyncStatus);
61
+ supastashEventBus.off("updateSyncInfo", handleInfoUpdate);
62
+ handleInfoUpdate.cancel();
31
63
  };
32
64
  }, []);
33
- return syncStatus;
65
+ return { syncStatus, syncInfo };
66
+ }
67
+ function debounce(fn, delay = 40) {
68
+ let timeout;
69
+ const debounced = (...args) => {
70
+ clearTimeout(timeout);
71
+ timeout = setTimeout(() => fn(...args), delay);
72
+ };
73
+ debounced.cancel = () => clearTimeout(timeout);
74
+ return debounced;
34
75
  }
@@ -1,3 +1,4 @@
1
+ import { SyncInfo, SyncLogEntry } from "../types/syncEngine.types";
1
2
  /**
2
3
  * A map tracking sync status for each row in each table.
3
4
  *
@@ -21,4 +22,6 @@
21
22
  * - Value = sync status of that row
22
23
  */
23
24
  export declare const syncStatusMap: Map<string, Map<string, "error" | "pending" | "success">>;
25
+ export declare const syncInfo: SyncInfo;
26
+ export declare const DEFAULT_SYNC_LOG_ENTRY: SyncLogEntry;
24
27
  //# sourceMappingURL=syncStatus.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"syncStatus.d.ts","sourceRoot":"","sources":["../../src/store/syncStatus.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,eAAO,MAAM,aAAa,2DAGvB,CAAC"}
1
+ {"version":3,"file":"syncStatus.d.ts","sourceRoot":"","sources":["../../src/store/syncStatus.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,2BAA2B,CAAC;AAEnE;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,eAAO,MAAM,aAAa,2DAGvB,CAAC;AAEJ,eAAO,MAAM,QAAQ,EAAE,QAyBtB,CAAC;AAEF,eAAO,MAAM,sBAAsB,EAAE,YAapC,CAAC"}
@@ -21,3 +21,43 @@
21
21
  * - Value = sync status of that row
22
22
  */
23
23
  export const syncStatusMap = new Map();
24
+ export const syncInfo = {
25
+ pull: {
26
+ inProgress: false,
27
+ numberOfTables: 0,
28
+ tablesCompleted: 0,
29
+ currentTable: {
30
+ name: "",
31
+ unsyncedDataCount: 0,
32
+ unsyncedDeletedCount: 0,
33
+ },
34
+ lastSyncedAt: 0,
35
+ lastSyncLog: [],
36
+ },
37
+ push: {
38
+ inProgress: false,
39
+ numberOfTables: 0,
40
+ tablesCompleted: 0,
41
+ currentTable: {
42
+ name: "",
43
+ unsyncedDataCount: 0,
44
+ unsyncedDeletedCount: 0,
45
+ },
46
+ lastSyncedAt: 0,
47
+ lastSyncLog: [],
48
+ },
49
+ };
50
+ export const DEFAULT_SYNC_LOG_ENTRY = {
51
+ table: "",
52
+ filterKey: "",
53
+ filterJson: [],
54
+ action: "push",
55
+ success: true,
56
+ errorCount: 0,
57
+ unsyncedDataCount: 0,
58
+ unsyncedDeletedCount: 0,
59
+ rowsFailed: 0,
60
+ lastError: null,
61
+ startTime: 0,
62
+ endTime: 0,
63
+ };
@@ -235,13 +235,43 @@ export interface RealtimeOptions<R = any> {
235
235
  }
236
236
 
237
237
  export type SupastashDataResult<R = any> = {
238
+ /**
239
+ * Array of records.
240
+ * @example
241
+ * data: [user1, user2],
242
+ */
238
243
  data: Array<R>;
244
+ /**
245
+ * Map of records by ID.
246
+ * @example
247
+ * dataMap: new Map([[1, user1], [2, user2]]),
248
+ */
239
249
  dataMap: Map<string, R>;
250
+ /**
251
+ * Trigger the fetch or sync.
252
+ */
240
253
  trigger: () => void;
254
+ /**
255
+ * Cancel the pending fetch or sync.
256
+ */
241
257
  cancel: () => void;
258
+ /**
259
+ * Optional maps grouped by field.
260
+ * @example
261
+ * groupedBy: {
262
+ * userId: new Map([[1, [user1, user2]]]),
263
+ * groupId: new Map([[1, [group1, group2]]]),
264
+ * }
265
+ */
242
266
  groupedBy?: {
243
267
  [K in keyof R]: Map<R[K], Array<R>>;
244
268
  };
269
+ /**
270
+ * Whether the data is being fetched.
271
+ * @example
272
+ * isFetching: true
273
+ */
274
+ isFetching: boolean;
245
275
  };
246
276
 
247
277
  export type SupastashDataHook<R = any> = (
@@ -1,3 +1,5 @@
1
+ import { SupastashFilter } from "./realtimeData.types";
2
+
1
3
  export type SyncResult = {
2
4
  success: string[]; // IDs that were successfully upserted
3
5
  skipped: { id: string; reason: string }[]; // IDs that were skipped with reason
@@ -40,3 +42,80 @@ export type PublicScope =
40
42
  | "last_synced_at"
41
43
  | "last_created_at"
42
44
  | "last_deleted_at";
45
+
46
+ export type CurrentTableInfo = {
47
+ /** Table currently being synced */
48
+ name: string;
49
+
50
+ /** Number of local records pending upload or download */
51
+ unsyncedDataCount: number;
52
+
53
+ /** Number of soft-deleted records pending sync */
54
+ unsyncedDeletedCount: number;
55
+ };
56
+
57
+ export type SyncLogEntry = {
58
+ /** Table this log entry belongs to */
59
+ table: string;
60
+
61
+ /** Optional filter key used for pull operations */
62
+ filterKey?: string;
63
+
64
+ /** JSON array of filters used for pull operations */
65
+ filterJson?: SupastashFilter[];
66
+
67
+ /** Sync direction for this log entry ("push" or "pull") */
68
+ action: "push" | "pull";
69
+
70
+ /** Whether the sync operation completed successfully */
71
+ success: boolean;
72
+
73
+ /** Number of errors encountered during the operation */
74
+ errorCount: number;
75
+
76
+ /** Last recorded error (if any) */
77
+ lastError: Error | null;
78
+
79
+ /** Number of unsynced data rows before this operation */
80
+ unsyncedDataCount: number;
81
+
82
+ /** Number of unsynced deleted rows before this operation */
83
+ unsyncedDeletedCount: number;
84
+
85
+ /** Timestamp (ms) when the operation started */
86
+ startTime: number;
87
+
88
+ /** Timestamp (ms) when the operation ended */
89
+ endTime: number;
90
+
91
+ /** Number of rows that failed to push (push only) */
92
+ rowsFailed: number;
93
+ };
94
+
95
+ export type SyncInfoItem = {
96
+ /** Whether a sync process is currently active */
97
+ inProgress: boolean;
98
+
99
+ /** Total number of tables scheduled for this sync cycle */
100
+ numberOfTables: number;
101
+
102
+ /** Number of tables successfully completed so far */
103
+ tablesCompleted: number;
104
+
105
+ /** Details of the table currently being processed */
106
+ currentTable: CurrentTableInfo;
107
+
108
+ /** Timestamp (ms) of the most recent completed sync */
109
+ lastSyncedAt: number;
110
+
111
+ /** Collection of detailed logs for recent table syncs */
112
+ lastSyncLog: SyncLogEntry[];
113
+ };
114
+
115
+ export type SyncInfo = {
116
+ /** Sync information for pull operations (server → local) */
117
+ pull: SyncInfoItem;
118
+
119
+ /** Sync information for push operations (local → server) */
120
+ push: SyncInfoItem;
121
+ };
@@ -1 +1 @@
1
- {"version":3,"file":"pullData.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/pullData.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AACzD,OAAO,EAAE,cAAc,EAAE,MAAM,mCAAmC,CAAC;AAOnE;;;;GAIG;AACH,wBAAsB,QAAQ,CAC5B,KAAK,EAAE,MAAM,EACb,OAAO,CAAC,EAAE,cAAc,EAAE,GACzB,OAAO,CAAC;IAAE,IAAI,EAAE,WAAW,EAAE,CAAC;IAAC,UAAU,EAAE,MAAM,EAAE,CAAA;CAAE,GAAG,IAAI,CAAC,CA8D/D"}
1
+ {"version":3,"file":"pullData.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/pullData.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AACzD,OAAO,EAAE,cAAc,EAAE,MAAM,mCAAmC,CAAC;AASnE;;;;GAIG;AACH,wBAAsB,QAAQ,CAC5B,KAAK,EAAE,MAAM,EACb,OAAO,CAAC,EAAE,cAAc,EAAE,GACzB,OAAO,CAAC;IAAE,IAAI,EAAE,WAAW,EAAE,CAAC;IAAC,UAAU,EAAE,MAAM,EAAE,CAAA;CAAE,GAAG,IAAI,CAAC,CA2E/D"}
@@ -2,6 +2,8 @@ import { getSupastashConfig } from "../../../core/config";
2
2
  import { getSupastashDb } from "../../../db/dbInitializer";
3
3
  import log, { logWarn } from "../../logs";
4
4
  import { supabaseClientErr } from "../../supabaseClientErr";
5
+ import { SyncInfoUpdater } from "../queryStatus";
6
+ import { computeFilterKey } from "../status/filterKey";
5
7
  import { selectAndAddAMillisecond } from "../status/repo";
6
8
  import { setSupastashSyncStatus } from "../status/services";
7
9
  import { getMaxDate, logNoUpdates, pageThrough } from "./helpers";
@@ -14,6 +16,18 @@ export async function pullData(table, filters) {
14
16
  const supabase = getSupastashConfig().supabaseClient;
15
17
  if (!supabase)
16
18
  throw new Error(`No supabase client found: ${supabaseClientErr}`);
19
+ SyncInfoUpdater.setLastSyncLog({
20
+ key: "filterJson",
21
+ value: filters ?? [],
22
+ type: "pull",
23
+ table,
24
+ });
25
+ SyncInfoUpdater.setLastSyncLog({
26
+ key: "filterKey",
27
+ value: (await computeFilterKey(filters, "global")) ?? "",
28
+ type: "pull",
29
+ table,
30
+ });
17
31
  const db = await getSupastashDb();
18
32
  const { last_created_at, last_synced_at, last_deleted_at } = await selectAndAddAMillisecond(db, table, filters);
19
33
  const [createdRows, updatedRows, deletedRows] = await Promise.all([
@@ -1 +1 @@
1
- {"version":3,"file":"updateLocalDb.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/updateLocalDb.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,cAAc,EAAE,MAAM,mCAAmC,CAAC;AAcnE;;;GAGG;AACH,wBAAsB,aAAa,CACjC,KAAK,EAAE,MAAM,EACb,OAAO,CAAC,EAAE,cAAc,EAAE,EAC1B,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,OAAO,CAAC,IAAI,CAAC,iBA6EhD;AAID;;;;;GAKG;AACH,wBAAsB,UAAU,CAC9B,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,GAAG,EACX,SAAS,CAAC,EAAE,OAAO,iBAoEpB"}
1
+ {"version":3,"file":"updateLocalDb.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/updateLocalDb.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,cAAc,EAAE,MAAM,mCAAmC,CAAC;AAenE;;;GAGG;AACH,wBAAsB,aAAa,CACjC,KAAK,EAAE,MAAM,EACb,OAAO,CAAC,EAAE,cAAc,EAAE,EAC1B,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,OAAO,CAAC,IAAI,CAAC,iBAyFhD;AAID;;;;;GAKG;AACH,wBAAsB,UAAU,CAC9B,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,GAAG,EACX,SAAS,CAAC,EAAE,OAAO,iBAoEpB"}
@@ -4,6 +4,7 @@ import { isOnline } from "../../connection";
4
4
  import { getTableSchema } from "../../getTableSchema";
5
5
  import log, { logError, logWarn } from "../../logs";
6
6
  import { refreshScreen } from "../../refreshScreenCalls";
7
+ import { SyncInfoUpdater } from "../queryStatus";
7
8
  import { updateLocalSyncedAt } from "../status/syncUpdate";
8
9
  import { pullData } from "./pullData";
9
10
  import { stringifyValue } from "./stringifyFields";
@@ -27,6 +28,16 @@ export async function updateLocalDb(table, filters, onReceiveData) {
27
28
  const data = dataResult?.data;
28
29
  const deletedIds = dataResult?.deletedIds;
29
30
  const deletedIdSet = new Set(deletedIds ?? []);
31
+ SyncInfoUpdater.setUnsyncedDataCount({
32
+ amount: data?.length ?? 0,
33
+ type: "pull",
34
+ table,
35
+ });
36
+ SyncInfoUpdater.setUnsyncedDeletedCount({
37
+ amount: deletedIds?.length ?? 0,
38
+ type: "pull",
39
+ table,
40
+ });
30
41
  const refreshNeeded = !!data?.length || !!deletedIds?.length;
31
42
  // Delete records that are no longer in the remote data
32
43
  if (deletedIds && deletedIds.length > 0) {
@@ -80,6 +91,7 @@ export async function updateLocalDb(table, filters, onReceiveData) {
80
91
  }
81
92
  catch (error) {
82
93
  logError(`[Supastash] Error updating local db for ${table}`, error);
94
+ throw error;
83
95
  }
84
96
  finally {
85
97
  isInSync.delete(table);
@@ -1 +1 @@
1
- {"version":3,"file":"sendUnsyncedToSupabase.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pushLocal/sendUnsyncedToSupabase.ts"],"names":[],"mappings":"AASA;;;GAGG;AACH,wBAAsB,qBAAqB,CACzC,KAAK,EAAE,MAAM,EACb,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,OAAO,CAAC,EACrD,MAAM,CAAC,EAAE,MAAM,EAAE,gCAuClB"}
1
+ {"version":3,"file":"sendUnsyncedToSupabase.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pushLocal/sendUnsyncedToSupabase.ts"],"names":[],"mappings":"AASA;;;GAGG;AACH,wBAAsB,qBAAqB,CACzC,KAAK,EAAE,MAAM,EACb,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,OAAO,CAAC,EACrD,MAAM,CAAC,EAAE,MAAM,EAAE,gCA8ClB"}
@@ -1,6 +1,6 @@
1
1
  import { isOnline } from "../../connection";
2
- import { logError } from "../../logs";
3
2
  import { refreshScreen } from "../../refreshScreenCalls";
3
+ import { SyncInfoUpdater } from "../queryStatus";
4
4
  import { deleteData } from "./deleteChunks";
5
5
  import { getAllDeletedData, getAllUnsyncedData } from "./getAllUnsyncedData";
6
6
  import { uploadData } from "./uploadChunk";
@@ -18,6 +18,16 @@ export async function pushLocalDataToRemote(table, onPushToRemote, noSync) {
18
18
  return false;
19
19
  const data = await getAllUnsyncedData(table);
20
20
  const deletedData = await getAllDeletedData(table);
21
+ SyncInfoUpdater.setUnsyncedDataCount({
22
+ amount: data?.length ?? 0,
23
+ type: "push",
24
+ table,
25
+ });
26
+ SyncInfoUpdater.setUnsyncedDeletedCount({
27
+ amount: deletedData?.length ?? 0,
28
+ type: "push",
29
+ table,
30
+ });
21
31
  const hasData = !!data?.length;
22
32
  const hasDeletes = !!deletedData?.length;
23
33
  if (!hasData && !hasDeletes) {
@@ -38,8 +48,7 @@ export async function pushLocalDataToRemote(table, onPushToRemote, noSync) {
38
48
  return didWork;
39
49
  }
40
50
  catch (error) {
41
- logError(`[Supastash] Error pushing local data to remote for ${table}`, error);
42
- return false;
51
+ throw error;
43
52
  }
44
53
  finally {
45
54
  isInSync.delete(table);
@@ -1 +1 @@
1
- {"version":3,"file":"uploadChunk.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pushLocal/uploadChunk.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AA+IzD;;;;GAIG;AACH,wBAAsB,UAAU,CAC9B,KAAK,EAAE,MAAM,EACb,eAAe,EAAE,WAAW,EAAE,EAC9B,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,OAAO,CAAC,iBActD"}
1
+ {"version":3,"file":"uploadChunk.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pushLocal/uploadChunk.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AA6JzD;;;;GAIG;AACH,wBAAsB,UAAU,CAC9B,KAAK,EAAE,MAAM,EACb,eAAe,EAAE,WAAW,EAAE,EAC9B,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,OAAO,CAAC,iBActD"}
@@ -4,7 +4,7 @@ import { isOnline } from "../../../utils/connection";
4
4
  import { normalizeForSupabase } from "../../getSafeValues";
5
5
  import log from "../../logs";
6
6
  import { supabaseClientErr } from "../../supabaseClientErr";
7
- import { setQueryStatus } from "../queryStatus";
7
+ import { setQueryStatus, SyncInfoUpdater } from "../queryStatus";
8
8
  import { enforceTimestamps } from "./normalize";
9
9
  import { batchUpsert, fetchRemoteHeadsChunked, filterRowsByUpdatedAt, handleRowFailure, markSynced, singleUpsert, } from "./uploadHelpers";
10
10
  /**
@@ -26,6 +26,8 @@ async function uploadChunk(table, chunk, onPushToRemote) {
26
26
  const online = await isOnline();
27
27
  if (!online)
28
28
  return;
29
+ let errorCount = 0;
30
+ let lastError = null;
29
31
  const ids = chunk.map((row) => row.id);
30
32
  // Fetch remote data for the current chunk
31
33
  const remoteIds = await fetchRemoteHeadsChunked(table, ids, supabase);
@@ -88,6 +90,8 @@ async function uploadChunk(table, chunk, onPushToRemote) {
88
90
  syncedNow.push(row.id);
89
91
  continue;
90
92
  }
93
+ errorCount++;
94
+ lastError = res.error;
91
95
  const decision = await handleRowFailure(config, table, row, res.error, supabase);
92
96
  if (decision === "DROP" || decision === "REPLACED") {
93
97
  continue;
@@ -107,6 +111,15 @@ async function uploadChunk(table, chunk, onPushToRemote) {
107
111
  await new Promise((r) => setTimeout(r, delay));
108
112
  pending = keep;
109
113
  }
114
+ if (pending.length > 0) {
115
+ SyncInfoUpdater.markLogError({
116
+ type: "push",
117
+ table,
118
+ lastError: lastError ?? new Error("Unknown error"),
119
+ errorCount: errorCount ?? 0,
120
+ rowsFailed: pending.length,
121
+ });
122
+ }
110
123
  // Gave up this pass — rows left in `pending` will be retried by outer scheduler
111
124
  for (const r of pending)
112
125
  setQueryStatus(r.id, table, "error");
@@ -1,3 +1,4 @@
1
+ import { SyncInfo, SyncLogEntry } from "../../types/syncEngine.types";
1
2
  /**
2
3
  * Sets the sync status of a query (row) in a specific table.
3
4
  * Automatically removes the entry if the status is 'success'.
@@ -37,4 +38,59 @@ export declare function getTableStatus(table: string): {
37
38
  * "synced" if all tables are fully synced
38
39
  */
39
40
  export declare function getSupastashStatus(): "error" | "pending" | "synced";
41
+ declare function snapshot(): SyncInfo;
42
+ export declare const SyncInfoUpdater: {
43
+ setInProgress: ({ action, type, }: {
44
+ action: "start" | "stop";
45
+ type: "pull" | "push";
46
+ }) => void;
47
+ setTablesCompleted: ({ amount, type, }: {
48
+ amount: number;
49
+ type: "pull" | "push";
50
+ }) => void;
51
+ setNumberOfTables: ({ amount, type, }: {
52
+ amount: number;
53
+ type: "pull" | "push";
54
+ }) => void;
55
+ setCurrentTable: ({ table, type, }: {
56
+ table: string;
57
+ type: "pull" | "push";
58
+ }) => void;
59
+ setLastSyncLog: ({ key, value, type, table, }: {
60
+ key: keyof SyncLogEntry;
61
+ value: SyncLogEntry[keyof SyncLogEntry];
62
+ type: "pull" | "push";
63
+ table: string;
64
+ }) => void;
65
+ setUnsyncedDataCount: ({ amount, type, table, }: {
66
+ amount: number;
67
+ type: "pull" | "push";
68
+ table: string;
69
+ }) => void;
70
+ setUnsyncedDeletedCount: ({ amount, type, table, }: {
71
+ amount: number;
72
+ type: "pull" | "push";
73
+ table: string;
74
+ }) => void;
75
+ markLogStart: ({ type, table }: {
76
+ type: "pull" | "push";
77
+ table: string;
78
+ }) => void;
79
+ markLogSuccess: ({ type, table, }: {
80
+ type: "pull" | "push";
81
+ table: string;
82
+ }) => void;
83
+ markLogError: ({ type, table, lastError, errorCount, rowsFailed, }: {
84
+ type: "pull" | "push";
85
+ table: string;
86
+ lastError: Error;
87
+ errorCount: number;
88
+ rowsFailed?: number;
89
+ }) => void;
90
+ reset: ({ type }: {
91
+ type: "pull" | "push";
92
+ }) => void;
93
+ getSnapshot: typeof snapshot;
94
+ };
95
+ export {};
40
96
  //# sourceMappingURL=queryStatus.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"queryStatus.d.ts","sourceRoot":"","sources":["../../../src/utils/sync/queryStatus.ts"],"names":[],"mappings":"AAGA;;;;;;;GAOG;AACH,wBAAgB,cAAc,CAC5B,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,SAAS,GAAG,SAAS,GAAG,OAAO,QAYxC;AAED;;;;GAIG;AACH,wBAAsB,oBAAoB,CAAC,KAAK,EAAE,MAAM,iBAgBvD;AAED;;;;;;;;;GASG;AACH,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM;;;;;EAwB3C;AAED;;;;;;GAMG;AACH,wBAAgB,kBAAkB,mCAcjC"}
1
+ {"version":3,"file":"queryStatus.d.ts","sourceRoot":"","sources":["../../../src/utils/sync/queryStatus.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,8BAA8B,CAAC;AAGtE;;;;;;;GAOG;AACH,wBAAgB,cAAc,CAC5B,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,SAAS,GAAG,SAAS,GAAG,OAAO,QAYxC;AAED;;;;GAIG;AACH,wBAAsB,oBAAoB,CAAC,KAAK,EAAE,MAAM,iBAgBvD;AAED;;;;;;;;;GASG;AACH,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM;;;;;EAwB3C;AAED;;;;;;GAMG;AACH,wBAAgB,kBAAkB,mCAcjC;AAID,iBAAS,QAAQ,IAAI,QAAQ,CAE5B;AAKD,eAAO,MAAM,eAAe;uCAIvB;QACD,MAAM,EAAE,OAAO,GAAG,MAAM,CAAC;QACzB,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;KACvB;4CAUE;QACD,MAAM,EAAE,MAAM,CAAC;QACf,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;KACvB;2CAUE;QACD,MAAM,EAAE,MAAM,CAAC;QACf,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;KACvB;wCAUE;QACD,KAAK,EAAE,MAAM,CAAC;QACd,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;KACvB;mDAgBE;QACD,GAAG,EAAE,MAAM,YAAY,CAAC;QACxB,KAAK,EAAE,YAAY,CAAC,MAAM,YAAY,CAAC,CAAC;QACxC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;QACtB,KAAK,EAAE,MAAM,CAAC;KACf;qDAqBE;QACD,MAAM,EAAE,MAAM,CAAC;QACf,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;QACtB,KAAK,EAAE,MAAM,CAAC;KACf;wDAiBE;QACD,MAAM,EAAE,MAAM,CAAC;QACf,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;QACtB,KAAK,EAAE,MAAM,CAAC;KACf;oCAc+B;QAAE,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE;uCAWrE;QACD,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;QACtB,KAAK,EAAE,MAAM,CAAC;KACf;wEAqBE;QACD,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;QACtB,KAAK,EAAE,MAAM,CAAC;QACd,SAAS,EAAE,KAAK,CAAC;QACjB,UAAU,EAAE,MAAM,CAAC;QACnB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB;sBAiCiB;QAAE,IAAI,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE;;CAe5C,CAAC"}
@@ -1,5 +1,6 @@
1
1
  import { getSupastashDb } from "../../db/dbInitializer";
2
- import { syncStatusMap } from "../../store/syncStatus";
2
+ import { DEFAULT_SYNC_LOG_ENTRY, syncInfo, syncStatusMap, } from "../../store/syncStatus";
3
+ import { supastashEventBus } from "../events/eventBus";
3
4
  /**
4
5
  * Sets the sync status of a query (row) in a specific table.
5
6
  * Automatically removes the entry if the status is 'success'.
@@ -91,3 +92,148 @@ export function getSupastashStatus() {
91
92
  }
92
93
  return "synced";
93
94
  }
95
+ let storeSyncInfo = structuredClone(syncInfo);
96
+ function snapshot() {
97
+ return structuredClone(storeSyncInfo);
98
+ }
99
+ function emit() {
100
+ supastashEventBus.emit("updateSyncInfo", snapshot());
101
+ }
102
+ export const SyncInfoUpdater = {
103
+ setInProgress: ({ action, type, }) => {
104
+ const next = structuredClone(storeSyncInfo);
105
+ next[type].inProgress = action === "start";
106
+ storeSyncInfo = next;
107
+ emit();
108
+ },
109
+ setTablesCompleted: ({ amount, type, }) => {
110
+ const next = structuredClone(storeSyncInfo);
111
+ next[type].tablesCompleted = amount;
112
+ storeSyncInfo = next;
113
+ emit();
114
+ },
115
+ setNumberOfTables: ({ amount, type, }) => {
116
+ const next = structuredClone(storeSyncInfo);
117
+ next[type].numberOfTables = amount;
118
+ storeSyncInfo = next;
119
+ emit();
120
+ },
121
+ setCurrentTable: ({ table, type, }) => {
122
+ const next = structuredClone(storeSyncInfo);
123
+ next[type].currentTable = {
124
+ name: table,
125
+ unsyncedDataCount: 0,
126
+ unsyncedDeletedCount: 0,
127
+ };
128
+ storeSyncInfo = next;
129
+ emit();
130
+ },
131
+ setLastSyncLog: ({ key, value, type, table, }) => {
132
+ const next = structuredClone(storeSyncInfo);
133
+ const arr = next[type].lastSyncLog;
134
+ const row = arr.find((l) => l.table === table);
135
+ if (!row) {
136
+ arr.push({
137
+ ...DEFAULT_SYNC_LOG_ENTRY,
138
+ table,
139
+ [key]: value,
140
+ });
141
+ }
142
+ else {
143
+ row[key] = value;
144
+ }
145
+ storeSyncInfo = next;
146
+ emit();
147
+ },
148
+ setUnsyncedDataCount: ({ amount, type, table, }) => {
149
+ const next = structuredClone(storeSyncInfo);
150
+ next[type].currentTable.unsyncedDataCount = amount;
151
+ SyncInfoUpdater.setLastSyncLog({
152
+ type,
153
+ table,
154
+ key: "unsyncedDataCount",
155
+ value: amount,
156
+ });
157
+ storeSyncInfo = next;
158
+ emit();
159
+ },
160
+ setUnsyncedDeletedCount: ({ amount, type, table, }) => {
161
+ const next = structuredClone(storeSyncInfo);
162
+ next[type].currentTable.unsyncedDeletedCount = amount;
163
+ SyncInfoUpdater.setLastSyncLog({
164
+ type,
165
+ table,
166
+ key: "unsyncedDeletedCount",
167
+ value: amount,
168
+ });
169
+ storeSyncInfo = next;
170
+ emit();
171
+ },
172
+ // convenience helpers (optional)
173
+ markLogStart: ({ type, table }) => SyncInfoUpdater.setLastSyncLog({
174
+ type,
175
+ table,
176
+ key: "startTime",
177
+ value: Date.now(),
178
+ }),
179
+ markLogSuccess: ({ type, table, }) => {
180
+ SyncInfoUpdater.setLastSyncLog({
181
+ type,
182
+ table,
183
+ key: "success",
184
+ value: true,
185
+ });
186
+ SyncInfoUpdater.setLastSyncLog({
187
+ type,
188
+ table,
189
+ key: "endTime",
190
+ value: Date.now(),
191
+ });
192
+ },
193
+ markLogError: ({ type, table, lastError, errorCount, rowsFailed = 0, }) => {
194
+ SyncInfoUpdater.setLastSyncLog({
195
+ type,
196
+ table,
197
+ key: "success",
198
+ value: false,
199
+ });
200
+ SyncInfoUpdater.setLastSyncLog({
201
+ type,
202
+ table,
203
+ key: "lastError",
204
+ value: lastError,
205
+ });
206
+ SyncInfoUpdater.setLastSyncLog({
207
+ type,
208
+ table,
209
+ key: "errorCount",
210
+ value: errorCount,
211
+ });
212
+ SyncInfoUpdater.setLastSyncLog({
213
+ type,
214
+ table,
215
+ key: "endTime",
216
+ value: Date.now(),
217
+ });
218
+ SyncInfoUpdater.setLastSyncLog({
219
+ type,
220
+ table,
221
+ key: "rowsFailed",
222
+ value: rowsFailed ?? 0,
223
+ });
224
+ },
225
+ reset: ({ type }) => {
226
+ const next = structuredClone(storeSyncInfo);
227
+ next[type] = {
228
+ ...next[type],
229
+ inProgress: false,
230
+ numberOfTables: 0,
231
+ tablesCompleted: 0,
232
+ currentTable: { name: "", unsyncedDataCount: 0, unsyncedDeletedCount: 0 },
233
+ lastSyncedAt: Date.now(),
234
+ };
235
+ storeSyncInfo = next;
236
+ emit();
237
+ },
238
+ getSnapshot: snapshot,
239
+ };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "supastash",
3
- "version": "0.1.43",
3
+ "version": "0.1.45",
4
4
  "main": "dist/index.js",
5
5
  "types": "dist/index.d.ts",
6
6
  "type": "module",