supastash 0.1.30 → 0.1.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32) hide show
  1. package/README.md +0 -9
  2. package/dist/core/schemaManager/index.d.ts.map +1 -1
  3. package/dist/core/schemaManager/index.js +5 -5
  4. package/dist/hooks/supastashLogic.d.ts.map +1 -1
  5. package/dist/hooks/supastashLogic.js +2 -0
  6. package/dist/types/schemaManager.types.d.ts +1 -0
  7. package/dist/types/supastashConfig.types.d.ts +3 -2
  8. package/dist/utils/createIndexes.d.ts +9 -0
  9. package/dist/utils/createIndexes.d.ts.map +1 -0
  10. package/dist/utils/createIndexes.js +81 -0
  11. package/dist/utils/query/helpers/localDb/insertMany.d.ts +10 -0
  12. package/dist/utils/query/helpers/localDb/insertMany.d.ts.map +1 -0
  13. package/dist/utils/query/helpers/localDb/insertMany.js +127 -0
  14. package/dist/utils/query/helpers/localDb/upsertMany.d.ts +13 -0
  15. package/dist/utils/query/helpers/localDb/upsertMany.d.ts.map +1 -0
  16. package/dist/utils/query/helpers/localDb/upsertMany.js +178 -0
  17. package/dist/utils/query/localDbQuery/insert.d.ts.map +1 -1
  18. package/dist/utils/query/localDbQuery/insert.js +7 -36
  19. package/dist/utils/query/localDbQuery/upsert.d.ts.map +1 -1
  20. package/dist/utils/query/localDbQuery/upsert.js +10 -74
  21. package/dist/utils/query/remoteQuery/supabaseQuery.js +1 -1
  22. package/dist/utils/sync/pullFromRemote/updateLocalDb.d.ts.map +1 -1
  23. package/dist/utils/sync/pullFromRemote/updateLocalDb.js +29 -13
  24. package/dist/utils/sync/pushLocal/uploadChunk.d.ts.map +1 -1
  25. package/dist/utils/sync/pushLocal/uploadChunk.js +1 -3
  26. package/dist/utils/syncStatus.d.ts +2 -1
  27. package/dist/utils/syncStatus.d.ts.map +1 -1
  28. package/dist/utils/syncStatus.js +12 -2
  29. package/dist/utils/syncUpdate.d.ts +1 -1
  30. package/dist/utils/syncUpdate.d.ts.map +1 -1
  31. package/dist/utils/syncUpdate.js +3 -5
  32. package/package.json +9 -4
package/README.md CHANGED
@@ -235,15 +235,6 @@ src/
235
235
 
236
236
  ---
237
237
 
238
- ## 🧪 Dev & Testing
239
-
240
- ```bash
241
- yarn test # Run tests
242
- yarn dev # Dev mode (watch)
243
- ```
244
-
245
- ---
246
-
247
238
  ## 🔧 API Docs
248
239
 
249
240
  - [`configureSupastash()`](https://0xzekea.github.io/supastash/docs/configuration)
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/core/schemaManager/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,qBAAqB,EAAE,MAAM,iCAAiC,CAAC;AAIxE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAsB,iBAAiB,CACrC,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,qBAAqB,EAC7B,oBAAoB,UAAQ,iBA+E7B"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/core/schemaManager/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,qBAAqB,EAAE,MAAM,iCAAiC,CAAC;AAIxE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAsB,iBAAiB,CACrC,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,qBAAqB,EAC7B,oBAAoB,UAAQ,iBAoF7B"}
@@ -38,10 +38,10 @@ export async function defineLocalSchema(tableName, schema, deletePreviousSchema
38
38
  throw new Error(`'id' of type UUID column is required for table ${tableName}`);
39
39
  }
40
40
  const db = await getSupastashDb();
41
- const { __indices, ...columnSchema } = schema;
42
- const indexNotInSchema = __indices?.some((i) => !columnSchema[i]);
43
- if (__indices && indexNotInSchema) {
44
- throw new Error(`Index ${indexNotInSchema} not found in schema. Please ensure all indices are defined in the schema.`);
41
+ const { __indices, __constraints, ...columnSchema } = schema;
42
+ const indexNotInSchema = __indices?.filter((i) => !columnSchema[i]) ?? [];
43
+ if (indexNotInSchema.length > 0) {
44
+ throw new Error(`Index columns ${indexNotInSchema.join(", ")} not found in schema. Please ensure all columns are defined in the schema.`);
45
45
  }
46
46
  // Ensure required columns
47
47
  const safeSchema = {
@@ -54,7 +54,7 @@ export async function defineLocalSchema(tableName, schema, deletePreviousSchema
54
54
  // Build column definitions
55
55
  const schemaParts = Object.entries(safeSchema).map(([key, value]) => `${key} ${value}`);
56
56
  const schemaString = schemaParts.join(", ");
57
- const sql = `CREATE TABLE IF NOT EXISTS ${tableName} (${schemaString});`;
57
+ const sql = `CREATE TABLE IF NOT EXISTS ${tableName} (${schemaString}) ${__constraints ? ` ${__constraints}` : ""};`;
58
58
  if (deletePreviousSchema) {
59
59
  const dropSql = `DROP TABLE IF EXISTS ${tableName}`;
60
60
  const clearSyncStatusSql = `DELETE FROM supastash_sync_status WHERE table_name = '${tableName}'`;
@@ -1 +1 @@
1
- {"version":3,"file":"supastashLogic.d.ts","sourceRoot":"","sources":["../../src/hooks/supastashLogic.ts"],"names":[],"mappings":"AAUA,OAAO,EAAE,mBAAmB,EAAE,MAAM,gCAAgC,CAAC;AAQrE;;;;;;;;;;;;;;;GAeG;AACH,wBAAgB,YAAY,CAAC,IAAI,GAAE,OAAe,GAAG,mBAAmB,CA2EvE"}
1
+ {"version":3,"file":"supastashLogic.d.ts","sourceRoot":"","sources":["../../src/hooks/supastashLogic.ts"],"names":[],"mappings":"AAUA,OAAO,EAAE,mBAAmB,EAAE,MAAM,gCAAgC,CAAC;AASrE;;;;;;;;;;;;;;;GAeG;AACH,wBAAgB,YAAY,CAAC,IAAI,GAAE,OAAe,GAAG,mBAAmB,CA4EvE"}
@@ -4,6 +4,7 @@ import { supastashDbErrorMsg } from "../db/dbErrorMsg";
4
4
  import { useSyncEngine } from "../hooks/syncEngine";
5
5
  import { localCache } from "../store/localCache";
6
6
  import { filterTracker, tableFilters, tableFiltersUsed, } from "../store/tableFilters";
7
+ import { createIdIndexes } from "../utils/createIndexes";
7
8
  import { logError, logWarn } from "../utils/logs";
8
9
  import { createDeletedStatusTable, createSyncStatusTable, } from "../utils/schema/createSyncStatus";
9
10
  import { supabaseClientErr } from "../utils/supabaseClientErr";
@@ -54,6 +55,7 @@ export function useSupastash(lazy = false) {
54
55
  // Create supastash metadata tables
55
56
  await createSyncStatusTable();
56
57
  await createDeletedStatusTable();
58
+ await createIdIndexes();
57
59
  // On schema init
58
60
  if (config.onSchemaInit) {
59
61
  await config.onSchemaInit();
@@ -39,6 +39,7 @@ export type LocalSchemaDefinition = {
39
39
  [key: string]: ColumnDefinition | string[];
40
40
  } & {
41
41
  __indices?: string[];
42
+ __constraints?: string;
42
43
  };
43
44
 
44
45
  export type DefineLocalSchema = (
@@ -34,6 +34,7 @@ export type SupastashConfig<T extends SupastashSQLiteClientTypes> = {
34
34
  listeners?: number;
35
35
  onSchemaInit?: () => Promise<void>;
36
36
  debugMode?: boolean;
37
+ useCustomRPCForUpserts?: boolean;
37
38
  };
38
39
 
39
40
  interface SupastashSQLiteDatabase {
@@ -54,7 +55,7 @@ interface SupastashSQLiteDatabase {
54
55
  * @param params - Optional parameters for the query
55
56
  * @returns A Promise resolving to an array
56
57
  */
57
- getAllAsync(sql: string, params?: any[]): Promise<any[]>;
58
+ getAllAsync<T = any>(sql: string, params?: any[]): Promise<T[]>;
58
59
 
59
60
  /**
60
61
  * Executes a query and returns **only the first row** (or `null` if no rows).
@@ -63,7 +64,7 @@ interface SupastashSQLiteDatabase {
63
64
  * @param params - Optional parameters for the query
64
65
  * @returns A Promise resolving to the first matching row
65
66
  */
66
- getFirstAsync(sql: string, params?: any[]): Promise<any | null>;
67
+ getFirstAsync<T = any>(sql: string, params?: any[]): Promise<T | null>;
67
68
 
68
69
  /**
69
70
  * Executes **multiple SQL statements** separated by semicolons.
@@ -0,0 +1,9 @@
1
+ /**
2
+ * Create a single-column index on "id" where it actually helps.
3
+ * - Skips virtual tables (e.g. FTS), views, system tables.
4
+ * - Skips when "id" doesn't exist.
5
+ * - Skips when "id" is already PK (including TEXT PK which has an implicit unique index).
6
+ * - Skips when there's already an index on "id".
7
+ */
8
+ export declare function createIdIndexes(): Promise<void>;
9
+ //# sourceMappingURL=createIndexes.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"createIndexes.d.ts","sourceRoot":"","sources":["../../src/utils/createIndexes.ts"],"names":[],"mappings":"AAIA;;;;;;GAMG;AACH,wBAAsB,eAAe,kBAgGpC"}
@@ -0,0 +1,81 @@
1
+ import { getSupastashDb } from "../db/dbInitializer";
2
+ import { logError } from "./logs";
3
+ import { getAllTables } from "./sync/getAllTables";
4
+ /**
5
+ * Create a single-column index on "id" where it actually helps.
6
+ * - Skips virtual tables (e.g. FTS), views, system tables.
7
+ * - Skips when "id" doesn't exist.
8
+ * - Skips when "id" is already PK (including TEXT PK which has an implicit unique index).
9
+ * - Skips when there's already an index on "id".
10
+ */
11
+ export async function createIdIndexes() {
12
+ const db = await getSupastashDb();
13
+ const tables = await getAllTables();
14
+ if (!tables || !tables.length)
15
+ return;
16
+ const q = (s) => `"${s.replace(/"/g, '""')}"`;
17
+ for (const table of tables) {
18
+ if (!table)
19
+ continue;
20
+ try {
21
+ // 1) Check sqlite_schema for table kind + DDL
22
+ const schemaRow = await db.getFirstAsync(`SELECT type, sql FROM sqlite_schema WHERE name = ?1`, [table]);
23
+ if (!schemaRow)
24
+ continue; // not a real table in this db
25
+ if (schemaRow.type !== "table")
26
+ continue; // views, triggers, etc.
27
+ const ddl = schemaRow.sql || "";
28
+ // Virtual tables get skipped
29
+ if (/\bCREATE\s+VIRTUAL\s+TABLE\b/i.test(ddl)) {
30
+ continue;
31
+ }
32
+ // 2) Inspect columns; ensure "id" exists and get PK info
33
+ const columns = await db.getAllAsync(`PRAGMA table_info(${q(table)})`);
34
+ if (!columns?.length)
35
+ continue;
36
+ const idCol = columns.find((c) => c.name === "id");
37
+ if (!idCol)
38
+ continue; // no "id" column → nothing to do
39
+ // If "id" is part of the PK, indexing is unnecessary/redundant:
40
+ // - INTEGER PRIMARY KEY uses rowid (fast already).
41
+ // - TEXT/other PRIMARY KEYs get an implicit unique index.
42
+ const pkCols = columns.filter((c) => c.pk > 0);
43
+ const idIsPk = idCol.pk > 0;
44
+ if (idIsPk) {
45
+ // If it's the only PK column or part of composite PK, skip — already indexed enough.
46
+ continue;
47
+ }
48
+ // 3) Check if an index on "id" already exists
49
+ const indexes = await db.getAllAsync(`PRAGMA index_list(${q(table)})`);
50
+ let hasIdOnlyIndex = false;
51
+ if (indexes?.length) {
52
+ for (const ix of indexes) {
53
+ // Ignore partial indexes for simplicity
54
+ const cols = await db.getAllAsync(`PRAGMA index_info(${q(ix.name)})`);
55
+ if (!cols?.length)
56
+ continue;
57
+ if (cols.length === 1 && cols[0].name === "id") {
58
+ hasIdOnlyIndex = true;
59
+ break;
60
+ }
61
+ }
62
+ }
63
+ if (hasIdOnlyIndex)
64
+ continue;
65
+ // 4) Create the index safely
66
+ const idxName = `idx_${table}_id`;
67
+ try {
68
+ await db.runAsync("BEGIN");
69
+ await db.runAsync(`CREATE INDEX IF NOT EXISTS ${q(idxName)} ON ${q(table)}("id");`);
70
+ await db.runAsync("COMMIT");
71
+ }
72
+ catch (e) {
73
+ await db.runAsync("ROLLBACK");
74
+ logError(`[index-skip] ${table}: ${e.message}`);
75
+ }
76
+ }
77
+ catch (err) {
78
+ logError(`[index-check-failed] ${table}: ${err.message}`);
79
+ }
80
+ }
81
+ }
@@ -0,0 +1,10 @@
1
+ import { SyncMode } from "../../../../types/query.types";
2
+ interface InsertOptions<R = any> {
3
+ table: string;
4
+ syncMode?: SyncMode;
5
+ nowISO?: string;
6
+ returnInsertedRows?: boolean;
7
+ }
8
+ export declare function insertMany<R = any>(payload: R[], opts: InsertOptions<R>): Promise<R[] | void>;
9
+ export {};
10
+ //# sourceMappingURL=insertMany.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"insertMany.d.ts","sourceRoot":"","sources":["../../../../../src/utils/query/helpers/localDb/insertMany.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,+BAA+B,CAAC;AAIzD,UAAU,aAAa,CAAC,CAAC,GAAG,GAAG;IAC7B,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,QAAQ,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,kBAAkB,CAAC,EAAE,OAAO,CAAC;CAC9B;AAMD,wBAAsB,UAAU,CAAC,CAAC,GAAG,GAAG,EACtC,OAAO,EAAE,CAAC,EAAE,EACZ,IAAI,EAAE,aAAa,CAAC,CAAC,CAAC,GACrB,OAAO,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,CA4GrB"}
@@ -0,0 +1,127 @@
1
+ import { getSupastashDb } from "../../../../db/dbInitializer";
2
+ import { getSafeValue } from "../../../serializer";
3
+ import { parseStringifiedFields as parseRow } from "../../../sync/pushLocal/parseFields";
4
+ const MAX_PARAMS = 999;
5
+ const CHECK_BATCH = 900;
6
+ const YIELD_EVERY = 500;
7
+ export async function insertMany(payload, opts) {
8
+ const db = await getSupastashDb();
9
+ const { table, syncMode, returnInsertedRows } = opts;
10
+ const timeStamp = opts.nowISO ?? new Date().toISOString();
11
+ assertTableName(table);
12
+ if (!Array.isArray(payload) || payload.length === 0)
13
+ return [];
14
+ const idSet = new Set();
15
+ // 1) Validate & gather ids
16
+ const ids = payload.map((item, i) => {
17
+ if (!item || !item.id) {
18
+ throw new Error(`Payload[${i}] must include a valid 'id' field for inserts.`);
19
+ }
20
+ const id = String(item.id);
21
+ if (idSet.has(id)) {
22
+ throw new Error(`Duplicate id: ${id} in payload[${i}]`);
23
+ }
24
+ idSet.add(id);
25
+ return id;
26
+ });
27
+ // 2) Check existing ids in DB (batched; fail-fast)
28
+ for (let i = 0; i < ids.length; i += CHECK_BATCH) {
29
+ const part = ids.slice(i, i + CHECK_BATCH);
30
+ const ph = makePlaceholders(part.length);
31
+ const existing = await db.getAllAsync(`SELECT id FROM ${quoteIdent(table)} WHERE id IN (${ph})`, part);
32
+ if (existing.length) {
33
+ const taken = existing.map((r) => String(r.id));
34
+ throw new Error(`Record(s) already exist in table ${table}: ${taken.join(", ")}`);
35
+ }
36
+ }
37
+ // 3) Do inserts in a single transaction
38
+ const insertedIds = [];
39
+ const run = async () => {
40
+ for (let i = 0; i < payload.length; i++) {
41
+ const item = payload[i];
42
+ const newPayload = {
43
+ ...item,
44
+ created_at: hasOwn(item, "created_at") ? item.created_at : timeStamp,
45
+ updated_at: hasOwn(item, "updated_at") ? item.updated_at : timeStamp,
46
+ synced_at: hasOwn(item, "synced_at")
47
+ ? item.synced_at
48
+ : syncMode && (syncMode === "localOnly" || syncMode === "remoteFirst")
49
+ ? timeStamp
50
+ : null,
51
+ };
52
+ const colArray = Object.keys(newPayload);
53
+ if (colArray.length === 0)
54
+ continue;
55
+ // Validate/quote column names
56
+ const colsSql = colArray.map(quoteIdent).join(", ");
57
+ const placeholders = makePlaceholders(colArray.length);
58
+ const values = colArray.map((c) => normalizeValue(getSafeValue(newPayload[c])));
59
+ await db.runAsync(`INSERT INTO ${quoteIdent(table)} (${colsSql}) VALUES (${placeholders})`, values);
60
+ insertedIds.push(String(item.id));
61
+ if ((i + 1) % YIELD_EVERY === 0)
62
+ await microYield();
63
+ }
64
+ };
65
+ await db.runAsync("BEGIN");
66
+ try {
67
+ await run();
68
+ await db.runAsync("COMMIT");
69
+ }
70
+ catch (e) {
71
+ await db.runAsync("ROLLBACK");
72
+ throw e;
73
+ }
74
+ // 4) Optionally fetch inserted rows (batched) and return in input order
75
+ if (!returnInsertedRows)
76
+ return;
77
+ if (insertedIds.length === 0)
78
+ return [];
79
+ const rows = [];
80
+ for (let i = 0; i < insertedIds.length; i += CHECK_BATCH) {
81
+ const part = insertedIds.slice(i, i + CHECK_BATCH);
82
+ const ph = makePlaceholders(part.length);
83
+ const chunkRows = await db.getAllAsync(`SELECT * FROM ${quoteIdent(table)} WHERE id IN (${ph})`, part);
84
+ rows.push(...chunkRows);
85
+ }
86
+ const map = new Map(rows.map((r) => [String(r.id), parseRow ? parseRow(r) : r]));
87
+ return insertedIds.map((id) => map.get(id)).filter(Boolean);
88
+ }
89
+ /* ---------- helpers ---------- */
90
+ function makePlaceholders(n) {
91
+ if (n <= 0)
92
+ throw new Error("No placeholders to make");
93
+ if (n > MAX_PARAMS)
94
+ throw new Error(`Requested ${n} placeholders; max is ${MAX_PARAMS}`);
95
+ return Array(n).fill("?").join(",");
96
+ }
97
+ function hasOwn(obj, key) {
98
+ return Object.prototype.hasOwnProperty.call(obj, key);
99
+ }
100
+ function assertTableName(name) {
101
+ if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(name)) {
102
+ throw new Error(`Unsafe table name: ${name}`);
103
+ }
104
+ }
105
+ function quoteIdent(name) {
106
+ if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(name)) {
107
+ throw new Error(`Unsafe identifier: ${name}`);
108
+ }
109
+ return `"${name}"`;
110
+ }
111
+ function normalizeValue(v) {
112
+ return v === undefined ? null : v;
113
+ }
114
+ function defaultSafe(value) {
115
+ if (value === null || value === undefined)
116
+ return value ?? null;
117
+ if (value instanceof Date)
118
+ return value.toISOString();
119
+ if (Array.isArray(value))
120
+ return JSON.stringify(value);
121
+ if (typeof value === "object")
122
+ return JSON.stringify(value);
123
+ return value;
124
+ }
125
+ function microYield() {
126
+ return new Promise((res) => setTimeout(res, 0));
127
+ }
@@ -0,0 +1,13 @@
1
+ import { SyncMode } from "../../../../types/query.types";
2
+ interface UpsertOptions<R = any> {
3
+ table: string;
4
+ onConflictKeys?: string[];
5
+ syncMode?: SyncMode;
6
+ nowISO?: string;
7
+ preserveTimestamp?: boolean;
8
+ returnRows?: boolean;
9
+ yieldEvery?: number;
10
+ }
11
+ export declare function upsertMany<R = any>(items: R[], opts: UpsertOptions<R>): Promise<R[] | void>;
12
+ export {};
13
+ //# sourceMappingURL=upsertMany.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"upsertMany.d.ts","sourceRoot":"","sources":["../../../../../src/utils/query/helpers/localDb/upsertMany.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,+BAA+B,CAAC;AAMzD,UAAU,aAAa,CAAC,CAAC,GAAG,GAAG;IAC7B,KAAK,EAAE,MAAM,CAAC;IACd,cAAc,CAAC,EAAE,MAAM,EAAE,CAAC;IAC1B,QAAQ,CAAC,EAAE,QAAQ,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAID,wBAAsB,UAAU,CAAC,CAAC,GAAG,GAAG,EACtC,KAAK,EAAE,CAAC,EAAE,EACV,IAAI,EAAE,aAAa,CAAC,CAAC,CAAC,GACrB,OAAO,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,CAuKrB"}
@@ -0,0 +1,178 @@
1
+ import { getSupastashDb } from "../../../../db/dbInitializer";
2
+ import { generateUUIDv4 } from "../../../genUUID";
3
+ import { parseStringifiedFields as parseRow } from "../../../sync/pushLocal/parseFields";
4
+ const DEFAULT_DATE = "1970-01-01T00:00:00.000Z";
5
+ const CHECK_BATCH = 900; // param headroom under 999
6
+ export async function upsertMany(items, opts) {
7
+ const db = await getSupastashDb();
8
+ const { table, syncMode, nowISO, preserveTimestamp = false, yieldEvery = 500, } = opts;
9
+ const returnRows = opts.returnRows !== false;
10
+ const onConflictKeys = opts.onConflictKeys && opts.onConflictKeys.length
11
+ ? opts.onConflictKeys
12
+ : ["id"];
13
+ assertTableName(table);
14
+ onConflictKeys.forEach(assertIdent);
15
+ if (!Array.isArray(items) || items.length === 0)
16
+ return [];
17
+ const timeStamp = nowISO ?? new Date().toISOString();
18
+ let existingIdSet = null;
19
+ if (onConflictKeys.length === 1 && onConflictKeys[0] === "id") {
20
+ const ids = items
21
+ .map((row, i) => {
22
+ const id = row?.id ?? null;
23
+ // allow missing; we'll generate before insert
24
+ return id == null ? null : String(id);
25
+ })
26
+ .filter(Boolean);
27
+ existingIdSet = new Set(await selectIdsInBatches(db, table, ids));
28
+ }
29
+ const upserted = [];
30
+ const run = async () => {
31
+ for (let i = 0; i < items.length; i++) {
32
+ const input = items[i] ?? {};
33
+ const row = { ...input };
34
+ // Ensure id if it's part of conflict keys
35
+ if (onConflictKeys.includes("id") && (row.id == null || row.id === "")) {
36
+ row.id = generateUUIDv4();
37
+ }
38
+ // synced_at default
39
+ if (!hasOwn(row, "synced_at")) {
40
+ row.synced_at =
41
+ syncMode && (syncMode === "localOnly" || syncMode === "remoteFirst")
42
+ ? timeStamp
43
+ : null;
44
+ }
45
+ // Decide: update or insert?
46
+ const { clause, values: keyValues } = buildWhere(onConflictKeys, row);
47
+ const canCheckConflict = clause !== null;
48
+ let exists = false;
49
+ if (onConflictKeys.length === 1 &&
50
+ onConflictKeys[0] === "id" &&
51
+ existingIdSet) {
52
+ exists = existingIdSet.has(String(row.id));
53
+ }
54
+ else if (canCheckConflict) {
55
+ const existing = await db.getAllAsync(`SELECT 1 FROM ${quote(table)} WHERE ${clause} LIMIT 2`, keyValues);
56
+ if (existing.length > 1) {
57
+ throw new Error(`Multiple rows matched onConflictKeys in '${table}' โ€” expected uniqueness on ${onConflictKeys.join(", ")}`);
58
+ }
59
+ exists = existing.length === 1;
60
+ }
61
+ if (exists) {
62
+ // UPDATE path
63
+ if (!preserveTimestamp || input.updated_at === undefined) {
64
+ row.updated_at =
65
+ input.updated_at !== undefined ? input.updated_at : timeStamp;
66
+ }
67
+ // Build SET list (exclude conflict keys; also skip undefined to avoid nulling unintentionally)
68
+ const updateCols = Object.keys(row).filter((c) => !onConflictKeys.includes(c) && row[c] !== undefined);
69
+ if (updateCols.length > 0) {
70
+ const setSql = updateCols.map((c) => `${quote(c)} = ?`).join(", ");
71
+ const setVals = updateCols.map((c) => toDbValue(row[c]));
72
+ if (!canCheckConflict) {
73
+ throw new Error(`Missing onConflictKeys in payload; cannot UPDATE in '${table}'. Keys: ${onConflictKeys.join(", ")}`);
74
+ }
75
+ await db.runAsync(`UPDATE ${quote(table)} SET ${setSql} WHERE ${clause}`, [...setVals, ...keyValues]);
76
+ if (returnRows) {
77
+ const updated = await db.getFirstAsync(`SELECT * FROM ${quote(table)} WHERE ${clause} LIMIT 1`, keyValues);
78
+ if (returnRows)
79
+ upserted.push(parseRow(updated));
80
+ }
81
+ }
82
+ }
83
+ else {
84
+ // INSERT path
85
+ if (!hasOwn(row, "created_at"))
86
+ row.created_at = timeStamp;
87
+ if (!hasOwn(row, "updated_at"))
88
+ row.updated_at = timeStamp;
89
+ if (!hasOwn(row, "id"))
90
+ row.id = generateUUIDv4();
91
+ const insertCols = Object.keys(row).filter((c) => row[c] !== undefined);
92
+ const placeholders = insertCols.map(() => "?").join(", ");
93
+ const insertVals = insertCols.map((c) => toDbValue(row[c]));
94
+ await db.runAsync(`INSERT INTO ${quote(table)} (${insertCols
95
+ .map(quote)
96
+ .join(", ")}) VALUES (${placeholders})`, insertVals);
97
+ if (returnRows) {
98
+ const inserted = await db.getFirstAsync(`SELECT * FROM ${quote(table)} WHERE ${clause} LIMIT 1`, keyValues);
99
+ upserted.push(parseRow(inserted));
100
+ }
101
+ if (existingIdSet &&
102
+ onConflictKeys.length === 1 &&
103
+ onConflictKeys[0] === "id") {
104
+ existingIdSet.add(String(row.id));
105
+ }
106
+ }
107
+ if (yieldEvery > 0 && (i + 1) % yieldEvery === 0) {
108
+ await microYield();
109
+ }
110
+ }
111
+ };
112
+ await db.runAsync("BEGIN");
113
+ try {
114
+ await run();
115
+ await db.runAsync("COMMIT");
116
+ }
117
+ catch (e) {
118
+ await db.runAsync("ROLLBACK");
119
+ throw e;
120
+ }
121
+ return returnRows ? upserted : undefined;
122
+ }
123
+ /* ================= helpers ================= */
124
+ function buildWhere(keys, row) {
125
+ if (!keys.length)
126
+ return { clause: null, values: [] };
127
+ const missing = keys.filter((k) => row[k] === undefined || row[k] === null);
128
+ if (missing.length)
129
+ return { clause: null, values: [] };
130
+ const parts = keys.map((k) => `${quote(k)} = ?`);
131
+ const vals = keys.map((k) => toDbValue(row[k]));
132
+ return { clause: parts.join(" AND "), values: vals };
133
+ }
134
+ function toDbValue(v) {
135
+ if (v === undefined)
136
+ return null;
137
+ if (v === null)
138
+ return null;
139
+ if (v instanceof Date)
140
+ return v.toISOString();
141
+ if (Array.isArray(v))
142
+ return JSON.stringify(v);
143
+ if (typeof v === "object")
144
+ return JSON.stringify(v);
145
+ return v;
146
+ }
147
+ function hasOwn(obj, key) {
148
+ return Object.prototype.hasOwnProperty.call(obj, key);
149
+ }
150
+ function assertTableName(name) {
151
+ if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(name)) {
152
+ throw new Error(`Unsafe table name: ${name}`);
153
+ }
154
+ }
155
+ function assertIdent(name) {
156
+ if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(name)) {
157
+ throw new Error(`Unsafe identifier: ${name}`);
158
+ }
159
+ }
160
+ function quote(name) {
161
+ return `"${name}"`;
162
+ }
163
+ async function selectIdsInBatches(db, table, ids) {
164
+ const out = [];
165
+ for (let i = 0; i < ids.length; i += CHECK_BATCH) {
166
+ const part = ids.slice(i, i + CHECK_BATCH);
167
+ if (part.length === 0)
168
+ continue;
169
+ const ph = Array(part.length).fill("?").join(",");
170
+ const rows = await db.getAllAsync(`SELECT id FROM ${quote(table)} WHERE id IN (${ph})`, part);
171
+ for (const r of rows)
172
+ out.push(String(r.id));
173
+ }
174
+ return out;
175
+ }
176
+ function microYield() {
177
+ return new Promise((res) => setTimeout(res, 0));
178
+ }
@@ -1 +1 @@
1
- {"version":3,"file":"insert.d.ts","sourceRoot":"","sources":["../../../../src/utils/query/localDbQuery/insert.ts"],"names":[],"mappings":"AACA,OAAO,EAEL,iBAAiB,EACjB,aAAa,EACb,QAAQ,EACT,MAAM,4BAA4B,CAAC;AAMpC;;;;;;GAMG;AACH,wBAAsB,UAAU,CAAC,CAAC,SAAS,OAAO,EAAE,CAAC,EAAE,CAAC,EACtD,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,CAAC,EAAE,GAAG,IAAI,EACnB,QAAQ,CAAC,EAAE,QAAQ,EACnB,QAAQ,CAAC,EAAE,CAAC,GACX,OAAO,CAAC,CAAC,SAAS,IAAI,GAAG,aAAa,CAAC,CAAC,CAAC,GAAG,iBAAiB,CAAC,CAAC,CAAC,CAAC,CA6EnE"}
1
+ {"version":3,"file":"insert.d.ts","sourceRoot":"","sources":["../../../../src/utils/query/localDbQuery/insert.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,QAAQ,EACT,MAAM,4BAA4B,CAAC;AAKpC;;;;;;GAMG;AACH,wBAAsB,UAAU,CAAC,CAAC,SAAS,OAAO,EAAE,CAAC,EAAE,CAAC,EACtD,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,CAAC,EAAE,GAAG,IAAI,EACnB,QAAQ,CAAC,EAAE,QAAQ,EACnB,QAAQ,CAAC,EAAE,CAAC,GACX,OAAO,CAAC,CAAC,SAAS,IAAI,GAAG,aAAa,CAAC,CAAC,CAAC,GAAG,iBAAiB,CAAC,CAAC,CAAC,CAAC,CA8BnE"}
@@ -1,8 +1,6 @@
1
- import { getSupastashDb } from "../../../db/dbInitializer";
2
1
  import { logError } from "../../logs";
3
- import { getSafeValue } from "../../serializer";
4
- import { parseStringifiedFields } from "../../sync/pushLocal/parseFields";
5
2
  import { assertTableExists } from "../../tableValidator";
3
+ import { insertMany } from "../helpers/localDb/insertMany";
6
4
  /**
7
5
  * Inserts data locally, sets synced_at to null pending update to remote server
8
6
  *
@@ -15,43 +13,16 @@ export async function insertData(table, payload, syncMode, isSingle) {
15
13
  throw new Error("Table name was not provided for an insert call");
16
14
  if (!payload)
17
15
  throw new Error(`Payload data was not provided for an insert call on ${table}`);
18
- const timeStamp = new Date().toISOString();
19
- const inserted = [];
20
16
  try {
21
17
  await assertTableExists(table);
22
- const db = await getSupastashDb();
23
- for (const item of payload) {
24
- if (!item.id) {
25
- throw new Error(`Payload must include a valid 'id' field for inserts.`);
26
- }
27
- const newPayload = {
28
- ...item,
29
- created_at: item.created_at ?? timeStamp,
30
- updated_at: item.updated_at ?? timeStamp,
31
- synced_at: Object.prototype.hasOwnProperty.call(item, "synced_at")
32
- ? item.synced_at
33
- : syncMode && (syncMode === "localOnly" || syncMode === "remoteFirst")
34
- ? timeStamp
35
- : null,
36
- };
37
- const colArray = Object.keys(newPayload);
38
- const cols = colArray.join(", ");
39
- const placeholders = colArray.map(() => "?").join(", ");
40
- const values = colArray.map((c) => getSafeValue(newPayload[c]));
41
- // Check if record already exist
42
- const exists = await db.getFirstAsync(`SELECT 1 FROM ${table} WHERE id = ? LIMIT 1`, [newPayload.id]);
43
- if (exists)
44
- throw new Error(`Record with id ${newPayload.id} already exists in table ${table}`);
45
- // Insert data
46
- await db.runAsync(`INSERT INTO ${table} (${cols}) VALUES (${placeholders})`, values);
47
- const insertedRow = await db.getFirstAsync(`SELECT * FROM ${table} WHERE id = ?`, [newPayload.id]);
48
- if (insertedRow) {
49
- inserted.push(parseStringifiedFields(insertedRow));
50
- }
51
- }
18
+ const inserted = await insertMany(payload, {
19
+ table,
20
+ syncMode,
21
+ returnInsertedRows: true,
22
+ });
52
23
  return {
53
24
  error: null,
54
- data: isSingle ? inserted[0] : inserted,
25
+ data: isSingle ? inserted?.[0] : inserted,
55
26
  };
56
27
  }
57
28
  catch (error) {
@@ -1 +1 @@
1
- {"version":3,"file":"upsert.d.ts","sourceRoot":"","sources":["../../../../src/utils/query/localDbQuery/upsert.ts"],"names":[],"mappings":"AAEA,OAAO,EAEL,iBAAiB,EACjB,aAAa,EACb,QAAQ,EACT,MAAM,4BAA4B,CAAC;AASpC;;;;;GAKG;AACH,wBAAsB,UAAU,CAAC,CAAC,SAAS,OAAO,EAAE,CAAC,EAAE,CAAC,EACtD,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,GAAG,IAAI,EACvB,QAAQ,CAAC,EAAE,QAAQ,EACnB,QAAQ,CAAC,EAAE,CAAC,EACZ,cAAc,GAAE,MAAM,EAAW,EACjC,iBAAiB,CAAC,EAAE,OAAO,GAC1B,OAAO,CAAC,CAAC,SAAS,IAAI,GAAG,aAAa,CAAC,CAAC,CAAC,GAAG,iBAAiB,CAAC,CAAC,CAAC,CAAC,CA4HnE"}
1
+ {"version":3,"file":"upsert.d.ts","sourceRoot":"","sources":["../../../../src/utils/query/localDbQuery/upsert.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,QAAQ,EACT,MAAM,4BAA4B,CAAC;AAOpC;;;;;GAKG;AACH,wBAAsB,UAAU,CAAC,CAAC,SAAS,OAAO,EAAE,CAAC,EAAE,CAAC,EACtD,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,GAAG,IAAI,EACvB,QAAQ,CAAC,EAAE,QAAQ,EACnB,QAAQ,CAAC,EAAE,CAAC,EACZ,cAAc,GAAE,MAAM,EAAW,EACjC,iBAAiB,CAAC,EAAE,OAAO,GAC1B,OAAO,CAAC,CAAC,SAAS,IAAI,GAAG,aAAa,CAAC,CAAC,CAAC,GAAG,iBAAiB,CAAC,CAAC,CAAC,CAAC,CA8BnE"}
@@ -1,10 +1,6 @@
1
- import { getSupastashConfig } from "../../../core/config";
2
- import { getSupastashDb } from "../../../db/dbInitializer";
3
- import { generateUUIDv4 } from "../../../utils/genUUID";
4
- import { logError, logWarn } from "../../logs";
5
- import { getSafeValue } from "../../serializer";
6
- import { parseStringifiedFields } from "../../sync/pushLocal/parseFields";
1
+ import { logError } from "../../logs";
7
2
  import { assertTableExists } from "../../tableValidator";
3
+ import { upsertMany } from "../helpers/localDb/upsertMany";
8
4
  const warned = new Set();
9
5
  /**
10
6
  * Performs upsert-like logic on local DB:
@@ -16,78 +12,18 @@ export async function upsertData(table, payload, syncMode, isSingle, onConflictK
16
12
  if (!payload || !table)
17
13
  throw new Error("Table and payload are required for upsert.");
18
14
  await assertTableExists(table);
19
- const timeStamp = new Date().toISOString();
20
15
  const items = Array.isArray(payload) ? payload : [payload];
21
- const upserted = [];
22
16
  try {
23
- const db = await getSupastashDb();
24
- for (const item of items) {
25
- const newPayload = {
26
- ...item,
27
- synced_at: Object.prototype.hasOwnProperty.call(item, "synced_at")
28
- ? item.synced_at
29
- : syncMode && (syncMode === "localOnly" || syncMode === "remoteFirst")
30
- ? timeStamp
31
- : null,
32
- };
33
- const colArray = Object.keys(newPayload);
34
- const includesConflictKeys = onConflictKeys.every((key) => colArray.includes(key));
35
- if (!includesConflictKeys) {
36
- throw new Error(`onConflictKeys must include at least one key from the payload. Payload: ${JSON.stringify(newPayload)}`);
37
- }
38
- const whereClause = onConflictKeys
39
- .map((key) => `${key} = ?`)
40
- .join(" AND ");
41
- const conflictValues = onConflictKeys.map((key) => getSafeValue(newPayload[key]));
42
- const existingData = await db.getAllAsync(`SELECT * FROM ${table} WHERE ${whereClause}`, [...conflictValues]);
43
- if (existingData.length > 0) {
44
- if (existingData.length > 1) {
45
- throw new Error(`Multiple rows matched onConflictKeys in '${table}' โ€” expected unique constraint. Payload: ${JSON.stringify(newPayload)}`);
46
- }
47
- if (!preserveTimestamp || item.updated_at === undefined) {
48
- if (!warned.has(table) &&
49
- !getSupastashConfig().debugMode &&
50
- __DEV__) {
51
- warned.add(table);
52
- logWarn(`[Supastash] updated_at not provided for upsert call on ${table} โ€“ defaulting to ${timeStamp}`);
53
- }
54
- const userUpdatedAt = item.updated_at;
55
- newPayload.updated_at =
56
- userUpdatedAt !== undefined ? userUpdatedAt : timeStamp;
57
- }
58
- const updateColsArray = Object.keys(newPayload);
59
- const updateCols = updateColsArray
60
- .filter((col) => !onConflictKeys.includes(col))
61
- .map((col) => `${col} = ?`)
62
- .join(", ");
63
- const updateValues = updateColsArray
64
- .filter((col) => !onConflictKeys.includes(col))
65
- .map((c) => getSafeValue(newPayload[c]));
66
- const updateSql = `UPDATE ${table} SET ${updateCols} WHERE ${whereClause}`;
67
- await db.runAsync(updateSql, [...updateValues, ...conflictValues]);
68
- upserted.push(parseStringifiedFields(newPayload));
69
- }
70
- else {
71
- const insertPayload = {
72
- ...newPayload,
73
- id: newPayload.id ?? generateUUIDv4(),
74
- created_at: newPayload.created_at ?? timeStamp,
75
- updated_at: newPayload.updated_at ?? timeStamp,
76
- };
77
- const newColsArray = Object.keys(insertPayload);
78
- const insertCols = newColsArray.join(", ");
79
- const insertPlaceholders = newColsArray.map(() => "?").join(", ");
80
- const insertValues = newColsArray.map((c) => getSafeValue(insertPayload[c]));
81
- const insertSql = `INSERT INTO ${table} (${insertCols}) VALUES (${insertPlaceholders})`;
82
- await db.runAsync(insertSql, insertValues);
83
- const inserted = await db.getFirstAsync(`SELECT * FROM ${table} WHERE ${whereClause} LIMIT 1`, [...conflictValues]);
84
- if (inserted)
85
- upserted.push(parseStringifiedFields(inserted));
86
- }
87
- }
17
+ const upserted = await upsertMany(items, {
18
+ table,
19
+ syncMode,
20
+ returnRows: true,
21
+ onConflictKeys,
22
+ preserveTimestamp,
23
+ });
88
24
  return {
89
25
  error: null,
90
- data: isSingle ? upserted[0] : upserted,
26
+ data: isSingle ? upserted?.[0] : upserted,
91
27
  };
92
28
  }
93
29
  catch (error) {
@@ -137,7 +137,7 @@ export async function querySupabase(state, isBatched = false) {
137
137
  type !== "remoteFirst") {
138
138
  if (method === "insert" && newPayload) {
139
139
  for (const item of upsertOrInsertPayload) {
140
- await updateLocalSyncedAt(table, item.id);
140
+ await updateLocalSyncedAt(table, [item.id]);
141
141
  }
142
142
  }
143
143
  if (method === "upsert" && newPayload) {
@@ -1 +1 @@
1
- {"version":3,"file":"updateLocalDb.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/updateLocalDb.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,cAAc,EAAE,MAAM,mCAAmC,CAAC;AAanE;;;GAGG;AACH,wBAAsB,aAAa,CACjC,KAAK,EAAE,MAAM,EACb,OAAO,CAAC,EAAE,cAAc,EAAE,EAC1B,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,OAAO,CAAC,IAAI,CAAC,iBA+ChD;AAID;;;;;GAKG;AACH,wBAAsB,UAAU,CAC9B,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,GAAG,EACX,SAAS,CAAC,EAAE,OAAO,iBAoEpB"}
1
+ {"version":3,"file":"updateLocalDb.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/updateLocalDb.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,cAAc,EAAE,MAAM,mCAAmC,CAAC;AAcnE;;;GAGG;AACH,wBAAsB,aAAa,CACjC,KAAK,EAAE,MAAM,EACb,OAAO,CAAC,EAAE,cAAc,EAAE,EAC1B,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,OAAO,CAAC,IAAI,CAAC,iBAiEhD;AAID;;;;;GAKG;AACH,wBAAsB,UAAU,CAC9B,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,GAAG,EACX,SAAS,CAAC,EAAE,OAAO,iBAoEpB"}
@@ -10,6 +10,7 @@ import { pullDeletedData } from "./pullDeletedData";
10
10
  import { stringifyValue } from "./stringifyFields";
11
11
  let isInSync = new Map();
12
12
  const DEFAULT_DATE = "1970-01-01T00:00:00Z";
13
+ const BATCH_SIZE = 500;
13
14
  /**
14
15
  * Updates the local database with the remote changes
15
16
  * @param table - The table to update
@@ -27,24 +28,39 @@ export async function updateLocalDb(table, filters, onReceiveData) {
27
28
  const refreshNeeded = !!deletedData?.records.length || !!data?.length;
28
29
  // Delete records that are no longer in the remote data
29
30
  if (deletedData && deletedData.records.length > 0) {
30
- for (const record of deletedData.records) {
31
- await db.runAsync(`DELETE FROM ${table} WHERE id = ?`, [record.id]);
32
- }
31
+ const ids = deletedData.records.map((record) => record.id).join(",");
32
+ const placeholders = ids
33
+ .split(",")
34
+ .map(() => "?")
35
+ .join(",");
36
+ await db.runAsync(`DELETE FROM ${table} WHERE id IN (${placeholders})`, deletedData.records.map((record) => record.id));
33
37
  }
34
38
  // Update local database with remote changes
35
39
  if (data && data.length > 0) {
36
- for (const record of data) {
37
- if (deletedData?.deletedDataMap.has(record.id))
38
- continue;
39
- const { doesExist, newer } = await checkIfRecordExistsAndIsNewer(table, record);
40
- if (newer) {
41
- if (onReceiveData) {
42
- await onReceiveData(record);
40
+ const run = async () => {
41
+ for (let i = 0; i < data.length; i++) {
42
+ const record = data[i];
43
+ if (deletedData?.deletedDataMap.has(record.id))
44
+ continue;
45
+ const { doesExist, newer } = await checkIfRecordExistsAndIsNewer(table, record);
46
+ if (newer) {
47
+ if (onReceiveData) {
48
+ await onReceiveData(record);
49
+ }
50
+ else {
51
+ await upsertData(table, record, doesExist);
52
+ }
43
53
  }
44
- else {
45
- await upsertData(table, record, doesExist);
54
+ if ((i + 1) % BATCH_SIZE === 0) {
55
+ await new Promise((res) => setTimeout(res, 0));
46
56
  }
47
57
  }
58
+ };
59
+ try {
60
+ await run();
61
+ }
62
+ catch (error) {
63
+ throw error;
48
64
  }
49
65
  }
50
66
  if (refreshNeeded)
@@ -104,7 +120,7 @@ export async function upsertData(table, record, doesExist) {
104
120
  // Insert new record
105
121
  await db.runAsync(`INSERT INTO ${table} (${keys.join(", ")}) VALUES (${placeholders})`, values);
106
122
  }
107
- await updateLocalSyncedAt(table, record.id);
123
+ await updateLocalSyncedAt(table, [record.id]);
108
124
  }
109
125
  catch (error) {
110
126
  logError(`[Supastash] Error upserting data for ${table}`, error);
@@ -1 +1 @@
1
- {"version":3,"file":"uploadChunk.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pushLocal/uploadChunk.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AA8KzD;;;;GAIG;AACH,wBAAsB,UAAU,CAC9B,KAAK,EAAE,MAAM,EACb,eAAe,EAAE,WAAW,EAAE,EAC9B,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,OAAO,CAAC,iBAYtD"}
1
+ {"version":3,"file":"uploadChunk.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pushLocal/uploadChunk.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AAgLzD;;;;GAIG;AACH,wBAAsB,UAAU,CAC9B,KAAK,EAAE,MAAM,EACb,eAAe,EAAE,WAAW,EAAE,EAC9B,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,OAAO,CAAC,iBAYtD"}
@@ -9,9 +9,7 @@ const RANDOM_OLD_DATE = new Date("2000-01-01").toISOString();
9
9
  const CHUNK_SIZE = 500;
10
10
  const DEFAULT_DATE = "1970-01-01T00:00:00Z";
11
11
  async function updateSyncStatus(table, rows) {
12
- for (const row of rows) {
13
- await updateLocalSyncedAt(table, row.id);
14
- }
12
+ await updateLocalSyncedAt(table, rows.map((row) => row.id));
15
13
  }
16
14
  function errorHandler(error, table, toUpsert, attempts) {
17
15
  for (const row of toUpsert) {
@@ -27,10 +27,11 @@ export declare function getLocalSyncLog(tableName: string): Promise<{
27
27
  * Sets the sync log for a given table
28
28
  * @param tableName - The name of the table to set the sync log for
29
29
  * @param lastSyncedAt - The last synced at timestamp
30
+ * @param lastCreatedAt - The last created at timestamp
30
31
  * @example
31
32
  * setLocalSyncLog("users", new Date().toISOString());
32
33
  */
33
- export declare function setLocalSyncLog(tableName: string, lastSyncedAt: string): Promise<void>;
34
+ export declare function setLocalSyncLog(tableName: string, lastSyncedAt: string, lastCreatedAt?: string): Promise<void>;
34
35
  /**
35
36
  * Clears the delete log for a given table
36
37
  * @param tableName - The name of the table to clear the delete log for
@@ -1 +1 @@
1
- {"version":3,"file":"syncStatus.d.ts","sourceRoot":"","sources":["../../src/utils/syncStatus.ts"],"names":[],"mappings":"AAUA;;;;;GAKG;AACH,wBAAsB,iBAAiB,CAAC,SAAS,EAAE,MAAM,iBAQxD;AAED;;;;GAIG;AACH,wBAAsB,oBAAoB,kBAKzC;AAED;;;;;;;GAOG;AACH,wBAAsB,eAAe,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC;IAChE,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,MAAM,CAAC;CACtB,GAAG,IAAI,CAAC,CAUR;AAED;;;;;;GAMG;AACH,wBAAsB,eAAe,CAAC,SAAS,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,iBAM5E;AAED;;;;;GAKG;AACH,wBAAsB,mBAAmB,CAAC,SAAS,EAAE,MAAM,iBAM1D;AAED;;;;GAIG;AACH,wBAAsB,sBAAsB,kBAI3C;AAED;;;;;;;GAOG;AACH,wBAAsB,iBAAiB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC;IAClE,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;CACvB,GAAG,IAAI,CAAC,CAUR;AAED;;;;;;GAMG;AACH,wBAAsB,iBAAiB,CACrC,SAAS,EAAE,MAAM,EACjB,aAAa,EAAE,MAAM,iBAOtB"}
1
+ {"version":3,"file":"syncStatus.d.ts","sourceRoot":"","sources":["../../src/utils/syncStatus.ts"],"names":[],"mappings":"AAWA;;;;;GAKG;AACH,wBAAsB,iBAAiB,CAAC,SAAS,EAAE,MAAM,iBAQxD;AAED;;;;GAIG;AACH,wBAAsB,oBAAoB,kBAKzC;AAED;;;;;;;GAOG;AACH,wBAAsB,eAAe,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC;IAChE,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,MAAM,CAAC;CACtB,GAAG,IAAI,CAAC,CAUR;AAED;;;;;;;GAOG;AACH,wBAAsB,eAAe,CACnC,SAAS,EAAE,MAAM,EACjB,YAAY,EAAE,MAAM,EACpB,aAAa,CAAC,EAAE,MAAM,iBAkBvB;AAED;;;;;GAKG;AACH,wBAAsB,mBAAmB,CAAC,SAAS,EAAE,MAAM,iBAM1D;AAED;;;;GAIG;AACH,wBAAsB,sBAAsB,kBAI3C;AAED;;;;;;;GAOG;AACH,wBAAsB,iBAAiB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC;IAClE,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;CACvB,GAAG,IAAI,CAAC,CAUR;AAED;;;;;;GAMG;AACH,wBAAsB,iBAAiB,CACrC,SAAS,EAAE,MAAM,EACjB,aAAa,EAAE,MAAM,iBAOtB"}
@@ -1,4 +1,5 @@
1
1
  import { getSupastashDb } from "../db/dbInitializer";
2
+ import { logWarn } from "./logs";
2
3
  import { createDeletedStatusTable, createSyncStatusTable, } from "./schema/createSyncStatus";
3
4
  const SYNC_STATUS_TABLE = "supastash_sync_status";
4
5
  const DELETED_STATUS_TABLE = "supastash_deleted_status";
@@ -49,12 +50,21 @@ export async function getLocalSyncLog(tableName) {
49
50
  * Sets the sync log for a given table
50
51
  * @param tableName - The name of the table to set the sync log for
51
52
  * @param lastSyncedAt - The last synced at timestamp
53
+ * @param lastCreatedAt - The last created at timestamp
52
54
  * @example
53
55
  * setLocalSyncLog("users", new Date().toISOString());
54
56
  */
55
- export async function setLocalSyncLog(tableName, lastSyncedAt) {
57
+ export async function setLocalSyncLog(tableName, lastSyncedAt, lastCreatedAt) {
56
58
  const db = await getSupastashDb();
57
- await db.runAsync(`INSERT OR REPLACE INTO ${SYNC_STATUS_TABLE} (table_name, last_synced_at) VALUES (?, ?)`, [tableName, lastSyncedAt]);
59
+ if (lastSyncedAt) {
60
+ await db.runAsync(`INSERT OR REPLACE INTO ${SYNC_STATUS_TABLE} (table_name, last_synced_at) VALUES (?, ?)`, [tableName, lastSyncedAt]);
61
+ }
62
+ else {
63
+ logWarn(`No last synced at timestamp for table ${tableName}`);
64
+ }
65
+ if (lastCreatedAt) {
66
+ await db.runAsync(`INSERT OR REPLACE INTO ${LAST_CREATED_TABLE} (table_name, last_created_at) VALUES (?, ?)`, [tableName, lastCreatedAt]);
67
+ }
58
68
  }
59
69
  /**
60
70
  * Clears the delete log for a given table
@@ -3,5 +3,5 @@
3
3
  * @param tableName - The name of the table to update
4
4
  * @param id - The id of the row to update
5
5
  */
6
- export declare function updateLocalSyncedAt(tableName: string, id: string): Promise<void>;
6
+ export declare function updateLocalSyncedAt(tableName: string, ids: string[]): Promise<void>;
7
7
  //# sourceMappingURL=syncUpdate.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"syncUpdate.d.ts","sourceRoot":"","sources":["../../src/utils/syncUpdate.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAsB,mBAAmB,CAAC,SAAS,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,iBAYtE"}
1
+ {"version":3,"file":"syncUpdate.d.ts","sourceRoot":"","sources":["../../src/utils/syncUpdate.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAsB,mBAAmB,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,iBAczE"}
@@ -5,14 +5,12 @@ import { logError } from "./logs";
5
5
  * @param tableName - The name of the table to update
6
6
  * @param id - The id of the row to update
7
7
  */
8
- export async function updateLocalSyncedAt(tableName, id) {
8
+ export async function updateLocalSyncedAt(tableName, ids) {
9
9
  try {
10
10
  const db = await getSupastashDb();
11
11
  const timeStamp = new Date().toISOString();
12
- await db.runAsync(`UPDATE ${tableName} SET synced_at = ? WHERE id = ?`, [
13
- timeStamp,
14
- id,
15
- ]);
12
+ const placeholders = ids.map(() => "?").join(", ");
13
+ await db.runAsync(`UPDATE ${tableName} SET synced_at = ? WHERE id IN (${placeholders})`, [timeStamp, ...ids]);
16
14
  }
17
15
  catch (error) {
18
16
  logError(error);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "supastash",
3
- "version": "0.1.30",
3
+ "version": "0.1.32",
4
4
  "main": "dist/index.js",
5
5
  "types": "dist/index.d.ts",
6
6
  "type": "module",
@@ -18,15 +18,20 @@
18
18
  "scripts": {
19
19
  "build": "tsc",
20
20
  "dev": "tsc --watch",
21
- "test": "vitest --config vitest.config.ts",
22
- "test:watch": "vitest --watch --config vitest.config.ts"
21
+ "test:watch": "vitest --watch --config vitest.config.ts",
22
+ "test": "jest"
23
23
  },
24
24
  "devDependencies": {
25
25
  "@testing-library/react-native": "^13.2.0",
26
+ "@types/better-sqlite3": "^7.6.13",
26
27
  "@types/jest": "^29.5.14",
27
28
  "@types/node": "^22.15.21",
28
29
  "@types/react-native-sqlite-storage": ">=6.0.0 <7.0.0",
29
- "typescript": "^5.8.3",
30
+ "better-sqlite3": "^11.10.0",
31
+ "jest": "^29.7.0",
32
+ "ts-jest": "^29.4.1",
33
+ "ts-node": "^10.9.2",
34
+ "typescript": "^5.9.2",
30
35
  "vite": "^6.3.5",
31
36
  "vitest": "^3.1.4"
32
37
  },