supastash 0.1.62 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +17 -5
- package/dist/core/config/index.d.ts.map +1 -1
- package/dist/core/config/index.js +5 -1
- package/dist/db/adapters/expo_sqlite.d.ts.map +1 -1
- package/dist/db/adapters/expo_sqlite.js +11 -0
- package/dist/db/adapters/rn_nitro.d.ts.map +1 -1
- package/dist/db/adapters/rn_nitro.js +20 -0
- package/dist/db/adapters/rn_sqlite_storage.d.ts.map +1 -1
- package/dist/db/adapters/rn_sqlite_storage.js +27 -0
- package/dist/hooks/supastashData/fetchCalls.d.ts.map +1 -1
- package/dist/hooks/supastashData/index.d.ts.map +1 -1
- package/dist/hooks/supastashData/index.js +12 -4
- package/dist/hooks/supastashData/realtimeSubscription.js +4 -4
- package/dist/hooks/supastashData/registerSub.d.ts +3 -3
- package/dist/hooks/supastashData/registerSub.d.ts.map +1 -1
- package/dist/hooks/supastashData/registerSub.js +3 -3
- package/dist/hooks/supastashFilters/index.d.ts +2 -2
- package/dist/hooks/supastashFilters/index.d.ts.map +1 -1
- package/dist/hooks/supastashFilters/index.js +6 -4
- package/dist/hooks/supastashLiteQuery/index.d.ts.map +1 -1
- package/dist/hooks/supastashLiteQuery/index.js +12 -2
- package/dist/hooks/syncEngine/pushLocal/index.js +1 -1
- package/dist/index.d.ts +3 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/store/tableFilters.d.ts +3 -3
- package/dist/store/tableFilters.d.ts.map +1 -1
- package/dist/store/tx.d.ts +3 -0
- package/dist/store/tx.d.ts.map +1 -0
- package/dist/store/tx.js +1 -0
- package/dist/types/expoSqlite.types.d.ts +3 -13
- package/dist/types/index.d.ts +1 -1
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/liteQuery.types.d.ts +2 -2
- package/dist/types/query.types.d.ts +24 -1
- package/dist/types/realtimeData.types.d.ts +8 -4
- package/dist/types/supastashConfig.types.d.ts +83 -6
- package/dist/types/syncEngine.types.d.ts +49 -7
- package/dist/utils/errorHandler.d.ts +6 -0
- package/dist/utils/errorHandler.d.ts.map +1 -0
- package/dist/utils/errorHandler.js +8 -0
- package/dist/utils/fetchData/buildFilter.d.ts +8 -4
- package/dist/utils/fetchData/buildFilter.d.ts.map +1 -1
- package/dist/utils/fetchData/createTable.d.ts.map +1 -1
- package/dist/utils/fetchData/createTable.js +3 -46
- package/dist/utils/fetchData/deleteData.d.ts.map +1 -1
- package/dist/utils/fetchData/deleteData.js +6 -3
- package/dist/utils/fetchData/fetchLocalData.d.ts +2 -2
- package/dist/utils/fetchData/fetchLocalData.d.ts.map +1 -1
- package/dist/utils/fetchData/initialFetch.d.ts +2 -2
- package/dist/utils/fetchData/initialFetch.d.ts.map +1 -1
- package/dist/utils/fetchData/liteHelpers.d.ts +2 -2
- package/dist/utils/fetchData/liteHelpers.d.ts.map +1 -1
- package/dist/utils/fetchData/liteHelpers.js +14 -119
- package/dist/utils/fetchData/realTimeCall.js +2 -2
- package/dist/utils/fetchData/receiveData.js +1 -1
- package/dist/utils/query/builder/crud.d.ts +5 -5
- package/dist/utils/query/builder/crud.d.ts.map +1 -1
- package/dist/utils/query/builder/filters.d.ts +36 -12
- package/dist/utils/query/builder/filters.d.ts.map +1 -1
- package/dist/utils/query/builder/filters.js +32 -0
- package/dist/utils/query/builder/index.d.ts +29 -1
- package/dist/utils/query/builder/index.d.ts.map +1 -1
- package/dist/utils/query/builder/index.js +77 -1
- package/dist/utils/query/builder/mainQuery.d.ts.map +1 -1
- package/dist/utils/query/builder/mainQuery.js +17 -2
- package/dist/utils/query/helpers/localDb/getLocalMethod.d.ts +2 -2
- package/dist/utils/query/helpers/localDb/getLocalMethod.d.ts.map +1 -1
- package/dist/utils/query/helpers/localDb/getLocalMethod.js +7 -6
- package/dist/utils/query/helpers/localDb/insertMany.d.ts +3 -0
- package/dist/utils/query/helpers/localDb/insertMany.d.ts.map +1 -1
- package/dist/utils/query/helpers/localDb/insertMany.js +10 -3
- package/dist/utils/query/helpers/localDb/localQueryBuilder.d.ts +6 -6
- package/dist/utils/query/helpers/localDb/localQueryBuilder.d.ts.map +1 -1
- package/dist/utils/query/helpers/localDb/localQueryBuilder.js +17 -10
- package/dist/utils/query/helpers/localDb/upsertMany.d.ts +3 -0
- package/dist/utils/query/helpers/localDb/upsertMany.d.ts.map +1 -1
- package/dist/utils/query/helpers/localDb/upsertMany.js +9 -2
- package/dist/utils/query/helpers/mainQueryHelpers.d.ts.map +1 -1
- package/dist/utils/query/helpers/mainQueryHelpers.js +38 -8
- package/dist/utils/query/helpers/queueRemote.d.ts.map +1 -1
- package/dist/utils/query/helpers/queueRemote.js +33 -24
- package/dist/utils/query/localDbQuery/delete.d.ts +9 -3
- package/dist/utils/query/localDbQuery/delete.d.ts.map +1 -1
- package/dist/utils/query/localDbQuery/delete.js +16 -5
- package/dist/utils/query/localDbQuery/index.d.ts.map +1 -1
- package/dist/utils/query/localDbQuery/index.js +3 -3
- package/dist/utils/query/localDbQuery/insert.d.ts +2 -2
- package/dist/utils/query/localDbQuery/insert.d.ts.map +1 -1
- package/dist/utils/query/localDbQuery/insert.js +8 -5
- package/dist/utils/query/localDbQuery/select.d.ts +2 -2
- package/dist/utils/query/localDbQuery/select.d.ts.map +1 -1
- package/dist/utils/query/localDbQuery/select.js +5 -2
- package/dist/utils/query/localDbQuery/update.d.ts +2 -2
- package/dist/utils/query/localDbQuery/update.d.ts.map +1 -1
- package/dist/utils/query/localDbQuery/update.js +5 -2
- package/dist/utils/query/localDbQuery/upsert.d.ts +2 -2
- package/dist/utils/query/localDbQuery/upsert.d.ts.map +1 -1
- package/dist/utils/query/localDbQuery/upsert.js +8 -3
- package/dist/utils/query/remoteQuery/supabaseQuery.d.ts.map +1 -1
- package/dist/utils/query/remoteQuery/supabaseQuery.js +4 -1
- package/dist/utils/reusedHelpers.d.ts +8 -0
- package/dist/utils/reusedHelpers.d.ts.map +1 -0
- package/dist/utils/reusedHelpers.js +162 -0
- package/dist/utils/schema/createSyncStatus.d.ts +3 -1
- package/dist/utils/schema/createSyncStatus.d.ts.map +1 -1
- package/dist/utils/schema/createSyncStatus.js +30 -3
- package/dist/utils/sync/pullFromRemote/fetchOlder.d.ts +44 -0
- package/dist/utils/sync/pullFromRemote/fetchOlder.d.ts.map +1 -0
- package/dist/utils/sync/pullFromRemote/fetchOlder.js +55 -0
- package/dist/utils/sync/pullFromRemote/fetchOlderHelpers.d.ts +33 -0
- package/dist/utils/sync/pullFromRemote/fetchOlderHelpers.d.ts.map +1 -0
- package/dist/utils/sync/pullFromRemote/fetchOlderHelpers.js +110 -0
- package/dist/utils/sync/pullFromRemote/helpers.d.ts +10 -7
- package/dist/utils/sync/pullFromRemote/helpers.d.ts.map +1 -1
- package/dist/utils/sync/pullFromRemote/helpers.js +20 -14
- package/dist/utils/sync/pullFromRemote/pullData.d.ts +2 -3
- package/dist/utils/sync/pullFromRemote/pullData.d.ts.map +1 -1
- package/dist/utils/sync/pullFromRemote/pullData.js +4 -9
- package/dist/utils/sync/pullFromRemote/pullDeletedData.d.ts +8 -5
- package/dist/utils/sync/pullFromRemote/updateFilter.d.ts +1 -1
- package/dist/utils/sync/pullFromRemote/updateFilter.d.ts.map +1 -1
- package/dist/utils/sync/pullFromRemote/updateFilter.js +5 -3
- package/dist/utils/sync/pullFromRemote/updateLocalDb.d.ts +4 -3
- package/dist/utils/sync/pullFromRemote/updateLocalDb.d.ts.map +1 -1
- package/dist/utils/sync/pullFromRemote/updateLocalDb.js +51 -46
- package/dist/utils/sync/pullFromRemote/validateFilters.d.ts +2 -4
- package/dist/utils/sync/pullFromRemote/validateFilters.d.ts.map +1 -1
- package/dist/utils/sync/pullFromRemote/validateFilters.js +9 -63
- package/dist/utils/sync/pushLocal/deleteChunks.d.ts.map +1 -1
- package/dist/utils/sync/pushLocal/deleteChunks.js +7 -5
- package/dist/utils/sync/pushLocal/getAllUnsyncedData.d.ts.map +1 -1
- package/dist/utils/sync/pushLocal/getAllUnsyncedData.js +12 -44
- package/dist/utils/sync/pushLocal/uploadHelpers.js +1 -1
- package/dist/utils/sync/status/filterKey.d.ts +3 -3
- package/dist/utils/sync/status/filterKey.d.ts.map +1 -1
- package/dist/utils/sync/status/filterKey.js +5 -2
- package/dist/utils/sync/status/remoteSchema.d.ts +4 -0
- package/dist/utils/sync/status/remoteSchema.d.ts.map +1 -0
- package/dist/utils/sync/status/remoteSchema.js +140 -0
- package/dist/utils/sync/status/repo.d.ts +5 -5
- package/dist/utils/sync/status/repo.d.ts.map +1 -1
- package/dist/utils/sync/status/repo.js +29 -23
- package/dist/utils/sync/status/services.d.ts +5 -6
- package/dist/utils/sync/status/services.d.ts.map +1 -1
- package/dist/utils/sync/status/services.js +1 -6
- package/dist/utils/sync/status/syncStatus.d.ts +5 -7
- package/dist/utils/sync/status/syncStatus.d.ts.map +1 -1
- package/dist/utils/sync/status/syncStatus.js +11 -3
- package/package.json +2 -2
- package/dist/types/supastashFilters.types.d.ts +0 -3
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
const validOperators = new Set([
|
|
2
|
+
"eq",
|
|
3
|
+
"neq",
|
|
4
|
+
"gt",
|
|
5
|
+
"lt",
|
|
6
|
+
"gte",
|
|
7
|
+
"lte",
|
|
8
|
+
"is",
|
|
9
|
+
"in",
|
|
10
|
+
]);
|
|
11
|
+
export const ReusedHelpers = {
|
|
12
|
+
isValidFilter(filters) {
|
|
13
|
+
if (!Array.isArray(filters))
|
|
14
|
+
return false;
|
|
15
|
+
const validateNode = (node) => {
|
|
16
|
+
if (!node || typeof node !== "object")
|
|
17
|
+
return false;
|
|
18
|
+
// OR group
|
|
19
|
+
if ("or" in node) {
|
|
20
|
+
if (!Array.isArray(node.or) || node.or.length === 0) {
|
|
21
|
+
return false;
|
|
22
|
+
}
|
|
23
|
+
return node.or.every(validateNode);
|
|
24
|
+
}
|
|
25
|
+
// Condition
|
|
26
|
+
const { column, operator, value } = node;
|
|
27
|
+
if (typeof column !== "string" || column.trim() === "") {
|
|
28
|
+
return false;
|
|
29
|
+
}
|
|
30
|
+
if (!validOperators.has(operator)) {
|
|
31
|
+
return false;
|
|
32
|
+
}
|
|
33
|
+
switch (operator) {
|
|
34
|
+
case "is":
|
|
35
|
+
return (value === null ||
|
|
36
|
+
value === true ||
|
|
37
|
+
value === false ||
|
|
38
|
+
value === "null" ||
|
|
39
|
+
value === "true" ||
|
|
40
|
+
value === "false");
|
|
41
|
+
case "in":
|
|
42
|
+
return Array.isArray(value) && value.length > 0;
|
|
43
|
+
default:
|
|
44
|
+
return value !== undefined && value !== null;
|
|
45
|
+
}
|
|
46
|
+
};
|
|
47
|
+
return filters.every(validateNode);
|
|
48
|
+
},
|
|
49
|
+
applyFilters(q, filters, table) {
|
|
50
|
+
for (const f of filters) {
|
|
51
|
+
// OR group
|
|
52
|
+
if ("or" in f) {
|
|
53
|
+
if (!Array.isArray(f.or) || f.or.length === 0) {
|
|
54
|
+
throw new Error(`Invalid OR filter for ${table}`);
|
|
55
|
+
}
|
|
56
|
+
const orString = f.or
|
|
57
|
+
.map((sub) => {
|
|
58
|
+
if (!ReusedHelpers.isValidFilter([sub])) {
|
|
59
|
+
throw new Error(`Invalid OR syncFilter: ${JSON.stringify(sub)} for ${table}`);
|
|
60
|
+
}
|
|
61
|
+
return `${String(sub.column)}.${sub.operator}.${sub.value}`;
|
|
62
|
+
})
|
|
63
|
+
.join(",");
|
|
64
|
+
q = q.or(orString);
|
|
65
|
+
continue;
|
|
66
|
+
}
|
|
67
|
+
// Normal AND filter
|
|
68
|
+
if (!ReusedHelpers.isValidFilter([f])) {
|
|
69
|
+
throw new Error(`Invalid syncFilter: ${JSON.stringify(f)} for ${table}`);
|
|
70
|
+
}
|
|
71
|
+
q = q[f.operator](f.column, f.value);
|
|
72
|
+
}
|
|
73
|
+
return q;
|
|
74
|
+
},
|
|
75
|
+
buildFilterString(filter) {
|
|
76
|
+
if (!filter)
|
|
77
|
+
return undefined;
|
|
78
|
+
// OR group
|
|
79
|
+
if ("or" in filter) {
|
|
80
|
+
if (!Array.isArray(filter.or) || filter.or.length === 0) {
|
|
81
|
+
return undefined;
|
|
82
|
+
}
|
|
83
|
+
const inner = filter.or
|
|
84
|
+
.map((f) => {
|
|
85
|
+
if ("and" in f || "or" in f) {
|
|
86
|
+
return undefined; // nested groups not supported here
|
|
87
|
+
}
|
|
88
|
+
if (f.value === null) {
|
|
89
|
+
return `${String(f.column)}.${f.operator}.null`;
|
|
90
|
+
}
|
|
91
|
+
if (f.operator === "in" && Array.isArray(f.value)) {
|
|
92
|
+
return `${String(f.column)}.in.(${f.value.join(",")})`;
|
|
93
|
+
}
|
|
94
|
+
return `${String(f.column)}.${f.operator}.${f.value}`;
|
|
95
|
+
})
|
|
96
|
+
.filter(Boolean)
|
|
97
|
+
.join(",");
|
|
98
|
+
return inner ? `or=(${inner})` : undefined;
|
|
99
|
+
}
|
|
100
|
+
// Condition
|
|
101
|
+
const { column, operator, value } = filter;
|
|
102
|
+
if (value === null) {
|
|
103
|
+
return `${String(column)}=${operator}.null`;
|
|
104
|
+
}
|
|
105
|
+
if (operator === "in" && Array.isArray(value)) {
|
|
106
|
+
return `${String(column)}=in.(${value.join(",")})`;
|
|
107
|
+
}
|
|
108
|
+
return `${String(column)}=${operator}.${value}`;
|
|
109
|
+
},
|
|
110
|
+
buildFilterForSql(filter) {
|
|
111
|
+
if (!filter)
|
|
112
|
+
return undefined;
|
|
113
|
+
// OR group
|
|
114
|
+
if ("or" in filter) {
|
|
115
|
+
const parts = filter.or
|
|
116
|
+
.map(ReusedHelpers.buildFilterForSql)
|
|
117
|
+
.filter(Boolean);
|
|
118
|
+
if (parts.length === 0)
|
|
119
|
+
return undefined;
|
|
120
|
+
return `(${parts.join(" OR ")})`;
|
|
121
|
+
}
|
|
122
|
+
const { column, operator, value } = filter;
|
|
123
|
+
switch (operator) {
|
|
124
|
+
case "eq":
|
|
125
|
+
return value === null
|
|
126
|
+
? `${String(column)} IS NULL`
|
|
127
|
+
: `${String(column)} = ${sqlValue(value)}`;
|
|
128
|
+
case "neq":
|
|
129
|
+
return value === null
|
|
130
|
+
? `${String(column)} IS NOT NULL`
|
|
131
|
+
: `${String(column)} != ${sqlValue(value)}`;
|
|
132
|
+
case "gt":
|
|
133
|
+
return `${String(column)} > ${sqlValue(value)}`;
|
|
134
|
+
case "lt":
|
|
135
|
+
return `${String(column)} < ${sqlValue(value)}`;
|
|
136
|
+
case "gte":
|
|
137
|
+
return `${String(column)} >= ${sqlValue(value)}`;
|
|
138
|
+
case "lte":
|
|
139
|
+
return `${String(column)} <= ${sqlValue(value)}`;
|
|
140
|
+
case "in":
|
|
141
|
+
if (!Array.isArray(value))
|
|
142
|
+
return undefined;
|
|
143
|
+
return `${String(column)} IN (${value.map(sqlValue).join(", ")})`;
|
|
144
|
+
case "is":
|
|
145
|
+
return `${String(column)} IS ${sqlValue(value)}`;
|
|
146
|
+
default:
|
|
147
|
+
throw new Error(`Unsupported operator: ${operator}`);
|
|
148
|
+
}
|
|
149
|
+
},
|
|
150
|
+
};
|
|
151
|
+
function sqlValue(val) {
|
|
152
|
+
if (Array.isArray(val)) {
|
|
153
|
+
return val.map(sqlValue).join(", ");
|
|
154
|
+
}
|
|
155
|
+
if (typeof val === "boolean")
|
|
156
|
+
return val ? "1" : "0";
|
|
157
|
+
if (val === null)
|
|
158
|
+
return "NULL";
|
|
159
|
+
if (typeof val === "number")
|
|
160
|
+
return val.toString();
|
|
161
|
+
return `'${val.replace(/'/g, "''")}'`; // Escape single quotes
|
|
162
|
+
}
|
|
@@ -5,9 +5,11 @@
|
|
|
5
5
|
* @deprecated Use createSyncStatusTable instead
|
|
6
6
|
*/
|
|
7
7
|
export declare function createDeletedStatusTable(): Promise<void>;
|
|
8
|
-
export declare const SYNC_STATUS_TABLES_SQL = "\n CREATE TABLE IF NOT EXISTS supastash_sync_marks (\n table_name TEXT NOT NULL,\n filter_key TEXT NOT NULL, \n filter_json TEXT NULL, \n
|
|
8
|
+
export declare const SYNC_STATUS_TABLES_SQL = "\n CREATE TABLE IF NOT EXISTS supastash_sync_marks (\n table_name TEXT NOT NULL,\n filter_key TEXT NOT NULL, \n filter_json TEXT NULL, \n last_synced_at TEXT NULL, \n last_synced_at_pk TEXT NULL, \n last_deleted_at TEXT NULL, \n updated_at TEXT NOT NULL DEFAULT (datetime('now')),\n PRIMARY KEY (table_name, filter_key)\n);";
|
|
9
|
+
export declare const SERVER_SYNC_STATUS_TABLES_SQL = "\n CREATE TABLE IF NOT EXISTS supastash_server_sync_marks (\n table_name TEXT NOT NULL,\n filter_key TEXT NOT NULL,\n filter_json TEXT NULL,\n\n last_deleted_at TEXT NULL,\n\n last_synced_at TEXT NULL, -- represents arrived_at\n last_synced_at_pk TEXT NULL, -- composite cursor safety\n\n updated_at TEXT NOT NULL DEFAULT (datetime('now')),\n\n PRIMARY KEY (table_name, filter_key)\n );\n";
|
|
9
10
|
export declare const ADD_PK_TO_SYNC_MARKS_SQL = "\n ALTER TABLE supastash_sync_marks ADD COLUMN last_synced_at_pk TEXT NULL;\n";
|
|
10
11
|
export declare const INDEX_SYNC_MARKS_SQL = "\n CREATE INDEX IF NOT EXISTS idx_supastash_marks_updated\n ON supastash_sync_marks(updated_at);\n";
|
|
12
|
+
export declare const INDEX_SERVER_SYNC_MARKS_SQL = "\n CREATE INDEX IF NOT EXISTS idx_supastash_server_marks_updated\n ON supastash_server_sync_marks(updated_at);\n";
|
|
11
13
|
/**
|
|
12
14
|
* Creates the supastash_sync_marks table if it doesn't exist
|
|
13
15
|
*
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"createSyncStatus.d.ts","sourceRoot":"","sources":["../../../src/utils/schema/createSyncStatus.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"createSyncStatus.d.ts","sourceRoot":"","sources":["../../../src/utils/schema/createSyncStatus.ts"],"names":[],"mappings":"AAGA;;;;;GAKG;AACH,wBAAsB,wBAAwB,kBAE7C;AAED,eAAO,MAAM,sBAAsB,0ZAUhC,CAAC;AAEJ,eAAO,MAAM,6BAA6B,6eAezC,CAAC;AAEF,eAAO,MAAM,wBAAwB,mFAEpC,CAAC;AAEF,eAAO,MAAM,oBAAoB,2GAGhC,CAAC;AAEF,eAAO,MAAM,2BAA2B,yHAGvC,CAAC;AAIF;;;;GAIG;AACH,wBAAsB,qBAAqB,kBAkB1C"}
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { getSupastashConfig } from "../../core/config";
|
|
1
2
|
import { getSupastashDb } from "../../db/dbInitializer";
|
|
2
3
|
/**
|
|
3
4
|
* Creates the supastash_deleted_status table if it doesn't exist
|
|
@@ -13,13 +14,28 @@ export const SYNC_STATUS_TABLES_SQL = `
|
|
|
13
14
|
table_name TEXT NOT NULL,
|
|
14
15
|
filter_key TEXT NOT NULL,
|
|
15
16
|
filter_json TEXT NULL,
|
|
16
|
-
last_created_at TEXT NULL,
|
|
17
17
|
last_synced_at TEXT NULL,
|
|
18
18
|
last_synced_at_pk TEXT NULL,
|
|
19
19
|
last_deleted_at TEXT NULL,
|
|
20
20
|
updated_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
21
21
|
PRIMARY KEY (table_name, filter_key)
|
|
22
22
|
);`;
|
|
23
|
+
export const SERVER_SYNC_STATUS_TABLES_SQL = `
|
|
24
|
+
CREATE TABLE IF NOT EXISTS supastash_server_sync_marks (
|
|
25
|
+
table_name TEXT NOT NULL,
|
|
26
|
+
filter_key TEXT NOT NULL,
|
|
27
|
+
filter_json TEXT NULL,
|
|
28
|
+
|
|
29
|
+
last_deleted_at TEXT NULL,
|
|
30
|
+
|
|
31
|
+
last_synced_at TEXT NULL, -- represents arrived_at
|
|
32
|
+
last_synced_at_pk TEXT NULL, -- composite cursor safety
|
|
33
|
+
|
|
34
|
+
updated_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
35
|
+
|
|
36
|
+
PRIMARY KEY (table_name, filter_key)
|
|
37
|
+
);
|
|
38
|
+
`;
|
|
23
39
|
export const ADD_PK_TO_SYNC_MARKS_SQL = `
|
|
24
40
|
ALTER TABLE supastash_sync_marks ADD COLUMN last_synced_at_pk TEXT NULL;
|
|
25
41
|
`;
|
|
@@ -27,6 +43,10 @@ export const INDEX_SYNC_MARKS_SQL = `
|
|
|
27
43
|
CREATE INDEX IF NOT EXISTS idx_supastash_marks_updated
|
|
28
44
|
ON supastash_sync_marks(updated_at);
|
|
29
45
|
`;
|
|
46
|
+
export const INDEX_SERVER_SYNC_MARKS_SQL = `
|
|
47
|
+
CREATE INDEX IF NOT EXISTS idx_supastash_server_marks_updated
|
|
48
|
+
ON supastash_server_sync_marks(updated_at);
|
|
49
|
+
`;
|
|
30
50
|
let addedPk = false;
|
|
31
51
|
/**
|
|
32
52
|
* Creates the supastash_sync_marks table if it doesn't exist
|
|
@@ -35,7 +55,15 @@ let addedPk = false;
|
|
|
35
55
|
*/
|
|
36
56
|
export async function createSyncStatusTable() {
|
|
37
57
|
const db = await getSupastashDb();
|
|
38
|
-
|
|
58
|
+
const cfg = getSupastashConfig();
|
|
59
|
+
if (cfg.replicationMode === "server-side") {
|
|
60
|
+
await db.execAsync(SERVER_SYNC_STATUS_TABLES_SQL);
|
|
61
|
+
await db.execAsync(INDEX_SERVER_SYNC_MARKS_SQL);
|
|
62
|
+
}
|
|
63
|
+
else {
|
|
64
|
+
await db.execAsync(SYNC_STATUS_TABLES_SQL);
|
|
65
|
+
await db.execAsync(INDEX_SYNC_MARKS_SQL);
|
|
66
|
+
}
|
|
39
67
|
try {
|
|
40
68
|
if (addedPk)
|
|
41
69
|
return;
|
|
@@ -45,5 +73,4 @@ export async function createSyncStatusTable() {
|
|
|
45
73
|
catch (error) {
|
|
46
74
|
// Ignore error if column already exists
|
|
47
75
|
}
|
|
48
|
-
await db.execAsync(INDEX_SYNC_MARKS_SQL);
|
|
49
76
|
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { PayloadData } from "../../../types/query.types";
import { SupastashFilter } from "../../../types/realtimeData.types";
/**
 * Fetches a backward page of records from the server relative to the current
 * earliest locally stored row.
 *
 * This function performs cursor-based backward pagination by:
 * - Determining the earliest known local record for the table
 * - Validating the provided boundary timestamp
 * - Fetching older records from the server up to the specified limit
 * - Optionally persisting the results into the local database
 *
 * Designed for infinite scroll and historical lookback scenarios.
 *
 * @returns An object containing:
 * - `data`: The fetched records
 * - `hasMore`: Whether additional older records may exist
 */
export declare function fetchOlderPage({ boundaryTs, table, filters, limit, shouldStoreToLocalDb, }: {
    /**
     * The timestamp to cap lookback at.
     */
    boundaryTs: string;
    /**
     * The table to fetch data from.
     */
    table: string;
    /**
     * The filters to apply to the data.
     */
    filters?: SupastashFilter[];
    /**
     * The maximum number of records to fetch.
     */
    limit: number;
    /**
     * Whether to persist the fetched records into the local database.
     * Defaults to true in the implementation.
     */
    shouldStoreToLocalDb?: boolean;
}): Promise<{
    hasMore: boolean;
    data: PayloadData[];
}>;
//# sourceMappingURL=fetchOlder.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fetchOlder.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/fetchOlder.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AACzD,OAAO,EAAE,eAAe,EAAE,MAAM,mCAAmC,CAAC;AAIpE;;;;;;;;;;;;;;;GAeG;AACH,wBAAsB,cAAc,CAAC,EACnC,UAAU,EACV,KAAK,EACL,OAAO,EACP,KAAK,EACL,oBAA2B,GAC5B,EAAE;IACD;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,OAAO,CAAC,EAAE,eAAe,EAAE,CAAC;IAE5B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAC;CAChC,GAAG,OAAO,CAAC;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,IAAI,EAAE,WAAW,EAAE,CAAA;CAAE,CAAC,CAwCrD"}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { getSupastashConfig } from "../../../core/config";
import { logWarn } from "../../logs";
import { FetchOlderHelpers } from "./fetchOlderHelpers";
/**
 * Pulls one backward page of server records, anchored at the earliest
 * row currently stored locally for `table`.
 *
 * Steps:
 * - Short-circuits in ghost mode (no server traffic).
 * - Resolves the earliest local (created_at, id) cursor.
 * - Validates `boundaryTs` against that cursor when provided.
 * - Fetches up to `limit` older rows from the server.
 * - Optionally persists them into the local database.
 *
 * Intended for infinite scroll / historical lookback.
 *
 * @returns `{ data, hasMore }` where `hasMore` signals that older
 *   records may still exist server-side.
 */
export async function fetchOlderPage({ boundaryTs, table, filters, limit, shouldStoreToLocalDb = true, }) {
    try {
        // Ghost mode never talks to the server.
        if (getSupastashConfig().supastashMode === "ghost") {
            return { hasMore: false, data: [] };
        }
        // Anchor the backward cursor at the earliest local row.
        const earliestLocal = await FetchOlderHelpers.getLookbackDays({
            table,
            filters,
        });
        if (boundaryTs) {
            FetchOlderHelpers.validateBoundaryTs({
                boundaryTs,
                earliestDate: earliestLocal.createdAt,
            });
        }
        const rows = await FetchOlderHelpers.fetchData({
            table,
            filters,
            limit,
            boundaryTs,
            earliestDate: earliestLocal.createdAt,
            earliestId: earliestLocal.id,
        });
        if (shouldStoreToLocalDb) {
            await FetchOlderHelpers.storeToDb({ table, data: rows });
        }
        if (rows.length > limit) {
            logWarn(`[Supastash] Pagination overflow detected for table ${table}`);
        }
        // A full page implies older rows may still exist server-side.
        return { hasMore: rows.length === limit, data: rows };
    }
    catch (error) {
        return FetchOlderHelpers.handleError(error);
    }
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { PayloadData } from "../../../types/query.types";
|
|
2
|
+
import { SupastashFilter } from "../../../types/realtimeData.types";
|
|
3
|
+
import { SupastashError } from "../../errorHandler";
|
|
4
|
+
export declare const FetchOlderHelpers: {
|
|
5
|
+
validateBoundaryTs({ boundaryTs, earliestDate, }: {
|
|
6
|
+
boundaryTs: string;
|
|
7
|
+
earliestDate: string;
|
|
8
|
+
}): void;
|
|
9
|
+
getLookbackDays({ table, filters, }: {
|
|
10
|
+
table: string;
|
|
11
|
+
filters?: SupastashFilter[];
|
|
12
|
+
}): Promise<{
|
|
13
|
+
createdAt: any;
|
|
14
|
+
id: any;
|
|
15
|
+
}>;
|
|
16
|
+
fetchData({ table, filters, limit, earliestDate, boundaryTs, earliestId, }: {
|
|
17
|
+
table: string;
|
|
18
|
+
filters?: SupastashFilter[];
|
|
19
|
+
limit: number;
|
|
20
|
+
boundaryTs?: string;
|
|
21
|
+
earliestDate: string;
|
|
22
|
+
earliestId: string;
|
|
23
|
+
}): Promise<any[]>;
|
|
24
|
+
handleError(error: SupastashError): {
|
|
25
|
+
hasMore: boolean;
|
|
26
|
+
data: PayloadData[];
|
|
27
|
+
};
|
|
28
|
+
storeToDb({ table, data }: {
|
|
29
|
+
table: string;
|
|
30
|
+
data: PayloadData[];
|
|
31
|
+
}): Promise<void>;
|
|
32
|
+
};
|
|
33
|
+
//# sourceMappingURL=fetchOlderHelpers.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fetchOlderHelpers.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/fetchOlderHelpers.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AACzD,OAAO,EAAE,eAAe,EAAE,MAAM,mCAAmC,CAAC;AAEpE,OAAO,EAAE,cAAc,EAAE,MAAM,oBAAoB,CAAC;AAQpD,eAAO,MAAM,iBAAiB;sDAIzB;QACD,UAAU,EAAE,MAAM,CAAC;QACnB,YAAY,EAAE,MAAM,CAAC;KACtB;yCAmBE;QACD,KAAK,EAAE,MAAM,CAAC;QACd,OAAO,CAAC,EAAE,eAAe,EAAE,CAAC;KAC7B;;;;gFA8BE;QACD,KAAK,EAAE,MAAM,CAAC;QACd,OAAO,CAAC,EAAE,eAAe,EAAE,CAAC;QAC5B,KAAK,EAAE,MAAM,CAAC;QACd,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,YAAY,EAAE,MAAM,CAAC;QACrB,UAAU,EAAE,MAAM,CAAC;KACpB;uBAqDkB,cAAc,GAAG;QAClC,OAAO,EAAE,OAAO,CAAC;QACjB,IAAI,EAAE,WAAW,EAAE,CAAC;KACrB;+BAagC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,WAAW,EAAE,CAAA;KAAE;CAcxE,CAAC"}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
import { getSupastashConfig } from "../../../core/config";
import { getSupastashDb } from "../../../db/dbInitializer";
import { ReusedHelpers } from "../../../utils/reusedHelpers";
import { SupastashError } from "../../errorHandler";
import { logWarn } from "../../logs";
import { supabaseClientErr } from "../../supabaseClientErr";
import { upsertData } from "./updateLocalDb";
// Sentinel cursor used when the local table has no rows yet.
const DEFAULT_DATE = "2000-01-01T00:00:00Z";
const DEFAULT_ID = "00000000-0000-0000-0000-000000000000";
/**
 * Internal helpers backing `fetchOlderPage` (backward pagination).
 */
export const FetchOlderHelpers = {
    /**
     * Throws a SupastashError when `boundaryTs` is unparseable or newer
     * than the earliest locally stored row.
     */
    validateBoundaryTs({ boundaryTs, earliestDate, }) {
        const boundaryTsDate = new Date(boundaryTs);
        if (isNaN(boundaryTsDate.getTime())) {
            logWarn(`Invalid boundary timestamp: ${boundaryTs}`);
            throw new SupastashError(`Invalid boundary timestamp: ${boundaryTs}`, "INVALID_BOUNDARY_TS");
        }
        if (boundaryTsDate > new Date(earliestDate)) {
            throw new SupastashError(`Boundary timestamp is more recent than oldest data: ${boundaryTs} > ${earliestDate}`, "BOUNDARY_TS_MORE_RECENT_THAN_OLDEST_DATA");
        }
    },
    /**
     * Resolves the earliest local (created_at, id) cursor for `table`,
     * honoring the caller-supplied filters. Falls back to sentinel
     * defaults when no matching row exists.
     */
    async getLookbackDays({ table, filters, }) {
        // Build a proper WHERE clause. (Bug fix: fragments were previously
        // concatenated with neither `WHERE` nor `AND` — and `undefined`
        // results were stringified — producing invalid SQL whenever
        // filters were supplied.)
        const clauses = (filters ?? [])
            .map((filter) => ReusedHelpers.buildFilterForSql(filter))
            .filter(Boolean);
        const whereSql = clauses.length > 0 ? `WHERE ${clauses.join(" AND ")}` : "";
        // Fetch the earliest date and id
        const db = await getSupastashDb();
        const result = await db.getFirstAsync(`
    SELECT id, created_at FROM ${table}
    ${whereSql}
    ORDER BY created_at ASC, id ASC
    LIMIT 1
  `);
        if (!result)
            return { createdAt: DEFAULT_DATE, id: DEFAULT_ID };
        return { createdAt: result.created_at, id: result.id };
    },
    /**
     * Pages backward through server rows strictly older than the
     * (earliestDate, earliestId) composite cursor, floored at
     * `boundaryTs` (or DEFAULT_DATE) and capped at `limit` rows total.
     */
    async fetchData({ table, filters, limit, earliestDate, boundaryTs, earliestId, }) {
        const supabase = getSupastashConfig().supabaseClient;
        if (!supabase)
            throw new Error(`No supabase client found: ${supabaseClientErr}`);
        let cursorId = earliestId;
        let cursorDate = earliestDate;
        const results = [];
        let remainingLimit = limit;
        while (true) {
            // Request at most 1000 rows per round trip.
            const pageSize = Math.min(1000, remainingLimit);
            // Build the query
            let q = supabase
                .from(table)
                .select("*")
                .limit(pageSize)
                .order("created_at", { ascending: false })
                .order("id", { ascending: false })
                .is("deleted_at", null);
            // Strictly-older-than composite cursor, floored at the boundary.
            q = q
                .or(`created_at.lt.${cursorDate},and(created_at.eq.${cursorDate},id.lt.${cursorId})`)
                .gte("created_at", boundaryTs ?? DEFAULT_DATE);
            if (filters) {
                q = ReusedHelpers.applyFilters(q, filters, table);
            }
            const { data, error } = await q;
            if (error)
                throw error;
            if (!data || data.length === 0)
                break;
            results.push(...data);
            // Advance the cursor past the last (oldest) row of this page.
            const last = data[data.length - 1];
            cursorId = last.id;
            cursorDate = last.created_at;
            if (data.length < pageSize) {
                break;
            }
            if (remainingLimit <= pageSize) {
                break;
            }
            remainingLimit -= pageSize;
        }
        return results;
    },
    /**
     * Maps known boundary-validation error codes to an empty,
     * non-paginating result; rethrows anything unrecognized.
     */
    handleError(error) {
        switch (error.code) {
            case "INVALID_BOUNDARY_TS":
            case "BOUNDARY_TS_MORE_RECENT_THAN_OLDEST_DATA":
                logWarn(error.message);
                return { hasMore: false, data: [] };
            default:
                throw error;
        }
    },
    /**
     * Upserts fetched rows into the local table inside one transaction,
     * yielding to the event loop every `batchSize` rows so long batches
     * don't starve the JS thread.
     */
    async storeToDb({ table, data }) {
        if (data.length === 0)
            return;
        const batchSize = 500;
        const db = await getSupastashDb();
        await db.withTransaction(async (tx) => {
            for (let i = 0; i < data.length; i++) {
                await upsertData({ tx, table, record: data[i] });
                if ((i + 1) % batchSize === 0) {
                    await new Promise((res) => setTimeout(res, 0));
                }
            }
        });
    },
};
|
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
import { PayloadData } from "../../../types/query.types";
|
|
2
|
-
import {
|
|
2
|
+
import { SupastashFilter } from "../../../types/realtimeData.types";
|
|
3
3
|
import { ReceivedDataCompleted } from "../../../types/syncEngine.types";
|
|
4
4
|
export declare function pageThrough(base: {
|
|
5
|
-
tsCol: "
|
|
5
|
+
tsCol: "arrived_at" | "updated_at";
|
|
6
6
|
since: string;
|
|
7
7
|
table: string;
|
|
8
8
|
select?: string;
|
|
9
|
-
filters?:
|
|
9
|
+
filters?: SupastashFilter[];
|
|
10
10
|
includeDeleted?: boolean;
|
|
11
11
|
batchId: string;
|
|
12
12
|
previousPk?: string | null;
|
|
@@ -17,21 +17,24 @@ export declare function returnMaxDate({ row, prevMax, col, }: {
|
|
|
17
17
|
value: string;
|
|
18
18
|
pk: string | null;
|
|
19
19
|
} | null;
|
|
20
|
-
col: "
|
|
20
|
+
col: "arrived_at" | "updated_at" | "deleted_at";
|
|
21
21
|
}): {
|
|
22
22
|
value: string;
|
|
23
23
|
pk: string | null;
|
|
24
24
|
} | null;
|
|
25
|
-
export declare function getMaxDate(rows: PayloadData[], col: "
|
|
25
|
+
export declare function getMaxDate(rows: PayloadData[], col: "arrived_at" | "updated_at"): string | null;
|
|
26
26
|
export declare function logNoUpdates(table: string): void;
|
|
27
27
|
export declare function getReceivedDataCompleted({ batchId, col, }: {
|
|
28
28
|
batchId: string;
|
|
29
|
-
col: "
|
|
29
|
+
col: "arrived_at" | "updated_at";
|
|
30
30
|
}): ReceivedDataCompleted;
|
|
31
31
|
export declare function setReceivedDataCompleted({ batchId, col, completed, }: {
|
|
32
32
|
batchId: string;
|
|
33
|
-
col: "
|
|
33
|
+
col: "arrived_at" | "updated_at";
|
|
34
34
|
completed: ReceivedDataCompleted;
|
|
35
35
|
}): void;
|
|
36
36
|
export declare function deleteReceivedDataCompleted(batchId: string): void;
|
|
37
|
+
export declare function getMaxSyncLookBack({ table, }: {
|
|
38
|
+
table: string;
|
|
39
|
+
}): string | undefined;
|
|
37
40
|
//# sourceMappingURL=helpers.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"helpers.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/helpers.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AACzD,OAAO,EAAE,
|
|
1
|
+
{"version":3,"file":"helpers.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/helpers.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AACzD,OAAO,EAAE,eAAe,EAAE,MAAM,mCAAmC,CAAC;AACpE,OAAO,EAAE,qBAAqB,EAAE,MAAM,iCAAiC,CAAC;AAaxE,wBAAsB,WAAW,CAAC,IAAI,EAAE;IACtC,KAAK,EAAE,YAAY,GAAG,YAAY,CAAC;IACnC,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,eAAe,EAAE,CAAC;IAC5B,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5B,kBAgFA;AAED,wBAAgB,aAAa,CAAC,EAC5B,GAAG,EACH,OAAO,EACP,GAAG,GACJ,EAAE;IACD,GAAG,EAAE,WAAW,CAAC;IACjB,OAAO,EAAE;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,EAAE,EAAE,MAAM,GAAG,IAAI,CAAA;KAAE,GAAG,IAAI,CAAC;IACrD,GAAG,EAAE,YAAY,GAAG,YAAY,GAAG,YAAY,CAAC;CACjD,GAAG;IAAE,KAAK,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,GAAG,IAAI,CAAA;CAAE,GAAG,IAAI,CAoB9C;AAED,wBAAgB,UAAU,CACxB,IAAI,EAAE,WAAW,EAAE,EACnB,GAAG,EAAE,YAAY,GAAG,YAAY,GAC/B,MAAM,GAAG,IAAI,CAUf;AAED,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,QAczC;AAED,wBAAgB,wBAAwB,CAAC,EACvC,OAAO,EACP,GAAG,GACJ,EAAE;IACD,OAAO,EAAE,MAAM,CAAC;IAChB,GAAG,EAAE,YAAY,GAAG,YAAY,CAAC;CAClC,GAAG,qBAAqB,CAUxB;AAED,wBAAgB,wBAAwB,CAAC,EACvC,OAAO,EACP,GAAG,EACH,SAAS,GACV,EAAE;IACD,OAAO,EAAE,MAAM,CAAC;IAChB,GAAG,EAAE,YAAY,GAAG,YAAY,CAAC;IACjC,SAAS,EAAE,qBAAqB,CAAC;CAClC,QAKA;AAED,wBAAgB,2BAA2B,CAAC,OAAO,EAAE,MAAM,QAE1D;AAED,wBAAgB,kBAAkB,CAAC,EACjC,KAAK,GACN,EAAE;IACD,KAAK,EAAE,MAAM,CAAC;CACf,GAAG,MAAM,GAAG,SAAS,CAcrB"}
|
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
import { getSupastashConfig } from "../../../core/config";
|
|
2
2
|
import { RECEIVED_DATA_COMPLETED_MAP, RECEIVED_DATA_THRESHOLD, } from "../../../store/syncStatus";
|
|
3
3
|
import log from "../../logs";
|
|
4
|
+
import { ReusedHelpers } from "../../reusedHelpers";
|
|
4
5
|
import { supabaseClientErr } from "../../supabaseClientErr";
|
|
5
|
-
import isValidFilter from "./validateFilters";
|
|
6
6
|
const RANDOM_OLD_DATE = "2000-01-01T00:00:00Z";
|
|
7
7
|
const PAGE_SIZE = RECEIVED_DATA_THRESHOLD;
|
|
8
8
|
const MAX_PAGE_SIZE = 2000;
|
|
@@ -10,15 +10,6 @@ const timesPulled = new Map();
|
|
|
10
10
|
const lastPulled = new Map();
|
|
11
11
|
const DEFAULT_MAX_PULL_ATTEMPTS = 150;
|
|
12
12
|
const DEFAULT_PK = "00000000-0000-0000-0000-000000000000";
|
|
13
|
-
function applyFilters(q, filters, table) {
|
|
14
|
-
for (const f of filters) {
|
|
15
|
-
if (!isValidFilter([f])) {
|
|
16
|
-
throw new Error(`Invalid syncFilter: ${JSON.stringify(f)} for ${table}`);
|
|
17
|
-
}
|
|
18
|
-
q = q[f.operator](f.column, f.value);
|
|
19
|
-
}
|
|
20
|
-
return q;
|
|
21
|
-
}
|
|
22
13
|
export async function pageThrough(base) {
|
|
23
14
|
const supabase = getSupastashConfig().supabaseClient;
|
|
24
15
|
if (!supabase)
|
|
@@ -34,7 +25,11 @@ export async function pageThrough(base) {
|
|
|
34
25
|
let cursorId = base.previousPk ?? lastWork.lastId;
|
|
35
26
|
let lastDataSize = 0;
|
|
36
27
|
const { table, filters = [], select = "*" } = base;
|
|
28
|
+
const maxSyncLookBack = getMaxSyncLookBack({ table });
|
|
37
29
|
while (true) {
|
|
30
|
+
const ts = maxSyncLookBack && Date.parse(cursorTs) < Date.parse(maxSyncLookBack)
|
|
31
|
+
? maxSyncLookBack
|
|
32
|
+
: cursorTs;
|
|
38
33
|
let q = supabase
|
|
39
34
|
.from(table)
|
|
40
35
|
.select(select)
|
|
@@ -42,13 +37,13 @@ export async function pageThrough(base) {
|
|
|
42
37
|
.order("id", { ascending: true })
|
|
43
38
|
.limit(PAGE_SIZE);
|
|
44
39
|
if (cursorId) {
|
|
45
|
-
q = q.or(`${base.tsCol}.gt.${
|
|
40
|
+
q = q.or(`${base.tsCol}.gt.${ts},and(${base.tsCol}.eq.${ts},id.gt.${cursorId})`);
|
|
46
41
|
}
|
|
47
42
|
else {
|
|
48
|
-
q = q.gte(base.tsCol,
|
|
43
|
+
q = q.gte(base.tsCol, ts);
|
|
49
44
|
}
|
|
50
|
-
if (filters) {
|
|
51
|
-
q = applyFilters(q, filters, table);
|
|
45
|
+
if (filters && filters.length > 0) {
|
|
46
|
+
q = ReusedHelpers.applyFilters(q, filters, table);
|
|
52
47
|
}
|
|
53
48
|
const { data, error } = await q;
|
|
54
49
|
if (error)
|
|
@@ -150,3 +145,14 @@ export function setReceivedDataCompleted({ batchId, col, completed, }) {
|
|
|
150
145
|
export function deleteReceivedDataCompleted(batchId) {
|
|
151
146
|
delete RECEIVED_DATA_COMPLETED_MAP[batchId];
|
|
152
147
|
}
|
|
148
|
+
export function getMaxSyncLookBack({ table, }) {
|
|
149
|
+
const cfg = getSupastashConfig();
|
|
150
|
+
if (cfg.fullSyncTables?.includes(table)) {
|
|
151
|
+
return undefined;
|
|
152
|
+
}
|
|
153
|
+
const perTable = cfg.perTableSyncLookbackDays?.[table];
|
|
154
|
+
const days = perTable !== undefined ? perTable : cfg.maxSyncLookbackDays;
|
|
155
|
+
if (days === undefined)
|
|
156
|
+
return undefined;
|
|
157
|
+
return new Date(Date.now() - days * 24 * 60 * 60 * 1000).toISOString();
|
|
158
|
+
}
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { PayloadData } from "../../../types/query.types";
|
|
2
|
-
import {
|
|
2
|
+
import { SupastashFilter } from "../../../types/realtimeData.types";
|
|
3
3
|
/**
|
|
4
4
|
* Pulls data from the remote database for a given table
|
|
5
5
|
* @param table - The table to pull data from
|
|
@@ -7,13 +7,12 @@ import { RealtimeFilter } from "../../../types/realtimeData.types";
|
|
|
7
7
|
*/
|
|
8
8
|
export declare function pullData({ table, filters, batchId, }: {
|
|
9
9
|
table: string;
|
|
10
|
-
filters?:
|
|
10
|
+
filters?: SupastashFilter[];
|
|
11
11
|
batchId: string;
|
|
12
12
|
}): Promise<{
|
|
13
13
|
data: PayloadData[];
|
|
14
14
|
deletedIds: string[];
|
|
15
15
|
timestamps: {
|
|
16
|
-
createdMax: string | null;
|
|
17
16
|
updatedMax: string | null;
|
|
18
17
|
deletedMax: string | null;
|
|
19
18
|
updatedMaxPk: string | null;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"pullData.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/pullData.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AACzD,OAAO,EAAE,
|
|
1
|
+
{"version":3,"file":"pullData.d.ts","sourceRoot":"","sources":["../../../../src/utils/sync/pullFromRemote/pullData.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AACzD,OAAO,EAAE,eAAe,EAAE,MAAM,mCAAmC,CAAC;AAQpE;;;;GAIG;AACH,wBAAsB,QAAQ,CAAC,EAC7B,KAAK,EACL,OAAO,EACP,OAAO,GACR,EAAE;IACD,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,eAAe,EAAE,CAAC;IAC5B,OAAO,EAAE,MAAM,CAAC;CACjB,GAAG,OAAO,CAAC;IACV,IAAI,EAAE,WAAW,EAAE,CAAC;IACpB,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,UAAU,EAAE;QACV,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;QAC1B,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;QAC1B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;KAC7B,CAAC;CACH,GAAG,IAAI,CAAC,CAmFR"}
|