ponder 0.9.2 → 0.9.4-debug.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/ponder.js +2470 -3762
- package/dist/bin/ponder.js.map +1 -1
- package/dist/chunk-6AOFLZJ4.js +1692 -0
- package/dist/chunk-6AOFLZJ4.js.map +1 -0
- package/dist/chunk-DZFRP3KH.js +70 -0
- package/dist/chunk-DZFRP3KH.js.map +1 -0
- package/dist/{chunk-IFTUFVCL.js → chunk-MJKRYIBO.js} +3 -73
- package/dist/chunk-MJKRYIBO.js.map +1 -0
- package/dist/db-in86nyw7.d.ts +625 -0
- package/dist/experimental_unsafe_stores.d.ts +375 -0
- package/dist/experimental_unsafe_stores.js +11 -0
- package/dist/experimental_unsafe_stores.js.map +1 -0
- package/dist/index.d.ts +17 -429
- package/dist/index.js +4 -2
- package/dist/index.js.map +1 -1
- package/package.json +5 -1
- package/src/bin/commands/codegen.ts +8 -10
- package/src/bin/commands/dev.ts +30 -42
- package/src/bin/commands/list.ts +9 -14
- package/src/bin/commands/serve.ts +26 -39
- package/src/bin/commands/start.ts +29 -42
- package/src/bin/utils/{shutdown.ts → exit.ts} +23 -37
- package/src/bin/utils/run.ts +275 -175
- package/src/bin/utils/runServer.ts +1 -5
- package/src/build/configAndIndexingFunctions.ts +547 -512
- package/src/build/index.ts +5 -8
- package/src/build/pre.ts +3 -0
- package/src/config/index.ts +9 -6
- package/src/database/index.ts +72 -72
- package/src/drizzle/kit/index.ts +3 -3
- package/src/experimental_unsafe_stores.ts +4 -0
- package/src/indexing/index.ts +0 -4
- package/src/indexing/service.ts +31 -93
- package/src/indexing-store/historical.ts +2 -4
- package/src/internal/common.ts +2 -0
- package/src/internal/errors.ts +9 -9
- package/src/internal/logger.ts +1 -1
- package/src/internal/metrics.ts +75 -103
- package/src/internal/shutdown.ts +25 -0
- package/src/internal/telemetry.ts +16 -18
- package/src/internal/types.ts +9 -1
- package/src/server/index.ts +3 -5
- package/src/sync/events.ts +4 -4
- package/src/sync/filter.ts +1 -0
- package/src/sync/index.ts +1046 -805
- package/src/sync-historical/index.ts +0 -37
- package/src/sync-realtime/index.ts +48 -48
- package/src/sync-store/encoding.ts +5 -5
- package/src/sync-store/index.ts +5 -23
- package/src/ui/index.ts +2 -11
- package/src/utils/checkpoint.ts +17 -3
- package/src/utils/chunk.ts +7 -0
- package/src/utils/generators.ts +66 -0
- package/src/utils/mutex.ts +34 -0
- package/src/utils/partition.ts +41 -0
- package/src/utils/requestQueue.ts +19 -10
- package/src/utils/zipper.ts +80 -0
- package/dist/chunk-IFTUFVCL.js.map +0 -1
|
@@ -0,0 +1,1692 @@
|
|
|
1
|
+
import {
|
|
2
|
+
createQueue
|
|
3
|
+
} from "./chunk-MJKRYIBO.js";
|
|
4
|
+
import {
|
|
5
|
+
onchain
|
|
6
|
+
} from "./chunk-K2TLRLX3.js";
|
|
7
|
+
|
|
8
|
+
// src/drizzle/index.ts
|
|
9
|
+
import { getTableColumns, getTableName, is as is2 } from "drizzle-orm";
|
|
10
|
+
import { PgTable as PgTable2, getTableConfig as getTableConfig2 } from "drizzle-orm/pg-core";
|
|
11
|
+
|
|
12
|
+
// src/drizzle/kit/index.ts
|
|
13
|
+
import { SQL, is } from "drizzle-orm";
|
|
14
|
+
import { CasingCache, toCamelCase, toSnakeCase } from "drizzle-orm/casing";
|
|
15
|
+
import {
|
|
16
|
+
PgDialect,
|
|
17
|
+
PgEnumColumn,
|
|
18
|
+
PgMaterializedView,
|
|
19
|
+
PgSchema,
|
|
20
|
+
PgTable,
|
|
21
|
+
PgView,
|
|
22
|
+
getTableConfig,
|
|
23
|
+
integer,
|
|
24
|
+
isPgEnum,
|
|
25
|
+
isPgSequence,
|
|
26
|
+
pgTable,
|
|
27
|
+
serial,
|
|
28
|
+
varchar
|
|
29
|
+
} from "drizzle-orm/pg-core";
|
|
30
|
+
// Maps a user table name to the name of its "_reorg__" shadow table.
var sqlToReorgTableName = (tableName) => "_reorg__".concat(tableName);
|
|
31
|
+
// Builds the CREATE TABLE / CREATE TYPE / CREATE INDEX SQL for a drizzle
// schema export, returning both the SQL strings and the JSON statement
// objects they were rendered from. Every user table additionally gets a
// companion "_reorg__" shadow table statement (see createReorgTableStatement).
var getSql = (schema) => {
  const { tables, enums, schemas } = prepareFromExports(schema);
  // Snapshot with snake_case casing, then squash to the string-encoded
  // form the convertors below understand.
  const json = generatePgSnapshot(tables, enums, schemas, "snake_case");
  const squashed = squashPgScheme(json);
  const jsonCreateIndexesForCreatedTables = Object.values(
    squashed.tables
  ).flatMap((it) => {
    return preparePgCreateIndexesJson(it.name, it.schema, it.indexes);
  });
  const jsonCreateEnums = Object.values(squashed.enums).map((it) => {
    return prepareCreateEnumJson(it.name, it.schema, it.values);
  }) ?? [];
  const jsonCreateTables = Object.values(squashed.tables).map((it) => {
    return preparePgCreateTableJson(it, json);
  });
  // Renders statement objects to SQL via the single matching convertor.
  // Statements matched by zero or multiple convertors are silently dropped.
  const fromJson = (statements) => statements.flatMap((statement) => {
    const filtered = convertors.filter((it) => {
      return it.can(statement, "postgresql");
    });
    const convertor = filtered.length === 1 ? filtered[0] : void 0;
    if (!convertor) {
      return "";
    }
    return convertor.convert(statement);
  }).filter((it) => it !== "");
  // Interleave each user table with its reorg shadow-table statement.
  const combinedTables = jsonCreateTables.flatMap((statement) => [
    statement,
    createReorgTableStatement(statement)
  ]);
  return {
    tables: {
      sql: fromJson(combinedTables),
      json: combinedTables
    },
    enums: { sql: fromJson(jsonCreateEnums), json: jsonCreateEnums },
    indexes: {
      sql: fromJson(jsonCreateIndexesForCreatedTables),
      json: jsonCreateIndexesForCreatedTables
    }
  };
};
|
|
72
|
+
// Derives the "_reorg__" shadow-table create statement from a user table's
// create statement: same columns, but no primary key, plus bookkeeping
// columns (operation_id serial PK, operation int, checkpoint varchar(75)).
var createReorgTableStatement = (statement) => {
  const reorgStatement = structuredClone(statement);
  // Drop the original primary key(s); operation_id becomes the PK instead.
  reorgStatement.compositePkName = void 0;
  reorgStatement.compositePKs = [];
  for (const column of reorgStatement.columns) {
    column.primaryKey = false;
  }
  // Generate the bookkeeping columns by snapshotting a throwaway table and
  // reusing its squashed column definitions.
  const reorgColumns = Object.values(
    squashPgScheme(
      generatePgSnapshot(
        [
          pgTable("", {
            operation_id: serial().notNull().primaryKey(),
            operation: integer().notNull(),
            checkpoint: varchar({
              length: 75
            }).notNull()
          })
        ],
        [],
        [],
        "snake_case"
      )
    ).tables
    //@ts-ignore
  )[0].columns;
  reorgStatement.columns.push(...Object.values(reorgColumns));
  reorgStatement.tableName = sqlToReorgTableName(reorgStatement.tableName);
  return reorgStatement;
};
|
|
102
|
+
// Encodes/decodes index and primary-key definitions to the flat string form
// used inside squashed snapshots (";"-separated fields, ",,"-separated
// columns, "--"-separated column attributes).
var PgSquasher = {
  squashIdx: (idx) => {
    const serializedColumns = idx.columns.map((column) => {
      // NOTE(review): `column.opclass && ""` yields "" for a truthy opclass
      // and stringifies the falsy value otherwise — presumably intentional
      // in the published bundle; preserved as-is.
      return `${column.expression}--${column.isExpression}--${column.asc}--${column.nulls}--${column.opclass && ""}`;
    }).join(",,");
    return `${idx.name};${serializedColumns};${idx.isUnique};${idx.concurrently};${idx.method};${idx.where};${JSON.stringify(idx.with)}`;
  },
  unsquashIdx: (input) => {
    const parts = input.split(";");
    const name = parts[0];
    const columnsString = parts[1];
    const isUnique = parts[2];
    const concurrently = parts[3];
    const method = parts[4];
    const where = parts[5];
    const idxWith = parts[6];
    const columns = columnsString.split(",,").map((serialized) => {
      const [expression, isExpression, asc, nulls, opclass] = serialized.split("--");
      return {
        nulls,
        isExpression: isExpression === "true",
        asc: asc === "true",
        expression,
        opclass: opclass === "undefined" ? void 0 : opclass
      };
    });
    return {
      name,
      columns,
      isUnique: isUnique === "true",
      concurrently: concurrently === "true",
      method,
      where: where === "undefined" ? void 0 : where,
      with: !idxWith || idxWith === "undefined" ? void 0 : JSON.parse(idxWith)
    };
  },
  squashPK: (pk) => `${pk.columns.join(",")};${pk.name}`,
  unsquashPK: (pk) => {
    const [columnsPart, namePart] = pk.split(";");
    return { name: namePart, columns: columnsPart.split(",") };
  }
};
|
|
150
|
+
// Renders a column's SQL type for DDL output. Native Postgres types are
// emitted verbatim; anything else (e.g. a user-defined enum) is quoted and
// optionally schema-qualified via `schemaPrefix`. Array suffixes like "[3]"
// are stripped before the lookup and re-appended afterwards.
var parseType = (schemaPrefix, type) => {
  const pgNativeTypes = [
    "uuid",
    "smallint",
    "integer",
    "bigint",
    "boolean",
    "text",
    "varchar",
    "serial",
    "bigserial",
    "decimal",
    "numeric",
    "real",
    "json",
    "jsonb",
    "time",
    "time with time zone",
    "time without time zone",
    "time",
    "timestamp",
    "timestamp with time zone",
    "timestamp without time zone",
    "date",
    "interval",
    "bigint",
    "bigserial",
    "double precision",
    "interval year",
    "interval month",
    "interval day",
    "interval hour",
    "interval minute",
    "interval second",
    "interval year to month",
    "interval day to hour",
    "interval day to minute",
    "interval day to second",
    "interval hour to minute",
    "interval hour to second",
    "interval minute to second"
  ];
  const arrayDefinitionRegex = /\[\d*(?:\[\d*\])*\]/g;
  const matches = type.match(arrayDefinitionRegex);
  const arraySuffix = matches === null ? "" : matches.join("");
  const baseType = type.replace(arrayDefinitionRegex, "");
  const isNative = pgNativeTypes.some((nativeType) => type.startsWith(nativeType));
  if (isNative) {
    return `${baseType}${arraySuffix}`;
  }
  return `${schemaPrefix}"${baseType}"${arraySuffix}`;
};
|
|
197
|
+
// Abstract base for DDL statement convertors. Subclasses implement
// can(statement, dialect) to claim a statement type and convert(statement)
// to render it to SQL.
var Convertor = class {
};
|
|
199
|
+
// Renders squashed "create_table" statements to CREATE TABLE SQL.
var PgCreateTableConvertor = class extends Convertor {
  can(statement, dialect) {
    return statement.type === "create_table" && dialect === "postgresql";
  }
  // One line per column with PRIMARY KEY / DEFAULT / NOT NULL clauses,
  // plus an optional trailing composite primary-key constraint.
  convert(st) {
    const { tableName, schema, columns, compositePKs } = st;
    let statement = "";
    const name = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`;
    statement += `CREATE TABLE ${name} (
`;
    for (let i = 0; i < columns.length; i++) {
      const column = columns[i];
      const primaryKeyStatement = column.primaryKey ? " PRIMARY KEY" : "";
      // Identity columns imply NOT NULL, so the clause is omitted for them.
      const notNullStatement = column.notNull && !column.identity ? " NOT NULL" : "";
      const defaultStatement = column.default !== void 0 ? ` DEFAULT ${column.default}` : "";
      // Types living outside "public" (e.g. enums) must be schema-qualified.
      const schemaPrefix = column.typeSchema && column.typeSchema !== "public" ? `"${column.typeSchema}".` : "";
      const type = parseType(schemaPrefix, column.type);
      statement += ` "${column.name}" ${type}${primaryKeyStatement}${defaultStatement}${notNullStatement}`;
      statement += i === columns.length - 1 ? "" : ",\n";
    }
    // Only the first composite PK is rendered; the squashed form holds at
    // most one per table.
    if (typeof compositePKs !== "undefined" && compositePKs.length > 0) {
      statement += ",\n";
      const compositePK = PgSquasher.unsquashPK(compositePKs[0]);
      statement += ` CONSTRAINT "${st.compositePkName}" PRIMARY KEY("${compositePK.columns.join(`","`)}")`;
    }
    statement += "\n);";
    statement += "\n";
    return statement;
  }
};
|
|
229
|
+
// Renders "create_type_enum" statements to CREATE TYPE ... AS ENUM SQL.
// Note: can() ignores the dialect argument, unlike the other convertors.
var CreateTypeEnumConvertor = class extends Convertor {
  can(statement) {
    return statement.type === "create_type_enum";
  }
  convert(st) {
    const { name, values, schema } = st;
    const enumNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`;
    let valuesStatement = "(";
    // NOTE(review): values are interpolated into single quotes without
    // escaping — an enum label containing a quote would break the SQL.
    valuesStatement += values.map((it) => `'${it}'`).join(", ");
    valuesStatement += ")";
    const statement = `CREATE TYPE ${enumNameWithSchema} AS ENUM${valuesStatement};`;
    return statement;
  }
};
|
|
243
|
+
// Renders "create_index_pg" statements to CREATE [UNIQUE] INDEX SQL,
// including per-column ordering/NULLS/opclass modifiers, WITH storage
// parameters, and an optional WHERE predicate.
var CreatePgIndexConvertor = class extends Convertor {
  can(statement, dialect) {
    return statement.type === "create_index_pg" && dialect === "postgresql";
  }
  convert(statement) {
    const {
      name,
      columns,
      isUnique,
      concurrently,
      with: withMap,
      method,
      where
    } = statement.data;
    const indexPart = isUnique ? "UNIQUE INDEX" : "INDEX";
    // Per column: raw expression or quoted column name, then either the
    // opclass or an implicit/explicit sort direction, then NULLS FIRST/LAST
    // unless it is the default (asc + nulls last) or an opclass is present.
    const value = columns.map(
      (it) => `${it.isExpression ? it.expression : `"${it.expression}"`}${it.opclass ? ` ${it.opclass}` : it.asc ? "" : " DESC"}${it.asc && it.nulls && it.nulls === "last" || it.opclass ? "" : ` NULLS ${it.nulls.toUpperCase()}`}`
    ).join(",");
    const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`;
    // Serializes the WITH map back to "k=v,k=v" form (inverse of parsing).
    function reverseLogic(mappedWith) {
      let reversedString = "";
      for (const key in mappedWith) {
        if (mappedWith.hasOwnProperty(key)) {
          reversedString += `${key}=${mappedWith[key]},`;
        }
      }
      reversedString = reversedString.slice(0, -1);
      return reversedString;
    }
    return `CREATE ${indexPart}${concurrently ? " CONCURRENTLY" : ""} IF NOT EXISTS "${name}" ON ${tableNameWithSchema} USING ${method} (${value})${Object.keys(withMap).length !== 0 ? ` WITH (${reverseLogic(withMap)})` : ""}${where ? ` WHERE ${where}` : ""};`;
  }
};
|
|
275
|
+
// Renders "create_schema" statements to CREATE SCHEMA IF NOT EXISTS SQL.
var PgCreateSchemaConvertor = class extends Convertor {
  can(statement, dialect) {
    return statement.type === "create_schema" && dialect === "postgresql";
  }
  convert(statement) {
    const { name } = statement;
    // NOTE(review): there is no space between EXISTS and the quoted name.
    // Postgres tokenizes `EXISTS"x"` correctly, but confirm the missing
    // space matches upstream intent before normalizing it.
    return `CREATE SCHEMA IF NOT EXISTS"${name}";
`;
  }
};
|
|
285
|
+
// Registry of supported DDL convertors, consulted by getSql's fromJson.
var convertors = [
  new PgCreateTableConvertor(),
  new CreateTypeEnumConvertor(),
  new CreatePgIndexConvertor(),
  new PgCreateSchemaConvertor()
];
|
|
290
|
+
// Builds a "create_table" statement object from a squashed table entry.
// `json` is the unsquashed snapshot, consulted only to recover the original
// composite primary-key constraint name.
var preparePgCreateTableJson = (table, json) => {
  const { name, schema, columns, compositePrimaryKeys } = table;
  const tableKey = `${schema || "public"}.${name}`;
  const compositePkName = Object.values(compositePrimaryKeys).length > 0 ? json.tables[tableKey].compositePrimaryKeys[`${PgSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]).name}`].name : "";
  return {
    type: "create_table",
    tableName: name,
    schema,
    columns: Object.values(columns),
    compositePKs: Object.values(compositePrimaryKeys),
    compositePkName
  };
};
|
|
303
|
+
// Expands a table's squashed index map into one "create_index_pg"
// statement object per index.
var preparePgCreateIndexesJson = (tableName, schema, indexes) => Object.values(indexes).map((indexData) => ({
  type: "create_index_pg",
  tableName,
  data: PgSquasher.unsquashIdx(indexData),
  schema
}));
|
|
313
|
+
// Builds a "create_type_enum" statement object for a Postgres enum type.
var prepareCreateEnumJson = (name, schema, values) => ({
  type: "create_type_enum",
  name,
  schema,
  values
});
|
|
321
|
+
// Sorts a module's exported drizzle objects into buckets by kind
// (tables, enums, schemas, sequences, views, materialized views) using
// drizzle's runtime type predicates. Unrecognized exports are ignored.
var prepareFromExports = (exports) => {
  const tables = [];
  const enums = [];
  const schemas = [];
  const sequences = [];
  const views = [];
  const matViews = [];
  const i0values = Object.values(exports);
  i0values.forEach((t) => {
    if (isPgEnum(t)) {
      enums.push(t);
      return;
    }
    if (is(t, PgTable)) {
      tables.push(t);
    }
    if (is(t, PgSchema)) {
      schemas.push(t);
    }
    if (is(t, PgView)) {
      views.push(t);
    }
    if (is(t, PgMaterializedView)) {
      matViews.push(t);
    }
    if (isPgSequence(t)) {
      sequences.push(t);
    }
  });
  return { tables, enums, schemas, sequences, views, matViews };
};
|
|
352
|
+
// Resolves a column's SQL name under the given casing convention.
// Explicitly-named columns (keyAsName falsy) and the no-casing case are
// returned verbatim; otherwise the name is converted to camelCase or
// snake_case. Returns "" for a column without a name.
function getColumnCasing(column, casing) {
  if (!column.name)
    return "";
  return !column.keyAsName || casing === void 0 ? column.name : casing === "camelCase" ? toCamelCase(column.name) : toSnakeCase(column.name);
}
|
|
357
|
+
// Renders a drizzle `sql` fragment to its raw SQL string for use as a
// column DEFAULT. Parameters/identifiers are unsupported here: every escape
// hook throws so parameterized defaults fail loudly instead of producing
// broken DDL.
var sqlToStr = (sql2, casing) => {
  return sql2.toQuery({
    escapeName: () => {
      throw new Error("we don't support params for `sql` default values");
    },
    escapeParam: () => {
      throw new Error("we don't support params for `sql` default values");
    },
    escapeString: () => {
      throw new Error("we don't support params for `sql` default values");
    },
    casing: new CasingCache(casing)
  }).sql;
};
|
|
371
|
+
// True when a SQL type string carries an array suffix such as "[]" or "[3]".
function isPgArrayType(sqlType) {
  return /.*\[\d*\].*|.*\[\].*/g.test(sqlType);
}
|
|
374
|
+
// Serializes a JS array to Postgres array-literal syntax, e.g. {1,2,3} or
// {"a","b"}. Numbers/bigints/booleans are unquoted; dates are formatted per
// the element type ("date" vs "timestamp" vs ISO); objects are JSON-encoded
// with escaped quotes; nested arrays recurse with the stripped element type.
function buildArrayString(array, sqlType) {
  const elementType = sqlType.split("[")[0];
  const serialize = (value) => {
    if (typeof value === "number" || typeof value === "bigint") {
      return value.toString();
    }
    if (typeof value === "boolean") {
      return value ? "true" : "false";
    }
    if (Array.isArray(value)) {
      return buildArrayString(value, elementType);
    }
    if (value instanceof Date) {
      if (elementType === "date") {
        return `"${value.toISOString().split("T")[0]}"`;
      }
      if (elementType === "timestamp") {
        return `"${value.toISOString().replace("T", " ").slice(0, 23)}"`;
      }
      return `"${value.toISOString()}"`;
    }
    // Note: typeof null === "object", so null serializes as "null" (quoted).
    if (typeof value === "object") {
      return `"${JSON.stringify(value).replaceAll('"', '\\"')}"`;
    }
    return `"${value}"`;
  };
  return `{${array.map(serialize).join(",")}}`;
}
|
|
398
|
+
// Default index name: "<table>_<col1>_<col2>_index".
var indexName = (tableName, columns) => {
  const columnPart = columns.join("_");
  return `${tableName}_${columnPart}_index`;
};
|
|
401
|
+
// Builds a drizzle-kit-style "version 7" snapshot of the given tables,
// enums, and schemas: per-table column/index/composite-PK metadata keyed by
// "<schema>.<table>", plus enum and schema maps. Casing controls how JS
// property names are mapped to SQL identifiers.
var generatePgSnapshot = (tables, enums, schemas, casing) => {
  const dialect = new PgDialect({ casing });
  const result = {};
  const indexesInSchema = {};
  for (const table of tables) {
    const {
      name: tableName,
      columns,
      indexes,
      schema,
      primaryKeys
    } = getTableConfig(table);
    const columnsObject = {};
    const indexesObject = {};
    const primaryKeysObject = {};
    columns.forEach((column) => {
      const name = getColumnCasing(column, casing);
      const notNull = column.notNull;
      const primaryKey = column.primary;
      const sqlTypeLowered = column.getSQLType().toLowerCase();
      // Enum columns record their type's schema ("public" if unset).
      const typeSchema = is(column, PgEnumColumn) ? column.enum.schema || "public" : void 0;
      const columnToSet = {
        name,
        type: column.getSQLType(),
        typeSchema,
        primaryKey,
        notNull
      };
      // Serialize the column default to the literal that will appear in the
      // DDL: sql`` fragments are rendered raw, strings are quoted, json(b)
      // values are JSON-encoded and cast, dates formatted per type, arrays
      // rendered as Postgres array literals, everything else used as-is.
      if (column.default !== void 0) {
        if (is(column.default, SQL)) {
          columnToSet.default = sqlToStr(column.default, casing);
        } else {
          if (typeof column.default === "string") {
            columnToSet.default = `'${column.default}'`;
          } else {
            if (sqlTypeLowered === "jsonb" || sqlTypeLowered === "json") {
              columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`;
            } else if (column.default instanceof Date) {
              if (sqlTypeLowered === "date") {
                columnToSet.default = `'${column.default.toISOString().split("T")[0]}'`;
              } else if (sqlTypeLowered === "timestamp") {
                columnToSet.default = `'${column.default.toISOString().replace("T", " ").slice(0, 23)}'`;
              } else {
                columnToSet.default = `'${column.default.toISOString()}'`;
              }
            } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) {
              columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`;
            } else {
              columnToSet.default = column.default;
            }
          }
        }
      }
      columnsObject[name] = columnToSet;
    });
    // Composite primary keys; the constraint name is re-cased column by
    // column when a casing convention is active.
    primaryKeys.map((pk) => {
      const originalColumnNames = pk.columns.map((c) => c.name);
      const columnNames = pk.columns.map((c) => getColumnCasing(c, casing));
      let name = pk.getName();
      if (casing !== void 0) {
        for (let i = 0; i < originalColumnNames.length; i++) {
          name = name.replace(originalColumnNames[i], columnNames[i]);
        }
      }
      primaryKeysObject[name] = {
        name,
        columns: columnNames
      };
    });
    indexes.forEach((value) => {
      const columns2 = value.config.columns;
      const indexColumnNames = [];
      columns2.forEach((it) => {
        const name2 = getColumnCasing(it, casing);
        indexColumnNames.push(name2);
      });
      // Fall back to the conventional "<table>_<cols>_index" name.
      const name = value.config.name ? value.config.name : indexName(tableName, indexColumnNames);
      const indexColumns = columns2.map(
        (it) => {
          // Raw SQL index expressions get fixed defaults (asc, nulls last).
          if (is(it, SQL)) {
            return {
              expression: dialect.sqlToQuery(it, "indexes").sql,
              asc: true,
              isExpression: true,
              nulls: "last"
            };
          } else {
            it = it;
            return {
              expression: getColumnCasing(it, casing),
              isExpression: false,
              // @ts-ignore
              asc: it.indexConfig?.order === "asc",
              // @ts-ignore
              nulls: it.indexConfig?.nulls ? (
                // @ts-ignore
                it.indexConfig?.nulls
              ) : (
                // @ts-ignore
                it.indexConfig?.order === "desc" ? "first" : "last"
              ),
              // @ts-ignore
              opclass: it.indexConfig?.opClass
            };
          }
        }
      );
      // Track index names per schema (uniqueness bookkeeping).
      if (typeof indexesInSchema[schema ?? "public"] !== "undefined") {
        indexesInSchema[schema ?? "public"].push(name);
      } else {
        indexesInSchema[schema ?? "public"] = [name];
      }
      indexesObject[name] = {
        name,
        columns: indexColumns,
        isUnique: value.config.unique ?? false,
        where: value.config.where ? dialect.sqlToQuery(value.config.where).sql : void 0,
        concurrently: value.config.concurrently ?? false,
        method: value.config.method ?? "btree",
        with: value.config.with ?? {}
      };
    });
    const tableKey = `${schema ?? "public"}.${tableName}`;
    result[tableKey] = {
      name: tableName,
      schema: schema ?? "",
      columns: columnsObject,
      indexes: indexesObject,
      compositePrimaryKeys: primaryKeysObject
    };
  }
  const enumsToReturn = enums.reduce((map, obj) => {
    const enumSchema = obj.schema || "public";
    const key = `${enumSchema}.${obj.enumName}`;
    map[key] = {
      name: obj.enumName,
      schema: enumSchema,
      values: obj.enumValues
    };
    return map;
  }, {});
  // "public" is implicit and never emitted as a schema entry.
  const schemasObject = Object.fromEntries(
    schemas.filter((it) => {
      return it.schemaName !== "public";
    }).map((it) => [it.schemaName, it.schemaName])
  );
  return {
    version: "7",
    dialect: "postgresql",
    tables: result,
    enums: enumsToReturn,
    schemas: schemasObject
  };
};
|
|
555
|
+
// Returns a new object with the same keys and each value passed through
// `map` (own enumerable string keys only).
var mapValues = (obj, map) => Object.fromEntries(
  Object.entries(obj).map(([key, value]) => [key, map(value)])
);
|
|
565
|
+
// Collapses a snapshot into its "squashed" form: indexes and composite PKs
// become the flat strings produced by PgSquasher, and each column's
// `identity` field is cleared. Enums and schemas pass through unchanged.
var squashPgScheme = (json) => {
  const mappedTables = Object.fromEntries(
    Object.entries(json.tables).map((it) => {
      const squashedIndexes = mapValues(it[1].indexes, (index) => {
        return PgSquasher.squashIdx(index);
      });
      const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => {
        return PgSquasher.squashPK(pk);
      });
      const mappedColumns = Object.fromEntries(
        Object.entries(it[1].columns).map((it2) => {
          return [
            it2[0],
            {
              ...it2[1],
              identity: void 0
            }
          ];
        })
      );
      return [
        it[0],
        {
          name: it[1].name,
          schema: it[1].schema,
          columns: mappedColumns,
          indexes: squashedIndexes,
          compositePrimaryKeys: squashedPKs
        }
      ];
    })
  );
  return {
    version: "7",
    dialect: json.dialect,
    tables: mappedTables,
    enums: json.enums,
    schemas: json.schemas,
    // NOTE(review): generatePgSnapshot does not set `views`, so this is
    // typically undefined here — confirm against callers.
    views: json.views
  };
};
|
|
606
|
+
|
|
607
|
+
// src/drizzle/index.ts
|
|
608
|
+
// For every PgTable export in the schema, returns its naming bundle:
// SQL name, reorg shadow-table name, trigger name, trigger-function
// signature, and the JS export key.
var getTableNames = (schema) => {
  const tableNames = Object.entries(schema).filter(([, table]) => is2(table, PgTable2)).map(([js, table]) => {
    const sql2 = getTableName(table);
    return {
      sql: sql2,
      reorg: sqlToReorgTableName(sql2),
      // NOTE(review): the trigger name reuses the reorg table name —
      // presumably intentional (trigger named after the shadow table),
      // but verify against where `trigger` is consumed.
      trigger: sqlToReorgTableName(sql2),
      triggerFn: `operation_reorg__${sql2}()`,
      js
    };
  });
  return tableNames;
};
|
|
621
|
+
// Returns the primary-key columns of a table as { sql, js } name pairs:
// the snake_cased SQL identifier and the JS property key it was declared
// under. Prefers a composite primary key; otherwise falls back to the
// single column marked `.primaryKey()`.
var getPrimaryKeyColumns = (table) => {
  const primaryKeys = getTableConfig2(table).primaryKeys;
  // Reverse lookup: SQL column name -> JS property key on the table object.
  const findJsName = (column) => {
    const name = column.name;
    for (const [js, column2] of Object.entries(getTableColumns(table))) {
      if (column2.name === name)
        return js;
    }
    // Every PK column must be one of the table's declared columns.
    throw "unreachable";
  };
  if (primaryKeys.length > 0) {
    return primaryKeys[0].columns.map((column) => ({
      sql: getColumnCasing(column, "snake_case"),
      js: findJsName(column)
    }));
  }
  const pkColumn = Object.values(getTableColumns(table)).find(
    (c) => c.primary
  );
  return [
    {
      sql: getColumnCasing(pkColumn, "snake_case"),
      js: findJsName(pkColumn)
    }
  ];
};
|
|
647
|
+
|
|
648
|
+
// src/internal/errors.ts
|
|
649
|
+
// src/internal/errors.ts — error hierarchy.
// BaseError carries a `meta` string list used to attach hints to messages.
// NonRetryableError marks failures that must not be retried; the store/
// serialization errors below all derive from it. Each constructor calls
// Object.setPrototypeOf to restore the prototype chain (required for
// instanceof to work when Error subclasses are transpiled/bundled).
var BaseError = class _BaseError extends Error {
  name = "BaseError";
  meta = [];
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _BaseError.prototype);
  }
};
// Normalizes any thrown value into a BaseError: passes BaseErrors through,
// wraps Error instances, message-bearing objects, and plain strings, and
// falls back to "unknown error" for anything else.
function getBaseError(err) {
  if (err instanceof BaseError)
    return err;
  if (err instanceof Error)
    return new BaseError(err.message);
  if (typeof err?.message === "string")
    return new BaseError(err.message);
  if (typeof err === "string")
    return new BaseError(err);
  return new BaseError("unknown error");
}
// Raised for config/schema/indexing-function build failures.
var BuildError = class _BuildError extends BaseError {
  name = "BuildError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _BuildError.prototype);
  }
};
// Base class for errors that should abort rather than be retried.
var NonRetryableError = class _NonRetryableError extends BaseError {
  name = "NonRetryableError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _NonRetryableError.prototype);
  }
};
var UniqueConstraintError = class _UniqueConstraintError extends NonRetryableError {
  name = "UniqueConstraintError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _UniqueConstraintError.prototype);
  }
};
var NotNullConstraintError = class _NotNullConstraintError extends NonRetryableError {
  name = "NotNullConstraintError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _NotNullConstraintError.prototype);
  }
};
var RecordNotFoundError = class _RecordNotFoundError extends NonRetryableError {
  name = "RecordNotFoundError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _RecordNotFoundError.prototype);
  }
};
var CheckConstraintError = class _CheckConstraintError extends NonRetryableError {
  name = "CheckConstraintError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _CheckConstraintError.prototype);
  }
};
var InvalidStoreMethodError = class _InvalidStoreMethodError extends NonRetryableError {
  name = "InvalidStoreMethodError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _InvalidStoreMethodError.prototype);
  }
};
var UndefinedTableError = class _UndefinedTableError extends NonRetryableError {
  name = "UndefinedTableError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _UndefinedTableError.prototype);
  }
};
var BigIntSerializationError = class _BigIntSerializationError extends NonRetryableError {
  name = "BigIntSerializationError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _BigIntSerializationError.prototype);
  }
};
var FlushError = class _FlushError extends NonRetryableError {
  name = "FlushError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _FlushError.prototype);
  }
};
// Raised when an operation is attempted after shutdown has begun.
var ShutdownError = class _ShutdownError extends NonRetryableError {
  name = "ShutdownError";
  constructor(message) {
    super(message);
    Object.setPrototypeOf(this, _ShutdownError.prototype);
  }
};
|
|
745
|
+
|
|
746
|
+
// src/utils/print.ts
|
|
747
|
+
// Formats an args object for log/error output: one " key  value" line per
// defined entry, keys padded to align values; long string values are
// truncated to 80 chars with "...". Returns "(undefined)" for undefined
// input and " (empty object)" when nothing is printable.
function prettyPrint(args) {
  if (args === void 0)
    return "(undefined)";
  const rows = [];
  for (const [key, value] of Object.entries(args)) {
    if (value === void 0)
      continue;
    const display = typeof value === "string" && value.length > 80 ? value.slice(0, 80).concat("...") : value;
    rows.push([key, display]);
  }
  if (rows.length === 0)
    return " (empty object)";
  let widest = 0;
  for (const [key] of rows) {
    widest = Math.max(widest, key.length);
  }
  return rows.map(([key, value]) => ` ${`${key}`.padEnd(widest + 1)} ${value}`).join("\n");
}
|
|
764
|
+
|
|
765
|
+
// src/indexing-store/historical.ts
|
|
766
|
+
import {
|
|
767
|
+
and,
|
|
768
|
+
eq,
|
|
769
|
+
getTableColumns as getTableColumns2,
|
|
770
|
+
getTableName as getTableName2,
|
|
771
|
+
sql
|
|
772
|
+
} from "drizzle-orm";
|
|
773
|
+
import { getTableConfig as getTableConfig3 } from "drizzle-orm/pg-core";
|
|
774
|
+
import { drizzle } from "drizzle-orm/pg-proxy";
|
|
775
|
+
|
|
776
|
+
// src/indexing-store/index.ts
|
|
777
|
+
var parseSqlError = (e) => {
|
|
778
|
+
let error = getBaseError(e);
|
|
779
|
+
if (error?.message?.includes("violates not-null constraint")) {
|
|
780
|
+
error = new NotNullConstraintError(error.message);
|
|
781
|
+
} else if (error?.message?.includes("violates unique constraint")) {
|
|
782
|
+
error = new UniqueConstraintError(error.message);
|
|
783
|
+
} else if (error?.message.includes("violates check constraint")) {
|
|
784
|
+
error = new CheckConstraintError(error.message);
|
|
785
|
+
} else if (error?.message?.includes("Do not know how to serialize a BigInt")) {
|
|
786
|
+
error = new BigIntSerializationError(error.message);
|
|
787
|
+
error.meta.push(
|
|
788
|
+
"Hint:\n The JSON column type does not support BigInt values. Use the replaceBigInts() helper function before inserting into the database. Docs: https://ponder.sh/docs/utilities/replace-bigints"
|
|
789
|
+
);
|
|
790
|
+
}
|
|
791
|
+
return error;
|
|
792
|
+
};
|
|
793
|
+
|
|
794
|
+
// src/indexing-store/historical.ts
|
|
795
|
+
// src/indexing-store/historical.ts
/**
 * Guard used by every historical store method: reject an undefined table
 * object and reject offchain tables (only tables carrying the `onchain`
 * marker may be read/written from indexing functions).
 */
var checkOnchainTable = (table, method) => {
  if (table === void 0) {
    throw new UndefinedTableError(
      `Table object passed to db.${method}() is undefined`
    );
  }
  if (onchain in table) return;
  const tableName = getTableConfig3(table).name;
  const message =
    method === "find"
      ? `db.find() can only be used with onchain tables, and '${tableName}' is an offchain table.`
      : `Indexing functions can only write to onchain tables, and '${tableName}' is an offchain table.`;
  throw new InvalidStoreMethodError(message);
};
|
|
806
|
+
// Whether drizzle reports a default for this column (`hasDefault`), i.e.
// the column can be populated without an explicit user-provided value.
var hasEmptyValue = (column) => column.hasDefault;
|
|
809
|
+
/**
 * Produce the value a column takes when the user supplied none.
 * For updates, the column's onUpdate hook wins; otherwise fall back in
 * priority order: static default, default fn, onUpdate fn, undefined.
 */
var getEmptyValue = (column, type) => {
  if (type === 1 /* UPDATE */ && column.onUpdateFn) {
    return column.onUpdateFn();
  }
  if (column.default !== void 0) {
    return column.default;
  }
  if (column.defaultFn !== void 0) {
    return column.defaultFn();
  }
  if (column.onUpdateFn !== void 0) {
    return column.onUpdateFn();
  }
  return void 0;
};
|
|
821
|
+
/**
 * Normalize a single column value before caching/writing it.
 * - undefined -> the column's default (if any) or null;
 * - otherwise the value is round-tripped through the driver mapping
 *   (mapToDriverValue/mapFromDriverValue) so cached rows match what a
 *   SELECT would return.
 *
 * Throws BigIntSerializationError (with a remediation hint) when the
 * mapping fails on a BigInt; other mapping errors are rethrown.
 */
var normalizeColumn = (column, value, type) => {
  if (value === void 0) {
    if (hasEmptyValue(column)) return getEmptyValue(column, type);
    return null;
  }
  if (column.mapToDriverValue === void 0) return value;
  try {
    return column.mapFromDriverValue(column.mapToDriverValue(value));
  } catch (e) {
    if (e?.message?.includes("Do not know how to serialize a BigInt")) {
      const error = new BigIntSerializationError(e.message);
      error.meta.push(
        "Hint:\n The JSON column type does not support BigInt values. Use the replaceBigInts() helper function before inserting into the database. Docs: https://ponder.sh/docs/utilities/replace-bigints"
      );
      throw error;
    }
    // Fix: previously unrecognized mapping errors were swallowed and the
    // function fell through to return `undefined`; surface them instead.
    throw e;
  }
};
|
|
841
|
+
/**
 * Create the write-behind indexing store used during historical sync.
 *
 * Rows are kept in an in-memory per-table cache keyed by primary key and
 * written to the database in batches by `flush()`. All operations are
 * serialized through a concurrency-1 queue so cache mutation never
 * interleaves with a flush. `isDatabaseEmpty` short-circuits DB lookups
 * while the target tables are known to contain no rows.
 *
 * Fixes vs. previous revision (behavior-preserving otherwise):
 * - `find()` cache-miss path returns the row it already fetched instead of
 *   issuing a second identical SELECT;
 * - removed a dead `primaryKeysCache.get(table)` statement in
 *   `getWhereCondition`;
 * - `find()` cache-hit path reuses the already-fetched entry instead of a
 *   redundant second cache lookup.
 */
var createHistoricalIndexingStore = ({
  common,
  schemaBuild: { schema },
  database,
  isDatabaseEmpty
}) => {
  const queue = createQueue({
    browser: false,
    initialStart: true,
    concurrency: 1,
    worker: (fn) => {
      return fn();
    }
  });
  // table object -> primary key descriptors ({ js } property used below).
  const primaryKeysCache = /* @__PURE__ */ new Map();
  // table object -> Map(cacheKey -> { type, row, operationIndex, bytes }).
  const cache = /* @__PURE__ */ new Map();
  for (const tableName of getTableNames(schema)) {
    primaryKeysCache.set(
      schema[tableName.js],
      getPrimaryKeyColumns(schema[tableName.js])
    );
    cache.set(schema[tableName.js], /* @__PURE__ */ new Map());
  }
  // Build a stable string cache key from a row's normalized primary key values.
  const getCacheKey = (table, row) => {
    const primaryKeys = primaryKeysCache.get(table);
    return primaryKeys.map((pk) => normalizeColumn(table[pk.js], row[pk.js])).join("_");
  };
  const getCacheEntry = (table, row) => {
    return cache.get(table).get(getCacheKey(table, row));
  };
  // Insert/overwrite a cache entry. When `existingRow` is provided, the user
  // row is merged into it (partial-update semantics). Returns a defensive
  // clone so callers cannot mutate the cached object.
  const setCacheEntry = (table, userRow, entryType, existingRow) => {
    let row = structuredClone(userRow);
    if (existingRow) {
      for (const [key, value] of Object.entries(row)) {
        existingRow[key] = value;
      }
      existingRow = normalizeRow(table, existingRow, entryType);
      const bytes = getBytes(existingRow);
      cacheBytes += bytes;
      cache.get(table).set(getCacheKey(table, existingRow), {
        type: entryType,
        row: existingRow,
        operationIndex: totalCacheOps++,
        bytes
      });
      return structuredClone(existingRow);
    } else {
      row = normalizeRow(table, row, entryType);
      const bytes = getBytes(row);
      cacheBytes += bytes;
      cache.get(table).set(getCacheKey(table, row), {
        type: entryType,
        bytes,
        operationIndex: totalCacheOps++,
        row
      });
      return structuredClone(row);
    }
  };
  // Remove a cache entry (if present) and release its byte accounting.
  const deleteCacheEntry = (table, row) => {
    const entry = getCacheEntry(table, row);
    if (entry) {
      cacheBytes -= entry.bytes;
    }
    return cache.get(table).delete(getCacheKey(table, row));
  };
  // Normalize every column of a row in place; enforces not-null constraints
  // for inserts so violations surface before the row ever reaches the DB.
  const normalizeRow = (table, row, type) => {
    for (const [columnName, column] of Object.entries(getTableColumns2(table))) {
      if (type === 0 /* INSERT */ && (row[columnName] === void 0 || row[columnName] === null) && column.notNull && hasEmptyValue(column) === false) {
        const error = new NotNullConstraintError(
          `Column '${getTableName2(table)}.${columnName}' violates not-null constraint.`
        );
        error.meta.push(
          `db.${type === 0 /* INSERT */ ? "insert" : "update"} arguments:\n${prettyPrint(row)}`
        );
        throw error;
      }
      row[columnName] = normalizeColumn(column, row[columnName], type);
    }
    return row;
  };
  // Rough in-memory size estimate of a value (bytes), used for cache budgeting.
  const getBytes = (value) => {
    let size = 13;
    if (typeof value === "number") {
      size += 8;
    } else if (typeof value === "string") {
      size += 2 * value.length;
    } else if (typeof value === "boolean") {
      size += 4;
    } else if (typeof value === "bigint") {
      size += 48;
    } else if (value === null || value === void 0) {
      size += 8;
    } else if (Array.isArray(value)) {
      for (const e of value) {
        size += getBytes(e);
      }
    } else {
      for (const col of Object.values(value)) {
        size += getBytes(col);
      }
    }
    return size;
  };
  let cacheBytes = 0;
  let totalCacheOps = 0;
  const maxBytes = common.options.indexingCacheMaxBytes;
  common.logger.debug({
    service: "indexing",
    msg: `Using a ${Math.round(maxBytes / (1024 * 1024))} MB indexing cache`
  });
  // WHERE clause matching all primary key columns of `key`.
  const getWhereCondition = (table, key) => {
    const conditions = [];
    for (const { js } of primaryKeysCache.get(table)) {
      conditions.push(eq(table[js], key[js]));
    }
    return and(...conditions);
  };
  // Fetch a single row by primary key, or null when absent.
  const find = (table, key) => {
    return database.qb.drizzle.select().from(table).where(getWhereCondition(table, key)).then((res) => res.length === 0 ? null : res[0]);
  };
  const indexingStore = {
    // @ts-ignore
    find: (table, key) => queue.add(
      () => database.wrap(
        { method: `${getTableName2(table) ?? "unknown"}.find()` },
        async () => {
          checkOnchainTable(table, "find");
          const entry = getCacheEntry(table, key);
          if (entry) {
            // Touch the entry so eviction treats it as recently used.
            entry.operationIndex = totalCacheOps++;
            return entry.row;
          } else {
            if (isDatabaseEmpty) return null;
            const row = await find(table, key);
            const bytes = getBytes(row);
            cacheBytes += bytes;
            cache.get(table).set(getCacheKey(table, key), {
              type: 2 /* FIND */,
              bytes,
              operationIndex: totalCacheOps++,
              row
            });
            // Fix: return the row already fetched above; previously this
            // issued a second identical SELECT via `find(table, key)`.
            return row;
          }
        }
      )
    ),
    // @ts-ignore
    insert(table) {
      return {
        values: (values) => {
          const inner = {
            onConflictDoNothing: () => queue.add(
              () => database.wrap(
                {
                  method: `${getTableName2(table) ?? "unknown"}.insert()`
                },
                async () => {
                  checkOnchainTable(table, "insert");
                  if (Array.isArray(values)) {
                    const rows = [];
                    for (const value of values) {
                      const entry = getCacheEntry(table, value);
                      let row;
                      if (entry?.row) {
                        row = entry.row;
                      } else {
                        row = isDatabaseEmpty ? null : await find(table, value);
                      }
                      if (row === null) {
                        rows.push(setCacheEntry(table, value, 0 /* INSERT */));
                      } else {
                        // Conflict: the row exists, insert is a no-op.
                        rows.push(null);
                      }
                    }
                    return rows;
                  } else {
                    const entry = getCacheEntry(table, values);
                    let row;
                    if (entry?.row) {
                      row = entry.row;
                    } else {
                      row = isDatabaseEmpty ? null : await find(table, values);
                    }
                    if (row === null) {
                      return setCacheEntry(table, values, 0 /* INSERT */);
                    }
                    return null;
                  }
                }
              )
            ),
            onConflictDoUpdate: (valuesU) => queue.add(
              () => database.wrap(
                {
                  method: `${getTableName2(table) ?? "unknown"}.insert()`
                },
                async () => {
                  checkOnchainTable(table, "insert");
                  if (Array.isArray(values)) {
                    const rows = [];
                    for (const value of values) {
                      const entry = getCacheEntry(table, value);
                      deleteCacheEntry(table, value);
                      let row;
                      if (entry?.row) {
                        row = entry.row;
                      } else {
                        row = isDatabaseEmpty ? null : await find(table, value);
                      }
                      if (row === null) {
                        rows.push(setCacheEntry(table, value, 0 /* INSERT */));
                      } else if (typeof valuesU === "function") {
                        rows.push(
                          setCacheEntry(
                            table,
                            valuesU(row),
                            entry?.type === 0 /* INSERT */ ? 0 /* INSERT */ : 1 /* UPDATE */,
                            row
                          )
                        );
                      } else {
                        rows.push(
                          setCacheEntry(
                            table,
                            valuesU,
                            entry?.type === 0 /* INSERT */ ? 0 /* INSERT */ : 1 /* UPDATE */,
                            row
                          )
                        );
                      }
                    }
                    return rows;
                  } else {
                    const entry = getCacheEntry(table, values);
                    deleteCacheEntry(table, values);
                    let row;
                    if (entry?.row) {
                      row = entry.row;
                    } else {
                      row = isDatabaseEmpty ? null : await find(table, values);
                    }
                    if (row === null) {
                      return setCacheEntry(table, values, 0 /* INSERT */);
                    } else if (typeof valuesU === "function") {
                      return setCacheEntry(
                        table,
                        valuesU(row),
                        entry?.type === 0 /* INSERT */ ? 0 /* INSERT */ : 1 /* UPDATE */,
                        row
                      );
                    } else {
                      return setCacheEntry(
                        table,
                        valuesU,
                        entry?.type === 0 /* INSERT */ ? 0 /* INSERT */ : 1 /* UPDATE */,
                        row
                      );
                    }
                  }
                }
              )
            ),
            // Plain insert (awaiting the builder directly). Throws
            // UniqueConstraintError if the row already exists.
            // biome-ignore lint/suspicious/noThenProperty: <explanation>
            then: (onFulfilled, onRejected) => queue.add(
              () => database.wrap(
                {
                  method: `${getTableName2(table) ?? "unknown"}.insert()`
                },
                async () => {
                  checkOnchainTable(table, "insert");
                  const throwUniqueConstraint = (args) => {
                    const error = new UniqueConstraintError(
                      `Unique constraint failed for '${getTableName2(table)}'.`
                    );
                    error.meta.push(`db.insert arguments:\n${prettyPrint(args)}`);
                    throw error;
                  };
                  if (Array.isArray(values)) {
                    const rows = [];
                    for (const value of values) {
                      if (getCacheEntry(table, value)?.row) {
                        throwUniqueConstraint(value);
                      } else if (isDatabaseEmpty === false) {
                        const findResult = await find(table, value);
                        if (findResult) {
                          throwUniqueConstraint(value);
                        }
                      }
                      rows.push(setCacheEntry(table, value, 0 /* INSERT */));
                    }
                    return rows;
                  } else {
                    if (getCacheEntry(table, values)?.row) {
                      throwUniqueConstraint(values);
                    } else if (isDatabaseEmpty === false) {
                      const findResult = await find(table, values);
                      if (findResult) {
                        throwUniqueConstraint(values);
                      }
                    }
                    return setCacheEntry(table, values, 0 /* INSERT */);
                  }
                }
              )
            ).then(onFulfilled, onRejected),
            catch: (onRejected) => inner.then(void 0, onRejected),
            finally: (onFinally) => inner.then(
              (value) => {
                onFinally?.();
                return value;
              },
              (reason) => {
                onFinally?.();
                throw reason;
              }
            )
            // @ts-ignore
          };
          return inner;
        }
      };
    },
    // @ts-ignore
    update(table, key) {
      return {
        set: (values) => queue.add(
          () => database.wrap(
            { method: `${getTableName2(table) ?? "unknown"}.update()` },
            async () => {
              checkOnchainTable(table, "update");
              const entry = getCacheEntry(table, key);
              deleteCacheEntry(table, key);
              const throwRecordNotFound = () => {
                const error = new RecordNotFoundError(
                  `No existing record found in table '${getTableName2(table)}'`
                );
                error.meta.push(`db.update arguments:\n${prettyPrint(key)}`);
                throw error;
              };
              let row;
              if (entry?.row) {
                row = entry.row;
              } else {
                if (isDatabaseEmpty) {
                  throwRecordNotFound();
                }
                const findResult = await find(table, key);
                if (findResult) {
                  row = findResult;
                } else {
                  throwRecordNotFound();
                }
              }
              if (typeof values === "function") {
                return setCacheEntry(
                  table,
                  values(row),
                  entry?.type === 0 /* INSERT */ ? 0 /* INSERT */ : 1 /* UPDATE */,
                  row
                );
              } else {
                return setCacheEntry(
                  table,
                  values,
                  entry?.type === 0 /* INSERT */ ? 0 /* INSERT */ : 1 /* UPDATE */,
                  row
                );
              }
            }
          )
        )
      };
    },
    // @ts-ignore
    delete: (table, key) => queue.add(
      () => database.wrap(
        { method: `${getTableName2(table) ?? "unknown"}.delete()` },
        async () => {
          checkOnchainTable(table, "delete");
          const entry = getCacheEntry(table, key);
          deleteCacheEntry(table, key);
          if (entry?.row) {
            if (entry.type === 0 /* INSERT */) {
              // Row only ever existed in the cache; nothing to delete in the DB.
              return true;
            }
            await database.qb.drizzle.delete(table).where(getWhereCondition(table, key));
            return true;
          } else {
            if (isDatabaseEmpty) {
              return false;
            }
            const deleteResult = await database.qb.drizzle.delete(table).where(getWhereCondition(table, key)).returning();
            return deleteResult.length > 0;
          }
        }
      )
    ),
    // Raw SQL escape hatch. The cache is flushed first so queries observe
    // all pending writes, and triggers are installed around the flush.
    // @ts-ignore
    sql: drizzle(
      async (_sql, params, method, typings) => {
        await database.createTriggers();
        await indexingStore.flush();
        await database.removeTriggers();
        isDatabaseEmpty = false;
        const query = { sql: _sql, params, typings };
        const res = await database.wrap({ method: "sql" }, async () => {
          try {
            return await database.qb.drizzle._.session.prepareQuery(query, void 0, void 0, method === "all").execute();
          } catch (e) {
            throw parseSqlError(e);
          }
        });
        return { rows: res.rows.map((row) => Object.values(row)) };
      },
      { schema, casing: "snake_case" }
    ),
    // Write all dirty cache entries to the database in batches. When the
    // cache exceeds its byte budget, also evicts the least-recently-used
    // share per `indexingCacheFlushRatio`.
    async flush() {
      await queue.add(async () => {
        let cacheSize = 0;
        for (const c of cache.values()) cacheSize += c.size;
        // Entries with operationIndex below this threshold are evicted.
        const flushIndex = totalCacheOps - cacheSize * (1 - common.options.indexingCacheFlushRatio);
        const shouldDelete = cacheBytes > maxBytes;
        if (shouldDelete) isDatabaseEmpty = false;
        const promises = [];
        for (const [table, tableCache] of cache) {
          // Stay under the driver's query-parameter limit per statement.
          const batchSize = Math.round(
            common.options.databaseMaxQueryParameters / Object.keys(getTableColumns2(table)).length
          );
          const insertValues = [];
          const updateValues = [];
          for (const [key, entry] of tableCache) {
            if (entry.type === 0 /* INSERT */) {
              insertValues.push(entry.row);
            }
            if (entry.type === 1 /* UPDATE */) {
              updateValues.push(entry.row);
            }
            if (shouldDelete && entry.operationIndex < flushIndex) {
              tableCache.delete(key);
              cacheBytes -= entry.bytes;
            }
            // Once scheduled for write-out the entry is clean.
            entry.type = 2 /* FIND */;
          }
          if (insertValues.length > 0) {
            common.logger.debug({
              service: "indexing",
              msg: `Inserting ${insertValues.length} cached '${getTableName2(table)}' rows into the database`
            });
            while (insertValues.length > 0) {
              const values = insertValues.splice(0, batchSize);
              promises.push(
                database.wrap(
                  { method: `${getTableName2(table)}.flush()` },
                  async () => {
                    await database.qb.drizzle.insert(table).values(values).catch((_error) => {
                      const error = _error;
                      common.logger.error({
                        service: "indexing",
                        msg: "Internal error occurred while flushing cache. Please report this error here: https://github.com/ponder-sh/ponder/issues"
                      });
                      throw new FlushError(error.message);
                    });
                  }
                )
              );
            }
          }
          if (updateValues.length > 0) {
            common.logger.debug({
              service: "indexing",
              msg: `Updating ${updateValues.length} cached '${getTableName2(table)}' rows in the database`
            });
            const primaryKeys = primaryKeysCache.get(table);
            // Batched upsert: every column takes its value from `excluded`.
            const set = {};
            for (const [columnName, column] of Object.entries(
              getTableColumns2(table)
            )) {
              set[columnName] = sql.raw(
                `excluded."${getColumnCasing(column, "snake_case")}"`
              );
            }
            while (updateValues.length > 0) {
              const values = updateValues.splice(0, batchSize);
              promises.push(
                database.wrap(
                  {
                    method: `${getTableName2(table)}.flush()`
                  },
                  async () => {
                    await database.qb.drizzle.insert(table).values(values).onConflictDoUpdate({
                      // @ts-ignore
                      target: primaryKeys.map(({ js }) => table[js]),
                      set
                    }).catch((_error) => {
                      const error = _error;
                      common.logger.error({
                        service: "indexing",
                        msg: "Internal error occurred while flushing cache. Please report this error here: https://github.com/ponder-sh/ponder/issues"
                      });
                      throw new FlushError(error.message);
                    });
                  }
                )
              );
            }
          }
        }
        await Promise.all(promises);
      });
    },
    isCacheFull() {
      return cacheBytes > maxBytes;
    }
  };
  return indexingStore;
};
|
|
1414
|
+
|
|
1415
|
+
// src/indexing-store/realtime.ts
|
|
1416
|
+
import {
|
|
1417
|
+
and as and2,
|
|
1418
|
+
eq as eq2,
|
|
1419
|
+
getTableName as getTableName3
|
|
1420
|
+
} from "drizzle-orm";
|
|
1421
|
+
import { getTableConfig as getTableConfig4 } from "drizzle-orm/pg-core";
|
|
1422
|
+
import { drizzle as drizzle2 } from "drizzle-orm/pg-proxy";
|
|
1423
|
+
/**
 * Realtime-store variant of the onchain-table guard: reject an undefined
 * table object and reject offchain tables (only tables carrying the
 * `onchain` marker may be read/written from indexing functions).
 */
var checkOnchainTable2 = (table, method) => {
  if (table === void 0) {
    throw new UndefinedTableError(
      `Table object passed to db.${method}() is undefined`
    );
  }
  if (onchain in table) return;
  const tableName = getTableConfig4(table).name;
  throw new InvalidStoreMethodError(
    method === "find"
      ? `db.find() can only be used with onchain tables, and '${tableName}' is an offchain table.`
      : `Indexing functions can only write to onchain tables, and '${tableName}' is an offchain table.`
  );
};
|
|
1434
|
+
var createRealtimeIndexingStore = ({
|
|
1435
|
+
schemaBuild: { schema },
|
|
1436
|
+
database
|
|
1437
|
+
}) => {
|
|
1438
|
+
const queue = createQueue({
|
|
1439
|
+
browser: false,
|
|
1440
|
+
initialStart: true,
|
|
1441
|
+
concurrency: 1,
|
|
1442
|
+
worker: (fn) => {
|
|
1443
|
+
return fn();
|
|
1444
|
+
}
|
|
1445
|
+
});
|
|
1446
|
+
const primaryKeysCache = /* @__PURE__ */ new Map();
|
|
1447
|
+
for (const tableName of getTableNames(schema)) {
|
|
1448
|
+
primaryKeysCache.set(
|
|
1449
|
+
schema[tableName.js],
|
|
1450
|
+
getPrimaryKeyColumns(schema[tableName.js])
|
|
1451
|
+
);
|
|
1452
|
+
}
|
|
1453
|
+
const getCacheKey = (table, row) => {
|
|
1454
|
+
const primaryKeys = primaryKeysCache.get(table);
|
|
1455
|
+
return primaryKeys.map((pk) => normalizeColumn(table[pk.js], row[pk.js])).join("_");
|
|
1456
|
+
};
|
|
1457
|
+
const getWhereCondition = (table, key) => {
|
|
1458
|
+
primaryKeysCache.get(table);
|
|
1459
|
+
const conditions = [];
|
|
1460
|
+
for (const { js } of primaryKeysCache.get(table)) {
|
|
1461
|
+
conditions.push(eq2(table[js], key[js]));
|
|
1462
|
+
}
|
|
1463
|
+
return and2(...conditions);
|
|
1464
|
+
};
|
|
1465
|
+
const find = (table, key) => {
|
|
1466
|
+
return database.qb.drizzle.select().from(table).where(getWhereCondition(table, key)).then((res) => res.length === 0 ? null : res[0]);
|
|
1467
|
+
};
|
|
1468
|
+
const indexingStore = {
|
|
1469
|
+
// @ts-ignore
|
|
1470
|
+
find: (table, key) => queue.add(
|
|
1471
|
+
() => database.wrap(
|
|
1472
|
+
{ method: `${getTableName3(table) ?? "unknown"}.find()` },
|
|
1473
|
+
async () => {
|
|
1474
|
+
checkOnchainTable2(table, "find");
|
|
1475
|
+
return find(table, key);
|
|
1476
|
+
}
|
|
1477
|
+
)
|
|
1478
|
+
),
|
|
1479
|
+
// @ts-ignore
|
|
1480
|
+
insert(table) {
|
|
1481
|
+
return {
|
|
1482
|
+
values: (values) => {
|
|
1483
|
+
const inner = {
|
|
1484
|
+
onConflictDoNothing: () => queue.add(
|
|
1485
|
+
() => database.wrap(
|
|
1486
|
+
{
|
|
1487
|
+
method: `${getTableName3(table) ?? "unknown"}.insert()`
|
|
1488
|
+
},
|
|
1489
|
+
async () => {
|
|
1490
|
+
checkOnchainTable2(table, "insert");
|
|
1491
|
+
const parseResult = (result) => {
|
|
1492
|
+
if (Array.isArray(values) === false) {
|
|
1493
|
+
return result.length === 1 ? result[0] : null;
|
|
1494
|
+
}
|
|
1495
|
+
const rows = [];
|
|
1496
|
+
let resultIndex = 0;
|
|
1497
|
+
for (let i = 0; i < values.length; i++) {
|
|
1498
|
+
if (getCacheKey(table, values[i]) === getCacheKey(table, result[resultIndex])) {
|
|
1499
|
+
rows.push(result[resultIndex++]);
|
|
1500
|
+
} else {
|
|
1501
|
+
rows.push(null);
|
|
1502
|
+
}
|
|
1503
|
+
}
|
|
1504
|
+
return rows;
|
|
1505
|
+
};
|
|
1506
|
+
try {
|
|
1507
|
+
return await database.qb.drizzle.insert(table).values(values).onConflictDoNothing().returning().then(parseResult);
|
|
1508
|
+
} catch (e) {
|
|
1509
|
+
throw parseSqlError(e);
|
|
1510
|
+
}
|
|
1511
|
+
}
|
|
1512
|
+
)
|
|
1513
|
+
),
|
|
1514
|
+
onConflictDoUpdate: (valuesU) => queue.add(
|
|
1515
|
+
() => database.wrap(
|
|
1516
|
+
{
|
|
1517
|
+
method: `${getTableName3(table) ?? "unknown"}.insert()`
|
|
1518
|
+
},
|
|
1519
|
+
async () => {
|
|
1520
|
+
checkOnchainTable2(table, "insert");
|
|
1521
|
+
if (typeof valuesU === "object") {
|
|
1522
|
+
try {
|
|
1523
|
+
return await database.qb.drizzle.insert(table).values(values).onConflictDoUpdate({
|
|
1524
|
+
target: primaryKeysCache.get(table).map(({ js }) => table[js]),
|
|
1525
|
+
set: valuesU
|
|
1526
|
+
}).returning().then(
|
|
1527
|
+
(res) => Array.isArray(values) ? res : res[0]
|
|
1528
|
+
);
|
|
1529
|
+
} catch (e) {
|
|
1530
|
+
throw parseSqlError(e);
|
|
1531
|
+
}
|
|
1532
|
+
}
|
|
1533
|
+
if (Array.isArray(values)) {
|
|
1534
|
+
const rows = [];
|
|
1535
|
+
for (const value of values) {
|
|
1536
|
+
const row = await find(table, value);
|
|
1537
|
+
if (row === null) {
|
|
1538
|
+
try {
|
|
1539
|
+
rows.push(
|
|
1540
|
+
await database.qb.drizzle.insert(table).values(value).returning().then((res) => res[0])
|
|
1541
|
+
);
|
|
1542
|
+
} catch (e) {
|
|
1543
|
+
throw parseSqlError(e);
|
|
1544
|
+
}
|
|
1545
|
+
} else {
|
|
1546
|
+
try {
|
|
1547
|
+
rows.push(
|
|
1548
|
+
await database.qb.drizzle.update(table).set(valuesU(row)).where(getWhereCondition(table, value)).returning().then((res) => res[0])
|
|
1549
|
+
);
|
|
1550
|
+
} catch (e) {
|
|
1551
|
+
throw parseSqlError(e);
|
|
1552
|
+
}
|
|
1553
|
+
}
|
|
1554
|
+
}
|
|
1555
|
+
return rows;
|
|
1556
|
+
} else {
|
|
1557
|
+
const row = await find(table, values);
|
|
1558
|
+
if (row === null) {
|
|
1559
|
+
try {
|
|
1560
|
+
return await database.qb.drizzle.insert(table).values(values).returning().then((res) => res[0]);
|
|
1561
|
+
} catch (e) {
|
|
1562
|
+
throw parseSqlError(e);
|
|
1563
|
+
}
|
|
1564
|
+
} else {
|
|
1565
|
+
try {
|
|
1566
|
+
return await database.qb.drizzle.update(table).set(valuesU(row)).where(getWhereCondition(table, values)).returning().then((res) => res[0]);
|
|
1567
|
+
} catch (e) {
|
|
1568
|
+
throw parseSqlError(e);
|
|
1569
|
+
}
|
|
1570
|
+
}
|
|
1571
|
+
}
|
|
1572
|
+
}
|
|
1573
|
+
)
|
|
1574
|
+
),
|
|
1575
|
+
// biome-ignore lint/suspicious/noThenProperty: <explanation>
|
|
1576
|
+
then: (onFulfilled, onRejected) => queue.add(
|
|
1577
|
+
() => database.wrap(
|
|
1578
|
+
{
|
|
1579
|
+
method: `${getTableName3(table) ?? "unknown"}.insert()`
|
|
1580
|
+
},
|
|
1581
|
+
async () => {
|
|
1582
|
+
checkOnchainTable2(table, "insert");
|
|
1583
|
+
try {
|
|
1584
|
+
return await database.qb.drizzle.insert(table).values(values).returning().then(
|
|
1585
|
+
(res) => Array.isArray(values) ? res : res[0]
|
|
1586
|
+
);
|
|
1587
|
+
} catch (e) {
|
|
1588
|
+
throw parseSqlError(e);
|
|
1589
|
+
}
|
|
1590
|
+
}
|
|
1591
|
+
)
|
|
1592
|
+
).then(onFulfilled, onRejected),
|
|
1593
|
+
catch: (onRejected) => inner.then(void 0, onRejected),
|
|
1594
|
+
finally: (onFinally) => inner.then(
|
|
1595
|
+
(value) => {
|
|
1596
|
+
onFinally?.();
|
|
1597
|
+
return value;
|
|
1598
|
+
},
|
|
1599
|
+
(reason) => {
|
|
1600
|
+
onFinally?.();
|
|
1601
|
+
throw reason;
|
|
1602
|
+
}
|
|
1603
|
+
)
|
|
1604
|
+
// @ts-ignore
|
|
1605
|
+
};
|
|
1606
|
+
return inner;
|
|
1607
|
+
}
|
|
1608
|
+
};
|
|
1609
|
+
},
|
|
1610
|
+
// @ts-ignore
|
|
1611
|
+
update(table, key) {
|
|
1612
|
+
return {
|
|
1613
|
+
set: (values) => queue.add(
|
|
1614
|
+
() => database.wrap(
|
|
1615
|
+
{ method: `${getTableName3(table) ?? "unknown"}.update()` },
|
|
1616
|
+
async () => {
|
|
1617
|
+
checkOnchainTable2(table, "update");
|
|
1618
|
+
if (typeof values === "function") {
|
|
1619
|
+
const row = await find(table, key);
|
|
1620
|
+
if (row === null) {
|
|
1621
|
+
const error = new RecordNotFoundError(
|
|
1622
|
+
`No existing record found in table '${getTableName3(table)}'`
|
|
1623
|
+
);
|
|
1624
|
+
error.meta.push(
|
|
1625
|
+
`db.update arguments:
|
|
1626
|
+
${prettyPrint(key)}`
|
|
1627
|
+
);
|
|
1628
|
+
throw error;
|
|
1629
|
+
}
|
|
1630
|
+
try {
|
|
1631
|
+
return await database.qb.drizzle.update(table).set(values(row)).where(getWhereCondition(table, key)).returning().then((res) => res[0]);
|
|
1632
|
+
} catch (e) {
|
|
1633
|
+
throw parseSqlError(e);
|
|
1634
|
+
}
|
|
1635
|
+
} else {
|
|
1636
|
+
try {
|
|
1637
|
+
return await database.qb.drizzle.update(table).set(values).where(getWhereCondition(table, key)).returning().then((res) => res[0]);
|
|
1638
|
+
} catch (e) {
|
|
1639
|
+
throw parseSqlError(e);
|
|
1640
|
+
}
|
|
1641
|
+
}
|
|
1642
|
+
}
|
|
1643
|
+
)
|
|
1644
|
+
)
|
|
1645
|
+
};
|
|
1646
|
+
},
|
|
1647
|
+
// @ts-ignore
|
|
1648
|
+
delete: (table, key) => queue.add(
|
|
1649
|
+
() => database.wrap(
|
|
1650
|
+
{ method: `${getTableName3(table) ?? "unknown"}.delete()` },
|
|
1651
|
+
async () => {
|
|
1652
|
+
checkOnchainTable2(table, "delete");
|
|
1653
|
+
const deleted = await database.qb.drizzle.delete(table).where(getWhereCondition(table, key)).returning();
|
|
1654
|
+
return deleted.length > 0;
|
|
1655
|
+
}
|
|
1656
|
+
)
|
|
1657
|
+
),
|
|
1658
|
+
// @ts-ignore
// Raw-SQL escape hatch: a drizzle instance whose session proxies every
// statement through the serial queue and the database error wrapper.
sql: drizzle2(
  (_sql, params, method, typings) => (
    // @ts-ignore
    queue.add(async () => {
      const query = { sql: _sql, params, typings };
      // "all" requests a multi-row result set from the prepared query.
      const wantsAllRows = method === "all";
      const res = await database.wrap({ method: "sql" }, async () => {
        try {
          const prepared = database.qb.drizzle._.session.prepareQuery(
            query,
            void 0,
            void 0,
            wantsAllRows
          );
          return await prepared.execute();
        } catch (e) {
          throw parseSqlError(e);
        }
      });
      // drizzle expects positional row values here, not keyed objects.
      return { rows: res.rows.map((row) => Object.values(row)) };
    })
  ),
  { schema, casing: "snake_case" }
)
|
|
1676
|
+
};
|
|
1677
|
+
return indexingStore;
|
|
1678
|
+
};
|
|
1679
|
+
|
|
1680
|
+
// Public surface of this chunk: schema/SQL helpers, shared error classes,
// prettyPrint, and the historical/realtime indexing-store factories.
export {
  getSql,
  getColumnCasing,
  getTableNames,
  getPrimaryKeyColumns,
  BuildError,
  NonRetryableError,
  ShutdownError,
  prettyPrint,
  createHistoricalIndexingStore,
  createRealtimeIndexingStore
};
|
|
1692
|
+
//# sourceMappingURL=chunk-6AOFLZJ4.js.map
|