@snowtop/ent 0.2.7 → 0.2.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/action/executor.js +4 -4
- package/action/operations.js +3 -0
- package/action/orchestrator.js +10 -12
- package/action/topological_sort.d.ts +9 -0
- package/action/topological_sort.js +46 -0
- package/core/base.d.ts +11 -4
- package/core/clause.d.ts +3 -5
- package/core/clause.js +32 -0
- package/core/config.d.ts +26 -2
- package/core/config.js +7 -1
- package/core/context.js +5 -12
- package/core/db.d.ts +12 -2
- package/core/db.js +102 -7
- package/core/dev_schema.d.ts +9 -0
- package/core/dev_schema.js +306 -0
- package/core/ent.d.ts +3 -5
- package/core/ent.js +22 -7
- package/core/extensions.d.ts +25 -0
- package/core/extensions.js +220 -0
- package/core/loaders/assoc_count_loader.js +2 -5
- package/core/loaders/assoc_edge_loader.js +5 -8
- package/core/loaders/loader.js +1 -1
- package/core/loaders/object_loader.js +3 -6
- package/core/loaders/query_loader.d.ts +2 -5
- package/core/loaders/query_loader.js +11 -10
- package/core/memoize.d.ts +1 -0
- package/core/memoize.js +15 -0
- package/core/query/custom_clause_query.js +5 -1
- package/core/query/query.d.ts +1 -1
- package/core/query/query.js +10 -7
- package/core/query_expression.d.ts +6 -0
- package/core/query_expression.js +2 -0
- package/core/query_impl.d.ts +19 -3
- package/core/query_impl.js +148 -35
- package/index.d.ts +5 -1
- package/index.js +9 -2
- package/package.json +1 -7
- package/parse_schema/parse.d.ts +2 -12
- package/parse_schema/parse.js +22 -41
- package/schema/index.d.ts +1 -1
- package/schema/schema.d.ts +20 -1
- package/scripts/custom_graphql.js +12 -5
- package/scripts/fix_action_exports.js +1 -1
- package/scripts/migrate_v0.1.js +2 -5
- package/scripts/move_types.js +1 -1
- package/scripts/read_schema.js +2 -5
- package/testutils/builder.js +1 -2
- package/testutils/parse_sql.js +1 -1
- package/tsc/compilerOptions.d.ts +2 -2
- package/tsc/compilerOptions.js +12 -18
- package/tsc/move_generated.js +2 -2
- package/tsc/transform.d.ts +1 -1
- package/tsc/transform.js +16 -2
- package/tsc/transform_action.d.ts +1 -1
- package/tsc/transform_action.js +1 -1
- package/tsc/transform_ent.d.ts +1 -1
- package/tsc/transform_ent.js +1 -1
- package/tsc/transform_schema.d.ts +1 -1
- package/tsc/transform_schema.js +2 -2
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.resolveDevSchema = resolveDevSchema;
|
|
37
|
+
exports.isDevSchemaEnabled = isDevSchemaEnabled;
|
|
38
|
+
const crypto_1 = require("crypto");
|
|
39
|
+
const fs = __importStar(require("fs"));
|
|
40
|
+
const path = __importStar(require("path"));
|
|
41
|
+
const STATE_DIR = ".ent";
|
|
42
|
+
const STATE_FILE = "dev_schema.json";
|
|
43
|
+
const DEFAULT_SCHEMA_DIR = path.join("src", "schema");
|
|
44
|
+
const DEFAULT_SCHEMA_PREFIX = "ent_dev";
|
|
45
|
+
const MAX_SCHEMA_LEN = 63;
|
|
46
|
+
// Resolves the effective dev-branch schema configuration.
// Resolution order:
//   1. disabled entirely (NODE_ENV/production, env override, config/state) -> { enabled: false }
//   2. explicit cfg.schemaName -> use it (sanitized), no branch binding
//   3. cfg present without schemaName -> derive name from the current git branch
//   4. no cfg -> fall back to the codegen-written state file, validating that
//      the current branch still matches the one the state was generated for
//   5. state file absent/unusable -> derive from the current git branch
function resolveDevSchema(cfg) {
    if (!isDevSchemaEnabled(cfg)) {
        return { enabled: false };
    }
    if (cfg?.schemaName) {
        // Explicit name wins; sanitize so it is always a valid identifier.
        const schemaName = sanitizeIdentifier(cfg.schemaName);
        return {
            enabled: true,
            schemaName,
            includePublic: cfg.includePublic === true,
        };
    }
    if (cfg) {
        // Config-driven mode: name is derived from the live git branch.
        const branchName = requireCurrentBranch("dev branch schemas are enabled but the current git branch could not be determined. Set devSchema.schemaName explicitly.");
        return {
            enabled: true,
            schemaName: buildSchemaName(branchName),
            branchName,
            includePublic: cfg.includePublic === true,
        };
    }
    const state = loadDevSchemaState(resolveStatePath());
    if (state?.schemaName) {
        const branchName = state.branchName;
        if (branchName) {
            // State-file mode is tied to the branch that produced the generated
            // schema metadata. Fail closed after branch switches until codegen
            // refreshes the state file.
            const currentBranch = requireCurrentBranch(`dev branch schema state was generated for branch "${branchName}" but the current git branch could not be determined. Run ent codegen to regenerate or set devSchema.schemaName explicitly.`);
            if (currentBranch !== branchName) {
                throw new Error(`dev branch schema state was generated for branch "${branchName}" but current branch is "${currentBranch}". Run ent codegen to regenerate or set devSchema.schemaName explicitly.`);
            }
        }
        return {
            enabled: true,
            schemaName: sanitizeIdentifier(state.schemaName),
            branchName,
            includePublic: state.includePublic === true,
        };
    }
    // Last resort: no config and no usable state file — derive from git.
    const branchName = requireCurrentBranch("dev branch schemas are enabled but the current git branch could not be determined. Set devSchema.schemaName explicitly or run ent codegen to regenerate src/schema/.ent/dev_schema.json.");
    return {
        enabled: true,
        schemaName: buildSchemaName(branchName),
        branchName,
        includePublic: false,
    };
}
|
|
94
|
+
// Returns whether dev-branch schemas should be active for this process.
// Precedence: NODE_ENV=production always disables; the ENT_DEV_SCHEMA_ENABLED
// env var (when set to a recognized boolean) overrides everything else; then
// explicit config; then the codegen state file. Both config and state-file
// modes can opt out specific branches via ignoreBranches.
function isDevSchemaEnabled(cfg) {
    const nodeEnv = (process.env.NODE_ENV || "").toLowerCase();
    if (nodeEnv === "production") {
        return false;
    }
    // Env override: returns undefined when unset/unparseable, in which case
    // we keep evaluating config/state below.
    const envEnabled = parseEnvBool("ENT_DEV_SCHEMA_ENABLED");
    if (envEnabled !== undefined) {
        return envEnabled;
    }
    if (cfg) {
        // Config mode requires an explicit opt-in.
        if (cfg.enabled !== true) {
            return false;
        }
        const branch = resolveGitBranch();
        if (isBranchIgnored(cfg.ignoreBranches, branch)) {
            return false;
        }
        return true;
    }
    // No config: fall back to the codegen-written state file.
    const state = loadDevSchemaState(resolveStatePath());
    if (!state?.schemaName) {
        return false;
    }
    const branch = resolveGitBranch();
    if (isBranchIgnored(state.ignoreBranches, branch)) {
        return false;
    }
    return true;
}
|
|
123
|
+
/**
 * Reads and parses the codegen-written dev schema state file.
 *
 * @param statePath absolute path to the state JSON; falsy or missing paths
 *   yield undefined rather than an error.
 * @returns the parsed state object when it has a schemaName, else undefined.
 * @throws Error when the file exists but contains invalid JSON. The original
 *   JSON.parse error is preserved as `cause` for debuggability (previously it
 *   was silently discarded).
 */
function loadDevSchemaState(statePath) {
    if (!statePath || !fs.existsSync(statePath)) {
        return undefined;
    }
    const raw = fs.readFileSync(statePath, "utf8");
    let data;
    try {
        data = JSON.parse(raw);
    }
    catch (err) {
        // Keep the underlying parse error attached instead of swallowing it.
        throw new Error(`invalid dev schema state file at ${statePath}`, { cause: err });
    }
    // A state file without a schemaName is treated the same as no state file.
    if (!data || !data.schemaName) {
        return undefined;
    }
    return data;
}
|
|
140
|
+
// Returns the current git branch name, throwing `message` when the branch
// cannot be resolved (no repo, detached HEAD, unreadable HEAD file).
function requireCurrentBranch(message) {
    const current = resolveGitBranch();
    if (current) {
        return current;
    }
    throw new Error(message);
}
|
|
147
|
+
// Lowercases `input` and collapses each run of non-alphanumeric characters
// into a single underscore, then trims underscores from both ends.
// Empty/falsy input yields "".
function slugify(input) {
    if (!input) {
        return "";
    }
    return input
        .toLowerCase()
        .replace(/[^a-z0-9]+/g, "_")
        .replace(/^_+|_+$/g, "");
}
|
|
169
|
+
// Strips leading and trailing underscores, leaving interior ones intact.
function trimBoundaryUnderscores(input) {
    return input.replace(/^_+/, "").replace(/_+$/, "");
}
|
|
180
|
+
// Converts arbitrary input into a safe Postgres schema identifier: slugified,
// never empty ("schema" fallback), never digit-leading, and capped at
// MAX_SCHEMA_LEN characters.
function sanitizeIdentifier(input) {
    const slug = slugify(input);
    if (!slug) {
        return "schema";
    }
    const leadsWithDigit = slug[0] >= "0" && slug[0] <= "9";
    const normalized = leadsWithDigit ? `schema_${slug}` : slug;
    // slice is a no-op when already within the limit.
    return normalized.slice(0, MAX_SCHEMA_LEN);
}
|
|
194
|
+
// First 8 hex chars of sha1(input); used to keep branch-derived schema names
// unique after slug truncation.
function shortHash(input) {
    const digest = crypto_1.createHash("sha1").update(input).digest("hex");
    return digest.slice(0, 8);
}
|
|
197
|
+
/**
 * Builds the dev schema name "<prefix>_<branchSlug>_<hash>" for a git branch.
 * When the joined name exceeds MAX_SCHEMA_LEN, only the branch slug is
 * shrunk (never below 1 char); the 8-char hash of the full branch name keeps
 * truncated names unique across similar branch names. A final hard slice
 * guards the case where prefix + hash alone still exceed the limit.
 *
 * Change: removed the redundant `over > 0` guard — `over` is always positive
 * after the early return above, so only the slug-length check matters.
 */
function buildSchemaName(branch) {
    const prefix = sanitizeIdentifier(DEFAULT_SCHEMA_PREFIX);
    let branchSlug = slugify(branch) || "branch";
    const hash = shortHash(branch);
    let name = [prefix, branchSlug, hash].join("_");
    if (name.length <= MAX_SCHEMA_LEN) {
        return name;
    }
    // Shrink the slug by however many chars we are over, keeping >= 1 char.
    const over = name.length - MAX_SCHEMA_LEN;
    if (branchSlug.length > 1) {
        branchSlug = branchSlug.slice(0, branchSlug.length - Math.min(over, branchSlug.length - 1));
        name = [prefix, branchSlug, hash].join("_");
    }
    // Hard cap: prefix/hash may still push past the limit on extreme inputs.
    return name.length > MAX_SCHEMA_LEN ? name.slice(0, MAX_SCHEMA_LEN) : name;
}
|
|
218
|
+
// Absolute path of the codegen-written dev schema state file, anchored at the
// git repo root when one can be found, else at the current working directory.
function resolveStatePath() {
    const cwd = process.cwd();
    const root = findGitRoot(cwd) || cwd;
    return path.join(root, DEFAULT_SCHEMA_DIR, STATE_DIR, STATE_FILE);
}
|
|
224
|
+
// True when `branch` exactly matches one of the ignore entries.
// A missing branch, empty list, or blank/whitespace-only entries never match.
function isBranchIgnored(ignoreBranches, branch) {
    if (!branch) {
        return false;
    }
    const entries = ignoreBranches || [];
    return entries.some((name) => Boolean(name && name.trim()) && name === branch);
}
|
|
238
|
+
// Parses process.env[key] as a tri-state boolean: true for 1/true/t/yes/y,
// false for 0/false/f/no/n (case/whitespace-insensitive), undefined for an
// unset/empty or unrecognized value.
function parseEnvBool(key) {
    const raw = process.env[key];
    if (!raw) {
        return undefined;
    }
    switch (raw.trim().toLowerCase()) {
        case "1":
        case "true":
        case "t":
        case "yes":
        case "y":
            return true;
        case "0":
        case "false":
        case "f":
        case "no":
        case "n":
            return false;
        default:
            return undefined;
    }
}
|
|
252
|
+
// Returns the current git branch name by reading .git/HEAD directly (no git
// subprocess). Returns "" when there is no repo, the git dir cannot be
// resolved, HEAD is missing, or HEAD is detached (raw commit sha).
function resolveGitBranch() {
    const start = process.cwd();
    const root = findGitRoot(start);
    if (!root) {
        return "";
    }
    // .git may be a file pointing at the real git dir (worktrees, submodules).
    const gitDir = resolveGitDir(path.join(root, ".git"));
    if (!gitDir) {
        return "";
    }
    const headPath = path.join(gitDir, "HEAD");
    if (!fs.existsSync(headPath)) {
        return "";
    }
    const head = fs.readFileSync(headPath, "utf8").trim();
    // On a branch HEAD looks like "ref: refs/heads/<branch>"; detached HEAD
    // holds a bare sha, which we treat as "no branch".
    if (!head.startsWith("ref:")) {
        return "";
    }
    return head
        .replace("ref:", "")
        .trim()
        .replace(/^refs\/heads\//, "");
}
|
|
275
|
+
// Resolves a `.git` path to the actual git directory. A plain directory is
// returned as-is; a `.git` file (worktrees, submodules) must contain
// "gitdir: <path>", resolved relative to the file when not absolute.
// Returns undefined when the path is missing or the file is malformed.
function resolveGitDir(gitPath) {
    if (!fs.existsSync(gitPath)) {
        return undefined;
    }
    if (fs.statSync(gitPath).isDirectory()) {
        return gitPath;
    }
    const contents = fs.readFileSync(gitPath, "utf8").trim();
    if (!contents.startsWith("gitdir:")) {
        return undefined;
    }
    const target = contents.replace("gitdir:", "").trim();
    return path.isAbsolute(target) ? target : path.join(path.dirname(gitPath), target);
}
|
|
293
|
+
// Walks upward from `start` looking for a directory that contains `.git`
// (directory or file). Returns the first match, or undefined at the
// filesystem root.
function findGitRoot(start) {
    for (let dir = start; ; dir = path.dirname(dir)) {
        if (fs.existsSync(path.join(dir, ".git"))) {
            return dir;
        }
        // path.dirname of the root is the root itself: stop there.
        if (path.dirname(dir) === dir) {
            return undefined;
        }
    }
}
|
package/core/ent.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { Context, CreateRowOptions, Data, DataOptions, EdgeQueryableDataOptions, EditRowOptions, Ent, ID, LoadCustomEntOptions, LoadEntOptions, LoadRowOptions, LoadRowsOptions, LoaderWithLoadMany, QueryDataOptions, SelectCustomDataOptions, Viewer } from "./base";
|
|
1
|
+
import { Context, CreateRowOptions, Data, DataOptions, EdgeQueryableDataOptions, EditRowOptions, Ent, ID, LoadCustomEntOptions, LoadEntOptions, LoadRowOptions, LoadRowsOptions, LoaderWithLoadMany, QueryDataOptions, SelectBaseDataOptions, SelectCustomDataOptions, Viewer } from "./base";
|
|
2
2
|
import { Queryer, SyncQueryer } from "./db";
|
|
3
3
|
import * as clause from "./clause";
|
|
4
4
|
import { InstrumentedDataLoader } from "./loaders/loader";
|
|
@@ -108,10 +108,7 @@ interface GroupQueryOptions<T extends Data, K = keyof T> {
|
|
|
108
108
|
tableName: string;
|
|
109
109
|
clause?: clause.Clause<T, K>;
|
|
110
110
|
groupColumn: K;
|
|
111
|
-
fields:
|
|
112
|
-
alias: string;
|
|
113
|
-
column: K;
|
|
114
|
-
})[];
|
|
111
|
+
fields: SelectBaseDataOptions["fields"];
|
|
115
112
|
values: any[];
|
|
116
113
|
orderby?: OrderBy;
|
|
117
114
|
limit: number;
|
|
@@ -151,6 +148,7 @@ export interface cursorOptions {
|
|
|
151
148
|
*/
|
|
152
149
|
rowKeys?: string[];
|
|
153
150
|
}
|
|
151
|
+
export declare function decodeCursorPayload(encoded: string): string;
|
|
154
152
|
export declare function getCursor(opts: cursorOptions): string;
|
|
155
153
|
export declare class AssocEdgeData {
|
|
156
154
|
edgeType: string;
|
package/core/ent.js
CHANGED
|
@@ -68,6 +68,7 @@ exports.editRow = editRow;
|
|
|
68
68
|
exports.editRowSync = editRowSync;
|
|
69
69
|
exports.deleteRows = deleteRows;
|
|
70
70
|
exports.deleteRowsSync = deleteRowsSync;
|
|
71
|
+
exports.decodeCursorPayload = decodeCursorPayload;
|
|
71
72
|
exports.getCursor = getCursor;
|
|
72
73
|
exports.loadEdgeData = loadEdgeData;
|
|
73
74
|
exports.loadEdgeDatas = loadEdgeDatas;
|
|
@@ -607,13 +608,13 @@ async function loadRow(options) {
|
|
|
607
608
|
return row;
|
|
608
609
|
}
|
|
609
610
|
}
|
|
610
|
-
const
|
|
611
|
-
logQuery(query,
|
|
611
|
+
const queryData = (0, query_impl_1.buildQueryData)(options);
|
|
612
|
+
logQuery(queryData.query, queryData.logValues);
|
|
612
613
|
const pool = db_1.default.getInstance().getPool();
|
|
613
|
-
const res = await pool.query(query,
|
|
614
|
+
const res = await pool.query(queryData.query, queryData.values);
|
|
614
615
|
if (res.rowCount != 1) {
|
|
615
616
|
if (res.rowCount > 1) {
|
|
616
|
-
(0, logger_1.log)("error", "got more than one row for query " + query);
|
|
617
|
+
(0, logger_1.log)("error", "got more than one row for query " + queryData.query);
|
|
617
618
|
}
|
|
618
619
|
return null;
|
|
619
620
|
}
|
|
@@ -652,8 +653,8 @@ async function loadRows(options) {
|
|
|
652
653
|
return rows;
|
|
653
654
|
}
|
|
654
655
|
}
|
|
655
|
-
const
|
|
656
|
-
const r = await performRawQuery(query,
|
|
656
|
+
const queryData = (0, query_impl_1.buildQueryData)(options);
|
|
657
|
+
const r = await performRawQuery(queryData.query, queryData.values, queryData.logValues);
|
|
657
658
|
if (cache) {
|
|
658
659
|
// put the rows in the cache...
|
|
659
660
|
cache.primeCache(options, r);
|
|
@@ -662,6 +663,12 @@ async function loadRows(options) {
|
|
|
662
663
|
}
|
|
663
664
|
// this is used for queries when we select multiple ids at once
|
|
664
665
|
function buildGroupQuery(options) {
|
|
666
|
+
if (options.fields.some((field) => typeof field === "object" && "expression" in field)) {
|
|
667
|
+
throw new Error("group queries do not support computed select expressions");
|
|
668
|
+
}
|
|
669
|
+
if (options.orderby && (0, query_impl_1.orderByHasExpressions)(options.orderby)) {
|
|
670
|
+
throw new Error("group queries do not support computed order expressions");
|
|
671
|
+
}
|
|
665
672
|
const fields = [...options.fields, "row_number()"];
|
|
666
673
|
let cls = clause.In(options.groupColumn, ...options.values);
|
|
667
674
|
if (options.clause) {
|
|
@@ -900,6 +907,14 @@ class AssocEdge {
|
|
|
900
907
|
}
|
|
901
908
|
}
|
|
902
909
|
exports.AssocEdge = AssocEdge;
|
|
910
|
+
// Base64-encodes a JSON cursor payload. Buffer is used instead of btoa so
// that full UTF-8 payloads (not just Latin-1 code units) round-trip.
function encodeCursorPayload(json) {
    const bytes = Buffer.from(json, "utf8");
    return bytes.toString("base64");
}
|
|
914
|
+
// Decodes a base64 cursor payload (as produced by encodeCursorPayload) back
// to its UTF-8 JSON string; exported for pagination decode in query.ts.
function decodeCursorPayload(encoded) {
    const bytes = Buffer.from(encoded, "base64");
    return bytes.toString("utf8");
}
|
|
903
918
|
// TODO eventually update this for sortCol time unique keys
|
|
904
919
|
function getCursor(opts) {
|
|
905
920
|
const { row, cursorKeys, rowKeys } = opts;
|
|
@@ -917,7 +932,7 @@ function getCursor(opts) {
|
|
|
917
932
|
const rowKey = rowKeys?.[i] || cursorKey;
|
|
918
933
|
parts.push([cursorKey, convert(row[rowKey])]);
|
|
919
934
|
}
|
|
920
|
-
return
|
|
935
|
+
return encodeCursorPayload(JSON.stringify(parts));
|
|
921
936
|
}
|
|
922
937
|
class AssocEdgeData {
|
|
923
938
|
constructor(data) {
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { type Pool } from "pg";
import type { RuntimeDBExtension } from "./config";
import type { ResolvedDevSchema } from "./dev_schema";
/** An extension row as read from pg_extension. */
export interface InstalledDBExtension {
    name: string;
    version: string;
    /** Schema the extension was installed into (extnamespace), when known. */
    installSchema?: string;
}
/** Parser for a single wire-format value of an extension-provided pg type. */
export interface ExtensionTypeParser {
    name: string;
    parse(value: string | null): unknown;
}
/** Runtime integration (search-path schemas, type parsers, validation) for a db extension. */
export interface ExtensionRuntimeHandler {
    name: string;
    runtimeSchemas?: string[];
    types?: ExtensionTypeParser[];
    /** Optional hook run during initializeExtensions for configured extensions. */
    validate?(installed: InstalledDBExtension, extension: RuntimeDBExtension): void | Promise<void>;
}
/** Registers (or merges) a runtime handler; re-registration merges by name. */
export declare function registerExtensionRuntime(handler: ExtensionRuntimeHandler): void;
/** Test helper: clears registered handlers and parser-OID bookkeeping. */
export declare function clearExtensionRuntimes(): void;
/** Returns a name-sorted copy of `extensions` with defaults applied. */
export declare function normalizeExtensions(extensions: RuntimeDBExtension[]): RuntimeDBExtension[];
/** normalizeExtensions with an undefined config treated as empty. */
export declare function resolveExtensions(cfg?: RuntimeDBExtension[]): RuntimeDBExtension[];
/** Ordered, deduped runtime schemas from config and registered handlers. */
export declare function getExtensionSearchPathSchemas(extensions: RuntimeDBExtension[]): string[];
/** Builds the connection search_path (dev schema first); undefined when empty. */
export declare function buildExtensionSearchPath(resolvedDevSchema: ResolvedDevSchema, extensions: RuntimeDBExtension[]): string | undefined;
/** Validates installed extensions and installs registered pg type parsers. */
export declare function initializeExtensions(pool: Pick<Pool, "query">, extensions: RuntimeDBExtension[]): Promise<void>;
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.registerExtensionRuntime = registerExtensionRuntime;
|
|
4
|
+
exports.clearExtensionRuntimes = clearExtensionRuntimes;
|
|
5
|
+
exports.normalizeExtensions = normalizeExtensions;
|
|
6
|
+
exports.resolveExtensions = resolveExtensions;
|
|
7
|
+
exports.getExtensionSearchPathSchemas = getExtensionSearchPathSchemas;
|
|
8
|
+
exports.buildExtensionSearchPath = buildExtensionSearchPath;
|
|
9
|
+
exports.initializeExtensions = initializeExtensions;
|
|
10
|
+
const pg_1 = require("pg");
|
|
11
|
+
const TEXT_ARRAY_OID = 1009;
|
|
12
|
+
const runtimeHandlers = new Map();
|
|
13
|
+
const registeredTypeOIDs = new Set();
|
|
14
|
+
// Validates and defaults an extension's provisionedBy field.
// Accepted values: "ent" (the default when unset) or "external"; any other
// truthy value is a configuration error.
function normalizeProvisionedBy(extension) {
    const value = extension.provisionedBy;
    if (value === "ent" || value === "external") {
        return value;
    }
    if (value) {
        throw new Error(`invalid provisionedBy ${extension.provisionedBy} for db extension ${extension.name}`);
    }
    return "ent";
}
|
|
24
|
+
// Returns a copy of `handler` with runtimeSchemas deduped and type parsers
// deduped by type name (the last registration for a name wins). The input is
// not mutated.
function normalizeRuntimeHandler(handler) {
    const typesByName = new Map((handler.types || []).map((t) => [t.name, t]));
    return {
        ...handler,
        runtimeSchemas: [...new Set(handler.runtimeSchemas || [])],
        types: [...typesByName.values()],
    };
}
|
|
35
|
+
// Merges two registrations for the same extension: type parsers are keyed by
// type name with `rhs` winning, runtimeSchemas are unioned preserving first
// appearance order, and rhs.validate is preferred over lhs.validate.
function mergeRuntimeHandlers(lhs, rhs) {
    const typesByName = new Map();
    for (const typeHandler of [...(lhs.types || []), ...(rhs.types || [])]) {
        typesByName.set(typeHandler.name, typeHandler);
    }
    const schemaUnion = new Set([
        ...(lhs.runtimeSchemas || []),
        ...(rhs.runtimeSchemas || []),
    ]);
    return {
        name: rhs.name,
        runtimeSchemas: [...schemaUnion],
        types: [...typesByName.values()],
        validate: rhs.validate || lhs.validate,
    };
}
|
|
55
|
+
// All registered runtime handlers as a fresh array in deterministic
// (name-sorted) order; the registry itself is never mutated here.
function getRegisteredRuntimeHandlers() {
    const handlers = Array.from(runtimeHandlers.values());
    handlers.sort((a, b) => a.name.localeCompare(b.name));
    return handlers;
}
|
|
58
|
+
// Registers a runtime handler for a db extension. Registering the same name
// again merges the new handler into the existing one (schemas unioned, type
// parsers deduped by name with the newer registration winning).
function registerExtensionRuntime(handler) {
    const normalized = normalizeRuntimeHandler(handler);
    const existing = runtimeHandlers.get(normalized.name);
    runtimeHandlers.set(normalized.name, existing ? mergeRuntimeHandlers(existing, normalized) : normalized);
}
|
|
63
|
+
// Resets the handler registry and the OID bookkeeping used to avoid
// double-registering pg type parsers.
// NOTE(review): parsers already installed via pg.types.setTypeParser are not
// uninstalled here — only the bookkeeping is cleared.
function clearExtensionRuntimes() {
    runtimeHandlers.clear();
    registeredTypeOIDs.clear();
}
|
|
67
|
+
// Returns a name-sorted copy of `extensions` with defaults applied:
// provisionedBy validated/defaulted, runtimeSchemas always an array, and
// dropCascade coerced to a strict boolean. The input array is not mutated.
function normalizeExtensions(extensions) {
    const normalized = extensions.map((extension) => ({
        ...extension,
        provisionedBy: normalizeProvisionedBy(extension),
        runtimeSchemas: extension.runtimeSchemas || [],
        dropCascade: extension.dropCascade === true,
    }));
    normalized.sort((a, b) => a.name.localeCompare(b.name));
    return normalized;
}
|
|
77
|
+
// Normalizes the configured extension list, treating an absent config as
// an empty list.
function resolveExtensions(cfg) {
    const configured = cfg || [];
    return normalizeExtensions(configured);
}
|
|
80
|
+
// Ordered, deduped list of runtime schemas contributed by the configured
// extensions and by registered runtime handlers. When an extension is both
// configured and has a registered handler, the configured runtimeSchemas
// take precedence over the handler's defaults.
function getExtensionSearchPathSchemas(extensions) {
    const normalizedExtensions = normalizeExtensions(extensions);
    const configuredExtensions = new Map(normalizedExtensions.map((extension) => [extension.name, extension]));
    const seen = new Set();
    const schemas = [];
    // Appends schemas, skipping blanks and duplicates, preserving order.
    function addSchemas(runtimeSchemas) {
        for (const schema of runtimeSchemas) {
            if (!schema || seen.has(schema)) {
                continue;
            }
            seen.add(schema);
            schemas.push(schema);
        }
    }
    // Configured extensions first (normalizeExtensions sorts them by name).
    for (const extension of normalizedExtensions) {
        addSchemas(extension.runtimeSchemas || []);
    }
    // Then handler defaults, unless the extension was explicitly configured.
    for (const handler of getRegisteredRuntimeHandlers()) {
        const configured = configuredExtensions.get(handler.name);
        addSchemas(configured
            ? configured.runtimeSchemas || []
            : handler.runtimeSchemas || []);
    }
    return schemas;
}
|
|
105
|
+
// Builds the search_path value for a connection: the dev schema comes first
// (when active), followed by extension runtime schemas, with "public"
// appended when the dev schema opts in (includePublic) or when extensions
// are present without a dev schema. Returns undefined when nothing to set.
function buildExtensionSearchPath(resolvedDevSchema, extensions) {
    const schemas = [];
    const add = (schema) => {
        if (schema && !schemas.includes(schema)) {
            schemas.push(schema);
        }
    };
    if (resolvedDevSchema.enabled) {
        add(resolvedDevSchema.schemaName);
    }
    for (const schema of getExtensionSearchPathSchemas(extensions)) {
        add(schema);
    }
    if (resolvedDevSchema.enabled && resolvedDevSchema.includePublic) {
        add("public");
    }
    if (!resolvedDevSchema.enabled && schemas.length > 0) {
        add("public");
    }
    return schemas.length > 0 ? schemas.join(",") : undefined;
}
|
|
130
|
+
// Queries pg_extension for the subset of `extensions` that are installed,
// returning a Map keyed by extension name. Short-circuits without touching
// the pool when no extensions are configured.
async function getInstalledExtensions(pool, extensions) {
    const installed = new Map();
    if (extensions.length === 0) {
        return installed;
    }
    const res = await pool.query(`
    SELECT
      extname,
      extversion,
      extnamespace::regnamespace::text AS install_schema
    FROM pg_extension
    WHERE extname = ANY($1::text[])
  `, [extensions.map((extension) => extension.name)]);
    for (const row of res.rows) {
        installed.set(row.extname, {
            name: row.extname,
            version: row.extversion,
            installSchema: row.install_schema,
        });
    }
    return installed;
}
|
|
152
|
+
// Installs a parser for an extension type's array OID: pg's built-in text[]
// parser splits the array literal, then every element is run through the
// extension's scalar `parse`. No-op when the OID is falsy (pg_type.typarray
// can be 0) or when the OID has already been registered.
function registerArrayParser(arrayOID, parse) {
    if (!arrayOID || registeredTypeOIDs.has(arrayOID)) {
        return;
    }
    const parseTextArray = pg_1.types.getTypeParser(TEXT_ARRAY_OID);
    pg_1.types.setTypeParser(arrayOID, (value) => {
        if (value === null) {
            return null;
        }
        const parsed = parseTextArray(value);
        return parsed.map((entry) => parse(entry));
    });
    registeredTypeOIDs.add(arrayOID);
}
|
|
166
|
+
// Looks up the pg OIDs for every type declared by registered runtime
// handlers and installs their parsers (plus matching array parsers).
// A missing type is an error only when its extension is actually configured;
// types belonging to unconfigured extensions are skipped silently.
async function initializeRegisteredTypeParsers(pool, configuredExtensions) {
    // typname -> { extensionName, parse }; later handlers win on collisions.
    const registeredTypes = new Map();
    for (const handler of getRegisteredRuntimeHandlers()) {
        for (const typeHandler of handler.types || []) {
            registeredTypes.set(typeHandler.name, {
                extensionName: handler.name,
                parse: typeHandler.parse,
            });
        }
    }
    if (registeredTypes.size === 0) {
        return;
    }
    const res = await pool.query(`
    SELECT oid, typname, typarray
    FROM pg_type
    WHERE typname = ANY($1::text[])
  `, [[...registeredTypes.keys()]]);
    const rowsByType = new Map(res.rows.map((row) => [row.typname, row]));
    for (const [typeName, typeHandler] of registeredTypes.entries()) {
        const row = rowsByType.get(typeName);
        if (!row) {
            if (configuredExtensions.has(typeHandler.extensionName)) {
                throw new Error(`required pg type "${typeName}" for db extension "${typeHandler.extensionName}" was not found`);
            }
            continue;
        }
        // Guard against re-registering the same OID across re-initialization.
        if (!registeredTypeOIDs.has(row.oid)) {
            pg_1.types.setTypeParser(row.oid, typeHandler.parse);
            registeredTypeOIDs.add(row.oid);
        }
        registerArrayParser(row.typarray, typeHandler.parse);
    }
}
|
|
200
|
+
// Validates that every configured extension is installed with the expected
// version/install schema, runs each extension's optional runtime validate
// hook, then installs pg type parsers for all registered extension types.
async function initializeExtensions(pool, extensions) {
    const normalizedExtensions = normalizeExtensions(extensions);
    const configuredExtensions = new Map(normalizedExtensions.map((extension) => [extension.name, extension]));
    const installedExtensions = await getInstalledExtensions(pool, normalizedExtensions);
    // Verifies presence, version, and install schema for one extension.
    const requireInstalled = (extension) => {
        const installed = installedExtensions.get(extension.name);
        if (!installed) {
            throw new Error(`required db extension "${extension.name}" is not installed`);
        }
        if (extension.version && extension.version !== installed.version) {
            throw new Error(`required db extension "${extension.name}" version "${extension.version}" but found "${installed.version}"`);
        }
        if (extension.installSchema &&
            extension.installSchema !== installed.installSchema) {
            throw new Error(`required db extension "${extension.name}" install schema "${extension.installSchema}" but found "${installed.installSchema}"`);
        }
        return installed;
    };
    for (const extension of normalizedExtensions) {
        const installed = requireInstalled(extension);
        // Optional runtime hook registered via registerExtensionRuntime.
        await runtimeHandlers.get(extension.name)?.validate?.(installed, extension);
    }
    await initializeRegisteredTypeParsers(pool, configuredExtensions);
}
|
|
@@ -32,23 +32,20 @@ var __importStar = (this && this.__importStar) || (function () {
|
|
|
32
32
|
return result;
|
|
33
33
|
};
|
|
34
34
|
})();
|
|
35
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
-
};
|
|
38
35
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
36
|
exports.AssocEdgeCountLoaderFactory = exports.AssocEdgeCountLoader = void 0;
|
|
40
37
|
const ent_1 = require("../ent");
|
|
41
38
|
const clause = __importStar(require("../clause"));
|
|
42
39
|
const loader_1 = require("./loader");
|
|
43
40
|
const raw_count_loader_1 = require("./raw_count_loader");
|
|
44
|
-
const
|
|
41
|
+
const memoize_1 = require("../memoize");
|
|
45
42
|
class AssocEdgeCountLoader {
|
|
46
43
|
constructor(edgeType, context, options) {
|
|
47
44
|
this.edgeType = edgeType;
|
|
48
45
|
this.context = context;
|
|
49
46
|
this.options = options;
|
|
50
47
|
if (context) {
|
|
51
|
-
this.loaderFn = (0,
|
|
48
|
+
this.loaderFn = (0, memoize_1.memoizeNoArgs)(this.getLoader.bind(this));
|
|
52
49
|
}
|
|
53
50
|
}
|
|
54
51
|
async getLoader() {
|