@snowtop/ent 0.2.7 → 0.2.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/action/executor.js +4 -4
- package/action/operations.js +3 -0
- package/action/orchestrator.js +10 -12
- package/action/topological_sort.d.ts +9 -0
- package/action/topological_sort.js +46 -0
- package/core/base.d.ts +11 -4
- package/core/clause.d.ts +3 -5
- package/core/clause.js +32 -0
- package/core/config.d.ts +26 -2
- package/core/config.js +7 -1
- package/core/context.js +5 -12
- package/core/db.d.ts +12 -2
- package/core/db.js +102 -7
- package/core/dev_schema.d.ts +9 -0
- package/core/dev_schema.js +306 -0
- package/core/ent.d.ts +3 -5
- package/core/ent.js +22 -7
- package/core/extensions.d.ts +25 -0
- package/core/extensions.js +220 -0
- package/core/loaders/assoc_count_loader.js +2 -5
- package/core/loaders/assoc_edge_loader.js +5 -8
- package/core/loaders/loader.js +1 -1
- package/core/loaders/object_loader.js +3 -6
- package/core/loaders/query_loader.d.ts +2 -5
- package/core/loaders/query_loader.js +11 -10
- package/core/memoize.d.ts +1 -0
- package/core/memoize.js +15 -0
- package/core/query/custom_clause_query.js +5 -1
- package/core/query/query.d.ts +1 -1
- package/core/query/query.js +10 -7
- package/core/query_expression.d.ts +6 -0
- package/core/query_expression.js +2 -0
- package/core/query_impl.d.ts +19 -3
- package/core/query_impl.js +148 -35
- package/index.d.ts +5 -1
- package/index.js +9 -2
- package/package.json +1 -7
- package/parse_schema/parse.d.ts +2 -12
- package/parse_schema/parse.js +22 -41
- package/schema/index.d.ts +1 -1
- package/schema/schema.d.ts +20 -1
- package/scripts/custom_graphql.js +12 -5
- package/scripts/fix_action_exports.js +1 -1
- package/scripts/migrate_v0.1.js +2 -5
- package/scripts/move_types.js +1 -1
- package/scripts/read_schema.js +2 -5
- package/testutils/builder.js +1 -2
- package/testutils/parse_sql.js +1 -1
- package/tsc/compilerOptions.d.ts +2 -2
- package/tsc/compilerOptions.js +12 -18
- package/tsc/move_generated.js +2 -2
- package/tsc/transform.d.ts +1 -1
- package/tsc/transform.js +16 -2
- package/tsc/transform_action.d.ts +1 -1
- package/tsc/transform_action.js +1 -1
- package/tsc/transform_ent.d.ts +1 -1
- package/tsc/transform_ent.js +1 -1
- package/tsc/transform_schema.d.ts +1 -1
- package/tsc/transform_schema.js +2 -2
package/action/executor.js
CHANGED
|
@@ -5,10 +5,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
5
5
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
6
|
exports.ComplexExecutor = exports.ListBasedExecutor = void 0;
|
|
7
7
|
exports.executeOperations = executeOperations;
|
|
8
|
-
const graph_data_structure_1 = require("graph-data-structure");
|
|
9
8
|
const ent_1 = require("../core/ent");
|
|
10
9
|
const db_1 = __importDefault(require("../core/db"));
|
|
11
10
|
const logger_1 = require("../core/logger");
|
|
11
|
+
const topological_sort_1 = require("./topological_sort");
|
|
12
12
|
const operations_1 = require("./operations");
|
|
13
13
|
// private to ent
|
|
14
14
|
class ListBasedExecutor {
|
|
@@ -123,7 +123,7 @@ class ComplexExecutor {
|
|
|
123
123
|
this.executors = [];
|
|
124
124
|
this.changedOps = new Map();
|
|
125
125
|
this.builder = options?.builder;
|
|
126
|
-
const graph = new
|
|
126
|
+
const graph = new topological_sort_1.TopologicalGraph();
|
|
127
127
|
const changesetMap = new Map();
|
|
128
128
|
const impl = (c) => {
|
|
129
129
|
changesetMap.set(c.placeholderID.toString(), c);
|
|
@@ -131,7 +131,7 @@ class ComplexExecutor {
|
|
|
131
131
|
if (c.dependencies) {
|
|
132
132
|
for (let [_, builder] of c.dependencies) {
|
|
133
133
|
// dependency should go first...
|
|
134
|
-
graph.addEdge(builder.placeholderID.toString(), c.placeholderID.toString()
|
|
134
|
+
graph.addEdge(builder.placeholderID.toString(), c.placeholderID.toString());
|
|
135
135
|
}
|
|
136
136
|
}
|
|
137
137
|
if (c.changesets) {
|
|
@@ -156,7 +156,7 @@ class ComplexExecutor {
|
|
|
156
156
|
// TODO: can this logic be rewritten to not have a set yet avoid duplicates?
|
|
157
157
|
let nodeOps = new Set();
|
|
158
158
|
let remainOps = new Set();
|
|
159
|
-
const sorted =
|
|
159
|
+
const sorted = graph.topologicalSort();
|
|
160
160
|
sorted.forEach((node) => {
|
|
161
161
|
let c = changesetMap.get(node);
|
|
162
162
|
if (!c) {
|
package/action/operations.js
CHANGED
|
@@ -237,6 +237,9 @@ class EditNodeOperation {
|
|
|
237
237
|
return `RETURNING ${this.options.loadEntOptions.fields
|
|
238
238
|
.map((f) => {
|
|
239
239
|
if (typeof f === "object") {
|
|
240
|
+
if ("expression" in f) {
|
|
241
|
+
throw new Error("RETURNING does not support computed select expressions");
|
|
242
|
+
}
|
|
240
243
|
return `${f.alias}.${f.column}`;
|
|
241
244
|
}
|
|
242
245
|
return f;
|
package/action/orchestrator.js
CHANGED
|
@@ -32,9 +32,6 @@ var __importStar = (this && this.__importStar) || (function () {
|
|
|
32
32
|
return result;
|
|
33
33
|
};
|
|
34
34
|
})();
|
|
35
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
-
};
|
|
38
35
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
36
|
exports.EntChangeset = exports.Orchestrator = exports.edgeDirection = void 0;
|
|
40
37
|
const ent_1 = require("../core/ent");
|
|
@@ -43,8 +40,8 @@ const operations_1 = require("./operations");
|
|
|
43
40
|
const action_1 = require("../action");
|
|
44
41
|
const privacy_1 = require("../core/privacy");
|
|
45
42
|
const executor_1 = require("./executor");
|
|
43
|
+
const memoize_1 = require("../core/memoize");
|
|
46
44
|
const logger_1 = require("../core/logger");
|
|
47
|
-
const memoizee_1 = __importDefault(require("memoizee"));
|
|
48
45
|
const clause = __importStar(require("../core/clause"));
|
|
49
46
|
const types_1 = require("util/types");
|
|
50
47
|
const operations_2 = require("./operations");
|
|
@@ -112,7 +109,7 @@ class Orchestrator {
|
|
|
112
109
|
this.viewer = options.viewer;
|
|
113
110
|
this.actualOperation = this.options.operation;
|
|
114
111
|
this.existingEnt = this.options.builder.existingEnt;
|
|
115
|
-
this.memoizedGetFields = (0,
|
|
112
|
+
this.memoizedGetFields = (0, memoize_1.memoizeNoArgs)(this.getFieldsInfo.bind(this));
|
|
116
113
|
}
|
|
117
114
|
// don't type this because we don't care
|
|
118
115
|
__getOptions() {
|
|
@@ -639,19 +636,20 @@ class Orchestrator {
|
|
|
639
636
|
for (const [k, field] of schemaFields) {
|
|
640
637
|
const inputKey = this.getInputKey(k);
|
|
641
638
|
const storageKey = this.getStorageKey(k);
|
|
642
|
-
let
|
|
643
|
-
if (
|
|
644
|
-
|
|
639
|
+
let inputVal = transformed.data[inputKey];
|
|
640
|
+
if (inputVal === undefined) {
|
|
641
|
+
inputVal = transformed.data[storageKey];
|
|
645
642
|
}
|
|
646
|
-
if (
|
|
643
|
+
if (inputVal === undefined) {
|
|
647
644
|
continue;
|
|
648
645
|
}
|
|
646
|
+
let dbVal = inputVal;
|
|
649
647
|
if (field.format) {
|
|
650
|
-
|
|
648
|
+
dbVal = field.format(inputVal, true);
|
|
651
649
|
}
|
|
652
|
-
data[this.getStorageKey(k)] =
|
|
650
|
+
data[this.getStorageKey(k)] = dbVal;
|
|
653
651
|
if (!field.immutable) {
|
|
654
|
-
this.defaultFieldsByTSName[this.getInputKey(k)] =
|
|
652
|
+
this.defaultFieldsByTSName[this.getInputKey(k)] = inputVal;
|
|
655
653
|
}
|
|
656
654
|
// hmm do we need this?
|
|
657
655
|
// TODO how to do this for local tests?
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.TopologicalGraph = void 0;
|
|
4
|
+
class TopologicalGraph {
|
|
5
|
+
constructor() {
|
|
6
|
+
this.nodes = new Set();
|
|
7
|
+
this.edges = new Map();
|
|
8
|
+
}
|
|
9
|
+
addNode(node) {
|
|
10
|
+
this.nodes.add(node);
|
|
11
|
+
if (!this.edges.has(node)) {
|
|
12
|
+
this.edges.set(node, new Set());
|
|
13
|
+
}
|
|
14
|
+
}
|
|
15
|
+
addEdge(from, to) {
|
|
16
|
+
this.addNode(from);
|
|
17
|
+
this.addNode(to);
|
|
18
|
+
this.edges.get(from).add(to);
|
|
19
|
+
}
|
|
20
|
+
topologicalSort() {
|
|
21
|
+
const ordered = [];
|
|
22
|
+
const seen = new Map();
|
|
23
|
+
const visit = (node) => {
|
|
24
|
+
const state = seen.get(node);
|
|
25
|
+
if (state === 1) {
|
|
26
|
+
throw new Error("Cycle found");
|
|
27
|
+
}
|
|
28
|
+
if (state === 2) {
|
|
29
|
+
return;
|
|
30
|
+
}
|
|
31
|
+
seen.set(node, 1);
|
|
32
|
+
for (const target of this.edges.get(node) || []) {
|
|
33
|
+
visit(target);
|
|
34
|
+
}
|
|
35
|
+
seen.set(node, 2);
|
|
36
|
+
ordered.push(node);
|
|
37
|
+
};
|
|
38
|
+
for (const node of this.nodes) {
|
|
39
|
+
if (seen.get(node) !== 2) {
|
|
40
|
+
visit(node);
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
return ordered.reverse();
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
exports.TopologicalGraph = TopologicalGraph;
|
package/core/base.d.ts
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import * as clause from "./clause";
|
|
2
2
|
import { ObjectLoaderFactory } from "./loaders";
|
|
3
3
|
import { OrderBy } from "./query_impl";
|
|
4
|
+
import { QueryExpression } from "./query_expression";
|
|
4
5
|
export interface Loader<K, V> {
|
|
5
6
|
context?: Context;
|
|
6
7
|
load(key: K): Promise<V>;
|
|
@@ -61,16 +62,22 @@ export interface EntConstructor<TEnt extends Ent, TViewer extends Viewer = Viewe
|
|
|
61
62
|
new (viewer: TViewer, data: Data): TEnt;
|
|
62
63
|
}
|
|
63
64
|
export type ID = string | number;
|
|
65
|
+
export interface SelectColumnField {
|
|
66
|
+
alias: string;
|
|
67
|
+
column: string;
|
|
68
|
+
}
|
|
69
|
+
export interface SelectExpressionField {
|
|
70
|
+
alias: string;
|
|
71
|
+
expression: QueryExpression;
|
|
72
|
+
}
|
|
73
|
+
export type SelectField = string | SelectColumnField | SelectExpressionField;
|
|
64
74
|
export interface DataOptions {
|
|
65
75
|
tableName: string;
|
|
66
76
|
alias?: string;
|
|
67
77
|
context?: Context;
|
|
68
78
|
}
|
|
69
79
|
export interface SelectBaseDataOptions extends DataOptions {
|
|
70
|
-
fields:
|
|
71
|
-
alias: string;
|
|
72
|
-
column: string;
|
|
73
|
-
})[];
|
|
80
|
+
fields: SelectField[];
|
|
74
81
|
fieldsAlias?: string;
|
|
75
82
|
disableFieldsAlias?: boolean;
|
|
76
83
|
disableDefaultOrderByAlias?: boolean;
|
package/core/clause.d.ts
CHANGED
|
@@ -1,10 +1,7 @@
|
|
|
1
1
|
import { Data, ID, SelectDataOptions } from "./base";
|
|
2
|
-
|
|
3
|
-
|
|
2
|
+
import { QueryExpression } from "./query_expression";
|
|
3
|
+
export interface Clause<T extends Data = Data, K = keyof T> extends QueryExpression {
|
|
4
4
|
columns(): K[];
|
|
5
|
-
values(): any[];
|
|
6
|
-
instanceKey(): string;
|
|
7
|
-
logValues(): any[];
|
|
8
5
|
compositeOp?: string;
|
|
9
6
|
}
|
|
10
7
|
export interface SensitiveValue {
|
|
@@ -180,4 +177,5 @@ export declare function Modulo<T extends Data, K = keyof T>(col: K, value: any,
|
|
|
180
177
|
export declare function getCombinedClause<V extends Data = Data, K = keyof V>(options: Pick<SelectDataOptions, "clause">, cls: Clause<V, K>, checkIntersection?: boolean): Clause<V, K>;
|
|
181
178
|
export declare function getCombinedClause<V extends Data = Data, K = keyof V>(options: Pick<SelectDataOptions, "clause">, cls: Clause<V, K> | undefined, checkIntersection?: boolean): Clause<V, K> | undefined;
|
|
182
179
|
export declare function Expression<T extends Data, K = keyof T>(expression: string): Clause<T, K>;
|
|
180
|
+
export declare function ParameterizedExpression(key: string, expression: (idx: number, alias?: string) => string, values: any[], logValues?: any[]): QueryExpression;
|
|
183
181
|
export {};
|
package/core/clause.js
CHANGED
|
@@ -92,6 +92,7 @@ exports.Divide = Divide;
|
|
|
92
92
|
exports.Modulo = Modulo;
|
|
93
93
|
exports.getCombinedClause = getCombinedClause;
|
|
94
94
|
exports.Expression = Expression;
|
|
95
|
+
exports.ParameterizedExpression = ParameterizedExpression;
|
|
95
96
|
const db_1 = __importStar(require("./db"));
|
|
96
97
|
const query_impl_1 = require("./query_impl");
|
|
97
98
|
function isSensitive(val) {
|
|
@@ -256,6 +257,34 @@ class simpleExpression {
|
|
|
256
257
|
return `${this.expression}`;
|
|
257
258
|
}
|
|
258
259
|
}
|
|
260
|
+
/**
 * QueryExpression backed by a caller-supplied SQL fragment builder plus
 * positional parameters. `expression` receives the starting placeholder
 * index and an optional table alias and returns the SQL snippet.
 */
class parameterizedExpression {
    constructor(key, expression, params, logParams) {
        this.key = key;
        this.expression = expression;
        this.params = params;
        this.logParams = logParams;
    }
    // Delegate SQL rendering to the injected builder function.
    clause(idx, alias) {
        return this.expression(idx, alias);
    }
    // Parameter values for binding, unwrapped via rawValue.
    values() {
        const result = [];
        for (const param of this.params) {
            result.push(rawValue(param));
        }
        return result;
    }
    // Loggable parameter values: explicit logParams win; otherwise
    // sensitive values are masked via their logValue().
    logValues() {
        if (this.logParams) {
            return this.logParams;
        }
        return this.params.map((param) => isSensitive(param) ? param.logValue() : param);
    }
    // Cache/identity key for this expression instance.
    instanceKey() {
        return this.key;
    }
}
|
|
259
288
|
class arraySimpleClause {
|
|
260
289
|
constructor(col, value, op, overrideAlias) {
|
|
261
290
|
this.col = col;
|
|
@@ -1107,3 +1136,6 @@ function getCombinedClause(options, cls, checkIntersection = false) {
|
|
|
1107
1136
|
function Expression(expression) {
|
|
1108
1137
|
return new simpleExpression(expression);
|
|
1109
1138
|
}
|
|
1139
|
+
/**
 * Public factory for a parameterized SQL expression.
 * `expression` maps (placeholder index, optional alias) to a SQL snippet;
 * `values` are the bound parameters; `logValues` optionally overrides
 * what gets logged in their place.
 */
function ParameterizedExpression(key, expression, values, logValues) {
    const expr = new parameterizedExpression(key, expression, values, logValues);
    return expr;
}
|
package/core/config.d.ts
CHANGED
|
@@ -12,6 +12,14 @@ declare enum fieldPrivacyEvaluated {
|
|
|
12
12
|
AT_ENT_LOAD = "at_ent_load",
|
|
13
13
|
ON_DEMAND = "on_demand"
|
|
14
14
|
}
|
|
15
|
+
export interface RuntimeDBExtension {
|
|
16
|
+
name: string;
|
|
17
|
+
provisionedBy?: "ent" | "external";
|
|
18
|
+
version?: string;
|
|
19
|
+
installSchema?: string;
|
|
20
|
+
runtimeSchemas?: string[];
|
|
21
|
+
dropCascade?: boolean;
|
|
22
|
+
}
|
|
15
23
|
export interface Config {
|
|
16
24
|
dbConnectionString?: string;
|
|
17
25
|
dbFile?: string;
|
|
@@ -22,18 +30,34 @@ export interface Config {
|
|
|
22
30
|
loaderMaxBatchSize?: number;
|
|
23
31
|
clauseLoaderConcurrency?: number;
|
|
24
32
|
entLoaderPrivacyConcurrencyLimit?: number;
|
|
33
|
+
devSchema?: RuntimeDevSchemaConfig;
|
|
34
|
+
extensions?: RuntimeDBExtension[];
|
|
25
35
|
}
|
|
26
|
-
export interface ConfigWithCodegen extends Config {
|
|
36
|
+
export interface ConfigWithCodegen extends Omit<Config, "devSchema"> {
|
|
27
37
|
codegen?: CodegenConfig;
|
|
28
38
|
databaseMigration?: DatabaseMigrationConfig;
|
|
29
39
|
customGraphQLJSONPath?: string;
|
|
30
40
|
dynamicScriptCustomGraphQLJSONPath?: string;
|
|
31
41
|
globalSchemaPath?: string;
|
|
42
|
+
devSchema?: DevSchemaConfig;
|
|
32
43
|
}
|
|
33
44
|
interface DatabaseMigrationConfig {
|
|
34
45
|
custom_sql_include?: string[];
|
|
35
46
|
custom_sql_exclude?: string[];
|
|
36
47
|
}
|
|
48
|
+
export interface DevSchemaPruneConfig {
|
|
49
|
+
enabled?: boolean;
|
|
50
|
+
days?: number;
|
|
51
|
+
}
|
|
52
|
+
export interface RuntimeDevSchemaConfig {
|
|
53
|
+
enabled?: boolean;
|
|
54
|
+
schemaName?: string;
|
|
55
|
+
includePublic?: boolean;
|
|
56
|
+
ignoreBranches?: string[];
|
|
57
|
+
}
|
|
58
|
+
export interface DevSchemaConfig extends RuntimeDevSchemaConfig {
|
|
59
|
+
prune?: DevSchemaPruneConfig;
|
|
60
|
+
}
|
|
37
61
|
interface CodegenConfig {
|
|
38
62
|
defaultEntPolicy?: PrivacyConfig;
|
|
39
63
|
defaultActionPolicy?: PrivacyConfig;
|
|
@@ -70,5 +94,5 @@ interface importedObject {
|
|
|
70
94
|
name: string;
|
|
71
95
|
alias?: string;
|
|
72
96
|
}
|
|
73
|
-
export declare function loadConfig(file?: string | Buffer | Config): void;
|
|
97
|
+
export declare function loadConfig(file?: string | Buffer | Config | ConfigWithCodegen): void;
|
|
74
98
|
export {};
|
package/core/config.js
CHANGED
|
@@ -66,11 +66,17 @@ function setConfig(cfg) {
|
|
|
66
66
|
if (cfg.log) {
|
|
67
67
|
(0, logger_1.setLogLevels)(cfg.log);
|
|
68
68
|
}
|
|
69
|
-
if (cfg.dbConnectionString ||
|
|
69
|
+
if (cfg.dbConnectionString ||
|
|
70
|
+
cfg.dbFile ||
|
|
71
|
+
cfg.db ||
|
|
72
|
+
cfg.devSchema ||
|
|
73
|
+
cfg.extensions) {
|
|
70
74
|
db_1.default.initDB({
|
|
71
75
|
connectionString: cfg.dbConnectionString,
|
|
72
76
|
dbFile: cfg.dbFile,
|
|
73
77
|
db: cfg.db,
|
|
78
|
+
devSchema: cfg.devSchema,
|
|
79
|
+
extensions: cfg.extensions,
|
|
74
80
|
});
|
|
75
81
|
}
|
|
76
82
|
(0, ent_1.___setLogQueryErrorWithError)(cfg.logQueryWithError);
|
package/core/context.js
CHANGED
|
@@ -7,6 +7,7 @@ exports.getContextCacheKey = getContextCacheKey;
|
|
|
7
7
|
const logger_1 = require("./logger");
|
|
8
8
|
const cache_utils_1 = require("./cache_utils");
|
|
9
9
|
const metrics_1 = require("./metrics");
|
|
10
|
+
const query_impl_1 = require("./query_impl");
|
|
10
11
|
const DEFAULT_MAX_DISCARDED_LOADERS = 1000;
|
|
11
12
|
let maxDiscardedLoaders = DEFAULT_MAX_DISCARDED_LOADERS;
|
|
12
13
|
function getContextCacheMaxDiscardedLoaders() {
|
|
@@ -23,16 +24,8 @@ function setContextCacheMaxDiscardedLoaders(size) {
|
|
|
23
24
|
maxDiscardedLoaders = Math.floor(size);
|
|
24
25
|
}
|
|
25
26
|
function getContextCacheKey(options) {
|
|
26
|
-
const fields = options.fields
|
|
27
|
-
.map((f) => {
|
|
28
|
-
if (typeof f === "object") {
|
|
29
|
-
return `${f.alias}.${f.column}`;
|
|
30
|
-
}
|
|
31
|
-
return f;
|
|
32
|
-
})
|
|
33
|
-
.join(",");
|
|
34
27
|
const parts = [
|
|
35
|
-
`fields:${fields}`,
|
|
28
|
+
`fields:${(0, query_impl_1.getSelectFieldsKey)(options.fields)}`,
|
|
36
29
|
`clause:${options.clause.instanceKey()}`,
|
|
37
30
|
];
|
|
38
31
|
if (options.distinct !== undefined) {
|
|
@@ -54,7 +47,7 @@ function getContextCacheKey(options) {
|
|
|
54
47
|
parts.push(`groupby:${options.groupby}`);
|
|
55
48
|
}
|
|
56
49
|
if (options.orderby) {
|
|
57
|
-
parts.push(`orderby:${(0,
|
|
50
|
+
parts.push(`orderby:${(0, query_impl_1.getOrderByKey)(options.orderby)}`);
|
|
58
51
|
}
|
|
59
52
|
if (options.join) {
|
|
60
53
|
const joinKey = options.join.map((join) => ({
|
|
@@ -124,7 +117,7 @@ class ContextCache {
|
|
|
124
117
|
}
|
|
125
118
|
(0, logger_1.log)("cache", {
|
|
126
119
|
"cache-hit": key,
|
|
127
|
-
|
|
120
|
+
tableName: options.tableName,
|
|
128
121
|
});
|
|
129
122
|
}
|
|
130
123
|
return rows || null;
|
|
@@ -146,7 +139,7 @@ class ContextCache {
|
|
|
146
139
|
}
|
|
147
140
|
(0, logger_1.log)("cache", {
|
|
148
141
|
"cache-hit": key,
|
|
149
|
-
|
|
142
|
+
tableName: options.tableName,
|
|
150
143
|
});
|
|
151
144
|
}
|
|
152
145
|
return row || null;
|
package/core/db.d.ts
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import { Pool, PoolClient, PoolConfig } from "pg";
|
|
2
|
+
import type { RuntimeDBExtension, RuntimeDevSchemaConfig } from "./config";
|
|
2
3
|
export interface Database extends PoolConfig {
|
|
3
4
|
database?: string;
|
|
4
5
|
user?: string;
|
|
@@ -17,12 +18,16 @@ interface DatabaseInfo {
|
|
|
17
18
|
dialect: Dialect;
|
|
18
19
|
config: PoolConfig;
|
|
19
20
|
filePath?: string;
|
|
21
|
+
devSchema?: RuntimeDevSchemaConfig;
|
|
22
|
+
extensions?: RuntimeDBExtension[];
|
|
20
23
|
}
|
|
21
24
|
interface clientConfigArgs {
|
|
22
25
|
connectionString?: string;
|
|
23
26
|
dbFile?: string;
|
|
24
27
|
db?: Database | DBDict;
|
|
25
28
|
cfg?: PoolConfig;
|
|
29
|
+
devSchema?: RuntimeDevSchemaConfig;
|
|
30
|
+
extensions?: RuntimeDBExtension[];
|
|
26
31
|
}
|
|
27
32
|
export default class DB {
|
|
28
33
|
db: DatabaseInfo;
|
|
@@ -112,7 +117,10 @@ export declare class Sqlite implements Connection, SyncClient {
|
|
|
112
117
|
}
|
|
113
118
|
export declare class Postgres implements Connection {
|
|
114
119
|
private pool;
|
|
115
|
-
|
|
120
|
+
private ready?;
|
|
121
|
+
private closePromise?;
|
|
122
|
+
constructor(pool: Pool, ready?: Promise<void> | undefined);
|
|
123
|
+
private ensureReady;
|
|
116
124
|
self(): this;
|
|
117
125
|
newClient(): Promise<PostgresClient>;
|
|
118
126
|
query(query: string, values?: any[]): Promise<QueryResult<QueryResultRow>>;
|
|
@@ -122,7 +130,9 @@ export declare class Postgres implements Connection {
|
|
|
122
130
|
}
|
|
123
131
|
export declare class PostgresClient implements Client {
|
|
124
132
|
private client;
|
|
125
|
-
|
|
133
|
+
private ready?;
|
|
134
|
+
constructor(client: PoolClient, ready?: Promise<void> | undefined);
|
|
135
|
+
private ensureReady;
|
|
126
136
|
query(query: string, values?: any[]): Promise<QueryResult<QueryResultRow>>;
|
|
127
137
|
queryAll(query: string, values?: any[]): Promise<QueryResult<QueryResultRow>>;
|
|
128
138
|
exec(query: string, values?: any[]): Promise<ExecResult>;
|
package/core/db.js
CHANGED
|
@@ -39,6 +39,8 @@ const js_yaml_1 = require("js-yaml");
|
|
|
39
39
|
const luxon_1 = require("luxon");
|
|
40
40
|
const pg_1 = __importStar(require("pg"));
|
|
41
41
|
const logger_1 = require("./logger");
|
|
42
|
+
const dev_schema_1 = require("./dev_schema");
|
|
43
|
+
const extensions_1 = require("./extensions");
|
|
42
44
|
function isDbDict(v) {
|
|
43
45
|
return (v["production"] !== undefined ||
|
|
44
46
|
v["development"] !== undefined ||
|
|
@@ -76,15 +78,22 @@ function parseConnectionString(str, args) {
|
|
|
76
78
|
// database file in yml file
|
|
77
79
|
// database/config.yml
|
|
78
80
|
function getClientConfig(args) {
|
|
81
|
+
const extensions = (0, extensions_1.resolveExtensions)(args?.extensions);
|
|
79
82
|
// if there's a db connection string, use that first
|
|
80
83
|
const str = process.env.DB_CONNECTION_STRING;
|
|
81
84
|
if (str) {
|
|
82
|
-
|
|
85
|
+
const info = parseConnectionString(str, args);
|
|
86
|
+
info.devSchema = args?.devSchema;
|
|
87
|
+
info.extensions = extensions;
|
|
88
|
+
return info;
|
|
83
89
|
}
|
|
84
90
|
let file = "config/database.yml";
|
|
85
91
|
if (args) {
|
|
86
92
|
if (args.connectionString) {
|
|
87
|
-
|
|
93
|
+
const info = parseConnectionString(args.connectionString, args);
|
|
94
|
+
info.devSchema = args?.devSchema;
|
|
95
|
+
info.extensions = extensions;
|
|
96
|
+
return info;
|
|
88
97
|
}
|
|
89
98
|
if (args.db) {
|
|
90
99
|
let db;
|
|
@@ -100,6 +109,8 @@ function getClientConfig(args) {
|
|
|
100
109
|
return {
|
|
101
110
|
dialect: Dialect.Postgres,
|
|
102
111
|
config: db,
|
|
112
|
+
devSchema: args?.devSchema,
|
|
113
|
+
extensions,
|
|
103
114
|
};
|
|
104
115
|
}
|
|
105
116
|
if (args.dbFile) {
|
|
@@ -128,6 +139,8 @@ function getClientConfig(args) {
|
|
|
128
139
|
// max, min, etc
|
|
129
140
|
...cfg,
|
|
130
141
|
},
|
|
142
|
+
devSchema: args?.devSchema,
|
|
143
|
+
extensions,
|
|
131
144
|
};
|
|
132
145
|
}
|
|
133
146
|
throw new Error(`invalid yaml configuration in file`);
|
|
@@ -140,9 +153,39 @@ function getClientConfig(args) {
|
|
|
140
153
|
class DB {
|
|
141
154
|
constructor(db) {
|
|
142
155
|
this.db = db;
|
|
156
|
+
const devSchemaEnabled = (0, dev_schema_1.isDevSchemaEnabled)(db.devSchema);
|
|
157
|
+
if (devSchemaEnabled && db.dialect === Dialect.SQLite) {
|
|
158
|
+
throw new Error("dev branch schemas are only supported for postgres");
|
|
159
|
+
}
|
|
160
|
+
const resolvedDevSchema = devSchemaEnabled
|
|
161
|
+
? (0, dev_schema_1.resolveDevSchema)(db.devSchema)
|
|
162
|
+
: { enabled: false };
|
|
163
|
+
const extensions = db.extensions || [];
|
|
143
164
|
if (db.dialect === Dialect.Postgres) {
|
|
165
|
+
const searchPath = (0, extensions_1.buildExtensionSearchPath)(resolvedDevSchema, extensions);
|
|
166
|
+
if (searchPath) {
|
|
167
|
+
const option = `-c search_path=${searchPath}`;
|
|
168
|
+
db.config = {
|
|
169
|
+
...db.config,
|
|
170
|
+
options: db.config.options
|
|
171
|
+
? `${db.config.options} ${option}`
|
|
172
|
+
: option,
|
|
173
|
+
};
|
|
174
|
+
}
|
|
144
175
|
this.pool = new pg_1.Pool(db.config);
|
|
145
|
-
|
|
176
|
+
const schemaName = resolvedDevSchema.schemaName;
|
|
177
|
+
const readyTasks = [];
|
|
178
|
+
if (resolvedDevSchema.enabled && schemaName) {
|
|
179
|
+
readyTasks.push(validateDevSchema(this.pool, schemaName).then(() => touchDevSchemaRegistry(this.pool, schemaName, resolvedDevSchema.branchName).catch(() => { })));
|
|
180
|
+
}
|
|
181
|
+
readyTasks.push((0, extensions_1.initializeExtensions)(this.pool, extensions));
|
|
182
|
+
const ready = readyTasks.length > 0
|
|
183
|
+
? Promise.all(readyTasks).then(() => undefined)
|
|
184
|
+
: undefined;
|
|
185
|
+
if (ready) {
|
|
186
|
+
ready.catch(() => { });
|
|
187
|
+
}
|
|
188
|
+
this.q = new Postgres(this.pool, ready);
|
|
146
189
|
this.pool.on("error", (err, client) => {
|
|
147
190
|
(0, logger_1.log)("error", err);
|
|
148
191
|
});
|
|
@@ -203,6 +246,10 @@ class DB {
|
|
|
203
246
|
static initDB(args) {
|
|
204
247
|
const config = getClientConfig(args);
|
|
205
248
|
if (config) {
|
|
249
|
+
const existing = DB.instance;
|
|
250
|
+
if (existing) {
|
|
251
|
+
void existing.endPool().catch(() => { });
|
|
252
|
+
}
|
|
206
253
|
DB.instance = new DB(config);
|
|
207
254
|
DB.dialect = DB.instance.db.dialect;
|
|
208
255
|
}
|
|
@@ -320,29 +367,39 @@ class Sqlite {
|
|
|
320
367
|
}
|
|
321
368
|
exports.Sqlite = Sqlite;
|
|
322
369
|
class Postgres {
|
|
323
|
-
constructor(pool) {
|
|
370
|
+
constructor(pool, ready) {
|
|
324
371
|
this.pool = pool;
|
|
372
|
+
this.ready = ready;
|
|
373
|
+
}
|
|
374
|
+
async ensureReady() {
|
|
375
|
+
if (this.ready) {
|
|
376
|
+
await this.ready;
|
|
377
|
+
}
|
|
325
378
|
}
|
|
326
379
|
self() {
|
|
327
380
|
return this;
|
|
328
381
|
}
|
|
329
382
|
// returns new Pool client
|
|
330
383
|
async newClient() {
|
|
384
|
+
await this.ensureReady();
|
|
331
385
|
const client = await this.pool.connect();
|
|
332
386
|
if (!client) {
|
|
333
387
|
throw new Error(`couldn't get new client`);
|
|
334
388
|
}
|
|
335
|
-
return new PostgresClient(client);
|
|
389
|
+
return new PostgresClient(client, this.ready);
|
|
336
390
|
}
|
|
337
391
|
async query(query, values) {
|
|
392
|
+
await this.ensureReady();
|
|
338
393
|
const r = await this.pool.query(query, values);
|
|
339
394
|
return r;
|
|
340
395
|
}
|
|
341
396
|
async queryAll(query, values) {
|
|
397
|
+
await this.ensureReady();
|
|
342
398
|
const r = await this.pool.query(query, values);
|
|
343
399
|
return r;
|
|
344
400
|
}
|
|
345
401
|
async exec(query, values) {
|
|
402
|
+
await this.ensureReady();
|
|
346
403
|
const r = await this.pool.query(query, values);
|
|
347
404
|
return {
|
|
348
405
|
rowCount: r?.rowCount || 0,
|
|
@@ -350,23 +407,35 @@ class Postgres {
|
|
|
350
407
|
};
|
|
351
408
|
}
|
|
352
409
|
async close() {
|
|
353
|
-
|
|
410
|
+
if (!this.closePromise) {
|
|
411
|
+
this.closePromise = this.pool.end();
|
|
412
|
+
}
|
|
413
|
+
return this.closePromise;
|
|
354
414
|
}
|
|
355
415
|
}
|
|
356
416
|
exports.Postgres = Postgres;
|
|
357
417
|
class PostgresClient {
|
|
358
|
-
constructor(client) {
|
|
418
|
+
constructor(client, ready) {
|
|
359
419
|
this.client = client;
|
|
420
|
+
this.ready = ready;
|
|
421
|
+
}
|
|
422
|
+
async ensureReady() {
|
|
423
|
+
if (this.ready) {
|
|
424
|
+
await this.ready;
|
|
425
|
+
}
|
|
360
426
|
}
|
|
361
427
|
async query(query, values) {
|
|
428
|
+
await this.ensureReady();
|
|
362
429
|
const r = await this.client.query(query, values);
|
|
363
430
|
return r;
|
|
364
431
|
}
|
|
365
432
|
async queryAll(query, values) {
|
|
433
|
+
await this.ensureReady();
|
|
366
434
|
const r = await this.client.query(query, values);
|
|
367
435
|
return r;
|
|
368
436
|
}
|
|
369
437
|
async exec(query, values) {
|
|
438
|
+
await this.ensureReady();
|
|
370
439
|
const r = await this.client.query(query, values);
|
|
371
440
|
return {
|
|
372
441
|
rowCount: r?.rowCount || 0,
|
|
@@ -378,3 +447,29 @@ class PostgresClient {
|
|
|
378
447
|
}
|
|
379
448
|
}
|
|
380
449
|
exports.PostgresClient = PostgresClient;
|
|
450
|
+
async function validateDevSchema(pool, schemaName) {
|
|
451
|
+
const res = await pool.query("SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = $1) AS ok", [schemaName]);
|
|
452
|
+
if (!res.rows?.[0]?.ok) {
|
|
453
|
+
throw new Error(`dev branch schema \"${schemaName}\" does not exist. Run auto_schema or migrations to create it.`);
|
|
454
|
+
}
|
|
455
|
+
}
|
|
456
|
+
async function touchDevSchemaRegistry(pool, schemaName, branchName) {
|
|
457
|
+
const branch = branchName ?? null;
|
|
458
|
+
try {
|
|
459
|
+
// Avoid DDL at runtime; registry table should be created by auto_schema/prune.
|
|
460
|
+
await pool.query(`
|
|
461
|
+
INSERT INTO public.ent_dev_schema_registry (schema_name, branch_name, created_at, last_used_at)
|
|
462
|
+
VALUES ($1, $2, now(), now())
|
|
463
|
+
ON CONFLICT (schema_name)
|
|
464
|
+
DO UPDATE SET last_used_at = now(), branch_name = EXCLUDED.branch_name
|
|
465
|
+
`, [schemaName, branch]);
|
|
466
|
+
}
|
|
467
|
+
catch (err) {
|
|
468
|
+
if (err &&
|
|
469
|
+
typeof err.message === "string" &&
|
|
470
|
+
err.message.includes("ent_dev_schema_registry")) {
|
|
471
|
+
return;
|
|
472
|
+
}
|
|
473
|
+
(0, logger_1.log)("debug", err);
|
|
474
|
+
}
|
|
475
|
+
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import type { RuntimeDevSchemaConfig } from "./config";
|
|
2
|
+
export interface ResolvedDevSchema {
|
|
3
|
+
enabled: boolean;
|
|
4
|
+
schemaName?: string;
|
|
5
|
+
branchName?: string;
|
|
6
|
+
includePublic?: boolean;
|
|
7
|
+
}
|
|
8
|
+
export declare function resolveDevSchema(cfg?: RuntimeDevSchemaConfig): ResolvedDevSchema;
|
|
9
|
+
export declare function isDevSchemaEnabled(cfg?: RuntimeDevSchemaConfig): boolean;
|