@malloydata/malloy 0.0.304 → 0.0.305
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dialect/dialect.d.ts +1 -1
- package/dist/dialect/duckdb/duckdb.d.ts +1 -1
- package/dist/dialect/duckdb/duckdb.js +2 -6
- package/dist/dialect/mysql/mysql.d.ts +1 -1
- package/dist/dialect/mysql/mysql.js +2 -6
- package/dist/dialect/postgres/postgres.d.ts +1 -1
- package/dist/dialect/postgres/postgres.js +2 -6
- package/dist/dialect/snowflake/snowflake.d.ts +1 -1
- package/dist/dialect/snowflake/snowflake.js +2 -5
- package/dist/dialect/standardsql/standardsql.d.ts +1 -1
- package/dist/dialect/standardsql/standardsql.js +2 -6
- package/dist/dialect/trino/trino.d.ts +1 -1
- package/dist/dialect/trino/trino.js +2 -6
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2 -3
- package/dist/lang/ast/expressions/expr-aggregate-function.js +12 -2
- package/dist/lang/ast/expressions/expr-count.js +3 -1
- package/dist/lang/ast/expressions/expr-func.js +34 -10
- package/dist/lang/ast/expressions/expr-props.js +1 -1
- package/dist/lang/ast/expressions/expr-ungroup.js +7 -3
- package/dist/lang/ast/expressions/function-ordering.d.ts +19 -5
- package/dist/lang/ast/expressions/function-ordering.js +61 -9
- package/dist/lang/ast/field-space/include-utils.js +1 -1
- package/dist/lang/ast/field-space/index-field-space.js +3 -1
- package/dist/lang/ast/field-space/query-spaces.js +20 -11
- package/dist/lang/ast/query-builders/index-builder.js +1 -1
- package/dist/lang/ast/query-builders/reduce-builder.js +1 -1
- package/dist/lang/ast/query-elements/query-arrow.js +14 -4
- package/dist/lang/ast/query-elements/query-base.d.ts +1 -0
- package/dist/lang/ast/query-elements/query-base.js +14 -4
- package/dist/lang/ast/query-elements/query-refine.js +2 -0
- package/dist/lang/ast/query-properties/drill.js +1 -1
- package/dist/lang/ast/source-properties/join.js +6 -2
- package/dist/lang/ast/statements/define-source.js +1 -1
- package/dist/lang/ast/types/expr-value.js +1 -1
- package/dist/lang/ast/view-elements/reference-view.js +4 -1
- package/dist/lang/ast/view-elements/refine-utils.js +1 -1
- package/dist/{model/composite_source_utils.d.ts → lang/composite-source-utils.d.ts} +4 -17
- package/dist/{model/composite_source_utils.js → lang/composite-source-utils.js} +274 -44
- package/dist/lang/test/parse-expects.d.ts +1 -1
- package/dist/lang/test/parse-expects.js +6 -2
- package/dist/lang/test/test-translator.js +1 -1
- package/dist/malloy.js +1 -1
- package/dist/model/expression_compiler.d.ts +27 -0
- package/dist/model/expression_compiler.js +780 -0
- package/dist/model/field_instance.d.ts +108 -0
- package/dist/model/field_instance.js +520 -0
- package/dist/model/index.d.ts +5 -1
- package/dist/model/index.js +25 -4
- package/dist/model/join_instance.d.ts +18 -0
- package/dist/model/join_instance.js +71 -0
- package/dist/model/malloy_types.d.ts +48 -2
- package/dist/model/malloy_types.js +39 -1
- package/dist/model/query_model.d.ts +2 -0
- package/dist/model/query_model.js +7 -0
- package/dist/model/query_model_contract.d.ts +32 -0
- package/dist/model/query_model_contract.js +7 -0
- package/dist/model/query_model_impl.d.ts +30 -0
- package/dist/model/query_model_impl.js +266 -0
- package/dist/model/query_node.d.ts +132 -0
- package/dist/model/query_node.js +638 -0
- package/dist/model/query_query.d.ts +86 -0
- package/dist/model/query_query.js +1724 -0
- package/dist/model/sql_block.js +2 -2
- package/dist/model/stage_writer.d.ts +25 -0
- package/dist/model/stage_writer.js +120 -0
- package/dist/model/utils.d.ts +18 -1
- package/dist/model/utils.js +66 -1
- package/dist/to_stable.js +3 -4
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +4 -4
- package/dist/model/malloy_query.d.ts +0 -391
- package/dist/model/malloy_query.js +0 -3926
|
@@ -1,3926 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
/*
|
|
3
|
-
* Copyright 2023 Google LLC
|
|
4
|
-
*
|
|
5
|
-
* Permission is hereby granted, free of charge, to any person obtaining
|
|
6
|
-
* a copy of this software and associated documentation files
|
|
7
|
-
* (the "Software"), to deal in the Software without restriction,
|
|
8
|
-
* including without limitation the rights to use, copy, modify, merge,
|
|
9
|
-
* publish, distribute, sublicense, and/or sell copies of the Software,
|
|
10
|
-
* and to permit persons to whom the Software is furnished to do so,
|
|
11
|
-
* subject to the following conditions:
|
|
12
|
-
*
|
|
13
|
-
* The above copyright notice and this permission notice shall be
|
|
14
|
-
* included in all copies or substantial portions of the Software.
|
|
15
|
-
*
|
|
16
|
-
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
17
|
-
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
18
|
-
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
|
19
|
-
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
|
20
|
-
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
|
21
|
-
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
|
22
|
-
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
23
|
-
*/
|
|
24
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
25
|
-
exports.QueryModel = exports.Segment = void 0;
|
|
26
|
-
exports.getResultStructDefForView = getResultStructDefForView;
|
|
27
|
-
exports.getResultStructDefForQuery = getResultStructDefForQuery;
|
|
28
|
-
const uuid_1 = require("uuid");
|
|
29
|
-
const dialect_1 = require("../dialect");
|
|
30
|
-
const standardsql_1 = require("../dialect/standardsql/standardsql");
|
|
31
|
-
const malloy_types_1 = require("./malloy_types");
|
|
32
|
-
const utils_1 = require("./utils");
|
|
33
|
-
const utils_2 = require("./materialization/utils");
|
|
34
|
-
const annotation_1 = require("../annotation");
|
|
35
|
-
const filter_compilers_1 = require("./filter_compilers");
|
|
36
|
-
const malloy_filter_1 = require("@malloydata/malloy-filter");
|
|
37
|
-
function pathToCol(path) {
|
|
38
|
-
return path.map(el => encodeURIComponent(el)).join('/');
|
|
39
|
-
}
|
|
40
|
-
// quote a string for SQL use. Perhaps should be in dialect.
|
|
41
|
-
function generateSQLStringLiteral(sourceString) {
|
|
42
|
-
return `'${sourceString}'`;
|
|
43
|
-
}
|
|
44
|
-
function identifierNormalize(s) {
|
|
45
|
-
return s.replace(/[^a-zA-Z0-9_]/g, '_o_');
|
|
46
|
-
}
|
|
47
|
-
function getDialectFieldList(structDef) {
|
|
48
|
-
const dialectFieldList = [];
|
|
49
|
-
for (const f of structDef.fields.filter(malloy_types_1.fieldIsIntrinsic)) {
|
|
50
|
-
dialectFieldList.push({
|
|
51
|
-
typeDef: f,
|
|
52
|
-
sqlExpression: (0, malloy_types_1.getIdentifier)(f),
|
|
53
|
-
rawName: (0, malloy_types_1.getIdentifier)(f),
|
|
54
|
-
sqlOutputName: (0, malloy_types_1.getIdentifier)(f),
|
|
55
|
-
});
|
|
56
|
-
}
|
|
57
|
-
return dialectFieldList;
|
|
58
|
-
}
|
|
59
|
-
function pushDialectField(dl, f) {
|
|
60
|
-
const { sqlExpression, sqlOutputName, rawName } = f;
|
|
61
|
-
if ((0, malloy_types_1.isAtomic)(f.fieldDef)) {
|
|
62
|
-
dl.push({ typeDef: f.fieldDef, sqlExpression, sqlOutputName, rawName });
|
|
63
|
-
}
|
|
64
|
-
}
|
|
65
|
-
class UniqueKeyUse extends Set {
|
|
66
|
-
add_use(k) {
|
|
67
|
-
if (k !== undefined) {
|
|
68
|
-
return this.add(k);
|
|
69
|
-
}
|
|
70
|
-
}
|
|
71
|
-
hasAsymetricFunctions() {
|
|
72
|
-
return (this.has('sum') ||
|
|
73
|
-
this.has('avg') ||
|
|
74
|
-
this.has('count') ||
|
|
75
|
-
this.has('generic_asymmetric_aggregate'));
|
|
76
|
-
}
|
|
77
|
-
}
|
|
78
|
-
class StageWriter {
|
|
79
|
-
constructor(useCTE = true, parent) {
|
|
80
|
-
this.parent = parent;
|
|
81
|
-
this.withs = [];
|
|
82
|
-
this.udfs = [];
|
|
83
|
-
this.pdts = [];
|
|
84
|
-
this.dependenciesToMaterialize = {};
|
|
85
|
-
this.stagePrefix = '__stage';
|
|
86
|
-
this.useCTE = useCTE;
|
|
87
|
-
}
|
|
88
|
-
getName(id) {
|
|
89
|
-
return `${this.stagePrefix}${id}`;
|
|
90
|
-
}
|
|
91
|
-
root() {
|
|
92
|
-
if (this.parent === undefined) {
|
|
93
|
-
return this;
|
|
94
|
-
}
|
|
95
|
-
else {
|
|
96
|
-
return this.parent.root();
|
|
97
|
-
}
|
|
98
|
-
}
|
|
99
|
-
addStage(sql) {
|
|
100
|
-
if (this.useCTE) {
|
|
101
|
-
this.withs.push(sql);
|
|
102
|
-
return this.getName(this.withs.length - 1);
|
|
103
|
-
}
|
|
104
|
-
else {
|
|
105
|
-
this.withs[0] = sql;
|
|
106
|
-
return (0, utils_1.indent)(`\n(${sql})\n`);
|
|
107
|
-
}
|
|
108
|
-
}
|
|
109
|
-
addUDF(stageWriter, dialect, structDef) {
|
|
110
|
-
var _a;
|
|
111
|
-
// eslint-disable-next-line prefer-const
|
|
112
|
-
let { sql, lastStageName } = stageWriter.combineStages(true);
|
|
113
|
-
if (lastStageName === undefined) {
|
|
114
|
-
throw new Error('Internal Error: no stage to combine');
|
|
115
|
-
}
|
|
116
|
-
sql += dialect.sqlCreateFunctionCombineLastStage(lastStageName, getDialectFieldList(structDef), (_a = structDef.resultMetadata) === null || _a === void 0 ? void 0 : _a.orderBy);
|
|
117
|
-
const id = `${dialect.udfPrefix}${this.root().udfs.length}`;
|
|
118
|
-
sql = dialect.sqlCreateFunction(id, sql);
|
|
119
|
-
this.root().udfs.push(sql);
|
|
120
|
-
return id;
|
|
121
|
-
}
|
|
122
|
-
addMaterializedQuery(fieldName, query, materializatedTablePrefix) {
|
|
123
|
-
var _a;
|
|
124
|
-
const name = query.name;
|
|
125
|
-
if (!name) {
|
|
126
|
-
throw new Error(`Source ${fieldName} on a unnamed query that is tagged as materialize, only named queries can be materialized.`);
|
|
127
|
-
}
|
|
128
|
-
const path = (_a = query.location) === null || _a === void 0 ? void 0 : _a.url;
|
|
129
|
-
if (!path) {
|
|
130
|
-
throw new Error(`Trying to materialize query ${name}, but its path is not set.`);
|
|
131
|
-
}
|
|
132
|
-
// Creating an object that should uniquely identify a query within a Malloy model repo.
|
|
133
|
-
const queryMaterializationSpec = (0, utils_2.buildQueryMaterializationSpec)(path, name, materializatedTablePrefix);
|
|
134
|
-
this.root().dependenciesToMaterialize[queryMaterializationSpec.id] =
|
|
135
|
-
queryMaterializationSpec;
|
|
136
|
-
return queryMaterializationSpec.id;
|
|
137
|
-
}
|
|
138
|
-
addPDT(baseName, dialect) {
|
|
139
|
-
const sql = this.combineStages(false).sql + this.withs[this.withs.length - 1];
|
|
140
|
-
const name = baseName + (0, utils_1.generateHash)(sql);
|
|
141
|
-
const tableName = `scratch.${name}`;
|
|
142
|
-
this.root().pdts.push(dialect.sqlCreateTableAsSelect(tableName, sql));
|
|
143
|
-
return tableName;
|
|
144
|
-
}
|
|
145
|
-
// combine all the stages except the last one into a WITH statement
|
|
146
|
-
// return SQL and the last stage name
|
|
147
|
-
combineStages(includeLastStage) {
|
|
148
|
-
if (!this.useCTE) {
|
|
149
|
-
return { sql: this.withs[0], lastStageName: this.withs[0] };
|
|
150
|
-
}
|
|
151
|
-
let lastStageName = this.getName(0);
|
|
152
|
-
let prefix = 'WITH ';
|
|
153
|
-
let w = '';
|
|
154
|
-
for (let i = 0; i < this.withs.length - (includeLastStage ? 0 : 1); i++) {
|
|
155
|
-
const sql = this.withs[i];
|
|
156
|
-
lastStageName = this.getName(i);
|
|
157
|
-
if (sql === undefined) {
|
|
158
|
-
throw new Error(`Expected sql WITH to be present for stage ${lastStageName}.`);
|
|
159
|
-
}
|
|
160
|
-
w += `${prefix}${lastStageName} AS (\n${(0, utils_1.indent)(sql)})\n`;
|
|
161
|
-
prefix = ', ';
|
|
162
|
-
}
|
|
163
|
-
return { sql: w, lastStageName };
|
|
164
|
-
}
|
|
165
|
-
/** emit the SQL for all the stages. */
|
|
166
|
-
generateSQLStages() {
|
|
167
|
-
const lastStageNum = this.withs.length - 1;
|
|
168
|
-
if (lastStageNum < 0) {
|
|
169
|
-
throw new Error('No SQL generated');
|
|
170
|
-
}
|
|
171
|
-
const udfs = this.udfs.join('\n');
|
|
172
|
-
const pdts = this.pdts.join('\n');
|
|
173
|
-
const sql = this.combineStages(false).sql;
|
|
174
|
-
return udfs + pdts + sql + this.withs[lastStageNum];
|
|
175
|
-
}
|
|
176
|
-
generateCoorelatedSubQuery(dialect, structDef) {
|
|
177
|
-
var _a, _b;
|
|
178
|
-
if (!this.useCTE) {
|
|
179
|
-
return dialect.sqlCreateFunctionCombineLastStage(`(${this.withs[0]})`, getDialectFieldList(structDef), (_a = structDef.resultMetadata) === null || _a === void 0 ? void 0 : _a.orderBy);
|
|
180
|
-
}
|
|
181
|
-
else {
|
|
182
|
-
return (this.combineStages(true).sql +
|
|
183
|
-
dialect.sqlCreateFunctionCombineLastStage(this.getName(this.withs.length - 1), getDialectFieldList(structDef), (_b = structDef.resultMetadata) === null || _b === void 0 ? void 0 : _b.orderBy));
|
|
184
|
-
}
|
|
185
|
-
}
|
|
186
|
-
}
|
|
187
|
-
class GenerateState {
|
|
188
|
-
constructor() {
|
|
189
|
-
this.totalGroupSet = -1;
|
|
190
|
-
}
|
|
191
|
-
withWhere(s) {
|
|
192
|
-
const newState = new GenerateState();
|
|
193
|
-
newState.whereSQL = s;
|
|
194
|
-
newState.applyValue = this.applyValue;
|
|
195
|
-
newState.totalGroupSet = this.totalGroupSet;
|
|
196
|
-
return newState;
|
|
197
|
-
}
|
|
198
|
-
withApply(s) {
|
|
199
|
-
const newState = new GenerateState();
|
|
200
|
-
newState.whereSQL = this.whereSQL;
|
|
201
|
-
newState.applyValue = s;
|
|
202
|
-
newState.totalGroupSet = this.totalGroupSet;
|
|
203
|
-
return newState;
|
|
204
|
-
}
|
|
205
|
-
withTotal(groupSet) {
|
|
206
|
-
const newState = new GenerateState();
|
|
207
|
-
newState.whereSQL = this.whereSQL;
|
|
208
|
-
newState.applyValue = this.applyValue;
|
|
209
|
-
newState.totalGroupSet = groupSet;
|
|
210
|
-
return newState;
|
|
211
|
-
}
|
|
212
|
-
}
|
|
213
|
-
class QueryNode {
|
|
214
|
-
constructor(referenceId) {
|
|
215
|
-
this.referenceId = referenceId !== null && referenceId !== void 0 ? referenceId : (0, uuid_1.v4)();
|
|
216
|
-
}
|
|
217
|
-
getChildByName(_name) {
|
|
218
|
-
return undefined;
|
|
219
|
-
}
|
|
220
|
-
}
|
|
221
|
-
class QueryField extends QueryNode {
|
|
222
|
-
constructor(fieldDef, parent, referenceId) {
|
|
223
|
-
super(referenceId);
|
|
224
|
-
this.fieldDef = fieldDef;
|
|
225
|
-
this.parent = parent;
|
|
226
|
-
this.fieldDef = fieldDef;
|
|
227
|
-
}
|
|
228
|
-
getIdentifier() {
|
|
229
|
-
return (0, malloy_types_1.getIdentifier)(this.fieldDef);
|
|
230
|
-
}
|
|
231
|
-
uniqueKeyPossibleUse() {
|
|
232
|
-
return undefined;
|
|
233
|
-
}
|
|
234
|
-
getJoinableParent() {
|
|
235
|
-
const parent = this.parent;
|
|
236
|
-
if (parent.structDef.type === 'record') {
|
|
237
|
-
return parent.getJoinableParent();
|
|
238
|
-
}
|
|
239
|
-
return parent;
|
|
240
|
-
}
|
|
241
|
-
isAtomic() {
|
|
242
|
-
return (0, malloy_types_1.isAtomic)(this.fieldDef);
|
|
243
|
-
}
|
|
244
|
-
caseGroup(groupSets, s) {
|
|
245
|
-
if (groupSets.length === 0) {
|
|
246
|
-
return s;
|
|
247
|
-
}
|
|
248
|
-
else {
|
|
249
|
-
const exp = groupSets.length === 1
|
|
250
|
-
? `=${groupSets[0]}`
|
|
251
|
-
: ` IN (${groupSets.join(',')})`;
|
|
252
|
-
return `CASE WHEN group_set${exp} THEN\n ${s}\n END`;
|
|
253
|
-
}
|
|
254
|
-
}
|
|
255
|
-
getFullOutputName() {
|
|
256
|
-
return this.parent.getFullOutputName() + this.getIdentifier();
|
|
257
|
-
}
|
|
258
|
-
generateFieldFragment(resultSet, context, expr, state) {
|
|
259
|
-
// find the structDef and return the path to the field...
|
|
260
|
-
const field = context.getFieldByName(expr.path);
|
|
261
|
-
if ((0, malloy_types_1.hasExpression)(field.fieldDef)) {
|
|
262
|
-
const ret = this.exprToSQL(resultSet, field.parent, field.fieldDef.e, state);
|
|
263
|
-
// in order to avoid too many parens, there was some code here ..
|
|
264
|
-
// if (!ret.match(/^\(.*\)$/)) {
|
|
265
|
-
// ret = `(${ret})`;
|
|
266
|
-
// }
|
|
267
|
-
// but this failed when the expresion was (bool1)or(bool2)
|
|
268
|
-
// there could maybe be a smarter parse of the expression to avoid
|
|
269
|
-
// an extra paren, but correctness first, beauty AND correctness later
|
|
270
|
-
return `(${ret})`;
|
|
271
|
-
}
|
|
272
|
-
else {
|
|
273
|
-
// return field.parent.getIdentifier() + "." + field.fieldDef.name;
|
|
274
|
-
return field.generateExpression(resultSet);
|
|
275
|
-
}
|
|
276
|
-
}
|
|
277
|
-
generateOutputFieldFragment(resultSet, _context, frag, _state) {
|
|
278
|
-
return `(${resultSet.getField(frag.name).getAnalyticalSQL(false)})`;
|
|
279
|
-
}
|
|
280
|
-
*stringsFromSQLExpression(resultSet, context, e, state) {
|
|
281
|
-
/*
|
|
282
|
-
* Like template strings, the array of strings is paired with template insertions,
|
|
283
|
-
* each string is followed by at most one expression to be inserted
|
|
284
|
-
*/
|
|
285
|
-
const subExprList = [...e.kids.args];
|
|
286
|
-
for (const str of e.src) {
|
|
287
|
-
yield str;
|
|
288
|
-
const expr = subExprList.shift();
|
|
289
|
-
if (expr) {
|
|
290
|
-
yield this.exprToSQL(resultSet, context, expr, state);
|
|
291
|
-
}
|
|
292
|
-
}
|
|
293
|
-
}
|
|
294
|
-
getParameterMap(overload, numArgs) {
|
|
295
|
-
return new Map(overload.params.map((param, paramIndex) => {
|
|
296
|
-
const argIndexes = param.isVariadic
|
|
297
|
-
? (0, utils_1.range)(paramIndex, numArgs)
|
|
298
|
-
: [paramIndex];
|
|
299
|
-
return [param.name, { param, argIndexes }];
|
|
300
|
-
}));
|
|
301
|
-
}
|
|
302
|
-
expandFunctionCall(dialect, overload, args, orderBy, limit) {
|
|
303
|
-
function withCommas(es) {
|
|
304
|
-
const ret = [];
|
|
305
|
-
for (let i = 0; i < es.length;) {
|
|
306
|
-
ret.push(es[i]);
|
|
307
|
-
i += 1;
|
|
308
|
-
if (i < es.length) {
|
|
309
|
-
ret.push(',');
|
|
310
|
-
}
|
|
311
|
-
}
|
|
312
|
-
return ret;
|
|
313
|
-
}
|
|
314
|
-
const paramMap = this.getParameterMap(overload, args.length);
|
|
315
|
-
if (overload.dialect[dialect] === undefined) {
|
|
316
|
-
throw new Error(`Function is not defined for '${dialect}' dialect`);
|
|
317
|
-
}
|
|
318
|
-
const expanded = (0, utils_1.exprMap)(overload.dialect[dialect].e, fragment => {
|
|
319
|
-
var _a, _b;
|
|
320
|
-
if (fragment.node === 'spread') {
|
|
321
|
-
const param = fragment.e;
|
|
322
|
-
if (param.node !== 'function_parameter') {
|
|
323
|
-
throw new Error('Invalid function definition. Argument to spread must be a function parameter.');
|
|
324
|
-
}
|
|
325
|
-
const entry = paramMap.get(param.name);
|
|
326
|
-
if (entry === undefined) {
|
|
327
|
-
return fragment;
|
|
328
|
-
}
|
|
329
|
-
const spread = entry.argIndexes.map(argIndex => args[argIndex]);
|
|
330
|
-
return (0, utils_1.composeSQLExpr)(withCommas(spread));
|
|
331
|
-
}
|
|
332
|
-
else if (fragment.node === 'function_parameter') {
|
|
333
|
-
const entry = paramMap.get(fragment.name);
|
|
334
|
-
if (entry === undefined) {
|
|
335
|
-
return fragment;
|
|
336
|
-
}
|
|
337
|
-
else if (entry.param.isVariadic) {
|
|
338
|
-
const spread = entry.argIndexes.map(argIndex => args[argIndex]);
|
|
339
|
-
return (0, utils_1.composeSQLExpr)(withCommas(spread));
|
|
340
|
-
}
|
|
341
|
-
else {
|
|
342
|
-
return args[entry.argIndexes[0]];
|
|
343
|
-
}
|
|
344
|
-
}
|
|
345
|
-
else if (fragment.node === 'aggregate_order_by') {
|
|
346
|
-
return orderBy
|
|
347
|
-
? (0, utils_1.composeSQLExpr)([
|
|
348
|
-
` ${(_a = fragment.prefix) !== null && _a !== void 0 ? _a : ''}${orderBy}${(_b = fragment.suffix) !== null && _b !== void 0 ? _b : ''}`,
|
|
349
|
-
])
|
|
350
|
-
: { node: '' };
|
|
351
|
-
}
|
|
352
|
-
else if (fragment.node === 'aggregate_limit') {
|
|
353
|
-
return limit ? (0, utils_1.composeSQLExpr)([` ${limit}`]) : { node: '' };
|
|
354
|
-
}
|
|
355
|
-
return fragment;
|
|
356
|
-
});
|
|
357
|
-
return expanded;
|
|
358
|
-
}
|
|
359
|
-
getFunctionOrderBy(resultSet, context, state, orderBy, args, overload) {
|
|
360
|
-
if (orderBy.length === 0)
|
|
361
|
-
return undefined;
|
|
362
|
-
return ('ORDER BY ' +
|
|
363
|
-
orderBy
|
|
364
|
-
.map(ob => {
|
|
365
|
-
var _a;
|
|
366
|
-
const defaultOrderByArgIndex = (_a = overload.dialect[context.dialect.name].defaultOrderByArgIndex) !== null && _a !== void 0 ? _a : 0;
|
|
367
|
-
const expr = ob.node === 'functionOrderBy' ? ob.e : args[defaultOrderByArgIndex];
|
|
368
|
-
const osql = this.generateDimFragment(resultSet, context, expr, state);
|
|
369
|
-
const dirsql = ob.dir === 'asc' ? ' ASC' : ob.dir === 'desc' ? ' DESC' : '';
|
|
370
|
-
return `${osql}${dirsql}`;
|
|
371
|
-
})
|
|
372
|
-
.join(', '));
|
|
373
|
-
}
|
|
374
|
-
generateAsymmetricStringAggExpression(resultSet, context, value, separator, distinctKey, orderBy, dialectName, state) {
|
|
375
|
-
if (orderBy) {
|
|
376
|
-
throw new Error(`Function \`string_agg\` does not support fanning out with an order by in ${dialectName}`);
|
|
377
|
-
}
|
|
378
|
-
const valueSQL = this.generateDimFragment(resultSet, context, value, state);
|
|
379
|
-
const separatorSQL = separator
|
|
380
|
-
? this.generateDimFragment(resultSet, context, separator, state)
|
|
381
|
-
: '';
|
|
382
|
-
return this.parent.dialect.sqlStringAggDistinct(distinctKey, valueSQL, separatorSQL);
|
|
383
|
-
}
|
|
384
|
-
getParamForArgIndex(params, argIndex) {
|
|
385
|
-
const prevVariadic = params.slice(0, argIndex).find(p => p.isVariadic);
|
|
386
|
-
return prevVariadic !== null && prevVariadic !== void 0 ? prevVariadic : params[argIndex];
|
|
387
|
-
}
|
|
388
|
-
generateFunctionCallExpression(resultSet, context, frag, state) {
|
|
389
|
-
var _a, _b, _c;
|
|
390
|
-
const overload = frag.overload;
|
|
391
|
-
const args = frag.kids.args;
|
|
392
|
-
const isSymmetric = (_a = frag.overload.isSymmetric) !== null && _a !== void 0 ? _a : false;
|
|
393
|
-
const distinctKey = (0, malloy_types_1.expressionIsAggregate)(overload.returnType.expressionType) &&
|
|
394
|
-
!isSymmetric &&
|
|
395
|
-
this.generateDistinctKeyIfNecessary(resultSet, context, frag.structPath);
|
|
396
|
-
const aggregateLimit = frag.limit ? `LIMIT ${frag.limit}` : undefined;
|
|
397
|
-
if (frag.name === 'string_agg' &&
|
|
398
|
-
distinctKey &&
|
|
399
|
-
!context.dialect.supportsAggDistinct &&
|
|
400
|
-
context.dialect.name !== 'snowflake') {
|
|
401
|
-
return this.generateAsymmetricStringAggExpression(resultSet, context, args[0], args[1], distinctKey, frag.kids.orderBy, context.dialect.name, state);
|
|
402
|
-
}
|
|
403
|
-
if (distinctKey) {
|
|
404
|
-
if (!context.dialect.supportsAggDistinct) {
|
|
405
|
-
throw new Error(`Function \`${frag.name}\` does not support fanning out in ${context.dialect.name}`);
|
|
406
|
-
}
|
|
407
|
-
const argsExpressions = args.map(arg => {
|
|
408
|
-
return this.generateDimFragment(resultSet, context, arg, state);
|
|
409
|
-
});
|
|
410
|
-
const orderBys = (_b = frag.kids.orderBy) !== null && _b !== void 0 ? _b : [];
|
|
411
|
-
const orderByExpressions = orderBys.map(ob => {
|
|
412
|
-
var _a;
|
|
413
|
-
const defaultOrderByArgIndex = (_a = overload.dialect[context.dialect.name].defaultOrderByArgIndex) !== null && _a !== void 0 ? _a : 0;
|
|
414
|
-
const expr = ob.node === 'functionOrderBy' ? ob.e : args[defaultOrderByArgIndex];
|
|
415
|
-
return this.generateDimFragment(resultSet, context, expr, state);
|
|
416
|
-
});
|
|
417
|
-
return context.dialect.sqlAggDistinct(distinctKey, [...argsExpressions, ...orderByExpressions], valNames => {
|
|
418
|
-
const vals = valNames.map((v, i) => {
|
|
419
|
-
// Special case: the argument is required to be literal, so we use the actual argument
|
|
420
|
-
// rather than the packed value
|
|
421
|
-
// TODO don't even pack the value in the first place
|
|
422
|
-
if (i < args.length) {
|
|
423
|
-
const param = this.getParamForArgIndex(overload.params, i);
|
|
424
|
-
if (param.allowedTypes.every(t => (0, malloy_types_1.isLiteral)(t.evalSpace))) {
|
|
425
|
-
return args[i];
|
|
426
|
-
}
|
|
427
|
-
}
|
|
428
|
-
return (0, utils_1.composeSQLExpr)([v]);
|
|
429
|
-
});
|
|
430
|
-
const newArgs = vals.slice(0, argsExpressions.length);
|
|
431
|
-
const orderBy = vals
|
|
432
|
-
.slice(argsExpressions.length)
|
|
433
|
-
.map((e, i) => {
|
|
434
|
-
return { node: 'functionOrderBy', e, dir: orderBys[i].dir };
|
|
435
|
-
});
|
|
436
|
-
const orderBySQL = this.getFunctionOrderBy(resultSet, context, state, orderBy, newArgs, overload);
|
|
437
|
-
const funcCall = this.expandFunctionCall(context.dialect.name, overload, newArgs, orderBySQL, aggregateLimit);
|
|
438
|
-
return this.exprToSQL(resultSet, context, funcCall, state);
|
|
439
|
-
});
|
|
440
|
-
}
|
|
441
|
-
else {
|
|
442
|
-
const mappedArgs = (0, malloy_types_1.expressionIsAggregate)(overload.returnType.expressionType)
|
|
443
|
-
? args.map((arg, index) => {
|
|
444
|
-
// TODO We assume that all arguments to this aggregate-returning function need to
|
|
445
|
-
// have filters applied to them. This is not necessarily true in the general case,
|
|
446
|
-
// e.g. in a function `avg_plus(a, b) = avg(a) + b` -- here, `b` should not be
|
|
447
|
-
// be filtered. But since there aren't any aggregate functions like this in the
|
|
448
|
-
// standard library we have planned, we ignore this for now.
|
|
449
|
-
// Update: Now we apply this only to arguments whose parameter is not constant-requiring.
|
|
450
|
-
// So in `string_agg(val, sep)`, `sep` does not get filters applied to it because
|
|
451
|
-
// it must be constant
|
|
452
|
-
const param = this.getParamForArgIndex(overload.params, index);
|
|
453
|
-
// TODO technically this should probably look at _which_ allowed param type was matched
|
|
454
|
-
// for this argument and see if that type is at most constant... but we lose type information
|
|
455
|
-
// by this point in the compilation, so that info would have to be passed into the func call
|
|
456
|
-
// fragment.
|
|
457
|
-
return param.allowedTypes.every(t => (0, malloy_types_1.isLiteral)(t.evalSpace))
|
|
458
|
-
? arg
|
|
459
|
-
: (0, utils_1.composeSQLExpr)([
|
|
460
|
-
this.generateDimFragment(resultSet, context, arg, state),
|
|
461
|
-
]);
|
|
462
|
-
})
|
|
463
|
-
: args;
|
|
464
|
-
const orderBySql = frag.kids.orderBy
|
|
465
|
-
? this.getFunctionOrderBy(resultSet, context, state, frag.kids.orderBy, args, overload)
|
|
466
|
-
: '';
|
|
467
|
-
const funcCall = this.expandFunctionCall(context.dialect.name, overload, mappedArgs, orderBySql, aggregateLimit);
|
|
468
|
-
if ((0, malloy_types_1.expressionIsAnalytic)(overload.returnType.expressionType)) {
|
|
469
|
-
const extraPartitions = ((_c = frag.partitionBy) !== null && _c !== void 0 ? _c : []).map(outputName => {
|
|
470
|
-
return `(${resultSet.getField(outputName).getAnalyticalSQL(false)})`;
|
|
471
|
-
});
|
|
472
|
-
return this.generateAnalyticFragment(context.dialect.name, resultSet, context, funcCall, overload, state, args, extraPartitions, orderBySql);
|
|
473
|
-
}
|
|
474
|
-
return this.exprToSQL(resultSet, context, funcCall, state);
|
|
475
|
-
}
|
|
476
|
-
}
|
|
477
|
-
generateSpread(_resultSet, _context, _frag, _state) {
|
|
478
|
-
throw new Error('Unexpanded spread encountered during SQL generation');
|
|
479
|
-
}
|
|
480
|
-
generateParameterFragment(resultSet, context, expr, state) {
|
|
481
|
-
var _a;
|
|
482
|
-
const name = expr.path[0];
|
|
483
|
-
(_a = context.eventStream) === null || _a === void 0 ? void 0 : _a.emit('source-argument-compiled', { name });
|
|
484
|
-
const argument = context.arguments()[name];
|
|
485
|
-
if (argument.value) {
|
|
486
|
-
return this.exprToSQL(resultSet, context, argument.value, state);
|
|
487
|
-
}
|
|
488
|
-
throw new Error(`Can't generate SQL, no value for ${expr.path}`);
|
|
489
|
-
}
|
|
490
|
-
generateFilterFragment(resultSet, context, expr, state) {
|
|
491
|
-
const allWhere = new utils_1.AndChain(state.whereSQL);
|
|
492
|
-
for (const cond of expr.kids.filterList) {
|
|
493
|
-
allWhere.add(this.exprToSQL(resultSet, context, cond.e, state.withWhere()));
|
|
494
|
-
}
|
|
495
|
-
return this.exprToSQL(resultSet, context, expr.kids.e, state.withWhere(allWhere.sql()));
|
|
496
|
-
}
|
|
497
|
-
generateDimFragment(resultSet, context, expr, state) {
|
|
498
|
-
let dim = this.exprToSQL(resultSet, context, expr, state);
|
|
499
|
-
if (state.whereSQL) {
|
|
500
|
-
dim = `CASE WHEN ${state.whereSQL} THEN ${dim} END`;
|
|
501
|
-
}
|
|
502
|
-
return dim;
|
|
503
|
-
}
|
|
504
|
-
generateUngroupedFragment(resultSet, context, expr, state) {
|
|
505
|
-
if (state.totalGroupSet !== -1) {
|
|
506
|
-
throw new Error('Already in ALL. Cannot nest within an all calcuation.');
|
|
507
|
-
}
|
|
508
|
-
let totalGroupSet;
|
|
509
|
-
let ungroupSet;
|
|
510
|
-
if (expr.fields && expr.fields.length > 0) {
|
|
511
|
-
const key = expr.fields.sort().join('|') + expr.node;
|
|
512
|
-
ungroupSet = resultSet.ungroupedSets.get(key);
|
|
513
|
-
if (ungroupSet === undefined) {
|
|
514
|
-
throw new Error(`Internal Error, cannot find groupset with key ${key}`);
|
|
515
|
-
}
|
|
516
|
-
totalGroupSet = ungroupSet.groupSet;
|
|
517
|
-
}
|
|
518
|
-
else {
|
|
519
|
-
totalGroupSet = resultSet.parent ? resultSet.parent.groupSet : 0;
|
|
520
|
-
}
|
|
521
|
-
const s = this.exprToSQL(resultSet, context, expr.e, state.withTotal(totalGroupSet));
|
|
522
|
-
const fields = resultSet.getUngroupPartitions(ungroupSet);
|
|
523
|
-
let partitionBy = '';
|
|
524
|
-
const fieldsString = fields.map(f => f.getAnalyticalSQL(true)).join(', ');
|
|
525
|
-
if (fieldsString.length > 0) {
|
|
526
|
-
partitionBy = `PARTITION BY ${fieldsString}`;
|
|
527
|
-
}
|
|
528
|
-
return `MAX(${s}) OVER (${partitionBy})`;
|
|
529
|
-
}
|
|
530
|
-
generateDistinctKeyIfNecessary(resultSet, context, structPath) {
|
|
531
|
-
let struct = context;
|
|
532
|
-
if (structPath) {
|
|
533
|
-
struct = this.parent.getStructByName(structPath);
|
|
534
|
-
}
|
|
535
|
-
if (struct.needsSymetricCalculation(resultSet)) {
|
|
536
|
-
return struct.getDistinctKey().generateExpression(resultSet);
|
|
537
|
-
}
|
|
538
|
-
else {
|
|
539
|
-
return undefined;
|
|
540
|
-
}
|
|
541
|
-
}
|
|
542
|
-
generateSumFragment(resultSet, context, expr, state) {
|
|
543
|
-
const dimSQL = this.generateDimFragment(resultSet, context, expr.e, state);
|
|
544
|
-
const distinctKeySQL = this.generateDistinctKeyIfNecessary(resultSet, context, expr.structPath);
|
|
545
|
-
let ret;
|
|
546
|
-
if (distinctKeySQL) {
|
|
547
|
-
if (this.parent.dialect.supportsSumDistinctFunction) {
|
|
548
|
-
ret = this.parent.dialect.sqlSumDistinct(distinctKeySQL, dimSQL, 'SUM');
|
|
549
|
-
}
|
|
550
|
-
else {
|
|
551
|
-
ret = sqlSumDistinct(this.parent.dialect, dimSQL, distinctKeySQL);
|
|
552
|
-
}
|
|
553
|
-
}
|
|
554
|
-
else {
|
|
555
|
-
ret = `SUM(${dimSQL})`;
|
|
556
|
-
}
|
|
557
|
-
return `COALESCE(${ret},0)`;
|
|
558
|
-
}
|
|
559
|
-
generateSymmetricFragment(resultSet, context, expr, state) {
|
|
560
|
-
const dimSQL = this.generateDimFragment(resultSet, context, expr.e, state);
|
|
561
|
-
const f = expr.function === 'distinct' ? 'count(distinct ' : expr.function + '(';
|
|
562
|
-
return `${f}${dimSQL})`;
|
|
563
|
-
}
|
|
564
|
-
generateAvgFragment(resultSet, context, expr, state) {
|
|
565
|
-
// find the structDef and return the path to the field...
|
|
566
|
-
const dimSQL = this.generateDimFragment(resultSet, context, expr.e, state);
|
|
567
|
-
const distinctKeySQL = this.generateDistinctKeyIfNecessary(resultSet, context, expr.structPath);
|
|
568
|
-
if (distinctKeySQL) {
|
|
569
|
-
let countDistinctKeySQL = distinctKeySQL;
|
|
570
|
-
if (state.whereSQL) {
|
|
571
|
-
countDistinctKeySQL = `CASE WHEN ${state.whereSQL} THEN ${distinctKeySQL} END`;
|
|
572
|
-
}
|
|
573
|
-
let sumDistinctSQL;
|
|
574
|
-
let avgDistinctSQL;
|
|
575
|
-
if (this.parent.dialect.supportsSumDistinctFunction) {
|
|
576
|
-
avgDistinctSQL = this.parent.dialect.sqlSumDistinct(distinctKeySQL, dimSQL, 'AVG');
|
|
577
|
-
}
|
|
578
|
-
else {
|
|
579
|
-
sumDistinctSQL = sqlSumDistinct(this.parent.dialect, dimSQL, distinctKeySQL);
|
|
580
|
-
avgDistinctSQL = `(${sumDistinctSQL})/NULLIF(COUNT(DISTINCT CASE WHEN ${dimSQL} IS NOT NULL THEN ${countDistinctKeySQL} END),0)`;
|
|
581
|
-
}
|
|
582
|
-
return avgDistinctSQL;
|
|
583
|
-
}
|
|
584
|
-
else {
|
|
585
|
-
return `AVG(${dimSQL})`;
|
|
586
|
-
}
|
|
587
|
-
}
|
|
588
|
-
generateCountFragment(resultSet, context, expr, state) {
|
|
589
|
-
let func = 'COUNT(';
|
|
590
|
-
let thing = '1';
|
|
591
|
-
let struct = context;
|
|
592
|
-
if (expr.structPath) {
|
|
593
|
-
struct = this.parent.root().getStructByName(expr.structPath);
|
|
594
|
-
}
|
|
595
|
-
const joinName = struct.getJoinableParent().getIdentifier();
|
|
596
|
-
const join = resultSet.root().joins.get(joinName);
|
|
597
|
-
if (!join) {
|
|
598
|
-
throw new Error(`Join ${joinName} not found in result set`);
|
|
599
|
-
}
|
|
600
|
-
if (!join.leafiest || join.makeUniqueKey) {
|
|
601
|
-
func = 'COUNT(DISTINCT ';
|
|
602
|
-
thing = struct.getDistinctKey().generateExpression(resultSet);
|
|
603
|
-
}
|
|
604
|
-
// const distinctKeySQL = this.generateDistinctKeyIfNecessary(
|
|
605
|
-
// resultSet,
|
|
606
|
-
// context,
|
|
607
|
-
// expr.structPath
|
|
608
|
-
// );
|
|
609
|
-
// if (distinctKeySQL) {
|
|
610
|
-
// func = 'COUNT(DISTINCT';
|
|
611
|
-
// thing = distinctKeySQL;
|
|
612
|
-
// }
|
|
613
|
-
// find the structDef and return the path to the field...
|
|
614
|
-
if (state.whereSQL) {
|
|
615
|
-
return `${func}CASE WHEN ${state.whereSQL} THEN ${thing} END)`;
|
|
616
|
-
}
|
|
617
|
-
else {
|
|
618
|
-
return `${func}${thing})`;
|
|
619
|
-
}
|
|
620
|
-
}
|
|
621
|
-
generateSourceReference(resultSet, context, expr) {
|
|
622
|
-
if (expr.path === undefined) {
|
|
623
|
-
return context.getSQLIdentifier();
|
|
624
|
-
}
|
|
625
|
-
else {
|
|
626
|
-
return context.getFieldByName(expr.path).getIdentifier();
|
|
627
|
-
}
|
|
628
|
-
}
|
|
629
|
-
getAnalyticPartitions(resultStruct, extraPartitionFields) {
|
|
630
|
-
const ret = [];
|
|
631
|
-
let p = resultStruct.parent;
|
|
632
|
-
while (p !== undefined) {
|
|
633
|
-
const scalars = p.fields(fi => isBasicScalar(fi.f) && fi.fieldUsage.type === 'result');
|
|
634
|
-
const partitionSQLs = scalars.map(fi => fi.getAnalyticalSQL(true));
|
|
635
|
-
ret.push(...partitionSQLs);
|
|
636
|
-
p = p.parent;
|
|
637
|
-
}
|
|
638
|
-
if (extraPartitionFields) {
|
|
639
|
-
ret.push(...extraPartitionFields);
|
|
640
|
-
}
|
|
641
|
-
return ret;
|
|
642
|
-
}
|
|
643
|
-
// Assemble the full `<fn> OVER(PARTITION BY ... ORDER BY ... ROWS BETWEEN ...)`
// SQL for an analytic (window) function call.
//   dialect           - dialect name used to select the overload's dialect info
//   resultStruct      - result set this expression belongs to
//   expr              - the function-call expression to render
//   overload          - resolved function overload (params + per-dialect info)
//   args              - translated argument expression nodes
//   partitionByFields - extra partition expressions requested in the source
//   funcOrdering      - pre-rendered ORDER BY, if the call supplied one
generateAnalyticFragment(dialect, resultStruct, context, expr, overload, state, args, partitionByFields, funcOrdering) {
    const isComplex = resultStruct.root().isComplexQuery;
    const partitionFields = this.getAnalyticPartitions(resultStruct, partitionByFields);
    // Complex (multi-group-set) queries must also partition by group_set.
    const allPartitions = [
        ...(isComplex ? ['group_set'] : []),
        ...partitionFields,
    ];
    const partitionBy = allPartitions.length > 0
        ? `PARTITION BY ${allPartitions.join(', ')}`
        : '';
    let orderBy = funcOrdering !== null && funcOrdering !== void 0 ? funcOrdering : '';
    const dialectOverload = overload.dialect[dialect];
    if (!funcOrdering && dialectOverload.needsWindowOrderBy) {
        // calculate the ordering.
        const obSQL = [];
        let orderingField;
        const orderByDef = resultStruct.firstSegment.orderBy ||
            resultStruct.calculateDefaultOrderBy();
        for (const ordering of orderByDef) {
            // Order-by entries are either field names or 1-based result indexes.
            if (typeof ordering.field === 'string') {
                orderingField = {
                    name: ordering.field,
                    fif: resultStruct.getField(ordering.field),
                };
            }
            else {
                orderingField = resultStruct.getFieldByNumber(ordering.field);
            }
            const exprType = orderingField.fif.f.fieldDef.expressionType;
            // TODO today we do not support ordering by analytic functions at all, so this works
            // but eventually we will, and this check will just want to ensure that the order field
            // isn't the same as the field we're currently compiling (otherwise we will loop infintely)
            if ((0, malloy_types_1.expressionIsAnalytic)(exprType)) {
                continue;
            }
            if (resultStruct.firstSegment.type === 'reduce') {
                const orderSQL = orderingField.fif.getAnalyticalSQL(false);
                // const orderSQL = this.generateDimFragment(resultSet, context, arg, state)
                obSQL.push(` ${orderSQL} ${ordering.dir || 'ASC'}`);
            }
            else if (resultStruct.firstSegment.type === 'project') {
                obSQL.push(` ${orderingField.fif.f.generateExpression(resultStruct)} ${ordering.dir || 'ASC'}`);
            }
        }
        if (obSQL.length > 0) {
            orderBy = ' ' + this.parent.dialect.sqlOrderBy(obSQL, 'analytical');
        }
    }
    let between = '';
    if (dialectOverload.between) {
        // Window frame bounds: -1 means UNBOUNDED, a number is used literally,
        // a string names a function parameter whose literal value supplies it.
        const [preceding, following] = [
            dialectOverload.between.preceding,
            dialectOverload.between.following,
        ].map(value => {
            if (value === -1) {
                return 'UNBOUNDED';
            }
            if (typeof value === 'number') {
                return value;
            }
            const argIndex = overload.params.findIndex(param => param.name === value);
            const arg = args[argIndex];
            if (arg.node !== 'numberLiteral') {
                throw new Error('Invalid number of rows for window spec');
            }
            // TODO this does not handle float literals correctly
            return arg.literal;
        });
        between = `ROWS BETWEEN ${preceding} PRECEDING AND ${following} FOLLOWING`;
    }
    const funcSQL = this.exprToSQL(resultStruct, context, expr, state);
    let retExpr = `${funcSQL} OVER(${partitionBy} ${orderBy} ${between})`;
    if (isComplex) {
        // Restrict the value to this result's group set.
        retExpr = `CASE WHEN group_set=${resultStruct.groupSet} THEN ${retExpr} END`;
    }
    return retExpr;
}
|
|
720
|
-
generateCaseSQL(pf) {
|
|
721
|
-
const caseStmt = ['CASE'];
|
|
722
|
-
if (pf.kids.caseValue !== undefined) {
|
|
723
|
-
caseStmt.push(`${pf.kids.caseValue.sql}`);
|
|
724
|
-
}
|
|
725
|
-
for (let i = 0; i < pf.kids.caseWhen.length; i += 1) {
|
|
726
|
-
caseStmt.push(`WHEN ${pf.kids.caseWhen[i].sql} THEN ${pf.kids.caseThen[i].sql}`);
|
|
727
|
-
}
|
|
728
|
-
if (pf.kids.caseElse !== undefined) {
|
|
729
|
-
caseStmt.push(`ELSE ${pf.kids.caseElse.sql}`);
|
|
730
|
-
}
|
|
731
|
-
caseStmt.push('END');
|
|
732
|
-
return caseStmt.join(' ');
|
|
733
|
-
}
|
|
734
|
-
// Core expression-to-SQL translator. Recursively translates child nodes
// first (stashing `.sql` on shallow copies so dialect hooks can see the
// translated children), gives the dialect a chance to render the node,
// and otherwise dispatches on the node kind.
exprToSQL(resultSet, context, exprToTranslate, state = new GenerateState()) {
    var _a;
    // Wrap non leaf sub expressions in parenthesis
    const subExpr = function (qf, e) {
        const sql = qf.exprToSQL(resultSet, context, e, state);
        if ((0, malloy_types_1.exprHasKids)(e)) {
            return `(${sql})`;
        }
        return sql;
    };
    /*
     * Translate the children first, and stash the translation
     * in the nodes themselves, so that if we call into the dialect
     * it will have access to the translated children.
     */
    let expr = exprToTranslate;
    if ((0, malloy_types_1.exprHasE)(exprToTranslate)) {
        // Single-child node (`e`).
        expr = { ...exprToTranslate };
        const eSql = subExpr(this, expr.e);
        expr.e = { ...expr.e, sql: eSql };
    }
    else if ((0, malloy_types_1.exprHasKids)(exprToTranslate)) {
        // Multi-child node (`kids` map of nodes or arrays of nodes).
        expr = { ...exprToTranslate };
        const oldKids = exprToTranslate.kids;
        for (const [name, kidExpr] of Object.entries(oldKids)) {
            if (kidExpr === null)
                continue;
            if (Array.isArray(kidExpr)) {
                expr.kids[name] = kidExpr.map(e => {
                    return { ...e, sql: subExpr(this, e) };
                });
            }
            else {
                expr.kids[name] = { ...oldKids[name], sql: subExpr(this, kidExpr) };
            }
        }
    }
    /*
     * Give the dialect a chance to translate this node
     */
    const qi = resultSet.getQueryInfo();
    const dialectSQL = this.parent.dialect.exprToSQL(qi, expr);
    if (dialectSQL) {
        return dialectSQL;
    }
    switch (expr.node) {
        case 'field':
            return this.generateFieldFragment(resultSet, context, expr, state);
        case 'parameter':
            return this.generateParameterFragment(resultSet, context, expr, state);
        case 'filteredExpr':
            return this.generateFilterFragment(resultSet, context, expr, state);
        case 'all':
        case 'exclude':
            return this.generateUngroupedFragment(resultSet, context, expr, state);
        case 'genericSQLExpr':
            return Array.from(this.stringsFromSQLExpression(resultSet, context, expr, state)).join('');
        case 'aggregate': {
            // Dispatch to the per-aggregate generators above.
            let agg = '';
            if (expr.function === 'sum') {
                agg = this.generateSumFragment(resultSet, context, expr, state);
            }
            else if (expr.function === 'avg') {
                agg = this.generateAvgFragment(resultSet, context, expr, state);
            }
            else if (expr.function === 'count') {
                agg = this.generateCountFragment(resultSet, context, expr, state);
            }
            else if (expr.function === 'min' ||
                expr.function === 'max' ||
                expr.function === 'distinct') {
                agg = this.generateSymmetricFragment(resultSet, context, expr, state);
            }
            else {
                throw new Error(`Internal Error: Unknown aggregate function ${expr.function}`);
            }
            if (resultSet.root().isComplexQuery) {
                // Guard the aggregate by group set (totals use their own set).
                let groupSet = resultSet.groupSet;
                if (state.totalGroupSet !== -1) {
                    groupSet = state.totalGroupSet;
                }
                return this.caseGroup([groupSet], agg);
            }
            return agg;
        }
        case 'function_parameter':
            throw new Error('Internal Error: Function parameter fragment remaining during SQL generation');
        case 'outputField':
            return this.generateOutputFieldFragment(resultSet, context, expr, state);
        case 'function_call':
            return this.generateFunctionCallExpression(resultSet, context, expr, state);
        case 'spread':
            return this.generateSpread(resultSet, context, expr, state);
        case 'source-reference':
            return this.generateSourceReference(resultSet, context, expr);
        case '+':
        case '-':
        case '*':
        case '%':
        case '/':
        case '>':
        case '<':
        case '>=':
        case '<=':
        case '=':
            return `${expr.kids.left.sql}${expr.node}${expr.kids.right.sql}`;
        // Malloy inequality comparisons always return a boolean
        case '!=': {
            const notEqual = `${expr.kids.left.sql}!=${expr.kids.right.sql}`;
            return `COALESCE(${notEqual},true)`;
        }
        case 'and':
        case 'or':
            return `${expr.kids.left.sql} ${expr.node} ${expr.kids.right.sql}`;
        case 'coalesce':
            return `COALESCE(${expr.kids.left.sql},${expr.kids.right.sql})`;
        case 'in': {
            const oneOf = expr.kids.oneOf.map(o => o.sql).join(',');
            return `${expr.kids.e.sql} ${expr.not ? 'NOT IN' : 'IN'} (${oneOf})`;
        }
        case 'like':
        case '!like': {
            // String literals route through the dialect's LIKE handling.
            const likeIt = expr.node === 'like' ? 'LIKE' : 'NOT LIKE';
            const compare = expr.kids.right.node === 'stringLiteral'
                ? this.parent.dialect.sqlLike(likeIt, (_a = expr.kids.left.sql) !== null && _a !== void 0 ? _a : '', expr.kids.right.literal)
                : `${expr.kids.left.sql} ${likeIt} ${expr.kids.right.sql}`;
            return expr.node === 'like' ? compare : `COALESCE(${compare},true)`;
        }
        case '()':
            return `(${expr.e.sql})`;
        case 'not':
            // Malloy not operator always returns a boolean
            return `COALESCE(NOT ${expr.e.sql},TRUE)`;
        case 'unary-':
            return `-${expr.e.sql}`;
        case 'is-null':
            return `${expr.e.sql} IS NULL`;
        case 'is-not-null':
            return `${expr.e.sql} IS NOT NULL`;
        case 'true':
        case 'false':
            return expr.node;
        case 'null':
            return 'NULL';
        case 'case':
            return this.generateCaseSQL(expr);
        case '':
            return '';
        case 'filterCondition':
            // our child will be translated at the top of this function
            if (expr.e.sql) {
                expr.sql = expr.e.sql;
                return expr.sql;
            }
            return '';
        case 'functionDefaultOrderBy':
        case 'functionOrderBy':
            return '';
        // TODO: throw an error here; not simple because we call into this
        // code currently before the composite source is resolved in some cases
        case 'compositeField':
            return '{COMPOSITE_FIELD}';
        case 'filterMatch':
            return this.generateAppliedFilter(context, expr);
        case 'filterLiteral':
            return 'INTERNAL ERROR FILTER EXPRESSION VALUE SHOULD NOT BE USED';
        default:
            throw new Error(`Internal Error: Unknown expression node '${expr.node}' ${JSON.stringify(expr, undefined, 2)}`);
    }
}
|
|
904
|
-
// Compile a `filterMatch` node: resolve the filter expression (unwrapping
// parens and, for parameters, substituting the source-argument value),
// parse its filter-language source by data type, and hand the parse to
// the per-type filter compiler.
generateAppliedFilter(context, filterMatchExpr) {
    var _a;
    let filterExpr = filterMatchExpr.kids.filterExpr;
    // Strip any number of wrapping parentheses.
    while (filterExpr.node === '()') {
        filterExpr = filterExpr.e;
    }
    if (filterExpr.node === 'parameter') {
        const name = filterExpr.path[0];
        // Tell listeners a source argument was actually used.
        (_a = context.eventStream) === null || _a === void 0 ? void 0 : _a.emit('source-argument-compiled', { name });
        const argument = context.arguments()[name];
        if (argument.value) {
            filterExpr = argument.value;
        }
        else {
            throw new Error(`Parameter ${name} was expected to be a filter expression`);
        }
    }
    if (filterExpr.node !== 'filterLiteral') {
        throw new Error('Can only use filter expression literals or parameters as filter expressions');
    }
    const filterSrc = filterExpr.filterSrc;
    let fParse;
    // NOTE(review): no default case — an unexpected dataType leaves fParse
    // undefined and the `.log` access below throws a TypeError. Presumably
    // dataType is constrained upstream; confirm.
    switch (filterMatchExpr.dataType) {
        case 'string':
            fParse = malloy_filter_1.StringFilterExpression.parse(filterSrc);
            break;
        case 'number':
            fParse = malloy_filter_1.NumberFilterExpression.parse(filterSrc);
            break;
        case 'boolean':
            fParse = malloy_filter_1.BooleanFilterExpression.parse(filterSrc);
            break;
        case 'date':
        case 'timestamp':
            fParse = malloy_filter_1.TemporalFilterExpression.parse(filterSrc);
            break;
    }
    if (fParse.log.length > 0) {
        // Surface only the first parse problem.
        throw new Error(`Filter expression parse error: ${fParse.log[0]}`);
    }
    return filter_compilers_1.FilterCompilers.compile(filterMatchExpr.dataType, fParse.parsed, filterMatchExpr.kids.expr.sql || '', context.dialect);
}
|
|
946
|
-
isNestedInParent(parentDef) {
|
|
947
|
-
switch (parentDef.type) {
|
|
948
|
-
case 'record':
|
|
949
|
-
case 'array':
|
|
950
|
-
return true;
|
|
951
|
-
return true;
|
|
952
|
-
default:
|
|
953
|
-
return false;
|
|
954
|
-
}
|
|
955
|
-
}
|
|
956
|
-
isArrayElement(parentDef) {
|
|
957
|
-
return (parentDef.type === 'array' &&
|
|
958
|
-
parentDef.elementTypeDef.type !== 'record_element');
|
|
959
|
-
}
|
|
960
|
-
// Render this field as SQL within `resultSet`. Expression fields are
// compiled directly; plain fields are referenced through the parent,
// after first forcing any expression-record ancestors to compute their
// alias values (side effect via informOfAliasValue).
generateExpression(resultSet) {
    // If the field itself is an expression, generate it ..
    if ((0, malloy_types_1.hasExpression)(this.fieldDef)) {
        return this.exprToSQL(resultSet, this.parent, this.fieldDef.e);
    }
    // The field itself is not an expression, so we would like
    // to generate a dotted path to the field, EXCEPT ...
    // some of the steps in the dotting might not exist
    // in the namespace of their parent, but rather be record
    // expressions which should be evaluated in the namespace
    // of their parent.
    // So we walk the tree and ask each one to compute itself
    for (let ancestor = this.parent; ancestor !== undefined; ancestor = ancestor.parent) {
        if (ancestor.structDef.type === 'record' &&
            (0, malloy_types_1.hasExpression)(ancestor.structDef) &&
            ancestor.recordAlias === undefined) {
            if (!ancestor.parent) {
                throw new Error('Inconcievable record ancestor with expression but no parent');
            }
            // Evaluate the record expression in the ancestor's parent namespace
            // and cache it on the ancestor.
            const aliasValue = this.exprToSQL(resultSet, ancestor.parent, ancestor.structDef.e);
            ancestor.informOfAliasValue(aliasValue);
        }
    }
    return this.parent.sqlChildReference(this.fieldDef.name, this.parent.structDef.type === 'record'
        ? {
            result: resultSet,
            field: this,
        }
        : undefined);
}
|
|
990
|
-
// Default: fields are excluded from `*` wildcard expansion
// (QueryAtomicField overrides this to return true).
includeInWildcard() {
    return false;
}
|
|
993
|
-
}
|
|
994
|
-
// Atomic field whose expression is an analytic calculation.
function isBasicCalculation(f) {
    if (!(f instanceof QueryAtomicField)) {
        return false;
    }
    return isCalculatedField(f);
}
|
|
997
|
-
// Atomic field whose expression is an aggregate.
function isBasicAggregate(f) {
    if (!(f instanceof QueryAtomicField)) {
        return false;
    }
    return isAggregateField(f);
}
|
|
1000
|
-
// Atomic field that is a plain scalar (neither aggregate nor calculation).
function isBasicScalar(f) {
    if (!(f instanceof QueryAtomicField)) {
        return false;
    }
    return isScalarField(f);
}
|
|
1003
|
-
// A scalar field is atomic and, if it carries an expression, that
// expression is neither a calculation nor an aggregate.
function isScalarField(f) {
    if (!f.isAtomic()) {
        return false;
    }
    if ((0, malloy_types_1.hasExpression)(f.fieldDef)) {
        const exprType = f.fieldDef.expressionType;
        if ((0, malloy_types_1.expressionIsCalculation)(exprType) || (0, malloy_types_1.expressionIsAggregate)(exprType)) {
            return false;
        }
    }
    return true;
}
|
|
1015
|
-
// Atomic expression field whose expression type is an analytic calculation.
function isCalculatedField(f) {
    if (!f.isAtomic()) {
        return false;
    }
    if (!(0, malloy_types_1.hasExpression)(f.fieldDef)) {
        return false;
    }
    return (0, malloy_types_1.expressionIsCalculation)(f.fieldDef.expressionType);
}
|
|
1021
|
-
// Atomic expression field whose expression type is an aggregate.
function isAggregateField(f) {
    if (!f.isAtomic()) {
        return false;
    }
    if (!(0, malloy_types_1.hasExpression)(f.fieldDef)) {
        return false;
    }
    return (0, malloy_types_1.expressionIsAggregate)(f.fieldDef.expressionType);
}
|
|
1027
|
-
// Base class for leaf, atomic-typed query fields (string, number, etc.).
class QueryAtomicField extends QueryField {
    constructor(fieldDef, parent, refId) {
        super(fieldDef, parent, refId);
        // Re-assign so this instance carries the (narrower) atomic fieldDef.
        this.fieldDef = fieldDef; // wish I didn't have to do this
    }
    // Atomic fields are included when `*` is expanded.
    includeInWildcard() {
        return true;
    }
    // Atomic fields contribute no filters of their own.
    getFilterList() {
        return [];
    }
}
|
|
1039
|
-
// class QueryMeasure extends QueryField {}
|
|
1040
|
-
// Thin type-marker subclasses: one QueryAtomicField per atomic Malloy type.
class QueryFieldString extends QueryAtomicField {
}
class QueryFieldNumber extends QueryAtomicField {
}
class QueryFieldBoolean extends QueryAtomicField {
}
class QueryFieldJSON extends QueryAtomicField {
}
class QueryFieldUnsupported extends QueryAtomicField {
}
|
|
1050
|
-
// A date-typed field. If the fieldDef carries a timeframe, the generated
// SQL is wrapped in a date truncation to that unit.
class QueryFieldDate extends QueryAtomicField {
    generateExpression(resultSet) {
        const baseSQL = super.generateExpression(resultSet);
        const timeframe = this.fieldDef.timeframe;
        if (!timeframe) {
            return baseSQL;
        }
        // Wrap the already-rendered SQL in a trunc node and re-translate.
        const truncNode = {
            node: 'trunc',
            e: (0, malloy_types_1.mkTemporal)({ node: 'genericSQLExpr', src: [baseSQL], kids: { args: [] } }, 'date'),
            units: timeframe,
        };
        return this.exprToSQL(resultSet, this.parent, truncNode);
    }
    // Clone ourselves on demand, truncated to the named timeframe.
    getChildByName(name) {
        const childDef = {
            ...this.fieldDef,
            as: `${this.getIdentifier()}_${name}`,
            timeframe: name,
        };
        return new QueryFieldDate(childDef, this.parent);
    }
}
|
|
1076
|
-
// A timestamp-typed field.
class QueryFieldTimestamp extends QueryAtomicField {
    // Clone ourselves on demand, truncated to the named timeframe.
    getChildByName(name) {
        const childDef = {
            ...this.fieldDef,
            as: `${this.getIdentifier()}_${name}`,
            timeframe: name,
        };
        return new QueryFieldTimestamp(childDef, this.parent);
    }
}
|
|
1087
|
-
// The synthetic per-row key used by symmetric aggregates.
class QueryFieldDistinctKey extends QueryAtomicField {
    generateExpression(resultSet) {
        const owner = this.parent;
        if (owner.primaryKey()) {
            // A declared primary key serves as the distinct key.
            const pk = owner.getPrimaryKeyField(this.fieldDef);
            return pk.generateExpression(resultSet);
        }
        if (owner.structDef.type === 'array') {
            // Unnested array: combine the enclosing struct's key with a
            // dialect-provided per-row id.
            const enclosing = owner.parent;
            const parentKey = enclosing === null || enclosing === undefined
                ? undefined
                : enclosing.getDistinctKey().generateExpression(resultSet);
            return owner.dialect.sqlMakeUnnestKey(parentKey || '', // shouldn't have to do this...
            owner.dialect.sqlFieldReference(owner.getIdentifier(), 'table', '__row_id', 'string'));
        }
        return owner.dialect.sqlFieldReference(owner.getIdentifier(), 'table', '__distinct_key', 'string');
    }
    // Synthetic column; never part of `*` expansion.
    includeInWildcard() {
        return false;
    }
}
|
|
1108
|
-
const NUMERIC_DECIMAL_PRECISION = 9;
// Emulate SUM(DISTINCT expr) keyed by a distinct key, for dialects with
// no native symmetric sum: hash the key to a unique number, add it to the
// scaled value inside SUM(DISTINCT ...), then subtract the sum of the
// hashes back out and undo the scaling.
function sqlSumDistinct(dialect, sqlExp, sqlDistintKey) {
    const precision = 9;
    const hashedKey = dialect.sqlSumDistinctHashedKey(sqlDistintKey);
    const scale = 10 ** (precision - NUMERIC_DECIMAL_PRECISION);
    const sumSQL = `
(
SUM(DISTINCT
(CAST(ROUND(COALESCE(${sqlExp},0)*(${scale}*1.0), ${NUMERIC_DECIMAL_PRECISION}) AS ${dialect.defaultDecimalType}) +
${hashedKey}
))
-
SUM(DISTINCT ${hashedKey})
)`;
    const unscaled = `(${sumSQL}/(${scale}*1.0))`;
    return `CAST(${unscaled} AS ${dialect.defaultNumberType})`;
}
|
|
1126
|
-
// One field's appearance inside a result set: pairs the QueryField with
// how it is used (result / where / etc.) and caches analytic SQL forms.
class FieldInstanceField {
    constructor(f, fieldUsage, parent, drillExpression) {
        this.f = f;
        this.fieldUsage = fieldUsage;
        this.parent = parent;
        this.drillExpression = drillExpression;
        this.type = 'field';
        this.additionalGroupSets = [];
    }
    root() {
        return this.parent.root();
    }
    getSQL() {
        const base = this.f.generateExpression(this.parent);
        if (!isScalarField(this.f)) {
            return base;
        }
        // Scalars in grouped results are guarded by the relevant group sets.
        const groups = this.parent.groupSet > 0
            ? this.parent.childGroups.concat(this.additionalGroupSets)
            : [];
        return this.f.caseGroup(groups, base);
    }
    getAnalyticalSQL(forPartition) {
        if (this.analyticalSQL === undefined) {
            return this.getSQL();
        }
        if (forPartition && this.partitionSQL) {
            return this.partitionSQL;
        }
        return this.analyticalSQL;
    }
}
|
|
1159
|
-
class FieldInstanceResult {
|
|
1160
|
-
// Result-set node for one (possibly nested) query segment.
//   turtleDef - the turtle (pipeline) definition this result renders
//   parent    - enclosing FieldInstanceResult, undefined at the root
constructor(turtleDef, parent) {
    this.turtleDef = turtleDef;
    this.parent = parent;
    this.type = 'query';
    // Fields and nested query results, keyed by output name.
    this.allFields = new Map();
    this.groupSet = 0;
    this.depth = 0;
    this.childGroups = [];
    this.hasHaving = false;
    // One entry per distinct ungrouping (all/exclude) expression.
    this.ungroupedSets = new Map();
    // query: QueryQuery;
    this.resultUsesUngrouped = false;
    this.firstSegment = turtleDef.pipeline[0];
}
|
|
1174
|
-
/**
|
|
1175
|
-
* Information about the query containing this result set. Invented
|
|
1176
|
-
* to pass on timezone information, but maybe more things will
|
|
1177
|
-
* eventually go in here.
|
|
1178
|
-
* @returns QueryInfo
|
|
1179
|
-
*/
|
|
1180
|
-
getQueryInfo() {
|
|
1181
|
-
if (!(0, malloy_types_1.isIndexSegment)(this.firstSegment) &&
|
|
1182
|
-
!(0, malloy_types_1.isRawSegment)(this.firstSegment)) {
|
|
1183
|
-
const { queryTimezone } = this.firstSegment;
|
|
1184
|
-
if (queryTimezone) {
|
|
1185
|
-
return { queryTimezone };
|
|
1186
|
-
}
|
|
1187
|
-
}
|
|
1188
|
-
return {};
|
|
1189
|
-
}
|
|
1190
|
-
addField(as, field, usage, drillExpression) {
|
|
1191
|
-
const fi = this.allFields.get(as);
|
|
1192
|
-
if (fi) {
|
|
1193
|
-
if (fi.type === 'query') {
|
|
1194
|
-
throw new Error(`Redefinition of field ${field.fieldDef.name} as struct`);
|
|
1195
|
-
}
|
|
1196
|
-
const fif = fi;
|
|
1197
|
-
if (fif.fieldUsage.type === 'result') {
|
|
1198
|
-
if (usage.type !== 'result') {
|
|
1199
|
-
// its already in the result, we can just ignore it.
|
|
1200
|
-
return;
|
|
1201
|
-
}
|
|
1202
|
-
else {
|
|
1203
|
-
throw new Error(`Ambiguous output field name '${field.fieldDef.name}'.`);
|
|
1204
|
-
}
|
|
1205
|
-
}
|
|
1206
|
-
}
|
|
1207
|
-
this.add(as, new FieldInstanceField(field, usage, this, drillExpression));
|
|
1208
|
-
}
|
|
1209
|
-
parentGroupSet() {
|
|
1210
|
-
if (this.parent) {
|
|
1211
|
-
return this.parent.groupSet;
|
|
1212
|
-
}
|
|
1213
|
-
else {
|
|
1214
|
-
return 0;
|
|
1215
|
-
}
|
|
1216
|
-
}
|
|
1217
|
-
// Register (or overwrite) a field or nested result instance under `name`.
add(name, f) {
    this.allFields.set(name, f);
}
|
|
1220
|
-
hasField(name) {
|
|
1221
|
-
const fi = this.allFields.get(name);
|
|
1222
|
-
return fi !== undefined && fi instanceof FieldInstanceField;
|
|
1223
|
-
}
|
|
1224
|
-
getField(name) {
|
|
1225
|
-
const fi = this.allFields.get(name);
|
|
1226
|
-
if (fi === undefined) {
|
|
1227
|
-
throw new Error(`Internal Error, field Not defined ${name}`);
|
|
1228
|
-
}
|
|
1229
|
-
else if (fi instanceof FieldInstanceField) {
|
|
1230
|
-
return fi;
|
|
1231
|
-
}
|
|
1232
|
-
throw new Error(`can't use a query here ${name}`);
|
|
1233
|
-
}
|
|
1234
|
-
getFieldByNumber(index) {
|
|
1235
|
-
for (const [name, fi] of this.allFields) {
|
|
1236
|
-
if (fi instanceof FieldInstanceField) {
|
|
1237
|
-
if (fi.fieldUsage.type === 'result' &&
|
|
1238
|
-
fi.fieldUsage.resultIndex === index) {
|
|
1239
|
-
return { name, fif: fi };
|
|
1240
|
-
}
|
|
1241
|
-
}
|
|
1242
|
-
}
|
|
1243
|
-
throw new Error(`Invalid Order By index '${index}`);
|
|
1244
|
-
}
|
|
1245
|
-
// loops through all the turtled queries and computes recomputes the group numbers
|
|
1246
|
-
// Recursively assign group-set numbers to this result and its nested
// reduce results. Ungrouped (all/exclude) expressions and a root-level
// total each consume their own compute-only group set before this
// result's own set is assigned. Returns the next free number, the max
// nesting depth, the flattened list of child group sets, and whether any
// nesting (complexity) was found. NOTE: mutation order matters here —
// the numbering depends on the sequence of `nextGroupSetNumber++` uses.
computeGroups(nextGroupSetNumber, depth) {
    // if the root node uses a total, start at 1.
    if (nextGroupSetNumber === 0 && this.resultUsesUngrouped) {
        this.root().computeOnlyGroups.push(nextGroupSetNumber++);
    }
    // make a groupset for each unique ungrouping expression
    for (const [_key, grouping] of this.ungroupedSets) {
        const groupSet = nextGroupSetNumber++;
        grouping.groupSet = groupSet;
        this.root().computeOnlyGroups.push(groupSet);
    }
    this.groupSet = nextGroupSetNumber++;
    this.depth = depth;
    let maxDepth = depth;
    let isComplex = false;
    let children = [this.groupSet];
    for (const [_name, fi] of this.allFields) {
        if (fi.type === 'query') {
            const fir = fi;
            isComplex = true;
            // Only reduce segments get their own group sets.
            if (fir.firstSegment.type === 'reduce') {
                const r = fir.computeGroups(nextGroupSetNumber, depth + 1);
                children = children.concat(r.children);
                nextGroupSetNumber = r.nextGroupSetNumber;
                if (r.maxDepth > maxDepth) {
                    maxDepth = r.maxDepth;
                }
            }
        }
    }
    this.childGroups = children;
    return { nextGroupSetNumber, maxDepth, children, isComplex };
}
|
|
1279
|
-
// All plain fields in this result, optionally filtered by predicate `fn`.
fields(fn = undefined) {
    const matched = [];
    for (const entry of this.allFields.values()) {
        if (!(entry instanceof FieldInstanceField)) {
            continue;
        }
        if (fn === undefined || fn(entry)) {
            matched.push(entry);
        }
    }
    return matched;
}
|
|
1290
|
-
fieldNames(fn) {
|
|
1291
|
-
const ret = [];
|
|
1292
|
-
for (const [name, fi] of this.allFields) {
|
|
1293
|
-
if (fi instanceof FieldInstanceField) {
|
|
1294
|
-
if (fn === undefined || fn(fi)) {
|
|
1295
|
-
ret.push(name);
|
|
1296
|
-
}
|
|
1297
|
-
}
|
|
1298
|
-
}
|
|
1299
|
-
return ret;
|
|
1300
|
-
}
|
|
1301
|
-
// if a turtled result is all measures, we emit use ANY_VALUE for the aggregation
|
|
1302
|
-
// and emit the resulting structure as a RECORD instead of REPEATED
|
|
1303
|
-
// if we have all numbers, we need to know because we'll have to conjur a record.
|
|
1304
|
-
getRepeatedResultType() {
|
|
1305
|
-
let ret = 'inline_all_numbers';
|
|
1306
|
-
for (const f of this.fields()) {
|
|
1307
|
-
if (f.fieldUsage.type === 'result') {
|
|
1308
|
-
if (isBasicScalar(f.f)) {
|
|
1309
|
-
return 'nested';
|
|
1310
|
-
}
|
|
1311
|
-
if (f.f instanceof QueryFieldStruct) {
|
|
1312
|
-
ret = 'inline';
|
|
1313
|
-
}
|
|
1314
|
-
}
|
|
1315
|
-
}
|
|
1316
|
-
return ret;
|
|
1317
|
-
}
|
|
1318
|
-
structs() {
|
|
1319
|
-
const ret = [];
|
|
1320
|
-
for (const e of this.allFields.values()) {
|
|
1321
|
-
if (e instanceof FieldInstanceResult) {
|
|
1322
|
-
ret.push(e);
|
|
1323
|
-
}
|
|
1324
|
-
}
|
|
1325
|
-
return ret;
|
|
1326
|
-
}
|
|
1327
|
-
// return a list of structs that match the criteria
|
|
1328
|
-
// specified in the function.
|
|
1329
|
-
selectStructs(result, fn) {
|
|
1330
|
-
if (fn(this)) {
|
|
1331
|
-
result.push(this);
|
|
1332
|
-
}
|
|
1333
|
-
for (const e of this.structs()) {
|
|
1334
|
-
e.selectStructs(result, fn);
|
|
1335
|
-
}
|
|
1336
|
-
return result;
|
|
1337
|
-
}
|
|
1338
|
-
// Default ordering when the query specifies none (LookML-style rules):
// the first date/timestamp or aggregate result field sorts descending;
// failing that, the first orderable result field sorts ascending.
// Turtles, joins, and analytic fields are never used for default order.
calculateDefaultOrderBy() {
    let firstField;
    for (const [_name, fi] of this.allFields) {
        if (fi instanceof FieldInstanceField) {
            if (fi.fieldUsage.type === 'result') {
                if (fi.f.fieldDef.type === 'turtle' ||
                    (0, malloy_types_1.isJoined)(fi.f.fieldDef) ||
                    (0, malloy_types_1.expressionIsAnalytic)(fi.f.fieldDef.expressionType)) {
                    continue;
                }
                // Remember the first orderable field as the fallback.
                // NOTE(review): `firstField ||` treats a resultIndex of 0 as
                // unset — confirm result indexes are 1-based.
                firstField || (firstField = fi.fieldUsage.resultIndex);
                if (['date', 'timestamp'].indexOf(fi.f.fieldDef.type) > -1) {
                    return [{ dir: 'desc', field: fi.fieldUsage.resultIndex }];
                }
                else if (isBasicAggregate(fi.f)) {
                    return [{ dir: 'desc', field: fi.fieldUsage.resultIndex }];
                }
            }
        }
    }
    if (firstField) {
        return [{ dir: 'asc', field: firstField }];
    }
    return [];
}
|
|
1366
|
-
/**
 * Register the join needed to reach query struct `qs`, creating parent
 * joins and ON-expression dependencies first. `joinStack` carries the
 * join names currently being resolved, to break dependency cycles.
 * `uniqueKeyPossibleUse` records how the join's key may be needed
 * (e.g. for asymmetric aggregates).
 */
addStructToJoin(qs, query, uniqueKeyPossibleUse, joinStack) {
    var _a;
    const name = qs.getIdentifier();
    // we're already chasing the dependency for this join.
    if (joinStack.indexOf(name) !== -1) {
        return;
    }
    let join;
    // Already joined: just record the additional key use and stop.
    if ((join = this.root().joins.get(name))) {
        join.uniqueKeyPossibleUses.add_use(uniqueKeyPossibleUse);
        return;
    }
    // if we have a parent, join it first.
    let parent;
    const parentStruct = (_a = qs.parent) === null || _a === void 0 ? void 0 : _a.getJoinableParent();
    if (parentStruct) {
        // add dependant expressions first...
        this.addStructToJoin(parentStruct, query, undefined, joinStack);
        parent = this.root().joins.get(parentStruct.getIdentifier());
    }
    // add any dependant joins based on the ON
    const sd = qs.structDef;
    if ((0, malloy_types_1.isJoinedSource)(sd) &&
        qs.parent && // if the join has an ON, it must have a parent
        sd.onExpression &&
        joinStack.indexOf(name) === -1) {
        // Push our own name onto the stack so the ON expression cannot
        // recursively re-enter this join.
        query.addDependantExpr(this, qs.parent, sd.onExpression, [
            ...joinStack,
            name,
        ]);
    }
    // Re-check: resolving the ON expression above may have created the join.
    if (!(join = this.root().joins.get(name))) {
        join = new JoinInstance(qs, name, parent);
        this.root().joins.set(name, join);
    }
    join.uniqueKeyPossibleUses.add_use(uniqueKeyPossibleUse);
}
|
|
1403
|
-
findJoins(query) {
|
|
1404
|
-
for (const dim of this.fields()) {
|
|
1405
|
-
if (!(dim.f instanceof QueryFieldStruct)) {
|
|
1406
|
-
this.addStructToJoin(dim.f.getJoinableParent(), query, dim.f.uniqueKeyPossibleUse(), []);
|
|
1407
|
-
}
|
|
1408
|
-
}
|
|
1409
|
-
for (const s of this.structs()) {
|
|
1410
|
-
s.findJoins(query);
|
|
1411
|
-
}
|
|
1412
|
-
}
|
|
1413
|
-
root() {
|
|
1414
|
-
if (this.parent) {
|
|
1415
|
-
return this.parent.root();
|
|
1416
|
-
}
|
|
1417
|
-
throw new Error('Internal Error, Null parent FieldInstanceResult');
|
|
1418
|
-
}
|
|
1419
|
-
/**
 * For an all()/exclude() ungroup calculation, return the scalar result
 * fields that remain in the partition, walking from this result up
 * through enclosing scopes. Throws when the ungroup names a field not
 * visible in scope.
 */
getUngroupPartitions(ungroupSet) {
    let ret = [];
    let p = this;
    let excludeFields = [];
    let inScopeFieldNames = [];
    // all defaults to all fields at the current level.
    if (ungroupSet === undefined || ungroupSet.type === 'all') {
        // fields specified in an all(), convert it to an exclude set.
        const allFields = (ungroupSet === null || ungroupSet === void 0 ? void 0 : ungroupSet.fields) || [];
        // convert an All into the equivalent exclude
        excludeFields = this.fields(fi => isBasicScalar(fi.f) &&
            fi.fieldUsage.type === 'result' &&
            allFields.indexOf(fi.f.getIdentifier()) === -1).map(fi => fi.f.getIdentifier());
    }
    else {
        excludeFields = ungroupSet.fields;
    }
    let firstScope = true;
    while (p !== undefined) {
        // get a list of valid fieldnames for the current scope.
        // exclude() may reference names from any enclosing scope;
        // all() only from the first (innermost) one.
        if (firstScope || (ungroupSet === null || ungroupSet === void 0 ? void 0 : ungroupSet.type) === 'exclude') {
            inScopeFieldNames = inScopeFieldNames.concat(p
                .fields(fi => isScalarField(fi.f) && fi.fieldUsage.type === 'result')
                .map(fi => fi.f.getIdentifier()));
        }
        // Keep every scalar result field at this level that is not excluded.
        ret = ret.concat(p.fields(fi => isScalarField(fi.f) &&
            fi.fieldUsage.type === 'result' &&
            excludeFields.indexOf(fi.f.getIdentifier()) === -1));
        p = p.parent;
        firstScope = false;
    }
    // verify that all names specified are available in the current scope.
    for (const fieldName of (ungroupSet === null || ungroupSet === void 0 ? void 0 : ungroupSet.fields) || []) {
        if (inScopeFieldNames.indexOf(fieldName) === -1) {
            throw new Error(`${ungroupSet === null || ungroupSet === void 0 ? void 0 : ungroupSet.type}(): unknown field name "${fieldName}" or name not in scope.`);
        }
    }
    return ret;
}
|
|
1458
|
-
assignFieldsToGroups() {
|
|
1459
|
-
for (const [_key, grouping] of this.ungroupedSets) {
|
|
1460
|
-
for (const fieldInstance of this.getUngroupPartitions(grouping)) {
|
|
1461
|
-
fieldInstance.additionalGroupSets.push(grouping.groupSet);
|
|
1462
|
-
}
|
|
1463
|
-
}
|
|
1464
|
-
for (const child of this.structs()) {
|
|
1465
|
-
child.assignFieldsToGroups();
|
|
1466
|
-
}
|
|
1467
|
-
}
|
|
1468
|
-
}
|
|
1469
|
-
/* Root Result as opposed to a turtled result */
class FieldInstanceResultRoot extends FieldInstanceResult {
    constructor(turtleDef) {
        super(turtleDef, undefined);
        // All joins for the whole query, keyed by join identifier.
        this.joins = new Map();
        this.havings = new utils_1.AndChain();
        this.isComplexQuery = false;
        this.queryUsesPartitioning = false;
        // Group sets used only for ungroup computation; dropped after stage0.
        this.computeOnlyGroups = [];
        this.elimatedComputeGroups = false;
    }
    // The root terminates the parent-walking recursion.
    root() {
        return this;
    }
    // in the stage immediately following stage0 we need to elimiate any of the
    // groups that were used in ungroup calculations. We need to do this only
    // once and in the very next stage.
    eliminateComputeGroupsSQL() {
        if (this.elimatedComputeGroups || this.computeOnlyGroups.length === 0) {
            return '';
        }
        else {
            this.elimatedComputeGroups = true;
            return `group_set NOT IN (${this.computeOnlyGroups.join(',')})`;
        }
    }
    // look at all the fields again in the structs in the query
    // Decide which single join (if any) is "leafiest" — the one whose
    // aggregates can be computed without symmetric-aggregate machinery —
    // and mark which joins need a manufactured distinct key.
    calculateSymmetricAggregates() {
        let leafiest;
        for (const [name, join] of this.joins) {
            // first join is by default the leafiest candidate
            const relationship = join.parentRelationship();
            if (relationship === 'many_to_many' ||
                join.forceAllSymmetricCalculations()) {
                // everything must be calculated with symmetric aggregates
                // ('0never' is a sentinel that matches no real join name)
                leafiest = '0never';
            }
            else if (leafiest === undefined) {
                leafiest = name;
            }
            else if (join.parentRelationship() === 'one_to_many') {
                // check up the parent relationship until you find
                // the current leafiest node. If it isn't in the direct path
                // we need symmetric aggregate for everything.
                // if it is in the path, than this one becomes leafiest
                const s = join.queryStruct;
                if (s.parent && s.parent.getIdentifier() === leafiest) {
                    leafiest = name;
                }
                else {
                    // we have more than one one_to_many join chain, all bets are off.
                    leafiest = '0never';
                }
            }
        }
        // console.log(`LEAFIEST: ${leafiest}`);
        for (const [name, join] of this.joins) {
            join.leafiest = name === leafiest;
        }
        // figure out which joins we need to manufacture distinct keys for.
        // Nested Unique keys are dependant on the primary key of the parent
        // and the table.
        for (const [_name, join] of this.joins) {
            // in a one_to_many join we need a key to count there may be a failed
            // match in a left join.
            // users -> {
            //   group_by: user_id
            //   aggregate: order_count is orders.count()
            if (
            // we have a leafiest count() joined subtree
            (join.leafiest &&
                join.parent !== undefined &&
                join.uniqueKeyPossibleUses.has('count')) ||
                // or not leafiest and we use an asymetric function
                (!join.leafiest && join.uniqueKeyPossibleUses.hasAsymetricFunctions())) {
                // Walk up through array joins, making keys wherever a
                // primary key is missing.
                let j = join;
                while (j) {
                    if (!j.queryStruct.primaryKey()) {
                        j.makeUniqueKey = true;
                    }
                    if (j.queryStruct.structDef.type === 'array') {
                        j = j.parent;
                    }
                    else {
                        j = undefined;
                    }
                }
            }
        }
    }
}
|
|
1560
|
-
/**
 * One join participating in a query: the struct being joined, its alias,
 * its parent join (if any), and bookkeeping about how its unique key may
 * be needed for symmetric aggregate computation.
 */
class JoinInstance {
    constructor(queryStruct, alias, parent) {
        this.queryStruct = queryStruct;
        this.alias = alias;
        this.parent = parent;
        this.uniqueKeyPossibleUses = new UniqueKeyUse();
        this.makeUniqueKey = false;
        this.leafiest = false;
        this.children = [];
        if (parent) {
            parent.children.push(this);
        }
        // convert the filter list into a list of boolean fields so we can
        // generate dependancies and code for them.
        const sd = this.queryStruct.structDef;
        if ((0, malloy_types_1.isSourceDef)(sd) && sd.filterList) {
            this.joinFilterConditions = sd.filterList.map(filter => new QueryFieldBoolean({
                type: 'boolean',
                name: 'ignoreme',
                e: filter.e,
            }, this.queryStruct));
        }
    }
    /** Cardinality of this join relative to its parent. */
    parentRelationship() {
        if (this.queryStruct.parent === undefined) {
            return 'root';
        }
        const def = this.queryStruct.structDef;
        if ((0, malloy_types_1.isJoined)(def)) {
            if (def.join === 'one') {
                return 'many_to_one';
            }
            if (def.join === 'cross') {
                return 'many_to_many';
            }
            if (def.join === 'many') {
                return 'one_to_many';
            }
        }
        throw new Error(`Internal error unknown relationship type to parent for ${this.queryStruct.structDef.name}`);
    }
    // For now, we force all symmetric calculations for full and right joins
    // because we need distinct keys for COUNT(xx) operations. Don't really need
    // this for sums. This will produce correct results and we can optimize this
    // at some point..
    forceAllSymmetricCalculations() {
        if (this.queryStruct.parent === undefined) {
            return false;
        }
        const def = this.queryStruct.structDef;
        if (!(0, malloy_types_1.isJoined)(def)) {
            return false;
        }
        return def.matrixOperation === 'right' || def.matrixOperation === 'full';
    }
    // postgres unnest needs to know the names of the physical fields.
    getDialectFieldList() {
        return getDialectFieldList(this.queryStruct.structDef);
    }
}
|
|
1620
|
-
/**
 * Used by the translator to get the output StructDef of a pipe segment
 *
 * half translated to the new world of types ..
 */
class Segment {
    /** Compute the struct produced by running `segment` against `structDef`. */
    static nextStructDef(structDef, segment) {
        const queryStruct = new QueryStruct(structDef, undefined, {
            model: new QueryModel(undefined),
        }, {});
        // Wrap the lone segment in a throwaway turtle so it can be compiled.
        const oneStageTurtle = {
            type: 'turtle',
            name: 'ignoreme',
            pipeline: [segment],
        };
        // A defined stage writer indicates we want to get a result.
        const compiled = QueryQuery.makeQuery(oneStageTurtle, queryStruct, new StageWriter(true, undefined), false);
        return compiled.getResultStructDef();
    }
}
exports.Segment = Segment;
|
|
1641
|
-
/** Compute the output StructDef produced by running `view` against `source`. */
function getResultStructDefForView(source, view) {
    const queryStruct = new QueryStruct(source, undefined, {
        model: new QueryModel(undefined),
    }, {});
    // A defined stage writer indicates we want to get a result.
    const compiledView = QueryQuery.makeQuery(view, queryStruct, new StageWriter(true, undefined), false);
    return compiledView.getResultStructDef();
}
|
|
1649
|
-
/** Compile `query` against `model` and return the final stage's StructDef. */
function getResultStructDefForQuery(model, query) {
    const queryModel = new QueryModel(model);
    const { structs } = queryModel.compileQuery(query);
    return structs[structs.length - 1];
}
|
|
1654
|
-
/** Query builder object. */
|
|
1655
|
-
class QueryQuery extends QueryField {
|
|
1656
|
-
constructor(fieldDef, parent, stageWriter, isJoinedSubquery) {
|
|
1657
|
-
super(fieldDef, parent);
|
|
1658
|
-
this.prepared = false;
|
|
1659
|
-
this.maxDepth = 0;
|
|
1660
|
-
this.maxGroupSet = 0;
|
|
1661
|
-
this.fieldDef = fieldDef;
|
|
1662
|
-
this.rootResult = new FieldInstanceResultRoot(fieldDef);
|
|
1663
|
-
this.stageWriter = stageWriter;
|
|
1664
|
-
// do some magic here to get the first segment.
|
|
1665
|
-
this.firstSegment = fieldDef.pipeline[0];
|
|
1666
|
-
this.isJoinedSubquery = isJoinedSubquery;
|
|
1667
|
-
}
|
|
1668
|
-
/**
 * Factory: build the QueryQuery subclass matching the first pipeline
 * stage ('reduce' | 'project' | 'index' | 'raw'). Applies the parent
 * struct's filters to the turtle and, when generating SQL, folds any
 * `extendSource` declarations into a fresh parent QueryStruct.
 */
static makeQuery(fieldDef, parentStruct, stageWriter = undefined, isJoinedSubquery) {
    let parent = parentStruct;
    let turtleWithFilters = parentStruct.applyStructFiltersToTurtleDef(fieldDef);
    const firstStage = turtleWithFilters.pipeline[0];
    const sourceDef = parentStruct.structDef;
    // if we are generating code
    // and have extended declaration, we need to make a new QueryStruct
    // copy the definitions into a new structdef
    // edit the declations from the pipeline
    if (stageWriter !== undefined &&
        (0, malloy_types_1.isQuerySegment)(firstStage) &&
        firstStage.extendSource !== undefined) {
        parent = new QueryStruct({
            ...sourceDef,
            fields: [...sourceDef.fields, ...firstStage.extendSource],
        }, parentStruct.sourceArguments, parent.parent ? { struct: parent } : { model: parent.model }, parent.prepareResultOptions);
        // Strip extendSource from the first stage now that the fields live
        // on the new parent struct.
        turtleWithFilters = {
            ...turtleWithFilters,
            pipeline: [
                {
                    ...firstStage,
                    extendSource: undefined,
                },
                ...turtleWithFilters.pipeline.slice(1),
            ],
        };
    }
    // Inherit the source's query timezone when the stage has none.
    // NOTE(review): this mutates firstStage in place — confirm callers do
    // not rely on the segment being untouched.
    if ((0, malloy_types_1.isSourceDef)(sourceDef) &&
        sourceDef.queryTimezone &&
        (0, malloy_types_1.isQuerySegment)(firstStage) &&
        firstStage.queryTimezone === undefined) {
        firstStage.queryTimezone = sourceDef.queryTimezone;
    }
    switch (firstStage.type) {
        case 'reduce':
            return new QueryQueryReduce(turtleWithFilters, parent, stageWriter, isJoinedSubquery);
        case 'project':
            return new QueryQueryProject(turtleWithFilters, parent, stageWriter, isJoinedSubquery);
        case 'index':
            return new QueryQueryIndex(turtleWithFilters, parent, stageWriter, isJoinedSubquery);
        case 'raw':
            return new QueryQueryRaw(turtleWithFilters, parent, stageWriter, isJoinedSubquery);
        case 'partial':
            throw new Error('Attempt to make query out of partial stage');
    }
}
|
|
1714
|
-
inNestedPipeline() {
|
|
1715
|
-
return this.parent.structDef.type === 'nest_source';
|
|
1716
|
-
}
|
|
1717
|
-
// get a field ref and expand it.
|
|
1718
|
-
expandField(f) {
|
|
1719
|
-
const field = f.type === 'fieldref'
|
|
1720
|
-
? this.parent.getQueryFieldReference(f)
|
|
1721
|
-
: this.parent.makeQueryField(f);
|
|
1722
|
-
const as = field.getIdentifier();
|
|
1723
|
-
return { as, field };
|
|
1724
|
-
}
|
|
1725
|
-
addDependantPath(resultStruct, context, path, uniqueKeyPossibleUse, joinStack) {
|
|
1726
|
-
if (path.length === 0) {
|
|
1727
|
-
return;
|
|
1728
|
-
}
|
|
1729
|
-
const node = context.getFieldByName(path);
|
|
1730
|
-
const joinableParent = node instanceof QueryFieldStruct
|
|
1731
|
-
? node.queryStruct.getJoinableParent()
|
|
1732
|
-
: node.parent.getJoinableParent();
|
|
1733
|
-
resultStruct
|
|
1734
|
-
.root()
|
|
1735
|
-
.addStructToJoin(joinableParent, this, uniqueKeyPossibleUse, joinStack);
|
|
1736
|
-
}
|
|
1737
|
-
findRecordAliases(context, path) {
|
|
1738
|
-
for (const seg of path) {
|
|
1739
|
-
const field = context.getFieldByName([seg]);
|
|
1740
|
-
if (field instanceof QueryFieldStruct) {
|
|
1741
|
-
const qs = field.queryStruct;
|
|
1742
|
-
if (qs.structDef.type === 'record' &&
|
|
1743
|
-
(0, malloy_types_1.hasExpression)(qs.structDef) &&
|
|
1744
|
-
qs.parent) {
|
|
1745
|
-
qs.informOfAliasValue(this.exprToSQL(this.rootResult, qs.parent, qs.structDef.e));
|
|
1746
|
-
}
|
|
1747
|
-
context = qs;
|
|
1748
|
-
}
|
|
1749
|
-
}
|
|
1750
|
-
}
|
|
1751
|
-
/**
 * Walk expression `e` and record everything the query will need to
 * evaluate it: joins for referenced paths, unique-key uses for
 * asymmetric aggregates, partitioning/complex-query flags for analytic
 * and ungrouped (all/exclude) expressions.
 */
addDependantExpr(resultStruct, context, e, joinStack) {
    var _a;
    for (const expr of (0, utils_1.exprWalk)(e)) {
        if (expr.node === 'function_call') {
            if ((0, malloy_types_1.expressionIsAnalytic)(expr.overload.returnType.expressionType) &&
                this.parent.dialect.cantPartitionWindowFunctionsOnExpressions &&
                resultStruct.firstSegment.type === 'reduce') {
                // force the use of a lateral_join_bag
                resultStruct.root().isComplexQuery = true;
                resultStruct.root().queryUsesPartitioning = true;
            }
            const isSymmetric = (_a = expr.overload.isSymmetric) !== null && _a !== void 0 ? _a : false;
            const isAggregate = (0, malloy_types_1.expressionIsAggregate)(expr.overload.returnType.expressionType);
            const isAsymmetricAggregate = isAggregate && !isSymmetric;
            // Asymmetric aggregates may need a distinct key on the join.
            const uniqueKeyPossibleUse = isAsymmetricAggregate
                ? 'generic_asymmetric_aggregate'
                : undefined;
            if (expr.structPath) {
                this.addDependantPath(resultStruct, context, expr.structPath, uniqueKeyPossibleUse, joinStack);
            }
            else if (isAsymmetricAggregate) {
                resultStruct.addStructToJoin(context, this, uniqueKeyPossibleUse, joinStack);
            }
            if ((0, malloy_types_1.expressionIsAnalytic)(expr.overload.returnType.expressionType)) {
                resultStruct.root().queryUsesPartitioning = true;
            }
        }
        else if (expr.node === 'all' || expr.node === 'exclude') {
            resultStruct.resultUsesUngrouped = true;
            resultStruct.root().isComplexQuery = true;
            resultStruct.root().queryUsesPartitioning = true;
            if (expr.fields && expr.fields.length > 0) {
                // Deduplicate ungroup sets by sorted field list + node kind.
                const key = expr.fields.sort().join('|') + expr.node;
                if (resultStruct.ungroupedSets.get(key) === undefined) {
                    resultStruct.ungroupedSets.set(key, {
                        type: expr.node,
                        fields: expr.fields,
                        groupSet: -1,
                    });
                }
            }
        }
        if (expr.node === 'field') {
            this.findRecordAliases(context, expr.path);
            const field = context.getDimensionOrMeasureByName(expr.path);
            if ((0, malloy_types_1.hasExpression)(field.fieldDef)) {
                // Computed field: recurse into its defining expression.
                this.addDependantExpr(resultStruct, field.parent, field.fieldDef.e, joinStack);
            }
            else {
                resultStruct
                    .root()
                    .addStructToJoin(field.parent.getJoinableParent(), this, undefined, joinStack);
            }
        }
        else if (expr.node === 'aggregate') {
            if ((0, malloy_types_1.isAsymmetricExpr)(expr)) {
                if (expr.structPath) {
                    this.findRecordAliases(context, expr.structPath);
                    this.addDependantPath(resultStruct, context, expr.structPath, expr.function, joinStack);
                }
                else {
                    // we are doing a sum in the root. It may need symetric aggregates
                    resultStruct.addStructToJoin(context, this, expr.function, joinStack);
                }
            }
        }
    }
}
|
|
1819
|
-
addDependancies(resultStruct, field) {
|
|
1820
|
-
if ((0, malloy_types_1.hasExpression)(field.fieldDef)) {
|
|
1821
|
-
this.addDependantExpr(resultStruct, field.parent, field.fieldDef.e, []);
|
|
1822
|
-
}
|
|
1823
|
-
}
|
|
1824
|
-
getSegmentFields(resultStruct) {
|
|
1825
|
-
const fs = resultStruct.firstSegment;
|
|
1826
|
-
return fs.type === 'index'
|
|
1827
|
-
? fs.indexFields
|
|
1828
|
-
: (0, malloy_types_1.isQuerySegment)(fs)
|
|
1829
|
-
? fs.queryFields
|
|
1830
|
-
: [];
|
|
1831
|
-
}
|
|
1832
|
-
getDrillExpression(f) {
|
|
1833
|
-
if ((0, malloy_types_1.isAtomic)(f) || f.type === 'fieldref')
|
|
1834
|
-
return f.drillExpression;
|
|
1835
|
-
return undefined;
|
|
1836
|
-
}
|
|
1837
|
-
/**
 * Expand the segment's field list into FieldInstance entries on
 * `resultStruct`, recursing into nested queries, registering
 * dependencies, and rejecting nests/aggregates inside `select:`
 * (project) segments. resultIndex is 1-based.
 */
expandFields(resultStruct) {
    let resultIndex = 1;
    for (const f of this.getSegmentFields(resultStruct)) {
        const { as, field } = this.expandField(f);
        const drillExpression = this.getDrillExpression(f);
        if (field instanceof QueryQuery) {
            if (this.firstSegment.type === 'project') {
                throw new Error(`Nested views cannot be used in select - '${field.fieldDef.name}'`);
            }
            // A nested view becomes its own result, expanded recursively.
            const fir = new FieldInstanceResult(field.fieldDef, resultStruct);
            this.expandFields(fir);
            resultStruct.add(as, fir);
        }
        else if (field instanceof QueryAtomicField) {
            resultStruct.addField(as, field, {
                resultIndex,
                type: 'result',
            }, drillExpression);
            this.addDependancies(resultStruct, field);
            if (isBasicAggregate(field)) {
                if (this.firstSegment.type === 'project') {
                    throw new Error(`Aggregate Fields cannot be used in select - '${field.fieldDef.name}'`);
                }
            }
        }
        else if (field instanceof QueryFieldStruct) {
            if (field.isAtomic()) {
                this.addDependancies(resultStruct, field);
            }
            resultStruct.addField(as, field, {
                resultIndex,
                type: 'result',
            }, drillExpression);
        }
        // else if (
        //   this.firstSegment.type === "project" &&
        //   field instanceof QueryStruct
        // ) {
        //   // TODO lloyd refactor or comment why we do nothing here
        // } else {
        //   throw new Error(`'${as}' cannot be used as in this way.`);
        // }
        resultIndex++;
    }
    this.expandFilters(resultStruct);
}
|
|
1883
|
-
expandFilters(resultStruct) {
|
|
1884
|
-
if (resultStruct.firstSegment.filterList === undefined) {
|
|
1885
|
-
return;
|
|
1886
|
-
}
|
|
1887
|
-
// Go through the filters and make or find dependant fields
|
|
1888
|
-
// add them to the field index. Place the individual filters
|
|
1889
|
-
// in the correct catgory.
|
|
1890
|
-
for (const cond of resultStruct.firstSegment.filterList || []) {
|
|
1891
|
-
const context = this.parent;
|
|
1892
|
-
this.addDependantExpr(resultStruct, context, cond.e, []);
|
|
1893
|
-
}
|
|
1894
|
-
for (const join of resultStruct.root().joins.values() || []) {
|
|
1895
|
-
for (const qf of join.joinFilterConditions || []) {
|
|
1896
|
-
if (qf.fieldDef.type === 'boolean' && qf.fieldDef.e) {
|
|
1897
|
-
this.addDependantExpr(resultStruct, qf.parent, qf.fieldDef.e, []);
|
|
1898
|
-
}
|
|
1899
|
-
}
|
|
1900
|
-
}
|
|
1901
|
-
}
|
|
1902
|
-
generateSQLFilters(resultStruct, which
|
|
1903
|
-
// filterList: FilterCondition[] | undefined = undefined
|
|
1904
|
-
) {
|
|
1905
|
-
const resultFilters = new utils_1.AndChain();
|
|
1906
|
-
const list = resultStruct.firstSegment.filterList;
|
|
1907
|
-
if (list === undefined) {
|
|
1908
|
-
return resultFilters;
|
|
1909
|
-
}
|
|
1910
|
-
// Go through the filters and make or find dependant fields
|
|
1911
|
-
// add them to the field index. Place the individual filters
|
|
1912
|
-
// in the correct catgory.
|
|
1913
|
-
for (const cond of list || []) {
|
|
1914
|
-
const context = this.parent;
|
|
1915
|
-
if ((which === 'having' && (0, malloy_types_1.expressionIsCalculation)(cond.expressionType)) ||
|
|
1916
|
-
(which === 'where' && (0, malloy_types_1.expressionIsScalar)(cond.expressionType))) {
|
|
1917
|
-
const sqlClause = this.exprToSQL(resultStruct, context, cond.e, undefined);
|
|
1918
|
-
resultFilters.add(sqlClause);
|
|
1919
|
-
}
|
|
1920
|
-
}
|
|
1921
|
-
return resultFilters;
|
|
1922
|
-
}
|
|
1923
|
-
prepare(_stageWriter) {
|
|
1924
|
-
if (!this.prepared) {
|
|
1925
|
-
this.expandFields(this.rootResult);
|
|
1926
|
-
this.rootResult.addStructToJoin(this.parent, this, undefined, []);
|
|
1927
|
-
this.rootResult.findJoins(this);
|
|
1928
|
-
this.addAlwaysJoins(this.rootResult);
|
|
1929
|
-
this.rootResult.calculateSymmetricAggregates();
|
|
1930
|
-
this.prepared = true;
|
|
1931
|
-
}
|
|
1932
|
-
}
|
|
1933
|
-
addAlwaysJoins(rootResult) {
|
|
1934
|
-
var _a;
|
|
1935
|
-
const stage = this.fieldDef.pipeline[0];
|
|
1936
|
-
if (stage.type !== 'raw') {
|
|
1937
|
-
const alwaysJoins = (_a = stage.alwaysJoins) !== null && _a !== void 0 ? _a : [];
|
|
1938
|
-
for (const joinName of alwaysJoins) {
|
|
1939
|
-
const qs = this.parent.getChildByName(joinName);
|
|
1940
|
-
if (qs instanceof QueryFieldStruct) {
|
|
1941
|
-
rootResult.addStructToJoin(qs.queryStruct, this, undefined, []);
|
|
1942
|
-
}
|
|
1943
|
-
}
|
|
1944
|
-
}
|
|
1945
|
-
}
|
|
1946
|
-
// get the source fieldname and filters associated with the field (so we can drill later)
/**
 * Build result metadata (drill info) for a field or nested result in the
 * 'result' usage; returns undefined for anything else.
 */
getResultMetadata(fi) {
    if (fi instanceof FieldInstanceField) {
        if (fi.fieldUsage.type === 'result') {
            // const fieldDef = fi.f.fieldDef as AtomicField;
            const fieldDef = fi.f.fieldDef;
            let filterList;
            const sourceField = fi.f.parent.getFullOutputName() +
                (fieldDef.name || fieldDef.as || 'undefined');
            const sourceExpression = (0, malloy_types_1.hasExpression)(fieldDef)
                ? fieldDef.code
                : undefined;
            const sourceClasses = [sourceField];
            const referenceId = fi.f.referenceId;
            const drillExpression = fi.drillExpression;
            const base = {
                sourceField,
                sourceExpression,
                sourceClasses,
                referenceId,
                drillExpression,
            };
            if (isBasicCalculation(fi.f)) {
                filterList = fi.f.getFilterList();
                return {
                    ...base,
                    filterList,
                    fieldKind: 'measure',
                };
            }
            if (isBasicScalar(fi.f)) {
                return {
                    ...base,
                    // NOTE(review): filterList is never assigned on this path,
                    // so dimensions carry filterList: undefined — presumably
                    // intentional; confirm before changing.
                    filterList,
                    fieldKind: 'dimension',
                };
            }
            else {
                return undefined;
            }
        }
        return undefined;
    }
    else if (fi instanceof FieldInstanceResult) {
        const sourceField = fi.turtleDef.name || fi.turtleDef.as;
        const sourceClasses = sourceField ? [sourceField] : [];
        const filterList = fi.firstSegment.filterList;
        const lastSegment = fi.turtleDef.pipeline[fi.turtleDef.pipeline.length - 1];
        // Raw segments have no limit concept.
        const limit = (0, malloy_types_1.isRawSegment)(lastSegment) ? undefined : lastSegment.limit;
        let orderBy = undefined;
        // Only single-stage query segments are drillable.
        const drillable = (0, malloy_types_1.isQuerySegment)(lastSegment) && fi.turtleDef.pipeline.length === 1;
        if ((0, malloy_types_1.isQuerySegment)(lastSegment)) {
            orderBy = lastSegment.orderBy;
        }
        if (sourceField) {
            return {
                sourceField,
                filterList,
                sourceClasses,
                fieldKind: 'struct',
                limit,
                orderBy,
                drillable,
            };
        }
    }
    return undefined;
}
|
|
2014
|
-
/** returns a fields and primary key of a struct for this query */
getResultStructDef(resultStruct = this.rootResult, isRoot = true) {
    const fields = [];
    let primaryKey;
    // Ensure fields/joins are expanded before reading the result shape.
    this.prepare(undefined);
    let dimCount = 0;
    for (const [name, fi] of resultStruct.allFields) {
        const resultMetadata = this.getResultMetadata(fi);
        if (fi instanceof FieldInstanceResult) {
            // Nested result: compile its pipeline (against a throwaway stage
            // writer) to learn its struct shape and repetition kind.
            const { structDef, repeatedResultType } = this.generateTurtlePipelineSQL(fi, new StageWriter(true, undefined), '<nosource>');
            if (repeatedResultType === 'nested') {
                const multiLineNest = {
                    ...structDef,
                    type: 'array',
                    elementTypeDef: { type: 'record_element' },
                    join: 'many',
                    name,
                    resultMetadata,
                };
                fields.push(multiLineNest);
            }
            else {
                const oneLineNest = {
                    ...structDef,
                    type: 'record',
                    join: 'one',
                    name,
                    resultMetadata,
                };
                fields.push(oneLineNest);
            }
        }
        else if (fi instanceof FieldInstanceField) {
            if (fi.fieldUsage.type === 'result') {
                // if there is only one dimension, it is the primaryKey
                // if there are more, primaryKey is undefined.
                if (isBasicScalar(fi.f)) {
                    if (dimCount === 0 && isRoot) {
                        primaryKey = name;
                    }
                    else {
                        primaryKey = undefined;
                    }
                    dimCount++;
                }
                // Remove computations because they are all resolved
                let fOut = fi.f.fieldDef;
                if ((0, malloy_types_1.hasExpression)(fOut)) {
                    fOut = { ...fOut };
                    // "as" because delete needs the property to be optional
                    delete fOut.e;
                    delete fOut.code;
                    delete fOut.expressionType;
                }
                const location = fOut.location;
                const annotation = fOut.annotation;
                const common = {
                    resultMetadata,
                    location,
                    annotation,
                };
                // build out the result fields...
                switch (fOut.type) {
                    case 'boolean':
                    case 'json':
                    case 'string':
                        fields.push({
                            name,
                            type: fOut.type,
                            ...common,
                        });
                        break;
                    case 'date':
                    case 'timestamp': {
                        // Preserve the truncation timeframe when present.
                        const timeframe = fOut.timeframe;
                        const fd = { type: fOut.type };
                        if (timeframe) {
                            fd.timeframe = timeframe;
                        }
                        fields.push({
                            name,
                            ...fd,
                            ...common,
                        });
                        break;
                    }
                    case 'number':
                        fields.push({
                            name,
                            numberType: fOut.numberType,
                            type: 'number',
                            ...common,
                        });
                        break;
                    case 'sql native':
                    case 'record':
                    case 'array': {
                        fields.push({ ...fOut, ...common });
                        break;
                    }
                    default:
                        throw new Error(`unknown Field Type in query ${JSON.stringify(fOut)}`);
                }
            }
        }
    }
    const outputStruct = {
        type: 'query_result',
        name: this.resultStage || 'result',
        fields,
        dialect: this.parent.dialect.name,
        primaryKey,
        connection: this.parent.connectionName,
        resultMetadata: this.getResultMetadata(this.rootResult),
        queryTimezone: resultStruct.getQueryInfo().queryTimezone,
    };
    if (this.parent.structDef.modelAnnotation) {
        outputStruct.modelAnnotation = this.parent.structDef.modelAnnotation;
    }
    return outputStruct;
}
|
|
2135
|
-
/**
 * Emit the SQL JOIN clause(s) for one join-tree node (and, usually, its
 * children) of the current query.
 *
 * @param stageWriter - collects CTE stages generated while rendering sources.
 * @param ji - the JoinInstance being rendered (alias, children, filter info).
 * @param depth - 0 at the top of the join tree; used to decide how array
 *   references are resolved (see the array branch below).
 * @returns the SQL text to append to the FROM clause.
 * @throws when FULL JOIN is requested on a dialect without support, when a
 *   joined struct has no parent, or on join types this code does not handle.
 */
generateSQLJoinBlock(stageWriter, ji, depth) {
    var _a;
    let s = '';
    const qs = ji.queryStruct;
    const qsDef = qs.structDef;
    // Telemetry: announce the join so listeners can track source usage.
    (_a = qs.eventStream) === null || _a === void 0 ? void 0 : _a.emit('join-used', { name: (0, malloy_types_1.getIdentifier)(qsDef) });
    qs.maybeEmitParameterizedSourceUsage();
    if ((0, malloy_types_1.isJoinedSource)(qsDef)) {
        let structSQL = qs.structSourceSQL(stageWriter);
        // Default join type is LEFT when none is specified.
        const matrixOperation = (qsDef.matrixOperation || 'left').toUpperCase();
        if (!this.parent.dialect.supportsFullJoin && matrixOperation === 'FULL') {
            throw new Error('FULL JOIN not supported');
        }
        if (ji.makeUniqueKey) {
            // Wrap the source so every row gets a synthetic __distinct_key,
            // passing through BigQuery pseudo-columns when needed.
            const passKeys = this.generateSQLPassthroughKeys(qs);
            structSQL = `(SELECT ${qs.dialect.sqlGenerateUUID()} as ${qs.dialect.sqlMaybeQuoteIdentifier('__distinct_key')}, x.* ${passKeys} FROM ${structSQL} as x)`;
        }
        let onCondition = '';
        if (qs.parent === undefined) {
            throw new Error('Expected joined struct to have a parent.');
        }
        if (qsDef.onExpression) {
            // Render the join's ON expression in the parent's namespace; the
            // wrapper field name is never emitted, hence 'ignoreme'.
            onCondition = new QueryFieldBoolean({
                type: 'boolean',
                name: 'ignoreme',
                e: qsDef.onExpression,
            }, qs.parent).generateExpression(this.rootResult);
        }
        else {
            onCondition = '1=1';
        }
        let filters = '';
        let conditions = undefined;
        if (ji.joinFilterConditions) {
            conditions = ji.joinFilterConditions.map(qf => qf.generateExpression(this.rootResult));
        }
        if (ji.children.length === 0 ||
            conditions === undefined ||
            !this.parent.dialect.supportsComplexFilteredSources) {
            // Simple case: fold any source filters into the ON clause.
            // LTNOTE: need a check here to see the children's where: conditions are local
            // to the source and not to any of it's joined children.
            // In Presto, we're going to get a SQL error if in this case
            // for now. We need to inspect the 'condition' of each of the children
            // to see if they reference subchildren and blow up if they do
            // or move them to the where clause with a (x.distnct_key is NULL or (condition))
            //
            // const childrenFiltersAreComplex = somethign(conditions)
            // if (conditions && childrenFiltersAreComplex !this.parent.dialect.supportsComplexFilteredSources) {
            //   throw new Error(
            //     'Cannot join a source with a complex filter on a joined source'
            //   );
            // }
            if (conditions !== undefined && conditions.length >= 1) {
                filters = ` AND (${conditions.join(' AND ')})`;
            }
            s += ` ${matrixOperation} JOIN ${structSQL} AS ${ji.alias}\n ON ${onCondition}${filters}\n`;
        }
        else {
            // Complex case: the filtered source has children, so render it as a
            // subselect that joins the children inside and applies the filters
            // in its own WHERE clause.
            let select = `SELECT ${ji.alias}.*`;
            let joins = '';
            for (const childJoin of ji.children) {
                joins += this.generateSQLJoinBlock(stageWriter, childJoin, depth + 1);
                select += `, ${this.parent.dialect.sqlSelectAliasAsStruct(childJoin.alias, getDialectFieldList(childJoin.queryStruct.structDef))} AS ${childJoin.alias}`;
            }
            select += `\nFROM ${structSQL} AS ${ji.alias}\n${joins}\nWHERE ${conditions === null || conditions === void 0 ? void 0 : conditions.join(' AND ')}\n`;
            s += `${matrixOperation} JOIN (\n${(0, utils_1.indent)(select)}) AS ${ji.alias}\n ON ${onCondition}\n`;
            // Children were already rendered inside the subselect; skip the
            // shared child loop at the bottom by returning here.
            return s;
        }
    }
    else if (qsDef.type === 'array') {
        if (qs.parent === undefined || ji.parent === undefined) {
            throw new Error('Internal Error, nested structure with no parent.');
        }
        // We need an SQL expression which results in the array for us to pass to un-nest
        let arrayExpression;
        if ((0, malloy_types_1.hasExpression)(qsDef)) {
            // If this array is NOT contained in the parent, but a computed entity
            // then the thing we are joining is not "parent.childName", but
            // the expression which is built in that namespace
            arrayExpression = this.exprToSQL(this.rootResult, qs.parent, qsDef.e);
        }
        else {
            // If this is a reference through an expression at the top level,
            // need to generate the expression because the expression is written
            // in the top level, this call is being used to generate the join.
            // Below the top level, the expression will have been written into
            // a join at the top level, and the name will exist.
            // ... not sure this is the right way to do this
            // ... the test for this is called "source repeated record containing an array"
            arrayExpression = qs.parent.sqlChildReference(qsDef.name, depth === 0 ? { result: this.rootResult, field: this } : undefined);
        }
        // we need to generate primary key. If parent has a primary key combine
        // console.log(ji.alias, fieldExpression, this.inNestedPipeline());
        s += `${this.parent.dialect.sqlUnnestAlias(arrayExpression, ji.alias, ji.getDialectFieldList(), ji.makeUniqueKey, (0, malloy_types_1.isBasicArray)(qsDef), this.inNestedPipeline())}\n`;
    }
    else if (qsDef.type === 'record') {
        throw new Error('Internal Error: records should never appear in join trees');
    }
    else {
        throw new Error(`Join type not implemented ${qs.structDef.type}`);
    }
    // Recurse into child joins (for the simple and array cases above).
    for (const childJoin of ji.children) {
        s += this.generateSQLJoinBlock(stageWriter, childJoin, depth + 1);
    }
    return s;
}
|
|
2241
|
-
// BigQuery has wildcard psudo columns that are treated differently
|
|
2242
|
-
// SELECT * FROM xxx doesn't include these psuedo columns but we need them so
|
|
2243
|
-
// filters can get pushed down properly when generating a UNIQUE key.
|
|
2244
|
-
// No other dialect really needs this so we are coding here but maybe someday
|
|
2245
|
-
// this makes its way into the dialect.
|
|
2246
|
-
generateSQLPassthroughKeys(qs) {
|
|
2247
|
-
let ret = '';
|
|
2248
|
-
if (qs.dialect.name === 'standardsql') {
|
|
2249
|
-
const psudoCols = [
|
|
2250
|
-
'_TABLE_SUFFIX',
|
|
2251
|
-
'_PARTITIONDATE',
|
|
2252
|
-
'_PARTITIONTIME',
|
|
2253
|
-
].filter(element => qs.getChildByName(element) !== undefined);
|
|
2254
|
-
if (psudoCols.length > 0) {
|
|
2255
|
-
ret = ', ' + psudoCols.join(', ');
|
|
2256
|
-
}
|
|
2257
|
-
}
|
|
2258
|
-
return ret;
|
|
2259
|
-
}
|
|
2260
|
-
/**
 * Generate the complete FROM clause for the query: the base table plus all
 * of its child joins (rendered via generateSQLJoinBlock).
 *
 * @param stageWriter - collects any CTE stages needed to render sources.
 * @returns the SQL FROM/JOIN text.
 * @throws when the root of the join tree is not a base table.
 */
generateSQLJoins(stageWriter) {
    let s = '';
    // get the first value from the map (weird, I know)
    const [[, ji]] = this.rootResult.joins;
    const qs = ji.queryStruct;
    // Joins
    let structSQL = qs.structSourceSQL(stageWriter);
    if ((0, malloy_types_1.isIndexSegment)(this.firstSegment)) {
        // Index queries may sample the source table.
        structSQL = this.parent.dialect.sqlSampleTable(structSQL, this.firstSegment.sample);
        if (this.firstSegment.sample) {
            structSQL = stageWriter.addStage(`SELECT * from ${structSQL} as x limit 100000 `);
        }
    }
    if ((0, malloy_types_1.isBaseTable)(qs.structDef)) {
        if (ji.makeUniqueKey) {
            // Synthesize a __distinct_key per row (see generateSQLPassthroughKeys
            // for why BigQuery pseudo columns are passed through).
            const passKeys = this.generateSQLPassthroughKeys(qs);
            structSQL = `(SELECT ${qs.dialect.sqlGenerateUUID()} as ${qs.dialect.sqlMaybeQuoteIdentifier('__distinct_key')}, x.* ${passKeys} FROM ${structSQL} as x)`;
        }
        s += `FROM ${structSQL} as ${ji.alias}\n`;
    }
    else {
        throw new Error('Internal Error, queries must start from a basetable');
    }
    for (const childJoin of ji.children) {
        s += this.generateSQLJoinBlock(stageWriter, childJoin, 0);
    }
    return s;
}
|
|
2288
|
-
/**
 * Generate the ORDER BY clause for a stage, or '' when ordering is
 * unnecessary (project with no explicit order, intermediate stages, all
 * aggregate results, or an unlimited joined subquery).
 *
 * NOTE: the method name's spelling ("genereate") is part of the public
 * surface used by callers in this file; do not rename.
 *
 * @param queryDef - the segment supplying orderBy/limit.
 * @param resultStruct - result shape used for default ordering and lookups.
 * @returns ORDER BY SQL text ending in '\n', or the empty string.
 * @throws when a named ORDER BY field is not in the result.
 */
genereateSQLOrderBy(queryDef, resultStruct) {
    let s = '';
    if (this.firstSegment.type === 'project' && !queryDef.orderBy) {
        return ''; // No default ordering for project.
    }
    // Intermediate results (in a pipeline or join) that have no limit, don't need an orderby
    // Some database don't have this optimization.
    if (this.fieldDef.pipeline.length > 1 && queryDef.limit === undefined) {
        return '';
    }
    // ignore orderby if all aggregates.
    if (resultStruct.getRepeatedResultType() === 'inline_all_numbers') {
        return '';
    }
    // if we are in the last stage of a query and the query is a subquery
    // and has no limit, ORDER BY is superfluous
    if (this.isJoinedSubquery &&
        this.fieldDef.pipeline.length === 1 &&
        queryDef.limit === undefined) {
        return '';
    }
    const orderBy = queryDef.orderBy || resultStruct.calculateDefaultOrderBy();
    const o = [];
    for (const f of orderBy) {
        if (typeof f.field === 'string') {
            // convert name to an index
            const fi = resultStruct.getField(f.field);
            if (fi && fi.fieldUsage.type === 'result') {
                // Each dialect orders by ordinal position, output column name,
                // or the full field expression.
                if (this.parent.dialect.orderByClause === 'ordinal') {
                    o.push(`${fi.fieldUsage.resultIndex} ${f.dir || 'ASC'}`);
                }
                else if (this.parent.dialect.orderByClause === 'output_name') {
                    o.push(`${this.parent.dialect.sqlMaybeQuoteIdentifier(f.field)} ${f.dir || 'ASC'}`);
                }
                else if (this.parent.dialect.orderByClause === 'expression') {
                    const fieldExpr = fi.getSQL();
                    o.push(`${fieldExpr} ${f.dir || 'ASC'}`);
                }
            }
            else {
                throw new Error(`Unknown field in ORDER BY ${f.field}`);
            }
        }
        else {
            // Numeric ORDER BY reference (1-based result-column number).
            if (this.parent.dialect.orderByClause === 'ordinal') {
                o.push(`${f.field} ${f.dir || 'ASC'}`);
            }
            else if (this.parent.dialect.orderByClause === 'output_name') {
                const orderingField = resultStruct.getFieldByNumber(f.field);
                o.push(`${this.parent.dialect.sqlMaybeQuoteIdentifier(orderingField.name)} ${f.dir || 'ASC'}`);
            }
            else if (this.parent.dialect.orderByClause === 'expression') {
                const orderingField = resultStruct.getFieldByNumber(f.field);
                const fieldExpr = orderingField.fif.getSQL();
                o.push(`${fieldExpr} ${f.dir || 'ASC'}`);
            }
        }
    }
    if (o.length > 0) {
        s = this.parent.dialect.sqlOrderBy(o, 'query') + '\n';
    }
    return s;
}
|
|
2351
|
-
/**
 * Generate SQL for a query with no nested (turtle) results: a single
 * SELECT with joins, WHERE, GROUP BY (for reduce), HAVING, ORDER BY and
 * LIMIT, registered as a stage.
 *
 * @param stageWriter - receives the generated stage.
 * @returns the name of the generated stage (also saved in this.resultStage).
 */
generateSimpleSQL(stageWriter) {
    let s = '';
    s += 'SELECT \n';
    const fields = [];
    for (const [name, field] of this.rootResult.allFields) {
        const fi = field;
        const sqlName = this.parent.dialect.sqlMaybeQuoteIdentifier(name);
        if (fi.fieldUsage.type === 'result') {
            fields.push(` ${fi.f.generateExpression(this.rootResult)} as ${sqlName}`);
        }
    }
    s += (0, utils_1.indent)(fields.join(',\n')) + '\n';
    s += this.generateSQLJoins(stageWriter);
    s += this.generateSQLFilters(this.rootResult, 'where').sql('where');
    // group by
    if (this.firstSegment.type === 'reduce') {
        // GROUP BY every scalar result column, referenced by ordinal.
        const n = [];
        for (const field of this.rootResult.fields()) {
            const fi = field;
            if (fi.fieldUsage.type === 'result' && isScalarField(fi.f)) {
                n.push(fi.fieldUsage.resultIndex.toString());
            }
        }
        if (n.length > 0) {
            s += `GROUP BY ${n.join(',')}\n`;
        }
    }
    s += this.generateSQLFilters(this.rootResult, 'having').sql('having');
    // order by
    s += this.genereateSQLOrderBy(this.firstSegment, this.rootResult);
    // limit
    if (!(0, malloy_types_1.isRawSegment)(this.firstSegment) && this.firstSegment.limit) {
        s += `LIMIT ${this.firstSegment.limit}\n`;
    }
    this.resultStage = stageWriter.addStage(s);
    return this.resultStage;
}
|
|
2388
|
-
// This probably should be generated in a dialect-independent way,
// but for now, it is just googleSQL.
/**
 * Wrap the last stage in one more SELECT that replaces (or re-selects)
 * columns whose values come from nested pipelines.
 *
 * @param outputPipelinedSQL - entries of {sqlFieldName, pipelineSQL} to
 *   substitute into the output.
 * @param lastStageName - the stage to read from; returned unchanged when
 *   there is nothing to pipeline.
 * @param stageWriter - receives the new stage.
 * @returns the (possibly new) last stage name.
 */
generatePipelinedStages(outputPipelinedSQL, lastStageName, stageWriter) {
    if (outputPipelinedSQL.length === 0) {
        return lastStageName;
    }
    let retSQL;
    if (this.parent.dialect.supportsSelectReplace) {
        // Dialects with SELECT * REPLACE can swap columns in place.
        const pipelinesSQL = outputPipelinedSQL
            .map(o => `${o.pipelineSQL} as ${o.sqlFieldName}`)
            .join(',\n');
        retSQL = `SELECT * replace (${pipelinesSQL}) FROM ${lastStageName}
`;
    }
    else {
        // Otherwise, re-select all non-pipelined columns plus the pipelined
        // expressions explicitly.
        const pipelinesSQL = outputPipelinedSQL
            .map(o => `${o.pipelineSQL} as ${o.sqlFieldName}`)
            .join(',\n');
        const outputFields = outputPipelinedSQL.map(f => f.sqlFieldName);
        const allFields = Array.from(this.rootResult.allFields.keys()).map(f => this.parent.dialect.sqlMaybeQuoteIdentifier(f));
        const fields = allFields.filter(f => outputFields.indexOf(f) === -1);
        retSQL = `SELECT ${fields.length > 0 ? fields.join(', ') + ',' : ''} ${pipelinesSQL} FROM ${lastStageName}`;
    }
    return stageWriter.addStage(retSQL);
}
|
|
2413
|
-
/**
 * Populate the stage-0 field accumulator (`output`) for one result level of
 * a turtled query, recursing into nested reduce results. Scalar fields are
 * emitted first so dimension ordinals are stable, then aggregates/turtles.
 * Mutates `output` (sql, dimensionIndexes, fieldIndex,
 * lateralJoinSQLExpressions) and may set flags on field instances.
 *
 * @param resultSet - the result level being rendered.
 * @param output - shared accumulator for the stage-0 SELECT.
 * @param stageWriter - used when nested project turtles need stages.
 */
generateStage0Fields(resultSet, output, stageWriter) {
    const scalarFields = [];
    const otherFields = [];
    for (const [name, fi] of resultSet.allFields) {
        if (fi instanceof FieldInstanceField && isScalarField(fi.f)) {
            scalarFields.push([name, fi]);
        }
        else {
            otherFields.push([name, fi]);
        }
    }
    // Scalars first: their positions feed dimensionIndexes for GROUP BY.
    const orderedFields = [...scalarFields, ...otherFields];
    for (const [name, fi] of orderedFields) {
        // Columns are disambiguated per group set: name__<groupSet>.
        const outputName = this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultSet.groupSet}`);
        if (fi instanceof FieldInstanceField) {
            if (fi.fieldUsage.type === 'result') {
                const exp = fi.getSQL();
                if (isScalarField(fi.f)) {
                    if (this.parent.dialect.cantPartitionWindowFunctionsOnExpressions &&
                        this.rootResult.queryUsesPartitioning &&
                        resultSet.firstSegment.type === 'reduce') {
                        // BigQuery can't partition aggregate function except when the field has no
                        // expression. Additionally it can't partition by floats. We stuff expressions
                        // and numbers as strings into a lateral join when the query has ungrouped expressions
                        const outputFieldName = `__lateral_join_bag.${outputName}`;
                        fi.analyticalSQL = outputFieldName;
                        output.lateralJoinSQLExpressions.push(`${exp} as ${outputName}`);
                        output.sql.push(outputFieldName);
                        if (fi.f.fieldDef.type === 'number') {
                            // Extra string-cast copy used only for PARTITION BY.
                            const outputNameString = this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultSet.groupSet}_string`);
                            const outputFieldNameString = `__lateral_join_bag.${outputNameString}`;
                            output.sql.push(outputFieldNameString);
                            output.dimensionIndexes.push(output.fieldIndex++);
                            output.lateralJoinSQLExpressions.push(`CAST(${exp} as STRING) as ${outputNameString}`);
                            fi.partitionSQL = outputFieldNameString;
                        }
                    }
                    else {
                        // just treat it like a regular field.
                        output.sql.push(`${exp} as ${outputName}`);
                    }
                    output.dimensionIndexes.push(output.fieldIndex++);
                }
                else if (isBasicCalculation(fi.f)) {
                    output.sql.push(`${exp} as ${outputName}`);
                    output.fieldIndex++;
                }
            }
        }
        else if (fi instanceof FieldInstanceResult) {
            if (fi.firstSegment.type === 'reduce') {
                // Nested reduce: flatten its fields into this same stage.
                this.generateStage0Fields(fi, output, stageWriter);
            }
            else if (fi.firstSegment.type === 'project') {
                const s = this.generateTurtleSQL(fi, stageWriter, outputName, output.outputPipelinedSQL);
                output.sql.push(`${s} as ${outputName}`);
                output.fieldIndex++;
            }
        }
    }
    // LTNOTE: we could optimize here in the future.
    // leaf turtles can have their having clauses in the main query
    // turtles with leaves need to promote their state to their
    // children.
    const having = this.generateSQLFilters(resultSet, 'having');
    if (!having.empty()) {
        // if we have no children, the having can run at the root level
        if (resultSet.childGroups.length === 1) {
            resultSet
                .root()
                .havings.add(`(group_set<>${resultSet.groupSet} OR (group_set=${resultSet.groupSet} AND ${having.sql()}))`);
        }
        else {
            // Otherwise emit a per-row delete marker; a later stage
            // (generateSQLHavingLimit) filters on it.
            resultSet.hasHaving = true;
            output.sql.push(`CASE WHEN group_set=${resultSet.groupSet} THEN CASE WHEN ${having.sql()} THEN 0 ELSE 1 END END as __delete__${resultSet.groupSet}`);
            output.fieldIndex++;
        }
    }
}
|
|
2492
|
-
generateSQLWhereChildren(resultStruct) {
|
|
2493
|
-
const wheres = new utils_1.AndChain();
|
|
2494
|
-
for (const [, field] of resultStruct.allFields) {
|
|
2495
|
-
if (field.type === 'query') {
|
|
2496
|
-
const fir = field;
|
|
2497
|
-
const turtleWhere = this.generateSQLFilters(fir, 'where');
|
|
2498
|
-
if (turtleWhere.present()) {
|
|
2499
|
-
const groupSets = fir.childGroups.join(',');
|
|
2500
|
-
wheres.add(`(group_set NOT IN (${groupSets})` +
|
|
2501
|
-
` OR (group_set IN (${groupSets}) AND ${turtleWhere.sql()}))`);
|
|
2502
|
-
}
|
|
2503
|
-
wheres.addChain(this.generateSQLWhereChildren(fir));
|
|
2504
|
-
}
|
|
2505
|
-
}
|
|
2506
|
-
return wheres;
|
|
2507
|
-
}
|
|
2508
|
-
generateSQLWhereTurtled() {
|
|
2509
|
-
const wheres = this.generateSQLFilters(this.rootResult, 'where');
|
|
2510
|
-
wheres.addChain(this.generateSQLWhereChildren(this.rootResult));
|
|
2511
|
-
return wheres.sql('where');
|
|
2512
|
-
}
|
|
2513
|
-
// iterate over the nested queries looking for Havings (and someday limits).
// if you find any, generate a new stage(s) to perform these functions.
/**
 * For each nested result flagged with `hasHaving`, add two stages: one that
 * broadcasts each group's __delete__ marker across its partition via a
 * window MAX, and one that filters out the marked rows.
 *
 * @param stageWriter - receives the extra stages.
 * @param lastStageName - the stage to post-process.
 * @returns the final stage name (unchanged when nothing has a having).
 */
generateSQLHavingLimit(stageWriter, lastStageName) {
    const fields = [];
    const resultsWithHaving = this.rootResult.selectStructs([], (result) => result.hasHaving);
    if (resultsWithHaving.length > 0) {
        for (const result of resultsWithHaving) {
            // find all the parent dimension names.
            const dimensions = [];
            let r = result;
            while (r) {
                for (const name of r.fieldNames(fi => isScalarField(fi.f))) {
                    dimensions.push(this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${r.groupSet}`));
                }
                r = r.parent;
            }
            let partition = '';
            if (dimensions.length > 0) {
                partition = 'partition by ';
                // NOTE(review): castToString is passed unbound; this assumes it
                // does not use `this` internally — confirm against the dialects.
                partition += dimensions
                    .map(this.parent.dialect.castToString)
                    .join(',');
            }
            fields.push(`MAX(CASE WHEN group_set IN (${result.childGroups.join(',')}) THEN __delete__${result.groupSet} END) OVER(${partition}) as __shaving__${result.groupSet}`);
        }
    }
    if (resultsWithHaving.length > 0) {
        lastStageName = stageWriter.addStage(`SELECT\n *,\n ${fields.join(',\n ')} \nFROM ${lastStageName}`);
        const havings = new utils_1.AndChain();
        for (const result of resultsWithHaving) {
            havings.add(`group_set IN (${result.childGroups.join(',')}) AND __shaving__${result.groupSet}=1`);
        }
        lastStageName = stageWriter.addStage(`SELECT *\nFROM ${lastStageName}\nWHERE NOT (${havings.sqlOr()})`);
    }
    return lastStageName;
}
|
|
2549
|
-
/**
 * Generate stage 0 of a turtled query: a single grouped SELECT over the
 * joined sources cross-joined with the group-set table, then post-stages
 * for nested havings and pipelined fields.
 *
 * @param stageWriter - receives all generated stages.
 * @returns the final stage name (also saved in this.resultStage).
 * @throws when a project segment contains turtles without the
 *   'unsafe_complex_select_query' compiler flag.
 */
generateSQLStage0(stageWriter) {
    let s = 'SELECT\n';
    let from = this.generateSQLJoins(stageWriter);
    const wheres = this.generateSQLWhereTurtled();
    // Accumulator shared with generateStage0Fields; slot 1 is group_set.
    const f = {
        dimensionIndexes: [1],
        fieldIndex: 2,
        sql: ['group_set'],
        lateralJoinSQLExpressions: [],
        groupsAggregated: [],
        outputPipelinedSQL: [],
    };
    this.generateStage0Fields(this.rootResult, f, stageWriter);
    if (this.firstSegment.type === 'project' &&
        !this.parent.modelCompilerFlags().has('unsafe_complex_select_query')) {
        throw new Error('PROJECT cannot be used on queries with turtles');
    }
    const groupBy = 'GROUP BY ' + f.dimensionIndexes.join(',') + '\n';
    // Cross join with the group-set table to replicate rows per group set.
    from += this.parent.dialect.sqlGroupSetTable(this.maxGroupSet) + '\n';
    s += (0, utils_1.indent)(f.sql.join(',\n')) + '\n';
    // this should only happen on standard SQL, BigQuery can't partition by expressions and
    // aggregates.
    if (f.lateralJoinSQLExpressions.length > 0) {
        from += `LEFT JOIN UNNEST([STRUCT(${f.lateralJoinSQLExpressions.join(',\n')})]) as __lateral_join_bag\n`;
    }
    s += from + wheres + groupBy + this.rootResult.havings.sql('having');
    // generate the stage
    const resultStage = stageWriter.addStage(s);
    // generate stages for havings and limits
    this.resultStage = this.generateSQLHavingLimit(stageWriter, resultStage);
    this.resultStage = this.generatePipelinedStages(f.outputPipelinedSQL, this.resultStage, stageWriter);
    return this.resultStage;
}
|
|
2582
|
-
generateDepthNFields(depth, resultSet, output, stageWriter) {
|
|
2583
|
-
const groupsToMap = [];
|
|
2584
|
-
for (const [name, fi] of resultSet.allFields) {
|
|
2585
|
-
const sqlFieldName = this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultSet.groupSet}`);
|
|
2586
|
-
if (fi instanceof FieldInstanceField) {
|
|
2587
|
-
if (fi.fieldUsage.type === 'result') {
|
|
2588
|
-
if (isScalarField(fi.f)) {
|
|
2589
|
-
const exp = this.caseGroup(resultSet.groupSet > 0 ? resultSet.childGroups : [], sqlFieldName);
|
|
2590
|
-
output.sql.push(`${exp} as ${sqlFieldName}`);
|
|
2591
|
-
output.dimensionIndexes.push(output.fieldIndex++);
|
|
2592
|
-
}
|
|
2593
|
-
else if (isBasicCalculation(fi.f)) {
|
|
2594
|
-
const exp = this.parent.dialect.sqlAnyValue(resultSet.groupSet, sqlFieldName);
|
|
2595
|
-
output.sql.push(`${exp} as ${sqlFieldName}`);
|
|
2596
|
-
output.fieldIndex++;
|
|
2597
|
-
}
|
|
2598
|
-
}
|
|
2599
|
-
}
|
|
2600
|
-
else if (fi instanceof FieldInstanceResult) {
|
|
2601
|
-
if (fi.depth > depth) {
|
|
2602
|
-
// ignore it, we've already dealt with it.
|
|
2603
|
-
}
|
|
2604
|
-
else if (fi.depth === depth) {
|
|
2605
|
-
const s = this.generateTurtleSQL(fi, stageWriter, sqlFieldName, output.outputPipelinedSQL);
|
|
2606
|
-
output.groupsAggregated.push({
|
|
2607
|
-
fromGroup: fi.groupSet,
|
|
2608
|
-
toGroup: resultSet.groupSet,
|
|
2609
|
-
});
|
|
2610
|
-
groupsToMap.push(fi.groupSet);
|
|
2611
|
-
output.sql.push(`${s} as ${sqlFieldName}`);
|
|
2612
|
-
output.fieldIndex++;
|
|
2613
|
-
}
|
|
2614
|
-
else {
|
|
2615
|
-
this.generateDepthNFields(depth, fi, output, stageWriter);
|
|
2616
|
-
}
|
|
2617
|
-
}
|
|
2618
|
-
}
|
|
2619
|
-
if (output.groupsAggregated.length > 0) {
|
|
2620
|
-
output.sql[0] = 'CASE ';
|
|
2621
|
-
for (const m of output.groupsAggregated) {
|
|
2622
|
-
output.sql[0] += `WHEN group_set=${m.fromGroup} THEN ${m.toGroup} `;
|
|
2623
|
-
}
|
|
2624
|
-
output.sql[0] += 'ELSE group_set END as group_set';
|
|
2625
|
-
}
|
|
2626
|
-
}
|
|
2627
|
-
generateSQLDepthN(depth, stageWriter, stageName) {
|
|
2628
|
-
let s = 'SELECT \n';
|
|
2629
|
-
const f = {
|
|
2630
|
-
dimensionIndexes: [1],
|
|
2631
|
-
fieldIndex: 2,
|
|
2632
|
-
sql: ['group_set'],
|
|
2633
|
-
lateralJoinSQLExpressions: [],
|
|
2634
|
-
groupsAggregated: [],
|
|
2635
|
-
outputPipelinedSQL: [],
|
|
2636
|
-
};
|
|
2637
|
-
this.generateDepthNFields(depth, this.rootResult, f, stageWriter);
|
|
2638
|
-
s += (0, utils_1.indent)(f.sql.join(',\n')) + '\n';
|
|
2639
|
-
s += `FROM ${stageName}\n`;
|
|
2640
|
-
const where = this.rootResult.eliminateComputeGroupsSQL();
|
|
2641
|
-
if (where.length > 0) {
|
|
2642
|
-
s += `WHERE ${where}\n`;
|
|
2643
|
-
}
|
|
2644
|
-
if (f.dimensionIndexes.length > 0) {
|
|
2645
|
-
s += `GROUP BY ${f.dimensionIndexes.join(',')}\n`;
|
|
2646
|
-
}
|
|
2647
|
-
this.resultStage = stageWriter.addStage(s);
|
|
2648
|
-
this.resultStage = this.generatePipelinedStages(f.outputPipelinedSQL, this.resultStage, stageWriter);
|
|
2649
|
-
return this.resultStage;
|
|
2650
|
-
}
|
|
2651
|
-
/**
 * Generate the final stage of a turtled query: collapse the group-set rows
 * of stage 0 into one row per top-level dimension combination, nesting
 * turtle results, then apply ORDER BY and LIMIT.
 *
 * NOTE: the method name's spelling ("genereate") is part of the public
 * surface used by callers in this file; do not rename.
 *
 * @param stageWriter - receives the new stage(s).
 * @param stage0Name - the stage-0 stage to read FROM.
 * @returns the final stage name (also saved in this.resultStage).
 */
genereateSQLCombineTurtles(stageWriter, stage0Name) {
    let s = 'SELECT\n';
    const fieldsSQL = [];
    let fieldIndex = 1;
    const outputPipelinedSQL = [];
    const dimensionIndexes = [];
    for (const [name, fi] of this.rootResult.allFields) {
        const sqlName = this.parent.dialect.sqlMaybeQuoteIdentifier(name);
        if (fi instanceof FieldInstanceField) {
            if (fi.fieldUsage.type === 'result') {
                if (isScalarField(fi.f)) {
                    // Top-level dimension: re-select it under its output name.
                    fieldsSQL.push(this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${this.rootResult.groupSet}`) + ` as ${sqlName}`);
                    dimensionIndexes.push(fieldIndex++);
                }
                else if (isBasicCalculation(fi.f)) {
                    // Aggregate: take its value from the root group set's rows.
                    fieldsSQL.push(this.parent.dialect.sqlAnyValueLastTurtle(this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${this.rootResult.groupSet}`), this.rootResult.groupSet, sqlName));
                    fieldIndex++;
                }
            }
        }
        else if (fi instanceof FieldInstanceResult) {
            if (fi.firstSegment.type === 'reduce') {
                // Reduce turtle: aggregate its group-set rows into a nested value.
                fieldsSQL.push(`${this.generateTurtleSQL(fi, stageWriter, sqlName, outputPipelinedSQL)} as ${sqlName}`);
                fieldIndex++;
            }
            else if (fi.firstSegment.type === 'project') {
                // Project turtle: already materialized in stage 0.
                fieldsSQL.push(this.parent.dialect.sqlAnyValueLastTurtle(this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${this.rootResult.groupSet}`), this.rootResult.groupSet, sqlName));
                fieldIndex++;
            }
        }
    }
    s += (0, utils_1.indent)(fieldsSQL.join(',\n')) + `\nFROM ${stage0Name}\n`;
    const where = this.rootResult.eliminateComputeGroupsSQL();
    if (where.length > 0) {
        s += `WHERE ${where}\n`;
    }
    if (dimensionIndexes.length > 0) {
        s += `GROUP BY ${dimensionIndexes.join(',')}\n`;
    }
    // order by
    s += this.genereateSQLOrderBy(this.firstSegment, this.rootResult);
    // limit
    if (!(0, malloy_types_1.isRawSegment)(this.firstSegment) && this.firstSegment.limit) {
        s += `LIMIT ${this.firstSegment.limit}\n`;
    }
    this.resultStage = stageWriter.addStage(s);
    this.resultStage = this.generatePipelinedStages(outputPipelinedSQL, this.resultStage, stageWriter);
    return this.resultStage;
}
|
|
2700
|
-
// create a simplified version of the StructDef for dialects.
/**
 * Build the DialectFieldList describing one result level's output columns:
 * nested results become array/record typedefs (via a throwaway turtle
 * pipeline compile), ordinary fields are pushed with either their
 * stage-0 column reference (reduce) or a freshly generated expression
 * (project).
 *
 * @param resultStruct - the result level being described.
 * @returns the dialect field list for this level.
 */
buildDialectFieldList(resultStruct) {
    const dialectFieldList = [];
    for (const [name, field] of resultStruct.allFields) {
        const sqlName = this.parent.dialect.sqlMaybeQuoteIdentifier(name);
        //
        if (resultStruct.firstSegment.type === 'reduce' &&
            field instanceof FieldInstanceResult) {
            // Compile the turtle's pipeline against a dummy writer just to
            // learn its output structure; the SQL itself is discarded.
            const { structDef, repeatedResultType } = this.generateTurtlePipelineSQL(field, new StageWriter(true, undefined), '<nosource>');
            if (repeatedResultType === 'nested') {
                // Repeated nest: an array of records.
                const multiLineNest = {
                    ...structDef,
                    type: 'array',
                    elementTypeDef: { type: 'record_element' },
                    join: 'many',
                    name,
                };
                dialectFieldList.push({
                    typeDef: multiLineNest,
                    sqlExpression: this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultStruct.groupSet}`),
                    rawName: name,
                    sqlOutputName: sqlName,
                });
            }
            else {
                // Single-row nest: a plain record.
                const oneLineNest = {
                    ...structDef,
                    type: 'record',
                    join: 'one',
                    name,
                };
                dialectFieldList.push({
                    typeDef: oneLineNest,
                    sqlExpression: this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultStruct.groupSet}`),
                    rawName: name,
                    sqlOutputName: sqlName,
                });
            }
        }
        else if (resultStruct.firstSegment.type === 'reduce' &&
            field instanceof FieldInstanceField &&
            field.fieldUsage.type === 'result') {
            // Reduce field: reference the stage-0 column name__<groupSet>.
            pushDialectField(dialectFieldList, {
                fieldDef: field.f.fieldDef,
                sqlExpression: this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultStruct.groupSet}`),
                rawName: name,
                sqlOutputName: sqlName,
            });
        }
        else if (resultStruct.firstSegment.type === 'project' &&
            field instanceof FieldInstanceField &&
            field.fieldUsage.type === 'result') {
            // Project field: generate the expression inline.
            pushDialectField(dialectFieldList, {
                fieldDef: field.f.fieldDef,
                sqlExpression: field.f.generateExpression(resultStruct),
                rawName: name,
                sqlOutputName: sqlName,
            });
        }
    }
    return dialectFieldList;
}
|
|
2762
|
-
    // Generate the SQL expression for one nested (turtle) result: an
    // aggregate/any-value over the rows belonging to this turtle's group set,
    // honoring the turtle's order-by and limit. If the turtle itself has a
    // multi-stage pipeline, the later stages are generated as a correlated
    // subquery (or recorded in `outputPipelinedSQL` for dialects that cannot
    // express it inline).
    generateTurtleSQL(resultStruct, stageWriter, sqlFieldName, outputPipelinedSQL) {
        // let fieldsSQL: string[] = [];
        let orderBy = '';
        const limit = (0, malloy_types_1.isRawSegment)(resultStruct.firstSegment)
            ? undefined
            : resultStruct.firstSegment.limit;
        // calculate the ordering.
        const obSQL = [];
        let orderingField;
        const orderByDef = resultStruct.firstSegment.orderBy ||
            resultStruct.calculateDefaultOrderBy();
        for (const ordering of orderByDef) {
            // Orderings may reference a field by name or by 1-based position.
            if (typeof ordering.field === 'string') {
                orderingField = {
                    name: ordering.field,
                    fif: resultStruct.getField(ordering.field),
                };
            }
            else {
                orderingField = resultStruct.getFieldByNumber(ordering.field);
            }
            if (resultStruct.firstSegment.type === 'reduce') {
                // Reduce: order by the group-set-suffixed column from stage 0.
                obSQL.push(' ' +
                    this.parent.dialect.sqlMaybeQuoteIdentifier(`${orderingField.name}__${resultStruct.groupSet}`) +
                    ` ${ordering.dir || 'ASC'}`);
            }
            else if (resultStruct.firstSegment.type === 'project') {
                // Project: order by the field's expression directly.
                obSQL.push(` ${orderingField.fif.f.generateExpression(resultStruct)} ${ordering.dir || 'ASC'}`);
            }
        }
        if (obSQL.length > 0) {
            orderBy = ' ' + this.parent.dialect.sqlOrderBy(obSQL, 'turtle');
        }
        const dialectFieldList = this.buildDialectFieldList(resultStruct);
        let resultType;
        let ret;
        if ((resultType = resultStruct.getRepeatedResultType()) !== 'nested') {
            if (resultType === 'inline_all_numbers') {
                ret = this.parent.dialect.sqlCoaleseMeasuresInline(resultStruct.groupSet, dialectFieldList);
            }
            else {
                ret = this.parent.dialect.sqlAnyValueTurtle(resultStruct.groupSet, dialectFieldList);
            }
        }
        else {
            ret = this.parent.dialect.sqlAggregateTurtle(resultStruct.groupSet, dialectFieldList, orderBy, limit);
        }
        // If the turtle is a pipeline, generate a UDF to compute it.
        const newStageWriter = new StageWriter(this.parent.dialect.supportsCTEinCoorelatedSubQueries, stageWriter);
        const { structDef, pipeOut } = this.generateTurtlePipelineSQL(resultStruct, newStageWriter, this.parent.dialect.supportUnnestArrayAgg ? ret : sqlFieldName);
        // if there was a pipeline.
        if (pipeOut !== undefined) {
            const sql = newStageWriter.generateCoorelatedSubQuery(this.parent.dialect, structDef);
            if (this.parent.dialect.supportUnnestArrayAgg) {
                // Dialect can run the pipeline inline over the aggregate.
                ret = `(${sql})`;
            }
            else {
                // Otherwise the caller patches the pipeline SQL in afterwards.
                outputPipelinedSQL.push({
                    sqlFieldName,
                    pipelineSQL: `(${sql})`,
                });
            }
        }
        return ret;
        // return `${aggregateFunction}(CASE WHEN group_set=${
        //   resultStruct.groupSet
        // } THEN STRUCT(${fieldsSQL.join(",\n")}) END${tailSQL})`;
    }
|
|
2830
|
-
    // Generate SQL for the tail of a turtle's pipeline (stages after the
    // first), treating the first stage's output as an unnestable source.
    // Returns the final output structDef, the pipeline result (undefined when
    // the turtle has a single stage), and the repetition kind of the output.
    generateTurtlePipelineSQL(fi, stageWriter, sourceSQLExpression) {
        let structDef = this.getResultStructDef(fi, false);
        const repeatedResultType = fi.getRepeatedResultType();
        const hasPipeline = fi.turtleDef.pipeline.length > 1;
        let pipeOut;
        let outputRepeatedResultType = repeatedResultType;
        if (hasPipeline) {
            // Drop the first stage; the remainder becomes a new turtle whose
            // input is the unnested first-stage result.
            const pipeline = [...fi.turtleDef.pipeline];
            pipeline.shift();
            const newTurtle = {
                type: 'turtle',
                name: 'starthere',
                pipeline,
            };
            const inputStruct = {
                type: 'nest_source',
                name: '~pipe~',
                pipeSQL: this.parent.dialect.sqlUnnestPipelineHead(repeatedResultType === 'inline_all_numbers', sourceSQLExpression, getDialectFieldList(structDef)),
                fields: structDef.fields,
                connection: structDef.connection,
                dialect: structDef.dialect,
            };
            const qs = new QueryStruct(inputStruct, undefined, { model: this.parent.getModel() }, this.parent.prepareResultOptions);
            const q = QueryQuery.makeQuery(newTurtle, qs, stageWriter, this.isJoinedSubquery);
            pipeOut = q.generateSQLFromPipeline(stageWriter);
            outputRepeatedResultType = q.rootResult.getRepeatedResultType();
            // console.log(stageWriter.generateSQLStages());
            structDef = pipeOut.outputStruct;
        }
        // Carry the turtle's annotation onto whatever struct came out.
        structDef.annotation = fi.turtleDef.annotation;
        return {
            structDef,
            pipeOut,
            repeatedResultType: outputRepeatedResultType,
        };
    }
|
|
2866
|
-
generateComplexSQL(stageWriter) {
|
|
2867
|
-
let stageName = this.generateSQLStage0(stageWriter);
|
|
2868
|
-
if (this.maxDepth > 1) {
|
|
2869
|
-
let i = this.maxDepth;
|
|
2870
|
-
while (i > 1) {
|
|
2871
|
-
stageName = this.generateSQLDepthN(i, stageWriter, stageName);
|
|
2872
|
-
i--;
|
|
2873
|
-
}
|
|
2874
|
-
}
|
|
2875
|
-
// nest the turtles.
|
|
2876
|
-
return this.genereateSQLCombineTurtles(stageWriter, stageName);
|
|
2877
|
-
}
|
|
2878
|
-
generateSQL(stageWriter) {
|
|
2879
|
-
var _a;
|
|
2880
|
-
const r = this.rootResult.computeGroups(0, 0);
|
|
2881
|
-
this.maxDepth = r.maxDepth;
|
|
2882
|
-
this.maxGroupSet = r.nextGroupSetNumber - 1;
|
|
2883
|
-
this.rootResult.assignFieldsToGroups();
|
|
2884
|
-
(_a = this.rootResult).isComplexQuery || (_a.isComplexQuery = this.maxDepth > 0 || r.isComplex);
|
|
2885
|
-
if (this.rootResult.isComplexQuery) {
|
|
2886
|
-
return this.generateComplexSQL(stageWriter);
|
|
2887
|
-
}
|
|
2888
|
-
else {
|
|
2889
|
-
return this.generateSimpleSQL(stageWriter);
|
|
2890
|
-
}
|
|
2891
|
-
}
|
|
2892
|
-
    // Generate SQL for this query's whole pipeline. The first stage runs via
    // generateSQL(); each subsequent stage treats the previous stage's output
    // (as a 'finalize' struct named after its stage) as the new source.
    // Returns the final stage name and its output structDef.
    generateSQLFromPipeline(stageWriter) {
        this.parent.maybeEmitParameterizedSourceUsage();
        this.prepare(stageWriter);
        let lastStageName = this.generateSQL(stageWriter);
        let outputStruct = this.getResultStructDef();
        const pipeline = [...this.fieldDef.pipeline];
        if (pipeline.length > 1) {
            // console.log(pretty(outputStruct));
            let structDef = {
                ...outputStruct,
                name: lastStageName,
                type: 'finalize',
            };
            // First stage was already generated above.
            pipeline.shift();
            for (const transform of pipeline) {
                // Keep the original join context if we have one, otherwise
                // anchor the new stage at the model.
                const parent = this.parent.parent
                    ? { struct: this.parent.parent }
                    : { model: this.parent.getModel() };
                const s = new QueryStruct(structDef, undefined, parent, this.parent.prepareResultOptions);
                const q = QueryQuery.makeQuery({ type: 'turtle', name: '~computeLastStage~', pipeline: [transform] }, s, stageWriter, this.isJoinedSubquery);
                q.prepare(stageWriter);
                lastStageName = q.generateSQL(stageWriter);
                outputStruct = q.getResultStructDef();
                structDef = {
                    ...outputStruct,
                    name: lastStageName,
                    type: 'finalize',
                };
            }
        }
        return { lastStageName, outputStruct };
    }
|
|
2924
|
-
}
|
|
2925
|
-
// Reduce (group-by/aggregate) query; inherits all behavior from QueryQuery,
// which branches on the segment type internally. The subclass exists so
// makeQuery can dispatch by segment kind.
class QueryQueryReduce extends QueryQuery {
}
|
|
2927
|
-
// Project (select) query; inherits all behavior from QueryQuery, which
// branches on the segment type internally. The subclass exists so makeQuery
// can dispatch by segment kind.
class QueryQueryProject extends QueryQuery {
}
|
|
2929
|
-
// generates a single stage query for the index.
// wildcards have been expanded
// nested repeated fields are safe to use.
class QueryQueryIndexStage extends QueryQuery {
    constructor(fieldDef, parent, stageWriter, isJoinedSubquery) {
        super(fieldDef, parent, stageWriter, isJoinedSubquery);
        // maps output field name -> the reference path it was expanded from
        this.indexPaths = {};
        this.fieldDef = fieldDef;
    }
    // Resolve one index field reference to its query field and output name.
    expandField(f) {
        const as = f.path.join('.');
        const field = this.parent.getQueryFieldByName(f.path);
        return { as, field };
    }
    // Register every index field (and the optional weight measure) on the
    // result struct; each field becomes one group set in the union.
    expandFields(resultStruct) {
        let resultIndex = 1;
        const groupIndex = resultStruct.groupSet;
        this.maxGroupSet = groupIndex;
        for (const f of this.firstSegment.indexFields) {
            const { as, field } = this.expandField(f);
            const referencePath = f.path;
            this.indexPaths[as] = referencePath;
            resultStruct.addField(as, field, {
                resultIndex,
                type: 'result',
            }, undefined);
            if (field instanceof QueryAtomicField) {
                this.addDependancies(resultStruct, field);
            }
            resultIndex++;
        }
        const measure = this.firstSegment.weightMeasure;
        if (measure !== undefined) {
            const f = this.parent.getFieldByName([measure]);
            resultStruct.addField(measure, f, {
                resultIndex,
                type: 'result',
            }, undefined);
            this.addDependancies(resultStruct, f);
        }
        this.expandFilters(resultStruct);
    }
    // Build the two-stage index SQL: stage one pivots each indexed field into
    // (fieldName, fieldPath, fieldType, fieldValue, fieldRange, weight) rows
    // keyed by group_set; stage two coalesces value/range into fieldValue.
    generateSQL(stageWriter) {
        let measureSQL = 'COUNT(*)';
        const dialect = this.parent.dialect;
        const fieldNameColumn = dialect.sqlMaybeQuoteIdentifier('fieldName');
        const fieldPathColumn = dialect.sqlMaybeQuoteIdentifier('fieldPath');
        const fieldValueColumn = dialect.sqlMaybeQuoteIdentifier('fieldValue');
        const fieldTypeColumn = dialect.sqlMaybeQuoteIdentifier('fieldType');
        const fieldRangeColumn = dialect.sqlMaybeQuoteIdentifier('fieldRange');
        const weightColumn = dialect.sqlMaybeQuoteIdentifier('weight');
        const measureName = this.firstSegment.weightMeasure;
        if (measureName) {
            measureSQL = this.rootResult
                .getField(measureName)
                .f.generateExpression(this.rootResult);
        }
        // Only scalar result fields participate in the index.
        const fields = [];
        for (const [name, field] of this.rootResult.allFields) {
            const fi = field;
            if (fi.fieldUsage.type === 'result' && isScalarField(fi.f)) {
                const expression = fi.f.generateExpression(this.rootResult);
                const path = this.indexPaths[name] || [];
                fields.push({ name, path, type: fi.f.fieldDef.type, expression });
            }
        }
        let s = 'SELECT\n group_set,\n';
        s += ' CASE group_set\n';
        for (let i = 0; i < fields.length; i++) {
            s += ` WHEN ${i} THEN '${fields[i].name}'\n`;
        }
        s += ` END as ${fieldNameColumn},\n`;
        s += ' CASE group_set\n';
        for (let i = 0; i < fields.length; i++) {
            const path = pathToCol(fields[i].path);
            s += ` WHEN ${i} THEN '${path}'\n`;
        }
        s += ` END as ${fieldPathColumn},\n`;
        s += ' CASE group_set\n';
        for (let i = 0; i < fields.length; i++) {
            s += ` WHEN ${i} THEN '${fields[i].type}'\n`;
        }
        s += ` END as ${fieldTypeColumn},`;
        // fieldValue is only populated for string fields; the impossible
        // 99999 arm pins the CASE result type to string.
        s += ` CASE group_set WHEN 99999 THEN ${dialect.castToString('NULL')}\n`;
        for (let i = 0; i < fields.length; i++) {
            if (fields[i].type === 'string') {
                s += ` WHEN ${i} THEN ${fields[i].expression}\n`;
            }
        }
        s += ` END as ${fieldValueColumn},\n`;
        s += ` ${measureSQL} as ${weightColumn},\n`;
        // just in case we don't have any field types, force the case statement to have at least one value.
        s += " CASE group_set\n WHEN 99999 THEN ''";
        for (let i = 0; i < fields.length; i++) {
            if (fields[i].type === 'number') {
                s += ` WHEN ${i} THEN ${dialect.concat(`MIN(${dialect.castToString(fields[i].expression)})`, "' to '", dialect.castToString(`MAX(${fields[i].expression})`))}\n`;
            }
            if (fields[i].type === 'timestamp' || fields[i].type === 'date') {
                s += ` WHEN ${i} THEN ${dialect.concat(`MIN(${dialect.sqlDateToString(fields[i].expression)})`, "' to '", `MAX(${dialect.sqlDateToString(fields[i].expression)})`)}\n`;
            }
        }
        s += ` END as ${fieldRangeColumn}\n`;
        // CASE
        //   WHEN field_type = 'timestamp' or field_type = 'date'
        //   THEN MIN(field_value) || ' to ' || MAX(field_value)
        //   WHEN field_type = 'number'
        //   THEN
        // ELSE NULL
        // END as field_range\n`;
        s += this.generateSQLJoins(stageWriter);
        // One group_set row per indexed field.
        s += dialect.sqlGroupSetTable(fields.length) + '\n';
        s += this.generateSQLFilters(this.rootResult, 'where').sql('where');
        s += 'GROUP BY 1,2,3,4,5\n';
        // limit
        if (!(0, malloy_types_1.isRawSegment)(this.firstSegment) && this.firstSegment.limit) {
            s += `LIMIT ${this.firstSegment.limit}\n`;
        }
        // console.log(s);
        const resultStage = stageWriter.addStage(s);
        this.resultStage = stageWriter.addStage(`SELECT
  ${fieldNameColumn},
  ${fieldPathColumn},
  ${fieldTypeColumn},
  COALESCE(${fieldValueColumn}, ${fieldRangeColumn}) as ${fieldValueColumn},
  ${weightColumn}
FROM ${resultStage}\n`);
        return this.resultStage;
    }
}
|
|
3058
|
-
// A "raw" query passes an SQL select straight through as its own stage;
// no preparation, grouping, or expression generation happens.
class QueryQueryRaw extends QueryQuery {
    generateSQL(stageWriter) {
        const def = this.parent.structDef;
        if (def.type !== 'sql_select') {
            throw new Error('Invalid struct for QueryQueryRaw, currently only supports SQL');
        }
        return stageWriter.addStage(def.selectStr);
    }
    prepare() {
        // Do nothing!
    }
    // The result schema is simply the source schema, re-tagged as a result.
    getResultStructDef() {
        const def = this.parent.structDef;
        if (!(0, malloy_types_1.isSourceDef)(def)) {
            throw new Error(`Result cannot be type ${def.type}`);
        }
        return { ...def, type: 'query_result' };
    }
    getResultMetadata(_fi) {
        return undefined;
    }
}
|
|
3078
|
-
// An index query: computes per-field value/range summaries. Fields are split
// into stages (one per repeated-join path) and the stage results are UNIONed.
class QueryQueryIndex extends QueryQuery {
    constructor(fieldDef, parent, stageWriter, isJoinedSubquery) {
        super(fieldDef, parent, stageWriter, isJoinedSubquery);
        // each element is the list of index field refs for one stage
        this.stages = [];
        this.fieldDef = fieldDef;
        this.fieldsToStages();
    }
    // Partition the index fields into stages. Dialects that cannot UNION the
    // index get a single stage; otherwise fields reached through a repeated
    // ('many') join with multiple columns get their own stage, everything
    // else lands in stage 0.
    fieldsToStages() {
        const indexSeg = this.firstSegment;
        if (this.parent.dialect.dontUnionIndex) {
            this.stages = [indexSeg.indexFields];
            return;
        }
        // Collect the field references by unique path, the final
        // index will be a union indexes from each unique path
        const stageMap = {};
        for (const fref of indexSeg.indexFields) {
            if (fref.path.length > 1) {
                const stageRoot = pathToCol(fref.path.slice(0, fref.path.length - 1));
                const stage = stageMap[stageRoot];
                if (stage === undefined) {
                    const f = this.parent.nameMap.get(fref.path[0]);
                    // Only start a new stage for repeated joins that carry
                    // more than one field.
                    if (f instanceof QueryFieldStruct &&
                        f.fieldDef.join === 'many' &&
                        f.fieldDef.fields.length > 1) {
                        const toStage = [fref];
                        stageMap[stageRoot] = toStage;
                        this.stages.push(toStage);
                        continue;
                    }
                }
                else {
                    stage.push(fref);
                    continue;
                }
            }
            // Default bucket: the root stage.
            if (this.stages[0] === undefined) {
                this.stages[0] = [];
            }
            this.stages[0].push(fref);
        }
    }
    // Field expansion happens per-stage in QueryQueryIndexStage instead.
    expandFields(_resultStruct) { }
    // Run one QueryQueryIndexStage per stage and UNION ALL the results.
    generateSQL(stageWriter) {
        const indexSeg = this.firstSegment;
        const outputStageNames = [];
        for (const fields of this.stages) {
            const q = new QueryQueryIndexStage({
                ...this.fieldDef,
                pipeline: [
                    {
                        ...indexSeg,
                        indexFields: fields,
                    },
                ],
            }, this.parent, stageWriter, this.isJoinedSubquery);
            q.prepare(stageWriter);
            const lastStageName = q.generateSQL(stageWriter);
            outputStageNames.push(lastStageName);
        }
        if (outputStageNames.length === 1) {
            this.resultStage = outputStageNames[0];
        }
        else {
            this.resultStage = stageWriter.addStage(outputStageNames.map(n => `SELECT * FROM ${n}\n`).join(' UNION ALL \n'));
        }
        return this.resultStage;
    }
    /**
     * All Indexes have the same output schema.
     * fieldName is deprecated, dots in fieldName may or may not be join nodes
     * fieldPath is a URL encoded slash separated path
     */
    getResultStructDef() {
        const ret = {
            type: 'query_result',
            name: this.resultStage || 'result',
            dialect: this.parent.dialect.name,
            fields: [
                { type: 'string', name: 'fieldName' },
                { type: 'string', name: 'fieldPath' },
                { type: 'string', name: 'fieldValue' },
                { type: 'string', name: 'fieldType' },
                { type: 'number', name: 'weight', numberType: 'integer' },
            ],
            connection: this.parent.connectionName,
        };
        if (this.parent.structDef.modelAnnotation) {
            ret.modelAnnotation = this.parent.structDef.modelAnnotation;
        }
        return ret;
    }
}
|
|
3171
|
-
/*
 * A query's input is always a QueryStruct, which doubles as the namespace for
 * join tracking; a QueryFieldStruct is therefore a QueryField that owns a
 * QueryStruct. This shape exists because array and record types cannot
 * inherit from both QueryStruct and QueryField at once.
 */
class QueryFieldStruct extends QueryField {
    constructor(jfd, sourceArguments, parent, prepareResultOptions, referenceId) {
        super(jfd, parent, referenceId);
        this.fieldDef = jfd;
        this.queryStruct = new QueryStruct(jfd, sourceArguments, { struct: parent }, prepareResultOptions);
    }
    // Structs appear in wildcard expansion only when they are atomic values.
    includeInWildcard() {
        return this.isAtomic();
    }
    /*
     * Field-like behavior is delegated to the owned QueryStruct; these
     * proxies keep the QueryField interface intact.
     */
    getJoinableParent() {
        return this.queryStruct.getJoinableParent();
    }
    getFullOutputName() {
        return this.queryStruct.getFullOutputName();
    }
}
|
|
3199
|
-
/** Structure object as it is used to build a query */
|
|
3200
|
-
class QueryStruct {
|
|
3201
|
-
    constructor(structDef, sourceArguments, parent, prepareResultOptions) {
        this.structDef = structDef;
        this.sourceArguments = sourceArguments;
        this.prepareResultOptions = prepareResultOptions;
        // child field name -> QueryField/QueryFieldStruct/QueryQuery
        this.nameMap = new Map();
        // lazily computed caches (see modelCompilerFlags / arguments)
        this._modelTag = undefined;
        this._arguments = undefined;
        this.setParent(parent);
        if ('model' in parent) {
            // Root struct: owns the model, the alias map, and the connection.
            this.model = parent.model;
            this.pathAliasMap = new Map();
            if ((0, malloy_types_1.isSourceDef)(structDef)) {
                this.connectionName = structDef.connection;
            }
            else {
                throw new Error('All root StructDefs should be a baseTable');
            }
        }
        else {
            // Child struct: share the root's model, alias map and connection.
            this.model = this.getModel();
            this.pathAliasMap = this.root().pathAliasMap;
            this.connectionName = this.root().connectionName;
        }
        // Dialect comes from the nearest ancestor that is a source.
        this.dialect = (0, dialect_1.getDialect)(this.findFirstDialect());
        this.addFieldsFromFieldList(structDef.fields);
    }
|
|
3227
|
-
modelCompilerFlags() {
|
|
3228
|
-
if (this._modelTag === undefined) {
|
|
3229
|
-
const annotation = this.structDef.modelAnnotation;
|
|
3230
|
-
const { tag } = (0, annotation_1.annotationToTag)(annotation, { prefix: /^##!\s*/ });
|
|
3231
|
-
this._modelTag = tag;
|
|
3232
|
-
}
|
|
3233
|
-
return this._modelTag;
|
|
3234
|
-
}
|
|
3235
|
-
findFirstDialect() {
|
|
3236
|
-
if ((0, malloy_types_1.isSourceDef)(this.structDef)) {
|
|
3237
|
-
return this.structDef.dialect;
|
|
3238
|
-
}
|
|
3239
|
-
if (this.parent) {
|
|
3240
|
-
return this.parent.findFirstDialect();
|
|
3241
|
-
}
|
|
3242
|
-
throw new Error('Cannot create QueryStruct from record with model parent');
|
|
3243
|
-
}
|
|
3244
|
-
    // Record the pre-computed SQL alias for a synthetic record field; read
    // back by getIdentifier() for records that have an expression.
    informOfAliasValue(av) {
        this.recordAlias = av;
    }
|
|
3247
|
-
maybeEmitParameterizedSourceUsage() {
|
|
3248
|
-
var _a;
|
|
3249
|
-
if ((0, malloy_types_1.isSourceDef)(this.structDef)) {
|
|
3250
|
-
const paramsAndArgs = {
|
|
3251
|
-
...this.structDef.parameters,
|
|
3252
|
-
...this.structDef.arguments,
|
|
3253
|
-
};
|
|
3254
|
-
if (Object.values(paramsAndArgs).length === 0)
|
|
3255
|
-
return;
|
|
3256
|
-
(_a = this.eventStream) === null || _a === void 0 ? void 0 : _a.emit('parameterized-source-compiled', {
|
|
3257
|
-
parameters: paramsAndArgs,
|
|
3258
|
-
});
|
|
3259
|
-
}
|
|
3260
|
-
}
|
|
3261
|
-
    // Rewrite a parameter's value expression, replacing every reference to a
    // parent-scope parameter with that parameter's (recursively resolved)
    // value. Returns a new parameter object; the input is not mutated.
    resolveParentParameterReferences(param) {
        return {
            ...param,
            value: param.value === null
                ? null
                : (0, utils_1.exprMap)(param.value, frag => {
                    if (frag.node === 'parameter') {
                        // Look the reference up in the parent scope when there
                        // is one, otherwise in our own arguments.
                        const resolved1 = (this.parent ? this.parent.arguments() : this.arguments())[frag.path[0]];
                        // Resolve transitively: the parent's value may itself
                        // reference a grandparent parameter.
                        const resolved2 = this.parent
                            ? this.parent.resolveParentParameterReferences(resolved1)
                            : resolved1;
                        if (resolved2.value === null) {
                            throw new Error('Invalid parameter value');
                        }
                        else {
                            return resolved2.value;
                        }
                    }
                    return frag;
                }),
        };
    }
|
|
3283
|
-
    // Compute (and cache) the effective parameter values for this source:
    // declared parameter defaults, overridden by structDef arguments, then by
    // sourceArguments, with parent parameter references resolved.
    arguments() {
        var _a;
        if (this._arguments !== undefined) {
            return this._arguments;
        }
        this._arguments = {};
        if ((0, malloy_types_1.isSourceDef)(this.structDef)) {
            // First, copy over all parameters, to get default values
            const params = (_a = this.structDef.parameters) !== null && _a !== void 0 ? _a : {};
            for (const parameterName in params) {
                this._arguments[parameterName] = params[parameterName];
            }
            // Then, copy over arguments to override default values
            const args = { ...this.structDef.arguments, ...this.sourceArguments };
            for (const parameterName in args) {
                const orig = args[parameterName];
                this._arguments[parameterName] =
                    this.resolveParentParameterReferences(orig);
            }
        }
        return this._arguments;
    }
|
|
3305
|
-
addFieldsFromFieldList(fields) {
|
|
3306
|
-
for (const field of fields) {
|
|
3307
|
-
const as = (0, malloy_types_1.getIdentifier)(field);
|
|
3308
|
-
if (field.type === 'turtle') {
|
|
3309
|
-
this.addFieldToNameMap(as, QueryQuery.makeQuery(field, this, undefined, false));
|
|
3310
|
-
}
|
|
3311
|
-
else if ((0, malloy_types_1.isAtomic)(field) || (0, malloy_types_1.isJoinedSource)(field)) {
|
|
3312
|
-
this.addFieldToNameMap(as, this.makeQueryField(field));
|
|
3313
|
-
}
|
|
3314
|
-
else {
|
|
3315
|
-
throw new Error('mtoy did nit add field');
|
|
3316
|
-
}
|
|
3317
|
-
}
|
|
3318
|
-
// if we don't have distinct key yet for this struct, add it.
|
|
3319
|
-
if (!this.nameMap.has('__distinct_key')) {
|
|
3320
|
-
this.addFieldToNameMap('__distinct_key', new QueryFieldDistinctKey({ type: 'string', name: '__distinct_key' }, this));
|
|
3321
|
-
}
|
|
3322
|
-
}
|
|
3323
|
-
// generate unique string for the alias.
|
|
3324
|
-
// return a string that can be used to represent the full
|
|
3325
|
-
// join path to a struct.
|
|
3326
|
-
getAliasIdentifier() {
|
|
3327
|
-
const path = this.getFullOutputName();
|
|
3328
|
-
const ret = this.pathAliasMap.get(path);
|
|
3329
|
-
// make a unique alias name
|
|
3330
|
-
if (ret === undefined) {
|
|
3331
|
-
const aliases = Array.from(this.pathAliasMap.values());
|
|
3332
|
-
const base = identifierNormalize((0, malloy_types_1.getIdentifier)(this.structDef));
|
|
3333
|
-
let name = `${base}_0`;
|
|
3334
|
-
let n = 1;
|
|
3335
|
-
while (aliases.includes(name) && n < 1000) {
|
|
3336
|
-
n++;
|
|
3337
|
-
name = `${base}_${n}`;
|
|
3338
|
-
}
|
|
3339
|
-
if (n < 1000) {
|
|
3340
|
-
this.pathAliasMap.set(path, name);
|
|
3341
|
-
return name;
|
|
3342
|
-
}
|
|
3343
|
-
else {
|
|
3344
|
-
throw new Error('Internal Error: cannot create unique alias name');
|
|
3345
|
-
}
|
|
3346
|
-
// get the malloy name for this struct (will include a trailing dot)
|
|
3347
|
-
// return this.getFullOutputName().replace(/\.$/, "").replace(/\./g, "_o_");
|
|
3348
|
-
}
|
|
3349
|
-
else {
|
|
3350
|
-
return ret;
|
|
3351
|
-
}
|
|
3352
|
-
}
|
|
3353
|
-
getSQLIdentifier() {
|
|
3354
|
-
if (this.unnestWithNumbers() && this.parent !== undefined) {
|
|
3355
|
-
const x = this.parent.getSQLIdentifier() +
|
|
3356
|
-
'.' +
|
|
3357
|
-
(0, malloy_types_1.getIdentifier)(this.structDef) +
|
|
3358
|
-
`[${this.getIdentifier()}.__row_id]`;
|
|
3359
|
-
return x;
|
|
3360
|
-
}
|
|
3361
|
-
else {
|
|
3362
|
-
return this.getIdentifier();
|
|
3363
|
-
}
|
|
3364
|
-
}
|
|
3365
|
-
sqlChildReference(name, expand) {
|
|
3366
|
-
let parentRef = this.getSQLIdentifier();
|
|
3367
|
-
if (expand && (0, malloy_types_1.isAtomic)(this.structDef) && (0, malloy_types_1.hasExpression)(this.structDef)) {
|
|
3368
|
-
if (!this.parent) {
|
|
3369
|
-
throw new Error(`Cannot expand reference to ${name} without parent`);
|
|
3370
|
-
}
|
|
3371
|
-
parentRef = expand.field.exprToSQL(expand.result, this.parent, this.structDef.e);
|
|
3372
|
-
}
|
|
3373
|
-
let refType = 'table';
|
|
3374
|
-
if (this.structDef.type === 'record') {
|
|
3375
|
-
refType = 'record';
|
|
3376
|
-
}
|
|
3377
|
-
else if (this.structDef.type === 'array') {
|
|
3378
|
-
refType =
|
|
3379
|
-
this.structDef.elementTypeDef.type === 'record_element'
|
|
3380
|
-
? 'array[record]'
|
|
3381
|
-
: 'array[scalar]';
|
|
3382
|
-
}
|
|
3383
|
-
else if (this.structDef.type === 'nest_source') {
|
|
3384
|
-
refType = 'nest source';
|
|
3385
|
-
}
|
|
3386
|
-
const child = this.getChildByName(name);
|
|
3387
|
-
const childType = (child === null || child === void 0 ? void 0 : child.fieldDef.type) || 'unknown';
|
|
3388
|
-
return this.dialect.sqlFieldReference(parentRef, refType, name, childType);
|
|
3389
|
-
}
|
|
3390
|
-
    // return the name of the field in SQL
    getIdentifier() {
        // if it is the root table, use provided alias if we have one.
        if ((0, malloy_types_1.isBaseTable)(this.structDef)) {
            return 'base';
        }
        // If this is a synthetic column, return the expression rather than the name
        // because the name will not exist. Only for records because the other types
        // will have joins and thus be in the namespace. We can't compute it here
        // because we don't have access to the Query to call exprToSQL.
        if (this.structDef.type === 'record' && (0, malloy_types_1.hasExpression)(this.structDef)) {
            if (this.recordAlias) {
                return this.recordAlias;
            }
            // informOfAliasValue() was never called for this record.
            throw new Error('INTERNAL ERROR, record field alias not pre-computed');
        }
        // if this is an inline object, include the parents alias.
        if (this.structDef.type === 'record' && this.parent) {
            return this.parent.sqlChildReference((0, malloy_types_1.getIdentifier)(this.structDef), undefined);
        }
        // we are somewhere in the join tree. Make sure the alias is unique.
        return this.getAliasIdentifier();
    }
|
|
3413
|
-
// return the name of the field in Malloy
|
|
3414
|
-
getFullOutputName() {
|
|
3415
|
-
if (this.parent) {
|
|
3416
|
-
return (this.parent.getFullOutputName() + (0, malloy_types_1.getIdentifier)(this.structDef) + '.');
|
|
3417
|
-
}
|
|
3418
|
-
else {
|
|
3419
|
-
return '';
|
|
3420
|
-
}
|
|
3421
|
-
}
|
|
3422
|
-
needsSymetricCalculation(resultSet) {
|
|
3423
|
-
const joinName = this.getJoinableParent().getIdentifier();
|
|
3424
|
-
const join = resultSet.root().joins.get(joinName);
|
|
3425
|
-
if (join) {
|
|
3426
|
-
return !join.leafiest;
|
|
3427
|
-
}
|
|
3428
|
-
throw new Error(`Join ${joinName} not found in result set`);
|
|
3429
|
-
}
|
|
3430
|
-
    // True when this struct is an array and the dialect unnests arrays via
    // generated row numbers (see getSQLIdentifier's __row_id indexing).
    unnestWithNumbers() {
        return this.dialect.unnestWithNumbers && this.structDef.type === 'array';
    }
|
|
3433
|
-
getJoinableParent() {
|
|
3434
|
-
// if it is inline it should always have a parent
|
|
3435
|
-
if (this.structDef.type === 'record') {
|
|
3436
|
-
if (this.parent) {
|
|
3437
|
-
return this.parent.getJoinableParent();
|
|
3438
|
-
}
|
|
3439
|
-
else {
|
|
3440
|
-
throw new Error('Internal Error: inline struct cannot be root');
|
|
3441
|
-
}
|
|
3442
|
-
}
|
|
3443
|
-
return this;
|
|
3444
|
-
}
|
|
3445
|
-
    // Register a field under name `as`; duplicate names are a hard error.
    addFieldToNameMap(as, n) {
        if (this.nameMap.has(as)) {
            throw new Error(`Redefinition of ${as}`);
        }
        this.nameMap.set(as, n);
    }
|
|
3451
|
-
/** the the primary key or throw an error. */
|
|
3452
|
-
getPrimaryKeyField(fieldDef) {
|
|
3453
|
-
let pk;
|
|
3454
|
-
if ((pk = this.primaryKey())) {
|
|
3455
|
-
return pk;
|
|
3456
|
-
}
|
|
3457
|
-
else {
|
|
3458
|
-
throw new Error(`Missing primary key for ${fieldDef}`);
|
|
3459
|
-
}
|
|
3460
|
-
}
|
|
3461
|
-
/**
 * called after all structure has been loaded. Examine this structure to see
 * if it is based on a query and if it is, add the output fields (unless
 * they exist) to the structure. Recurses into all child struct fields.
 */
resolveQueryFields() {
    if (this.structDef.type === 'query_source') {
        // Compile the backing query to discover its output schema; the last
        // struct in the result is the final stage's output.
        const resultStruct = this.model
            .loadQuery(this.structDef.query, undefined, this.prepareResultOptions)
            .structs.pop();
        // should never happen.
        if (!resultStruct) {
            throw new Error("Internal Error, query didn't produce a struct");
        }
        // NOTE(review): shallow copy — `structDef.fields` is the SAME array
        // as `this.structDef.fields`, so the push below also mutates the
        // original; confirm this aliasing is intended.
        const structDef = { ...this.structDef };
        for (const f of resultStruct.fields) {
            const as = (0, malloy_types_1.getIdentifier)(f);
            // Only add query-output fields not already defined on the source.
            if (!this.nameMap.has(as)) {
                structDef.fields.push(f);
                this.nameMap.set(as, this.makeQueryField(f));
            }
        }
        this.structDef = structDef;
        // Inherit the query result's primary key unless one is already set.
        if (!this.structDef.primaryKey && resultStruct.primaryKey) {
            this.structDef.primaryKey = resultStruct.primaryKey;
        }
    }
    // Recurse into nested struct fields so the whole tree is resolved.
    for (const [, v] of this.nameMap) {
        if (v instanceof QueryFieldStruct) {
            v.queryStruct.resolveQueryFields();
        }
    }
}
|
|
3494
|
-
getModel() {
|
|
3495
|
-
if (this.model) {
|
|
3496
|
-
return this.model;
|
|
3497
|
-
}
|
|
3498
|
-
else {
|
|
3499
|
-
if (this.parent === undefined) {
|
|
3500
|
-
throw new Error('Expected this query struct to have a parent, as no model was present.');
|
|
3501
|
-
}
|
|
3502
|
-
return this.parent.getModel();
|
|
3503
|
-
}
|
|
3504
|
-
}
|
|
3505
|
-
get eventStream() {
|
|
3506
|
-
return this.getModel().eventStream;
|
|
3507
|
-
}
|
|
3508
|
-
setParent(parent) {
|
|
3509
|
-
if ('struct' in parent) {
|
|
3510
|
-
this.parent = parent.struct;
|
|
3511
|
-
}
|
|
3512
|
-
if ('model' in parent) {
|
|
3513
|
-
this.model = parent.model;
|
|
3514
|
-
}
|
|
3515
|
-
else {
|
|
3516
|
-
this.model = this.getModel();
|
|
3517
|
-
}
|
|
3518
|
-
}
|
|
3519
|
-
/** makes a new queryable field object from a fieldDef */
|
|
3520
|
-
makeQueryField(field, referenceId) {
|
|
3521
|
-
switch (field.type) {
|
|
3522
|
-
case 'array':
|
|
3523
|
-
case 'record':
|
|
3524
|
-
case 'query_source':
|
|
3525
|
-
case 'table':
|
|
3526
|
-
case 'sql_select':
|
|
3527
|
-
case 'composite':
|
|
3528
|
-
return new QueryFieldStruct(field, undefined, this, this.prepareResultOptions);
|
|
3529
|
-
case 'string':
|
|
3530
|
-
return new QueryFieldString(field, this, referenceId);
|
|
3531
|
-
case 'date':
|
|
3532
|
-
return new QueryFieldDate(field, this, referenceId);
|
|
3533
|
-
case 'timestamp':
|
|
3534
|
-
return new QueryFieldTimestamp(field, this, referenceId);
|
|
3535
|
-
case 'number':
|
|
3536
|
-
return new QueryFieldNumber(field, this, referenceId);
|
|
3537
|
-
case 'boolean':
|
|
3538
|
-
return new QueryFieldBoolean(field, this, referenceId);
|
|
3539
|
-
case 'json':
|
|
3540
|
-
return new QueryFieldJSON(field, this, referenceId);
|
|
3541
|
-
case 'sql native':
|
|
3542
|
-
return new QueryFieldUnsupported(field, this, referenceId);
|
|
3543
|
-
case 'turtle':
|
|
3544
|
-
return QueryQuery.makeQuery(field, this, undefined, false);
|
|
3545
|
-
default:
|
|
3546
|
-
throw new Error(`unknown field definition ${(JSON.stringify(field), undefined, 2)}`);
|
|
3547
|
-
}
|
|
3548
|
-
}
|
|
3549
|
-
// Produce the SQL source expression (table path, subquery, or stage name)
// that this struct contributes to a FROM clause.
structSourceSQL(stageWriter) {
    var _a, _b;
    switch (this.structDef.type) {
        case 'table':
            // Physical table: quote per dialect rules.
            return this.dialect.quoteTablePath(this.structDef.tablePath);
        case 'composite':
            // TODO: throw an error here; not simple because we call into this
            // code currently before the composite source is resolved in some cases
            return '{COMPOSITE SOURCE}';
        case 'finalize':
            return this.structDef.name;
        case 'sql_select':
            // Raw SQL select used as an inline subquery.
            return `(${this.structDef.selectStr})`;
        case 'nest_source':
            return this.structDef.pipeSQL;
        case 'query_source': {
            // cache derived table.
            // When materialization is requested and the query is annotated for
            // it, emit a reference to the materialized table instead.
            if (((_a = this.prepareResultOptions) === null || _a === void 0 ? void 0 : _a.replaceMaterializedReferences) &&
                (0, utils_2.shouldMaterialize)(this.structDef.query.annotation)) {
                return stageWriter.addMaterializedQuery((0, malloy_types_1.getIdentifier)(this.structDef), this.structDef.query, (_b = this.prepareResultOptions) === null || _b === void 0 ? void 0 : _b.materializedTablePrefix);
            }
            else {
                // returns the stage name.
                return this.model.loadQuery(this.structDef.query, stageWriter, this.prepareResultOptions, false, true // this is an intermediate stage.
                ).lastStageName;
            }
        }
        default:
            // NOTE(review): the message below is missing its closing quote
            // after the type — cosmetic only; left as-is in this doc pass.
            throw new Error(`Cannot create SQL StageWriter from '${(0, malloy_types_1.getIdentifier)(this.structDef)}' type '${this.structDef.type}`);
    }
}
|
|
3580
|
-
root() {
|
|
3581
|
-
return this.parent ? this.parent.root() : this;
|
|
3582
|
-
}
|
|
3583
|
-
primaryKey() {
|
|
3584
|
-
if ((0, malloy_types_1.isSourceDef)(this.structDef) && this.structDef.primaryKey) {
|
|
3585
|
-
return this.getDimensionByName([this.structDef.primaryKey]);
|
|
3586
|
-
}
|
|
3587
|
-
else {
|
|
3588
|
-
return undefined;
|
|
3589
|
-
}
|
|
3590
|
-
}
|
|
3591
|
-
getChildByName(name) {
|
|
3592
|
-
return this.nameMap.get(name);
|
|
3593
|
-
}
|
|
3594
|
-
/** convert a path into a field reference */
|
|
3595
|
-
getFieldByName(path) {
|
|
3596
|
-
let found = undefined;
|
|
3597
|
-
let lookIn = this;
|
|
3598
|
-
let notFound = path[0];
|
|
3599
|
-
for (const n of path) {
|
|
3600
|
-
found = lookIn === null || lookIn === void 0 ? void 0 : lookIn.getChildByName(n);
|
|
3601
|
-
if (!found) {
|
|
3602
|
-
notFound = n;
|
|
3603
|
-
break;
|
|
3604
|
-
}
|
|
3605
|
-
lookIn =
|
|
3606
|
-
found instanceof QueryFieldStruct ? found.queryStruct : undefined;
|
|
3607
|
-
}
|
|
3608
|
-
if (found === undefined) {
|
|
3609
|
-
const pathErr = path.length > 1 ? ` in ${path.join('.')}` : '';
|
|
3610
|
-
throw new Error(`${notFound} not found${pathErr}`);
|
|
3611
|
-
}
|
|
3612
|
-
return found;
|
|
3613
|
-
}
|
|
3614
|
-
// structs referenced in queries are converted to fields.
|
|
3615
|
-
getQueryFieldByName(name) {
|
|
3616
|
-
const field = this.getFieldByName(name);
|
|
3617
|
-
if (field instanceof QueryFieldStruct) {
|
|
3618
|
-
throw new Error(`Cannot reference ${name.join('.')} as a scalar'`);
|
|
3619
|
-
}
|
|
3620
|
-
return field;
|
|
3621
|
-
}
|
|
3622
|
-
// Resolve a query field reference. When the reference carries its own
// annotation or drill expression, rebuild the field object so those are
// attached; otherwise return the source field as-is.
getQueryFieldReference(f) {
    const { path, annotation, drillExpression } = f;
    const field = this.getFieldByName(path);
    if (annotation || drillExpression) {
        if (field.parent === undefined) {
            throw new Error('Inconcievable, field reference to orphaned query field');
        }
        // Made a field object from the source, but the annotations were computed by the compiler
        // when it generated the reference, and has both the source and reference annotations included.
        if (field instanceof QueryFieldStruct) {
            const newDef = { ...field.fieldDef, annotation, drillExpression };
            return new QueryFieldStruct(newDef, undefined, field.parent, {}, field.referenceId);
        }
        else {
            const newDef = { ...field.fieldDef, annotation, drillExpression };
            // Rebuild through the parent so the wrapper class matches the type.
            return field.parent.makeQueryField(newDef, field.referenceId);
        }
    }
    return field;
}
|
|
3642
|
-
getDimensionOrMeasureByName(name) {
|
|
3643
|
-
const field = this.getFieldByName(name);
|
|
3644
|
-
if (!field.isAtomic()) {
|
|
3645
|
-
throw new Error(`${name} is not an atomic field? Inconceivable!`);
|
|
3646
|
-
}
|
|
3647
|
-
return field;
|
|
3648
|
-
}
|
|
3649
|
-
/** returns a query object for the given name */
|
|
3650
|
-
getDimensionByName(name) {
|
|
3651
|
-
const field = this.getFieldByName(name);
|
|
3652
|
-
if (isBasicScalar(field)) {
|
|
3653
|
-
return field;
|
|
3654
|
-
}
|
|
3655
|
-
throw new Error(`${name} is not an atomic scalar field? Inconceivable!`);
|
|
3656
|
-
}
|
|
3657
|
-
/** returns a query object for the given name */
|
|
3658
|
-
getStructByName(name) {
|
|
3659
|
-
if (name.length === 0) {
|
|
3660
|
-
return this;
|
|
3661
|
-
}
|
|
3662
|
-
const struct = this.getFieldByName(name);
|
|
3663
|
-
if (struct instanceof QueryFieldStruct) {
|
|
3664
|
-
return struct.queryStruct;
|
|
3665
|
-
}
|
|
3666
|
-
throw new Error(`Error: Path to structure not found '${name.join('.')}'`);
|
|
3667
|
-
}
|
|
3668
|
-
getDistinctKey() {
|
|
3669
|
-
if (this.structDef.type !== 'record') {
|
|
3670
|
-
return this.getDimensionByName(['__distinct_key']);
|
|
3671
|
-
}
|
|
3672
|
-
else if (this.parent) {
|
|
3673
|
-
return this.parent.getDistinctKey();
|
|
3674
|
-
}
|
|
3675
|
-
else {
|
|
3676
|
-
throw new Error('Asking a record for a primary key? Inconceivable!');
|
|
3677
|
-
}
|
|
3678
|
-
}
|
|
3679
|
-
applyStructFiltersToTurtleDef(turtleDef) {
|
|
3680
|
-
const pipeline = [...turtleDef.pipeline];
|
|
3681
|
-
const annotation = turtleDef.annotation;
|
|
3682
|
-
const addedFilters = turtleDef.filterList || [];
|
|
3683
|
-
pipeline[0] = {
|
|
3684
|
-
...pipeline[0],
|
|
3685
|
-
filterList: addedFilters.concat(pipeline[0].filterList || [], (0, malloy_types_1.isSourceDef)(this.structDef) ? this.structDef.filterList || [] : []),
|
|
3686
|
-
};
|
|
3687
|
-
const flatTurtleDef = {
|
|
3688
|
-
type: 'turtle',
|
|
3689
|
-
name: turtleDef.name,
|
|
3690
|
-
pipeline,
|
|
3691
|
-
annotation,
|
|
3692
|
-
location: turtleDef.location,
|
|
3693
|
-
};
|
|
3694
|
-
return flatTurtleDef;
|
|
3695
|
-
}
|
|
3696
|
-
}
|
|
3697
|
-
// const exploreSearchSQLMap = new Map<string, string>();
|
|
3698
|
-
/** Top-level compiler: turns Malloy query definitions in a model into SQL. */
|
|
3699
|
-
class QueryModel {
    /**
     * @param modelDef compiled model definition to load immediately (optional)
     * @param eventStream optional stream for compiler events
     */
    constructor(modelDef, eventStream) {
        this.eventStream = eventStream;
        this.dialect = new standardsql_1.StandardSQLDialect();
        // dialect: Dialect = new PostgresDialect();
        this.modelDef = undefined;
        this.structs = new Map();
        // Cache of compiled search-index SQL, keyed by explore name.
        this.exploreSearchSQLMap = new Map();
        if (modelDef) {
            this.loadModelFromDef(modelDef);
        }
    }
    // Build a QueryStruct for every source in the model and resolve its
    // query-derived fields.
    loadModelFromDef(modelDef) {
        this.modelDef = modelDef;
        for (const s of Object.values(this.modelDef.contents)) {
            let qs;
            if ((0, malloy_types_1.isSourceDef)(s)) {
                qs = new QueryStruct(s, undefined, { model: this }, {});
                this.structs.set((0, malloy_types_1.getIdentifier)(s), qs);
                qs.resolveQueryFields();
            }
            else if (s.type === 'query') {
                /* TODO */
            }
            else {
                throw new Error('Internal Error: Unknown structure type');
            }
        }
    }
    // Look up a named source; throws when it does not exist.
    getStructByName(name) {
        const s = this.structs.get(name);
        if (s) {
            return s;
        }
        throw new Error(`Struct ${name} not found in model.`);
    }
    // Resolve a struct reference (name or inline definition) to a QueryStruct.
    // When source arguments are supplied for a named source, a fresh struct is
    // built so the arguments don't leak into the cached one.
    getStructFromRef(structRef, sourceArguments, prepareResultOptions) {
        var _a;
        prepareResultOptions !== null && prepareResultOptions !== void 0 ? prepareResultOptions : (prepareResultOptions = {});
        if (typeof structRef === 'string') {
            const ret = this.getStructByName(structRef);
            if (sourceArguments !== undefined) {
                return new QueryStruct(ret.structDef, sourceArguments, (_a = ret.parent) !== null && _a !== void 0 ? _a : { model: this }, prepareResultOptions);
            }
            return ret;
        }
        return new QueryStruct(structRef, sourceArguments, { model: this }, prepareResultOptions);
    }
    /**
     * Compile a query's pipeline into SQL stages.
     * @param emitFinalStage append the dialect's final stage when it has one
     * @param isJoinedSubquery true when compiling an intermediate joined stage
     * @returns last stage name, stage writer, output structs, connection name
     */
    loadQuery(query, stageWriter, prepareResultOptions, emitFinalStage = false, isJoinedSubquery = false) {
        var _a;
        const malloy = '';
        if (!stageWriter) {
            stageWriter = new StageWriter(true, undefined);
        }
        // Wrap the pipeline in a throwaway turtle so it compiles uniformly.
        const turtleDef = {
            type: 'turtle',
            name: 'ignoreme',
            pipeline: query.pipeline,
            filterList: query.filterList,
        };
        // Prefer the composite-resolved source when one was computed.
        const structRef = (_a = query.compositeResolvedSourceDef) !== null && _a !== void 0 ? _a : query.structRef;
        const q = QueryQuery.makeQuery(turtleDef, this.getStructFromRef(structRef, query.sourceArguments, prepareResultOptions), stageWriter, isJoinedSubquery);
        const ret = q.generateSQLFromPipeline(stageWriter);
        if (emitFinalStage && q.parent.dialect.hasFinalStage) {
            // const fieldNames: string[] = [];
            // for (const f of ret.outputStruct.fields) {
            //   fieldNames.push(getIdentifier(f));
            // }
            // Only atomic output fields are named in the final stage.
            const fieldNames = [];
            for (const f of ret.outputStruct.fields) {
                if ((0, malloy_types_1.isAtomic)(f)) {
                    const quoted = q.parent.dialect.sqlMaybeQuoteIdentifier(f.name);
                    fieldNames.push(quoted);
                }
            }
            // const fieldNames = getAtomicFields(ret.outputStruct).map(fieldDef =>
            //   q.parent.dialect.sqlMaybeQuoteIdentifier(fieldDef.name)
            // );
            ret.lastStageName = stageWriter.addStage(q.parent.dialect.sqlFinalStage(ret.lastStageName, fieldNames));
        }
        return {
            lastStageName: ret.lastStageName,
            malloy,
            stageWriter,
            structs: [ret.outputStruct],
            connectionName: q.parent.connectionName,
        };
    }
    // Apply a default row limit to the last pipeline segment unless it is a
    // raw segment or already has a limit. Returns the (possibly new) query
    // and the limit that was added, if any.
    addDefaultRowLimit(query, defaultRowLimit) {
        const nope = { query, addedDefaultRowLimit: undefined };
        if (defaultRowLimit === undefined)
            return nope;
        const lastSegment = query.pipeline[query.pipeline.length - 1];
        if (lastSegment.type === 'raw')
            return nope;
        if (lastSegment.limit !== undefined)
            return nope;
        return {
            query: {
                ...query,
                pipeline: [
                    ...query.pipeline.slice(0, -1),
                    {
                        ...lastSegment,
                        limit: defaultRowLimit,
                    },
                ],
            },
            addedDefaultRowLimit: defaultRowLimit,
        };
    }
    /**
     * Compile a full query to SQL plus result metadata.
     * @param finalize emit the dialect's final stage when it has one
     */
    compileQuery(query, prepareResultOptions, finalize = true) {
        var _a, _b, _c;
        // NOTE(review): `newModel` is never assigned, so `m` below is always
        // `this`; appears to be vestigial.
        let newModel;
        const addDefaultRowLimit = this.addDefaultRowLimit(query, prepareResultOptions === null || prepareResultOptions === void 0 ? void 0 : prepareResultOptions.defaultRowLimit);
        query = addDefaultRowLimit.query;
        const addedDefaultRowLimit = addDefaultRowLimit.addedDefaultRowLimit;
        const m = newModel || this;
        const ret = m.loadQuery(query, undefined, prepareResultOptions, finalize, false);
        const structRef = (_a = query.compositeResolvedSourceDef) !== null && _a !== void 0 ? _a : query.structRef;
        const sourceExplore = typeof structRef === 'string'
            ? structRef
            : structRef.as || structRef.name;
        const sourceArguments = (_b = query.sourceArguments) !== null && _b !== void 0 ? _b : (typeof structRef === 'string' ? undefined : structRef.arguments);
        // LTNote: I don't understand why this might be here. It should have happened in loadQuery...
        if (finalize && this.dialect.hasFinalStage) {
            ret.lastStageName = ret.stageWriter.addStage(
            // note this will be broken on duckDB waiting on a real fix.
            this.dialect.sqlFinalStage(ret.lastStageName, []));
        }
        return {
            lastStageName: ret.lastStageName,
            malloy: ret.malloy,
            sql: ret.stageWriter.generateSQLStages(),
            dependenciesToMaterialize: ret.stageWriter.dependenciesToMaterialize,
            materialization: (0, utils_2.shouldMaterialize)(query.annotation)
                ? (0, utils_2.buildQueryMaterializationSpec)((_c = query.location) === null || _c === void 0 ? void 0 : _c.url, query.name, prepareResultOptions === null || prepareResultOptions === void 0 ? void 0 : prepareResultOptions.materializedTablePrefix)
                : undefined,
            structs: ret.structs,
            sourceExplore,
            sourceFilters: query.filterList,
            sourceArguments,
            queryName: query.name,
            connectionName: ret.connectionName,
            annotation: query.annotation,
            queryTimezone: ret.structs[0].queryTimezone,
            defaultRowLimitAdded: addedDefaultRowLimit,
        };
    }
    /**
     * Run a LIKE-based search over an explore's index table, building and
     * caching the index query's SQL on first use. Returns matching rows, or
     * undefined when the connection cannot persist temporary tables.
     */
    async searchIndex(connection, explore, searchValue, limit = 1000, searchField = undefined) {
        if (!connection.canPersist()) {
            return undefined;
        }
        // make a search index if one isn't modelled.
        const struct = this.getStructByName(explore);
        let indexStar = [];
        for (const [fn, fv] of struct.nameMap) {
            if (isScalarField(fv) && fv.includeInWildcard()) {
                indexStar.push({ type: 'fieldref', path: [fn] });
            }
        }
        // Deterministic field order so the cached SQL is stable.
        indexStar = indexStar.sort((a, b) => a.path[0].localeCompare(b.path[0]));
        const indexQuery = {
            structRef: explore,
            pipeline: [
                {
                    type: 'index',
                    indexFields: indexStar,
                    sample: struct.dialect.defaultSampling,
                    outputStruct: {
                        type: 'query_result',
                        name: 'index',
                        connection: struct.connectionName,
                        dialect: struct.dialect.name,
                        fields: [
                            { name: 'fieldName', type: 'string' },
                            { name: 'fieldPath', type: 'string' },
                            { name: 'fieldType', type: 'string' },
                            { name: 'weight', type: 'number' },
                            { name: 'fieldValue', type: 'string' },
                        ],
                    },
                },
            ],
        };
        const fieldNameColumn = struct.dialect.sqlMaybeQuoteIdentifier('fieldName');
        const fieldPathColumn = struct.dialect.sqlMaybeQuoteIdentifier('fieldPath');
        const fieldValueColumn = struct.dialect.sqlMaybeQuoteIdentifier('fieldValue');
        const fieldTypeColumn = struct.dialect.sqlMaybeQuoteIdentifier('fieldType');
        const weightColumn = struct.dialect.sqlMaybeQuoteIdentifier('weight');
        // if we've compiled the SQL before use it otherwise
        let sqlPDT = this.exploreSearchSQLMap.get(explore);
        if (sqlPDT === undefined) {
            sqlPDT = this.compileQuery(indexQuery, undefined, false).sql;
            this.exploreSearchSQLMap.set(explore, sqlPDT);
        }
        // NOTE(review): `searchValue` and `searchField` are interpolated into
        // SQL; `searchValue` goes through generateSQLStringLiteral but
        // `searchField` is concatenated raw — potential SQL injection if it
        // can come from untrusted input. Confirm and parameterize upstream.
        let query = `SELECT
              ${fieldNameColumn},
              ${fieldPathColumn},
              ${fieldValueColumn},
              ${fieldTypeColumn},
              ${weightColumn},
              CASE WHEN lower(${fieldValueColumn}) LIKE lower(${generateSQLStringLiteral(searchValue + '%')}) THEN 1 ELSE 0 END as match_first
            FROM ${await connection.manifestTemporaryTable(sqlPDT)}
            WHERE lower(${fieldValueColumn}) LIKE lower(${generateSQLStringLiteral('%' + searchValue + '%')}) ${searchField !== undefined
            ? ` AND ${fieldNameColumn} = '` + searchField + "' \n"
            : ''}
            ORDER BY CASE WHEN lower(${fieldValueColumn}) LIKE lower(${generateSQLStringLiteral(searchValue + '%')}) THEN 1 ELSE 0 END DESC, ${weightColumn} DESC
            LIMIT ${limit}
          `;
        if (struct.dialect.hasFinalStage) {
            query = `WITH __stage0 AS(\n${query}\n)\n${struct.dialect.sqlFinalStage('__stage0', [
                fieldNameColumn,
                fieldPathColumn,
                fieldValueColumn,
                fieldTypeColumn,
                weightColumn,
                'match_first',
            ])}`;
        }
        const result = await connection.runSQL(query, {
            rowLimit: 1000,
        });
        return result.rows;
    }
}
|
|
3925
|
-
// Public module export: consumers construct a QueryModel from a ModelDef.
exports.QueryModel = QueryModel;
|
|
3926
|
-
//# sourceMappingURL=malloy_query.js.map
|