@malloydata/malloy 0.0.304 → 0.0.305
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dialect/dialect.d.ts +1 -1
- package/dist/dialect/duckdb/duckdb.d.ts +1 -1
- package/dist/dialect/duckdb/duckdb.js +2 -6
- package/dist/dialect/mysql/mysql.d.ts +1 -1
- package/dist/dialect/mysql/mysql.js +2 -6
- package/dist/dialect/postgres/postgres.d.ts +1 -1
- package/dist/dialect/postgres/postgres.js +2 -6
- package/dist/dialect/snowflake/snowflake.d.ts +1 -1
- package/dist/dialect/snowflake/snowflake.js +2 -5
- package/dist/dialect/standardsql/standardsql.d.ts +1 -1
- package/dist/dialect/standardsql/standardsql.js +2 -6
- package/dist/dialect/trino/trino.d.ts +1 -1
- package/dist/dialect/trino/trino.js +2 -6
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2 -3
- package/dist/lang/ast/expressions/expr-aggregate-function.js +12 -2
- package/dist/lang/ast/expressions/expr-count.js +3 -1
- package/dist/lang/ast/expressions/expr-func.js +34 -10
- package/dist/lang/ast/expressions/expr-props.js +1 -1
- package/dist/lang/ast/expressions/expr-ungroup.js +7 -3
- package/dist/lang/ast/expressions/function-ordering.d.ts +19 -5
- package/dist/lang/ast/expressions/function-ordering.js +61 -9
- package/dist/lang/ast/field-space/include-utils.js +1 -1
- package/dist/lang/ast/field-space/index-field-space.js +3 -1
- package/dist/lang/ast/field-space/query-spaces.js +20 -11
- package/dist/lang/ast/query-builders/index-builder.js +1 -1
- package/dist/lang/ast/query-builders/reduce-builder.js +1 -1
- package/dist/lang/ast/query-elements/query-arrow.js +14 -4
- package/dist/lang/ast/query-elements/query-base.d.ts +1 -0
- package/dist/lang/ast/query-elements/query-base.js +14 -4
- package/dist/lang/ast/query-elements/query-refine.js +2 -0
- package/dist/lang/ast/query-properties/drill.js +1 -1
- package/dist/lang/ast/source-properties/join.js +6 -2
- package/dist/lang/ast/statements/define-source.js +1 -1
- package/dist/lang/ast/types/expr-value.js +1 -1
- package/dist/lang/ast/view-elements/reference-view.js +4 -1
- package/dist/lang/ast/view-elements/refine-utils.js +1 -1
- package/dist/{model/composite_source_utils.d.ts → lang/composite-source-utils.d.ts} +4 -17
- package/dist/{model/composite_source_utils.js → lang/composite-source-utils.js} +274 -44
- package/dist/lang/test/parse-expects.d.ts +1 -1
- package/dist/lang/test/parse-expects.js +6 -2
- package/dist/lang/test/test-translator.js +1 -1
- package/dist/malloy.js +1 -1
- package/dist/model/expression_compiler.d.ts +27 -0
- package/dist/model/expression_compiler.js +780 -0
- package/dist/model/field_instance.d.ts +108 -0
- package/dist/model/field_instance.js +520 -0
- package/dist/model/index.d.ts +5 -1
- package/dist/model/index.js +25 -4
- package/dist/model/join_instance.d.ts +18 -0
- package/dist/model/join_instance.js +71 -0
- package/dist/model/malloy_types.d.ts +48 -2
- package/dist/model/malloy_types.js +39 -1
- package/dist/model/query_model.d.ts +2 -0
- package/dist/model/query_model.js +7 -0
- package/dist/model/query_model_contract.d.ts +32 -0
- package/dist/model/query_model_contract.js +7 -0
- package/dist/model/query_model_impl.d.ts +30 -0
- package/dist/model/query_model_impl.js +266 -0
- package/dist/model/query_node.d.ts +132 -0
- package/dist/model/query_node.js +638 -0
- package/dist/model/query_query.d.ts +86 -0
- package/dist/model/query_query.js +1724 -0
- package/dist/model/sql_block.js +2 -2
- package/dist/model/stage_writer.d.ts +25 -0
- package/dist/model/stage_writer.js +120 -0
- package/dist/model/utils.d.ts +18 -1
- package/dist/model/utils.js +66 -1
- package/dist/to_stable.js +3 -4
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +4 -4
- package/dist/model/malloy_query.d.ts +0 -391
- package/dist/model/malloy_query.js +0 -3926
|
@@ -0,0 +1,1724 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/*
|
|
3
|
+
* Copyright Contributors to the Malloy project
|
|
4
|
+
* SPDX-License-Identifier: MIT
|
|
5
|
+
*/
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
exports.QueryQuery = void 0;
|
|
8
|
+
const expression_compiler_1 = require("./expression_compiler");
|
|
9
|
+
const malloy_types_1 = require("./malloy_types");
|
|
10
|
+
const utils_1 = require("./utils");
|
|
11
|
+
const query_node_1 = require("./query_node");
|
|
12
|
+
const stage_writer_1 = require("./stage_writer");
|
|
13
|
+
const field_instance_1 = require("./field_instance");
|
|
14
|
+
const utils_2 = require("./materialization/utils");
|
|
15
|
+
/**
 * Flatten a field path into a single column name.
 *
 * Each path element is URI-escaped so that '/' can safely act as the
 * separator even when an element itself contains '/' or spaces.
 */
function pathToCol(path) {
    const encoded = [];
    for (const element of path) {
        encoded.push(encodeURIComponent(element));
    }
    return encoded.join('/');
}
|
|
18
|
+
/**
 * Append a dialect-field descriptor for `f` onto the list `dl`.
 *
 * Only atomic fields are representable as dialect fields; anything else
 * (structs, turtles) is silently skipped.
 */
function pushDialectField(dl, f) {
    if (!(0, malloy_types_1.isAtomic)(f.fieldDef)) {
        return;
    }
    dl.push({
        typeDef: f.fieldDef,
        sqlExpression: f.sqlExpression,
        sqlOutputName: f.sqlOutputName,
        rawName: f.rawName,
    });
}
|
|
24
|
+
/** Query builder object. */
|
|
25
|
+
class QueryQuery extends query_node_1.QueryField {
|
|
26
|
+
/**
 * @param fieldDef turtle (query) definition this object will compile
 * @param parent QueryStruct the query is run against
 * @param stageWriter collects generated SQL stages (may be undefined when
 *   only metadata, not SQL, is being produced)
 * @param isJoinedSubquery true when this query is generated as a joined
 *   subquery of an enclosing query
 * @param lookupStruct callback used to resolve a string structRef to a
 *   QueryStruct (see getStructSourceSQL)
 */
constructor(fieldDef, parent, stageWriter, isJoinedSubquery, lookupStruct) {
    super(fieldDef, parent);
    // Set once prepare() has run; guards against double preparation.
    this.prepared = false;
    this.maxDepth = 0;
    this.maxGroupSet = 0;
    this.fieldDef = fieldDef;
    // Root of the result-field tree built up during prepare().
    this.rootResult = new field_instance_1.FieldInstanceResultRoot(fieldDef);
    this.stageWriter = stageWriter;
    // do some magic here to get the first segment.
    this.firstSegment = fieldDef.pipeline[0];
    this.isJoinedSubquery = isJoinedSubquery;
    this.structRefToQueryStruct = lookupStruct;
}
|
|
39
|
+
/**
 * Factory: build the QueryQuery subclass matching the first pipeline
 * stage of `fieldDef` ('reduce', 'project', 'index', or 'raw').
 *
 * Also handles two pre-compilation rewrites:
 *  - a first stage with `extendSource` gets its extensions folded into a
 *    fresh parent QueryStruct, and the stage is rewritten without them;
 *  - a source-level queryTimezone is propagated onto the first stage when
 *    the stage does not set one itself.
 *
 * @throws when the first stage is 'partial' (not a runnable query)
 */
static makeQuery(fieldDef, parentStruct, stageWriter = undefined, isJoinedSubquery, lookupStruct) {
    let parent = parentStruct;
    let turtleWithFilters = parentStruct.applyStructFiltersToTurtleDef(fieldDef);
    const firstStage = turtleWithFilters.pipeline[0];
    const sourceDef = parentStruct.structDef;
    // if we are generating code
    // and have extended declaration, we need to make a new QueryStruct
    // copy the definitions into a new structdef
    // edit the declarations from the pipeline
    if (stageWriter !== undefined &&
        (0, malloy_types_1.isQuerySegment)(firstStage) &&
        firstStage.extendSource !== undefined) {
        parent = new query_node_1.QueryStruct({
            ...sourceDef,
            fields: [...sourceDef.fields, ...firstStage.extendSource],
        }, parentStruct.sourceArguments, parent.parent ? { struct: parent } : { model: parent.model }, parent.prepareResultOptions);
        // Rewrite the first stage without extendSource so it is not
        // applied a second time during compilation.
        turtleWithFilters = {
            ...turtleWithFilters,
            pipeline: [
                {
                    ...firstStage,
                    extendSource: undefined,
                },
                ...turtleWithFilters.pipeline.slice(1),
            ],
        };
    }
    // Inherit the source's query timezone unless the stage overrides it.
    if ((0, malloy_types_1.isSourceDef)(sourceDef) &&
        sourceDef.queryTimezone &&
        (0, malloy_types_1.isQuerySegment)(firstStage) &&
        firstStage.queryTimezone === undefined) {
        firstStage.queryTimezone = sourceDef.queryTimezone;
    }
    switch (firstStage.type) {
        case 'reduce':
            return new QueryQueryReduce(turtleWithFilters, parent, stageWriter, isJoinedSubquery, lookupStruct);
        case 'project':
            return new QueryQueryProject(turtleWithFilters, parent, stageWriter, isJoinedSubquery, lookupStruct);
        case 'index':
            return new QueryQueryIndex(turtleWithFilters, parent, stageWriter, isJoinedSubquery, lookupStruct);
        case 'raw':
            return new QueryQueryRaw(turtleWithFilters, parent, stageWriter, isJoinedSubquery, lookupStruct);
        case 'partial':
            throw new Error('Attempt to make query out of partial stage');
    }
}
|
|
85
|
+
inNestedPipeline() {
|
|
86
|
+
return this.parent.structDef.type === 'nest_source';
|
|
87
|
+
}
|
|
88
|
+
// get a field ref and expand it.
|
|
89
|
+
expandField(f) {
|
|
90
|
+
const field = f.type === 'fieldref'
|
|
91
|
+
? this.parent.getQueryFieldReference(f)
|
|
92
|
+
: this.parent.makeQueryField(f);
|
|
93
|
+
const as = field.getIdentifier();
|
|
94
|
+
return { as, field };
|
|
95
|
+
}
|
|
96
|
+
/**
 * Ensure every join along `path` is present in the query's join tree,
 * then register the final path target's joinable parent (optionally with
 * a unique-key requirement for symmetric aggregates).
 *
 * @param resultStruct result node whose root join tree is updated
 * @param context struct the path is resolved against
 * @param path field path; an empty path is a no-op
 * @param uniqueKeyRequirement passed through to addStructToJoin for the
 *   final target (undefined when no unique key is needed)
 */
addDependantPath(resultStruct, context, path, uniqueKeyRequirement) {
    if (path.length === 0) {
        return;
    }
    // Loop through path segments, ensuring each join exists
    let currentContext = context;
    for (const segment of path.slice(0, -1)) {
        // Try to get the field at this path segment
        let segmentField;
        try {
            segmentField = currentContext.getFieldByName([segment]);
        }
        catch {
            // Field doesn't exist, need to add the join
            // This is where we'd need to figure out how to create the missing join
            // Maybe we need more context about what join we're trying to add?
            throw new Error(`Cannot find join '${segment}' in ${path.join('.')} to add to query`);
        }
        if (segmentField instanceof query_node_1.QueryFieldStruct) {
            if ((0, malloy_types_1.isJoinedSource)(segmentField.fieldDef)) {
                resultStruct
                    .root()
                    .addStructToJoin(segmentField.queryStruct, undefined);
                currentContext = segmentField.queryStruct;
            }
            else {
                // Can't navigate deeper into non-joined sources like records
                break;
            }
        }
    }
    // Now handle the full path for the final dependency
    const node = context.getFieldByName(path);
    const joinableParent = node instanceof query_node_1.QueryFieldStruct
        ? node.queryStruct.getJoinableParent()
        : node.parent.getJoinableParent();
    resultStruct.root().addStructToJoin(joinableParent, uniqueKeyRequirement);
}
|
|
134
|
+
dependenciesFromFieldUsage(resultStruct) {
|
|
135
|
+
// Only QuerySegment and IndexSegment have fieldUsage, RawSegment does not
|
|
136
|
+
if (this.firstSegment.type === 'raw' ||
|
|
137
|
+
this.firstSegment.type === 'partial') {
|
|
138
|
+
throw new Error('QueryQuery attempt to load a raw or partial segment');
|
|
139
|
+
}
|
|
140
|
+
for (const joinUsage of this.firstSegment.activeJoins || []) {
|
|
141
|
+
this.addDependantPath(resultStruct, this.parent, joinUsage.path, undefined);
|
|
142
|
+
}
|
|
143
|
+
for (const usage of this.firstSegment.expandedFieldUsage || []) {
|
|
144
|
+
if (usage.analyticFunctionUse) {
|
|
145
|
+
resultStruct.root().queryUsesPartitioning = true;
|
|
146
|
+
// BigQuery-specific handling
|
|
147
|
+
if (this.parent.dialect.cantPartitionWindowFunctionsOnExpressions &&
|
|
148
|
+
resultStruct.firstSegment.type === 'reduce') {
|
|
149
|
+
// force the use of a lateral_join_bag
|
|
150
|
+
resultStruct.root().isComplexQuery = true;
|
|
151
|
+
resultStruct.root().queryUsesPartitioning = true;
|
|
152
|
+
}
|
|
153
|
+
continue;
|
|
154
|
+
}
|
|
155
|
+
if (usage.uniqueKeyRequirement) {
|
|
156
|
+
if (usage.path.length === 0) {
|
|
157
|
+
resultStruct.addStructToJoin(this.parent, usage.uniqueKeyRequirement);
|
|
158
|
+
}
|
|
159
|
+
else {
|
|
160
|
+
this.findRecordAliases(this.parent, usage.path);
|
|
161
|
+
this.addDependantPath(resultStruct, this.parent, usage.path, usage.uniqueKeyRequirement);
|
|
162
|
+
}
|
|
163
|
+
continue;
|
|
164
|
+
}
|
|
165
|
+
if (usage.path.length > 1) {
|
|
166
|
+
this.findRecordAliases(this.parent, usage.path);
|
|
167
|
+
}
|
|
168
|
+
}
|
|
169
|
+
const expandedUngroupings = 'expandedUngroupings' in this.firstSegment
|
|
170
|
+
? this.firstSegment.expandedUngroupings || []
|
|
171
|
+
: [];
|
|
172
|
+
for (const ungrouping of expandedUngroupings) {
|
|
173
|
+
resultStruct.root().isComplexQuery = true;
|
|
174
|
+
resultStruct.root().queryUsesPartitioning = true;
|
|
175
|
+
// Navigate to correct result struct using ungrouping's path
|
|
176
|
+
let destResult = resultStruct;
|
|
177
|
+
for (const pathSegment of ungrouping.path) {
|
|
178
|
+
const nextStruct = destResult.allFields.get(pathSegment);
|
|
179
|
+
if (!(nextStruct instanceof field_instance_1.FieldInstanceResult)) {
|
|
180
|
+
throw new Error(`Ungroup path ${ungrouping.path.join('.')} segment '${pathSegment}' is not a nested query`);
|
|
181
|
+
}
|
|
182
|
+
destResult = nextStruct;
|
|
183
|
+
}
|
|
184
|
+
destResult.resultUsesUngrouped = true;
|
|
185
|
+
if (ungrouping.refFields && ungrouping.refFields.length > 0) {
|
|
186
|
+
const refType = ungrouping.exclude ? 'exclude' : 'all';
|
|
187
|
+
const key = (0, utils_1.groupingKey)(refType, ungrouping.refFields);
|
|
188
|
+
if (destResult.ungroupedSets.get(key) === undefined) {
|
|
189
|
+
destResult.ungroupedSets.set(key, {
|
|
190
|
+
type: refType,
|
|
191
|
+
fields: ungrouping.refFields,
|
|
192
|
+
groupSet: -1,
|
|
193
|
+
});
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
destResult.resultUsesUngrouped = true;
|
|
197
|
+
if (ungrouping.refFields && ungrouping.refFields.length > 0) {
|
|
198
|
+
const refType = ungrouping.exclude ? 'exclude' : 'all';
|
|
199
|
+
const key = (0, utils_1.groupingKey)(refType, ungrouping.refFields);
|
|
200
|
+
if (destResult.ungroupedSets.get(key) === undefined) {
|
|
201
|
+
destResult.ungroupedSets.set(key, {
|
|
202
|
+
type: refType,
|
|
203
|
+
fields: ungrouping.refFields,
|
|
204
|
+
groupSet: -1,
|
|
205
|
+
});
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
/*
** Later on, when a record is referenced, the context needed to translate the
** reference won't exist, so we translate them all in prepare. The better fix
** involves understanding more about what a "translation state" is and how
** to create it at the moment when a field is referenced, but I couldn't do
** that at the time I did this work. TODO come back and do that.
*/
/**
 * Walk `path` from `context`, and for every record-typed struct along the
 * way that is defined by an expression, pre-compute the SQL for that
 * expression and hand it to the struct as its alias value.
 *
 * @throws when a path segment does not name a child field of the current
 *   context
 */
findRecordAliases(context, path) {
    for (const seg of path) {
        const field = context.getChildByName(seg);
        if (!field) {
            throw new Error('findRecordAliases: field not found: ' + path.join('.'));
        }
        if (field instanceof query_node_1.QueryFieldStruct) {
            const qs = field.queryStruct;
            if (qs.structDef.type === 'record' &&
                (0, malloy_types_1.hasExpression)(qs.structDef) &&
                qs.parent) {
                qs.informOfAliasValue((0, expression_compiler_1.exprToSQL)(this, this.rootResult, qs.parent, qs.structDef.e));
            }
            // Descend: subsequent segments resolve against this struct.
            context = qs;
        }
    }
}
|
|
234
|
+
getSegmentFields(resultStruct) {
|
|
235
|
+
const fs = resultStruct.firstSegment;
|
|
236
|
+
return fs.type === 'index'
|
|
237
|
+
? fs.indexFields
|
|
238
|
+
: (0, malloy_types_1.isQuerySegment)(fs)
|
|
239
|
+
? fs.queryFields
|
|
240
|
+
: [];
|
|
241
|
+
}
|
|
242
|
+
getDrillExpression(f) {
|
|
243
|
+
if ((0, malloy_types_1.isAtomic)(f) || f.type === 'fieldref')
|
|
244
|
+
return f.drillExpression;
|
|
245
|
+
return undefined;
|
|
246
|
+
}
|
|
247
|
+
/**
 * Populate `resultStruct` with a FieldInstance for every field of its
 * segment, recursing into nested queries. Also enforces segment-type
 * rules (no nested views or aggregates in a 'project' / select segment)
 * and flags dialect-specific window-function workarounds.
 */
expandFields(resultStruct) {
    // resultIndex is 1-based: it becomes the output column position.
    let resultIndex = 1;
    for (const f of this.getSegmentFields(resultStruct)) {
        const { as, field } = this.expandField(f);
        const drillExpression = this.getDrillExpression(f);
        if (field instanceof QueryQuery) {
            // A nested view: build a child result and expand recursively.
            if (this.firstSegment.type === 'project') {
                throw new Error(`Nested views cannot be used in select - '${field.fieldDef.name}'`);
            }
            const fir = new field_instance_1.FieldInstanceResult(field.fieldDef, resultStruct);
            this.expandFields(fir);
            resultStruct.add(as, fir);
        }
        else if (field instanceof query_node_1.QueryAtomicField) {
            resultStruct.addField(as, field, {
                resultIndex,
                type: 'result',
            }, drillExpression);
            // Dialects that cannot partition window functions on
            // expressions need the complex-query / partitioning path.
            if ((0, malloy_types_1.hasExpression)(field.fieldDef) &&
                (0, malloy_types_1.expressionIsAnalytic)(field.fieldDef.expressionType) &&
                this.parent.dialect.cantPartitionWindowFunctionsOnExpressions &&
                resultStruct.firstSegment.type === 'reduce') {
                resultStruct.root().isComplexQuery = true;
                resultStruct.root().queryUsesPartitioning = true;
            }
            if ((0, query_node_1.isBasicAggregate)(field)) {
                if (this.firstSegment.type === 'project') {
                    throw new Error(`Aggregate Fields cannot be used in select - '${field.fieldDef.name}'`);
                }
            }
        }
        else if (field instanceof query_node_1.QueryFieldStruct) {
            resultStruct.addField(as, field, {
                resultIndex,
                type: 'result',
            }, drillExpression);
        }
        resultIndex++;
    }
}
|
|
287
|
+
generateSQLFilters(resultStruct, which
|
|
288
|
+
// filterList: FilterCondition[] | undefined = undefined
|
|
289
|
+
) {
|
|
290
|
+
const resultFilters = new utils_1.AndChain();
|
|
291
|
+
const list = resultStruct.firstSegment.filterList;
|
|
292
|
+
if (list === undefined) {
|
|
293
|
+
return resultFilters;
|
|
294
|
+
}
|
|
295
|
+
// Go through the filters and make or find dependant fields
|
|
296
|
+
// add them to the field index. Place the individual filters
|
|
297
|
+
// in the correct catgory.
|
|
298
|
+
for (const cond of list || []) {
|
|
299
|
+
const context = this.parent;
|
|
300
|
+
if ((which === 'having' && (0, malloy_types_1.expressionIsCalculation)(cond.expressionType)) ||
|
|
301
|
+
(which === 'where' && (0, malloy_types_1.expressionIsScalar)(cond.expressionType))) {
|
|
302
|
+
const sqlClause = (0, expression_compiler_1.exprToSQL)(this, resultStruct, context, cond.e, undefined);
|
|
303
|
+
resultFilters.add(sqlClause);
|
|
304
|
+
}
|
|
305
|
+
}
|
|
306
|
+
return resultFilters;
|
|
307
|
+
}
|
|
308
|
+
prepare(_stageWriter) {
|
|
309
|
+
if (!this.prepared) {
|
|
310
|
+
// Add the root base join to the joins map
|
|
311
|
+
this.rootResult.addStructToJoin(this.parent, undefined);
|
|
312
|
+
// Expand fields (just adds them to result, no dependency tracking)
|
|
313
|
+
this.expandFields(this.rootResult);
|
|
314
|
+
// Process all dependencies from translator's fieldUsage
|
|
315
|
+
this.dependenciesFromFieldUsage(this.rootResult);
|
|
316
|
+
// Handle always joins
|
|
317
|
+
this.addAlwaysJoins(this.rootResult);
|
|
318
|
+
// Calculate symmetric aggregates based on the joins
|
|
319
|
+
this.rootResult.calculateSymmetricAggregates();
|
|
320
|
+
this.prepared = true;
|
|
321
|
+
}
|
|
322
|
+
}
|
|
323
|
+
findJoins(resultStruct) {
|
|
324
|
+
for (const dim of resultStruct.fields()) {
|
|
325
|
+
if (!(dim.f instanceof query_node_1.QueryFieldStruct)) {
|
|
326
|
+
resultStruct.addStructToJoin(dim.f.getJoinableParent(), undefined);
|
|
327
|
+
}
|
|
328
|
+
}
|
|
329
|
+
for (const s of resultStruct.structs()) {
|
|
330
|
+
this.findJoins(s);
|
|
331
|
+
}
|
|
332
|
+
}
|
|
333
|
+
addAlwaysJoins(rootResult) {
|
|
334
|
+
var _a;
|
|
335
|
+
const stage = this.fieldDef.pipeline[0];
|
|
336
|
+
if (stage.type !== 'raw') {
|
|
337
|
+
const alwaysJoins = (_a = stage.alwaysJoins) !== null && _a !== void 0 ? _a : [];
|
|
338
|
+
for (const joinName of alwaysJoins) {
|
|
339
|
+
const qs = this.parent.getChildByName(joinName);
|
|
340
|
+
if (qs instanceof query_node_1.QueryFieldStruct) {
|
|
341
|
+
rootResult.addStructToJoin(qs.queryStruct, undefined);
|
|
342
|
+
}
|
|
343
|
+
}
|
|
344
|
+
}
|
|
345
|
+
}
|
|
346
|
+
// get the source fieldname and filters associated with the field (so we can drill later)
/**
 * Build the ResultMetadata for a field or nested-result instance.
 *
 * For a FieldInstanceField used in the result: returns measure metadata
 * (with the field's filter list) for basic calculations, dimension
 * metadata for basic scalars, undefined otherwise.
 * For a FieldInstanceResult (a nest): returns struct metadata with
 * limit / orderBy / drillable derived from the last pipeline segment.
 * Returns undefined when no metadata applies.
 */
getResultMetadata(fi) {
    if (fi instanceof field_instance_1.FieldInstanceField) {
        if (fi.fieldUsage.type === 'result') {
            // const fieldDef = fi.f.fieldDef as AtomicField;
            const fieldDef = fi.f.fieldDef;
            let filterList;
            const sourceField = fi.f.parent.getFullOutputName() +
                (fieldDef.name || fieldDef.as || 'undefined');
            const sourceExpression = (0, malloy_types_1.hasExpression)(fieldDef)
                ? fieldDef.code
                : undefined;
            const sourceClasses = [sourceField];
            const referenceId = fi.f.referenceId;
            const drillExpression = fi.drillExpression;
            const base = {
                sourceField,
                sourceExpression,
                sourceClasses,
                referenceId,
                drillExpression,
            };
            if ((0, query_node_1.isBasicCalculation)(fi.f)) {
                filterList = fi.f.getFilterList();
                return {
                    ...base,
                    filterList,
                    fieldKind: 'measure',
                };
            }
            if ((0, query_node_1.isBasicScalar)(fi.f)) {
                return {
                    ...base,
                    // NOTE(review): filterList is never assigned on this
                    // path, so dimensions always report filterList as
                    // undefined — presumably intentional; confirm.
                    filterList,
                    fieldKind: 'dimension',
                };
            }
            else {
                return undefined;
            }
        }
        return undefined;
    }
    else if (fi instanceof field_instance_1.FieldInstanceResult) {
        const sourceField = fi.turtleDef.name || fi.turtleDef.as;
        const sourceClasses = sourceField ? [sourceField] : [];
        const filterList = fi.firstSegment.filterList;
        const lastSegment = fi.turtleDef.pipeline[fi.turtleDef.pipeline.length - 1];
        // Raw segments carry no limit.
        const limit = (0, malloy_types_1.isRawSegment)(lastSegment) ? undefined : lastSegment.limit;
        let orderBy = undefined;
        // Only single-stage query segments can be drilled into.
        const drillable = (0, malloy_types_1.isQuerySegment)(lastSegment) && fi.turtleDef.pipeline.length === 1;
        if ((0, malloy_types_1.isQuerySegment)(lastSegment)) {
            orderBy = lastSegment.orderBy;
        }
        if (sourceField) {
            return {
                sourceField,
                filterList,
                sourceClasses,
                fieldKind: 'struct',
                limit,
                orderBy,
                drillable,
            };
        }
    }
    return undefined;
}
|
|
414
|
+
/** returns a fields and primary key of a struct for this query */
/**
 * Assemble the output StructDef describing this query's result columns.
 *
 * Nested results become 'array' (repeated) or 'record' (single-row)
 * fields depending on their repeatedResultType; atomic result fields are
 * emitted by type, with compiled-away expression internals (e, code,
 * expressionType) stripped. When the root level has exactly one scalar
 * dimension, that dimension becomes the primaryKey.
 *
 * @param resultStruct level being assembled (defaults to the root)
 * @param isRoot true only for the top-level call; primaryKey detection
 *   applies at the root only
 * @throws on an unrecognized atomic field type
 */
getResultStructDef(resultStruct = this.rootResult, isRoot = true) {
    const fields = [];
    let primaryKey;
    // Ensure the field/join trees exist before reading them.
    this.prepare(undefined);
    let dimCount = 0;
    for (const [name, fi] of resultStruct.allFields) {
        const resultMetadata = this.getResultMetadata(fi);
        if (fi instanceof field_instance_1.FieldInstanceResult) {
            // A nested query: compile its pipeline (against a throwaway
            // stage writer) just to learn its output shape.
            const { structDef, repeatedResultType } = this.generateTurtlePipelineSQL(fi, new stage_writer_1.StageWriter(true, undefined), '<nosource>');
            if (repeatedResultType === 'nested') {
                const multiLineNest = {
                    ...structDef,
                    type: 'array',
                    elementTypeDef: { type: 'record_element' },
                    join: 'many',
                    name,
                    resultMetadata,
                };
                fields.push(multiLineNest);
            }
            else {
                const oneLineNest = {
                    ...structDef,
                    type: 'record',
                    join: 'one',
                    name,
                    resultMetadata,
                };
                fields.push(oneLineNest);
            }
        }
        else if (fi instanceof field_instance_1.FieldInstanceField) {
            if (fi.fieldUsage.type === 'result') {
                // if there is only one dimension, it is the primaryKey
                // if there are more, primaryKey is undefined.
                if ((0, query_node_1.isBasicScalar)(fi.f)) {
                    if (dimCount === 0 && isRoot) {
                        primaryKey = name;
                    }
                    else {
                        primaryKey = undefined;
                    }
                    dimCount++;
                }
                // Remove computations because they are all resolved
                let fOut = fi.f.fieldDef;
                if ((0, malloy_types_1.hasExpression)(fOut)) {
                    fOut = { ...fOut };
                    // "as" because delete needs the property to be optional
                    delete fOut.e;
                    delete fOut.code;
                    delete fOut.expressionType;
                }
                const location = fOut.location;
                const annotation = fOut.annotation;
                const common = {
                    resultMetadata,
                    location,
                    annotation,
                };
                // build out the result fields...
                switch (fOut.type) {
                    case 'boolean':
                    case 'json':
                    case 'string':
                        fields.push({
                            name,
                            type: fOut.type,
                            ...common,
                        });
                        break;
                    case 'date':
                    case 'timestamp': {
                        const timeframe = fOut.timeframe;
                        const fd = { type: fOut.type };
                        if (timeframe) {
                            fd.timeframe = timeframe;
                        }
                        fields.push({
                            name,
                            ...fd,
                            ...common,
                        });
                        break;
                    }
                    case 'number':
                        fields.push({
                            name,
                            numberType: fOut.numberType,
                            type: 'number',
                            ...common,
                        });
                        break;
                    case 'sql native':
                    case 'record':
                    case 'array': {
                        fields.push({ ...fOut, ...common });
                        break;
                    }
                    default:
                        throw new Error(`unknown Field Type in query ${JSON.stringify(fOut)}`);
                }
            }
        }
    }
    const outputStruct = {
        type: 'query_result',
        name: this.resultStage || 'result',
        fields,
        dialect: this.parent.dialect.name,
        primaryKey,
        connection: this.parent.connectionName,
        resultMetadata: this.getResultMetadata(this.rootResult),
        queryTimezone: resultStruct.getQueryInfo().queryTimezone,
    };
    if (this.parent.structDef.modelAnnotation) {
        outputStruct.modelAnnotation = this.parent.structDef.modelAnnotation;
    }
    return outputStruct;
}
|
|
535
|
+
getStructSourceSQL(qs, stageWriter) {
|
|
536
|
+
var _a, _b, _c;
|
|
537
|
+
switch (qs.structDef.type) {
|
|
538
|
+
case 'table':
|
|
539
|
+
return this.parent.dialect.quoteTablePath(qs.structDef.tablePath);
|
|
540
|
+
case 'composite':
|
|
541
|
+
// TODO: throw an error here; not simple because we call into this
|
|
542
|
+
// code currently before the composite source is resolved in some cases
|
|
543
|
+
return '{COMPOSITE SOURCE}';
|
|
544
|
+
case 'finalize':
|
|
545
|
+
return qs.structDef.name;
|
|
546
|
+
case 'sql_select':
|
|
547
|
+
return `(${qs.structDef.selectStr})`;
|
|
548
|
+
case 'nest_source':
|
|
549
|
+
return qs.structDef.pipeSQL;
|
|
550
|
+
case 'query_source': {
|
|
551
|
+
// cache derived table.
|
|
552
|
+
if (((_a = qs.prepareResultOptions) === null || _a === void 0 ? void 0 : _a.replaceMaterializedReferences) &&
|
|
553
|
+
(0, utils_2.shouldMaterialize)(qs.structDef.query.annotation)) {
|
|
554
|
+
return stageWriter.addMaterializedQuery((0, malloy_types_1.getIdentifier)(qs.structDef), qs.structDef.query, (_b = qs.prepareResultOptions) === null || _b === void 0 ? void 0 : _b.materializedTablePrefix);
|
|
555
|
+
}
|
|
556
|
+
else {
|
|
557
|
+
// Inline what loadQuery does, circularity workaround, finds the
|
|
558
|
+
// the name of the last stage
|
|
559
|
+
const query = qs.structDef.query;
|
|
560
|
+
const turtleDef = {
|
|
561
|
+
type: 'turtle',
|
|
562
|
+
name: 'ignoreme',
|
|
563
|
+
pipeline: query.pipeline,
|
|
564
|
+
filterList: query.filterList,
|
|
565
|
+
};
|
|
566
|
+
const structRef = (_c = query.compositeResolvedSourceDef) !== null && _c !== void 0 ? _c : query.structRef;
|
|
567
|
+
let sourceStruct;
|
|
568
|
+
if (typeof structRef === 'string') {
|
|
569
|
+
const struct = this.structRefToQueryStruct(structRef);
|
|
570
|
+
if (!struct) {
|
|
571
|
+
throw new Error(`Unexpected reference to an undefined source '${structRef}'`);
|
|
572
|
+
}
|
|
573
|
+
sourceStruct = struct;
|
|
574
|
+
}
|
|
575
|
+
else {
|
|
576
|
+
sourceStruct = new query_node_1.QueryStruct(structRef, query.sourceArguments, { model: this.parent.getModel() }, qs.prepareResultOptions);
|
|
577
|
+
}
|
|
578
|
+
const q = QueryQuery.makeQuery(turtleDef, sourceStruct, stageWriter, qs.parent !== undefined, // isJoinedSubquery
|
|
579
|
+
this.structRefToQueryStruct);
|
|
580
|
+
const ret = q.generateSQLFromPipeline(stageWriter);
|
|
581
|
+
return ret.lastStageName;
|
|
582
|
+
}
|
|
583
|
+
}
|
|
584
|
+
default:
|
|
585
|
+
throw new Error(`Cannot create SQL StageWriter from '${(0, malloy_types_1.getIdentifier)(qs.structDef)}' type '${qs.structDef.type}`);
|
|
586
|
+
}
|
|
587
|
+
}
|
|
588
|
+
generateSQLJoinBlock(stageWriter, ji, depth) {
|
|
589
|
+
var _a;
|
|
590
|
+
let s = '';
|
|
591
|
+
const qs = ji.queryStruct;
|
|
592
|
+
const qsDef = qs.structDef;
|
|
593
|
+
(_a = qs.eventStream) === null || _a === void 0 ? void 0 : _a.emit('join-used', { name: (0, malloy_types_1.getIdentifier)(qsDef) });
|
|
594
|
+
qs.maybeEmitParameterizedSourceUsage();
|
|
595
|
+
if ((0, malloy_types_1.isJoinedSource)(qsDef)) {
|
|
596
|
+
let structSQL = this.getStructSourceSQL(qs, stageWriter);
|
|
597
|
+
const matrixOperation = (qsDef.matrixOperation || 'left').toUpperCase();
|
|
598
|
+
if (!this.parent.dialect.supportsFullJoin && matrixOperation === 'FULL') {
|
|
599
|
+
throw new Error('FULL JOIN not supported');
|
|
600
|
+
}
|
|
601
|
+
if (ji.makeUniqueKey) {
|
|
602
|
+
const passKeys = this.generateSQLPassthroughKeys(qs);
|
|
603
|
+
structSQL = `(SELECT ${qs.dialect.sqlGenerateUUID()} as ${qs.dialect.sqlMaybeQuoteIdentifier('__distinct_key')}, x.* ${passKeys} FROM ${structSQL} as x)`;
|
|
604
|
+
}
|
|
605
|
+
let onCondition = '';
|
|
606
|
+
if (qs.parent === undefined) {
|
|
607
|
+
throw new Error('Expected joined struct to have a parent.');
|
|
608
|
+
}
|
|
609
|
+
if (qsDef.onExpression) {
|
|
610
|
+
// Create a temporary field instance to generate the SQL
|
|
611
|
+
const boolField = new query_node_1.QueryFieldBoolean({
|
|
612
|
+
type: 'boolean',
|
|
613
|
+
name: 'ignoreme',
|
|
614
|
+
e: qsDef.onExpression,
|
|
615
|
+
}, qs.parent);
|
|
616
|
+
const tempInstance = new field_instance_1.FieldInstanceField(boolField, { type: 'where' }, // It's used in a WHERE-like context
|
|
617
|
+
this.rootResult, undefined);
|
|
618
|
+
onCondition = tempInstance.generateExpression();
|
|
619
|
+
}
|
|
620
|
+
else {
|
|
621
|
+
onCondition = '1=1';
|
|
622
|
+
}
|
|
623
|
+
let filters = '';
|
|
624
|
+
let conditions = undefined;
|
|
625
|
+
if (ji.joinFilterConditions) {
|
|
626
|
+
conditions = ji.joinFilterConditions.map(qf => {
|
|
627
|
+
const tempInstance = new field_instance_1.FieldInstanceField(qf, { type: 'where' }, this.rootResult, undefined);
|
|
628
|
+
return tempInstance.generateExpression();
|
|
629
|
+
});
|
|
630
|
+
}
|
|
631
|
+
if (ji.children.length === 0 ||
|
|
632
|
+
conditions === undefined ||
|
|
633
|
+
!this.parent.dialect.supportsComplexFilteredSources) {
|
|
634
|
+
// LTNOTE: need a check here to see the children's where: conditions are local
|
|
635
|
+
// to the source and not to any of it's joined children.
|
|
636
|
+
// In Presto, we're going to get a SQL error if in this case
|
|
637
|
+
// for now. We need to inspect the 'condition' of each of the children
|
|
638
|
+
// to see if they reference subchildren and blow up if they do
|
|
639
|
+
// or move them to the where clause with a (x.distnct_key is NULL or (condition))
|
|
640
|
+
//
|
|
641
|
+
// const childrenFiltersAreComplex = somethign(conditions)
|
|
642
|
+
// if (conditions && childrenFiltersAreComplex !this.parent.dialect.supportsComplexFilteredSources) {
|
|
643
|
+
// throw new Error(
|
|
644
|
+
// 'Cannot join a source with a complex filter on a joined source'
|
|
645
|
+
// );
|
|
646
|
+
// }
|
|
647
|
+
if (conditions !== undefined && conditions.length >= 1) {
|
|
648
|
+
filters = ` AND (${conditions.join(' AND ')})`;
|
|
649
|
+
}
|
|
650
|
+
s += ` ${matrixOperation} JOIN ${structSQL} AS ${ji.alias}\n ON ${onCondition}${filters}\n`;
|
|
651
|
+
}
|
|
652
|
+
else {
|
|
653
|
+
let select = `SELECT ${ji.alias}.*`;
|
|
654
|
+
let joins = '';
|
|
655
|
+
for (const childJoin of ji.children) {
|
|
656
|
+
joins += this.generateSQLJoinBlock(stageWriter, childJoin, depth + 1);
|
|
657
|
+
select += `, ${this.parent.dialect.sqlSelectAliasAsStruct(childJoin.alias, (0, utils_1.getDialectFieldList)(childJoin.queryStruct.structDef))} AS ${childJoin.alias}`;
|
|
658
|
+
}
|
|
659
|
+
select += `\nFROM ${structSQL} AS ${ji.alias}\n${joins}\nWHERE ${conditions === null || conditions === void 0 ? void 0 : conditions.join(' AND ')}\n`;
|
|
660
|
+
s += `${matrixOperation} JOIN (\n${(0, utils_1.indent)(select)}) AS ${ji.alias}\n ON ${onCondition}\n`;
|
|
661
|
+
return s;
|
|
662
|
+
}
|
|
663
|
+
}
|
|
664
|
+
else if (qsDef.type === 'array') {
|
|
665
|
+
if (qs.parent === undefined || ji.parent === undefined) {
|
|
666
|
+
throw new Error('Internal Error, nested structure with no parent.');
|
|
667
|
+
}
|
|
668
|
+
// We need an SQL expression which results in the array for us to pass to un-nest
|
|
669
|
+
let arrayExpression;
|
|
670
|
+
if ((0, malloy_types_1.hasExpression)(qsDef)) {
|
|
671
|
+
// If this array is NOT contained in the parent, but a computed entity
|
|
672
|
+
// then the thing we are joining is not "parent.childName", but
|
|
673
|
+
// the expression which is built in that namespace
|
|
674
|
+
arrayExpression = (0, expression_compiler_1.exprToSQL)(this, this.rootResult, qs.parent, qsDef.e);
|
|
675
|
+
}
|
|
676
|
+
else {
|
|
677
|
+
// If this is a reference through an expression at the top level,
|
|
678
|
+
// need to generate the expression because the expression is written
|
|
679
|
+
// in the top level, this call is being used to generate the join.
|
|
680
|
+
// Below the top level, the expression will have been written into
|
|
681
|
+
// a join at the top level, and the name will exist.
|
|
682
|
+
// ... not sure this is the right way to do this
|
|
683
|
+
// ... the test for this is called "source repeated record containing an array"
|
|
684
|
+
arrayExpression = (0, field_instance_1.sqlFullChildReference)(qs.parent, qsDef.name, depth === 0 ? { result: this.rootResult, field: this } : undefined);
|
|
685
|
+
}
|
|
686
|
+
// we need to generate primary key. If parent has a primary key combine
|
|
687
|
+
// console.log(ji.alias, fieldExpression, this.inNestedPipeline());
|
|
688
|
+
s += `${this.parent.dialect.sqlUnnestAlias(arrayExpression, ji.alias, ji.getDialectFieldList(), ji.makeUniqueKey, (0, malloy_types_1.isBasicArray)(qsDef), this.inNestedPipeline())}\n`;
|
|
689
|
+
}
|
|
690
|
+
else if (qsDef.type === 'record') {
|
|
691
|
+
throw new Error('Internal Error: records should never appear in join trees');
|
|
692
|
+
}
|
|
693
|
+
else {
|
|
694
|
+
throw new Error(`Join type not implemented ${qs.structDef.type}`);
|
|
695
|
+
}
|
|
696
|
+
for (const childJoin of ji.children) {
|
|
697
|
+
s += this.generateSQLJoinBlock(stageWriter, childJoin, depth + 1);
|
|
698
|
+
}
|
|
699
|
+
return s;
|
|
700
|
+
}
|
|
701
|
+
// BigQuery has wildcard psudo columns that are treated differently
|
|
702
|
+
// SELECT * FROM xxx doesn't include these psuedo columns but we need them so
|
|
703
|
+
// filters can get pushed down properly when generating a UNIQUE key.
|
|
704
|
+
// No other dialect really needs this so we are coding here but maybe someday
|
|
705
|
+
// this makes its way into the dialect.
|
|
706
|
+
generateSQLPassthroughKeys(qs) {
|
|
707
|
+
let ret = '';
|
|
708
|
+
if (qs.dialect.name === 'standardsql') {
|
|
709
|
+
const psudoCols = [
|
|
710
|
+
'_TABLE_SUFFIX',
|
|
711
|
+
'_PARTITIONDATE',
|
|
712
|
+
'_PARTITIONTIME',
|
|
713
|
+
].filter(element => qs.getChildByName(element) !== undefined);
|
|
714
|
+
if (psudoCols.length > 0) {
|
|
715
|
+
ret = ', ' + psudoCols.join(', ');
|
|
716
|
+
}
|
|
717
|
+
}
|
|
718
|
+
return ret;
|
|
719
|
+
}
|
|
720
|
+
generateSQLJoins(stageWriter) {
|
|
721
|
+
let s = '';
|
|
722
|
+
// get the first value from the map (weird, I know)
|
|
723
|
+
const [[, ji]] = this.rootResult.joins;
|
|
724
|
+
const qs = ji.queryStruct;
|
|
725
|
+
// Joins
|
|
726
|
+
let structSQL = this.getStructSourceSQL(qs, stageWriter);
|
|
727
|
+
if ((0, malloy_types_1.isIndexSegment)(this.firstSegment)) {
|
|
728
|
+
structSQL = this.parent.dialect.sqlSampleTable(structSQL, this.firstSegment.sample);
|
|
729
|
+
if (this.firstSegment.sample) {
|
|
730
|
+
structSQL = stageWriter.addStage(`SELECT * from ${structSQL} as x limit 100000 `);
|
|
731
|
+
}
|
|
732
|
+
}
|
|
733
|
+
if ((0, malloy_types_1.isBaseTable)(qs.structDef)) {
|
|
734
|
+
if (ji.makeUniqueKey) {
|
|
735
|
+
const passKeys = this.generateSQLPassthroughKeys(qs);
|
|
736
|
+
structSQL = `(SELECT ${qs.dialect.sqlGenerateUUID()} as ${qs.dialect.sqlMaybeQuoteIdentifier('__distinct_key')}, x.* ${passKeys} FROM ${structSQL} as x)`;
|
|
737
|
+
}
|
|
738
|
+
s += `FROM ${structSQL} as ${ji.alias}\n`;
|
|
739
|
+
}
|
|
740
|
+
else {
|
|
741
|
+
throw new Error('Internal Error, queries must start from a basetable');
|
|
742
|
+
}
|
|
743
|
+
for (const childJoin of ji.children) {
|
|
744
|
+
s += this.generateSQLJoinBlock(stageWriter, childJoin, 0);
|
|
745
|
+
}
|
|
746
|
+
return s;
|
|
747
|
+
}
|
|
748
|
+
// Generate the ORDER BY clause for a stage, or '' when ordering is
// unnecessary. Fields may be referenced by name (string) or by 1-based
// output position (number); the dialect's `orderByClause` mode selects
// whether we emit ordinals, quoted output names, or full expressions.
// (NOTE: the misspelled name "genereate..." is the established interface
// and is called elsewhere in this file, so it is kept as-is.)
genereateSQLOrderBy(queryDef, resultStruct) {
    let s = '';
    if (this.firstSegment.type === 'project' && !queryDef.orderBy) {
        return ''; // No default ordering for project.
    }
    // Intermediate results (in a pipeline or join) that have no limit, don't need an orderby
    // Some database don't have this optimization.
    if (this.fieldDef.pipeline.length > 1 && queryDef.limit === undefined) {
        return '';
    }
    // ignore orderby if all aggregates.
    if (resultStruct.getRepeatedResultType() === 'inline_all_numbers') {
        return '';
    }
    // if we are in the last stage of a query and the query is a subquery
    // and has no limit, ORDER BY is superfluous
    if (this.isJoinedSubquery &&
        this.fieldDef.pipeline.length === 1 &&
        queryDef.limit === undefined) {
        return '';
    }
    // Fall back to the result's default ordering when none was written.
    const orderBy = queryDef.orderBy || resultStruct.calculateDefaultOrderBy();
    const o = [];
    for (const f of orderBy) {
        if (typeof f.field === 'string') {
            // convert name to an index
            const fi = resultStruct.getField(f.field);
            // Only fields that appear in the result can be ordered by name.
            if (fi && fi.fieldUsage.type === 'result') {
                if (this.parent.dialect.orderByClause === 'ordinal') {
                    o.push(`${fi.fieldUsage.resultIndex} ${f.dir || 'ASC'}`);
                }
                else if (this.parent.dialect.orderByClause === 'output_name') {
                    o.push(`${this.parent.dialect.sqlMaybeQuoteIdentifier(f.field)} ${f.dir || 'ASC'}`);
                }
                else if (this.parent.dialect.orderByClause === 'expression') {
                    const fieldExpr = fi.getSQL();
                    o.push(`${fieldExpr} ${f.dir || 'ASC'}`);
                }
            }
            else {
                throw new Error(`Unknown field in ORDER BY ${f.field}`);
            }
        }
        else {
            // Numeric reference: f.field is the output column position.
            if (this.parent.dialect.orderByClause === 'ordinal') {
                o.push(`${f.field} ${f.dir || 'ASC'}`);
            }
            else if (this.parent.dialect.orderByClause === 'output_name') {
                const orderingField = resultStruct.getFieldByNumber(f.field);
                o.push(`${this.parent.dialect.sqlMaybeQuoteIdentifier(orderingField.name)} ${f.dir || 'ASC'}`);
            }
            else if (this.parent.dialect.orderByClause === 'expression') {
                const orderingField = resultStruct.getFieldByNumber(f.field);
                const fieldExpr = orderingField.fif.getSQL();
                o.push(`${fieldExpr} ${f.dir || 'ASC'}`);
            }
        }
    }
    if (o.length > 0) {
        s = this.parent.dialect.sqlOrderBy(o, 'query') + '\n';
    }
    return s;
}
|
|
811
|
+
// Generate a single flat SELECT stage for queries with no nested
// (turtle) results: SELECT fields, FROM/joins, WHERE, GROUP BY (for
// reduce segments), HAVING, ORDER BY, LIMIT — assembled strictly in
// that clause order. Registers the stage with the stage writer and
// returns (and caches) the stage name.
generateSimpleSQL(stageWriter) {
    let s = '';
    s += 'SELECT \n';
    const fields = [];
    for (const [name, field] of this.rootResult.allFields) {
        const fi = field;
        const sqlName = this.parent.dialect.sqlMaybeQuoteIdentifier(name);
        // Only fields marked for the result are projected.
        if (fi.fieldUsage.type === 'result') {
            fields.push(` ${fi.generateExpression()} as ${sqlName}`);
        }
    }
    s += (0, utils_1.indent)(fields.join(',\n')) + '\n';
    s += this.generateSQLJoins(stageWriter);
    s += this.generateSQLFilters(this.rootResult, 'where').sql('where');
    // group by
    if (this.firstSegment.type === 'reduce') {
        // GROUP BY the ordinal position of every scalar result field.
        const n = [];
        for (const field of this.rootResult.fields()) {
            const fi = field;
            if (fi.fieldUsage.type === 'result' && (0, query_node_1.isScalarField)(fi.f)) {
                n.push(fi.fieldUsage.resultIndex.toString());
            }
        }
        if (n.length > 0) {
            s += `GROUP BY ${n.join(',')}\n`;
        }
    }
    s += this.generateSQLFilters(this.rootResult, 'having').sql('having');
    // order by
    s += this.genereateSQLOrderBy(this.firstSegment, this.rootResult);
    // limit
    if (!(0, malloy_types_1.isRawSegment)(this.firstSegment) && this.firstSegment.limit) {
        s += `LIMIT ${this.firstSegment.limit}\n`;
    }
    this.resultStage = stageWriter.addStage(s);
    return this.resultStage;
}
|
|
848
|
+
// This probably should be generated in a dialect independat way.
|
|
849
|
+
// but for now, it is just googleSQL.
|
|
850
|
+
generatePipelinedStages(outputPipelinedSQL, lastStageName, stageWriter) {
|
|
851
|
+
if (outputPipelinedSQL.length === 0) {
|
|
852
|
+
return lastStageName;
|
|
853
|
+
}
|
|
854
|
+
let retSQL;
|
|
855
|
+
if (this.parent.dialect.supportsSelectReplace) {
|
|
856
|
+
const pipelinesSQL = outputPipelinedSQL
|
|
857
|
+
.map(o => `${o.pipelineSQL} as ${o.sqlFieldName}`)
|
|
858
|
+
.join(',\n');
|
|
859
|
+
retSQL = `SELECT * replace (${pipelinesSQL}) FROM ${lastStageName}
|
|
860
|
+
`;
|
|
861
|
+
}
|
|
862
|
+
else {
|
|
863
|
+
const pipelinesSQL = outputPipelinedSQL
|
|
864
|
+
.map(o => `${o.pipelineSQL} as ${o.sqlFieldName}`)
|
|
865
|
+
.join(',\n');
|
|
866
|
+
const outputFields = outputPipelinedSQL.map(f => f.sqlFieldName);
|
|
867
|
+
const allFields = Array.from(this.rootResult.allFields.keys()).map(f => this.parent.dialect.sqlMaybeQuoteIdentifier(f));
|
|
868
|
+
const fields = allFields.filter(f => outputFields.indexOf(f) === -1);
|
|
869
|
+
retSQL = `SELECT ${fields.length > 0 ? fields.join(', ') + ',' : ''} ${pipelinesSQL} FROM ${lastStageName}`;
|
|
870
|
+
}
|
|
871
|
+
return stageWriter.addStage(retSQL);
|
|
872
|
+
}
|
|
873
|
+
// Emit the stage-0 SELECT expressions for one result set (and,
// recursively, its nested reduce results) into `output`. Scalar fields
// are emitted first so dimension ordinals precede other columns.
// Column names are suffixed with "__<groupSet>" to keep each nesting
// level's fields distinct. Also converts this level's HAVING into
// either a root-level having (leaf) or a "__delete__" marker column.
generateStage0Fields(resultSet, output, stageWriter) {
    const scalarFields = [];
    const otherFields = [];
    // Partition the fields so scalars come first in the output.
    for (const [name, fi] of resultSet.allFields) {
        if (fi instanceof field_instance_1.FieldInstanceField && (0, query_node_1.isScalarField)(fi.f)) {
            scalarFields.push([name, fi]);
        }
        else {
            otherFields.push([name, fi]);
        }
    }
    const orderedFields = [...scalarFields, ...otherFields];
    for (const [name, fi] of orderedFields) {
        const outputName = this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultSet.groupSet}`);
        if (fi instanceof field_instance_1.FieldInstanceField) {
            if (fi.fieldUsage.type === 'result') {
                const exp = fi.getSQL();
                if ((0, query_node_1.isScalarField)(fi.f)) {
                    if (this.parent.dialect.cantPartitionWindowFunctionsOnExpressions &&
                        this.rootResult.queryUsesPartitioning &&
                        resultSet.firstSegment.type === 'reduce') {
                        // BigQuery can't partition aggregate function except when the field has no
                        // expression. Additionally it can't partition by floats. We stuff expressions
                        // and numbers as strings into a lateral join when the query has ungrouped expressions
                        const outputFieldName = `__lateral_join_bag.${outputName}`;
                        fi.analyticalSQL = outputFieldName;
                        output.lateralJoinSQLExpressions.push(`${exp} as ${outputName}`);
                        output.sql.push(outputFieldName);
                        // Numbers additionally get a string-cast twin to partition by.
                        if (fi.f.fieldDef.type === 'number') {
                            const outputNameString = this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultSet.groupSet}_string`);
                            const outputFieldNameString = `__lateral_join_bag.${outputNameString}`;
                            output.sql.push(outputFieldNameString);
                            output.dimensionIndexes.push(output.fieldIndex++);
                            output.lateralJoinSQLExpressions.push(`CAST(${exp} as STRING) as ${outputNameString}`);
                            fi.partitionSQL = outputFieldNameString;
                        }
                    }
                    else {
                        // just treat it like a regular field.
                        output.sql.push(`${exp} as ${outputName}`);
                    }
                    // Scalars are dimensions: record the ordinal for GROUP BY.
                    output.dimensionIndexes.push(output.fieldIndex++);
                }
                else if ((0, query_node_1.isBasicCalculation)(fi.f)) {
                    // Aggregates are projected but not grouped.
                    output.sql.push(`${exp} as ${outputName}`);
                    output.fieldIndex++;
                }
            }
        }
        else if (fi instanceof field_instance_1.FieldInstanceResult) {
            if (fi.firstSegment.type === 'reduce') {
                // Nested reduce: flatten its fields into this same stage.
                this.generateStage0Fields(fi, output, stageWriter);
            }
            else if (fi.firstSegment.type === 'project') {
                // Nested project: emit as a turtle sub-select expression.
                const s = this.generateTurtleSQL(fi, stageWriter, outputName, output.outputPipelinedSQL);
                output.sql.push(`${s} as ${outputName}`);
                output.fieldIndex++;
            }
        }
    }
    // LTNOTE: we could optimize here in the future.
    // leaf turtles can have their having clauses in the main query
    // turtles with leaves need to promote their state to their
    // children.
    const having = this.generateSQLFilters(resultSet, 'having');
    if (!having.empty()) {
        // if we have no children, the having can run at the root level
        if (resultSet.childGroups.length === 1) {
            resultSet
                .root()
                .havings.add(`(group_set<>${resultSet.groupSet} OR (group_set=${resultSet.groupSet} AND ${having.sql()}))`);
        }
        else {
            // Otherwise mark rows failing the having with a __delete__ flag
            // column (0 = keep, 1 = delete); a later stage removes them.
            resultSet.hasHaving = true;
            output.sql.push(`CASE WHEN group_set=${resultSet.groupSet} THEN CASE WHEN ${having.sql()} THEN 0 ELSE 1 END END as __delete__${resultSet.groupSet}`);
            output.fieldIndex++;
        }
    }
}
|
|
952
|
+
generateSQLWhereChildren(resultStruct) {
|
|
953
|
+
const wheres = new utils_1.AndChain();
|
|
954
|
+
for (const [, field] of resultStruct.allFields) {
|
|
955
|
+
if (field.type === 'query') {
|
|
956
|
+
const fir = field;
|
|
957
|
+
const turtleWhere = this.generateSQLFilters(fir, 'where');
|
|
958
|
+
if (turtleWhere.present()) {
|
|
959
|
+
const groupSets = fir.childGroups.join(',');
|
|
960
|
+
wheres.add(`(group_set NOT IN (${groupSets})` +
|
|
961
|
+
` OR (group_set IN (${groupSets}) AND ${turtleWhere.sql()}))`);
|
|
962
|
+
}
|
|
963
|
+
wheres.addChain(this.generateSQLWhereChildren(fir));
|
|
964
|
+
}
|
|
965
|
+
}
|
|
966
|
+
return wheres;
|
|
967
|
+
}
|
|
968
|
+
generateSQLWhereTurtled() {
|
|
969
|
+
const wheres = this.generateSQLFilters(this.rootResult, 'where');
|
|
970
|
+
wheres.addChain(this.generateSQLWhereChildren(this.rootResult));
|
|
971
|
+
return wheres.sql('where');
|
|
972
|
+
}
|
|
973
|
+
// iterate over the nested queries looking for Havings and Limits
//
// Think of the result graph as a tree.
//
// Havings in leaves have already been removed in the stage0 queries we are only concerned with
//. having in nodes with children
//
// First step is to generate the partition and order by code for each of the relevent groupsets
//
// Next we compute rows that are over the order by limit. Nodes with children are additionally
// partitioned by having.
//
// Scan the parent for children. If there are any nodes that need to be deleted, note them.
//
// Finally remove any node either over the limit or part of a parent's having.
//
// Takes the current last stage name; may add up to three more stages
// (row numbering, having-flag propagation, row deletion) and returns
// the final stage name.
generateSQLHavingLimit(stageWriter, lastStageName) {
    const havingFields = [];
    const limitExpressions = [];
    // The next three arrays are sparse, indexed by groupSet number.
    const limitValues = [];
    const limitComplexClauses = [];
    const limitSimpleFilters = [];
    const partitionSQL = [];
    let hasAnyLimits = false;
    let hasResultsWithChildren = false;
    const resultsWithHavingOrLimit = this.rootResult.selectStructs([], (result) => result.hasHaving || result.getLimit() !== undefined);
    if (resultsWithHavingOrLimit.length > 0) {
        // loop through an generate the partitions
        for (const result of this.rootResult.selectStructs([], (_result) => true)) {
            const hasLimit = result.getLimit() !== undefined;
            hasResultsWithChildren || (hasResultsWithChildren = result.childGroups.length > 1 && (hasLimit || result.hasHaving));
            hasAnyLimits || (hasAnyLimits = hasLimit);
            // find all the parent dimension names.
            const dimensions = [];
            let r = result;
            while (r) {
                for (const name of r.fieldNames(fi => (0, query_node_1.isScalarField)(fi.f))) {
                    dimensions.push(this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${r.groupSet}`));
                }
                r = r.parent;
            }
            let partition = '';
            if (dimensions.length > 0) {
                partition = 'PARTITION BY ';
                partition += dimensions
                    .map(this.parent.dialect.castToString)
                    .join(',');
            }
            partitionSQL[result.groupSet] = partition;
        }
        for (const result of resultsWithHavingOrLimit) {
            const limit = result.getLimit();
            // if we have a limit
            if (limit) {
                limitValues[result.groupSet] = limit;
                const obSQL = [];
                let orderingField;
                const orderByDef = result.firstSegment.orderBy ||
                    result.calculateDefaultOrderBy();
                for (const ordering of orderByDef) {
                    if (typeof ordering.field === 'string') {
                        orderingField = {
                            name: ordering.field,
                            fif: result.getField(ordering.field),
                        };
                    }
                    else {
                        orderingField = result.getFieldByNumber(ordering.field);
                    }
                    obSQL.push(' ' +
                        this.parent.dialect.sqlMaybeQuoteIdentifier(`${orderingField.name}__${result.groupSet}`) +
                        ` ${ordering.dir || 'ASC'}`);
                    // partition for a row number is the parent if it exists.
                    let p = '';
                    if (result.parent && partitionSQL[result.parent.groupSet]) {
                        p = partitionSQL[result.parent.groupSet] + ', group_set';
                    }
                    else {
                        p = 'PARTITION BY group_set';
                    }
                    // if this has nested data and a having, we want to partion by the 'having' so we don't count
                    // deleted rows.
                    if (result.hasHaving) {
                        p = p + `, __delete__${result.groupSet}`;
                    }
                    // NOTE(review): this push happens inside the ordering-field
                    // loop, so a multi-key ORDER BY appears to emit one
                    // row-number expression per key — confirm against upstream
                    // source whether it belongs after the loop.
                    limitExpressions.push(`CASE WHEN GROUP_SET=${result.groupSet} THEN
ROW_NUMBER() OVER (${p} ORDER BY ${obSQL.join(',')}) END as __row_number__${result.groupSet}`);
                    // if the group set is a leaf, we can write a simple where clause.
                    const filterClause = `(GROUP_SET = ${result.groupSet} AND __row_number__${result.groupSet} > ${limitValues[result.groupSet]})`;
                    if (result.childGroups.length === 1) {
                        limitSimpleFilters.push(filterClause);
                    }
                    else {
                        // its a complex
                        limitComplexClauses[result.groupSet] = `CASE WHEN ${filterClause} THEN 1 ELSE 0 END`;
                    }
                }
            }
        }
    }
    // generate over_limit flag
    if (resultsWithHavingOrLimit.length > 0) {
        if (limitExpressions.length > 0) {
            if (hasAnyLimits) {
                // Stage: compute the row numbers used to enforce limits.
                lastStageName = stageWriter.addStage(`SELECT\n *,\n ${limitExpressions.join(',\n')} \nFROM ${lastStageName}\n`);
            }
        }
        let simpleLimits = '1=1';
        if (limitSimpleFilters.length > 0) {
            simpleLimits = ` NOT (${limitSimpleFilters.join('\n OR ')})`;
        }
        if (hasAnyLimits && !hasResultsWithChildren) {
            // Leaf-only limits: a single filtering stage suffices.
            lastStageName = stageWriter.addStage(`SELECT * FROM ${lastStageName}\n WHERE ${simpleLimits}\n`);
        }
        else if (hasResultsWithChildren) {
            // we may or my not have any limits
            const havings = new utils_1.AndChain();
            for (const result of resultsWithHavingOrLimit) {
                const testKey = [];
                // parent group
                if (result.hasHaving && result.childGroups.length > 1) {
                    testKey.push(`__delete__${result.groupSet}`);
                }
                // limit
                if (limitComplexClauses[result.groupSet]) {
                    testKey.push(limitComplexClauses[result.groupSet]);
                }
                if (testKey.length > 0 && result.childGroups.length > 1) {
                    // Propagate the delete decision across all child group sets
                    // via a windowed MAX over the parent partition.
                    havingFields.push(`MAX(CASE WHEN group_set IN (${result.childGroups.join(',')}) THEN ${testKey.join(' + ')}
END) OVER(${partitionSQL[result.groupSet]}) as __shaving__${result.groupSet}`);
                    havings.add(`group_set IN (${result.childGroups.join(',')}) AND __shaving__${result.groupSet} > 0`);
                }
            }
            // Stage: attach __shaving__ flags; Stage: drop flagged rows.
            lastStageName = stageWriter.addStage(`SELECT\n *,\n ${havingFields.join(',\n ')} \nFROM ${lastStageName} WHERE ${simpleLimits}\n`);
            lastStageName = stageWriter.addStage(`SELECT *\nFROM ${lastStageName}\nWHERE NOT (${havings.sqlOr()})\n`);
        }
    }
    return lastStageName;
}
|
|
1112
|
+
// Generate the first stage of a turtled (nested-result) query: a
// group-set cross join over the source, with one output column per
// field per nesting level, followed by the having/limit stages and any
// pipelined-field stages. Returns (and caches) the final stage name.
generateSQLStage0(stageWriter) {
    let s = 'SELECT\n';
    let from = this.generateSQLJoins(stageWriter);
    const wheres = this.generateSQLWhereTurtled();
    // Accumulator for the stage's output; group_set occupies ordinal 1.
    const f = {
        dimensionIndexes: [1],
        fieldIndex: 2,
        sql: ['group_set'],
        lateralJoinSQLExpressions: [],
        groupsAggregated: [],
        outputPipelinedSQL: [],
    };
    this.generateStage0Fields(this.rootResult, f, stageWriter);
    if (this.firstSegment.type === 'project' &&
        !this.parent.modelCompilerFlags().has('unsafe_complex_select_query')) {
        throw new Error('PROJECT cannot be used on queries with turtles');
    }
    const groupBy = 'GROUP BY ' + f.dimensionIndexes.join(',') + '\n';
    // Cross join with the group-set table to fan out one copy of each
    // row per group set.
    from += this.parent.dialect.sqlGroupSetTable(this.maxGroupSet) + '\n';
    s += (0, utils_1.indent)(f.sql.join(',\n')) + '\n';
    // this should only happen on standard SQL, BigQuery can't partition by expressions and
    // aggregates.
    if (f.lateralJoinSQLExpressions.length > 0) {
        from += `LEFT JOIN UNNEST([STRUCT(${f.lateralJoinSQLExpressions.join(',\n')})]) as __lateral_join_bag\n`;
    }
    s += from + wheres + groupBy + this.rootResult.havings.sql('having');
    // generate the stage
    const resultStage = stageWriter.addStage(s);
    // generate stages for havings and limits
    this.resultStage = this.generateSQLHavingLimit(stageWriter, resultStage);
    this.resultStage = this.generatePipelinedStages(f.outputPipelinedSQL, this.resultStage, stageWriter);
    return this.resultStage;
}
|
|
1145
|
+
// Emit the SELECT expressions for the aggregation stage at nesting
// `depth`: scalars stay as grouped dimensions, aggregates are carried
// forward, and nested results exactly at `depth` are rolled up into
// turtle expressions. When any roll-up happens, output.sql[0] is
// rewritten into a CASE that remaps the rolled-up group sets onto
// their parent group set.
generateDepthNFields(depth, resultSet, output, stageWriter) {
    const groupsToMap = [];
    for (const [name, fi] of resultSet.allFields) {
        const sqlFieldName = this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultSet.groupSet}`);
        if (fi instanceof field_instance_1.FieldInstanceField) {
            if (fi.fieldUsage.type === 'result') {
                if ((0, query_node_1.isScalarField)(fi.f)) {
                    // Dimension: keep it, guarded by this level's group sets.
                    const exp = (0, utils_1.caseGroup)(resultSet.groupSet > 0 ? resultSet.childGroups : [], sqlFieldName);
                    output.sql.push(`${exp} as ${sqlFieldName}`);
                    output.dimensionIndexes.push(output.fieldIndex++);
                }
                else if ((0, query_node_1.isBasicCalculation)(fi.f)) {
                    // Aggregate: carry the already-computed value forward.
                    const exp = this.parent.dialect.sqlAnyValue(resultSet.groupSet, sqlFieldName);
                    output.sql.push(`${exp} as ${sqlFieldName}`);
                    output.fieldIndex++;
                }
            }
        }
        else if (fi instanceof field_instance_1.FieldInstanceResult) {
            if (fi.depth > depth) {
                // ignore it, we've already dealt with it.
            }
            else if (fi.depth === depth) {
                // Roll this nested result up into a turtle expression and
                // remember its group-set remapping.
                const s = this.generateTurtleSQL(fi, stageWriter, sqlFieldName, output.outputPipelinedSQL);
                output.groupsAggregated.push({
                    fromGroup: fi.groupSet,
                    toGroup: resultSet.groupSet,
                });
                groupsToMap.push(fi.groupSet);
                output.sql.push(`${s} as ${sqlFieldName}`);
                output.fieldIndex++;
            }
            else {
                // Shallower nesting: recurse to find results at `depth`.
                this.generateDepthNFields(depth, fi, output, stageWriter);
            }
        }
    }
    // Rewrite the group_set column (always output.sql[0]) so rolled-up
    // group sets collapse onto their parent group set.
    if (output.groupsAggregated.length > 0) {
        output.sql[0] = 'CASE ';
        for (const m of output.groupsAggregated) {
            output.sql[0] += `WHEN group_set=${m.fromGroup} THEN ${m.toGroup} `;
        }
        output.sql[0] += 'ELSE group_set END as group_set';
    }
}
|
|
1190
|
+
generateSQLDepthN(depth, stageWriter, stageName) {
|
|
1191
|
+
let s = 'SELECT \n';
|
|
1192
|
+
const f = {
|
|
1193
|
+
dimensionIndexes: [1],
|
|
1194
|
+
fieldIndex: 2,
|
|
1195
|
+
sql: ['group_set'],
|
|
1196
|
+
lateralJoinSQLExpressions: [],
|
|
1197
|
+
groupsAggregated: [],
|
|
1198
|
+
outputPipelinedSQL: [],
|
|
1199
|
+
};
|
|
1200
|
+
this.generateDepthNFields(depth, this.rootResult, f, stageWriter);
|
|
1201
|
+
s += (0, utils_1.indent)(f.sql.join(',\n')) + '\n';
|
|
1202
|
+
s += `FROM ${stageName}\n`;
|
|
1203
|
+
const where = this.rootResult.eliminateComputeGroupsSQL();
|
|
1204
|
+
if (where.length > 0) {
|
|
1205
|
+
s += `WHERE ${where}\n`;
|
|
1206
|
+
}
|
|
1207
|
+
if (f.dimensionIndexes.length > 0) {
|
|
1208
|
+
s += `GROUP BY ${f.dimensionIndexes.join(',')}\n`;
|
|
1209
|
+
}
|
|
1210
|
+
this.resultStage = stageWriter.addStage(s);
|
|
1211
|
+
this.resultStage = this.generatePipelinedStages(f.outputPipelinedSQL, this.resultStage, stageWriter);
|
|
1212
|
+
return this.resultStage;
|
|
1213
|
+
}
|
|
1214
|
+
// Generate the final stage of a turtled query: collapse the stage-0
// group-set rows into one row per root dimension combination, turning
// each nested result into a turtle column. Applies the root-level
// ORDER BY and LIMIT. Returns (and caches) the final stage name.
// (NOTE: the misspelled name "genereate..." is the established
// interface and is kept as-is.)
genereateSQLCombineTurtles(stageWriter, stage0Name) {
    let s = 'SELECT\n';
    const fieldsSQL = [];
    let fieldIndex = 1;
    const outputPipelinedSQL = [];
    const dimensionIndexes = [];
    for (const [name, fi] of this.rootResult.allFields) {
        const sqlName = this.parent.dialect.sqlMaybeQuoteIdentifier(name);
        if (fi instanceof field_instance_1.FieldInstanceField) {
            if (fi.fieldUsage.type === 'result') {
                if ((0, query_node_1.isScalarField)(fi.f)) {
                    // Root dimension: rename the stage-0 "__<groupSet>" column
                    // back to its user-facing name and group by it.
                    fieldsSQL.push(this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${this.rootResult.groupSet}`) + ` as ${sqlName}`);
                    dimensionIndexes.push(fieldIndex++);
                }
                else if ((0, query_node_1.isBasicCalculation)(fi.f)) {
                    // Root aggregate: pick its value from the root group set.
                    fieldsSQL.push(this.parent.dialect.sqlAnyValueLastTurtle(this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${this.rootResult.groupSet}`), this.rootResult.groupSet, sqlName));
                    fieldIndex++;
                }
            }
        }
        else if (fi instanceof field_instance_1.FieldInstanceResult) {
            if (fi.firstSegment.type === 'reduce') {
                // Nested reduce: aggregate its rows into a turtle column.
                fieldsSQL.push(`${this.generateTurtleSQL(fi, stageWriter, sqlName, outputPipelinedSQL)} as ${sqlName}`);
                fieldIndex++;
            }
            else if (fi.firstSegment.type === 'project') {
                // Nested project was already materialized in stage 0.
                fieldsSQL.push(this.parent.dialect.sqlAnyValueLastTurtle(this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${this.rootResult.groupSet}`), this.rootResult.groupSet, sqlName));
                fieldIndex++;
            }
        }
    }
    s += (0, utils_1.indent)(fieldsSQL.join(',\n')) + `\nFROM ${stage0Name}\n`;
    // Drop rows from group sets that only existed to compute values.
    const where = this.rootResult.eliminateComputeGroupsSQL();
    if (where.length > 0) {
        s += `WHERE ${where}\n`;
    }
    if (dimensionIndexes.length > 0) {
        s += `GROUP BY ${dimensionIndexes.join(',')}\n`;
    }
    // order by
    s += this.genereateSQLOrderBy(this.firstSegment, this.rootResult);
    // limit
    if (!(0, malloy_types_1.isRawSegment)(this.firstSegment) && this.firstSegment.limit) {
        s += `LIMIT ${this.firstSegment.limit}\n`;
    }
    this.resultStage = stageWriter.addStage(s);
    this.resultStage = this.generatePipelinedStages(outputPipelinedSQL, this.resultStage, stageWriter);
    return this.resultStage;
}
|
|
1263
|
+
// create a simplified version of the StructDef for dialects.
// Walks every output field of `resultStruct` and flattens it into the
// "dialect field list" shape that dialect implementations consume
// (typeDef/fieldDef + sqlExpression + rawName + sqlOutputName).
// Only fields in `reduce` or `project` segments contribute entries.
buildDialectFieldList(resultStruct) {
    const dialectFieldList = [];
    for (const [name, field] of resultStruct.allFields) {
        const sqlName = this.parent.dialect.sqlMaybeQuoteIdentifier(name);
        //
        if (resultStruct.firstSegment.type === 'reduce' &&
            field instanceof field_instance_1.FieldInstanceResult) {
            // Nested query (turtle) inside a reduce: run the turtle pipeline
            // against a throwaway StageWriter just to learn its output
            // structDef and whether the result repeats; the generated SQL
            // itself is discarded here ('<nosource>' placeholder source).
            const { structDef, repeatedResultType } = this.generateTurtlePipelineSQL(field, new stage_writer_1.StageWriter(true, undefined), '<nosource>');
            if (repeatedResultType === 'nested') {
                // Repeated result: expose as an array of records.
                const multiLineNest = {
                    ...structDef,
                    type: 'array',
                    elementTypeDef: { type: 'record_element' },
                    join: 'many',
                    name,
                };
                dialectFieldList.push({
                    typeDef: multiLineNest,
                    sqlExpression: this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultStruct.groupSet}`),
                    rawName: name,
                    sqlOutputName: sqlName,
                });
            }
            else {
                // Non-repeated (inline) result: expose as a single record.
                const oneLineNest = {
                    ...structDef,
                    type: 'record',
                    join: 'one',
                    name,
                };
                dialectFieldList.push({
                    typeDef: oneLineNest,
                    sqlExpression: this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultStruct.groupSet}`),
                    rawName: name,
                    sqlOutputName: sqlName,
                });
            }
        }
        else if (resultStruct.firstSegment.type === 'reduce' &&
            field instanceof field_instance_1.FieldInstanceField &&
            field.fieldUsage.type === 'result') {
            // Scalar result field in a reduce: reference the group-set-suffixed
            // column computed by an earlier stage.
            pushDialectField(dialectFieldList, {
                fieldDef: field.f.fieldDef,
                sqlExpression: this.parent.dialect.sqlMaybeQuoteIdentifier(`${name}__${resultStruct.groupSet}`),
                rawName: name,
                sqlOutputName: sqlName,
            });
        }
        else if (resultStruct.firstSegment.type === 'project' &&
            field instanceof field_instance_1.FieldInstanceField &&
            field.fieldUsage.type === 'result') {
            // Project segments compute the expression inline rather than
            // reading a pre-computed grouped column.
            pushDialectField(dialectFieldList, {
                fieldDef: field.f.fieldDef,
                sqlExpression: field.generateExpression(),
                rawName: name,
                sqlOutputName: sqlName,
            });
        }
    }
    return dialectFieldList;
}
|
|
1325
|
+
// Generate the SQL expression that materializes one nested query (turtle)
// as a single aggregated value, dispatching to the dialect for the exact
// nesting construct. If the turtle itself is a multi-stage pipeline, the
// later stages are compiled into a correlated subquery (or deferred into
// `outputPipelinedSQL` for dialects without unnest-array-agg support).
generateTurtleSQL(resultStruct, stageWriter, sqlFieldName, outputPipelinedSQL) {
    // let fieldsSQL: string[] = [];
    let orderBy = '';
    // calculate the ordering.
    const obSQL = [];
    let orderingField;
    // Use the explicit ORDER BY if present, otherwise the default ordering.
    const orderByDef = resultStruct.firstSegment.orderBy ||
        resultStruct.calculateDefaultOrderBy();
    for (const ordering of orderByDef) {
        // Orderings reference a field either by name or by 1-based position.
        if (typeof ordering.field === 'string') {
            orderingField = {
                name: ordering.field,
                fif: resultStruct.getField(ordering.field),
            };
        }
        else {
            orderingField = resultStruct.getFieldByNumber(ordering.field);
        }
        if (resultStruct.firstSegment.type === 'reduce') {
            // Reduce: order by the group-set-suffixed column name.
            obSQL.push(' ' +
                this.parent.dialect.sqlMaybeQuoteIdentifier(`${orderingField.name}__${resultStruct.groupSet}`) +
                ` ${ordering.dir || 'ASC'}`);
        }
        else if (resultStruct.firstSegment.type === 'project') {
            // Project: order by the generated expression itself.
            obSQL.push(` ${orderingField.fif.generateExpression()} ${ordering.dir || 'ASC'}`);
        }
    }
    if (obSQL.length > 0) {
        orderBy = ' ' + this.parent.dialect.sqlOrderBy(obSQL, 'turtle');
    }
    const dialectFieldList = this.buildDialectFieldList(resultStruct);
    let resultType;
    let ret;
    // Pick the dialect construct by repeated-result type: repeated ('nested')
    // results aggregate into an array; inline results collapse to one row.
    if ((resultType = resultStruct.getRepeatedResultType()) !== 'nested') {
        if (resultType === 'inline_all_numbers') {
            ret = this.parent.dialect.sqlCoaleseMeasuresInline(resultStruct.groupSet, dialectFieldList);
        }
        else {
            ret = this.parent.dialect.sqlAnyValueTurtle(resultStruct.groupSet, dialectFieldList);
        }
    }
    else {
        ret = this.parent.dialect.sqlAggregateTurtle(resultStruct.groupSet, dialectFieldList, orderBy);
    }
    // If the turtle is a pipeline, generate a UDF to compute it.
    const newStageWriter = new stage_writer_1.StageWriter(this.parent.dialect.supportsCTEinCoorelatedSubQueries, stageWriter);
    const { structDef, pipeOut } = this.generateTurtlePipelineSQL(resultStruct, newStageWriter, this.parent.dialect.supportUnnestArrayAgg ? ret : sqlFieldName);
    // if there was a pipeline.
    if (pipeOut !== undefined) {
        const sql = newStageWriter.generateCoorelatedSubQuery(this.parent.dialect, structDef);
        if (this.parent.dialect.supportUnnestArrayAgg) {
            // The correlated subquery can replace the aggregate expression directly.
            ret = `(${sql})`;
        }
        else {
            // Otherwise defer: caller stitches this in as a later pipelined stage.
            outputPipelinedSQL.push({
                sqlFieldName,
                pipelineSQL: `(${sql})`,
            });
        }
    }
    return ret;
    // return `${aggregateFunction}(CASE WHEN group_set=${
    //   resultStruct.groupSet
    // } THEN STRUCT(${fieldsSQL.join(",\n")}) END${tailSQL})`;
}
|
|
1390
|
+
// Compile the tail of a turtle's pipeline (every segment after the first)
// into SQL, feeding it from `sourceSQLExpression` via the dialect's
// unnest-pipeline head. Returns the (possibly updated) output structDef,
// the pipeline compilation result (undefined when the turtle has a single
// segment), and the final repeated-result type.
generateTurtlePipelineSQL(fi, stageWriter, sourceSQLExpression) {
    let structDef = this.getResultStructDef(fi, false);
    const repeatedResultType = fi.getRepeatedResultType();
    // Only pipelines with more than one segment need extra compilation.
    const hasPipeline = fi.turtleDef.pipeline.length > 1;
    let pipeOut;
    let outputRepeatedResultType = repeatedResultType;
    if (hasPipeline) {
        // Drop the first segment (already computed) and wrap the rest in a
        // synthetic turtle rooted at a 'nest_source' struct that reads the
        // unnested first-stage output.
        const pipeline = [...fi.turtleDef.pipeline];
        pipeline.shift();
        const newTurtle = {
            type: 'turtle',
            name: 'starthere',
            pipeline,
        };
        const inputStruct = {
            type: 'nest_source',
            name: '~pipe~',
            pipeSQL: this.parent.dialect.sqlUnnestPipelineHead(repeatedResultType === 'inline_all_numbers', sourceSQLExpression, (0, utils_1.getDialectFieldList)(structDef)),
            fields: structDef.fields,
            connection: structDef.connection,
            dialect: structDef.dialect,
        };
        const qs = new query_node_1.QueryStruct(inputStruct, undefined, { model: this.parent.getModel() }, this.parent.prepareResultOptions);
        const q = QueryQuery.makeQuery(newTurtle, qs, stageWriter, this.isJoinedSubquery, this.structRefToQueryStruct);
        pipeOut = q.generateSQLFromPipeline(stageWriter);
        // The tail stages may change whether the result repeats.
        outputRepeatedResultType = q.rootResult.getRepeatedResultType();
        // console.log(stageWriter.generateSQLStages());
        structDef = pipeOut.outputStruct;
    }
    // Carry the turtle's annotation onto whichever structDef we return.
    structDef.annotation = fi.turtleDef.annotation;
    return {
        structDef,
        pipeOut,
        repeatedResultType: outputRepeatedResultType,
    };
}
|
|
1426
|
+
generateComplexSQL(stageWriter) {
|
|
1427
|
+
let stageName = this.generateSQLStage0(stageWriter);
|
|
1428
|
+
if (this.maxDepth > 1) {
|
|
1429
|
+
let i = this.maxDepth;
|
|
1430
|
+
while (i > 1) {
|
|
1431
|
+
stageName = this.generateSQLDepthN(i, stageWriter, stageName);
|
|
1432
|
+
i--;
|
|
1433
|
+
}
|
|
1434
|
+
}
|
|
1435
|
+
// nest the turtles.
|
|
1436
|
+
return this.genereateSQLCombineTurtles(stageWriter, stageName);
|
|
1437
|
+
}
|
|
1438
|
+
generateSQL(stageWriter) {
|
|
1439
|
+
var _a;
|
|
1440
|
+
const r = this.rootResult.computeGroups(0, 0);
|
|
1441
|
+
this.maxDepth = r.maxDepth;
|
|
1442
|
+
this.maxGroupSet = r.nextGroupSetNumber - 1;
|
|
1443
|
+
this.rootResult.assignFieldsToGroups();
|
|
1444
|
+
(_a = this.rootResult).isComplexQuery || (_a.isComplexQuery = this.maxDepth > 0 || r.isComplex);
|
|
1445
|
+
if (this.rootResult.isComplexQuery) {
|
|
1446
|
+
return this.generateComplexSQL(stageWriter);
|
|
1447
|
+
}
|
|
1448
|
+
else {
|
|
1449
|
+
return this.generateSimpleSQL(stageWriter);
|
|
1450
|
+
}
|
|
1451
|
+
}
|
|
1452
|
+
// Generate SQL for this query's full pipeline. The first segment is this
// query itself; each subsequent segment is compiled as a fresh QueryQuery
// reading the previous stage's output (a 'finalize' struct named after
// that stage). Returns the final stage name and its output structDef.
generateSQLFromPipeline(stageWriter) {
    this.parent.maybeEmitParameterizedSourceUsage();
    this.prepare(stageWriter);
    let lastStageName = this.generateSQL(stageWriter);
    let outputStruct = this.getResultStructDef();
    const pipeline = [...this.fieldDef.pipeline];
    if (pipeline.length > 1) {
        // console.log(pretty(outputStruct));
        // Wrap the stage we just produced as the input for the next segment.
        let structDef = {
            ...outputStruct,
            name: lastStageName,
            type: 'finalize',
        };
        // First segment already compiled above; walk the remainder.
        pipeline.shift();
        for (const transform of pipeline) {
            // Resolve against the parent struct when joined, otherwise the model.
            const parent = this.parent.parent
                ? { struct: this.parent.parent }
                : { model: this.parent.getModel() };
            const s = new query_node_1.QueryStruct(structDef, undefined, parent, this.parent.prepareResultOptions);
            const q = QueryQuery.makeQuery({ type: 'turtle', name: '~computeLastStage~', pipeline: [transform] }, s, stageWriter, this.isJoinedSubquery, this.structRefToQueryStruct);
            q.prepare(stageWriter);
            lastStageName = q.generateSQL(stageWriter);
            outputStruct = q.getResultStructDef();
            // Re-wrap for the next iteration.
            structDef = {
                ...outputStruct,
                name: lastStageName,
                type: 'finalize',
            };
        }
    }
    return { lastStageName, outputStruct };
}
|
|
1484
|
+
}
|
|
1485
|
+
// Export the base query compiler class defined above.
exports.QueryQuery = QueryQuery;
// wildcards have been expanded
// nested repeated fields are safe to use.
|
|
1488
|
+
// One stage of an index query: emits rows of
// (fieldName, fieldPath, fieldType, fieldValue, weight) for a set of
// index fields, using a group-set CASE per field.
class QueryQueryIndexStage extends QueryQuery {
    // NOTE(review): `zz` is passed straight through to the base class where
    // sibling call sites name it `structRefToQueryStruct` — confirm.
    constructor(fieldDef, parent, stageWriter, isJoinedSubquery, zz) {
        super(fieldDef, parent, stageWriter, isJoinedSubquery, zz);
        // Maps each output alias (dotted path) back to its reference path.
        this.indexPaths = {};
        this.fieldDef = fieldDef;
    }
    // Resolve one index-field reference into its output alias and query field.
    expandField(f) {
        const as = f.path.join('.');
        const field = this.parent.getQueryFieldByName(f.path);
        return { as, field };
    }
    // Register every index field (and the optional weight measure) as a
    // result field, recording alias -> path in `indexPaths`.
    expandFields(resultStruct) {
        let resultIndex = 1;
        const groupIndex = resultStruct.groupSet;
        this.maxGroupSet = groupIndex;
        for (const f of this.firstSegment.indexFields) {
            const { as, field } = this.expandField(f);
            const referencePath = f.path;
            this.indexPaths[as] = referencePath;
            resultStruct.addField(as, field, {
                resultIndex,
                type: 'result',
            }, undefined);
            resultIndex++;
        }
        const measure = this.firstSegment.weightMeasure;
        if (measure !== undefined) {
            const f = this.parent.getFieldByName([measure]);
            resultStruct.addField(measure, f, {
                resultIndex,
                type: 'result',
            }, undefined);
        }
    }
    // Build the two SQL stages of the index: stage one computes per-field
    // values/ranges keyed by group_set; stage two coalesces value and range
    // into the final fieldValue column.
    generateSQL(stageWriter) {
        // Weight defaults to a row count unless a weight measure was given.
        let measureSQL = 'COUNT(*)';
        const dialect = this.parent.dialect;
        const fieldNameColumn = dialect.sqlMaybeQuoteIdentifier('fieldName');
        const fieldPathColumn = dialect.sqlMaybeQuoteIdentifier('fieldPath');
        const fieldValueColumn = dialect.sqlMaybeQuoteIdentifier('fieldValue');
        const fieldTypeColumn = dialect.sqlMaybeQuoteIdentifier('fieldType');
        const fieldRangeColumn = dialect.sqlMaybeQuoteIdentifier('fieldRange');
        const weightColumn = dialect.sqlMaybeQuoteIdentifier('weight');
        const measureName = this.firstSegment.weightMeasure;
        if (measureName) {
            measureSQL = this.rootResult.getField(measureName).generateExpression();
        }
        // Collect every scalar result field; its index doubles as its group_set.
        const fields = [];
        for (const [name, field] of this.rootResult.allFields) {
            const fi = field;
            if (fi.fieldUsage.type === 'result' && (0, query_node_1.isScalarField)(fi.f)) {
                const expression = fi.generateExpression();
                const path = this.indexPaths[name] || [];
                fields.push({ name, path, type: fi.f.fieldDef.type, expression });
            }
        }
        let s = 'SELECT\n group_set,\n';
        // Field name per group set.
        s += ' CASE group_set\n';
        for (let i = 0; i < fields.length; i++) {
            s += ` WHEN ${i} THEN '${fields[i].name}'\n`;
        }
        s += ` END as ${fieldNameColumn},\n`;
        // Field path per group set.
        s += ' CASE group_set\n';
        for (let i = 0; i < fields.length; i++) {
            const path = pathToCol(fields[i].path);
            s += ` WHEN ${i} THEN '${path}'\n`;
        }
        s += ` END as ${fieldPathColumn},\n`;
        // Field type per group set.
        s += ' CASE group_set\n';
        for (let i = 0; i < fields.length; i++) {
            s += ` WHEN ${i} THEN '${fields[i].type}'\n`;
        }
        s += ` END as ${fieldTypeColumn},`;
        // String fields carry their value; the 99999 arm forces a string type.
        s += ` CASE group_set WHEN 99999 THEN ${dialect.castToString('NULL')}\n`;
        for (let i = 0; i < fields.length; i++) {
            if (fields[i].type === 'string') {
                s += ` WHEN ${i} THEN ${fields[i].expression}\n`;
            }
        }
        s += ` END as ${fieldValueColumn},\n`;
        s += ` ${measureSQL} as ${weightColumn},\n`;
        // just in case we don't have any field types, force the case statement to have at least one value.
        s += " CASE group_set\n WHEN 99999 THEN ''";
        for (let i = 0; i < fields.length; i++) {
            if (fields[i].type === 'number') {
                s += ` WHEN ${i} THEN ${dialect.concat(`MIN(${dialect.castToString(fields[i].expression)})`, "' to '", dialect.castToString(`MAX(${fields[i].expression})`))}\n`;
            }
            if (fields[i].type === 'timestamp' || fields[i].type === 'date') {
                s += ` WHEN ${i} THEN ${dialect.concat(`MIN(${dialect.sqlDateToString(fields[i].expression)})`, "' to '", `MAX(${dialect.sqlDateToString(fields[i].expression)})`)}\n`;
            }
        }
        s += ` END as ${fieldRangeColumn}\n`;
        // CASE
        //  WHEN field_type = 'timestamp' or field_type = 'date'
        //    THEN MIN(field_value) || ' to ' || MAX(field_value)
        //  WHEN field_type = 'number'
        //    THEN
        // ELSE NULL
        // END as field_range\n`;
        s += this.generateSQLJoins(stageWriter);
        // Cross join against one group_set row per indexed field.
        s += dialect.sqlGroupSetTable(fields.length) + '\n';
        s += this.generateSQLFilters(this.rootResult, 'where').sql('where');
        s += 'GROUP BY 1,2,3,4,5\n';
        // limit
        if (!(0, malloy_types_1.isRawSegment)(this.firstSegment) && this.firstSegment.limit) {
            s += `LIMIT ${this.firstSegment.limit}\n`;
        }
        // console.log(s);
        const resultStage = stageWriter.addStage(s);
        // Second stage: fold the computed range into fieldValue when no direct
        // string value was produced.
        this.resultStage = stageWriter.addStage(`SELECT
  ${fieldNameColumn},
  ${fieldPathColumn},
  ${fieldTypeColumn},
  COALESCE(${fieldValueColumn}, ${fieldRangeColumn}) as ${fieldValueColumn},
  ${weightColumn}
FROM ${resultStage}\n`);
        return this.resultStage;
    }
}
|
|
1607
|
+
// An index query: partitions its index fields into one or more
// QueryQueryIndexStage sub-queries (one per unique many-join path) and
// UNION ALLs their results together.
class QueryQueryIndex extends QueryQuery {
    constructor(fieldDef, parent, stageWriter, isJoinedSubquery, lookupStruct) {
        super(fieldDef, parent, stageWriter, isJoinedSubquery, lookupStruct);
        // Each entry is the list of index-field refs compiled as one stage.
        this.stages = [];
        this.fieldDef = fieldDef;
        this.fieldsToStages();
    }
    // Decide how index fields are split into stages. Dialects that can't
    // union the index keep everything in a single stage; otherwise fields
    // under a multi-field `many` join each get their own stage and the rest
    // share stage 0.
    fieldsToStages() {
        const indexSeg = this.firstSegment;
        if (this.parent.dialect.dontUnionIndex) {
            this.stages = [indexSeg.indexFields];
            return;
        }
        // Collect the field references by unique path, the final
        // index will be a union indexes from each unique path
        const stageMap = {};
        for (const fref of indexSeg.indexFields) {
            if (fref.path.length > 1) {
                // Key stages by the joined path minus the leaf field name.
                const stageRoot = pathToCol(fref.path.slice(0, fref.path.length - 1));
                const stage = stageMap[stageRoot];
                if (stage === undefined) {
                    const f = this.parent.nameMap.get(fref.path[0]);
                    // Only multi-field `many` joins start a dedicated stage.
                    if (f instanceof query_node_1.QueryFieldStruct &&
                        f.fieldDef.join === 'many' &&
                        f.fieldDef.fields.length > 1) {
                        const toStage = [fref];
                        stageMap[stageRoot] = toStage;
                        this.stages.push(toStage);
                        continue;
                    }
                }
                else {
                    stage.push(fref);
                    continue;
                }
            }
            // Everything else (including unqualified refs) lands in stage 0.
            if (this.stages[0] === undefined) {
                this.stages[0] = [];
            }
            this.stages[0].push(fref);
        }
    }
    // Field expansion happens per-stage in QueryQueryIndexStage; nothing to
    // do at this level.
    expandFields(_resultStruct) { }
    // Compile each stage as its own index sub-query, then UNION ALL the
    // stage outputs (or reuse the single stage directly).
    generateSQL(stageWriter) {
        const indexSeg = this.firstSegment;
        const outputStageNames = [];
        for (const fields of this.stages) {
            const q = new QueryQueryIndexStage({
                ...this.fieldDef,
                pipeline: [
                    {
                        ...indexSeg,
                        indexFields: fields,
                    },
                ],
            }, this.parent, stageWriter, this.isJoinedSubquery, this.structRefToQueryStruct);
            q.prepare(stageWriter);
            const lastStageName = q.generateSQL(stageWriter);
            outputStageNames.push(lastStageName);
        }
        if (outputStageNames.length === 1) {
            this.resultStage = outputStageNames[0];
        }
        else {
            this.resultStage = stageWriter.addStage(outputStageNames.map(n => `SELECT * FROM ${n}\n`).join(' UNION ALL \n'));
        }
        return this.resultStage;
    }
    /**
     * All Indexes have the same output schema.
     * fieldName is deprecated, dots in fieldName may or may not be join nodes
     * fieldPath is a URL encoded slash separated path
     */
    getResultStructDef() {
        const ret = {
            type: 'query_result',
            name: this.resultStage || 'result',
            dialect: this.parent.dialect.name,
            fields: [
                { type: 'string', name: 'fieldName' },
                { type: 'string', name: 'fieldPath' },
                { type: 'string', name: 'fieldValue' },
                { type: 'string', name: 'fieldType' },
                { type: 'number', name: 'weight', numberType: 'integer' },
            ],
            connection: this.parent.connectionName,
        };
        // Propagate any model annotation onto the result.
        if (this.parent.structDef.modelAnnotation) {
            ret.modelAnnotation = this.parent.structDef.modelAnnotation;
        }
        return ret;
    }
}
|
|
1700
|
+
// Project queries: no overrides — all behavior is inherited from QueryQuery.
class QueryQueryProject extends QueryQuery {
}
|
|
1702
|
+
/**
 * A raw query: emits the SQL text of its backing `sql_select` source
 * verbatim as a single stage, with no field expansion or preparation.
 */
class QueryQueryRaw extends QueryQuery {
    // Add the source's SELECT text as a stage; only sql_select is supported.
    generateSQL(stageWriter) {
        const sourceDef = this.parent.structDef;
        if (sourceDef.type !== 'sql_select') {
            throw new Error('Invalid struct for QueryQueryRaw, currently only supports SQL');
        }
        return stageWriter.addStage(sourceDef.selectStr);
    }
    prepare() {
        // Do nothing!
    }
    // The result schema is the source's own schema, retyped as a query result.
    getResultStructDef() {
        const sourceDef = this.parent.structDef;
        if ((0, malloy_types_1.isSourceDef)(sourceDef)) {
            return { ...sourceDef, type: 'query_result' };
        }
        throw new Error(`Result cannot be type ${this.parent.structDef.type}`);
    }
    // Raw queries carry no per-field result metadata.
    getResultMetadata(_fi) {
        return undefined;
    }
}
|
|
1722
|
+
// Reduce queries: no overrides — all behavior is inherited from QueryQuery.
class QueryQueryReduce extends QueryQuery {
}
|
|
1724
|
+
//# sourceMappingURL=query_query.js.map
|