@malloydata/malloy 0.0.304 → 0.0.305
This diff shows the content changes between publicly released versions of the package. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
- package/dist/dialect/dialect.d.ts +1 -1
- package/dist/dialect/duckdb/duckdb.d.ts +1 -1
- package/dist/dialect/duckdb/duckdb.js +2 -6
- package/dist/dialect/mysql/mysql.d.ts +1 -1
- package/dist/dialect/mysql/mysql.js +2 -6
- package/dist/dialect/postgres/postgres.d.ts +1 -1
- package/dist/dialect/postgres/postgres.js +2 -6
- package/dist/dialect/snowflake/snowflake.d.ts +1 -1
- package/dist/dialect/snowflake/snowflake.js +2 -5
- package/dist/dialect/standardsql/standardsql.d.ts +1 -1
- package/dist/dialect/standardsql/standardsql.js +2 -6
- package/dist/dialect/trino/trino.d.ts +1 -1
- package/dist/dialect/trino/trino.js +2 -6
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2 -3
- package/dist/lang/ast/expressions/expr-aggregate-function.js +12 -2
- package/dist/lang/ast/expressions/expr-count.js +3 -1
- package/dist/lang/ast/expressions/expr-func.js +34 -10
- package/dist/lang/ast/expressions/expr-props.js +1 -1
- package/dist/lang/ast/expressions/expr-ungroup.js +7 -3
- package/dist/lang/ast/expressions/function-ordering.d.ts +19 -5
- package/dist/lang/ast/expressions/function-ordering.js +61 -9
- package/dist/lang/ast/field-space/include-utils.js +1 -1
- package/dist/lang/ast/field-space/index-field-space.js +3 -1
- package/dist/lang/ast/field-space/query-spaces.js +20 -11
- package/dist/lang/ast/query-builders/index-builder.js +1 -1
- package/dist/lang/ast/query-builders/reduce-builder.js +1 -1
- package/dist/lang/ast/query-elements/query-arrow.js +14 -4
- package/dist/lang/ast/query-elements/query-base.d.ts +1 -0
- package/dist/lang/ast/query-elements/query-base.js +14 -4
- package/dist/lang/ast/query-elements/query-refine.js +2 -0
- package/dist/lang/ast/query-properties/drill.js +1 -1
- package/dist/lang/ast/source-properties/join.js +6 -2
- package/dist/lang/ast/statements/define-source.js +1 -1
- package/dist/lang/ast/types/expr-value.js +1 -1
- package/dist/lang/ast/view-elements/reference-view.js +4 -1
- package/dist/lang/ast/view-elements/refine-utils.js +1 -1
- package/dist/{model/composite_source_utils.d.ts → lang/composite-source-utils.d.ts} +4 -17
- package/dist/{model/composite_source_utils.js → lang/composite-source-utils.js} +274 -44
- package/dist/lang/test/parse-expects.d.ts +1 -1
- package/dist/lang/test/parse-expects.js +6 -2
- package/dist/lang/test/test-translator.js +1 -1
- package/dist/malloy.js +1 -1
- package/dist/model/expression_compiler.d.ts +27 -0
- package/dist/model/expression_compiler.js +780 -0
- package/dist/model/field_instance.d.ts +108 -0
- package/dist/model/field_instance.js +520 -0
- package/dist/model/index.d.ts +5 -1
- package/dist/model/index.js +25 -4
- package/dist/model/join_instance.d.ts +18 -0
- package/dist/model/join_instance.js +71 -0
- package/dist/model/malloy_types.d.ts +48 -2
- package/dist/model/malloy_types.js +39 -1
- package/dist/model/query_model.d.ts +2 -0
- package/dist/model/query_model.js +7 -0
- package/dist/model/query_model_contract.d.ts +32 -0
- package/dist/model/query_model_contract.js +7 -0
- package/dist/model/query_model_impl.d.ts +30 -0
- package/dist/model/query_model_impl.js +266 -0
- package/dist/model/query_node.d.ts +132 -0
- package/dist/model/query_node.js +638 -0
- package/dist/model/query_query.d.ts +86 -0
- package/dist/model/query_query.js +1724 -0
- package/dist/model/sql_block.js +2 -2
- package/dist/model/stage_writer.d.ts +25 -0
- package/dist/model/stage_writer.js +120 -0
- package/dist/model/utils.d.ts +18 -1
- package/dist/model/utils.js +66 -1
- package/dist/to_stable.js +3 -4
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +4 -4
- package/dist/model/malloy_query.d.ts +0 -391
- package/dist/model/malloy_query.js +0 -3926
|
@@ -0,0 +1,780 @@
|
|
|
1
|
+
"use strict";
/*
 * Copyright Contributors to the Malloy project
 * SPDX-License-Identifier: MIT
 */
// Expression compiler: helpers that turn Malloy expression trees into SQL
// strings. Extracted from QueryField.exprToSQL to break circular
// dependencies (see the JSDoc on exprToSQL in this file).
Object.defineProperty(exports, "__esModule", { value: true });
// Public entry points, wired onto the CommonJS exports object.
exports.exprToSQL = exprToSQL;
exports.generateFunctionCallExpression = generateFunctionCallExpression;
exports.generateFieldFragment = generateFieldFragment;
exports.generateOutputFieldFragment = generateOutputFieldFragment;
exports.generateParameterFragment = generateParameterFragment;
exports.generateFilterFragment = generateFilterFragment;
exports.generateDimFragment = generateDimFragment;
exports.generateUngroupedFragment = generateUngroupedFragment;
exports.generateDistinctKeyIfNecessary = generateDistinctKeyIfNecessary;
exports.generateSumFragment = generateSumFragment;
exports.generateSymmetricFragment = generateSymmetricFragment;
exports.generateAvgFragment = generateAvgFragment;
exports.generateCountFragment = generateCountFragment;
exports.generateSpread = generateSpread;
exports.generateSourceReference = generateSourceReference;
exports.generateCaseSQL = generateCaseSQL;
exports.getFunctionOrderBy = getFunctionOrderBy;
exports.getAnalyticPartitions = getAnalyticPartitions;
exports.stringsFromSQLExpression = stringsFromSQLExpression;
// Module dependencies (CommonJS requires as emitted by the TypeScript build).
const malloy_types_1 = require("./malloy_types");
const malloy_filter_1 = require("@malloydata/malloy-filter");
const field_instance_1 = require("./field_instance");
const filter_compilers_1 = require("./filter_compilers");
const utils_1 = require("./utils");
const query_node_1 = require("./query_node");
|
|
32
|
+
// Decimal precision (digits) used when packing a value together with its
// hashed distinct key for symmetric-aggregate computation.
const NUMERIC_DECIMAL_PRECISION = 9;
/**
 * Builds SQL computing a fan-out-safe SUM: each value is scaled, added to a
 * per-row hashed distinct key, summed with DISTINCT, and then the sum of the
 * keys alone is subtracted back out, leaving the sum of the distinct values.
 *
 * Fix: the old code declared a local `precision = 9` that silently duplicated
 * NUMERIC_DECIMAL_PRECISION; it is now derived from the module constant so
 * the two cannot drift apart. The multiplier arithmetic (currently 10**0 = 1)
 * is kept so the generated SQL text is unchanged.
 *
 * @param dialect Dialect providing the key hash and default numeric types.
 * @param sqlExp SQL expression for the value being summed.
 * @param sqlDistintKey SQL expression for the row's distinct key.
 * @returns SQL string for the distinct-safe sum, cast to the dialect's
 *   default number type.
 */
function sqlSumDistinct(dialect, sqlExp, sqlDistintKey) {
    const precision = NUMERIC_DECIMAL_PRECISION;
    const uniqueInt = dialect.sqlSumDistinctHashedKey(sqlDistintKey);
    const multiplier = 10 ** (precision - NUMERIC_DECIMAL_PRECISION);
    const sumSQL = `
(
SUM(DISTINCT
(CAST(ROUND(COALESCE(${sqlExp},0)*(${multiplier}*1.0), ${NUMERIC_DECIMAL_PRECISION}) AS ${dialect.defaultDecimalType}) +
${uniqueInt}
))
-
SUM(DISTINCT ${uniqueInt})
)`;
    let ret = `(${sumSQL}/(${multiplier}*1.0))`;
    ret = `CAST(${ret} AS ${dialect.defaultNumberType})`;
    return ret;
}
|
|
50
|
+
/**
 * Converts an expression to SQL.
 * This function was extracted from QueryField.exprToSQL to break circular dependencies.
 *
 * Children are translated first and their SQL is stashed on copies of the
 * child nodes, then the dialect gets a chance to translate the node itself;
 * only if the dialect declines does the generic switch below run.
 *
 * @param field Query field driving generation (provides parent/dialect).
 * @param resultSet Result-set (field instance) context for group sets, etc.
 * @param context Source struct context used to resolve field/parameter paths.
 * @param exprToTranslate The expression node to translate.
 * @param state Generation state (where-clauses, total group set); defaults to
 *   a fresh GenerateState.
 * @returns The SQL string for the expression.
 */
function exprToSQL(field, resultSet, context, exprToTranslate, state = new utils_1.GenerateState()) {
    var _a;
    // Wrap non leaf sub expressions in parenthesis
    const subExpr = function (qf, e) {
        const sql = exprToSQL(qf, resultSet, context, e, state);
        if ((0, malloy_types_1.exprHasKids)(e)) {
            return `(${sql})`;
        }
        return sql;
    };
    /*
     * Translate the children first, and stash the translation
     * in the nodes themselves, so that if we call into the dialect
     * it will have access to the translated children.
     */
    let expr = exprToTranslate;
    if ((0, malloy_types_1.exprHasE)(exprToTranslate)) {
        // Single-child node: copy it and attach the child's SQL.
        expr = { ...exprToTranslate };
        const eSql = subExpr(field, expr.e);
        expr.e = { ...expr.e, sql: eSql };
    }
    else if ((0, malloy_types_1.exprHasKids)(exprToTranslate)) {
        // Multi-child node: translate each kid (or each element of an array
        // of kids) and attach SQL. NOTE(review): `expr.kids` still aliases
        // the original node's kids object, so the original's kid slots are
        // overwritten in place here — confirm this sharing is intentional.
        expr = { ...exprToTranslate };
        const oldKids = exprToTranslate.kids;
        for (const [name, kidExpr] of Object.entries(oldKids)) {
            if (kidExpr === null)
                continue;
            if (Array.isArray(kidExpr)) {
                expr.kids[name] = kidExpr.map(e => {
                    return { ...e, sql: subExpr(field, e) };
                });
            }
            else {
                expr.kids[name] = { ...oldKids[name], sql: subExpr(field, kidExpr) };
            }
        }
    }
    /*
     * Give the dialect a chance to translate this node
     */
    const qi = resultSet.getQueryInfo();
    const dialectSQL = field.parent.dialect.exprToSQL(qi, expr);
    if (dialectSQL) {
        return dialectSQL;
    }
    // Generic (dialect-independent) translation of every known node kind.
    switch (expr.node) {
        case 'field':
            return generateFieldFragment(field, resultSet, context, expr, state);
        case 'parameter':
            return generateParameterFragment(field, resultSet, context, expr, state);
        case 'filteredExpr':
            return generateFilterFragment(field, resultSet, context, expr, state);
        case 'all':
        case 'exclude':
            return generateUngroupedFragment(field, resultSet, context, expr, state);
        case 'genericSQLExpr':
            return Array.from(stringsFromSQLExpression(field, resultSet, context, expr, state)).join('');
        case 'aggregate': {
            let agg = '';
            if (expr.function === 'sum') {
                agg = generateSumFragment(field, resultSet, context, expr, state);
            }
            else if (expr.function === 'avg') {
                agg = generateAvgFragment(field, resultSet, context, expr, state);
            }
            else if (expr.function === 'count') {
                agg = generateCountFragment(field, resultSet, context, expr, state);
            }
            else if (expr.function === 'min' ||
                expr.function === 'max' ||
                expr.function === 'distinct') {
                agg = generateSymmetricFragment(field, resultSet, context, expr, state);
            }
            else {
                throw new Error(`Internal Error: Unknown aggregate function ${expr.function}`);
            }
            // In a complex (multi-group-set) query, guard the aggregate with a
            // CASE on group_set; an active "all/exclude" total overrides the
            // result set's own group set.
            if (resultSet.root().isComplexQuery) {
                let groupSet = resultSet.groupSet;
                if (state.totalGroupSet !== -1) {
                    groupSet = state.totalGroupSet;
                }
                return (0, utils_1.caseGroup)([groupSet], agg);
            }
            return agg;
        }
        case 'function_parameter':
            throw new Error('Internal Error: Function parameter fragment remaining during SQL generation');
        case 'outputField':
            return generateOutputFieldFragment(field, resultSet, context, expr, state);
        case 'function_call':
            return generateFunctionCallExpression(field, resultSet, context, expr, state);
        case 'spread':
            return generateSpread(field, resultSet, context, expr, state);
        case 'source-reference':
            return generateSourceReference(field, resultSet, context, expr);
        case '+':
        case '-':
        case '*':
        case '%':
        case '/':
        case '>':
        case '<':
        case '>=':
        case '<=':
        case '=':
            // Binary operators: children were already translated above.
            return `${expr.kids.left.sql}${expr.node}${expr.kids.right.sql}`;
        // Malloy inequality comparisons always return a boolean
        case '!=': {
            const notEqual = `${expr.kids.left.sql}!=${expr.kids.right.sql}`;
            return `COALESCE(${notEqual},true)`;
        }
        case 'and':
        case 'or':
            return `${expr.kids.left.sql} ${expr.node} ${expr.kids.right.sql}`;
        case 'coalesce':
            return `COALESCE(${expr.kids.left.sql},${expr.kids.right.sql})`;
        case 'in': {
            const oneOf = expr.kids.oneOf.map(o => o.sql).join(',');
            return `${expr.kids.e.sql} ${expr.not ? 'NOT IN' : 'IN'} (${oneOf})`;
        }
        case 'like':
        case '!like': {
            const likeIt = expr.node === 'like' ? 'LIKE' : 'NOT LIKE';
            // String-literal patterns go through the dialect's sqlLike hook;
            // computed patterns fall back to a plain LIKE comparison.
            const compare = expr.kids.right.node === 'stringLiteral'
                ? field.parent.dialect.sqlLike(likeIt, (_a = expr.kids.left.sql) !== null && _a !== void 0 ? _a : '', expr.kids.right.literal)
                : `${expr.kids.left.sql} ${likeIt} ${expr.kids.right.sql}`;
            return expr.node === 'like' ? compare : `COALESCE(${compare},true)`;
        }
        case '()':
            return `(${expr.e.sql})`;
        case 'not':
            // Malloy not operator always returns a boolean
            return `COALESCE(NOT ${expr.e.sql},TRUE)`;
        case 'unary-':
            return `-${expr.e.sql}`;
        case 'is-null':
            return `${expr.e.sql} IS NULL`;
        case 'is-not-null':
            return `${expr.e.sql} IS NOT NULL`;
        case 'true':
        case 'false':
            return expr.node;
        case 'null':
            return 'NULL';
        case 'case':
            return generateCaseSQL(field, expr);
        case '':
            return '';
        case 'filterCondition':
            // our child will be translated at the top of this function
            if (expr.e.sql) {
                expr.sql = expr.e.sql;
                return expr.sql;
            }
            return '';
        case 'functionDefaultOrderBy':
        case 'functionOrderBy':
            // Order-by fragments are rendered by getFunctionOrderBy, not here.
            return '';
        // TODO: throw an error here; not simple because we call into this
        // code currently before the composite source is resolved in some cases
        case 'compositeField':
            return '{COMPOSITE_FIELD}';
        case 'filterMatch':
            return generateAppliedFilter(field, context, expr);
        case 'filterLiteral':
            return 'INTERNAL ERROR FILTER EXPRESSION VALUE SHOULD NOT BE USED';
        default:
            throw new Error(`Internal Error: Unknown expression node '${expr.node}' ${JSON.stringify(expr, undefined, 2)}`);
    }
}
|
|
224
|
+
/**
 * Compiles a 'filterMatch' node: resolves the filter source (a filter
 * literal, possibly wrapped in parentheses or supplied via a source
 * parameter), parses it with the filter parser for the matched data type,
 * and hands the parse tree to FilterCompilers to produce SQL.
 *
 * @throws if the filter is neither a literal nor a parameter bound to one,
 *   if the data type has no filter parser, or if parsing reports errors.
 */
function generateAppliedFilter(field, context, filterMatchExpr) {
    var _a;
    let filterExpr = filterMatchExpr.kids.filterExpr;
    // Unwrap any number of redundant parentheses around the filter.
    while (filterExpr.node === '()') {
        filterExpr = filterExpr.e;
    }
    if (filterExpr.node === 'parameter') {
        // Filter supplied through a source argument; report it to the event
        // stream and substitute the argument's bound value.
        const name = filterExpr.path[0];
        (_a = context.eventStream) === null || _a === void 0 ? void 0 : _a.emit('source-argument-compiled', { name });
        const argument = context.arguments()[name];
        if (argument.value) {
            filterExpr = argument.value;
        }
        else {
            throw new Error(`Parameter ${name} was expected to be a filter expression`);
        }
    }
    if (filterExpr.node !== 'filterLiteral') {
        throw new Error('Can only use filter expression literals or parameters as filter expressions');
    }
    const filterSrc = filterExpr.filterSrc;
    let fParse;
    // Pick the filter-expression parser matching the filtered data type.
    switch (filterMatchExpr.dataType) {
        case 'string':
            fParse = malloy_filter_1.StringFilterExpression.parse(filterSrc);
            break;
        case 'number':
            fParse = malloy_filter_1.NumberFilterExpression.parse(filterSrc);
            break;
        case 'boolean':
            fParse = malloy_filter_1.BooleanFilterExpression.parse(filterSrc);
            break;
        case 'date':
        case 'timestamp':
            fParse = malloy_filter_1.TemporalFilterExpression.parse(filterSrc);
            break;
        default:
            throw new Error(`unsupported filter type ${filterMatchExpr.dataType}`);
    }
    if (fParse.log.length > 0) {
        // Surface only the first parse problem.
        throw new Error(`Filter expression parse error: ${fParse.log[0]}`);
    }
    return filter_compilers_1.FilterCompilers.compile(filterMatchExpr.dataType, fParse.parsed, filterMatchExpr.kids.expr.sql || '', context.dialect);
}
|
|
268
|
+
// Helper functions for generateFunctionCallExpression
|
|
269
|
+
/**
 * Maps each overload parameter name to the parameter itself plus the list of
 * call-site argument indexes it consumes. A variadic parameter absorbs every
 * argument from its own position through the end of the argument list.
 */
function getParameterMap(overload, numArgs) {
    const byName = new Map();
    overload.params.forEach((param, paramIndex) => {
        let argIndexes;
        if (param.isVariadic) {
            argIndexes = (0, utils_1.range)(paramIndex, numArgs);
        }
        else {
            argIndexes = [paramIndex];
        }
        byName.set(param.name, { param, argIndexes });
    });
    return byName;
}
|
|
277
|
+
/**
 * Expands a function overload's dialect-specific expression template,
 * substituting call-site arguments for parameter placeholders, splicing in
 * variadic/spread arguments comma-separated, and injecting the optional
 * aggregate ORDER BY and LIMIT fragments where the template asks for them.
 *
 * @param dialect Dialect name used to pick the overload implementation.
 * @param overload The matched function overload definition.
 * @param args Translated argument expression nodes.
 * @param orderBy Pre-rendered ORDER BY SQL (or falsy for none).
 * @param limit Pre-rendered LIMIT SQL (or falsy for none).
 * @returns The expanded expression tree, ready for exprToSQL.
 * @throws if the overload has no implementation for this dialect, or the
 *   template spreads something that is not a function parameter.
 */
function expandFunctionCall(dialect, overload, args, orderBy, limit) {
    // Interleave ',' tokens between expressions (no trailing comma).
    function withCommas(es) {
        const ret = [];
        for (let i = 0; i < es.length;) {
            ret.push(es[i]);
            i += 1;
            if (i < es.length) {
                ret.push(',');
            }
        }
        return ret;
    }
    const paramMap = getParameterMap(overload, args.length);
    if (overload.dialect[dialect] === undefined) {
        throw new Error(`Function is not defined for '${dialect}' dialect`);
    }
    const expanded = (0, utils_1.exprMap)(overload.dialect[dialect].e, fragment => {
        var _a, _b;
        if (fragment.node === 'spread') {
            // `...param` in the template: splice in all arguments bound to
            // that parameter, comma separated.
            const param = fragment.e;
            if (param.node !== 'function_parameter') {
                throw new Error('Invalid function definition. Argument to spread must be a function parameter.');
            }
            const entry = paramMap.get(param.name);
            if (entry === undefined) {
                return fragment;
            }
            const spread = entry.argIndexes.map(argIndex => args[argIndex]);
            return (0, utils_1.composeSQLExpr)(withCommas(spread));
        }
        else if (fragment.node === 'function_parameter') {
            // Bare parameter reference: one argument, or all of them if the
            // parameter is variadic.
            const entry = paramMap.get(fragment.name);
            if (entry === undefined) {
                return fragment;
            }
            else if (entry.param.isVariadic) {
                const spread = entry.argIndexes.map(argIndex => args[argIndex]);
                return (0, utils_1.composeSQLExpr)(withCommas(spread));
            }
            else {
                return args[entry.argIndexes[0]];
            }
        }
        else if (fragment.node === 'aggregate_order_by') {
            // Inject the ORDER BY (with optional template prefix/suffix), or
            // collapse to an empty node when there is none.
            return orderBy
                ? (0, utils_1.composeSQLExpr)([
                    ` ${(_a = fragment.prefix) !== null && _a !== void 0 ? _a : ''}${orderBy}${(_b = fragment.suffix) !== null && _b !== void 0 ? _b : ''}`,
                ])
                : { node: '' };
        }
        else if (fragment.node === 'aggregate_limit') {
            return limit ? (0, utils_1.composeSQLExpr)([` ${limit}`]) : { node: '' };
        }
        return fragment;
    });
    return expanded;
}
|
|
334
|
+
/**
 * Returns the overload parameter that consumes the argument at `argIndex`:
 * the first variadic parameter declared before that position (a variadic
 * parameter absorbs every following argument), otherwise the parameter at
 * the same index as the argument.
 */
function getParamForArgIndex(params, argIndex) {
    for (let i = 0; i < argIndex; i += 1) {
        if (params[i].isVariadic) {
            return params[i];
        }
    }
    return params[argIndex];
}
|
|
338
|
+
/**
 * Fallback compilation of `string_agg` under fan-out for dialects that lack
 * generic agg-distinct support: delegates to the dialect's
 * sqlStringAggDistinct hook. An ORDER BY cannot be honored on this path, so
 * its presence is an error.
 */
function generateAsymmetricStringAggExpression(field, resultSet, context, value, separator, distinctKey, orderBy, dialectName, state) {
    if (orderBy) {
        throw new Error(`Function \`string_agg\` does not support fanning out with an order by in ${dialectName}`);
    }
    const valueFragment = generateDimFragment(field, resultSet, context, value, state);
    let separatorFragment = '';
    if (separator) {
        separatorFragment = generateDimFragment(field, resultSet, context, separator, state);
    }
    return field.parent.dialect.sqlStringAggDistinct(distinctKey, valueFragment, separatorFragment);
}
|
|
348
|
+
/**
 * Wraps an already-expanded analytic function call in its OVER(...) clause:
 * computes the PARTITION BY list (group_set first for complex queries, then
 * any extra partition fields), derives a window ORDER BY from the segment's
 * ordering when the dialect implementation requires one and none was given,
 * and renders a ROWS BETWEEN frame if the overload declares one.
 */
function generateAnalyticFragment(field, dialect, resultStruct, context, expr, overload, state, args, partitionByFields, funcOrdering) {
    const isComplex = resultStruct.root().isComplexQuery;
    const partitionFields = getAnalyticPartitions(field, resultStruct, partitionByFields);
    // Complex queries additionally partition by the synthetic group_set column.
    const allPartitions = [
        ...(isComplex ? ['group_set'] : []),
        ...partitionFields,
    ];
    const partitionBy = allPartitions.length > 0 ? `PARTITION BY ${allPartitions.join(', ')}` : '';
    let orderBy = funcOrdering !== null && funcOrdering !== void 0 ? funcOrdering : '';
    const dialectOverload = overload.dialect[dialect];
    if (!funcOrdering && dialectOverload.needsWindowOrderBy) {
        // calculate the ordering.
        const obSQL = [];
        let orderingField;
        const orderByDef = resultStruct.firstSegment.orderBy ||
            resultStruct.calculateDefaultOrderBy();
        for (const ordering of orderByDef) {
            // Ordering entries name a field either by name or by output number.
            if (typeof ordering.field === 'string') {
                orderingField = {
                    name: ordering.field,
                    fif: resultStruct.getField(ordering.field),
                };
            }
            else {
                orderingField = resultStruct.getFieldByNumber(ordering.field);
            }
            if ('expressionType' in orderingField.fif.f.fieldDef) {
                const exprType = orderingField.fif.f.fieldDef['expressionType'];
                // TODO today we do not support ordering by analytic functions at all, so this works
                // but eventually we will, and this check will just want to ensure that the order field
                // isn't the same as the field we're currently compiling (otherwise we will loop infintely)
                if ((0, malloy_types_1.expressionIsAnalytic)(exprType)) {
                    continue;
                }
            }
            if (resultStruct.firstSegment.type === 'reduce') {
                const orderSQL = orderingField.fif.getAnalyticalSQL(false);
                // const orderSQL = this.generateDimFragment(resultSet, context, arg, state)
                obSQL.push(` ${orderSQL} ${ordering.dir || 'ASC'}`);
            }
            else if (resultStruct.firstSegment.type === 'project') {
                // Verify that the field's parent result structure matches what we expect
                if (orderingField.fif.parent !== resultStruct) {
                    throw new Error(`Field instance parent mismatch: field '${orderingField.name}' has parent from different result structure. ` +
                        'This likely means the field is from a previous pipeline stage and needs special handling.');
                }
                const orderSQL = orderingField.fif.generateExpression();
                obSQL.push(` ${orderSQL} ${ordering.dir || 'ASC'}`);
            }
        }
        if (obSQL.length > 0) {
            orderBy = ' ' + field.parent.dialect.sqlOrderBy(obSQL, 'analytical');
        }
    }
    let between = '';
    if (dialectOverload.between) {
        // Frame bounds may be: -1 (UNBOUNDED), a row count, or the name of a
        // parameter whose literal argument supplies the row count.
        const [preceding, following] = [
            dialectOverload.between.preceding,
            dialectOverload.between.following,
        ].map(value => {
            if (value === -1) {
                return 'UNBOUNDED';
            }
            if (typeof value === 'number') {
                return value;
            }
            const argIndex = overload.params.findIndex(param => param.name === value);
            const arg = args[argIndex];
            if (arg.node !== 'numberLiteral') {
                throw new Error('Invalid number of rows for window spec');
            }
            // TODO this does not handle float literals correctly
            return arg.literal;
        });
        between = `ROWS BETWEEN ${preceding} PRECEDING AND ${following} FOLLOWING`;
    }
    const funcSQL = exprToSQL(field, resultStruct, context, expr, state);
    let retExpr = `${funcSQL} OVER(${partitionBy} ${orderBy} ${between})`;
    if (isComplex) {
        // Restrict the analytic result to this result set's group_set rows.
        retExpr = `CASE WHEN group_set=${resultStruct.groupSet} THEN ${retExpr} END`;
    }
    return retExpr;
}
|
|
431
|
+
/**
 * Compiles a 'function_call' node. Handles three routes:
 *  1. `string_agg` under fan-out on dialects without generic agg-distinct
 *     support (special asymmetric path);
 *  2. asymmetric aggregates that need a distinct key, via the dialect's
 *     sqlAggDistinct packing/unpacking callback;
 *  3. the normal route: filter-wrap aggregate arguments, expand the overload
 *     template, and wrap analytics in their OVER clause.
 */
function generateFunctionCallExpression(field, resultSet, context, frag, state) {
    var _a, _b, _c;
    const overload = frag.overload;
    const args = frag.kids.args;
    const isSymmetric = (_a = frag.overload.isSymmetric) !== null && _a !== void 0 ? _a : false;
    // A truthy distinctKey means this is an asymmetric aggregate that must be
    // de-duplicated against the join fan-out.
    const distinctKey = (0, malloy_types_1.expressionIsAggregate)(overload.returnType.expressionType) &&
        !isSymmetric &&
        generateDistinctKeyIfNecessary(field, resultSet, context, frag.structPath);
    const aggregateLimit = frag.limit ? `LIMIT ${frag.limit}` : undefined;
    if (frag.name === 'string_agg' &&
        distinctKey &&
        !context.dialect.supportsAggDistinct &&
        context.dialect.name !== 'snowflake') {
        return generateAsymmetricStringAggExpression(field, resultSet, context, args[0], args[1], distinctKey, frag.kids.orderBy, context.dialect.name, state);
    }
    if (distinctKey) {
        if (!context.dialect.supportsAggDistinct) {
            throw new Error(`Function \`${frag.name}\` does not support fanning out in ${context.dialect.name}`);
        }
        // Pack every argument (and order-by expression) into the dialect's
        // agg-distinct machinery; the callback rebuilds the call from the
        // unpacked value names.
        const argsExpressions = args.map(arg => {
            return generateDimFragment(field, resultSet, context, arg, state);
        });
        const orderBys = (_b = frag.kids.orderBy) !== null && _b !== void 0 ? _b : [];
        const orderByExpressions = orderBys.map(ob => {
            var _a;
            const defaultOrderByArgIndex = (_a = overload.dialect[context.dialect.name].defaultOrderByArgIndex) !== null && _a !== void 0 ? _a : 0;
            const expr = ob.node === 'functionOrderBy' ? ob.e : args[defaultOrderByArgIndex];
            return generateDimFragment(field, resultSet, context, expr, state);
        });
        return context.dialect.sqlAggDistinct(distinctKey, [...argsExpressions, ...orderByExpressions], valNames => {
            const vals = valNames.map((v, i) => {
                // Special case: the argument is required to be literal, so we use the actual argument
                // rather than the packed value
                // TODO don't even pack the value in the first place
                if (i < args.length) {
                    const param = getParamForArgIndex(overload.params, i);
                    if (param.allowedTypes.every(t => (0, malloy_types_1.isLiteral)(t.evalSpace))) {
                        return args[i];
                    }
                }
                return (0, utils_1.composeSQLExpr)([v]);
            });
            // First N unpacked values are the arguments; the rest re-create
            // the order-by list in original direction order.
            const newArgs = vals.slice(0, argsExpressions.length);
            const orderBy = vals
                .slice(argsExpressions.length)
                .map((e, i) => {
                return { node: 'functionOrderBy', e, dir: orderBys[i].dir };
            });
            const orderBySQL = getFunctionOrderBy(field, resultSet, context, state, orderBy, newArgs, overload);
            const funcCall = expandFunctionCall(context.dialect.name, overload, newArgs, orderBySQL, aggregateLimit);
            return exprToSQL(field, resultSet, context, funcCall, state);
        });
    }
    else {
        const mappedArgs = (0, malloy_types_1.expressionIsAggregate)(overload.returnType.expressionType)
            ? args.map((arg, index) => {
                // TODO We assume that all arguments to this aggregate-returning function need to
                // have filters applied to them. This is not necessarily true in the general case,
                // e.g. in a function `avg_plus(a, b) = avg(a) + b` -- here, `b` should not be
                // be filtered. But since there aren't any aggregate functions like this in the
                // standard library we have planned, we ignore this for now.
                // Update: Now we apply this only to arguments whose parameter is not constant-requiring.
                // So in `string_agg(val, sep)`, `sep` does not get filters applied to it because
                // it must be constant
                const param = getParamForArgIndex(overload.params, index);
                // TODO technically this should probably look at _which_ allowed param type was matched
                // for this argument and see if that type is at most constant... but we lose type information
                // by this point in the compilation, so that info would have to be passed into the func call
                // fragment.
                return param.allowedTypes.every(t => (0, malloy_types_1.isLiteral)(t.evalSpace))
                    ? arg
                    : (0, utils_1.composeSQLExpr)([
                        generateDimFragment(field, resultSet, context, arg, state),
                    ]);
            })
            : args;
        const orderBySql = frag.kids.orderBy
            ? getFunctionOrderBy(field, resultSet, context, state, frag.kids.orderBy, args, overload)
            : '';
        const funcCall = expandFunctionCall(context.dialect.name, overload, mappedArgs, orderBySql, aggregateLimit);
        if ((0, malloy_types_1.expressionIsAnalytic)(overload.returnType.expressionType)) {
            const extraPartitions = ((_c = frag.partitionBy) !== null && _c !== void 0 ? _c : []).map(outputName => {
                return `(${resultSet.getField(outputName).getAnalyticalSQL(false)})`;
            });
            return generateAnalyticFragment(field, context.dialect.name, resultSet, context, funcCall, overload, state, args, extraPartitions, orderBySql);
        }
        return exprToSQL(field, resultSet, context, funcCall, state);
    }
}
|
|
520
|
+
/**
 * Compiles a 'field' node: resolves the path within the source context, then
 * either compiles the field's defining expression (for computed fields), the
 * special distinct-key SQL, or a plain qualified column reference.
 */
function generateFieldFragment(field, resultSet, context, expr, state) {
    // find the structDef and return the path to the field...
    const fieldRef = context.getFieldByName(expr.path);
    if ((0, malloy_types_1.hasExpression)(fieldRef.fieldDef)) {
        // Computed field: compile its expression in the field's own parent
        // context and parenthesize the result.
        const ret = exprToSQL(field, resultSet, fieldRef.parent, fieldRef.fieldDef.e, state);
        return `(${ret})`;
    }
    else {
        // Instead of calling FieldInstanceFeild.generateExpression, which will just call back here
        // copy what that would do ..
        // Check for distinct key by its characteristic properties
        if (fieldRef.fieldDef.type === 'string' &&
            fieldRef.fieldDef.name === '__distinct_key') {
            return generateDistinctKeySQL(fieldRef, resultSet);
        }
        // The normal case - just generate the SQL reference
        return (0, field_instance_1.sqlFullChildReference)(fieldRef.parent, fieldRef.fieldDef.name, fieldRef.parent.structDef.type === 'record'
            ? {
                result: resultSet,
                field: fieldRef,
            }
            : undefined);
    }
}
|
|
544
|
+
/**
 * Compiles an 'outputField' node: looks the named field up in the result set
 * and returns its analytical SQL, parenthesized.
 */
function generateOutputFieldFragment(field, resultSet, _context, frag, _state) {
    const outputField = resultSet.getField(frag.name);
    const analyticalSQL = outputField.getAnalyticalSQL(false);
    return `(${analyticalSQL})`;
}
|
|
547
|
+
/**
 * Compiles a 'parameter' node: reports the argument use to the event stream,
 * then compiles the value bound to the named source argument.
 * @throws when the argument has no bound value.
 */
function generateParameterFragment(field, resultSet, context, expr, state) {
    const [name] = expr.path;
    const stream = context.eventStream;
    if (stream !== null && stream !== void 0) {
        stream.emit('source-argument-compiled', { name });
    }
    const argument = context.arguments()[name];
    if (!argument.value) {
        throw new Error(`Can't generate SQL, no value for ${expr.path}`);
    }
    return exprToSQL(field, resultSet, context, argument.value, state);
}
|
|
557
|
+
/**
 * Compiles a 'filteredExpr' node: ANDs each filter condition onto the
 * state's existing where-clause, then compiles the wrapped expression with
 * the combined where-clause in effect.
 */
function generateFilterFragment(field, resultSet, context, expr, state) {
    const whereChain = new utils_1.AndChain(state.whereSQL);
    expr.kids.filterList.forEach(cond => {
        whereChain.add(exprToSQL(field, resultSet, context, cond.e, state.withWhere()));
    });
    const filteredState = state.withWhere(whereChain.sql());
    return exprToSQL(field, resultSet, context, expr.kids.e, filteredState);
}
|
|
564
|
+
// Compile a dimension expression; when a WHERE predicate is active, guard the
// value with CASE WHEN so filtered-out rows contribute NULL to aggregates.
function generateDimFragment(field, resultSet, context, expr, state) {
    const dimSQL = exprToSQL(field, resultSet, context, expr, state);
    return state.whereSQL
        ? `CASE WHEN ${state.whereSQL} THEN ${dimSQL} END`
        : dimSQL;
}
|
|
571
|
+
// Compile an all()/exclude() ungrouping: evaluate the inner expression in the
// total group set, then spread that value across rows with MAX() OVER a
// partition of the retained dimensions.
// Fix: corrected the "calcuation" typo in the nesting error message.
function generateUngroupedFragment(field, resultSet, context, expr, state) {
    if (state.totalGroupSet !== -1) {
        throw new Error('Already in ALL. Cannot nest within an all calculation.');
    }
    let totalGroupSet;
    let ungroupSet;
    if (expr.fields && expr.fields.length > 0) {
        // Named-field variant: look up the precomputed ungroup set by its key.
        const key = (0, utils_1.groupingKey)(expr.node, expr.fields);
        ungroupSet = resultSet.ungroupedSets.get(key);
        if (ungroupSet === undefined) {
            throw new Error(`Internal Error, cannot find groupset with key ${key}`);
        }
        totalGroupSet = ungroupSet.groupSet;
    }
    else {
        // Bare all(): total over the parent query's group set (0 at the root).
        totalGroupSet = resultSet.parent ? resultSet.parent.groupSet : 0;
    }
    const s = exprToSQL(field, resultSet, context, expr.e, state.withTotal(totalGroupSet));
    // ungroupSet may be undefined here (bare all()); getUngroupPartitions
    // handles that case — TODO confirm against FieldInstanceResult.
    const fields = resultSet.getUngroupPartitions(ungroupSet);
    let partitionBy = '';
    const fieldsString = fields.map(f => f.getAnalyticalSQL(true)).join(', ');
    if (fieldsString.length > 0) {
        partitionBy = `PARTITION BY ${fieldsString}`;
    }
    return `MAX(${s}) OVER (${partitionBy})`;
}
|
|
597
|
+
// Distinct-key SQL for a struct: delegate to the struct's distinct key field.
function getDistinctKeySQL(struct, resultSet) {
    return generateDistinctKeySQL(struct.getDistinctKey(), resultSet);
}
|
|
601
|
+
// Return the distinct-key SQL when this aggregate requires symmetric
// aggregate handling (the join fans rows out), otherwise undefined.
function generateDistinctKeyIfNecessary(field, resultSet, context, structPath) {
    const struct = structPath
        ? field.parent.getStructByName(structPath)
        : context;
    return needsSymetricCalculation(struct, resultSet)
        ? getDistinctKeySQL(struct, resultSet)
        : undefined;
}
|
|
613
|
+
// Compile sum(): use a symmetric-aggregate form when the join fans out
// (dialect-native when available, otherwise the generic trick), and COALESCE
// the result to 0 so an empty group sums to zero.
function generateSumFragment(field, resultSet, context, expr, state) {
    const dimSQL = generateDimFragment(field, resultSet, context, expr.e, state);
    const distinctKeySQL = generateDistinctKeyIfNecessary(field, resultSet, context, expr.structPath);
    let ret;
    if (distinctKeySQL) {
        const dialect = field.parent.dialect;
        ret = dialect.supportsSumDistinctFunction
            ? dialect.sqlSumDistinct(distinctKeySQL, dimSQL, 'SUM')
            : sqlSumDistinct(dialect, dimSQL, distinctKeySQL);
    }
    else {
        ret = `SUM(${dimSQL})`;
    }
    return `COALESCE(${ret},0)`;
}
|
|
630
|
+
// Compile the join-safe aggregates (min/max/count distinct): these are
// naturally symmetric, so only the function spelling varies.
function generateSymmetricFragment(field, resultSet, context, expr, state) {
    const dimSQL = generateDimFragment(field, resultSet, context, expr.e, state);
    const prefix = expr.function === 'distinct'
        ? 'count(distinct '
        : `${expr.function}(`;
    return `${prefix}${dimSQL})`;
}
|
|
635
|
+
// Compile avg(): plain AVG() on the leaf-most join; otherwise a symmetric
// form — the dialect's native sum-distinct with 'AVG', or a hand-built
// sum-distinct divided by the distinct count of keys with non-null values.
function generateAvgFragment(field, resultSet, context, expr, state) {
    const dimSQL = generateDimFragment(field, resultSet, context, expr.e, state);
    const distinctKeySQL = generateDistinctKeyIfNecessary(field, resultSet, context, expr.structPath);
    if (!distinctKeySQL) {
        return `AVG(${dimSQL})`;
    }
    // Apply the active WHERE predicate to the key used in the denominator.
    const countDistinctKeySQL = state.whereSQL
        ? `CASE WHEN ${state.whereSQL} THEN ${distinctKeySQL} END`
        : distinctKeySQL;
    const dialect = field.parent.dialect;
    if (dialect.supportsSumDistinctFunction) {
        return dialect.sqlSumDistinct(distinctKeySQL, dimSQL, 'AVG');
    }
    const sumDistinctSQL = sqlSumDistinct(dialect, dimSQL, distinctKeySQL);
    return `(${sumDistinctSQL})/NULLIF(COUNT(DISTINCT CASE WHEN ${dimSQL} IS NOT NULL THEN ${countDistinctKeySQL} END),0)`;
}
|
|
658
|
+
// Compile count(): COUNT(1) against the leaf-most join; when the join fans
// out rows (or needs a synthesized unique key), COUNT(DISTINCT key) instead.
// An active WHERE predicate guards the counted value with CASE WHEN.
function generateCountFragment(field, resultSet, context, expr, state) {
    const struct = expr.structPath
        ? field.parent.getStructByName(expr.structPath)
        : context;
    const joinName = struct.getJoinableParent().getIdentifier();
    const join = resultSet.root().joins.get(joinName);
    if (!join) {
        throw new Error(`Join ${joinName} not found in result set`);
    }
    let func = 'COUNT(';
    let thing = '1';
    if (!join.leafiest || join.makeUniqueKey) {
        func = 'COUNT(DISTINCT ';
        thing = getDistinctKeySQL(struct, resultSet);
    }
    if (state.whereSQL) {
        return `${func}CASE WHEN ${state.whereSQL} THEN ${thing} END)`;
    }
    return `${func}${thing})`;
}
|
|
681
|
+
// Spread nodes must be expanded before SQL generation; reaching one here
// indicates a compiler bug upstream.
function generateSpread(field, _resultSet, _context, _frag, _state) {
    const where = field.getIdentifier();
    throw new Error(`Unexpanded spread encountered during SQL generation for ${where}`);
}
|
|
684
|
+
// SQL identifier for a source reference: the source itself when no path is
// given, otherwise the identifier of the named field within it.
function generateSourceReference(field, resultSet, context, expr) {
    if (expr.path === undefined) {
        return context.getSQLIdentifier();
    }
    return context.getFieldByName(expr.path).getIdentifier();
}
|
|
692
|
+
// Render a case expression (operands already compiled to SQL) as a SQL CASE
// statement, including the optional value form and optional ELSE.
function generateCaseSQL(field, pf) {
    const parts = ['CASE'];
    const { caseValue, caseWhen, caseThen, caseElse } = pf.kids;
    if (caseValue !== undefined) {
        parts.push(`${caseValue.sql}`);
    }
    caseWhen.forEach((when, i) => {
        parts.push(`WHEN ${when.sql} THEN ${caseThen[i].sql}`);
    });
    if (caseElse !== undefined) {
        parts.push(`ELSE ${caseElse.sql}`);
    }
    parts.push('END');
    return parts.join(' ');
}
|
|
706
|
+
// Build the ORDER BY clause for an aggregate/analytic function call, or
// undefined when there is nothing to order by. A bare ordering item (no
// explicit expression) orders by the dialect's designated default argument.
function getFunctionOrderBy(field, resultSet, context, state, orderBy, args, overload) {
    if (orderBy.length === 0) {
        return undefined;
    }
    const clauses = orderBy.map(ob => {
        const dialectInfo = overload.dialect[context.dialect.name];
        const defaultIndex = dialectInfo.defaultOrderByArgIndex ?? 0;
        const expr = ob.node === 'functionOrderBy' ? ob.e : args[defaultIndex];
        const osql = generateDimFragment(field, resultSet, context, expr, state);
        if (ob.dir === 'asc') {
            return `${osql} ASC`;
        }
        if (ob.dir === 'desc') {
            return `${osql} DESC`;
        }
        return osql;
    });
    return 'ORDER BY ' + clauses.join(', ');
}
|
|
721
|
+
// Collect the partitioning expressions for an analytic function: every basic
// scalar result field of each enclosing query level, plus any explicitly
// supplied extra partition fields.
function getAnalyticPartitions(field, resultStruct, extraPartitionFields) {
    const ret = [];
    for (let level = resultStruct.parent; level !== undefined; level = level.parent) {
        const scalars = level.fields(fi => (0, query_node_1.isBasicScalar)(fi.f) && fi.fieldUsage.type === 'result');
        ret.push(...scalars.map(fi => fi.getAnalyticalSQL(true)));
    }
    if (extraPartitionFields) {
        ret.push(...extraPartitionFields);
    }
    return ret;
}
|
|
735
|
+
/*
 * Like template strings, the array of strings is paired with template
 * insertions: each source string is followed by at most one sub-expression,
 * which is compiled to SQL and yielded in between.
 */
function* stringsFromSQLExpression(field, resultSet, context, e, state) {
    const pending = [...e.kids.args];
    for (const str of e.src) {
        yield str;
        const insert = pending.shift();
        if (insert) {
            yield exprToSQL(field, resultSet, context, insert, state);
        }
    }
}
|
|
749
|
+
// SQL for the synthetic distinct key of the struct containing fieldRef.
// - A source with a declared primary key uses that key.
// - An unnested array chains its parent's distinct key with the dialect's
//   per-row id for the unnested element.
// - Otherwise the physical __distinct_key column is referenced directly.
function generateDistinctKeySQL(fieldRef, resultSet) {
    const parent = fieldRef.parent;
    if (parent.primaryKey()) {
        // Compile a field reference to the declared primary key.
        const pk = parent.getPrimaryKeyField(fieldRef.fieldDef);
        return generateFieldFragment(fieldRef, resultSet, parent, { node: 'field', path: [pk.getIdentifier()] }, new utils_1.GenerateState());
    }
    if (parent.structDef.type === 'array') {
        let parentKeySQL = '';
        if (parent.parent && parent.parent.getDistinctKey()) {
            parentKeySQL = generateFieldFragment(fieldRef, resultSet, parent.parent, { node: 'field', path: ['__distinct_key'] }, new utils_1.GenerateState());
        }
        const rowIdSQL = parent.dialect.sqlFieldReference(parent.getIdentifier(), 'table', '__row_id', 'string');
        return parent.dialect.sqlMakeUnnestKey(parentKeySQL, rowIdSQL);
    }
    return parent.dialect.sqlFieldReference(parent.getIdentifier(), 'table', '__distinct_key', 'string');
}
|
|
772
|
+
// An aggregate needs symmetric handling when its struct's join is not the
// leaf-most one in the result (i.e. joined rows may be fanned out).
function needsSymetricCalculation(qs, resultSet) {
    const joinName = qs.getJoinableParent().getIdentifier();
    const join = resultSet.root().joins.get(joinName);
    if (!join) {
        throw new Error(`Join ${joinName} not found in result set`);
    }
    return !join.leafiest;
}
|
|
780
|
+
//# sourceMappingURL=expression_compiler.js.map
|