@malloydata/malloy 0.0.308 → 0.0.310
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/dialect/mysql/mysql.js +23 -4
- package/dist/dialect/postgres/postgres.d.ts +3 -2
- package/dist/dialect/postgres/postgres.js +42 -8
- package/dist/dialect/trino/trino.js +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.js +4 -2
- package/dist/lang/composite-source-utils.js +6 -6
- package/dist/model/expression_compiler.js +8 -9
- package/dist/model/filter_compilers.d.ts +1 -0
- package/dist/model/filter_compilers.js +29 -39
- package/dist/model/query_node.d.ts +1 -1
- package/dist/model/query_query.d.ts +2 -2
- package/dist/model/query_query.js +22 -53
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +4 -4
package/README.md
CHANGED
@@ -1,6 +1,6 @@
 # Malloy
 
-Malloy is
+Malloy is a modern open source language for describing data relationships and transformations. It is both a semantic modeling language and a querying language that runs queries against a relational database. Malloy currently connects to BigQuery and Postgres, and natively supports DuckDB. We've built a Visual Studio Code extension to facilitate building Malloy data models, querying and transforming data, and creating simple visualizations and dashboards.
 
 ## Building applications or products in javascript with @malloydata/malloy
 
package/dist/dialect/mysql/mysql.js
CHANGED

@@ -65,6 +65,23 @@ const mysqlToMalloyTypes = {
     // TODO: Check if we need special handling for boolean.
     'tinyint(1)': { type: 'boolean' },
 };
+function malloyTypeToJSONTableType(malloyType) {
+    switch (malloyType.type) {
+        case 'number':
+            return malloyType.numberType === 'integer' ? 'INT' : 'DOUBLE';
+        case 'string':
+            return 'CHAR(255)'; // JSON_TABLE needs a length
+        case 'boolean':
+            return 'INT'; // or TINYINT(1) if you prefer
+        case 'record':
+        case 'array':
+            return 'JSON';
+        case 'timestamp':
+            return 'DATETIME';
+        default:
+            return malloyType.type.toUpperCase();
+    }
+}
 class MySQLDialect extends dialect_1.Dialect {
     constructor() {
         super(...arguments);
@@ -99,14 +116,16 @@ class MySQLDialect extends dialect_1.Dialect {
     malloyTypeToSQLType(malloyType) {
         switch (malloyType.type) {
             case 'number':
-                return malloyType.numberType === 'integer' ? '
+                return malloyType.numberType === 'integer' ? 'SIGNED' : 'DOUBLE';
             case 'string':
-                return '
+                return 'CHAR';
+            case 'boolean':
+                return 'SIGNED';
             case 'record':
             case 'array':
                 return 'JSON';
-            case 'date':
             case 'timestamp':
+                return 'DATETIME';
             default:
                 return malloyType.type;
         }
@@ -177,7 +196,7 @@ class MySQLDialect extends dialect_1.Dialect {
         var _a;
         const fields = [];
         for (const f of fieldList) {
-            let fType =
+            let fType = malloyTypeToJSONTableType(f.typeDef);
             if (f.typeDef.type === 'sql native' &&
                 f.typeDef.rawType &&
                 ((_a = f.typeDef.rawType) === null || _a === void 0 ? void 0 : _a.match(/json/))) {
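The separate `malloyTypeToJSONTableType` mapping exists because MySQL's `JSON_TABLE` column definitions are stricter than `CAST` targets: as the "JSON_TABLE needs a length" comment notes, `CHAR` requires an explicit length there, and the `CAST`-style aliases `SIGNED`/`CHAR` used by `malloyTypeToSQLType` are not valid column types. A hedged sketch of how such a mapping could feed a generated `JSON_TABLE` clause; the field list and the SQL shape are hypothetical, not copied from the dialect's actual unnest code:

```typescript
// Hypothetical sketch: building a JSON_TABLE COLUMNS clause with the same
// type mapping the diff adds. Illustrative only.
type MalloyTypeDef = {type: string; numberType?: string};

function jsonTableType(t: MalloyTypeDef): string {
  switch (t.type) {
    case 'number':
      return t.numberType === 'integer' ? 'INT' : 'DOUBLE';
    case 'string':
      return 'CHAR(255)'; // JSON_TABLE column types need a length
    case 'boolean':
      return 'INT';
    case 'record':
    case 'array':
      return 'JSON';
    case 'timestamp':
      return 'DATETIME';
    default:
      return t.type.toUpperCase();
  }
}

const fieldList = [
  {name: 'city', typeDef: {type: 'string'}},
  {name: 'population', typeDef: {type: 'number', numberType: 'integer'}},
];
const columns = fieldList
  .map(f => `${f.name} ${jsonTableType(f.typeDef)} PATH '$.${f.name}'`)
  .join(', ');
console.log(`JSON_TABLE(payload, '$[*]' COLUMNS (${columns})) AS t`);
// -> JSON_TABLE(payload, '$[*]' COLUMNS (city CHAR(255) PATH '$.city',
//    population INT PATH '$.population')) AS t
```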
package/dist/dialect/postgres/postgres.d.ts
CHANGED

@@ -1,6 +1,6 @@
-import type { Sampling, AtomicTypeDef, TimeDeltaExpr, TypecastExpr, MeasureTimeExpr, BasicAtomicTypeDef, RecordLiteralNode, ArrayLiteralNode } from '../../model/malloy_types';
+import type { Sampling, AtomicTypeDef, TimeDeltaExpr, TypecastExpr, MeasureTimeExpr, BasicAtomicTypeDef, RecordLiteralNode, ArrayLiteralNode, TimeExtractExpr } from '../../model/malloy_types';
 import type { DialectFunctionOverloadDef } from '../functions';
-import type
+import { type DialectFieldList, type FieldReferenceType, type QueryInfo } from '../dialect';
 import { PostgresBase } from '../pg_impl';
 export declare class PostgresDialect extends PostgresBase {
     name: string;
@@ -66,4 +66,5 @@ export declare class PostgresDialect extends PostgresBase {
     validateTypeName(sqlType: string): boolean;
     sqlLiteralRecord(lit: RecordLiteralNode): string;
     sqlLiteralArray(lit: ArrayLiteralNode): string;
+    sqlTimeExtractExpr(qi: QueryInfo, from: TimeExtractExpr): string;
 }
package/dist/dialect/postgres/postgres.js
CHANGED

@@ -26,6 +26,7 @@ exports.PostgresDialect = void 0;
 const utils_1 = require("../../model/utils");
 const malloy_types_1 = require("../../model/malloy_types");
 const functions_1 = require("../functions");
+const dialect_1 = require("../dialect");
 const pg_impl_1 = require("../pg_impl");
 const dialect_functions_1 = require("./dialect_functions");
 const function_overrides_1 = require("./function_overrides");
@@ -80,7 +81,7 @@ class PostgresDialect extends pg_impl_1.PostgresBase {
         this.udfPrefix = 'pg_temp.__udf';
         this.hasFinalStage = true;
         this.divisionIsInteger = true;
-        this.supportsSumDistinctFunction =
+        this.supportsSumDistinctFunction = true;
         this.unnestWithNumbers = false;
         this.defaultSampling = { rows: 50000 };
         this.supportUnnestArrayAgg = true;
@@ -242,14 +243,33 @@ class PostgresDialect extends pg_impl_1.PostgresBase {
         }
         throw new Error(`Unknown or unhandled postgres time unit: ${df.units}`);
     }
+    // This looks like a partial implementation of a method similar to how DuckDB
+    // does symmetric aggregates, which was abandoned. Leaving it in here for now
+    // in case the original author wants to pick it up again.
+    // sqlSumDistinct(key: string, value: string, funcName: string): string {
+    //   // return `sum_distinct(list({key:${key}, val: ${value}}))`;
+    //   return `(
+    //     SELECT ${funcName}((a::json->>'f2')::DOUBLE PRECISION) as value
+    //     FROM (
+    //       SELECT UNNEST(array_agg(distinct row_to_json(row(${key},${value}))::text)) a
+    //     ) a
+    //   )`;
+    // }
     sqlSumDistinct(key, value, funcName) {
-
-
-
-
-
-
-
+        const hashKey = this.sqlSumDistinctHashedKey(key);
+        // PostgreSQL requires CAST to NUMERIC before ROUND, which is different
+        // than the generic implementation of sqlSumDistinct, but is OK in
+        // PostgreSQL because NUMERIC has arbitrary precision.
+        const roundedValue = `ROUND(CAST(COALESCE(${value}, 0) AS NUMERIC), 9)`;
+        const sumSQL = `SUM(DISTINCT ${roundedValue} + ${hashKey}) - SUM(DISTINCT ${hashKey})`;
+        const ret = `CAST(${sumSQL} AS DOUBLE PRECISION)`;
+        if (funcName === 'SUM') {
+            return ret;
+        }
+        else if (funcName === 'AVG') {
+            return `(${ret})/NULLIF(COUNT(DISTINCT CASE WHEN ${value} IS NOT NULL THEN ${key} END), 0)`;
+        }
+        throw new Error(`Unknown Symmetric Aggregate function ${funcName}`);
     }
     // TODO this does not preserve the types of the arguments, meaning we have to hack
     // around this in the definitions of functions that use this to cast back to the correct
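Both this PostgreSQL override and the generic `sqlSumDistinct` in `expression_compiler.js` below rely on the same symmetric-aggregate identity: if every source row carries a unique numeric hash key, then `SUM(DISTINCT value + key) - SUM(DISTINCT key)` equals the sum of `value` over distinct source rows, even after a join has fanned each row out. Rounding the value first makes equal values bit-identical so `DISTINCT` can collapse duplicates, and the keys must be spaced widely enough (in practice, large hashes) that `value + key` collisions cannot occur. A minimal numeric check, with small integers standing in for the hash keys:

```typescript
// Symmetric-aggregate identity check. Hypothetical data: the first source
// row has been duplicated by a fan-out join, but each source row keeps a
// unique key (real dialects use sqlSumDistinctHashedKey, a large hash).
const rows = [
  {key: 1_000_000, value: 10}, // same source row, seen twice after a join
  {key: 1_000_000, value: 10},
  {key: 2_000_000, value: 32},
];
const distinct = (xs: number[]) => [...new Set(xs)];
const sum = (xs: number[]) => xs.reduce((a, b) => a + b, 0);

const sumDistinct =
  sum(distinct(rows.map(r => r.value + r.key))) -
  sum(distinct(rows.map(r => r.key)));
console.log(sumDistinct); // 42 -- each source row counted exactly once
```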
@@ -339,6 +359,20 @@ class PostgresDialect extends pg_impl_1.PostgresBase {
         const array = lit.kids.values.map(val => val.sql);
         return 'JSONB_BUILD_ARRAY(' + array.join(',') + ')';
     }
+    sqlTimeExtractExpr(qi, from) {
+        const units = pg_impl_1.timeExtractMap[from.units] || from.units;
+        let extractFrom = from.e.sql;
+        if (malloy_types_1.TD.isTimestamp(from.e.typeDef)) {
+            const tz = (0, dialect_1.qtz)(qi);
+            if (tz) {
+                extractFrom = `(${extractFrom}::TIMESTAMPTZ AT TIME ZONE '${tz}')`;
+            }
+        }
+        // PostgreSQL before 14 returns a double precision for EXTRACT, cast to integer
+        // since it is common to pass an extraction to mod (like in filter expressions)
+        const extracted = `EXTRACT(${units} FROM ${extractFrom})::integer`;
+        return from.units === 'day_of_week' ? `(${extracted}+1)` : `(${extracted})`;
+    }
 }
 exports.PostgresDialect = PostgresDialect;
 //# sourceMappingURL=postgres.js.map
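For a timestamp column with a query timezone set, the new `sqlTimeExtractExpr` produces SQL along these lines. This is illustrative output only; it assumes `pg_impl`'s `timeExtractMap` translates `day_of_week` to PostgreSQL's `dow`, which `EXTRACT` returns as 0-6 with Sunday = 0, hence the `+1` to reach a 1-7, Sunday = 1 convention:

```typescript
// Illustrative only: the shape of the SQL emitted for extracting
// day_of_week from a timestamp column under a query timezone.
// Column name and timezone are hypothetical.
const column = 'created_at';
const tz = 'America/New_York';
const extractFrom = `(${column}::TIMESTAMPTZ AT TIME ZONE '${tz}')`;
// ::integer works around older PostgreSQL returning double precision
const extracted = `EXTRACT(dow FROM ${extractFrom})::integer`;
console.log(`(${extracted}+1)`);
// -> (EXTRACT(dow FROM (created_at::TIMESTAMPTZ
//    AT TIME ZONE 'America/New_York'))::integer+1)
```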
package/dist/dialect/trino/trino.js
CHANGED

@@ -519,7 +519,7 @@ ${(0, utils_1.indent)(sql)}
             const name = (_a = f.as) !== null && _a !== void 0 ? _a : f.name;
             rowVals.push((_b = lit.kids[name].sql) !== null && _b !== void 0 ? _b : 'internal-error-record-literal');
             const elType = this.malloyTypeToSQLType(f);
-            rowTypes.push(`${name} ${elType}`);
+            rowTypes.push(`${this.sqlMaybeQuoteIdentifier(name)} ${elType}`);
         }
     }
     return `CAST(ROW(${rowVals.join(',')}) AS ROW(${rowTypes.join(',')}))`;
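The one-line Trino change matters because a record field whose name is a reserved word or contains unusual characters would otherwise break the `CAST(ROW(...) AS ROW(...))` expression. A sketch with a hypothetical always-quote rule; the real `sqlMaybeQuoteIdentifier` is dialect code that decides when quoting is actually needed:

```typescript
// Hypothetical quoting helper; the real sqlMaybeQuoteIdentifier lives in
// the Trino dialect and may quote only when necessary.
const quoteIdentifier = (name: string) => `"${name.replace(/"/g, '""')}"`;

const fields = [
  {name: 'order', sql: '1', sqlType: 'BIGINT'}, // reserved word in SQL
  {name: 'item count', sql: '2', sqlType: 'BIGINT'}, // contains a space
];
const rowVals = fields.map(f => f.sql).join(',');
const rowTypes = fields
  .map(f => `${quoteIdentifier(f.name)} ${f.sqlType}`)
  .join(',');
// Unquoted, `order BIGINT` would fail to parse inside the ROW type.
console.log(`CAST(ROW(${rowVals}) AS ROW(${rowTypes}))`);
```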
package/dist/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 export { DuckDBDialect, StandardSQLDialect, TrinoDialect, PostgresDialect, SnowflakeDialect, MySQLDialect, registerDialect, arg, qtz, overload, minScalar, anyExprType, minAggregate, maxScalar, sql, makeParam, param, variadicParam, literal, spread, Dialect, TinyParser, } from './dialect';
 export type { DialectFieldList, DialectFunctionOverloadDef, QueryInfo, MalloyStandardFunctionImplementations, DefinitionBlueprint, DefinitionBlueprintMap, OverloadedDefinitionBlueprint, TinyToken, } from './dialect';
-export type { QueryDataRow, StructDef, TableSourceDef, SQLSourceDef, SourceDef, JoinFieldDef, NamedSourceDefs, MalloyQueryData, DateUnit, ExtractUnit, TimestampUnit, TemporalFieldType, QueryData, QueryValue, Expr, FilterCondition, Argument, Parameter, FieldDef, PipeSegment, QueryFieldDef, IndexFieldDef, TurtleDef, SearchValueMapResult, SearchIndexResult, ModelDef, Query, QueryResult, QueryResultDef, QueryRunStats, QueryScalar, NamedQuery, NamedModelObject, ExpressionType, FunctionDef, FunctionOverloadDef, FunctionParameterDef, ExpressionValueType, TypeDesc, FunctionParamTypeDesc, DocumentLocation, DocumentRange, DocumentPosition, Sampling, Annotation, BasicAtomicTypeDef, BasicAtomicDef, AtomicTypeDef, AtomicFieldDef, ArrayDef, ArrayTypeDef, RecordTypeDef, RepeatedRecordTypeDef, RecordDef, RepeatedRecordDef, RecordLiteralNode, StringLiteralNode, ArrayLiteralNode, SourceComponentInfo, } from './model';
-export { isSourceDef, isBasicAtomic, isJoined, isJoinedSource, isSamplingEnable, isSamplingPercent, isSamplingRows, isRepeatedRecord, isBasicArray, mkArrayDef, mkFieldDef, expressionIsAggregate, expressionIsAnalytic, expressionIsCalculation, expressionIsScalar, expressionIsUngroupedAggregate, indent, composeSQLExpr, isTimestampUnit, isDateUnit, } from './model';
+export type { QueryDataRow, StructDef, TableSourceDef, SQLSourceDef, SourceDef, JoinFieldDef, NamedSourceDefs, MalloyQueryData, DateUnit, ExtractUnit, TimestampUnit, TemporalFieldType, QueryData, QueryValue, Expr, FilterCondition, Argument, Parameter, FieldDef, PipeSegment, QueryFieldDef, IndexFieldDef, TurtleDef, SearchValueMapResult, SearchIndexResult, ModelDef, Query, QueryResult, QueryResultDef, QueryRunStats, QueryScalar, NamedQuery, NamedModelObject, ExpressionType, FunctionDef, FunctionOverloadDef, FunctionParameterDef, ExpressionValueType, TypeDesc, FunctionParamTypeDesc, DocumentLocation, DocumentRange, DocumentPosition, Sampling, Annotation, BasicAtomicTypeDef, BasicAtomicDef, AtomicTypeDef, AtomicFieldDef, ArrayDef, ArrayTypeDef, RecordTypeDef, RepeatedRecordTypeDef, RecordDef, RepeatedRecordDef, RecordLiteralNode, StringLiteralNode, ArrayLiteralNode, SourceComponentInfo, TimeLiteralNode, TypecastExpr, } from './model';
+export { isSourceDef, isBasicAtomic, isJoined, isJoinedSource, isSamplingEnable, isSamplingPercent, isSamplingRows, isRepeatedRecord, isBasicArray, mkArrayDef, mkFieldDef, expressionIsAggregate, expressionIsAnalytic, expressionIsCalculation, expressionIsScalar, expressionIsUngroupedAggregate, indent, composeSQLExpr, isTimestampUnit, isDateUnit, constantExprToSQL, } from './model';
 export { malloyToQuery, MalloyTranslator, } from './lang';
 export type { LogMessage, TranslateResponse } from './lang';
 export { Model, Malloy, Runtime, AtomicFieldType, ConnectionRuntime, SingleConnectionRuntime, EmptyURLReader, InMemoryURLReader, FixedConnectionMap, MalloyError, JoinRelationship, SourceRelationship, DateTimeframe, TimestampTimeframe, PreparedResult, Result, QueryMaterializer, CSVWriter, JSONWriter, Parse, DataWriter, Explore, InMemoryModelCache, CacheManager, } from './malloy';
package/dist/index.js
CHANGED
@@ -33,8 +33,8 @@ var __importStar = (this && this.__importStar) || (function () {
     };
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-exports.annotationToTaglines = exports.annotationToTag = exports.sqlKey = exports.API = exports.extractMalloyObjectFromTag = exports.writeMalloyObjectToTag = exports.sourceDefToSourceInfo = exports.modelDefToModelInfo = exports.toAsyncGenerator = exports.CacheManager = exports.InMemoryModelCache = exports.Explore = exports.DataWriter = exports.Parse = exports.JSONWriter = exports.CSVWriter = exports.QueryMaterializer = exports.Result = exports.PreparedResult = exports.TimestampTimeframe = exports.DateTimeframe = exports.SourceRelationship = exports.JoinRelationship = exports.MalloyError = exports.FixedConnectionMap = exports.InMemoryURLReader = exports.EmptyURLReader = void 0;
+exports.ConnectionRuntime = exports.AtomicFieldType = exports.Runtime = exports.Malloy = exports.Model = exports.MalloyTranslator = exports.malloyToQuery = exports.constantExprToSQL = exports.isDateUnit = exports.isTimestampUnit = exports.composeSQLExpr = exports.indent = exports.expressionIsUngroupedAggregate = exports.expressionIsScalar = exports.expressionIsCalculation = exports.expressionIsAnalytic = exports.expressionIsAggregate = exports.mkFieldDef = exports.mkArrayDef = exports.isBasicArray = exports.isRepeatedRecord = exports.isSamplingRows = exports.isSamplingPercent = exports.isSamplingEnable = exports.isJoinedSource = exports.isJoined = exports.isBasicAtomic = exports.isSourceDef = exports.TinyParser = exports.Dialect = exports.spread = exports.literal = exports.variadicParam = exports.param = exports.makeParam = exports.sql = exports.maxScalar = exports.minAggregate = exports.anyExprType = exports.minScalar = exports.overload = exports.qtz = exports.arg = exports.registerDialect = exports.MySQLDialect = exports.SnowflakeDialect = exports.PostgresDialect = exports.TrinoDialect = exports.StandardSQLDialect = exports.DuckDBDialect = void 0;
+exports.annotationToTaglines = exports.annotationToTag = exports.sqlKey = exports.API = exports.extractMalloyObjectFromTag = exports.writeMalloyObjectToTag = exports.sourceDefToSourceInfo = exports.modelDefToModelInfo = exports.toAsyncGenerator = exports.CacheManager = exports.InMemoryModelCache = exports.Explore = exports.DataWriter = exports.Parse = exports.JSONWriter = exports.CSVWriter = exports.QueryMaterializer = exports.Result = exports.PreparedResult = exports.TimestampTimeframe = exports.DateTimeframe = exports.SourceRelationship = exports.JoinRelationship = exports.MalloyError = exports.FixedConnectionMap = exports.InMemoryURLReader = exports.EmptyURLReader = exports.SingleConnectionRuntime = void 0;
 /*
  * Copyright 2023 Google LLC
  *
@@ -102,6 +102,8 @@ Object.defineProperty(exports, "indent", { enumerable: true, get: function () {
 Object.defineProperty(exports, "composeSQLExpr", { enumerable: true, get: function () { return model_1.composeSQLExpr; } });
 Object.defineProperty(exports, "isTimestampUnit", { enumerable: true, get: function () { return model_1.isTimestampUnit; } });
 Object.defineProperty(exports, "isDateUnit", { enumerable: true, get: function () { return model_1.isDateUnit; } });
+// Used in testing, not really public API
+Object.defineProperty(exports, "constantExprToSQL", { enumerable: true, get: function () { return model_1.constantExprToSQL; } });
 var lang_1 = require("./lang");
 Object.defineProperty(exports, "malloyToQuery", { enumerable: true, get: function () { return lang_1.malloyToQuery; } });
 // Needed for tests only
package/dist/lang/composite-source-utils.js
CHANGED

@@ -39,7 +39,7 @@ sources) {
     let anyComposites = false;
     let joinsProcessed = false;
     const nonCompositeFields = getNonCompositeFields(source);
-    const expandedForError = onlyCompositeUsage(
+    const expandedForError = onlyCompositeUsage(expandFieldUsage(fieldUsage, rootFields).result, source.fields);
     if (source.type === 'composite') {
         let found = false;
         anyComposites = true;
@@ -66,7 +66,7 @@ sources) {
     }
     const fieldUsageWithWheres = (_b = mergeFieldUsage(getFieldUsageFromFilterList(inputSource), fieldUsage)) !== null && _b !== void 0 ? _b : [];
     const fieldsForLookup = [...nonCompositeFields, ...inputSource.fields];
-    const expanded =
+    const expanded = expandFieldUsage(fieldUsageWithWheres, fieldsForLookup);
     if (expanded.missingFields.length > 0) {
         // A lookup failed while expanding, which means this source certainly won't work
         for (const missingField of expanded.missingFields) {
@@ -177,7 +177,7 @@ sources) {
     }
     else if (source.partitionComposite !== undefined) {
         anyComposites = true;
-        const expanded =
+        const expanded = expandFieldUsage(fieldUsage, rootFields).result;
         // TODO possibly abort if expanded has missing fields...
         const expandedCategorized = categorizeFieldUsage(expanded);
         const { partitionFilter, issues } = getPartitionCompositeFilter(source.partitionComposite, expandedCategorized.sourceUsage);
@@ -199,7 +199,7 @@ sources) {
         };
     }
     if (!joinsProcessed) {
-        const expanded =
+        const expanded = expandFieldUsage(fieldUsage, getJoinFields(rootFields, path));
         if (expanded.missingFields.length > 0) {
             return {
                 error: {
@@ -278,7 +278,7 @@ function getExpandedSegment(segment, inputSource) {
         updatedSegment = { ...segment, queryFields: updatedQueryFields };
     }
     const allFieldUsage = mergeFieldUsage(getFieldUsageFromFilterList(inputSource), segment.fieldUsage);
-    const expanded =
+    const expanded = expandFieldUsage(allFieldUsage || [], fields);
     // Merge ungroupings from direct collection and field expansion
     const allUngroupings = [...collectedUngroupings, ...expanded.ungroupings];
     return {
@@ -334,7 +334,7 @@ function findActiveJoins(dependencies) {
  * - `missingFields`: References to fields which could not be resolved
  * - `activeJoins`: Topologically sorted list of joins needed to resolve these uses
  */
-function
+function expandFieldUsage(fieldUsage, fields) {
     var _a, _b, _c;
     const seen = {};
     const missingFields = [];
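Per the docstring above, `expandFieldUsage` resolves each referenced field against a field list and transitively pulls in the fields those definitions themselves use, reporting anything unresolvable in `missingFields`. A conceptual sketch of that expansion loop; the types and lookup rules here are simplified guesses, not the real implementation, which also tracks ungroupings and topologically sorts `activeJoins`:

```typescript
// Simplified model of transitive field-usage expansion. All types here are
// hypothetical stand-ins for the real ones in composite-source-utils.
type FieldUsage = {path: string[]};
type FieldDef = {name: string; fieldUsage?: FieldUsage[]};

function expandUsageSketch(fieldUsage: FieldUsage[], fields: FieldDef[]) {
  const result: FieldUsage[] = [];
  const missingFields: FieldUsage[] = [];
  const seen = new Set<string>();
  const queue = [...fieldUsage];
  while (queue.length > 0) {
    const usage = queue.shift()!;
    const key = usage.path.join('.');
    if (seen.has(key)) continue; // each reference expands only once
    seen.add(key);
    result.push(usage);
    const def = fields.find(f => f.name === usage.path[0]);
    if (def === undefined) {
      missingFields.push(usage); // lookup failed; caller rejects the source
    } else {
      queue.push(...(def.fieldUsage ?? [])); // pull in transitive uses
    }
  }
  return {result, missingFields};
}
```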
package/dist/model/expression_compiler.js
CHANGED

@@ -33,16 +33,15 @@ function sqlSumDistinct(dialect, sqlExp, sqlDistintKey) {
     const precision = 9;
     const uniqueInt = dialect.sqlSumDistinctHashedKey(sqlDistintKey);
     const multiplier = 10 ** (precision - NUMERIC_DECIMAL_PRECISION);
-
-
-
-
-
-    )
-    -
-    SUM(DISTINCT ${uniqueInt})
+    // Ensure value is numeric and handle nulls
+    const safeValue = `CAST(COALESCE(${sqlExp}, 0) AS ${dialect.defaultDecimalType})`;
+    // Scale and round to eliminate floating point differences
+    const roundedValue = `ROUND(${safeValue}*${multiplier}, ${NUMERIC_DECIMAL_PRECISION})`;
+    const sumSQL = `(
+        SUM(DISTINCT ${roundedValue} + ${uniqueInt})
+        - SUM(DISTINCT ${uniqueInt})
     )`;
-    let ret = `(${sumSQL}
+    let ret = `(${sumSQL}/${multiplier})`;
     ret = `CAST(${ret} AS ${dialect.defaultNumberType})`;
     return ret;
 }
package/dist/model/filter_compilers.js
CHANGED

@@ -521,51 +521,20 @@ class TemporalFilterCompiler {
                 return this.lastUnit(m.units);
             case 'next':
                 return this.nextUnit(m.units);
+            case 'sunday':
+                return this.weekdayMoment(1, m.which);
             case 'monday':
+                return this.weekdayMoment(2, m.which);
             case 'tuesday':
+                return this.weekdayMoment(3, m.which);
             case 'wednesday':
+                return this.weekdayMoment(4, m.which);
             case 'thursday':
+                return this.weekdayMoment(5, m.which);
             case 'friday':
+                return this.weekdayMoment(6, m.which);
             case 'saturday':
-
-                const destDay = [
-                    'sunday',
-                    'monday',
-                    'tuesday',
-                    'wednesday',
-                    'thursday',
-                    'friday',
-                    'saturday',
-                ].indexOf(m.moment);
-                const dow = this.dayofWeek(this.nowExpr()).sql;
-                if (m.which === 'next') {
-                    const nForwards = `${this.mod7(`${destDay}-(${dow}-1)+6`)}+1`;
-                    const begin = this.delta(this.thisUnit('day').begin, '+', nForwards, 'day');
-                    const end = this.delta(this.thisUnit('day').begin, '+', `${nForwards}+1`, 'day');
-                    // console.log(
-                    //   `SELECT ${
-                    //     this.nowExpr().sql
-                    //   } as now,\n ${destDay} as destDay,\n ${dow} as dow,\n ${nForwards} as nForwards,\n ${
-                    //     begin.sql
-                    //   } as begin,\n ${end.sql} as end`
-                    // );
-                    return { begin, end: end.sql };
-                }
-                // dacks back = mod((daw0 - dst) + 6, 7) + 1;
-                // dacks back = mod(((daw - 1) - dst) + 6, 7) + 1;
-                // dacks back = mod(((daw) - dst) + 7, 7) + 1;
-                const nBack = `${this.mod7(`(${dow}-1)-${destDay}+6`)}+1`;
-                const begin = this.delta(this.thisUnit('day').begin, '-', nBack, 'day');
-                const end = this.delta(this.thisUnit('day').begin, '-', `(${nBack})-1`, 'day');
-                // console.log(
-                //   `SELECT ${
-                //     this.nowExpr().sql
-                //   } as now,\n ${destDay} as destDay,\n ${dow} as dow,\n ${nBack} as nBack,\n ${
-                //     begin.sql
-                //   } as begin,\n ${end.sql} as end`
-                // );
-                return { begin, end: end.sql };
-            }
+                return this.weekdayMoment(7, m.which);
         }
     }
     isIn(notIn, begin, end) {
@@ -581,6 +550,27 @@ class TemporalFilterCompiler {
         end = this.time(end);
         return `${this.expr} ${begOp} ${begin} ${joinOp} ${this.expr} ${endOp} ${end}`;
     }
+    weekdayMoment(destDay, which) {
+        const direction = which || 'last';
+        const dow = this.dayofWeek(this.nowExpr());
+        const todayBegin = this.thisUnit('day').begin;
+        // destDay comes in as 1-7 (Malloy format), convert to 0-6
+        const destDayZeroBased = destDay - 1;
+        // dow is 1-7, convert to 0-6 for the arithmetic
+        const dowZeroBased = `(${dow.sql}-1)`;
+        let daysOffset;
+        if (direction === 'next') {
+            // Days forward: ((destDay - dow + 6) % 7) + 1
+            daysOffset = `${this.mod7(`${destDayZeroBased}-${dowZeroBased}+6`)}+1`;
+        }
+        else {
+            // Days back: ((dow - destDay + 6) % 7) + 1
+            daysOffset = `${this.mod7(`${dowZeroBased}-${destDayZeroBased}+6`)}+1`;
+        }
+        const begin = this.delta(todayBegin, direction === 'next' ? '+' : '-', daysOffset, 'day');
+        const end = this.delta(begin, '+', '1', 'day');
+        return { begin, end: end.sql };
+    }
 }
 exports.TemporalFilterCompiler = TemporalFilterCompiler;
 //# sourceMappingURL=filter_compilers.js.map
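The consolidated `weekdayMoment` replaces the duplicated inline arithmetic that previously lived under the weekday cases. Its offset formulas are easy to sanity-check with plain numbers; note that when today already is the destination weekday, both formulas yield 7, so "next monday" uttered on a Monday means next week's Monday:

```typescript
// Checking weekdayMoment's offset arithmetic with plain numbers.
// mod7 here is a numeric stand-in for the SQL-generating this.mod7.
// Days are zero-based with Sunday = 0, matching destDayZeroBased.
const mod7 = (n: number) => ((n % 7) + 7) % 7;

// 'next monday' when today is Thursday (dest = 1, dow = 4):
console.log(mod7(1 - 4 + 6) + 1); // 4 days forward: Thu -> Mon

// 'last monday' when today is Thursday:
console.log(mod7(4 - 1 + 6) + 1); // 3 days back: Thu -> Mon

// 'next monday' on a Monday: offset is 7, i.e. next week's Monday
console.log(mod7(1 - 1 + 6) + 1); // 7
```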
package/dist/model/query_node.d.ts
CHANGED

@@ -80,7 +80,7 @@ export declare class QueryStruct {
     /**
      * For fields which are a record, but the value is an expression
      * we capture the context needed to generate the expression in
-     * QueryQuery.
+     * QueryQuery.expandRecordExpressions. Later in the compilation if a
      * reference passes through this struct, this will call
      * the expression compiler with the correct context
      * to compute the record value.
package/dist/model/query_query.d.ts
CHANGED

@@ -54,11 +54,11 @@ export declare class QueryQuery extends QueryField {
     * @param resultStruct - The FieldInstanceResult containing compilation context
     * @param source - The QueryStruct to traverse (initially the query's parent/input)
     */
-
+    expandRecordExpressions(resultStruct: FieldInstanceResult, source: QueryStruct): void;
     generateSQLFilters(resultStruct: FieldInstanceResult, which: 'where' | 'having'): AndChain;
     prepare(_stageWriter: StageWriter | undefined): void;
     private findJoins;
-    addAlwaysJoins(
+    addAlwaysJoins(): void;
     getResultMetadata(fi: FieldInstance): ResultStructMetadataDef | ResultMetadataDef | undefined;
     /** returns a fields and primary key of a struct for this query */
     getResultStructDef(resultStruct?: FieldInstanceResult, isRoot?: boolean): QueryResultDef;
package/dist/model/query_query.js
CHANGED

@@ -93,83 +93,52 @@ class QueryQuery extends query_node_1.QueryField {
         const as = field.getIdentifier();
         return { as, field };
     }
-    addDependantPath(
-
-        return;
-    }
-    // Loop through path segments, ensuring each join exists
-    let currentContext = context;
-    for (const segment of path.slice(0, -1)) {
-        // Try to get the field at this path segment
-        let segmentField;
-        try {
-            segmentField = currentContext.getFieldByName([segment]);
-        }
-        catch {
-            // Field doesn't exist, need to add the join
-            // This is where we'd need to figure out how to create the missing join
-            // Maybe we need more context about what join we're trying to add?
-            throw new Error(`Cannot find join '${segment}' in ${path.join('.')} to add to query`);
-        }
-        if (segmentField instanceof query_node_1.QueryFieldStruct) {
-            if ((0, malloy_types_1.isJoinedSource)(segmentField.fieldDef)) {
-                resultStruct
-                    .root()
-                    .addStructToJoin(segmentField.queryStruct, undefined);
-                currentContext = segmentField.queryStruct;
-            }
-            else {
-                // Can't navigate deeper into non-joined sources like records
-                break;
-            }
-        }
-    }
-    // Now handle the full path for the final dependency
-    const node = context.getFieldByName(path);
+    addDependantPath(path, uniqueKeyRequirement) {
+        const node = this.parent.getFieldByName(path);
         const joinableParent = node instanceof query_node_1.QueryFieldStruct
             ? node.queryStruct.getJoinableParent()
             : node.parent.getJoinableParent();
-
+        this.rootResult.addStructToJoin(joinableParent, uniqueKeyRequirement);
     }
-    dependenciesFromFieldUsage(
+    dependenciesFromFieldUsage() {
+        const resultRoot = this.rootResult;
         // Only QuerySegment and IndexSegment have fieldUsage, RawSegment does not
         if (this.firstSegment.type === 'raw' ||
             this.firstSegment.type === 'partial') {
             throw new Error('QueryQuery attempt to load a raw or partial segment');
         }
         for (const joinUsage of this.firstSegment.activeJoins || []) {
-            this.addDependantPath(
+            this.addDependantPath(joinUsage.path, undefined);
         }
         for (const usage of this.firstSegment.expandedFieldUsage || []) {
             if (usage.analyticFunctionUse) {
-
+                resultRoot.queryUsesPartitioning = true;
                 // BigQuery-specific handling
                 if (this.parent.dialect.cantPartitionWindowFunctionsOnExpressions &&
-
+                    resultRoot.firstSegment.type === 'reduce') {
                     // force the use of a lateral_join_bag
-
-
+                    resultRoot.isComplexQuery = true;
+                    resultRoot.queryUsesPartitioning = true;
                 }
                 continue;
             }
             if (usage.uniqueKeyRequirement) {
                 if (usage.path.length === 0) {
-
+                    resultRoot.addStructToJoin(this.parent, usage.uniqueKeyRequirement);
                 }
                 else {
-                    this.addDependantPath(
+                    this.addDependantPath(usage.path, usage.uniqueKeyRequirement);
                 }
-                continue;
             }
         }
         const expandedUngroupings = 'expandedUngroupings' in this.firstSegment
             ? this.firstSegment.expandedUngroupings || []
             : [];
         for (const ungrouping of expandedUngroupings) {
-
-
+            resultRoot.isComplexQuery = true;
+            resultRoot.queryUsesPartitioning = true;
             // Navigate to correct result struct using ungrouping's path
-            let destResult =
+            let destResult = resultRoot;
             for (const pathSegment of ungrouping.path) {
                 const nextStruct = destResult.allFields.get(pathSegment);
                 if (!(nextStruct instanceof field_instance_1.FieldInstanceResult)) {
@@ -264,7 +233,7 @@ class QueryQuery extends query_node_1.QueryField {
      * @param resultStruct - The FieldInstanceResult containing compilation context
      * @param source - The QueryStruct to traverse (initially the query's parent/input)
      */
-
+    expandRecordExpressions(resultStruct, source) {
         for (const field of source.nameMap.values()) {
             if (field instanceof query_node_1.QueryFieldStruct) {
                 const qs = field.queryStruct;
@@ -277,7 +246,7 @@ class QueryQuery extends query_node_1.QueryField {
                 qs.computeRecordExpression = () => (0, expression_compiler_1.exprToSQL)(resultStruct, parent, e);
             }
             // Recurse into this structure
-            this.
+            this.expandRecordExpressions(resultStruct, qs);
         }
     }
 }
@@ -304,15 +273,15 @@ class QueryQuery extends query_node_1.QueryField {
     }
     prepare(_stageWriter) {
         if (!this.prepared) {
-            this.
+            this.expandRecordExpressions(this.rootResult, this.parent);
             // Add the root base join to the joins map
             this.rootResult.addStructToJoin(this.parent, undefined);
             // Expand fields (just adds them to result, no dependency tracking)
             this.expandFields(this.rootResult);
             // Process all dependencies from translator's fieldUsage
-            this.dependenciesFromFieldUsage(
+            this.dependenciesFromFieldUsage();
             // Handle always joins
-            this.addAlwaysJoins(
+            this.addAlwaysJoins();
             // Calculate symmetric aggregates based on the joins
             this.rootResult.calculateSymmetricAggregates();
             this.prepared = true;
@@ -328,7 +297,7 @@ class QueryQuery extends query_node_1.QueryField {
             this.findJoins(s);
         }
     }
-    addAlwaysJoins(
+    addAlwaysJoins() {
         var _a;
         const stage = this.fieldDef.pipeline[0];
         if (stage.type !== 'raw') {
@@ -336,7 +305,7 @@ class QueryQuery extends query_node_1.QueryField {
         for (const joinName of alwaysJoins) {
             const qs = this.parent.getChildByName(joinName);
             if (qs instanceof query_node_1.QueryFieldStruct) {
-                rootResult.addStructToJoin(qs.queryStruct, undefined);
+                this.rootResult.addStructToJoin(qs.queryStruct, undefined);
             }
         }
     }
package/dist/version.d.ts
CHANGED
@@ -1 +1 @@
-export declare const MALLOY_VERSION = "0.0.308";
+export declare const MALLOY_VERSION = "0.0.310";
package/dist/version.js
CHANGED
@@ -2,5 +2,5 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.MALLOY_VERSION = void 0;
 // generated with 'generate-version-file' script; do not edit manually
-exports.MALLOY_VERSION = '0.0.308';
+exports.MALLOY_VERSION = '0.0.310';
 //# sourceMappingURL=version.js.map
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@malloydata/malloy",
-  "version": "0.0.308",
+  "version": "0.0.310",
   "license": "MIT",
   "exports": {
     ".": "./dist/index.js",
@@ -41,9 +41,9 @@
     "generate-version-file": "VERSION=$(npm pkg get version --workspaces=false | tr -d \\\")\necho \"// generated with 'generate-version-file' script; do not edit manually\\nexport const MALLOY_VERSION = '$VERSION';\" > src/version.ts"
   },
   "dependencies": {
-    "@malloydata/malloy-filter": "0.0.308",
-    "@malloydata/malloy-interfaces": "0.0.308",
-    "@malloydata/malloy-tag": "0.0.308",
+    "@malloydata/malloy-filter": "0.0.310",
+    "@malloydata/malloy-interfaces": "0.0.310",
+    "@malloydata/malloy-tag": "0.0.310",
    "antlr4ts": "^0.5.0-alpha.4",
    "assert": "^2.0.0",
    "jaro-winkler": "^0.2.8",