@malloydata/malloy 0.0.288 → 0.0.290
This diff shows the content changes between publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
- package/dist/index.d.ts +1 -1
- package/dist/lang/ast/expressions/time-literal.js +23 -0
- package/dist/lang/ast/query-elements/query-arrow.js +1 -1
- package/dist/lang/ast/query-utils.d.ts +0 -1
- package/dist/lang/ast/query-utils.js +0 -7
- package/dist/lang/ast/source-elements/named-source.js +10 -2
- package/dist/lang/ast/typedesc-utils.d.ts +2 -1
- package/dist/lang/ast/typedesc-utils.js +13 -0
- package/dist/lang/ast/types/space-param.js +4 -20
- package/dist/malloy.d.ts +16 -2
- package/dist/malloy.js +82 -1
- package/dist/model/composite_source_utils.d.ts +15 -1
- package/dist/model/composite_source_utils.js +109 -5
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +4 -4
package/dist/index.d.ts
CHANGED
@@ -5,7 +5,7 @@ export { isSourceDef, Segment, isBasicAtomic, isJoined, isJoinedSource, isSampli
 export { malloyToQuery, MalloyTranslator, } from './lang';
 export type { LogMessage, TranslateResponse } from './lang';
 export { Model, Malloy, Runtime, AtomicFieldType, ConnectionRuntime, SingleConnectionRuntime, EmptyURLReader, InMemoryURLReader, FixedConnectionMap, MalloyError, JoinRelationship, SourceRelationship, DateTimeframe, TimestampTimeframe, PreparedResult, Result, QueryMaterializer, CSVWriter, JSONWriter, Parse, DataWriter, Explore, InMemoryModelCache, CacheManager, } from './malloy';
-export type { PreparedQuery, Field, AtomicField, ExploreField, QueryField, SortableField, DataArray, DataRecord, DataColumn, DataArrayOrRecord, Loggable, ModelMaterializer, DocumentTablePath, DocumentSymbol, ResultJSON, PreparedResultJSON, PreparedResultMaterializer, ExploreMaterializer, WriteStream, SerializedExplore, ModelCache, CachedModel, DateField, TimestampField, } from './malloy';
+export type { PreparedQuery, Field, AtomicField, ExploreField, QueryField, SortableField, DataArray, DataRecord, DataColumn, DataArrayOrRecord, Loggable, ModelMaterializer, DocumentTablePath, DocumentSymbol, ResultJSON, PreparedResultJSON, PreparedResultMaterializer, ExploreMaterializer, WriteStream, SerializedExplore, ModelCache, CachedModel, DateField, TimestampField, SourceComponentInfo, } from './malloy';
 export type { QueryOptionsReader, RunSQLOptions } from './run_sql_options';
 export type { EventStream, ModelString, ModelURL, QueryString, QueryURL, URLReader, InvalidationKey, } from './runtime_types';
 export type { Connection, ConnectionConfig, ConnectionFactory, ConnectionParameter, ConnectionParameterValue, ConnectionConfigSchema, FetchSchemaOptions, InfoConnection, LookupConnection, PersistSQLResults, PooledConnection, TestableConnection, StreamingConnection, } from './connection/types';

package/dist/lang/ast/expressions/time-literal.js
CHANGED
@@ -162,6 +162,7 @@ class LiteralTimestamp extends TimeLiteral {
 const hasSubsecs = literalTs.match(/^([^.,]+)[,.](\d+)$/);
 if (hasSubsecs) {
 literalTs = hasSubsecs[1];
+units = undefined;
 // subSecs = hasSubsecs[2];
 // mtoy TODO subsecond units not ignored
 }
@@ -199,6 +200,28 @@ class GranularLiteral extends TimeLiteral {
 let rangeEnd = this.getNext();
 if (rangeEnd) {
 const testValue = left.getExpression(fs);
+if (testValue.type === 'date' && op === '=' && this.units === 'day') {
+// TODO remove the === 'day' check above and warn
+// if (this.units !== 'day') {
+// this.logWarning(
+// 'time-equality-not-granular',
+// `Equality comparisons of a date to a literal ${this.units} will compare the first day of the ${this.units}; use a literal day instead, or use \`?\` to check whether the date is within the ${this.units}.`
+// );
+// }
+return super.apply(fs, op, left);
+}
+if (testValue.type === 'timestamp' &&
+op === '=' &&
+this.units === undefined) {
+// TODO remove the === 'second' check above and warn
+// if (this.units !== 'second') {
+// this.logWarning(
+// 'time-equality-not-granular',
+// `Equality comparisons of a timestamp to a literal ${this.units} will compare the first instant of the ${this.units}; use a literal timestamp (to the second) instead, or use \`?\` to check whether the date is within the ${this.units}.`
+// );
+// }
+return super.apply(fs, op, left);
+}
 if (testValue.type === 'timestamp') {
 const newStart = (0, expression_def_1.getMorphicValue)(rangeStart, 'timestamp');
 const newEnd = (0, expression_def_1.getMorphicValue)(rangeEnd, 'timestamp');

package/dist/lang/ast/query-elements/query-arrow.js
CHANGED
@@ -81,7 +81,7 @@ class QueryArrow extends query_base_1.QueryBase {
 if (segment !== undefined) {
 const unsatisfiedGroupBys = (0, composite_source_utils_1.checkRequiredGroupBys)(compositeResolvedSourceDef !== null && compositeResolvedSourceDef !== void 0 ? compositeResolvedSourceDef : inputStruct, segment);
 for (const unsatisfiedGroupBy of unsatisfiedGroupBys) {
-this.logError('missing-required-group-by', `Group by of \`${unsatisfiedGroupBy.path.join('.')}\` is required but not present`, {
+this.logError('missing-required-group-by', `Group by or single value filter of \`${unsatisfiedGroupBy.path.join('.')}\` is required but not present`, {
 at: unsatisfiedGroupBy.at,
 });
 }

package/dist/lang/ast/query-utils.d.ts
CHANGED
@@ -2,4 +2,3 @@ import type { PipeSegment } from '../../model';
 import type { MalloyElement } from './types/malloy-element';
 export declare function detectAndRemovePartialStages(pipeline: PipeSegment[], logTo: MalloyElement): PipeSegment[];
 export declare function unsatisfiedRequiredGroupBys(segment: PipeSegment | undefined, requiredGroupBys: string[][]): string[][];
-export declare function validateRequiredGroupBys(segment: PipeSegment, logTo: MalloyElement, requiredGroupBys: string[][]): void;

package/dist/lang/ast/query-utils.js
CHANGED
@@ -24,7 +24,6 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.detectAndRemovePartialStages = detectAndRemovePartialStages;
 exports.unsatisfiedRequiredGroupBys = unsatisfiedRequiredGroupBys;
-exports.validateRequiredGroupBys = validateRequiredGroupBys;
 const model_1 = require("../../model");
 // We don't want to ever generate actual 'partial' stages, so convert this
 // into a reduce so the compiler doesn't explode
@@ -67,10 +66,4 @@ function unsatisfiedRequiredGroupBys(segment, requiredGroupBys) {
 }
 return result;
 }
-function validateRequiredGroupBys(segment, logTo, requiredGroupBys) {
-const missing = unsatisfiedRequiredGroupBys(segment, requiredGroupBys);
-for (const requiredGroupBy of missing) {
-logTo.logError('missing-required-group-by', `Group by of \`${requiredGroupBy.join('.')}\` is required but not present`);
-}
-}
 //# sourceMappingURL=query-utils.js.map
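
The removed validateRequiredGroupBys helper both computed and logged the error; callers now ask for the unsatisfied paths (via unsatisfiedRequiredGroupBys here, or checkRequiredGroupBys from composite_source_utils, as the query-arrow.js hunk above shows) and log the error themselves with the new wording. A minimal caller-side sketch in TypeScript; reportMissingGroupBys and the plain logError callback are illustrative, not part of the package API:

function reportMissingGroupBys(
  missing: string[][], // paths as returned by unsatisfiedRequiredGroupBys
  logError: (code: string, message: string) => void // e.g. a MalloyElement logger
): void {
  for (const path of missing) {
    logError(
      'missing-required-group-by',
      `Group by or single value filter of \`${path.join('.')}\` is required but not present`
    );
  }
}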

package/dist/lang/ast/source-elements/named-source.js
CHANGED
@@ -116,7 +116,7 @@ class NamedSource extends source_1.Source {
 return this.evaluateArguments(parameterSpace, base.parameters, []);
 }
 evaluateArguments(parameterSpace, parametersIn, parametersOut) {
-var _a, _b;
+var _a, _b, _c;
 const outArguments = { ...this.sourceArguments };
 const passedNames = new Set();
 for (const argument of (_a = this.args) !== null && _a !== void 0 ? _a : []) {
@@ -144,7 +144,15 @@ class NamedSource extends source_1.Source {
 if (pVal.type === 'filter expression' &&
 parameter.type === 'filter expression' &&
 parameter.filterType) {
-
+if (value.node === 'parameter') {
+const filterType = (_c = pVal['filterType']) !== null && _c !== void 0 ? _c : 'missing-filter-type';
+if (parameter.filterType !== filterType) {
+argument.value.logError('filter-expression-type', `Parameter types filter<${parameter.filterType}> and filter<${filterType}> do not match`);
+}
+}
+else {
+(0, expression_def_1.checkFilterExpression)(argument.value, parameter.filterType, value);
+}
 }
 if (pVal.type !== parameter.type && (0, malloy_types_1.isCastType)(parameter.type)) {
 value = (0, time_utils_1.castTo)(parameter.type, pVal.value, pVal.type, true);

package/dist/lang/ast/typedesc-utils.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import type { AtomicTypeDef, TypeDesc } from '../../model';
+import type { AtomicTypeDef, EvalSpace, Parameter, TypeDesc } from '../../model';
 export declare const nullT: TypeDesc;
 export declare const numberT: TypeDesc;
 export declare const stringT: TypeDesc;
@@ -46,3 +46,4 @@ export declare function inspect(...types: (TypeDesc | undefined)[]): string;
 * create a field, don't copy the non type fields.
 */
 export declare function atomicDef(td: AtomicTypeDef | TypeDesc): AtomicTypeDef;
+export declare function parameterTypeDesc(p: Parameter, evalSpace: EvalSpace): TypeDesc;

package/dist/lang/ast/typedesc-utils.js
CHANGED
@@ -29,6 +29,7 @@ exports.typeIn = typeIn;
 exports.typeEq = typeEq;
 exports.inspect = inspect;
 exports.atomicDef = atomicDef;
+exports.parameterTypeDesc = parameterTypeDesc;
 const model_1 = require("../../model");
 function mkTypeDesc(
 // The problem is that record and array, as currently defined, require a dialect
@@ -155,4 +156,16 @@ function atomicDef(td) {
 }
 return { type: 'error' };
 }
+function parameterTypeDesc(p, evalSpace) {
+const t = p.type;
+const theType = t === 'filter expression'
+? { type: t, filterType: p.filterType }
+: atomicDef(p);
+return {
+...theType,
+expressionType: 'scalar',
+evalSpace,
+fieldUsage: [],
+};
+}
 //# sourceMappingURL=typedesc-utils.js.map

package/dist/lang/ast/types/space-param.js
CHANGED
@@ -78,15 +78,7 @@ class AbstractParameter extends SpaceParam {
 return this._parameter;
 }
 typeDesc() {
-
-const t = p.type;
-const theType = t === 'filter expression' ? { type: t } : TDU.atomicDef(p);
-return {
-...theType,
-expressionType: 'scalar',
-evalSpace: 'constant',
-fieldUsage: [],
-};
+return TDU.parameterTypeDesc(this.parameter(), 'constant');
 }
 }
 exports.AbstractParameter = AbstractParameter;
@@ -99,17 +91,9 @@ class DefinedParameter extends SpaceParam {
 return this.paramDef;
 }
 typeDesc() {
-
-
-
-return {
-...theType,
-expressionType: 'scalar',
-// TODO Not sure whether params are considered "input space". It seems like they
-// could be input or constant, depending on usage (same as above).
-evalSpace: 'input',
-fieldUsage: [],
-};
+// TODO Not sure whether params are considered "input space". It seems like they
+// could be input or constant, depending on usage (same as above).
+return TDU.parameterTypeDesc(this.parameter(), 'input');
 }
 }
 exports.DefinedParameter = DefinedParameter;

package/dist/malloy.d.ts
CHANGED
@@ -2,7 +2,7 @@ import type { RunSQLOptions } from './run_sql_options';
 import type { DocumentCompletion as DocumentCompletionDefinition, DocumentSymbol as DocumentSymbolDefinition, LogMessage } from './lang';
 import { MalloyTranslator } from './lang';
 import type { DocumentHelpContext } from './lang/parse-tree-walkers/document-help-context-walker';
-import type { CompiledQuery, DocumentLocation, DocumentReference, BooleanFieldDef, JSONFieldDef, NumberFieldDef, StringFieldDef, FilterCondition, Query as InternalQuery, ModelDef, DocumentPosition as ModelDocumentPosition, NamedQuery, QueryData, QueryDataRow, QueryResult, SearchIndexResult, SearchValueMapResult, StructDef, TurtleDef, NativeUnsupportedFieldDef, QueryRunStats, ImportLocation, Annotation, SQLSourceDef, AtomicFieldDef, DateFieldDef, TimestampFieldDef, SourceDef, QueryToMaterialize, Argument } from './model';
+import type { CompiledQuery, DocumentLocation, DocumentReference, BooleanFieldDef, JSONFieldDef, NumberFieldDef, StringFieldDef, FilterCondition, Query as InternalQuery, ModelDef, DocumentPosition as ModelDocumentPosition, NamedQuery, QueryData, QueryDataRow, QueryResult, SearchIndexResult, SearchValueMapResult, StructDef, TurtleDef, NativeUnsupportedFieldDef, QueryRunStats, ImportLocation, Annotation, SQLSourceDef, AtomicFieldDef, DateFieldDef, TimestampFieldDef, SourceDef, QueryToMaterialize, Argument, TableSourceDef } from './model';
 import type { EventStream, InvalidationKey, ModelString, ModelURL, QueryString, QueryURL, URLReader } from './runtime_types';
 import type { Connection, FetchSchemaOptions, InfoConnection, LookupConnection } from './connection/types';
 import type { Tag } from '@malloydata/malloy-tag';
@@ -19,6 +19,15 @@ export interface Loggable {
 warn: (message?: any, ...optionalParams: any[]) => void;
 error: (message?: any, ...optionalParams: any[]) => void;
 }
+export type SourceComponentInfo = {
+type: 'table';
+tableName: string;
+sourceID: string;
+} | {
+type: 'sql';
+selectStatement: string;
+sourceID: string;
+};
 export interface ParseOptions {
 importBaseURL?: URL;
 testEnvironment?: boolean;
@@ -98,7 +107,7 @@ export declare class Malloy {
 * instead of looping forever, but the fix is to correct the dialect.
 */
 static safelyFetchTableSchema(connection: InfoConnection, toFetch: Record<string, string>, opts: FetchSchemaOptions): Promise<{
-schemas: Record<string,
+schemas: Record<string, TableSourceDef>;
 errors: Record<string, string>;
 }>;
 /**
@@ -618,6 +627,11 @@ export declare class Explore extends Entity implements Taggable {
 toJSON(): SerializedExplore;
 static fromJSON(main_explore: SerializedExplore): Explore;
 get location(): DocumentLocation | undefined;
+private collectSourceComponents;
+/**
+* THIS IS A HIGHLY EXPERIMENTAL API AND MAY VANISH OR CHAGE WITHOUT NOTICE
+*/
+getSourceComponents(): SourceComponentInfo[];
 }
 export declare enum AtomicFieldType {
 String = "string",
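
The new Explore.getSourceComponents() (marked experimental above) returns the deduplicated physical inputs behind a source. A minimal usage sketch, assuming a Model has already been compiled (for example via Runtime.loadModel(...).getModel()); describeSourceComponents is a hypothetical helper, not part of the package:

import type {Model, SourceComponentInfo} from '@malloydata/malloy';

function describeSourceComponents(model: Model, sourceName: string): string[] {
  // getExploreByName throws if sourceName does not name a source in the model
  const explore = model.getExploreByName(sourceName);
  const components: SourceComponentInfo[] = explore.getSourceComponents();
  return components.map(c =>
    c.type === 'table'
      ? `table ${c.tableName} (${c.sourceID})`
      : `sql: ${c.selectStatement}`
  );
}
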
package/dist/malloy.js
CHANGED
@@ -33,6 +33,11 @@ const annotation_1 = require("./annotation");
 const sql_block_1 = require("./model/sql_block");
 const utils_1 = require("./lang/utils");
 const reference_list_1 = require("./lang/reference-list");
+function isSourceComponent(source) {
+return (source.type === 'table' ||
+source.type === 'sql_select' ||
+source.type === 'query_source');
+}
 const MALLOY_INTERNAL_URL = 'internal://internal.malloy';
 class Malloy {
 static get version() {
@@ -499,7 +504,7 @@ class Model {
 */
 getExploreByName(name) {
 const struct = this.modelDef.contents[name];
-if ((0, model_1.isSourceDef)(struct)) {
+if (struct && (0, model_1.isSourceDef)(struct)) {
 return new Explore(struct);
 }
 throw new Error("'name' is not an explore");
@@ -1268,6 +1273,82 @@ class Explore extends Entity {
 get location() {
 return this.structDef.location;
 }
+collectSourceComponents(structDef) {
+const sources = [];
+if (structDef.type === 'composite') {
+for (const source of structDef.sources) {
+sources.push(...this.collectSourceComponents(source));
+}
+return sources;
+}
+if (isSourceComponent(structDef)) {
+if (structDef.type === 'table') {
+// Generate sourceID based on connection and table name
+sources.push({
+type: 'table',
+tableName: structDef.tablePath,
+sourceID: `${structDef.connection}:${structDef.tablePath}`,
+});
+}
+else if (structDef.type === 'sql_select') {
+sources.push({
+type: 'sql',
+selectStatement: structDef.selectStr,
+sourceID: `${structDef.connection}:${structDef.selectStr}`,
+});
+}
+else if (structDef.type === 'query_source') {
+// For QuerySourceDef, we need to extract the SQL from the query
+// We need to create a PreparedQuery from the query, then get a PreparedResult
+// to access the SQL
+let sql;
+try {
+// Create a PreparedQuery from the query in the QuerySourceDef
+const preparedQuery = new PreparedQuery(structDef.query, this.modelDef, []);
+// Get the PreparedResult which contains the SQL
+const preparedResult = preparedQuery.getPreparedResult();
+// Extract the SQL
+sql = preparedResult.sql;
+}
+catch (error) {
+// If we can't compile the query, use a placeholder
+sql = `-- Could not compile SQL for query ${structDef.query.name || 'unnamed query'}: ${error instanceof Error ? error.message : String(error)}`;
+}
+// Generate sourceID based on connection and SQL
+const sourceID = `${structDef.connection}:${sql}`;
+sources.push({
+type: 'sql',
+selectStatement: sql,
+sourceID: sourceID,
+});
+}
+}
+else {
+return [];
+}
+// Process all fields to find joins
+for (const field of structDef.fields) {
+if ((0, model_1.isJoined)(field)) {
+sources.push(...this.collectSourceComponents(field));
+}
+}
+return sources;
+}
+/**
+* THIS IS A HIGHLY EXPERIMENTAL API AND MAY VANISH OR CHAGE WITHOUT NOTICE
+*/
+getSourceComponents() {
+const uniqueSources = {};
+if ((0, model_1.isSourceDef)(this.structDef)) {
+const allSources = this.collectSourceComponents(this.structDef);
+// Deduplicate sources using sourceID as the key
+for (const source of allSources) {
+uniqueSources[source.sourceID] = source;
+}
+}
+// Return the deduplicated sources as an array
+return Object.values(uniqueSources);
+}
 }
 exports.Explore = Explore;
 var AtomicFieldType;

package/dist/model/composite_source_utils.d.ts
CHANGED
@@ -1,5 +1,6 @@
+import type { BooleanFilter, NumberFilter, StringFilter, TemporalFilter } from '@malloydata/malloy-filter';
 import type { MalloyElement } from '../lang/ast';
-import type { FieldUsage, PipeSegment, SourceDef, StructDef, RequiredGroupBy } from './malloy_types';
+import type { FieldUsage, PipeSegment, SourceDef, Expr, StructDef, RequiredGroupBy } from './malloy_types';
 type CompositeCouldNotFindFieldError = {
 code: 'could_not_find_field';
 data: {
@@ -86,4 +87,17 @@ export declare function pathBegins(path: string[], prefix: string[]): boolean;
 export declare function sortFieldUsageByReferenceLocation(usage: FieldUsage[]): FieldUsage[];
 export declare function hasCompositesAnywhere(source: StructDef): boolean;
 export declare function logCompositeError(error: CompositeError, logTo: MalloyElement): void;
+export declare function compileFilterExpression(ft: string, fexpr: Expr): {
+kind: 'date' | 'timestamp';
+parsed: TemporalFilter;
+} | {
+kind: 'string';
+parsed: StringFilter;
+} | {
+kind: 'boolean';
+parsed: BooleanFilter;
+} | {
+kind: 'number';
+parsed: NumberFilter;
+} | undefined;
 export {};
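
The compileFilterExpression declaration above parses a literal filter expression into the corresponding @malloydata/malloy-filter clause, tagged with the filter kind. A small TypeScript sketch of consuming that discriminated union; the function itself is an internal model helper (not re-exported from the package root), so the CompiledFilter alias and describeCompiledFilter below are illustrative only:

import type {
  BooleanFilter,
  NumberFilter,
  StringFilter,
  TemporalFilter,
} from '@malloydata/malloy-filter';

// Mirrors the return type declared for compileFilterExpression above.
type CompiledFilter =
  | {kind: 'date' | 'timestamp'; parsed: TemporalFilter}
  | {kind: 'string'; parsed: StringFilter}
  | {kind: 'boolean'; parsed: BooleanFilter}
  | {kind: 'number'; parsed: NumberFilter}
  | undefined;

function describeCompiledFilter(f: CompiledFilter): string {
  if (f === undefined) return 'not a parseable literal filter expression';
  switch (f.kind) {
    case 'date':
    case 'timestamp':
      return `temporal filter clause (${f.kind})`;
    case 'string':
    case 'number':
    case 'boolean':
      return `${f.kind} filter clause`;
  }
}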

package/dist/model/composite_source_utils.js
CHANGED
@@ -24,6 +24,8 @@ exports.pathBegins = pathBegins;
 exports.sortFieldUsageByReferenceLocation = sortFieldUsageByReferenceLocation;
 exports.hasCompositesAnywhere = hasCompositesAnywhere;
 exports.logCompositeError = logCompositeError;
+exports.compileFilterExpression = compileFilterExpression;
+const malloy_filter_1 = require("@malloydata/malloy-filter");
 const malloy_types_1 = require("./malloy_types");
 const utils_1 = require("./utils");
 function _resolveCompositeSources(path, source, rootFields, nests, fieldUsage,
@@ -505,6 +507,7 @@ function getFieldUsageForField(field) {
 return [];
 }
 function nestLevelsAt(nests, at) {
+var _a;
 if (at === undefined)
 return nests;
 return {
@@ -512,7 +515,8 @@ function nestLevelsAt(nests, at) {
 nested: nests.nested.map(n => nestLevelsAt(n, at)),
 fieldsReferenced: fieldUsageAt(nests.fieldsReferencedDirectly, at),
 ungroupings: ungroupingsAt(nests.ungroupings, at),
-requiredGroupBys: requiredGroupBysAt(nests.requiredGroupBys, at),
+requiredGroupBys: (_a = requiredGroupBysAt(nests.requiredGroupBys, at)) !== null && _a !== void 0 ? _a : [],
+singleValueFilters: nests.singleValueFilters,
 };
 }
 function fieldUsageAt(fieldUsage, at) {
@@ -552,12 +556,13 @@ function joinedUngroupings(joinPath, ungroupings) {
 }));
 }
 function extractNestLevels(segment) {
-var _a, _b;
+var _a, _b, _c;
 const fieldsReferencedDirectly = [];
 const fieldsReferenced = [];
 const nested = [];
 const ungroupings = [];
 const requiredGroupBys = [];
+const singleValueFilters = [];
 if (segment.type === 'project' ||
 segment.type === 'partial' ||
 segment.type === 'reduce') {
@@ -580,6 +585,12 @@ function extractNestLevels(segment) {
 requiredGroupBys.push(...((_b = field.requiresGroupBy) !== null && _b !== void 0 ? _b : []));
 }
 }
+for (const filter of (_c = segment.filterList) !== null && _c !== void 0 ? _c : []) {
+if (!(0, malloy_types_1.expressionIsScalar)(filter.expressionType))
+continue;
+const fields = getSingleValueFilterFields(filter.e);
+singleValueFilters.push(...fields);
+}
 }
 const levels = {
 fieldsReferencedDirectly,
@@ -587,9 +598,70 @@ function extractNestLevels(segment) {
 fieldsReferenced,
 ungroupings,
 requiredGroupBys,
+singleValueFilters,
 };
 return nestLevelsAt(levels, segment.referencedAt);
 }
+function getSingleValueFilterFields(filter) {
+const fieldPaths = [];
+if (filter.node === 'and') {
+fieldPaths.push(...getSingleValueFilterFields(filter.kids.left));
+fieldPaths.push(...getSingleValueFilterFields(filter.kids.right));
+}
+else if (filter.node === '()') {
+fieldPaths.push(...getSingleValueFilterFields(filter.e));
+}
+else {
+const path = isSingleValueFilterNode(filter);
+if (path) {
+fieldPaths.push(path);
+}
+}
+return fieldPaths;
+}
+function isSingleValueFilterNode(e) {
+if (e.node === 'filterMatch') {
+if (e.kids.expr.node === 'field') {
+const result = compileFilterExpression(e.dataType, e.kids.filterExpr);
+if (!result)
+return [];
+if ((result.parsed.operator === 'null' && !result.parsed.not) ||
+(result.kind === 'boolean' &&
+['false', 'true'].includes(result.parsed.operator) &&
+!result.parsed.not) ||
+(result.kind === 'date' &&
+result.parsed.operator === 'in' &&
+result.parsed.in.moment === 'literal' &&
+result.parsed.in.units === 'day' &&
+!result.parsed.not) ||
+(result.kind === 'timestamp' &&
+result.parsed.operator === 'in' &&
+result.parsed.in.moment === 'literal' &&
+result.parsed.in.units === undefined &&
+!result.parsed.not) ||
+// TODO: handle 'today', 'now', 'yesterday', etc.
+((result.kind === 'number' || result.kind === 'string') &&
+result.parsed.operator === '=' &&
+result.parsed.values.length === 1 &&
+!result.parsed.not)) {
+return e.kids.expr.path;
+}
+}
+}
+else if (e.node === '=') {
+if (e.kids.left.node === 'field' &&
+(e.kids.right.node === 'true' ||
+e.kids.right.node === 'false' ||
+e.kids.right.node === 'timeLiteral' ||
+e.kids.right.node === 'numberLiteral' ||
+e.kids.right.node === 'stringLiteral')) {
+return e.kids.left.path;
+}
+}
+else if (e.node === 'is-null' && e.e.node === 'field') {
+return e.e.path;
+}
+}
 function expandRefs(nests, fields) {
 var _a, _b, _c;
 const newNests = [];
@@ -655,6 +727,7 @@ function expandRefs(nests, fields) {
 ungroupings: [],
 nested: [],
 requiredGroupBys: (_a = ungrouping.requiresGroupBy) !== null && _a !== void 0 ? _a : [],
+singleValueFilters: [],
 }, fields);
 missingFields.push(...((_b = expanded.missingFields) !== null && _b !== void 0 ? _b : []));
 for (const field of expanded.result.requiredGroupBys) {
@@ -676,6 +749,7 @@ function expandRefs(nests, fields) {
 requiredGroupBys,
 unsatisfiableGroupBys,
 nested,
+singleValueFilters: nests.singleValueFilters,
 },
 missingFields: missingFields.length > 0 ? missingFields : undefined,
 };
@@ -700,13 +774,16 @@ function checkRequiredGroupBys(compositeResolvedSourceDef, segment) {
 return unsatisfied;
 }
 function getUnsatisfiedRequiredGroupBys(level) {
-const fields =
+const fields = [
+...level.fieldsReferencedDirectly.map(f => f.path),
+...level.singleValueFilters,
+];
 const requiredGroupBys = [...level.requiredGroupBys];
 for (const nested of level.nested) {
 requiredGroupBys.push(...getUnsatisfiedRequiredGroupBys(nested));
 }
 return [
-...requiredGroupBys.filter(rgb => !fields.some(f => pathEq(f
+...requiredGroupBys.filter(rgb => !fields.some(f => pathEq(f, rgb.path))),
 ...level.unsatisfiableGroupBys,
 ];
 }
@@ -803,7 +880,7 @@ function logCompositeError(error, logTo) {
 }
 else if (issue.type === 'missing-required-group-by') {
 const fieldRef = `\`${issue.requiredGroupBy.path.join('.')}\``;
-logTo.logError('could-not-resolve-composite-source', `Could not resolve composite source: missing group by ${fieldRef} as required in ${source}${requiredFields}`, { at: issue.requiredGroupBy.at });
+logTo.logError('could-not-resolve-composite-source', `Could not resolve composite source: missing group by or single value filter of ${fieldRef} as required in ${source}${requiredFields}`, { at: issue.requiredGroupBy.at });
 }
 else {
 const joinRef = `\`${issue.path.join('.')}\``;
@@ -817,4 +894,31 @@ function logCompositeError(error, logTo) {
 logTo.logError('could-not-resolve-composite-source', 'Could not resolve composite source');
 }
 }
+function compileFilterExpression(ft, fexpr) {
+if (fexpr.node !== 'filterLiteral') {
+return undefined;
+}
+const fsrc = fexpr.filterSrc;
+if (ft === 'date' || ft === 'timestamp') {
+const result = malloy_filter_1.TemporalFilterExpression.parse(fsrc);
+if (result.parsed)
+return { kind: ft, parsed: result.parsed };
+}
+else if (ft === 'string') {
+const result = malloy_filter_1.StringFilterExpression.parse(fsrc);
+if (result.parsed)
+return { kind: ft, parsed: result.parsed };
+}
+else if (ft === 'number') {
+const result = malloy_filter_1.NumberFilterExpression.parse(fsrc);
+if (result.parsed)
+return { kind: ft, parsed: result.parsed };
+}
+else if (ft === 'boolean') {
+const result = malloy_filter_1.BooleanFilterExpression.parse(fsrc);
+if (result.parsed)
+return { kind: ft, parsed: result.parsed };
+}
+return undefined;
+}
 //# sourceMappingURL=composite_source_utils.js.map

package/dist/version.d.ts
CHANGED
@@ -1 +1 @@
-export declare const MALLOY_VERSION = "0.0.288";
+export declare const MALLOY_VERSION = "0.0.290";

package/dist/version.js
CHANGED
@@ -2,5 +2,5 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.MALLOY_VERSION = void 0;
 // generated with 'generate-version-file' script; do not edit manually
-exports.MALLOY_VERSION = '0.0.288';
+exports.MALLOY_VERSION = '0.0.290';
 //# sourceMappingURL=version.js.map

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@malloydata/malloy",
-"version": "0.0.288",
+"version": "0.0.290",
 "license": "MIT",
 "exports": {
 ".": "./dist/index.js",
@@ -41,9 +41,9 @@
 "generate-version-file": "VERSION=$(npm pkg get version --workspaces=false | tr -d \\\")\necho \"// generated with 'generate-version-file' script; do not edit manually\\nexport const MALLOY_VERSION = '$VERSION';\" > src/version.ts"
 },
 "dependencies": {
-"@malloydata/malloy-filter": "0.0.
-"@malloydata/malloy-interfaces": "0.0.
-"@malloydata/malloy-tag": "0.0.
+"@malloydata/malloy-filter": "0.0.290",
+"@malloydata/malloy-interfaces": "0.0.290",
+"@malloydata/malloy-tag": "0.0.290",
 "antlr4ts": "^0.5.0-alpha.4",
 "assert": "^2.0.0",
 "jaro-winkler": "^0.2.8",