@finos/legend-application-data-cube 0.3.2 → 0.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. package/lib/__lib__/LegendDataCubeNavigation.d.ts +2 -0
  2. package/lib/__lib__/LegendDataCubeNavigation.d.ts.map +1 -1
  3. package/lib/__lib__/LegendDataCubeNavigation.js +2 -0
  4. package/lib/__lib__/LegendDataCubeNavigation.js.map +1 -1
  5. package/lib/application/LegendDataCubeApplicationConfig.d.ts +4 -0
  6. package/lib/application/LegendDataCubeApplicationConfig.d.ts.map +1 -1
  7. package/lib/application/LegendDataCubeApplicationConfig.js +5 -0
  8. package/lib/application/LegendDataCubeApplicationConfig.js.map +1 -1
  9. package/lib/application/__test-utils__/LegendDataCubeApplicationTestUtils.d.ts +18 -0
  10. package/lib/application/__test-utils__/LegendDataCubeApplicationTestUtils.d.ts.map +1 -0
  11. package/lib/application/__test-utils__/LegendDataCubeApplicationTestUtils.js +48 -0
  12. package/lib/application/__test-utils__/LegendDataCubeApplicationTestUtils.js.map +1 -0
  13. package/lib/components/LegendDataCubeBlockingWindow.d.ts +2 -1
  14. package/lib/components/LegendDataCubeBlockingWindow.d.ts.map +1 -1
  15. package/lib/components/LegendDataCubeBlockingWindow.js +8 -3
  16. package/lib/components/LegendDataCubeBlockingWindow.js.map +1 -1
  17. package/lib/components/__test-utils__/LegendDataCubeStoreTestUtils.d.ts +42 -0
  18. package/lib/components/__test-utils__/LegendDataCubeStoreTestUtils.d.ts.map +1 -0
  19. package/lib/components/__test-utils__/LegendDataCubeStoreTestUtils.js +104 -0
  20. package/lib/components/__test-utils__/LegendDataCubeStoreTestUtils.js.map +1 -0
  21. package/lib/components/builder/LegendDataCubeBuilder.d.ts +5 -0
  22. package/lib/components/builder/LegendDataCubeBuilder.d.ts.map +1 -1
  23. package/lib/components/builder/LegendDataCubeBuilder.js +26 -5
  24. package/lib/components/builder/LegendDataCubeBuilder.js.map +1 -1
  25. package/lib/components/builder/LegendDataCubeBuilderStoreProvider.d.ts.map +1 -1
  26. package/lib/components/builder/LegendDataCubeBuilderStoreProvider.js +1 -1
  27. package/lib/components/builder/LegendDataCubeBuilderStoreProvider.js.map +1 -1
  28. package/lib/components/builder/LegendDataCubePartialSourceLoader.d.ts +19 -0
  29. package/lib/components/builder/LegendDataCubePartialSourceLoader.d.ts.map +1 -0
  30. package/lib/components/builder/LegendDataCubePartialSourceLoader.js +50 -0
  31. package/lib/components/builder/LegendDataCubePartialSourceLoader.js.map +1 -0
  32. package/lib/components/builder/LegendDataCubeSourceViewer.d.ts.map +1 -1
  33. package/lib/components/builder/LegendDataCubeSourceViewer.js +61 -1
  34. package/lib/components/builder/LegendDataCubeSourceViewer.js.map +1 -1
  35. package/lib/components/builder/source/LocalFileDataCubeSourceBuilder.d.ts.map +1 -1
  36. package/lib/components/builder/source/LocalFileDataCubeSourceBuilder.js +1 -2
  37. package/lib/components/builder/source/LocalFileDataCubeSourceBuilder.js.map +1 -1
  38. package/lib/components/builder/source/loader/LocalFileDataCubePartialSourceLoader.d.ts +22 -0
  39. package/lib/components/builder/source/loader/LocalFileDataCubePartialSourceLoader.d.ts.map +1 -0
  40. package/lib/components/builder/source/loader/LocalFileDataCubePartialSourceLoader.js +28 -0
  41. package/lib/components/builder/source/loader/LocalFileDataCubePartialSourceLoader.js.map +1 -0
  42. package/lib/index.css +2 -2
  43. package/lib/index.css.map +1 -1
  44. package/lib/package.json +3 -1
  45. package/lib/stores/LegendDataCubeBaseStore.d.ts +2 -1
  46. package/lib/stores/LegendDataCubeBaseStore.d.ts.map +1 -1
  47. package/lib/stores/LegendDataCubeBaseStore.js +6 -3
  48. package/lib/stores/LegendDataCubeBaseStore.js.map +1 -1
  49. package/lib/stores/LegendDataCubeDataCubeEngine.d.ts +6 -3
  50. package/lib/stores/LegendDataCubeDataCubeEngine.d.ts.map +1 -1
  51. package/lib/stores/LegendDataCubeDataCubeEngine.js +127 -106
  52. package/lib/stores/LegendDataCubeDataCubeEngine.js.map +1 -1
  53. package/lib/stores/LegendDataCubeDuckDBEngine.d.ts +11 -4
  54. package/lib/stores/LegendDataCubeDuckDBEngine.d.ts.map +1 -1
  55. package/lib/stores/LegendDataCubeDuckDBEngine.js +83 -16
  56. package/lib/stores/LegendDataCubeDuckDBEngine.js.map +1 -1
  57. package/lib/stores/builder/LegendDataCubeBuilderStore.d.ts +4 -0
  58. package/lib/stores/builder/LegendDataCubeBuilderStore.d.ts.map +1 -1
  59. package/lib/stores/builder/LegendDataCubeBuilderStore.js +58 -17
  60. package/lib/stores/builder/LegendDataCubeBuilderStore.js.map +1 -1
  61. package/lib/stores/builder/LegendDataCubeLoaderState.d.ts +11 -2
  62. package/lib/stores/builder/LegendDataCubeLoaderState.d.ts.map +1 -1
  63. package/lib/stores/builder/LegendDataCubeLoaderState.js +28 -1
  64. package/lib/stores/builder/LegendDataCubeLoaderState.js.map +1 -1
  65. package/lib/stores/builder/LegendDataCubeSourceLoaderState.d.ts +44 -0
  66. package/lib/stores/builder/LegendDataCubeSourceLoaderState.d.ts.map +1 -0
  67. package/lib/stores/builder/LegendDataCubeSourceLoaderState.js +101 -0
  68. package/lib/stores/builder/LegendDataCubeSourceLoaderState.js.map +1 -0
  69. package/lib/stores/builder/source/LocalFileDataCubeSourceBuilderState.d.ts +3 -2
  70. package/lib/stores/builder/source/LocalFileDataCubeSourceBuilderState.d.ts.map +1 -1
  71. package/lib/stores/builder/source/LocalFileDataCubeSourceBuilderState.js +7 -10
  72. package/lib/stores/builder/source/LocalFileDataCubeSourceBuilderState.js.map +1 -1
  73. package/lib/stores/builder/source/loader/LegendDataCubePartialSourceLoaderState.d.ts +29 -0
  74. package/lib/stores/builder/source/loader/LegendDataCubePartialSourceLoaderState.d.ts.map +1 -0
  75. package/lib/stores/builder/source/loader/LegendDataCubePartialSourceLoaderState.js +25 -0
  76. package/lib/stores/builder/source/loader/LegendDataCubePartialSourceLoaderState.js.map +1 -0
  77. package/lib/stores/builder/source/loader/LocalFileDataCubePartialSourceLoaderState.d.ts +43 -0
  78. package/lib/stores/builder/source/loader/LocalFileDataCubePartialSourceLoaderState.d.ts.map +1 -0
  79. package/lib/stores/builder/source/loader/LocalFileDataCubePartialSourceLoaderState.js +142 -0
  80. package/lib/stores/builder/source/loader/LocalFileDataCubePartialSourceLoaderState.js.map +1 -0
  81. package/lib/stores/model/LegendQueryDataCubeSource.d.ts +2 -1
  82. package/lib/stores/model/LegendQueryDataCubeSource.d.ts.map +1 -1
  83. package/lib/stores/model/LegendQueryDataCubeSource.js +1 -0
  84. package/lib/stores/model/LegendQueryDataCubeSource.js.map +1 -1
  85. package/lib/stores/model/LocalFileDataCubeSource.d.ts +3 -8
  86. package/lib/stores/model/LocalFileDataCubeSource.d.ts.map +1 -1
  87. package/lib/stores/model/LocalFileDataCubeSource.js +5 -15
  88. package/lib/stores/model/LocalFileDataCubeSource.js.map +1 -1
  89. package/package.json +13 -11
  90. package/src/__lib__/LegendDataCubeNavigation.ts +21 -0
  91. package/src/application/LegendDataCubeApplicationConfig.ts +10 -0
  92. package/src/application/__test-utils__/LegendDataCubeApplicationTestUtils.ts +52 -0
  93. package/src/components/LegendDataCubeBlockingWindow.tsx +8 -2
  94. package/src/components/__test-utils__/LegendDataCubeStoreTestUtils.tsx +231 -0
  95. package/src/components/builder/LegendDataCubeBuilder.tsx +51 -6
  96. package/src/components/builder/LegendDataCubeBuilderStoreProvider.tsx +3 -0
  97. package/src/components/builder/LegendDataCubePartialSourceLoader.tsx +108 -0
  98. package/src/components/builder/LegendDataCubeSourceViewer.tsx +171 -1
  99. package/src/components/builder/source/LocalFileDataCubeSourceBuilder.tsx +1 -2
  100. package/src/components/builder/source/loader/LocalFileDataCubePartialSourceLoader.tsx +60 -0
  101. package/src/stores/LegendDataCubeBaseStore.ts +13 -6
  102. package/src/stores/LegendDataCubeDataCubeEngine.ts +161 -120
  103. package/src/stores/LegendDataCubeDuckDBEngine.ts +101 -17
  104. package/src/stores/builder/LegendDataCubeBuilderStore.tsx +96 -24
  105. package/src/stores/builder/LegendDataCubeLoaderState.tsx +44 -1
  106. package/src/stores/builder/LegendDataCubeSourceLoaderState.tsx +145 -0
  107. package/src/stores/builder/source/LocalFileDataCubeSourceBuilderState.ts +9 -14
  108. package/src/stores/builder/source/loader/LegendDataCubePartialSourceLoaderState.ts +41 -0
  109. package/src/stores/builder/source/loader/LocalFileDataCubePartialSourceLoaderState.ts +217 -0
  110. package/src/stores/model/LegendQueryDataCubeSource.ts +2 -0
  111. package/src/stores/model/LocalFileDataCubeSource.ts +6 -15
  112. package/tsconfig.json +8 -1
@@ -30,8 +30,7 @@ export const LocalFileDataCubeSourceBuilder = observer(
30
30
  type={AlertType.WARNING}
31
31
  text={`Currently, support for local file comes with the following limitations:
32
32
  - Only CSV files are supported, but not all variants of CSV files are supported (required header row, comma delimiter, single escape quote).
33
- - Data from uploaded file will not be stored nor shared.
34
- - DataCube created with local file source cannot be saved.`}
33
+ - Data from uploaded file will not be stored nor shared.`}
35
34
  />
36
35
  <div className="mt-2 flex h-6 w-full items-center text-neutral-500">
37
36
  <input
@@ -0,0 +1,60 @@
1
+ /**
2
+ * Copyright (c) 2020-present, Goldman Sachs
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ import { observer } from 'mobx-react-lite';
18
+ import { AlertType, FormAlert, FormCodeEditor } from '@finos/legend-data-cube';
19
+ import { CODE_EDITOR_LANGUAGE } from '@finos/legend-code-editor';
20
+ import type { LocalFileDataCubePartialSourceLoaderState } from '../../../../stores/builder/source/loader/LocalFileDataCubePartialSourceLoaderState.js';
21
+
22
+ export const LocalFileDataCubePartialSourceLoader = observer(
23
+ (props: {
24
+ partialSourceLoader: LocalFileDataCubePartialSourceLoaderState;
25
+ }) => {
26
+ const { partialSourceLoader } = props;
27
+
28
+ return (
29
+ <div className="h-full w-full">
30
+ <FormAlert
31
+ message="Local file support is experimental"
32
+ type={AlertType.WARNING}
33
+ text={`Currently, support for local file comes with the following limitations:
34
+ - Only CSV files are supported, but not all variants of CSV files are supported (required header row, comma delimiter, single escape quote).
35
+ - Data from uploaded file will not be stored nor shared.`}
36
+ />
37
+ <div className="mt-2 flex h-6 w-full items-center text-neutral-500">
38
+ <input
39
+ type="file"
40
+ onChange={(event) => {
41
+ partialSourceLoader.processFile(event.target.files?.[0]);
42
+ }}
43
+ className="w-full"
44
+ />
45
+ </div>
46
+ {partialSourceLoader.previewText !== undefined && (
47
+ <div className="mt-2 h-40">
48
+ <FormCodeEditor
49
+ value={partialSourceLoader.previewText}
50
+ language={CODE_EDITOR_LANGUAGE.TEXT}
51
+ isReadOnly={true}
52
+ hidePadding={true}
53
+ title="Data Preview"
54
+ />
55
+ </div>
56
+ )}
57
+ </div>
58
+ );
59
+ },
60
+ );
@@ -23,8 +23,9 @@ import type { LegendDataCubePluginManager } from '../application/LegendDataCubeP
23
23
  import { DepotServerClient } from '@finos/legend-server-depot';
24
24
  import type { LegendDataCubeApplicationConfig } from '../application/LegendDataCubeApplicationConfig.js';
25
25
  import {
26
- V1_EngineServerClient,
26
+ type V1_EngineServerClient,
27
27
  V1_PureGraphManager,
28
+ V1_RemoteEngine,
28
29
  } from '@finos/legend-graph';
29
30
  import {
30
31
  ActionState,
@@ -61,6 +62,7 @@ export class LegendDataCubeBaseStore {
61
62
  readonly pluginManager: LegendDataCubePluginManager;
62
63
  readonly depotServerClient: DepotServerClient;
63
64
  readonly graphManager: V1_PureGraphManager;
65
+ readonly remoteEngine: V1_RemoteEngine;
64
66
  readonly engineServerClient: V1_EngineServerClient;
65
67
 
66
68
  readonly engine: LegendDataCubeDataCubeEngine;
@@ -116,11 +118,15 @@ export class LegendDataCubeBaseStore {
116
118
  } satisfies DataCubeSetting<string>,
117
119
  ];
118
120
 
119
- this.engineServerClient = new V1_EngineServerClient({
120
- baseUrl: this.getEngineServerBaseUrlSettingValue(),
121
- queryBaseUrl: this.application.config.engineQueryServerUrl,
122
- enableCompression: this.getEngineEnableCompressionSettingValue(),
123
- });
121
+ this.remoteEngine = new V1_RemoteEngine(
122
+ {
123
+ baseUrl: this.getEngineServerBaseUrlSettingValue(),
124
+ queryBaseUrl: this.application.config.engineQueryServerUrl,
125
+ enableCompression: this.getEngineEnableCompressionSettingValue(),
126
+ },
127
+ application.logService,
128
+ );
129
+ this.engineServerClient = this.remoteEngine.getEngineServerClient();
124
130
  this.engineServerClient.setTracerService(application.tracerService);
125
131
 
126
132
  this.engine = new LegendDataCubeDataCubeEngine(
@@ -200,6 +206,7 @@ export class LegendDataCubeBaseStore {
200
206
  },
201
207
  },
202
208
  {
209
+ engine: this.remoteEngine,
203
210
  tracerService: this.application.tracerService,
204
211
  },
205
212
  );
@@ -82,6 +82,8 @@ import {
82
82
  V1_deserializePureModelContext,
83
83
  type V1_ConcreteFunctionDefinition,
84
84
  V1_deserializeValueSpecification,
85
+ LET_TOKEN,
86
+ V1_AppliedFunction,
85
87
  } from '@finos/legend-graph';
86
88
  import {
87
89
  _elementPtr,
@@ -101,6 +103,8 @@ import {
101
103
  DataCubeExecutionError,
102
104
  RawUserDefinedFunctionDataCubeSource,
103
105
  ADHOC_FUNCTION_DATA_CUBE_SOURCE_TYPE,
106
+ UserDefinedFunctionDataCubeSource,
107
+ DataCubeQueryFilterOperator,
104
108
  } from '@finos/legend-data-cube';
105
109
  import {
106
110
  isNonNullable,
@@ -199,12 +203,87 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
199
203
  RawLocalFileQueryDataCubeSource.serialization.fromJson(value);
200
204
  const source = new LocalFileDataCubeSource();
201
205
  source.fileName = rawSource.fileName;
202
- source.count = rawSource.count;
203
- source.db = rawSource.db;
204
- source.model = rawSource.model;
205
- source.runtime = rawSource.runtime;
206
- source.schema = rawSource.schema;
207
- source.table = rawSource.table;
206
+ source.fileFormat = rawSource.fileFormat;
207
+
208
+ const tableCatalog = this._duckDBEngine.retrieveCatalogTable(
209
+ rawSource._ref,
210
+ );
211
+
212
+ const { model, database, schema, table, runtime } =
213
+ this._synthesizeMinimalModelContext({
214
+ schemaName: tableCatalog.schemaName,
215
+ tableName: tableCatalog.tableName,
216
+ tableColumns: tableCatalog.columns.map((col) => {
217
+ const column = new V1_Column();
218
+ column.name = col[0] as string;
219
+ // TODO: confirm this is in accordance to engine
220
+ // check if we have a duckdb enum mapping
221
+ // See https://duckdb.org/docs/sql/data_types/overview.html
222
+ switch (col[1] as string) {
223
+ case 'BIT': {
224
+ column.type = new V1_Bit();
225
+ break;
226
+ }
227
+ case 'BOOLEAN': {
228
+ // TODO: understand why boolean is not present in relationalDataType
229
+ column.type = new V1_VarChar();
230
+ break;
231
+ }
232
+ case 'DATE': {
233
+ column.type = new V1_Date();
234
+ break;
235
+ }
236
+ case 'DECIMAL': {
237
+ column.type = new V1_Decimal();
238
+ break;
239
+ }
240
+ case 'DOUBLE': {
241
+ column.type = new V1_Double();
242
+ break;
243
+ }
244
+ case 'FLOAT': {
245
+ column.type = new V1_Float();
246
+ break;
247
+ }
248
+ case 'INTEGER': {
249
+ column.type = new V1_Integer();
250
+ break;
251
+ }
252
+ case 'TININT': {
253
+ column.type = new V1_TinyInt();
254
+ break;
255
+ }
256
+ case 'SMALLINT': {
257
+ column.type = new V1_SmallInt();
258
+ break;
259
+ }
260
+ case 'BIGINT': {
261
+ column.type = new V1_BigInt();
262
+ break;
263
+ }
264
+ case 'TIMESTAMP': {
265
+ column.type = new V1_Timestamp();
266
+ break;
267
+ }
268
+ case 'VARCHAR': {
269
+ column.type = new V1_VarChar();
270
+ break;
271
+ }
272
+ default: {
273
+ throw new UnsupportedOperationError(
274
+ `Can't ingest local file data: failed to find matching relational data type for DuckDB type '${col[1]}' when synthesizing table definition`,
275
+ );
276
+ }
277
+ }
278
+ return column;
279
+ }),
280
+ });
281
+
282
+ source.db = database.path;
283
+ source.model = model;
284
+ source.table = table.name;
285
+ source.schema = schema.name;
286
+ source.runtime = runtime.path;
208
287
 
209
288
  const query = new V1_ClassInstance();
210
289
  query.type = V1_ClassInstanceType.RELATION_STORE_ACCESSOR;
@@ -244,7 +323,8 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
244
323
  );
245
324
  }
246
325
 
247
- const source = new AdhocQueryDataCubeSource();
326
+ const source = new UserDefinedFunctionDataCubeSource();
327
+ source.functionPath = rawSource.functionPath;
248
328
  source.runtime = rawSource.runtime;
249
329
  source.model = rawSource.model;
250
330
  if (deserializedModel.sdlcInfo instanceof V1_LegendSDLC) {
@@ -327,32 +407,6 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
327
407
  ),
328
408
  );
329
409
  source.query = at(source.lambda.body, 0);
330
- // use the default parameter values from the query
331
- //
332
- // TODO?: we should probably allow configuring the parameters?
333
- // this would mean we need to create first-class support for parameters in DataCube component
334
- const parameterValues = await Promise.all(
335
- source.lambda.parameters.map(async (parameter) => {
336
- if (parameter.genericType?.rawType instanceof V1_PackageableType) {
337
- const paramValue = new V1_ParameterValue();
338
- paramValue.name = parameter.name;
339
- const type = parameter.genericType.rawType.fullPath;
340
- const defaultValue = queryInfo.defaultParameterValues?.find(
341
- (val) => val.name === parameter.name,
342
- )?.content;
343
- paramValue.value =
344
- defaultValue !== undefined
345
- ? await this.parseValueSpecification(defaultValue)
346
- : {
347
- _type: V1_deserializeRawValueSpecificationType(type),
348
- value: _defaultPrimitiveTypeValue(type),
349
- };
350
- return paramValue;
351
- }
352
- return undefined;
353
- }),
354
- );
355
- source.parameterValues = parameterValues.filter(isNonNullable);
356
410
  try {
357
411
  source.columns = (
358
412
  await this._getLambdaRelationType(
@@ -366,6 +420,59 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
366
420
  `Can't get query result columns. Make sure the saved query return a relation (i.e. typed TDS). Error: ${error.message}`,
367
421
  );
368
422
  }
423
+ // To handle parameter value with function calls we
424
+ // 1. Separate the parameters with function calls from regular parameters
425
+ // 2. Add let statements for function parameter values and store them in the source's letParameterValueSpec
426
+ // 3. Prepend the let statements to the lambda body when we execute the query
427
+ const letFuncs: V1_ValueSpecification[] = [];
428
+ const parameterValues = (
429
+ await Promise.all(
430
+ source.lambda.parameters.map(async (parameter) => {
431
+ if (
432
+ parameter.genericType?.rawType instanceof V1_PackageableType
433
+ ) {
434
+ const type = parameter.genericType.rawType.fullPath;
435
+ const defaultValueString =
436
+ queryInfo.defaultParameterValues?.find(
437
+ (val) => val.name === parameter.name,
438
+ )?.content;
439
+ const defaultValueSpec =
440
+ defaultValueString !== undefined
441
+ ? await this.parseValueSpecification(defaultValueString)
442
+ : {
443
+ _type: V1_deserializeRawValueSpecificationType(type),
444
+ value: _defaultPrimitiveTypeValue(type),
445
+ };
446
+ if (defaultValueSpec instanceof V1_AppliedFunction) {
447
+ const letFunc = guaranteeType(
448
+ this.deserializeValueSpecification(
449
+ await this._engineServerClient.grammarToJSON_lambda(
450
+ `${LET_TOKEN} ${parameter.name} ${DataCubeQueryFilterOperator.EQUAL} ${defaultValueString}`,
451
+ '',
452
+ undefined,
453
+ undefined,
454
+ false,
455
+ ),
456
+ ),
457
+ V1_Lambda,
458
+ );
459
+ letFuncs.push(...letFunc.body);
460
+ } else {
461
+ const paramValue = new V1_ParameterValue();
462
+ paramValue.name = parameter.name;
463
+ paramValue.value = defaultValueSpec;
464
+ return paramValue;
465
+ }
466
+ }
467
+ return undefined;
468
+ }),
469
+ )
470
+ ).filter(isNonNullable);
471
+ source.letParameterValueSpec = letFuncs;
472
+ source.parameterValues = parameterValues;
473
+ source.lambda.parameters = source.lambda.parameters.filter((param) =>
474
+ parameterValues.find((p) => p.name === param.name),
475
+ );
369
476
  return source;
370
477
  }
371
478
  default:
@@ -434,6 +541,13 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
434
541
  model: source.model,
435
542
  })
436
543
  ).completions as CompletionItem[];
544
+ } else if (source instanceof UserDefinedFunctionDataCubeSource) {
545
+ return (
546
+ await this._engineServerClient.completeCode({
547
+ codeBlock,
548
+ model: source.model,
549
+ })
550
+ ).completions as CompletionItem[];
437
551
  } else if (source instanceof LegendQueryDataCubeSource) {
438
552
  return (
439
553
  await this._engineServerClient.completeCode({
@@ -524,8 +638,11 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
524
638
  try {
525
639
  if (source instanceof AdhocQueryDataCubeSource) {
526
640
  result = await this._runQuery(query, source.model, undefined, options);
641
+ } else if (source instanceof UserDefinedFunctionDataCubeSource) {
642
+ result = await this._runQuery(query, source.model, undefined, options);
527
643
  } else if (source instanceof LegendQueryDataCubeSource) {
528
644
  query.parameters = source.lambda.parameters;
645
+ query.body = [...source.letParameterValueSpec, ...query.body];
529
646
  result = await this._runQuery(
530
647
  query,
531
648
  source.model,
@@ -631,6 +748,11 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
631
748
  DataCubeFunction.FROM,
632
749
  [_elementPtr(source.runtime)].filter(isNonNullable),
633
750
  );
751
+ } else if (source instanceof UserDefinedFunctionDataCubeSource) {
752
+ return _function(
753
+ DataCubeFunction.FROM,
754
+ [_elementPtr(source.runtime)].filter(isNonNullable),
755
+ );
634
756
  } else if (source instanceof LegendQueryDataCubeSource) {
635
757
  return _function(
636
758
  DataCubeFunction.FROM,
@@ -748,6 +870,8 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
748
870
  ) {
749
871
  if (source instanceof AdhocQueryDataCubeSource) {
750
872
  return this._getLambdaRelationType(query, source.model);
873
+ } else if (source instanceof UserDefinedFunctionDataCubeSource) {
874
+ return this._getLambdaRelationType(query, source.model);
751
875
  } else if (source instanceof LegendQueryDataCubeSource) {
752
876
  return this._getLambdaRelationType(query, source.model);
753
877
  } else if (source instanceof CachedDataCubeSource) {
@@ -853,93 +977,10 @@ export class LegendDataCubeDataCubeEngine extends DataCubeEngine {
853
977
  }
854
978
  }
855
979
 
856
- async ingestLocalFileData(
857
- data: string,
858
- format: string,
859
- ): Promise<DataCubeSource | undefined> {
860
- const {
861
- schema: schemaName,
862
- table: tableName,
863
- tableSpec,
864
- } = await this._duckDBEngine.ingestLocalFileData(data, format);
865
-
866
- const { model, database, schema, table, runtime } =
867
- this._synthesizeMinimalModelContext({
868
- schemaName,
869
- tableName,
870
- tableColumns: tableSpec.map((col) => {
871
- const column = new V1_Column();
872
- column.name = col[0] as string;
873
- // TODO: confirm this is in accordance to engine
874
- // check if we have a duckdb enum mapping
875
- // See https://duckdb.org/docs/sql/data_types/overview.html
876
- switch (col[1] as string) {
877
- case 'BIT': {
878
- column.type = new V1_Bit();
879
- break;
880
- }
881
- case 'BOOLEAN': {
882
- // TODO: understand why boolean is not present in relationalDataType
883
- column.type = new V1_VarChar();
884
- break;
885
- }
886
- case 'DATE': {
887
- column.type = new V1_Date();
888
- break;
889
- }
890
- case 'DECIMAL': {
891
- column.type = new V1_Decimal();
892
- break;
893
- }
894
- case 'DOUBLE': {
895
- column.type = new V1_Double();
896
- break;
897
- }
898
- case 'FLOAT': {
899
- column.type = new V1_Float();
900
- break;
901
- }
902
- case 'INTEGER': {
903
- column.type = new V1_Integer();
904
- break;
905
- }
906
- case 'TININT': {
907
- column.type = new V1_TinyInt();
908
- break;
909
- }
910
- case 'SMALLINT': {
911
- column.type = new V1_SmallInt();
912
- break;
913
- }
914
- case 'BIGINT': {
915
- column.type = new V1_BigInt();
916
- break;
917
- }
918
- case 'TIMESTAMP': {
919
- column.type = new V1_Timestamp();
920
- break;
921
- }
922
- case 'VARCHAR': {
923
- column.type = new V1_VarChar();
924
- break;
925
- }
926
- default: {
927
- throw new UnsupportedOperationError(
928
- `Can't ingest local file data: failed to find matching relational data type for DuckDB type '${col[1]}' when synthesizing table definition`,
929
- );
930
- }
931
- }
932
- return column;
933
- }),
934
- });
935
-
936
- const source = new LocalFileDataCubeSource();
937
- source.model = model;
938
- source.runtime = runtime.path;
939
- source.db = database.path;
940
- source.schema = schema.name;
941
- source.table = table.name;
942
- return source;
980
+ async ingestLocalFileData(data: string, format: string, refId?: string) {
981
+ const { dbReference, columnNames } =
982
+ await this._duckDBEngine.ingestLocalFileData(data, format, refId);
983
+ return { dbReference, columnNames };
943
984
  }
944
985
 
945
986
  private _synthesizeMinimalModelContext(data: {
@@ -29,7 +29,9 @@ import {
29
29
  assertNonNullable,
30
30
  csvStringify,
31
31
  guaranteeNonNullable,
32
+ isNullable,
32
33
  UnsupportedOperationError,
34
+ uuid,
33
35
  } from '@finos/legend-shared';
34
36
  import type { CachedDataCubeSource } from '@finos/legend-data-cube';
35
37
  import { Type } from 'apache-arrow';
@@ -48,6 +50,7 @@ export class LegendDataCubeDuckDBEngine {
48
50
  // Options for creating csv using papa parser: https://www.papaparse.com/docs#config
49
51
  private static readonly ESCAPE_CHAR = `'`;
50
52
  private static readonly QUOTE_CHAR = `'`;
53
+ private _catalog: Map<string, DuckDBCatalogTable> = new Map();
51
54
 
52
55
  private _database?: duckdb.AsyncDuckDB | undefined;
53
56
 
@@ -58,6 +61,13 @@ export class LegendDataCubeDuckDBEngine {
58
61
  );
59
62
  }
60
63
 
64
+ retrieveCatalogTable(ref: string) {
65
+ return guaranteeNonNullable(
66
+ this._catalog.get(ref),
67
+ `Can't find reference ${ref}`,
68
+ );
69
+ }
70
+
61
71
  async initialize() {
62
72
  // Initialize DuckDB with WASM
63
73
  // See: https://duckdb.org/docs/api/wasm/instantiation.html
@@ -99,35 +109,90 @@ export class LegendDataCubeDuckDBEngine {
99
109
  const table = `${LegendDataCubeDuckDBEngine.CACHE_TABLE_NAME_PREFIX}${LegendDataCubeDuckDBEngine.cacheTableCounter}`;
100
110
  const csvFileName = `${LegendDataCubeDuckDBEngine.CACHE_FILE_NAME}${LegendDataCubeDuckDBEngine.cacheTableCounter}.csv`;
101
111
 
112
+ const columns: string[] = [];
102
113
  const columnNames: string[] = [];
103
- result.builder.columns.forEach((col) => columnNames.push(col.name));
114
+ result.builder.columns.forEach((col) => {
115
+ let colType: string;
116
+ switch (col.type as string) {
117
+ case PRIMITIVE_TYPE.BINARY: {
118
+ colType = 'BIT';
119
+ break;
120
+ }
121
+ case PRIMITIVE_TYPE.BOOLEAN: {
122
+ colType = 'BOOLEAN';
123
+ break;
124
+ }
125
+ case PRIMITIVE_TYPE.NUMBER: {
126
+ colType = 'DOUBLE';
127
+ break;
128
+ }
129
+ case PRIMITIVE_TYPE.INTEGER: {
130
+ colType = 'INTEGER';
131
+ break;
132
+ }
133
+ // TODO: we need precision and scale
134
+ case PRIMITIVE_TYPE.DECIMAL: {
135
+ colType = 'DECIMAL';
136
+ break;
137
+ }
138
+ case PRIMITIVE_TYPE.FLOAT: {
139
+ colType = 'FLOAT';
140
+ break;
141
+ }
142
+ case PRIMITIVE_TYPE.STRICTDATE:
143
+ case PRIMITIVE_TYPE.DATE: {
144
+ colType = 'DATE';
145
+ break;
146
+ }
147
+ case PRIMITIVE_TYPE.DATETIME: {
148
+ colType = 'TIMESTAMP';
149
+ break;
150
+ }
151
+ case PRIMITIVE_TYPE.STRING: {
152
+ colType = 'VARCHAR';
153
+ break;
154
+ }
155
+ default: {
156
+ throw new UnsupportedOperationError(
157
+ `Can't initialize cache: failed to find matching DuckDB type for Pure type '${col.type}'`,
158
+ );
159
+ }
160
+ }
161
+ columns.push(`"${col.name}" ${colType}`);
162
+ columnNames.push(col.name);
163
+ });
104
164
 
105
165
  const data = result.result.rows.map((row) => row.values);
106
166
 
107
- const csvContent = csvStringify([columnNames, ...data], {
108
- escapeChar: LegendDataCubeDuckDBEngine.ESCAPE_CHAR,
109
- quoteChar: LegendDataCubeDuckDBEngine.QUOTE_CHAR,
110
- });
167
+ const csvContent = csvStringify([columnNames, ...data]);
111
168
  await this.database.registerFileText(csvFileName, csvContent);
112
169
 
113
170
  const connection = await this.database.connect();
171
+
172
+ // we create our own table schema because of date type conversions from arrow to duckDB data types
173
+ const CREATE_TABLE_SQL = `CREATE TABLE ${schema}.${table} (${columns.join(',')})`;
174
+ await connection.query(CREATE_TABLE_SQL);
175
+
114
176
  await connection.insertCSVFromPath(csvFileName, {
115
177
  schema: schema,
116
178
  name: table,
117
- create: true,
118
- header: true,
179
+ create: false,
180
+ header: true, // we add header and get it to autodetect otherwise we would have to provide column details with arrow datatypes
119
181
  detect: true,
120
- dateFormat: 'YYYY-MM-DD',
121
- timestampFormat: 'YYYY-MM-DD', // make sure Date is not auto-converted to timestamp
122
- escape: LegendDataCubeDuckDBEngine.ESCAPE_CHAR,
123
- quote: LegendDataCubeDuckDBEngine.QUOTE_CHAR,
124
182
  });
125
183
  await connection.close();
126
184
 
127
185
  return { schema, table, rowCount: result.result.rows.length };
128
186
  }
129
187
 
130
- async ingestLocalFileData(data: string, format: string) {
188
+ async ingestLocalFileData(data: string, format: string, refId?: string) {
189
+ if (!isNullable(refId) && this._catalog.has(refId)) {
190
+ const dbDetails = guaranteeNonNullable(this._catalog.get(refId));
191
+ return {
192
+ dbReference: refId,
193
+ columnNames: dbDetails.columns.map((col) => col[0] as string),
194
+ };
195
+ }
131
196
  const schema = LegendDataCubeDuckDBEngine.DUCKDB_DEFAULT_SCHEMA_NAME;
132
197
  LegendDataCubeDuckDBEngine.ingestFileTableCounter += 1;
133
198
  const table = `${LegendDataCubeDuckDBEngine.INGEST_TABLE_NAME_PREFIX}${LegendDataCubeDuckDBEngine.ingestFileTableCounter}`;
@@ -145,7 +210,7 @@ export class LegendDataCubeDuckDBEngine {
145
210
  header: true,
146
211
  detect: true,
147
212
  dateFormat: 'YYYY-MM-DD',
148
- timestampFormat: 'YYYY-MM-DD', // make sure Date is not auto-converted to timestamp
213
+ timestampFormat: 'YYYY-MM-DD hh:mm:ss[.zzzzzzzzz][+-TT[:tt]]', // make sure Date is not auto-converted to timestamp
149
214
  escape: LegendDataCubeDuckDBEngine.ESCAPE_CHAR,
150
215
  quote: LegendDataCubeDuckDBEngine.QUOTE_CHAR,
151
216
  });
@@ -161,12 +226,22 @@ export class LegendDataCubeDuckDBEngine {
161
226
  const tableSpec = (await connection.query(`DESCRIBE ${schema}.${table}`))
162
227
  .toArray()
163
228
  .map((spec) => [
164
- spec[LegendDataCubeDuckDBEngine.COLUMN_NAME],
165
- spec[LegendDataCubeDuckDBEngine.COLUMN_TYPE],
229
+ spec[LegendDataCubeDuckDBEngine.COLUMN_NAME] as string,
230
+ spec[LegendDataCubeDuckDBEngine.COLUMN_TYPE] as string,
166
231
  ]);
167
232
  await connection.close();
168
233
 
169
- return { schema, table, tableSpec };
234
+ const ref = isNullable(refId) ? uuid() : refId;
235
+ this._catalog.set(ref, {
236
+ schemaName: schema,
237
+ tableName: table,
238
+ columns: tableSpec,
239
+ } satisfies DuckDBCatalogTable);
240
+
241
+ return {
242
+ dbReference: ref,
243
+ columnNames: tableSpec.map((spec) => spec[0] as string),
244
+ };
170
245
  }
171
246
 
172
247
  async runSQLQuery(sql: string) {
@@ -210,13 +285,16 @@ export class LegendDataCubeDuckDBEngine {
210
285
  col.type = PRIMITIVE_TYPE.BOOLEAN;
211
286
  break;
212
287
  }
213
- case Type.Timestamp:
214
288
  case Type.Date:
215
289
  case Type.DateDay:
216
290
  case Type.DateMillisecond: {
217
291
  col.type = PRIMITIVE_TYPE.DATE;
218
292
  break;
219
293
  }
294
+ case Type.Timestamp: {
295
+ col.type = PRIMITIVE_TYPE.DATETIME;
296
+ break;
297
+ }
220
298
  case Type.Utf8:
221
299
  case Type.LargeUtf8: {
222
300
  col.type = PRIMITIVE_TYPE.STRING;
@@ -269,3 +347,9 @@ export class LegendDataCubeDuckDBEngine {
269
347
  await this._database?.terminate();
270
348
  }
271
349
  }
350
+
351
+ type DuckDBCatalogTable = {
352
+ schemaName: string;
353
+ tableName: string;
354
+ columns: string[][];
355
+ };