datajunction-ui 0.0.26 → 0.0.27
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/app/components/Search.jsx +41 -33
- package/src/app/components/__tests__/Search.test.jsx +46 -11
- package/src/app/index.tsx +3 -3
- package/src/app/pages/AddEditNodePage/MetricQueryField.jsx +57 -8
- package/src/app/pages/AddEditNodePage/UpstreamNodeField.jsx +17 -5
- package/src/app/pages/AddEditNodePage/__tests__/index.test.jsx +97 -1
- package/src/app/pages/AddEditNodePage/index.jsx +61 -17
- package/src/app/pages/NodePage/WatchNodeButton.jsx +12 -5
- package/src/app/pages/QueryPlannerPage/MetricFlowGraph.jsx +93 -15
- package/src/app/pages/QueryPlannerPage/PreAggDetailsPanel.jsx +2320 -65
- package/src/app/pages/QueryPlannerPage/SelectionPanel.jsx +234 -25
- package/src/app/pages/QueryPlannerPage/__tests__/MetricFlowGraph.test.jsx +315 -122
- package/src/app/pages/QueryPlannerPage/__tests__/PreAggDetailsPanel.test.jsx +2672 -314
- package/src/app/pages/QueryPlannerPage/__tests__/SelectionPanel.test.jsx +567 -0
- package/src/app/pages/QueryPlannerPage/__tests__/index.test.jsx +480 -55
- package/src/app/pages/QueryPlannerPage/index.jsx +1021 -14
- package/src/app/pages/QueryPlannerPage/styles.css +1990 -62
- package/src/app/pages/Root/__tests__/index.test.jsx +79 -8
- package/src/app/pages/Root/index.tsx +1 -6
- package/src/app/pages/SQLBuilderPage/__tests__/index.test.jsx +82 -0
- package/src/app/pages/SettingsPage/__tests__/CreateServiceAccountModal.test.jsx +37 -0
- package/src/app/pages/SettingsPage/__tests__/ServiceAccountsSection.test.jsx +48 -0
- package/src/app/pages/SettingsPage/__tests__/index.test.jsx +169 -1
- package/src/app/services/DJService.js +492 -3
- package/src/app/services/__tests__/DJService.test.jsx +582 -0
- package/src/mocks/mockNodes.jsx +36 -0
- package/webpack.config.js +27 -0
|
@@ -1,6 +1,14 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import {
|
|
2
|
+
useContext,
|
|
3
|
+
useEffect,
|
|
4
|
+
useState,
|
|
5
|
+
useCallback,
|
|
6
|
+
useRef,
|
|
7
|
+
lazy,
|
|
8
|
+
Suspense,
|
|
9
|
+
} from 'react';
|
|
10
|
+
import { useLocation, useNavigate } from 'react-router-dom';
|
|
2
11
|
import DJClientContext from '../../providers/djclient';
|
|
3
|
-
import MetricFlowGraph from './MetricFlowGraph';
|
|
4
12
|
import SelectionPanel from './SelectionPanel';
|
|
5
13
|
import {
|
|
6
14
|
PreAggDetailsPanel,
|
|
@@ -9,16 +17,40 @@ import {
|
|
|
9
17
|
} from './PreAggDetailsPanel';
|
|
10
18
|
import './styles.css';
|
|
11
19
|
|
|
20
|
+
// Lazy load the graph component - ReactFlow and dagre are heavy (~500KB)
|
|
21
|
+
// This allows API calls to start immediately while the graph loads in parallel
|
|
22
|
+
const MetricFlowGraph = lazy(() => import('./MetricFlowGraph'));
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Helper to normalize grain columns to short names for comparison
|
|
26
|
+
* "default.date_dim.date_id" -> "date_id"
|
|
27
|
+
*/
|
|
28
|
+
function normalizeGrain(grainCols) {
|
|
29
|
+
return (grainCols || [])
|
|
30
|
+
.map(col => col.split('.').pop())
|
|
31
|
+
.sort()
|
|
32
|
+
.join(',');
|
|
33
|
+
}
|
|
34
|
+
|
|
12
35
|
export function QueryPlannerPage() {
|
|
13
36
|
const djClient = useContext(DJClientContext).DataJunctionAPI;
|
|
37
|
+
const location = useLocation();
|
|
38
|
+
const navigate = useNavigate();
|
|
14
39
|
|
|
15
40
|
// Available options
|
|
16
41
|
const [metrics, setMetrics] = useState([]);
|
|
17
42
|
const [commonDimensions, setCommonDimensions] = useState([]);
|
|
43
|
+
const [cubes, setCubes] = useState([]);
|
|
18
44
|
|
|
19
|
-
// Selection state
|
|
45
|
+
// Selection state - initialized from URL params
|
|
20
46
|
const [selectedMetrics, setSelectedMetrics] = useState([]);
|
|
21
47
|
const [selectedDimensions, setSelectedDimensions] = useState([]);
|
|
48
|
+
const [loadedCubeName, setLoadedCubeName] = useState(null); // Track loaded cube preset
|
|
49
|
+
|
|
50
|
+
// Track if we've initialized from URL (to avoid overwriting URL on first render)
|
|
51
|
+
const initializedFromUrl = useRef(false);
|
|
52
|
+
const pendingDimensionsFromUrl = useRef([]);
|
|
53
|
+
const pendingCubeFromUrl = useRef(null);
|
|
22
54
|
|
|
23
55
|
// Results state
|
|
24
56
|
const [measuresResult, setMeasuresResult] = useState(null);
|
|
@@ -30,11 +62,133 @@ export function QueryPlannerPage() {
|
|
|
30
62
|
// Node selection for details panel
|
|
31
63
|
const [selectedNode, setSelectedNode] = useState(null);
|
|
32
64
|
|
|
33
|
-
//
|
|
65
|
+
// Materialization state - map of grain_key -> pre-agg info
|
|
66
|
+
const [plannedPreaggs, setPlannedPreaggs] = useState({});
|
|
67
|
+
|
|
68
|
+
// Materialization error state
|
|
69
|
+
const [materializationError, setMaterializationError] = useState(null);
|
|
70
|
+
|
|
71
|
+
// Workflow URLs from successful cube materialization
|
|
72
|
+
const [workflowUrls, setWorkflowUrls] = useState([]);
|
|
73
|
+
|
|
74
|
+
// Full cube materialization info (for edit/refresh/backfill)
|
|
75
|
+
const [cubeMaterialization, setCubeMaterialization] = useState(null);
|
|
76
|
+
|
|
77
|
+
// Initialize selection from URL params on mount
|
|
78
|
+
useEffect(() => {
|
|
79
|
+
const params = new URLSearchParams(location.search);
|
|
80
|
+
const urlMetrics = params.get('metrics')?.split(',').filter(Boolean) || [];
|
|
81
|
+
const urlDimensions =
|
|
82
|
+
params.get('dimensions')?.split(',').filter(Boolean) || [];
|
|
83
|
+
const urlCube = params.get('cube');
|
|
84
|
+
|
|
85
|
+
if (urlMetrics.length > 0) {
|
|
86
|
+
setSelectedMetrics(urlMetrics);
|
|
87
|
+
// Store dimensions to apply after commonDimensions are loaded
|
|
88
|
+
if (urlDimensions.length > 0) {
|
|
89
|
+
pendingDimensionsFromUrl.current = urlDimensions;
|
|
90
|
+
}
|
|
91
|
+
// Store cube name - will be set after cube data is loaded
|
|
92
|
+
if (urlCube) {
|
|
93
|
+
pendingCubeFromUrl.current = urlCube;
|
|
94
|
+
// Don't set loadedCubeName here - wait until cube data loads
|
|
95
|
+
}
|
|
96
|
+
initializedFromUrl.current = true;
|
|
97
|
+
} else if (urlCube) {
|
|
98
|
+
// Cube specified without metrics - will load cube on mount
|
|
99
|
+
pendingCubeFromUrl.current = urlCube;
|
|
100
|
+
initializedFromUrl.current = true;
|
|
101
|
+
}
|
|
102
|
+
}, []); // Only run on mount
|
|
103
|
+
|
|
104
|
+
// Update URL when selection changes
|
|
105
|
+
useEffect(() => {
|
|
106
|
+
// Skip the first render if we just initialized from URL
|
|
107
|
+
if (
|
|
108
|
+
!initializedFromUrl.current &&
|
|
109
|
+
selectedMetrics.length === 0 &&
|
|
110
|
+
!loadedCubeName
|
|
111
|
+
) {
|
|
112
|
+
return;
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
const params = new URLSearchParams();
|
|
116
|
+
if (loadedCubeName) {
|
|
117
|
+
// Cube is loaded - only include cube name (metrics/dims come from cube definition)
|
|
118
|
+
params.set('cube', loadedCubeName);
|
|
119
|
+
} else {
|
|
120
|
+
// No cube - include metrics and dimensions
|
|
121
|
+
if (selectedMetrics.length > 0) {
|
|
122
|
+
params.set('metrics', selectedMetrics.join(','));
|
|
123
|
+
}
|
|
124
|
+
if (selectedDimensions.length > 0) {
|
|
125
|
+
params.set('dimensions', selectedDimensions.join(','));
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
const newSearch = params.toString();
|
|
130
|
+
const currentSearch = location.search.replace(/^\?/, '');
|
|
131
|
+
|
|
132
|
+
// Only update if different (avoid unnecessary history entries)
|
|
133
|
+
if (newSearch !== currentSearch) {
|
|
134
|
+
navigate(
|
|
135
|
+
{
|
|
136
|
+
pathname: location.pathname,
|
|
137
|
+
search: newSearch ? `?${newSearch}` : '',
|
|
138
|
+
},
|
|
139
|
+
{ replace: true },
|
|
140
|
+
);
|
|
141
|
+
}
|
|
142
|
+
}, [
|
|
143
|
+
selectedMetrics,
|
|
144
|
+
selectedDimensions,
|
|
145
|
+
loadedCubeName,
|
|
146
|
+
location.pathname,
|
|
147
|
+
navigate,
|
|
148
|
+
]);
|
|
149
|
+
|
|
150
|
+
// Get metrics list and cube names on mount
|
|
151
|
+
// Uses GraphQL for lightweight cube listing with display names
|
|
34
152
|
useEffect(() => {
|
|
35
153
|
const fetchData = async () => {
|
|
36
|
-
const metricsList = await
|
|
154
|
+
const [metricsList, cubesList] = await Promise.all([
|
|
155
|
+
djClient.metrics(),
|
|
156
|
+
djClient.listCubesForPreset().catch(() => []),
|
|
157
|
+
]);
|
|
37
158
|
setMetrics(metricsList);
|
|
159
|
+
// cubesList returns [{name, display_name}] from GraphQL
|
|
160
|
+
setCubes(cubesList);
|
|
161
|
+
|
|
162
|
+
// If there's a pending cube from URL, load it now
|
|
163
|
+
if (pendingCubeFromUrl.current) {
|
|
164
|
+
const cubeName = pendingCubeFromUrl.current;
|
|
165
|
+
pendingCubeFromUrl.current = null; // Clear to prevent re-loading
|
|
166
|
+
try {
|
|
167
|
+
// Use lightweight GraphQL query - much faster than REST endpoint
|
|
168
|
+
const cubeData = await djClient.cubeForPlanner(cubeName);
|
|
169
|
+
// Validate cube data has expected fields
|
|
170
|
+
if (cubeData && Array.isArray(cubeData.cube_node_metrics)) {
|
|
171
|
+
const cubeMetrics = cubeData.cube_node_metrics || [];
|
|
172
|
+
const cubeDimensions = cubeData.cube_node_dimensions || [];
|
|
173
|
+
setLoadedCubeName(cubeName);
|
|
174
|
+
setSelectedMetrics(cubeMetrics);
|
|
175
|
+
pendingDimensionsFromUrl.current = cubeDimensions;
|
|
176
|
+
|
|
177
|
+
// Materialization info is included in the GraphQL response
|
|
178
|
+
const cubeMat = cubeData.cubeMaterialization;
|
|
179
|
+
if (cubeMat) {
|
|
180
|
+
setCubeMaterialization(cubeMat);
|
|
181
|
+
if (cubeMat.workflowUrls?.length > 0) {
|
|
182
|
+
setWorkflowUrls(cubeMat.workflowUrls);
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
} else {
|
|
186
|
+
console.error('Invalid cube data from URL:', cubeData);
|
|
187
|
+
}
|
|
188
|
+
} catch (err) {
|
|
189
|
+
console.error('Failed to load cube from URL:', err);
|
|
190
|
+
}
|
|
191
|
+
}
|
|
38
192
|
};
|
|
39
193
|
fetchData().catch(console.error);
|
|
40
194
|
}, [djClient]);
|
|
@@ -47,6 +201,18 @@ export function QueryPlannerPage() {
|
|
|
47
201
|
try {
|
|
48
202
|
const dims = await djClient.commonDimensions(selectedMetrics);
|
|
49
203
|
setCommonDimensions(dims);
|
|
204
|
+
|
|
205
|
+
// Apply pending dimensions from URL if we have them
|
|
206
|
+
if (pendingDimensionsFromUrl.current.length > 0) {
|
|
207
|
+
const validDimNames = dims.map(d => d.name);
|
|
208
|
+
const validPending = pendingDimensionsFromUrl.current.filter(d =>
|
|
209
|
+
validDimNames.includes(d),
|
|
210
|
+
);
|
|
211
|
+
if (validPending.length > 0) {
|
|
212
|
+
setSelectedDimensions(validPending);
|
|
213
|
+
}
|
|
214
|
+
pendingDimensionsFromUrl.current = []; // Clear after applying
|
|
215
|
+
}
|
|
50
216
|
} catch (err) {
|
|
51
217
|
console.error('Failed to fetch dimensions:', err);
|
|
52
218
|
setCommonDimensions([]);
|
|
@@ -100,14 +266,159 @@ export function QueryPlannerPage() {
|
|
|
100
266
|
fetchData().catch(console.error);
|
|
101
267
|
}, [djClient, selectedMetrics, selectedDimensions]);
|
|
102
268
|
|
|
269
|
+
// Fetch existing pre-aggregations for the grain groups
|
|
270
|
+
useEffect(() => {
|
|
271
|
+
const fetchExistingPreaggs = async () => {
|
|
272
|
+
if (!measuresResult?.grain_groups?.length) {
|
|
273
|
+
setPlannedPreaggs({});
|
|
274
|
+
return;
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
// Get unique node names from grain groups
|
|
278
|
+
try {
|
|
279
|
+
// For each grain group, ask server to find matching pre-aggs
|
|
280
|
+
const newPreaggs = {};
|
|
281
|
+
|
|
282
|
+
// Use requested_dimensions from the measures result - these are the fully qualified
|
|
283
|
+
// dimension references that pre-aggs are created with
|
|
284
|
+
const requestedDims = measuresResult.requested_dimensions || [];
|
|
285
|
+
const grainColsForLookup = requestedDims.join(',');
|
|
286
|
+
console.log(
|
|
287
|
+
'grainCols for pre-agg lookup (from requested_dimensions):',
|
|
288
|
+
grainColsForLookup,
|
|
289
|
+
);
|
|
290
|
+
|
|
291
|
+
await Promise.all(
|
|
292
|
+
measuresResult.grain_groups.map(async gg => {
|
|
293
|
+
const grainKey = `${gg.parent_name}|${normalizeGrain(gg.grain)}`;
|
|
294
|
+
|
|
295
|
+
// Extract measure names
|
|
296
|
+
const measureNames = (gg.components || [])
|
|
297
|
+
.map(c => c.name)
|
|
298
|
+
.filter(Boolean)
|
|
299
|
+
.join(',');
|
|
300
|
+
|
|
301
|
+
console.log(
|
|
302
|
+
`Looking for pre-agg: node=${gg.parent_name}, grain=${grainColsForLookup}, measures=${measureNames}`,
|
|
303
|
+
);
|
|
304
|
+
|
|
305
|
+
// First try exact match using requested_dimensions
|
|
306
|
+
let result = await djClient.listPreaggs({
|
|
307
|
+
node_name: gg.parent_name,
|
|
308
|
+
grain: grainColsForLookup,
|
|
309
|
+
grain_mode: 'exact',
|
|
310
|
+
measures: measureNames || undefined,
|
|
311
|
+
});
|
|
312
|
+
|
|
313
|
+
console.log(`Exact match result for ${gg.parent_name}:`, result);
|
|
314
|
+
|
|
315
|
+
let preaggs = result.items || result.pre_aggregations || [];
|
|
316
|
+
let match = preaggs[0];
|
|
317
|
+
|
|
318
|
+
// If no exact match, try superset (finer grain)
|
|
319
|
+
if (!match) {
|
|
320
|
+
result = await djClient.listPreaggs({
|
|
321
|
+
node_name: gg.parent_name,
|
|
322
|
+
grain: grainColsForLookup,
|
|
323
|
+
grain_mode: 'superset',
|
|
324
|
+
measures: measureNames || undefined,
|
|
325
|
+
});
|
|
326
|
+
console.log(
|
|
327
|
+
`Superset match result for ${gg.parent_name}:`,
|
|
328
|
+
result,
|
|
329
|
+
);
|
|
330
|
+
preaggs = result.items || result.pre_aggregations || [];
|
|
331
|
+
match = preaggs[0];
|
|
332
|
+
|
|
333
|
+
// Mark as compatible (not exact match)
|
|
334
|
+
if (match) {
|
|
335
|
+
match = { ...match, _isCompatible: true };
|
|
336
|
+
}
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
if (match) {
|
|
340
|
+
console.log(`Found pre-agg match for ${gg.parent_name}:`, match);
|
|
341
|
+
newPreaggs[grainKey] = match;
|
|
342
|
+
} else {
|
|
343
|
+
console.log(`No pre-agg match found for ${gg.parent_name}`);
|
|
344
|
+
}
|
|
345
|
+
}),
|
|
346
|
+
);
|
|
347
|
+
|
|
348
|
+
setPlannedPreaggs(newPreaggs);
|
|
349
|
+
} catch (err) {
|
|
350
|
+
console.error('Failed to fetch existing pre-aggs:', err);
|
|
351
|
+
}
|
|
352
|
+
};
|
|
353
|
+
|
|
354
|
+
fetchExistingPreaggs();
|
|
355
|
+
}, [measuresResult, djClient]);
|
|
356
|
+
|
|
103
357
|
const handleMetricsChange = useCallback(newMetrics => {
|
|
104
358
|
setSelectedMetrics(newMetrics);
|
|
105
359
|
setSelectedNode(null);
|
|
360
|
+
// Clear cube state since user is manually changing selection
|
|
361
|
+
setLoadedCubeName(null);
|
|
362
|
+
setWorkflowUrls([]);
|
|
363
|
+
setCubeMaterialization(null);
|
|
364
|
+
}, []);
|
|
365
|
+
|
|
366
|
+
// Load a cube preset - sets both metrics and dimensions from the cube definition
|
|
367
|
+
const handleLoadCubePreset = useCallback(
|
|
368
|
+
async cubeName => {
|
|
369
|
+
if (!cubeName) return;
|
|
370
|
+
|
|
371
|
+
try {
|
|
372
|
+
// Use lightweight GraphQL query - much faster than REST endpoint
|
|
373
|
+
const cubeData = await djClient.cubeForPlanner(cubeName);
|
|
374
|
+
// Validate cube data has expected fields
|
|
375
|
+
if (cubeData && Array.isArray(cubeData.cube_node_metrics)) {
|
|
376
|
+
// Extract metrics and dimensions from the cube
|
|
377
|
+
const cubeMetrics = cubeData.cube_node_metrics || [];
|
|
378
|
+
const cubeDimensions = cubeData.cube_node_dimensions || [];
|
|
379
|
+
|
|
380
|
+
// Set the cube name for URL and display
|
|
381
|
+
setLoadedCubeName(cubeName);
|
|
382
|
+
// Set the metrics first - dimensions will be loaded and filtered via the effect
|
|
383
|
+
setSelectedMetrics(cubeMetrics);
|
|
384
|
+
// Store dimensions to apply after common dimensions are loaded
|
|
385
|
+
pendingDimensionsFromUrl.current = cubeDimensions;
|
|
386
|
+
setSelectedNode(null);
|
|
387
|
+
|
|
388
|
+
// Materialization info is included in the GraphQL response
|
|
389
|
+
const cubeMat = cubeData.cubeMaterialization;
|
|
390
|
+
if (cubeMat) {
|
|
391
|
+
setCubeMaterialization(cubeMat);
|
|
392
|
+
if (cubeMat.workflowUrls?.length > 0) {
|
|
393
|
+
setWorkflowUrls(cubeMat.workflowUrls);
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
} else {
|
|
397
|
+
console.error('Invalid cube data received:', cubeData);
|
|
398
|
+
}
|
|
399
|
+
} catch (err) {
|
|
400
|
+
console.error('Failed to load cube preset:', err);
|
|
401
|
+
}
|
|
402
|
+
},
|
|
403
|
+
[djClient],
|
|
404
|
+
);
|
|
405
|
+
|
|
406
|
+
// Clear cube preset when selection is manually cleared
|
|
407
|
+
const handleClearSelection = useCallback(() => {
|
|
408
|
+
setSelectedMetrics([]);
|
|
409
|
+
setSelectedDimensions([]);
|
|
410
|
+
setLoadedCubeName(null);
|
|
411
|
+
setWorkflowUrls([]);
|
|
412
|
+
setCubeMaterialization(null);
|
|
106
413
|
}, []);
|
|
107
414
|
|
|
108
415
|
const handleDimensionsChange = useCallback(newDimensions => {
|
|
109
416
|
setSelectedDimensions(newDimensions);
|
|
110
417
|
setSelectedNode(null);
|
|
418
|
+
// Clear cube state since user is manually changing selection
|
|
419
|
+
setLoadedCubeName(null);
|
|
420
|
+
setWorkflowUrls([]);
|
|
421
|
+
setCubeMaterialization(null);
|
|
111
422
|
}, []);
|
|
112
423
|
|
|
113
424
|
const handleNodeSelect = useCallback(node => {
|
|
@@ -118,13 +429,662 @@ export function QueryPlannerPage() {
|
|
|
118
429
|
setSelectedNode(null);
|
|
119
430
|
}, []);
|
|
120
431
|
|
|
432
|
+
// Handle planning/saving a new materialization configuration
|
|
433
|
+
// Note: This creates pre-aggs for ALL grain groups with the same settings
|
|
434
|
+
const handlePlanMaterialization = useCallback(
|
|
435
|
+
async (grainGroup, config) => {
|
|
436
|
+
setMaterializationError(null); // Clear any previous error
|
|
437
|
+
|
|
438
|
+
try {
|
|
439
|
+
// Step 1: Create the pre-agg records with config
|
|
440
|
+
const result = await djClient.planPreaggs(
|
|
441
|
+
selectedMetrics,
|
|
442
|
+
selectedDimensions,
|
|
443
|
+
config.strategy,
|
|
444
|
+
config.schedule,
|
|
445
|
+
config.lookbackWindow,
|
|
446
|
+
);
|
|
447
|
+
|
|
448
|
+
// Check for error in response
|
|
449
|
+
if (result._error || result.message || result.detail) {
|
|
450
|
+
const errorMsg =
|
|
451
|
+
result.message ||
|
|
452
|
+
result.detail ||
|
|
453
|
+
'Failed to plan pre-aggregations';
|
|
454
|
+
setMaterializationError(errorMsg);
|
|
455
|
+
throw new Error(errorMsg);
|
|
456
|
+
}
|
|
457
|
+
|
|
458
|
+
// Get the created pre-aggs (API returns `preaggs`, list endpoint returns `items`)
|
|
459
|
+
const preaggs =
|
|
460
|
+
result.preaggs || result.items || result.pre_aggregations || [];
|
|
461
|
+
|
|
462
|
+
// Update local state
|
|
463
|
+
const newPreaggs = { ...plannedPreaggs };
|
|
464
|
+
preaggs.forEach(preagg => {
|
|
465
|
+
const grainKey = `${preagg.node_name}|${normalizeGrain(
|
|
466
|
+
preagg.grain_columns,
|
|
467
|
+
)}`;
|
|
468
|
+
newPreaggs[grainKey] = preagg;
|
|
469
|
+
});
|
|
470
|
+
setPlannedPreaggs(newPreaggs);
|
|
471
|
+
|
|
472
|
+
// Step 2: Create scheduled workflows and optionally run backfills
|
|
473
|
+
if (preaggs.length > 0 && config.schedule) {
|
|
474
|
+
const workflowPromises = [];
|
|
475
|
+
const backfillPromises = [];
|
|
476
|
+
|
|
477
|
+
for (const preagg of preaggs) {
|
|
478
|
+
// Always create the scheduled workflow when a schedule is provided
|
|
479
|
+
workflowPromises.push(
|
|
480
|
+
djClient.materializePreagg(preagg.id).catch(err => {
|
|
481
|
+
console.error(
|
|
482
|
+
`Failed to create workflow for preagg ${preagg.id}:`,
|
|
483
|
+
err,
|
|
484
|
+
);
|
|
485
|
+
return null;
|
|
486
|
+
}),
|
|
487
|
+
);
|
|
488
|
+
}
|
|
489
|
+
|
|
490
|
+
// First: Wait for all workflows to be created
|
|
491
|
+
const workflowResults = await Promise.all(workflowPromises);
|
|
492
|
+
|
|
493
|
+
// Second: Only after workflows are created, start backfills
|
|
494
|
+
for (const preagg of preaggs) {
|
|
495
|
+
if (
|
|
496
|
+
config.runBackfill &&
|
|
497
|
+
config.backfillFrom &&
|
|
498
|
+
config.backfillTo
|
|
499
|
+
) {
|
|
500
|
+
backfillPromises.push(
|
|
501
|
+
djClient
|
|
502
|
+
.runPreaggBackfill(
|
|
503
|
+
preagg.id,
|
|
504
|
+
config.backfillFrom,
|
|
505
|
+
config.backfillTo,
|
|
506
|
+
)
|
|
507
|
+
.catch(err => {
|
|
508
|
+
console.error(
|
|
509
|
+
`Failed to run backfill for preagg ${preagg.id}:`,
|
|
510
|
+
err,
|
|
511
|
+
);
|
|
512
|
+
return null;
|
|
513
|
+
}),
|
|
514
|
+
);
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
|
|
518
|
+
// Wait for all backfills to complete
|
|
519
|
+
const backfillResults = await Promise.all(backfillPromises);
|
|
520
|
+
|
|
521
|
+
// Update state with workflow URLs
|
|
522
|
+
const updatedPreaggs = { ...newPreaggs };
|
|
523
|
+
preaggs.forEach((preagg, idx) => {
|
|
524
|
+
const grainKey = `${preagg.node_name}|${normalizeGrain(
|
|
525
|
+
preagg.grain_columns,
|
|
526
|
+
)}`;
|
|
527
|
+
const workflowResult = workflowResults[idx];
|
|
528
|
+
if (workflowResult?.workflow_urls?.length > 0) {
|
|
529
|
+
updatedPreaggs[grainKey] = {
|
|
530
|
+
...updatedPreaggs[grainKey],
|
|
531
|
+
workflow_urls: workflowResult.workflow_urls,
|
|
532
|
+
workflow_status: workflowResult.workflow_status || 'active',
|
|
533
|
+
};
|
|
534
|
+
}
|
|
535
|
+
});
|
|
536
|
+
setPlannedPreaggs(updatedPreaggs);
|
|
537
|
+
|
|
538
|
+
// Show toast with backfill info
|
|
539
|
+
const successfulBackfills = backfillResults.filter(r => r?.job_url);
|
|
540
|
+
if (successfulBackfills.length > 0) {
|
|
541
|
+
console.log('Backfills started:', successfulBackfills);
|
|
542
|
+
}
|
|
543
|
+
}
|
|
544
|
+
|
|
545
|
+
// Step 3: If Druid cube enabled, schedule cube materialization
|
|
546
|
+
console.log(
|
|
547
|
+
'Scheduling Druid cube materialization for:',
|
|
548
|
+
config.enableDruidCube,
|
|
549
|
+
);
|
|
550
|
+
if (config.enableDruidCube) {
|
|
551
|
+
// Use existing cube if loaded, otherwise use the new cube name from config
|
|
552
|
+
const cubeName = loadedCubeName || config.druidCubeName;
|
|
553
|
+
|
|
554
|
+
if (!cubeName) {
|
|
555
|
+
const errorMsg = 'Cube name is required for Druid materialization';
|
|
556
|
+
setMaterializationError(errorMsg);
|
|
557
|
+
throw new Error(errorMsg); // Throw so form doesn't close
|
|
558
|
+
}
|
|
559
|
+
|
|
560
|
+
// Continue with cube creation/materialization
|
|
561
|
+
console.log('Scheduling Druid cube materialization for:', cubeName);
|
|
562
|
+
|
|
563
|
+
// If no existing cube, create one first
|
|
564
|
+
if (!loadedCubeName && config.druidCubeName) {
|
|
565
|
+
const cubeResult = await djClient.createCube(
|
|
566
|
+
config.druidCubeName, // name
|
|
567
|
+
config.druidCubeName.split('.').pop(), // display_name (short name)
|
|
568
|
+
`Druid cube for ${selectedMetrics.join(', ')}`, // description
|
|
569
|
+
'published', // mode
|
|
570
|
+
selectedMetrics, // metrics
|
|
571
|
+
selectedDimensions, // dimensions
|
|
572
|
+
[], // filters
|
|
573
|
+
);
|
|
574
|
+
|
|
575
|
+
if (cubeResult.status >= 400) {
|
|
576
|
+
const errorMsg =
|
|
577
|
+
cubeResult.json?.message ||
|
|
578
|
+
cubeResult.json?.detail ||
|
|
579
|
+
'Failed to create cube';
|
|
580
|
+
// Check if it's a "cube already exists" error - that's okay
|
|
581
|
+
if (!errorMsg.toLowerCase().includes('already exists')) {
|
|
582
|
+
setMaterializationError(`Failed to create cube: ${errorMsg}`);
|
|
583
|
+
return result; // Don't proceed with materialization
|
|
584
|
+
}
|
|
585
|
+
console.log(
|
|
586
|
+
'Cube already exists, proceeding with materialization',
|
|
587
|
+
);
|
|
588
|
+
}
|
|
589
|
+
// Set the loaded cube name so the banner shows it
|
|
590
|
+
setLoadedCubeName(config.druidCubeName);
|
|
591
|
+
}
|
|
592
|
+
|
|
593
|
+
// Schedule cube materialization (waits on pre-aggs, ingests to Druid)
|
|
594
|
+
const cubeMaterializeResult = await djClient.materializeCubeV2(
|
|
595
|
+
cubeName,
|
|
596
|
+
config.schedule || '0 6 * * *',
|
|
597
|
+
config.strategy,
|
|
598
|
+
config.lookbackWindow,
|
|
599
|
+
config.runBackfill ?? true,
|
|
600
|
+
);
|
|
601
|
+
|
|
602
|
+
if (cubeMaterializeResult.status >= 400) {
|
|
603
|
+
const errorMsg =
|
|
604
|
+
cubeMaterializeResult.json?.message ||
|
|
605
|
+
cubeMaterializeResult.json?.detail ||
|
|
606
|
+
'Failed to schedule cube materialization';
|
|
607
|
+
console.error('Cube materialization failed:', errorMsg);
|
|
608
|
+
// Don't throw - pre-aggs were still created successfully
|
|
609
|
+
setMaterializationError(
|
|
610
|
+
`Pre-aggs created but cube materialization failed: ${errorMsg}`,
|
|
611
|
+
);
|
|
612
|
+
} else {
|
|
613
|
+
console.log(
|
|
614
|
+
'Cube materialization scheduled:',
|
|
615
|
+
cubeMaterializeResult.json,
|
|
616
|
+
);
|
|
617
|
+
console.log(
|
|
618
|
+
'Workflow URLs from response:',
|
|
619
|
+
cubeMaterializeResult.json?.workflow_urls,
|
|
620
|
+
);
|
|
621
|
+
// Store workflow URLs for display
|
|
622
|
+
const urls = cubeMaterializeResult.json?.workflow_urls || [];
|
|
623
|
+
console.log('Setting workflowUrls to:', urls);
|
|
624
|
+
setWorkflowUrls(urls);
|
|
625
|
+
|
|
626
|
+
// If backfill is enabled, also kick off cube backfill
|
|
627
|
+
if (
|
|
628
|
+
config.runBackfill &&
|
|
629
|
+
config.backfillFrom &&
|
|
630
|
+
config.backfillTo
|
|
631
|
+
) {
|
|
632
|
+
console.log(
|
|
633
|
+
'Running cube backfill from',
|
|
634
|
+
config.backfillFrom,
|
|
635
|
+
'to',
|
|
636
|
+
config.backfillTo,
|
|
637
|
+
);
|
|
638
|
+
try {
|
|
639
|
+
const cubeBackfillResult = await djClient.runCubeBackfill(
|
|
640
|
+
cubeName,
|
|
641
|
+
config.backfillFrom,
|
|
642
|
+
config.backfillTo,
|
|
643
|
+
);
|
|
644
|
+
if (cubeBackfillResult._error) {
|
|
645
|
+
console.error(
|
|
646
|
+
'Cube backfill failed:',
|
|
647
|
+
cubeBackfillResult.message,
|
|
648
|
+
);
|
|
649
|
+
// Don't throw - cube materialization was still created
|
|
650
|
+
} else {
|
|
651
|
+
console.log(
|
|
652
|
+
'Cube backfill started:',
|
|
653
|
+
cubeBackfillResult.job_url,
|
|
654
|
+
);
|
|
655
|
+
}
|
|
656
|
+
} catch (cubeBackfillErr) {
|
|
657
|
+
console.error('Failed to run cube backfill:', cubeBackfillErr);
|
|
658
|
+
// Don't throw - cube materialization was still created
|
|
659
|
+
}
|
|
660
|
+
}
|
|
661
|
+
}
|
|
662
|
+
}
|
|
663
|
+
|
|
664
|
+
return result;
|
|
665
|
+
} catch (err) {
|
|
666
|
+
console.error('Failed to plan materialization:', err);
|
|
667
|
+
const errorMsg = err.message || 'Failed to plan materialization';
|
|
668
|
+
setMaterializationError(errorMsg);
|
|
669
|
+
throw err;
|
|
670
|
+
}
|
|
671
|
+
},
|
|
672
|
+
[
|
|
673
|
+
djClient,
|
|
674
|
+
selectedMetrics,
|
|
675
|
+
selectedDimensions,
|
|
676
|
+
plannedPreaggs,
|
|
677
|
+
loadedCubeName,
|
|
678
|
+
],
|
|
679
|
+
);
|
|
680
|
+
|
|
681
|
+
// Handle updating config for a single existing pre-agg
|
|
682
|
+
const handleUpdateConfig = useCallback(
|
|
683
|
+
async (preaggId, config) => {
|
|
684
|
+
setMaterializationError(null);
|
|
685
|
+
try {
|
|
686
|
+
const result = await djClient.updatePreaggConfig(
|
|
687
|
+
preaggId,
|
|
688
|
+
config.strategy,
|
|
689
|
+
config.schedule,
|
|
690
|
+
config.lookbackWindow,
|
|
691
|
+
);
|
|
692
|
+
|
|
693
|
+
if (result._error || result.message || result.detail) {
|
|
694
|
+
const errorMsg =
|
|
695
|
+
result.message || result.detail || 'Failed to update config';
|
|
696
|
+
setMaterializationError(errorMsg);
|
|
697
|
+
throw new Error(errorMsg);
|
|
698
|
+
}
|
|
699
|
+
|
|
700
|
+
// Update the specific pre-agg in our state
|
|
701
|
+
setPlannedPreaggs(prev => {
|
|
702
|
+
const updated = { ...prev };
|
|
703
|
+
for (const key in updated) {
|
|
704
|
+
if (updated[key].id === preaggId) {
|
|
705
|
+
updated[key] = { ...updated[key], ...result };
|
|
706
|
+
break;
|
|
707
|
+
}
|
|
708
|
+
}
|
|
709
|
+
return updated;
|
|
710
|
+
});
|
|
711
|
+
|
|
712
|
+
return result;
|
|
713
|
+
} catch (err) {
|
|
714
|
+
console.error('Failed to update config:', err);
|
|
715
|
+
const errorMsg = err.message || 'Failed to update config';
|
|
716
|
+
setMaterializationError(errorMsg);
|
|
717
|
+
throw err;
|
|
718
|
+
}
|
|
719
|
+
},
|
|
720
|
+
[djClient],
|
|
721
|
+
);
|
|
722
|
+
|
|
723
|
+
// Handle creating/refreshing a scheduled workflow for a pre-agg
|
|
724
|
+
const handleCreateWorkflow = useCallback(
|
|
725
|
+
async preaggId => {
|
|
726
|
+
setMaterializationError(null);
|
|
727
|
+
try {
|
|
728
|
+
const result = await djClient.materializePreagg(preaggId);
|
|
729
|
+
|
|
730
|
+
if (result._error || result.message || result.detail) {
|
|
731
|
+
const errorMsg =
|
|
732
|
+
result.message || result.detail || 'Failed to create workflow';
|
|
733
|
+
setMaterializationError(errorMsg);
|
|
734
|
+
return null;
|
|
735
|
+
}
|
|
736
|
+
|
|
737
|
+
// Update the pre-agg with workflow info
|
|
738
|
+
setPlannedPreaggs(prev => {
|
|
739
|
+
const updated = { ...prev };
|
|
740
|
+
for (const key in updated) {
|
|
741
|
+
if (updated[key].id === preaggId) {
|
|
742
|
+
updated[key] = {
|
|
743
|
+
...updated[key],
|
|
744
|
+
workflow_urls: result.workflow_urls,
|
|
745
|
+
workflow_status: result.workflow_status || 'active',
|
|
746
|
+
};
|
|
747
|
+
break;
|
|
748
|
+
}
|
|
749
|
+
}
|
|
750
|
+
return updated;
|
|
751
|
+
});
|
|
752
|
+
|
|
753
|
+
return result;
|
|
754
|
+
} catch (err) {
|
|
755
|
+
console.error('Failed to create workflow:', err);
|
|
756
|
+
setMaterializationError(err.message || 'Failed to create workflow');
|
|
757
|
+
return null;
|
|
758
|
+
}
|
|
759
|
+
},
|
|
760
|
+
[djClient],
|
|
761
|
+
);
|
|
762
|
+
|
|
763
|
+
// Handle running a backfill for a pre-agg
|
|
764
|
+
const handleRunBackfill = useCallback(
|
|
765
|
+
async (preaggId, startDate, endDate) => {
|
|
766
|
+
setMaterializationError(null);
|
|
767
|
+
try {
|
|
768
|
+
const result = await djClient.runPreaggBackfill(
|
|
769
|
+
preaggId,
|
|
770
|
+
startDate,
|
|
771
|
+
endDate,
|
|
772
|
+
);
|
|
773
|
+
|
|
774
|
+
if (result._error || result.message || result.detail) {
|
|
775
|
+
const errorMsg =
|
|
776
|
+
result.message || result.detail || 'Failed to run backfill';
|
|
777
|
+
setMaterializationError(errorMsg);
|
|
778
|
+
return null;
|
|
779
|
+
}
|
|
780
|
+
|
|
781
|
+
// Return the job URL so the UI can display it
|
|
782
|
+
return result;
|
|
783
|
+
} catch (err) {
|
|
784
|
+
console.error('Failed to run backfill:', err);
|
|
785
|
+
setMaterializationError(err.message || 'Failed to run backfill');
|
|
786
|
+
return null;
|
|
787
|
+
}
|
|
788
|
+
},
|
|
789
|
+
[djClient],
|
|
790
|
+
);
|
|
791
|
+
|
|
792
|
+
// Handle running an ad-hoc job for a pre-agg (uses backfill with same start/end date)
|
|
793
|
+
const handleRunAdhoc = useCallback(
|
|
794
|
+
async (preaggId, partitionDate) => {
|
|
795
|
+
setMaterializationError(null);
|
|
796
|
+
try {
|
|
797
|
+
// Use backfill endpoint with same start and end date for single-date runs
|
|
798
|
+
const result = await djClient.runPreaggBackfill(
|
|
799
|
+
preaggId,
|
|
800
|
+
partitionDate,
|
|
801
|
+
partitionDate,
|
|
802
|
+
);
|
|
803
|
+
|
|
804
|
+
if (result._error || result.message || result.detail) {
|
|
805
|
+
const errorMsg =
|
|
806
|
+
result.message || result.detail || 'Failed to run ad-hoc job';
|
|
807
|
+
setMaterializationError(errorMsg);
|
|
808
|
+
return null;
|
|
809
|
+
}
|
|
810
|
+
|
|
811
|
+
// Return the job URL so the UI can display it
|
|
812
|
+
return result;
|
|
813
|
+
} catch (err) {
|
|
814
|
+
console.error('Failed to run ad-hoc job:', err);
|
|
815
|
+
setMaterializationError(err.message || 'Failed to run ad-hoc job');
|
|
816
|
+
return null;
|
|
817
|
+
}
|
|
818
|
+
},
|
|
819
|
+
[djClient],
|
|
820
|
+
);
|
|
821
|
+
|
|
822
|
+
// Deactivate (pause) a pre-agg's materialization workflow, then mirror the
// backend's behavior locally: the backend clears all workflow config, so the
// matching entry in plannedPreaggs is reset to a clean slate as well.
// Returns the API result or null on failure.
const handleDeactivatePreaggWorkflow = useCallback(
  async preaggId => {
    setMaterializationError(null);
    try {
      const response = await djClient.deactivatePreaggWorkflow(preaggId);

      if (response._error || response.message?.includes('Failed')) {
        setMaterializationError(
          response.message || response.detail || 'Failed to deactivate workflow',
        );
        return null;
      }

      // Clear the workflow config on the (single) pre-agg with this id.
      // Always return a fresh object so React sees the state change.
      setPlannedPreaggs(prev => {
        const next = { ...prev };
        const matchKey = Object.keys(next).find(
          key => next[key].id === preaggId,
        );
        if (matchKey !== undefined) {
          next[matchKey] = {
            ...next[matchKey],
            strategy: null,
            schedule: null,
            lookback_window: null,
            workflow_status: null,
            workflow_urls: null,
          };
        }
        return next;
      });

      return response;
    } catch (error) {
      console.error('Failed to deactivate workflow:', error);
      setMaterializationError(error.message || 'Failed to deactivate workflow');
      return null;
    }
  },
  [djClient],
);
|
|
865
|
+
|
|
866
|
+
// Deactivate the workflow of the currently loaded cube and clear the local
// cube-materialization state on success.
// Returns the parsed response body, or null when no cube is loaded or the
// call fails.
const handleDeactivateCubeWorkflow = useCallback(async () => {
  if (!loadedCubeName) {
    setMaterializationError('No cube loaded');
    return null;
  }
  setMaterializationError(null);
  try {
    const response = await djClient.deactivateCubeWorkflow(loadedCubeName);

    // djClient returns { status, json } here; 4xx/5xx means failure.
    if (response.status >= 400) {
      const detail =
        response.json?.message ||
        response.json?.detail ||
        'Failed to deactivate cube workflow';
      setMaterializationError(detail);
      return null;
    }

    // Workflow is gone — drop its URLs and materialization config locally.
    setWorkflowUrls([]);
    setCubeMaterialization(null);

    return response.json;
  } catch (error) {
    console.error('Failed to deactivate cube workflow:', error);
    setMaterializationError(
      error.message || 'Failed to deactivate cube workflow',
    );
    return null;
  }
}, [djClient, loadedCubeName]);
|
|
898
|
+
|
|
899
|
+
// Apply a new materialization config (schedule / strategy / lookback window)
// to the loaded cube by re-issuing refreshCubeWorkflow with the new values,
// then sync the local cubeMaterialization state and workflow URLs.
// Returns the parsed response body, or null when no cube is loaded or the
// call fails.
const handleUpdateCubeConfig = useCallback(
  async config => {
    if (!loadedCubeName) {
      setMaterializationError('No cube loaded');
      return null;
    }
    setMaterializationError(null);
    const { schedule, strategy, lookbackWindow } = config;
    try {
      const response = await djClient.refreshCubeWorkflow(
        loadedCubeName,
        schedule,
        strategy,
        lookbackWindow,
      );

      if (response.status >= 400) {
        const detail =
          response.json?.message ||
          response.json?.detail ||
          'Failed to update cube config';
        setMaterializationError(detail);
        return null;
      }

      // Mirror the accepted config and the refreshed workflow URLs locally.
      const urls = response.json?.workflow_urls || [];
      setWorkflowUrls(urls);
      setCubeMaterialization(prev => ({
        ...prev,
        schedule,
        strategy,
        lookbackWindow,
        workflowUrls: urls,
      }));

      return response.json;
    } catch (error) {
      console.error('Failed to update cube config:', error);
      setMaterializationError(error.message || 'Failed to update cube config');
      return null;
    }
  },
  [djClient, loadedCubeName],
);
|
|
945
|
+
|
|
946
|
+
// Re-trigger the cube's materialization workflow using its current
// schedule / strategy / lookback-window settings, and refresh the locally
// cached workflow URLs from the response.
// Returns the parsed response body, or null when there is nothing to
// refresh or the call fails.
const handleRefreshCubeWorkflow = useCallback(async () => {
  if (!loadedCubeName || !cubeMaterialization) {
    setMaterializationError('No cube materialization to refresh');
    return null;
  }
  setMaterializationError(null);
  try {
    const { schedule, strategy, lookbackWindow } = cubeMaterialization;
    const response = await djClient.refreshCubeWorkflow(
      loadedCubeName,
      schedule,
      strategy,
      lookbackWindow,
    );

    if (response.status >= 400) {
      const detail =
        response.json?.message ||
        response.json?.detail ||
        'Failed to refresh cube workflow';
      setMaterializationError(detail);
      return null;
    }

    // Keep the displayed workflow URLs in sync with the refreshed workflow.
    const urls = response.json?.workflow_urls || [];
    setWorkflowUrls(urls);
    setCubeMaterialization(prev => ({
      ...prev,
      workflowUrls: urls,
    }));

    return response.json;
  } catch (error) {
    console.error('Failed to refresh cube workflow:', error);
    setMaterializationError(error.message || 'Failed to refresh cube workflow');
    return null;
  }
}, [djClient, loadedCubeName, cubeMaterialization]);
|
|
985
|
+
|
|
986
|
+
// Kick off a backfill of the loaded cube over [startDate, endDate].
// Validates that a cube is loaded and a start date was provided before
// calling the API. Returns the backfill result or null on failure.
const handleRunCubeBackfill = useCallback(
  async (startDate, endDate) => {
    // Guard clauses: both a loaded cube and a start date are required.
    if (!loadedCubeName) {
      setMaterializationError('No cube to backfill');
      return null;
    }
    if (!startDate) {
      setMaterializationError('Start date is required');
      return null;
    }
    setMaterializationError(null);
    try {
      const response = await djClient.runCubeBackfill(
        loadedCubeName,
        startDate,
        endDate,
      );

      if (response._error) {
        setMaterializationError(
          response.message || 'Failed to run cube backfill',
        );
        return null;
      }

      return response;
    } catch (error) {
      console.error('Failed to run cube backfill:', error);
      setMaterializationError(error.message || 'Failed to run cube backfill');
      return null;
    }
  },
  [djClient, loadedCubeName],
);
|
|
1021
|
+
|
|
1022
|
+
// Fetch the generated SQL for the current metric/dimension selection with
// materialized pre-aggregations disabled (i.e. the "raw" query).
// Best-effort: failures are logged and reported to the caller as null
// rather than surfaced in materializationError.
const handleFetchRawSql = useCallback(async () => {
  try {
    const response = await djClient.metricsV3(
      selectedMetrics,
      selectedDimensions,
      '',
      false, // use_materialized = false for raw SQL
    );
    return response.sql;
  } catch (error) {
    console.error('Failed to fetch raw SQL:', error);
    return null;
  }
}, [djClient, selectedMetrics, selectedDimensions]);
|
|
1037
|
+
|
|
1038
|
+
// Mark a column as a partition on the given node (a prerequisite for
// incremental materialization). Logs and rethrows on failure so the
// calling UI can react to the error itself.
const handleSetPartition = useCallback(
  async (nodeName, columnName, partitionType, format, granularity) => {
    try {
      const response = await djClient.setPartition(
        nodeName,
        columnName,
        partitionType,
        format,
        granularity,
      );
      return response;
    } catch (error) {
      console.error('Failed to set partition:', error);
      throw error;
    }
  },
  [djClient],
);
|
|
1056
|
+
|
|
1057
|
+
// Re-fetch the measures and metrics results for the current selection
// (used after a partition is set, which can change planning output).
// No-op unless at least one metric and one dimension are selected;
// fetch failures are logged and otherwise swallowed.
const handleRefreshMeasures = useCallback(async () => {
  if (selectedMetrics.length === 0 || selectedDimensions.length === 0) {
    return;
  }
  try {
    // Fetch both results in parallel — they are independent requests.
    const [measures, metrics] = await Promise.all([
      djClient.measuresV3(selectedMetrics, selectedDimensions),
      djClient.metricsV3(selectedMetrics, selectedDimensions),
    ]);
    setMeasuresResult(measures);
    setMetricsResult(metrics);
  } catch (error) {
    console.error('Failed to refresh measures:', error);
  }
}, [djClient, selectedMetrics, selectedDimensions]);
|
|
1072
|
+
|
|
1073
|
+
// Fetch a node's columns together with their partition info, so callers
// can check whether a temporal partition already exists on the node.
const handleFetchNodePartitions = useCallback(
  async nodeName => djClient.getNodeColumnsWithPartitions(nodeName),
  [djClient],
);
|
|
1080
|
+
|
|
121
1081
|
return (
|
|
122
1082
|
<div className="planner-page">
|
|
123
1083
|
{/* Header */}
|
|
124
1084
|
<header className="planner-header">
|
|
125
1085
|
<div className="planner-header-content">
|
|
126
1086
|
<h1>Query Planner</h1>
|
|
127
|
-
<p>Explore metrics and dimensions and plan materializations</p>
|
|
1087
|
+
{/* <p>Explore metrics and dimensions and plan materializations</p> */}
|
|
128
1088
|
</div>
|
|
129
1089
|
{error && <div className="header-error">{error}</div>}
|
|
130
1090
|
</header>
|
|
@@ -141,6 +1101,10 @@ export function QueryPlannerPage() {
|
|
|
141
1101
|
selectedDimensions={selectedDimensions}
|
|
142
1102
|
onDimensionsChange={handleDimensionsChange}
|
|
143
1103
|
loading={dimensionsLoading}
|
|
1104
|
+
cubes={cubes}
|
|
1105
|
+
onLoadCubePreset={handleLoadCubePreset}
|
|
1106
|
+
loadedCubeName={loadedCubeName}
|
|
1107
|
+
onClearSelection={handleClearSelection}
|
|
144
1108
|
/>
|
|
145
1109
|
</aside>
|
|
146
1110
|
|
|
@@ -159,12 +1123,22 @@ export function QueryPlannerPage() {
|
|
|
159
1123
|
{measuresResult.metric_formulas?.length || 0} metrics
|
|
160
1124
|
</span>
|
|
161
1125
|
</div>
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
1126
|
+
{/* Suspense boundary for lazy-loaded ReactFlow graph */}
|
|
1127
|
+
<Suspense
|
|
1128
|
+
fallback={
|
|
1129
|
+
<div className="graph-loading">
|
|
1130
|
+
<div className="loading-spinner" />
|
|
1131
|
+
<span>Loading graph...</span>
|
|
1132
|
+
</div>
|
|
1133
|
+
}
|
|
1134
|
+
>
|
|
1135
|
+
<MetricFlowGraph
|
|
1136
|
+
grainGroups={measuresResult.grain_groups}
|
|
1137
|
+
metricFormulas={measuresResult.metric_formulas}
|
|
1138
|
+
selectedNode={selectedNode}
|
|
1139
|
+
onNodeSelect={handleNodeSelect}
|
|
1140
|
+
/>
|
|
1141
|
+
</Suspense>
|
|
168
1142
|
</>
|
|
169
1143
|
) : (
|
|
170
1144
|
<div className="graph-empty">
|
|
@@ -180,11 +1154,23 @@ export function QueryPlannerPage() {
|
|
|
180
1154
|
|
|
181
1155
|
{/* Right: Details Panel */}
|
|
182
1156
|
<aside className="planner-details">
|
|
183
|
-
{selectedNode?.type === 'preagg'
|
|
1157
|
+
{selectedNode?.type === 'preagg' ||
|
|
1158
|
+
selectedNode?.type === 'component' ? (
|
|
184
1159
|
<PreAggDetailsPanel
|
|
185
|
-
preAgg={
|
|
1160
|
+
preAgg={
|
|
1161
|
+
selectedNode?.type === 'component'
|
|
1162
|
+
? measuresResult?.grain_groups?.[
|
|
1163
|
+
selectedNode.data?.grainGroupIndex
|
|
1164
|
+
]
|
|
1165
|
+
: selectedNode.data
|
|
1166
|
+
}
|
|
186
1167
|
metricFormulas={measuresResult?.metric_formulas}
|
|
187
1168
|
onClose={handleClosePanel}
|
|
1169
|
+
highlightedComponent={
|
|
1170
|
+
selectedNode?.type === 'component'
|
|
1171
|
+
? selectedNode.data?.name
|
|
1172
|
+
: null
|
|
1173
|
+
}
|
|
188
1174
|
/>
|
|
189
1175
|
) : selectedNode?.type === 'metric' ? (
|
|
190
1176
|
<MetricDetailsPanel
|
|
@@ -198,6 +1184,27 @@ export function QueryPlannerPage() {
|
|
|
198
1184
|
metricsResult={metricsResult}
|
|
199
1185
|
selectedMetrics={selectedMetrics}
|
|
200
1186
|
selectedDimensions={selectedDimensions}
|
|
1187
|
+
plannedPreaggs={plannedPreaggs}
|
|
1188
|
+
onPlanMaterialization={handlePlanMaterialization}
|
|
1189
|
+
onUpdateConfig={handleUpdateConfig}
|
|
1190
|
+
onCreateWorkflow={handleCreateWorkflow}
|
|
1191
|
+
onRunBackfill={handleRunBackfill}
|
|
1192
|
+
onRunAdhoc={handleRunAdhoc}
|
|
1193
|
+
onFetchRawSql={handleFetchRawSql}
|
|
1194
|
+
onSetPartition={handleSetPartition}
|
|
1195
|
+
onRefreshMeasures={handleRefreshMeasures}
|
|
1196
|
+
onFetchNodePartitions={handleFetchNodePartitions}
|
|
1197
|
+
materializationError={materializationError}
|
|
1198
|
+
onClearError={() => setMaterializationError(null)}
|
|
1199
|
+
workflowUrls={workflowUrls}
|
|
1200
|
+
onClearWorkflowUrls={() => setWorkflowUrls([])}
|
|
1201
|
+
loadedCubeName={loadedCubeName}
|
|
1202
|
+
cubeMaterialization={cubeMaterialization}
|
|
1203
|
+
onUpdateCubeConfig={handleUpdateCubeConfig}
|
|
1204
|
+
onRefreshCubeWorkflow={handleRefreshCubeWorkflow}
|
|
1205
|
+
onRunCubeBackfill={handleRunCubeBackfill}
|
|
1206
|
+
onDeactivatePreaggWorkflow={handleDeactivatePreaggWorkflow}
|
|
1207
|
+
onDeactivateCubeWorkflow={handleDeactivateCubeWorkflow}
|
|
201
1208
|
/>
|
|
202
1209
|
)}
|
|
203
1210
|
</aside>
|