datajunction-ui 0.0.1-a84.dev1 → 0.0.1-a84.dev2

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "datajunction-ui",
-   "version": "0.0.1-a84.dev1",
+   "version": "0.0.1-a84.dev2",
    "description": "DataJunction Metrics Platform UI",
    "module": "src/index.tsx",
    "repository": {
@@ -12,6 +12,8 @@ export default function AddMaterializationPopover({ node, onSubmit }) {
    const [options, setOptions] = useState([]);
    const [jobs, setJobs] = useState([]);

+   const timePartitionColumns = node.columns.filter(col => col.partition);
+
    const ref = useRef(null);

    useEffect(() => {
@@ -42,13 +44,23 @@ export default function AddMaterializationPopover({ node, onSubmit }) {
    if (!values.job_type) {
      values.job_type = 'spark_sql';
    }
-   const { status, json } = await djClient.materialize(
-     values.node,
-     values.job_type,
-     values.strategy,
-     values.schedule,
-     config,
-   );
+   const { status, json } = (
+     values.job_type === 'druid_cube' ?
+       await djClient.materializeCube(
+         values.node,
+         values.job_type,
+         values.strategy,
+         values.schedule,
+         values.lookback_window,
+       ) :
+       await djClient.materialize(
+         values.node,
+         values.job_type,
+         values.strategy,
+         values.schedule,
+         config,
+       )
+   );
    if (status === 200 || status === 201) {
      setStatus({ success: json.message });
      window.location.reload();
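
Note: a minimal sketch (not part of the package) of what the branch above does at runtime, assuming the materializeCube client method added near the end of this diff; the helper name submitMaterialization is illustrative only.

    // Cube materializations post a lookback window instead of a Spark config.
    async function submitMaterialization(djClient, values, config) {
      if (values.job_type === 'druid_cube') {
        return await djClient.materializeCube(
          values.node,
          values.job_type,
          values.strategy,
          values.schedule,
          values.lookback_window,
        );
      }
      // Every other job type keeps the original materialize() call.
      return await djClient.materialize(
        values.node,
        values.job_type,
        values.strategy,
        values.schedule,
        config,
      );
    }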
@@ -99,8 +111,8 @@ export default function AddMaterializationPopover({ node, onSubmit }) {
    initialValues={{
      node: node?.name,
      job_type:
-       node?.type === 'cube' ? 'druid_metrics_cube' : 'spark_sql',
-     strategy: 'full',
+       node?.type === 'cube' ? 'druid_cube' : 'spark_sql',
+     strategy: timePartitionColumns.length == 1 ? 'incremental_time' : 'full',
      schedule: '@daily',
      lookback_window: '1 DAY',
      spark_config: {
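
For reference, a small sketch of how the new defaults are derived from the node, assembled from the hunks above rather than from any additional package code (strict equality is used here for clarity).

    // A node with exactly one partition column defaults to incremental-by-time.
    const timePartitionColumns = node.columns.filter(col => col.partition);
    const defaultJobType = node?.type === 'cube' ? 'druid_cube' : 'spark_sql';
    const defaultStrategy =
      timePartitionColumns.length === 1 ? 'incremental_time' : 'full';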
@@ -121,21 +133,21 @@ export default function AddMaterializationPopover({ node, onSubmit }) {

    <Field as="select" name="job_type">
      <>
-       <option
+       {/* <option
          key={'druid_measures_cube'}
          value={'druid_measures_cube'}
        >
          Druid Measures Cube (Pre-Agg Cube)
-       </option>
+       </option> */}
        <option
-         key={'druid_metrics_cube'}
-         value={'druid_metrics_cube'}
+         key={'druid_measures_cube'}
+         value={'druid_measures_cube'}
        >
-         Druid Metrics Cube (Post-Agg Cube)
+         Druid
        </option>
-       <option key={'spark_sql'} value={'spark_sql'}>
+       {/* <option key={'spark_sql'} value={'spark_sql'}>
          Iceberg Table
-       </option>
+       </option> */}
      </>
    </Field>
    <br />
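
After this change the job type dropdown effectively exposes a single enabled option. A sketch of the markup that now renders, with the commented-out options omitted (not taken verbatim from the package):

    <Field as="select" name="job_type">
      <option key={'druid_measures_cube'} value={'druid_measures_cube'}>
        Druid
      </option>
    </Field>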
@@ -150,6 +162,7 @@ export default function AddMaterializationPopover({ node, onSubmit }) {
      value={node?.name}
      readOnly={true}
    />
+   {console.log('timePartitionColumns.length', timePartitionColumns.length)}
    <span data-testid="edit-partition">
      <label htmlFor="strategy">Strategy</label>
      <Field as="select" name="strategy">
@@ -70,7 +70,7 @@ export default function NodeInfoTab({ node }) {
    );

    const metricQueryDiv =
-     node.type === 'metric' ? (
+     node?.type === 'metric' ? (
        <div className="list-group-item d-flex">
          <div className="gap-2 w-100 justify-content-between py-3">
            <div style={{ marginBottom: '30px' }}>
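
The only change here is optional chaining: node?.type evaluates to undefined when node is null or undefined, so the comparison is simply false instead of throwing. A minimal illustration (not from the package):

    const node = null;
    // node.type === 'metric'  would throw: Cannot read properties of null
    node?.type === 'metric';   // false, no error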
@@ -980,6 +980,25 @@ export const DataJunctionAPI = {
      );
      return { status: response.status, json: await response.json() };
    },
+   materializeCube: async function (nodeName, jobType, strategy, schedule, lookbackWindow) {
+     const response = await fetch(
+       `${DJ_URL}/nodes/${nodeName}/materialization`,
+       {
+         method: 'POST',
+         headers: {
+           'Content-Type': 'application/json',
+         },
+         body: JSON.stringify({
+           job: jobType,
+           strategy: strategy,
+           schedule: schedule,
+           lookback_window: lookbackWindow,
+         }),
+         credentials: 'include',
+       },
+     );
+     return { status: response.status, json: await response.json() };
+   },
    runBackfill: async function (nodeName, materializationName, partitionValues) {
      const response = await fetch(
        `${DJ_URL}/nodes/${nodeName}/materializations/${materializationName}/backfill`,
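
A hedged usage sketch of the new materializeCube client method; the signature and request body come from the hunk above, while the node name and argument values are illustrative only.

    // POSTs to `${DJ_URL}/nodes/<nodeName>/materialization` with a JSON body of
    // { job, strategy, schedule, lookback_window } and credentials included.
    const { status, json } = await DataJunctionAPI.materializeCube(
      'default.repair_orders_cube', // hypothetical cube node name
      'druid_cube',
      'incremental_time',
      '@daily',
      '1 DAY',
    );
    if (status === 200 || status === 201) {
      console.log(json.message);
    }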