drizzle-cube 0.1.29 → 0.1.31
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/adapters/{compiler-BibS3bXd.js → compiler-CVega_Gv.js} +1786 -1820
- package/dist/adapters/express/index.js +87 -89
- package/dist/adapters/fastify/index.js +81 -83
- package/dist/adapters/hono/index.js +113 -116
- package/dist/adapters/nextjs/index.js +101 -102
- package/dist/client/charts.js +1 -1
- package/dist/client/chunks/charts-MTxju0dv.js +2402 -0
- package/dist/client/chunks/charts-MTxju0dv.js.map +1 -0
- package/dist/client/chunks/{icons-B_0yoGjv.js → icons-C_McHd9z.js} +10 -1
- package/dist/client/chunks/{icons-B_0yoGjv.js.map → icons-C_McHd9z.js.map} +1 -1
- package/dist/client/chunks/providers-DwDirAYo.js +208 -0
- package/dist/client/chunks/{providers-B7MVnAAt.js.map → providers-DwDirAYo.js.map} +1 -1
- package/dist/client/components.js +9741 -9616
- package/dist/client/components.js.map +1 -1
- package/dist/client/hooks.js +2 -2
- package/dist/client/index.js +2 -2
- package/dist/client/providers.js +1 -1
- package/dist/client/styles.css +1 -1
- package/dist/client-bundle-stats.html +1 -1
- package/dist/server/index.js +1479 -1485
- package/package.json +29 -20
- package/dist/client/chunks/charts-BM1k01Jw.js +0 -2404
- package/dist/client/chunks/charts-BM1k01Jw.js.map +0 -1
- package/dist/client/chunks/providers-B7MVnAAt.js +0 -211
package/dist/adapters/express/index.js

```diff
@@ -1,148 +1,146 @@
-import
-import
-import { S as
-function
+import p, { Router as w } from "express";
+import S from "cors";
+import { S as E, c as s, f as g, a as R, b as q, h as v } from "../compiler-CVega_Gv.js";
+function $(l) {
 const {
 cubes: c,
-drizzle:
-schema:
+drizzle: j,
+schema: x,
 extractSecurityContext: d,
-engineType:
-cors:
+engineType: h,
+cors: b,
 basePath: y = "/cubejs-api/v1",
-jsonLimit:
+jsonLimit: Q = "10mb"
 } = l;
 if (!c || c.length === 0)
 throw new Error("At least one cube must be provided in the cubes array");
-const
-
-const
-drizzle:
-schema:
-engineType:
+const i = w();
+b && i.use(S(b)), i.use(p.json({ limit: Q })), i.use(p.urlencoded({ extended: !0, limit: Q }));
+const a = new E({
+drizzle: j,
+schema: x,
+engineType: h
 });
 return c.forEach((t) => {
-
-}),
+a.registerCube(t);
+}), i.post(`${y}/load`, async (t, r) => {
 try {
-const e = t.body.query || t.body,
-if (!
-return r.status(400).json(
-`Query validation failed: ${
+const e = t.body.query || t.body, o = await d(t, r), n = a.validateQuery(e);
+if (!n.isValid)
+return r.status(400).json(s(
+`Query validation failed: ${n.errors.join(", ")}`,
 400
 ));
-const
-r.json(
+const u = await a.executeMultiCubeQuery(e, o);
+r.json(g(e, u, a));
 } catch (e) {
-console.error("Query execution error:", e), r.status(500).json(
+console.error("Query execution error:", e), r.status(500).json(s(
 e instanceof Error ? e.message : "Query execution failed",
 500
 ));
 }
-}),
+}), i.get(`${y}/load`, async (t, r) => {
 try {
 const e = t.query.query;
 if (!e)
-return r.status(400).json(
+return r.status(400).json(s(
 "Query parameter is required",
 400
 ));
-let
+let o;
 try {
-
+o = JSON.parse(e);
 } catch {
-return r.status(400).json(
+return r.status(400).json(s(
 "Invalid JSON in query parameter",
 400
 ));
 }
-const
-if (!
-return r.status(400).json(
-`Query validation failed: ${
+const n = await d(t, r), u = a.validateQuery(o);
+if (!u.isValid)
+return r.status(400).json(s(
+`Query validation failed: ${u.errors.join(", ")}`,
 400
 ));
-const f = await
-r.json(
+const f = await a.executeMultiCubeQuery(o, n);
+r.json(g(o, f, a));
 } catch (e) {
-console.error("Query execution error:", e), r.status(500).json(
+console.error("Query execution error:", e), r.status(500).json(s(
 e instanceof Error ? e.message : "Query execution failed",
 500
 ));
 }
-}),
+}), i.get(`${y}/meta`, (t, r) => {
 try {
-const e =
-r.json(
+const e = a.getMetadata();
+r.json(R(e));
 } catch (e) {
-console.error("Metadata error:", e), r.status(500).json(
+console.error("Metadata error:", e), r.status(500).json(s(
 e instanceof Error ? e.message : "Failed to fetch metadata",
 500
 ));
 }
-}),
-var e, s;
+}), i.post(`${y}/sql`, async (t, r) => {
 try {
-const
-if (!
-return r.status(400).json(
-`Query validation failed: ${
+const e = t.body, o = await d(t, r), n = a.validateQuery(e);
+if (!n.isValid)
+return r.status(400).json(s(
+`Query validation failed: ${n.errors.join(", ")}`,
 400
 ));
-const
-if (!
-return r.status(400).json(
+const u = e.measures?.[0] || e.dimensions?.[0];
+if (!u)
+return r.status(400).json(s(
 "No measures or dimensions specified",
 400
 ));
-const
-r.json(
-} catch (
-console.error("SQL generation error:",
-
+const f = u.split(".")[0], m = await a.generateSQL(f, e, o);
+r.json(q(e, m));
+} catch (e) {
+console.error("SQL generation error:", e), r.status(500).json(s(
+e instanceof Error ? e.message : "SQL generation failed",
 500
 ));
 }
-}),
-var e, s;
+}), i.get(`${y}/sql`, async (t, r) => {
 try {
-const
-if (!
-return r.status(400).json(
+const e = t.query.query;
+if (!e)
+return r.status(400).json(s(
 "Query parameter is required",
 400
 ));
-const
-if (!
-return r.status(400).json(
-`Query validation failed: ${
+const o = JSON.parse(e), n = await d(t, r), u = a.validateQuery(o);
+if (!u.isValid)
+return r.status(400).json(s(
+`Query validation failed: ${u.errors.join(", ")}`,
 400
 ));
-const
-if (!
-return r.status(400).json(
+const f = o.measures?.[0] || o.dimensions?.[0];
+if (!f)
+return r.status(400).json(s(
 "No measures or dimensions specified",
 400
 ));
-const
-r.json(
-} catch (
-console.error("SQL generation error:",
-
+const m = f.split(".")[0], C = await a.generateSQL(m, o, n);
+r.json(q(o, C));
+} catch (e) {
+console.error("SQL generation error:", e), r.status(500).json(s(
+e instanceof Error ? e.message : "SQL generation failed",
 500
 ));
 }
-}),
+}), i.post(`${y}/dry-run`, async (t, r) => {
 try {
-const e = t.body.query || t.body,
-r.json(
+const e = t.body.query || t.body, o = await d(t, r), n = await v(e, o, a);
+r.json(n);
 } catch (e) {
 console.error("Dry-run error:", e), r.status(400).json({
 error: e instanceof Error ? e.message : "Dry-run validation failed",
 valid: !1
 });
 }
-}),
+}), i.get(`${y}/dry-run`, async (t, r) => {
 try {
 const e = t.query.query;
 if (!e)
@@ -150,28 +148,28 @@ function N(l) {
 error: "Query parameter is required",
 valid: !1
 });
-const
-r.json(
+const o = JSON.parse(e), n = await d(t, r), u = await v(o, n, a);
+r.json(u);
 } catch (e) {
 console.error("Dry-run error:", e), r.status(400).json({
 error: e instanceof Error ? e.message : "Dry-run validation failed",
 valid: !1
 });
 }
-}),
-console.error("Express adapter error:", t), e.headersSent || e.status(500).json(
-}),
+}), i.use((t, r, e, o) => {
+console.error("Express adapter error:", t), e.headersSent || e.status(500).json(s(t, 500));
+}), i;
 }
-function
-const
-return l.use("/",
+function L(l, c) {
+const j = $(c);
+return l.use("/", j), l;
 }
-function
-const c =
-return
+function J(l) {
+const c = p();
+return L(c, l);
 }
 export {
-
-
-
+J as createCubeApp,
+$ as createCubeRouter,
+L as mountCubeRoutes
 };
```
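For orientation, the new express bundle exports createCubeApp, createCubeRouter and mountCubeRoutes, and accepts the options destructured at the top of the file (cubes, drizzle, schema, extractSecurityContext, engineType, cors, basePath, jsonLimit, with basePath defaulting to "/cubejs-api/v1" and jsonLimit to "10mb"). Below is a minimal usage sketch; the drizzle-cube/adapters/express import path, the cube and schema modules, the engineType value and the security-context shape are illustrative assumptions and are not taken from this diff.

```js
// Minimal usage sketch for the express adapter above. Only the option names and
// defaults come from the bundle in this diff; everything marked as assumed or
// hypothetical is illustrative.
import { drizzle } from "drizzle-orm/node-postgres";
import pg from "pg";
import { createCubeApp } from "drizzle-cube/adapters/express"; // assumed subpath export

import * as schema from "./schema.js";   // hypothetical Drizzle schema module
import { salesCube } from "./cubes.js";  // hypothetical cube definition

const pool = new pg.Pool({ connectionString: process.env.DATABASE_URL });
const db = drizzle(pool, { schema });

const app = createCubeApp({
  cubes: [salesCube],            // at least one cube is required (the bundle throws otherwise)
  drizzle: db,
  schema,
  engineType: "postgres",        // assumed value; the diff only shows the option name
  extractSecurityContext: async (req) => ({
    tenantId: req.headers["x-tenant-id"], // hypothetical context shape
  }),
  cors: { origin: true },        // forwarded to the cors() middleware when set
  basePath: "/cubejs-api/v1",    // default shown above
  jsonLimit: "10mb",             // default shown above
});

app.listen(3000);
```

As the two helpers at the end of the bundle show, createCubeRouter returns just the Express Router, and mountCubeRoutes(app, options) mounts that router at "/" on an existing app.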
package/dist/adapters/fastify/index.js

```diff
@@ -1,30 +1,30 @@
-import { S as
-const
+import { S as j, c as i, f as q, a as $, b as h, h as v } from "../compiler-CVega_Gv.js";
+const Q = function(a, C, p) {
 const {
-cubes:
-drizzle:
-schema:
+cubes: g,
+drizzle: w,
+schema: x,
 extractSecurityContext: y,
-engineType:
-cors:
+engineType: S,
+cors: f,
 basePath: d = "/cubejs-api/v1",
-bodyLimit:
+bodyLimit: b = 10485760
 // 10MB
-} =
-if (!
-return
-
+} = C;
+if (!g || g.length === 0)
+return p(new Error("At least one cube must be provided in the cubes array"));
+f && a.register(import("@fastify/cors"), f), a.addHook("onRequest", async (r, t) => {
 r.method === "POST" && (r.body = void 0);
 });
-const
-drizzle:
-schema:
-engineType:
+const o = new j({
+drizzle: w,
+schema: x,
+engineType: S
 });
-
-
-}),
-bodyLimit:
+g.forEach((r) => {
+o.registerCube(r);
+}), a.post(`${d}/load`, {
+bodyLimit: b,
 schema: {
 body: {
 type: "object",
@@ -33,21 +33,21 @@ const w = function(o, x, q) {
 }
 }, async (r, t) => {
 try {
-const e = r.body,
+const e = r.body, s = e.query || e, u = await y(r), n = o.validateQuery(s);
 if (!n.isValid)
-return t.status(400).send(
+return t.status(400).send(i(
 `Query validation failed: ${n.errors.join(", ")}`,
 400
 ));
-const c = await
-return
+const c = await o.executeMultiCubeQuery(s, u);
+return q(s, c, o);
 } catch (e) {
-return r.log.error(e, "Query execution error"), t.status(500).send(
+return r.log.error(e, "Query execution error"), t.status(500).send(i(
 e instanceof Error ? e.message : "Query execution failed",
 500
 ));
 }
-}),
+}), a.get(`${d}/load`, {
 schema: {
 querystring: {
 type: "object",
@@ -60,41 +60,41 @@ const w = function(o, x, q) {
 }, async (r, t) => {
 try {
 const { query: e } = r.query;
-let
+let s;
 try {
-
+s = JSON.parse(e);
 } catch {
-return t.status(400).send(
+return t.status(400).send(i(
 "Invalid JSON in query parameter",
 400
 ));
 }
-const
+const u = await y(r), n = o.validateQuery(s);
 if (!n.isValid)
-return t.status(400).send(
+return t.status(400).send(i(
 `Query validation failed: ${n.errors.join(", ")}`,
 400
 ));
-const c = await
-return
+const c = await o.executeMultiCubeQuery(s, u);
+return q(s, c, o);
 } catch (e) {
-return r.log.error(e, "Query execution error"), t.status(500).send(
+return r.log.error(e, "Query execution error"), t.status(500).send(i(
 e instanceof Error ? e.message : "Query execution failed",
 500
 ));
 }
-}),
+}), a.get(`${d}/meta`, async (r, t) => {
 try {
-const e =
-return
+const e = o.getMetadata();
+return $(e);
 } catch (e) {
-return r.log.error(e, "Metadata error"), t.status(500).send(
+return r.log.error(e, "Metadata error"), t.status(500).send(i(
 e instanceof Error ? e.message : "Failed to fetch metadata",
 500
 ));
 }
-}),
-bodyLimit:
+}), a.post(`${d}/sql`, {
+bodyLimit: b,
 schema: {
 body: {
 type: "object",
@@ -102,29 +102,28 @@ const w = function(o, x, q) {
 }
 }
 }, async (r, t) => {
-var e, a;
 try {
-const
-if (!
-return t.status(400).send(
-`Query validation failed: ${
+const e = r.body, s = await y(r), u = o.validateQuery(e);
+if (!u.isValid)
+return t.status(400).send(i(
+`Query validation failed: ${u.errors.join(", ")}`,
 400
 ));
-const
-if (!
-return t.status(400).send(
+const n = e.measures?.[0] || e.dimensions?.[0];
+if (!n)
+return t.status(400).send(i(
 "No measures or dimensions specified",
 400
 ));
-const
-return
-} catch (
-return r.log.error(
-
+const c = n.split(".")[0], m = await o.generateSQL(c, e, s);
+return h(e, m);
+} catch (e) {
+return r.log.error(e, "SQL generation error"), t.status(500).send(i(
+e instanceof Error ? e.message : "SQL generation failed",
 500
 ));
 }
-}),
+}), a.get(`${d}/sql`, {
 schema: {
 querystring: {
 type: "object",
@@ -135,30 +134,29 @@ const w = function(o, x, q) {
 }
 }
 }, async (r, t) => {
-var e, a;
 try {
-const { query:
-if (!
-return t.status(400).send(
-`Query validation failed: ${
+const { query: e } = r.query, s = JSON.parse(e), u = await y(r), n = o.validateQuery(s);
+if (!n.isValid)
+return t.status(400).send(i(
+`Query validation failed: ${n.errors.join(", ")}`,
 400
 ));
-const
-if (!
-return t.status(400).send(
+const c = s.measures?.[0] || s.dimensions?.[0];
+if (!c)
+return t.status(400).send(i(
 "No measures or dimensions specified",
 400
 ));
-const
-return
-} catch (
-return r.log.error(
-
+const m = c.split(".")[0], E = await o.generateSQL(m, s, u);
+return h(s, E);
+} catch (e) {
+return r.log.error(e, "SQL generation error"), t.status(500).send(i(
+e instanceof Error ? e.message : "SQL generation failed",
 500
 ));
 }
-}),
-bodyLimit:
+}), a.post(`${d}/dry-run`, {
+bodyLimit: b,
 schema: {
 body: {
 type: "object",
@@ -167,15 +165,15 @@ const w = function(o, x, q) {
 }
 }, async (r, t) => {
 try {
-const e = r.body,
-return await
+const e = r.body, s = e.query || e, u = await y(r);
+return await v(s, u, o);
 } catch (e) {
 return r.log.error(e, "Dry-run error"), t.status(400).send({
 error: e instanceof Error ? e.message : "Dry-run validation failed",
 valid: !1
 });
 }
-}),
+}), a.get(`${d}/dry-run`, {
 schema: {
 querystring: {
 type: "object",
@@ -187,27 +185,27 @@ const w = function(o, x, q) {
 }
 }, async (r, t) => {
 try {
-const { query: e } = r.query,
-return await
+const { query: e } = r.query, s = JSON.parse(e), u = await y(r);
+return await v(s, u, o);
 } catch (e) {
 return r.log.error(e, "Dry-run error"), t.status(400).send({
 error: e instanceof Error ? e.message : "Dry-run validation failed",
 valid: !1
 });
 }
-}),
+}), a.setErrorHandler(async (r, t, e) => (t.log.error(r, "Fastify cube adapter error"), e.statusCode < 400 && e.status(500), i(r, e.statusCode))), p();
 };
-async function
-await
+async function L(l, a) {
+await l.register(Q, a);
 }
-function
-const
+function R(l) {
+const a = require("fastify")({
 logger: !0
 });
-return
+return a.register(Q, l), a;
 }
 export {
-
-
-
+R as createCubeApp,
+Q as cubePlugin,
+L as registerCubeRoutes
 };
```
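The fastify bundle exposes the same surface as a plugin: cubePlugin is registered onto an instance (registerCubeRoutes is a thin wrapper around fastify.register, and createCubeApp creates a logger-enabled instance and registers the plugin), with bodyLimit defaulting to 10485760 bytes in place of express's jsonLimit string. A minimal usage sketch under the same assumptions as the express example above:

```js
// Minimal usage sketch for the fastify adapter above. Only the option names and
// defaults come from the bundle in this diff; everything marked as assumed or
// hypothetical is illustrative.
import Fastify from "fastify";
import { drizzle } from "drizzle-orm/node-postgres";
import pg from "pg";
import { cubePlugin } from "drizzle-cube/adapters/fastify"; // assumed subpath export

import * as schema from "./schema.js";   // hypothetical Drizzle schema module
import { salesCube } from "./cubes.js";  // hypothetical cube definition

const pool = new pg.Pool({ connectionString: process.env.DATABASE_URL });
const db = drizzle(pool, { schema });

const fastify = Fastify({ logger: true });

await fastify.register(cubePlugin, {
  cubes: [salesCube],            // at least one cube is required (the plugin errors otherwise)
  drizzle: db,
  schema,
  engineType: "postgres",        // assumed value; the diff only shows the option name
  extractSecurityContext: async (request) => ({
    tenantId: request.headers["x-tenant-id"], // hypothetical context shape
  }),
  cors: { origin: true },        // passed to @fastify/cors when set
  basePath: "/cubejs-api/v1",    // default shown above
  bodyLimit: 10485760,           // 10 MB default shown above
});

await fastify.listen({ port: 3000 });
```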