@pd4castr/cli 1.1.0 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -2
- package/dist/index.js +508 -288
- package/package.json +29 -47
package/dist/index.js
CHANGED
|
@@ -1,49 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
var
|
|
3
|
-
var
|
|
4
|
-
throw TypeError(msg);
|
|
5
|
-
};
|
|
6
|
-
var __using = (stack, value, async) => {
|
|
7
|
-
if (value != null) {
|
|
8
|
-
if (typeof value !== "object" && typeof value !== "function") __typeError("Object expected");
|
|
9
|
-
var dispose, inner;
|
|
10
|
-
if (async) dispose = value[__knownSymbol("asyncDispose")];
|
|
11
|
-
if (dispose === void 0) {
|
|
12
|
-
dispose = value[__knownSymbol("dispose")];
|
|
13
|
-
if (async) inner = dispose;
|
|
14
|
-
}
|
|
15
|
-
if (typeof dispose !== "function") __typeError("Object not disposable");
|
|
16
|
-
if (inner) dispose = function() {
|
|
17
|
-
try {
|
|
18
|
-
inner.call(this);
|
|
19
|
-
} catch (e) {
|
|
20
|
-
return Promise.reject(e);
|
|
21
|
-
}
|
|
22
|
-
};
|
|
23
|
-
stack.push([async, dispose, value]);
|
|
24
|
-
} else if (async) {
|
|
25
|
-
stack.push([async]);
|
|
26
|
-
}
|
|
27
|
-
return value;
|
|
28
|
-
};
|
|
29
|
-
var __callDispose = (stack, error, hasError) => {
|
|
30
|
-
var E = typeof SuppressedError === "function" ? SuppressedError : function(e, s, m, _) {
|
|
31
|
-
return _ = Error(m), _.name = "SuppressedError", _.error = e, _.suppressed = s, _;
|
|
32
|
-
};
|
|
33
|
-
var fail = (e) => error = hasError ? new E(e, error, "An error was suppressed during disposal") : (hasError = true, e);
|
|
34
|
-
var next = (it) => {
|
|
35
|
-
while (it = stack.pop()) {
|
|
36
|
-
try {
|
|
37
|
-
var result = it[1] && it[1].call(it[2]);
|
|
38
|
-
if (it[0]) return Promise.resolve(result).then(next, (e) => (fail(e), next()));
|
|
39
|
-
} catch (e) {
|
|
40
|
-
fail(e);
|
|
41
|
-
}
|
|
42
|
-
}
|
|
43
|
-
if (hasError) throw error;
|
|
44
|
-
};
|
|
45
|
-
return next();
|
|
46
|
-
};
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
|
|
47
4
|
|
|
48
5
|
// src/constants.ts
|
|
49
6
|
var AUTH0_DOMAIN = "pdview.au.auth0.com";
|
|
@@ -51,7 +8,7 @@ var AUTH0_CLIENT_ID = "Q5tQNF57cQlVXnVsqnU0hhgy92rVb03W";
|
|
|
51
8
|
var AUTH0_AUDIENCE = "https://api.pd4castr.com.au";
|
|
52
9
|
var GLOBAL_CONFIG_FILE = ".pd4castr";
|
|
53
10
|
var PROJECT_CONFIG_FILE = ".pd4castrrc.json";
|
|
54
|
-
var DEFAULT_API_URL = "https://
|
|
11
|
+
var DEFAULT_API_URL = "https://api.v2.pd4castr.com.au";
|
|
55
12
|
var DEFAULT_INPUT_SOURCE_ID = "0bdfd52b-efaa-455e-9a3b-1a6d2b879b73";
|
|
56
13
|
var TEST_INPUT_DATA_DIR = "test_input";
|
|
57
14
|
var TEST_OUTPUT_DATA_DIR = "test_output";
|
|
@@ -75,7 +32,23 @@ import { ZodError } from "zod";
|
|
|
75
32
|
|
|
76
33
|
// src/schemas/project-config-schema.ts
|
|
77
34
|
import { z } from "zod";
|
|
78
|
-
|
|
35
|
+
|
|
36
|
+
// src/utils/is-iana-timezone.ts
|
|
37
|
+
var timezones = /* @__PURE__ */ new Set([
|
|
38
|
+
...Intl.supportedValuesOf("timeZone"),
|
|
39
|
+
"UTC"
|
|
40
|
+
]);
|
|
41
|
+
function isIanaTimeZone(value) {
|
|
42
|
+
return typeof value === "string" && timezones.has(value);
|
|
43
|
+
}
|
|
44
|
+
__name(isIanaTimeZone, "isIanaTimeZone");
|
|
45
|
+
|
|
46
|
+
// src/schemas/project-config-schema.ts
|
|
47
|
+
var fileFormatSchema = z.enum([
|
|
48
|
+
"csv",
|
|
49
|
+
"json",
|
|
50
|
+
"parquet"
|
|
51
|
+
]);
|
|
79
52
|
var aemoDataFetcherSchema = z.object({
|
|
80
53
|
type: z.literal("AEMO_MMS"),
|
|
81
54
|
checkInterval: z.number().int().min(60),
|
|
@@ -84,29 +57,54 @@ var aemoDataFetcherSchema = z.object({
|
|
|
84
57
|
fetchQuery: z.string()
|
|
85
58
|
})
|
|
86
59
|
});
|
|
87
|
-
var dataFetcherSchema = z.discriminatedUnion("type", [
|
|
60
|
+
var dataFetcherSchema = z.discriminatedUnion("type", [
|
|
61
|
+
aemoDataFetcherSchema
|
|
62
|
+
]);
|
|
88
63
|
var modelInputSchema = z.object({
|
|
89
64
|
key: z.string(),
|
|
90
65
|
inputSource: z.string().optional().default(DEFAULT_INPUT_SOURCE_ID),
|
|
91
|
-
trigger: z.enum([
|
|
66
|
+
trigger: z.enum([
|
|
67
|
+
"WAIT_FOR_LATEST_FILE",
|
|
68
|
+
"USE_MOST_RECENT_FILE"
|
|
69
|
+
]),
|
|
92
70
|
uploadFileFormat: fileFormatSchema.optional().default("json"),
|
|
93
71
|
targetFileFormat: fileFormatSchema.optional().default("json"),
|
|
94
72
|
fetcher: dataFetcherSchema.optional().nullable()
|
|
95
73
|
});
|
|
96
74
|
var modelOutputSchema = z.object({
|
|
97
75
|
name: z.string(),
|
|
98
|
-
type: z.enum([
|
|
76
|
+
type: z.enum([
|
|
77
|
+
"float",
|
|
78
|
+
"integer",
|
|
79
|
+
"string",
|
|
80
|
+
"date",
|
|
81
|
+
"boolean",
|
|
82
|
+
"unknown"
|
|
83
|
+
]),
|
|
99
84
|
seriesKey: z.boolean(),
|
|
100
85
|
colour: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional()
|
|
101
86
|
});
|
|
87
|
+
var sensitivitySchema = z.object({
|
|
88
|
+
name: z.string(),
|
|
89
|
+
query: z.string()
|
|
90
|
+
});
|
|
102
91
|
var CONFIG_WARNING_KEY = "// WARNING: DO NOT MODIFY THESE SYSTEM MANAGED VALUES";
|
|
103
92
|
var projectConfigSchema = z.object({
|
|
104
93
|
name: z.string(),
|
|
105
|
-
forecastVariable: z.enum([
|
|
106
|
-
|
|
94
|
+
forecastVariable: z.enum([
|
|
95
|
+
"price"
|
|
96
|
+
]),
|
|
97
|
+
timeHorizon: z.enum([
|
|
98
|
+
"actual",
|
|
99
|
+
"day_ahead",
|
|
100
|
+
"week_ahead",
|
|
101
|
+
"quarterly"
|
|
102
|
+
]),
|
|
103
|
+
displayTimezone: z.string().refine(isIanaTimeZone, "invalid IANA time zone").optional().default("Australia/Brisbane"),
|
|
107
104
|
metadata: z.record(z.string(), z.any()).optional(),
|
|
108
105
|
inputs: z.array(modelInputSchema),
|
|
109
106
|
outputs: z.array(modelOutputSchema),
|
|
107
|
+
sensitivities: z.array(sensitivitySchema).optional().default([]),
|
|
110
108
|
[CONFIG_WARNING_KEY]: z.string().optional().default(""),
|
|
111
109
|
$$id: z.string().nullable().optional().default(null),
|
|
112
110
|
$$modelGroupID: z.string().nullable().optional().default(null),
|
|
@@ -124,6 +122,7 @@ async function isExistingPath(path15) {
|
|
|
124
122
|
return false;
|
|
125
123
|
}
|
|
126
124
|
}
|
|
125
|
+
__name(isExistingPath, "isExistingPath");
|
|
127
126
|
|
|
128
127
|
// src/config/load-project-context.ts
|
|
129
128
|
async function loadProjectContext(configPath) {
|
|
@@ -131,9 +130,7 @@ async function loadProjectContext(configPath) {
|
|
|
131
130
|
const resolvedConfigPath = configPath ? path.resolve(configPath) : path.join(projectRoot, PROJECT_CONFIG_FILE);
|
|
132
131
|
const configExists = await isExistingPath(resolvedConfigPath);
|
|
133
132
|
if (!configExists) {
|
|
134
|
-
throw new Error(
|
|
135
|
-
`No config found at ${resolvedConfigPath} (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md).`
|
|
136
|
-
);
|
|
133
|
+
throw new Error(`No config found at ${resolvedConfigPath} (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md).`);
|
|
137
134
|
}
|
|
138
135
|
try {
|
|
139
136
|
const configFileContents = await fs2.readFile(resolvedConfigPath, "utf8");
|
|
@@ -147,11 +144,10 @@ async function loadProjectContext(configPath) {
|
|
|
147
144
|
if (error instanceof ZodError) {
|
|
148
145
|
throw error;
|
|
149
146
|
}
|
|
150
|
-
throw new Error(
|
|
151
|
-
"Failed to parse project config (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md)."
|
|
152
|
-
);
|
|
147
|
+
throw new Error("Failed to parse project config (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md).");
|
|
153
148
|
}
|
|
154
149
|
}
|
|
150
|
+
__name(loadProjectContext, "loadProjectContext");
|
|
155
151
|
|
|
156
152
|
// src/utils/create-link.ts
|
|
157
153
|
var ESC = "\x1B";
|
|
@@ -162,11 +158,13 @@ function createLink(text, url) {
|
|
|
162
158
|
const end = `${OSC}8${SEP}${SEP}${ESC}\\`;
|
|
163
159
|
return `${start}${text}${end}`;
|
|
164
160
|
}
|
|
161
|
+
__name(createLink, "createLink");
|
|
165
162
|
|
|
166
163
|
// src/utils/format-nest-error-message.ts
|
|
167
164
|
function formatNestErrorMessage(error) {
|
|
168
165
|
return `[${error.error?.toUpperCase() ?? "UNKNOWN"}] ${error.message}`;
|
|
169
166
|
}
|
|
167
|
+
__name(formatNestErrorMessage, "formatNestErrorMessage");
|
|
170
168
|
|
|
171
169
|
// src/utils/get-auth.ts
|
|
172
170
|
import invariant from "tiny-invariant";
|
|
@@ -199,18 +197,21 @@ async function loadGlobalConfig() {
|
|
|
199
197
|
return getDefaultConfig();
|
|
200
198
|
}
|
|
201
199
|
}
|
|
200
|
+
__name(loadGlobalConfig, "loadGlobalConfig");
|
|
202
201
|
function getDefaultConfig() {
|
|
203
202
|
return {
|
|
204
203
|
accessToken: null,
|
|
205
204
|
accessTokenExpiresAt: null
|
|
206
205
|
};
|
|
207
206
|
}
|
|
207
|
+
__name(getDefaultConfig, "getDefaultConfig");
|
|
208
208
|
|
|
209
209
|
// src/utils/is-authed.ts
|
|
210
210
|
function isAuthed(config) {
|
|
211
211
|
const isTokenExpired = config.accessTokenExpiresAt && config.accessTokenExpiresAt <= Date.now();
|
|
212
212
|
return Boolean(config.accessToken) && !isTokenExpired;
|
|
213
213
|
}
|
|
214
|
+
__name(isAuthed, "isAuthed");
|
|
214
215
|
|
|
215
216
|
// src/utils/get-auth.ts
|
|
216
217
|
async function getAuth() {
|
|
@@ -225,6 +226,7 @@ async function getAuth() {
|
|
|
225
226
|
expiresAt: config.accessTokenExpiresAt
|
|
226
227
|
};
|
|
227
228
|
}
|
|
229
|
+
__name(getAuth, "getAuth");
|
|
228
230
|
|
|
229
231
|
// src/utils/log-zod-issues.ts
|
|
230
232
|
function logZodIssues(error) {
|
|
@@ -232,6 +234,7 @@ function logZodIssues(error) {
|
|
|
232
234
|
console.log(` \u2718 ${issue.path.join(".")} - ${issue.message}`);
|
|
233
235
|
}
|
|
234
236
|
}
|
|
237
|
+
__name(logZodIssues, "logZodIssues");
|
|
235
238
|
|
|
236
239
|
// src/commands/fetch/utils/fetch-aemo-data.ts
|
|
237
240
|
import fs4 from "fs/promises";
|
|
@@ -246,9 +249,7 @@ var envSchema = z3.object({
|
|
|
246
249
|
// wsl sets this environment variable on all distros that i've checked
|
|
247
250
|
isWSL: z3.boolean().default(() => Boolean(process.env.WSL_DISTRO_NAME)),
|
|
248
251
|
apiURL: z3.string().default(() => process.env.PD4CASTR_API_URL ?? DEFAULT_API_URL),
|
|
249
|
-
wslNetworkInterface: z3.string().default(
|
|
250
|
-
() => process.env.PD4CASTR_WSL_NETWORK_INTERFACE ?? WSL_NETWORK_INTERFACE_DEFAULT
|
|
251
|
-
),
|
|
252
|
+
wslNetworkInterface: z3.string().default(() => process.env.PD4CASTR_WSL_NETWORK_INTERFACE ?? WSL_NETWORK_INTERFACE_DEFAULT),
|
|
252
253
|
auth0ClientId: z3.string().default(() => process.env.PD4CASTR_AUTH0_CLIENT_ID ?? AUTH0_CLIENT_ID),
|
|
253
254
|
auth0Audience: z3.string().default(() => process.env.PD4CASTR_AUTH0_AUDIENCE ?? AUTH0_AUDIENCE)
|
|
254
255
|
});
|
|
@@ -257,6 +258,7 @@ var envSchema = z3.object({
|
|
|
257
258
|
function getEnv() {
|
|
258
259
|
return envSchema.parse(process.env);
|
|
259
260
|
}
|
|
261
|
+
__name(getEnv, "getEnv");
|
|
260
262
|
|
|
261
263
|
// src/api/api.ts
|
|
262
264
|
var api = ky.create({
|
|
@@ -265,22 +267,29 @@ var api = ky.create({
|
|
|
265
267
|
|
|
266
268
|
// src/api/query-data-fetcher.ts
|
|
267
269
|
async function queryDataFetcher(querySQL, authCtx) {
|
|
268
|
-
const headers = {
|
|
269
|
-
|
|
270
|
-
|
|
270
|
+
const headers = {
|
|
271
|
+
Authorization: `Bearer ${authCtx.accessToken}`
|
|
272
|
+
};
|
|
273
|
+
const payload = {
|
|
274
|
+
query: querySQL,
|
|
275
|
+
type: "AEMO_MMS"
|
|
276
|
+
};
|
|
277
|
+
const result = await api.post("data-fetcher/query", {
|
|
278
|
+
json: payload,
|
|
279
|
+
headers
|
|
280
|
+
}).json();
|
|
271
281
|
return result;
|
|
272
282
|
}
|
|
283
|
+
__name(queryDataFetcher, "queryDataFetcher");
|
|
273
284
|
|
|
274
285
|
// src/commands/fetch/utils/fetch-aemo-data.ts
|
|
275
286
|
async function fetchAEMOData(dataFetcher, authCtx, ctx) {
|
|
276
|
-
const queryPath = path3.resolve(
|
|
277
|
-
ctx.projectRoot,
|
|
278
|
-
dataFetcher.config.fetchQuery
|
|
279
|
-
);
|
|
287
|
+
const queryPath = path3.resolve(ctx.projectRoot, dataFetcher.config.fetchQuery);
|
|
280
288
|
const querySQL = await fs4.readFile(queryPath, "utf8");
|
|
281
289
|
const result = await queryDataFetcher(querySQL, authCtx);
|
|
282
290
|
return result;
|
|
283
291
|
}
|
|
292
|
+
__name(fetchAEMOData, "fetchAEMOData");
|
|
284
293
|
|
|
285
294
|
// src/commands/fetch/utils/get-fetcher.ts
|
|
286
295
|
var DATA_FETCHER_FNS = {
|
|
@@ -293,6 +302,7 @@ function getFetcher(type) {
|
|
|
293
302
|
}
|
|
294
303
|
return fetcher;
|
|
295
304
|
}
|
|
305
|
+
__name(getFetcher, "getFetcher");
|
|
296
306
|
|
|
297
307
|
// src/commands/fetch/utils/write-test-data.ts
|
|
298
308
|
import fs5 from "fs/promises";
|
|
@@ -302,18 +312,24 @@ import path4 from "path";
|
|
|
302
312
|
function getInputFilename(modelInput) {
|
|
303
313
|
return `${modelInput.key}.${modelInput.targetFileFormat}`;
|
|
304
314
|
}
|
|
315
|
+
__name(getInputFilename, "getInputFilename");
|
|
305
316
|
|
|
306
317
|
// src/commands/fetch/utils/write-test-data.ts
|
|
307
318
|
async function writeTestData(inputData, modelInput, inputDataDir, ctx) {
|
|
308
319
|
const inputDir = path4.resolve(ctx.projectRoot, inputDataDir);
|
|
309
|
-
await fs5.mkdir(inputDir, {
|
|
320
|
+
await fs5.mkdir(inputDir, {
|
|
321
|
+
recursive: true
|
|
322
|
+
});
|
|
310
323
|
const inputFilename = getInputFilename(modelInput);
|
|
311
324
|
const inputPath = path4.resolve(inputDir, inputFilename);
|
|
312
325
|
await fs5.writeFile(inputPath, JSON.stringify(inputData, void 0, 2));
|
|
313
326
|
}
|
|
327
|
+
__name(writeTestData, "writeTestData");
|
|
314
328
|
|
|
315
329
|
// src/commands/fetch/handle-action.ts
|
|
316
|
-
var FETCHABLE_DATA_FETCHER_TYPES = /* @__PURE__ */ new Set([
|
|
330
|
+
var FETCHABLE_DATA_FETCHER_TYPES = /* @__PURE__ */ new Set([
|
|
331
|
+
"AEMO_MMS"
|
|
332
|
+
]);
|
|
317
333
|
async function handleAction(options) {
|
|
318
334
|
const spinner = ora("Starting data fetch...").start();
|
|
319
335
|
try {
|
|
@@ -330,9 +346,7 @@ async function handleAction(options) {
|
|
|
330
346
|
continue;
|
|
331
347
|
}
|
|
332
348
|
if (!FETCHABLE_DATA_FETCHER_TYPES.has(input2.fetcher.type)) {
|
|
333
|
-
spinner.warn(
|
|
334
|
-
`\`${input2.key}\` (${input2.fetcher.type}) - unsupported, skipping`
|
|
335
|
-
);
|
|
349
|
+
spinner.warn(`\`${input2.key}\` (${input2.fetcher.type}) - unsupported, skipping`);
|
|
336
350
|
continue;
|
|
337
351
|
}
|
|
338
352
|
spinner.start(`\`${input2.key}\` (${input2.fetcher.type}) - fetching...`);
|
|
@@ -364,15 +378,13 @@ ${link} to view fetched data
|
|
|
364
378
|
process.exit(1);
|
|
365
379
|
}
|
|
366
380
|
}
|
|
381
|
+
__name(handleAction, "handleAction");
|
|
367
382
|
|
|
368
383
|
// src/commands/fetch/index.ts
|
|
369
384
|
function registerFetchCommand(program2) {
|
|
370
|
-
program2.command("fetch").description("Fetches test data from configured data fetchers.").option(
|
|
371
|
-
"-i, --input-dir <path>",
|
|
372
|
-
"The input test data directory",
|
|
373
|
-
TEST_INPUT_DATA_DIR
|
|
374
|
-
).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction);
|
|
385
|
+
program2.command("fetch").description("Fetches test data from configured data fetchers.").option("-i, --input-dir <path>", "The input test data directory", TEST_INPUT_DATA_DIR).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction);
|
|
375
386
|
}
|
|
387
|
+
__name(registerFetchCommand, "registerFetchCommand");
|
|
376
388
|
|
|
377
389
|
// src/commands/init/handle-action.ts
|
|
378
390
|
import path6 from "path";
|
|
@@ -399,6 +411,7 @@ var templates = {
|
|
|
399
411
|
function getTemplatePath(template) {
|
|
400
412
|
return `https://github.com/${template.repo}/${template.path}`;
|
|
401
413
|
}
|
|
414
|
+
__name(getTemplatePath, "getTemplatePath");
|
|
402
415
|
|
|
403
416
|
// src/commands/init/utils/validate-name.ts
|
|
404
417
|
async function validateName(value) {
|
|
@@ -408,6 +421,7 @@ async function validateName(value) {
|
|
|
408
421
|
}
|
|
409
422
|
return true;
|
|
410
423
|
}
|
|
424
|
+
__name(validateName, "validateName");
|
|
411
425
|
|
|
412
426
|
// src/commands/init/handle-action.ts
|
|
413
427
|
async function handleAction2() {
|
|
@@ -437,6 +451,7 @@ async function handleAction2() {
|
|
|
437
451
|
process.exit(1);
|
|
438
452
|
}
|
|
439
453
|
}
|
|
454
|
+
__name(handleAction2, "handleAction");
|
|
440
455
|
async function fetchTemplate(template, projectName) {
|
|
441
456
|
const templatePath = getTemplatePath(templates[template]);
|
|
442
457
|
const fetcher = tiged(templatePath, {
|
|
@@ -446,11 +461,13 @@ async function fetchTemplate(template, projectName) {
|
|
|
446
461
|
const destination = path6.join(process.cwd(), projectName);
|
|
447
462
|
await fetcher.clone(destination);
|
|
448
463
|
}
|
|
464
|
+
__name(fetchTemplate, "fetchTemplate");
|
|
449
465
|
|
|
450
466
|
// src/commands/init/index.ts
|
|
451
467
|
function registerInitCommand(program2) {
|
|
452
468
|
program2.command("init").description("Initialize a new model using a template.").action(handleAction2);
|
|
453
469
|
}
|
|
470
|
+
__name(registerInitCommand, "registerInitCommand");
|
|
454
471
|
|
|
455
472
|
// src/commands/login/handle-action.ts
|
|
456
473
|
import { ExecaError as ExecaError3 } from "execa";
|
|
@@ -468,16 +485,22 @@ async function updateGlobalConfig(updateFn) {
|
|
|
468
485
|
const configPath = path7.join(os2.homedir(), GLOBAL_CONFIG_FILE);
|
|
469
486
|
await fs6.writeFile(configPath, JSON.stringify(updatedConfig, void 0, 2));
|
|
470
487
|
}
|
|
488
|
+
__name(updateGlobalConfig, "updateGlobalConfig");
|
|
471
489
|
|
|
472
490
|
// src/commands/login/utils/complete-auth-flow.ts
|
|
473
491
|
import { HTTPError as HTTPError2 } from "ky";
|
|
474
492
|
|
|
475
493
|
// src/commands/login/auth0-api.ts
|
|
476
494
|
import ky2 from "ky";
|
|
477
|
-
var auth0API = ky2.create({
|
|
495
|
+
var auth0API = ky2.create({
|
|
496
|
+
prefixUrl: `https://${AUTH0_DOMAIN}`
|
|
497
|
+
});
|
|
478
498
|
|
|
479
499
|
// src/commands/login/utils/complete-auth-flow.ts
|
|
480
|
-
var FAILED_AUTH_ERRORS = /* @__PURE__ */ new Set([
|
|
500
|
+
var FAILED_AUTH_ERRORS = /* @__PURE__ */ new Set([
|
|
501
|
+
"expired_token",
|
|
502
|
+
"access_denied"
|
|
503
|
+
]);
|
|
481
504
|
async function completeAuthFlow(authCtx) {
|
|
482
505
|
const env = getEnv();
|
|
483
506
|
const payload = {
|
|
@@ -487,7 +510,9 @@ async function completeAuthFlow(authCtx) {
|
|
|
487
510
|
};
|
|
488
511
|
async function fetchAuthResponse() {
|
|
489
512
|
try {
|
|
490
|
-
const response = await auth0API.post("oauth/token", {
|
|
513
|
+
const response = await auth0API.post("oauth/token", {
|
|
514
|
+
json: payload
|
|
515
|
+
}).json();
|
|
491
516
|
const authPayload = {
|
|
492
517
|
accessToken: response.access_token,
|
|
493
518
|
expiresAt: Date.now() + response.expires_in * 1e3
|
|
@@ -500,18 +525,16 @@ async function completeAuthFlow(authCtx) {
|
|
|
500
525
|
const errorResponse = await error.response.json();
|
|
501
526
|
const isFailedAuthError = FAILED_AUTH_ERRORS.has(errorResponse.error);
|
|
502
527
|
if (isFailedAuthError) {
|
|
503
|
-
throw new Error(
|
|
504
|
-
`Login failed, please try again (${errorResponse.error_description}).`
|
|
505
|
-
);
|
|
528
|
+
throw new Error(`Login failed, please try again (${errorResponse.error_description}).`);
|
|
506
529
|
}
|
|
507
530
|
const delay = authCtx.checkInterval * 1e3;
|
|
508
|
-
return new Promise(
|
|
509
|
-
(resolve) => setTimeout(() => resolve(fetchAuthResponse()), delay)
|
|
510
|
-
);
|
|
531
|
+
return new Promise((resolve) => setTimeout(() => resolve(fetchAuthResponse()), delay));
|
|
511
532
|
}
|
|
512
533
|
}
|
|
534
|
+
__name(fetchAuthResponse, "fetchAuthResponse");
|
|
513
535
|
return fetchAuthResponse();
|
|
514
536
|
}
|
|
537
|
+
__name(completeAuthFlow, "completeAuthFlow");
|
|
515
538
|
|
|
516
539
|
// src/commands/login/utils/start-auth-flow.ts
|
|
517
540
|
async function startAuthFlow() {
|
|
@@ -521,7 +544,9 @@ async function startAuthFlow() {
|
|
|
521
544
|
audience: env.auth0Audience,
|
|
522
545
|
scope: "openid email"
|
|
523
546
|
};
|
|
524
|
-
const codeResponse = await auth0API.post("oauth/device/code", {
|
|
547
|
+
const codeResponse = await auth0API.post("oauth/device/code", {
|
|
548
|
+
json: payload
|
|
549
|
+
}).json();
|
|
525
550
|
const authContext = {
|
|
526
551
|
deviceCode: codeResponse.device_code,
|
|
527
552
|
verificationURL: codeResponse.verification_uri_complete,
|
|
@@ -530,6 +555,7 @@ async function startAuthFlow() {
|
|
|
530
555
|
};
|
|
531
556
|
return authContext;
|
|
532
557
|
}
|
|
558
|
+
__name(startAuthFlow, "startAuthFlow");
|
|
533
559
|
|
|
534
560
|
// src/commands/login/handle-action.ts
|
|
535
561
|
async function handleAction3() {
|
|
@@ -541,10 +567,8 @@ async function handleAction3() {
|
|
|
541
567
|
return;
|
|
542
568
|
}
|
|
543
569
|
const authCtx = await startAuthFlow();
|
|
544
|
-
spinner.info(
|
|
545
|
-
|
|
546
|
-
${authCtx.verificationURL}`
|
|
547
|
-
);
|
|
570
|
+
spinner.info(`Please open the login link in your browser:
|
|
571
|
+
${authCtx.verificationURL}`);
|
|
548
572
|
spinner.info(`Your login code is:
|
|
549
573
|
${authCtx.userCode}
|
|
550
574
|
`);
|
|
@@ -570,11 +594,13 @@ async function handleAction3() {
|
|
|
570
594
|
process.exit(1);
|
|
571
595
|
}
|
|
572
596
|
}
|
|
597
|
+
__name(handleAction3, "handleAction");
|
|
573
598
|
|
|
574
599
|
// src/commands/login/index.ts
|
|
575
600
|
function registerLoginCommand(program2) {
|
|
576
601
|
program2.command("login").description("Logs in to the pd4castr API.").action(handleAction3);
|
|
577
602
|
}
|
|
603
|
+
__name(registerLoginCommand, "registerLoginCommand");
|
|
578
604
|
|
|
579
605
|
// src/commands/logout/handle-action.ts
|
|
580
606
|
import { ExecaError as ExecaError4 } from "execa";
|
|
@@ -608,11 +634,13 @@ async function handleAction4() {
|
|
|
608
634
|
process.exit(1);
|
|
609
635
|
}
|
|
610
636
|
}
|
|
637
|
+
__name(handleAction4, "handleAction");
|
|
611
638
|
|
|
612
639
|
// src/commands/logout/index.ts
|
|
613
640
|
function registerLogoutCommand(program2) {
|
|
614
641
|
program2.command("logout").description("Logs out of the pd4castr API.").action(handleAction4);
|
|
615
642
|
}
|
|
643
|
+
__name(registerLogoutCommand, "registerLogoutCommand");
|
|
616
644
|
|
|
617
645
|
// src/commands/publish/handle-action.ts
|
|
618
646
|
import { ExecaError as ExecaError5 } from "execa";
|
|
@@ -632,6 +660,7 @@ async function startWebServer(app, port) {
|
|
|
632
660
|
});
|
|
633
661
|
});
|
|
634
662
|
}
|
|
663
|
+
__name(startWebServer, "startWebServer");
|
|
635
664
|
|
|
636
665
|
// src/commands/publish/handle-create-model-flow.ts
|
|
637
666
|
import * as inquirer2 from "@inquirer/prompts";
|
|
@@ -639,25 +668,42 @@ import chalk3 from "chalk";
|
|
|
639
668
|
|
|
640
669
|
// src/api/create-model.ts
|
|
641
670
|
async function createModel(config, authCtx) {
|
|
642
|
-
const headers = {
|
|
643
|
-
|
|
671
|
+
const headers = {
|
|
672
|
+
Authorization: `Bearer ${authCtx.accessToken}`
|
|
673
|
+
};
|
|
674
|
+
const result = await api.post("model", {
|
|
675
|
+
headers,
|
|
676
|
+
json: config
|
|
677
|
+
}).json();
|
|
644
678
|
return result;
|
|
645
679
|
}
|
|
680
|
+
__name(createModel, "createModel");
|
|
646
681
|
|
|
647
682
|
// src/api/get-registry-push-credentials.ts
|
|
648
683
|
async function getRegistryPushCredentials(modelID, authCtx) {
|
|
649
|
-
const headers = {
|
|
684
|
+
const headers = {
|
|
685
|
+
Authorization: `Bearer ${authCtx.accessToken}`
|
|
686
|
+
};
|
|
650
687
|
const searchParams = new URLSearchParams(`modelId=${modelID}`);
|
|
651
|
-
const result = await api.get("registry/push-credentials", {
|
|
688
|
+
const result = await api.get("registry/push-credentials", {
|
|
689
|
+
headers,
|
|
690
|
+
searchParams
|
|
691
|
+
}).json();
|
|
652
692
|
return result;
|
|
653
693
|
}
|
|
694
|
+
__name(getRegistryPushCredentials, "getRegistryPushCredentials");
|
|
654
695
|
|
|
655
696
|
// src/api/trigger-model-run.ts
|
|
656
697
|
async function triggerModelRun(modelId, authCtx) {
|
|
657
|
-
const headers = {
|
|
658
|
-
|
|
698
|
+
const headers = {
|
|
699
|
+
Authorization: `Bearer ${authCtx.accessToken}`
|
|
700
|
+
};
|
|
701
|
+
const result = await api.post(`model/${modelId}/trigger`, {
|
|
702
|
+
headers
|
|
703
|
+
}).json();
|
|
659
704
|
return result;
|
|
660
705
|
}
|
|
706
|
+
__name(triggerModelRun, "triggerModelRun");
|
|
661
707
|
|
|
662
708
|
// src/config/update-project-config.ts
|
|
663
709
|
import fs7 from "fs/promises";
|
|
@@ -667,73 +713,91 @@ async function updateProjectConfig(updateFn, configPath) {
|
|
|
667
713
|
const projectConfig = await loadProjectContext(configPath);
|
|
668
714
|
const updatedConfig = produce2(projectConfig.config, updateFn);
|
|
669
715
|
const resolvedConfigPath = configPath ? path8.resolve(configPath) : path8.join(projectConfig.projectRoot, PROJECT_CONFIG_FILE);
|
|
670
|
-
await fs7.writeFile(
|
|
671
|
-
resolvedConfigPath,
|
|
672
|
-
JSON.stringify(updatedConfig, void 0, 2)
|
|
673
|
-
);
|
|
716
|
+
await fs7.writeFile(resolvedConfigPath, JSON.stringify(updatedConfig, void 0, 2));
|
|
674
717
|
}
|
|
718
|
+
__name(updateProjectConfig, "updateProjectConfig");
|
|
675
719
|
|
|
676
720
|
// src/docker/build-docker-image.ts
|
|
677
721
|
import { execa } from "execa";
|
|
678
722
|
async function buildDockerImage(dockerImage, ctx) {
|
|
679
723
|
try {
|
|
680
|
-
await execa(
|
|
681
|
-
"
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
724
|
+
await execa("docker", [
|
|
725
|
+
"build",
|
|
726
|
+
"--platform=linux/amd64",
|
|
727
|
+
"-t",
|
|
728
|
+
dockerImage,
|
|
729
|
+
"."
|
|
730
|
+
], {
|
|
731
|
+
cwd: ctx.projectRoot,
|
|
732
|
+
stdio: "pipe"
|
|
733
|
+
});
|
|
688
734
|
} catch (error) {
|
|
689
|
-
throw new Error("Failed to build docker image", {
|
|
735
|
+
throw new Error("Failed to build docker image", {
|
|
736
|
+
cause: error
|
|
737
|
+
});
|
|
690
738
|
}
|
|
691
739
|
}
|
|
740
|
+
__name(buildDockerImage, "buildDockerImage");
|
|
692
741
|
|
|
693
742
|
// src/docker/login-to-docker-registry.ts
|
|
694
743
|
import { execa as execa2 } from "execa";
|
|
695
744
|
async function loginToDockerRegistry(authConfig) {
|
|
696
745
|
try {
|
|
697
|
-
await execa2(
|
|
698
|
-
"
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
{ input: authConfig.password }
|
|
707
|
-
);
|
|
746
|
+
await execa2("docker", [
|
|
747
|
+
"login",
|
|
748
|
+
authConfig.registry,
|
|
749
|
+
"--username",
|
|
750
|
+
authConfig.username,
|
|
751
|
+
"--password-stdin"
|
|
752
|
+
], {
|
|
753
|
+
input: authConfig.password
|
|
754
|
+
});
|
|
708
755
|
} catch (error) {
|
|
709
|
-
throw new Error("Failed to login to docker registry", {
|
|
756
|
+
throw new Error("Failed to login to docker registry", {
|
|
757
|
+
cause: error
|
|
758
|
+
});
|
|
710
759
|
}
|
|
711
760
|
}
|
|
761
|
+
__name(loginToDockerRegistry, "loginToDockerRegistry");
|
|
712
762
|
|
|
713
763
|
// src/docker/push-docker-image.ts
|
|
714
764
|
import { execa as execa3 } from "execa";
|
|
715
765
|
async function pushDockerImage(dockerImage, pushRef) {
|
|
716
766
|
try {
|
|
717
|
-
await execa3("docker", [
|
|
718
|
-
|
|
767
|
+
await execa3("docker", [
|
|
768
|
+
"tag",
|
|
769
|
+
dockerImage,
|
|
770
|
+
pushRef
|
|
771
|
+
]);
|
|
772
|
+
await execa3("docker", [
|
|
773
|
+
"push",
|
|
774
|
+
pushRef
|
|
775
|
+
]);
|
|
719
776
|
} catch (error) {
|
|
720
|
-
throw new Error("Failed to push docker image", {
|
|
777
|
+
throw new Error("Failed to push docker image", {
|
|
778
|
+
cause: error
|
|
779
|
+
});
|
|
721
780
|
}
|
|
722
781
|
}
|
|
782
|
+
__name(pushDockerImage, "pushDockerImage");
|
|
723
783
|
|
|
724
784
|
// src/utils/get-docker-image.ts
|
|
725
785
|
import slugify from "slugify";
|
|
726
786
|
function getDockerImage(ctx) {
|
|
727
|
-
const sluggedName = slugify(ctx.config.name, {
|
|
787
|
+
const sluggedName = slugify(ctx.config.name, {
|
|
788
|
+
lower: true
|
|
789
|
+
});
|
|
728
790
|
const dockerImage = `pd4castr/${sluggedName}-local:${Date.now()}`;
|
|
729
791
|
return dockerImage;
|
|
730
792
|
}
|
|
793
|
+
__name(getDockerImage, "getDockerImage");
|
|
731
794
|
|
|
732
795
|
// src/utils/get-model-config-from-project-config.ts
|
|
733
796
|
import fs8 from "fs/promises";
|
|
734
797
|
import path9 from "path";
|
|
735
798
|
async function getModelConfigFromProjectConfig(ctx) {
|
|
736
799
|
const inputs = await getInputsWithInlinedSQL(ctx);
|
|
800
|
+
const sensitivities = await getSensitivitiesWithInlinedSQL(ctx);
|
|
737
801
|
const { $$id, $$modelGroupID, $$revision, $$dockerImage, ...config } = ctx.config;
|
|
738
802
|
return {
|
|
739
803
|
...config,
|
|
@@ -741,10 +805,14 @@ async function getModelConfigFromProjectConfig(ctx) {
|
|
|
741
805
|
modelGroupId: $$modelGroupID,
|
|
742
806
|
revision: $$revision ?? 0,
|
|
743
807
|
dockerImage: $$dockerImage,
|
|
744
|
-
inputs
|
|
808
|
+
inputs,
|
|
809
|
+
sensitivities
|
|
745
810
|
};
|
|
746
811
|
}
|
|
747
|
-
|
|
812
|
+
__name(getModelConfigFromProjectConfig, "getModelConfigFromProjectConfig");
|
|
813
|
+
var FETCHERS_WITH_SQL = /* @__PURE__ */ new Set([
|
|
814
|
+
"AEMO_MMS"
|
|
815
|
+
]);
|
|
748
816
|
async function getInputsWithInlinedSQL(ctx) {
|
|
749
817
|
const inputsWithSQL = [];
|
|
750
818
|
for (const input2 of ctx.config.inputs) {
|
|
@@ -752,14 +820,8 @@ async function getInputsWithInlinedSQL(ctx) {
|
|
|
752
820
|
inputsWithSQL.push(input2);
|
|
753
821
|
continue;
|
|
754
822
|
}
|
|
755
|
-
const fetchQueryPath = path9.resolve(
|
|
756
|
-
|
|
757
|
-
input2.fetcher.config.fetchQuery
|
|
758
|
-
);
|
|
759
|
-
const checkQueryPath = path9.resolve(
|
|
760
|
-
ctx.projectRoot,
|
|
761
|
-
input2.fetcher.config.checkQuery
|
|
762
|
-
);
|
|
823
|
+
const fetchQueryPath = path9.resolve(ctx.projectRoot, input2.fetcher.config.fetchQuery);
|
|
824
|
+
const checkQueryPath = path9.resolve(ctx.projectRoot, input2.fetcher.config.checkQuery);
|
|
763
825
|
const [fetchQuerySQL, checkQuerySQL] = await Promise.all([
|
|
764
826
|
fs8.readFile(fetchQueryPath, "utf8"),
|
|
765
827
|
fs8.readFile(checkQueryPath, "utf8")
|
|
@@ -779,11 +841,31 @@ async function getInputsWithInlinedSQL(ctx) {
|
|
|
779
841
|
}
|
|
780
842
|
return inputsWithSQL;
|
|
781
843
|
}
|
|
844
|
+
__name(getInputsWithInlinedSQL, "getInputsWithInlinedSQL");
|
|
845
|
+
async function getSensitivitiesWithInlinedSQL(ctx) {
|
|
846
|
+
const sensitivitiesWithSQL = [];
|
|
847
|
+
const sensitivities = ctx.config.sensitivities ?? [];
|
|
848
|
+
for (const sensitivity of sensitivities) {
|
|
849
|
+
const queryPath = path9.resolve(ctx.projectRoot, sensitivity.query);
|
|
850
|
+
try {
|
|
851
|
+
const sql = await fs8.readFile(queryPath, "utf8");
|
|
852
|
+
sensitivitiesWithSQL.push({
|
|
853
|
+
...sensitivity,
|
|
854
|
+
query: sql
|
|
855
|
+
});
|
|
856
|
+
} catch {
|
|
857
|
+
throw new Error(`Sensitivity query file not found (${sensitivity.query})`);
|
|
858
|
+
}
|
|
859
|
+
}
|
|
860
|
+
return sensitivitiesWithSQL;
|
|
861
|
+
}
|
|
862
|
+
__name(getSensitivitiesWithInlinedSQL, "getSensitivitiesWithInlinedSQL");
|
|
782
863
|
|
|
783
864
|
// src/utils/log-empty-line.ts
|
|
784
865
|
function logEmptyLine() {
|
|
785
866
|
console.log("");
|
|
786
867
|
}
|
|
868
|
+
__name(logEmptyLine, "logEmptyLine");
|
|
787
869
|
|
|
788
870
|
// src/commands/publish/constants.ts
|
|
789
871
|
import chalk from "chalk";
|
|
@@ -802,20 +884,24 @@ function getModelSummaryLines(ctx) {
|
|
|
802
884
|
` ${chalk2.bold("Forecast variable:")} ${ctx.config.forecastVariable}`,
|
|
803
885
|
` ${chalk2.bold("Time horizon:")} ${ctx.config.timeHorizon}`,
|
|
804
886
|
` ${chalk2.bold("Inputs:")}`,
|
|
805
|
-
...ctx.config.inputs.map(
|
|
806
|
-
(input2) => ` \u2022 ${input2.key} - ${getInputType(input2)}`
|
|
807
|
-
),
|
|
887
|
+
...ctx.config.inputs.map((input2) => ` \u2022 ${input2.key} - ${getInputType(input2)}`),
|
|
808
888
|
` ${chalk2.bold("Outputs:")}`,
|
|
809
889
|
...ctx.config.outputs.map((output) => ` \u2022 ${output.name} - ${output.type}`),
|
|
890
|
+
...ctx.config.sensitivities.length > 0 ? [
|
|
891
|
+
` ${chalk2.bold("Sensitivities:")}`,
|
|
892
|
+
...ctx.config.sensitivities.map((s) => ` \u2022 ${s.name}`)
|
|
893
|
+
] : [],
|
|
810
894
|
""
|
|
811
895
|
];
|
|
812
896
|
}
|
|
897
|
+
__name(getModelSummaryLines, "getModelSummaryLines");
|
|
813
898
|
function getInputType(input2) {
|
|
814
899
|
if (input2.fetcher) {
|
|
815
900
|
return input2.fetcher.type;
|
|
816
901
|
}
|
|
817
902
|
return "static";
|
|
818
903
|
}
|
|
904
|
+
__name(getInputType, "getInputType");
|
|
819
905
|
|
|
820
906
|
// src/docker/run-model-container.ts
|
|
821
907
|
import os3 from "os";
|
|
@@ -828,17 +914,19 @@ function getInputEnv(modelInput, webserverURL) {
|
|
|
828
914
|
const inputFileURL = `${webserverURL}/input/${filename}`;
|
|
829
915
|
return `INPUT_${variableName}_URL=${inputFileURL}`;
|
|
830
916
|
}
|
|
917
|
+
__name(getInputEnv, "getInputEnv");
|
|
831
918
|
|
|
832
919
|
// src/docker/run-model-container.ts
|
|
833
920
|
async function runModelContainer(dockerImage, webserverPort, ctx) {
|
|
834
921
|
const env = getEnv();
|
|
835
922
|
const webserverHostname = env.isWSL ? DOCKER_HOSTNAME_WSL : DOCKER_HOSTNAME_DEFAULT;
|
|
836
923
|
const webserverURL = `http://${webserverHostname}:${webserverPort}`;
|
|
837
|
-
const inputEnvs = ctx.config.inputs.map(
|
|
838
|
-
(input2) => getInputEnv(input2, webserverURL)
|
|
839
|
-
);
|
|
924
|
+
const inputEnvs = ctx.config.inputs.map((input2) => getInputEnv(input2, webserverURL));
|
|
840
925
|
const outputEnv = `OUTPUT_URL=${webserverURL}/output`;
|
|
841
|
-
const envs = [
|
|
926
|
+
const envs = [
|
|
927
|
+
...inputEnvs,
|
|
928
|
+
outputEnv
|
|
929
|
+
];
|
|
842
930
|
try {
|
|
843
931
|
const extraRunArgs = [];
|
|
844
932
|
if (env.isWSL) {
|
|
@@ -849,7 +937,10 @@ async function runModelContainer(dockerImage, webserverPort, ctx) {
|
|
|
849
937
|
"run",
|
|
850
938
|
"--rm",
|
|
851
939
|
...extraRunArgs,
|
|
852
|
-
...envs.flatMap((env2) => [
|
|
940
|
+
...envs.flatMap((env2) => [
|
|
941
|
+
"--env",
|
|
942
|
+
env2
|
|
943
|
+
]),
|
|
853
944
|
dockerImage
|
|
854
945
|
];
|
|
855
946
|
await execa4("docker", args, {
|
|
@@ -857,20 +948,22 @@ async function runModelContainer(dockerImage, webserverPort, ctx) {
|
|
|
857
948
|
stdio: "pipe"
|
|
858
949
|
});
|
|
859
950
|
} catch (error) {
|
|
860
|
-
throw new Error("Failed to run model container", {
|
|
951
|
+
throw new Error("Failed to run model container", {
|
|
952
|
+
cause: error
|
|
953
|
+
});
|
|
861
954
|
}
|
|
862
955
|
}
|
|
956
|
+
__name(runModelContainer, "runModelContainer");
|
|
863
957
|
function getWSLMachineIP() {
|
|
864
958
|
const env = getEnv();
|
|
865
959
|
const interfaces = os3.networkInterfaces();
|
|
866
960
|
const interfaceInfo = interfaces[env.wslNetworkInterface]?.[0];
|
|
867
961
|
if (!interfaceInfo) {
|
|
868
|
-
throw new Error(
|
|
869
|
-
`WSL machine IP not found for interface \`${env.wslNetworkInterface}\``
|
|
870
|
-
);
|
|
962
|
+
throw new Error(`WSL machine IP not found for interface \`${env.wslNetworkInterface}\``);
|
|
871
963
|
}
|
|
872
964
|
return interfaceInfo.address;
|
|
873
965
|
}
|
|
966
|
+
__name(getWSLMachineIP, "getWSLMachineIP");
|
|
874
967
|
|
|
875
968
|
// src/model-io-checks/setup-model-io-checks.ts
|
|
876
969
|
import path12 from "path";
|
|
@@ -878,6 +971,9 @@ import express from "express";
|
|
|
878
971
|
|
|
879
972
|
// src/model-io-checks/model-io-checks.ts
|
|
880
973
|
var ModelIOChecks = class {
|
|
974
|
+
static {
|
|
975
|
+
__name(this, "ModelIOChecks");
|
|
976
|
+
}
|
|
881
977
|
inputsToDownload;
|
|
882
978
|
outputUploaded;
|
|
883
979
|
constructor(data) {
|
|
@@ -914,17 +1010,16 @@ import path10 from "path";
|
|
|
914
1010
|
function createInputHandler(inputFilesPath, modelIOChecks, ctx) {
|
|
915
1011
|
return (req, res) => {
|
|
916
1012
|
if (!modelIOChecks.isValidInput(req.params.filename)) {
|
|
917
|
-
return res.status(404).json({
|
|
1013
|
+
return res.status(404).json({
|
|
1014
|
+
error: "File not found"
|
|
1015
|
+
});
|
|
918
1016
|
}
|
|
919
1017
|
modelIOChecks.trackInputHandled(req.params.filename);
|
|
920
|
-
const filePath = path10.join(
|
|
921
|
-
ctx.projectRoot,
|
|
922
|
-
inputFilesPath,
|
|
923
|
-
req.params.filename
|
|
924
|
-
);
|
|
1018
|
+
const filePath = path10.join(ctx.projectRoot, inputFilesPath, req.params.filename);
|
|
925
1019
|
return res.sendFile(filePath);
|
|
926
1020
|
};
|
|
927
1021
|
}
|
|
1022
|
+
__name(createInputHandler, "createInputHandler");
|
|
928
1023
|
|
|
929
1024
|
// src/model-io-checks/utils/create-output-handler.ts
|
|
930
1025
|
import fs9 from "fs/promises";
|
|
@@ -933,17 +1028,24 @@ function createOutputHandler(modelIOChecks, ctx) {
|
|
|
933
1028
|
return async (req, res) => {
|
|
934
1029
|
modelIOChecks.trackOutputHandled();
|
|
935
1030
|
const outputPath = path11.join(ctx.projectRoot, TEST_OUTPUT_DATA_DIR);
|
|
936
|
-
await fs9.mkdir(outputPath, {
|
|
1031
|
+
await fs9.mkdir(outputPath, {
|
|
1032
|
+
recursive: true
|
|
1033
|
+
});
|
|
937
1034
|
const outputFilePath = path11.join(outputPath, TEST_OUTPUT_FILENAME);
|
|
938
1035
|
const outputData = JSON.stringify(req.body, null, 2);
|
|
939
1036
|
await fs9.writeFile(outputFilePath, outputData, "utf8");
|
|
940
|
-
return res.status(200).json({
|
|
1037
|
+
return res.status(200).json({
|
|
1038
|
+
success: true
|
|
1039
|
+
});
|
|
941
1040
|
};
|
|
942
1041
|
}
|
|
1042
|
+
__name(createOutputHandler, "createOutputHandler");
|
|
943
1043
|
|
|
944
1044
|
// src/model-io-checks/setup-model-io-checks.ts
|
|
945
1045
|
function setupModelIOChecks(app, inputDir, inputFiles, ctx) {
|
|
946
|
-
const modelIOChecks = new ModelIOChecks({
|
|
1046
|
+
const modelIOChecks = new ModelIOChecks({
|
|
1047
|
+
inputFiles
|
|
1048
|
+
});
|
|
947
1049
|
const handleInput = createInputHandler(inputDir, modelIOChecks, ctx);
|
|
948
1050
|
const handleOutput = createOutputHandler(modelIOChecks, ctx);
|
|
949
1051
|
const inputPath = path12.join(ctx.projectRoot, inputDir);
|
|
@@ -953,6 +1055,7 @@ function setupModelIOChecks(app, inputDir, inputFiles, ctx) {
|
|
|
953
1055
|
app.put("/output", handleOutput);
|
|
954
1056
|
return modelIOChecks;
|
|
955
1057
|
}
|
|
1058
|
+
__name(setupModelIOChecks, "setupModelIOChecks");
|
|
956
1059
|
|
|
957
1060
|
// src/utils/check-input-files.ts
|
|
958
1061
|
import path13 from "path";
|
|
@@ -961,37 +1064,31 @@ async function checkInputFiles(inputFiles, inputDataPath, ctx) {
|
|
|
961
1064
|
const filePath = path13.join(ctx.projectRoot, inputDataPath, inputFile);
|
|
962
1065
|
const exists = await isExistingPath(filePath);
|
|
963
1066
|
if (!exists) {
|
|
964
|
-
throw new Error(
|
|
965
|
-
`Input data not found (${inputFile}) - did you need to run \`pd4castr fetch\`?`
|
|
966
|
-
);
|
|
1067
|
+
throw new Error(`Input data not found (${inputFile}) - did you need to run \`pd4castr fetch\`?`);
|
|
967
1068
|
}
|
|
968
1069
|
}
|
|
969
1070
|
}
|
|
1071
|
+
__name(checkInputFiles, "checkInputFiles");
|
|
970
1072
|
|
|
971
1073
|
// src/utils/get-input-files.ts
|
|
972
1074
|
function getInputFiles(config) {
|
|
973
1075
|
const inputFiles = config.inputs.map((input2) => getInputFilename(input2));
|
|
974
1076
|
return inputFiles;
|
|
975
1077
|
}
|
|
1078
|
+
__name(getInputFiles, "getInputFiles");
|
|
976
1079
|
|
|
977
1080
|
// src/commands/publish/utils/run-model-io-tests.ts
|
|
978
1081
|
async function runModelIOTests(dockerImage, options, app, ctx) {
|
|
979
1082
|
const inputFiles = getInputFiles(ctx.config);
|
|
980
1083
|
await checkInputFiles(inputFiles, options.inputDir, ctx);
|
|
981
1084
|
await buildDockerImage(dockerImage, ctx);
|
|
982
|
-
const modelIOChecks = setupModelIOChecks(
|
|
983
|
-
app,
|
|
984
|
-
options.inputDir,
|
|
985
|
-
inputFiles,
|
|
986
|
-
ctx
|
|
987
|
-
);
|
|
1085
|
+
const modelIOChecks = setupModelIOChecks(app, options.inputDir, inputFiles, ctx);
|
|
988
1086
|
await runModelContainer(dockerImage, options.port, ctx);
|
|
989
1087
|
if (!modelIOChecks.isInputsHandled() || !modelIOChecks.isOutputHandled()) {
|
|
990
|
-
throw new Error(
|
|
991
|
-
"Model I/O test failed. Please run `pd4castr test` to debug the issue."
|
|
992
|
-
);
|
|
1088
|
+
throw new Error("Model I/O test failed. Please run `pd4castr test` to debug the issue.");
|
|
993
1089
|
}
|
|
994
1090
|
}
|
|
1091
|
+
__name(runModelIOTests, "runModelIOTests");
|
|
995
1092
|
|
|
996
1093
|
// src/commands/publish/handle-create-model-flow.ts
|
|
997
1094
|
async function handleCreateModelFlow(options, app, spinner, ctx, authCtx) {
|
|
@@ -1020,11 +1117,10 @@ async function handleCreateModelFlow(options, app, spinner, ctx, authCtx) {
|
|
|
1020
1117
|
config.$$modelGroupID = model.modelGroupId;
|
|
1021
1118
|
config.$$revision = model.revision;
|
|
1022
1119
|
config.$$dockerImage = model.dockerImage;
|
|
1120
|
+
config.displayTimezone = model.displayTimezone;
|
|
1023
1121
|
}, options.config);
|
|
1024
1122
|
spinner.succeed("Model data published successfully");
|
|
1025
|
-
spinner.start(
|
|
1026
|
-
"Pushing model image to registry - this may take a few minutes..."
|
|
1027
|
-
);
|
|
1123
|
+
spinner.start("Pushing model image to registry - this may take a few minutes...");
|
|
1028
1124
|
const pushCredentials = await getRegistryPushCredentials(model.id, authCtx);
|
|
1029
1125
|
await loginToDockerRegistry(pushCredentials);
|
|
1030
1126
|
await buildDockerImage(dockerImage, ctx);
|
|
@@ -1051,6 +1147,7 @@ async function handleCreateModelFlow(options, app, spinner, ctx, authCtx) {
|
|
|
1051
1147
|
console.log(MODEL_RUN_TRIGGER_MESSAGE);
|
|
1052
1148
|
}
|
|
1053
1149
|
}
|
|
1150
|
+
__name(handleCreateModelFlow, "handleCreateModelFlow");
|
|
1054
1151
|
|
|
1055
1152
|
// src/commands/publish/handle-update-existing-model-flow.ts
|
|
1056
1153
|
import * as inquirer5 from "@inquirer/prompts";
|
|
@@ -1065,26 +1162,28 @@ import invariant2 from "tiny-invariant";
|
|
|
1065
1162
|
|
|
1066
1163
|
// src/api/get-model.ts
|
|
1067
1164
|
async function getModel(id, authCtx) {
|
|
1068
|
-
const headers = {
|
|
1069
|
-
|
|
1165
|
+
const headers = {
|
|
1166
|
+
Authorization: `Bearer ${authCtx.accessToken}`
|
|
1167
|
+
};
|
|
1168
|
+
const result = await api.get(`model/${id}`, {
|
|
1169
|
+
headers
|
|
1170
|
+
}).json();
|
|
1070
1171
|
return result;
|
|
1071
1172
|
}
|
|
1173
|
+
__name(getModel, "getModel");
|
|
1072
1174
|
|
|
1073
1175
|
// src/commands/publish/utils/validate-local-model-state.ts
|
|
1074
1176
|
async function validateLocalModelState(ctx, authCtx) {
|
|
1075
1177
|
invariant2(ctx.config.$$id, "model ID is required to fetch published model");
|
|
1076
1178
|
const currentModel = await getModel(ctx.config.$$id, authCtx);
|
|
1077
1179
|
if (currentModel.revision !== ctx.config.$$revision) {
|
|
1078
|
-
throw new Error(
|
|
1079
|
-
`OUT OF SYNC: Local revision (${ctx.config.$$revision}) does not match the current published revision (${currentModel.revision})`
|
|
1080
|
-
);
|
|
1180
|
+
throw new Error(`OUT OF SYNC: Local revision (${ctx.config.$$revision}) does not match the current published revision (${currentModel.revision})`);
|
|
1081
1181
|
}
|
|
1082
1182
|
if (currentModel.modelGroupId !== ctx.config.$$modelGroupID) {
|
|
1083
|
-
throw new Error(
|
|
1084
|
-
`OUT OF SYNC: Local model group ID (${ctx.config.$$modelGroupID}) does not match the current published model group ID (${currentModel.modelGroupId})`
|
|
1085
|
-
);
|
|
1183
|
+
throw new Error(`OUT OF SYNC: Local model group ID (${ctx.config.$$modelGroupID}) does not match the current published model group ID (${currentModel.modelGroupId})`);
|
|
1086
1184
|
}
|
|
1087
1185
|
}
|
|
1186
|
+
__name(validateLocalModelState, "validateLocalModelState");
|
|
1088
1187
|
|
|
1089
1188
|
// src/commands/publish/handle-model-revision-create-flow.ts
|
|
1090
1189
|
var WARNING_LABEL = chalk4.yellowBright.bold("WARNING!");
|
|
@@ -1120,9 +1219,7 @@ async function handleModelRevisionCreateFlow(options, app, spinner, ctx, authCtx
|
|
|
1120
1219
|
config.$$dockerImage = model.dockerImage;
|
|
1121
1220
|
}, options.config);
|
|
1122
1221
|
spinner.succeed("Model revision data published successfully");
|
|
1123
|
-
spinner.start(
|
|
1124
|
-
"Pushing new model revision image to registry - this may take a few minutes..."
|
|
1125
|
-
);
|
|
1222
|
+
spinner.start("Pushing new model revision image to registry - this may take a few minutes...");
|
|
1126
1223
|
const pushCredentials = await getRegistryPushCredentials(model.id, authCtx);
|
|
1127
1224
|
await loginToDockerRegistry(pushCredentials);
|
|
1128
1225
|
await buildDockerImage(dockerImage, ctx);
|
|
@@ -1143,14 +1240,13 @@ async function handleModelRevisionCreateFlow(options, app, spinner, ctx, authCtx
|
|
|
1143
1240
|
symbol: "\u{1F680} ",
|
|
1144
1241
|
prefixText: "\n",
|
|
1145
1242
|
suffixText: "\n",
|
|
1146
|
-
text: chalk4.bold(
|
|
1147
|
-
`New model revision (r${model.revision}) published successfully`
|
|
1148
|
-
)
|
|
1243
|
+
text: chalk4.bold(`New model revision (r${model.revision}) published successfully`)
|
|
1149
1244
|
});
|
|
1150
1245
|
if (!modelRunTriggered && !options.skipTrigger) {
|
|
1151
1246
|
console.log(MODEL_RUN_TRIGGER_MESSAGE);
|
|
1152
1247
|
}
|
|
1153
1248
|
}
|
|
1249
|
+
__name(handleModelRevisionCreateFlow, "handleModelRevisionCreateFlow");
|
|
1154
1250
|
|
|
1155
1251
|
// src/commands/publish/handle-model-revision-update-flow.ts
|
|
1156
1252
|
import * as inquirer4 from "@inquirer/prompts";
|
|
@@ -1158,10 +1254,16 @@ import chalk5 from "chalk";
|
|
|
1158
1254
|
|
|
1159
1255
|
// src/api/update-model.ts
|
|
1160
1256
|
async function updateModel(config, authCtx) {
|
|
1161
|
-
const headers = {
|
|
1162
|
-
|
|
1257
|
+
const headers = {
|
|
1258
|
+
Authorization: `Bearer ${authCtx.accessToken}`
|
|
1259
|
+
};
|
|
1260
|
+
const result = await api.patch(`model/${config.id}`, {
|
|
1261
|
+
headers,
|
|
1262
|
+
json: config
|
|
1263
|
+
}).json();
|
|
1163
1264
|
return result;
|
|
1164
1265
|
}
|
|
1266
|
+
__name(updateModel, "updateModel");
|
|
1165
1267
|
|
|
1166
1268
|
// src/commands/publish/handle-model-revision-update-flow.ts
|
|
1167
1269
|
var WARNING_LABEL2 = chalk5.yellowBright.bold("WARNING!");
|
|
@@ -1195,11 +1297,10 @@ async function handleModelRevisionUpdateFlow(options, app, spinner, ctx, authCtx
|
|
|
1195
1297
|
config.$$modelGroupID = model.modelGroupId;
|
|
1196
1298
|
config.$$revision = model.revision;
|
|
1197
1299
|
config.$$dockerImage = model.dockerImage;
|
|
1300
|
+
config.displayTimezone = model.displayTimezone;
|
|
1198
1301
|
}, options.config);
|
|
1199
1302
|
spinner.succeed("Model revision data updated successfully");
|
|
1200
|
-
spinner.start(
|
|
1201
|
-
"Pushing updated model image to registry - this may take a few minutes..."
|
|
1202
|
-
);
|
|
1303
|
+
spinner.start("Pushing updated model image to registry - this may take a few minutes...");
|
|
1203
1304
|
const pushCredentials = await getRegistryPushCredentials(model.id, authCtx);
|
|
1204
1305
|
await loginToDockerRegistry(pushCredentials);
|
|
1205
1306
|
await buildDockerImage(dockerImage, ctx);
|
|
@@ -1226,6 +1327,7 @@ async function handleModelRevisionUpdateFlow(options, app, spinner, ctx, authCtx
|
|
|
1226
1327
|
console.log(MODEL_RUN_TRIGGER_MESSAGE);
|
|
1227
1328
|
}
|
|
1228
1329
|
}
|
|
1330
|
+
__name(handleModelRevisionUpdateFlow, "handleModelRevisionUpdateFlow");
|
|
1229
1331
|
|
|
1230
1332
|
// src/commands/publish/handle-update-existing-model-flow.ts
|
|
1231
1333
|
async function handleUpdateExistingModelFlow(options, app, spinner, ctx, authCtx) {
|
|
@@ -1237,34 +1339,113 @@ async function handleUpdateExistingModelFlow(options, app, spinner, ctx, authCtx
|
|
|
1237
1339
|
message: "Do you want to update the existing revision or create a new one?",
|
|
1238
1340
|
choices: [
|
|
1239
1341
|
{
|
|
1240
|
-
value: "new"
|
|
1342
|
+
value: "new",
|
|
1241
1343
|
name: `New Revision (r${revision} \u2192 r${revision + 1})`
|
|
1242
1344
|
},
|
|
1243
1345
|
{
|
|
1244
|
-
value: "update"
|
|
1346
|
+
value: "update",
|
|
1245
1347
|
name: `Update Existing Revision (r${revision})`
|
|
1246
1348
|
}
|
|
1247
1349
|
]
|
|
1248
1350
|
});
|
|
1249
1351
|
logEmptyLine();
|
|
1250
|
-
if (action === "new"
|
|
1352
|
+
if (action === "new") {
|
|
1251
1353
|
await handleModelRevisionCreateFlow(options, app, spinner, ctx, authCtx);
|
|
1252
|
-
} else if (action === "update"
|
|
1354
|
+
} else if (action === "update") {
|
|
1253
1355
|
await handleModelRevisionUpdateFlow(options, app, spinner, ctx, authCtx);
|
|
1254
1356
|
} else {
|
|
1255
1357
|
throw new Error("Invalid CLI state");
|
|
1256
1358
|
}
|
|
1257
1359
|
}
|
|
1360
|
+
__name(handleUpdateExistingModelFlow, "handleUpdateExistingModelFlow");
|
|
1258
1361
|
|
|
1259
1362
|
// src/commands/publish/handle-action.ts
|
|
1363
|
+
function _ts_add_disposable_resource(env, value, async) {
|
|
1364
|
+
if (value !== null && value !== void 0) {
|
|
1365
|
+
if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
|
|
1366
|
+
var dispose, inner;
|
|
1367
|
+
if (async) {
|
|
1368
|
+
if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
|
|
1369
|
+
dispose = value[Symbol.asyncDispose];
|
|
1370
|
+
}
|
|
1371
|
+
if (dispose === void 0) {
|
|
1372
|
+
if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
|
|
1373
|
+
dispose = value[Symbol.dispose];
|
|
1374
|
+
if (async) inner = dispose;
|
|
1375
|
+
}
|
|
1376
|
+
if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
|
|
1377
|
+
if (inner) dispose = /* @__PURE__ */ __name(function() {
|
|
1378
|
+
try {
|
|
1379
|
+
inner.call(this);
|
|
1380
|
+
} catch (e) {
|
|
1381
|
+
return Promise.reject(e);
|
|
1382
|
+
}
|
|
1383
|
+
}, "dispose");
|
|
1384
|
+
env.stack.push({
|
|
1385
|
+
value,
|
|
1386
|
+
dispose,
|
|
1387
|
+
async
|
|
1388
|
+
});
|
|
1389
|
+
} else if (async) {
|
|
1390
|
+
env.stack.push({
|
|
1391
|
+
async: true
|
|
1392
|
+
});
|
|
1393
|
+
}
|
|
1394
|
+
return value;
|
|
1395
|
+
}
|
|
1396
|
+
__name(_ts_add_disposable_resource, "_ts_add_disposable_resource");
|
|
1397
|
+
function _ts_dispose_resources(env) {
|
|
1398
|
+
var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error, suppressed, message) {
|
|
1399
|
+
var e = new Error(message);
|
|
1400
|
+
return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
|
|
1401
|
+
};
|
|
1402
|
+
return (_ts_dispose_resources = /* @__PURE__ */ __name(function _ts_dispose_resources3(env2) {
|
|
1403
|
+
function fail(e) {
|
|
1404
|
+
env2.error = env2.hasError ? new _SuppressedError(e, env2.error, "An error was suppressed during disposal.") : e;
|
|
1405
|
+
env2.hasError = true;
|
|
1406
|
+
}
|
|
1407
|
+
__name(fail, "fail");
|
|
1408
|
+
var r, s = 0;
|
|
1409
|
+
function next() {
|
|
1410
|
+
while (r = env2.stack.pop()) {
|
|
1411
|
+
try {
|
|
1412
|
+
if (!r.async && s === 1) return s = 0, env2.stack.push(r), Promise.resolve().then(next);
|
|
1413
|
+
if (r.dispose) {
|
|
1414
|
+
var result = r.dispose.call(r.value);
|
|
1415
|
+
if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) {
|
|
1416
|
+
fail(e);
|
|
1417
|
+
return next();
|
|
1418
|
+
});
|
|
1419
|
+
} else s |= 1;
|
|
1420
|
+
} catch (e) {
|
|
1421
|
+
fail(e);
|
|
1422
|
+
}
|
|
1423
|
+
}
|
|
1424
|
+
if (s === 1) return env2.hasError ? Promise.reject(env2.error) : Promise.resolve();
|
|
1425
|
+
if (env2.hasError) throw env2.error;
|
|
1426
|
+
}
|
|
1427
|
+
__name(next, "next");
|
|
1428
|
+
return next();
|
|
1429
|
+
}, "_ts_dispose_resources"))(env);
|
|
1430
|
+
}
|
|
1431
|
+
__name(_ts_dispose_resources, "_ts_dispose_resources");
|
|
1260
1432
|
async function handleAction5(options) {
|
|
1261
|
-
|
|
1433
|
+
const env = {
|
|
1434
|
+
stack: [],
|
|
1435
|
+
error: void 0,
|
|
1436
|
+
hasError: false
|
|
1437
|
+
};
|
|
1262
1438
|
try {
|
|
1263
1439
|
const spinner = ora5("Starting model publish...").start();
|
|
1264
1440
|
const app = express2();
|
|
1265
|
-
app.use(express2.json({
|
|
1266
|
-
|
|
1267
|
-
|
|
1441
|
+
app.use(express2.json({
|
|
1442
|
+
limit: "100mb"
|
|
1443
|
+
}));
|
|
1444
|
+
app.use(express2.urlencoded({
|
|
1445
|
+
limit: "100mb",
|
|
1446
|
+
extended: true
|
|
1447
|
+
}));
|
|
1448
|
+
const webServer = _ts_add_disposable_resource(env, await startWebServer(app, options.port), false);
|
|
1268
1449
|
try {
|
|
1269
1450
|
const ctx = await loadProjectContext(options.config);
|
|
1270
1451
|
const authCtx = await getAuth();
|
|
@@ -1286,25 +1467,20 @@ async function handleAction5(options) {
|
|
|
1286
1467
|
}
|
|
1287
1468
|
process.exit(1);
|
|
1288
1469
|
}
|
|
1289
|
-
} catch (
|
|
1290
|
-
|
|
1470
|
+
} catch (e) {
|
|
1471
|
+
env.error = e;
|
|
1472
|
+
env.hasError = true;
|
|
1291
1473
|
} finally {
|
|
1292
|
-
|
|
1474
|
+
_ts_dispose_resources(env);
|
|
1293
1475
|
}
|
|
1294
1476
|
}
|
|
1477
|
+
__name(handleAction5, "handleAction");
|
|
1295
1478
|
|
|
1296
1479
|
// src/commands/publish/index.ts
|
|
1297
1480
|
function registerPublishCommand(program2) {
|
|
1298
|
-
program2.command("publish").description("Publishes a pd4castr model.").option(
|
|
1299
|
-
"-i, --input-dir <path>",
|
|
1300
|
-
"The input test data directory",
|
|
1301
|
-
TEST_INPUT_DATA_DIR
|
|
1302
|
-
).option(
|
|
1303
|
-
"-p, --port <port>",
|
|
1304
|
-
"The port to run the IO testing webserver on",
|
|
1305
|
-
TEST_WEBSERVER_PORT.toString()
|
|
1306
|
-
).option("--sc, --skip-checks", "Skip the model I/O checks", false).option("--st, --skip-trigger", "Skip the model trigger", false).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction5);
|
|
1481
|
+
program2.command("publish").description("Publishes a pd4castr model.").option("-i, --input-dir <path>", "The input test data directory", TEST_INPUT_DATA_DIR).option("-p, --port <port>", "The port to run the IO testing webserver on", TEST_WEBSERVER_PORT.toString()).option("--sc, --skip-checks", "Skip the model I/O checks", false).option("--st, --skip-trigger", "Skip the model trigger", false).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction5);
|
|
1307
1482
|
}
|
|
1483
|
+
__name(registerPublishCommand, "registerPublishCommand");
|
|
1308
1484
|
|
|
1309
1485
|
// src/commands/test/handle-action.ts
|
|
1310
1486
|
import path14 from "path";
|
|
@@ -1312,14 +1488,92 @@ import { ExecaError as ExecaError6 } from "execa";
1312 1488 |   import express3 from "express";
1313 1489 |   import ora6 from "ora";
1314 1490 |   import { ZodError as ZodError6 } from "zod";
     1491 | + function _ts_add_disposable_resource2(env, value, async) {
     1492 | + if (value !== null && value !== void 0) {
     1493 | + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
     1494 | + var dispose, inner;
     1495 | + if (async) {
     1496 | + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
     1497 | + dispose = value[Symbol.asyncDispose];
     1498 | + }
     1499 | + if (dispose === void 0) {
     1500 | + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
     1501 | + dispose = value[Symbol.dispose];
     1502 | + if (async) inner = dispose;
     1503 | + }
     1504 | + if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
     1505 | + if (inner) dispose = /* @__PURE__ */ __name(function() {
     1506 | + try {
     1507 | + inner.call(this);
     1508 | + } catch (e) {
     1509 | + return Promise.reject(e);
     1510 | + }
     1511 | + }, "dispose");
     1512 | + env.stack.push({
     1513 | + value,
     1514 | + dispose,
     1515 | + async
     1516 | + });
     1517 | + } else if (async) {
     1518 | + env.stack.push({
     1519 | + async: true
     1520 | + });
     1521 | + }
     1522 | + return value;
     1523 | + }
     1524 | + __name(_ts_add_disposable_resource2, "_ts_add_disposable_resource");
     1525 | + function _ts_dispose_resources2(env) {
     1526 | + var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error, suppressed, message) {
     1527 | + var e = new Error(message);
     1528 | + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
     1529 | + };
     1530 | + return (_ts_dispose_resources2 = /* @__PURE__ */ __name(function _ts_dispose_resources3(env2) {
     1531 | + function fail(e) {
     1532 | + env2.error = env2.hasError ? new _SuppressedError(e, env2.error, "An error was suppressed during disposal.") : e;
     1533 | + env2.hasError = true;
     1534 | + }
     1535 | + __name(fail, "fail");
     1536 | + var r, s = 0;
     1537 | + function next() {
     1538 | + while (r = env2.stack.pop()) {
     1539 | + try {
     1540 | + if (!r.async && s === 1) return s = 0, env2.stack.push(r), Promise.resolve().then(next);
     1541 | + if (r.dispose) {
     1542 | + var result = r.dispose.call(r.value);
     1543 | + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) {
     1544 | + fail(e);
     1545 | + return next();
     1546 | + });
     1547 | + } else s |= 1;
     1548 | + } catch (e) {
     1549 | + fail(e);
     1550 | + }
     1551 | + }
     1552 | + if (s === 1) return env2.hasError ? Promise.reject(env2.error) : Promise.resolve();
     1553 | + if (env2.hasError) throw env2.error;
     1554 | + }
     1555 | + __name(next, "next");
     1556 | + return next();
     1557 | + }, "_ts_dispose_resources"))(env);
     1558 | + }
     1559 | + __name(_ts_dispose_resources2, "_ts_dispose_resources");
1315 1560 |   async function handleAction6(options) {
1316      | -
     1561 | + const env = {
     1562 | + stack: [],
     1563 | + error: void 0,
     1564 | + hasError: false
     1565 | + };
1317 1566 |   try {
1318 1567 |   const spinner = ora6("Starting model tests...").info();
1319 1568 |   const app = express3();
1320      | - app.use(express3.json({
1321      | -
1322      | -
     1569 | + app.use(express3.json({
     1570 | + limit: "100mb"
     1571 | + }));
     1572 | + app.use(express3.urlencoded({
     1573 | + limit: "100mb",
     1574 | + extended: true
     1575 | + }));
     1576 | + const webServer = _ts_add_disposable_resource2(env, await startWebServer(app, options.port), false);
1323 1577 |   try {
1324 1578 |   const ctx = await loadProjectContext(options.config);
1325 1579 |   const inputFiles = getInputFiles(ctx.config);
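
Most of the added lines above are TypeScript's downlevel emit for explicit resource management (`using` declarations): _ts_add_disposable_resource2 records each resource and its Symbol.dispose / Symbol.asyncDispose method on env.stack, and _ts_dispose_resources2 unwinds that stack from the generated `finally` block, wrapping secondary failures in SuppressedError. handleAction6 uses this so the I/O-testing web server returned by startWebServer is always torn down, and the Express body parsers are now capped at 100mb. The sketch below is a hedged illustration of the source-level pattern that produces this emit; startDisposableServer is hypothetical and is not the package's actual startWebServer.

    import express from "express";
    import type { Server } from "node:http";

    // Hypothetical helper: only the `using` mechanics matter for the illustration.
    function startDisposableServer(app: express.Express, port: number): Server & { [Symbol.dispose](): void } {
      const server = app.listen(port);
      // A Symbol.dispose method is what _ts_add_disposable_resource pushes onto env.stack.
      return Object.assign(server, {
        [Symbol.dispose]() {
          server.close();
        },
      });
    }

    async function runChecks(port: number) {
      const app = express();
      app.use(express.json({ limit: "100mb" }));                       // mirrors the raised limits in the diff
      app.use(express.urlencoded({ limit: "100mb", extended: true }));
      using server = startDisposableServer(app, port);                 // compiled into the env/stack bookkeeping above
      // ... drive the model I/O checks against the server here ...
    }                                                                  // dispose (server.close) runs even if the body throws
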
@@ -1330,12 +1584,7 @@ async function handleAction6(options) {
1330 1584 |   const dockerImage = getDockerImage(ctx);
1331 1585 |   await buildDockerImage(dockerImage, ctx);
1332 1586 |   spinner.succeed(`Built docker image (${dockerImage})`);
1333      | - const modelIOChecks = setupModelIOChecks(
1334      | - app,
1335      | - options.inputDir,
1336      | - inputFiles,
1337      | - ctx
1338      | - );
     1587 | + const modelIOChecks = setupModelIOChecks(app, options.inputDir, inputFiles, ctx);
1339 1588 |   spinner.start("Running model container");
1340 1589 |   await runModelContainer(dockerImage, options.port, ctx);
1341 1590 |   spinner.succeed("Model run complete");
@@ -1351,11 +1600,7 @@ async function handleAction6(options) {
1351 1600 |   throw new Error("Model I/O test failed");
1352 1601 |   }
1353 1602 |   if (modelIOChecks.isOutputHandled()) {
1354      | - const outputPath = path14.join(
1355      | - ctx.projectRoot,
1356      | - TEST_OUTPUT_DATA_DIR,
1357      | - TEST_OUTPUT_FILENAME
1358      | - );
     1603 | + const outputPath = path14.join(ctx.projectRoot, TEST_OUTPUT_DATA_DIR, TEST_OUTPUT_FILENAME);
1359 1604 |   const clickHereLink = createLink("Click here", `file://${outputPath}`);
1360 1605 |   const fileLink = createLink(TEST_OUTPUT_FILENAME, `file://${outputPath}`);
1361 1606 |   console.log(`
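
The path14.join call above is now emitted on one line; the surrounding code turns the output path into clickable terminal links via createLink. createLink's implementation is not part of this diff, so the helper below is a hypothetical stand-in showing the OSC 8 escape sequence such a function typically wraps.

    // Hypothetical stand-in for createLink, shown for context only (not code from this package).
    // OSC 8 is the escape sequence most modern terminals use for clickable hyperlinks.
    function createLink(text: string, url: string): string {
      const OSC = "\u001B]8;;";
      const BEL = "\u0007";
      return `${OSC}${url}${BEL}${text}${OSC}${BEL}`;
    }

    console.log(createLink("Click here", "file:///tmp/test_output/output-file"));
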
@@ -1376,27 +1621,20 @@ ${clickHereLink} to view output (${fileLink})
1376 1621 |   }
1377 1622 |   process.exit(1);
1378 1623 |   }
1379      | - } catch (
1380      | -
     1624 | + } catch (e) {
     1625 | + env.error = e;
     1626 | + env.hasError = true;
1381 1627 |   } finally {
1382      | -
     1628 | + _ts_dispose_resources2(env);
1383 1629 |   }
1384 1630 |   }
     1631 | + __name(handleAction6, "handleAction");
1385 1632 |
1386 1633 |   // src/commands/test/index.ts
1387 1634 |   function registerTestCommand(program2) {
1388      | - program2.command("test").description(
1389      | - "Test a model by verifying input and output is handled correctly."
1390      | - ).option(
1391      | - "-i, --input-dir <path>",
1392      | - "The input test data directory",
1393      | - TEST_INPUT_DATA_DIR
1394      | - ).option(
1395      | - "-p, --port <port>",
1396      | - "The port to run the IO testing webserver on",
1397      | - TEST_WEBSERVER_PORT.toString()
1398      | - ).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction6);
     1635 | + program2.command("test").description("Test a model by verifying input and output is handled correctly.").option("-i, --input-dir <path>", "The input test data directory", TEST_INPUT_DATA_DIR).option("-p, --port <port>", "The port to run the IO testing webserver on", TEST_WEBSERVER_PORT.toString()).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction6);
1399 1636 |   }
     1637 | + __name(registerTestCommand, "registerTestCommand");
1400 1638 |
1401 1639 |   // src/program.ts
1402 1640 |   import { Command } from "commander";
@@ -1404,8 +1642,9 @@ import { Command } from "commander";
1404 1642 |   // package.json
1405 1643 |   var package_default = {
1406 1644 |   name: "@pd4castr/cli",
1407      | - version: "1.
     1645 | + version: "1.3.0",
1408 1646 |   description: "CLI tool for creating, testing, and publishing pd4castr models",
     1647 | + license: "UNLICENSED",
1409 1648 |   main: "dist/index.js",
1410 1649 |   type: "module",
1411 1650 |   bin: {
@@ -1414,77 +1653,58 @@ var package_default = {
1414 1653 |   files: [
1415 1654 |   "dist/**/*"
1416 1655 |   ],
     1656 | + engines: {
     1657 | + node: ">=20.0.0"
     1658 | + },
1417 1659 |   scripts: {
1418 1660 |   build: "tsup",
1419 1661 |   dev: "tsup --watch",
1420 1662 |   cli: "node dist/index.js",
1421      | -
1422      | - "test:watch": "vitest",
1423      | - "test:coverage": "vitest run --coverage",
1424      | - lint: "eslint .",
1425      | - "lint:fix": "eslint . --fix",
     1663 | + release: "semantic-release -e semantic-release-monorepo",
1426 1664 |   format: "prettier --write .",
1427      | -
     1665 | + lint: "eslint .",
1428 1666 |   typecheck: "tsc --noEmit",
1429 1667 |   prepublishOnly: "yarn build"
1430 1668 |   },
1431      | -
1432      | - "
1433      | - "
1434      | -
1435      | -
1436      | -
1437      | -
1438      | -
1439      | -
1440      | -
1441      | -
     1669 | + dependencies: {
     1670 | + "@inquirer/prompts": "7.7.1",
     1671 | + auth0: "4.28.0",
     1672 | + chalk: "5.6.0",
     1673 | + commander: "14.0.0",
     1674 | + execa: "9.6.0",
     1675 | + express: "4.21.2",
     1676 | + immer: "10.1.1",
     1677 | + ky: "1.8.2",
     1678 | + ora: "8.2.0",
     1679 | + slugify: "1.6.6",
     1680 | + tiged: "2.12.7",
     1681 | + "tiny-invariant": "1.3.3",
     1682 | + zod: "4.0.14"
1442 1683 |   },
1443      | - homepage: "https://github.com/pipelabs/pd4castr-cli#readme",
1444 1684 |   devDependencies: {
1445 1685 |   "@faker-js/faker": "10.0.0",
1446 1686 |   "@mswjs/data": "0.16.2",
1447 1687 |   "@types/express": "4.17.21",
1448 1688 |   "@types/node": "24.1.0",
1449 1689 |   "@types/supertest": "6.0.3",
1450      | - "@typescript-eslint/eslint-plugin": "8.38.0",
1451      | - "@typescript-eslint/parser": "8.38.0",
1452      | - eslint: "9.32.0",
1453      | - "eslint-config-prettier": "10.1.8",
1454      | - "eslint-plugin-simple-import-sort": "12.1.1",
1455      | - "eslint-plugin-unicorn": "60.0.0",
1456      | - "eslint-plugin-vitest": "0.5.4",
1457 1690 |   "hook-std": "3.0.0",
1458 1691 |   "jest-extended": "6.0.0",
1459      | - memfs: "4.
1460      | - msw: "2.10.
1461      | - prettier: "3.6.2",
     1692 | + memfs: "4.49.0",
     1693 | + msw: "2.10.5",
1462 1694 |   "strip-ansi": "7.1.0",
1463 1695 |   supertest: "7.1.4",
1464 1696 |   tsup: "8.5.0",
1465 1697 |   "type-fest": "4.41.0",
1466 1698 |   typescript: "5.8.3",
1467      | - "typescript-eslint": "8.38.0",
1468 1699 |   vitest: "3.2.4"
1469 1700 |   },
1470      | -
1471      | -
1472      | - auth0: "4.27.0",
1473      | - chalk: "5.6.0",
1474      | - commander: "14.0.0",
1475      | - execa: "9.6.0",
1476      | - express: "4.21.2",
1477      | - immer: "10.1.1",
1478      | - ky: "1.8.2",
1479      | - ora: "8.2.0",
1480      | - slugify: "1.6.6",
1481      | - tiged: "2.12.7",
1482      | - "tiny-invariant": "1.3.3",
1483      | - zod: "4.0.14"
     1701 | + publishConfig: {
     1702 | + access: "public"
1484 1703 |   },
1485      | -
1486      | -
1487      | -
     1704 | + keywords: [
     1705 | + "cli",
     1706 | + "pd4castr"
     1707 | + ]
1488 1708 |   };
1489 1709 |
1490 1710 |   // src/program.ts
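
Together with the version and license rows in the previous hunk, the manifest changes above relocate the runtime packages into a dependencies block ahead of devDependencies (bumping auth0 from 4.27.0 to 4.28.0), drop the ESLint, Prettier and typescript-eslint packages along with the test:watch, test:coverage and lint:fix scripts, add a semantic-release script, remove the homepage field, and introduce engines (node >= 20), publishConfig.access: "public" and a keywords list. npm enforces engines only when engine-strict is enabled; the guard below is a hypothetical way for a CLI to fail fast on older Node versions and is not something the diff shows this package doing.

    // Hypothetical runtime guard matching the new engines field (node >= 20.0.0).
    // The diff only adds the manifest entry; the CLI is not shown performing this check itself.
    const [major] = process.versions.node.split(".").map(Number);
    if (major < 20) {
      console.error(`@pd4castr/cli requires Node.js >= 20.0.0 (running ${process.versions.node})`);
      process.exit(1);
    }
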