@pd4castr/cli 1.2.0 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -2
- package/dist/index.js +497 -288
- package/package.json +29 -47
package/dist/index.js
CHANGED
@@ -1,49 +1,6 @@
 #!/usr/bin/env node
-var
-var
-throw TypeError(msg);
-};
-var __using = (stack, value, async) => {
-if (value != null) {
-if (typeof value !== "object" && typeof value !== "function") __typeError("Object expected");
-var dispose, inner;
-if (async) dispose = value[__knownSymbol("asyncDispose")];
-if (dispose === void 0) {
-dispose = value[__knownSymbol("dispose")];
-if (async) inner = dispose;
-}
-if (typeof dispose !== "function") __typeError("Object not disposable");
-if (inner) dispose = function() {
-try {
-inner.call(this);
-} catch (e) {
-return Promise.reject(e);
-}
-};
-stack.push([async, dispose, value]);
-} else if (async) {
-stack.push([async]);
-}
-return value;
-};
-var __callDispose = (stack, error, hasError) => {
-var E = typeof SuppressedError === "function" ? SuppressedError : function(e, s, m, _) {
-return _ = Error(m), _.name = "SuppressedError", _.error = e, _.suppressed = s, _;
-};
-var fail = (e) => error = hasError ? new E(e, error, "An error was suppressed during disposal") : (hasError = true, e);
-var next = (it) => {
-while (it = stack.pop()) {
-try {
-var result = it[1] && it[1].call(it[2]);
-if (it[0]) return Promise.resolve(result).then(next, (e) => (fail(e), next()));
-} catch (e) {
-fail(e);
-}
-}
-if (hasError) throw error;
-};
-return next();
-};
+var __defProp = Object.defineProperty;
+var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
 
 // src/constants.ts
 var AUTH0_DOMAIN = "pdview.au.auth0.com";
@@ -77,13 +34,21 @@ import { ZodError } from "zod";
 import { z } from "zod";
 
 // src/utils/is-iana-timezone.ts
-var timezones = /* @__PURE__ */ new Set([
+var timezones = /* @__PURE__ */ new Set([
+...Intl.supportedValuesOf("timeZone"),
+"UTC"
+]);
 function isIanaTimeZone(value) {
 return typeof value === "string" && timezones.has(value);
 }
+__name(isIanaTimeZone, "isIanaTimeZone");
 
 // src/schemas/project-config-schema.ts
-var fileFormatSchema = z.enum([
+var fileFormatSchema = z.enum([
+"csv",
+"json",
+"parquet"
+]);
 var aemoDataFetcherSchema = z.object({
 type: z.literal("AEMO_MMS"),
 checkInterval: z.number().int().min(60),
@@ -92,30 +57,54 @@ var aemoDataFetcherSchema = z.object({
 fetchQuery: z.string()
 })
 });
-var dataFetcherSchema = z.discriminatedUnion("type", [
+var dataFetcherSchema = z.discriminatedUnion("type", [
+aemoDataFetcherSchema
+]);
 var modelInputSchema = z.object({
 key: z.string(),
 inputSource: z.string().optional().default(DEFAULT_INPUT_SOURCE_ID),
-trigger: z.enum([
+trigger: z.enum([
+"WAIT_FOR_LATEST_FILE",
+"USE_MOST_RECENT_FILE"
+]),
 uploadFileFormat: fileFormatSchema.optional().default("json"),
 targetFileFormat: fileFormatSchema.optional().default("json"),
 fetcher: dataFetcherSchema.optional().nullable()
 });
 var modelOutputSchema = z.object({
 name: z.string(),
-type: z.enum([
+type: z.enum([
+"float",
+"integer",
+"string",
+"date",
+"boolean",
+"unknown"
+]),
 seriesKey: z.boolean(),
 colour: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional()
 });
+var sensitivitySchema = z.object({
+name: z.string(),
+query: z.string()
+});
 var CONFIG_WARNING_KEY = "// WARNING: DO NOT MODIFY THESE SYSTEM MANAGED VALUES";
 var projectConfigSchema = z.object({
 name: z.string(),
-forecastVariable: z.enum([
-
+forecastVariable: z.enum([
+"price"
+]),
+timeHorizon: z.enum([
+"actual",
+"day_ahead",
+"week_ahead",
+"quarterly"
+]),
 displayTimezone: z.string().refine(isIanaTimeZone, "invalid IANA time zone").optional().default("Australia/Brisbane"),
 metadata: z.record(z.string(), z.any()).optional(),
 inputs: z.array(modelInputSchema),
 outputs: z.array(modelOutputSchema),
+sensitivities: z.array(sensitivitySchema).optional().default([]),
 [CONFIG_WARNING_KEY]: z.string().optional().default(""),
 $$id: z.string().nullable().optional().default(null),
 $$modelGroupID: z.string().nullable().optional().default(null),
@@ -133,6 +122,7 @@ async function isExistingPath(path15) {
 return false;
 }
 }
+__name(isExistingPath, "isExistingPath");
 
 // src/config/load-project-context.ts
 async function loadProjectContext(configPath) {
@@ -140,9 +130,7 @@ async function loadProjectContext(configPath) {
 const resolvedConfigPath = configPath ? path.resolve(configPath) : path.join(projectRoot, PROJECT_CONFIG_FILE);
 const configExists = await isExistingPath(resolvedConfigPath);
 if (!configExists) {
-throw new Error(
-`No config found at ${resolvedConfigPath} (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md).`
-);
+throw new Error(`No config found at ${resolvedConfigPath} (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md).`);
 }
 try {
 const configFileContents = await fs2.readFile(resolvedConfigPath, "utf8");
@@ -156,11 +144,10 @@ async function loadProjectContext(configPath) {
 if (error instanceof ZodError) {
 throw error;
 }
-throw new Error(
-"Failed to parse project config (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md)."
-);
+throw new Error("Failed to parse project config (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md).");
 }
 }
+__name(loadProjectContext, "loadProjectContext");
 
 // src/utils/create-link.ts
 var ESC = "\x1B";
@@ -171,11 +158,13 @@ function createLink(text, url) {
 const end = `${OSC}8${SEP}${SEP}${ESC}\\`;
 return `${start}${text}${end}`;
 }
+__name(createLink, "createLink");
 
 // src/utils/format-nest-error-message.ts
 function formatNestErrorMessage(error) {
 return `[${error.error?.toUpperCase() ?? "UNKNOWN"}] ${error.message}`;
 }
+__name(formatNestErrorMessage, "formatNestErrorMessage");
 
 // src/utils/get-auth.ts
 import invariant from "tiny-invariant";
@@ -208,18 +197,21 @@ async function loadGlobalConfig() {
 return getDefaultConfig();
 }
 }
+__name(loadGlobalConfig, "loadGlobalConfig");
 function getDefaultConfig() {
 return {
 accessToken: null,
 accessTokenExpiresAt: null
 };
 }
+__name(getDefaultConfig, "getDefaultConfig");
 
 // src/utils/is-authed.ts
 function isAuthed(config) {
 const isTokenExpired = config.accessTokenExpiresAt && config.accessTokenExpiresAt <= Date.now();
 return Boolean(config.accessToken) && !isTokenExpired;
 }
+__name(isAuthed, "isAuthed");
 
 // src/utils/get-auth.ts
 async function getAuth() {
@@ -234,6 +226,7 @@ async function getAuth() {
 expiresAt: config.accessTokenExpiresAt
 };
 }
+__name(getAuth, "getAuth");
 
 // src/utils/log-zod-issues.ts
 function logZodIssues(error) {
@@ -241,6 +234,7 @@ function logZodIssues(error) {
 console.log(` \u2718 ${issue.path.join(".")} - ${issue.message}`);
 }
 }
+__name(logZodIssues, "logZodIssues");
 
 // src/commands/fetch/utils/fetch-aemo-data.ts
 import fs4 from "fs/promises";
@@ -255,9 +249,7 @@ var envSchema = z3.object({
 // wsl sets this environment variable on all distros that i've checked
 isWSL: z3.boolean().default(() => Boolean(process.env.WSL_DISTRO_NAME)),
 apiURL: z3.string().default(() => process.env.PD4CASTR_API_URL ?? DEFAULT_API_URL),
-wslNetworkInterface: z3.string().default(
-() => process.env.PD4CASTR_WSL_NETWORK_INTERFACE ?? WSL_NETWORK_INTERFACE_DEFAULT
-),
+wslNetworkInterface: z3.string().default(() => process.env.PD4CASTR_WSL_NETWORK_INTERFACE ?? WSL_NETWORK_INTERFACE_DEFAULT),
 auth0ClientId: z3.string().default(() => process.env.PD4CASTR_AUTH0_CLIENT_ID ?? AUTH0_CLIENT_ID),
 auth0Audience: z3.string().default(() => process.env.PD4CASTR_AUTH0_AUDIENCE ?? AUTH0_AUDIENCE)
 });
@@ -266,6 +258,7 @@ var envSchema = z3.object({
 function getEnv() {
 return envSchema.parse(process.env);
 }
+__name(getEnv, "getEnv");
 
 // src/api/api.ts
 var api = ky.create({
@@ -274,22 +267,29 @@ var api = ky.create({
 
 // src/api/query-data-fetcher.ts
 async function queryDataFetcher(querySQL, authCtx) {
-const headers = {
-
-
+const headers = {
+Authorization: `Bearer ${authCtx.accessToken}`
+};
+const payload = {
+query: querySQL,
+type: "AEMO_MMS"
+};
+const result = await api.post("data-fetcher/query", {
+json: payload,
+headers
+}).json();
 return result;
 }
+__name(queryDataFetcher, "queryDataFetcher");
 
 // src/commands/fetch/utils/fetch-aemo-data.ts
 async function fetchAEMOData(dataFetcher, authCtx, ctx) {
-const queryPath = path3.resolve(
-ctx.projectRoot,
-dataFetcher.config.fetchQuery
-);
+const queryPath = path3.resolve(ctx.projectRoot, dataFetcher.config.fetchQuery);
 const querySQL = await fs4.readFile(queryPath, "utf8");
 const result = await queryDataFetcher(querySQL, authCtx);
 return result;
 }
+__name(fetchAEMOData, "fetchAEMOData");
 
 // src/commands/fetch/utils/get-fetcher.ts
 var DATA_FETCHER_FNS = {
@@ -302,6 +302,7 @@ function getFetcher(type) {
 }
 return fetcher;
 }
+__name(getFetcher, "getFetcher");
 
 // src/commands/fetch/utils/write-test-data.ts
 import fs5 from "fs/promises";
@@ -311,18 +312,24 @@ import path4 from "path";
 function getInputFilename(modelInput) {
 return `${modelInput.key}.${modelInput.targetFileFormat}`;
 }
+__name(getInputFilename, "getInputFilename");
 
 // src/commands/fetch/utils/write-test-data.ts
 async function writeTestData(inputData, modelInput, inputDataDir, ctx) {
 const inputDir = path4.resolve(ctx.projectRoot, inputDataDir);
-await fs5.mkdir(inputDir, {
+await fs5.mkdir(inputDir, {
+recursive: true
+});
 const inputFilename = getInputFilename(modelInput);
 const inputPath = path4.resolve(inputDir, inputFilename);
 await fs5.writeFile(inputPath, JSON.stringify(inputData, void 0, 2));
 }
+__name(writeTestData, "writeTestData");
 
 // src/commands/fetch/handle-action.ts
-var FETCHABLE_DATA_FETCHER_TYPES = /* @__PURE__ */ new Set([
+var FETCHABLE_DATA_FETCHER_TYPES = /* @__PURE__ */ new Set([
+"AEMO_MMS"
+]);
 async function handleAction(options) {
 const spinner = ora("Starting data fetch...").start();
 try {
@@ -339,9 +346,7 @@ async function handleAction(options) {
 continue;
 }
 if (!FETCHABLE_DATA_FETCHER_TYPES.has(input2.fetcher.type)) {
-spinner.warn(
-`\`${input2.key}\` (${input2.fetcher.type}) - unsupported, skipping`
-);
+spinner.warn(`\`${input2.key}\` (${input2.fetcher.type}) - unsupported, skipping`);
 continue;
 }
 spinner.start(`\`${input2.key}\` (${input2.fetcher.type}) - fetching...`);
@@ -373,15 +378,13 @@ ${link} to view fetched data
 process.exit(1);
 }
 }
+__name(handleAction, "handleAction");
 
 // src/commands/fetch/index.ts
 function registerFetchCommand(program2) {
-program2.command("fetch").description("Fetches test data from configured data fetchers.").option(
-"-i, --input-dir <path>",
-"The input test data directory",
-TEST_INPUT_DATA_DIR
-).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction);
+program2.command("fetch").description("Fetches test data from configured data fetchers.").option("-i, --input-dir <path>", "The input test data directory", TEST_INPUT_DATA_DIR).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction);
 }
+__name(registerFetchCommand, "registerFetchCommand");
 
 // src/commands/init/handle-action.ts
 import path6 from "path";
@@ -408,6 +411,7 @@ var templates = {
 function getTemplatePath(template) {
 return `https://github.com/${template.repo}/${template.path}`;
 }
+__name(getTemplatePath, "getTemplatePath");
 
 // src/commands/init/utils/validate-name.ts
 async function validateName(value) {
@@ -417,6 +421,7 @@ async function validateName(value) {
 }
 return true;
 }
+__name(validateName, "validateName");
 
 // src/commands/init/handle-action.ts
 async function handleAction2() {
@@ -446,6 +451,7 @@ async function handleAction2() {
 process.exit(1);
 }
 }
+__name(handleAction2, "handleAction");
 async function fetchTemplate(template, projectName) {
 const templatePath = getTemplatePath(templates[template]);
 const fetcher = tiged(templatePath, {
@@ -455,11 +461,13 @@ async function fetchTemplate(template, projectName) {
 const destination = path6.join(process.cwd(), projectName);
 await fetcher.clone(destination);
 }
+__name(fetchTemplate, "fetchTemplate");
 
 // src/commands/init/index.ts
 function registerInitCommand(program2) {
 program2.command("init").description("Initialize a new model using a template.").action(handleAction2);
 }
+__name(registerInitCommand, "registerInitCommand");
 
 // src/commands/login/handle-action.ts
 import { ExecaError as ExecaError3 } from "execa";
@@ -477,16 +485,22 @@ async function updateGlobalConfig(updateFn) {
 const configPath = path7.join(os2.homedir(), GLOBAL_CONFIG_FILE);
 await fs6.writeFile(configPath, JSON.stringify(updatedConfig, void 0, 2));
 }
+__name(updateGlobalConfig, "updateGlobalConfig");
 
 // src/commands/login/utils/complete-auth-flow.ts
 import { HTTPError as HTTPError2 } from "ky";
 
 // src/commands/login/auth0-api.ts
 import ky2 from "ky";
-var auth0API = ky2.create({
+var auth0API = ky2.create({
+prefixUrl: `https://${AUTH0_DOMAIN}`
+});
 
 // src/commands/login/utils/complete-auth-flow.ts
-var FAILED_AUTH_ERRORS = /* @__PURE__ */ new Set([
+var FAILED_AUTH_ERRORS = /* @__PURE__ */ new Set([
+"expired_token",
+"access_denied"
+]);
 async function completeAuthFlow(authCtx) {
 const env = getEnv();
 const payload = {
@@ -496,7 +510,9 @@ async function completeAuthFlow(authCtx) {
 };
 async function fetchAuthResponse() {
 try {
-const response = await auth0API.post("oauth/token", {
+const response = await auth0API.post("oauth/token", {
+json: payload
+}).json();
 const authPayload = {
 accessToken: response.access_token,
 expiresAt: Date.now() + response.expires_in * 1e3
@@ -509,18 +525,16 @@ async function completeAuthFlow(authCtx) {
 const errorResponse = await error.response.json();
 const isFailedAuthError = FAILED_AUTH_ERRORS.has(errorResponse.error);
 if (isFailedAuthError) {
-throw new Error(
-`Login failed, please try again (${errorResponse.error_description}).`
-);
+throw new Error(`Login failed, please try again (${errorResponse.error_description}).`);
 }
 const delay = authCtx.checkInterval * 1e3;
-return new Promise(
-(resolve) => setTimeout(() => resolve(fetchAuthResponse()), delay)
-);
+return new Promise((resolve) => setTimeout(() => resolve(fetchAuthResponse()), delay));
 }
 }
+__name(fetchAuthResponse, "fetchAuthResponse");
 return fetchAuthResponse();
 }
+__name(completeAuthFlow, "completeAuthFlow");
 
 // src/commands/login/utils/start-auth-flow.ts
 async function startAuthFlow() {
@@ -530,7 +544,9 @@ async function startAuthFlow() {
 audience: env.auth0Audience,
 scope: "openid email"
 };
-const codeResponse = await auth0API.post("oauth/device/code", {
+const codeResponse = await auth0API.post("oauth/device/code", {
+json: payload
+}).json();
 const authContext = {
 deviceCode: codeResponse.device_code,
 verificationURL: codeResponse.verification_uri_complete,
@@ -539,6 +555,7 @@ async function startAuthFlow() {
 };
 return authContext;
 }
+__name(startAuthFlow, "startAuthFlow");
 
 // src/commands/login/handle-action.ts
 async function handleAction3() {
@@ -550,10 +567,8 @@ async function handleAction3() {
 return;
 }
 const authCtx = await startAuthFlow();
-spinner.info(
-
-${authCtx.verificationURL}`
-);
+spinner.info(`Please open the login link in your browser:
+${authCtx.verificationURL}`);
 spinner.info(`Your login code is:
 ${authCtx.userCode}
 `);
@@ -579,11 +594,13 @@ async function handleAction3() {
 process.exit(1);
 }
 }
+__name(handleAction3, "handleAction");
 
 // src/commands/login/index.ts
 function registerLoginCommand(program2) {
 program2.command("login").description("Logs in to the pd4castr API.").action(handleAction3);
 }
+__name(registerLoginCommand, "registerLoginCommand");
 
 // src/commands/logout/handle-action.ts
 import { ExecaError as ExecaError4 } from "execa";
@@ -617,11 +634,13 @@ async function handleAction4() {
 process.exit(1);
 }
 }
+__name(handleAction4, "handleAction");
 
 // src/commands/logout/index.ts
 function registerLogoutCommand(program2) {
 program2.command("logout").description("Logs out of the pd4castr API.").action(handleAction4);
 }
+__name(registerLogoutCommand, "registerLogoutCommand");
 
 // src/commands/publish/handle-action.ts
 import { ExecaError as ExecaError5 } from "execa";
@@ -641,6 +660,7 @@ async function startWebServer(app, port) {
 });
 });
 }
+__name(startWebServer, "startWebServer");
 
 // src/commands/publish/handle-create-model-flow.ts
 import * as inquirer2 from "@inquirer/prompts";
@@ -648,25 +668,42 @@ import chalk3 from "chalk";
 
 // src/api/create-model.ts
 async function createModel(config, authCtx) {
-const headers = {
-
+const headers = {
+Authorization: `Bearer ${authCtx.accessToken}`
+};
+const result = await api.post("model", {
+headers,
+json: config
+}).json();
 return result;
 }
+__name(createModel, "createModel");
 
 // src/api/get-registry-push-credentials.ts
 async function getRegistryPushCredentials(modelID, authCtx) {
-const headers = {
+const headers = {
+Authorization: `Bearer ${authCtx.accessToken}`
+};
 const searchParams = new URLSearchParams(`modelId=${modelID}`);
-const result = await api.get("registry/push-credentials", {
+const result = await api.get("registry/push-credentials", {
+headers,
+searchParams
+}).json();
 return result;
 }
+__name(getRegistryPushCredentials, "getRegistryPushCredentials");
 
 // src/api/trigger-model-run.ts
 async function triggerModelRun(modelId, authCtx) {
-const headers = {
-
+const headers = {
+Authorization: `Bearer ${authCtx.accessToken}`
+};
+const result = await api.post(`model/${modelId}/trigger`, {
+headers
+}).json();
 return result;
 }
+__name(triggerModelRun, "triggerModelRun");
 
 // src/config/update-project-config.ts
 import fs7 from "fs/promises";
@@ -676,73 +713,91 @@ async function updateProjectConfig(updateFn, configPath) {
 const projectConfig = await loadProjectContext(configPath);
 const updatedConfig = produce2(projectConfig.config, updateFn);
 const resolvedConfigPath = configPath ? path8.resolve(configPath) : path8.join(projectConfig.projectRoot, PROJECT_CONFIG_FILE);
-await fs7.writeFile(
-resolvedConfigPath,
-JSON.stringify(updatedConfig, void 0, 2)
-);
+await fs7.writeFile(resolvedConfigPath, JSON.stringify(updatedConfig, void 0, 2));
 }
+__name(updateProjectConfig, "updateProjectConfig");
 
 // src/docker/build-docker-image.ts
 import { execa } from "execa";
 async function buildDockerImage(dockerImage, ctx) {
 try {
-await execa(
-"
-
-
-
-
-
-
+await execa("docker", [
+"build",
+"--platform=linux/amd64",
+"-t",
+dockerImage,
+"."
+], {
+cwd: ctx.projectRoot,
+stdio: "pipe"
+});
 } catch (error) {
-throw new Error("Failed to build docker image", {
+throw new Error("Failed to build docker image", {
+cause: error
+});
 }
 }
+__name(buildDockerImage, "buildDockerImage");
 
 // src/docker/login-to-docker-registry.ts
 import { execa as execa2 } from "execa";
 async function loginToDockerRegistry(authConfig) {
 try {
-await execa2(
-"
-
-
-
-
-
-
-
-{ input: authConfig.password }
-);
+await execa2("docker", [
+"login",
+authConfig.registry,
+"--username",
+authConfig.username,
+"--password-stdin"
+], {
+input: authConfig.password
+});
 } catch (error) {
-throw new Error("Failed to login to docker registry", {
+throw new Error("Failed to login to docker registry", {
+cause: error
+});
 }
 }
+__name(loginToDockerRegistry, "loginToDockerRegistry");
 
 // src/docker/push-docker-image.ts
 import { execa as execa3 } from "execa";
 async function pushDockerImage(dockerImage, pushRef) {
 try {
-await execa3("docker", [
-
+await execa3("docker", [
+"tag",
+dockerImage,
+pushRef
+]);
+await execa3("docker", [
+"push",
+pushRef
+]);
 } catch (error) {
-throw new Error("Failed to push docker image", {
+throw new Error("Failed to push docker image", {
+cause: error
+});
 }
 }
+__name(pushDockerImage, "pushDockerImage");
 
 // src/utils/get-docker-image.ts
 import slugify from "slugify";
 function getDockerImage(ctx) {
-const sluggedName = slugify(ctx.config.name, {
+const sluggedName = slugify(ctx.config.name, {
+lower: true
+});
 const dockerImage = `pd4castr/${sluggedName}-local:${Date.now()}`;
 return dockerImage;
 }
+__name(getDockerImage, "getDockerImage");
 
 // src/utils/get-model-config-from-project-config.ts
 import fs8 from "fs/promises";
 import path9 from "path";
 async function getModelConfigFromProjectConfig(ctx) {
 const inputs = await getInputsWithInlinedSQL(ctx);
+const sensitivities = await getSensitivitiesWithInlinedSQL(ctx);
 const { $$id, $$modelGroupID, $$revision, $$dockerImage, ...config } = ctx.config;
 return {
 ...config,
@@ -750,10 +805,14 @@ async function getModelConfigFromProjectConfig(ctx) {
 modelGroupId: $$modelGroupID,
 revision: $$revision ?? 0,
 dockerImage: $$dockerImage,
-inputs
+inputs,
+sensitivities
 };
 }
-
+__name(getModelConfigFromProjectConfig, "getModelConfigFromProjectConfig");
+var FETCHERS_WITH_SQL = /* @__PURE__ */ new Set([
+"AEMO_MMS"
+]);
 async function getInputsWithInlinedSQL(ctx) {
 const inputsWithSQL = [];
 for (const input2 of ctx.config.inputs) {
@@ -761,14 +820,8 @@ async function getInputsWithInlinedSQL(ctx) {
 inputsWithSQL.push(input2);
 continue;
 }
-const fetchQueryPath = path9.resolve(
-
-input2.fetcher.config.fetchQuery
-);
-const checkQueryPath = path9.resolve(
-ctx.projectRoot,
-input2.fetcher.config.checkQuery
-);
+const fetchQueryPath = path9.resolve(ctx.projectRoot, input2.fetcher.config.fetchQuery);
+const checkQueryPath = path9.resolve(ctx.projectRoot, input2.fetcher.config.checkQuery);
 const [fetchQuerySQL, checkQuerySQL] = await Promise.all([
 fs8.readFile(fetchQueryPath, "utf8"),
 fs8.readFile(checkQueryPath, "utf8")
@@ -788,11 +841,31 @@ async function getInputsWithInlinedSQL(ctx) {
 }
 return inputsWithSQL;
 }
+__name(getInputsWithInlinedSQL, "getInputsWithInlinedSQL");
+async function getSensitivitiesWithInlinedSQL(ctx) {
+const sensitivitiesWithSQL = [];
+const sensitivities = ctx.config.sensitivities ?? [];
+for (const sensitivity of sensitivities) {
+const queryPath = path9.resolve(ctx.projectRoot, sensitivity.query);
+try {
+const sql = await fs8.readFile(queryPath, "utf8");
+sensitivitiesWithSQL.push({
+...sensitivity,
+query: sql
+});
+} catch {
+throw new Error(`Sensitivity query file not found (${sensitivity.query})`);
+}
+}
+return sensitivitiesWithSQL;
+}
+__name(getSensitivitiesWithInlinedSQL, "getSensitivitiesWithInlinedSQL");
 
 // src/utils/log-empty-line.ts
 function logEmptyLine() {
 console.log("");
 }
+__name(logEmptyLine, "logEmptyLine");
 
 // src/commands/publish/constants.ts
 import chalk from "chalk";
@@ -811,20 +884,24 @@ function getModelSummaryLines(ctx) {
 ` ${chalk2.bold("Forecast variable:")} ${ctx.config.forecastVariable}`,
 ` ${chalk2.bold("Time horizon:")} ${ctx.config.timeHorizon}`,
 ` ${chalk2.bold("Inputs:")}`,
-...ctx.config.inputs.map(
-(input2) => ` \u2022 ${input2.key} - ${getInputType(input2)}`
-),
+...ctx.config.inputs.map((input2) => ` \u2022 ${input2.key} - ${getInputType(input2)}`),
 ` ${chalk2.bold("Outputs:")}`,
 ...ctx.config.outputs.map((output) => ` \u2022 ${output.name} - ${output.type}`),
+...ctx.config.sensitivities.length > 0 ? [
+` ${chalk2.bold("Sensitivities:")}`,
+...ctx.config.sensitivities.map((s) => ` \u2022 ${s.name}`)
+] : [],
 ""
 ];
 }
+__name(getModelSummaryLines, "getModelSummaryLines");
 function getInputType(input2) {
 if (input2.fetcher) {
 return input2.fetcher.type;
 }
 return "static";
 }
+__name(getInputType, "getInputType");
 
 // src/docker/run-model-container.ts
 import os3 from "os";
@@ -837,17 +914,19 @@ function getInputEnv(modelInput, webserverURL) {
 const inputFileURL = `${webserverURL}/input/${filename}`;
 return `INPUT_${variableName}_URL=${inputFileURL}`;
 }
+__name(getInputEnv, "getInputEnv");
 
 // src/docker/run-model-container.ts
 async function runModelContainer(dockerImage, webserverPort, ctx) {
 const env = getEnv();
 const webserverHostname = env.isWSL ? DOCKER_HOSTNAME_WSL : DOCKER_HOSTNAME_DEFAULT;
 const webserverURL = `http://${webserverHostname}:${webserverPort}`;
-const inputEnvs = ctx.config.inputs.map(
-(input2) => getInputEnv(input2, webserverURL)
-);
+const inputEnvs = ctx.config.inputs.map((input2) => getInputEnv(input2, webserverURL));
 const outputEnv = `OUTPUT_URL=${webserverURL}/output`;
-const envs = [
+const envs = [
+...inputEnvs,
+outputEnv
+];
 try {
 const extraRunArgs = [];
 if (env.isWSL) {
@@ -858,7 +937,10 @@ async function runModelContainer(dockerImage, webserverPort, ctx) {
 "run",
 "--rm",
 ...extraRunArgs,
-...envs.flatMap((env2) => [
+...envs.flatMap((env2) => [
+"--env",
+env2
+]),
 dockerImage
 ];
 await execa4("docker", args, {
@@ -866,20 +948,22 @@ async function runModelContainer(dockerImage, webserverPort, ctx) {
 stdio: "pipe"
 });
 } catch (error) {
-throw new Error("Failed to run model container", {
+throw new Error("Failed to run model container", {
+cause: error
+});
 }
 }
+__name(runModelContainer, "runModelContainer");
 function getWSLMachineIP() {
 const env = getEnv();
 const interfaces = os3.networkInterfaces();
 const interfaceInfo = interfaces[env.wslNetworkInterface]?.[0];
 if (!interfaceInfo) {
-throw new Error(
-`WSL machine IP not found for interface \`${env.wslNetworkInterface}\``
-);
+throw new Error(`WSL machine IP not found for interface \`${env.wslNetworkInterface}\``);
 }
 return interfaceInfo.address;
 }
+__name(getWSLMachineIP, "getWSLMachineIP");
 
 // src/model-io-checks/setup-model-io-checks.ts
 import path12 from "path";
@@ -887,6 +971,9 @@ import express from "express";
 
 // src/model-io-checks/model-io-checks.ts
 var ModelIOChecks = class {
+static {
+__name(this, "ModelIOChecks");
+}
 inputsToDownload;
 outputUploaded;
 constructor(data) {
@@ -923,17 +1010,16 @@ import path10 from "path";
 function createInputHandler(inputFilesPath, modelIOChecks, ctx) {
 return (req, res) => {
 if (!modelIOChecks.isValidInput(req.params.filename)) {
-return res.status(404).json({
+return res.status(404).json({
+error: "File not found"
+});
 }
 modelIOChecks.trackInputHandled(req.params.filename);
-const filePath = path10.join(
-ctx.projectRoot,
-inputFilesPath,
-req.params.filename
-);
+const filePath = path10.join(ctx.projectRoot, inputFilesPath, req.params.filename);
 return res.sendFile(filePath);
 };
 }
+__name(createInputHandler, "createInputHandler");
 
 // src/model-io-checks/utils/create-output-handler.ts
 import fs9 from "fs/promises";
@@ -942,17 +1028,24 @@ function createOutputHandler(modelIOChecks, ctx) {
 return async (req, res) => {
 modelIOChecks.trackOutputHandled();
 const outputPath = path11.join(ctx.projectRoot, TEST_OUTPUT_DATA_DIR);
-await fs9.mkdir(outputPath, {
+await fs9.mkdir(outputPath, {
+recursive: true
+});
 const outputFilePath = path11.join(outputPath, TEST_OUTPUT_FILENAME);
 const outputData = JSON.stringify(req.body, null, 2);
 await fs9.writeFile(outputFilePath, outputData, "utf8");
-return res.status(200).json({
+return res.status(200).json({
+success: true
+});
 };
 }
+__name(createOutputHandler, "createOutputHandler");
 
 // src/model-io-checks/setup-model-io-checks.ts
 function setupModelIOChecks(app, inputDir, inputFiles, ctx) {
-const modelIOChecks = new ModelIOChecks({
+const modelIOChecks = new ModelIOChecks({
+inputFiles
+});
 const handleInput = createInputHandler(inputDir, modelIOChecks, ctx);
 const handleOutput = createOutputHandler(modelIOChecks, ctx);
 const inputPath = path12.join(ctx.projectRoot, inputDir);
@@ -962,6 +1055,7 @@ function setupModelIOChecks(app, inputDir, inputFiles, ctx) {
 app.put("/output", handleOutput);
 return modelIOChecks;
 }
+__name(setupModelIOChecks, "setupModelIOChecks");
 
 // src/utils/check-input-files.ts
 import path13 from "path";
@@ -970,37 +1064,31 @@ async function checkInputFiles(inputFiles, inputDataPath, ctx) {
 const filePath = path13.join(ctx.projectRoot, inputDataPath, inputFile);
 const exists = await isExistingPath(filePath);
 if (!exists) {
-throw new Error(
-`Input data not found (${inputFile}) - did you need to run \`pd4castr fetch\`?`
-);
+throw new Error(`Input data not found (${inputFile}) - did you need to run \`pd4castr fetch\`?`);
 }
 }
 }
+__name(checkInputFiles, "checkInputFiles");
 
 // src/utils/get-input-files.ts
 function getInputFiles(config) {
 const inputFiles = config.inputs.map((input2) => getInputFilename(input2));
 return inputFiles;
 }
+__name(getInputFiles, "getInputFiles");
 
 // src/commands/publish/utils/run-model-io-tests.ts
 async function runModelIOTests(dockerImage, options, app, ctx) {
 const inputFiles = getInputFiles(ctx.config);
 await checkInputFiles(inputFiles, options.inputDir, ctx);
 await buildDockerImage(dockerImage, ctx);
-const modelIOChecks = setupModelIOChecks(
-app,
-options.inputDir,
-inputFiles,
-ctx
-);
+const modelIOChecks = setupModelIOChecks(app, options.inputDir, inputFiles, ctx);
 await runModelContainer(dockerImage, options.port, ctx);
 if (!modelIOChecks.isInputsHandled() || !modelIOChecks.isOutputHandled()) {
-throw new Error(
-"Model I/O test failed. Please run `pd4castr test` to debug the issue."
-);
+throw new Error("Model I/O test failed. Please run `pd4castr test` to debug the issue.");
 }
 }
+__name(runModelIOTests, "runModelIOTests");
 
 // src/commands/publish/handle-create-model-flow.ts
 async function handleCreateModelFlow(options, app, spinner, ctx, authCtx) {
@@ -1032,9 +1120,7 @@ async function handleCreateModelFlow(options, app, spinner, ctx, authCtx) {
 config.displayTimezone = model.displayTimezone;
 }, options.config);
 spinner.succeed("Model data published successfully");
-spinner.start(
-"Pushing model image to registry - this may take a few minutes..."
-);
+spinner.start("Pushing model image to registry - this may take a few minutes...");
 const pushCredentials = await getRegistryPushCredentials(model.id, authCtx);
 await loginToDockerRegistry(pushCredentials);
 await buildDockerImage(dockerImage, ctx);
@@ -1061,6 +1147,7 @@ async function handleCreateModelFlow(options, app, spinner, ctx, authCtx) {
 console.log(MODEL_RUN_TRIGGER_MESSAGE);
 }
 }
+__name(handleCreateModelFlow, "handleCreateModelFlow");
 
 // src/commands/publish/handle-update-existing-model-flow.ts
 import * as inquirer5 from "@inquirer/prompts";
@@ -1075,26 +1162,28 @@ import invariant2 from "tiny-invariant";
 
 // src/api/get-model.ts
 async function getModel(id, authCtx) {
-const headers = {
-
+const headers = {
+Authorization: `Bearer ${authCtx.accessToken}`
+};
+const result = await api.get(`model/${id}`, {
+headers
+}).json();
 return result;
 }
+__name(getModel, "getModel");
 
 // src/commands/publish/utils/validate-local-model-state.ts
 async function validateLocalModelState(ctx, authCtx) {
 invariant2(ctx.config.$$id, "model ID is required to fetch published model");
 const currentModel = await getModel(ctx.config.$$id, authCtx);
 if (currentModel.revision !== ctx.config.$$revision) {
-throw new Error(
-`OUT OF SYNC: Local revision (${ctx.config.$$revision}) does not match the current published revision (${currentModel.revision})`
-);
+throw new Error(`OUT OF SYNC: Local revision (${ctx.config.$$revision}) does not match the current published revision (${currentModel.revision})`);
 }
 if (currentModel.modelGroupId !== ctx.config.$$modelGroupID) {
-throw new Error(
-`OUT OF SYNC: Local model group ID (${ctx.config.$$modelGroupID}) does not match the current published model group ID (${currentModel.modelGroupId})`
-);
+throw new Error(`OUT OF SYNC: Local model group ID (${ctx.config.$$modelGroupID}) does not match the current published model group ID (${currentModel.modelGroupId})`);
 }
 }
+__name(validateLocalModelState, "validateLocalModelState");
 
 // src/commands/publish/handle-model-revision-create-flow.ts
 var WARNING_LABEL = chalk4.yellowBright.bold("WARNING!");
@@ -1130,9 +1219,7 @@ async function handleModelRevisionCreateFlow(options, app, spinner, ctx, authCtx
 config.$$dockerImage = model.dockerImage;
 }, options.config);
 spinner.succeed("Model revision data published successfully");
-spinner.start(
-"Pushing new model revision image to registry - this may take a few minutes..."
-);
+spinner.start("Pushing new model revision image to registry - this may take a few minutes...");
 const pushCredentials = await getRegistryPushCredentials(model.id, authCtx);
 await loginToDockerRegistry(pushCredentials);
 await buildDockerImage(dockerImage, ctx);
@@ -1153,14 +1240,13 @@ async function handleModelRevisionCreateFlow(options, app, spinner, ctx, authCtx
 symbol: "\u{1F680} ",
 prefixText: "\n",
 suffixText: "\n",
-text: chalk4.bold(
-`New model revision (r${model.revision}) published successfully`
-)
+text: chalk4.bold(`New model revision (r${model.revision}) published successfully`)
 });
 if (!modelRunTriggered && !options.skipTrigger) {
 console.log(MODEL_RUN_TRIGGER_MESSAGE);
 }
 }
+__name(handleModelRevisionCreateFlow, "handleModelRevisionCreateFlow");
 
 // src/commands/publish/handle-model-revision-update-flow.ts
 import * as inquirer4 from "@inquirer/prompts";
@@ -1168,10 +1254,16 @@ import chalk5 from "chalk";
 
 // src/api/update-model.ts
 async function updateModel(config, authCtx) {
-const headers = {
-
+const headers = {
+Authorization: `Bearer ${authCtx.accessToken}`
+};
+const result = await api.patch(`model/${config.id}`, {
+headers,
+json: config
+}).json();
 return result;
 }
+__name(updateModel, "updateModel");
 
 // src/commands/publish/handle-model-revision-update-flow.ts
 var WARNING_LABEL2 = chalk5.yellowBright.bold("WARNING!");
@@ -1208,9 +1300,7 @@ async function handleModelRevisionUpdateFlow(options, app, spinner, ctx, authCtx
 config.displayTimezone = model.displayTimezone;
 }, options.config);
 spinner.succeed("Model revision data updated successfully");
-spinner.start(
-"Pushing updated model image to registry - this may take a few minutes..."
-);
+spinner.start("Pushing updated model image to registry - this may take a few minutes...");
 const pushCredentials = await getRegistryPushCredentials(model.id, authCtx);
 await loginToDockerRegistry(pushCredentials);
 await buildDockerImage(dockerImage, ctx);
@@ -1237,6 +1327,7 @@ async function handleModelRevisionUpdateFlow(options, app, spinner, ctx, authCtx
 console.log(MODEL_RUN_TRIGGER_MESSAGE);
 }
 }
+__name(handleModelRevisionUpdateFlow, "handleModelRevisionUpdateFlow");
 
 // src/commands/publish/handle-update-existing-model-flow.ts
 async function handleUpdateExistingModelFlow(options, app, spinner, ctx, authCtx) {
@@ -1248,34 +1339,113 @@ async function handleUpdateExistingModelFlow(options, app, spinner, ctx, authCtx
 message: "Do you want to update the existing revision or create a new one?",
 choices: [
 {
-value: "new"
+value: "new",
 name: `New Revision (r${revision} \u2192 r${revision + 1})`
 },
 {
-value: "update"
+value: "update",
 name: `Update Existing Revision (r${revision})`
 }
 ]
 });
 logEmptyLine();
-if (action === "new"
+if (action === "new") {
 await handleModelRevisionCreateFlow(options, app, spinner, ctx, authCtx);
-} else if (action === "update"
+} else if (action === "update") {
 await handleModelRevisionUpdateFlow(options, app, spinner, ctx, authCtx);
 } else {
 throw new Error("Invalid CLI state");
 }
 }
+__name(handleUpdateExistingModelFlow, "handleUpdateExistingModelFlow");
 
 // src/commands/publish/handle-action.ts
+function _ts_add_disposable_resource(env, value, async) {
+if (value !== null && value !== void 0) {
+if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
+var dispose, inner;
+if (async) {
+if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
+dispose = value[Symbol.asyncDispose];
+}
+if (dispose === void 0) {
+if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
+dispose = value[Symbol.dispose];
+if (async) inner = dispose;
+}
+if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
+if (inner) dispose = /* @__PURE__ */ __name(function() {
+try {
+inner.call(this);
+} catch (e) {
+return Promise.reject(e);
+}
+}, "dispose");
+env.stack.push({
+value,
+dispose,
+async
+});
+} else if (async) {
+env.stack.push({
+async: true
+});
+}
+return value;
+}
+__name(_ts_add_disposable_resource, "_ts_add_disposable_resource");
+function _ts_dispose_resources(env) {
+var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error, suppressed, message) {
+var e = new Error(message);
+return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
+};
+return (_ts_dispose_resources = /* @__PURE__ */ __name(function _ts_dispose_resources3(env2) {
+function fail(e) {
+env2.error = env2.hasError ? new _SuppressedError(e, env2.error, "An error was suppressed during disposal.") : e;
+env2.hasError = true;
+}
+__name(fail, "fail");
+var r, s = 0;
+function next() {
+while (r = env2.stack.pop()) {
+try {
+if (!r.async && s === 1) return s = 0, env2.stack.push(r), Promise.resolve().then(next);
+if (r.dispose) {
+var result = r.dispose.call(r.value);
+if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) {
+fail(e);
+return next();
+});
+} else s |= 1;
+} catch (e) {
+fail(e);
+}
+}
+if (s === 1) return env2.hasError ? Promise.reject(env2.error) : Promise.resolve();
+if (env2.hasError) throw env2.error;
+}
+__name(next, "next");
+return next();
+}, "_ts_dispose_resources"))(env);
+}
+__name(_ts_dispose_resources, "_ts_dispose_resources");
 async function handleAction5(options) {
-
+const env = {
+stack: [],
+error: void 0,
+hasError: false
+};
 try {
 const spinner = ora5("Starting model publish...").start();
 const app = express2();
-app.use(express2.json({
-
-
+app.use(express2.json({
+limit: "100mb"
+}));
+app.use(express2.urlencoded({
+limit: "100mb",
+extended: true
+}));
+const webServer = _ts_add_disposable_resource(env, await startWebServer(app, options.port), false);
 try {
 const ctx = await loadProjectContext(options.config);
 const authCtx = await getAuth();
@@ -1297,25 +1467,20 @@ async function handleAction5(options) {
 }
 process.exit(1);
 }
-} catch (
-
+} catch (e) {
+env.error = e;
+env.hasError = true;
 } finally {
-
+_ts_dispose_resources(env);
 }
 }
+__name(handleAction5, "handleAction");
 
 // src/commands/publish/index.ts
 function registerPublishCommand(program2) {
-program2.command("publish").description("Publishes a pd4castr model.").option(
-"-i, --input-dir <path>",
-"The input test data directory",
-TEST_INPUT_DATA_DIR
-).option(
-"-p, --port <port>",
-"The port to run the IO testing webserver on",
-TEST_WEBSERVER_PORT.toString()
-).option("--sc, --skip-checks", "Skip the model I/O checks", false).option("--st, --skip-trigger", "Skip the model trigger", false).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction5);
+program2.command("publish").description("Publishes a pd4castr model.").option("-i, --input-dir <path>", "The input test data directory", TEST_INPUT_DATA_DIR).option("-p, --port <port>", "The port to run the IO testing webserver on", TEST_WEBSERVER_PORT.toString()).option("--sc, --skip-checks", "Skip the model I/O checks", false).option("--st, --skip-trigger", "Skip the model trigger", false).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction5);
 }
+__name(registerPublishCommand, "registerPublishCommand");
 
 // src/commands/test/handle-action.ts
 import path14 from "path";
@@ -1323,14 +1488,92 @@ import { ExecaError as ExecaError6 } from "execa";
|
|
|
1323
1488
|
import express3 from "express";
|
|
1324
1489
|
import ora6 from "ora";
|
|
1325
1490
|
import { ZodError as ZodError6 } from "zod";
|
|
1491
|
+
function _ts_add_disposable_resource2(env, value, async) {
|
|
1492
|
+
if (value !== null && value !== void 0) {
|
|
1493
|
+
if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
|
|
1494
|
+
var dispose, inner;
|
|
1495
|
+
if (async) {
|
|
1496
|
+
if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
|
|
1497
|
+
dispose = value[Symbol.asyncDispose];
|
|
1498
|
+
}
|
|
1499
|
+
if (dispose === void 0) {
|
|
1500
|
+
if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
|
|
1501
|
+
dispose = value[Symbol.dispose];
|
|
1502
|
+
if (async) inner = dispose;
|
|
1503
|
+
}
|
|
1504
|
+
if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
|
|
1505
|
+
if (inner) dispose = /* @__PURE__ */ __name(function() {
|
|
1506
|
+
try {
|
|
1507
|
+
inner.call(this);
|
|
1508
|
+
} catch (e) {
|
|
1509
|
+
return Promise.reject(e);
|
|
1510
|
+
}
|
|
1511
|
+
}, "dispose");
|
|
1512
|
+
env.stack.push({
|
|
1513
|
+
value,
|
|
1514
|
+
dispose,
|
|
1515
|
+
async
|
|
1516
|
+
});
|
|
1517
|
+
} else if (async) {
|
|
1518
|
+
env.stack.push({
|
|
1519
|
+
async: true
|
|
1520
|
+
});
|
|
1521
|
+
}
|
|
1522
|
+
return value;
|
|
1523
|
+
}
|
|
1524
|
+
__name(_ts_add_disposable_resource2, "_ts_add_disposable_resource");
|
|
1525
|
+
function _ts_dispose_resources2(env) {
|
|
1526
|
+
var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error, suppressed, message) {
|
|
1527
|
+
var e = new Error(message);
|
|
1528
|
+
return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
|
|
1529
|
+
};
|
|
1530
|
+
return (_ts_dispose_resources2 = /* @__PURE__ */ __name(function _ts_dispose_resources3(env2) {
|
|
1531
|
+
function fail(e) {
|
|
1532
|
+
env2.error = env2.hasError ? new _SuppressedError(e, env2.error, "An error was suppressed during disposal.") : e;
|
|
1533
|
+
env2.hasError = true;
|
|
1534
|
+
}
|
|
1535
|
+
__name(fail, "fail");
|
|
1536
|
+
var r, s = 0;
|
|
1537
|
+
function next() {
|
|
1538
|
+
while (r = env2.stack.pop()) {
|
|
1539
|
+
try {
|
|
1540
|
+
if (!r.async && s === 1) return s = 0, env2.stack.push(r), Promise.resolve().then(next);
|
|
1541
|
+
if (r.dispose) {
|
|
1542
|
+
var result = r.dispose.call(r.value);
|
|
1543
|
+
if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) {
|
|
1544
|
+
fail(e);
|
|
1545
|
+
return next();
|
|
1546
|
+
});
|
|
1547
|
+
} else s |= 1;
|
|
1548
|
+
} catch (e) {
|
|
1549
|
+
fail(e);
|
|
1550
|
+
}
|
|
1551
|
+
}
|
|
1552
|
+
if (s === 1) return env2.hasError ? Promise.reject(env2.error) : Promise.resolve();
|
|
1553
|
+
if (env2.hasError) throw env2.error;
|
|
1554
|
+
}
|
|
1555
|
+
__name(next, "next");
|
|
1556
|
+
return next();
|
|
1557
|
+
}, "_ts_dispose_resources"))(env);
|
|
1558
|
+
}
|
|
1559
|
+
__name(_ts_dispose_resources2, "_ts_dispose_resources");
|
|
1326
1560
|
async function handleAction6(options) {
|
|
1327
|
-
|
|
1561
|
+
const env = {
|
|
1562
|
+
stack: [],
|
|
1563
|
+
error: void 0,
|
|
1564
|
+
hasError: false
|
|
1565
|
+
};
|
|
1328
1566
|
try {
|
|
1329
1567
|
const spinner = ora6("Starting model tests...").info();
|
|
1330
1568
|
const app = express3();
|
|
1331
|
-
app.use(express3.json({
|
|
1332
|
-
|
|
1333
|
-
|
|
1569
|
+
app.use(express3.json({
|
|
1570
|
+
limit: "100mb"
|
|
1571
|
+
}));
|
|
1572
|
+
app.use(express3.urlencoded({
|
|
1573
|
+
limit: "100mb",
|
|
1574
|
+
extended: true
|
|
1575
|
+
}));
|
|
1576
|
+
const webServer = _ts_add_disposable_resource2(env, await startWebServer(app, options.port), false);
|
|
1334
1577
|
try {
|
|
1335
1578
|
const ctx = await loadProjectContext(options.config);
|
|
1336
1579
|
const inputFiles = getInputFiles(ctx.config);
|
|
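
For context on the hunk above: the new _ts_add_disposable_resource2 / _ts_dispose_resources2 functions are compiler-emitted helpers for TC39 explicit resource management, i.e. the down-levelled form of a `using` declaration, and the `env` stack added to handleAction6 is the bookkeeping those helpers need. A minimal sketch of the kind of source that compiles to this shape, assuming TypeScript 5.2+ with the esnext.disposable lib and a runtime that defines Symbol.dispose (identifiers below are illustrative, not taken from this package):

// Illustrative only - not code from @pd4castr/cli.
interface ServerHandle extends Disposable {
  port: number;
}

function startServer(port: number): ServerHandle {
  return {
    port,
    // Registered on the disposal stack by the `using` declaration below.
    [Symbol.dispose]() {
      console.log(`server on port ${port} closed`);
    },
  };
}

function runChecks(): void {
  using server = startServer(3000); // pushed onto the disposal stack
  console.log(`running checks against port ${server.port}`);
} // server[Symbol.dispose]() runs here, even if an error was thrown

runChecks();

This mirrors what handleAction6 now does with the web server handle: the try/finally plus _ts_dispose_resources2(env) means the server handle is disposed on both the success and the error path.
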
@@ -1341,12 +1584,7 @@ async function handleAction6(options) {
1341 1584   const dockerImage = getDockerImage(ctx);
1342 1585   await buildDockerImage(dockerImage, ctx);
1343 1586   spinner.succeed(`Built docker image (${dockerImage})`);
1344      - const modelIOChecks = setupModelIOChecks(
1345      - app,
1346      - options.inputDir,
1347      - inputFiles,
1348      - ctx
1349      - );
     1587 + const modelIOChecks = setupModelIOChecks(app, options.inputDir, inputFiles, ctx);
1350 1588   spinner.start("Running model container");
1351 1589   await runModelContainer(dockerImage, options.port, ctx);
1352 1590   spinner.succeed("Model run complete");

@@ -1362,11 +1600,7 @@ async function handleAction6(options) {
1362 1600   throw new Error("Model I/O test failed");
1363 1601   }
1364 1602   if (modelIOChecks.isOutputHandled()) {
1365      - const outputPath = path14.join(
1366      - ctx.projectRoot,
1367      - TEST_OUTPUT_DATA_DIR,
1368      - TEST_OUTPUT_FILENAME
1369      - );
     1603 + const outputPath = path14.join(ctx.projectRoot, TEST_OUTPUT_DATA_DIR, TEST_OUTPUT_FILENAME);
1370 1604   const clickHereLink = createLink("Click here", `file://${outputPath}`);
1371 1605   const fileLink = createLink(TEST_OUTPUT_FILENAME, `file://${outputPath}`);
1372 1606   console.log(`
@@ -1387,27 +1621,20 @@ ${clickHereLink} to view output (${fileLink})
1387 1621   }
1388 1622   process.exit(1);
1389 1623   }
1390      - } catch (
1391      -
     1624 + } catch (e) {
     1625 + env.error = e;
     1626 + env.hasError = true;
1392 1627   } finally {
1393      -
     1628 + _ts_dispose_resources2(env);
1394 1629   }
1395 1630   }
     1631 + __name(handleAction6, "handleAction");
1396 1632
1397 1633   // src/commands/test/index.ts
1398 1634   function registerTestCommand(program2) {
1399      - program2.command("test").description(
1400      - "Test a model by verifying input and output is handled correctly."
1401      - ).option(
1402      - "-i, --input-dir <path>",
1403      - "The input test data directory",
1404      - TEST_INPUT_DATA_DIR
1405      - ).option(
1406      - "-p, --port <port>",
1407      - "The port to run the IO testing webserver on",
1408      - TEST_WEBSERVER_PORT.toString()
1409      - ).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction6);
     1635 + program2.command("test").description("Test a model by verifying input and output is handled correctly.").option("-i, --input-dir <path>", "The input test data directory", TEST_INPUT_DATA_DIR).option("-p, --port <port>", "The port to run the IO testing webserver on", TEST_WEBSERVER_PORT.toString()).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction6);
1410 1636   }
     1637 + __name(registerTestCommand, "registerTestCommand");
1411 1638
1412 1639   // src/program.ts
1413 1640   import { Command } from "commander";
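
The reformatted "test" and "publish" registrations above are single-expression commander chains: each .option() takes a flag specification, a help string and a default value, and .action() receives the parsed options. A minimal sketch of the same pattern, assuming commander 14; the default values here are placeholders standing in for the package's TEST_INPUT_DATA_DIR, TEST_WEBSERVER_PORT and PROJECT_CONFIG_FILE constants:

// Illustrative only - not code from @pd4castr/cli.
import { Command } from "commander";

const program = new Command();

program
  .command("test")
  .description("Test a model by verifying input and output is handled correctly.")
  .option("-i, --input-dir <path>", "The input test data directory", "./test-data/input")
  .option("-p, --port <port>", "The port to run the IO testing webserver on", "3000")
  .option("-c, --config <path>", "Path to config file", "pd4castr.json")
  .action(async (options: { inputDir: string; port: string; config: string }) => {
    // commander camel-cases multi-word flags, so --input-dir arrives as options.inputDir;
    // option values are plain strings unless a parser callback is supplied to .option().
    console.log(`testing ${options.inputDir} on port ${options.port} using ${options.config}`);
  });

program.parse(process.argv);
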
@@ -1415,8 +1642,9 @@ import { Command } from "commander";
1415 1642   // package.json
1416 1643   var package_default = {
1417 1644   name: "@pd4castr/cli",
1418      - version: "1.
     1645 + version: "1.3.0",
1419 1646   description: "CLI tool for creating, testing, and publishing pd4castr models",
     1647 + license: "UNLICENSED",
1420 1648   main: "dist/index.js",
1421 1649   type: "module",
1422 1650   bin: {

@@ -1425,77 +1653,58 @@ var package_default = {
1425 1653   files: [
1426 1654   "dist/**/*"
1427 1655   ],
     1656 + engines: {
     1657 + node: ">=20.0.0"
     1658 + },
1428 1659   scripts: {
1429 1660   build: "tsup",
1430 1661   dev: "tsup --watch",
1431 1662   cli: "node dist/index.js",
1432      -
1433      - "test:watch": "vitest",
1434      - "test:coverage": "vitest run --coverage",
1435      - lint: "eslint .",
1436      - "lint:fix": "eslint . --fix",
     1663 + release: "semantic-release -e semantic-release-monorepo",
1437 1664   format: "prettier --write .",
1438      -
     1665 + lint: "eslint .",
1439 1666   typecheck: "tsc --noEmit",
1440 1667   prepublishOnly: "yarn build"
1441 1668   },
1442      -
1443      - "
1444      - "
1445      -
1446      -
1447      -
1448      -
1449      -
1450      -
1451      -
1452      -
     1669 + dependencies: {
     1670 + "@inquirer/prompts": "7.7.1",
     1671 + auth0: "4.28.0",
     1672 + chalk: "5.6.0",
     1673 + commander: "14.0.0",
     1674 + execa: "9.6.0",
     1675 + express: "4.21.2",
     1676 + immer: "10.1.1",
     1677 + ky: "1.8.2",
     1678 + ora: "8.2.0",
     1679 + slugify: "1.6.6",
     1680 + tiged: "2.12.7",
     1681 + "tiny-invariant": "1.3.3",
     1682 + zod: "4.0.14"
1453 1683   },
1454      - homepage: "https://github.com/pipelabs/pd4castr-cli#readme",
1455 1684   devDependencies: {
1456 1685   "@faker-js/faker": "10.0.0",
1457 1686   "@mswjs/data": "0.16.2",
1458 1687   "@types/express": "4.17.21",
1459 1688   "@types/node": "24.1.0",
1460 1689   "@types/supertest": "6.0.3",
1461      - "@typescript-eslint/eslint-plugin": "8.38.0",
1462      - "@typescript-eslint/parser": "8.38.0",
1463      - eslint: "9.32.0",
1464      - "eslint-config-prettier": "10.1.8",
1465      - "eslint-plugin-simple-import-sort": "12.1.1",
1466      - "eslint-plugin-unicorn": "60.0.0",
1467      - "eslint-plugin-vitest": "0.5.4",
1468 1690   "hook-std": "3.0.0",
1469 1691   "jest-extended": "6.0.0",
1470      - memfs: "4.
1471      - msw: "2.10.
1472      - prettier: "3.6.2",
     1692 + memfs: "4.49.0",
     1693 + msw: "2.10.5",
1473 1694   "strip-ansi": "7.1.0",
1474 1695   supertest: "7.1.4",
1475 1696   tsup: "8.5.0",
1476 1697   "type-fest": "4.41.0",
1477 1698   typescript: "5.8.3",
1478      - "typescript-eslint": "8.38.0",
1479 1699   vitest: "3.2.4"
1480 1700   },
1481      -
1482      -
1483      - auth0: "4.27.0",
1484      - chalk: "5.6.0",
1485      - commander: "14.0.0",
1486      - execa: "9.6.0",
1487      - express: "4.21.2",
1488      - immer: "10.1.1",
1489      - ky: "1.8.2",
1490      - ora: "8.2.0",
1491      - slugify: "1.6.6",
1492      - tiged: "2.12.7",
1493      - "tiny-invariant": "1.3.3",
1494      - zod: "4.0.14"
     1701 + publishConfig: {
     1702 + access: "public"
1495 1703   },
1496      -
1497      -
1498      -
     1704 + keywords: [
     1705 + "cli",
     1706 + "pd4castr"
     1707 + ]
1499 1708   };
1500 1709
1501 1710   // src/program.ts
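
The bundled package.json now declares an engines constraint of node >= 20.0.0 alongside the new license, dependencies, publishConfig and keywords fields. npm only warns on an engines mismatch unless engine-strict is enabled, so a CLI that wants to enforce the constraint would typically guard at run time; a hedged sketch of such a guard (illustrative only, not something this package is known to do):

// Illustrative only - not code from @pd4castr/cli.
const [major] = process.versions.node.split(".").map(Number);
if (major === undefined || major < 20) {
  console.error(`@pd4castr/cli requires Node.js >= 20.0.0, found ${process.versions.node}`);
  process.exit(1);
}
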