@pd4castr/cli 1.2.0 → 1.4.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +4 -2
  2. package/dist/index.js +546 -289
  3. package/package.json +30 -48
package/dist/index.js CHANGED
@@ -1,49 +1,6 @@
1
1
  #!/usr/bin/env node
2
- var __knownSymbol = (name, symbol) => (symbol = Symbol[name]) ? symbol : Symbol.for("Symbol." + name);
3
- var __typeError = (msg) => {
4
- throw TypeError(msg);
5
- };
6
- var __using = (stack, value, async) => {
7
- if (value != null) {
8
- if (typeof value !== "object" && typeof value !== "function") __typeError("Object expected");
9
- var dispose, inner;
10
- if (async) dispose = value[__knownSymbol("asyncDispose")];
11
- if (dispose === void 0) {
12
- dispose = value[__knownSymbol("dispose")];
13
- if (async) inner = dispose;
14
- }
15
- if (typeof dispose !== "function") __typeError("Object not disposable");
16
- if (inner) dispose = function() {
17
- try {
18
- inner.call(this);
19
- } catch (e) {
20
- return Promise.reject(e);
21
- }
22
- };
23
- stack.push([async, dispose, value]);
24
- } else if (async) {
25
- stack.push([async]);
26
- }
27
- return value;
28
- };
29
- var __callDispose = (stack, error, hasError) => {
30
- var E = typeof SuppressedError === "function" ? SuppressedError : function(e, s, m, _) {
31
- return _ = Error(m), _.name = "SuppressedError", _.error = e, _.suppressed = s, _;
32
- };
33
- var fail = (e) => error = hasError ? new E(e, error, "An error was suppressed during disposal") : (hasError = true, e);
34
- var next = (it) => {
35
- while (it = stack.pop()) {
36
- try {
37
- var result = it[1] && it[1].call(it[2]);
38
- if (it[0]) return Promise.resolve(result).then(next, (e) => (fail(e), next()));
39
- } catch (e) {
40
- fail(e);
41
- }
42
- }
43
- if (hasError) throw error;
44
- };
45
- return next();
46
- };
2
+ var __defProp = Object.defineProperty;
3
+ var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
47
4
 
48
5
  // src/constants.ts
49
6
  var AUTH0_DOMAIN = "pdview.au.auth0.com";
@@ -77,13 +34,21 @@ import { ZodError } from "zod";
77
34
  import { z } from "zod";
78
35
 
79
36
  // src/utils/is-iana-timezone.ts
80
- var timezones = /* @__PURE__ */ new Set([...Intl.supportedValuesOf("timeZone"), "UTC"]);
37
+ var timezones = /* @__PURE__ */ new Set([
38
+ ...Intl.supportedValuesOf("timeZone"),
39
+ "UTC"
40
+ ]);
81
41
  function isIanaTimeZone(value) {
82
42
  return typeof value === "string" && timezones.has(value);
83
43
  }
44
+ __name(isIanaTimeZone, "isIanaTimeZone");
84
45
 
85
46
  // src/schemas/project-config-schema.ts
86
- var fileFormatSchema = z.enum(["csv", "json", "parquet"]);
47
+ var fileFormatSchema = z.enum([
48
+ "csv",
49
+ "json",
50
+ "parquet"
51
+ ]);
87
52
  var aemoDataFetcherSchema = z.object({
88
53
  type: z.literal("AEMO_MMS"),
89
54
  checkInterval: z.number().int().min(60),
@@ -92,30 +57,63 @@ var aemoDataFetcherSchema = z.object({
92
57
  fetchQuery: z.string()
93
58
  })
94
59
  });
95
- var dataFetcherSchema = z.discriminatedUnion("type", [aemoDataFetcherSchema]);
60
+ var dataFetcherSchema = z.discriminatedUnion("type", [
61
+ aemoDataFetcherSchema
62
+ ]);
96
63
  var modelInputSchema = z.object({
97
64
  key: z.string(),
98
65
  inputSource: z.string().optional().default(DEFAULT_INPUT_SOURCE_ID),
99
- trigger: z.enum(["WAIT_FOR_LATEST_FILE", "USE_MOST_RECENT_FILE"]),
66
+ trigger: z.enum([
67
+ "WAIT_FOR_LATEST_FILE",
68
+ "USE_MOST_RECENT_FILE"
69
+ ]),
100
70
  uploadFileFormat: fileFormatSchema.optional().default("json"),
101
71
  targetFileFormat: fileFormatSchema.optional().default("json"),
102
72
  fetcher: dataFetcherSchema.optional().nullable()
103
73
  });
104
74
  var modelOutputSchema = z.object({
105
75
  name: z.string(),
106
- type: z.enum(["float", "integer", "string", "date", "boolean", "unknown"]),
76
+ type: z.enum([
77
+ "float",
78
+ "integer",
79
+ "string",
80
+ "date",
81
+ "boolean",
82
+ "unknown"
83
+ ]),
107
84
  seriesKey: z.boolean(),
108
85
  colour: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional()
109
86
  });
87
+ var sensitivitySchema = z.object({
88
+ name: z.string(),
89
+ query: z.string()
90
+ });
91
+ var inputAggregationSchema = z.object({
92
+ name: z.string(),
93
+ query: z.string(),
94
+ description: z.string().optional().default(""),
95
+ colours: z.array(z.string()).optional().default([])
96
+ });
110
97
  var CONFIG_WARNING_KEY = "// WARNING: DO NOT MODIFY THESE SYSTEM MANAGED VALUES";
111
98
  var projectConfigSchema = z.object({
112
99
  name: z.string(),
113
- forecastVariable: z.enum(["price"]),
114
- timeHorizon: z.enum(["actual", "day_ahead", "week_ahead", "quarterly"]),
100
+ forecastVariable: z.enum([
101
+ "price"
102
+ ]),
103
+ timeHorizon: z.enum([
104
+ "actual",
105
+ "day_ahead",
106
+ "week_ahead",
107
+ "quarterly"
108
+ ]),
115
109
  displayTimezone: z.string().refine(isIanaTimeZone, "invalid IANA time zone").optional().default("Australia/Brisbane"),
116
110
  metadata: z.record(z.string(), z.any()).optional(),
111
+ public: z.boolean().optional().default(false),
112
+ runDatetimeQuery: z.string().optional().nullable().default(null),
117
113
  inputs: z.array(modelInputSchema),
114
+ inputAggregations: z.array(inputAggregationSchema).optional().default([]),
118
115
  outputs: z.array(modelOutputSchema),
116
+ sensitivities: z.array(sensitivitySchema).optional().default([]),
119
117
  [CONFIG_WARNING_KEY]: z.string().optional().default(""),
120
118
  $$id: z.string().nullable().optional().default(null),
121
119
  $$modelGroupID: z.string().nullable().optional().default(null),
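
The hunk above extends projectConfigSchema with four optional fields — public, runDatetimeQuery, inputAggregations, and sensitivities — backed by the new sensitivitySchema and inputAggregationSchema. A minimal sketch of a config fragment that exercises them follows; every name and query path in it is an illustrative assumption, not a value taken from this package.

  // Illustrative TypeScript sketch only - names and query paths are hypothetical.
  // Each new field is optional and defaults to false / null / [] when omitted.
  const exampleConfig = {
    name: "example-model",
    forecastVariable: "price",
    timeHorizon: "day_ahead",
    inputs: [{ key: "demand", trigger: "USE_MOST_RECENT_FILE" }],
    outputs: [{ name: "forecast", type: "float", seriesKey: false }],
    public: false,                                // new in 1.4.0, defaults to false
    runDatetimeQuery: "queries/run-datetime.sql", // new: SQL file inlined at publish time
    inputAggregations: [                          // new: named aggregation queries
      { name: "regional-demand", query: "queries/regional-demand.sql" }
    ],
    sensitivities: [                              // new: named sensitivity queries
      { name: "high-demand", query: "queries/high-demand.sql" }
    ]
  };
  // e.g. projectConfigSchema.parse(exampleConfig) fills in the omitted defaults.
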
@@ -133,6 +131,7 @@ async function isExistingPath(path15) {
133
131
  return false;
134
132
  }
135
133
  }
134
+ __name(isExistingPath, "isExistingPath");
136
135
 
137
136
  // src/config/load-project-context.ts
138
137
  async function loadProjectContext(configPath) {
@@ -140,9 +139,7 @@ async function loadProjectContext(configPath) {
140
139
  const resolvedConfigPath = configPath ? path.resolve(configPath) : path.join(projectRoot, PROJECT_CONFIG_FILE);
141
140
  const configExists = await isExistingPath(resolvedConfigPath);
142
141
  if (!configExists) {
143
- throw new Error(
144
- `No config found at ${resolvedConfigPath} (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md).`
145
- );
142
+ throw new Error(`No config found at ${resolvedConfigPath} (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md).`);
146
143
  }
147
144
  try {
148
145
  const configFileContents = await fs2.readFile(resolvedConfigPath, "utf8");
@@ -156,11 +153,10 @@ async function loadProjectContext(configPath) {
156
153
  if (error instanceof ZodError) {
157
154
  throw error;
158
155
  }
159
- throw new Error(
160
- "Failed to parse project config (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md)."
161
- );
156
+ throw new Error("Failed to parse project config (docs: https://github.com/pipelabs/pd4castr-model-examples/blob/main/docs/005-config.md).");
162
157
  }
163
158
  }
159
+ __name(loadProjectContext, "loadProjectContext");
164
160
 
165
161
  // src/utils/create-link.ts
166
162
  var ESC = "\x1B";
@@ -171,11 +167,13 @@ function createLink(text, url) {
171
167
  const end = `${OSC}8${SEP}${SEP}${ESC}\\`;
172
168
  return `${start}${text}${end}`;
173
169
  }
170
+ __name(createLink, "createLink");
174
171
 
175
172
  // src/utils/format-nest-error-message.ts
176
173
  function formatNestErrorMessage(error) {
177
174
  return `[${error.error?.toUpperCase() ?? "UNKNOWN"}] ${error.message}`;
178
175
  }
176
+ __name(formatNestErrorMessage, "formatNestErrorMessage");
179
177
 
180
178
  // src/utils/get-auth.ts
181
179
  import invariant from "tiny-invariant";
@@ -208,18 +206,21 @@ async function loadGlobalConfig() {
208
206
  return getDefaultConfig();
209
207
  }
210
208
  }
209
+ __name(loadGlobalConfig, "loadGlobalConfig");
211
210
  function getDefaultConfig() {
212
211
  return {
213
212
  accessToken: null,
214
213
  accessTokenExpiresAt: null
215
214
  };
216
215
  }
216
+ __name(getDefaultConfig, "getDefaultConfig");
217
217
 
218
218
  // src/utils/is-authed.ts
219
219
  function isAuthed(config) {
220
220
  const isTokenExpired = config.accessTokenExpiresAt && config.accessTokenExpiresAt <= Date.now();
221
221
  return Boolean(config.accessToken) && !isTokenExpired;
222
222
  }
223
+ __name(isAuthed, "isAuthed");
223
224
 
224
225
  // src/utils/get-auth.ts
225
226
  async function getAuth() {
@@ -234,6 +235,7 @@ async function getAuth() {
234
235
  expiresAt: config.accessTokenExpiresAt
235
236
  };
236
237
  }
238
+ __name(getAuth, "getAuth");
237
239
 
238
240
  // src/utils/log-zod-issues.ts
239
241
  function logZodIssues(error) {
@@ -241,6 +243,7 @@ function logZodIssues(error) {
241
243
  console.log(` \u2718 ${issue.path.join(".")} - ${issue.message}`);
242
244
  }
243
245
  }
246
+ __name(logZodIssues, "logZodIssues");
244
247
 
245
248
  // src/commands/fetch/utils/fetch-aemo-data.ts
246
249
  import fs4 from "fs/promises";
@@ -255,9 +258,7 @@ var envSchema = z3.object({
255
258
  // wsl sets this environment variable on all distros that i've checked
256
259
  isWSL: z3.boolean().default(() => Boolean(process.env.WSL_DISTRO_NAME)),
257
260
  apiURL: z3.string().default(() => process.env.PD4CASTR_API_URL ?? DEFAULT_API_URL),
258
- wslNetworkInterface: z3.string().default(
259
- () => process.env.PD4CASTR_WSL_NETWORK_INTERFACE ?? WSL_NETWORK_INTERFACE_DEFAULT
260
- ),
261
+ wslNetworkInterface: z3.string().default(() => process.env.PD4CASTR_WSL_NETWORK_INTERFACE ?? WSL_NETWORK_INTERFACE_DEFAULT),
261
262
  auth0ClientId: z3.string().default(() => process.env.PD4CASTR_AUTH0_CLIENT_ID ?? AUTH0_CLIENT_ID),
262
263
  auth0Audience: z3.string().default(() => process.env.PD4CASTR_AUTH0_AUDIENCE ?? AUTH0_AUDIENCE)
263
264
  });
@@ -266,6 +267,7 @@ var envSchema = z3.object({
266
267
  function getEnv() {
267
268
  return envSchema.parse(process.env);
268
269
  }
270
+ __name(getEnv, "getEnv");
269
271
 
270
272
  // src/api/api.ts
271
273
  var api = ky.create({
@@ -274,22 +276,29 @@ var api = ky.create({
274
276
 
275
277
  // src/api/query-data-fetcher.ts
276
278
  async function queryDataFetcher(querySQL, authCtx) {
277
- const headers = { Authorization: `Bearer ${authCtx.accessToken}` };
278
- const payload = { query: querySQL, type: "AEMO_MMS" };
279
- const result = await api.post("data-fetcher/query", { json: payload, headers }).json();
279
+ const headers = {
280
+ Authorization: `Bearer ${authCtx.accessToken}`
281
+ };
282
+ const payload = {
283
+ query: querySQL,
284
+ type: "AEMO_MMS"
285
+ };
286
+ const result = await api.post("data-fetcher/query", {
287
+ json: payload,
288
+ headers
289
+ }).json();
280
290
  return result;
281
291
  }
292
+ __name(queryDataFetcher, "queryDataFetcher");
282
293
 
283
294
  // src/commands/fetch/utils/fetch-aemo-data.ts
284
295
  async function fetchAEMOData(dataFetcher, authCtx, ctx) {
285
- const queryPath = path3.resolve(
286
- ctx.projectRoot,
287
- dataFetcher.config.fetchQuery
288
- );
296
+ const queryPath = path3.resolve(ctx.projectRoot, dataFetcher.config.fetchQuery);
289
297
  const querySQL = await fs4.readFile(queryPath, "utf8");
290
298
  const result = await queryDataFetcher(querySQL, authCtx);
291
299
  return result;
292
300
  }
301
+ __name(fetchAEMOData, "fetchAEMOData");
293
302
 
294
303
  // src/commands/fetch/utils/get-fetcher.ts
295
304
  var DATA_FETCHER_FNS = {
@@ -302,6 +311,7 @@ function getFetcher(type) {
302
311
  }
303
312
  return fetcher;
304
313
  }
314
+ __name(getFetcher, "getFetcher");
305
315
 
306
316
  // src/commands/fetch/utils/write-test-data.ts
307
317
  import fs5 from "fs/promises";
@@ -311,18 +321,24 @@ import path4 from "path";
311
321
  function getInputFilename(modelInput) {
312
322
  return `${modelInput.key}.${modelInput.targetFileFormat}`;
313
323
  }
324
+ __name(getInputFilename, "getInputFilename");
314
325
 
315
326
  // src/commands/fetch/utils/write-test-data.ts
316
327
  async function writeTestData(inputData, modelInput, inputDataDir, ctx) {
317
328
  const inputDir = path4.resolve(ctx.projectRoot, inputDataDir);
318
- await fs5.mkdir(inputDir, { recursive: true });
329
+ await fs5.mkdir(inputDir, {
330
+ recursive: true
331
+ });
319
332
  const inputFilename = getInputFilename(modelInput);
320
333
  const inputPath = path4.resolve(inputDir, inputFilename);
321
334
  await fs5.writeFile(inputPath, JSON.stringify(inputData, void 0, 2));
322
335
  }
336
+ __name(writeTestData, "writeTestData");
323
337
 
324
338
  // src/commands/fetch/handle-action.ts
325
- var FETCHABLE_DATA_FETCHER_TYPES = /* @__PURE__ */ new Set(["AEMO_MMS"]);
339
+ var FETCHABLE_DATA_FETCHER_TYPES = /* @__PURE__ */ new Set([
340
+ "AEMO_MMS"
341
+ ]);
326
342
  async function handleAction(options) {
327
343
  const spinner = ora("Starting data fetch...").start();
328
344
  try {
@@ -339,9 +355,7 @@ async function handleAction(options) {
339
355
  continue;
340
356
  }
341
357
  if (!FETCHABLE_DATA_FETCHER_TYPES.has(input2.fetcher.type)) {
342
- spinner.warn(
343
- `\`${input2.key}\` (${input2.fetcher.type}) - unsupported, skipping`
344
- );
358
+ spinner.warn(`\`${input2.key}\` (${input2.fetcher.type}) - unsupported, skipping`);
345
359
  continue;
346
360
  }
347
361
  spinner.start(`\`${input2.key}\` (${input2.fetcher.type}) - fetching...`);
@@ -373,15 +387,13 @@ ${link} to view fetched data
373
387
  process.exit(1);
374
388
  }
375
389
  }
390
+ __name(handleAction, "handleAction");
376
391
 
377
392
  // src/commands/fetch/index.ts
378
393
  function registerFetchCommand(program2) {
379
- program2.command("fetch").description("Fetches test data from configured data fetchers.").option(
380
- "-i, --input-dir <path>",
381
- "The input test data directory",
382
- TEST_INPUT_DATA_DIR
383
- ).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction);
394
+ program2.command("fetch").description("Fetches test data from configured data fetchers.").option("-i, --input-dir <path>", "The input test data directory", TEST_INPUT_DATA_DIR).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction);
384
395
  }
396
+ __name(registerFetchCommand, "registerFetchCommand");
385
397
 
386
398
  // src/commands/init/handle-action.ts
387
399
  import path6 from "path";
@@ -408,6 +420,7 @@ var templates = {
408
420
  function getTemplatePath(template) {
409
421
  return `https://github.com/${template.repo}/${template.path}`;
410
422
  }
423
+ __name(getTemplatePath, "getTemplatePath");
411
424
 
412
425
  // src/commands/init/utils/validate-name.ts
413
426
  async function validateName(value) {
@@ -417,6 +430,7 @@ async function validateName(value) {
417
430
  }
418
431
  return true;
419
432
  }
433
+ __name(validateName, "validateName");
420
434
 
421
435
  // src/commands/init/handle-action.ts
422
436
  async function handleAction2() {
@@ -446,6 +460,7 @@ async function handleAction2() {
446
460
  process.exit(1);
447
461
  }
448
462
  }
463
+ __name(handleAction2, "handleAction");
449
464
  async function fetchTemplate(template, projectName) {
450
465
  const templatePath = getTemplatePath(templates[template]);
451
466
  const fetcher = tiged(templatePath, {
@@ -455,11 +470,13 @@ async function fetchTemplate(template, projectName) {
455
470
  const destination = path6.join(process.cwd(), projectName);
456
471
  await fetcher.clone(destination);
457
472
  }
473
+ __name(fetchTemplate, "fetchTemplate");
458
474
 
459
475
  // src/commands/init/index.ts
460
476
  function registerInitCommand(program2) {
461
477
  program2.command("init").description("Initialize a new model using a template.").action(handleAction2);
462
478
  }
479
+ __name(registerInitCommand, "registerInitCommand");
463
480
 
464
481
  // src/commands/login/handle-action.ts
465
482
  import { ExecaError as ExecaError3 } from "execa";
@@ -477,16 +494,22 @@ async function updateGlobalConfig(updateFn) {
477
494
  const configPath = path7.join(os2.homedir(), GLOBAL_CONFIG_FILE);
478
495
  await fs6.writeFile(configPath, JSON.stringify(updatedConfig, void 0, 2));
479
496
  }
497
+ __name(updateGlobalConfig, "updateGlobalConfig");
480
498
 
481
499
  // src/commands/login/utils/complete-auth-flow.ts
482
500
  import { HTTPError as HTTPError2 } from "ky";
483
501
 
484
502
  // src/commands/login/auth0-api.ts
485
503
  import ky2 from "ky";
486
- var auth0API = ky2.create({ prefixUrl: `https://${AUTH0_DOMAIN}` });
504
+ var auth0API = ky2.create({
505
+ prefixUrl: `https://${AUTH0_DOMAIN}`
506
+ });
487
507
 
488
508
  // src/commands/login/utils/complete-auth-flow.ts
489
- var FAILED_AUTH_ERRORS = /* @__PURE__ */ new Set(["expired_token", "access_denied"]);
509
+ var FAILED_AUTH_ERRORS = /* @__PURE__ */ new Set([
510
+ "expired_token",
511
+ "access_denied"
512
+ ]);
490
513
  async function completeAuthFlow(authCtx) {
491
514
  const env = getEnv();
492
515
  const payload = {
@@ -496,7 +519,9 @@ async function completeAuthFlow(authCtx) {
496
519
  };
497
520
  async function fetchAuthResponse() {
498
521
  try {
499
- const response = await auth0API.post("oauth/token", { json: payload }).json();
522
+ const response = await auth0API.post("oauth/token", {
523
+ json: payload
524
+ }).json();
500
525
  const authPayload = {
501
526
  accessToken: response.access_token,
502
527
  expiresAt: Date.now() + response.expires_in * 1e3
@@ -509,18 +534,16 @@ async function completeAuthFlow(authCtx) {
509
534
  const errorResponse = await error.response.json();
510
535
  const isFailedAuthError = FAILED_AUTH_ERRORS.has(errorResponse.error);
511
536
  if (isFailedAuthError) {
512
- throw new Error(
513
- `Login failed, please try again (${errorResponse.error_description}).`
514
- );
537
+ throw new Error(`Login failed, please try again (${errorResponse.error_description}).`);
515
538
  }
516
539
  const delay = authCtx.checkInterval * 1e3;
517
- return new Promise(
518
- (resolve) => setTimeout(() => resolve(fetchAuthResponse()), delay)
519
- );
540
+ return new Promise((resolve) => setTimeout(() => resolve(fetchAuthResponse()), delay));
520
541
  }
521
542
  }
543
+ __name(fetchAuthResponse, "fetchAuthResponse");
522
544
  return fetchAuthResponse();
523
545
  }
546
+ __name(completeAuthFlow, "completeAuthFlow");
524
547
 
525
548
  // src/commands/login/utils/start-auth-flow.ts
526
549
  async function startAuthFlow() {
@@ -530,7 +553,9 @@ async function startAuthFlow() {
530
553
  audience: env.auth0Audience,
531
554
  scope: "openid email"
532
555
  };
533
- const codeResponse = await auth0API.post("oauth/device/code", { json: payload }).json();
556
+ const codeResponse = await auth0API.post("oauth/device/code", {
557
+ json: payload
558
+ }).json();
534
559
  const authContext = {
535
560
  deviceCode: codeResponse.device_code,
536
561
  verificationURL: codeResponse.verification_uri_complete,
@@ -539,6 +564,7 @@ async function startAuthFlow() {
539
564
  };
540
565
  return authContext;
541
566
  }
567
+ __name(startAuthFlow, "startAuthFlow");
542
568
 
543
569
  // src/commands/login/handle-action.ts
544
570
  async function handleAction3() {
@@ -550,10 +576,8 @@ async function handleAction3() {
550
576
  return;
551
577
  }
552
578
  const authCtx = await startAuthFlow();
553
- spinner.info(
554
- `Please open the login link in your browser:
555
- ${authCtx.verificationURL}`
556
- );
579
+ spinner.info(`Please open the login link in your browser:
580
+ ${authCtx.verificationURL}`);
557
581
  spinner.info(`Your login code is:
558
582
  ${authCtx.userCode}
559
583
  `);
@@ -579,11 +603,13 @@ async function handleAction3() {
579
603
  process.exit(1);
580
604
  }
581
605
  }
606
+ __name(handleAction3, "handleAction");
582
607
 
583
608
  // src/commands/login/index.ts
584
609
  function registerLoginCommand(program2) {
585
610
  program2.command("login").description("Logs in to the pd4castr API.").action(handleAction3);
586
611
  }
612
+ __name(registerLoginCommand, "registerLoginCommand");
587
613
 
588
614
  // src/commands/logout/handle-action.ts
589
615
  import { ExecaError as ExecaError4 } from "execa";
@@ -617,11 +643,13 @@ async function handleAction4() {
617
643
  process.exit(1);
618
644
  }
619
645
  }
646
+ __name(handleAction4, "handleAction");
620
647
 
621
648
  // src/commands/logout/index.ts
622
649
  function registerLogoutCommand(program2) {
623
650
  program2.command("logout").description("Logs out of the pd4castr API.").action(handleAction4);
624
651
  }
652
+ __name(registerLogoutCommand, "registerLogoutCommand");
625
653
 
626
654
  // src/commands/publish/handle-action.ts
627
655
  import { ExecaError as ExecaError5 } from "execa";
@@ -641,6 +669,7 @@ async function startWebServer(app, port) {
641
669
  });
642
670
  });
643
671
  }
672
+ __name(startWebServer, "startWebServer");
644
673
 
645
674
  // src/commands/publish/handle-create-model-flow.ts
646
675
  import * as inquirer2 from "@inquirer/prompts";
@@ -648,25 +677,42 @@ import chalk3 from "chalk";
648
677
 
649
678
  // src/api/create-model.ts
650
679
  async function createModel(config, authCtx) {
651
- const headers = { Authorization: `Bearer ${authCtx.accessToken}` };
652
- const result = await api.post("model", { headers, json: config }).json();
680
+ const headers = {
681
+ Authorization: `Bearer ${authCtx.accessToken}`
682
+ };
683
+ const result = await api.post("model", {
684
+ headers,
685
+ json: config
686
+ }).json();
653
687
  return result;
654
688
  }
689
+ __name(createModel, "createModel");
655
690
 
656
691
  // src/api/get-registry-push-credentials.ts
657
692
  async function getRegistryPushCredentials(modelID, authCtx) {
658
- const headers = { Authorization: `Bearer ${authCtx.accessToken}` };
693
+ const headers = {
694
+ Authorization: `Bearer ${authCtx.accessToken}`
695
+ };
659
696
  const searchParams = new URLSearchParams(`modelId=${modelID}`);
660
- const result = await api.get("registry/push-credentials", { headers, searchParams }).json();
697
+ const result = await api.get("registry/push-credentials", {
698
+ headers,
699
+ searchParams
700
+ }).json();
661
701
  return result;
662
702
  }
703
+ __name(getRegistryPushCredentials, "getRegistryPushCredentials");
663
704
 
664
705
  // src/api/trigger-model-run.ts
665
706
  async function triggerModelRun(modelId, authCtx) {
666
- const headers = { Authorization: `Bearer ${authCtx.accessToken}` };
667
- const result = await api.post(`model/${modelId}/trigger`, { headers }).json();
707
+ const headers = {
708
+ Authorization: `Bearer ${authCtx.accessToken}`
709
+ };
710
+ const result = await api.post(`model/${modelId}/trigger`, {
711
+ headers
712
+ }).json();
668
713
  return result;
669
714
  }
715
+ __name(triggerModelRun, "triggerModelRun");
670
716
 
671
717
  // src/config/update-project-config.ts
672
718
  import fs7 from "fs/promises";
@@ -676,73 +722,93 @@ async function updateProjectConfig(updateFn, configPath) {
676
722
  const projectConfig = await loadProjectContext(configPath);
677
723
  const updatedConfig = produce2(projectConfig.config, updateFn);
678
724
  const resolvedConfigPath = configPath ? path8.resolve(configPath) : path8.join(projectConfig.projectRoot, PROJECT_CONFIG_FILE);
679
- await fs7.writeFile(
680
- resolvedConfigPath,
681
- JSON.stringify(updatedConfig, void 0, 2)
682
- );
725
+ await fs7.writeFile(resolvedConfigPath, JSON.stringify(updatedConfig, void 0, 2));
683
726
  }
727
+ __name(updateProjectConfig, "updateProjectConfig");
684
728
 
685
729
  // src/docker/build-docker-image.ts
686
730
  import { execa } from "execa";
687
731
  async function buildDockerImage(dockerImage, ctx) {
688
732
  try {
689
- await execa(
690
- "docker",
691
- ["build", "--platform=linux/amd64", "-t", dockerImage, "."],
692
- {
693
- cwd: ctx.projectRoot,
694
- stdio: "pipe"
695
- }
696
- );
733
+ await execa("docker", [
734
+ "build",
735
+ "--platform=linux/amd64",
736
+ "-t",
737
+ dockerImage,
738
+ "."
739
+ ], {
740
+ cwd: ctx.projectRoot,
741
+ stdio: "pipe"
742
+ });
697
743
  } catch (error) {
698
- throw new Error("Failed to build docker image", { cause: error });
744
+ throw new Error("Failed to build docker image", {
745
+ cause: error
746
+ });
699
747
  }
700
748
  }
749
+ __name(buildDockerImage, "buildDockerImage");
701
750
 
702
751
  // src/docker/login-to-docker-registry.ts
703
752
  import { execa as execa2 } from "execa";
704
753
  async function loginToDockerRegistry(authConfig) {
705
754
  try {
706
- await execa2(
707
- "docker",
708
- [
709
- "login",
710
- authConfig.registry,
711
- "--username",
712
- authConfig.username,
713
- "--password-stdin"
714
- ],
715
- { input: authConfig.password }
716
- );
755
+ await execa2("docker", [
756
+ "login",
757
+ authConfig.registry,
758
+ "--username",
759
+ authConfig.username,
760
+ "--password-stdin"
761
+ ], {
762
+ input: authConfig.password
763
+ });
717
764
  } catch (error) {
718
- throw new Error("Failed to login to docker registry", { cause: error });
765
+ throw new Error("Failed to login to docker registry", {
766
+ cause: error
767
+ });
719
768
  }
720
769
  }
770
+ __name(loginToDockerRegistry, "loginToDockerRegistry");
721
771
 
722
772
  // src/docker/push-docker-image.ts
723
773
  import { execa as execa3 } from "execa";
724
774
  async function pushDockerImage(dockerImage, pushRef) {
725
775
  try {
726
- await execa3("docker", ["tag", dockerImage, pushRef]);
727
- await execa3("docker", ["push", pushRef]);
776
+ await execa3("docker", [
777
+ "tag",
778
+ dockerImage,
779
+ pushRef
780
+ ]);
781
+ await execa3("docker", [
782
+ "push",
783
+ pushRef
784
+ ]);
728
785
  } catch (error) {
729
- throw new Error("Failed to push docker image", { cause: error });
786
+ throw new Error("Failed to push docker image", {
787
+ cause: error
788
+ });
730
789
  }
731
790
  }
791
+ __name(pushDockerImage, "pushDockerImage");
732
792
 
733
793
  // src/utils/get-docker-image.ts
734
794
  import slugify from "slugify";
735
795
  function getDockerImage(ctx) {
736
- const sluggedName = slugify(ctx.config.name, { lower: true });
796
+ const sluggedName = slugify(ctx.config.name, {
797
+ lower: true
798
+ });
737
799
  const dockerImage = `pd4castr/${sluggedName}-local:${Date.now()}`;
738
800
  return dockerImage;
739
801
  }
802
+ __name(getDockerImage, "getDockerImage");
740
803
 
741
804
  // src/utils/get-model-config-from-project-config.ts
742
805
  import fs8 from "fs/promises";
743
806
  import path9 from "path";
744
807
  async function getModelConfigFromProjectConfig(ctx) {
745
808
  const inputs = await getInputsWithInlinedSQL(ctx);
809
+ const sensitivities = await getSensitivitiesWithInlinedSQL(ctx);
810
+ const inputAggregations = await getInputAggregationsWithInlinedSQL(ctx);
811
+ const runDatetimeQuery = await getrunDatetimeQuerySQL(ctx);
746
812
  const { $$id, $$modelGroupID, $$revision, $$dockerImage, ...config } = ctx.config;
747
813
  return {
748
814
  ...config,
@@ -750,10 +816,16 @@ async function getModelConfigFromProjectConfig(ctx) {
750
816
  modelGroupId: $$modelGroupID,
751
817
  revision: $$revision ?? 0,
752
818
  dockerImage: $$dockerImage,
753
- inputs
819
+ inputs,
820
+ sensitivities,
821
+ inputAggregations,
822
+ runDatetimeQuery
754
823
  };
755
824
  }
756
- var FETCHERS_WITH_SQL = /* @__PURE__ */ new Set(["AEMO_MMS"]);
825
+ __name(getModelConfigFromProjectConfig, "getModelConfigFromProjectConfig");
826
+ var FETCHERS_WITH_SQL = /* @__PURE__ */ new Set([
827
+ "AEMO_MMS"
828
+ ]);
757
829
  async function getInputsWithInlinedSQL(ctx) {
758
830
  const inputsWithSQL = [];
759
831
  for (const input2 of ctx.config.inputs) {
@@ -761,14 +833,8 @@ async function getInputsWithInlinedSQL(ctx) {
761
833
  inputsWithSQL.push(input2);
762
834
  continue;
763
835
  }
764
- const fetchQueryPath = path9.resolve(
765
- ctx.projectRoot,
766
- input2.fetcher.config.fetchQuery
767
- );
768
- const checkQueryPath = path9.resolve(
769
- ctx.projectRoot,
770
- input2.fetcher.config.checkQuery
771
- );
836
+ const fetchQueryPath = path9.resolve(ctx.projectRoot, input2.fetcher.config.fetchQuery);
837
+ const checkQueryPath = path9.resolve(ctx.projectRoot, input2.fetcher.config.checkQuery);
772
838
  const [fetchQuerySQL, checkQuerySQL] = await Promise.all([
773
839
  fs8.readFile(fetchQueryPath, "utf8"),
774
840
  fs8.readFile(checkQueryPath, "utf8")
@@ -788,11 +854,61 @@ async function getInputsWithInlinedSQL(ctx) {
788
854
  }
789
855
  return inputsWithSQL;
790
856
  }
857
+ __name(getInputsWithInlinedSQL, "getInputsWithInlinedSQL");
858
+ async function getSensitivitiesWithInlinedSQL(ctx) {
859
+ const sensitivitiesWithSQL = [];
860
+ const sensitivities = ctx.config.sensitivities ?? [];
861
+ for (const sensitivity of sensitivities) {
862
+ const queryPath = path9.resolve(ctx.projectRoot, sensitivity.query);
863
+ try {
864
+ const sql = await fs8.readFile(queryPath, "utf8");
865
+ sensitivitiesWithSQL.push({
866
+ ...sensitivity,
867
+ query: sql
868
+ });
869
+ } catch {
870
+ throw new Error(`Sensitivity query file not found (${sensitivity.query})`);
871
+ }
872
+ }
873
+ return sensitivitiesWithSQL;
874
+ }
875
+ __name(getSensitivitiesWithInlinedSQL, "getSensitivitiesWithInlinedSQL");
876
+ async function getInputAggregationsWithInlinedSQL(ctx) {
877
+ const inputAggregationsWithSQL = [];
878
+ for (const inputAggregation of ctx.config.inputAggregations) {
879
+ const queryPath = path9.resolve(ctx.projectRoot, inputAggregation.query);
880
+ try {
881
+ const sql = await fs8.readFile(queryPath, "utf8");
882
+ inputAggregationsWithSQL.push({
883
+ ...inputAggregation,
884
+ query: sql
885
+ });
886
+ } catch {
887
+ throw new Error(`Input aggregation query file not found (${inputAggregation.query})`);
888
+ }
889
+ }
890
+ return inputAggregationsWithSQL;
891
+ }
892
+ __name(getInputAggregationsWithInlinedSQL, "getInputAggregationsWithInlinedSQL");
893
+ async function getrunDatetimeQuerySQL(ctx) {
894
+ if (!ctx.config.runDatetimeQuery) {
895
+ return null;
896
+ }
897
+ const queryPath = path9.resolve(ctx.projectRoot, ctx.config.runDatetimeQuery);
898
+ try {
899
+ const sql = await fs8.readFile(queryPath, "utf8");
900
+ return sql;
901
+ } catch {
902
+ throw new Error(`Run datetime query file not found (${ctx.config.runDatetimeQuery})`);
903
+ }
904
+ }
905
+ __name(getrunDatetimeQuerySQL, "getrunDatetimeQuerySQL");
791
906
 
792
907
  // src/utils/log-empty-line.ts
793
908
  function logEmptyLine() {
794
909
  console.log("");
795
910
  }
911
+ __name(logEmptyLine, "logEmptyLine");
796
912
 
797
913
  // src/commands/publish/constants.ts
798
914
  import chalk from "chalk";
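
The helpers added in the hunk above (getSensitivitiesWithInlinedSQL, getInputAggregationsWithInlinedSQL, getrunDatetimeQuerySQL) all follow the same pattern: resolve the configured query path against the project root, read the file, and substitute the SQL text into the published payload. Below is a standalone sketch of that pattern with an assumed project root and file name, not the package's own code.

  // Standalone illustration of the resolve-and-inline pattern used above.
  // projectRoot and the query path are hypothetical.
  import fs from "fs/promises";
  import path from "path";

  async function inlineQuery(projectRoot: string, queryRelativePath: string): Promise<string> {
    const queryPath = path.resolve(projectRoot, queryRelativePath);
    try {
      return await fs.readFile(queryPath, "utf8"); // the SQL text replaces the path
    } catch {
      throw new Error(`Query file not found (${queryRelativePath})`);
    }
  }

  // e.g. await inlineQuery("/path/to/model", "queries/run-datetime.sql")
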
@@ -810,21 +926,30 @@ function getModelSummaryLines(ctx) {
810
926
  ` ${chalk2.bold("Revision:")} ${ctx.config.$$revision}`,
811
927
  ` ${chalk2.bold("Forecast variable:")} ${ctx.config.forecastVariable}`,
812
928
  ` ${chalk2.bold("Time horizon:")} ${ctx.config.timeHorizon}`,
929
+ ` ${chalk2.bold("Public:")} ${ctx.config.public}`,
813
930
  ` ${chalk2.bold("Inputs:")}`,
814
- ...ctx.config.inputs.map(
815
- (input2) => ` \u2022 ${input2.key} - ${getInputType(input2)}`
816
- ),
931
+ ...ctx.config.inputs.map((input2) => ` \u2022 ${input2.key} - ${getInputType(input2)}`),
817
932
  ` ${chalk2.bold("Outputs:")}`,
818
933
  ...ctx.config.outputs.map((output) => ` \u2022 ${output.name} - ${output.type}`),
934
+ ...ctx.config.inputAggregations.length > 0 ? [
935
+ ` ${chalk2.bold("Input aggregations:")}`,
936
+ ...ctx.config.inputAggregations.map((agg) => ` \u2022 ${agg.name}`)
937
+ ] : [],
938
+ ...ctx.config.sensitivities.length > 0 ? [
939
+ ` ${chalk2.bold("Sensitivities:")}`,
940
+ ...ctx.config.sensitivities.map((s) => ` \u2022 ${s.name}`)
941
+ ] : [],
819
942
  ""
820
943
  ];
821
944
  }
945
+ __name(getModelSummaryLines, "getModelSummaryLines");
822
946
  function getInputType(input2) {
823
947
  if (input2.fetcher) {
824
948
  return input2.fetcher.type;
825
949
  }
826
950
  return "static";
827
951
  }
952
+ __name(getInputType, "getInputType");
828
953
 
829
954
  // src/docker/run-model-container.ts
830
955
  import os3 from "os";
@@ -837,17 +962,19 @@ function getInputEnv(modelInput, webserverURL) {
837
962
  const inputFileURL = `${webserverURL}/input/${filename}`;
838
963
  return `INPUT_${variableName}_URL=${inputFileURL}`;
839
964
  }
965
+ __name(getInputEnv, "getInputEnv");
840
966
 
841
967
  // src/docker/run-model-container.ts
842
968
  async function runModelContainer(dockerImage, webserverPort, ctx) {
843
969
  const env = getEnv();
844
970
  const webserverHostname = env.isWSL ? DOCKER_HOSTNAME_WSL : DOCKER_HOSTNAME_DEFAULT;
845
971
  const webserverURL = `http://${webserverHostname}:${webserverPort}`;
846
- const inputEnvs = ctx.config.inputs.map(
847
- (input2) => getInputEnv(input2, webserverURL)
848
- );
972
+ const inputEnvs = ctx.config.inputs.map((input2) => getInputEnv(input2, webserverURL));
849
973
  const outputEnv = `OUTPUT_URL=${webserverURL}/output`;
850
- const envs = [...inputEnvs, outputEnv];
974
+ const envs = [
975
+ ...inputEnvs,
976
+ outputEnv
977
+ ];
851
978
  try {
852
979
  const extraRunArgs = [];
853
980
  if (env.isWSL) {
@@ -858,7 +985,10 @@ async function runModelContainer(dockerImage, webserverPort, ctx) {
858
985
  "run",
859
986
  "--rm",
860
987
  ...extraRunArgs,
861
- ...envs.flatMap((env2) => ["--env", env2]),
988
+ ...envs.flatMap((env2) => [
989
+ "--env",
990
+ env2
991
+ ]),
862
992
  dockerImage
863
993
  ];
864
994
  await execa4("docker", args, {
@@ -866,20 +996,22 @@ async function runModelContainer(dockerImage, webserverPort, ctx) {
866
996
  stdio: "pipe"
867
997
  });
868
998
  } catch (error) {
869
- throw new Error("Failed to run model container", { cause: error });
999
+ throw new Error("Failed to run model container", {
1000
+ cause: error
1001
+ });
870
1002
  }
871
1003
  }
1004
+ __name(runModelContainer, "runModelContainer");
872
1005
  function getWSLMachineIP() {
873
1006
  const env = getEnv();
874
1007
  const interfaces = os3.networkInterfaces();
875
1008
  const interfaceInfo = interfaces[env.wslNetworkInterface]?.[0];
876
1009
  if (!interfaceInfo) {
877
- throw new Error(
878
- `WSL machine IP not found for interface \`${env.wslNetworkInterface}\``
879
- );
1010
+ throw new Error(`WSL machine IP not found for interface \`${env.wslNetworkInterface}\``);
880
1011
  }
881
1012
  return interfaceInfo.address;
882
1013
  }
1014
+ __name(getWSLMachineIP, "getWSLMachineIP");
883
1015
 
884
1016
  // src/model-io-checks/setup-model-io-checks.ts
885
1017
  import path12 from "path";
@@ -887,6 +1019,9 @@ import express from "express";
887
1019
 
888
1020
  // src/model-io-checks/model-io-checks.ts
889
1021
  var ModelIOChecks = class {
1022
+ static {
1023
+ __name(this, "ModelIOChecks");
1024
+ }
890
1025
  inputsToDownload;
891
1026
  outputUploaded;
892
1027
  constructor(data) {
@@ -923,17 +1058,16 @@ import path10 from "path";
923
1058
  function createInputHandler(inputFilesPath, modelIOChecks, ctx) {
924
1059
  return (req, res) => {
925
1060
  if (!modelIOChecks.isValidInput(req.params.filename)) {
926
- return res.status(404).json({ error: "File not found" });
1061
+ return res.status(404).json({
1062
+ error: "File not found"
1063
+ });
927
1064
  }
928
1065
  modelIOChecks.trackInputHandled(req.params.filename);
929
- const filePath = path10.join(
930
- ctx.projectRoot,
931
- inputFilesPath,
932
- req.params.filename
933
- );
1066
+ const filePath = path10.join(ctx.projectRoot, inputFilesPath, req.params.filename);
934
1067
  return res.sendFile(filePath);
935
1068
  };
936
1069
  }
1070
+ __name(createInputHandler, "createInputHandler");
937
1071
 
938
1072
  // src/model-io-checks/utils/create-output-handler.ts
939
1073
  import fs9 from "fs/promises";
@@ -942,17 +1076,24 @@ function createOutputHandler(modelIOChecks, ctx) {
942
1076
  return async (req, res) => {
943
1077
  modelIOChecks.trackOutputHandled();
944
1078
  const outputPath = path11.join(ctx.projectRoot, TEST_OUTPUT_DATA_DIR);
945
- await fs9.mkdir(outputPath, { recursive: true });
1079
+ await fs9.mkdir(outputPath, {
1080
+ recursive: true
1081
+ });
946
1082
  const outputFilePath = path11.join(outputPath, TEST_OUTPUT_FILENAME);
947
1083
  const outputData = JSON.stringify(req.body, null, 2);
948
1084
  await fs9.writeFile(outputFilePath, outputData, "utf8");
949
- return res.status(200).json({ success: true });
1085
+ return res.status(200).json({
1086
+ success: true
1087
+ });
950
1088
  };
951
1089
  }
1090
+ __name(createOutputHandler, "createOutputHandler");
952
1091
 
953
1092
  // src/model-io-checks/setup-model-io-checks.ts
954
1093
  function setupModelIOChecks(app, inputDir, inputFiles, ctx) {
955
- const modelIOChecks = new ModelIOChecks({ inputFiles });
1094
+ const modelIOChecks = new ModelIOChecks({
1095
+ inputFiles
1096
+ });
956
1097
  const handleInput = createInputHandler(inputDir, modelIOChecks, ctx);
957
1098
  const handleOutput = createOutputHandler(modelIOChecks, ctx);
958
1099
  const inputPath = path12.join(ctx.projectRoot, inputDir);
@@ -962,6 +1103,7 @@ function setupModelIOChecks(app, inputDir, inputFiles, ctx) {
962
1103
  app.put("/output", handleOutput);
963
1104
  return modelIOChecks;
964
1105
  }
1106
+ __name(setupModelIOChecks, "setupModelIOChecks");
965
1107
 
966
1108
  // src/utils/check-input-files.ts
967
1109
  import path13 from "path";
@@ -970,37 +1112,31 @@ async function checkInputFiles(inputFiles, inputDataPath, ctx) {
970
1112
  const filePath = path13.join(ctx.projectRoot, inputDataPath, inputFile);
971
1113
  const exists = await isExistingPath(filePath);
972
1114
  if (!exists) {
973
- throw new Error(
974
- `Input data not found (${inputFile}) - did you need to run \`pd4castr fetch\`?`
975
- );
1115
+ throw new Error(`Input data not found (${inputFile}) - did you need to run \`pd4castr fetch\`?`);
976
1116
  }
977
1117
  }
978
1118
  }
1119
+ __name(checkInputFiles, "checkInputFiles");
979
1120
 
980
1121
  // src/utils/get-input-files.ts
981
1122
  function getInputFiles(config) {
982
1123
  const inputFiles = config.inputs.map((input2) => getInputFilename(input2));
983
1124
  return inputFiles;
984
1125
  }
1126
+ __name(getInputFiles, "getInputFiles");
985
1127
 
986
1128
  // src/commands/publish/utils/run-model-io-tests.ts
987
1129
  async function runModelIOTests(dockerImage, options, app, ctx) {
988
1130
  const inputFiles = getInputFiles(ctx.config);
989
1131
  await checkInputFiles(inputFiles, options.inputDir, ctx);
990
1132
  await buildDockerImage(dockerImage, ctx);
991
- const modelIOChecks = setupModelIOChecks(
992
- app,
993
- options.inputDir,
994
- inputFiles,
995
- ctx
996
- );
1133
+ const modelIOChecks = setupModelIOChecks(app, options.inputDir, inputFiles, ctx);
997
1134
  await runModelContainer(dockerImage, options.port, ctx);
998
1135
  if (!modelIOChecks.isInputsHandled() || !modelIOChecks.isOutputHandled()) {
999
- throw new Error(
1000
- "Model I/O test failed. Please run `pd4castr test` to debug the issue."
1001
- );
1136
+ throw new Error("Model I/O test failed. Please run `pd4castr test` to debug the issue.");
1002
1137
  }
1003
1138
  }
1139
+ __name(runModelIOTests, "runModelIOTests");
1004
1140
 
1005
1141
  // src/commands/publish/handle-create-model-flow.ts
1006
1142
  async function handleCreateModelFlow(options, app, spinner, ctx, authCtx) {
@@ -1032,9 +1168,7 @@ async function handleCreateModelFlow(options, app, spinner, ctx, authCtx) {
1032
1168
  config.displayTimezone = model.displayTimezone;
1033
1169
  }, options.config);
1034
1170
  spinner.succeed("Model data published successfully");
1035
- spinner.start(
1036
- "Pushing model image to registry - this may take a few minutes..."
1037
- );
1171
+ spinner.start("Pushing model image to registry - this may take a few minutes...");
1038
1172
  const pushCredentials = await getRegistryPushCredentials(model.id, authCtx);
1039
1173
  await loginToDockerRegistry(pushCredentials);
1040
1174
  await buildDockerImage(dockerImage, ctx);
@@ -1061,6 +1195,7 @@ async function handleCreateModelFlow(options, app, spinner, ctx, authCtx) {
1061
1195
  console.log(MODEL_RUN_TRIGGER_MESSAGE);
1062
1196
  }
1063
1197
  }
1198
+ __name(handleCreateModelFlow, "handleCreateModelFlow");
1064
1199
 
1065
1200
  // src/commands/publish/handle-update-existing-model-flow.ts
1066
1201
  import * as inquirer5 from "@inquirer/prompts";
@@ -1075,26 +1210,28 @@ import invariant2 from "tiny-invariant";
1075
1210
 
1076
1211
  // src/api/get-model.ts
1077
1212
  async function getModel(id, authCtx) {
1078
- const headers = { Authorization: `Bearer ${authCtx.accessToken}` };
1079
- const result = await api.get(`model/${id}`, { headers }).json();
1213
+ const headers = {
1214
+ Authorization: `Bearer ${authCtx.accessToken}`
1215
+ };
1216
+ const result = await api.get(`model/${id}`, {
1217
+ headers
1218
+ }).json();
1080
1219
  return result;
1081
1220
  }
1221
+ __name(getModel, "getModel");
1082
1222
 
1083
1223
  // src/commands/publish/utils/validate-local-model-state.ts
1084
1224
  async function validateLocalModelState(ctx, authCtx) {
1085
1225
  invariant2(ctx.config.$$id, "model ID is required to fetch published model");
1086
1226
  const currentModel = await getModel(ctx.config.$$id, authCtx);
1087
1227
  if (currentModel.revision !== ctx.config.$$revision) {
1088
- throw new Error(
1089
- `OUT OF SYNC: Local revision (${ctx.config.$$revision}) does not match the current published revision (${currentModel.revision})`
1090
- );
1228
+ throw new Error(`OUT OF SYNC: Local revision (${ctx.config.$$revision}) does not match the current published revision (${currentModel.revision})`);
1091
1229
  }
1092
1230
  if (currentModel.modelGroupId !== ctx.config.$$modelGroupID) {
1093
- throw new Error(
1094
- `OUT OF SYNC: Local model group ID (${ctx.config.$$modelGroupID}) does not match the current published model group ID (${currentModel.modelGroupId})`
1095
- );
1231
+ throw new Error(`OUT OF SYNC: Local model group ID (${ctx.config.$$modelGroupID}) does not match the current published model group ID (${currentModel.modelGroupId})`);
1096
1232
  }
1097
1233
  }
1234
+ __name(validateLocalModelState, "validateLocalModelState");
1098
1235
 
1099
1236
  // src/commands/publish/handle-model-revision-create-flow.ts
1100
1237
  var WARNING_LABEL = chalk4.yellowBright.bold("WARNING!");
@@ -1130,9 +1267,7 @@ async function handleModelRevisionCreateFlow(options, app, spinner, ctx, authCtx
1130
1267
  config.$$dockerImage = model.dockerImage;
1131
1268
  }, options.config);
1132
1269
  spinner.succeed("Model revision data published successfully");
1133
- spinner.start(
1134
- "Pushing new model revision image to registry - this may take a few minutes..."
1135
- );
1270
+ spinner.start("Pushing new model revision image to registry - this may take a few minutes...");
1136
1271
  const pushCredentials = await getRegistryPushCredentials(model.id, authCtx);
1137
1272
  await loginToDockerRegistry(pushCredentials);
1138
1273
  await buildDockerImage(dockerImage, ctx);
@@ -1153,14 +1288,13 @@ async function handleModelRevisionCreateFlow(options, app, spinner, ctx, authCtx
1153
1288
  symbol: "\u{1F680} ",
1154
1289
  prefixText: "\n",
1155
1290
  suffixText: "\n",
1156
- text: chalk4.bold(
1157
- `New model revision (r${model.revision}) published successfully`
1158
- )
1291
+ text: chalk4.bold(`New model revision (r${model.revision}) published successfully`)
1159
1292
  });
1160
1293
  if (!modelRunTriggered && !options.skipTrigger) {
1161
1294
  console.log(MODEL_RUN_TRIGGER_MESSAGE);
1162
1295
  }
1163
1296
  }
1297
+ __name(handleModelRevisionCreateFlow, "handleModelRevisionCreateFlow");
1164
1298
 
1165
1299
  // src/commands/publish/handle-model-revision-update-flow.ts
1166
1300
  import * as inquirer4 from "@inquirer/prompts";
@@ -1168,10 +1302,16 @@ import chalk5 from "chalk";
1168
1302
 
1169
1303
  // src/api/update-model.ts
1170
1304
  async function updateModel(config, authCtx) {
1171
- const headers = { Authorization: `Bearer ${authCtx.accessToken}` };
1172
- const result = await api.patch(`model/${config.id}`, { headers, json: config }).json();
1305
+ const headers = {
1306
+ Authorization: `Bearer ${authCtx.accessToken}`
1307
+ };
1308
+ const result = await api.patch(`model/${config.id}`, {
1309
+ headers,
1310
+ json: config
1311
+ }).json();
1173
1312
  return result;
1174
1313
  }
1314
+ __name(updateModel, "updateModel");
1175
1315
 
1176
1316
  // src/commands/publish/handle-model-revision-update-flow.ts
1177
1317
  var WARNING_LABEL2 = chalk5.yellowBright.bold("WARNING!");
@@ -1208,9 +1348,7 @@ async function handleModelRevisionUpdateFlow(options, app, spinner, ctx, authCtx
1208
1348
  config.displayTimezone = model.displayTimezone;
1209
1349
  }, options.config);
1210
1350
  spinner.succeed("Model revision data updated successfully");
1211
- spinner.start(
1212
- "Pushing updated model image to registry - this may take a few minutes..."
1213
- );
1351
+ spinner.start("Pushing updated model image to registry - this may take a few minutes...");
1214
1352
  const pushCredentials = await getRegistryPushCredentials(model.id, authCtx);
1215
1353
  await loginToDockerRegistry(pushCredentials);
1216
1354
  await buildDockerImage(dockerImage, ctx);
@@ -1237,6 +1375,7 @@ async function handleModelRevisionUpdateFlow(options, app, spinner, ctx, authCtx
1237
1375
  console.log(MODEL_RUN_TRIGGER_MESSAGE);
1238
1376
  }
1239
1377
  }
1378
+ __name(handleModelRevisionUpdateFlow, "handleModelRevisionUpdateFlow");
1240
1379
 
1241
1380
  // src/commands/publish/handle-update-existing-model-flow.ts
1242
1381
  async function handleUpdateExistingModelFlow(options, app, spinner, ctx, authCtx) {
@@ -1248,34 +1387,113 @@ async function handleUpdateExistingModelFlow(options, app, spinner, ctx, authCtx
1248
1387
  message: "Do you want to update the existing revision or create a new one?",
1249
1388
  choices: [
1250
1389
  {
1251
- value: "new" /* NewRevision */,
1390
+ value: "new",
1252
1391
  name: `New Revision (r${revision} \u2192 r${revision + 1})`
1253
1392
  },
1254
1393
  {
1255
- value: "update" /* UpdateExisting */,
1394
+ value: "update",
1256
1395
  name: `Update Existing Revision (r${revision})`
1257
1396
  }
1258
1397
  ]
1259
1398
  });
1260
1399
  logEmptyLine();
1261
- if (action === "new" /* NewRevision */) {
1400
+ if (action === "new") {
1262
1401
  await handleModelRevisionCreateFlow(options, app, spinner, ctx, authCtx);
1263
- } else if (action === "update" /* UpdateExisting */) {
1402
+ } else if (action === "update") {
1264
1403
  await handleModelRevisionUpdateFlow(options, app, spinner, ctx, authCtx);
1265
1404
  } else {
1266
1405
  throw new Error("Invalid CLI state");
1267
1406
  }
1268
1407
  }
1408
+ __name(handleUpdateExistingModelFlow, "handleUpdateExistingModelFlow");
1269
1409
 
1270
1410
  // src/commands/publish/handle-action.ts
1411
+ function _ts_add_disposable_resource(env, value, async) {
1412
+ if (value !== null && value !== void 0) {
1413
+ if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
1414
+ var dispose, inner;
1415
+ if (async) {
1416
+ if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
1417
+ dispose = value[Symbol.asyncDispose];
1418
+ }
1419
+ if (dispose === void 0) {
1420
+ if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
1421
+ dispose = value[Symbol.dispose];
1422
+ if (async) inner = dispose;
1423
+ }
1424
+ if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
1425
+ if (inner) dispose = /* @__PURE__ */ __name(function() {
1426
+ try {
1427
+ inner.call(this);
1428
+ } catch (e) {
1429
+ return Promise.reject(e);
1430
+ }
1431
+ }, "dispose");
1432
+ env.stack.push({
1433
+ value,
1434
+ dispose,
1435
+ async
1436
+ });
1437
+ } else if (async) {
1438
+ env.stack.push({
1439
+ async: true
1440
+ });
1441
+ }
1442
+ return value;
1443
+ }
1444
+ __name(_ts_add_disposable_resource, "_ts_add_disposable_resource");
1445
+ function _ts_dispose_resources(env) {
1446
+ var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error, suppressed, message) {
1447
+ var e = new Error(message);
1448
+ return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
1449
+ };
1450
+ return (_ts_dispose_resources = /* @__PURE__ */ __name(function _ts_dispose_resources3(env2) {
1451
+ function fail(e) {
1452
+ env2.error = env2.hasError ? new _SuppressedError(e, env2.error, "An error was suppressed during disposal.") : e;
1453
+ env2.hasError = true;
1454
+ }
1455
+ __name(fail, "fail");
1456
+ var r, s = 0;
1457
+ function next() {
1458
+ while (r = env2.stack.pop()) {
1459
+ try {
1460
+ if (!r.async && s === 1) return s = 0, env2.stack.push(r), Promise.resolve().then(next);
1461
+ if (r.dispose) {
1462
+ var result = r.dispose.call(r.value);
1463
+ if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) {
1464
+ fail(e);
1465
+ return next();
1466
+ });
1467
+ } else s |= 1;
1468
+ } catch (e) {
1469
+ fail(e);
1470
+ }
1471
+ }
1472
+ if (s === 1) return env2.hasError ? Promise.reject(env2.error) : Promise.resolve();
1473
+ if (env2.hasError) throw env2.error;
1474
+ }
1475
+ __name(next, "next");
1476
+ return next();
1477
+ }, "_ts_dispose_resources"))(env);
1478
+ }
1479
+ __name(_ts_dispose_resources, "_ts_dispose_resources");
1271
1480
  async function handleAction5(options) {
1272
- var _stack = [];
1481
+ const env = {
1482
+ stack: [],
1483
+ error: void 0,
1484
+ hasError: false
1485
+ };
1273
1486
  try {
1274
1487
  const spinner = ora5("Starting model publish...").start();
1275
1488
  const app = express2();
1276
- app.use(express2.json({ limit: "100mb" }));
1277
- app.use(express2.urlencoded({ limit: "100mb", extended: true }));
1278
- const webServer = __using(_stack, await startWebServer(app, options.port));
1489
+ app.use(express2.json({
1490
+ limit: "100mb"
1491
+ }));
1492
+ app.use(express2.urlencoded({
1493
+ limit: "100mb",
1494
+ extended: true
1495
+ }));
1496
+ const webServer = _ts_add_disposable_resource(env, await startWebServer(app, options.port), false);
1279
1497
  try {
1280
1498
  const ctx = await loadProjectContext(options.config);
1281
1499
  const authCtx = await getAuth();
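
The _ts_add_disposable_resource and _ts_dispose_resources helpers introduced at the top of this hunk (replacing the __using/__callDispose helpers removed from the start of the file) correspond to the standard downlevel output for a `using` declaration, i.e. explicit resource management. A hedged sketch of the source-level shape that produces them, assuming TypeScript 5.2+ and a runtime that defines Symbol.dispose; the server object is illustrative, not the package's startWebServer.

  // Illustrative only: a `using` declaration over a disposable resource.
  // When the enclosing block exits (normally or via an exception), the
  // resource's Symbol.dispose method runs - exactly what the generated
  // helpers above orchestrate.
  function openServer() {
    return {
      close() { /* stop listening */ },
      [Symbol.dispose]() { this.close(); }
    };
  }

  function run() {
    using server = openServer(); // compiled into _ts_add_disposable_resource(...)
    // ... use server ...
  }                              // _ts_dispose_resources(...) invokes the disposer here
  run();
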
@@ -1297,25 +1515,20 @@ async function handleAction5(options) {
1297
1515
  }
1298
1516
  process.exit(1);
1299
1517
  }
1300
- } catch (_) {
1301
- var _error = _, _hasError = true;
1518
+ } catch (e) {
1519
+ env.error = e;
1520
+ env.hasError = true;
1302
1521
  } finally {
1303
- __callDispose(_stack, _error, _hasError);
1522
+ _ts_dispose_resources(env);
1304
1523
  }
1305
1524
  }
1525
+ __name(handleAction5, "handleAction");
1306
1526
 
1307
1527
  // src/commands/publish/index.ts
1308
1528
  function registerPublishCommand(program2) {
1309
- program2.command("publish").description("Publishes a pd4castr model.").option(
1310
- "-i, --input-dir <path>",
1311
- "The input test data directory",
1312
- TEST_INPUT_DATA_DIR
1313
- ).option(
1314
- "-p, --port <port>",
1315
- "The port to run the IO testing webserver on",
1316
- TEST_WEBSERVER_PORT.toString()
1317
- ).option("--sc, --skip-checks", "Skip the model I/O checks", false).option("--st, --skip-trigger", "Skip the model trigger", false).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction5);
1529
+ program2.command("publish").description("Publishes a pd4castr model.").option("-i, --input-dir <path>", "The input test data directory", TEST_INPUT_DATA_DIR).option("-p, --port <port>", "The port to run the IO testing webserver on", TEST_WEBSERVER_PORT.toString()).option("--sc, --skip-checks", "Skip the model I/O checks", false).option("--st, --skip-trigger", "Skip the model trigger", false).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction5);
1318
1530
  }
1531
+ __name(registerPublishCommand, "registerPublishCommand");
1319
1532
 
1320
1533
  // src/commands/test/handle-action.ts
1321
1534
  import path14 from "path";
@@ -1323,14 +1536,92 @@ import { ExecaError as ExecaError6 } from "execa";
1323
1536
  import express3 from "express";
1324
1537
  import ora6 from "ora";
1325
1538
  import { ZodError as ZodError6 } from "zod";
1539
+ function _ts_add_disposable_resource2(env, value, async) {
1540
+ if (value !== null && value !== void 0) {
1541
+ if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
1542
+ var dispose, inner;
1543
+ if (async) {
1544
+ if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
1545
+ dispose = value[Symbol.asyncDispose];
1546
+ }
1547
+ if (dispose === void 0) {
1548
+ if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
1549
+ dispose = value[Symbol.dispose];
1550
+ if (async) inner = dispose;
1551
+ }
1552
+ if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
1553
+ if (inner) dispose = /* @__PURE__ */ __name(function() {
1554
+ try {
1555
+ inner.call(this);
1556
+ } catch (e) {
1557
+ return Promise.reject(e);
1558
+ }
1559
+ }, "dispose");
1560
+ env.stack.push({
1561
+ value,
1562
+ dispose,
1563
+ async
1564
+ });
1565
+ } else if (async) {
1566
+ env.stack.push({
1567
+ async: true
1568
+ });
1569
+ }
1570
+ return value;
1571
+ }
1572
+ __name(_ts_add_disposable_resource2, "_ts_add_disposable_resource");
1573
+ function _ts_dispose_resources2(env) {
1574
+ var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error, suppressed, message) {
1575
+ var e = new Error(message);
1576
+ return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
1577
+ };
1578
+ return (_ts_dispose_resources2 = /* @__PURE__ */ __name(function _ts_dispose_resources3(env2) {
1579
+ function fail(e) {
1580
+ env2.error = env2.hasError ? new _SuppressedError(e, env2.error, "An error was suppressed during disposal.") : e;
1581
+ env2.hasError = true;
1582
+ }
1583
+ __name(fail, "fail");
1584
+ var r, s = 0;
1585
+ function next() {
1586
+ while (r = env2.stack.pop()) {
1587
+ try {
1588
+ if (!r.async && s === 1) return s = 0, env2.stack.push(r), Promise.resolve().then(next);
1589
+ if (r.dispose) {
1590
+ var result = r.dispose.call(r.value);
1591
+ if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) {
1592
+ fail(e);
1593
+ return next();
1594
+ });
1595
+ } else s |= 1;
1596
+ } catch (e) {
1597
+ fail(e);
1598
+ }
1599
+ }
1600
+ if (s === 1) return env2.hasError ? Promise.reject(env2.error) : Promise.resolve();
1601
+ if (env2.hasError) throw env2.error;
1602
+ }
1603
+ __name(next, "next");
1604
+ return next();
1605
+ }, "_ts_dispose_resources"))(env);
1606
+ }
1607
+ __name(_ts_dispose_resources2, "_ts_dispose_resources");
1326
1608
  async function handleAction6(options) {
1327
- var _stack = [];
1609
+ const env = {
1610
+ stack: [],
1611
+ error: void 0,
1612
+ hasError: false
1613
+ };
1328
1614
  try {
1329
1615
  const spinner = ora6("Starting model tests...").info();
1330
1616
  const app = express3();
1331
- app.use(express3.json({ limit: "100mb" }));
1332
- app.use(express3.urlencoded({ limit: "100mb", extended: true }));
1333
- const webServer = __using(_stack, await startWebServer(app, options.port));
1617
+ app.use(express3.json({
1618
+ limit: "100mb"
1619
+ }));
1620
+ app.use(express3.urlencoded({
1621
+ limit: "100mb",
1622
+ extended: true
1623
+ }));
1624
+ const webServer = _ts_add_disposable_resource2(env, await startWebServer(app, options.port), false);
1334
1625
  try {
1335
1626
  const ctx = await loadProjectContext(options.config);
1336
1627
  const inputFiles = getInputFiles(ctx.config);
@@ -1341,12 +1632,7 @@ async function handleAction6(options) {
1341
1632
  const dockerImage = getDockerImage(ctx);
1342
1633
  await buildDockerImage(dockerImage, ctx);
1343
1634
  spinner.succeed(`Built docker image (${dockerImage})`);
1344
- const modelIOChecks = setupModelIOChecks(
1345
- app,
1346
- options.inputDir,
1347
- inputFiles,
1348
- ctx
1349
- );
1635
+ const modelIOChecks = setupModelIOChecks(app, options.inputDir, inputFiles, ctx);
1350
1636
  spinner.start("Running model container");
1351
1637
  await runModelContainer(dockerImage, options.port, ctx);
1352
1638
  spinner.succeed("Model run complete");
@@ -1362,11 +1648,7 @@ async function handleAction6(options) {
1362
1648
  throw new Error("Model I/O test failed");
1363
1649
  }
1364
1650
  if (modelIOChecks.isOutputHandled()) {
1365
- const outputPath = path14.join(
1366
- ctx.projectRoot,
1367
- TEST_OUTPUT_DATA_DIR,
1368
- TEST_OUTPUT_FILENAME
1369
- );
1651
+ const outputPath = path14.join(ctx.projectRoot, TEST_OUTPUT_DATA_DIR, TEST_OUTPUT_FILENAME);
1370
1652
  const clickHereLink = createLink("Click here", `file://${outputPath}`);
1371
1653
  const fileLink = createLink(TEST_OUTPUT_FILENAME, `file://${outputPath}`);
1372
1654
  console.log(`
@@ -1387,27 +1669,20 @@ ${clickHereLink} to view output (${fileLink})
1387
1669
  }
1388
1670
  process.exit(1);
1389
1671
  }
1390
- } catch (_) {
1391
- var _error = _, _hasError = true;
1672
+ } catch (e) {
1673
+ env.error = e;
1674
+ env.hasError = true;
1392
1675
  } finally {
1393
- __callDispose(_stack, _error, _hasError);
1676
+ _ts_dispose_resources2(env);
1394
1677
  }
1395
1678
  }
1679
+ __name(handleAction6, "handleAction");
1396
1680
 
1397
1681
  // src/commands/test/index.ts
1398
1682
  function registerTestCommand(program2) {
1399
- program2.command("test").description(
1400
- "Test a model by verifying input and output is handled correctly."
1401
- ).option(
1402
- "-i, --input-dir <path>",
1403
- "The input test data directory",
1404
- TEST_INPUT_DATA_DIR
1405
- ).option(
1406
- "-p, --port <port>",
1407
- "The port to run the IO testing webserver on",
1408
- TEST_WEBSERVER_PORT.toString()
1409
- ).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction6);
1683
+ program2.command("test").description("Test a model by verifying input and output is handled correctly.").option("-i, --input-dir <path>", "The input test data directory", TEST_INPUT_DATA_DIR).option("-p, --port <port>", "The port to run the IO testing webserver on", TEST_WEBSERVER_PORT.toString()).option("-c, --config <path>", "Path to config file", PROJECT_CONFIG_FILE).action(handleAction6);
1410
1684
  }
1685
+ __name(registerTestCommand, "registerTestCommand");
1411
1686
 
1412
1687
  // src/program.ts
1413
1688
  import { Command } from "commander";
@@ -1415,8 +1690,9 @@ import { Command } from "commander";
1415
1690
  // package.json
1416
1691
  var package_default = {
1417
1692
  name: "@pd4castr/cli",
1418
- version: "1.2.0",
1693
+ version: "1.4.0",
1419
1694
  description: "CLI tool for creating, testing, and publishing pd4castr models",
1695
+ license: "UNLICENSED",
1420
1696
  main: "dist/index.js",
1421
1697
  type: "module",
1422
1698
  bin: {
@@ -1425,77 +1701,58 @@ var package_default = {
1425
1701
  files: [
1426
1702
  "dist/**/*"
1427
1703
  ],
1704
+ engines: {
1705
+ node: ">=20.0.0"
1706
+ },
1428
1707
  scripts: {
1429
1708
  build: "tsup",
1430
1709
  dev: "tsup --watch",
1431
- cli: "node dist/index.js",
1432
- test: "vitest run",
1433
- "test:watch": "vitest",
1434
- "test:coverage": "vitest run --coverage",
1435
- lint: "eslint .",
1436
- "lint:fix": "eslint . --fix",
1710
+ pd4castr: "node dist/index.js",
1711
+ release: "semantic-release -e semantic-release-monorepo",
1437
1712
  format: "prettier --write .",
1438
- "format:check": "prettier --check .",
1713
+ lint: "eslint .",
1439
1714
  typecheck: "tsc --noEmit",
1440
1715
  prepublishOnly: "yarn build"
1441
1716
  },
1442
- keywords: [
1443
- "cli",
1444
- "pd4castr"
1445
- ],
1446
- license: "UNLICENSED",
1447
- repository: {
1448
- type: "git",
1449
- url: "git+https://github.com/pipelabs/pd4castr-cli.git"
1450
- },
1451
- bugs: {
1452
- url: "https://github.com/pipelabs/pd4castr-cli/issues"
1717
+ dependencies: {
1718
+ "@inquirer/prompts": "7.7.1",
1719
+ auth0: "4.28.0",
1720
+ chalk: "5.6.0",
1721
+ commander: "14.0.0",
1722
+ execa: "9.6.0",
1723
+ express: "4.21.2",
1724
+ immer: "10.1.1",
1725
+ ky: "1.8.2",
1726
+ ora: "8.2.0",
1727
+ slugify: "1.6.6",
1728
+ tiged: "2.12.7",
1729
+ "tiny-invariant": "1.3.3",
1730
+ zod: "4.0.14"
1453
1731
  },
1454
- homepage: "https://github.com/pipelabs/pd4castr-cli#readme",
1455
1732
  devDependencies: {
1456
1733
  "@faker-js/faker": "10.0.0",
1457
1734
  "@mswjs/data": "0.16.2",
1458
1735
  "@types/express": "4.17.21",
1459
1736
  "@types/node": "24.1.0",
1460
1737
  "@types/supertest": "6.0.3",
1461
- "@typescript-eslint/eslint-plugin": "8.38.0",
1462
- "@typescript-eslint/parser": "8.38.0",
1463
- eslint: "9.32.0",
1464
- "eslint-config-prettier": "10.1.8",
1465
- "eslint-plugin-simple-import-sort": "12.1.1",
1466
- "eslint-plugin-unicorn": "60.0.0",
1467
- "eslint-plugin-vitest": "0.5.4",
1468
1738
  "hook-std": "3.0.0",
1469
1739
  "jest-extended": "6.0.0",
1470
- memfs: "4.23.0",
1471
- msw: "2.10.4",
1472
- prettier: "3.6.2",
1740
+ memfs: "4.49.0",
1741
+ msw: "2.10.5",
1473
1742
  "strip-ansi": "7.1.0",
1474
1743
  supertest: "7.1.4",
1475
1744
  tsup: "8.5.0",
1476
1745
  "type-fest": "4.41.0",
1477
1746
  typescript: "5.8.3",
1478
- "typescript-eslint": "8.38.0",
1479
1747
  vitest: "3.2.4"
1480
1748
  },
1481
- dependencies: {
1482
- "@inquirer/prompts": "7.7.1",
1483
- auth0: "4.27.0",
1484
- chalk: "5.6.0",
1485
- commander: "14.0.0",
1486
- execa: "9.6.0",
1487
- express: "4.21.2",
1488
- immer: "10.1.1",
1489
- ky: "1.8.2",
1490
- ora: "8.2.0",
1491
- slugify: "1.6.6",
1492
- tiged: "2.12.7",
1493
- "tiny-invariant": "1.3.3",
1494
- zod: "4.0.14"
1749
+ publishConfig: {
1750
+ access: "public"
1495
1751
  },
1496
- engines: {
1497
- node: ">=20.0.0"
1498
- }
1752
+ keywords: [
1753
+ "cli",
1754
+ "pd4castr"
1755
+ ]
1499
1756
  };
1500
1757
 
1501
1758
  // src/program.ts