@tradejs/cli 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1056 @@
1
"use strict";
// esbuild-generated CommonJS/ESM interop helpers.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Mirror every own property of `from` onto `to` as a live getter, skipping
// the `except` key and anything `to` already defines. Returns `to`.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        __defProp(to, key, {
          get: () => from[key],
          enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
        });
      }
    }
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed like an ES module namespace.
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
var __toESM = (mod, isNodeMode, target) => (
  (target = mod != null ? __create(__getProtoOf(mod)) : {}),
  __copyProps(
    isNodeMode || !mod || !mod.__esModule
      ? __defProp(target, "default", { value: mod, enumerable: true })
      : target,
    mod
  )
);
24
+
25
+ // src/scripts/mlTrainLatestSelect.ts
26
+ var import_path = __toESM(require("path"));
27
+ var import_promises = __toESM(require("fs/promises"));
28
+ var import_fs = require("fs");
29
+ var import_crypto = require("crypto");
30
+ var import_child_process = require("child_process");
31
+ var import_events = require("events");
32
+ var import_readline2 = __toESM(require("readline"));
33
+ var import_args = __toESM(require("args"));
34
+ var import_chalk2 = __toESM(require("chalk"));
35
+
36
+ // src/scripts/selectStrategy.ts
37
+ var import_readline = __toESM(require("readline"));
38
+ var import_chalk = __toESM(require("chalk"));
39
+ var import_strategies = require("@tradejs/node/strategies");
40
// Strategy selection helpers (from src/scripts/selectStrategy.ts).
var defaultStrategy = "TrendLine";

// Resolve the list of selectable strategies from the strategy registry;
// falls back to a hard-coded list when the registry cannot be loaded.
var getStrategyChoices = async () => {
  try {
    const loaded = await (0, import_strategies.getAvailableStrategyNames)();
    if (loaded.length) {
      return loaded;
    }
  } catch (error) {
    console.warn(`Failed to load strategy list: ${String(error)}`);
  }
  return [
    "Breakout",
    "MaStrategy",
    "AdaptiveMomentumRibbon",
    "TrendLine",
    "VolumeDivergence"
  ];
};

// Interactively prompt the user for a strategy. Accepts a 1-based index or a
// case-insensitive name; non-TTY sessions (and empty/unknown answers) get the
// default strategy (or the first available one when the default is absent).
var selectStrategy = async (promptLabel = "Select strategy") => {
  const strategies = await getStrategyChoices();
  const fallbackStrategy = strategies.includes(defaultStrategy) ? defaultStrategy : strategies[0];
  if (!process.stdin.isTTY) {
    return fallbackStrategy;
  }
  console.log(import_chalk.default.cyan("Available strategies:"));
  strategies.forEach((name, index) => {
    const isDefault = name === defaultStrategy;
    const label = isDefault ? import_chalk.default.green(name) : name;
    const suffix = isDefault ? import_chalk.default.gray(" (default)") : "";
    console.log(` ${import_chalk.default.yellow(String(index + 1))}) ${label}${suffix}`);
  });
  const rl = import_readline.default.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  const ask = (text) => new Promise((resolve) => rl.question(text, resolve));
  const answer = await ask(`${promptLabel} [${import_chalk.default.green(fallbackStrategy)}]: `);
  rl.close();
  const trimmed = answer.trim();
  if (!trimmed) {
    return fallbackStrategy;
  }
  const choiceIndex = Number(trimmed);
  if (Number.isFinite(choiceIndex) && choiceIndex >= 1 && choiceIndex <= strategies.length) {
    return strategies[choiceIndex - 1];
  }
  const byName = strategies.find((name) => name.toLowerCase() === trimmed.toLowerCase());
  if (byName) {
    return byName;
  }
  console.warn(`Unknown strategy "${trimmed}", using ${fallbackStrategy}.`);
  return fallbackStrategy;
};
97
+
98
+ // src/scripts/mlTrainLatestSelect.ts
99
+ var import_ml = require("@tradejs/infra/ml");
100
// Normalize an arbitrary label into a filesystem-safe lowercase token:
// lowercase, non [a-z0-9_-] runs become "_", edge underscores are stripped.
// An empty result collapses to "any".
var toFileToken = (value) => {
  const token = value
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9_-]+/g, "_")
    .replace(/^_+|_+$/g, "");
  return token || "any";
};
101
// Parse an env-style boolean flag: "1"/"true"/"yes"/"on" (case-insensitive,
// trimmed) are truthy; everything else, including null/undefined, is false.
var asBool = (value) => {
  const normalized = (value ?? "").trim().toLowerCase();
  return ["1", "true", "yes", "on"].includes(normalized);
};
102
// Parse a strictly positive integer from an env-style string; returns
// `fallback` for missing, non-numeric, zero, or negative input.
var asInt = (value, fallback) => {
  const parsed = Number.parseInt((value ?? "").trim(), 10);
  if (Number.isFinite(parsed) && parsed > 0) {
    return parsed;
  }
  return fallback;
};
106
// Parse a non-negative integer (zero allowed) from an env-style string;
// returns `fallback` for missing, non-numeric, or negative input.
var asNonNegativeInt = (value, fallback) => {
  const parsed = Number.parseInt((value ?? "").trim(), 10);
  if (Number.isFinite(parsed) && parsed >= 0) {
    return parsed;
  }
  return fallback;
};
110
// Render a byte count as a human-readable binary-unit string (B..TiB).
// Bytes-range values get no decimals; larger units get two. Non-finite or
// non-positive input yields "0 B".
var formatBytes = (bytes) => {
  if (!Number.isFinite(bytes) || bytes <= 0) return "0 B";
  const units = ["B", "KiB", "MiB", "GiB", "TiB"];
  let idx = 0;
  let value = bytes;
  for (; value >= 1024 && idx < units.length - 1; idx++) {
    value /= 1024;
  }
  return `${value.toFixed(idx === 0 ? 0 : 2)} ${units[idx]}`;
};
121
// Best-effort lookup of the memory usage of the running docker-compose `ml`
// service container via `docker stats`. Prefers a one-off (compose run)
// container when present. Returns "<name>: <mem usage>" or "n/a" on any
// failure (docker missing, no matching container, stats empty).
var getMlContainerMemUsage = () => {
  try {
    const psRaw = (0, import_child_process.execSync)(
      'docker ps --filter status=running --filter label=com.docker.compose.service=ml --format "{{.ID}}\\t{{.Names}}\\t{{.Label \\"com.docker.compose.oneoff\\"}}"',
      { encoding: "utf8", stdio: ["ignore", "pipe", "ignore"] }
    ).trim();
    if (!psRaw) return "n/a";
    const rows = psRaw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => {
      // BUGFIX: the docker --format template above separates fields with
      // tabs (`\t`), so split on "\t". The previous split(" ") never matched
      // the output, leaving `id` garbled and the function always "n/a".
      const [id = "", name = "", oneoff = ""] = line.split("\t");
      return { id, name, oneoff };
    });
    if (!rows.length) return "n/a";
    const chosen = rows.find((row) => row.oneoff === "True") ?? rows[0];
    if (!chosen?.id) return "n/a";
    const memUsage = (0, import_child_process.execSync)(
      `docker stats --no-stream --format "{{.MemUsage}}" ${chosen.id}`,
      { encoding: "utf8", stdio: ["ignore", "pipe", "ignore"] }
    ).trim();
    if (!memUsage) return "n/a";
    return `${chosen.name}: ${memUsage}`;
  } catch {
    // Any docker failure is non-fatal; this is informational only.
    return "n/a";
  }
};
145
// Model backends supported by the trainer.
var MODEL_TYPES = ["catboost", "random_forest", "extra_trees", "xgboost", "lightgbm"];
// Strategy names accepted on the CLI; "any" matches every strategy.
var STRATEGIES = [
  "Breakout",
  "MaStrategy",
  "AdaptiveMomentumRibbon",
  "TrendLine",
  "VolumeDivergence",
  "any"
];
160
// CLI flag registration (args package) and parsing of process.argv.
import_args.default.option(["s", "strategy"], "Strategy name (e.g. TrendLine)");
import_args.default.option(["m", "model"], "Model type: catboost | random_forest | extra_trees | xgboost | lightgbm");
import_args.default.option(["L", "latestOnly"], "Use only latest dataset file (overrides ML_TRAIN_USE_LATEST_ONLY)", false);
// Parsed CLI flags for the rest of the script.
var flags = import_args.default.parse(process.argv);
171
// Map a raw CLI value onto a known strategy name (case-insensitive match
// against STRATEGIES); null when the value is absent or unknown.
var parseStrategy = (value) => {
  const raw = String(value ?? "").trim();
  if (!raw) return null;
  const match = STRATEGIES.find((name) => name.toLowerCase() === raw.toLowerCase());
  return match ?? null;
};
179
// Map a raw CLI value onto a known model type (case-insensitive); null when
// the value is absent or not in MODEL_TYPES.
var parseModelType = (value) => {
  const normalized = String(value ?? "").trim().toLowerCase();
  if (!normalized) return null;
  if (MODEL_TYPES.includes(normalized)) return normalized;
  return null;
};
184
// Interactively prompt the user for a model type. Accepts a 1-based index or
// a case-insensitive model name; non-TTY sessions (and empty or unknown
// answers) get `defaultModel`.
var selectModelType = async (defaultModel) => {
  if (!process.stdin.isTTY) {
    return defaultModel;
  }
  console.log(import_chalk2.default.cyan("Available models:"));
  MODEL_TYPES.forEach((name, index) => {
    const isDefault = name === defaultModel;
    const label = isDefault ? import_chalk2.default.green(name) : name;
    const suffix = isDefault ? import_chalk2.default.gray(" (default)") : "";
    console.log(` ${import_chalk2.default.yellow(String(index + 1))}) ${label}${suffix}`);
  });
  const rl = import_readline2.default.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  const ask = (text) => new Promise((resolve) => rl.question(text, resolve));
  const answer = await ask(`Select model [${import_chalk2.default.green(defaultModel)}]: `);
  rl.close();
  const trimmed = answer.trim().toLowerCase();
  if (!trimmed) {
    return defaultModel;
  }
  const choiceIndex = Number(trimmed);
  if (Number.isFinite(choiceIndex) && choiceIndex >= 1 && choiceIndex <= MODEL_TYPES.length) {
    return MODEL_TYPES[choiceIndex - 1];
  }
  if (MODEL_TYPES.includes(trimmed)) {
    return trimmed;
  }
  console.warn(`Unknown model "${answer.trim()}", using ${defaultModel}.`);
  return defaultModel;
};
218
// List base (non-derived) JSONL dataset exports for `strategyName` in `dir`,
// sorted oldest-first by mtime. Prefers strategy-scoped files
// ("ml-dataset-<strategy>-*"); falls back to the legacy unscoped
// "ml-dataset-*" prefix when no scoped files exist. Derived split files
// (.train./.test./.holdout-*/.walk-forward./.prod.) are excluded.
var listDatasetFiles = async (dir, strategyName) => {
  const entries = await import_promises.default.readdir(dir, { withFileTypes: true });
  const scopedPrefix = `ml-dataset-${toFileToken(strategyName)}-`;
  const legacyPrefix = "ml-dataset-";
  const fileNames = entries.filter((entry) => entry.isFile()).map((entry) => entry.name);
  const hasScopedFiles = fileNames.some((name) => name.startsWith(scopedPrefix));
  const acceptedPrefix = hasScopedFiles ? scopedPrefix : legacyPrefix;
  const derivedMarkers = [
    ".train.",
    ".test.",
    ".holdout-train.",
    ".holdout-test.",
    ".walk-forward.",
    ".prod."
  ];
  const jsonlFiles = fileNames.filter(
    (name) =>
      name.startsWith(acceptedPrefix) &&
      name.endsWith(".jsonl") &&
      !derivedMarkers.some((marker) => name.includes(marker))
  );
  const withMtime = await Promise.all(
    jsonlFiles.map(async (name) => {
      const stat = await import_promises.default.stat(import_path.default.join(dir, name));
      return { name, mtime: stat.mtimeMs };
    })
  );
  return withMtime.sort((a, b) => a.mtime - b.mtime).map(({ name }) => name);
};
237
// Stream-concatenate `files` (in order) into `outPath`, honoring writer
// backpressure via "drain". The output stream is always ended and awaited,
// even when a read fails part-way.
var concatFiles = async (files, outPath) => {
  const out = (0, import_fs.createWriteStream)(outPath, { encoding: "utf8" });
  const finished = (0, import_events.once)(out, "finish");
  try {
    for (const file of files) {
      const reader = (0, import_fs.createReadStream)(file, { encoding: "utf8" });
      for await (const chunk of reader) {
        const canContinue = out.write(chunk);
        if (!canContinue) {
          await (0, import_events.once)(out, "drain");
        }
      }
    }
  } finally {
    out.end();
    await finished;
  }
};
254
// Attach a line-splitting "data" listener to `stream` that normalizes CR to
// LF and emits each complete line via `write`. Returns a detach function
// that removes the listener and flushes any trailing partial line. A nullish
// stream yields a no-op detacher.
var pipeNormalized = (stream, write) => {
  if (!stream) {
    return () => {
    };
  }
  let pending = "";
  const onData = (chunk) => {
    pending += String(chunk).replace(/\r/g, "\n");
    const lines = pending.split("\n");
    pending = lines.pop() ?? "";
    lines.forEach((line) => write(line));
  };
  stream.on("data", onData);
  return () => {
    stream.off("data", onData);
    if (pending.length) {
      write(pending);
      pending = "";
    }
  };
};
276
// One day in milliseconds.
var DAY_MS = 24 * 60 * 60 * 1e3;
// Emit a progress log line every N dataset rows during split scans/writes.
var SPLIT_PROGRESS_EVERY = 2e5;
// Coerce a timestamp into epoch milliseconds. Values below 1e12 are assumed
// to be epoch seconds and scaled up; invalid or non-positive input -> null.
var parseTimestampMs = (value) => {
  const numeric = Number(value);
  if (!Number.isFinite(numeric) || numeric <= 0) return null;
  if (numeric < 1e12) return Math.trunc(numeric * 1e3);
  return Math.trunc(numeric);
};
283
// Stream `inputPath` (JSONL) and return the maximum entryTimestamp (ms)
// across labeled rows. Logs progress periodically; malformed JSON lines are
// skipped. Throws when no labeled row carries a usable timestamp.
var scanMaxLabeledTimestampMs = async (inputPath) => {
  const rl = import_readline2.default.createInterface({
    input: (0, import_fs.createReadStream)(inputPath, { encoding: "utf8" }),
    crlfDelay: Infinity
  });
  const startedAt = Date.now();
  let maxTs = 0;
  let scanned = 0;
  for await (const line of rl) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    scanned++;
    if (scanned % SPLIT_PROGRESS_EVERY === 0) {
      const elapsed = Math.floor((Date.now() - startedAt) / 1e3);
      console.log(`[split] scan max labeled ts: lines=${scanned} elapsed=${elapsed}s`);
    }
    try {
      const row = JSON.parse(trimmed);
      if (row.label == null) continue; // only labeled rows count
      const ts = parseTimestampMs(row.entryTimestamp);
      if (ts && ts > maxTs) maxTs = ts;
    } catch {
      // malformed JSON line: skip
    }
  }
  rl.close();
  if (!maxTs) {
    throw new Error("No labeled rows with entryTimestamp found in dataset.");
  }
  return maxTs;
};
315
// Stream `inputPath` (JSONL) and return the maximum entryTimestamp (ms)
// among labeled rows at or before `holdoutCutoffMs` (i.e. the newest row
// eligible for training). Throws when no such row exists.
var scanMaxTrainTimestampMs = async (inputPath, holdoutCutoffMs) => {
  const rl = import_readline2.default.createInterface({
    input: (0, import_fs.createReadStream)(inputPath, { encoding: "utf8" }),
    crlfDelay: Infinity
  });
  const startedAt = Date.now();
  let maxTrainTs = 0;
  let scanned = 0;
  for await (const line of rl) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    scanned++;
    if (scanned % SPLIT_PROGRESS_EVERY === 0) {
      const elapsed = Math.floor((Date.now() - startedAt) / 1e3);
      console.log(`[split] scan max train ts: lines=${scanned} elapsed=${elapsed}s`);
    }
    try {
      const row = JSON.parse(trimmed);
      if (row.label == null) continue; // only labeled rows count
      const ts = parseTimestampMs(row.entryTimestamp);
      if (ts && ts <= holdoutCutoffMs && ts > maxTrainTs) {
        maxTrainTs = ts;
      }
    } catch {
      // malformed JSON line: skip
    }
  }
  rl.close();
  if (!maxTrainTs) {
    throw new Error("No train rows found after holdout cutoff. Adjust test window.");
  }
  return maxTrainTs;
};
351
// SHA-1 hex digest of a file's basename; used as a stable cache-key
// component for derived split files.
var hashFileNameSha1 = (filePath) => {
  const baseName = import_path.default.basename(filePath);
  return (0, import_crypto.createHash)("sha1").update(baseName).digest("hex");
};
352
// Split a base JSONL dataset export into time-windowed derived files:
//   - holdout train/test (split at maxLabeledTs - testDays),
//   - one train/test pair per walk-forward fold,
//   - a "prod" file of the most recent rows,
// all keyed by a short hash of the export name + split parameters so a
// repeat invocation with identical inputs can reuse previously written
// files (validated via a sidecar .meta.json). Returns the derived paths,
// per-file row counts, and fold window timestamps.
// Throws when given an already-derived file, when the split would produce
// empty sets, or (with enforceCausalityGuard) when a row's feature
// timestamps are newer than its entryTimestamp.
var prepareTrainWindowFiles = async ({
  inputPath,
  testDays,
  trainRecentDays,
  walkForwardFolds,
  enforceCausalityGuard
}) => {
  const inputBaseName = import_path.default.basename(inputPath);
  if ((0, import_ml.isDerivedDatasetFileName)(inputBaseName)) {
    throw new Error(`Refusing to split derived dataset file: ${inputBaseName}. Expected base export file.`);
  }
  // Cache key: export identity + every parameter that affects the split.
  const exportHash = hashFileNameSha1(inputPath);
  const keyPayload = JSON.stringify({
    exportHash,
    testDays,
    trainRecentDays,
    walkForwardFolds,
    causalityGuardVersion: enforceCausalityGuard ? 1 : 0
  });
  const key = (0, import_crypto.createHash)("sha1").update(keyPayload).digest("hex").slice(0, 12);
  const dir = import_path.default.dirname(inputPath);
  const parsed = import_path.default.parse(inputPath);
  const holdoutTrainPath = import_path.default.join(dir, `${parsed.name}.holdout-train.${key}.jsonl`);
  const holdoutTestPath = import_path.default.join(dir, `${parsed.name}.holdout-test.${key}.jsonl`);
  const prodPath = import_path.default.join(dir, `${parsed.name}.prod.${key}.jsonl`);
  const walkForwardFoldDefs = Array.from({ length: Math.max(walkForwardFolds, 0) }, (_, idx) => {
    const fold = idx + 1;
    const foldToken = `fold-${fold}`;
    return {
      fold,
      trainPath: import_path.default.join(dir, `${parsed.name}.walk-forward-${foldToken}.train.${key}.jsonl`),
      testPath: import_path.default.join(dir, `${parsed.name}.walk-forward-${foldToken}.test.${key}.jsonl`)
    };
  });
  const metaPath = import_path.default.join(dir, `${parsed.name}.windows.${key}.meta.json`);
  // Cache probe: reuse existing split files when the meta file matches and
  // every derived file is present. Any failure (missing file, legacy meta
  // format) falls through to a fresh split.
  try {
    const [metaRaw] = await Promise.all([
      import_promises.default.readFile(metaPath, "utf8"),
      import_promises.default.access(holdoutTrainPath),
      import_promises.default.access(holdoutTestPath),
      import_promises.default.access(prodPath),
      ...walkForwardFoldDefs.flatMap((entry) => [
        import_promises.default.access(entry.trainPath),
        import_promises.default.access(entry.testPath)
      ])
    ]);
    const cachedMeta = JSON.parse(metaRaw);
    if (cachedMeta.exportHash === exportHash) {
      const foldMetaByFold = new Map();
      const fileFolds = Array.isArray(cachedMeta.files?.walkForwardFolds) ? cachedMeta.files.walkForwardFolds : [];
      const hasExtendedFoldTiming = fileFolds.length === 0 || "trainMinTs" in fileFolds[0];
      if (!hasExtendedFoldTiming) {
        throw new Error("Legacy meta format cache miss");
      }
      if (!cachedMeta.files?.prod?.prod) {
        throw new Error("Legacy meta format cache miss (prod block missing)");
      }
      for (const foldFile of fileFolds) {
        const fold = Number(foldFile.fold);
        if (!Number.isFinite(fold) || fold <= 0) continue;
        foldMetaByFold.set(fold, {
          startTs: Number(foldFile.startTs ?? 0),
          endTs: Number(foldFile.endTs ?? 0),
          trainRows: Number(foldFile.trainRows ?? 0),
          testRows: Number(foldFile.testRows ?? 0)
        });
      }
      // Older metas carried counts in a separate foldCounts array; merge it.
      if (Array.isArray(cachedMeta.foldCounts)) {
        for (const row of cachedMeta.foldCounts) {
          const fold = Number(row.fold);
          if (!Number.isFinite(fold) || fold <= 0) continue;
          const prev = foldMetaByFold.get(fold) ?? { startTs: 0, endTs: 0, trainRows: 0, testRows: 0 };
          foldMetaByFold.set(fold, {
            ...prev,
            trainRows: Number(row.trainRows ?? 0),
            testRows: Number(row.testRows ?? 0)
          });
        }
      }
      return {
        holdoutTrainPath,
        holdoutTestPath,
        prodPath,
        walkForwardFolds: walkForwardFoldDefs.map((entry) => {
          const metaRow = foldMetaByFold.get(entry.fold);
          return {
            fold: entry.fold,
            startTs: Number(metaRow?.startTs ?? 0),
            endTs: Number(metaRow?.endTs ?? 0),
            trainPath: entry.trainPath,
            testPath: entry.testPath,
            trainRows: Number(metaRow?.trainRows ?? 0),
            testRows: Number(metaRow?.testRows ?? 0)
          };
        }),
        cleanup: [],
        reused: true,
        key,
        exportHash,
        counts: {
          holdoutTrainRows: Number(cachedMeta.files?.holdout?.holdoutTrainRows ?? cachedMeta.counts?.holdoutTrainRows ?? 0),
          holdoutTestRows: Number(cachedMeta.files?.holdout?.holdoutTestRows ?? cachedMeta.counts?.holdoutTestRows ?? 0),
          walkForwardSourceRows: Number(cachedMeta.files?.walkForwardFolds?.[0]?.walkForwardSourceRows ?? cachedMeta.counts?.walkForwardSourceRows ?? 0),
          prodRows: Number(cachedMeta.files?.prod?.prodRows ?? cachedMeta.counts?.prodRows ?? 0)
        }
      };
    }
  } catch {
    // cache miss: fall through to a fresh split
  }
  console.log("[split] phase 1/3: scanning max labeled timestamp...");
  const maxLabeledTs = await scanMaxLabeledTimestampMs(inputPath);
  const holdoutCutoffMs = maxLabeledTs - testDays * DAY_MS;
  console.log("[split] phase 2/3: scanning max train timestamp...");
  const maxTrainTs = await scanMaxTrainTimestampMs(inputPath, holdoutCutoffMs);
  const {
    holdoutCutoffMs: derivedHoldoutCutoffMs,
    holdoutTrainStartMs,
    wfStartMs,
    prodStartMs,
    folds
  } = (0, import_ml.computeWindowBoundaries)({
    maxLabeledTs,
    maxTrainTs,
    testDays,
    trainRecentDays,
    walkForwardFolds
  });
  // Sanity check: the shared boundary helper must agree with our local cutoff.
  if (derivedHoldoutCutoffMs !== holdoutCutoffMs) {
    throw new Error("Internal split boundary mismatch for holdout cutoff.");
  }
  const walkForwardFoldsWithWindows = walkForwardFoldDefs.map((entry) => {
    const foldWindow = folds.find((row) => row.fold === entry.fold);
    return {
      ...entry,
      startTs: Number(foldWindow?.startTs ?? 0),
      endTs: Number(foldWindow?.endTs ?? 0),
      trainRows: 0,
      testRows: 0
    };
  });
  console.log("[split] phase 3/3: writing holdout/walk-forward files...");
  const holdoutTrainWriter = (0, import_fs.createWriteStream)(holdoutTrainPath, { encoding: "utf8" });
  const holdoutTestWriter = (0, import_fs.createWriteStream)(holdoutTestPath, { encoding: "utf8" });
  const prodWriter = (0, import_fs.createWriteStream)(prodPath, { encoding: "utf8" });
  const walkForwardWriters = walkForwardFoldsWithWindows.map((entry) => ({
    ...entry,
    trainWriter: (0, import_fs.createWriteStream)(entry.trainPath, { encoding: "utf8" }),
    testWriter: (0, import_fs.createWriteStream)(entry.testPath, { encoding: "utf8" }),
    trainMinTs: Number.POSITIVE_INFINITY,
    trainMaxTs: 0,
    testMinTs: Number.POSITIVE_INFINITY,
    testMaxTs: 0
  }));
  let holdoutTrainRows = 0;
  let holdoutTestRows = 0;
  let walkForwardSourceRows = 0;
  let holdoutTrainMinTs = Number.POSITIVE_INFINITY;
  let holdoutTrainMaxTs = 0;
  let holdoutTestMinTs = Number.POSITIVE_INFINITY;
  let holdoutTestMaxTs = 0;
  let holdoutTrainOutOfRangeRows = 0;
  let prodRows = 0;
  let prodMinTs = Number.POSITIVE_INFINITY;
  let prodMaxTs = 0;
  let scanned = 0;
  const writeStartedAt = Date.now();
  const rl = import_readline2.default.createInterface({
    input: (0, import_fs.createReadStream)(inputPath, { encoding: "utf8" }),
    crlfDelay: Infinity
  });
  for await (const line of rl) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    scanned++;
    if (scanned % SPLIT_PROGRESS_EVERY === 0) {
      const elapsed = Math.floor((Date.now() - writeStartedAt) / 1e3);
      console.log(`[split] writing: lines=${scanned} train=${holdoutTrainRows} test=${holdoutTestRows} wf_source=${walkForwardSourceRows} elapsed=${elapsed}s`);
    }
    let parsedRow = null;
    try {
      parsedRow = JSON.parse(trimmed);
    } catch {
      continue; // skip malformed JSON lines
    }
    if (!parsedRow || parsedRow.label == null) continue;
    const ts = parseTimestampMs(parsedRow.entryTimestamp);
    if (!ts) continue;
    if (enforceCausalityGuard) {
      const violations = (0, import_ml.findLookaheadViolations)(parsedRow);
      if (violations.length) {
        const sample = violations.slice(0, 3).map((row) => `${row.key}: ${row.featureTimestampMs} > ${row.entryTimestampMs}`).join(", ");
        throw new Error(`Lookahead validation failed: feature timestamp is newer than entryTimestamp. ${sample}`);
      }
    }
    if (ts > holdoutCutoffMs) {
      // Newer than the cutoff: holdout test set.
      if (!holdoutTestWriter.write(`${trimmed}\n`)) {
        await (0, import_events.once)(holdoutTestWriter, "drain");
      }
      holdoutTestRows++;
      if (ts < holdoutTestMinTs) holdoutTestMinTs = ts;
      if (ts > holdoutTestMaxTs) holdoutTestMaxTs = ts;
    } else {
      if (ts >= holdoutTrainStartMs) {
        if (!holdoutTrainWriter.write(`${trimmed}\n`)) {
          await (0, import_events.once)(holdoutTrainWriter, "drain");
        }
        holdoutTrainRows++;
        if (ts < holdoutTrainStartMs || ts > holdoutCutoffMs) {
          holdoutTrainOutOfRangeRows++;
        }
        if (ts < holdoutTrainMinTs) holdoutTrainMinTs = ts;
        if (ts > holdoutTrainMaxTs) holdoutTrainMaxTs = ts;
      }
      if (ts >= wfStartMs && ts <= maxTrainTs) {
        walkForwardSourceRows++;
        // Route the row into each fold: inside (startTs, endTs] -> test,
        // at or before startTs -> train, after endTs -> excluded.
        for (const foldWriter of walkForwardWriters) {
          if (ts > foldWriter.startTs && ts <= foldWriter.endTs) {
            if (!foldWriter.testWriter.write(`${trimmed}\n`)) {
              await (0, import_events.once)(foldWriter.testWriter, "drain");
            }
            foldWriter.testRows++;
            if (ts < foldWriter.testMinTs) foldWriter.testMinTs = ts;
            if (ts > foldWriter.testMaxTs) foldWriter.testMaxTs = ts;
          } else if (ts <= foldWriter.startTs) {
            if (!foldWriter.trainWriter.write(`${trimmed}\n`)) {
              await (0, import_events.once)(foldWriter.trainWriter, "drain");
            }
            foldWriter.trainRows++;
            if (ts < foldWriter.trainMinTs) foldWriter.trainMinTs = ts;
            if (ts > foldWriter.trainMaxTs) foldWriter.trainMaxTs = ts;
          }
        }
      }
    }
    // Prod file collects every recent row regardless of holdout assignment.
    if (ts >= prodStartMs) {
      if (!prodWriter.write(`${trimmed}\n`)) {
        await (0, import_events.once)(prodWriter, "drain");
      }
      prodRows++;
      if (ts < prodMinTs) prodMinTs = ts;
      if (ts > prodMaxTs) prodMaxTs = ts;
    }
  }
  rl.close();
  holdoutTrainWriter.end();
  holdoutTestWriter.end();
  prodWriter.end();
  for (const foldWriter of walkForwardWriters) {
    foldWriter.trainWriter.end();
    foldWriter.testWriter.end();
  }
  await Promise.all([
    (0, import_events.once)(holdoutTrainWriter, "finish"),
    (0, import_events.once)(holdoutTestWriter, "finish"),
    (0, import_events.once)(prodWriter, "finish"),
    ...walkForwardWriters.flatMap((foldWriter) => [
      (0, import_events.once)(foldWriter.trainWriter, "finish"),
      (0, import_events.once)(foldWriter.testWriter, "finish")
    ])
  ]);
  // Post-split validation: every derived set must be non-empty and strictly
  // respect its window boundaries.
  if (!holdoutTrainRows || !holdoutTestRows) {
    throw new Error(`Window split produced empty holdout set (train=${holdoutTrainRows}, test=${holdoutTestRows}).`);
  }
  if (holdoutTrainOutOfRangeRows > 0) {
    throw new Error(`Holdout split validation failed (train_out_of_range=${holdoutTrainOutOfRangeRows}).`);
  }
  if (holdoutTrainMaxTs > holdoutCutoffMs) {
    throw new Error(`Holdout train contains rows newer than cutoff (${holdoutTrainMaxTs} > ${holdoutCutoffMs}).`);
  }
  if (holdoutTestMinTs <= holdoutCutoffMs) {
    throw new Error(`Holdout test contains rows older/equal cutoff (${holdoutTestMinTs} <= ${holdoutCutoffMs}).`);
  }
  if (walkForwardFolds > 0 && !walkForwardSourceRows) {
    throw new Error("Window split produced empty walk-forward source dataset.");
  }
  if (!prodRows) {
    throw new Error("Window split produced empty prod dataset.");
  }
  for (const foldWriter of walkForwardWriters) {
    if (!foldWriter.trainRows || !foldWriter.testRows) {
      throw new Error(`Walk-forward fold ${foldWriter.fold} is empty (train=${foldWriter.trainRows}, test=${foldWriter.testRows}).`);
    }
  }
  // Persist the meta sidecar so the next identical invocation can reuse
  // these derived files.
  const meta = {
    exportFile: import_path.default.basename(inputPath),
    exportHash,
    params: {
      testDays,
      trainRecentDays,
      walkForwardFolds,
      causalityGuard: enforceCausalityGuard
    },
    files: {
      holdout: {
        holdoutTrainRows,
        holdoutTestRows,
        holdoutCutoffMs,
        holdoutCutoffDt: (0, import_ml.toIsoUtcOrNull)(holdoutCutoffMs),
        holdoutTrainStartMs,
        holdoutTrainStartDt: (0, import_ml.toIsoUtcOrNull)(holdoutTrainStartMs),
        holdoutTrainMinTs: holdoutTrainRows > 0 ? holdoutTrainMinTs : null,
        holdoutTrainMinDt: (0, import_ml.toIsoUtcOrNull)(holdoutTrainRows > 0 ? holdoutTrainMinTs : null),
        holdoutTrainMaxTs: holdoutTrainRows > 0 ? holdoutTrainMaxTs : null,
        holdoutTrainMaxDt: (0, import_ml.toIsoUtcOrNull)(holdoutTrainRows > 0 ? holdoutTrainMaxTs : null),
        holdoutTestMinTs: holdoutTestRows > 0 ? holdoutTestMinTs : null,
        holdoutTestMinDt: (0, import_ml.toIsoUtcOrNull)(holdoutTestRows > 0 ? holdoutTestMinTs : null),
        holdoutTestMaxTs: holdoutTestRows > 0 ? holdoutTestMaxTs : null,
        holdoutTestMaxDt: (0, import_ml.toIsoUtcOrNull)(holdoutTestRows > 0 ? holdoutTestMaxTs : null),
        holdoutTrain: import_path.default.basename(holdoutTrainPath),
        holdoutTest: import_path.default.basename(holdoutTestPath)
      },
      prod: {
        prodRows,
        prodStartMs: Number.isFinite(prodStartMs) ? prodStartMs : null,
        prodStartDt: (0, import_ml.toIsoUtcOrNull)(Number.isFinite(prodStartMs) ? prodStartMs : null),
        prodMinTs: prodRows > 0 ? prodMinTs : null,
        prodMinDt: (0, import_ml.toIsoUtcOrNull)(prodRows > 0 ? prodMinTs : null),
        prodMaxTs: prodRows > 0 ? prodMaxTs : null,
        prodMaxDt: (0, import_ml.toIsoUtcOrNull)(prodRows > 0 ? prodMaxTs : null),
        prod: import_path.default.basename(prodPath)
      },
      walkForwardFolds: walkForwardWriters.map((entry) => ({
        fold: entry.fold,
        walkForwardSourceRows,
        startTs: entry.startTs,
        startDt: (0, import_ml.toIsoUtcOrNull)(entry.startTs),
        endTs: entry.endTs,
        endDt: (0, import_ml.toIsoUtcOrNull)(entry.endTs),
        trainStartMs: Number.isFinite(wfStartMs) ? wfStartMs : null,
        trainStartDt: (0, import_ml.toIsoUtcOrNull)(Number.isFinite(wfStartMs) ? wfStartMs : null),
        trainMinTs: entry.trainRows > 0 ? entry.trainMinTs : null,
        trainMinDt: (0, import_ml.toIsoUtcOrNull)(entry.trainRows > 0 ? entry.trainMinTs : null),
        trainMaxTs: entry.trainRows > 0 ? entry.trainMaxTs : null,
        trainMaxDt: (0, import_ml.toIsoUtcOrNull)(entry.trainRows > 0 ? entry.trainMaxTs : null),
        testMinTs: entry.testRows > 0 ? entry.testMinTs : null,
        testMinDt: (0, import_ml.toIsoUtcOrNull)(entry.testRows > 0 ? entry.testMinTs : null),
        testMaxTs: entry.testRows > 0 ? entry.testMaxTs : null,
        testMaxDt: (0, import_ml.toIsoUtcOrNull)(entry.testRows > 0 ? entry.testMaxTs : null),
        trainRows: entry.trainRows,
        testRows: entry.testRows,
        train: import_path.default.basename(entry.trainPath),
        test: import_path.default.basename(entry.testPath)
      }))
    },
    createdAt: new Date().toISOString()
  };
  await import_promises.default.writeFile(metaPath, `${JSON.stringify(meta, null, 2)}\n`, "utf8");
  return {
    holdoutTrainPath,
    holdoutTestPath,
    prodPath,
    walkForwardFolds: walkForwardWriters.map((entry) => ({
      fold: entry.fold,
      startTs: entry.startTs,
      endTs: entry.endTs,
      trainPath: entry.trainPath,
      testPath: entry.testPath,
      trainRows: entry.trainRows,
      testRows: entry.testRows
    })),
    cleanup: [],
    reused: false,
    key,
    exportHash,
    counts: {
      holdoutTrainRows,
      holdoutTestRows,
      walkForwardSourceRows,
      prodRows
    }
  };
};
806
// Orchestrates one end-to-end ML training run:
//   1. resolve strategy + model type (CLI flags first, interactive prompt as fallback),
//   2. pick the latest JSONL export or merge all exports into one temp file,
//   3. prepare holdout / walk-forward split files,
//   4. run /app/ml/train.py inside docker compose, with heartbeat + no-output watchdog,
//   5. verify model artifacts landed on disk, clean up temp files, and exit.
// Always terminates the process via process.exit at the end.
var run = async () => {
  // Strategy/model selection: CLI flag wins; otherwise prompt the user.
  // parseStrategy/selectStrategy/parseModelType/selectModelType are defined elsewhere in this file.
  const selectedFromCli = parseStrategy(flags.strategy);
  const selected = selectedFromCli ?? await selectStrategy();
  const envModelType = (process.env.ML_MODEL_TYPE ?? "random_forest").trim().toLowerCase();
  // Env-provided model type is only honored if it is a known MODEL_TYPES entry.
  const defaultModelType = MODEL_TYPES.includes(
    envModelType
  ) ? envModelType : "random_forest";
  const modelFromCli = parseModelType(flags.model);
  const modelType = modelFromCli ?? await selectModelType(defaultModelType);
  const useLatestOnly = Boolean(flags.latestOnly) || asBool(process.env.ML_TRAIN_USE_LATEST_ONLY);
  const exportDir = import_path.default.join(process.cwd(), "data", "ml", "export");
  const modelDirRoot = import_path.default.join(process.cwd(), "data", "ml");
  // listDatasetFiles is defined elsewhere; presumably returns ml-dataset-*.jsonl names sorted
  // so the last entry is the newest — TODO confirm ordering contract.
  const datasetFiles = await listDatasetFiles(exportDir, selected);
  let inputPath;
  // Temp files (merged dataset, split files) to delete in the finally block below.
  let cleanupFiles = [];
  if (datasetFiles.length && useLatestOnly) {
    // Latest-only mode: train on the newest export as-is.
    const latest = datasetFiles[datasetFiles.length - 1];
    inputPath = import_path.default.join(exportDir, latest);
    console.log(`Using latest JSONL dataset: ${import_path.default.basename(inputPath)}`);
  } else if (datasetFiles.length) {
    // Default mode: concatenate every export into one merged temp JSONL.
    const files = datasetFiles.map((name) => import_path.default.join(exportDir, name));
    const mergedPrefix = `ml-dataset-merged-${Date.now()}`;
    inputPath = import_path.default.join(exportDir, `${mergedPrefix}.jsonl`);
    await concatFiles(files, inputPath);
    cleanupFiles = [inputPath];
    console.log(`Merged datasets: ${datasetFiles.length} exports`);
  } else {
    // Nothing to train on — hard exit (the finally block below is never reached).
    console.error("No ml-dataset-*.jsonl found in data/ml/export");
    process.exit(1);
  }
  try {
    const inputStat = await import_promises.default.stat(inputPath);
    const totalInputBytes = inputStat.size;
    // Incremental training kicks in automatically above this size threshold (GiB).
    const autoIncrementalThresholdGb = asInt(
      process.env.ML_TRAIN_INCREMENTAL_THRESHOLD_GB,
      2
    );
    const autoIncrementalThresholdBytes = autoIncrementalThresholdGb * 1024 * 1024 * 1024;
    const forceIncremental = asBool(process.env.ML_TRAIN_INCREMENTAL);
    const disableIncremental = asBool(process.env.ML_TRAIN_NO_INCREMENTAL);
    // Explicit disable beats force; otherwise force or the size threshold enables it.
    const useIncremental = !disableIncremental && (forceIncremental || totalInputBytes >= autoIncrementalThresholdBytes);
    const chunkSize = asInt(process.env.ML_TRAIN_CHUNK_SIZE, 1e4);
    const incrementalIterations = asInt(
      process.env.ML_TRAIN_INCREMENTAL_ITERATIONS,
      30
    );
    const trainRecentDays = asNonNegativeInt(
      process.env.ML_TRAIN_RECENT_DAYS,
      60
    );
    const enableEnsemble = asBool(process.env.ML_TRAIN_ENSEMBLE);
    const forceEnsemble = asBool(process.env.ML_TRAIN_FORCE_ENSEMBLE);
    const disableEnsemble = asBool(process.env.ML_TRAIN_NO_ENSEMBLE);
    // NOTE(review): disable wins over force here, unlike some "force" semantics — confirm intended.
    const useEnsemble = (forceEnsemble || enableEnsemble) && !disableEnsemble;
    const walkForwardFolds = asNonNegativeInt(
      process.env.ML_TRAIN_WALK_FORWARD_FOLDS,
      2
    );
    const featureProfile = (process.env.ML_TRAIN_FEATURE_PROFILE ?? "all").trim().toLowerCase();
    const reportDir = (process.env.ML_TRAIN_REPORT_DIR ?? "data/ml/models").trim();
    const testDays = asInt(process.env.ML_TRAIN_TEST_DAYS, 30);
    // Incremental mode is only announced for catboost; presumably only that trainer supports it.
    if (useIncremental && modelType === "catboost") {
      console.log(
        `Using incremental mode (total input ${(totalInputBytes / 1024 / 1024 / 1024).toFixed(2)} GiB, chunk size ${chunkSize}, iters/chunk ${incrementalIterations})`
      );
    }
    if (!useEnsemble) {
      console.log("Using single-model mode (outer ensemble disabled).");
    }
    console.log(`Train recent days: ${trainRecentDays}`);
    console.log(`Walk-forward folds: ${walkForwardFolds}`);
    console.log(`Feature profile: ${featureProfile}`);
    // Build (or reuse, if cached) the holdout/prod/walk-forward split files on disk.
    const splitFiles = await prepareTrainWindowFiles({
      inputPath,
      testDays,
      trainRecentDays,
      walkForwardFolds,
      enforceCausalityGuard: !asBool(
        process.env.ML_TRAIN_DISABLE_CAUSALITY_GUARD
      )
    });
    cleanupFiles.push(...splitFiles.cleanup);
    const splitMode = splitFiles.reused ? "Reusing" : "Prepared";
    console.log(
      `${splitMode} split key=${splitFiles.key} export_hash=${splitFiles.exportHash.slice(0, 12)}`
    );
    console.log(
      `${splitMode} holdout files: train=${import_path.default.basename(splitFiles.holdoutTrainPath)} (${splitFiles.counts.holdoutTrainRows} rows), test=${import_path.default.basename(splitFiles.holdoutTestPath)} (${splitFiles.counts.holdoutTestRows} rows)`
    );
    console.log(
      `${splitMode} prod file: ${import_path.default.basename(splitFiles.prodPath)} (${splitFiles.counts.prodRows} rows)`
    );
    if (splitFiles.walkForwardFolds.length) {
      console.log(
        `${splitMode} walk-forward source rows: ${splitFiles.counts.walkForwardSourceRows}`
      );
      for (const foldEntry of splitFiles.walkForwardFolds) {
        console.log(
          `${splitMode} walk-forward fold ${foldEntry.fold}: train=${import_path.default.basename(foldEntry.trainPath)} (${foldEntry.trainRows} rows), test=${import_path.default.basename(foldEntry.testPath)} (${foldEntry.testRows} rows)`
        );
      }
    } else {
      console.log(`${splitMode} walk-forward folds disabled.`);
    }
    // Full `docker compose run` argv. Host paths are rewritten to the container's
    // /app/data/ml/export mount by basename — assumes exportDir maps to that mount.
    const trainArgs = [
      "compose",
      "-f",
      "docker-compose.ml.yml",
      "run",
      "--rm",
      "ml",
      "python",
      "/app/ml/train.py",
      "--input",
      `/app/data/ml/export/${import_path.default.basename(splitFiles.holdoutTrainPath)}`,
      "--test-input",
      `/app/data/ml/export/${import_path.default.basename(splitFiles.holdoutTestPath)}`,
      "--prod-input",
      `/app/data/ml/export/${import_path.default.basename(splitFiles.prodPath)}`,
      "--strategy",
      selected,
      "--model-type",
      modelType,
      "--feature-profile",
      featureProfile,
      "--train-recent-days",
      String(trainRecentDays),
      "--walk-forward-folds",
      String(walkForwardFolds),
      "--test-days",
      String(testDays),
      "--report-dir",
      reportDir,
      ...useEnsemble ? ["--ensemble"] : [],
      // Fold train/test file pairs are passed as repeated flag pairs, in fold order.
      ...splitFiles.walkForwardFolds.flatMap((foldEntry) => [
        "--walk-forward-fold-train-input",
        `/app/data/ml/export/${import_path.default.basename(foldEntry.trainPath)}`,
        "--walk-forward-fold-test-input",
        `/app/data/ml/export/${import_path.default.basename(foldEntry.testPath)}`
      ])
    ];
    const startedAt = Date.now();
    let lastOutputAt = startedAt;
    // Tracks whether docker ever emitted anything; the watchdog below only kills
    // a process that has been silent since startup (see `!sawDockerOutput`).
    let sawDockerOutput = false;
    const heartbeatSec = asInt(process.env.ML_TRAIN_HEARTBEAT_SEC, 10);
    const trainDebug = asBool(process.env.ML_TRAIN_DEBUG);
    const noOutputTimeoutSec = asInt(
      process.env.ML_TRAIN_DOCKER_NO_OUTPUT_TIMEOUT_SEC,
      90
    );
    console.log(`Starting train command: docker ${trainArgs.join(" ")}`);
    // stdin is inherited (compose may prompt); stdout/stderr are piped so we can
    // timestamp output for the watchdog. PYTHONUNBUFFERED keeps trainer logs live.
    const child = (0, import_child_process.spawn)("docker", trainArgs, {
      stdio: ["inherit", "pipe", "pipe"],
      env: {
        ...process.env,
        PYTHONUNBUFFERED: "1",
        COMPOSE_IGNORE_ORPHANS: "1"
      }
    });
    // pipeNormalized is defined elsewhere; presumably line-splits the stream and
    // returns an unsubscribe function — TODO confirm.
    const stopStdoutPipe = pipeNormalized(child.stdout, (line) => {
      sawDockerOutput = true;
      lastOutputAt = Date.now();
      process.stdout.write(`${line}
`);
    });
    const stopStderrPipe = pipeNormalized(child.stderr, (line) => {
      sawDockerOutput = true;
      lastOutputAt = Date.now();
      process.stderr.write(`${line}
`);
    });
    // Periodic heartbeat: optional debug status line, plus the no-output watchdog.
    const heartbeat = setInterval(
      () => {
        const elapsedSec = Math.floor((Date.now() - startedAt) / 1e3);
        const silenceSec = Math.floor((Date.now() - lastOutputAt) / 1e3);
        if (trainDebug) {
          const nodeMem = formatBytes(process.memoryUsage().rss);
          const mlMem = getMlContainerMemUsage();
          process.stdout.write(
            `
[train] still running... elapsed ${elapsedSec}s, silence=${silenceSec}s (model=${modelType}, strategy=${selected}, node_rss=${nodeMem}, ml_mem=${mlMem})
`
          );
        }
        // Watchdog: only fires if docker produced NO output at all since spawn
        // (a hung daemon), never once the trainer has started logging.
        if (noOutputTimeoutSec > 0 && !sawDockerOutput && silenceSec >= noOutputTimeoutSec) {
          process.stderr.write(
            `[train] docker produced no output for ${silenceSec}s; terminating process. Check Docker Desktop/daemon health.
`
          );
          child.kill("SIGTERM");
          // Escalate to SIGKILL after 5 s if the child still hasn't exited.
          setTimeout(() => {
            if (child.exitCode === null && child.signalCode === null) {
              child.kill("SIGKILL");
            }
          }, 5e3).unref();
        }
      },
      Math.max(heartbeatSec, 1) * 1e3
    );
    // Resolve (never reject) on either spawn failure or child exit.
    const result = await new Promise((resolve) => {
      child.on("error", (error) => resolve({ code: 1, signal: null, error }));
      child.on("exit", (code2, signal) => resolve({ code: code2, signal }));
    });
    clearInterval(heartbeat);
    stopStdoutPipe();
    stopStderrPipe();
    // code is null when the child died from a signal; treat that as failure (1).
    const code = result.code ?? 1;
    let finalCode = code;
    if (result.error) {
      console.error(`Failed to start docker process: ${result.error.message}`);
    }
    // 137 = 128 + SIGKILL(9), the classic OOM-killer exit code.
    if (result.signal === "SIGKILL" || code === 137) {
      console.error(
        "Training was killed (exit 137). Most likely out-of-memory. Try ML_TRAIN_USE_LATEST_ONLY=1, random_forest/extra_trees, or a smaller export."
      );
    }
    if (code === 0) {
      // Trainer claims success — verify artifacts actually exist for this strategy.
      const modelDir = import_path.default.join(modelDirRoot, "models");
      let savedModels = [];
      try {
        const entries = await import_promises.default.readdir(modelDir, { withFileTypes: true });
        // Keep "<strategy>.joblib" plus "<strategy>.*.modelN.joblib" ensemble members.
        savedModels = entries.filter((entry) => entry.isFile()).map((entry) => entry.name).filter(
          (name) => name === `${selected}.joblib` || /^.+\.model\d+\.joblib$/.test(name)
        ).filter((name) => name.startsWith(`${selected}.`)).sort();
      } catch {
        // Missing/unreadable model dir is handled as "no artifacts" below.
        savedModels = [];
      }
      if (!savedModels.length) {
        console.error(
          `Training finished but no model artifacts found in ${modelDir} for strategy "${selected}".`
        );
        finalCode = 1;
      } else {
        console.log(`Saved model artifacts (${savedModels.length}):`);
        for (const name of savedModels) {
          console.log(` - data/ml/models/${name}`);
        }
      }
    }
    process.exitCode = finalCode;
  } finally {
    // Best-effort removal of merged dataset / split temp files ({ force } ignores missing).
    for (const file of cleanupFiles) {
      await import_promises.default.rm(file, { force: true });
    }
  }
  // Force exit so lingering handles (pipes, timers) can't keep the process alive.
  process.exit(process.exitCode ?? 1);
};
1053
// Script entry point: kick off the training workflow and make any unhandled
// rejection fatal with a non-zero exit code.
run().catch((error) => {
  console.error("Failed to train:", error);
  process.exit(1);
});