ins-pricing 0.5.0-py3-none-any.whl → 0.5.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ins_pricing/cli/BayesOpt_entry.py +15 -5
- ins_pricing/cli/BayesOpt_incremental.py +43 -10
- ins_pricing/cli/Explain_Run.py +16 -5
- ins_pricing/cli/Explain_entry.py +29 -8
- ins_pricing/cli/Pricing_Run.py +16 -5
- ins_pricing/cli/bayesopt_entry_runner.py +45 -12
- ins_pricing/cli/utils/bootstrap.py +23 -0
- ins_pricing/cli/utils/cli_config.py +34 -15
- ins_pricing/cli/utils/import_resolver.py +14 -14
- ins_pricing/cli/utils/notebook_utils.py +120 -106
- ins_pricing/cli/watchdog_run.py +15 -5
- ins_pricing/frontend/app.py +132 -61
- ins_pricing/frontend/config_builder.py +33 -0
- ins_pricing/frontend/example_config.json +11 -0
- ins_pricing/frontend/runner.py +340 -388
- ins_pricing/modelling/README.md +1 -1
- ins_pricing/modelling/bayesopt/README.md +29 -11
- ins_pricing/modelling/bayesopt/config_components.py +12 -0
- ins_pricing/modelling/bayesopt/config_preprocess.py +50 -13
- ins_pricing/modelling/bayesopt/core.py +47 -19
- ins_pricing/modelling/bayesopt/model_plotting_mixin.py +20 -14
- ins_pricing/modelling/bayesopt/models/model_ft_components.py +349 -342
- ins_pricing/modelling/bayesopt/models/model_ft_trainer.py +11 -5
- ins_pricing/modelling/bayesopt/models/model_gnn.py +20 -14
- ins_pricing/modelling/bayesopt/models/model_resn.py +9 -3
- ins_pricing/modelling/bayesopt/trainers/trainer_base.py +62 -50
- ins_pricing/modelling/bayesopt/trainers/trainer_ft.py +61 -53
- ins_pricing/modelling/bayesopt/trainers/trainer_glm.py +9 -3
- ins_pricing/modelling/bayesopt/trainers/trainer_gnn.py +40 -32
- ins_pricing/modelling/bayesopt/trainers/trainer_resn.py +36 -24
- ins_pricing/modelling/bayesopt/trainers/trainer_xgb.py +240 -37
- ins_pricing/modelling/bayesopt/utils/distributed_utils.py +193 -186
- ins_pricing/modelling/bayesopt/utils/torch_trainer_mixin.py +23 -10
- ins_pricing/pricing/factors.py +67 -56
- ins_pricing/setup.py +1 -1
- ins_pricing/utils/__init__.py +7 -6
- ins_pricing/utils/device.py +45 -24
- ins_pricing/utils/logging.py +34 -1
- ins_pricing/utils/profiling.py +8 -4
- {ins_pricing-0.5.0.dist-info → ins_pricing-0.5.1.dist-info}/METADATA +182 -182
- {ins_pricing-0.5.0.dist-info → ins_pricing-0.5.1.dist-info}/RECORD +43 -42
- {ins_pricing-0.5.0.dist-info → ins_pricing-0.5.1.dist-info}/WHEEL +0 -0
- {ins_pricing-0.5.0.dist-info → ins_pricing-0.5.1.dist-info}/top_level.txt +0 -0
ins_pricing/frontend/runner.py
CHANGED
@@ -1,388 +1,340 @@
-"""
-Unified Task Runner with Real-time Logging
-Executes model training, explanation, plotting, and other tasks based on config.
-"""
-
-import sys
-import
-import
-import
-import
-import
-
-from
-import
[… removed lines 15-71 were not captured in this diff view …]
-"""
[… removed lines 73-226 were not captured in this diff view …]
-except Exception as e:
-exception_holder.append(e)
-
-import traceback
-
-sys.
-
-self.task_thread
[… removed lines 238-341 were not captured in this diff view …]
-class StreamToLogger:
-    """
-    Fake file-like stream object that redirects writes to a logger instance.
-    """
-
-    def __init__(self, logger, log_level=logging.INFO):
-        self.logger = logger
-        self.log_level = log_level
-        self.linebuf = ''
-
-    def write(self, buf):
-        for line in buf.rstrip().splitlines():
-            self.logger.log(self.log_level, line.rstrip())
-
-    def flush(self):
-        pass
-
-
-def setup_logger(name: str = "task") -> logging.Logger:
-    """
-    Set up a logger for task execution.
-
-    Args:
-        name: Logger name
-
-    Returns:
-        Configured logger instance
-    """
-    logger = logging.getLogger(name)
-    logger.setLevel(logging.INFO)
-
-    # Create console handler
-    console_handler = logging.StreamHandler()
-    console_handler.setLevel(logging.INFO)
-
-    # Create formatter
-    formatter = logging.Formatter(
-        '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
-        datefmt='%Y-%m-%d %H:%M:%S'
-    )
-    console_handler.setFormatter(formatter)
-
-    # Add handler to logger
-    if not logger.handlers:
-        logger.addHandler(console_handler)
-
-    return logger
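
StreamToLogger and setup_logger above are carried over unchanged into 0.5.1 (they reappear in the added lines below). A minimal usage sketch of that pair — the contextlib redirection and the "demo" logger name are illustrative assumptions, not code from the package:

import contextlib
import logging

from ins_pricing.frontend.runner import StreamToLogger, setup_logger

logger = setup_logger("demo")

# Anything printed inside this block is re-emitted through the logger,
# one record per non-empty line, using setup_logger's console format.
with contextlib.redirect_stdout(StreamToLogger(logger, logging.INFO)):
    print("fitting model...")
    print("done")
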
+"""
+Unified Task Runner with Real-time Logging
+Executes model training, explanation, plotting, and other tasks based on config.
+"""
+
+import sys
+import os
+import threading
+import queue
+import time
+import json
+import subprocess
+from pathlib import Path
+from typing import Generator, Optional, Dict, Any, List, Sequence, Tuple
+import logging
+
+from ins_pricing.utils import get_logger, log_print
+
+_logger = get_logger("ins_pricing.frontend.runner")
+_logger.propagate = False
+if not _logger.handlers:
+    _handler = logging.StreamHandler()
+    _handler.setFormatter(logging.Formatter("%(message)s"))
+    _logger.addHandler(_handler)
+
+
+def _log(*args, **kwargs) -> None:
+    log_print(_logger, *args, **kwargs)
+
+class LogCapture:
+    """Capture stdout and stderr for real-time display."""
+
+    def __init__(self):
+        self.log_queue = queue.Queue()
+        self.stop_flag = threading.Event()
+
+    def write(self, text: str):
+        """Write method for capturing output."""
+        if text and text.strip():
+            self.log_queue.put(text)
+
+    def flush(self):
+        """Flush method (required for file-like objects)."""
+        pass
+
+
+class TaskRunner:
+    """
+    Run model tasks (training, explain, plotting, etc.) and capture logs.
+
+    Supports all task modes defined in config.runner.mode:
+    - entry: Standard model training
+    - explain: Model explanation (permutation, SHAP, etc.)
+    - incremental: Incremental training
+    - watchdog: Watchdog mode for monitoring
+    """
+
+    def __init__(self):
+        self.task_thread = None
+        self.log_capture = None
+        self._proc: Optional[subprocess.Popen] = None
+
+    def _detect_task_mode(self, config_path: str) -> str:
+        """
+        Detect the task mode from config file.
+
+        Args:
+            config_path: Path to configuration JSON file
+
+        Returns:
+            Task mode string (e.g., 'entry', 'explain', 'incremental', 'watchdog')
+        """
+        try:
+            with open(config_path, 'r', encoding='utf-8') as f:
+                config = json.load(f)
+
+            runner_config = config.get('runner', {})
+            mode = runner_config.get('mode', 'entry')
+            return str(mode).lower()
+
+        except Exception as e:
+            _log(f"Warning: Could not detect task mode, defaulting to 'entry': {e}")
+            return 'entry'
+
+    def _build_cmd_from_config(self, config_path: str) -> Tuple[List[str], str]:
+        """Build the command to execute based on config.runner.mode."""
+        from ins_pricing.cli.utils.notebook_utils import build_cmd_from_config
+
+        return build_cmd_from_config(config_path)
+
+    def run_task(self, config_path: str) -> Generator[str, None, None]:
+        """
+        Run task based on config file with real-time log capture.
+
+        This method automatically detects the task mode from the config file
+        (training, explain, plotting, etc.) and runs the appropriate task.
+
+        Args:
+            config_path: Path to configuration JSON file
+
+        Yields:
+            Log lines as they are generated
+        """
+        self.log_capture = LogCapture()
+
+        # Configure logging to capture both file and stream output
+        log_handler = logging.StreamHandler(self.log_capture)
+        log_handler.setLevel(logging.INFO)
+        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
+        log_handler.setFormatter(formatter)
+
+        # Add handler to root logger
+        root_logger = logging.getLogger()
+        original_handlers = root_logger.handlers.copy()
+        root_logger.addHandler(log_handler)
+
+        # Store original stdout/stderr
+        original_stdout = sys.stdout
+        original_stderr = sys.stderr
+
+        try:
+            # Detect task mode
+            task_mode = self._detect_task_mode(config_path)
+
+            # Start task in separate thread
+            exception_holder = []
+
+            def task_worker():
+                try:
+                    sys.stdout = self.log_capture
+                    sys.stderr = self.log_capture
+
+                    # Log start
+                    cmd, task_mode = self._build_cmd_from_config(config_path)
+                    _log(f"Starting task [{task_mode}] with config: {config_path}")
+                    _log("=" * 80)
+
+                    # Run subprocess with streamed output
+                    proc = subprocess.Popen(
+                        cmd,
+                        stdout=subprocess.PIPE,
+                        stderr=subprocess.STDOUT,
+                        text=True,
+                        bufsize=1,
+                        cwd=str(Path(config_path).resolve().parent),
+                    )
+                    self._proc = proc
+                    if proc.stdout is not None:
+                        for line in proc.stdout:
+                            _log(line.rstrip())
+                    return_code = proc.wait()
+                    if return_code != 0:
+                        raise RuntimeError(f"Task exited with code {return_code}")
+
+                    _log("=" * 80)
+                    _log(f"Task [{task_mode}] completed successfully!")
+
+                except Exception as e:
+                    exception_holder.append(e)
+                    _log(f"Error during task execution: {str(e)}")
+                    import traceback
+                    _log(traceback.format_exc())
+
+                finally:
+                    self._proc = None
+                    sys.stdout = original_stdout
+                    sys.stderr = original_stderr
+
+            self.task_thread = threading.Thread(target=task_worker, daemon=True)
+            self.task_thread.start()
+
+            # Yield log lines as they come in
+            last_update = time.time()
+            while self.task_thread.is_alive() or not self.log_capture.log_queue.empty():
+                try:
+                    # Try to get log with timeout
+                    log_line = self.log_capture.log_queue.get(timeout=0.1)
+                    yield log_line
+                    last_update = time.time()
+
+                except queue.Empty:
+                    # Send heartbeat every 5 seconds
+                    if time.time() - last_update > 5:
+                        yield "."
+                        last_update = time.time()
+                    continue
+
+            # Wait for thread to complete
+            self.task_thread.join(timeout=1)
+
+            # Check for exceptions
+            if exception_holder:
+                raise exception_holder[0]
+
+        finally:
+            # Restore original stdout/stderr
+            sys.stdout = original_stdout
+            sys.stderr = original_stderr
+
+            # Restore original logging handlers
+            root_logger.handlers = original_handlers
+
+    def run_callable(self, func, *args, **kwargs) -> Generator[str, None, None]:
+        """Run an in-process callable and stream stdout/stderr."""
+        self.log_capture = LogCapture()
+
+        log_handler = logging.StreamHandler(self.log_capture)
+        log_handler.setLevel(logging.INFO)
+        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
+        log_handler.setFormatter(formatter)
+
+        root_logger = logging.getLogger()
+        original_handlers = root_logger.handlers.copy()
+        root_logger.addHandler(log_handler)
+
+        original_stdout = sys.stdout
+        original_stderr = sys.stderr
+
+        try:
+            exception_holder = []
+
+            def task_worker():
+                try:
+                    sys.stdout = self.log_capture
+                    sys.stderr = self.log_capture
+                    func(*args, **kwargs)
+                except Exception as e:
+                    exception_holder.append(e)
+                    _log(f"Error during task execution: {str(e)}")
+                    import traceback
+                    _log(traceback.format_exc())
+                finally:
+                    sys.stdout = original_stdout
+                    sys.stderr = original_stderr
+
+            self.task_thread = threading.Thread(target=task_worker, daemon=True)
+            self.task_thread.start()
+
+            last_update = time.time()
+            while self.task_thread.is_alive() or not self.log_capture.log_queue.empty():
+                try:
+                    log_line = self.log_capture.log_queue.get(timeout=0.1)
+                    yield log_line
+                    last_update = time.time()
+                except queue.Empty:
+                    if time.time() - last_update > 5:
+                        yield "."
+                        last_update = time.time()
+                    continue
+
+            self.task_thread.join(timeout=1)
+            if exception_holder:
+                raise exception_holder[0]
+        finally:
+            sys.stdout = original_stdout
+            sys.stderr = original_stderr
+            root_logger.handlers = original_handlers
+
+    def stop_task(self):
+        """Stop the current task process."""
+        if self.log_capture:
+            self.log_capture.stop_flag.set()
+
+        proc = self._proc
+        if proc is not None and proc.poll() is None:
+            try:
+                if os.name == "nt":
+                    subprocess.run(
+                        ["taskkill", "/PID", str(proc.pid), "/T", "/F"],
+                        stdout=subprocess.DEVNULL,
+                        stderr=subprocess.DEVNULL,
+                        check=False,
+                    )
+                else:
+                    proc.terminate()
+                    try:
+                        proc.wait(timeout=5)
+                    except Exception:
+                        proc.kill()
+            except Exception:
+                try:
+                    proc.kill()
+                except Exception:
+                    pass
+
+        if self.task_thread and self.task_thread.is_alive():
+            self.task_thread.join(timeout=5)
+
+
+# Backward compatibility aliases
+TrainingRunner = TaskRunner
+
+
+class StreamToLogger:
+    """
+    Fake file-like stream object that redirects writes to a logger instance.
+    """
+
+    def __init__(self, logger, log_level=logging.INFO):
+        self.logger = logger
+        self.log_level = log_level
+        self.linebuf = ''
+
+    def write(self, buf):
+        for line in buf.rstrip().splitlines():
+            self.logger.log(self.log_level, line.rstrip())
+
+    def flush(self):
+        pass
+
+
+def setup_logger(name: str = "task") -> logging.Logger:
+    """
+    Set up a logger for task execution.
+
+    Args:
+        name: Logger name
+
+    Returns:
+        Configured logger instance
+    """
+    logger = logging.getLogger(name)
+    logger.setLevel(logging.INFO)
+
+    # Create console handler
+    console_handler = logging.StreamHandler()
+    console_handler.setLevel(logging.INFO)
+
+    # Create formatter
+    formatter = logging.Formatter(
+        '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+        datefmt='%Y-%m-%d %H:%M:%S'
+    )
+    console_handler.setFormatter(formatter)
+
+    # Add handler to logger
+    if not logger.handlers:
+        logger.addHandler(console_handler)
+
+    return logger
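
The new run_task flow builds a CLI command from config.runner.mode and streams the subprocess output back line by line. A minimal driving sketch, assuming ins_pricing 0.5.1 is installed and the rest of the config schema is filled in as required by the chosen mode (the dict below is a placeholder, not the shipped example_config.json):

import json
import tempfile
from pathlib import Path

from ins_pricing.frontend.runner import TaskRunner

# Placeholder config: run_task() only reads runner.mode to pick the entry
# point; the remaining fields depend on the selected task and are omitted here.
config = {"runner": {"mode": "entry"}}

config_path = Path(tempfile.mkdtemp()) / "run_config.json"
config_path.write_text(json.dumps(config), encoding="utf-8")

runner = TaskRunner()
try:
    # run_task() yields captured log lines plus "." heartbeats while the
    # subprocess is alive, and raises if the task exits non-zero.
    for line in runner.run_task(str(config_path)):
        print(line.rstrip())
finally:
    runner.stop_task()
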