physicsworks-1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,606 @@
+ """
+ Stage execution logic for the runner package.
+ """
+
+ import requests
+ import json
+ import os
+ import shutil
+ import zipfile
+ import uuid
+ from pathlib import Path
+ from typing import Dict, List, Any
+
+ from .config import Configuration, RuntimeAttributes, Stage, ExecutionMode
+ from .logger import DebugLogger
+ from .server import ServerCommunicator
+ from .watcher import FileSystemWatcher
+
+
+ class StageExecutor:
+     """Executes individual stages with debug support"""
+
+     def __init__(self, config: Configuration, runtime_attrs: RuntimeAttributes,
+                  server_comm: ServerCommunicator, watcher: FileSystemWatcher,
+                  logger: DebugLogger):
+         self.config = config
+         self.runtime_attrs = runtime_attrs
+         self.server_comm = server_comm
+         self.watcher = watcher
+         self.logger = logger
+
+     def should_execute_stage(self, stage: Stage) -> bool:
+         """Check if stage should be executed based on starting stage and skip list"""
+         stage_order = [Stage.START, Stage.INPUT, Stage.DOWNLOAD, Stage.UPLOAD, Stage.POSTPROCESS]
+         starting_index = stage_order.index(self.config.starting_stage)
+         current_index = stage_order.index(stage)
+
+         return current_index >= starting_index and stage not in self.config.skip_stages
+
+     def execute_stage(self, stage: Stage) -> bool:
+         """Execute a specific stage"""
+         if not self.should_execute_stage(stage):
+             self.logger.stage_skip(stage)
+             return True
+
+         self.logger.stage_start(stage)
+
+         try:
+             if stage == Stage.INPUT:
+                 return self._execute_input_stage()
+             elif stage == Stage.DOWNLOAD:
+                 return self._execute_download_stage()
+             elif stage == Stage.UPLOAD:
+                 return self._execute_upload_stage()
+             elif stage == Stage.POSTPROCESS:
+                 return self._execute_postprocess_stage()
+             else:
+                 self.logger.error(f"Unknown stage: {stage}")
+                 return False
+
+         except Exception as e:
+             self.logger.error(f"Error in stage {stage.value}: {e}")
+             if self.config.debug_mode:
+                 import traceback
+                 traceback.print_exc()
+             return False
+         finally:
+             self.logger.stage_complete(stage)
+
+     def _execute_input_stage(self) -> bool:
+         """Execute input retrieval stage"""
+         self.logger.info("Retrieving input data from server")
+         self._prepare_location(self.config.inputs_path)
+         return self._get_inputs()
+
+     def _execute_download_stage(self) -> bool:
+         """Execute download stage"""
+         self.logger.info("Downloading input files")
+
+         # Get input data from server first
+         if not self._get_inputs():
+             return False
+
+         # Create all required directories
+         self._prepare_location(self.config.downloads_path)
+         self._prepare_location(self.config.inputs_path)
+         self._prepare_location(self.config.outputs_path)
+         self._prepare_location(self.config.raw_path)
+         self._prepare_location(self.config.scripts_path)
+
+         # Create output subdirectories: graphics, logs, media, plots
+         for subdir in ['graphics', 'logs', 'media', 'plots']:
+             self._prepare_location(os.path.join(self.config.outputs_path, subdir))
+
+         # Copy the state.txt template from inputs to outputs if it exists
+         self._copy_state_json_template()
+
+         return self._download_input_files()
+
+     def _execute_upload_stage(self) -> bool:
+         """Execute upload stage"""
+         self.logger.info("Uploading output files")
+         return self._upload_output_files()
+
+     def _execute_postprocess_stage(self) -> bool:
+         """Execute post-processing stage (excluding pvbatch, which is user-defined)"""
+         self.logger.info("Running post-processing")
+
+         # Run unified post-processing with configurable features (no pvbatch)
+         return self._postprocess_unified()
+
+     def _copy_state_json_template(self):
+         """Copy the state.txt template from inputs to outputs"""
+         try:
+             state_txt_input = os.path.join(self.config.inputs_path, "state.txt")
+             state_txt_output = os.path.join(self.config.outputs_path, "state.txt")
+
+             if os.path.exists(state_txt_input):
+                 shutil.copy2(state_txt_input, state_txt_output)
+                 self.logger.debug(f"Copied state.txt template from {state_txt_input} to {state_txt_output}")
+             else:
+                 self.logger.debug(f"No state.txt template found at {state_txt_input}, skipping")
+
+         except Exception as e:
+             self.logger.error(f"Error copying state.txt template: {e}")
+
+     # The remaining methods follow the same pattern: log, perform the work,
+     # and return False on any exception.
+
+     def _get_inputs(self) -> bool:
+         """Get input data from server"""
+         try:
+             config = self.config.config
+             response = requests.get(
+                 f"{config['host']}simulation/run_data/read/{config['project']}/{config['simulation']}",
+                 headers={'auth-token': config['token']}
+             )
+
+             if response.status_code != 200:
+                 self.logger.error(f"Failed to get inputs: {response.text}")
+                 return False
+
+             server_data = response.json()
+             simulation = server_data.get('simulation')
+
+             if not simulation:
+                 raise Exception("Simulation not found")
+
+             # Create and register the run node
+             self.server_comm.run_node = requests.put(
+                 f"{config['host']}simulation/run/update/{config['simulation']}",
+                 json={},
+                 headers={'auth-token': config['token']}
+             ).json()
+
+             self.runtime_attrs.run_id = self.server_comm.run_node['id']
+
+             # Update config with resource info
+             config['resource'] = {
+                 'id': self.server_comm.run_node['id'],
+                 'name': self.server_comm.run_node['name']
+             }
+
+             with open(self.config.config_path, 'w') as config_file:
+                 config_file.write(json.dumps(config))
+
+             # Process materials, tree, inputs, etc.
+             materials = server_data.get('materials', [])
+             materials_data = self._process_materials(materials)
+             self._dump_file(materials_data, os.path.join(self.config.inputs_path, "variants.json"))
+
+             tree = simulation.get('tree', {})
+             run_inputs = server_data.get('inputs', {})
+             self._dump_file(tree, os.path.join(self.config.inputs_path, "workbench.json"))
+             self._dump_file(run_inputs, os.path.join(self.config.inputs_path, "runInputs.json"))
+
+             # Extract meshes
+             if "meshes" in simulation:
+                 self.server_comm.meshes = simulation['meshes']
+
+             # Write simulation config
+             self._dump_file(self.runtime_attrs.compression, os.path.join(self.config.inputs_path, "config.json"))
+
+             # Store solver config
+             self.server_comm.solver_config = server_data.get('physicsFeature', {}).get('config', {})
+             self.server_comm.inputs = server_data.get('physicsFeature', {}).get('inputs', {})
+             self.server_comm.scripts = server_data.get('physicsFeature', {}).get('scripts', {})
+
+             # Write solver config to file for use by scripts
+             self._dump_file(self.server_comm.solver_config, os.path.join(self.config.inputs_path, "solverConfig.json"))
+
+             # Detect execution mode
+             if 'container' in self.server_comm.solver_config:
+                 self.config.execution_mode = ExecutionMode.REMOTE
+                 self.logger.info("Detected remote execution mode")
+             else:
+                 self.config.execution_mode = ExecutionMode.NATIVE
+                 self.logger.info("Detected native execution mode")
+
+             # Store config reference for server communicator
+             self.server_comm.config = config
+
+             return True
+
+         except Exception as e:
+             self.logger.error(f"Error getting inputs: {e}")
+             return False
+
+     def _process_materials(self, materials: List[Dict[str, Any]]) -> Dict[str, Any]:
+         """Process materials data"""
+         materials_data = {}
+         for current in materials:
+             composition = current.get('composition', {})
+             variant = {key: current[key] for key in current if key != 'composition'}
+             properties = variant.get('properties', [])
+
+             # Flatten single-valued properties; keep full value lists otherwise
+             for i, prop in enumerate(properties):
+                 values = prop.get('values', [])
+                 if prop.get('type') == 'SingleValue' and values:
+                     properties[i] = {**prop, 'variantId': current.get('id'), 'value': values[0].get('y')}
+                 else:
+                     properties[i] = {**prop, 'variantId': current.get('id'), 'values': values}
+
+             variant['properties'] = properties
+             materials_data[current.get('id')] = {'composition': composition, 'variant': variant}
+
+         return materials_data
+
+     def _download_input_files(self) -> bool:
+         """Download input files"""
+         try:
+             # Download mesh files
+             if hasattr(self.server_comm, 'meshes') and self.server_comm.meshes:
+                 for mesh in self.server_comm.meshes:
+                     if 'rawMeshFile' in mesh and 'id' in mesh:
+                         mesh_ext = mesh['rawMeshFile'].split(".")[-1]
+                         mesh_path = os.path.join(self.config.downloads_path, f"{mesh['id']}.{mesh_ext}")
+                         self._download_file(
+                             f"{self.config.config['host']}storage/runner/{mesh['rawMeshFile']}",
+                             mesh_path
+                         )
+                         self.logger.debug(f"Downloaded mesh file to {mesh_path}")
+
+             # Download inputs archive if specified
+             if 'inputsArchive' in self.config.config and self.server_comm.inputs:
+                 inputs_zip_path = os.path.join(self.config.downloads_path, 'inputs.zip')
+                 self._download_file(
+                     f"{self.config.config['host']}storage/runner/{self.server_comm.inputs}",
+                     inputs_zip_path
+                 )
+
+                 # Extract inputs archive
+                 with zipfile.ZipFile(inputs_zip_path, 'r') as zip_ref:
+                     zip_ref.extractall(self.config.inputs_path)
+
+                 self.logger.debug("Downloaded and extracted inputs archive")
+
+             # Download scripts archive if specified
+             if self.server_comm.scripts:
+                 scripts_zip_path = os.path.join(self.config.downloads_path, 'scripts.zip')
+                 self._download_file(
+                     f"{self.config.config['host']}storage/runner/{self.server_comm.scripts}",
+                     scripts_zip_path
+                 )
+
+                 # Extract scripts archive to scripts directory
+                 with zipfile.ZipFile(scripts_zip_path, 'r') as zip_ref:
+                     zip_ref.extractall(self.config.scripts_path)
+
+                 self.logger.debug("Downloaded and extracted scripts archive")
+
+             return True
+
+         except Exception as e:
+             self.logger.error(f"Error downloading input files: {e}")
+             return False
+
+     # Helper methods
+     def _prepare_location(self, path: str):
+         """Create directory if it doesn't exist"""
+         try:
+             Path(path).mkdir(parents=True, exist_ok=True)
+         except Exception as e:
+             self.logger.error(f"Error creating directory {path}: {e}")
+             raise
+
+     def _dump_file(self, obj: Any, path: str):
+         """Dump object to JSON file"""
+         try:
+             with open(path, 'w') as file:
+                 json.dump(obj, file, indent=2)
+         except Exception as e:
+             self.logger.error(f"Error writing file {path}: {e}")
+             raise
+
+     def _download_file(self, url: str, file_path: str):
+         """Download file from URL"""
+         try:
+             response = requests.get(url, allow_redirects=True, headers={'auth-token': self.config.config['token']})
+             response.raise_for_status()
+
+             with open(file_path, 'wb') as file:
+                 file.write(response.content)
+
+         except Exception as e:
+             self.logger.error(f"Error downloading file to {file_path}: {e}")
+             raise
+
+     # Upload and post-processing methods
+
+     def _upload_output_files(self) -> bool:
+         """Upload output files to server"""
+         try:
+             from .utils import FileUploader
+
+             # Calculate total size of all files to upload
+             total_size = sum(os.path.getsize(file_path) for file_path in self.runtime_attrs.output_files)
+             uploaded_size = 0
+
+             # Format sizes for display
+             def format_size(size_bytes):
+                 """Format bytes to human readable format"""
+                 for unit in ['B', 'KB', 'MB', 'GB']:
+                     if size_bytes < 1024.0:
+                         return f"{size_bytes:.1f}{unit}"
+                     size_bytes /= 1024.0
+                 return f"{size_bytes:.1f}TB"
+
+             # Progress callback: maps uploaded bytes onto the 92-100% range
+             def on_chunk_uploaded(chunk_bytes):
+                 nonlocal uploaded_size
+                 uploaded_size += chunk_bytes
+                 # Calculate progress from 92% to 100% based on uploaded size;
+                 # guard against division by zero when only empty files are queued
+                 if total_size > 0:
+                     progress = int(92 + (uploaded_size / total_size * 8))
+                 else:
+                     progress = 99
+                 # Cap at 99% - only the final status update will set 100%
+                 if progress >= 100:
+                     progress = 99
+
+                 # Set label based on upload completion
+                 if uploaded_size >= total_size:
+                     label = "upload complete"
+                 else:
+                     label = f"uploading results {format_size(uploaded_size)}/{format_size(total_size)}"
+
+                 self.server_comm.set_status("running", progress, label)
+
+             uploader = FileUploader(self.config, self.runtime_attrs, self.server_comm, self.logger, on_chunk_uploaded)
+
+             self.server_comm.set_status("running", 92, f"uploading results {format_size(0)}/{format_size(total_size)}")
+
+             for file_path in self.runtime_attrs.output_files:
+                 self.logger.debug(f"Uploading file: {file_path}")
+
+                 metadata = {
+                     "project": self.config.config['project'],
+                     "owner": self.config.config['owner'],
+                     "originalname": os.path.basename(file_path),
+                     "resource": self.config.config['job'],
+                     "resourceKind": "Run",
+                     "simulation": self.config.config['simulation'],
+                 }
+
+                 if file_path.endswith(".zip"):
+                     filename = self.runtime_attrs.filenames.get(file_path)
+                     uploader.upload_file(file_path, metadata, filename)
+                 else:
+                     uploader.upload_file(file_path, metadata)
+
+             return True
+
+         except Exception as e:
+             self.logger.error(f"Error uploading files: {e}")
+             return False
+
+     def _postprocess_unified(self) -> bool:
+         """Unified post-processing with configurable features (excluding pvbatch)"""
+         try:
+             from .utils import PostProcessor, ResultsZipper
+
+             post_processor = PostProcessor(self.config, self.runtime_attrs, self.server_comm, self.logger)
+             zipper = ResultsZipper(self.config, self.runtime_attrs, self.server_comm, self.logger)
+
+             # Set initial progress for post-processing stage
+             self.server_comm.set_status("running", 90, "post-processing")
+
+             # Set results first (always done)
+             post_processor.set_results()
+
+             # Note: pvbatch processing should be handled by user-defined scripts,
+             # not in the post-processing stage
+
+             # Create visualization archives based on available files
+             post_processor.create_visualization_archives()
+
+             # Zip results if downloadable paths are configured
+             if self.server_comm.solver_config.get('downloadable'):
+                 self.server_comm.set_status("running", 90, "zipping results")
+                 zipper.zip_results()
+
+             # Upload all output files
+             self._upload_output_files()
+
+             # Update downloadables node with result.zip
+             self._update_downloadables_node()
+
+             # Update graphics node with any visualization data
+             self._update_graphics_node()
+
+             # Set final status
+             if self.runtime_attrs.run_succeeded:
+                 self.server_comm.set_status("finished", 100, "finished")
+             else:
+                 raise Exception("The run failed")
+
+             return True
+
+         except Exception as e:
+             self.logger.error(f"Error in post-processing: {e}")
+             return False
+
+     def _update_graphics_node(self):
+         """Update graphics node with visualization data"""
+         try:
+             graphics_node = next(
+                 (node for node in self.server_comm.run_node.get('children', []) if node.get('slug') == "graphics"),
+                 None
+             )
+
+             if not graphics_node:
+                 return
+
+             if not graphics_node.get('children'):
+                 graphics_node['children'] = []
+
+             # Check for pvbatch_output.json from pvbatch processing
+             output_json_path = os.path.join(self.config.outputs_path, "pvbatch_output.json")
+             if os.path.exists(output_json_path):
+                 self._process_pvbatch_output(graphics_node, output_json_path)
+             else:
+                 # Create basic visualization entries for VTP files
+                 self._create_basic_visualization_entries(graphics_node)
+
+         except Exception as e:
+             self.logger.error(f"Error updating graphics node: {e}")
+
+     def _update_downloadables_node(self):
+         """Update downloadables node with result.zip"""
+         try:
+             # Find the downloadables node
+             downloadables_node = next(
+                 (node for node in self.server_comm.run_node.get('children', []) if node.get('slug') == 'downloadables'),
+                 None
+             )
+
+             if not downloadables_node:
+                 self.logger.debug("No downloadables node found, skipping update")
+                 return
+
+             # Find the result.zip filename among the uploaded files
+             result_zip_name = None
+             for file_path in self.runtime_attrs.output_files:
+                 if file_path.endswith("result.zip"):
+                     result_zip_name = self.runtime_attrs.filenames.get(file_path)
+                     break
+
+             if not result_zip_name:
+                 self.logger.debug("No result.zip found, skipping downloadables update")
+                 return
+
+             # Rebuild the downloadables node with the expected structure
+             simulation_id = self.config.config['simulation']
+             downloadables_node['id'] = str(uuid.uuid1())
+             downloadables_node['name'] = 'Downloadables'
+             downloadables_node['slug'] = 'downloadables'
+             downloadables_node['simulationId'] = simulation_id
+             downloadables_node['children'] = [
+                 {
+                     'id': str(uuid.uuid1()),
+                     'name': 'Result.zip',
+                     'slug': 'result.zip',
+                     'isFile': True,
+                     'filename': result_zip_name,
+                     'simulationId': simulation_id,
+                     'actions': {
+                         'type': 'command',
+                         'list': [{'name': 'Download', 'slug': 'download'}]
+                     }
+                 }
+             ]
+
+             self.logger.debug(f"Updated downloadables node with {result_zip_name}")
+
+         except Exception as e:
+             self.logger.error(f"Error updating downloadables node: {e}")
+
+     def _process_pvbatch_output(self, graphics_node, output_json_path):
+         """Process pvbatch_output.json and update the graphics node"""
+         try:
+             with open(output_json_path, 'r') as outfile:
+                 output = json.load(outfile)
+
+             # Update plots with CSV data if available
+             if 'csv' in output and self.runtime_attrs.plots_paths:
+                 plots_node = next(
+                     (node for node in self.server_comm.run_node.get('children', []) if node.get('slug') == 'plots'),
+                     None
+                 )
+                 if plots_node and plots_node.get('children'):
+                     for plot in plots_node['children']:
+                         if plot['name'] in output['csv']:
+                             plot['columns'] = output['csv'][plot['name']]
+
+             # Find the corresponding archive file
+             tar_filename = None
+             for output_file in self.runtime_attrs.output_files:
+                 if 'gltf' in output_file or 'pvbatch' in output_file:
+                     tar_filename = os.path.basename(output_file)
+                     break
+
+             if tar_filename:
+                 # Prepare metadata: time steps and per-region field lists
+                 metadata = {'times': {}, 'regions': {}}
+                 if 'bBox' in output:
+                     metadata['bBox'] = output['bBox']
+
+                 items = output.get('items', [])
+                 for output_item in items:
+                     time_index = str(output_item['timeIndex'])
+                     metadata['times'][time_index] = output_item['time']
+
+                     region = output_item['region']
+                     field = output_item['field']
+                     if region not in metadata['regions']:
+                         metadata['regions'][region] = [field]
+                     else:
+                         metadata['regions'][region] = list(set([field] + metadata['regions'][region]))
+
+                 # Add graphics entry
+                 gltf_id = str(uuid.uuid1())
+                 graphics_node['children'].append({
+                     'id': gltf_id,
+                     'name': output.get('type', 'Visualization'),
+                     'slug': output.get('slug', 'visualization'),
+                     'simulationId': self.config.config['simulation'],
+                     'value': {
+                         'id': str(uuid.uuid1()),
+                         'title': output.get('type', 'Visualization'),
+                         'slug': output.get('slug', 'visualization'),
+                         'simulationFields': True,
+                         'data': {
+                             'filename': tar_filename,
+                             'items': items,
+                             'metadata': metadata
+                         }
+                     }
+                 })
+
+                 # Update the simulation with the gltf ID reference so the
+                 # workbench can find the GLTF node for rendering
+                 self.server_comm.set_status(
+                     self.server_comm.runtime_attrs.status,
+                     self.server_comm.runtime_attrs.progress,
+                     self.server_comm.runtime_attrs.status_label,
+                     extras={'gltf': gltf_id}
+                 )
+
+         except Exception as e:
+             self.logger.error(f"Error processing pvbatch output: {e}")
+
+     def _create_basic_visualization_entries(self, graphics_node):
+         """Create basic visualization entries for VTP files"""
+         try:
+             # Look for a VTP archive among the output files
+             vtp_archive = None
+             for output_file in self.runtime_attrs.output_files:
+                 if 'vtp' in output_file and output_file.endswith('.tar.gz'):
+                     vtp_archive = os.path.basename(output_file)
+                     break
+
+             if vtp_archive:
+                 vtp_metadata = {
+                     "id": str(uuid.uuid1()),
+                     "type": "VTP",
+                     "slug": "vtp",
+                 }
+
+                 graphics_node['children'].append({
+                     'id': vtp_metadata["id"],
+                     'name': vtp_metadata['type'],
+                     'slug': vtp_metadata['slug'],
+                     'simulationId': self.config.config['simulation'],
+                     'value': {
+                         'id': str(uuid.uuid1()),
+                         'title': vtp_metadata['type'],
+                         'slug': vtp_metadata['slug'],
+                         'simulationFields': True,
+                         'data': {
+                             'filename': vtp_archive
+                         }
+                     }
+                 })
+
+         except Exception as e:
+             self.logger.error(f"Error creating basic visualization entries: {e}")
@@ -0,0 +1,39 @@
+ """
+ Interface utilities for status tracking and progress reporting.
+ """
+
+ import os
+ from datetime import datetime
+ from enum import Enum
+
+
+ class StepStatus(Enum):
+     """Status values for execution steps"""
+     PENDING = "pending"
+     RUNNING = "running"
+     FINISHED = "finished"
+     FAILED = "failed"
+
+
+ def append_status(outputs_dir: str, progress: int, message: str, step: str = "", status: StepStatus = StepStatus.RUNNING):
+     """
+     Append status update to state.txt file.
+
+     Writes a CSV-formatted line to state.txt with the format:
+     timestamp,progress,message,step,status
+
+     Args:
+         outputs_dir: Path to outputs directory
+         progress: Progress percentage (0-100)
+         message: Status message
+         step: Step name (optional)
+         status: Step status enum value
+     """
+     state_file = os.path.join(outputs_dir, "state.txt")
+     timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
+     # Convert enum to string value
+     status_str = status.value if isinstance(status, StepStatus) else str(status)
+
+     with open(state_file, "a", encoding="utf-8") as f:
+         f.write(f"{timestamp},{progress},{message},{step},{status_str}\n")
@@ -0,0 +1,37 @@
+ """
+ Enhanced logging for debugging runner operations.
+ """
+
+ from .config import Stage
+
+
+ class DebugLogger:
+     """Enhanced logging for debugging"""
+
+     def __init__(self, debug_mode: bool = False):
+         self.debug_mode = debug_mode
+
+     def info(self, message: str, stage: str = "MAIN"):
+         print(f"[{stage}] {message}")
+
+     def debug(self, message: str, stage: str = "DEBUG"):
+         if self.debug_mode:
+             print(f"[{stage}] {message}")
+
+     def error(self, message: str, stage: str = "ERROR"):
+         print(f"[{stage}] ERROR: {message}")
+
+     def stage_start(self, stage: Stage):
+         print(f"\n{'='*50}")
+         print(f"STARTING STAGE: {stage.value.upper()}")
+         print(f"{'='*50}")
+
+     def stage_skip(self, stage: Stage):
+         print(f"\n{'*'*50}")
+         print(f"SKIPPING STAGE: {stage.value.upper()}")
+         print(f"{'*'*50}")
+
+     def stage_complete(self, stage: Stage):
+         print(f"\n{'-'*50}")
+         print(f"COMPLETED STAGE: {stage.value.upper()}")
+         print(f"{'-'*50}")