pysfi 0.1.11__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pysfi-0.1.11.dist-info → pysfi-0.1.13.dist-info}/METADATA +3 -1
- pysfi-0.1.13.dist-info/RECORD +70 -0
- {pysfi-0.1.11.dist-info → pysfi-0.1.13.dist-info}/entry_points.txt +3 -0
- sfi/__init__.py +5 -3
- sfi/alarmclock/__init__.py +3 -0
- sfi/alarmclock/alarmclock.py +23 -40
- sfi/bumpversion/__init__.py +5 -3
- sfi/cleanbuild/__init__.py +3 -0
- sfi/cli.py +12 -2
- sfi/condasetup/__init__.py +1 -0
- sfi/docdiff/__init__.py +1 -0
- sfi/docdiff/docdiff.py +238 -0
- sfi/docscan/__init__.py +3 -3
- sfi/docscan/docscan_gui.py +150 -46
- sfi/img2pdf/__init__.py +0 -0
- sfi/img2pdf/img2pdf.py +453 -0
- sfi/llmclient/__init__.py +0 -0
- sfi/llmclient/llmclient.py +31 -8
- sfi/llmquantize/llmquantize.py +39 -11
- sfi/llmserver/__init__.py +1 -0
- sfi/llmserver/llmserver.py +63 -13
- sfi/makepython/makepython.py +507 -124
- sfi/pyarchive/__init__.py +1 -0
- sfi/pyarchive/pyarchive.py +908 -278
- sfi/pyembedinstall/pyembedinstall.py +88 -89
- sfi/pylibpack/pylibpack.py +571 -465
- sfi/pyloadergen/pyloadergen.py +372 -218
- sfi/pypack/pypack.py +494 -965
- sfi/pyprojectparse/pyprojectparse.py +328 -28
- sfi/pysourcepack/__init__.py +1 -0
- sfi/pysourcepack/pysourcepack.py +210 -131
- sfi/quizbase/quizbase_gui.py +2 -2
- sfi/taskkill/taskkill.py +168 -59
- sfi/which/which.py +11 -3
- sfi/workflowengine/workflowengine.py +225 -122
- pysfi-0.1.11.dist-info/RECORD +0 -60
- {pysfi-0.1.11.dist-info → pysfi-0.1.13.dist-info}/WHEEL +0 -0

sfi/workflowengine/workflowengine.py

@@ -3,6 +3,13 @@
 This module provides a comprehensive workflow engine for managing
 complex task dependencies with support for I/O tasks, CPU-intensive tasks,
 serial tasks, and parallel task execution.
+
+The engine supports:
+- Dependency management with cycle detection
+- Topological sorting for execution order
+- Concurrent execution with configurable limits
+- Error handling and timeout management
+- Execution monitoring and reporting
 """
 
 from __future__ import annotations

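The docstring now spells out the engine's core mechanisms. For orientation, here is a minimal, self-contained sketch (not code from this package) of how dependency management with cycle detection and topological ordering into execution levels are commonly implemented with Kahn's algorithm; the `topological_levels` helper and its `{task: [dependencies]}` input format are illustrative assumptions.

```python
# Hedged sketch, not the package's actual code: derive execution "levels"
# from a dependency map with Kahn's algorithm, detecting cycles as we go.
def topological_levels(deps: dict[str, list[str]]) -> list[list[str]]:
    """Group task ids into levels; each level depends only on earlier levels.

    Assumes every dependency also appears as a key of `deps`.
    """
    indegree = {task: len(d) for task, d in deps.items()}
    dependents: dict[str, list[str]] = {task: [] for task in deps}
    for task, d in deps.items():
        for dep in d:
            dependents[dep].append(task)

    # Start with tasks that have no dependencies at all.
    level = [t for t, n in indegree.items() if n == 0]
    levels, seen = [], 0
    while level:
        levels.append(level)
        seen += len(level)
        nxt = []
        for t in level:
            for child in dependents[t]:
                indegree[child] -= 1
                if indegree[child] == 0:
                    nxt.append(child)
        level = nxt
    if seen != len(deps):
        # Some tasks never reached indegree 0: a dependency cycle.
        raise ValueError("Dependency cycle detected")
    return levels

# Example: c depends on a and b; d depends on c.
print(topological_levels({"a": [], "b": [], "c": ["a", "b"], "d": ["c"]}))
# -> [['a', 'b'], ['c'], ['d']]
```

Each inner list is a batch whose dependencies are all satisfied by earlier levels, which is exactly the shape a level-by-level executor needs.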
@@ -49,7 +56,13 @@ class TaskResult:
 class Task(ABC):
     """Task abstract base class"""
 
-    def __init__(
+    def __init__(
+        self,
+        task_id: str,
+        task_type: TaskType,
+        dependencies: list[str] | None = None,
+        timeout: float = 30.0,
+    ):
         self.task_id = task_id
         self.task_type = task_type
         self.dependencies = dependencies or []

@@ -82,18 +95,68 @@ class Task(ABC):
             return self.end_time - self.start_time
         return 0.0
 
+    async def _execute_with_error_handling(
+        self, execution_func, context: dict[str, TaskResult]
+    ):
+        """Common execution wrapper with error handling"""
+        self.start_time = time.time()
+        self.update_status(TaskStatus.RUNNING)
+
+        try:
+            data = await asyncio.wait_for(execution_func(context), timeout=self.timeout)
+            self.end_time = time.time()
+            self.result = TaskResult(
+                task_id=self.task_id,
+                success=True,
+                data=data,
+                execution_time=self.get_execution_time(),
+            )
+            self.update_status(TaskStatus.COMPLETED)
+            return self.result
+        except asyncio.TimeoutError as e:
+            self.end_time = time.time()
+            self.result = TaskResult(
+                task_id=self.task_id,
+                success=False,
+                data=None,
+                execution_time=self.get_execution_time(),
+                error=e,
+            )
+            self.update_status(TaskStatus.FAILED)
+            raise
+        except Exception as e:
+            self.end_time = time.time()
+            self.result = TaskResult(
+                task_id=self.task_id,
+                success=False,
+                data=None,
+                execution_time=self.get_execution_time(),
+                error=e,
+            )
+            self.update_status(TaskStatus.FAILED)
+            raise
+
 
 class IOTask(Task):
     """I/O-intensive task"""
 
-    def __init__(
+    def __init__(
+        self,
+        task_id: str,
+        duration: float,
+        dependencies: list[str] | None = None,
+        timeout: float = 30.0,
+    ):
         super().__init__(task_id, TaskType.ASYNC, dependencies, timeout)
         self.duration = duration
 
     async def execute(self, context: dict[str, TaskResult]) -> Any:
         """Simulate I/O operation"""
-        print(
-
+        print(
+            f"[IO] Starting task {self.task_id}, estimated duration: {self.duration}s"
+        )
+        result = await self._execute_with_error_handling(self._execute_io, context)
+        return result.data
 
     async def _execute_io(self, context: dict[str, TaskResult]) -> Any:
         """Internal I/O execution method"""

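This hunk introduces a single wrapper that owns timing, timeout enforcement, result recording, and status transitions, so the task subclasses below no longer repeat that boilerplate. A stripped-down sketch of the same pattern, assuming only the standard library (`run_with_timing` and its dict-shaped result are illustrative names, not the package's API):

```python
import asyncio
import time

async def run_with_timing(coro_func, timeout: float):
    """Time a coroutine, enforce a timeout, record the outcome, re-raise on failure."""
    start = time.time()
    try:
        data = await asyncio.wait_for(coro_func(), timeout=timeout)
        return {"success": True, "data": data, "elapsed": time.time() - start}
    except Exception as exc:  # asyncio.TimeoutError lands here too
        elapsed = time.time() - start
        print(f"failed after {elapsed:.2f}s: {exc!r}")
        raise  # callers can still react to the original exception

async def main():
    async def slow():
        await asyncio.sleep(0.2)
        return "done"

    print(await run_with_timing(slow, timeout=1.0))   # succeeds
    try:
        await run_with_timing(slow, timeout=0.05)     # times out
    except asyncio.TimeoutError:
        print("caught timeout")

asyncio.run(main())
```

The record-then-re-raise shape is what lets the diff's subclasses read their own `self.result` even when execution failed.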
@@ -104,25 +167,34 @@ class IOTask(Task):
 class CPUTask(Task):
     """CPU-intensive task"""
 
-    def __init__(
+    def __init__(
+        self,
+        task_id: str,
+        iterations: int,
+        dependencies: list[str] | None = None,
+        timeout: float = 30.0,
+    ):
         super().__init__(task_id, TaskType.CPU, dependencies, timeout)
         self.iterations = iterations
 
     async def execute(self, context: dict[str, TaskResult]) -> Any:
         """CPU-intensive computation task"""
         print(f"[CPU] Starting task {self.task_id}, iterations: {self.iterations}")
+        result = await self._execute_with_error_handling(self._execute_cpu, context)
+        return result.data
+
+    async def _execute_cpu(self, context: dict[str, TaskResult]) -> Any:
+        """Execute CPU-intensive work in thread pool"""
 
-        # Move CPU-intensive task to thread pool to avoid blocking event loop
         def cpu_intensive_work():
             result = 0
             for i in range(self.iterations):
                 result += i * i
             return result
 
-        # Use
-        # Use run_in_executor for Python 3.8 compatibility (asyncio.to_thread is Python 3.9+)
+        # Use run_in_executor to avoid blocking event loop
         loop = asyncio.get_event_loop()
-        result = await
+        result = await loop.run_in_executor(None, cpu_intensive_work)
        return f"CPU task {self.task_id} completed, result: {result}"

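The CPU task's refactor keeps the `run_in_executor` hand-off: the hot loop runs on a worker thread so the event loop stays free to schedule other tasks. A self-contained sketch of just that idiom (`sum_of_squares` is an illustrative stand-in; `asyncio.get_running_loop()` is the usual modern spelling inside a coroutine, while the diff keeps `get_event_loop()`):

```python
import asyncio

def sum_of_squares(n: int) -> int:
    # Pure CPU work; would block the event loop if awaited inline.
    total = 0
    for i in range(n):
        total += i * i
    return total

async def main():
    loop = asyncio.get_running_loop()
    # None selects the loop's default ThreadPoolExecutor.
    result = await loop.run_in_executor(None, sum_of_squares, 1_000_000)
    print(result)

asyncio.run(main())
```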
@@ -130,7 +202,11 @@ class SerialTask(Task):
     """Serial task (stateful, must execute sequentially)"""
 
     def __init__(
-        self,
+        self,
+        task_id: str,
+        process_func: Callable,
+        dependencies: list[str] | None = None,
+        timeout: float = 30.0,
     ):
         super().__init__(task_id, TaskType.SERIAL, dependencies, timeout)
         self.process_func = process_func

@@ -139,7 +215,11 @@ class SerialTask(Task):
     async def execute(self, context: dict[str, TaskResult]) -> Any:
         """Execute serial task"""
         print(f"[Serial] Starting serial task {self.task_id}")
+        result = await self._execute_with_error_handling(self._execute_serial, context)
+        return result.data
 
+    async def _execute_serial(self, context: dict[str, TaskResult]) -> Any:
+        """Execute serial task logic"""
         # Collect results from dependent tasks
         inputs = {dep_id: context[dep_id].data for dep_id in self.dependencies}
 

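The serial task builds an `inputs` mapping of `{dep_id: data}` from its completed dependencies before handing off to `process_func`. The visible lines do not show the actual call, but assuming `process_func` receives that mapping, a handler might look like this purely illustrative sketch:

```python
# Illustrative only: a process_func consuming the {dep_id: data} mapping
# that the engine assembles from completed dependency results.
def merge_reports(inputs: dict[str, object]) -> str:
    # inputs maps each dependency's task_id to the data it returned
    lines = [f"{task_id}: {data}" for task_id, data in sorted(inputs.items())]
    return "\n".join(lines)

print(merge_reports({"io_fetch": "10 rows", "cpu_sum": 332833500}))
```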
@@ -172,61 +252,71 @@ class ParallelTask(Task):
 
     async def execute(self, context: dict[str, TaskResult]) -> Any:
         """Execute subtasks in parallel"""
-        print(
-
+        print(
+            f"[Parallel] Starting parallel task {self.task_id}, contains {len(self.subtasks)} subtasks"
+        )
+        result = await self._execute_with_error_handling(
+            self._execute_parallel, context
+        )
+        return result.data
+
+    async def _execute_parallel(self, context: dict[str, TaskResult]) -> Any:
+        """Execute subtasks in parallel with controlled concurrency"""
         # Create semaphore to control concurrency
         semaphore = asyncio.Semaphore(self.max_concurrent)
 
-        async def
-            async with
-                subtask
-                subtask.update_status(TaskStatus.RUNNING)
-
+        async def execute_subtask_with_semaphore(subtask: Task):
+            async with semaphore:
+                # Execute subtask with its own context
                 try:
-                    data = await asyncio.wait_for(
+                    data = await asyncio.wait_for(
+                        subtask.execute(context), timeout=subtask.timeout
+                    )
                     subtask.result = TaskResult(
                         task_id=subtask.task_id,
                         success=True,
                         data=data,
-                        execution_time=time.time() - subtask.start_time
+                        execution_time=time.time() - subtask.start_time
+                        if subtask.start_time
+                        else 0,
                     )
                     subtask.update_status(TaskStatus.COMPLETED)
                     return subtask.result
-                except asyncio.TimeoutError as e:
-                    error = TimeoutError(f"Task {subtask.task_id} execution timeout")
-                    subtask.result = TaskResult(
-                        task_id=subtask.task_id,
-                        success=False,
-                        data=None,
-                        execution_time=time.time() - subtask.start_time,
-                        error=error,
-                    )
-                    subtask.update_status(TaskStatus.FAILED)
-                    raise error from e
                 except Exception as e:
+                    subtask.end_time = time.time()
                     subtask.result = TaskResult(
                         task_id=subtask.task_id,
                         success=False,
                         data=None,
-                        execution_time=time.time() - subtask.start_time
+                        execution_time=time.time() - subtask.start_time
+                        if subtask.start_time
+                        else 0,
                         error=e,
                     )
                     subtask.update_status(TaskStatus.FAILED)
-
+                    return subtask.result
 
         # Execute all subtasks in parallel
-
-
+        results = await asyncio.gather(
+            *[execute_subtask_with_semaphore(subtask) for subtask in self.subtasks],
+            return_exceptions=True,
+        )
 
-        # Process results
+        # Process results
         successful_results = []
         failed_results = []
 
-        for
+        for i, result in enumerate(results):
+            subtask = self.subtasks[i]
             if isinstance(result, Exception):
                 failed_results.append(f"Subtask {subtask.task_id} failed: {result}")
             elif isinstance(result, TaskResult):
-
+                if result.success:
+                    successful_results.append(result.data)
+                else:
+                    failed_results.append(
+                        f"Subtask {subtask.task_id} failed: {result.error}"
+                    )
 
         if failed_results:
             return f"Parallel task {self.task_id} partially failed: {failed_results}"

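The parallel task combines two standard asyncio idioms: a semaphore caps how many subtasks run at once, and `gather(..., return_exceptions=True)` keeps one failing subtask from cancelling its siblings while preserving submission order. A self-contained sketch of that skeleton (`bounded_gather` is an illustrative name):

```python
import asyncio

async def bounded_gather(coros, max_concurrent: int = 3):
    """Run coroutines concurrently, at most max_concurrent at a time."""
    semaphore = asyncio.Semaphore(max_concurrent)

    async def bounded(coro):
        async with semaphore:
            return await coro

    # return_exceptions=True: failures come back as exception objects
    # in the results list instead of tearing down the whole gather.
    return await asyncio.gather(*(bounded(c) for c in coros), return_exceptions=True)

async def main():
    async def job(i):
        await asyncio.sleep(0.1)
        if i == 2:
            raise RuntimeError(f"job {i} failed")
        return f"job {i} ok"

    for outcome in await bounded_gather([job(i) for i in range(5)]):
        print(outcome)  # results and exceptions arrive in submission order

asyncio.run(main())
```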
@@ -336,86 +426,8 @@ class WorkflowEngine:
         for level_index, level in enumerate(execution_order, 1):
             print(f"\n{'=' * 20} Phase {level_index} ({len(level)} tasks) {'=' * 20}")
 
-            #
-
-            for task_id in level:
-                task = self.tasks[task_id]
-                if task.can_execute(completed_tasks):
-                    task.update_status(TaskStatus.READY)
-                    ready_tasks.append(task)
-
-            if not ready_tasks:
-                continue
-
-            # Create semaphore for this level to control concurrency
-            semaphore = asyncio.Semaphore(self.max_concurrent)
-
-            async def execute_single_task(task: Task, sem: asyncio.Semaphore):
-                async with sem:
-                    task.start_time = time.time()
-                    task.update_status(TaskStatus.RUNNING)
-
-                    try:
-                        # Collect results from dependent tasks
-                        dependency_results = {dep_id: self.results[dep_id] for dep_id in task.get_dependencies()}
-
-                        # Execute task
-                        data = await asyncio.wait_for(task.execute(dependency_results), timeout=task.timeout)
-
-                        task.end_time = time.time()
-                        task.result = TaskResult(
-                            task_id=task.task_id, success=True, data=data, execution_time=task.get_execution_time()
-                        )
-                        task.update_status(TaskStatus.COMPLETED)
-
-                        # Store result
-                        self.results[task.task_id] = task.result
-                        completed_tasks.add(task.task_id)
-
-                        print(f"[OK] Task {task.task_id} completed, duration: {task.get_execution_time():.2f}s")
-
-                        return task.result
-
-                    except asyncio.TimeoutError as e:
-                        task.end_time = time.time()
-                        task.result = TaskResult(
-                            task_id=task.task_id,
-                            success=False,
-                            data=None,
-                            execution_time=task.get_execution_time(),
-                            error=e,
-                        )
-                        task.update_status(TaskStatus.FAILED)
-
-                        # Store result and mark as completed (even if failed)
-                        self.results[task.task_id] = task.result
-                        completed_tasks.add(task.task_id)
-
-                        print(f"[FAIL] Task {task.task_id} timeout")
-                        raise e from e
-                    except Exception as e:
-                        task.end_time = time.time()
-                        task.result = TaskResult(
-                            task_id=task.task_id,
-                            success=False,
-                            data=None,
-                            execution_time=task.get_execution_time(),
-                            error=e,
-                        )
-                        task.update_status(TaskStatus.FAILED)
-
-                        # Store result and mark as completed (even if failed)
-                        self.results[task.task_id] = task.result
-                        completed_tasks.add(task.task_id)
-
-                        print(f"[FAIL] Task {task.task_id} failed: {e}")
-                        raise e
-
-            # Execute all ready tasks in this level in parallel
-            tasks_to_execute = [execute_single_task(task, semaphore) for task in ready_tasks]
-
-            # Use return_exceptions=True to ensure all tasks complete even if some fail
-            await asyncio.gather(*tasks_to_execute, return_exceptions=True)
+            # Execute the current level
+            await self._execute_level(level, completed_tasks)
 
         print(f"\n{'=' * 50}")
         print("Workflow execution completed")

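The net effect of this hunk: roughly eighty lines of inline closure collapse into one call to `_execute_level`, defined in the next hunk. A hedged miniature of the resulting control flow, with stand-in names: levels run strictly in sequence, tasks within a level run concurrently.

```python
import asyncio

async def run_levels(levels, run_task):
    """Run each level to completion before starting the next."""
    results = {}
    for i, level in enumerate(levels, 1):
        print(f"=== Phase {i} ({len(level)} tasks) ===")
        # Tasks inside one level have no mutual dependencies, so they can
        # safely run concurrently; failures are collected, not propagated.
        outs = await asyncio.gather(
            *(run_task(t) for t in level), return_exceptions=True
        )
        results.update(zip(level, outs))
    return results

async def main():
    async def run_task(task_id):
        await asyncio.sleep(0.05)
        return f"{task_id} done"

    print(await run_levels([["a", "b"], ["c"], ["d"]], run_task))

asyncio.run(main())
```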
@@ -423,22 +435,113 @@ class WorkflowEngine:
 
         return self.results
 
+    async def _execute_level(self, level: list[str], completed_tasks: set[str]):
+        """Execute a single level of tasks with controlled concurrency."""
+        # Filter executable tasks in this level
+        ready_tasks = []
+        for task_id in level:
+            task = self.tasks[task_id]
+            if task.can_execute(completed_tasks):
+                task.update_status(TaskStatus.READY)
+                ready_tasks.append(task)
+
+        if not ready_tasks:
+            return
+
+        # Use a shared semaphore for this level to control concurrency
+        semaphore = asyncio.Semaphore(self.max_concurrent)
+
+        # Execute all ready tasks in this level in parallel
+        tasks_to_execute = [
+            self._execute_single_task_with_semaphore(task, semaphore, completed_tasks)
+            for task in ready_tasks
+        ]
+
+        # Use return_exceptions=True to ensure all tasks complete even if some fail
+        await asyncio.gather(*tasks_to_execute, return_exceptions=True)
+
+    async def _execute_single_task_with_semaphore(
+        self, task: Task, semaphore: asyncio.Semaphore, completed_tasks: set[str]
+    ):
+        """Execute a single task with semaphore control for concurrency."""
+        async with semaphore:
+            return await self._execute_single_task(task, completed_tasks)
+
+    async def _execute_single_task(self, task: Task, completed_tasks: set[str]):
+        """Execute a single task with error handling."""
+        task.start_time = time.time()
+        task.update_status(TaskStatus.RUNNING)
+
+        # Collect results from dependent tasks
+        dependency_results = {
+            dep_id: self.results[dep_id] for dep_id in task.get_dependencies()
+        }
+
+        try:
+            # Execute task using the common error handling wrapper
+            result_data = await task._execute_with_error_handling(
+                lambda ctx: task.execute(ctx), dependency_results
+            )
+            # Update task with result
+            task.result = result_data
+        except Exception:
+            # Result is already stored in task.result by _execute_with_error_handling
+            pass
+
+        # Store result and update completed tasks (even if failed)
+        self.results[task.task_id] = task.result
+        completed_tasks.add(task.task_id)
+
+        # Print appropriate message based on result
+        if task.result.success:
+            print(
+                f"[OK] Task {task.task_id} completed, duration: {task.get_execution_time():.2f}s"
+            )
+        else:
+            error_msg = (
+                "timeout"
+                if isinstance(task.result.error, asyncio.TimeoutError)
+                else str(task.result.error)
+            )
+            print(f"[FAIL] Task {task.task_id} failed: {error_msg}")
+
+        return task.result
+
     def get_execution_summary(self) -> dict[str, Any]:
         """Get execution summary"""
         total_tasks = len(self.tasks)
-
-
-
+        if total_tasks == 0:
+            return {
+                "total_tasks": 0,
+                "completed": 0,
+                "failed": 0,
+                "pending": 0,
+                "total_execution_time": 0.0,
+                "success_rate": 0.0,
+            }
+
+        completed = 0
+        failed = 0
         total_time = 0.0
+
+        # Single pass through tasks to calculate all metrics
         for task in self.tasks.values():
+            if task.status == TaskStatus.COMPLETED:
+                completed += 1
+            elif task.status == TaskStatus.FAILED:
+                failed += 1
+
             if task.result:
                 total_time += task.result.execution_time
 
+        pending = total_tasks - completed - failed
+        success_rate = completed / total_tasks if total_tasks > 0 else 0
+
         return {
             "total_tasks": total_tasks,
             "completed": completed,
             "failed": failed,
-            "pending":
+            "pending": pending,
             "total_execution_time": total_time,
-            "success_rate":
+            "success_rate": success_rate,
         }

pysfi-0.1.11.dist-info/RECORD DELETED

@@ -1,60 +0,0 @@
-sfi/__init__.py,sha256=W9IqwgCQr3sYmRyRC3ryCaJDJcMWBtyLzxp5CVsT3Nc,75
-sfi/cli.py,sha256=bUUTOg18sJQbSKSfsVANhlMgSj9yzO2txIzFAd9B2Ok,296
-sfi/alarmclock/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/alarmclock/alarmclock.py,sha256=0HoacKlGdYq_hINAdl54Cz2E_z6nNjPyqif2xcEBQss,12381
-sfi/bumpversion/__init__.py,sha256=j3XC03YiSDWRJV6UOcDWWsp09STfV5LrvzFkjsehSwA,86
-sfi/bumpversion/bumpversion.py,sha256=HOyHLaE0sZajrlcVZ8hsim8mPjz77qwQVSo6aIzjMXE,20735
-sfi/cleanbuild/cleanbuild.py,sha256=Fr6_cr3rj4llcEQ8yNTK-DHdSzmx1I4hYFJJHu5YEz0,5200
-sfi/condasetup/condasetup.py,sha256=RlbXVYcAJYMau-ZzHOMzHrHl4r-lqNZO0bT-zWuzP_k,4581
-sfi/docscan/__init__.py,sha256=qKkwfRoVBNMzNNdQk69QnFUrmJACtW9qbvoRloTDHfk,121
-sfi/docscan/docscan.py,sha256=rk8mjEI2SKNIliV-Yb41pfUmYBQ1tUhk5LHUNEjkszI,41890
-sfi/docscan/docscan_gui.py,sha256=T_blCyGGaWxL6rtjLIYW3nGdX8DpLQv73YbDnITR4eg,50671
-sfi/docscan/lang/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/docscan/lang/eng.py,sha256=GcOcT9FLcPZRdJ-MbLRYyf6vDweZTQBu_zUnEFzRY84,8529
-sfi/docscan/lang/zhcn.py,sha256=1SZwQjZF3oi9FsnzuZB-9v7P64sGm5oNmVjuL-rhcEQ,8885
-sfi/filedate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/filedate/filedate.py,sha256=5FARcsB2Rlz2uTBxeYYjbIEJb9l1cyXj9WSoNKvSrRo,6068
-sfi/gittool/__init__.py,sha256=Xqxw7UUX-TKkWOCB1QHq8AdIKTkU7x87Xr-E0yVmObA,24
-sfi/gittool/gittool.py,sha256=BBE6gm9qP1fAWLqKprmsf7bOFgDvBvia8_bMaXc7dR4,11960
-sfi/llmclient/llmclient.py,sha256=SnFZ9c2cNvFeLeobJV1ls7Ewftaam4s-HVBYW2tgHPo,21706
-sfi/llmquantize/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/llmquantize/llmquantize.py,sha256=ILmfdJg7Rc7xAygfcVgkSKJ_qRAHDRZXjBymYFBy6fg,17693
-sfi/llmserver/llmserver.py,sha256=vYEcNOV-OEvDULpzzCAJ0zHrXgqJFolOhqNdCZU0Bjs,11339
-sfi/makepython/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/makepython/makepython.py,sha256=87lySTg0j1lZNIJMf8U_Go_fkuLkuwGDUf6LUbR_r6c,11445
-sfi/pdfsplit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/pdfsplit/pdfsplit.py,sha256=QWtW3GU28U2ZOyN5sCbH7jEMBpNbuAIzjXWOAXXW44M,6209
-sfi/pyarchive/pyarchive.py,sha256=1rkWY96U_DWbgTvdFGZto7dfutfhUo-OxmoVoaTo6WU,12892
-sfi/pyembedinstall/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/pyembedinstall/pyembedinstall.py,sha256=kxp5YuwNHB29AVBMEJvzaIqTc7bEx-oBpKReGRJ1Pyw,23737
-sfi/pylibpack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/pylibpack/pylibpack.py,sha256=lcGrzijibFFaJnxL8ZVALAGNHGs3g7-N4DO_AuiKwPs,54436
-sfi/pylibpack/rules/numpy.json,sha256=ee4gA5NBudFi3MaJA-QlBKQwiQAUb-eluF8HNVkl7Vk,384
-sfi/pylibpack/rules/pymupdf.json,sha256=Hkzh8dvXKCzKx4aeHbu5E0qwgfbwQxZH2VLtQZzlMO4,153
-sfi/pylibpack/rules/pyqt5.json,sha256=JKGnVSUMfXGR5XK1sbL1F6cAsEhl7hK12QkrulAB00M,374
-sfi/pylibpack/rules/pyside2.json,sha256=uSSteT-3wDohWwQ36Z5mSOaSbxrR4565In4uZj_eR4w,557
-sfi/pylibpack/rules/scipy.json,sha256=vTSi3W5BGWcwMkaDnyD6Yg7ijZdicPEUMw4fnRTnNf4,468
-sfi/pylibpack/rules/shiboken2.json,sha256=9Pl3eslvergyjlyHNknkyN0oZlcH3049WULe5WjsmKM,515
-sfi/pyloadergen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/pyloadergen/pyloadergen.py,sha256=R2E6YBCUwfgT3SLr7paBBYEcuySLaj1q_CTZC2slwWQ,39741
-sfi/pypack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/pypack/pypack.py,sha256=-l3jc4akSSlgEmwiB-kITP0ioBBy42-taHkEsGNEQNw,35168
-sfi/pyprojectparse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/pyprojectparse/pyprojectparse.py,sha256=MLDuQm5LFZW-TQ_GfaaeFSa18lsQq5u2ZTjnlkFQ_Ao,19112
-sfi/pysourcepack/pysourcepack.py,sha256=qjBCFnY_3S7xwPgQ2GB0dr0WFbEj3uusZQ_udiU0Bok,9452
-sfi/quizbase/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/quizbase/quizbase.py,sha256=3tPUuYexZ9TVsNPPO_Itmr5OvyHSgY5OSUZwPoQt9zg,30605
-sfi/quizbase/quizbase_gui.py,sha256=7prc5tWkbUPzs1ofNc4xIC_aRrMCWB6RYN_NTq70p0Q,34729
-sfi/regexvalidate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/regexvalidate/regex_help.html,sha256=3ltx3nh-Y5kkbHy5D67KfWtLig3u5XEhIlPHdHLEuTE,12436
-sfi/regexvalidate/regexvalidate.py,sha256=5C_M2EKt9Jlonq03v9zrqtsFfAKK3D1vF1kBxD6iUpE,18600
-sfi/taskkill/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/taskkill/taskkill.py,sha256=kRacCP78mDsZk44tfm4qblOplPxQuo3_6lHl6UQEmkU,7744
-sfi/which/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/which/which.py,sha256=zVIAwZA-pGGngxkkwZ6IxDX3ozVHg7cLSYwYO9FjaIc,2439
-sfi/workflowengine/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfi/workflowengine/workflowengine.py,sha256=ck5PjyyjtWtbjN4ePEKsTWV6QR-BUlrfwrY6jih52jQ,17055
-pysfi-0.1.11.dist-info/METADATA,sha256=4dkuiM92FRkYo-8ti_ZEmAxq1eoFa6HF0xs8ErJyc68,4047
-pysfi-0.1.11.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pysfi-0.1.11.dist-info/entry_points.txt,sha256=FhBsBY75x9e-AAyJ0t0HD1rb90FDllsM_IdVGHWZx9o,1099
-pysfi-0.1.11.dist-info/RECORD,,

{pysfi-0.1.11.dist-info → pysfi-0.1.13.dist-info}/WHEEL

File without changes