pysfi 0.1.6__py3-none-any.whl → 0.1.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,444 @@
+ """Workflow Engine - A flexible async task orchestration system.
+
+ This module provides a comprehensive workflow engine for managing
+ complex task dependencies with support for I/O tasks, CPU-intensive tasks,
+ serial tasks, and parallel task execution.
+ """
+
+ from __future__ import annotations
+
+ import asyncio
+ import time
+ from abc import ABC, abstractmethod
+ from collections import defaultdict, deque
+ from dataclasses import dataclass
+ from enum import Enum
+ from typing import Any, Callable, Sequence
+
+
+ class TaskStatus(Enum):
+     """Task status enumeration"""
+
+     PENDING = "pending"
+     READY = "ready"
+     RUNNING = "running"
+     COMPLETED = "completed"
+     FAILED = "failed"
+
+
+ class TaskType(Enum):
+     """Task type enumeration"""
+
+     SERIAL = "serial"  # Serial task
+     PARALLEL = "parallel"  # Parallel task
+     ASYNC = "async"  # Async I/O task
+     CPU = "cpu"  # CPU-intensive task
+
+
+ @dataclass
+ class TaskResult:
+     """Task execution result"""
+
+     task_id: str
+     success: bool
+     data: Any
+     execution_time: float
+     error: Exception | None = None
+
+
+ class Task(ABC):
+     """Task abstract base class"""
+
+     def __init__(self, task_id: str, task_type: TaskType, dependencies: list[str] | None = None, timeout: float = 30.0):
+         self.task_id = task_id
+         self.task_type = task_type
+         self.dependencies = dependencies or []
+         self.timeout = timeout
+         self.status = TaskStatus.PENDING
+         self.result: TaskResult | None = None
+         self.start_time: float | None = None
+         self.end_time: float | None = None
+
+     def get_dependencies(self) -> list[str]:
+         """Get list of dependent task IDs"""
+         return self.dependencies.copy()
+
+     def can_execute(self, completed_tasks: set[str]) -> bool:
+         """Check if task can be executed (dependencies satisfied)"""
+         return all(dep in completed_tasks for dep in self.dependencies)
+
+     def update_status(self, status: TaskStatus):
+         """Update task status"""
+         self.status = status
+
+     @abstractmethod
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """Execute task logic, must be implemented by subclasses"""
+         pass
+
+     def get_execution_time(self) -> float:
+         """Get task execution time"""
+         if self.start_time and self.end_time:
+             return self.end_time - self.start_time
+         return 0.0
+
+
+ class IOTask(Task):
+     """I/O-intensive task"""
+
+     def __init__(self, task_id: str, duration: float, dependencies: list[str] | None = None, timeout: float = 30.0):
+         super().__init__(task_id, TaskType.ASYNC, dependencies, timeout)
+         self.duration = duration
+
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """Simulate I/O operation"""
+         print(f"[IO] Starting task {self.task_id}, estimated duration: {self.duration}s")
+         return await asyncio.wait_for(self._execute_io(context), timeout=self.timeout)
+
+     async def _execute_io(self, context: dict[str, TaskResult]) -> Any:
+         """Internal I/O execution method"""
+         await asyncio.sleep(self.duration)
+         return f"IO task {self.task_id} completed, dependencies: {list(context.keys())}"
+
+
+ class CPUTask(Task):
+     """CPU-intensive task"""
+
+     def __init__(self, task_id: str, iterations: int, dependencies: list[str] | None = None, timeout: float = 30.0):
+         super().__init__(task_id, TaskType.CPU, dependencies, timeout)
+         self.iterations = iterations
+
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """CPU-intensive computation task"""
+         print(f"[CPU] Starting task {self.task_id}, iterations: {self.iterations}")
+
+         # Move CPU-intensive task to thread pool to avoid blocking event loop
+         def cpu_intensive_work():
+             result = 0
+             for i in range(self.iterations):
+                 result += i * i
+             return result
+
+         # Use asyncio.wait_for with timeout to prevent infinite hangs
+         # Use run_in_executor for Python 3.8 compatibility (asyncio.to_thread is Python 3.9+)
+         loop = asyncio.get_event_loop()
+         result = await asyncio.wait_for(loop.run_in_executor(None, cpu_intensive_work), timeout=self.timeout)
+         return f"CPU task {self.task_id} completed, result: {result}"
+
+
+ class SerialTask(Task):
+     """Serial task (stateful, must execute sequentially)"""
+
+     def __init__(
+         self, task_id: str, process_func: Callable, dependencies: list[str] | None = None, timeout: float = 30.0
+     ):
+         super().__init__(task_id, TaskType.SERIAL, dependencies, timeout)
+         self.process_func = process_func
+         self.state = {}
+
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """Execute serial task"""
+         print(f"[Serial] Starting serial task {self.task_id}")
+
+         # Collect results from dependent tasks
+         inputs = {dep_id: context[dep_id].data for dep_id in self.dependencies}
+
+         # Execute process function
+         if asyncio.iscoroutinefunction(self.process_func):
+             result = await self.process_func(inputs, self.state)
+         else:
+             result = self.process_func(inputs, self.state)
+
+         # Update state
+         self.state = {"last_result": result, "executed": True}
+
+         return f"Serial task {self.task_id} completed, result: {result}"
+
+
+ class ParallelTask(Task):
+     """Parallel task (can execute concurrently with other tasks)"""
+
+     def __init__(
+         self,
+         task_id: str,
+         subtasks: Sequence[Task],
+         dependencies: list[str] | None = None,
+         timeout: float = 30.0,
+         max_concurrent: int = 3,
+     ):
+         super().__init__(task_id, TaskType.PARALLEL, dependencies, timeout)
+         self.subtasks = subtasks
+         self.max_concurrent = max_concurrent
+
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """Execute subtasks in parallel"""
+         print(f"[Parallel] Starting parallel task {self.task_id}, contains {len(self.subtasks)} subtasks")
+
+         # Create semaphore to control concurrency
+         semaphore = asyncio.Semaphore(self.max_concurrent)
+
+         async def execute_subtask(subtask: Task, sem: asyncio.Semaphore):
+             async with sem:
+                 subtask.start_time = time.time()
+                 subtask.update_status(TaskStatus.RUNNING)
+
+                 try:
+                     data = await asyncio.wait_for(subtask.execute(context), timeout=subtask.timeout)
+                     subtask.result = TaskResult(
+                         task_id=subtask.task_id,
+                         success=True,
+                         data=data,
+                         execution_time=time.time() - subtask.start_time,
+                     )
+                     subtask.update_status(TaskStatus.COMPLETED)
+                     return subtask.result
+                 except asyncio.TimeoutError as e:
+                     error = TimeoutError(f"Task {subtask.task_id} execution timeout")
+                     subtask.result = TaskResult(
+                         task_id=subtask.task_id,
+                         success=False,
+                         data=None,
+                         execution_time=time.time() - subtask.start_time,
+                         error=error,
+                     )
+                     subtask.update_status(TaskStatus.FAILED)
+                     raise error from e
+                 except Exception as e:
+                     subtask.result = TaskResult(
+                         task_id=subtask.task_id,
+                         success=False,
+                         data=None,
+                         execution_time=time.time() - subtask.start_time,
+                         error=e,
+                     )
+                     subtask.update_status(TaskStatus.FAILED)
+                     raise e
+
+         # Execute all subtasks in parallel
+         tasks = [execute_subtask(subtask, semaphore) for subtask in self.subtasks]
+         results = await asyncio.gather(*tasks, return_exceptions=True)
+
+         # Process results using zip for better readability
+         successful_results = []
+         failed_results = []
+
+         for subtask, result in zip(self.subtasks, results):
+             if isinstance(result, Exception):
+                 failed_results.append(f"Subtask {subtask.task_id} failed: {result}")
+             elif isinstance(result, TaskResult):
+                 successful_results.append(result.data)
+
+         if failed_results:
+             return f"Parallel task {self.task_id} partially failed: {failed_results}"
+
+         return f"Parallel task {self.task_id} completed, results: {successful_results}"
+
+
+ class WorkflowEngine:
+     """Workflow engine - core orchestrator"""
+
+     def __init__(self, max_concurrent: int = 4):
+         self.tasks: dict[str, Task] = {}
+         self.results: dict[str, TaskResult] = {}
+         self.max_concurrent = max_concurrent
+         self.execution_order: list[list[str]] = []
+
+     def add_task(self, task: Task):
+         """Add task to workflow"""
+         self.tasks[task.task_id] = task
+
+     def validate_dependencies(self) -> bool:
+         """Validate task dependencies, ensure no circular dependencies"""
+         # Build adjacency list
+         graph = defaultdict(list)
+         in_degree = dict.fromkeys(self.tasks, 0)
+
+         for task_id, task in self.tasks.items():
+             for dep in task.get_dependencies():
+                 if dep not in self.tasks:
+                     raise ValueError(f"Task {task_id} depends on unknown task {dep}")
+                 graph[dep].append(task_id)
+                 in_degree[task_id] += 1
+
+         # Detect circular dependencies
+         visited = 0
+         queue = deque([task_id for task_id, degree in in_degree.items() if degree == 0])
+
+         while queue:
+             current = queue.popleft()
+             visited += 1
+
+             for neighbor in graph[current]:
+                 in_degree[neighbor] -= 1
+                 if in_degree[neighbor] == 0:
+                     queue.append(neighbor)
+
+         if visited != len(self.tasks):
+             raise ValueError("Circular dependency detected in workflow")
+
+         return True
+
+     def calculate_execution_order(self) -> list[list[str]]:
+         """Calculate task execution order (topological sort + level grouping)"""
+         if not self.tasks:
+             return []
+
+         # Build adjacency list
+         graph = defaultdict(list)
+         in_degree = dict.fromkeys(self.tasks, 0)
+
+         for task_id, task in self.tasks.items():
+             for dep in task.get_dependencies():
+                 graph[dep].append(task_id)
+                 in_degree[task_id] += 1
+
+         # Level-based topological sort
+         execution_order = []
+         queue = deque([task_id for task_id, degree in in_degree.items() if degree == 0])
+
+         while queue:
+             level_size = len(queue)
+             current_level = []
+
+             for _ in range(level_size):
+                 task_id = queue.popleft()
+                 current_level.append(task_id)
+
+                 for neighbor in graph[task_id]:
+                     in_degree[neighbor] -= 1
+                     if in_degree[neighbor] == 0:
+                         queue.append(neighbor)
+
+             if current_level:
+                 execution_order.append(current_level)
+
+         self.execution_order = execution_order
+         return execution_order
+
+     async def execute_workflow(self) -> dict[str, TaskResult]:
+         """Execute entire workflow"""
+         print("=" * 50)
+         print("Starting workflow execution")
+         print("=" * 50)
+
+         # Validate dependencies
+         self.validate_dependencies()
+
+         # Calculate execution order
+         execution_order = self.calculate_execution_order()
+         print(f"Execution plan ({len(execution_order)} phases):")
+         for i, level in enumerate(execution_order, 1):
+             print(f" Phase {i}: {level}")
+
+         # Execute by level
+         completed_tasks: set[str] = set()
+
+         for level_index, level in enumerate(execution_order, 1):
+             print(f"\n{'=' * 20} Phase {level_index} ({len(level)} tasks) {'=' * 20}")
+
+             # Filter executable tasks in this level
+             ready_tasks = []
+             for task_id in level:
+                 task = self.tasks[task_id]
+                 if task.can_execute(completed_tasks):
+                     task.update_status(TaskStatus.READY)
+                     ready_tasks.append(task)
+
+             if not ready_tasks:
+                 continue
+
+             # Create semaphore for this level to control concurrency
+             semaphore = asyncio.Semaphore(self.max_concurrent)
+
+             async def execute_single_task(task: Task, sem: asyncio.Semaphore):
+                 async with sem:
+                     task.start_time = time.time()
+                     task.update_status(TaskStatus.RUNNING)
+
+                     try:
+                         # Collect results from dependent tasks
+                         dependency_results = {dep_id: self.results[dep_id] for dep_id in task.get_dependencies()}
+
+                         # Execute task
+                         data = await asyncio.wait_for(task.execute(dependency_results), timeout=task.timeout)
+
+                         task.end_time = time.time()
+                         task.result = TaskResult(
+                             task_id=task.task_id, success=True, data=data, execution_time=task.get_execution_time()
+                         )
+                         task.update_status(TaskStatus.COMPLETED)
+
+                         # Store result
+                         self.results[task.task_id] = task.result
+                         completed_tasks.add(task.task_id)
+
+                         print(f"[OK] Task {task.task_id} completed, duration: {task.get_execution_time():.2f}s")
+
+                         return task.result
+
+                     except asyncio.TimeoutError as e:
+                         task.end_time = time.time()
+                         task.result = TaskResult(
+                             task_id=task.task_id,
+                             success=False,
+                             data=None,
+                             execution_time=task.get_execution_time(),
+                             error=e,
+                         )
+                         task.update_status(TaskStatus.FAILED)
+
+                         # Store result and mark as completed (even if failed)
+                         self.results[task.task_id] = task.result
+                         completed_tasks.add(task.task_id)
+
+                         print(f"[FAIL] Task {task.task_id} timeout")
+                         raise e from e
+                     except Exception as e:
+                         task.end_time = time.time()
+                         task.result = TaskResult(
+                             task_id=task.task_id,
+                             success=False,
+                             data=None,
+                             execution_time=task.get_execution_time(),
+                             error=e,
+                         )
+                         task.update_status(TaskStatus.FAILED)
+
+                         # Store result and mark as completed (even if failed)
+                         self.results[task.task_id] = task.result
+                         completed_tasks.add(task.task_id)
+
+                         print(f"[FAIL] Task {task.task_id} failed: {e}")
+                         raise e
+
+             # Execute all ready tasks in this level in parallel
+             tasks_to_execute = [execute_single_task(task, semaphore) for task in ready_tasks]
+
+             # Use return_exceptions=True to ensure all tasks complete even if some fail
+             await asyncio.gather(*tasks_to_execute, return_exceptions=True)
+
+         print(f"\n{'=' * 50}")
+         print("Workflow execution completed")
+         print(f"{'=' * 50}")
+
+         return self.results
+
+     def get_execution_summary(self) -> dict[str, Any]:
+         """Get execution summary"""
+         total_tasks = len(self.tasks)
+         completed = sum(1 for task in self.tasks.values() if task.status == TaskStatus.COMPLETED)
+         failed = sum(1 for task in self.tasks.values() if task.status == TaskStatus.FAILED)
+
+         total_time = 0.0
+         for task in self.tasks.values():
+             if task.result:
+                 total_time += task.result.execution_time
+
+         return {
+             "total_tasks": total_tasks,
+             "completed": completed,
+             "failed": failed,
+             "pending": total_tasks - completed - failed,
+             "total_execution_time": total_time,
+             "success_rate": completed / total_tasks if total_tasks > 0 else 0,
+         }
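
For orientation, here is a minimal usage sketch of the API added above (WorkflowEngine, IOTask, CPUTask, SerialTask). The task IDs, durations, iteration count, and merge function are illustrative only, and the import is assumed because the new module's path is not shown in this diff:

import asyncio

# Assumed import; the module path is not visible in this diff:
# from <new workflow module> import WorkflowEngine, IOTask, CPUTask, SerialTask


def merge(inputs, state):
    # SerialTask passes 'inputs' as {dependency task ID: result data} and a
    # per-task 'state' dict that persists between runs of this task.
    return sorted(inputs)


async def main():
    engine = WorkflowEngine(max_concurrent=4)
    engine.add_task(IOTask("fetch", duration=0.5))
    engine.add_task(CPUTask("crunch", iterations=100_000))
    engine.add_task(SerialTask("merge", merge, dependencies=["fetch", "crunch"]))
    # The engine topologically sorts tasks into phases ([fetch, crunch] then [merge])
    # and returns TaskResult objects keyed by task ID.
    results = await engine.execute_workflow()
    print(engine.get_execution_summary())


asyncio.run(main())
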
@@ -1,21 +0,0 @@
- sfi/__init__.py,sha256=VaGTEot5vjPh1rcxcuYg_KnK58mt8BHly0KnV5-55lk,74
- sfi/alarmclock/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/alarmclock/alarmclock.py,sha256=65G8OyTGpe4oQ2SFerQG1N9PVJ4KxO7WzgsTxpGm4O0,12509
- sfi/bumpversion/__init__.py,sha256=BMI9LlfdLgZoj-xXYkjzPj2VTwci2pT0umrEyAjXUgs,85
- sfi/bumpversion/bumpversion.py,sha256=HOyHLaE0sZajrlcVZ8hsim8mPjz77qwQVSo6aIzjMXE,20735
- sfi/embedinstall/embedinstall.py,sha256=N5EbTDdX4bE3W0qHGAwAUuepqFr0sbdZuPI3KWrtuUY,14936
- sfi/filedate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/filedate/filedate.py,sha256=DpVp26lumE_Lz_4TgqUEX8IxtK3Y6yHSEFV8qJyegyk,3645
- sfi/makepython/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/makepython/makepython.py,sha256=YKBXTjvugKN_TlFfx-vOudpkZy45VhO0vq89fxo6hFU,10583
- sfi/pdfsplit/pdfsplit.py,sha256=9M1o9QjTxAAa0DHbTTbDeap2evcazWlb98u80hzJmx0,6004
- sfi/projectparse/projectparse.py,sha256=Ojg-z4lZEtjEBpJYWyznTgL307N45AxlQKnRkEH0P70,5525
- sfi/pyloadergen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/pyloadergen/pyloadergen.py,sha256=Erzz1PwrEQcDSCxXG-4DZ-CZavDt6MNv7k3nET5IB9U,32423
- sfi/pypacker/fspacker.py,sha256=3tlS7qiWoH_kOzsp9eSWsQ-SY7-bSTugwfB-HIL69iE,3238
- sfi/taskkill/taskkill.py,sha256=6Aw4afmgfLZcQnvgG_38A1VrwazDrnNdOmY1l4kr0lc,7758
- sfi/which/which.py,sha256=zVIAwZA-pGGngxkkwZ6IxDX3ozVHg7cLSYwYO9FjaIc,2439
- pysfi-0.1.6.dist-info/METADATA,sha256=WJUn9KiphnGrEOLLgTi0xl_KqA0ozhLSkKiKnhNuUOE,2897
- pysfi-0.1.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- pysfi-0.1.6.dist-info/entry_points.txt,sha256=h85YEwzaEEGK4e6kBFC_G1omyGMqRF03y1H_IHiM5is,467
- pysfi-0.1.6.dist-info/RECORD,,
sfi/pypacker/fspacker.py DELETED
@@ -1,91 +0,0 @@
- from __future__ import annotations
-
- import argparse
- import logging
- from pathlib import Path
-
- logging.basicConfig(level=logging.INFO, format="%(message)s")
- logger = logging.getLogger(__name__)
-
- __version__ = "0.9.30"
- __build__ = "20260113"
-
-
- def main() -> None:
-     parser = argparse.ArgumentParser()
-     parser.add_argument(
-         "options",
-         choices=("build", "b", "run", "r", "clean", "c", "version", "v"),
-         help="Operation command",
-     )
-     parser.add_argument(
-         "target",
-         type=str,
-         default=None,
-         nargs="?",
-         help="Target name to match, supports fuzzy matching, leave empty if only one.",
-     )
-     parser.add_argument(
-         "directory",
-         type=str,
-         default=str(Path.cwd()),
-         nargs="?",
-         help="Project directory path",
-     )
-
-     parser.add_argument("--archive", "-a", action="store_true", help="zip packaging mode")
-     parser.add_argument("--compile", "-c", action="store_true", help="compile mode")
-     parser.add_argument("--debug", "-d", action="store_true", help="debug mode")
-     parser.add_argument("--gui", "-g", action="store_true", help="GUI mode")
-     parser.add_argument("--nsis", "-n", action="store_true", help="nsis packaging mode")
-     parser.add_argument("--offline", "-o", action="store_true", help="offline mode")
-     parser.add_argument("--rebuild", "-rb", action="store_true", help="rebuild mode")
-     parser.add_argument("--recursive", "-r", action="store_true", default=False, help="recursive")
-     parser.add_argument("--simplify", "-s", action="store_true", default=True, help="simplify mode")
-     parser.add_argument("--use-mingw", "-mingw", action="store_true", help="use mingw to compile")
-     parser.add_argument(
-         "--use-nuitka",
-         "-nuitka",
-         action="store_true",
-         default=True,
-         help="use nuitka to compile",
-     )
-     parser.add_argument("--with-tk", "-tk", action="store_true", help="package tk library")
-     parser.add_argument("--with-js", "-js", action="store_true", help="package js")
-
-     args = parser.parse_args()
-
-     # settings.mode["archive"] = args.archive
-     # settings.mode["compile"] = args.compile
-     # settings.mode["debug"] = args.debug
-     # settings.mode["gui"] = args.gui
-     # settings.mode["nsis"] = args.nsis
-     # settings.mode["offline"] = args.offline
-     # settings.mode["rebuild"] = args.rebuild
-     # settings.mode["recursive"] = args.recursive
-     # settings.mode["simplify"] = args.simplify
-     # settings.mode["use_mingw"] = args.use_mingw
-     # settings.mode["use_nuitka"] = args.use_nuitka
-     # settings.mode["with_tk"] = args.with_tk
-     # settings.mode["with_js"] = args.with_js
-
-     if args.debug:
-         logger.setLevel(logging.DEBUG)
-     else:
-         logger.setLevel(logging.INFO)
-
-     # logger.show_header()
-
-     if args.options in {"version", "v"}:
-         logger.info(f"pypacker {__version__} (build {__build__})")
-         return
-
-     # manager = ProjectManager(Path(args.directory), match_name=args.target)
-     # if args.options in {"build", "b"}:
-     # manager.build()
-     # elif args.options in {"run", "r"}:
-     # manager.run()
-     # elif args.options in {"clean", "c"}:
-     # manager.clean()
-
-     # settings.dump()