pysfi 0.1.7__py3-none-any.whl → 0.1.11__py3-none-any.whl

Files changed (55)
  1. {pysfi-0.1.7.dist-info → pysfi-0.1.11.dist-info}/METADATA +11 -9
  2. pysfi-0.1.11.dist-info/RECORD +60 -0
  3. pysfi-0.1.11.dist-info/entry_points.txt +28 -0
  4. sfi/__init__.py +1 -1
  5. sfi/alarmclock/alarmclock.py +40 -40
  6. sfi/bumpversion/__init__.py +1 -1
  7. sfi/cleanbuild/cleanbuild.py +155 -0
  8. sfi/condasetup/condasetup.py +116 -0
  9. sfi/docscan/__init__.py +1 -1
  10. sfi/docscan/docscan.py +407 -103
  11. sfi/docscan/docscan_gui.py +1282 -596
  12. sfi/docscan/lang/eng.py +152 -0
  13. sfi/docscan/lang/zhcn.py +170 -0
  14. sfi/filedate/filedate.py +185 -112
  15. sfi/gittool/__init__.py +2 -0
  16. sfi/gittool/gittool.py +401 -0
  17. sfi/llmclient/llmclient.py +592 -0
  18. sfi/llmquantize/llmquantize.py +480 -0
  19. sfi/llmserver/llmserver.py +335 -0
  20. sfi/makepython/makepython.py +31 -30
  21. sfi/pdfsplit/pdfsplit.py +173 -173
  22. sfi/pyarchive/pyarchive.py +418 -0
  23. sfi/pyembedinstall/pyembedinstall.py +629 -0
  24. sfi/pylibpack/__init__.py +0 -0
  25. sfi/pylibpack/pylibpack.py +1457 -0
  26. sfi/pylibpack/rules/numpy.json +22 -0
  27. sfi/pylibpack/rules/pymupdf.json +10 -0
  28. sfi/pylibpack/rules/pyqt5.json +19 -0
  29. sfi/pylibpack/rules/pyside2.json +23 -0
  30. sfi/pylibpack/rules/scipy.json +23 -0
  31. sfi/pylibpack/rules/shiboken2.json +24 -0
  32. sfi/pyloadergen/pyloadergen.py +512 -227
  33. sfi/pypack/__init__.py +0 -0
  34. sfi/pypack/pypack.py +1142 -0
  35. sfi/pyprojectparse/__init__.py +0 -0
  36. sfi/pyprojectparse/pyprojectparse.py +500 -0
  37. sfi/pysourcepack/pysourcepack.py +308 -0
  38. sfi/quizbase/__init__.py +0 -0
  39. sfi/quizbase/quizbase.py +828 -0
  40. sfi/quizbase/quizbase_gui.py +987 -0
  41. sfi/regexvalidate/__init__.py +0 -0
  42. sfi/regexvalidate/regex_help.html +284 -0
  43. sfi/regexvalidate/regexvalidate.py +468 -0
  44. sfi/taskkill/taskkill.py +0 -2
  45. sfi/workflowengine/__init__.py +0 -0
  46. sfi/workflowengine/workflowengine.py +444 -0
  47. pysfi-0.1.7.dist-info/RECORD +0 -31
  48. pysfi-0.1.7.dist-info/entry_points.txt +0 -15
  49. sfi/embedinstall/embedinstall.py +0 -418
  50. sfi/projectparse/projectparse.py +0 -152
  51. sfi/pypacker/fspacker.py +0 -91
  52. {pysfi-0.1.7.dist-info → pysfi-0.1.11.dist-info}/WHEEL +0 -0
  53. /sfi/{embedinstall → docscan/lang}/__init__.py +0 -0
  54. /sfi/{projectparse → llmquantize}/__init__.py +0 -0
  55. /sfi/{pypacker → pyembedinstall}/__init__.py +0 -0
sfi/workflowengine/workflowengine.py
@@ -0,0 +1,444 @@
+ """Workflow Engine - A flexible async task orchestration system.
+
+ This module provides a comprehensive workflow engine for managing
+ complex task dependencies with support for I/O tasks, CPU-intensive tasks,
+ serial tasks, and parallel task execution.
+ """
+
+ from __future__ import annotations
+
+ import asyncio
+ import time
+ from abc import ABC, abstractmethod
+ from collections import defaultdict, deque
+ from dataclasses import dataclass
+ from enum import Enum
+ from typing import Any, Callable, Sequence
+
+
+ class TaskStatus(Enum):
+     """Task status enumeration"""
+
+     PENDING = "pending"
+     READY = "ready"
+     RUNNING = "running"
+     COMPLETED = "completed"
+     FAILED = "failed"
+
+
+ class TaskType(Enum):
+     """Task type enumeration"""
+
+     SERIAL = "serial"  # Serial task
+     PARALLEL = "parallel"  # Parallel task
+     ASYNC = "async"  # Async I/O task
+     CPU = "cpu"  # CPU-intensive task
+
+
+ @dataclass
+ class TaskResult:
+     """Task execution result"""
+
+     task_id: str
+     success: bool
+     data: Any
+     execution_time: float
+     error: Exception | None = None
+
+
+ class Task(ABC):
+     """Task abstract base class"""
+
+     def __init__(self, task_id: str, task_type: TaskType, dependencies: list[str] | None = None, timeout: float = 30.0):
+         self.task_id = task_id
+         self.task_type = task_type
+         self.dependencies = dependencies or []
+         self.timeout = timeout
+         self.status = TaskStatus.PENDING
+         self.result: TaskResult | None = None
+         self.start_time: float | None = None
+         self.end_time: float | None = None
+
+     def get_dependencies(self) -> list[str]:
+         """Get the list of dependent task IDs"""
+         return self.dependencies.copy()
+
+     def can_execute(self, completed_tasks: set[str]) -> bool:
+         """Check whether the task can execute (all dependencies satisfied)"""
+         return all(dep in completed_tasks for dep in self.dependencies)
+
+     def update_status(self, status: TaskStatus):
+         """Update task status"""
+         self.status = status
+
+     @abstractmethod
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """Execute task logic; must be implemented by subclasses"""
+         pass
+
+     def get_execution_time(self) -> float:
+         """Get task execution time"""
+         if self.start_time and self.end_time:
+             return self.end_time - self.start_time
+         return 0.0
+
+
+ class IOTask(Task):
+     """I/O-intensive task"""
+
+     def __init__(self, task_id: str, duration: float, dependencies: list[str] | None = None, timeout: float = 30.0):
+         super().__init__(task_id, TaskType.ASYNC, dependencies, timeout)
+         self.duration = duration
+
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """Simulate an I/O operation"""
+         print(f"[IO] Starting task {self.task_id}, estimated duration: {self.duration}s")
+         return await asyncio.wait_for(self._execute_io(context), timeout=self.timeout)
+
+     async def _execute_io(self, context: dict[str, TaskResult]) -> Any:
+         """Internal I/O execution method"""
+         await asyncio.sleep(self.duration)
+         return f"IO task {self.task_id} completed, dependencies: {list(context.keys())}"
+
+
+ class CPUTask(Task):
+     """CPU-intensive task"""
+
+     def __init__(self, task_id: str, iterations: int, dependencies: list[str] | None = None, timeout: float = 30.0):
+         super().__init__(task_id, TaskType.CPU, dependencies, timeout)
+         self.iterations = iterations
+
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """CPU-intensive computation task"""
+         print(f"[CPU] Starting task {self.task_id}, iterations: {self.iterations}")
+
+         # Run the CPU-bound work in a thread pool so it does not block the event loop
+         def cpu_intensive_work():
+             result = 0
+             for i in range(self.iterations):
+                 result += i * i
+             return result
+
+         # Wrap in asyncio.wait_for to prevent infinite hangs; run_in_executor keeps
+         # Python 3.8 compatibility (asyncio.to_thread is Python 3.9+)
+         loop = asyncio.get_running_loop()
+         result = await asyncio.wait_for(loop.run_in_executor(None, cpu_intensive_work), timeout=self.timeout)
+         return f"CPU task {self.task_id} completed, result: {result}"
+
+
+ class SerialTask(Task):
+     """Serial task (stateful, must execute sequentially)"""
+
+     def __init__(
+         self, task_id: str, process_func: Callable, dependencies: list[str] | None = None, timeout: float = 30.0
+     ):
+         super().__init__(task_id, TaskType.SERIAL, dependencies, timeout)
+         self.process_func = process_func
+         self.state = {}
+
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """Execute the serial task"""
+         print(f"[Serial] Starting serial task {self.task_id}")
+
+         # Collect results from dependent tasks
+         inputs = {dep_id: context[dep_id].data for dep_id in self.dependencies}
+
+         # Execute the process function (sync or async)
+         if asyncio.iscoroutinefunction(self.process_func):
+             result = await self.process_func(inputs, self.state)
+         else:
+             result = self.process_func(inputs, self.state)
+
+         # Update state
+         self.state = {"last_result": result, "executed": True}
+
+         return f"Serial task {self.task_id} completed, result: {result}"
+
+
+ class ParallelTask(Task):
+     """Parallel task (subtasks can execute concurrently)"""
+
+     def __init__(
+         self,
+         task_id: str,
+         subtasks: Sequence[Task],
+         dependencies: list[str] | None = None,
+         timeout: float = 30.0,
+         max_concurrent: int = 3,
+     ):
+         super().__init__(task_id, TaskType.PARALLEL, dependencies, timeout)
+         self.subtasks = subtasks
+         self.max_concurrent = max_concurrent
+
+     async def execute(self, context: dict[str, TaskResult]) -> Any:
+         """Execute subtasks in parallel"""
+         print(f"[Parallel] Starting parallel task {self.task_id}, contains {len(self.subtasks)} subtasks")
+
+         # Semaphore caps subtask concurrency
+         semaphore = asyncio.Semaphore(self.max_concurrent)
+
+         async def execute_subtask(subtask: Task, sem: asyncio.Semaphore):
+             async with sem:
+                 subtask.start_time = time.time()
+                 subtask.update_status(TaskStatus.RUNNING)
+
+                 try:
+                     data = await asyncio.wait_for(subtask.execute(context), timeout=subtask.timeout)
+                     subtask.result = TaskResult(
+                         task_id=subtask.task_id,
+                         success=True,
+                         data=data,
+                         execution_time=time.time() - subtask.start_time,
+                     )
+                     subtask.update_status(TaskStatus.COMPLETED)
+                     return subtask.result
+                 except asyncio.TimeoutError as e:
+                     error = TimeoutError(f"Task {subtask.task_id} execution timed out")
+                     subtask.result = TaskResult(
+                         task_id=subtask.task_id,
+                         success=False,
+                         data=None,
+                         execution_time=time.time() - subtask.start_time,
+                         error=error,
+                     )
+                     subtask.update_status(TaskStatus.FAILED)
+                     raise error from e
+                 except Exception as e:
+                     subtask.result = TaskResult(
+                         task_id=subtask.task_id,
+                         success=False,
+                         data=None,
+                         execution_time=time.time() - subtask.start_time,
+                         error=e,
+                     )
+                     subtask.update_status(TaskStatus.FAILED)
+                     raise
+
+         # Execute all subtasks in parallel
+         tasks = [execute_subtask(subtask, semaphore) for subtask in self.subtasks]
+         results = await asyncio.gather(*tasks, return_exceptions=True)
+
+         # Pair each subtask with its result
+         successful_results = []
+         failed_results = []
+
+         for subtask, result in zip(self.subtasks, results):
+             if isinstance(result, Exception):
+                 failed_results.append(f"Subtask {subtask.task_id} failed: {result}")
+             elif isinstance(result, TaskResult):
+                 successful_results.append(result.data)
+
+         if failed_results:
+             return f"Parallel task {self.task_id} partially failed: {failed_results}"
+
+         return f"Parallel task {self.task_id} completed, results: {successful_results}"
+
+
+ class WorkflowEngine:
+     """Workflow engine - the core orchestrator"""
+
+     def __init__(self, max_concurrent: int = 4):
+         self.tasks: dict[str, Task] = {}
+         self.results: dict[str, TaskResult] = {}
+         self.max_concurrent = max_concurrent
+         self.execution_order: list[list[str]] = []
+
+     def add_task(self, task: Task):
+         """Add a task to the workflow"""
+         self.tasks[task.task_id] = task
+
+     def validate_dependencies(self) -> bool:
+         """Validate task dependencies and ensure there are no cycles"""
+         # Build adjacency list
+         graph = defaultdict(list)
+         in_degree = dict.fromkeys(self.tasks, 0)
+
+         for task_id, task in self.tasks.items():
+             for dep in task.get_dependencies():
+                 if dep not in self.tasks:
+                     raise ValueError(f"Task {task_id} depends on unknown task {dep}")
+                 graph[dep].append(task_id)
+                 in_degree[task_id] += 1
+
+         # Detect circular dependencies (Kahn's algorithm)
+         visited = 0
+         queue = deque([task_id for task_id, degree in in_degree.items() if degree == 0])
+
+         while queue:
+             current = queue.popleft()
+             visited += 1
+
+             for neighbor in graph[current]:
+                 in_degree[neighbor] -= 1
+                 if in_degree[neighbor] == 0:
+                     queue.append(neighbor)
+
+         if visited != len(self.tasks):
+             raise ValueError("Circular dependency detected in workflow")
+
+         return True
+
+     def calculate_execution_order(self) -> list[list[str]]:
+         """Calculate task execution order (topological sort with level grouping)"""
+         if not self.tasks:
+             return []
+
+         # Build adjacency list
+         graph = defaultdict(list)
+         in_degree = dict.fromkeys(self.tasks, 0)
+
+         for task_id, task in self.tasks.items():
+             for dep in task.get_dependencies():
+                 graph[dep].append(task_id)
+                 in_degree[task_id] += 1
+
+         # Level-based topological sort
+         execution_order = []
+         queue = deque([task_id for task_id, degree in in_degree.items() if degree == 0])
+
+         while queue:
+             level_size = len(queue)
+             current_level = []
+
+             for _ in range(level_size):
+                 task_id = queue.popleft()
+                 current_level.append(task_id)
+
+                 for neighbor in graph[task_id]:
+                     in_degree[neighbor] -= 1
+                     if in_degree[neighbor] == 0:
+                         queue.append(neighbor)
+
+             if current_level:
+                 execution_order.append(current_level)
+
+         self.execution_order = execution_order
+         return execution_order
+
+     async def execute_workflow(self) -> dict[str, TaskResult]:
+         """Execute the entire workflow"""
+         print("=" * 50)
+         print("Starting workflow execution")
+         print("=" * 50)
+
+         # Validate dependencies
+         self.validate_dependencies()
+
+         # Calculate execution order
+         execution_order = self.calculate_execution_order()
+         print(f"Execution plan ({len(execution_order)} phases):")
+         for i, level in enumerate(execution_order, 1):
+             print(f"  Phase {i}: {level}")
+
+         # Execute level by level
+         completed_tasks: set[str] = set()
+
+         for level_index, level in enumerate(execution_order, 1):
+             print(f"\n{'=' * 20} Phase {level_index} ({len(level)} tasks) {'=' * 20}")
+
+             # Filter the tasks in this level that are ready to run
+             ready_tasks = []
+             for task_id in level:
+                 task = self.tasks[task_id]
+                 if task.can_execute(completed_tasks):
+                     task.update_status(TaskStatus.READY)
+                     ready_tasks.append(task)
+
+             if not ready_tasks:
+                 continue
+
+             # Semaphore caps concurrency within this level
+             semaphore = asyncio.Semaphore(self.max_concurrent)
+
+             async def execute_single_task(task: Task, sem: asyncio.Semaphore):
+                 async with sem:
+                     task.start_time = time.time()
+                     task.update_status(TaskStatus.RUNNING)
+
+                     try:
+                         # Collect results from dependent tasks
+                         dependency_results = {dep_id: self.results[dep_id] for dep_id in task.get_dependencies()}
+
+                         # Execute the task
+                         data = await asyncio.wait_for(task.execute(dependency_results), timeout=task.timeout)
+
+                         task.end_time = time.time()
+                         task.result = TaskResult(
+                             task_id=task.task_id, success=True, data=data, execution_time=task.get_execution_time()
+                         )
+                         task.update_status(TaskStatus.COMPLETED)
+
+                         # Store the result
+                         self.results[task.task_id] = task.result
+                         completed_tasks.add(task.task_id)
+
+                         print(f"[OK] Task {task.task_id} completed, duration: {task.get_execution_time():.2f}s")
+
+                         return task.result
+
+                     except asyncio.TimeoutError as e:
+                         task.end_time = time.time()
+                         task.result = TaskResult(
+                             task_id=task.task_id,
+                             success=False,
+                             data=None,
+                             execution_time=task.get_execution_time(),
+                             error=e,
+                         )
+                         task.update_status(TaskStatus.FAILED)
+
+                         # Store the result and mark the task as settled (even though it failed)
+                         self.results[task.task_id] = task.result
+                         completed_tasks.add(task.task_id)
+
+                         print(f"[FAIL] Task {task.task_id} timed out")
+                         raise
+                     except Exception as e:
+                         task.end_time = time.time()
+                         task.result = TaskResult(
+                             task_id=task.task_id,
+                             success=False,
+                             data=None,
+                             execution_time=task.get_execution_time(),
+                             error=e,
+                         )
+                         task.update_status(TaskStatus.FAILED)
+
+                         # Store the result and mark the task as settled (even though it failed)
+                         self.results[task.task_id] = task.result
+                         completed_tasks.add(task.task_id)
+
+                         print(f"[FAIL] Task {task.task_id} failed: {e}")
+                         raise
+
+             # Execute all ready tasks in this level in parallel
+             tasks_to_execute = [execute_single_task(task, semaphore) for task in ready_tasks]
+
+             # return_exceptions=True lets every task settle even if some fail
+             await asyncio.gather(*tasks_to_execute, return_exceptions=True)
+
+         print(f"\n{'=' * 50}")
+         print("Workflow execution completed")
+         print(f"{'=' * 50}")
+
+         return self.results
+
+     def get_execution_summary(self) -> dict[str, Any]:
+         """Get an execution summary"""
+         total_tasks = len(self.tasks)
+         completed = sum(1 for task in self.tasks.values() if task.status == TaskStatus.COMPLETED)
+         failed = sum(1 for task in self.tasks.values() if task.status == TaskStatus.FAILED)
+
+         total_time = 0.0
+         for task in self.tasks.values():
+             if task.result:
+                 total_time += task.result.execution_time
+
+         return {
+             "total_tasks": total_tasks,
+             "completed": completed,
+             "failed": failed,
+             "pending": total_tasks - completed - failed,
+             "total_execution_time": total_time,
+             "success_rate": completed / total_tasks if total_tasks > 0 else 0,
+         }
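
For orientation, here is a minimal sketch of how the new module could be driven. It is a hypothetical example, not code shipped in the wheel: the class names and signatures come from the diff above, the import path is inferred from the file list, and the task IDs, durations, and the merge helper are made up for illustration.

import asyncio

from sfi.workflowengine.workflowengine import CPUTask, IOTask, SerialTask, WorkflowEngine


def merge(inputs, state):
    # A SerialTask process function receives its dependencies' results
    # ({task_id: data}) plus the task's own mutable state dict.
    return {"combined": sorted(inputs), "ran_before": state.get("executed", False)}


async def main():
    engine = WorkflowEngine(max_concurrent=4)
    engine.add_task(IOTask("fetch", duration=0.5))  # no dependencies: phase 1
    engine.add_task(CPUTask("crunch", iterations=100_000, dependencies=["fetch"]))
    engine.add_task(SerialTask("merge", merge, dependencies=["fetch", "crunch"]))

    # execute_workflow() validates the graph and runs the computed phases
    # [["fetch"], ["crunch"], ["merge"]] level by level.
    await engine.execute_workflow()
    print(engine.get_execution_summary())


asyncio.run(main())

Note that failed tasks are stored in results with success=False rather than aborting the run, so get_execution_summary()'s success_rate reflects partial failures.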
pysfi-0.1.7.dist-info/RECORD
@@ -1,31 +0,0 @@
- sfi/__init__.py,sha256=pSXPoNOdDwew8sssXmWQjA8u92VyNiQ3MXZMcPa9q3c,74
- sfi/cli.py,sha256=bUUTOg18sJQbSKSfsVANhlMgSj9yzO2txIzFAd9B2Ok,296
- sfi/alarmclock/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/alarmclock/alarmclock.py,sha256=65G8OyTGpe4oQ2SFerQG1N9PVJ4KxO7WzgsTxpGm4O0,12509
- sfi/bumpversion/__init__.py,sha256=GvXipQzQiXXJJ-sG-W4JJTSnOlhClstCnUDgHGrVNMo,85
- sfi/bumpversion/bumpversion.py,sha256=HOyHLaE0sZajrlcVZ8hsim8mPjz77qwQVSo6aIzjMXE,20735
- sfi/docscan/__init__.py,sha256=h-NVUVySlTSfrpVYXKrcOToVRml7WYRynS2viIztWZo,120
- sfi/docscan/docscan.py,sha256=Qigj3nB1hT5JpKQzY79QiS-R8BcqQbF7NZSsBSZRSXA,30899
- sfi/docscan/docscan_gui.py,sha256=iJpUCsbxzgNCMUYinjm_dYvW7JSPb0IAlr_CX9KiiGE,21953
- sfi/embedinstall/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/embedinstall/embedinstall.py,sha256=N5EbTDdX4bE3W0qHGAwAUuepqFr0sbdZuPI3KWrtuUY,14936
- sfi/filedate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/filedate/filedate.py,sha256=DpVp26lumE_Lz_4TgqUEX8IxtK3Y6yHSEFV8qJyegyk,3645
- sfi/makepython/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/makepython/makepython.py,sha256=IVy5MxrLQFJ0O3TPu0E2P7DBfHveFv-3ctBP5U2Dyts,11351
- sfi/pdfsplit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/pdfsplit/pdfsplit.py,sha256=9M1o9QjTxAAa0DHbTTbDeap2evcazWlb98u80hzJmx0,6004
- sfi/projectparse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/projectparse/projectparse.py,sha256=Ojg-z4lZEtjEBpJYWyznTgL307N45AxlQKnRkEH0P70,5525
- sfi/pyloadergen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/pyloadergen/pyloadergen.py,sha256=Erzz1PwrEQcDSCxXG-4DZ-CZavDt6MNv7k3nET5IB9U,32423
- sfi/pypacker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/pypacker/fspacker.py,sha256=3tlS7qiWoH_kOzsp9eSWsQ-SY7-bSTugwfB-HIL69iE,3238
- sfi/taskkill/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/taskkill/taskkill.py,sha256=6Aw4afmgfLZcQnvgG_38A1VrwazDrnNdOmY1l4kr0lc,7758
- sfi/which/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sfi/which/which.py,sha256=zVIAwZA-pGGngxkkwZ6IxDX3ozVHg7cLSYwYO9FjaIc,2439
- pysfi-0.1.7.dist-info/METADATA,sha256=f7gQc-a5QA5cNdnQf7X5DRCHR8ZHmFvNoSQYFI6GQLU,3922
- pysfi-0.1.7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- pysfi-0.1.7.dist-info/entry_points.txt,sha256=9AaMwUXB86BYgn-RkAqAdd7J3saw0emxsVnO0Q1xCww,564
- pysfi-0.1.7.dist-info/RECORD,,
pysfi-0.1.7.dist-info/entry_points.txt
@@ -1,15 +0,0 @@
- [console_scripts]
- alarmclk = sfi.alarmclock.alarmclock:main
- bumpversion = sfi.bumpversion.bumpversion:main
- docscan = sfi.docscan.docscan:main
- docscan-gui = sfi.docscan.docscan_gui:main
- embedinstall = sfi.embedinstall.embedinstall:main
- filedate = sfi.filedate.filedate:main
- mkp = sfi.makepython.makepython:main
- pdfsplit = sfi.pdfsplit.pdfsplit:main
- projectparse = sfi.projectparse.projectparse:main
- pyloadergen = sfi.pyloadergen.pyloadergen:main
- pypacker = sfi.pypacker.pypacker:main
- sfi = sfi.cli:main
- taskk = sfi.taskkill.taskkill:main
- wch = sfi.which.which:main