pysfi 0.1.12__py3-none-any.whl → 0.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. {pysfi-0.1.12.dist-info → pysfi-0.1.14.dist-info}/METADATA +1 -1
  2. pysfi-0.1.14.dist-info/RECORD +68 -0
  3. {pysfi-0.1.12.dist-info → pysfi-0.1.14.dist-info}/entry_points.txt +3 -0
  4. sfi/__init__.py +19 -2
  5. sfi/alarmclock/__init__.py +3 -0
  6. sfi/alarmclock/alarmclock.py +23 -40
  7. sfi/bumpversion/__init__.py +3 -1
  8. sfi/bumpversion/bumpversion.py +64 -15
  9. sfi/cleanbuild/__init__.py +3 -0
  10. sfi/cleanbuild/cleanbuild.py +5 -1
  11. sfi/cli.py +25 -4
  12. sfi/condasetup/__init__.py +1 -0
  13. sfi/condasetup/condasetup.py +91 -76
  14. sfi/docdiff/__init__.py +1 -0
  15. sfi/docdiff/docdiff.py +3 -2
  16. sfi/docscan/__init__.py +1 -1
  17. sfi/docscan/docscan.py +78 -23
  18. sfi/docscan/docscan_gui.py +152 -48
  19. sfi/filedate/filedate.py +12 -5
  20. sfi/img2pdf/img2pdf.py +453 -0
  21. sfi/llmclient/llmclient.py +31 -8
  22. sfi/llmquantize/llmquantize.py +76 -37
  23. sfi/llmserver/__init__.py +1 -0
  24. sfi/llmserver/llmserver.py +63 -13
  25. sfi/makepython/makepython.py +1145 -201
  26. sfi/pdfsplit/pdfsplit.py +45 -12
  27. sfi/pyarchive/__init__.py +1 -0
  28. sfi/pyarchive/pyarchive.py +908 -278
  29. sfi/pyembedinstall/pyembedinstall.py +88 -89
  30. sfi/pylibpack/pylibpack.py +561 -463
  31. sfi/pyloadergen/pyloadergen.py +372 -218
  32. sfi/pypack/pypack.py +510 -959
  33. sfi/pyprojectparse/pyprojectparse.py +337 -40
  34. sfi/pysourcepack/__init__.py +1 -0
  35. sfi/pysourcepack/pysourcepack.py +210 -131
  36. sfi/quizbase/quizbase_gui.py +2 -2
  37. sfi/taskkill/taskkill.py +168 -59
  38. sfi/which/which.py +11 -3
  39. pysfi-0.1.12.dist-info/RECORD +0 -62
  40. sfi/workflowengine/workflowengine.py +0 -444
  41. {pysfi-0.1.12.dist-info → pysfi-0.1.14.dist-info}/WHEEL +0 -0
  42. /sfi/{workflowengine → img2pdf}/__init__.py +0 -0
sfi/pyprojectparse/pyprojectparse.py (the only file whose hunks are expanded below; the added/removed totals match the +337 -40 listed for item 33 above)

@@ -16,13 +16,15 @@ import argparse
 import datetime
 import json
 import logging
+import platform
 import re
 import sys
 import time
 from dataclasses import dataclass, field
 from functools import cached_property
 from pathlib import Path
-from typing import Any
+from re import Pattern
+from typing import Any, Final
 
 if sys.version_info >= (3, 11):
     import tomllib
@@ -35,12 +37,15 @@ __all__ = ["Project", "Solution"]
 logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
 logger = logging.getLogger(__name__)
 cwd = Path.cwd()
+is_windows = platform.system() == "Windows"
 
 # Precompiled regex for dependency name extraction (optimization)
-_DEP_NAME_PATTERN = re.compile(r"^([a-zA-Z0-9._-]+)")
+_DEP_NAME_PATTERN: Final[Pattern[str]] = re.compile(r"^([a-zA-Z0-9._-]+)")
+_EXTRA_PATTERN: Final[Pattern[str]] = re.compile(r"\[([^\]]+)\]")
+_VERSION_PATTERN: Final[Pattern[str]] = re.compile(r"[<>=!~].*$")
 
 # Qt-related keywords and dependencies for faster detection
-_QT_DEPENDENCIES: frozenset[str] = frozenset((
+_QT_DEPENDENCIES: Final[frozenset[str]] = frozenset((
     "Qt",
     "PySide",
     "PyQt",
@@ -53,10 +58,77 @@ _QT_DEPENDENCIES: frozenset[str] = frozenset((
 ))
 
 # GUI-related keywords for faster detection
-_GUI_KEYWORDS: frozenset[str] = frozenset(("gui", "desktop"))
+_GUI_KEYWORDS: Final[frozenset[str]] = frozenset(("gui", "desktop"))
 
 # Required attributes for project validation (module-level constant for performance)
-_REQUIRED_ATTRS: frozenset[str] = frozenset(("name", "version", "description"))
+_REQUIRED_ATTRS: Final[frozenset[str]] = frozenset(("name", "version", "description"))
+
+
+@dataclass(frozen=True)
+class Dependency:
+    """Represents a Python package dependency."""
+
+    name: str = ""
+    version: str | None = None
+    extras: set[str] = field(default_factory=set)
+    requires: set[str] = field(default_factory=set)
+
+    @staticmethod
+    def from_str(dep_str: str) -> Dependency:
+        """Create a Dependency instance from a dependency string.
+
+        Args:
+            dep_str (str): The dependency string in the format "name[extras]version".
+
+        Returns:
+            Dependency: The created Dependency instance.
+        """
+        # Parse the dependency string to extract name, extras, and version
+        name = ""
+        version = None  # Use None initially, will be set if version is found
+        extras = set()
+
+        # First, extract extras if present (text within square brackets)
+        extras_match = _EXTRA_PATTERN.search(dep_str)
+        if extras_match:
+            extras_str = extras_match.group(1)
+            extras = {extra.strip() for extra in extras_str.split(",")}
+            # Remove the extras part from the string for further processing
+            dep_str = dep_str[: extras_match.start()] + dep_str[extras_match.end() :]
+
+        # Extract version specifier (starts with comparison operators like >=, <=, ==, etc.)
+        version_match = _VERSION_PATTERN.search(dep_str)
+        if version_match:
+            version = version_match.group()
+            dep_str = dep_str[: version_match.start()].strip()
+
+        # Remaining part is the name
+        name = dep_str.strip()
+
+        return Dependency(name=name, version=version, extras=extras, requires=set())
+
+    def __post_init__(self):
+        # Normalize the name after initialization
+        # This ensures that names like "Requests-OAuthLib" become "requests_oauthlib"
+        object.__setattr__(self, "name", self.name.lower().replace("-", "_"))
+
+    @cached_property
+    def normalized_name(self) -> str:
+        """Return the normalized name of the dependency (same as name since it's already normalized)."""
+        return self.name
+
+    def __str__(self) -> str:
+        """String representation of dependency."""
+        # Pre-cache version to avoid repeated evaluation
+        version_str = self.version or ""
+        if self.extras:
+            # Pre-cache sorted extras to avoid repeated sorting
+            sorted_extras = ",".join(sorted(self.extras))
+            return f"{self.name}[{sorted_extras}]{version_str}"
+        return f"{self.name}{version_str}"
+
+    def __hash__(self) -> int:
+        return hash((self.name, self.version, frozenset(self.extras)))
 
 
 @dataclass(frozen=True)
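
For reference, a minimal usage sketch of the new Dependency parser shown above. The import path assumes the module layout in this wheel (sfi/pyprojectparse/pyprojectparse.py), and the requirement string is hypothetical; output comments follow from the code in this hunk.

from sfi.pyprojectparse.pyprojectparse import Dependency

# Hypothetical requirement exercising extras, a version specifier, and name normalization.
dep = Dependency.from_str("Requests-OAuthLib[security,socks]>=1.3,<2")
print(dep.name)     # requests_oauthlib  (lowercased, "-" replaced by "_")
print(dep.extras)   # {'security', 'socks'}  (set, order may vary)
print(dep.version)  # >=1.3,<2
print(str(dep))     # requests_oauthlib[security,socks]>=1.3,<2
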
@@ -84,6 +156,8 @@ class Project:
         urls: Dictionary of project URLs (homepage, repository, etc.)
         build_backend: Build backend system used (e.g., "setuptools.build_meta")
         requires: List of build system requirements
+        toml_path: Path to the pyproject.toml file
+        solution_root_dir: Root directory of the solution (for multi-project setups)
     """
 
     name: str
@@ -102,11 +176,99 @@ class Project:
     urls: dict[str, str]
     build_backend: str
     requires: list[str]
+    toml_path: Path
+    solution_root_dir: Path | None = None
+
+    @cached_property
+    def min_python_version(self) -> str | None:
+        """Extract the minimum Python version from requires_python.
+
+        Parses the requires_python string to extract the minimum version requirement.
+        Supports formats like ">=3.8", ">3.7", "~=3.9", etc. Returns None if no
+        minimum version is specified or if the format is unrecognized.
+
+        Returns:
+            The minimum Python version as a string, or None if not found.
+        """
+        if not self.requires_python:
+            return None
+
+        # Pattern to match minimum Python version requirements
+        # Examples: >=3.8, >3.7, ~=3.9, ==3.8.*, etc.
+        patterns = [
+            r">=(\d+\.\d+(?:\.\d+)?)",  # >=3.8, >=3.8.1
+            r">(\d+\.\d+(?:\.\d+)?)",  # >3.7
+            r"~=?(\d+\.\d+(?:\.\d+)?)",  # ~=3.9, ~3.9
+            r"==?(\d+\.\d+(?:\.\d+)?)(?:\.\*)?",  # ==3.8, ==3.8.*, =3.8
+        ]
+
+        for pattern in patterns:
+            match = re.search(pattern, self.requires_python)
+            if match:
+                return match.group(1)
+
+        return None
+
+    @cached_property
+    def root_dir(self) -> Path:
+        """Return the root directory of the project."""
+        if not self.toml_path.is_file():
+            # Return current directory if toml_path is not set or invalid
+            # This can happen when Project is created with default values
+            return Path()
+
+        return self.toml_path.parent
+
+    @cached_property
+    def dist_dir(self) -> Path:
+        """Return the distribution directory of the project.
+
+        In multi-project solutions, returns solution_root_dir/dist.
+        In single-project setups, returns project_root_dir/dist.
+        """
+        if self.solution_root_dir:
+            dist_dir = self.solution_root_dir / "dist"
+        else:
+            dist_dir = self.root_dir / "dist"
+        dist_dir.mkdir(parents=True, exist_ok=True)
+        return dist_dir
+
+    @cached_property
+    def extension(self) -> str:
+        return ".exe" if is_windows else ""
+
+    @cached_property
+    def exe_name(self) -> str:
+        """Return the executable name of the project."""
+        return f"{self.name}{self.extension}"
+
+    @cached_property
+    def exe_path(self) -> Path:
+        """Return the executable path of the project."""
+        return self.dist_dir / f"{self.name}{self.extension}"
+
+    @cached_property
+    def runtime_dir(self) -> Path:
+        """Return the runtime directory of the project."""
+        runtime_dir = self.dist_dir / "runtime"
+        runtime_dir.mkdir(parents=True, exist_ok=True)
+        return runtime_dir
+
+    @cached_property
+    def lib_dir(self) -> Path:
+        """Return the lib directory of the project."""
+        lib_dir = self.dist_dir / "site-packages"
+        lib_dir.mkdir(parents=True, exist_ok=True)
+        return lib_dir
 
     @cached_property
     def normalized_name(self) -> str:
        return self.name.replace("-", "_")
 
+    @cached_property
+    def converted_dependencies(self) -> set[Dependency]:
+        return {Dependency.from_str(dep) for dep in self.dependencies}
+
     @cached_property
     def dep_names(self) -> set[str]:
@@ -118,7 +280,6 @@ class Project:
         Returns:
             Set of normalized dependency package names.
         """
-        # Use set comprehension directly to avoid intermediate list
         return {
             match.group(1)
             if (match := _DEP_NAME_PATTERN.match(main_part))
@@ -134,7 +295,10 @@ class Project:
         Returns:
             Set of Qt-related dependency names found in the project.
         """
-        return self.dep_names & _QT_DEPENDENCIES
+        # Pre-cache the sets to avoid attribute lookup overhead
+        dep_names_set = self.dep_names
+        qt_deps_set = _QT_DEPENDENCIES
+        return dep_names_set & qt_deps_set
 
     @cached_property
     def has_qt(self) -> bool:
@@ -147,7 +311,8 @@
     @cached_property
     def is_gui(self) -> bool:
         # Use set intersection for O(1) lookup instead of O(n) with any()
-        return bool(set(self.keywords) & _GUI_KEYWORDS)
+        # Also consider projects with Qt dependencies as GUI applications
+        return bool(set(self.keywords) & _GUI_KEYWORDS) or self.has_qt
 
     @cached_property
     def loader_type(self) -> str:
@@ -169,24 +334,46 @@
         """
         if not toml_file.is_file():
             logger.error(f"{toml_file} does not exist")
-            return Project.from_dict({})
+            return Project._from_empty_dict()
 
         try:
             with open(toml_file, "rb") as f:
                 data = tomllib.load(f)
         except Exception as e:
             logger.error(f"Error parsing {toml_file}: {e}")
-            return Project.from_dict({})
+            return Project._from_empty_dict()
 
         if "project" not in data:
             logger.error(f"No project section in {toml_file}")
-            return Project.from_dict({})
+            return Project._from_empty_dict()
 
         if "name" not in data["project"]:
             logger.error(f"No name in project section of {toml_file}")
-            return Project.from_dict({})
+            return Project._from_empty_dict()
+
+        # Extract project data and build system data separately for performance
+        project_data = data["project"]
+        build_system_data = data.get("build-system", {})
+
+        # Merge data efficiently
+        merged_data = project_data.copy()  # Start with project data
+
+        # Handle TOML field names that use hyphens but Python attributes use underscores
+        if "requires-python" in merged_data:
+            merged_data["requires_python"] = merged_data.pop("requires-python")
+
+        merged_data.update({"toml_path": toml_file})
+        merged_data.update(build_system_data)  # Add build system data
+
+        return Project._from_dict(merged_data)
 
-        return Project.from_dict({**data["project"], **data.get("build-system", {})})
+    def pack_source(self):
+        """Pack source code and resources to dist/src directory."""
+        pass
+
+    def pack_embed_python(self):
+        """Pack Python runtime to dist/runtime directory."""
+        pass
 
     @cached_property
     def raw_data(self) -> dict[str, Any]:
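
A small sketch of the merge step above, run against a hypothetical pyproject.toml. It mirrors the key handling shown in this hunk (hyphenated requires-python renamed, toml_path injected, build-system table merged last) rather than calling the package itself; tomllib requires Python 3.11+.

import tomllib
from pathlib import Path

# Hypothetical input file content.
toml_text = """
[project]
name = "demo"
requires-python = ">=3.11"

[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
"""

data = tomllib.loads(toml_text)
merged = data["project"].copy()
if "requires-python" in merged:
    merged["requires_python"] = merged.pop("requires-python")  # hyphenated TOML key -> attribute name
merged["toml_path"] = Path("pyproject.toml")                   # recorded for root_dir / dist_dir
merged.update(data.get("build-system", {}))
print(sorted(merged))
# ['build-backend', 'name', 'requires', 'requires_python', 'toml_path']
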
@@ -216,7 +403,12 @@ class Project:
         }
 
     @classmethod
-    def from_dict(cls, data: dict[str, Any]) -> Project:
+    def _from_empty_dict(cls) -> Project:
+        """Create a Project instance from an empty dictionary."""
+        return cls._from_dict({})
+
+    @classmethod
+    def _from_dict(cls, data: dict[str, Any]) -> Project:
         """Create a Project instance from a dictionary of project attributes.
 
         Args:
@@ -242,6 +434,8 @@ class Project:
             urls=data.get("urls", {}),
             build_backend=data.get("build_backend", ""),
             requires=data.get("requires", []),
+            toml_path=data.get("toml_path", Path("")),
+            solution_root_dir=data.get("solution_root_dir"),
         )
 
 
@@ -268,27 +462,41 @@ class Solution:
     time_stamp: datetime.datetime = field(default_factory=datetime.datetime.now)
 
     def __repr__(self):
-        return f"""
-<Solution root={self.root_dir}
-projects: {len(self.projects)}
-update: {self.update}
-time_used: {self.elapsed_time:.4f}s
-timestamp: {self.time_stamp}
->"""
-
-    @cached_property
-    def elapsed_time(self) -> float:
-        """Calculate and cache the elapsed time since start."""
-        return time.perf_counter() - self.start_time
+        return (
+            f"<Solution(\n"
+            f" root_dir={self.root_dir!r},\n"
+            f" projects: {len(self.projects)},\n"
+            f" update={self.update!r},\n"
+            f" time_used={self.elapsed_time:.4f}s,\n"
+            f" timestamp={self.time_stamp!r}\n"
+            f")>"
+        )
 
     def __post_init__(self):
-        logger.info(f"\t - Loaded {len(self.projects)} projects from {self.root_dir}")
         # Only log brief summary to avoid overly verbose output
         logger.info(
-            f"\t - Solution created in {self.elapsed_time:.4f}s at {self.time_stamp}"
+            f"Solution: {len(self.projects)} projects from {self.root_dir}, "
+            f"created in {self.elapsed_time:.4f}s at {self.time_stamp:%Y-%m-%d %H:%M:%S}"
        )
         self._write_project_json()
 
+    @cached_property
+    def elapsed_time(self) -> float:
+        """Calculate and cache the elapsed time since start."""
+        return time.perf_counter() - self.start_time
+
+    @cached_property
+    def dependencies(self) -> set[str]:
+        """Get a set of all dependencies for all projects in the solution.
+
+        Returns:
+            Set of dependency package names.
+        """
+        # Use set comprehension for better performance
+        return {
+            dep for project in self.projects.values() for dep in project.dependencies
+        }
+
     @cached_property
     def json_file(self) -> Path:
         """Path to the cache file where project data is stored.
@@ -298,6 +506,69 @@ class Solution:
         """
         return self.root_dir / "projects.json"
 
+    def find_matching_projects(self, pattern: str) -> list[str]:
+        """Find all projects matching the given pattern (case-insensitive).
+
+        Args:
+            pattern: Pattern to match (substring, case-insensitive)
+
+        Returns:
+            List of matching project names
+        """
+        if not pattern:
+            return []
+
+        lower_pattern = pattern.lower()
+        return [name for name in self.projects if lower_pattern in name.lower()]
+
+    def resolve_project_name(self, project_name: str | None) -> str | None:
+        """Resolve project name with fuzzy matching support.
+
+        Resolution strategy:
+            1. If project_name is None: auto-select if only one project exists
+            2. Exact match: return if project name exists
+            3. Fuzzy match: find projects containing the given substring (case-insensitive)
+            4. If multiple fuzzy matches: return None (ambiguous)
+
+        Args:
+            project_name: Project name to resolve, or None for auto-selection
+
+        Returns:
+            Resolved project name, or None if resolution fails
+
+        Examples:
+            >>> solution.resolve_project_name(None)  # Auto-select if single project
+            'myproject'
+            >>> solution.resolve_project_name('docscan')  # Exact match
+            'docscan'
+            >>> solution.resolve_project_name('doc')  # Fuzzy match
+            'docscan'
+            >>> solution.resolve_project_name('scan')  # Fuzzy match
+            'docscan'
+        """
+        # Auto-select if only one project
+        if not project_name:
+            if len(self.projects) == 1:
+                return next(iter(self.projects.keys()))
+            return None
+
+        # Exact match (case-sensitive)
+        if project_name in self.projects:
+            return project_name
+
+        # Fuzzy match (case-insensitive substring matching)
+        lower_name = project_name.lower()
+        matches = [name for name in self.projects if lower_name in name.lower()]
+
+        if len(matches) == 1:
+            return matches[0]
+        elif len(matches) > 1:
+            # Multiple matches found - ambiguous
+            return None
+
+        # No match found
+        return None
+
     @classmethod
     def from_toml_files(
         cls, root_dir: Path, toml_files: list[Path], update: bool = False
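
A hedged usage sketch of the new project-name resolution. It assumes Solution and Project are importable from this module and that Solution's remaining fields keep the defaults implied by the constructor calls shown elsewhere in this diff; a temporary directory is used because __post_init__ writes projects.json into root_dir.

import tempfile
from pathlib import Path
from sfi.pyprojectparse.pyprojectparse import Project, Solution

with tempfile.TemporaryDirectory() as tmp:
    # One-project solution built from a minimal, hypothetical project dict.
    solution = Solution(
        root_dir=Path(tmp),
        projects={"docscan": Project._from_dict({"name": "docscan", "version": "0.1.0"})},
    )
    print(solution.resolve_project_name(None))       # 'docscan' (single project -> auto-select)
    print(solution.resolve_project_name("docscan"))  # 'docscan' (exact match)
    print(solution.resolve_project_name("scan"))     # 'docscan' (unique substring match)
    print(solution.resolve_project_name("xyz"))      # None (no match)
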
@@ -322,6 +593,31 @@
                     f"Warning: {toml_file} does not contain project information"
                 )
                 continue
+
+            # For multi-project solutions, set solution_root_dir
+            # Create new Project with solution_root_dir set
+            if len(toml_files) > 1:
+                project = Project(
+                    name=project.name,
+                    version=project.version,
+                    description=project.description,
+                    readme=project.readme,
+                    requires_python=project.requires_python,
+                    dependencies=project.dependencies,
+                    optional_dependencies=project.optional_dependencies,
+                    scripts=project.scripts,
+                    entry_points=project.entry_points,
+                    authors=project.authors,
+                    license=project.license,
+                    keywords=project.keywords,
+                    classifiers=project.classifiers,
+                    urls=project.urls,
+                    build_backend=project.build_backend,
+                    requires=project.requires,
+                    toml_path=project.toml_path,
+                    solution_root_dir=root_dir,
+                )
+
             projects[project.name] = project
 
         return cls(root_dir=root_dir, projects=projects, update=update)
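
Design note: the field-by-field reconstruction above exists because Project is a frozen dataclass, so solution_root_dir cannot be assigned after creation. An equivalent, more compact copy (an observation, not what the package ships) would be:

import dataclasses

# Copies every field and overrides solution_root_dir, same effect as the explicit Project(...) call above.
project = dataclasses.replace(project, solution_root_dir=root_dir)
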
@@ -347,16 +643,16 @@
                 # Check if the value has a "project" section (from pyproject.toml parsing)
                 if "project" in value:
                     project_data = value.get("project", {})
-                    projects[key] = Project.from_dict(project_data)
+                    projects[key] = Project._from_dict(project_data)
                 else:
                     # Check if the value contains direct project attributes
                     # Use set intersection with module-level constant for faster attribute checking
                     if value.keys() & _REQUIRED_ATTRS:
-                        projects[key] = Project.from_dict(value)
+                        projects[key] = Project._from_dict(value)
                     else:
                         # No project section or recognizable project attributes found
                         logger.warning(f"No project information found in {key}")
-                        projects[key] = Project.from_dict({})
+                        projects[key] = Project._from_empty_dict()
             else:
                 projects[key] = value
         except (TypeError, ValueError) as e:
@@ -385,6 +681,8 @@
             logger.debug(f"Loading project data from {json_file}...")
             with json_file.open("r", encoding="utf-8") as f:
                 loaded_data = json.load(f)
+            logger.debug(f"\t - Loaded project data from {json_file}")
+            return cls.from_json_data(json_file.parent, loaded_data, update=update)
         except (OSError, json.JSONDecodeError, KeyError) as e:
             logger.error(f"Error loading project data from {json_file}: {e}")
             return cls(root_dir=json_file.parent, projects={})
@@ -392,9 +690,6 @@
             logger.error(f"Unknown error loading project data from {json_file}: {e}")
             return cls(root_dir=json_file.parent, projects={})
 
-        logger.debug(f"\t - Loaded project data from {json_file}")
-        return cls.from_json_data(json_file.parent, loaded_data, update=update)
-
     @classmethod
     def from_directory(cls, root_dir: Path, update: bool = False) -> Solution:
         """Create a Solution instance by scanning a directory for pyproject.toml files.
@@ -441,8 +736,7 @@
             return
 
         try:
-            # Convert Project objects to dictionaries for JSON serialization
-            # Use dict comprehension for better performance
+            # Pre-cache raw_data access to avoid repeated property access
             serializable_data = {
                 key: project_data.raw_data
                 if isinstance(project_data, Project)
@@ -452,14 +746,11 @@
 
             with json_file.open("w", encoding="utf-8") as f:
                 json.dump(serializable_data, f, indent=2, ensure_ascii=False)
+            logger.info(f"Output written to {json_file}")
         except (OSError, json.JSONDecodeError, KeyError) as e:
             logger.error(f"Error writing output to {json_file}: {e}")
-            return
         except Exception as e:
             logger.error(f"Unknown error writing output to {json_file}: {e}")
-            return
-        else:
-            logger.info(f"Output written to {json_file}")
 
 
 def create_parser() -> argparse.ArgumentParser:
@@ -491,6 +782,12 @@
 
 
 def main() -> None:
+    """Main entry point for the pyproject.toml parser tool.
+
+    Parses command line arguments and creates a Solution instance by scanning
+    the specified directory for pyproject.toml files. Uses cached data if
+    available unless the update flag is set.
+    """
     parser = create_parser()
     args = parser.parse_args()
 
@@ -0,0 +1 @@
+