exportify-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. exportify/__init__.py +80 -0
  2. exportify/_version.py +9 -0
  3. exportify/analysis/__init__.py +10 -0
  4. exportify/analysis/ast_parser.py +389 -0
  5. exportify/analysis/ast_parser_overload.py +113 -0
  6. exportify/cli.py +54 -0
  7. exportify/commands/__init__.py +53 -0
  8. exportify/commands/check.py +355 -0
  9. exportify/commands/clear_cache.py +47 -0
  10. exportify/commands/doctor.py +79 -0
  11. exportify/commands/fix.py +220 -0
  12. exportify/commands/generate.py +177 -0
  13. exportify/commands/init.py +98 -0
  14. exportify/commands/status.py +78 -0
  15. exportify/commands/utils.py +261 -0
  16. exportify/common/__init__.py +7 -0
  17. exportify/common/cache.py +422 -0
  18. exportify/common/config.py +218 -0
  19. exportify/common/types.py +500 -0
  20. exportify/discovery/__init__.py +10 -0
  21. exportify/discovery/file_discovery.py +158 -0
  22. exportify/export_manager/__init__.py +44 -0
  23. exportify/export_manager/file_writer.py +325 -0
  24. exportify/export_manager/generator.py +531 -0
  25. exportify/export_manager/graph.py +360 -0
  26. exportify/export_manager/module_all.py +500 -0
  27. exportify/export_manager/rules.py +260 -0
  28. exportify/export_manager/section_parser.py +406 -0
  29. exportify/migration.py +486 -0
  30. exportify/pipeline.py +287 -0
  31. exportify/py.typed +0 -0
  32. exportify/rules/README.md +437 -0
  33. exportify/rules/default_rules.yaml +248 -0
  34. exportify/schemas/exportify-config.schema.json +148 -0
  35. exportify/types.py +37 -0
  36. exportify/utils.py +128 -0
  37. exportify/validator/__init__.py +20 -0
  38. exportify/validator/consistency.py +187 -0
  39. exportify/validator/resolver.py +118 -0
  40. exportify/validator/validator.py +531 -0
  41. exportify/verify.sh +75 -0
  42. exportify-0.1.0.dist-info/METADATA +32 -0
  43. exportify-0.1.0.dist-info/RECORD +49 -0
  44. exportify-0.1.0.dist-info/WHEEL +4 -0
  45. exportify-0.1.0.dist-info/entry_points.txt +2 -0
  46. exportify-0.1.0.dist-info/licenses/LICENSE-Apache-2.0 +73 -0
  47. exportify-0.1.0.dist-info/licenses/LICENSE-MIT +18 -0
  48. exportify-0.1.0.dist-info/licenses/LICENSES/Apache-2.0.txt +73 -0
  49. exportify-0.1.0.dist-info/licenses/LICENSES/MIT.txt +18 -0
exportify/__init__.py ADDED
@@ -0,0 +1,80 @@
1
+ # SPDX-FileCopyrightText: 2026 Knitli Inc.
2
+ #
3
+ # SPDX-License-Identifier: MIT OR Apache-2.0
4
+
5
+ # sourcery skip: no-relative-imports
6
+ # SPDX-FileCopyrightText: 2025 Knitli Inc.
7
+ # SPDX-FileContributor: Adam Poulemanos <adam@knit.li>
8
+ #
9
+ # SPDX-License-Identifier: MIT OR Apache-2.0
10
+ """Lazy import system tools and CLI."""
11
+
12
+ from __future__ import annotations
13
+
14
+ from types import MappingProxyType
15
+ from typing import TYPE_CHECKING
16
+
17
+ from lateimport import create_late_getattr
18
+
19
+
20
if TYPE_CHECKING:
    pass

# Mapping of exported attribute name -> (module path, attribute name) consumed
# by the lazy __getattr__ below. Empty here; presumably populated by the
# "MANAGED EXPORTS" generator mentioned later in this module — TODO confirm.
_dynamic_imports: MappingProxyType[str, tuple[str, str]] = MappingProxyType({})

# Module-level __getattr__ (PEP 562): names listed in _dynamic_imports are
# resolved on first attribute access instead of being imported eagerly.
__getattr__ = create_late_getattr(_dynamic_imports, globals(), __name__)

# NOTE(review): reassigned further down in this module after the version is
# computed; this initial value only matters if import is interrupted early.
__all__ = ()
28
+
29
+
30
def _get_version() -> str:
    """Get the current version of Exportify.

    The version is generated dynamically at build/release time, so several
    sources are tried in order:

    1. ``exportify._version`` — present in built distributions (PyPI installs).
    2. ``importlib.metadata`` — works when the package is installed but the
       generated module is missing for some reason.
    3. ``git describe`` — for source checkouts inside a git repository.
    4. ``"0.0.0"`` — last-resort fallback if everything above fails.

    Returns:
        The resolved version string, or ``"0.0.0"`` if it cannot be determined.
    """
    # 1. Build-time generated module (the common case for installed wheels).
    try:
        from exportify._version import __version__
    except ImportError:
        pass
    else:
        return __version__

    # 2. Installed-package metadata.
    import importlib.metadata

    try:
        return importlib.metadata.version("exportify")
    except importlib.metadata.PackageNotFoundError:
        pass

    # 3. Git tag description for source checkouts.
    try:
        import shutil
        import subprocess

        # Git commands work from any directory within a repo, so no need to
        # specify cwd. The subprocess call is safe: the executable path comes
        # from the system PATH (shutil.which), not from user input, and the
        # arguments are passed as a list (no shell involved).
        if git := shutil.which("git"):
            described = subprocess.run(
                [git, "describe", "--tags", "--always", "--dirty"],
                capture_output=True,
                text=True,
                check=False,
            )
            if described.returncode == 0:
                return described.stdout.strip()
    except Exception:
        # Best-effort only: version lookup must never break package import.
        pass

    # 4. Nothing worked.
    return "0.0.0"
66
+
67
+
68
__version__ = _get_version()

# BUG FIX: ``__all__`` was previously assigned the bare string "__version__".
# Iterating a string yields its characters, so ``__dir__()`` (and any
# ``from exportify import *``) would expose '_', '_', 'v', ... instead of the
# single name. ``__all__`` must be a sequence of name strings.
__all__ = ("__version__",)

# === MANAGED EXPORTS ===

# This section is automatically generated by the lazy import system.
# Manual edits below this line will be overwritten.


def __dir__() -> list[str]:
    """List available attributes for the package (mirrors ``__all__``)."""
    return list(__all__)
exportify/_version.py ADDED
@@ -0,0 +1,9 @@
1
# SPDX-FileCopyrightText: 2026 Knitli Inc.
# SPDX-License-Identifier: MIT OR Apache-2.0
"""Version information for Exportify."""
from typing import Final


# Package version; Final marks it as a constant for type checkers.
# Presumably regenerated at build/release time (see _get_version in
# exportify/__init__.py) — TODO confirm against the build tooling.
__version__: Final[str] = "0.1.0"

__all__ = ("__version__",)
@@ -0,0 +1,10 @@
1
# SPDX-FileCopyrightText: 2026 Knitli Inc.
#
# SPDX-License-Identifier: MIT OR Apache-2.0

"""AST analysis package for lazy import system."""

from exportify.analysis.ast_parser import ASTParser


# Re-export the parser as this package's sole public name.
__all__ = ["ASTParser"]
@@ -0,0 +1,389 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # SPDX-FileCopyrightText: 2026 Knitli Inc.
4
+ #
5
+ # SPDX-License-Identifier: MIT OR Apache-2.0
6
+
7
+ """AST parser for extracting exports from Python files.
8
+
9
+ Parses Python source code and extracts exportable symbols:
10
+ - Classes
11
+ - Functions (top-level only)
12
+ - Variables
13
+ - Constants (SCREAMING_SNAKE_CASE)
14
+ - Type aliases (TypeAlias annotation)
15
+ """
16
+
17
+ from __future__ import annotations
18
+
19
+ import ast
20
+ import hashlib
21
+ import re
22
+ import sys
23
+ import time
24
+
25
+ from pathlib import Path
26
+
27
+ from exportify.analysis.ast_parser_overload import group_functions_by_name
28
+ from exportify.common.types import (
29
+ AnalysisResult,
30
+ DetectedSymbol,
31
+ MemberType,
32
+ SourceLocation,
33
+ SymbolProvenance,
34
+ )
35
+
36
+
37
class ASTParser:
    """Parse Python files and extract exportable symbols.

    Detects top-level classes, functions (collapsing ``@overload`` groups into
    one symbol), variables, SCREAMING_SNAKE_CASE constants, type aliases, and
    imports (with re-export heuristics recorded as metadata).
    """

    def __init__(self):
        """Initialize AST parser (stateless; no configuration needed)."""

    def parse_file(self, file_path: Path, module_path: str) -> AnalysisResult:
        """Parse a Python file and extract exports.

        Args:
            file_path: Path to Python file.
            module_path: Module path (e.g., "codeweaver.core.types").
                NOTE(review): unused in this method — kept for interface
                compatibility; confirm whether callers rely on it.

        Returns:
            AnalysisResult with symbols and metadata. The SHA-256 of the file
            content is recorded so callers can detect changes.
        """
        # Read and hash the file up front so even error results carry the hash.
        content = file_path.read_text(encoding="utf-8")
        file_hash = hashlib.sha256(content.encode()).hexdigest()

        try:
            tree = ast.parse(content, filename=str(file_path))
        except SyntaxError:
            # Return an empty result for syntax errors; the validator is
            # expected to surface these separately.
            return AnalysisResult(
                symbols=[],
                imports=[],
                file_hash=file_hash,
                analysis_timestamp=time.time(),
                schema_version="1.0",
            )

        # Extract symbols (both defined and imported).
        defined_symbols = self._extract_symbols(tree, file_path)
        imported_symbols = self._extract_import_symbols(tree, file_path)
        all_symbols = defined_symbols + imported_symbols

        # Extract imports as strings for backward compatibility/caching.
        imports = self._extract_imports(tree)

        return AnalysisResult(
            symbols=all_symbols,
            imports=imports,
            file_hash=file_hash,
            analysis_timestamp=time.time(),
            schema_version="1.0",
        )

    def _extract_symbols(self, tree: ast.Module, file_path: Path) -> list[DetectedSymbol]:
        """Extract all exportable symbols defined in the module body.

        Args:
            tree: Parsed AST module.
            file_path: Path to source file. NOTE(review): currently unused
                here — confirm whether it should feed error reporting.

        Returns:
            List of detected symbols (top-level nodes only; nested
            definitions are intentionally skipped).
        """
        symbols: list[DetectedSymbol] = []

        # Group functions by name to handle @overload correctly: an
        # overloaded function produces one symbol, not one per definition.
        function_groups = group_functions_by_name(tree)

        # Only process top-level nodes.
        for node in tree.body:
            if isinstance(node, ast.ClassDef):
                symbols.append(self._handle_class(node))
            elif isinstance(node, ast.FunctionDef | ast.AsyncFunctionDef):
                if symbol := self._handle_function(node, function_groups):
                    symbols.append(symbol)
            elif isinstance(node, ast.TypeAlias):
                symbols.append(self._handle_type_alias(node))
            elif isinstance(node, ast.AnnAssign):
                if symbol := self._handle_annotated_assign(node):
                    symbols.append(symbol)
            elif isinstance(node, ast.Assign):
                symbols.extend(self._handle_assign(node))

        return symbols

    def _handle_class(self, node: ast.ClassDef) -> DetectedSymbol:
        """Build a symbol for a top-level class definition."""
        return self._create_symbol(
            name=node.name,
            member_type=MemberType.CLASS,
            location=SourceLocation(line=node.lineno),
            docstring=ast.get_docstring(node),
            provenance=SymbolProvenance.DEFINED_HERE,
            is_private=node.name.startswith("_"),
        )

    def _handle_function(
        self, node: ast.FunctionDef | ast.AsyncFunctionDef, function_groups: dict
    ) -> DetectedSymbol | None:
        """Build a symbol for a function definition.

        Returns None for every definition of a name after the first, so an
        @overload group yields exactly one symbol.
        """
        func_name = node.name
        group = function_groups[func_name]

        # Only add a symbol once per function name.
        if node is not group["first_definition"]:
            return None

        is_overloaded = group["has_overload"]
        overload_count = group["overload_count"]
        has_implementation = group["has_implementation"]

        # Prefer the implementation's docstring; fall back to the first
        # overload's when there is no implementation in this module.
        docstring = ast.get_docstring(group["implementation"] or node)

        return self._create_symbol(
            name=func_name,
            member_type=MemberType.FUNCTION,
            location=SourceLocation(line=node.lineno),
            docstring=docstring,
            provenance=SymbolProvenance.DEFINED_HERE,
            is_private=func_name.startswith("_"),
            metadata={
                "is_overloaded": is_overloaded,
                "overload_count": overload_count,
                "has_implementation": has_implementation,
            },
        )

    def _handle_type_alias(self, node: ast.TypeAlias) -> DetectedSymbol:
        """Build a symbol for a PEP 695 ``type X = ...`` alias (3.12+)."""
        name = node.name.id if isinstance(node.name, ast.Name) else str(node.name)
        return self._create_symbol(
            name=name,
            member_type=MemberType.TYPE_ALIAS,
            location=SourceLocation(line=node.lineno),
            provenance=SymbolProvenance.DEFINED_HERE,
            is_private=name.startswith("_"),
            metadata={"style": "python3.12+"},
        )

    def _handle_annotated_assign(self, node: ast.AnnAssign) -> DetectedSymbol | None:
        """Build a symbol for an annotated assignment.

        Returns None when the target is not a simple name (e.g. attribute or
        subscript targets).
        """
        if not isinstance(node.target, ast.Name):
            return None

        member_type = self._determine_variable_type(node.target.id, node.annotation)
        metadata: dict[str, object] = {}
        if member_type == MemberType.TYPE_ALIAS:
            # Annotation-based alias (``X: TypeAlias = ...``) predates PEP 695.
            metadata["style"] = "pre-python3.12"

        return self._create_symbol(
            name=node.target.id,
            member_type=member_type,
            location=SourceLocation(line=node.lineno),
            provenance=SymbolProvenance.DEFINED_HERE,
            is_private=node.target.id.startswith("_"),
            metadata=metadata,
        )

    def _handle_assign(self, node: ast.Assign) -> list[DetectedSymbol]:
        """Build symbols for a plain assignment (one per simple-name target)."""
        symbols = []
        for target in node.targets:
            if isinstance(target, ast.Name):
                member_type = self._determine_variable_type(target.id, None)
                symbols.append(
                    self._create_symbol(
                        name=target.id,
                        member_type=member_type,
                        location=SourceLocation(line=node.lineno),
                        provenance=SymbolProvenance.DEFINED_HERE,
                        is_private=target.id.startswith("_"),
                    )
                )
        return symbols

    def _determine_variable_type(self, name: str, annotation: ast.expr | None) -> MemberType:
        """Classify a variable as constant, type alias, or regular variable."""
        # Check for a TypeAlias annotation (bare name or typing.TypeAlias).
        if annotation:
            if isinstance(annotation, ast.Name) and annotation.id == "TypeAlias":
                return MemberType.TYPE_ALIAS
            if isinstance(annotation, ast.Attribute) and annotation.attr == "TypeAlias":
                return MemberType.TYPE_ALIAS

        # SCREAMING_SNAKE_CASE names are treated as constants.
        if re.match(r"^[A-Z][A-Z0-9_]*$", name):
            return MemberType.CONSTANT

        # Default to variable.
        return MemberType.VARIABLE

    def _extract_import_symbols(self, tree: ast.Module, file_path: Path) -> list[DetectedSymbol]:
        """Extract top-level import statements as symbols.

        Categorizes imports with heuristic metadata to help distinguish likely
        re-exports from internal use. The heuristics are:

        - Aliased imports (import X as Y, from X import Y as Z) → is_likely_reexport=True
        - Non-aliased imports → is_likely_reexport=False
        - is_stdlib metadata tracked separately (for rule system to use)

        The logic: if you alias an import, you're likely planning to expose it
        publicly. Why else rename it? Non-aliased imports are typically for
        internal use.

        Note: These are heuristics only. Final re-export decisions are made by
        the rule system during the decision phase. This metadata helps inform
        those rules.
        """
        symbols: list[DetectedSymbol] = []

        # Only process top-level imports.
        for node in tree.body:
            if isinstance(node, ast.Import):
                for alias in node.names:
                    symbols.append(
                        self._make_import_symbol(
                            alias,
                            lineno=node.lineno,
                            import_type="module",
                            # Standard import: source is the module name itself.
                            original_source=None,
                            is_stdlib=self._is_stdlib_module(alias.name),
                        )
                    )

            elif isinstance(node, ast.ImportFrom):
                module = node.module or ""
                level = "." * node.level  # Relative imports
                import_path = f"{level}{module}" if level or module else ""
                is_stdlib = self._is_stdlib_module(module) if module else False

                for alias in node.names:
                    symbols.append(
                        self._make_import_symbol(
                            alias,
                            lineno=node.lineno,
                            import_type="from",
                            original_source=import_path,
                            is_stdlib=is_stdlib,
                        )
                    )

        return symbols

    def _make_import_symbol(
        self,
        alias: ast.alias,
        *,
        lineno: int,
        import_type: str,
        original_source: str | None,
        is_stdlib: bool,
    ) -> DetectedSymbol:
        """Build a DetectedSymbol for one imported name (shared by both import forms)."""
        # alias.name = imported/module name; alias.asname = local alias or None.
        export_name = alias.asname or alias.name

        # Aliased imports are likely re-exports
        # (why else would you alias if not to re-export?)
        is_likely_reexport = bool(alias.asname)

        return self._create_symbol(
            name=export_name,
            member_type=MemberType.IMPORTED,
            location=SourceLocation(line=lineno),
            provenance=SymbolProvenance.ALIAS_IMPORTED
            if alias.asname
            else SymbolProvenance.IMPORTED,
            # Imports are presumably public unless named _.
            # NOTE(review): underscore-prefixed names are NOT actually checked
            # here (is_private is always False) — confirm intended behavior.
            is_private=False,
            original_name=alias.name,
            original_source=original_source,
            metadata={
                "import_type": import_type,
                "is_likely_reexport": is_likely_reexport,
                "is_stdlib": is_stdlib,
            },
        )

    def _extract_imports(self, tree: ast.Module) -> list[str]:
        """Extract import statements as strings (all depths, via ast.walk)."""
        imports = []

        for node in ast.walk(tree):
            if isinstance(node, ast.Import):
                for alias in node.names:
                    import_name = f"import {alias.name}"
                    if alias.asname:
                        import_name += f" as {alias.asname}"
                    imports.append(import_name)

            elif isinstance(node, ast.ImportFrom):
                module = node.module or ""
                level = "." * node.level  # Relative imports
                for alias in node.names:
                    import_name = f"from {level}{module} import {alias.name}"
                    if alias.asname:
                        import_name += f" as {alias.asname}"
                    imports.append(import_name)

        return imports

    def _is_stdlib_module(self, module_name: str) -> bool:
        """Check if a module is from the Python standard library.

        Checks the top-level package name against sys.stdlib_module_names;
        underscore-prefixed top-level names are also treated as stdlib/internal.

        Args:
            module_name: Name of the module to check.

        Returns:
            True if likely a stdlib module, False otherwise.
        """
        if not module_name:
            return False

        # Known stdlib top-level module names for this interpreter.
        common_stdlib = sys.stdlib_module_names

        # Get the top-level module name.
        top_level = module_name.split(".")[0]

        # Stdlib, or internal (underscore-prefixed) module.
        return top_level in common_stdlib or top_level.startswith("_")

    def _create_symbol(
        self,
        name: str,
        member_type: MemberType,
        location: SourceLocation,
        provenance: SymbolProvenance,
        *,
        is_private: bool,
        original_name: str | None = None,
        original_source: str | None = None,
        docstring: str | None = None,
        metadata: dict[str, object] | None = None,
    ) -> DetectedSymbol:
        """Helper to create DetectedSymbol with default values."""
        return DetectedSymbol(
            name=name,
            member_type=member_type,
            location=location,
            provenance=provenance,
            is_private=is_private,
            original_name=original_name,
            original_source=original_source,
            docstring=docstring,
            metadata=metadata or {},
        )
387
+
388
+
389
+ __all__ = ["ASTParser"]
@@ -0,0 +1,113 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # SPDX-FileCopyrightText: 2026 Knitli Inc.
4
+ #
5
+ # SPDX-License-Identifier: MIT OR Apache-2.0
6
+
7
+ """Helper functions for @overload detection in AST parser."""
8
+
9
+ from __future__ import annotations
10
+
11
+ import ast
12
+ import logging
13
+
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
+ def is_overloaded_function(node: ast.FunctionDef | ast.AsyncFunctionDef) -> bool:
19
+ r"""Check if function has @overload decorator.
20
+
21
+ Args:
22
+ node: Function definition AST node
23
+
24
+ Returns:
25
+ True if function has @overload decorator
26
+
27
+ Example:
28
+ >>> node = ast.parse("@overload\ndef f(): ...").body[0]
29
+ >>> is_overloaded_function(node)
30
+ True
31
+ """
32
+ for decorator in node.decorator_list:
33
+ # Direct reference: @overload
34
+ if isinstance(decorator, ast.Name) and decorator.id == "overload":
35
+ return True
36
+
37
+ # Attribute reference: @typing.overload
38
+ if isinstance(decorator, ast.Attribute) and decorator.attr == "overload":
39
+ return True
40
+
41
+ return False
42
+
43
+
44
+ def group_functions_by_name(tree: ast.Module) -> dict[str, dict[str, object]]:
45
+ """Group top-level functions by name, handling @overload.
46
+
47
+ Args:
48
+ tree: Parsed AST module
49
+
50
+ Returns:
51
+ Dict mapping function name to metadata about all definitions
52
+
53
+ Example:
54
+ >>> tree = ast.parse('''
55
+ ... @overload
56
+ ... def f(x: int) -> int: ...
57
+ ... @overload
58
+ ... def f(x: str) -> str: ...
59
+ ... def f(x): return x
60
+ ... ''')
61
+ >>> groups = group_functions_by_name(tree)
62
+ >>> groups["f"]["has_overload"]
63
+ True
64
+ >>> groups["f"]["overload_count"]
65
+ 2
66
+ """
67
+ function_groups: dict[str, dict[str, object]] = {}
68
+
69
+ # Find all top-level functions
70
+ for node in tree.body:
71
+ if isinstance(node, ast.FunctionDef | ast.AsyncFunctionDef):
72
+ func_name = node.name
73
+
74
+ if func_name not in function_groups:
75
+ function_groups[func_name] = {
76
+ "definitions": [],
77
+ "overloads": [],
78
+ "implementation": None,
79
+ "first_definition": node,
80
+ "has_overload": False,
81
+ "overload_count": 0,
82
+ "has_implementation": False,
83
+ }
84
+
85
+ group = function_groups[func_name]
86
+ group["definitions"].append(node) # type: ignore[union-attr]
87
+
88
+ if is_overloaded_function(node):
89
+ group["overloads"].append(node) # type: ignore[union-attr]
90
+ group["has_overload"] = True
91
+ else:
92
+ # Non-overload definition is the implementation
93
+ group["implementation"] = node
94
+
95
+ # Calculate final metadata
96
+ for func_name, group in function_groups.items():
97
+ overloads: list = group["overloads"] # type: ignore[assignment]
98
+ group["overload_count"] = len(overloads)
99
+ group["has_implementation"] = group["implementation"] is not None
100
+
101
+ # Warn if multiple definitions without @overload
102
+ definitions: list = group["definitions"] # type: ignore[assignment]
103
+ if len(definitions) > 1 and not group["has_overload"]:
104
+ logger.warning(
105
+ "Function '%s' defined %d times without @overload decorator",
106
+ func_name,
107
+ len(definitions),
108
+ )
109
+
110
+ return function_groups
111
+
112
+
113
+ __all__ = ["group_functions_by_name", "is_overloaded_function"]
exportify/cli.py ADDED
@@ -0,0 +1,54 @@
1
+ # sourcery skip: avoid-global-variables
2
+ # SPDX-FileCopyrightText: 2025 Knitli Inc.
3
+ # SPDX-FileContributor: Adam Poulemanos <adam@knit.li>
4
+ #
5
+ # SPDX-License-Identifier: MIT OR Apache-2.0
6
+ """Exportify CLI entry point and command registration.
7
+
8
+ Registers all seven commands and wires them to the ``exportify`` program:
9
+ - ``check`` — validate exports and ``__all__`` consistency
10
+ - ``fix`` — repair exports and ``__all__`` declarations
11
+ - ``generate`` — create ``__init__.py`` files with lazy imports
12
+ - ``status`` — show a quick cache and configuration snapshot
13
+ - ``doctor`` — run cache and configuration health checks
14
+ - ``init`` — initialize Exportify in a project
15
+ - ``clear-cache`` — delete all cached analysis results
16
+ """
17
+
18
+ from __future__ import annotations
19
+
20
+ import logging
21
+
22
+ from cyclopts import App
23
+
24
+ from exportify import __version__
25
+ from exportify.commands.utils import CONSOLE
26
+
27
+
28
+ logger = logging.getLogger(__name__)
29
+
30
+ app = App(
31
+ name="exportify",
32
+ help="Generate, validate, and fix Python package exports and __init__.py files",
33
+ version=__version__,
34
+ console=CONSOLE,
35
+ )
36
+
37
+ app.command("exportify.commands.check:CheckCommand", name="check")
38
+ app.command("exportify.commands.fix:FixCommand", name="fix")
39
+ app.command("exportify.commands.generate:GenerateCommand", name="generate")
40
+ app.command("exportify.commands.status:StatusCommand", name="status")
41
+ app.command("exportify.commands.doctor:DoctorCommand", name="doctor")
42
+ app.command("exportify.commands.init:InitCommand", name="init")
43
+ app.command("exportify.commands.clear_cache:ClearCacheCommand", name="clear-cache")
44
+
45
+
46
+ def main() -> None:
47
+ """Main entry point for the CLI."""
48
+ app()
49
+
50
+
51
+ if __name__ == "__main__":
52
+ main()
53
+
54
+ __all__ = ("app",)