mapFolding 0.8.2-py3-none-any.whl → 0.8.4-py3-none-any.whl

This diff compares the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions as they appear in their respective public registries.
Files changed (37)
  1. mapFolding/__init__.py +6 -2
  2. mapFolding/basecamp.py +11 -5
  3. mapFolding/filesystem.py +134 -109
  4. mapFolding/oeis.py +1 -1
  5. mapFolding/reference/__init__.py +7 -0
  6. mapFolding/reference/jobsCompleted/[2x19]/p2x19.py +197 -0
  7. mapFolding/reference/jobsCompleted/__init__.py +50 -0
  8. mapFolding/reference/jobsCompleted/p2x19/p2x19.py +29 -0
  9. mapFolding/someAssemblyRequired/__init__.py +37 -18
  10. mapFolding/someAssemblyRequired/_theTypes.py +35 -0
  11. mapFolding/someAssemblyRequired/_tool_Make.py +92 -0
  12. mapFolding/someAssemblyRequired/_tool_Then.py +65 -0
  13. mapFolding/someAssemblyRequired/_toolboxAntecedents.py +326 -0
  14. mapFolding/someAssemblyRequired/_toolboxContainers.py +306 -0
  15. mapFolding/someAssemblyRequired/_toolboxPython.py +76 -0
  16. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +20 -1
  17. mapFolding/someAssemblyRequired/ingredientsNumba.py +17 -24
  18. mapFolding/someAssemblyRequired/synthesizeNumbaFlow.py +112 -149
  19. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +247 -0
  20. mapFolding/someAssemblyRequired/transformDataStructures.py +167 -100
  21. mapFolding/someAssemblyRequired/transformationTools.py +63 -678
  22. mapFolding/syntheticModules/__init__.py +1 -0
  23. mapFolding/syntheticModules/numbaCount_doTheNeedful.py +36 -33
  24. mapFolding/theDao.py +13 -11
  25. mapFolding/theSSOT.py +69 -119
  26. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/METADATA +4 -2
  27. mapfolding-0.8.4.dist-info/RECORD +49 -0
  28. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/WHEEL +1 -1
  29. tests/conftest.py +34 -29
  30. tests/test_computations.py +40 -31
  31. tests/test_filesystem.py +3 -3
  32. tests/test_other.py +4 -3
  33. mapFolding/someAssemblyRequired/synthesizeNumbaJobVESTIGIAL.py +0 -413
  34. mapfolding-0.8.2.dist-info/RECORD +0 -39
  35. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/entry_points.txt +0 -0
  36. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/licenses/LICENSE +0 -0
  37. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,306 @@
+ """
+ Container classes for AST transformations and code synthesis.
+
+ This module provides container classes used in the AST transformation process
+ and code synthesis workflows. It acts as a dependency boundary to prevent
+ circular imports while providing reusable data structures.
+ """
+ from collections import defaultdict
+ from collections.abc import Sequence
+ from mapFolding.someAssemblyRequired import ast_Identifier, be, ifThis, Make, parseLogicalPath2astModule, str_nameDOTname
+ from mapFolding.theSSOT import callableDispatcherHARDCODED, raiseIfNoneGitHubIssueNumber3, The
+ from pathlib import Path, PurePosixPath
+ from Z0Z_tools import updateExtendPolishDictionaryLists
+ import ast
+ import dataclasses
+
+ class LedgerOfImports:
+     # TODO When resolving the ledger of imports, remove self-referential imports
+     # TODO TypeIgnore :/
+
+     def __init__(self, startWith: ast.AST | None = None) -> None:
+         self.dictionaryImportFrom: dict[str_nameDOTname, list[tuple[ast_Identifier, ast_Identifier | None]]] = defaultdict(list)
+         self.listImport: list[str_nameDOTname] = []
+         if startWith:
+             self.walkThis(startWith)
+
+     def addAst(self, astImport____: ast.Import | ast.ImportFrom) -> None:
+         assert isinstance(astImport____, (ast.Import, ast.ImportFrom)), f"I received {type(astImport____) = }, but I can only accept {ast.Import} and {ast.ImportFrom}."
+         if be.Import(astImport____):
+             for alias in astImport____.names:
+                 self.listImport.append(alias.name)
+         elif be.ImportFrom(astImport____):
+             # TODO fix the mess created by `None` means '.'. I need a `str_nameDOTname` to replace '.'
+             if astImport____.module is None:
+                 astImport____.module = '.'
+             for alias in astImport____.names:
+                 self.dictionaryImportFrom[astImport____.module].append((alias.name, alias.asname))
+
+     def addImport_asStr(self, moduleIdentifier: str_nameDOTname) -> None:
+         self.listImport.append(moduleIdentifier)
+
+     def addImportFrom_asStr(self, moduleIdentifier: ast_Identifier, name: ast_Identifier, asname: ast_Identifier | None = None) -> None:
+         self.dictionaryImportFrom[moduleIdentifier].append((name, asname))
+
+     def exportListModuleIdentifiers(self) -> list[ast_Identifier]:
+         listModuleIdentifiers: list[ast_Identifier] = list(self.dictionaryImportFrom.keys())
+         listModuleIdentifiers.extend(self.listImport)
+         return sorted(set(listModuleIdentifiers))
+
+     def makeList_ast(self) -> list[ast.ImportFrom | ast.Import]:
+         listImportFrom: list[ast.ImportFrom] = []
+         for moduleIdentifier, listOfNameTuples in sorted(self.dictionaryImportFrom.items()):
+             listOfNameTuples = sorted(list(set(listOfNameTuples)), key=lambda nameTuple: nameTuple[0])
+             list_alias: list[ast.alias] = []
+             for name, asname in listOfNameTuples:
+                 list_alias.append(Make.alias(name, asname))
+             listImportFrom.append(Make.ImportFrom(moduleIdentifier, list_alias))
+         list_astImport: list[ast.Import] = [Make.Import(moduleIdentifier) for moduleIdentifier in sorted(set(self.listImport))]
+         return listImportFrom + list_astImport
+
+     def update(self, *fromLedger: 'LedgerOfImports') -> None:
+         """Update this ledger with imports from one or more other ledgers.
+         Parameters:
+             *fromLedger: One or more other `LedgerOfImports` objects from which to merge.
+         """
+         self.dictionaryImportFrom = updateExtendPolishDictionaryLists(self.dictionaryImportFrom, *(ledger.dictionaryImportFrom for ledger in fromLedger), destroyDuplicates=True, reorderLists=True)
+         for ledger in fromLedger:
+             self.listImport.extend(ledger.listImport)
+
+     def walkThis(self, walkThis: ast.AST) -> None:
+         for nodeBuffalo in ast.walk(walkThis):
+             if isinstance(nodeBuffalo, (ast.Import, ast.ImportFrom)):
+                 self.addAst(nodeBuffalo)
+
+ @dataclasses.dataclass
+ class IngredientsFunction:
+     """Everything necessary to integrate a function into a module should be here.
+     Parameters:
+         astFunctionDef: hint `Make.astFunctionDef()`
+     """
+     astFunctionDef: ast.FunctionDef
+     imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
+     type_ignores: list[ast.TypeIgnore] = dataclasses.field(default_factory=list)
+
+ @dataclasses.dataclass
+ class IngredientsModule:
+     """Everything necessary to create one _logical_ `ast.Module` should be here.
+     Extrinsic qualities should _probably_ be handled externally.
+
+     Parameters:
+         ingredientsFunction (None): One or more `IngredientsFunction` that will be appended to `listIngredientsFunctions`.
+     """
+     ingredientsFunction: dataclasses.InitVar[Sequence[IngredientsFunction] | IngredientsFunction | None] = None
+
+     # init var with an existing module? method to deconstruct an existing module?
+
+     # `body` attribute of `ast.Module`
+     """NOTE
+     - Bare statements in `prologue` and `epilogue` are not 'protected' by `if __name__ == '__main__':` so they will be executed merely by loading the module.
+     - The dataclass has methods for modifying `prologue`, `epilogue`, and `launcher`.
+     - However, `prologue`, `epilogue`, and `launcher` are `ast.Module` (as opposed to `list[ast.stmt]`), so that you may use tools such as `ast.walk` and `ast.NodeVisitor` on the fields.
+     """
+     imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
+     """Modify this field using the methods in `LedgerOfImports`."""
+     prologue: ast.Module = Make.Module([],[])
+     """Statements after the imports and before the functions in listIngredientsFunctions."""
+     listIngredientsFunctions: list[IngredientsFunction] = dataclasses.field(default_factory=list)
+     epilogue: ast.Module = Make.Module([],[])
+     """Statements after the functions in listIngredientsFunctions and before `launcher`."""
+     launcher: ast.Module = Make.Module([],[])
+     """`if __name__ == '__main__':`"""
+
+     # `ast.TypeIgnore` statements to supplement those in other fields; `type_ignores` is a parameter for `ast.Module` constructor
+     supplemental_type_ignores: list[ast.TypeIgnore] = dataclasses.field(default_factory=list)
+
+     def __post_init__(self, ingredientsFunction: Sequence[IngredientsFunction] | IngredientsFunction | None = None) -> None:
+         if ingredientsFunction is not None:
+             if isinstance(ingredientsFunction, IngredientsFunction):
+                 self.appendIngredientsFunction(ingredientsFunction)
+             else:
+                 self.appendIngredientsFunction(*ingredientsFunction)
+
+     def _append_astModule(self, self_astModule: ast.Module, astModule: ast.Module | None, statement: Sequence[ast.stmt] | ast.stmt | None, type_ignores: list[ast.TypeIgnore] | None) -> None:
+         """Append one or more statements to the target `ast.Module`."""
+         list_body: list[ast.stmt] = []
+         listTypeIgnore: list[ast.TypeIgnore] = []
+         if astModule is not None and be.Module(astModule):
+             list_body.extend(astModule.body)
+             listTypeIgnore.extend(astModule.type_ignores)
+         if type_ignores is not None:
+             listTypeIgnore.extend(type_ignores)
+         if statement is not None:
+             if isinstance(statement, Sequence):
+                 list_body.extend(statement)
+             else:
+                 list_body.append(statement)
+         self_astModule.body.extend(list_body)
+         self_astModule.type_ignores.extend(listTypeIgnore)
+         ast.fix_missing_locations(self_astModule)
+
+     def appendPrologue(self, astModule: ast.Module | None = None, statement: Sequence[ast.stmt] | ast.stmt | None = None, type_ignores: list[ast.TypeIgnore] | None = None) -> None:
+         """Append one or more statements to `prologue`."""
+         self._append_astModule(self.prologue, astModule, statement, type_ignores)
+
+     def appendEpilogue(self, astModule: ast.Module | None = None, statement: Sequence[ast.stmt] | ast.stmt | None = None, type_ignores: list[ast.TypeIgnore] | None = None) -> None:
+         """Append one or more statements to `epilogue`."""
+         self._append_astModule(self.epilogue, astModule, statement, type_ignores)
+
+     def appendLauncher(self, astModule: ast.Module | None = None, statement: Sequence[ast.stmt] | ast.stmt | None = None, type_ignores: list[ast.TypeIgnore] | None = None) -> None:
+         """Append one or more statements to `launcher`."""
+         self._append_astModule(self.launcher, astModule, statement, type_ignores)
+
+     def appendIngredientsFunction(self, *ingredientsFunction: IngredientsFunction) -> None:
+         """Append one or more `IngredientsFunction`."""
+         for allegedIngredientsFunction in ingredientsFunction:
+             if isinstance(allegedIngredientsFunction, IngredientsFunction):
+                 self.listIngredientsFunctions.append(allegedIngredientsFunction)
+             else:
+                 raise ValueError(f"I received `{type(allegedIngredientsFunction) = }`, but I can only accept `{IngredientsFunction}`.")
+
+     @property
+     def list_astImportImportFrom(self) -> list[ast.Import | ast.ImportFrom]:
+         """List of `ast.Import` and `ast.ImportFrom` statements."""
+         sherpaLedger = LedgerOfImports()
+         listLedgers: list[LedgerOfImports] = [self.imports]
+         for ingredientsFunction in self.listIngredientsFunctions:
+             listLedgers.append(ingredientsFunction.imports)
+         sherpaLedger.update(*listLedgers)
+         return sherpaLedger.makeList_ast()
+
+     @property
+     def body(self) -> list[ast.stmt]:
+         list_stmt: list[ast.stmt] = []
+         list_stmt.extend(self.list_astImportImportFrom)
+         list_stmt.extend(self.prologue.body)
+         for ingredientsFunction in self.listIngredientsFunctions:
+             list_stmt.append(ingredientsFunction.astFunctionDef)
+         list_stmt.extend(self.epilogue.body)
+         list_stmt.extend(self.launcher.body)
+         # TODO `launcher`, if it exists, must start with `if __name__ == '__main__':` and be indented
+         return list_stmt
+
+     @property
+     def type_ignores(self) -> list[ast.TypeIgnore]:
+         listTypeIgnore: list[ast.TypeIgnore] = self.supplemental_type_ignores
+         # listTypeIgnore.extend(self.imports.makeListAst())
+         listTypeIgnore.extend(self.prologue.type_ignores)
+         for ingredientsFunction in self.listIngredientsFunctions:
+             listTypeIgnore.extend(ingredientsFunction.type_ignores)
+         listTypeIgnore.extend(self.epilogue.type_ignores)
+         listTypeIgnore.extend(self.launcher.type_ignores)
+         return listTypeIgnore
+
+ @dataclasses.dataclass
+ class RecipeSynthesizeFlow:
+     """Settings for synthesizing flow."""
+     # ========================================
+     # Source
+     # ========================================
+     source_astModule = parseLogicalPath2astModule(The.logicalPathModuleSourceAlgorithm)
+
+     # Figure out dynamic flow control to synthesized modules https://github.com/hunterhogan/mapFolding/issues/4
+     sourceCallableDispatcher: ast_Identifier = The.sourceCallableDispatcher
+     sourceCallableInitialize: ast_Identifier = The.sourceCallableInitialize
+     sourceCallableParallel: ast_Identifier = The.sourceCallableParallel
+     sourceCallableSequential: ast_Identifier = The.sourceCallableSequential
+
+     sourceDataclassIdentifier: ast_Identifier = The.dataclassIdentifier
+     sourceDataclassInstance: ast_Identifier = The.dataclassInstance
+     sourceDataclassInstanceTaskDistribution: ast_Identifier = The.dataclassInstanceTaskDistribution
+     sourceLogicalPathModuleDataclass: str_nameDOTname = The.logicalPathModuleDataclass
+
+     sourceConcurrencyManagerNamespace = The.sourceConcurrencyManagerNamespace
+     sourceConcurrencyManagerIdentifier = The.sourceConcurrencyManagerIdentifier
+
+     # ========================================
+     # Logical identifiers (as opposed to physical identifiers)
+     # ========================================
+     # Package ================================
+     packageIdentifier: ast_Identifier | None = The.packageName
+
+     # Qualified logical path ================================
+     logicalPathModuleDataclass: str_nameDOTname = sourceLogicalPathModuleDataclass
+     logicalPathFlowRoot: ast_Identifier | None = 'syntheticModules'
+     """ `logicalPathFlowRoot` likely corresponds to a physical filesystem directory."""
+
+     # Module ================================
+     moduleDispatcher: ast_Identifier = 'numbaCount_doTheNeedful'
+     moduleInitialize: ast_Identifier = moduleDispatcher
+     moduleParallel: ast_Identifier = moduleDispatcher
+     moduleSequential: ast_Identifier = moduleDispatcher
+
+     # Function ================================
+     callableDispatcher: ast_Identifier = sourceCallableDispatcher
+     callableInitialize: ast_Identifier = sourceCallableInitialize
+     callableParallel: ast_Identifier = sourceCallableParallel
+     callableSequential: ast_Identifier = sourceCallableSequential
+     concurrencyManagerNamespace: ast_Identifier = sourceConcurrencyManagerNamespace
+     concurrencyManagerIdentifier: ast_Identifier = sourceConcurrencyManagerIdentifier
+     dataclassIdentifier: ast_Identifier = sourceDataclassIdentifier
+
+     # Variable ================================
+     dataclassInstance: ast_Identifier = sourceDataclassInstance
+     dataclassInstanceTaskDistribution: ast_Identifier = sourceDataclassInstanceTaskDistribution
+
+     # ========================================
+     # Computed
+     # ========================================
+     """
+     theFormatStrModuleSynthetic = "{packageFlow}Count"
+     theFormatStrModuleForCallableSynthetic = theFormatStrModuleSynthetic + "_{callableTarget}"
+     theModuleDispatcherSynthetic: ast_Identifier = theFormatStrModuleForCallableSynthetic.format(packageFlow=packageFlowSynthetic, callableTarget=The.sourceCallableDispatcher)
+     theLogicalPathModuleDispatcherSynthetic: str = '.'.join([The.packageName, The.moduleOfSyntheticModules, theModuleDispatcherSynthetic])
+
+     """
+     # logicalPathModuleDispatcher: str = '.'.join([Z0Z_flowLogicalPathRoot, moduleDispatcher])
+     # ========================================
+     # Filesystem (names of physical objects)
+     # ========================================
+     pathPackage: PurePosixPath | None = PurePosixPath(The.pathPackage)
+     fileExtension: str = The.fileExtension
+
+     def _makePathFilename(self, filenameStem: str,
+             pathRoot: PurePosixPath | None = None,
+             logicalPathINFIX: str_nameDOTname | None = None,
+             fileExtension: str | None = None,
+             ) -> PurePosixPath:
+         """filenameStem: (hint: the name of the logical module)"""
+         if pathRoot is None:
+             pathRoot = self.pathPackage or PurePosixPath(Path.cwd())
+         if logicalPathINFIX:
+             whyIsThisStillAThing: list[str] = logicalPathINFIX.split('.')
+             pathRoot = pathRoot.joinpath(*whyIsThisStillAThing)
+         if fileExtension is None:
+             fileExtension = self.fileExtension
+         filename: str = filenameStem + fileExtension
+         return pathRoot.joinpath(filename)
+
+     @property
+     def pathFilenameDispatcher(self) -> PurePosixPath:
+         return self._makePathFilename(filenameStem=self.moduleDispatcher, logicalPathINFIX=self.logicalPathFlowRoot)
+     @property
+     def pathFilenameInitialize(self) -> PurePosixPath:
+         return self._makePathFilename(filenameStem=self.moduleInitialize, logicalPathINFIX=self.logicalPathFlowRoot)
+     @property
+     def pathFilenameParallel(self) -> PurePosixPath:
+         return self._makePathFilename(filenameStem=self.moduleParallel, logicalPathINFIX=self.logicalPathFlowRoot)
+     @property
+     def pathFilenameSequential(self) -> PurePosixPath:
+         return self._makePathFilename(filenameStem=self.moduleSequential, logicalPathINFIX=self.logicalPathFlowRoot)
+
+     def __post_init__(self) -> None:
+         if ((self.concurrencyManagerIdentifier is not None and self.concurrencyManagerIdentifier != self.sourceConcurrencyManagerIdentifier) # `submit` # type: ignore
+             or ((self.concurrencyManagerIdentifier is None) != (self.concurrencyManagerNamespace is None))): # type: ignore
+             import warnings
+             warnings.warn(f"If your synthesized module is weird, check `{self.concurrencyManagerIdentifier=}` and `{self.concurrencyManagerNamespace=}`. (ChildProcessError? 'Yeah! Children shouldn't be processing stuff, man.')", category=ChildProcessError, stacklevel=2) # pyright: ignore[reportCallIssue, reportArgumentType] Y'all Pynatics need to be less shrill and focus on making code that doesn't need 8000 error categories.
+
+         # self.logicalPathModuleDispatcher!=logicalPathModuleDispatcherHARDCODED or
+         if self.callableDispatcher!=callableDispatcherHARDCODED:
+             print(f"fyi: `{self.callableDispatcher=}` but\n\t`{callableDispatcherHARDCODED=}`.")
+
+ def astModuleToIngredientsFunction(astModule: ast.AST, identifierFunctionDef: ast_Identifier) -> IngredientsFunction:
+     from mapFolding.someAssemblyRequired import extractFunctionDef
+     astFunctionDef = extractFunctionDef(astModule, identifierFunctionDef)
+     if not astFunctionDef: raise raiseIfNoneGitHubIssueNumber3
+     return IngredientsFunction(astFunctionDef, LedgerOfImports(astModule))
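The containers above compose into a synthesized module roughly as follows. This is a minimal sketch, not code from the package: the module path 'mapFolding.theDao' and the callable name 'doTheNeedful' are placeholders for whatever source function a real flow extracts.

import ast
from mapFolding.someAssemblyRequired import parseLogicalPath2astModule
from mapFolding.someAssemblyRequired._toolboxContainers import IngredientsModule, astModuleToIngredientsFunction

# Placeholder source module and callable; substitute identifiers from your own flow.
astSourceModule = parseLogicalPath2astModule('mapFolding.theDao')
ingredientsCount = astModuleToIngredientsFunction(astSourceModule, 'doTheNeedful')
# Each IngredientsFunction carries its own LedgerOfImports; IngredientsModule merges them.
ingredientsCount.imports.addImportFrom_asStr('numba', 'jit')

ingredientsModule = IngredientsModule(ingredientsFunction=ingredientsCount)
moduleSynthesized = ast.Module(body=ingredientsModule.body, type_ignores=ingredientsModule.type_ignores)
print(ast.unparse(ast.fix_missing_locations(moduleSynthesized)))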
@@ -0,0 +1,76 @@
+ from collections.abc import Callable
+ from inspect import getsource as inspect_getsource
+ from mapFolding.someAssemblyRequired import ast_Identifier, str_nameDOTname
+ from os import PathLike
+ from pathlib import Path, PurePath
+ from types import ModuleType
+ from typing import Any, Literal
+ import ast
+ import importlib
+ import importlib.util
+
+ # TODO Identify the logic that narrows the type and can help the user during static type checking.
+
+ class NodeTourist(ast.NodeVisitor):
+     def __init__(self, findThis, doThat): # type: ignore
+         self.findThis = findThis
+         self.doThat = doThat
+         self.nodeCaptured = None
+
+     def visit(self, node): # type: ignore
+         if self.findThis(node):
+             nodeActionReturn = self.doThat(node) # type: ignore
+             if nodeActionReturn is not None:
+                 self.nodeCaptured = nodeActionReturn # type: ignore
+         self.generic_visit(node)
+
+     def captureLastMatch(self, node): # type: ignore
+         """Capture the last matched node that produces a non-None result.
+
+         This method traverses the entire tree starting at the given node
+         and returns the last non-None value produced by applying doThat
+         to a matching node. It will continue traversing after finding a match,
+         and the value captured can be replaced by later matches.
+
+         Parameters:
+             node: The AST node to start traversal from
+
+         Returns:
+             The result of applying doThat to the last matching node that returned
+             a non-None value, or None if no match found or all matches returned None
+         """
+         self.nodeCaptured = None
+         self.visit(node) # type: ignore
+         return self.nodeCaptured
+
+ class NodeChanger(ast.NodeTransformer):
+     def __init__(self, findThis, doThat): # type: ignore
+         self.findThis = findThis
+         self.doThat = doThat
+
+     def visit(self, node): # type: ignore
+         if self.findThis(node):
+             return self.doThat(node) # type: ignore
+         return super().visit(node)
+
+ def importLogicalPath2Callable(logicalPathModule: str_nameDOTname, identifier: ast_Identifier, packageIdentifierIfRelative: ast_Identifier | None = None) -> Callable[..., Any]:
+     moduleImported: ModuleType = importlib.import_module(logicalPathModule, packageIdentifierIfRelative)
+     return getattr(moduleImported, identifier)
+
+ def importPathFilename2Callable(pathFilename: PathLike[Any] | PurePath, identifier: ast_Identifier, moduleIdentifier: ast_Identifier | None = None) -> Callable[..., Any]:
+     pathFilename = Path(pathFilename)
+
+     importlibSpecification = importlib.util.spec_from_file_location(moduleIdentifier or pathFilename.stem, pathFilename)
+     if importlibSpecification is None or importlibSpecification.loader is None: raise ImportError(f"I received\n\t`{pathFilename = }`,\n\t`{identifier = }`, and\n\t`{moduleIdentifier = }`.\n\tAfter loading, \n\t`importlibSpecification` {'is `None`' if importlibSpecification is None else 'has a value'} and\n\t`importlibSpecification.loader` is unknown.")
+
+     moduleImported_jk_hahaha: ModuleType = importlib.util.module_from_spec(importlibSpecification)
+     importlibSpecification.loader.exec_module(moduleImported_jk_hahaha)
+     return getattr(moduleImported_jk_hahaha, identifier)
+
+ def parseLogicalPath2astModule(logicalPathModule: str_nameDOTname, packageIdentifierIfRelative: ast_Identifier|None=None, mode:str='exec') -> ast.AST:
+     moduleImported: ModuleType = importlib.import_module(logicalPathModule, packageIdentifierIfRelative)
+     sourcePython: str = inspect_getsource(moduleImported)
+     return ast.parse(sourcePython, mode=mode)
+
+ def parsePathFilename2astModule(pathFilename: PathLike[Any] | PurePath, mode:str='exec') -> ast.AST:
+     return ast.parse(Path(pathFilename).read_text(), mode=mode)
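The visitors above are driven by a predicate (findThis) and an action (doThat). A minimal sketch of their use, on throwaway source rather than anything from the package:

import ast

tree = ast.parse("def hypotheticalFunction(x):\n    return x + 1")

# NodeTourist: capture the value produced by doThat on the last matching node.
nameFound = NodeTourist(lambda node: isinstance(node, ast.FunctionDef), lambda node: node.name).captureLastMatch(tree)
print(nameFound)  # hypotheticalFunction

# NodeChanger: replace matching nodes with whatever doThat returns.
def renameFunctionDef(node):
    node.name = 'renamedFunction'  # mutate in place and return the node
    return node

NodeChanger(lambda node: isinstance(node, ast.FunctionDef), renameFunctionDef).visit(tree)
print(ast.unparse(tree))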
@@ -12,6 +12,14 @@ The extracted LLVM IR can be valuable for debugging, optimization analysis, or e
  purposes, as it provides a view into how high-level Python code is translated into
  lower-level representations for machine execution.

+ Example of successful use:
+ The LLVM IR for the groundbreaking 2x19 map calculation can be found at:
+ mapFolding/reference/jobsCompleted/[2x19]/[2x19].ll
+
+ This file demonstrates the low-level optimizations that made this previously
+ intractable calculation possible. The IR reveals how the abstract algorithm was
+ transformed into efficient machine code through Numba's compilation pipeline.
+
  While originally part of a tighter integration with the code generation pipeline,
  this module now operates as a standalone utility that can be applied to any module
  containing Numba-compiled functions.
@@ -23,7 +31,18 @@ import importlib.util
  import llvmlite.binding

  def writeModuleLLVM(pathFilename: Path, identifierCallable: str) -> Path:
-     """Import the generated module directly and get its LLVM IR."""
+     """Import the generated module directly and get its LLVM IR.
+
+     Parameters
+         pathFilename: Path to the Python module file containing the Numba-compiled function
+         identifierCallable: Name of the function within the module to extract LLVM IR from
+
+     Returns
+         Path to the generated .ll file containing the extracted LLVM IR
+
+     For an example of the output, see reference/jobsCompleted/[2x19]/[2x19].ll,
+     which contains the IR for the historically significant 2x19 map calculation.
+     """
      specTarget: ModuleSpec | None = importlib.util.spec_from_file_location("generatedModule", pathFilename)
      if specTarget is None or specTarget.loader is None:
          raise ImportError(f"Could not create module spec or loader for {pathFilename}")
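A hedged usage sketch of the function documented above; the target path and callable name below are placeholders, not guaranteed to exist in the released wheel:

from pathlib import Path
from mapFolding.someAssemblyRequired.getLLVMforNoReason import writeModuleLLVM

# Placeholder target: any module containing a Numba-compiled function will do.
pathFilenameLLVM = writeModuleLLVM(Path('mapFolding/syntheticModules/numbaCount_doTheNeedful.py'), 'doTheNeedful')
print(f'LLVM IR written to {pathFilenameLLVM}')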
@@ -20,7 +20,8 @@ algorithm implementation into a highly-optimized Numba version.
  """

  from collections.abc import Callable, Sequence
- from mapFolding.someAssemblyRequired import ifThis, IngredientsFunction, Make
+ from mapFolding.someAssemblyRequired import Make
+ from mapFolding.someAssemblyRequired._toolboxContainers import IngredientsFunction
  from numba.core.compiler import CompilerBase as numbaCompilerBase
  from typing import Any, cast, Final, TYPE_CHECKING
  import ast
@@ -71,10 +72,11 @@ parametersNumbaFailEarly: Final[ParametersNumba] = {
      'no_cfunc_wrapper': False,
      'no_cpython_wrapper': False,
      'nopython': True,
-     'parallel': False, }
+     'parallel': False,
+ }
  """For a production function: speed is irrelevant, error discovery is paramount, must be compatible with anything downstream."""

- parametersNumbaDEFAULT: Final[ParametersNumba] = {
+ parametersNumbaDefault: Final[ParametersNumba] = {
      '_nrt': True,
      'boundscheck': False,
      'cache': True,
@@ -90,13 +92,13 @@ parametersNumbaDEFAULT: Final[ParametersNumba] = {
  """Middle of the road: fast, lean, but will talk to non-jitted functions."""

  parametersNumbaParallelDEFAULT: Final[ParametersNumba] = {
-     **parametersNumbaDEFAULT,
+     **parametersNumbaDefault,
      '_nrt': True,
      'parallel': True, }
  """Middle of the road: fast, lean, but will talk to non-jitted functions."""

  parametersNumbaSuperJit: Final[ParametersNumba] = {
-     **parametersNumbaDEFAULT,
+     **parametersNumbaDefault,
      'no_cfunc_wrapper': True,
      'no_cpython_wrapper': True, }
  """Speed, no helmet, no talking to non-jitted functions."""
@@ -125,15 +127,6 @@ parametersNumbaMinimum: Final[ParametersNumba] = {
  Z0Z_numbaDataTypeModule = 'numba'
  Z0Z_decoratorCallable = 'jit'

- def thisIsNumbaDotJit(Ima: ast.AST) -> bool:
-     return ifThis.isCallNamespace_Identifier(Z0Z_numbaDataTypeModule, Z0Z_decoratorCallable)(Ima)
-
- def thisIsJit(Ima: ast.AST) -> bool:
-     return ifThis.isCall_Identifier(Z0Z_decoratorCallable)(Ima)
-
- def thisIsAnyNumbaJitDecorator(Ima: ast.AST) -> bool:
-     return thisIsNumbaDotJit(Ima) or thisIsJit(Ima)
-
  def decorateCallableWithNumba(ingredientsFunction: IngredientsFunction, parametersNumba: ParametersNumba | None = None) -> IngredientsFunction:
      def Z0Z_UnhandledDecorators(astCallable: ast.FunctionDef) -> ast.FunctionDef:
          # TODO: more explicit handling of decorators. I'm able to ignore this because I know `algorithmSource` doesn't have any decorators.
@@ -143,7 +136,7 @@ def decorateCallableWithNumba(ingredientsFunction: IngredientsFunction, paramete
              warnings.warn(f"Removed decorator {ast.unparse(decoratorItem)} from {astCallable.name}")
          return astCallable

-     def makeSpecialSignatureForNumba(signatureElement: ast.arg) -> ast.Subscript | ast.Name | None:
+     def makeSpecialSignatureForNumba(signatureElement: ast.arg) -> ast.Subscript | ast.Name | None: # type: ignore
          if isinstance(signatureElement.annotation, ast.Subscript) and isinstance(signatureElement.annotation.slice, ast.Tuple):
              annotationShape: ast.expr = signatureElement.annotation.slice.elts[0]
              if isinstance(annotationShape, ast.Subscript) and isinstance(annotationShape.slice, ast.Tuple):
@@ -162,7 +155,7 @@ def decorateCallableWithNumba(ingredientsFunction: IngredientsFunction, paramete
              ndarrayName = signatureElement.arg
              Z0Z_hacky_dtype: str = ndarrayName
              datatype_attr = datatypeAST or Z0Z_hacky_dtype
-             ingredientsFunction.imports.addImportFromStr(datatypeModuleDecorator, datatype_attr)
+             ingredientsFunction.imports.addImportFrom_asStr(datatypeModuleDecorator, datatype_attr)
              datatypeNumba = ast.Name(id=datatype_attr, ctx=ast.Load())

              return ast.Subscript(value=datatypeNumba, slice=shapeAST, ctx=ast.Load())
@@ -179,9 +172,9 @@ def decorateCallableWithNumba(ingredientsFunction: IngredientsFunction, paramete
              # Efficient translation of Python scalar types to Numba types https://github.com/hunterhogan/mapFolding/issues/8
              # For now, let Numba infer them.
              continue
-         signatureElement: ast.Subscript | ast.Name | None = makeSpecialSignatureForNumba(parameter)
-         if signatureElement:
-             list_arg4signature_or_function.append(signatureElement)
+         # signatureElement: ast.Subscript | ast.Name | None = makeSpecialSignatureForNumba(parameter)
+         # if signatureElement:
+         #     list_arg4signature_or_function.append(signatureElement)

      if ingredientsFunction.astFunctionDef.returns and isinstance(ingredientsFunction.astFunctionDef.returns, ast.Name):
          theReturn: ast.Name = ingredientsFunction.astFunctionDef.returns
@@ -192,15 +185,15 @@ def decorateCallableWithNumba(ingredientsFunction: IngredientsFunction, paramete

      ingredientsFunction.astFunctionDef = Z0Z_UnhandledDecorators(ingredientsFunction.astFunctionDef)
      if parametersNumba is None:
-         parametersNumba = parametersNumbaDEFAULT
-     listDecoratorKeywords: list[ast.keyword] = [Make.ast_keyword(parameterName, Make.astConstant(parameterValue)) for parameterName, parameterValue in parametersNumba.items()]
+         parametersNumba = parametersNumbaDefault
+     listDecoratorKeywords: list[ast.keyword] = [Make.keyword(parameterName, Make.Constant(parameterValue)) for parameterName, parameterValue in parametersNumba.items()]

      decoratorModule: str = Z0Z_numbaDataTypeModule
      decoratorCallable: str = Z0Z_decoratorCallable
-     ingredientsFunction.imports.addImportFromStr(decoratorModule, decoratorCallable)
+     ingredientsFunction.imports.addImportFrom_asStr(decoratorModule, decoratorCallable)
      # Leave this line in so that global edits will change it.
-     astDecorator: ast.Call = Make.astCall(Make.astName(decoratorCallable), list_argsDecorator, listDecoratorKeywords)
-     astDecorator: ast.Call = Make.astCall(Make.astName(decoratorCallable), list_astKeywords=listDecoratorKeywords) # type: ignore[no-redef]
+     astDecorator: ast.Call = Make.Call(Make.Name(decoratorCallable), list_argsDecorator, listDecoratorKeywords)
+     astDecorator: ast.Call = Make.Call(Make.Name(decoratorCallable), list_astKeywords=listDecoratorKeywords)

      ingredientsFunction.astFunctionDef.decorator_list = [astDecorator]
      return ingredientsFunction
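Taken together, a hedged sketch of how the renamed parametersNumbaDefault profiles and decorateCallableWithNumba are meant to be combined; the function body below is a placeholder, and real callers extract an IngredientsFunction from the source algorithm instead:

import ast
from mapFolding.someAssemblyRequired._toolboxContainers import IngredientsFunction, LedgerOfImports
from mapFolding.someAssemblyRequired.ingredientsNumba import decorateCallableWithNumba, parametersNumbaSuperJit

# Placeholder callable to decorate; 'countSequential' is illustrative only.
astFunctionDef = ast.parse('def countSequential(state):\n    return state').body[0]
ingredients = IngredientsFunction(astFunctionDef, LedgerOfImports())

# Passing no parameters selects parametersNumbaDefault; pass a profile to override it.
ingredients = decorateCallableWithNumba(ingredients, parametersNumbaSuperJit)
print(ast.unparse(ast.fix_missing_locations(ingredients.astFunctionDef)))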