mapFolding 0.11.2__py3-none-any.whl → 0.11.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,138 +18,18 @@ The containers work in conjunction with transformation tools that manipulate the
  specific optimizations and transformations.
  """

+ from astToolkit import ClassIsAndAttribute, DOT, LedgerOfImports, Make, NodeTourist, str_nameDOTname, Then
  from collections.abc import Callable
- from astToolkit import ClassIsAndAttribute
  from copy import deepcopy
- from mapFolding.someAssemblyRequired import ast_Identifier, DOT, IfThis, Make, NodeTourist, parseLogicalPath2astModule, str_nameDOTname, Then, LedgerOfImports
- from mapFolding import raiseIfNoneGitHubIssueNumber3, The
- from pathlib import Path, PurePosixPath
+ from mapFolding.someAssemblyRequired import IfThis, raiseIfNoneGitHubIssueNumber3
  from typing import Any, cast
  import ast
  import dataclasses

- # Consolidate settings classes through inheritance https://github.com/hunterhogan/mapFolding/issues/15
- @dataclasses.dataclass
- class RecipeSynthesizeFlow:
- """
- Configure the generation of new modules, including Numba-accelerated code modules.
-
- RecipeSynthesizeFlow defines the complete blueprint for transforming an original Python algorithm into an optimized,
- accelerated implementation. It specifies:
-
- 1. Source code locations and identifiers.
- 2. Target code locations and identifiers.
- 3. Naming conventions for generated modules and functions.
- 4. File system paths for output files.
- 5. Import relationships between components.
-
- This configuration class serves as a single source of truth for the code generation process, ensuring consistency
- across all generated artifacts while enabling customization of the transformation assembly line.
-
- The transformation process uses this configuration to extract functions from the source module, transform them
- according to optimization rules, and output properly structured optimized modules with all necessary imports.
- """
- # ========================================
- # Source
- source_astModule: ast.Module = parseLogicalPath2astModule(The.logicalPathModuleSourceAlgorithm)
- """AST of the source algorithm module containing the original implementation."""
-
- # Figure out dynamic flow control to synthesized modules https://github.com/hunterhogan/mapFolding/issues/4
- sourceCallableDispatcher: ast_Identifier = The.sourceCallableDispatcher
- sourceCallableInitialize: ast_Identifier = The.sourceCallableInitialize
- sourceCallableParallel: ast_Identifier = The.sourceCallableParallel
- sourceCallableSequential: ast_Identifier = The.sourceCallableSequential
-
- sourceDataclassIdentifier: ast_Identifier = The.dataclassIdentifier
- sourceDataclassInstance: ast_Identifier = The.dataclassInstance
- sourceDataclassInstanceTaskDistribution: ast_Identifier = The.dataclassInstanceTaskDistribution
- sourceLogicalPathModuleDataclass: str_nameDOTname = The.logicalPathModuleDataclass
-
- sourceConcurrencyManagerNamespace = The.sourceConcurrencyManagerNamespace
- sourceConcurrencyManagerIdentifier = The.sourceConcurrencyManagerIdentifier
-
- # ========================================
- # Logical identifiers (as opposed to physical identifiers)
- # ========================================
- # Package ================================
- packageIdentifier: ast_Identifier | None = The.packageName
-
- # Qualified logical path ================================
- logicalPathModuleDataclass: str_nameDOTname = sourceLogicalPathModuleDataclass
- logicalPathFlowRoot: ast_Identifier | None = 'syntheticModules'
- """ `logicalPathFlowRoot` likely corresponds to a physical filesystem directory."""
-
- # Module ================================
- moduleDispatcher: ast_Identifier = 'numbaCount'
- moduleInitialize: ast_Identifier = moduleDispatcher
- moduleParallel: ast_Identifier = moduleDispatcher
- moduleSequential: ast_Identifier = moduleDispatcher
-
- # Function ================================
- callableDispatcher: ast_Identifier = sourceCallableDispatcher
- callableInitialize: ast_Identifier = sourceCallableInitialize
- callableParallel: ast_Identifier = sourceCallableParallel
- callableSequential: ast_Identifier = sourceCallableSequential
- concurrencyManagerNamespace: ast_Identifier = sourceConcurrencyManagerNamespace
- concurrencyManagerIdentifier: ast_Identifier = sourceConcurrencyManagerIdentifier
- dataclassIdentifier: ast_Identifier = sourceDataclassIdentifier
-
- # Variable ================================
- dataclassInstance: ast_Identifier = sourceDataclassInstance
- dataclassInstanceTaskDistribution: ast_Identifier = sourceDataclassInstanceTaskDistribution
-
- removeDataclassDispatcher: bool = False
- removeDataclassInitialize: bool = False
- removeDataclassParallel: bool = True
- removeDataclassSequential: bool = True
- # ========================================
- # Computed
- # Figure out dynamic flow control to synthesized modules https://github.com/hunterhogan/mapFolding/issues/4
- # theFormatStrModuleSynthetic = "{packageFlow}Count"
- # theFormatStrModuleForCallableSynthetic = theFormatStrModuleSynthetic + "_{callableTarget}"
- # theModuleDispatcherSynthetic: ast_Identifier = theFormatStrModuleForCallableSynthetic.format(packageFlow=packageFlowSynthetic, callableTarget=The.sourceCallableDispatcher)
- # theLogicalPathModuleDispatcherSynthetic: str = '.'.join([The.packageName, The.moduleOfSyntheticModules, theModuleDispatcherSynthetic])
- # logicalPathModuleDispatcher: str = '.'.join([Z0Z_flowLogicalPathRoot, moduleDispatcher])
-
- # ========================================
- # Filesystem (names of physical objects)
- pathPackage: PurePosixPath | None = PurePosixPath(The.pathPackage)
- fileExtension: str = The.fileExtension
-
- def _makePathFilename(self, filenameStem: str,
- pathRoot: PurePosixPath | None = None,
- logicalPathINFIX: str_nameDOTname | None = None,
- fileExtension: str | None = None,
- ) -> PurePosixPath:
- """filenameStem: (hint: the name of the logical module)"""
- if pathRoot is None:
- pathRoot = self.pathPackage or PurePosixPath(Path.cwd())
- if logicalPathINFIX:
- whyIsThisStillAThing: list[str] = logicalPathINFIX.split('.')
- pathRoot = pathRoot.joinpath(*whyIsThisStillAThing)
- if fileExtension is None:
- fileExtension = self.fileExtension
- filename: str = filenameStem + fileExtension
- return pathRoot.joinpath(filename)
-
- @property
- def pathFilenameDispatcher(self) -> PurePosixPath:
- return self._makePathFilename(filenameStem=self.moduleDispatcher, logicalPathINFIX=self.logicalPathFlowRoot)
- @property
- def pathFilenameInitialize(self) -> PurePosixPath:
- return self._makePathFilename(filenameStem=self.moduleInitialize, logicalPathINFIX=self.logicalPathFlowRoot)
- @property
- def pathFilenameParallel(self) -> PurePosixPath:
- return self._makePathFilename(filenameStem=self.moduleParallel, logicalPathINFIX=self.logicalPathFlowRoot)
- @property
- def pathFilenameSequential(self) -> PurePosixPath:
- return self._makePathFilename(filenameStem=self.moduleSequential, logicalPathINFIX=self.logicalPathFlowRoot)
-
  dummyAssign = Make.Assign([Make.Name("dummyTarget")], Make.Constant(None))
  dummySubscript = Make.Subscript(Make.Name("dummy"), Make.Name("slice"))
  dummyTuple = Make.Tuple([Make.Name("dummyElement")])

- # Consolidate settings classes through inheritance https://github.com/hunterhogan/mapFolding/issues/15
  @dataclasses.dataclass
  class ShatteredDataclass:
  countingVariableAnnotation: ast.expr
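
The change running through this file, and most of the diff, is twofold: the AST helpers (Make, NodeTourist, Then, LedgerOfImports, and friends) now come from the external astToolkit package instead of mapFolding.someAssemblyRequired, and the package-local ast_Identifier alias gives way to plain str. A minimal sketch of the new style, assembled only from calls that appear verbatim elsewhere in this diff; the identifier and variable names below are illustrative, not package code:

```python
# Minimal sketch, not package code: the import follows the new astToolkit lines above,
# and the calls mirror patterns visible elsewhere in this diff.
from astToolkit import Be, Make, NodeTourist, Then
import ast

countingIdentifier: str = 'groupsOfFolds'  # previously annotated as `ast_Identifier`, now plain `str`
assignZero = Make.Assign([Make.Name(countingIdentifier)], Make.Constant(0))

collectedNames: list[ast.Name] = []
NodeTourist(Be.Name, Then.appendTo(collectedNames)).visit(assignZero)
# Expected: collectedNames holds the single ast.Name node whose id is 'groupsOfFolds'.
```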
@@ -158,10 +38,10 @@ class ShatteredDataclass:
  countingVariableName: ast.Name
  """AST name node representing the counting variable identifier."""

- field2AnnAssign: dict[ast_Identifier, ast.AnnAssign | ast.Assign] = dataclasses.field(default_factory=lambda: dict[ast_Identifier, ast.AnnAssign | ast.Assign]())
+ field2AnnAssign: dict[str, ast.AnnAssign | ast.Assign] = dataclasses.field(default_factory=lambda: dict[str, ast.AnnAssign | ast.Assign]())
  """Maps field names to their corresponding AST call expressions."""

- Z0Z_field2AnnAssign: dict[ast_Identifier, tuple[ast.AnnAssign | ast.Assign, str]] = dataclasses.field(default_factory=lambda: dict[ast_Identifier, tuple[ast.AnnAssign | ast.Assign, str]]())
+ Z0Z_field2AnnAssign: dict[str, tuple[ast.AnnAssign | ast.Assign, str]] = dataclasses.field(default_factory=lambda: dict[str, tuple[ast.AnnAssign | ast.Assign, str]]())

  fragments4AssignmentOrParameters: ast.Tuple = dummyTuple
  """AST tuple used as target for assignment to capture returned fragments."""
@@ -213,12 +93,12 @@ class DeReConstructField2ast:
  """
  dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[str_nameDOTname]
  dataclassClassDef: dataclasses.InitVar[ast.ClassDef]
- dataclassesDOTdataclassInstance_Identifier: dataclasses.InitVar[ast_Identifier]
+ dataclassesDOTdataclassInstance_Identifier: dataclasses.InitVar[str]
  field: dataclasses.InitVar[dataclasses.Field[Any]]

  ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)

- name: ast_Identifier = dataclasses.field(init=False)
+ name: str = dataclasses.field(init=False)
  typeBuffalo: type[Any] | str | Any = dataclasses.field(init=False)
  default: Any | None = dataclasses.field(init=False)
  default_factory: Callable[..., Any] | None = dataclasses.field(init=False)
@@ -237,7 +117,7 @@ class DeReConstructField2ast:
  astAnnAssignConstructor: ast.AnnAssign|ast.Assign = dataclasses.field(init=False)
  Z0Z_hack: tuple[ast.AnnAssign|ast.Assign, str] = dataclasses.field(init=False)

- def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstance_Identifier: ast_Identifier, field: dataclasses.Field[Any]) -> None:
+ def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstance_Identifier: str, field: dataclasses.Field[Any]) -> None:
  self.compare = field.compare
  self.default = field.default if field.default is not dataclasses.MISSING else None
  self.default_factory = field.default_factory if field.default_factory is not dataclasses.MISSING else None
@@ -276,7 +156,7 @@ class DeReConstructField2ast:
  ast_expr = Make.Subscript(Make.Name(annotationType), Make.Tuple([axesSubscript, Make.Subscript(Make.Name('dtype'), dtype_asnameName)]))
  constructor = 'array'
  self.ledger.addImportFrom_asStr(moduleWithLogicalPath, constructor)
- dtypeIdentifier: ast_Identifier = dtype.__name__
+ dtypeIdentifier: str = dtype.__name__
  self.ledger.addImportFrom_asStr(moduleWithLogicalPath, dtypeIdentifier, dtype_asnameName.id)
  self.astAnnAssignConstructor = Make.AnnAssign(self.astName, ast_expr, Make.Call(Make.Name(constructor), list_keyword=[Make.keyword('dtype', dtype_asnameName)]))
  self.astAnnAssignConstructor = Make.Assign([self.astName], Make.Call(Make.Name(constructor), list_keyword=[Make.keyword('dtype', dtype_asnameName)]))
@@ -285,7 +165,7 @@ class DeReConstructField2ast:
  self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(self.astAnnotation, [Make.Constant(-1)]))
  self.Z0Z_hack = (self.astAnnAssignConstructor, 'scalar')
  elif isinstance(self.astAnnotation, ast.Subscript):
- elementConstructor: ast_Identifier = self.metadata['elementConstructor']
+ elementConstructor: str = self.metadata['elementConstructor']
  self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, elementConstructor)
  takeTheTuple = deepcopy(self.astAnnotation.slice)
  self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, takeTheTuple)
@@ -0,0 +1,20 @@
+ from mapFolding import PackageSettings
+ import dataclasses
+
+ dataclassInstanceIdentifierHardcoded: str = 'state'
+ sourceCallableDispatcherHARDCODED: str = 'doTheNeedful'
+
+ class raiseIfNoneGitHubIssueNumber3(Exception): pass
+
+ dictionaryEstimates: dict[tuple[int, ...], int] = {
+ (2,2,2,2,2,2,2,2): 798148657152000,
+ (2,21): 776374224866624,
+ (3,15): 824761667826225,
+ (3,3,3,3): 85109616000000000000000000000000,
+ (8,8): 791274195985524900,
+ }
+ algorithmSourceModuleHARDCODED: str = 'daoOfMapFolding'
+ sourceCallableIdentifierHARDCODED: str = 'count'
+ logicalPathInfixHARDCODED: str = 'syntheticModules'
+ theCountingIdentifierHARDCODED: str = 'groupsOfFolds'
+ dataPackingModuleIdentifierHARDCODED: str = 'dataPacking'
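
The new constants module above also carries the fold-count estimates keyed by map shape. A later hunk in this diff shows the intended consumption pattern in a commented-out line, `dictionaryEstimates[state.mapShape] // state.leavesTotal`; the sketch below imitates it under the assumption that leavesTotal is the product of the map's dimensions, and repeats two table entries inline because the module's import path is not visible in this hunk:

```python
# Illustrative sketch, not package code; see the assumptions noted above.
from math import prod

dictionaryEstimates: dict[tuple[int, ...], int] = {
    (2, 21): 776374224866624,
    (8, 8): 791274195985524900,
}

mapShape = (2, 21)
leavesTotal = prod(mapShape)  # assumed: leaves = product of dimensions, here 42
foldsTotalEstimated = dictionaryEstimates.get(mapShape, 0) // leavesTotal
print(foldsTotalEstimated)  # rough estimate of groupsOfFolds before multiplying back by leavesTotal
```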
@@ -1,10 +1,9 @@
- from astToolkit import ClassIsAndAttribute
- from mapFolding import getPathFilenameFoldsTotal, raiseIfNoneGitHubIssueNumber3, The, MapFoldingState
- from mapFolding.someAssemblyRequired import (
- ast_Identifier,
+ from mapFolding import getPathFilenameFoldsTotal, MapFoldingState, packageSettings
+ from mapFolding.someAssemblyRequired import IfThis, raiseIfNoneGitHubIssueNumber3
+ from astToolkit import (
  Be,
+ ClassIsAndAttribute,
  extractFunctionDef,
- IfThis,
  IngredientsFunction,
  IngredientsModule,
  LedgerOfImports,
@@ -13,10 +12,10 @@ from mapFolding.someAssemblyRequired import (
  NodeTourist,
  str_nameDOTname,
  Then,
- write_astModule,
  )
+ from astToolkit.transformationTools import write_astModule
  from mapFolding.someAssemblyRequired.RecipeJob import RecipeJobTheorem2Numba
- from mapFolding.someAssemblyRequired.toolkitNumba import parametersNumbaLight, SpicesJobNumba, decorateCallableWithNumba
+ from mapFolding.someAssemblyRequired.toolkitNumba import decorateCallableWithNumba, parametersNumbaLight, SpicesJobNumba
  from mapFolding.syntheticModules.initializeCount import initializeGroupsOfFolds
  from pathlib import PurePosixPath
  from typing import cast, NamedTuple
@@ -69,15 +68,15 @@ if __name__ == '__main__':
  writeStream.write(str(foldsTotal))
  writeStream.close()
  """
- numba_progressPythonClass: ast_Identifier = 'ProgressBar'
- numba_progressNumbaType: ast_Identifier = 'ProgressBarType'
+ numba_progressPythonClass: str = 'ProgressBar'
+ numba_progressNumbaType: str = 'ProgressBarType'
  ingredientsModule.imports.addImportFrom_asStr('numba_progress', numba_progressPythonClass)
  ingredientsModule.imports.addImportFrom_asStr('numba_progress', numba_progressNumbaType)

  ast_argNumbaProgress = ast.arg(arg=spices.numbaProgressBarIdentifier, annotation=ast.Name(id=numba_progressPythonClass, ctx=ast.Load()))
  ingredientsFunction.astFunctionDef.args.args.append(ast_argNumbaProgress)

- findThis = ClassIsAndAttribute.targetIs(ast.AugAssign, IfThis.isName_Identifier(job.shatteredDataclass.countingVariableName.id)) # type: ignore
+ findThis = ClassIsAndAttribute.targetIs(ast.AugAssign, IfThis.isName_Identifier(job.shatteredDataclass.countingVariableName.id))
  doThat = Then.replaceWith(Make.Expr(Make.Call(Make.Attribute(Make.Name(spices.numbaProgressBarIdentifier),'update'),[Make.Constant(1)])))
  countWithProgressBar = NodeChanger(findThis, doThat)
  countWithProgressBar.visit(ingredientsFunction.astFunctionDef)
@@ -117,11 +116,11 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre
  ingredientsFunction.imports.update(job.shatteredDataclass.imports)

  list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs
- list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
+ list_arg_arg: list[str] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
  listName: list[ast.Name] = []
  NodeTourist(Be.Name, Then.appendTo(listName)).visit(ingredientsFunction.astFunctionDef)
- list_Identifiers: list[ast_Identifier] = [astName.id for astName in listName]
- list_IdentifiersNotUsed: list[ast_Identifier] = list(set(list_arg_arg) - set(list_Identifiers))
+ list_Identifiers: list[str] = [astName.id for astName in listName]
+ list_IdentifiersNotUsed: list[str] = list(set(list_arg_arg) - set(list_Identifiers))

  for ast_arg in list_argCuzMyBrainRefusesToThink:
  if ast_arg.arg in job.shatteredDataclass.field2AnnAssign:
@@ -184,7 +183,7 @@ def makeJobNumba(job: RecipeJobTheorem2Numba, spices: SpicesJobNumba) -> None:
  if __name__ == '__main__':
  import time
  timeStart = time.perf_counter()
- foldsTotal = {job.countCallable}() * {job.state.leavesTotal}
+ foldsTotal = int({job.countCallable}() * {job.state.leavesTotal})
  print(time.perf_counter() - timeStart)
  print('\\nmap {job.state.mapShape} =', foldsTotal)
  writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
@@ -202,9 +201,9 @@ if __name__ == '__main__':

  class DatatypeConfig(NamedTuple):
  Z0Z_module: str_nameDOTname
- fml: ast_Identifier
- Z0Z_type_name: ast_Identifier
- Z0Z_asname: ast_Identifier | None = None
+ fml: str
+ Z0Z_type_name: str
+ Z0Z_asname: str | None = None

  listDatatypeConfigs = [
  DatatypeConfig(fml='DatatypeLeavesTotal', Z0Z_module='numba', Z0Z_type_name='uint8'),
@@ -267,7 +266,7 @@ if __name__ == '__main__':
  # foldsTotalEstimated = getFoldsTotalKnown(state.mapShape) // state.leavesTotal
  # foldsTotalEstimated = dictionaryEstimates[state.mapShape] // state.leavesTotal
  foldsTotalEstimated = 0
- pathModule = PurePosixPath(The.pathPackage, 'jobs')
+ pathModule = PurePosixPath(packageSettings.pathPackage, 'jobs')
  pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(state.mapShape, pathModule))
  aJob = RecipeJobTheorem2Numba(state, foldsTotalEstimated, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal)
  spices = SpicesJobNumba(useNumbaProgressBar=False, parametersNumba=parametersNumbaLight)
@@ -17,16 +17,13 @@ performance improvements while preserving code semantics and correctness.

  from collections.abc import Callable, Sequence
  from mapFolding import NotRequired, TypedDict
- from mapFolding.someAssemblyRequired import ast_Identifier, IngredientsFunction, Make, RecipeSynthesizeFlow, str_nameDOTname, write_astModule
- from mapFolding.someAssemblyRequired.transformationTools import makeNewFlow
+ from astToolkit import IngredientsFunction, Make, str_nameDOTname
+ from astToolkit.transformationTools import write_astModule
  from numba.core.compiler import CompilerBase as numbaCompilerBase
  from typing import Any, cast, Final
  import ast
  import dataclasses

- # Consolidate settings classes through inheritance https://github.com/hunterhogan/mapFolding/issues/15
- theNumbaFlow: RecipeSynthesizeFlow = RecipeSynthesizeFlow()
-
  class ParametersNumba(TypedDict):
  _dbg_extend_lifetimes: NotRequired[bool]
  _dbg_optnone: NotRequired[bool]
@@ -56,7 +53,7 @@ parametersNumbaDefault: Final[ParametersNumba] = { '_nrt': True, 'boundscheck':
  parametersNumbaLight: Final[ParametersNumba] = {'cache': True, 'error_model': 'numpy', 'fastmath': True, 'forceinline': True}

  Z0Z_numbaDataTypeModule: str_nameDOTname = 'numba'
- Z0Z_decoratorCallable: ast_Identifier = 'jit'
+ Z0Z_decoratorCallable: str = 'jit'

  def decorateCallableWithNumba(ingredientsFunction: IngredientsFunction, parametersNumba: ParametersNumba | None = None) -> IngredientsFunction:
  def Z0Z_UnhandledDecorators(astCallable: ast.FunctionDef) -> ast.FunctionDef:
@@ -128,47 +125,8 @@ def decorateCallableWithNumba(ingredientsFunction: IngredientsFunction, paramete
  ingredientsFunction.astFunctionDef.decorator_list = [astDecorator]
  return ingredientsFunction

- # Consolidate settings classes through inheritance https://github.com/hunterhogan/mapFolding/issues/15
  @dataclasses.dataclass
  class SpicesJobNumba:
  useNumbaProgressBar: bool = True
- numbaProgressBarIdentifier: ast_Identifier = 'ProgressBarGroupsOfFolds'
+ numbaProgressBarIdentifier: str = 'ProgressBarGroupsOfFolds'
  parametersNumba: ParametersNumba = dataclasses.field(default_factory=ParametersNumba) # type: ignore
-
- # Consolidate settings classes through inheritance https://github.com/hunterhogan/mapFolding/issues/15
- def makeNumbaFlow(numbaFlow: RecipeSynthesizeFlow) -> None:
- """
- Transform standard Python algorithm code into optimized Numba implementations.
-
- This function implements the complete transformation assembly line that converts
- a conventional Python implementation into a high-performance Numba-accelerated
- version. The process includes:
-
- 1. Extracting core algorithm functions from the source module
- 2. Inlining function calls to create self-contained implementations
- 3. Transforming dataclass access patterns for Numba compatibility
- 4. Applying appropriate Numba decorators with optimization settings
- 5. Generating a unified module with sequential and parallel implementations
- 6. Writing the transformed code to the filesystem with properly managed imports
-
- The transformation preserves the logical structure and semantics of the original
- implementation while making it compatible with Numba's constraints and
- optimization capabilities. This creates a bridge between the general-purpose
- implementation and the highly-optimized version needed for production use.
-
- Parameters:
- numbaFlow: Configuration object that specifies all aspects of the
- transformation process, including source and target locations,
- function and variable names, and output paths.
- """
-
- ingredientsModuleNumbaUnified = makeNewFlow(numbaFlow)
-
- # numba decorators =========================================
- ingredientsModuleNumbaUnified.listIngredientsFunctions[1] = decorateCallableWithNumba(ingredientsModuleNumbaUnified.listIngredientsFunctions[1])
- ingredientsModuleNumbaUnified.listIngredientsFunctions[2] = decorateCallableWithNumba(ingredientsModuleNumbaUnified.listIngredientsFunctions[2])
-
- write_astModule(ingredientsModuleNumbaUnified, numbaFlow.pathFilenameDispatcher, numbaFlow.packageIdentifier)
-
- if __name__ == '__main__':
- makeNumbaFlow(theNumbaFlow)
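
With makeNumbaFlow and the RecipeSynthesizeFlow-driven assembly line removed, toolkitNumba now exposes only the decorator helper and the job-spice settings. A brief sketch of how the surviving entry points combine, mirroring the RecipeJob script earlier in this diff; building the IngredientsFunction itself requires an astToolkit container and is left as a placeholder:

```python
# Sketch only; mirrors usage shown earlier in this diff. The IngredientsFunction is a
# placeholder because constructing one needs an astToolkit function container.
from mapFolding.someAssemblyRequired.toolkitNumba import (
    SpicesJobNumba, decorateCallableWithNumba, parametersNumbaLight)

spices = SpicesJobNumba(useNumbaProgressBar=False, parametersNumba=parametersNumbaLight)
# ingredientsFunction = ...  # an astToolkit IngredientsFunction wrapping the count callable
# ingredientsFunction = decorateCallableWithNumba(ingredientsFunction, spices.parametersNumba)
```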
@@ -18,80 +18,28 @@ readable, maintainable implementations to highly optimized versions while preser
  logical structure and correctness.
  """

- from collections.abc import Callable
  from astToolkit import ClassIsAndAttribute
- from mapFolding import outfitCountFolds, ComputationState, The, getPathFilenameFoldsTotal
  from mapFolding.someAssemblyRequired import (
- ast_Identifier,
- astModuleToIngredientsFunction,
- Be,
  DeReConstructField2ast,
- extractClassDef,
- Grab,
  IfThis,
- importLogicalPath2Callable,
+ ShatteredDataclass,
+ )
+ from astToolkit import(
+ Be,
+ extractClassDef,
  IngredientsFunction,
- IngredientsModule,
- inlineFunctionDef,
- LedgerOfImports,
  Make,
  NodeChanger,
- NodeTourist,
  parseLogicalPath2astModule,
- RecipeSynthesizeFlow,
- removeUnusedParameters,
- ShatteredDataclass,
  str_nameDOTname,
  Then,
- unparseFindReplace,
  )
- from os import PathLike
- from pathlib import Path, PurePath
- from typing import Any, Literal, overload
+ from astToolkit.transformationTools import unparseFindReplace
+ from Z0Z_tools import importLogicalPath2Callable
  import ast
  import dataclasses
- import pickle

- @overload
- def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: Literal[True], *, pathFilename: PathLike[str] | PurePath | None = None, **keywordArguments: Any) -> Path: ...
- @overload
- def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: Literal[False] = False, **keywordArguments: Any) -> ComputationState: ...
- def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: bool = False, *, pathFilename: PathLike[str] | PurePath | None = None, **keywordArguments: Any) -> ComputationState | Path:
- """
- Initializes a computation state and optionally saves it to disk.
-
- This function initializes a computation state using the source algorithm.
-
- Hint: If you want an uninitialized state, call `outfitCountFolds` directly.
-
- Parameters:
- mapShape: List of integers representing the dimensions of the map to be folded.
- writeJob (False): Whether to save the state to disk.
- pathFilename (getPathFilenameFoldsTotal.pkl): The path and filename to save the state. If None, uses a default path.
- **keywordArguments: computationDivisions:int|str|None=None,concurrencyLimit:int=1.
- Returns:
- stateUniversal|pathFilenameJob: The computation state for the map folding calculations, or
- the path to the saved state file if writeJob is True.
- """
- stateUniversal: ComputationState = outfitCountFolds(mapShape, **keywordArguments)
-
- initializeState = importLogicalPath2Callable(The.logicalPathModuleSourceAlgorithm, The.sourceCallableInitialize)
- stateUniversal = initializeState(stateUniversal)
-
- if not writeJob:
- return stateUniversal
-
- if pathFilename:
- pathFilenameJob = Path(pathFilename)
- pathFilenameJob.parent.mkdir(parents=True, exist_ok=True)
- else:
- pathFilenameJob = getPathFilenameFoldsTotal(stateUniversal.mapShape).with_suffix('.pkl')
-
- # Fix code scanning alert - Consider possible security implications associated with pickle module. #17
- pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
- return pathFilenameJob
-
- def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclass_Identifier: ast_Identifier, instance_Identifier: ast_Identifier) -> ShatteredDataclass:
+ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclass_Identifier: str, instance_Identifier: str) -> ShatteredDataclass:
  """
  Decompose a dataclass definition into AST components for manipulation and code generation.

@@ -121,8 +69,8 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
  Raises:
  ValueError: If the dataclass cannot be found in the specified module or if no counting variable is identified in the dataclass.
  """
- Official_fieldOrder: list[ast_Identifier] = []
- dictionaryDeReConstruction: dict[ast_Identifier, DeReConstructField2ast] = {}
+ Official_fieldOrder: list[str] = []
+ dictionaryDeReConstruction: dict[str, DeReConstructField2ast] = {}

  dataclassClassDef = extractClassDef(parseLogicalPath2astModule(logicalPathModule), dataclass_Identifier)
  if not isinstance(dataclassClassDef, ast.ClassDef): raise ValueError(f"I could not find `{dataclass_Identifier = }` in `{logicalPathModule = }`.")
@@ -160,116 +108,6 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas

  return shatteredDataclass

- # END of acceptable classes and functions ======================================================
- def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
- # Figure out dynamic flow control to synthesized modules https://github.com/hunterhogan/mapFolding/issues/4
- listAllIngredientsFunctions = [
- (ingredientsInitialize := astModuleToIngredientsFunction(recipeFlow.source_astModule, recipeFlow.sourceCallableInitialize)),
- (ingredientsParallel := astModuleToIngredientsFunction(recipeFlow.source_astModule, recipeFlow.sourceCallableParallel)),
- (ingredientsSequential := astModuleToIngredientsFunction(recipeFlow.source_astModule, recipeFlow.sourceCallableSequential)),
- (ingredientsDispatcher := astModuleToIngredientsFunction(recipeFlow.source_astModule, recipeFlow.sourceCallableDispatcher)),
- ]
-
- # Inline functions ========================================================
- # NOTE Replacements statements are based on the identifiers in the _source_, so operate on the source identifiers.
- ingredientsInitialize.astFunctionDef = inlineFunctionDef(recipeFlow.sourceCallableInitialize, recipeFlow.source_astModule)
- ingredientsParallel.astFunctionDef = inlineFunctionDef(recipeFlow.sourceCallableParallel, recipeFlow.source_astModule)
- ingredientsSequential.astFunctionDef = inlineFunctionDef(recipeFlow.sourceCallableSequential, recipeFlow.source_astModule)
-
- # assignRecipeIdentifiersToCallable. =============================
- # Consolidate settings classes through inheritance https://github.com/hunterhogan/mapFolding/issues/15
- # How can I use dataclass settings as the SSOT for specific actions? https://github.com/hunterhogan/mapFolding/issues/16
- # NOTE reminder: you are updating these `ast.Name` here (and not in a more general search) because this is a
- # narrow search for `ast.Call` so you won't accidentally replace unrelated `ast.Name`.
- listFindReplace = [(recipeFlow.sourceCallableDispatcher, recipeFlow.callableDispatcher),
- (recipeFlow.sourceCallableInitialize, recipeFlow.callableInitialize),
- (recipeFlow.sourceCallableParallel, recipeFlow.callableParallel),
- (recipeFlow.sourceCallableSequential, recipeFlow.callableSequential),]
- for ingredients in listAllIngredientsFunctions:
- for source_Identifier, recipe_Identifier in listFindReplace:
- updateCallName = NodeChanger(IfThis.isCall_Identifier(source_Identifier), Grab.funcAttribute(Then.replaceWith(Make.Name(recipe_Identifier))))
- updateCallName.visit(ingredients.astFunctionDef)
-
- ingredientsDispatcher.astFunctionDef.name = recipeFlow.callableDispatcher
- ingredientsInitialize.astFunctionDef.name = recipeFlow.callableInitialize
- ingredientsParallel.astFunctionDef.name = recipeFlow.callableParallel
- ingredientsSequential.astFunctionDef.name = recipeFlow.callableSequential
-
- # Assign identifiers per the recipe. ==============================
- listFindReplace = [(recipeFlow.sourceDataclassInstance, recipeFlow.dataclassInstance),
- (recipeFlow.sourceDataclassInstanceTaskDistribution, recipeFlow.dataclassInstanceTaskDistribution),
- (recipeFlow.sourceConcurrencyManagerNamespace, recipeFlow.concurrencyManagerNamespace),]
- for ingredients in listAllIngredientsFunctions:
- for source_Identifier, recipe_Identifier in listFindReplace:
- updateName = NodeChanger(IfThis.isName_Identifier(source_Identifier) , Grab.idAttribute(Then.replaceWith(recipe_Identifier)))
- update_arg = NodeChanger(IfThis.isArgument_Identifier(source_Identifier), Grab.argAttribute(Then.replaceWith(recipe_Identifier)))
- updateName.visit(ingredients.astFunctionDef)
- update_arg.visit(ingredients.astFunctionDef)
-
- updateConcurrencyManager = NodeChanger(IfThis.isCallAttributeNamespace_Identifier(recipeFlow.sourceConcurrencyManagerNamespace, recipeFlow.sourceConcurrencyManagerIdentifier)
- , Grab.funcAttribute(Then.replaceWith(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier))))
- updateConcurrencyManager.visit(ingredientsDispatcher.astFunctionDef)
-
- # shatter Dataclass =======================================================
- instance_Identifier = recipeFlow.dataclassInstance
- getTheOtherRecord_damn = recipeFlow.dataclassInstanceTaskDistribution
- shatteredDataclass = shatter_dataclassesDOTdataclass(recipeFlow.logicalPathModuleDataclass, recipeFlow.sourceDataclassIdentifier, instance_Identifier)
- ingredientsDispatcher.imports.update(shatteredDataclass.imports)
-
- # How can I use dataclass settings as the SSOT for specific actions? https://github.com/hunterhogan/mapFolding/issues/16
- # Change callable parameters and Call to the callable at the same time ====
- # sequentialCallable =========================================================
- if recipeFlow.removeDataclassSequential:
- ingredientsSequential = removeDataclassFromFunction(ingredientsSequential, shatteredDataclass)
- ingredientsDispatcher = unpackDataclassCallFunctionRepackDataclass(ingredientsDispatcher, recipeFlow.callableSequential, shatteredDataclass)
-
- if recipeFlow.removeDataclassInitialize:
- ingredientsInitialize = removeDataclassFromFunction(ingredientsInitialize, shatteredDataclass)
- ingredientsDispatcher = unpackDataclassCallFunctionRepackDataclass(ingredientsDispatcher, recipeFlow.callableInitialize, shatteredDataclass)
-
- # parallelCallable =========================================================
- if recipeFlow.removeDataclassParallel:
- ingredientsParallel.astFunctionDef.args = Make.arguments(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
-
- ingredientsParallel.astFunctionDef = unparseFindReplace(ingredientsParallel.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
-
- ingredientsParallel = removeUnusedParameters(ingredientsParallel)
-
- list_argCuzMyBrainRefusesToThink = ingredientsParallel.astFunctionDef.args.args + ingredientsParallel.astFunctionDef.args.posonlyargs + ingredientsParallel.astFunctionDef.args.kwonlyargs
- list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
-
- listParameters = [parameter for parameter in shatteredDataclass.listName4Parameters if parameter.id in list_arg_arg]
-
- replaceCall2concurrencyManager = NodeChanger(IfThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier), Then.replaceWith(Make.Call(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier), [Make.Name(recipeFlow.callableParallel)] + listParameters)))
-
- def getIt(astCallConcurrencyResult: list[ast.Call]) -> Callable[[ast.AST], ast.AST]:
- # TODO I cannot remember why I made this function. It doesn't fit with how I normally do things.
- def workhorse(node: ast.AST) -> ast.AST:
- NodeTourist(Be.Call, Then.appendTo(astCallConcurrencyResult)).visit(node)
- return node
- return workhorse
-
- # NOTE I am dissatisfied with this logic for many reasons, including that it requires separate NodeCollector and NodeReplacer instances.
- astCallConcurrencyResult: list[ast.Call] = []
- get_astCallConcurrencyResult = NodeTourist(IfThis.isAssignAndTargets0Is(IfThis.isSubscript_Identifier(getTheOtherRecord_damn)), getIt(astCallConcurrencyResult))
- get_astCallConcurrencyResult.visit(ingredientsDispatcher.astFunctionDef)
- replaceAssignParallelCallable = NodeChanger(IfThis.isAssignAndTargets0Is(IfThis.isSubscript_Identifier(getTheOtherRecord_damn)), Grab.valueAttribute(Then.replaceWith(astCallConcurrencyResult[0])))
- replaceAssignParallelCallable.visit(ingredientsDispatcher.astFunctionDef)
- changeReturnParallelCallable = NodeChanger(Be.Return, Then.replaceWith(Make.Return(shatteredDataclass.countingVariableName)))
- ingredientsParallel.astFunctionDef.returns = shatteredDataclass.countingVariableAnnotation
-
- unpack4parallelCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
-
- unpack4parallelCallable.visit(ingredientsDispatcher.astFunctionDef)
- replaceCall2concurrencyManager.visit(ingredientsDispatcher.astFunctionDef)
- changeReturnParallelCallable.visit(ingredientsParallel.astFunctionDef)
-
- # Module-level transformations ===========================================================
- ingredientsModuleNumbaUnified = IngredientsModule(ingredientsFunction=listAllIngredientsFunctions, imports=LedgerOfImports(recipeFlow.source_astModule))
- ingredientsModuleNumbaUnified.removeImportFromModule('numpy')
-
- return ingredientsModuleNumbaUnified
-
  def removeDataclassFromFunction(ingredientsTarget: IngredientsFunction, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
  ingredientsTarget.astFunctionDef.args = Make.arguments(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
  ingredientsTarget.astFunctionDef.returns = shatteredDataclass.signatureReturnAnnotation
@@ -278,7 +116,7 @@ def removeDataclassFromFunction(ingredientsTarget: IngredientsFunction, shattere
  ingredientsTarget.astFunctionDef = unparseFindReplace(ingredientsTarget.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
  return ingredientsTarget

- def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFunction, targetCallableIdentifier: ast_Identifier, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
+ def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFunction, targetCallableIdentifier: str, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
  astCallTargetCallable = Make.Call(Make.Name(targetCallableIdentifier), shatteredDataclass.listName4Parameters)
  replaceAssignTargetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign([shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
  unpack4targetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
@@ -287,11 +125,3 @@ def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFun
  unpack4targetCallable.visit(ingredientsCaller.astFunctionDef)
  repack4targetCallable.visit(ingredientsCaller.astFunctionDef)
  return ingredientsCaller
-
- dictionaryEstimates: dict[tuple[int, ...], int] = {
- (2,2,2,2,2,2,2,2): 798148657152000,
- (2,21): 776374224866624,
- (3,15): 824761667826225,
- (3,3,3,3): 85109616000000000000000000000000,
- (8,8): 791274195985524900,
- }
mapFolding/theSSOT.py CHANGED
@@ -5,25 +5,30 @@ from tomli import load as tomli_load
  import dataclasses

  packageNamePACKAGING_HARDCODED = "mapFolding"
+ concurrencyPackageHARDCODED = 'multiprocessing'

- # Evaluate When Packaging https://github.com/hunterhogan/mapFolding/issues/18
+ # Evaluate When Packaging
+ # https://github.com/hunterhogan/mapFolding/issues/18
  try:
  packageNamePACKAGING: str = tomli_load(Path("../pyproject.toml").open('rb'))["project"]["name"]
  except Exception:
  packageNamePACKAGING = packageNamePACKAGING_HARDCODED

- # Evaluate When Installing https://github.com/hunterhogan/mapFolding/issues/18
+ # Evaluate When Installing
+ # https://github.com/hunterhogan/mapFolding/issues/18
  def getPathPackageINSTALLING() -> Path:
  pathPackage: Path = Path(inspect_getfile(importlib_import_module(packageNamePACKAGING)))
  if pathPackage.is_file():
  pathPackage = pathPackage.parent
  return pathPackage

- # PackageSettings in theSSOT.py and immutability https://github.com/hunterhogan/mapFolding/issues/11
  @dataclasses.dataclass
  class PackageSettings:
  fileExtension: str = dataclasses.field(default='.py', metadata={'evaluateWhen': 'installing'})
  packageName: str = dataclasses.field(default = packageNamePACKAGING, metadata={'evaluateWhen': 'packaging'})
  pathPackage: Path = dataclasses.field(default_factory=getPathPackageINSTALLING, metadata={'evaluateWhen': 'installing'})
+ concurrencyPackage: str | None = None
+ """Package to use for concurrent execution (e.g., 'multiprocessing', 'numba')."""

- packageSettings = PackageSettings()
+ concurrencyPackage = concurrencyPackageHARDCODED
+ packageSettings = PackageSettings(concurrencyPackage=concurrencyPackage)
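
The settings singleton is now parameterized by the concurrency backend. A minimal sketch of constructing PackageSettings the same way the module-level assignment above does; the 'numba' value is only an example taken from the new field's docstring, and the import follows the `from mapFolding import PackageSettings` line added earlier in this diff:

```python
# Minimal sketch, not package code; see the note above for assumptions.
from mapFolding import PackageSettings

defaultSettings = PackageSettings()                          # concurrencyPackage defaults to None
numbaSettings = PackageSettings(concurrencyPackage='numba')  # example value from the field docstring
print(defaultSettings.packageName, defaultSettings.pathPackage)
print(numbaSettings.concurrencyPackage)
```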