mapFolding 0.12.0__tar.gz → 0.12.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. {mapfolding-0.12.0 → mapfolding-0.12.1}/PKG-INFO +2 -2
  2. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/Z0Z_makeAllModules.py +27 -27
  3. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/_toolIfThis.py +2 -2
  4. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/_toolkitContainers.py +4 -4
  5. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +16 -16
  6. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/transformationTools.py +13 -13
  7. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding.egg-info/PKG-INFO +2 -2
  8. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding.egg-info/requires.txt +1 -1
  9. {mapfolding-0.12.0 → mapfolding-0.12.1}/pyproject.toml +2 -2
  10. {mapfolding-0.12.0 → mapfolding-0.12.1}/LICENSE +0 -0
  11. {mapfolding-0.12.0 → mapfolding-0.12.1}/README.md +0 -0
  12. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/__init__.py +0 -0
  13. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/basecamp.py +0 -0
  14. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/beDRY.py +0 -0
  15. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/daoOfMapFolding.py +0 -0
  16. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/dataBaskets.py +0 -0
  17. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/datatypes.py +0 -0
  18. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/filesystemToolkit.py +0 -0
  19. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/oeis.py +0 -0
  20. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/py.typed +0 -0
  21. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/__init__.py +0 -0
  22. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/flattened.py +0 -0
  23. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/hunterNumba.py +0 -0
  24. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/irvineJavaPort.py +0 -0
  25. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/jaxCount.py +0 -0
  26. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/jobsCompleted/[2x19]/p2x19.py +0 -0
  27. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/jobsCompleted/__init__.py +0 -0
  28. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/jobsCompleted/p2x19/p2x19.py +0 -0
  29. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/lunnonNumpy.py +0 -0
  30. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/lunnonWhile.py +0 -0
  31. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/rotatedEntryPoint.py +0 -0
  32. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/reference/total_countPlus1vsPlusN.py +0 -0
  33. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/RecipeJob.py +0 -0
  34. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/__init__.py +0 -0
  35. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/getLLVMforNoReason.py +0 -0
  36. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/infoBooth.py +0 -0
  37. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/someAssemblyRequired/toolkitNumba.py +0 -0
  38. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/syntheticModules/__init__.py +0 -0
  39. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/syntheticModules/countParallel.py +0 -0
  40. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/syntheticModules/daoOfMapFolding.py +0 -0
  41. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/syntheticModules/dataPacking.py +0 -0
  42. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/syntheticModules/initializeCount.py +0 -0
  43. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/syntheticModules/theorem2.py +0 -0
  44. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/syntheticModules/theorem2Numba.py +0 -0
  45. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/syntheticModules/theorem2Trimmed.py +0 -0
  46. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding/theSSOT.py +0 -0
  47. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding.egg-info/SOURCES.txt +0 -0
  48. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding.egg-info/dependency_links.txt +0 -0
  49. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding.egg-info/entry_points.txt +0 -0
  50. {mapfolding-0.12.0 → mapfolding-0.12.1}/mapFolding.egg-info/top_level.txt +0 -0
  51. {mapfolding-0.12.0 → mapfolding-0.12.1}/setup.cfg +0 -0
  52. {mapfolding-0.12.0 → mapfolding-0.12.1}/tests/__init__.py +0 -0
  53. {mapfolding-0.12.0 → mapfolding-0.12.1}/tests/conftest.py +0 -0
  54. {mapfolding-0.12.0 → mapfolding-0.12.1}/tests/test_computations.py +0 -0
  55. {mapfolding-0.12.0 → mapfolding-0.12.1}/tests/test_filesystem.py +0 -0
  56. {mapfolding-0.12.0 → mapfolding-0.12.1}/tests/test_oeis.py +0 -0
  57. {mapfolding-0.12.0 → mapfolding-0.12.1}/tests/test_other.py +0 -0
  58. {mapfolding-0.12.0 → mapfolding-0.12.1}/tests/test_tasks.py +0 -0
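
Taken together, the hunks below make two kinds of changes: the astToolkit dependency is pinned to >=0.3.0, and identifiers are renamed from the old mixed style (for example `dataclass_Identifier`, `IfThis.isCall_Identifier`, `IfThis.isName_Identifier`) to the camelCase style (`dataclassIdentifier`, `IfThis.isCallIdentifier`, `IfThis.isNameIdentifier`), presumably to track the renamed predicates in astToolkit 0.3.0. The recurring pattern in the touched modules is a `findThis` predicate paired with a `doThat` action, applied by `NodeChanger(findThis, doThat).visit(...)`. As orientation only, the sketch below reproduces that find-and-rewrite shape with nothing but the standard `ast` module; it is not the astToolkit API, and `leavesTotal` and the value 4 are illustrative stand-ins.

import ast

# Standard-library analogue of the NodeChanger(findThis, doThat) pattern in the hunks
# below: a predicate selects the nodes, an action rewrites them.
def replaceNameWithConstant(tree: ast.AST, identifier: str, value: int) -> ast.AST:
    class Replace(ast.NodeTransformer):
        def visit_Name(self, node: ast.Name) -> ast.AST:
            # findThis: the diff's IfThis.isNameIdentifier(identifier)
            if node.id == identifier:
                # doThat: the diff's Then.replaceWith(Make.Constant(...))
                return ast.copy_location(ast.Constant(value=value), node)
            return node
    return ast.fix_missing_locations(Replace().visit(tree))

module = ast.parse("groupsOfFolds = groupsOfFolds * leavesTotal")
print(ast.unparse(replaceNameWithConstant(module, 'leavesTotal', 4)))
# prints: groupsOfFolds = groupsOfFolds * 4
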
PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mapFolding
- Version: 0.12.0
+ Version: 0.12.1
  Summary: Map folding algorithm with code transformation framework for optimizing numerical computations
  Author-email: Hunter Hogan <HunterHogan@pm.me>
  License: CC-BY-NC-4.0
@@ -29,7 +29,7 @@ Classifier: Typing :: Typed
  Requires-Python: >=3.12
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: astToolkit
+ Requires-Dist: astToolkit>=0.3.0
  Requires-Dist: autoflake
  Requires-Dist: numba_progress
  Requires-Dist: numba

mapFolding/someAssemblyRequired/Z0Z_makeAllModules.py

@@ -43,17 +43,17 @@ import dataclasses

  def findDataclass(ingredientsFunction: IngredientsFunction) -> tuple[str, str, str]:
  dataclassName: ast.expr = raiseIfNone(NodeTourist(Be.arg, Then.extractIt(DOT.annotation)).captureLastMatch(ingredientsFunction.astFunctionDef))
- dataclass_Identifier: str = raiseIfNone(NodeTourist(Be.Name, Then.extractIt(DOT.id)).captureLastMatch(dataclassName))
+ dataclassIdentifier: str = raiseIfNone(NodeTourist(Be.Name, Then.extractIt(DOT.id)).captureLastMatch(dataclassName))
  dataclassLogicalPathModule = None
  for moduleWithLogicalPath, listNameTuples in ingredientsFunction.imports.dictionaryImportFrom.items():
  for nameTuple in listNameTuples:
- if nameTuple[0] == dataclass_Identifier:
+ if nameTuple[0] == dataclassIdentifier:
  dataclassLogicalPathModule = moduleWithLogicalPath
  break
  if dataclassLogicalPathModule:
  break
  dataclassInstanceIdentifier = raiseIfNone(NodeTourist(Be.arg, Then.extractIt(DOT.arg)).captureLastMatch(ingredientsFunction.astFunctionDef))
- return raiseIfNone(dataclassLogicalPathModule), dataclass_Identifier, dataclassInstanceIdentifier
+ return raiseIfNone(dataclassLogicalPathModule), dataclassIdentifier, dataclassInstanceIdentifier

  def _getLogicalPath(packageName: str | None = None, logicalPathInfix: str | None = None, moduleIdentifier: str | None = None, *modules: str) -> str_nameDOTname:
  listLogicalPathParts: list[str] = []
@@ -127,7 +127,7 @@ def makeDaoOfMapFolding(astModule: ast.Module, moduleIdentifier: str, callableId
  astTuple = raiseIfNone(NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(ingredientsFunction.astFunctionDef))
  cast(ast.Tuple, astTuple).ctx = ast.Store()

- findThis = ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier))
+ findThis = ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCallIdentifier(targetCallableIdentifier))
  doThat = Then.replaceWith(Make.Assign([astTuple], value=Make.Call(Make.Name(targetCallableIdentifier), cast(ast.Tuple, astTuple).elts)))
  changeAssignCallToTarget = NodeChanger(findThis, doThat)
  changeAssignCallToTarget.visit(ingredientsFunctionDispatcher.astFunctionDef)
@@ -150,31 +150,31 @@ def makeDaoOfMapFoldingParallel(astModule: ast.Module, moduleIdentifier: str, ca
  ingredientsFunction.astFunctionDef.name = callableIdentifier

  dataclassName: ast.expr = raiseIfNone(NodeTourist(Be.arg, Then.extractIt(DOT.annotation)).captureLastMatch(ingredientsFunction.astFunctionDef))
- dataclass_Identifier: str = raiseIfNone(NodeTourist(Be.Name, Then.extractIt(DOT.id)).captureLastMatch(dataclassName))
+ dataclassIdentifier: str = raiseIfNone(NodeTourist(Be.Name, Then.extractIt(DOT.id)).captureLastMatch(dataclassName))

  dataclassLogicalPathModule = None
  for moduleWithLogicalPath, listNameTuples in ingredientsFunction.imports.dictionaryImportFrom.items():
  for nameTuple in listNameTuples:
- if nameTuple[0] == dataclass_Identifier:
+ if nameTuple[0] == dataclassIdentifier:
  dataclassLogicalPathModule = moduleWithLogicalPath
  break
  if dataclassLogicalPathModule:
  break
  if dataclassLogicalPathModule is None: raise Exception
  dataclassInstanceIdentifier = raiseIfNone(NodeTourist(Be.arg, Then.extractIt(DOT.arg)).captureLastMatch(ingredientsFunction.astFunctionDef))
- shatteredDataclass = shatter_dataclassesDOTdataclass(dataclassLogicalPathModule, dataclass_Identifier, dataclassInstanceIdentifier)
+ shatteredDataclass = shatter_dataclassesDOTdataclass(dataclassLogicalPathModule, dataclassIdentifier, dataclassInstanceIdentifier)

  # Start add the parallel state fields to the count function ================================================
- dataclassBaseFields = dataclasses.fields(importLogicalPath2Callable(dataclassLogicalPathModule, dataclass_Identifier)) # pyright: ignore [reportArgumentType]
- dataclass_IdentifierParallel = 'Parallel' + dataclass_Identifier
- dataclassFieldsParallel = dataclasses.fields(importLogicalPath2Callable(dataclassLogicalPathModule, dataclass_IdentifierParallel)) # pyright: ignore [reportArgumentType]
+ dataclassBaseFields = dataclasses.fields(importLogicalPath2Callable(dataclassLogicalPathModule, dataclassIdentifier)) # pyright: ignore [reportArgumentType]
+ dataclassIdentifierParallel = 'Parallel' + dataclassIdentifier
+ dataclassFieldsParallel = dataclasses.fields(importLogicalPath2Callable(dataclassLogicalPathModule, dataclassIdentifierParallel)) # pyright: ignore [reportArgumentType]
  onlyParallelFields = [field for field in dataclassFieldsParallel if field.name not in [fieldBase.name for fieldBase in dataclassBaseFields]]

  Official_fieldOrder: list[str] = []
  dictionaryDeReConstruction: dict[str, DeReConstructField2ast] = {}

- dataclassClassDef = extractClassDef(parseLogicalPath2astModule(dataclassLogicalPathModule), dataclass_IdentifierParallel)
- if not isinstance(dataclassClassDef, ast.ClassDef): raise ValueError(f"I could not find `{dataclass_IdentifierParallel = }` in `{dataclassLogicalPathModule = }`.")
+ dataclassClassDef = extractClassDef(parseLogicalPath2astModule(dataclassLogicalPathModule), dataclassIdentifierParallel)
+ if not isinstance(dataclassClassDef, ast.ClassDef): raise ValueError(f"I could not find `{dataclassIdentifierParallel = }` in `{dataclassLogicalPathModule = }`.")

  for aField in onlyParallelFields:
  Official_fieldOrder.append(aField.name)
@@ -193,13 +193,13 @@ def makeDaoOfMapFoldingParallel(astModule: ast.Module, moduleIdentifier: str, ca
  map_stateDOTfield2Name={**shatteredDataclass.map_stateDOTfield2Name, **{dictionaryDeReConstruction[field].ast_nameDOTname: dictionaryDeReConstruction[field].astName for field in Official_fieldOrder}},
  )
  shatteredDataclassParallel.fragments4AssignmentOrParameters = Make.Tuple(shatteredDataclassParallel.listName4Parameters, ast.Store())
- shatteredDataclassParallel.repack = Make.Assign([Make.Name(dataclassInstanceIdentifier)], value=Make.Call(Make.Name(dataclass_IdentifierParallel), list_keyword=shatteredDataclassParallel.list_keyword_field__field4init))
+ shatteredDataclassParallel.repack = Make.Assign([Make.Name(dataclassInstanceIdentifier)], value=Make.Call(Make.Name(dataclassIdentifierParallel), list_keyword=shatteredDataclassParallel.list_keyword_field__field4init))
  shatteredDataclassParallel.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclassParallel.listAnnotations))

  shatteredDataclassParallel.imports.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
- shatteredDataclassParallel.imports.addImportFrom_asStr(dataclassLogicalPathModule, dataclass_IdentifierParallel)
+ shatteredDataclassParallel.imports.addImportFrom_asStr(dataclassLogicalPathModule, dataclassIdentifierParallel)
  shatteredDataclassParallel.imports.update(shatteredDataclass.imports)
- shatteredDataclassParallel.imports.removeImportFrom(dataclassLogicalPathModule, dataclass_Identifier)
+ shatteredDataclassParallel.imports.removeImportFrom(dataclassLogicalPathModule, dataclassIdentifier)

  # End add the parallel state fields to the count function ================================================

@@ -208,7 +208,7 @@ def makeDaoOfMapFoldingParallel(astModule: ast.Module, moduleIdentifier: str, ca

  # Start add the parallel logic to the count function ================================================

- findThis = ClassIsAndAttribute.testIs(ast.While, ClassIsAndAttribute.leftIs(ast.Compare, IfThis.isName_Identifier('leafConnectee')))
+ findThis = ClassIsAndAttribute.testIs(ast.While, ClassIsAndAttribute.leftIs(ast.Compare, IfThis.isNameIdentifier('leafConnectee')))
  doThat = Then.extractIt(DOT.body)
  captureCountGapsCodeBlock: NodeTourist[ast.While, Sequence[ast.stmt]] = NodeTourist(findThis, doThat)
  countGapsCodeBlock = raiseIfNone(captureCountGapsCodeBlock.captureLastMatch(ingredientsFunction.astFunctionDef))
@@ -233,30 +233,30 @@ def makeDaoOfMapFoldingParallel(astModule: ast.Module, moduleIdentifier: str, ca
  sourceCallableIdentifier = sourceCallableDispatcherDEFAULT

  unRepackDataclass: IngredientsFunction = astModuleToIngredientsFunction(astModule, sourceCallableIdentifier)
- unRepackDataclass.astFunctionDef.name = 'unRepack' + dataclass_IdentifierParallel
+ unRepackDataclass.astFunctionDef.name = 'unRepack' + dataclassIdentifierParallel
  unRepackDataclass.imports.update(shatteredDataclassParallel.imports)
- findThis = ClassIsAndAttribute.annotationIs(ast.arg, IfThis.isName_Identifier(dataclass_Identifier)) # pyright: ignore[reportArgumentType, reportUnknownVariableType, reportCallIssue]
- doThat = Grab.annotationAttribute(Grab.idAttribute(Then.replaceWith(dataclass_IdentifierParallel))) # pyright: ignore[reportArgumentType]
+ findThis = ClassIsAndAttribute.annotationIs(ast.arg, IfThis.isNameIdentifier(dataclassIdentifier)) # pyright: ignore[reportArgumentType, reportUnknownVariableType, reportCallIssue]
+ doThat = Grab.annotationAttribute(Grab.idAttribute(Then.replaceWith(dataclassIdentifierParallel))) # pyright: ignore[reportArgumentType]
  NodeChanger(findThis, doThat).visit(unRepackDataclass.astFunctionDef) # pyright: ignore[reportUnknownArgumentType]
- unRepackDataclass.astFunctionDef.returns = Make.Name(dataclass_IdentifierParallel)
+ unRepackDataclass.astFunctionDef.returns = Make.Name(dataclassIdentifierParallel)
  targetCallableIdentifier = ingredientsFunction.astFunctionDef.name
  unRepackDataclass = unpackDataclassCallFunctionRepackDataclass(unRepackDataclass, targetCallableIdentifier, shatteredDataclassParallel)

  astTuple = raiseIfNone(NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(ingredientsFunction.astFunctionDef))
  cast(ast.Tuple, astTuple).ctx = ast.Store()
- findThis = ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier))
+ findThis = ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCallIdentifier(targetCallableIdentifier))
  doThat = Then.replaceWith(Make.Assign([astTuple], value=Make.Call(Make.Name(targetCallableIdentifier), cast(ast.Tuple, astTuple).elts)))
  changeAssignCallToTarget = NodeChanger(findThis, doThat)
  changeAssignCallToTarget.visit(unRepackDataclass.astFunctionDef)

  ingredientsDoTheNeedful: IngredientsFunction = IngredientsFunction(
  astFunctionDef = ast.FunctionDef(name='doTheNeedful'
- , args=ast.arguments(args=[ast.arg('state', annotation=ast.Name(dataclass_IdentifierParallel)), ast.arg('concurrencyLimit', annotation=ast.Name('int'))])
+ , args=ast.arguments(args=[ast.arg('state', annotation=ast.Name(dataclassIdentifierParallel)), ast.arg('concurrencyLimit', annotation=ast.Name('int'))])
  , body=[ast.Assign(targets=[ast.Name('stateParallel', ctx=ast.Store())], value=ast.Call(func=ast.Name('deepcopy'), args=[ast.Name('state')]))
- , ast.AnnAssign(target=ast.Name('listStatesParallel', ctx=ast.Store()), annotation=ast.Subscript(value=ast.Name('list'), slice=ast.Name(dataclass_IdentifierParallel)), value=ast.BinOp(left=ast.List(elts=[ast.Name('stateParallel')]), op=ast.Mult(), right=ast.Attribute(value=ast.Name('stateParallel'), attr='taskDivisions')), simple=1)
+ , ast.AnnAssign(target=ast.Name('listStatesParallel', ctx=ast.Store()), annotation=ast.Subscript(value=ast.Name('list'), slice=ast.Name(dataclassIdentifierParallel)), value=ast.BinOp(left=ast.List(elts=[ast.Name('stateParallel')]), op=ast.Mult(), right=ast.Attribute(value=ast.Name('stateParallel'), attr='taskDivisions')), simple=1)
  , ast.AnnAssign(target=ast.Name('groupsOfFoldsTotal', ctx=ast.Store()), annotation=ast.Name('int'), value=ast.Constant(value=0), simple=1)

- , ast.AnnAssign(target=ast.Name('dictionaryConcurrency', ctx=ast.Store()), annotation=ast.Subscript(value=ast.Name('dict'), slice=ast.Tuple(elts=[ast.Name('int'), ast.Subscript(value=ast.Name('ConcurrentFuture'), slice=ast.Name(dataclass_IdentifierParallel))])), value=ast.Dict(), simple=1)
+ , ast.AnnAssign(target=ast.Name('dictionaryConcurrency', ctx=ast.Store()), annotation=ast.Subscript(value=ast.Name('dict'), slice=ast.Tuple(elts=[ast.Name('int'), ast.Subscript(value=ast.Name('ConcurrentFuture'), slice=ast.Name(dataclassIdentifierParallel))])), value=ast.Dict(), simple=1)
  , ast.With(items=[ast.withitem(context_expr=ast.Call(func=ast.Name('ProcessPoolExecutor'), args=[ast.Name('concurrencyLimit')]), optional_vars=ast.Name('concurrencyManager', ctx=ast.Store()))]
  , body=[ast.For(target=ast.Name('indexSherpa', ctx=ast.Store()), iter=ast.Call(func=ast.Name('range'), args=[ast.Attribute(value=ast.Name('stateParallel'), attr='taskDivisions')])
  , body=[ast.Assign(targets=[ast.Name('state', ctx=ast.Store())], value=ast.Call(func=ast.Name('deepcopy'), args=[ast.Name('stateParallel')]))
@@ -268,7 +268,7 @@ def makeDaoOfMapFoldingParallel(astModule: ast.Module, moduleIdentifier: str, ca

  , ast.AnnAssign(target=ast.Name('foldsTotal', ctx=ast.Store()), annotation=ast.Name('int'), value=ast.BinOp(left=ast.Name('groupsOfFoldsTotal'), op=ast.Mult(), right=ast.Attribute(value=ast.Name('stateParallel'), attr='leavesTotal')), simple=1)
  , ast.Return(value=ast.Tuple(elts=[ast.Name('foldsTotal'), ast.Name('listStatesParallel')]))]
- , returns=ast.Subscript(value=ast.Name('tuple'), slice=ast.Tuple(elts=[ast.Name('int'), ast.Subscript(value=ast.Name('list'), slice=ast.Name(dataclass_IdentifierParallel))])))
+ , returns=ast.Subscript(value=ast.Name('tuple'), slice=ast.Tuple(elts=[ast.Name('int'), ast.Subscript(value=ast.Name('list'), slice=ast.Name(dataclassIdentifierParallel))])))
  , imports = LedgerOfImports(Make.Module([ast.ImportFrom(module='concurrent.futures', names=[ast.alias(name='Future', asname='ConcurrentFuture'), ast.alias(name='ProcessPoolExecutor')], level=0),
  ast.ImportFrom(module='copy', names=[ast.alias(name='deepcopy')], level=0),
  ast.ImportFrom(module='multiprocessing', names=[ast.alias(name='set_start_method', asname='multiprocessing_set_start_method')], level=0),])
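
For orientation, the `ast.FunctionDef` assembled above corresponds roughly to the generated dispatcher sketched below. This is a hand transcription of the node constructors shown in these hunks, not the synthesized module itself: `ParallelState` is a placeholder for whatever class `dataclassIdentifierParallel` names, and the per-task submission and result-collection code that falls between the hunks is elided with `...`.

from concurrent.futures import Future as ConcurrentFuture, ProcessPoolExecutor
from copy import deepcopy

# Hand-transcribed approximation of the synthesized doTheNeedful; ParallelState is a
# placeholder, and the elided loop body (task submission, result collection) is marked with ...
def doTheNeedful(state: "ParallelState", concurrencyLimit: int) -> "tuple[int, list[ParallelState]]":
    stateParallel = deepcopy(state)
    listStatesParallel: "list[ParallelState]" = [stateParallel] * stateParallel.taskDivisions
    groupsOfFoldsTotal: int = 0
    dictionaryConcurrency: "dict[int, ConcurrentFuture[ParallelState]]" = {}
    with ProcessPoolExecutor(concurrencyLimit) as concurrencyManager:
        for indexSherpa in range(stateParallel.taskDivisions):
            state = deepcopy(stateParallel)
            ...  # per-task setup, concurrencyManager.submit(...), and result collection (not shown in these hunks)
    foldsTotal: int = groupsOfFoldsTotal * stateParallel.leavesTotal
    return foldsTotal, listStatesParallel
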
@@ -339,7 +339,7 @@ def trimTheorem2(astModule: ast.Module, moduleIdentifier: str, callableIdentifie

  dataclassInstanceIdentifier = raiseIfNone(NodeTourist(Be.arg, Then.extractIt(DOT.arg)).captureLastMatch(ingredientsFunction.astFunctionDef))

- findThis = IfThis.isIfUnaryNotAttributeNamespace_Identifier(dataclassInstanceIdentifier, 'dimensionsUnconstrained')
+ findThis = IfThis.isIfUnaryNotAttributeNamespaceIdentifier(dataclassInstanceIdentifier, 'dimensionsUnconstrained')
  doThat = Then.removeIt
  NodeChanger(findThis, doThat).visit(ingredientsFunction.astFunctionDef)

@@ -399,7 +399,7 @@ def makeUnRePackDataclass(astImportFrom: ast.ImportFrom) -> None:
  astTuple = raiseIfNone(NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(targetFunctionDef))
  cast(ast.Tuple, astTuple).ctx = ast.Store()

- findThis = ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier))
+ findThis = ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCallIdentifier(targetCallableIdentifier))
  doThat = Then.replaceWith(Make.Assign([astTuple], value=Make.Call(Make.Name(targetCallableIdentifier), cast(ast.Tuple, astTuple).elts)))
  NodeChanger(findThis, doThat).visit(ingredientsFunction.astFunctionDef)

mapFolding/someAssemblyRequired/_toolIfThis.py

@@ -38,7 +38,7 @@ class IfThis(astToolkit_IfThis):
  @staticmethod
  def isAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.Compare] | bool]:
  return lambda node: (Be.Compare(node)
- and IfThis.isAttributeNamespace_Identifier(namespace, identifier)(DOT.left(node))
+ and IfThis.isAttributeNamespaceIdentifier(namespace, identifier)(DOT.left(node))
  and Be.Gt(node.ops[0])
  and IfThis.isConstant_value(0)(node.comparators[0]))
  @staticmethod
@@ -54,5 +54,5 @@ class IfThis(astToolkit_IfThis):
  @staticmethod
  def isAttributeNamespaceIdentifierLessThanOrEqual0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.Compare] | bool]:
  return lambda node: (Be.Compare(node)
- and IfThis.isAttributeNamespace_Identifier(namespace, identifier)(DOT.left(node))
+ and IfThis.isAttributeNamespaceIdentifier(namespace, identifier)(DOT.left(node))
  and Be.LtE(node.ops[0]))
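
The two predicates above only swap `isAttributeNamespace_Identifier` for `isAttributeNamespaceIdentifier`; their behaviour is unchanged. In plain standard-library terms, a check such as `isAttributeNamespaceIdentifierGreaterThan0('state', 'dimensionsUnconstrained')` amounts to the sketch below; this mirrors the lambda's logic with `isinstance` checks and is not the `_toolIfThis`/astToolkit implementation.

import ast
from typing import Callable

# Plain-ast sketch of the shape the predicate above matches: `namespace.identifier > 0`.
def attributeGreaterThan0Predicate(namespace: str, identifier: str) -> Callable[[ast.AST], bool]:
    def predicate(node: ast.AST) -> bool:
        return (isinstance(node, ast.Compare)
            and isinstance(node.left, ast.Attribute)
            and isinstance(node.left.value, ast.Name)
            and node.left.value.id == namespace          # e.g. the dataclass instance, `state`
            and node.left.attr == identifier             # e.g. `dimensionsUnconstrained`
            and isinstance(node.ops[0], ast.Gt)
            and isinstance(node.comparators[0], ast.Constant)
            and node.comparators[0].value == 0)
    return predicate

expression = ast.parse("state.dimensionsUnconstrained > 0", mode="eval").body
print(attributeGreaterThan0Predicate("state", "dimensionsUnconstrained")(expression))  # True
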
mapFolding/someAssemblyRequired/_toolkitContainers.py

@@ -93,7 +93,7 @@ class DeReConstructField2ast:
  """
  dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[str_nameDOTname]
  dataclassClassDef: dataclasses.InitVar[ast.ClassDef]
- dataclassesDOTdataclassInstance_Identifier: dataclasses.InitVar[str]
+ dataclassesDOTdataclassInstanceIdentifier: dataclasses.InitVar[str]
  field: dataclasses.InitVar[dataclasses.Field[Any]]

  ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
@@ -117,7 +117,7 @@ class DeReConstructField2ast:
  astAnnAssignConstructor: ast.AnnAssign|ast.Assign = dataclasses.field(init=False)
  Z0Z_hack: tuple[ast.AnnAssign|ast.Assign, str] = dataclasses.field(init=False)

- def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstance_Identifier: str, field: dataclasses.Field[Any]) -> None:
+ def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstanceIdentifier: str, field: dataclasses.Field[Any]) -> None:
  self.compare = field.compare
  self.default = field.default if field.default is not dataclasses.MISSING else None
  self.default_factory = field.default_factory if field.default_factory is not dataclasses.MISSING else None
@@ -131,10 +131,10 @@ class DeReConstructField2ast:

  self.astName = Make.Name(self.name)
  self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
- self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)
+ self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstanceIdentifier), self.name)

  sherpa = NodeTourist( # pyright: ignore[reportUnknownVariableType]
- findThis=ClassIsAndAttribute.targetIs(ast.AnnAssign, IfThis.isName_Identifier(self.name))
+ findThis=ClassIsAndAttribute.targetIs(ast.AnnAssign, IfThis.isNameIdentifier(self.name))
  , doThat=Then.extractIt(DOT.annotation) # pyright: ignore[reportArgumentType]
  ).captureLastMatch(dataclassClassDef)

mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py

@@ -23,15 +23,15 @@ from Z0Z_tools import autoDecodingRLE
  import ast
  """Synthesize one file to compute `foldsTotal` of `mapShape`."""

- list_IdentifiersNotUsedAllHARDCODED = ['concurrencyLimit', 'foldsTotal', 'mapShape',]
- list_IdentifiersNotUsedParallelSequentialHARDCODED = ['indexLeaf']
- list_IdentifiersNotUsedSequentialHARDCODED = ['foldGroups', 'taskDivisions', 'taskIndex',]
+ listIdentifiersNotUsedAllHARDCODED = ['concurrencyLimit', 'foldsTotal', 'mapShape',]
+ listIdentifiersNotUsedParallelSequentialHARDCODED = ['indexLeaf']
+ listIdentifiersNotUsedSequentialHARDCODED = ['foldGroups', 'taskDivisions', 'taskIndex',]

- list_IdentifiersReplacedHARDCODED = ['groupsOfFolds',]
+ listIdentifiersReplacedHARDCODED = ['groupsOfFolds',]

- list_IdentifiersStaticValuesHARDCODED = ['dimensionsTotal', 'leavesTotal',]
+ listIdentifiersStaticValuesHARDCODED = ['dimensionsTotal', 'leavesTotal',]

- list_IdentifiersNotUsedHARDCODED = list_IdentifiersStaticValuesHARDCODED + list_IdentifiersReplacedHARDCODED + list_IdentifiersNotUsedAllHARDCODED + list_IdentifiersNotUsedParallelSequentialHARDCODED + list_IdentifiersNotUsedSequentialHARDCODED
+ listIdentifiersNotUsedHARDCODED = listIdentifiersStaticValuesHARDCODED + listIdentifiersReplacedHARDCODED + listIdentifiersNotUsedAllHARDCODED + listIdentifiersNotUsedParallelSequentialHARDCODED + listIdentifiersNotUsedSequentialHARDCODED

  def addLauncherNumbaProgress(ingredientsModule: IngredientsModule, ingredientsFunction: IngredientsFunction, job: RecipeJobTheorem2Numba, spices: SpicesJobNumba) -> tuple[IngredientsModule, IngredientsFunction]:
  """
@@ -76,7 +76,7 @@ if __name__ == '__main__':
  ast_argNumbaProgress = ast.arg(arg=spices.numbaProgressBarIdentifier, annotation=ast.Name(id=numba_progressPythonClass, ctx=ast.Load()))
  ingredientsFunction.astFunctionDef.args.args.append(ast_argNumbaProgress)

- findThis = ClassIsAndAttribute.targetIs(ast.AugAssign, IfThis.isName_Identifier(job.shatteredDataclass.countingVariableName.id))
+ findThis = ClassIsAndAttribute.targetIs(ast.AugAssign, IfThis.isNameIdentifier(job.shatteredDataclass.countingVariableName.id))
  doThat = Then.replaceWith(Make.Expr(Make.Call(Make.Attribute(Make.Name(spices.numbaProgressBarIdentifier),'update'),[Make.Constant(1)])))
  countWithProgressBar = NodeChanger(findThis, doThat)
  countWithProgressBar.visit(ingredientsFunction.astFunctionDef)
@@ -119,12 +119,12 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre
  list_arg_arg: list[str] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
  listName: list[ast.Name] = []
  NodeTourist(Be.Name, Then.appendTo(listName)).visit(ingredientsFunction.astFunctionDef)
- list_Identifiers: list[str] = [astName.id for astName in listName]
- list_IdentifiersNotUsed: list[str] = list(set(list_arg_arg) - set(list_Identifiers))
+ listIdentifiers: list[str] = [astName.id for astName in listName]
+ listIdentifiersNotUsed: list[str] = list(set(list_arg_arg) - set(listIdentifiers))

  for ast_arg in list_argCuzMyBrainRefusesToThink:
  if ast_arg.arg in job.shatteredDataclass.field2AnnAssign:
- if ast_arg.arg in list_IdentifiersNotUsed:
+ if ast_arg.arg in listIdentifiersNotUsed:
  pass
  else:
  ImaAnnAssign, elementConstructor = job.shatteredDataclass.Z0Z_field2AnnAssign[ast_arg.arg]
@@ -146,7 +146,7 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre

  ingredientsFunction.astFunctionDef.body.insert(0, ImaAnnAssign)

- findThis = IfThis.is_arg_Identifier(ast_arg.arg)
+ findThis = IfThis.is_argIdentifier(ast_arg.arg)
  remove_arg = NodeChanger(findThis, Then.removeIt)
  remove_arg.visit(ingredientsFunction.astFunctionDef)

@@ -160,16 +160,16 @@ def makeJobNumba(job: RecipeJobTheorem2Numba, spices: SpicesJobNumba) -> None:
  ingredientsCount: IngredientsFunction = IngredientsFunction(astFunctionDef, LedgerOfImports())

  # Remove `foldGroups` and any other unused statements, so you can dynamically determine which variables are not used
- findThis = ClassIsAndAttribute.targetsIs(ast.Assign, lambda list_expr: any([IfThis.isSubscript_Identifier('foldGroups')(node) for node in list_expr ]))
- # findThis = IfThis.isAssignAndTargets0Is(IfThis.isSubscript_Identifier('foldGroups'))
+ findThis = ClassIsAndAttribute.targetsIs(ast.Assign, lambda list_expr: any([IfThis.isSubscriptIdentifier('foldGroups')(node) for node in list_expr ]))
+ # findThis = IfThis.isAssignAndTargets0Is(IfThis.isSubscriptIdentifier('foldGroups'))
  doThat = Then.removeIt
  remove_foldGroups = NodeChanger(findThis, doThat)
  # remove_foldGroups.visit(ingredientsCount.astFunctionDef)

  # replace identifiers with static values with their values, so you can dynamically determine which variables are not used
- list_IdentifiersStaticValues = list_IdentifiersStaticValuesHARDCODED
- for identifier in list_IdentifiersStaticValues:
- findThis = IfThis.isName_Identifier(identifier)
+ listIdentifiersStaticValues = listIdentifiersStaticValuesHARDCODED
+ for identifier in listIdentifiersStaticValues:
+ findThis = IfThis.isNameIdentifier(identifier)
  doThat = Then.replaceWith(Make.Constant(int(job.state.__dict__[identifier])))
  NodeChanger(findThis, doThat).visit(ingredientsCount.astFunctionDef)

mapFolding/someAssemblyRequired/transformationTools.py

@@ -39,7 +39,7 @@ from Z0Z_tools import importLogicalPath2Callable
  import ast
  import dataclasses

- def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclass_Identifier: str, instance_Identifier: str) -> ShatteredDataclass:
+ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclassIdentifier: str, instanceIdentifier: str) -> ShatteredDataclass:
  """
  Decompose a dataclass definition into AST components for manipulation and code generation.

@@ -59,8 +59,8 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas

  Parameters:
  logicalPathModule: The fully qualified module path containing the dataclass definition.
- dataclass_Identifier: The name of the dataclass to decompose.
- instance_Identifier: The variable name to use for the dataclass instance in generated code.
+ dataclassIdentifier: The name of the dataclass to decompose.
+ instanceIdentifier: The variable name to use for the dataclass instance in generated code.

  Returns:
  shatteredDataclass: A ShatteredDataclass containing AST representations of all dataclass components,
@@ -72,19 +72,19 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
  Official_fieldOrder: list[str] = []
  dictionaryDeReConstruction: dict[str, DeReConstructField2ast] = {}

- dataclassClassDef = extractClassDef(parseLogicalPath2astModule(logicalPathModule), dataclass_Identifier)
- if not isinstance(dataclassClassDef, ast.ClassDef): raise ValueError(f"I could not find `{dataclass_Identifier = }` in `{logicalPathModule = }`.")
+ dataclassClassDef = extractClassDef(parseLogicalPath2astModule(logicalPathModule), dataclassIdentifier)
+ if not isinstance(dataclassClassDef, ast.ClassDef): raise ValueError(f"I could not find `{dataclassIdentifier = }` in `{logicalPathModule = }`.")

  countingVariable = None
- for aField in dataclasses.fields(importLogicalPath2Callable(logicalPathModule, dataclass_Identifier)): # pyright: ignore [reportArgumentType]
+ for aField in dataclasses.fields(importLogicalPath2Callable(logicalPathModule, dataclassIdentifier)): # pyright: ignore [reportArgumentType]
  Official_fieldOrder.append(aField.name)
- dictionaryDeReConstruction[aField.name] = DeReConstructField2ast(logicalPathModule, dataclassClassDef, instance_Identifier, aField)
+ dictionaryDeReConstruction[aField.name] = DeReConstructField2ast(logicalPathModule, dataclassClassDef, instanceIdentifier, aField)
  if aField.metadata.get('theCountingIdentifier', False):
  countingVariable = dictionaryDeReConstruction[aField.name].name

  if countingVariable is None:
  import warnings
- warnings.warn(message=f"I could not find the counting variable in `{dataclass_Identifier = }` in `{logicalPathModule = }`.", category=UserWarning)
+ warnings.warn(message=f"I could not find the counting variable in `{dataclassIdentifier = }` in `{logicalPathModule = }`.", category=UserWarning)
  raise Exception

  shatteredDataclass = ShatteredDataclass(
@@ -100,11 +100,11 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
  map_stateDOTfield2Name={dictionaryDeReConstruction[field].ast_nameDOTname: dictionaryDeReConstruction[field].astName for field in Official_fieldOrder},
  )
  shatteredDataclass.fragments4AssignmentOrParameters = Make.Tuple(shatteredDataclass.listName4Parameters, ast.Store())
- shatteredDataclass.repack = Make.Assign([Make.Name(instance_Identifier)], value=Make.Call(Make.Name(dataclass_Identifier), list_keyword=shatteredDataclass.list_keyword_field__field4init))
+ shatteredDataclass.repack = Make.Assign([Make.Name(instanceIdentifier)], value=Make.Call(Make.Name(dataclassIdentifier), list_keyword=shatteredDataclass.list_keyword_field__field4init))
  shatteredDataclass.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclass.listAnnotations))

  shatteredDataclass.imports.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
- shatteredDataclass.imports.addImportFrom_asStr(logicalPathModule, dataclass_Identifier)
+ shatteredDataclass.imports.addImportFrom_asStr(logicalPathModule, dataclassIdentifier)

  return shatteredDataclass

@@ -118,9 +118,9 @@ def removeDataclassFromFunction(ingredientsTarget: IngredientsFunction, shattere

  def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFunction, targetCallableIdentifier: str, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
  astCallTargetCallable = Make.Call(Make.Name(targetCallableIdentifier), shatteredDataclass.listName4Parameters)
- replaceAssignTargetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign([shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
- unpack4targetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
- repack4targetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisBelow([shatteredDataclass.repack]))
+ replaceAssignTargetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCallIdentifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign([shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
+ unpack4targetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCallIdentifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
+ repack4targetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCallIdentifier(targetCallableIdentifier)), Then.insertThisBelow([shatteredDataclass.repack]))
  replaceAssignTargetCallable.visit(ingredientsCaller.astFunctionDef)
  unpack4targetCallable.visit(ingredientsCaller.astFunctionDef)
  repack4targetCallable.visit(ingredientsCaller.astFunctionDef)
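
The three NodeChanger passes above only pick up the renamed `isCallIdentifier` predicate; the rewrite they perform is unchanged. As a purely hypothetical before/after illustration (the dataclass, its two fields, and the `count`/`countDataclass` callables below are made-up stand-ins, not names from the package, and in the real transform the call keeps the same identifier), the caller rewrite has roughly this shape:

from dataclasses import dataclass

@dataclass
class MapFoldingState:                      # hypothetical stand-in for the real state dataclass
    groupsOfFolds: int
    leavesTotal: int

def countDataclass(state: MapFoldingState) -> MapFoldingState:
    return MapFoldingState(state.groupsOfFolds + 1, state.leavesTotal)   # hypothetical dataclass-taking callable

def count(groupsOfFolds: int, leavesTotal: int) -> tuple[int, int]:
    return groupsOfFolds + 1, leavesTotal   # hypothetical callable taking the shattered fields

def callerBefore(state: MapFoldingState) -> MapFoldingState:
    state = countDataclass(state)           # the Assign whose value IfThis.isCallIdentifier(...) matches
    return state

def callerAfter(state: MapFoldingState) -> MapFoldingState:
    groupsOfFolds = state.groupsOfFolds     # inserted above the call: Then.insertThisAbove(listUnpack)
    leavesTotal = state.leavesTotal
    (groupsOfFolds, leavesTotal) = count(groupsOfFolds, leavesTotal)     # Then.replaceWith(...)
    state = MapFoldingState(groupsOfFolds=groupsOfFolds, leavesTotal=leavesTotal)  # repack, inserted below
    return state
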
mapFolding.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mapFolding
- Version: 0.12.0
+ Version: 0.12.1
  Summary: Map folding algorithm with code transformation framework for optimizing numerical computations
  Author-email: Hunter Hogan <HunterHogan@pm.me>
  License: CC-BY-NC-4.0
@@ -29,7 +29,7 @@ Classifier: Typing :: Typed
  Requires-Python: >=3.12
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: astToolkit
+ Requires-Dist: astToolkit>=0.3.0
  Requires-Dist: autoflake
  Requires-Dist: numba_progress
  Requires-Dist: numba

mapFolding.egg-info/requires.txt

@@ -1,4 +1,4 @@
- astToolkit
+ astToolkit>=0.3.0
  autoflake
  numba_progress
  numba

pyproject.toml

@@ -23,7 +23,7 @@ classifiers = [
  "Topic :: Software Development :: Compilers",
  "Typing :: Typed",]
  dependencies = [
- "astToolkit",
+ "astToolkit>=0.3.0",
  "autoflake",
  "numba_progress",
  "numba",
@@ -70,7 +70,7 @@ readme = { file = "README.md", content-type = "text/markdown" }
  requires-python = ">=3.12"
  scripts = { getOEISids = "mapFolding.oeis:getOEISids", clearOEIScache = "mapFolding.oeis:clearOEIScache", OEIS_for_n = "mapFolding.oeis:OEIS_for_n" }
  urls = { Donate = "https://www.patreon.com/integrated", Homepage = "https://github.com/hunterhogan/mapFolding", Repository = "https://github.com/hunterhogan/mapFolding.git", Issues = "https://github.com/hunterhogan/mapFolding/issues"}
- version = "0.12.0"
+ version = "0.12.1"

  [tool.coverage]
  report = { exclude_lines = [