mapFolding 0.8.4__py3-none-any.whl → 0.8.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. mapFolding/__init__.py +10 -6
  2. mapFolding/basecamp.py +3 -3
  3. mapFolding/beDRY.py +241 -68
  4. mapFolding/oeis.py +41 -26
  5. mapFolding/reference/hunterNumba.py +1 -1
  6. mapFolding/someAssemblyRequired/__init__.py +16 -15
  7. mapFolding/someAssemblyRequired/_theTypes.py +31 -13
  8. mapFolding/someAssemblyRequired/_tool_Make.py +13 -5
  9. mapFolding/someAssemblyRequired/_tool_Then.py +12 -5
  10. mapFolding/someAssemblyRequired/_toolboxAntecedents.py +131 -99
  11. mapFolding/someAssemblyRequired/_toolboxContainers.py +92 -15
  12. mapFolding/someAssemblyRequired/_toolboxPython.py +17 -31
  13. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +2 -2
  14. mapFolding/someAssemblyRequired/newInliner.py +22 -0
  15. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +65 -116
  16. mapFolding/someAssemblyRequired/toolboxNumba.py +364 -0
  17. mapFolding/someAssemblyRequired/transformationTools.py +262 -41
  18. mapFolding/syntheticModules/numbaCount_doTheNeedful.py +0 -1
  19. mapFolding/theSSOT.py +30 -33
  20. mapFolding/{filesystem.py → toolboxFilesystem.py} +90 -25
  21. {mapfolding-0.8.4.dist-info → mapfolding-0.8.6.dist-info}/METADATA +3 -2
  22. mapfolding-0.8.6.dist-info/RECORD +47 -0
  23. tests/conftest.py +30 -31
  24. tests/test_computations.py +8 -7
  25. tests/test_filesystem.py +2 -2
  26. tests/test_other.py +2 -2
  27. tests/test_tasks.py +3 -3
  28. mapFolding/noHomeYet.py +0 -32
  29. mapFolding/someAssemblyRequired/ingredientsNumba.py +0 -199
  30. mapFolding/someAssemblyRequired/synthesizeNumbaFlow.py +0 -156
  31. mapFolding/someAssemblyRequired/transformDataStructures.py +0 -235
  32. mapfolding-0.8.4.dist-info/RECORD +0 -49
  33. {mapfolding-0.8.4.dist-info → mapfolding-0.8.6.dist-info}/WHEEL +0 -0
  34. {mapfolding-0.8.4.dist-info → mapfolding-0.8.6.dist-info}/entry_points.txt +0 -0
  35. {mapfolding-0.8.4.dist-info → mapfolding-0.8.6.dist-info}/licenses/LICENSE +0 -0
  36. {mapfolding-0.8.4.dist-info → mapfolding-0.8.6.dist-info}/top_level.txt +0 -0
@@ -25,14 +25,40 @@ transformation scenarios beyond the scope of this package.
25
25
  from autoflake import fix_code as autoflake_fix_code
26
26
  from collections.abc import Callable, Mapping
27
27
  from copy import deepcopy
28
- from mapFolding.filesystem import writeStringToHere
29
- from mapFolding.someAssemblyRequired import ast_Identifier, be, ifThis, Make, NodeChanger, NodeTourist, Then, typeCertified
30
- from mapFolding.someAssemblyRequired._toolboxContainers import IngredientsModule
31
- from mapFolding.theSSOT import raiseIfNoneGitHubIssueNumber3
28
+ from mapFolding.beDRY import outfitCountFolds
29
+ from mapFolding.toolboxFilesystem import getPathFilenameFoldsTotal, writeStringToHere
30
+ from mapFolding.someAssemblyRequired import (
31
+ ast_Identifier,
32
+ be,
33
+ DOT,
34
+ ifThis,
35
+ ImaAnnotationType,
36
+ importLogicalPath2Callable,
37
+ IngredientsFunction,
38
+ IngredientsModule,
39
+ LedgerOfImports,
40
+ Make,
41
+ NodeChanger,
42
+ NodeTourist,
43
+ parseLogicalPath2astModule,
44
+ ShatteredDataclass,
45
+ str_nameDOTname,
46
+ Then,
47
+ TypeCertified,
48
+ 又,
49
+ )
50
+ from mapFolding.theSSOT import ComputationState, The, raiseIfNoneGitHubIssueNumber3
32
51
  from os import PathLike
33
- from pathlib import PurePath
34
- from typing import Any
52
+ from pathlib import Path, PurePath
53
+ from typing import Any, Literal, overload
35
54
  import ast
55
+ import dataclasses
56
+ import pickle
57
+
58
+ def astModuleToIngredientsFunction(astModule: ast.AST, identifierFunctionDef: ast_Identifier) -> IngredientsFunction:
59
+ astFunctionDef = extractFunctionDef(astModule, identifierFunctionDef)
60
+ if not astFunctionDef: raise raiseIfNoneGitHubIssueNumber3
61
+ return IngredientsFunction(astFunctionDef, LedgerOfImports(astModule))
36
62
 
37
63
  def extractClassDef(module: ast.AST, identifier: ast_Identifier) -> ast.ClassDef | None:
38
64
  return NodeTourist(ifThis.isClassDef_Identifier(identifier), Then.getIt).captureLastMatch(module)
@@ -40,6 +66,229 @@ def extractClassDef(module: ast.AST, identifier: ast_Identifier) -> ast.ClassDef
40
66
  def extractFunctionDef(module: ast.AST, identifier: ast_Identifier) -> ast.FunctionDef | None:
41
67
  return NodeTourist(ifThis.isFunctionDef_Identifier(identifier), Then.getIt).captureLastMatch(module)
42
68
 
69
+ def makeDictionaryFunctionDef(module: ast.AST) -> dict[ast_Identifier, ast.FunctionDef]:
70
+ dictionaryIdentifier2FunctionDef: dict[ast_Identifier, ast.FunctionDef] = {}
71
+ NodeTourist(be.FunctionDef, Then.updateKeyValueIn(DOT.name, Then.getIt, dictionaryIdentifier2FunctionDef)).visit(module)
72
+ return dictionaryIdentifier2FunctionDef
73
+
74
+ def makeDictionary4InliningFunction(identifierToInline: ast_Identifier, dictionaryFunctionDef: dict[ast_Identifier, ast.FunctionDef], FunctionDefToInline: ast.FunctionDef | None = None) -> dict[str, ast.FunctionDef]:
75
+ """
76
+ Creates a dictionary of function definitions required for inlining a target function.
77
+ This function analyzes a target function and recursively collects all function definitions
78
+ that are called within it (and any functions called by those functions), preparing them for inlining.
79
+ Parameters:
80
+ ----------
81
+ identifierToInline : ast_Identifier
82
+ The identifier of the function to be inlined.
83
+ dictionaryFunctionDef : dict[ast_Identifier, ast.FunctionDef]
84
+ A dictionary mapping function identifiers to their AST function definitions.
85
+ FunctionDefToInline : ast.FunctionDef | None, optional
86
+ The AST function definition to inline. If None, it will be retrieved from dictionaryFunctionDef using identifierToInline.
87
+ Returns:
88
+ -------
89
+ dict[str, ast.FunctionDef]
90
+ A dictionary mapping function names to their AST function definitions, containing all functions needed for inlining.
91
+ Raises:
92
+ ------
93
+ ValueError
94
+ If the function to inline is not found in the dictionary, or if recursion is detected during analysis.
95
+ Notes:
96
+ -----
97
+ The function performs a recursive analysis to find all dependent functions needed for inlining.
98
+ It detects and prevents recursive function calls that could cause infinite inlining.
99
+ """
100
+ if FunctionDefToInline is None:
101
+ try:
102
+ FunctionDefToInline = dictionaryFunctionDef[identifierToInline]
103
+ except KeyError as ERRORmessage:
104
+ raise ValueError(f"FunctionDefToInline not found in dictionaryIdentifier2FunctionDef: {identifierToInline = }") from ERRORmessage
105
+
106
+ listIdentifiersCalledFunctions: list[ast_Identifier] = []
107
+ findIdentifiersToInline = NodeTourist(ifThis.isCallToName, lambda node: Then.appendTo(listIdentifiersCalledFunctions)(DOT.id(DOT.func(node)))) # pyright: ignore[reportArgumentType]
108
+ findIdentifiersToInline.visit(FunctionDefToInline)
109
+
110
+ dictionary4Inlining: dict[ast_Identifier, ast.FunctionDef] = {}
111
+ for identifier in sorted(set(listIdentifiersCalledFunctions).intersection(dictionaryFunctionDef.keys())):
112
+ dictionary4Inlining[identifier] = dictionaryFunctionDef[identifier]
113
+
114
+ keepGoing = True
115
+ while keepGoing:
116
+ keepGoing = False
117
+ listIdentifiersCalledFunctions.clear()
118
+ findIdentifiersToInline.visit(Make.Module(list(dictionary4Inlining.values())))
119
+
120
+ # NOTE: This is simple not comprehensive recursion protection. # TODO think about why I dislike `ifThis.CallDoesNotCallItself`
121
+ if identifierToInline in listIdentifiersCalledFunctions: raise ValueError(f"Recursion found: {identifierToInline = }.")
122
+
123
+ listIdentifiersCalledFunctions = sorted((set(listIdentifiersCalledFunctions).difference(dictionary4Inlining.keys())).intersection(dictionaryFunctionDef.keys()))
124
+ if len(listIdentifiersCalledFunctions) > 0:
125
+ keepGoing = True
126
+ for identifier in listIdentifiersCalledFunctions:
127
+ if identifier in dictionaryFunctionDef:
128
+ dictionary4Inlining[identifier] = dictionaryFunctionDef[identifier]
129
+
130
+ return dictionary4Inlining
131
+
132
+ @overload
133
+ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: Literal[True], *, pathFilename: PathLike[str] | PurePath | None = None, **keywordArguments: Any) -> Path: ...
134
+ @overload
135
+ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: Literal[False] = False, **keywordArguments: Any) -> ComputationState: ...
136
+ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: bool = False, *, pathFilename: PathLike[str] | PurePath | None = None, **keywordArguments: Any) -> ComputationState | Path:
137
+ """
138
+ Initializes a computation state and optionally saves it to disk.
139
+
140
+ This function initializes a computation state using the source algorithm.
141
+
142
+ Hint: If you want an uninitialized state, call `outfitCountFolds` directly.
143
+
144
+ Parameters:
145
+ mapShape: List of integers representing the dimensions of the map to be folded.
146
+ writeJob (False): Whether to save the state to disk.
147
+ pathFilename (getPathFilenameFoldsTotal.pkl): The path and filename to save the state. If None, uses a default path.
148
+ **keywordArguments: computationDivisions:int|str|None=None,concurrencyLimit:int=1.
149
+ Returns:
150
+ stateUniversal|pathFilenameJob: The computation state for the map folding calculations, or
151
+ the path to the saved state file if writeJob is True.
152
+ """
153
+ stateUniversal: ComputationState = outfitCountFolds(mapShape, **keywordArguments)
154
+
155
+ initializeState = importLogicalPath2Callable(The.logicalPathModuleSourceAlgorithm, The.sourceCallableInitialize)
156
+ stateUniversal = initializeState(stateUniversal)
157
+
158
+ if not writeJob:
159
+ return stateUniversal
160
+
161
+ if pathFilename:
162
+ pathFilenameJob = Path(pathFilename)
163
+ pathFilenameJob.parent.mkdir(parents=True, exist_ok=True)
164
+ else:
165
+ pathFilenameJob = getPathFilenameFoldsTotal(stateUniversal.mapShape).with_suffix('.pkl')
166
+
167
+ pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
168
+ return pathFilenameJob
169
+
170
+ @dataclasses.dataclass
171
+ class DeReConstructField2ast:
172
+ dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[str_nameDOTname]
173
+ dataclassClassDef: dataclasses.InitVar[ast.ClassDef]
174
+ dataclassesDOTdataclassInstance_Identifier: dataclasses.InitVar[ast_Identifier]
175
+ field: dataclasses.InitVar[dataclasses.Field[Any]]
176
+
177
+ ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
178
+
179
+ name: ast_Identifier = dataclasses.field(init=False)
180
+ typeBuffalo: type[Any] | str | Any = dataclasses.field(init=False)
181
+ default: Any | None = dataclasses.field(init=False)
182
+ default_factory: Callable[..., Any] | None = dataclasses.field(init=False)
183
+ repr: bool = dataclasses.field(init=False)
184
+ hash: bool | None = dataclasses.field(init=False)
185
+ init: bool = dataclasses.field(init=False)
186
+ compare: bool = dataclasses.field(init=False)
187
+ metadata: dict[Any, Any] = dataclasses.field(init=False)
188
+ kw_only: bool = dataclasses.field(init=False)
189
+
190
+ astName: ast.Name = dataclasses.field(init=False)
191
+ ast_keyword_field__field: ast.keyword = dataclasses.field(init=False)
192
+ ast_nameDOTname: ast.Attribute = dataclasses.field(init=False)
193
+ astAnnotation: ImaAnnotationType = dataclasses.field(init=False)
194
+ ast_argAnnotated: ast.arg = dataclasses.field(init=False)
195
+ astAnnAssignConstructor: ast.AnnAssign = dataclasses.field(init=False)
196
+ Z0Z_hack: tuple[ast.AnnAssign, str] = dataclasses.field(init=False)
197
+
198
+ def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstance_Identifier: ast_Identifier, field: dataclasses.Field[Any]) -> None:
199
+ self.compare = field.compare
200
+ self.default = field.default if field.default is not dataclasses.MISSING else None
201
+ self.default_factory = field.default_factory if field.default_factory is not dataclasses.MISSING else None
202
+ self.hash = field.hash
203
+ self.init = field.init
204
+ self.kw_only = field.kw_only if field.kw_only is not dataclasses.MISSING else False
205
+ self.metadata = dict(field.metadata)
206
+ self.name = field.name
207
+ self.repr = field.repr
208
+ self.typeBuffalo = field.type
209
+
210
+ self.astName = Make.Name(self.name)
211
+ self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
212
+ self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)
213
+
214
+ sherpa = NodeTourist(ifThis.isAnnAssign_targetIs(ifThis.isName_Identifier(self.name)), 又.annotation(Then.getIt)).captureLastMatch(dataclassClassDef)
215
+ if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
216
+ else: self.astAnnotation = sherpa
217
+
218
+ self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)
219
+
220
+ dtype = self.metadata.get('dtype', None)
221
+ if dtype:
222
+ moduleWithLogicalPath: str_nameDOTname = 'numpy'
223
+ annotation = 'ndarray'
224
+ self.ledger.addImportFrom_asStr(moduleWithLogicalPath, annotation)
225
+ constructor = 'array'
226
+ self.ledger.addImportFrom_asStr(moduleWithLogicalPath, constructor)
227
+ dtypeIdentifier: ast_Identifier = dtype.__name__
228
+ dtype_asnameName: ast.Name = self.astAnnotation
229
+ # dtypeIdentifier_asname: ast_Identifier = moduleWithLogicalPath + '_' + dtypeIdentifier
230
+ self.ledger.addImportFrom_asStr(moduleWithLogicalPath, dtypeIdentifier, dtype_asnameName.id)
231
+ self.astAnnAssignConstructor = Make.AnnAssign(self.astName, Make.Name(annotation), Make.Call(Make.Name(constructor), list_astKeywords=[Make.keyword('dtype', dtype_asnameName)]))
232
+ # self.astAnnAssignConstructor = Make.AnnAssign(self.astName, Make.Name(annotation), Make.Call(Make.Name(constructor), list_astKeywords=[Make.keyword('dtype', Make.Name(dtypeIdentifier_asname))]))
233
+ # self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(Make.Name(constructor), list_astKeywords=[Make.keyword('dtype', Make.Name(dtypeIdentifier_asname))]))
234
+ self.Z0Z_hack = (self.astAnnAssignConstructor, 'array')
235
+ elif be.Name(self.astAnnotation):
236
+ self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(self.astAnnotation, [Make.Constant(-1)]))
237
+ # self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id)
238
+ self.Z0Z_hack = (self.astAnnAssignConstructor, 'scalar')
239
+ elif be.Subscript(self.astAnnotation):
240
+ elementConstructor: ast_Identifier = self.metadata['elementConstructor']
241
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, elementConstructor)
242
+ takeTheTuple: ast.Tuple = deepcopy(self.astAnnotation.slice) # type: ignore
243
+ self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, takeTheTuple)
244
+ self.Z0Z_hack = (self.astAnnAssignConstructor, elementConstructor)
245
+ if be.Name(self.astAnnotation):
246
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id) # pyright: ignore [reportUnknownArgumentType, reportUnknownMemberType, reportIJustCalledATypeGuardMethod_WTF]
247
+
248
+ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclass_Identifier: ast_Identifier, instance_Identifier: ast_Identifier) -> ShatteredDataclass:
249
+ """
250
+ Parameters:
251
+ logicalPathModule: gimme string cuz python is stoopid
252
+ dataclass_Identifier: The identifier of the dataclass to be dismantled.
253
+ instance_Identifier: In the synthesized module/function/scope, the identifier that will be used for the instance.
254
+ """
255
+ Official_fieldOrder: list[ast_Identifier] = []
256
+ dictionaryDeReConstruction: dict[ast_Identifier, DeReConstructField2ast] = {}
257
+
258
+ dataclassClassDef = extractClassDef(parseLogicalPath2astModule(logicalPathModule), dataclass_Identifier)
259
+ if not isinstance(dataclassClassDef, ast.ClassDef): raise ValueError(f"I could not find {dataclass_Identifier=} in {logicalPathModule=}.")
260
+
261
+ countingVariable = None
262
+ for aField in dataclasses.fields(importLogicalPath2Callable(logicalPathModule, dataclass_Identifier)): # pyright: ignore [reportArgumentType]
263
+ Official_fieldOrder.append(aField.name)
264
+ dictionaryDeReConstruction[aField.name] = DeReConstructField2ast(logicalPathModule, dataclassClassDef, instance_Identifier, aField)
265
+ if aField.metadata.get('theCountingIdentifier', False):
266
+ countingVariable = dictionaryDeReConstruction[aField.name].name
267
+
268
+ if countingVariable is None:
269
+ raise ValueError(f"I could not find the counting variable in {dataclass_Identifier=} in {logicalPathModule=}.")
270
+
271
+ shatteredDataclass = ShatteredDataclass(
272
+ countingVariableAnnotation=dictionaryDeReConstruction[countingVariable].astAnnotation,
273
+ countingVariableName=dictionaryDeReConstruction[countingVariable].astName,
274
+ field2AnnAssign={dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].astAnnAssignConstructor for field in Official_fieldOrder},
275
+ Z0Z_field2AnnAssign={dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].Z0Z_hack for field in Official_fieldOrder},
276
+ list_argAnnotated4ArgumentsSpecification=[dictionaryDeReConstruction[field].ast_argAnnotated for field in Official_fieldOrder],
277
+ list_keyword_field__field4init=[dictionaryDeReConstruction[field].ast_keyword_field__field for field in Official_fieldOrder if dictionaryDeReConstruction[field].init],
278
+ listAnnotations=[dictionaryDeReConstruction[field].astAnnotation for field in Official_fieldOrder],
279
+ listName4Parameters=[dictionaryDeReConstruction[field].astName for field in Official_fieldOrder],
280
+ listUnpack=[Make.AnnAssign(dictionaryDeReConstruction[field].astName, dictionaryDeReConstruction[field].astAnnotation, dictionaryDeReConstruction[field].ast_nameDOTname) for field in Official_fieldOrder],
281
+ map_stateDOTfield2Name={dictionaryDeReConstruction[field].ast_nameDOTname: dictionaryDeReConstruction[field].astName for field in Official_fieldOrder},
282
+ )
283
+ shatteredDataclass.fragments4AssignmentOrParameters = Make.Tuple(shatteredDataclass.listName4Parameters, ast.Store())
284
+ shatteredDataclass.repack = Make.Assign(listTargets=[Make.Name(instance_Identifier)], value=Make.Call(Make.Name(dataclass_Identifier), list_astKeywords=shatteredDataclass.list_keyword_field__field4init))
285
+ shatteredDataclass.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclass.listAnnotations))
286
+
287
+ shatteredDataclass.ledger.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
288
+ shatteredDataclass.ledger.addImportFrom_asStr(logicalPathModule, dataclass_Identifier)
289
+
290
+ return shatteredDataclass
291
+
43
292
  def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any] | PurePath, packageName: ast_Identifier | None = None) -> None:
44
293
  astModule = Make.Module(ingredients.body, ingredients.type_ignores)
45
294
  ast.fix_missing_locations(astModule)
@@ -48,16 +297,10 @@ def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any]
48
297
  autoflake_additional_imports: list[str] = ingredients.imports.exportListModuleIdentifiers()
49
298
  if packageName:
50
299
  autoflake_additional_imports.append(packageName)
51
- pythonSource = autoflake_fix_code(pythonSource, autoflake_additional_imports, expand_star_imports=False, remove_all_unused_imports=False, remove_duplicate_keys = False, remove_unused_variables = False)
300
+ pythonSource = autoflake_fix_code(pythonSource, autoflake_additional_imports, expand_star_imports=False, remove_all_unused_imports=True, remove_duplicate_keys = False, remove_unused_variables = False)
52
301
  writeStringToHere(pythonSource, pathFilename)
53
302
 
54
303
  # END of acceptable classes and functions ======================================================
55
-
56
- def makeDictionaryFunctionDef(module: ast.AST) -> dict[ast_Identifier, ast.FunctionDef]:
57
- dictionaryFunctionDef: dict[ast_Identifier, ast.FunctionDef] = {}
58
- NodeTourist(be.FunctionDef, Then.updateThis(dictionaryFunctionDef)).visit(module)
59
- return dictionaryFunctionDef
60
-
61
304
  dictionaryEstimates: dict[tuple[int, ...], int] = {
62
305
  (2,2,2,2,2,2,2,2): 362794844160000,
63
306
  (2,21): 1493028892051200,
@@ -67,7 +310,7 @@ dictionaryEstimates: dict[tuple[int, ...], int] = {
67
310
  }
68
311
 
69
312
  # END of marginal classes and functions ======================================================
70
- def Z0Z_lameFindReplace(astTree: typeCertified, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]) -> typeCertified:
313
+ def Z0Z_lameFindReplace(astTree, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]):
71
314
  keepGoing = True
72
315
  newTree = deepcopy(astTree)
73
316
 
@@ -82,37 +325,15 @@ def Z0Z_lameFindReplace(astTree: typeCertified, mappingFindReplaceNodes: Mapping
82
325
  return newTree
83
326
 
84
327
  # Start of I HATE PROGRAMMING ==========================================================
85
- # Similar functionality to call does not call itself, but it is used for something else. I hate this function, too.
86
- def Z0Z_descendantContainsMatchingNode(node: ast.AST, predicateFunction: Callable[[ast.AST], bool]) -> bool:
87
- """Return True if any descendant of the node (or the node itself) matches the predicateFunction."""
88
- matchFound = False
89
- class DescendantFinder(ast.NodeVisitor):
90
- def generic_visit(self, node: ast.AST) -> None:
91
- nonlocal matchFound
92
- if predicateFunction(node):
93
- matchFound = True
94
- else:
95
- super().generic_visit(node)
96
- DescendantFinder().visit(node)
97
- return matchFound
98
-
99
- def Z0Z_executeActionUnlessDescendantMatches(exclusionPredicate: Callable[[ast.AST], bool], actionFunction: Callable[[ast.AST], None]) -> Callable[[ast.AST], None]:
100
- """Return a new action that will execute actionFunction only if no descendant (or the node itself) matches exclusionPredicate."""
101
- def wrappedAction(node: ast.AST) -> None:
102
- if not Z0Z_descendantContainsMatchingNode(node, exclusionPredicate):
103
- actionFunction(node)
104
- return wrappedAction
105
-
106
- # Inlining functions ==========================================================
107
328
  def Z0Z_makeDictionaryReplacementStatements(module: ast.AST) -> dict[ast_Identifier, ast.stmt | list[ast.stmt]]:
108
329
  """Return a dictionary of function names and their replacement statements."""
109
330
  dictionaryFunctionDef: dict[ast_Identifier, ast.FunctionDef] = makeDictionaryFunctionDef(module)
110
331
  dictionaryReplacementStatements: dict[ast_Identifier, ast.stmt | list[ast.stmt]] = {}
111
332
  for name, astFunctionDef in dictionaryFunctionDef.items():
112
333
  if ifThis.onlyReturnAnyCompare(astFunctionDef):
113
- dictionaryReplacementStatements[name] = astFunctionDef.body[0].value
334
+ dictionaryReplacementStatements[name] = astFunctionDef.body[0].value # type: ignore
114
335
  elif ifThis.onlyReturnUnaryOp(astFunctionDef):
115
- dictionaryReplacementStatements[name] = astFunctionDef.body[0].value
336
+ dictionaryReplacementStatements[name] = astFunctionDef.body[0].value # type: ignore
116
337
  else:
117
338
  dictionaryReplacementStatements[name] = astFunctionDef.body[0:-1]
118
339
  return dictionaryReplacementStatements
@@ -128,17 +349,17 @@ def Z0Z_inlineThisFunctionWithTheseValues(astFunctionDef: ast.FunctionDef, dicti
128
349
 
129
350
  def visit_Expr(self, node: ast.Expr) -> ast.AST | list[ast.stmt]:
130
351
  if ifThis.CallDoesNotCallItselfAndNameDOTidIsIn(self.dictionaryReplacementStatements)(node.value):
131
- return self.dictionaryReplacementStatements[node.value.func.id]
352
+ return self.dictionaryReplacementStatements[node.value.func.id] # type: ignore
132
353
  return node
133
354
 
134
355
  def visit_Assign(self, node: ast.Assign) -> ast.AST | list[ast.stmt]:
135
356
  if ifThis.CallDoesNotCallItselfAndNameDOTidIsIn(self.dictionaryReplacementStatements)(node.value):
136
- return self.dictionaryReplacementStatements[node.value.func.id]
357
+ return self.dictionaryReplacementStatements[node.value.func.id] # type: ignore
137
358
  return node
138
359
 
139
360
  def visit_Call(self, node: ast.Call) -> ast.AST | list[ast.stmt]:
140
361
  if ifThis.CallDoesNotCallItselfAndNameDOTidIsIn(self.dictionaryReplacementStatements)(node):
141
- replacement = self.dictionaryReplacementStatements[node.func.id]
362
+ replacement = self.dictionaryReplacementStatements[node.func.id] # type: ignore
142
363
  if not isinstance(replacement, list):
143
364
  return replacement
144
365
  return node
@@ -2,7 +2,6 @@ from concurrent.futures import Future as ConcurrentFuture, ProcessPoolExecutor
2
2
  from copy import deepcopy
3
3
  from mapFolding.theSSOT import Array1DElephino, Array1DFoldsTotal, Array1DLeavesTotal, Array3D, ComputationState, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal
4
4
  from numba import jit
5
- from numpy import array, int16, int64
6
5
 
7
6
  def countInitialize(state: ComputationState) -> ComputationState:
8
7
  while state.leaf1ndex > 0:
mapFolding/theSSOT.py CHANGED
@@ -19,11 +19,11 @@ to avoid namespace collisions when transforming algorithms.
19
19
  from collections.abc import Callable
20
20
  from importlib import import_module as importlib_import_module
21
21
  from inspect import getfile as inspect_getfile
22
- from numpy import dtype, int64 as numpy_int64, int16 as numpy_int16, ndarray
22
+ from numpy import dtype, int64 as numpy_int64, int16 as numpy_int16, integer, ndarray
23
23
  from pathlib import Path
24
24
  from tomli import load as tomli_load
25
25
  from types import ModuleType
26
- from typing import TypeAlias
26
+ from typing import Any, TypeAlias, TypeVar
27
27
  import dataclasses
28
28
 
29
29
  # =============================================================================
@@ -55,7 +55,8 @@ concurrencyPackageHARDCODED = 'multiprocessing'
55
55
  # The following is an improvement, but it is not the full solution.
56
56
  # I hope that the standardized markers, `metadata={'evaluateWhen': 'packaging'}` will help to automate
57
57
  # whatever needs to happen so that the following is well implemented.
58
- @dataclasses.dataclass(frozen=True)
58
+ # @dataclasses.dataclass(frozen=True)
59
+ @dataclasses.dataclass
59
60
  class PackageSettings:
60
61
 
61
62
  logicalPathModuleDispatcher: str | None = None
@@ -78,25 +79,14 @@ class PackageSettings:
78
79
  sourceConcurrencyManagerNamespace: str = dataclasses.field(default='concurrencyManager', metadata={'evaluateWhen': 'packaging'})
79
80
  sourceConcurrencyPackage: str = dataclasses.field(default='multiprocessing', metadata={'evaluateWhen': 'packaging'})
80
81
 
81
- @property # These are not fields, and that annoys me.
82
- def dataclassInstanceTaskDistribution(self) -> str:
83
- """ During parallel computation, this identifier helps to create deep copies of the dataclass instance. """
84
- # it follows that `metadata={'evaluateWhen': 'packaging'}`
85
- return self.dataclassInstance + self.dataclassInstanceTaskDistributionSuffix
86
-
87
- @property # These are not fields, and that annoys me.
88
- def logicalPathModuleDataclass(self) -> str:
89
- """ The package.module.name logical path to the dataclass. """
90
- # it follows that `metadata={'evaluateWhen': 'packaging'}`
91
- return '.'.join([self.packageName, self.dataclassModule])
92
-
93
- @property # These are not fields, and that annoys me.
94
- def logicalPathModuleSourceAlgorithm(self) -> str:
95
- """ The package.module.name logical path to the source algorithm. """
96
- # it follows that `metadata={'evaluateWhen': 'packaging'}`
97
- return '.'.join([self.packageName, self.sourceAlgorithm])
98
-
99
- @property # These are not fields, and that annoys me.
82
+ dataclassInstanceTaskDistribution: str = dataclasses.field(init=False, metadata={'evaluateWhen': 'packaging'})
83
+ """ During parallel computation, this identifier helps to create deep copies of the dataclass instance. """
84
+ logicalPathModuleDataclass: str = dataclasses.field(init=False)
85
+ """ The package.module.name logical path to the dataclass. """
86
+ logicalPathModuleSourceAlgorithm: str = dataclasses.field(init=False)
87
+ """ The package.module.name logical path to the source algorithm. """
88
+
89
+ @property # This is not a field, and that annoys me.
100
90
  def dispatcher(self) -> Callable[['ComputationState'], 'ComputationState']:
101
91
  """ _The_ callable that connects `countFolds` to the logic that does the work."""
102
92
  logicalPath: str = self.logicalPathModuleDispatcher or self.logicalPathModuleSourceAlgorithm
@@ -104,6 +94,12 @@ class PackageSettings:
104
94
  moduleImported: ModuleType = importlib_import_module(logicalPath)
105
95
  return getattr(moduleImported, identifier)
106
96
 
97
+ def __post_init__(self) -> None:
98
+ self.dataclassInstanceTaskDistribution = self.dataclassInstance + self.dataclassInstanceTaskDistributionSuffix
99
+
100
+ self.logicalPathModuleDataclass = '.'.join([self.packageName, self.dataclassModule])
101
+ self.logicalPathModuleSourceAlgorithm = '.'.join([self.packageName, self.sourceAlgorithm])
102
+
107
103
  The = PackageSettings(logicalPathModuleDispatcher=logicalPathModuleDispatcherHARDCODED, callableDispatcher=callableDispatcherHARDCODED, concurrencyPackage=concurrencyPackageHARDCODED)
108
104
 
109
105
  # To remove this function, I need to learn how to change "conftest.py" to patch this.
@@ -116,7 +112,8 @@ def getPackageDispatcher() -> Callable[['ComputationState'], 'ComputationState']
116
112
  return The.dispatcher
117
113
  # =============================================================================
118
114
  # Flexible Data Structure System Needs Enhanced Paradigm https://github.com/hunterhogan/mapFolding/issues/9
119
- # Efficient translation of Python scalar types to Numba types https://github.com/hunterhogan/mapFolding/issues/8
115
+
116
+ numpyIntegerType = TypeVar('numpyIntegerType', bound=integer[Any], covariant=True)
120
117
 
121
118
  DatatypeLeavesTotal: TypeAlias = int
122
119
  NumPyLeavesTotal: TypeAlias = numpy_int16 # this would be uint8, but mapShape (2,2,2,2, 2,2,2,2) has 256 leaves, so generic containers must accommodate at least 256 leaves
@@ -139,7 +136,7 @@ class ComputationState:
139
136
  taskDivisions: DatatypeLeavesTotal
140
137
  concurrencyLimit: DatatypeElephino
141
138
 
142
- connectionGraph: Array3D = dataclasses.field(init=False, metadata={'dtype': Array3D.__args__[1].__args__[0]}) # pyright: ignore[reportAttributeAccessIssue]
139
+ connectionGraph: Array3D = dataclasses.field(init=False, metadata={'dtype': Array3D.__args__[1].__args__[0]}) # pyright: ignore[reportUnknownMemberType, reportAttributeAccessIssue]
143
140
  dimensionsTotal: DatatypeLeavesTotal = dataclasses.field(init=False)
144
141
 
145
142
  countDimensionsGapped: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # type: ignore[arg-type, reportAssignmentType]
@@ -162,24 +159,24 @@ class ComputationState:
162
159
  taskIndex: DatatypeLeavesTotal = DatatypeLeavesTotal(0)
163
160
 
164
161
  def __post_init__(self) -> None:
165
- from mapFolding.beDRY import makeConnectionGraph, makeDataContainer
162
+ from mapFolding.beDRY import getConnectionGraph, makeDataContainer
166
163
  self.dimensionsTotal = DatatypeLeavesTotal(len(self.mapShape))
167
164
  leavesTotalAsInt = int(self.leavesTotal)
168
- self.connectionGraph = makeConnectionGraph(self.mapShape, leavesTotalAsInt, self.__dataclass_fields__['connectionGraph'].metadata['dtype'])
165
+ self.connectionGraph = getConnectionGraph(self.mapShape, leavesTotalAsInt, self.__dataclass_fields__['connectionGraph'].metadata['dtype'])
169
166
 
170
- if self.dimensionsUnconstrained is None:
167
+ if self.dimensionsUnconstrained is None: # type: ignore
171
168
  self.dimensionsUnconstrained = DatatypeLeavesTotal(int(self.dimensionsTotal))
172
169
 
173
- if self.foldGroups is None:
170
+ if self.foldGroups is None: # type: ignore
174
171
  self.foldGroups = makeDataContainer(max(2, int(self.taskDivisions) + 1), self.__dataclass_fields__['foldGroups'].metadata['dtype'])
175
172
  self.foldGroups[-1] = self.leavesTotal
176
173
 
177
- if self.gapsWhere is None: self.gapsWhere = makeDataContainer(leavesTotalAsInt * leavesTotalAsInt + 1, self.__dataclass_fields__['gapsWhere'].metadata['dtype'])
174
+ if self.gapsWhere is None: self.gapsWhere = makeDataContainer(leavesTotalAsInt * leavesTotalAsInt + 1, self.__dataclass_fields__['gapsWhere'].metadata['dtype']) # type: ignore
178
175
 
179
- if self.countDimensionsGapped is None: self.countDimensionsGapped = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['countDimensionsGapped'].metadata['dtype'])
180
- if self.gapRangeStart is None: self.gapRangeStart = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['gapRangeStart'].metadata['dtype'])
181
- if self.leafAbove is None: self.leafAbove = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafAbove'].metadata['dtype'])
182
- if self.leafBelow is None: self.leafBelow = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafBelow'].metadata['dtype'])
176
+ if self.countDimensionsGapped is None: self.countDimensionsGapped = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['countDimensionsGapped'].metadata['dtype']) # type: ignore
177
+ if self.gapRangeStart is None: self.gapRangeStart = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['gapRangeStart'].metadata['dtype']) # type: ignore
178
+ if self.leafAbove is None: self.leafAbove = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafAbove'].metadata['dtype']) # type: ignore
179
+ if self.leafBelow is None: self.leafBelow = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafBelow'].metadata['dtype']) # type: ignore
183
180
 
184
181
  def getFoldsTotal(self) -> None:
185
182
  self.foldsTotal = DatatypeFoldsTotal(self.foldGroups[0:-1].sum() * self.leavesTotal)