mapFolding 0.8.4__py3-none-any.whl → 0.8.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. mapFolding/__init__.py +4 -1
  2. mapFolding/basecamp.py +3 -3
  3. mapFolding/beDRY.py +241 -68
  4. mapFolding/oeis.py +3 -3
  5. mapFolding/reference/hunterNumba.py +1 -1
  6. mapFolding/someAssemblyRequired/__init__.py +16 -15
  7. mapFolding/someAssemblyRequired/_theTypes.py +31 -13
  8. mapFolding/someAssemblyRequired/_tool_Make.py +8 -1
  9. mapFolding/someAssemblyRequired/_tool_Then.py +12 -5
  10. mapFolding/someAssemblyRequired/_toolboxAntecedents.py +131 -99
  11. mapFolding/someAssemblyRequired/_toolboxContainers.py +35 -7
  12. mapFolding/someAssemblyRequired/_toolboxPython.py +17 -31
  13. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +2 -2
  14. mapFolding/someAssemblyRequired/newInliner.py +22 -0
  15. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +24 -113
  16. mapFolding/someAssemblyRequired/toolboxNumba.py +358 -0
  17. mapFolding/someAssemblyRequired/transformationTools.py +253 -40
  18. mapFolding/theSSOT.py +30 -32
  19. mapFolding/{filesystem.py → toolboxFilesystem.py} +90 -25
  20. {mapfolding-0.8.4.dist-info → mapfolding-0.8.5.dist-info}/METADATA +3 -2
  21. mapfolding-0.8.5.dist-info/RECORD +48 -0
  22. tests/conftest.py +30 -31
  23. tests/test_computations.py +7 -6
  24. tests/test_filesystem.py +2 -2
  25. tests/test_other.py +2 -2
  26. tests/test_tasks.py +2 -2
  27. mapFolding/someAssemblyRequired/ingredientsNumba.py +0 -199
  28. mapFolding/someAssemblyRequired/synthesizeNumbaFlow.py +0 -156
  29. mapFolding/someAssemblyRequired/transformDataStructures.py +0 -235
  30. mapfolding-0.8.4.dist-info/RECORD +0 -49
  31. {mapfolding-0.8.4.dist-info → mapfolding-0.8.5.dist-info}/WHEEL +0 -0
  32. {mapfolding-0.8.4.dist-info → mapfolding-0.8.5.dist-info}/entry_points.txt +0 -0
  33. {mapfolding-0.8.4.dist-info → mapfolding-0.8.5.dist-info}/licenses/LICENSE +0 -0
  34. {mapfolding-0.8.4.dist-info → mapfolding-0.8.5.dist-info}/top_level.txt +0 -0
@@ -25,14 +25,40 @@ transformation scenarios beyond the scope of this package.
25
25
  from autoflake import fix_code as autoflake_fix_code
26
26
  from collections.abc import Callable, Mapping
27
27
  from copy import deepcopy
28
- from mapFolding.filesystem import writeStringToHere
29
- from mapFolding.someAssemblyRequired import ast_Identifier, be, ifThis, Make, NodeChanger, NodeTourist, Then, typeCertified
30
- from mapFolding.someAssemblyRequired._toolboxContainers import IngredientsModule
31
- from mapFolding.theSSOT import raiseIfNoneGitHubIssueNumber3
28
+ from mapFolding.beDRY import outfitCountFolds
29
+ from mapFolding.toolboxFilesystem import getPathFilenameFoldsTotal, writeStringToHere
30
+ from mapFolding.someAssemblyRequired import (
31
+ ast_Identifier,
32
+ be,
33
+ DOT,
34
+ ifThis,
35
+ ImaAnnotationType,
36
+ importLogicalPath2Callable,
37
+ IngredientsFunction,
38
+ IngredientsModule,
39
+ LedgerOfImports,
40
+ Make,
41
+ NodeChanger,
42
+ NodeTourist,
43
+ parseLogicalPath2astModule,
44
+ ShatteredDataclass,
45
+ str_nameDOTname,
46
+ Then,
47
+ TypeCertified,
48
+ 又,
49
+ )
50
+ from mapFolding.theSSOT import ComputationState, The, raiseIfNoneGitHubIssueNumber3
32
51
  from os import PathLike
33
- from pathlib import PurePath
34
- from typing import Any
52
+ from pathlib import Path, PurePath
53
+ from typing import Any, Literal, overload
35
54
  import ast
55
+ import dataclasses
56
+ import pickle
57
+
58
+ def astModuleToIngredientsFunction(astModule: ast.AST, identifierFunctionDef: ast_Identifier) -> IngredientsFunction:
59
+ astFunctionDef = extractFunctionDef(astModule, identifierFunctionDef)
60
+ if not astFunctionDef: raise raiseIfNoneGitHubIssueNumber3
61
+ return IngredientsFunction(astFunctionDef, LedgerOfImports(astModule))
36
62
 
37
63
  def extractClassDef(module: ast.AST, identifier: ast_Identifier) -> ast.ClassDef | None:
38
64
  return NodeTourist(ifThis.isClassDef_Identifier(identifier), Then.getIt).captureLastMatch(module)
@@ -40,6 +66,221 @@ def extractClassDef(module: ast.AST, identifier: ast_Identifier) -> ast.ClassDef
40
66
  def extractFunctionDef(module: ast.AST, identifier: ast_Identifier) -> ast.FunctionDef | None:
41
67
  return NodeTourist(ifThis.isFunctionDef_Identifier(identifier), Then.getIt).captureLastMatch(module)
42
68
 
69
+ def makeDictionaryFunctionDef(module: ast.AST) -> dict[ast_Identifier, ast.FunctionDef]:
70
+ dictionaryIdentifier2FunctionDef: dict[ast_Identifier, ast.FunctionDef] = {}
71
+ NodeTourist(be.FunctionDef, Then.updateKeyValueIn(DOT.name, Then.getIt, dictionaryIdentifier2FunctionDef)).visit(module)
72
+ return dictionaryIdentifier2FunctionDef
73
+
74
+ def makeDictionary4InliningFunction(identifierToInline: ast_Identifier, dictionaryFunctionDef: dict[ast_Identifier, ast.FunctionDef], FunctionDefToInline: ast.FunctionDef | None = None) -> dict[str, ast.FunctionDef]:
75
+ """
76
+ Creates a dictionary of function definitions required for inlining a target function.
77
+ This function analyzes a target function and recursively collects all function definitions
78
+ that are called within it (and any functions called by those functions), preparing them for inlining.
79
+ Parameters:
80
+ ----------
81
+ identifierToInline : ast_Identifier
82
+ The identifier of the function to be inlined.
83
+ dictionaryFunctionDef : dict[ast_Identifier, ast.FunctionDef]
84
+ A dictionary mapping function identifiers to their AST function definitions.
85
+ FunctionDefToInline : ast.FunctionDef | None, optional
86
+ The AST function definition to inline. If None, it will be retrieved from dictionaryFunctionDef using identifierToInline.
87
+ Returns:
88
+ -------
89
+ dict[str, ast.FunctionDef]
90
+ A dictionary mapping function names to their AST function definitions, containing all functions needed for inlining.
91
+ Raises:
92
+ ------
93
+ ValueError
94
+ If the function to inline is not found in the dictionary, or if recursion is detected during analysis.
95
+ Notes:
96
+ -----
97
+ The function performs a recursive analysis to find all dependent functions needed for inlining.
98
+ It detects and prevents recursive function calls that could cause infinite inlining.
99
+ """
100
+ if FunctionDefToInline is None:
101
+ try:
102
+ FunctionDefToInline = dictionaryFunctionDef[identifierToInline]
103
+ except KeyError as ERRORmessage:
104
+ raise ValueError(f"FunctionDefToInline not found in dictionaryIdentifier2FunctionDef: {identifierToInline = }") from ERRORmessage
105
+
106
+ listIdentifiersCalledFunctions: list[ast_Identifier] = []
107
+ findIdentifiersToInline = NodeTourist(ifThis.isCallToName, lambda node: Then.appendTo(listIdentifiersCalledFunctions)(DOT.id(DOT.func(node)))) # pyright: ignore[reportArgumentType]
108
+ findIdentifiersToInline.visit(FunctionDefToInline)
109
+
110
+ dictionary4Inlining: dict[ast_Identifier, ast.FunctionDef] = {}
111
+ for identifier in sorted(set(listIdentifiersCalledFunctions).intersection(dictionaryFunctionDef.keys())):
112
+ dictionary4Inlining[identifier] = dictionaryFunctionDef[identifier]
113
+
114
+ keepGoing = True
115
+ while keepGoing:
116
+ keepGoing = False
117
+ listIdentifiersCalledFunctions.clear()
118
+ findIdentifiersToInline.visit(Make.Module(list(dictionary4Inlining.values())))
119
+
120
+ # NOTE: This is simple not comprehensive recursion protection. # TODO think about why I dislike `ifThis.CallDoesNotCallItself`
121
+ if identifierToInline in listIdentifiersCalledFunctions: raise ValueError(f"Recursion found: {identifierToInline = }.")
122
+
123
+ listIdentifiersCalledFunctions = sorted((set(listIdentifiersCalledFunctions).difference(dictionary4Inlining.keys())).intersection(dictionaryFunctionDef.keys()))
124
+ if len(listIdentifiersCalledFunctions) > 0:
125
+ keepGoing = True
126
+ for identifier in listIdentifiersCalledFunctions:
127
+ if identifier in dictionaryFunctionDef:
128
+ dictionary4Inlining[identifier] = dictionaryFunctionDef[identifier]
129
+
130
+ return dictionary4Inlining
131
+
132
+ @overload
133
+ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: Literal[True], *, pathFilename: PathLike[str] | PurePath | None = None, **keywordArguments: Any) -> Path: ...
134
+ @overload
135
+ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: Literal[False] = False, **keywordArguments: Any) -> ComputationState: ...
136
+ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: bool = False, *, pathFilename: PathLike[str] | PurePath | None = None, **keywordArguments: Any) -> ComputationState | Path:
137
+ """
138
+ Initializes a computation state and optionally saves it to disk.
139
+
140
+ This function initializes a computation state using the source algorithm.
141
+
142
+ Hint: If you want an uninitialized state, call `outfitCountFolds` directly.
143
+
144
+ Parameters:
145
+ mapShape: List of integers representing the dimensions of the map to be folded.
146
+ writeJob (False): Whether to save the state to disk.
147
+ pathFilename (getPathFilenameFoldsTotal.pkl): The path and filename to save the state. If None, uses a default path.
148
+ **keywordArguments: computationDivisions:int|str|None=None,concurrencyLimit:int=1.
149
+ Returns:
150
+ stateUniversal|pathFilenameJob: The computation state for the map folding calculations, or
151
+ the path to the saved state file if writeJob is True.
152
+ """
153
+ stateUniversal: ComputationState = outfitCountFolds(mapShape, **keywordArguments)
154
+
155
+ initializeState = importLogicalPath2Callable(The.logicalPathModuleSourceAlgorithm, The.sourceCallableInitialize)
156
+ stateUniversal = initializeState(stateUniversal)
157
+
158
+ if not writeJob:
159
+ return stateUniversal
160
+
161
+ if pathFilename:
162
+ pathFilenameJob = Path(pathFilename)
163
+ pathFilenameJob.parent.mkdir(parents=True, exist_ok=True)
164
+ else:
165
+ pathFilenameJob = getPathFilenameFoldsTotal(stateUniversal.mapShape).with_suffix('.pkl')
166
+
167
+ pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
168
+ return pathFilenameJob
169
+
170
+ @dataclasses.dataclass
171
+ class DeReConstructField2ast:
172
+ dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[str_nameDOTname]
173
+ dataclassClassDef: dataclasses.InitVar[ast.ClassDef]
174
+ dataclassesDOTdataclassInstance_Identifier: dataclasses.InitVar[ast_Identifier]
175
+ field: dataclasses.InitVar[dataclasses.Field[Any]]
176
+
177
+ ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
178
+
179
+ name: ast_Identifier = dataclasses.field(init=False)
180
+ typeBuffalo: type[Any] | str | Any = dataclasses.field(init=False)
181
+ default: Any | None = dataclasses.field(init=False)
182
+ default_factory: Callable[..., Any] | None = dataclasses.field(init=False)
183
+ repr: bool = dataclasses.field(init=False)
184
+ hash: bool | None = dataclasses.field(init=False)
185
+ init: bool = dataclasses.field(init=False)
186
+ compare: bool = dataclasses.field(init=False)
187
+ metadata: dict[Any, Any] = dataclasses.field(init=False)
188
+ kw_only: bool = dataclasses.field(init=False)
189
+
190
+ astName: ast.Name = dataclasses.field(init=False)
191
+ ast_keyword_field__field: ast.keyword = dataclasses.field(init=False)
192
+ ast_nameDOTname: ast.Attribute = dataclasses.field(init=False)
193
+ astAnnotation: ImaAnnotationType = dataclasses.field(init=False)
194
+ ast_argAnnotated: ast.arg = dataclasses.field(init=False)
195
+ astAnnAssignConstructor: ast.AnnAssign = dataclasses.field(init=False)
196
+ Z0Z_hack: tuple[ast.AnnAssign, str] = dataclasses.field(init=False)
197
+
198
+ def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstance_Identifier: ast_Identifier, field: dataclasses.Field[Any]) -> None:
199
+ self.compare = field.compare
200
+ self.default = field.default if field.default is not dataclasses.MISSING else None
201
+ self.default_factory = field.default_factory if field.default_factory is not dataclasses.MISSING else None
202
+ self.hash = field.hash
203
+ self.init = field.init
204
+ self.kw_only = field.kw_only if field.kw_only is not dataclasses.MISSING else False
205
+ self.metadata = dict(field.metadata)
206
+ self.name = field.name
207
+ self.repr = field.repr
208
+ self.typeBuffalo = field.type
209
+
210
+ self.astName = Make.Name(self.name)
211
+ self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
212
+ self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)
213
+
214
+ sherpa = NodeTourist(ifThis.isAnnAssign_targetIs(ifThis.isName_Identifier(self.name)), 又.annotation(Then.getIt)).captureLastMatch(dataclassClassDef)
215
+ if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
216
+ else: self.astAnnotation = sherpa
217
+
218
+ self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)
219
+
220
+ dtype = self.metadata.get('dtype', None)
221
+ if dtype:
222
+ constructor = 'array'
223
+ self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(Make.Name(constructor), list_astKeywords=[Make.keyword('dtype', Make.Name(dtype.__name__))]))
224
+ self.ledger.addImportFrom_asStr('numpy', constructor)
225
+ self.ledger.addImportFrom_asStr('numpy', dtype.__name__)
226
+ self.Z0Z_hack = (self.astAnnAssignConstructor, 'array')
227
+ elif be.Name(self.astAnnotation):
228
+ self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(self.astAnnotation, [Make.Constant(-1)]))
229
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id)
230
+ self.Z0Z_hack = (self.astAnnAssignConstructor, 'scalar')
231
+ elif be.Subscript(self.astAnnotation):
232
+ elementConstructor: ast_Identifier = self.metadata['elementConstructor']
233
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, elementConstructor)
234
+ takeTheTuple: ast.Tuple = deepcopy(self.astAnnotation.slice) # type: ignore
235
+ self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, takeTheTuple)
236
+ self.Z0Z_hack = (self.astAnnAssignConstructor, elementConstructor)
237
+ if be.Name(self.astAnnotation):
238
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id) # pyright: ignore [reportUnknownArgumentType, reportUnknownMemberType, reportIJustCalledATypeGuardMethod_WTF]
239
+
240
+ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclass_Identifier: ast_Identifier, instance_Identifier: ast_Identifier) -> ShatteredDataclass:
241
+ """
242
+ Parameters:
243
+ logicalPathModule: gimme string cuz python is stoopid
244
+ dataclass_Identifier: The identifier of the dataclass to be dismantled.
245
+ instance_Identifier: In the synthesized module/function/scope, the identifier that will be used for the instance.
246
+ """
247
+ Official_fieldOrder: list[ast_Identifier] = []
248
+ dictionaryDeReConstruction: dict[ast_Identifier, DeReConstructField2ast] = {}
249
+
250
+ dataclassClassDef = extractClassDef(parseLogicalPath2astModule(logicalPathModule), dataclass_Identifier)
251
+ if not isinstance(dataclassClassDef, ast.ClassDef): raise ValueError(f"I could not find {dataclass_Identifier=} in {logicalPathModule=}.")
252
+
253
+ countingVariable = None
254
+ for aField in dataclasses.fields(importLogicalPath2Callable(logicalPathModule, dataclass_Identifier)): # pyright: ignore [reportArgumentType]
255
+ Official_fieldOrder.append(aField.name)
256
+ dictionaryDeReConstruction[aField.name] = DeReConstructField2ast(logicalPathModule, dataclassClassDef, instance_Identifier, aField)
257
+ if aField.metadata.get('theCountingIdentifier', False):
258
+ countingVariable = dictionaryDeReConstruction[aField.name].name
259
+
260
+ if countingVariable is None:
261
+ raise ValueError(f"I could not find the counting variable in {dataclass_Identifier=} in {logicalPathModule=}.")
262
+
263
+ shatteredDataclass = ShatteredDataclass(
264
+ countingVariableAnnotation=dictionaryDeReConstruction[countingVariable].astAnnotation,
265
+ countingVariableName=dictionaryDeReConstruction[countingVariable].astName,
266
+ field2AnnAssign={dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].astAnnAssignConstructor for field in Official_fieldOrder},
267
+ Z0Z_field2AnnAssign={dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].Z0Z_hack for field in Official_fieldOrder},
268
+ list_argAnnotated4ArgumentsSpecification=[dictionaryDeReConstruction[field].ast_argAnnotated for field in Official_fieldOrder],
269
+ list_keyword_field__field4init=[dictionaryDeReConstruction[field].ast_keyword_field__field for field in Official_fieldOrder if dictionaryDeReConstruction[field].init],
270
+ listAnnotations=[dictionaryDeReConstruction[field].astAnnotation for field in Official_fieldOrder],
271
+ listName4Parameters=[dictionaryDeReConstruction[field].astName for field in Official_fieldOrder],
272
+ listUnpack=[Make.AnnAssign(dictionaryDeReConstruction[field].astName, dictionaryDeReConstruction[field].astAnnotation, dictionaryDeReConstruction[field].ast_nameDOTname) for field in Official_fieldOrder],
273
+ map_stateDOTfield2Name={dictionaryDeReConstruction[field].ast_nameDOTname: dictionaryDeReConstruction[field].astName for field in Official_fieldOrder},
274
+ )
275
+ shatteredDataclass.fragments4AssignmentOrParameters = Make.Tuple(shatteredDataclass.listName4Parameters, ast.Store())
276
+ shatteredDataclass.repack = Make.Assign(listTargets=[Make.Name(instance_Identifier)], value=Make.Call(Make.Name(dataclass_Identifier), list_astKeywords=shatteredDataclass.list_keyword_field__field4init))
277
+ shatteredDataclass.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclass.listAnnotations))
278
+
279
+ shatteredDataclass.ledger.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
280
+ shatteredDataclass.ledger.addImportFrom_asStr(logicalPathModule, dataclass_Identifier)
281
+
282
+ return shatteredDataclass
283
+
43
284
  def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any] | PurePath, packageName: ast_Identifier | None = None) -> None:
44
285
  astModule = Make.Module(ingredients.body, ingredients.type_ignores)
45
286
  ast.fix_missing_locations(astModule)
@@ -52,12 +293,6 @@ def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any]
52
293
  writeStringToHere(pythonSource, pathFilename)
53
294
 
54
295
  # END of acceptable classes and functions ======================================================
55
-
56
- def makeDictionaryFunctionDef(module: ast.AST) -> dict[ast_Identifier, ast.FunctionDef]:
57
- dictionaryFunctionDef: dict[ast_Identifier, ast.FunctionDef] = {}
58
- NodeTourist(be.FunctionDef, Then.updateThis(dictionaryFunctionDef)).visit(module)
59
- return dictionaryFunctionDef
60
-
61
296
  dictionaryEstimates: dict[tuple[int, ...], int] = {
62
297
  (2,2,2,2,2,2,2,2): 362794844160000,
63
298
  (2,21): 1493028892051200,
@@ -67,7 +302,7 @@ dictionaryEstimates: dict[tuple[int, ...], int] = {
67
302
  }
68
303
 
69
304
  # END of marginal classes and functions ======================================================
70
- def Z0Z_lameFindReplace(astTree: typeCertified, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]) -> typeCertified:
305
+ def Z0Z_lameFindReplace(astTree, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]):
71
306
  keepGoing = True
72
307
  newTree = deepcopy(astTree)
73
308
 
@@ -82,37 +317,15 @@ def Z0Z_lameFindReplace(astTree: typeCertified, mappingFindReplaceNodes: Mapping
82
317
  return newTree
83
318
 
84
319
  # Start of I HATE PROGRAMMING ==========================================================
85
- # Similar functionality to call does not call itself, but it is used for something else. I hate this function, too.
86
- def Z0Z_descendantContainsMatchingNode(node: ast.AST, predicateFunction: Callable[[ast.AST], bool]) -> bool:
87
- """Return True if any descendant of the node (or the node itself) matches the predicateFunction."""
88
- matchFound = False
89
- class DescendantFinder(ast.NodeVisitor):
90
- def generic_visit(self, node: ast.AST) -> None:
91
- nonlocal matchFound
92
- if predicateFunction(node):
93
- matchFound = True
94
- else:
95
- super().generic_visit(node)
96
- DescendantFinder().visit(node)
97
- return matchFound
98
-
99
- def Z0Z_executeActionUnlessDescendantMatches(exclusionPredicate: Callable[[ast.AST], bool], actionFunction: Callable[[ast.AST], None]) -> Callable[[ast.AST], None]:
100
- """Return a new action that will execute actionFunction only if no descendant (or the node itself) matches exclusionPredicate."""
101
- def wrappedAction(node: ast.AST) -> None:
102
- if not Z0Z_descendantContainsMatchingNode(node, exclusionPredicate):
103
- actionFunction(node)
104
- return wrappedAction
105
-
106
- # Inlining functions ==========================================================
107
320
  def Z0Z_makeDictionaryReplacementStatements(module: ast.AST) -> dict[ast_Identifier, ast.stmt | list[ast.stmt]]:
108
321
  """Return a dictionary of function names and their replacement statements."""
109
322
  dictionaryFunctionDef: dict[ast_Identifier, ast.FunctionDef] = makeDictionaryFunctionDef(module)
110
323
  dictionaryReplacementStatements: dict[ast_Identifier, ast.stmt | list[ast.stmt]] = {}
111
324
  for name, astFunctionDef in dictionaryFunctionDef.items():
112
325
  if ifThis.onlyReturnAnyCompare(astFunctionDef):
113
- dictionaryReplacementStatements[name] = astFunctionDef.body[0].value
326
+ dictionaryReplacementStatements[name] = astFunctionDef.body[0].value # type: ignore
114
327
  elif ifThis.onlyReturnUnaryOp(astFunctionDef):
115
- dictionaryReplacementStatements[name] = astFunctionDef.body[0].value
328
+ dictionaryReplacementStatements[name] = astFunctionDef.body[0].value # type: ignore
116
329
  else:
117
330
  dictionaryReplacementStatements[name] = astFunctionDef.body[0:-1]
118
331
  return dictionaryReplacementStatements
@@ -128,17 +341,17 @@ def Z0Z_inlineThisFunctionWithTheseValues(astFunctionDef: ast.FunctionDef, dicti
128
341
 
129
342
  def visit_Expr(self, node: ast.Expr) -> ast.AST | list[ast.stmt]:
130
343
  if ifThis.CallDoesNotCallItselfAndNameDOTidIsIn(self.dictionaryReplacementStatements)(node.value):
131
- return self.dictionaryReplacementStatements[node.value.func.id]
344
+ return self.dictionaryReplacementStatements[node.value.func.id] # type: ignore
132
345
  return node
133
346
 
134
347
  def visit_Assign(self, node: ast.Assign) -> ast.AST | list[ast.stmt]:
135
348
  if ifThis.CallDoesNotCallItselfAndNameDOTidIsIn(self.dictionaryReplacementStatements)(node.value):
136
- return self.dictionaryReplacementStatements[node.value.func.id]
349
+ return self.dictionaryReplacementStatements[node.value.func.id] # type: ignore
137
350
  return node
138
351
 
139
352
  def visit_Call(self, node: ast.Call) -> ast.AST | list[ast.stmt]:
140
353
  if ifThis.CallDoesNotCallItselfAndNameDOTidIsIn(self.dictionaryReplacementStatements)(node):
141
- replacement = self.dictionaryReplacementStatements[node.func.id]
354
+ replacement = self.dictionaryReplacementStatements[node.func.id] # type: ignore
142
355
  if not isinstance(replacement, list):
143
356
  return replacement
144
357
  return node
mapFolding/theSSOT.py CHANGED
@@ -19,11 +19,11 @@ to avoid namespace collisions when transforming algorithms.
19
19
  from collections.abc import Callable
20
20
  from importlib import import_module as importlib_import_module
21
21
  from inspect import getfile as inspect_getfile
22
- from numpy import dtype, int64 as numpy_int64, int16 as numpy_int16, ndarray
22
+ from numpy import dtype, int64 as numpy_int64, int16 as numpy_int16, integer, ndarray
23
23
  from pathlib import Path
24
24
  from tomli import load as tomli_load
25
25
  from types import ModuleType
26
- from typing import TypeAlias
26
+ from typing import Any, TypeAlias, TypeVar
27
27
  import dataclasses
28
28
 
29
29
  # =============================================================================
@@ -55,7 +55,8 @@ concurrencyPackageHARDCODED = 'multiprocessing'
55
55
  # The following is an improvement, but it is not the full solution.
56
56
  # I hope that the standardized markers, `metadata={'evaluateWhen': 'packaging'}` will help to automate
57
57
  # whatever needs to happen so that the following is well implemented.
58
- @dataclasses.dataclass(frozen=True)
58
+ # @dataclasses.dataclass(frozen=True)
59
+ @dataclasses.dataclass
59
60
  class PackageSettings:
60
61
 
61
62
  logicalPathModuleDispatcher: str | None = None
@@ -78,25 +79,14 @@ class PackageSettings:
78
79
  sourceConcurrencyManagerNamespace: str = dataclasses.field(default='concurrencyManager', metadata={'evaluateWhen': 'packaging'})
79
80
  sourceConcurrencyPackage: str = dataclasses.field(default='multiprocessing', metadata={'evaluateWhen': 'packaging'})
80
81
 
81
- @property # These are not fields, and that annoys me.
82
- def dataclassInstanceTaskDistribution(self) -> str:
83
- """ During parallel computation, this identifier helps to create deep copies of the dataclass instance. """
84
- # it follows that `metadata={'evaluateWhen': 'packaging'}`
85
- return self.dataclassInstance + self.dataclassInstanceTaskDistributionSuffix
86
-
87
- @property # These are not fields, and that annoys me.
88
- def logicalPathModuleDataclass(self) -> str:
89
- """ The package.module.name logical path to the dataclass. """
90
- # it follows that `metadata={'evaluateWhen': 'packaging'}`
91
- return '.'.join([self.packageName, self.dataclassModule])
92
-
93
- @property # These are not fields, and that annoys me.
94
- def logicalPathModuleSourceAlgorithm(self) -> str:
95
- """ The package.module.name logical path to the source algorithm. """
96
- # it follows that `metadata={'evaluateWhen': 'packaging'}`
97
- return '.'.join([self.packageName, self.sourceAlgorithm])
98
-
99
- @property # These are not fields, and that annoys me.
82
+ dataclassInstanceTaskDistribution: str = dataclasses.field(init=False, metadata={'evaluateWhen': 'packaging'})
83
+ """ During parallel computation, this identifier helps to create deep copies of the dataclass instance. """
84
+ logicalPathModuleDataclass: str = dataclasses.field(init=False)
85
+ """ The package.module.name logical path to the dataclass. """
86
+ logicalPathModuleSourceAlgorithm: str = dataclasses.field(init=False)
87
+ """ The package.module.name logical path to the source algorithm. """
88
+
89
+ @property # This is not a field, and that annoys me.
100
90
  def dispatcher(self) -> Callable[['ComputationState'], 'ComputationState']:
101
91
  """ _The_ callable that connects `countFolds` to the logic that does the work."""
102
92
  logicalPath: str = self.logicalPathModuleDispatcher or self.logicalPathModuleSourceAlgorithm
@@ -104,6 +94,12 @@ class PackageSettings:
104
94
  moduleImported: ModuleType = importlib_import_module(logicalPath)
105
95
  return getattr(moduleImported, identifier)
106
96
 
97
+ def __post_init__(self) -> None:
98
+ self.dataclassInstanceTaskDistribution = self.dataclassInstance + self.dataclassInstanceTaskDistributionSuffix
99
+
100
+ self.logicalPathModuleDataclass = '.'.join([self.packageName, self.dataclassModule])
101
+ self.logicalPathModuleSourceAlgorithm = '.'.join([self.packageName, self.sourceAlgorithm])
102
+
107
103
  The = PackageSettings(logicalPathModuleDispatcher=logicalPathModuleDispatcherHARDCODED, callableDispatcher=callableDispatcherHARDCODED, concurrencyPackage=concurrencyPackageHARDCODED)
108
104
 
109
105
  # To remove this function, I need to learn how to change "conftest.py" to patch this.
@@ -118,6 +114,8 @@ def getPackageDispatcher() -> Callable[['ComputationState'], 'ComputationState']
118
114
  # Flexible Data Structure System Needs Enhanced Paradigm https://github.com/hunterhogan/mapFolding/issues/9
119
115
  # Efficient translation of Python scalar types to Numba types https://github.com/hunterhogan/mapFolding/issues/8
120
116
 
117
+ numpyIntegerType = TypeVar('numpyIntegerType', bound=integer[Any], covariant=True)
118
+
121
119
  DatatypeLeavesTotal: TypeAlias = int
122
120
  NumPyLeavesTotal: TypeAlias = numpy_int16 # this would be uint8, but mapShape (2,2,2,2, 2,2,2,2) has 256 leaves, so generic containers must accommodate at least 256 leaves
123
121
 
@@ -139,7 +137,7 @@ class ComputationState:
139
137
  taskDivisions: DatatypeLeavesTotal
140
138
  concurrencyLimit: DatatypeElephino
141
139
 
142
- connectionGraph: Array3D = dataclasses.field(init=False, metadata={'dtype': Array3D.__args__[1].__args__[0]}) # pyright: ignore[reportAttributeAccessIssue]
140
+ connectionGraph: Array3D = dataclasses.field(init=False, metadata={'dtype': Array3D.__args__[1].__args__[0]}) # pyright: ignore[reportUnknownMemberType, reportAttributeAccessIssue]
143
141
  dimensionsTotal: DatatypeLeavesTotal = dataclasses.field(init=False)
144
142
 
145
143
  countDimensionsGapped: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # type: ignore[arg-type, reportAssignmentType]
@@ -162,24 +160,24 @@ class ComputationState:
162
160
  taskIndex: DatatypeLeavesTotal = DatatypeLeavesTotal(0)
163
161
 
164
162
  def __post_init__(self) -> None:
165
- from mapFolding.beDRY import makeConnectionGraph, makeDataContainer
163
+ from mapFolding.beDRY import getConnectionGraph, makeDataContainer
166
164
  self.dimensionsTotal = DatatypeLeavesTotal(len(self.mapShape))
167
165
  leavesTotalAsInt = int(self.leavesTotal)
168
- self.connectionGraph = makeConnectionGraph(self.mapShape, leavesTotalAsInt, self.__dataclass_fields__['connectionGraph'].metadata['dtype'])
166
+ self.connectionGraph = getConnectionGraph(self.mapShape, leavesTotalAsInt, self.__dataclass_fields__['connectionGraph'].metadata['dtype'])
169
167
 
170
- if self.dimensionsUnconstrained is None:
168
+ if self.dimensionsUnconstrained is None: # type: ignore
171
169
  self.dimensionsUnconstrained = DatatypeLeavesTotal(int(self.dimensionsTotal))
172
170
 
173
- if self.foldGroups is None:
171
+ if self.foldGroups is None: # type: ignore
174
172
  self.foldGroups = makeDataContainer(max(2, int(self.taskDivisions) + 1), self.__dataclass_fields__['foldGroups'].metadata['dtype'])
175
173
  self.foldGroups[-1] = self.leavesTotal
176
174
 
177
- if self.gapsWhere is None: self.gapsWhere = makeDataContainer(leavesTotalAsInt * leavesTotalAsInt + 1, self.__dataclass_fields__['gapsWhere'].metadata['dtype'])
175
+ if self.gapsWhere is None: self.gapsWhere = makeDataContainer(leavesTotalAsInt * leavesTotalAsInt + 1, self.__dataclass_fields__['gapsWhere'].metadata['dtype']) # type: ignore
178
176
 
179
- if self.countDimensionsGapped is None: self.countDimensionsGapped = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['countDimensionsGapped'].metadata['dtype'])
180
- if self.gapRangeStart is None: self.gapRangeStart = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['gapRangeStart'].metadata['dtype'])
181
- if self.leafAbove is None: self.leafAbove = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafAbove'].metadata['dtype'])
182
- if self.leafBelow is None: self.leafBelow = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafBelow'].metadata['dtype'])
177
+ if self.countDimensionsGapped is None: self.countDimensionsGapped = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['countDimensionsGapped'].metadata['dtype']) # type: ignore
178
+ if self.gapRangeStart is None: self.gapRangeStart = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['gapRangeStart'].metadata['dtype']) # type: ignore
179
+ if self.leafAbove is None: self.leafAbove = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafAbove'].metadata['dtype']) # type: ignore
180
+ if self.leafBelow is None: self.leafBelow = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafBelow'].metadata['dtype']) # type: ignore
183
181
 
184
182
  def getFoldsTotal(self) -> None:
185
183
  self.foldsTotal = DatatypeFoldsTotal(self.foldGroups[0:-1].sum() * self.leavesTotal)