mapFolding 0.8.3-py3-none-any.whl → 0.8.5-py3-none-any.whl

This diff compares the contents of two package versions that were publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
Files changed (38)
  1. mapFolding/__init__.py +6 -3
  2. mapFolding/basecamp.py +13 -7
  3. mapFolding/beDRY.py +241 -68
  4. mapFolding/oeis.py +4 -4
  5. mapFolding/reference/hunterNumba.py +1 -1
  6. mapFolding/someAssemblyRequired/__init__.py +40 -20
  7. mapFolding/someAssemblyRequired/_theTypes.py +53 -0
  8. mapFolding/someAssemblyRequired/_tool_Make.py +99 -0
  9. mapFolding/someAssemblyRequired/_tool_Then.py +72 -0
  10. mapFolding/someAssemblyRequired/_toolboxAntecedents.py +358 -0
  11. mapFolding/someAssemblyRequired/_toolboxContainers.py +334 -0
  12. mapFolding/someAssemblyRequired/_toolboxPython.py +62 -0
  13. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +2 -2
  14. mapFolding/someAssemblyRequired/newInliner.py +22 -0
  15. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +158 -0
  16. mapFolding/someAssemblyRequired/toolboxNumba.py +358 -0
  17. mapFolding/someAssemblyRequired/transformationTools.py +289 -698
  18. mapFolding/syntheticModules/numbaCount_doTheNeedful.py +36 -33
  19. mapFolding/theDao.py +13 -11
  20. mapFolding/theSSOT.py +83 -128
  21. mapFolding/toolboxFilesystem.py +219 -0
  22. {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/METADATA +4 -2
  23. mapfolding-0.8.5.dist-info/RECORD +48 -0
  24. {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/WHEEL +1 -1
  25. tests/conftest.py +56 -52
  26. tests/test_computations.py +42 -32
  27. tests/test_filesystem.py +4 -4
  28. tests/test_other.py +2 -2
  29. tests/test_tasks.py +2 -2
  30. mapFolding/filesystem.py +0 -129
  31. mapFolding/someAssemblyRequired/ingredientsNumba.py +0 -206
  32. mapFolding/someAssemblyRequired/synthesizeNumbaFlow.py +0 -211
  33. mapFolding/someAssemblyRequired/synthesizeNumbaJobVESTIGIAL.py +0 -413
  34. mapFolding/someAssemblyRequired/transformDataStructures.py +0 -168
  35. mapfolding-0.8.3.dist-info/RECORD +0 -43
  36. {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/entry_points.txt +0 -0
  37. {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/licenses/LICENSE +0 -0
  38. {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/top_level.txt +0 -0
mapFolding/someAssemblyRequired/synthesizeNumbaJobVESTIGIAL.py (deleted)
@@ -1,413 +0,0 @@
- """Synthesize one file to compute `foldsTotal` of `mapShape`."""
- from collections.abc import Sequence
- from typing import Any, cast, TYPE_CHECKING
- from mapFolding.filesystem import getFilenameFoldsTotal, getPathFilenameFoldsTotal
- from mapFolding.someAssemblyRequired import ( ifThis, LedgerOfImports, Make, NodeReplacer, Then, )
- from mapFolding.theSSOT import ( ComputationState, raiseIfNoneGitHubIssueNumber3, getPathJobRootDEFAULT, )
- from os import PathLike
- from pathlib import Path
- from types import ModuleType
- from Z0Z_tools import autoDecodingRLE
- import ast
- import python_minifier
- import autoflake
- import copy
- import inspect
- import numpy
- if TYPE_CHECKING:
- from mapFolding.someAssemblyRequired.transformDataStructures import makeStateJobOUTDATED
- from mapFolding.someAssemblyRequired.ingredientsNumba import thisIsNumbaDotJit, decorateCallableWithNumba
- from mapFolding.someAssemblyRequired.ingredientsNumba import ParametersNumba, parametersNumbaDEFAULT
-
- def Z0Z_gamma(FunctionDefTarget: ast.FunctionDef, astAssignee: ast.Name, statement: ast.Assign | ast.stmt, identifier: str, arrayTarget: numpy.ndarray[tuple[int, ...], numpy.dtype[numpy.integer[Any]]], allImports: LedgerOfImports) -> tuple[ast.FunctionDef, LedgerOfImports]:
- arrayType = type(arrayTarget)
- moduleConstructor: str = arrayType.__module__
- constructorName: str = arrayType.__name__.replace('ndarray', 'array') # NOTE hack
- dataAsStrRLE: str = autoDecodingRLE(arrayTarget, addSpaces=True)
- dataAs_astExpr: ast.expr = cast(ast.Expr, ast.parse(dataAsStrRLE).body[0]).value
- dtypeName: str = identifier
- dtypeAsName: str = f"{moduleConstructor}_{dtypeName}"
- list_astKeywords: list[ast.keyword] = [ast.keyword(arg='dtype', value=ast.Name(id=dtypeAsName, ctx=ast.Load()))]
- allImports.addImportFromStr(moduleConstructor, dtypeName, dtypeAsName)
- astCall: ast.Call = Make.astCall(Make.astName(constructorName), [dataAs_astExpr], list_astKeywords)
- assignment = ast.Assign(targets=[astAssignee], value=astCall)
- FunctionDefTarget.body.insert(0, assignment)
- FunctionDefTarget.body.remove(statement)
- allImports.addImportFromStr(moduleConstructor, constructorName)
- return FunctionDefTarget, allImports
-
- def insertArrayIn_body(FunctionDefTarget: ast.FunctionDef, identifier: str, arrayTarget: numpy.ndarray[tuple[int, ...], numpy.dtype[numpy.integer[Any]]], allImports: LedgerOfImports, unrollSlices: int | None = None) -> tuple[ast.FunctionDef, LedgerOfImports]:
- def insertAssign(FunctionDefTarget: ast.FunctionDef, assignee: str, arraySlice: numpy.ndarray[tuple[int, ...], numpy.dtype[numpy.integer[Any]]], allImports: LedgerOfImports) -> tuple[ast.FunctionDef, LedgerOfImports]:
- statement = ast.Assign(targets=[ast.Name(id='beans', ctx=ast.Load())], value=ast.Constant(value='and cornbread'))
- FunctionDefTarget.body.insert(0, statement)
- astAssignee = ast.Name(id=assignee, ctx=ast.Store())
- return Z0Z_gamma(FunctionDefTarget, astAssignee, statement, identifier, arraySlice, allImports)
-
- if not unrollSlices:
- FunctionDefTarget, allImports = insertAssign(FunctionDefTarget, identifier, arrayTarget, allImports)
- else:
- for index, arraySlice in enumerate(arrayTarget):
- FunctionDefTarget, allImports = insertAssign(FunctionDefTarget, f"{identifier}_{index}", arraySlice, allImports)
-
- return FunctionDefTarget, allImports
-
- def findAndReplaceTrackArrayIn_body(FunctionDefTarget: ast.FunctionDef, identifier: str, arrayTarget: numpy.ndarray[tuple[int, ...], numpy.dtype[numpy.integer[Any]]], allImports: LedgerOfImports) -> tuple[ast.FunctionDef, LedgerOfImports]:
- for statement in FunctionDefTarget.body.copy():
- if True:
- indexAsStr: str = ast.unparse(statement.value.slice) # type: ignore
- arraySlice: numpy.ndarray[Any, numpy.dtype[numpy.integer[Any]]] = arrayTarget[eval(indexAsStr)]
- astAssignee: ast.Name = cast(ast.Name, statement.targets[0]) # type: ignore
- FunctionDefTarget, allImports = Z0Z_gamma(FunctionDefTarget, astAssignee, statement, identifier, arraySlice, allImports)
- return FunctionDefTarget, allImports
-
- def findAndReplaceArraySubscriptIn_body(FunctionDefTarget: ast.FunctionDef, identifier: str, arrayTarget: numpy.ndarray[tuple[int, ...], numpy.dtype[numpy.integer[Any]]], allImports: LedgerOfImports) -> tuple[ast.FunctionDef, LedgerOfImports]:
- # parameter: I define moduleConstructor
- moduleConstructor = 'numba'
-
- for statement in FunctionDefTarget.body.copy():
- if True:
- indexAsStr: str = ast.unparse(statement.value.slice) # type: ignore
- arraySlice: numpy.ndarray[Any, numpy.dtype[numpy.integer[Any]]] = arrayTarget[eval(indexAsStr)]
- astAssignee: ast.Name = cast(ast.Name, statement.targets[0]) # type: ignore
- arraySliceItem: int = arraySlice.item()
- constructorName: str = astAssignee.id
- dataAs_astExpr = ast.Constant(value=arraySliceItem)
- list_astKeywords: list[ast.keyword] = []
- astCall: ast.Call = Make.astCall(Make.astName(constructorName), [dataAs_astExpr], list_astKeywords)
- assignment = ast.Assign(targets=[astAssignee], value=astCall)
- FunctionDefTarget.body.insert(0, assignment)
- FunctionDefTarget.body.remove(statement)
- allImports.addImportFromStr(moduleConstructor, constructorName)
- return FunctionDefTarget, allImports
-
- def removeAssignmentFrom_body(FunctionDefTarget: ast.FunctionDef, identifier: str) -> ast.FunctionDef:
- FunctionDefSherpa: ast.AST | Sequence[ast.AST] | None = NodeReplacer(ifThis.isAnyAssignmentTo(identifier), Then.removeThis).visit(FunctionDefTarget)
- if not FunctionDefSherpa:
- raise raiseIfNoneGitHubIssueNumber3("Dude, where's my function?")
- else:
- FunctionDefTarget = cast(ast.FunctionDef, FunctionDefSherpa)
- ast.fix_missing_locations(FunctionDefTarget)
- return FunctionDefTarget
-
- def findAndReplaceAnnAssignIn_body(FunctionDefTarget: ast.FunctionDef, allImports: LedgerOfImports) -> tuple[ast.FunctionDef, LedgerOfImports]:
- """Unlike most of the other functions, this is generic: it tries to turn an annotation into a construction call."""
- moduleConstructor: str = 'numba'
- for stmt in FunctionDefTarget.body.copy():
- if isinstance(stmt, ast.AnnAssign):
- if isinstance(stmt.target, ast.Name) and isinstance(stmt.value, ast.Constant):
- astAssignee: ast.Name = stmt.target
- argData_dtypeName: str = astAssignee.id
- allImports.addImportFromStr(moduleConstructor, argData_dtypeName)
- astCall = ast.Call(func=ast.Name(id=argData_dtypeName, ctx=ast.Load()), args=[stmt.value], keywords=[])
- assignment = ast.Assign(targets=[astAssignee], value=astCall)
- FunctionDefTarget.body.insert(0, assignment)
- FunctionDefTarget.body.remove(stmt)
- return FunctionDefTarget, allImports
-
- def findThingyReplaceWithConstantIn_body(FunctionDefTarget: ast.FunctionDef, object: str, value: int) -> ast.FunctionDef:
- """
- Replaces nodes in astFunction matching the AST of the string `object`
- with a constant node holding the provided value.
- """
- targetExpression: ast.expr = ast.parse(object, mode='eval').body
- targetDump: str = ast.dump(targetExpression, annotate_fields=False)
-
- def findNode(node: ast.AST) -> bool:
- return ast.dump(node, annotate_fields=False) == targetDump
-
- def replaceWithConstant(node: ast.AST) -> ast.AST:
- return ast.copy_location(ast.Constant(value=value), node)
-
- transformer = NodeReplacer(findNode, replaceWithConstant)
- newFunction: ast.FunctionDef = cast(ast.FunctionDef, transformer.visit(FunctionDefTarget))
- ast.fix_missing_locations(newFunction)
- return newFunction
-
- def findAstNameReplaceWithConstantIn_body(FunctionDefTarget: ast.FunctionDef, name: str, value: int) -> ast.FunctionDef:
- def replaceWithConstant(node: ast.AST) -> ast.AST:
- return ast.copy_location(ast.Constant(value=value), node)
-
- return cast(ast.FunctionDef, NodeReplacer(ifThis.isName_Identifier(name), replaceWithConstant).visit(FunctionDefTarget))
-
- def insertReturnStatementIn_body(FunctionDefTarget: ast.FunctionDef, arrayTarget: numpy.ndarray[tuple[int, ...], numpy.dtype[numpy.integer[Any]]], allImports: LedgerOfImports) -> tuple[ast.FunctionDef, LedgerOfImports]:
- """Add multiplication and return statement to function, properly constructing AST nodes."""
- # Create AST for multiplication operation
- multiplicand = 'Z0Z_identifierCountFolds'
- multiplyOperation = ast.BinOp(
- left=ast.Name(id=multiplicand, ctx=ast.Load()),
- op=ast.Mult(), right=ast.Constant(value=int(arrayTarget[-1])))
-
- returnStatement = ast.Return(value=multiplyOperation)
-
- datatype: str = 'Z0Z_identifierCountFolds'
- FunctionDefTarget.returns = ast.Name(id=datatype, ctx=ast.Load())
- datatypeModuleScalar: str = 'numba'
- allImports.addImportFromStr(datatypeModuleScalar, datatype)
-
- FunctionDefTarget.body.append(returnStatement)
-
- return FunctionDefTarget, allImports
-
- def findAndReplaceWhileLoopIn_body(FunctionDefTarget: ast.FunctionDef, iteratorName: str, iterationsTotal: int) -> ast.FunctionDef:
- """
- Unroll all nested while loops matching the condition that their test uses `iteratorName`.
- """
- # Helper transformer to replace iterator occurrences with a constant.
- class ReplaceIterator(ast.NodeTransformer):
- def __init__(self, iteratorName: str, constantValue: int) -> None:
- super().__init__()
- self.iteratorName: str = iteratorName
- self.constantValue: int = constantValue
-
- def visit_Name(self, node: ast.Name) -> ast.AST:
- if node.id == self.iteratorName:
- return ast.copy_location(ast.Constant(value=self.constantValue), node)
- return self.generic_visit(node)
-
- # NodeTransformer that finds while loops (even if deeply nested) and unrolls them.
- class WhileLoopUnroller(ast.NodeTransformer):
- def __init__(self, iteratorName: str, iterationsTotal: int) -> None:
- super().__init__()
- self.iteratorName: str = iteratorName
- self.iterationsTotal: int = iterationsTotal
-
- def visit_While(self, node: ast.While) -> list[ast.stmt]:
- # Check if the while loop's test uses the iterator.
- if isinstance(node.test, ast.Compare) and ifThis.isName_Identifier(self.iteratorName)(node.test.left):
- # Recurse the while loop body and remove AugAssign that increments the iterator.
- cleanBodyStatements: list[ast.stmt] = []
- for loopStatement in node.body:
- # Recursively visit nested statements.
- visitedStatement = self.visit(loopStatement)
- # Remove direct AugAssign: iterator += 1.
- if (isinstance(loopStatement, ast.AugAssign) and
- isinstance(loopStatement.target, ast.Name) and
- loopStatement.target.id == self.iteratorName and
- isinstance(loopStatement.op, ast.Add) and
- isinstance(loopStatement.value, ast.Constant) and
- loopStatement.value.value == 1):
- continue
- cleanBodyStatements.append(visitedStatement)
-
- newStatements: list[ast.stmt] = []
- # Unroll using the filtered body.
- for iterationIndex in range(self.iterationsTotal):
- for loopStatement in cleanBodyStatements:
- copiedStatement: ast.stmt = copy.deepcopy(loopStatement)
- replacer = ReplaceIterator(self.iteratorName, iterationIndex)
- newStatement = replacer.visit(copiedStatement)
- ast.fix_missing_locations(newStatement)
- newStatements.append(newStatement)
- # Optionally, process the orelse block.
- if node.orelse:
- for elseStmt in node.orelse:
- visitedElse = self.visit(elseStmt)
- if isinstance(visitedElse, list):
- newStatements.extend(cast(list[ast.stmt], visitedElse))
- else:
- newStatements.append(visitedElse)
- return newStatements
- return [cast(ast.stmt, self.generic_visit(node))]
-
- newFunctionDef = WhileLoopUnroller(iteratorName, iterationsTotal).visit(FunctionDefTarget)
- ast.fix_missing_locations(newFunctionDef)
- return newFunctionDef
-
- def makeLauncherTqdmJobNumba(callableTarget: str, pathFilenameFoldsTotal: Path, totalEstimated: int, leavesTotal:int) -> ast.Module:
- linesLaunch: str = f"""
- if __name__ == '__main__':
- with ProgressBar(total={totalEstimated}, update_interval=2) as statusUpdate:
- {callableTarget}(statusUpdate)
- foldsTotal = statusUpdate.n * {leavesTotal}
- print("", foldsTotal)
- writeStream = open('{pathFilenameFoldsTotal.as_posix()}', 'w')
- writeStream.write(str(foldsTotal))
- writeStream.close()
- """
- return ast.parse(linesLaunch)
-
- def makeLauncherBasicJobNumba(callableTarget: str, pathFilenameFoldsTotal: Path) -> ast.Module:
- linesLaunch: str = f"""
- if __name__ == '__main__':
- import time
- timeStart = time.perf_counter()
- foldsTotal = {callableTarget}()
- print(foldsTotal, time.perf_counter() - timeStart)
- writeStream = open('{pathFilenameFoldsTotal.as_posix()}', 'w')
- writeStream.write(str(foldsTotal))
- writeStream.close()
- """
- return ast.parse(linesLaunch)
-
- def doUnrollCountGaps(FunctionDefTarget: ast.FunctionDef, stateJob: ComputationState, allImports: LedgerOfImports) -> tuple[ast.FunctionDef, LedgerOfImports]:
- """The initial results were very bad."""
- FunctionDefTarget = findAndReplaceWhileLoopIn_body(FunctionDefTarget, 'indexDimension', stateJob.dimensionsTotal)
- FunctionDefTarget = removeAssignmentFrom_body(FunctionDefTarget, 'indexDimension')
- FunctionDefTarget = removeAssignmentFrom_body(FunctionDefTarget, 'connectionGraph')
- FunctionDefTarget, allImports = insertArrayIn_body(FunctionDefTarget, 'connectionGraph', stateJob.connectionGraph, allImports, stateJob.dimensionsTotal)
- for index in range(stateJob.dimensionsTotal):
- class ReplaceConnectionGraph(ast.NodeTransformer):
- def visit_Subscript(self, node: ast.Subscript) -> ast.AST:
- node = cast(ast.Subscript, self.generic_visit(node))
- if (isinstance(node.value, ast.Name) and node.value.id == "connectionGraph" and
- isinstance(node.slice, ast.Tuple) and len(node.slice.elts) >= 1):
- firstElement: ast.expr = node.slice.elts[0]
- if isinstance(firstElement, ast.Constant) and firstElement.value == index:
- newName = ast.Name(id=f"connectionGraph_{index}", ctx=ast.Load())
- remainingIndices: list[ast.expr] = node.slice.elts[1:]
- if len(remainingIndices) == 1:
- newSlice: ast.expr = remainingIndices[0]
- else:
- newSlice = ast.Tuple(elts=remainingIndices, ctx=ast.Load())
- return ast.copy_location(ast.Subscript(value=newName, slice=newSlice, ctx=node.ctx), node)
- return node
- transformer = ReplaceConnectionGraph()
- FunctionDefTarget = transformer.visit(FunctionDefTarget)
- return FunctionDefTarget, allImports
-
- def writeJobNumba(mapShape: Sequence[int], algorithmSource: ModuleType, callableTarget: str | None = None, parametersNumba: ParametersNumba | None = None, pathFilenameWriteJob: str | PathLike[str] | None = None, unrollCountGaps: bool | None = False, Z0Z_totalEstimated: int = 0, **keywordArguments: Any | None) -> Path:
- """ Parameters: **keywordArguments: most especially for `computationDivisions` if you want to make a parallel job. Also `CPUlimit`.
- Notes:
- Hypothetically, everything can now be configured with parameters and functions. And changing how the job is written is relatively easy.
-
- Overview
- - the code starts life in theDao.py, which has many optimizations; `makeNumbaOptimizedFlow` increase optimization especially by using numba; `writeJobNumba` increases optimization especially by limiting its capabilities to just one set of parameters
- - the synthesized module must run well as a standalone interpreted-Python script
- - the next major optimization step will (probably) be to use the module synthesized by `writeJobNumba` to compile a standalone executable
- - Nevertheless, at each major optimization step, the code is constantly being improved and optimized, so everything must be well organized and able to handle upstream and downstream changes
-
- Minutia
- - perf_counter is for testing. When I run a real job, I delete those lines
- - avoid `with` statement
-
- Necessary
- - Move the function's parameters to the function body,
- - initialize identifiers with their state types and values,
-
- Optimizations
- - replace static-valued identifiers with their values
- - narrowly focused imports
- """
-
- # NOTE get the raw ingredients: data and the algorithm
- stateJob = makeStateJobOUTDATED(mapShape, writeJob=False, **keywordArguments)
- pythonSource: str = inspect.getsource(algorithmSource)
- astModule: ast.Module = ast.parse(pythonSource)
- setFunctionDef: set[ast.FunctionDef] = {statement for statement in astModule.body if isinstance(statement, ast.FunctionDef)}
-
- if not callableTarget:
- if len(setFunctionDef) == 1:
- FunctionDefTarget = setFunctionDef.pop()
- callableTarget = FunctionDefTarget.name
- else:
- raise ValueError(f"I did not receive a `callableTarget` and {algorithmSource.__name__=} has more than one callable: {setFunctionDef}. Please select one.")
- else:
- listFunctionDefTarget: list[ast.FunctionDef] = [statement for statement in setFunctionDef if statement.name == callableTarget]
- FunctionDefTarget = listFunctionDefTarget[0] if listFunctionDefTarget else None # type: ignore
- if not FunctionDefTarget: raise ValueError(f"I received `{callableTarget=}` and {algorithmSource.__name__=}, but I could not find that function in that source.")
-
- # NOTE `allImports` is a complementary container to `FunctionDefTarget`; the `FunctionDefTarget` cannot track its own imports very well.
- allImports = LedgerOfImports(astModule)
-
- # NOTE remove the parameters from the function signature
- for pirateScowl in FunctionDefTarget.args.args.copy():
- match pirateScowl.arg:
- case 'connectionGraph':
- FunctionDefTarget, allImports = insertArrayIn_body(FunctionDefTarget, pirateScowl.arg, stateJob.connectionGraph, allImports)
- case 'gapsWhere':
- FunctionDefTarget, allImports = insertArrayIn_body(FunctionDefTarget, pirateScowl.arg, stateJob.gapsWhere, allImports)
- case 'foldGroups':
- FunctionDefTarget = removeAssignmentFrom_body(FunctionDefTarget, pirateScowl.arg)
- case _:
- pass
- FunctionDefTarget.args.args.remove(pirateScowl)
-
- identifierCounter = 'Z0Z_identifierCountFolds'
- astExprIncrementCounter = ast.Expr(value = Make.astCall(Make.nameDOTname(identifierCounter, 'update'), listArguments=[ast.Constant(value=1)], list_astKeywords=[]))
- FunctionDefTarget= cast(ast.FunctionDef, NodeReplacer(ifThis.isAugAssignTo(identifierCounter), Then.replaceWith(astExprIncrementCounter)).visit(FunctionDefTarget))
- ast.fix_missing_locations(FunctionDefTarget)
-
- for assignmentTarget in ['taskIndex', 'dimensionsTotal', identifierCounter]:
- FunctionDefTarget = removeAssignmentFrom_body(FunctionDefTarget, assignmentTarget)
- # NOTE replace identifiers with static values with their values
- FunctionDefTarget = findAstNameReplaceWithConstantIn_body(FunctionDefTarget, 'dimensionsTotal', int(stateJob.dimensionsTotal))
- FunctionDefTarget = findThingyReplaceWithConstantIn_body(FunctionDefTarget, 'foldGroups[-1]', int(stateJob.foldGroups[-1]))
-
- # NOTE an attempt at optimization
- if unrollCountGaps:
- FunctionDefTarget, allImports = doUnrollCountGaps(FunctionDefTarget, stateJob, allImports)
-
- # NOTE starting the count and printing the total
- pathFilenameFoldsTotal: Path = getPathFilenameFoldsTotal(stateJob.mapShape)
-
- astLauncher: ast.Module = makeLauncherBasicJobNumba(FunctionDefTarget.name, pathFilenameFoldsTotal)
-
- # TODO create function for assigning value to `totalEstimated`
- totalEstimated: int = Z0Z_totalEstimated
- astLauncher = makeLauncherTqdmJobNumba(FunctionDefTarget.name, pathFilenameFoldsTotal, totalEstimated, stateJob.foldGroups[-1])
-
- allImports.addImportFromStr('numba_progress', 'ProgressBar')
- allImports.addImportFromStr('numba_progress', 'ProgressBarType')
-
- # add ProgressBarType parameter to function args
- counterArg = ast.arg(arg=identifierCounter, annotation=ast.Name(id='ProgressBarType', ctx=ast.Load()))
- FunctionDefTarget.args.args.append(counterArg)
-
- if parametersNumba is None:
- parametersNumba = parametersNumbaDEFAULT
- parametersNumba['nogil'] = True
-
- FunctionDefTarget, allImports = insertReturnStatementIn_body(FunctionDefTarget, stateJob.foldGroups, allImports)
-
- FunctionDefTarget, allImports = findAndReplaceAnnAssignIn_body(FunctionDefTarget, allImports)
- # NOTE add the perfect decorator
- if thisIsNumbaDotJit(FunctionDefTarget.decorator_list[0]):
- astCall: ast.Call = cast(ast.Call, FunctionDefTarget.decorator_list[0])
- astCall.func = ast.Name(id='jit', ctx=ast.Load())
- FunctionDefTarget.decorator_list[0] = astCall
-
- # NOTE add imports, make str, remove unused imports
- astImports: list[ast.ImportFrom | ast.Import] = allImports.makeListAst()
- astModule = ast.Module(body=cast(list[ast.stmt], astImports + [FunctionDefTarget] + [astLauncher]), type_ignores=[])
- ast.fix_missing_locations(astModule)
- pythonSource = ast.unparse(astModule)
- pythonSource = autoflake.fix_code(pythonSource, ['mapFolding', 'numba', 'numpy'])
- pythonSource = python_minifier.minify(pythonSource, remove_annotations = False, remove_pass = False, remove_literal_statements = False, combine_imports = True, hoist_literals = False, rename_locals = False, rename_globals = False, remove_object_base = False, convert_posargs_to_args = False, preserve_shebang = True, remove_asserts = False, remove_debug = False, remove_explicit_return_none = False, remove_builtin_exception_brackets = False, constant_folding = False)
-
- # NOTE put on disk
- if pathFilenameWriteJob is None:
- filename: str = getFilenameFoldsTotal(stateJob.mapShape)
- pathRoot: Path = getPathJobRootDEFAULT()
- pathFilenameWriteJob = Path(pathRoot, Path(filename).stem, Path(filename).with_suffix('.py'))
- else:
- pathFilenameWriteJob = Path(pathFilenameWriteJob)
- pathFilenameWriteJob.parent.mkdir(parents=True, exist_ok=True)
-
- pathFilenameWriteJob.write_text(pythonSource)
-
- return pathFilenameWriteJob
-
- if __name__ == '__main__':
- mapShape: list[int] = [5,5]
- dictionaryEstimates: dict[tuple[int, ...], int] = {
- (2,2,2,2,2,2,2,2): 362794844160000,
- (2,21): 1493028892051200,
- (3,15): 9842024675968800,
- (3,3,3,3,3): 85109616000000,
- (3,3,3,3): 85109616000,
- (8,8): 129950723279272000,
- }
-
- totalEstimated: int = dictionaryEstimates.get(tuple(mapShape), 10**8)
- from mapFolding.syntheticModules import numbaCount_doTheNeedful
- algorithmSource: ModuleType = numbaCount_doTheNeedful
-
- callableTarget = 'countSequential'
-
- parametersNumba: ParametersNumba = parametersNumbaDEFAULT
- parametersNumba['nogil'] = True
- parametersNumba['boundscheck'] = False
-
- pathFilenameWriteJob = None
-
- writeJobNumba(mapShape, algorithmSource, callableTarget, parametersNumba, pathFilenameWriteJob, Z0Z_totalEstimated=totalEstimated)
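
Note: the module deleted above specialized a general counting function by editing its AST, replacing identifiers that hold static values with literal constants (and optionally unrolling loops) before unparsing the result to a standalone script. The following sketch is not from the package; it only illustrates that substitution pattern, in the spirit of findAstNameReplaceWithConstantIn_body, using nothing but the standard-library ast module and made-up identifiers.

import ast

sourceCode = """
def countFolds(dimensionsTotal):
    total = 0
    for indexDimension in range(dimensionsTotal):
        total += indexDimension
    return total
"""

class ReplaceNameWithConstant(ast.NodeTransformer):
    """Swap every load of `identifier` for a literal constant."""
    def __init__(self, identifier: str, value: int) -> None:
        self.identifier = identifier
        self.value = value

    def visit_Name(self, node: ast.Name) -> ast.AST:
        if node.id == self.identifier and isinstance(node.ctx, ast.Load):
            return ast.copy_location(ast.Constant(value=self.value), node)
        return node

astModule = ast.parse(sourceCode)
astModule = ReplaceNameWithConstant('dimensionsTotal', 2).visit(astModule)
ast.fix_missing_locations(astModule)
print(ast.unparse(astModule))  # the loop bound is now the constant 2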
mapFolding/someAssemblyRequired/transformDataStructures.py (deleted)
@@ -1,168 +0,0 @@
- """
- Utilities for transforming complex data structures in Python code generation.
-
- This module provides specialized tools for working with structured data types during
- the code transformation process, with a particular focus on handling dataclasses. It
- implements functionality that enables:
-
- 1. Decomposing dataclasses into individual fields for efficient processing
- 2. Creating optimized parameter passing for transformed functions
- 3. Converting between different representations of data structures
- 4. Serializing and deserializing computation state objects
-
- The core functionality revolves around the "shattering" process that breaks down
- a dataclass into its constituent components, making each field individually accessible
- for code generation and optimization purposes. This dataclass handling is critical for
- transforming algorithms that operate on unified state objects into optimized implementations
- that work with primitive types directly.
-
- While developed for transforming map folding computation state objects, the utilities are
- designed to be applicable to various data structure transformation scenarios.
- """
-
- from collections.abc import Sequence
- from importlib import import_module as importlib_import_module
- from inspect import getsource as inspect_getsource
- from mapFolding.beDRY import outfitCountFolds, validateListDimensions
- from mapFolding.filesystem import getPathFilenameFoldsTotal
- from mapFolding.someAssemblyRequired import (
- ast_Identifier,
- extractClassDef,
- ifThis,
- LedgerOfImports,
- Make,
- NodeCollector,
- strDotStrCuzPyStoopid,
- Then,
- Z0Z_executeActionUnlessDescendantMatches,
- )
- from mapFolding.theSSOT import ComputationState, The
- from pathlib import Path
- from types import ModuleType
- from typing import Any, Literal, overload
- import ast
- import dataclasses
- import pickle
-
- # Would `LibCST` be better than `ast` in some cases? https://github.com/hunterhogan/mapFolding/issues/7
-
- countingIdentifierHARDCODED = 'groupsOfFolds'
-
- @dataclasses.dataclass
- class ShatteredDataclass:
- astAssignDataclassRepack: ast.Assign
- astSubscriptPrimitiveTupleAnnotations4FunctionDef_returns: ast.Subscript
- astTuple4AssignTargetsToFragments: ast.Tuple
- countingVariableAnnotation: ast.expr
- countingVariableName: ast.Name
- ledgerDataclassANDFragments: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
- list_ast_argAnnotated4ArgumentsSpecification: list[ast.arg] = dataclasses.field(default_factory=list)
- list_keyword4DataclassInitialization: list[ast.keyword] = dataclasses.field(default_factory=list)
- listAnnAssign4DataclassUnpack: list[ast.AnnAssign] = dataclasses.field(default_factory=list)
- listAnnotations: list[ast.expr] = dataclasses.field(default_factory=list)
- listNameDataclassFragments4Parameters: list[ast.Name] = dataclasses.field(default_factory=list)
-
- def shatter_dataclassesDOTdataclass(logicalPathModule: strDotStrCuzPyStoopid, dataclass_Identifier: ast_Identifier, instance_Identifier: ast_Identifier) -> ShatteredDataclass:
- """
- Parameters:
- logicalPathModule: gimme string cuz python is stoopid
- dataclass_Identifier: The identifier of the dataclass to be dismantled.
- instance_Identifier: In the synthesized module/function/scope, the identifier that will be used for the instance.
- """
- # TODO learn whether dataclasses.make_dataclass would be useful to transform the target dataclass into the `ShatteredDataclass`
-
- module: ast.Module = ast.parse(inspect_getsource(importlib_import_module(logicalPathModule)))
- astName_dataclassesDOTdataclass = Make.astName(dataclass_Identifier)
-
- dataclass = extractClassDef(module, dataclass_Identifier)
- if not isinstance(dataclass, ast.ClassDef):
- raise ValueError(f"I could not find {dataclass_Identifier=} in {logicalPathModule=}.")
-
- ledgerDataclassANDFragments = LedgerOfImports()
- list_ast_argAnnotated4ArgumentsSpecification: list[ast.arg] = []
- list_keyword4DataclassInitialization: list[ast.keyword] = []
- listAnnAssign4DataclassUnpack: list[ast.AnnAssign] = []
- listAnnotations: list[ast.expr] = []
- listNameDataclassFragments4Parameters: list[ast.Name] = []
-
- # TODO get the value from `groupsOfFolds: DatatypeFoldsTotal = dataclasses.field(default=DatatypeFoldsTotal(0), metadata={'theCountingIdentifier': True})`
- countingVariable = countingIdentifierHARDCODED
-
- addToLedgerPredicate = ifThis.isAnnAssignAndAnnotationIsName
- addToLedgerAction = Then.Z0Z_ledger(logicalPathModule, ledgerDataclassANDFragments)
- addToLedger = NodeCollector(addToLedgerPredicate, [addToLedgerAction])
-
- exclusionPredicate = ifThis.is_keyword_IdentifierEqualsConstantValue('init', False)
- appendKeywordAction = Then.Z0Z_appendKeywordMirroredTo(list_keyword4DataclassInitialization)
- filteredAppendKeywordAction = Z0Z_executeActionUnlessDescendantMatches(exclusionPredicate, appendKeywordAction) # type: ignore
-
- NodeCollector(
- ifThis.isAnnAssignAndTargetIsName,
- [Then.Z0Z_appendAnnAssignOf_nameDOTnameTo(instance_Identifier, listAnnAssign4DataclassUnpack)
- , Then.append_targetTo(listNameDataclassFragments4Parameters) # type: ignore
- , lambda node: addToLedger.visit(node)
- , filteredAppendKeywordAction
- , lambda node: list_ast_argAnnotated4ArgumentsSpecification.append(Make.ast_arg(node.target.id, node.annotation)) # type: ignore
- , lambda node: listAnnotations.append(node.annotation) # type: ignore
- ]
- ).visit(dataclass)
-
- shatteredDataclass = ShatteredDataclass(
- astAssignDataclassRepack = Make.astAssign(listTargets=[Make.astName(instance_Identifier)], value=Make.astCall(astName_dataclassesDOTdataclass, list_astKeywords=list_keyword4DataclassInitialization))
- , astSubscriptPrimitiveTupleAnnotations4FunctionDef_returns = Make.astSubscript(Make.astName('tuple'), Make.astTuple(listAnnotations))
- , astTuple4AssignTargetsToFragments = Make.astTuple(listNameDataclassFragments4Parameters, ast.Store())
- , countingVariableAnnotation = next(ast_arg.annotation for ast_arg in list_ast_argAnnotated4ArgumentsSpecification if ast_arg.arg == countingVariable) or Make.astName('Any')
- , countingVariableName = Make.astName(countingVariable)
- , ledgerDataclassANDFragments = ledgerDataclassANDFragments
- , list_ast_argAnnotated4ArgumentsSpecification = list_ast_argAnnotated4ArgumentsSpecification
- , list_keyword4DataclassInitialization = list_keyword4DataclassInitialization
- , listAnnAssign4DataclassUnpack = listAnnAssign4DataclassUnpack
- , listAnnotations = listAnnotations
- , listNameDataclassFragments4Parameters = listNameDataclassFragments4Parameters
- )
-
- shatteredDataclass.ledgerDataclassANDFragments.addImportFromStr(logicalPathModule, dataclass_Identifier)
- return shatteredDataclass
-
- def getSourceAlgorithmVESTIGIAL() -> ModuleType:
- moduleImported: ModuleType = importlib_import_module(The.logicalPathModuleSourceAlgorithm)
- return moduleImported
-
- @overload
- def makeStateJobOUTDATED(listDimensions: Sequence[int], *, writeJob: Literal[True], **keywordArguments: Any) -> Path: ...
- @overload
- def makeStateJobOUTDATED(listDimensions: Sequence[int], *, writeJob: Literal[False], **keywordArguments: Any) -> ComputationState: ...
- def makeStateJobOUTDATED(listDimensions: Sequence[int], *, writeJob: bool = True, **keywordArguments: Any) -> ComputationState | Path:
- """
- Creates a computation state job for map folding calculations and optionally saves it to disk.
-
- This function initializes a computation state for map folding calculations based on the given dimensions,
- sets up the initial counting configuration, and can optionally save the state to a pickle file.
-
- Parameters:
- listDimensions: List of integers representing the dimensions of the map to be folded.
- writeJob (True): Whether to save the state to disk.
- **keywordArguments: Additional keyword arguments to pass to the computation state initialization.
-
- Returns:
- stateUniversal|pathFilenameJob: The computation state for the map folding calculations, or
- the path to the saved state file if writeJob is True.
- """
- mapShape = validateListDimensions(listDimensions)
- stateUniversal: ComputationState = outfitCountFolds(mapShape, **keywordArguments)
-
- moduleSource: ModuleType = getSourceAlgorithmVESTIGIAL()
- # TODO `countInitialize` is hardcoded
- stateUniversal = moduleSource.countInitialize(stateUniversal)
-
- if not writeJob:
- return stateUniversal
-
- pathFilenameChopChop = getPathFilenameFoldsTotal(stateUniversal.mapShape, None)
- suffix = pathFilenameChopChop.suffix
- pathJob = Path(str(pathFilenameChopChop)[0:-len(suffix)])
- pathJob.mkdir(parents=True, exist_ok=True)
- pathFilenameJob = pathJob / 'stateJob.pkl'
-
- pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
- return pathFilenameJob
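
Note: the "shattering" described in the docstring of the deleted module above decomposes a dataclass into per-field names, annotations, and values so that generated code can pass primitives instead of a whole state object, then repacks an instance afterwards (astAssignDataclassRepack). A minimal illustration with dataclasses.fields follows; ExampleState is a hypothetical stand-in, not the package's ComputationState.

import dataclasses

@dataclasses.dataclass
class ExampleState:
    # Hypothetical stand-in for a computation-state dataclass.
    groupsOfFolds: int = 0
    leavesTotal: int = 1

# "Shatter": one (name, annotation, default) triple per field, which a code
# generator could turn into individual function parameters.
fieldTriples = [(field.name, field.type, field.default) for field in dataclasses.fields(ExampleState)]
for name, annotation, default in fieldTriples:
    print(name, annotation, default)

# "Repack": rebuild an instance from the separated fields.
instance = ExampleState(**{name: default for name, _, default in fieldTriples})
print(instance)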
mapfolding-0.8.3.dist-info/RECORD (deleted)
@@ -1,43 +0,0 @@
- mapFolding/__init__.py,sha256=XRcskvsF7PTdsNOWWeR1FhQLA3fyBUScCqdfxYhAy8c,1998
- mapFolding/basecamp.py,sha256=uPwbb_fi8zqqBbVjb355qanSNUqqJ9aefcf_nrvA7qI,4510
- mapFolding/beDRY.py,sha256=UhH52BryHQNRjphf_PirtMkV45rhdemdC9PmnpACq7I,9397
- mapFolding/filesystem.py,sha256=-pYpWugd0p3TrAz7xf9YIJW-pn1X-iRCGtJgEAF9Rns,5923
- mapFolding/noHomeYet.py,sha256=UKZeWlyn0SKlF9dhYoud7E6gWXpiSEekZOOoJp88WeI,1362
- mapFolding/oeis.py,sha256=TbY8KtAGbQlT6eEsa_7HVMF7bMLN-aBFKclyTMHfqHk,12615
- mapFolding/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- mapFolding/theDao.py,sha256=Blzm5j24x1BE2nvgXjdzHEeuc2na6kAH9b_eP6PcwlI,9836
- mapFolding/theSSOT.py,sha256=DH0so-BVQm8RrBrqhehRC9spmb5Gg1vFegbMSeeoMcM,12236
- mapFolding/reference/__init__.py,sha256=UIEU8BJR_YDzjFQcLel3XtHzOCJiOUGlGiWzOzbvhik,2206
- mapFolding/reference/flattened.py,sha256=QK1xG9SllqCoi68e86Hyl9d9ATUAAFNpTQI-3zmcp5I,16072
- mapFolding/reference/hunterNumba.py,sha256=espFiX92EPZ1Ub1YQVoBnNYvh2kFg1HR6Qa4djx8Ixg,7253
- mapFolding/reference/irvineJavaPort.py,sha256=UEfIX4QbPLl5jnyfYIyX5YRR3_rYvPUikK8jLehsFko,4076
- mapFolding/reference/jaxCount.py,sha256=TuDNKOnyhQfuixKmIxO9Algv7dvy7KMGhgsV3h96FGE,14853
- mapFolding/reference/lunnanNumpy.py,sha256=mMgrgbrBpe4nmo72ThEI-MGH0OwEHmfMPczSXHp2qKo,4357
- mapFolding/reference/lunnanWhile.py,sha256=ZL8GAQtPs5nJZSgoDl5USrLSS_zs03y98y1Z9E4jOmQ,3799
- mapFolding/reference/rotatedEntryPoint.py,sha256=5ughpKUT2JQhoAKgoDUdYNjgWQYPGV8v-7dWEAdDmfE,10274
- mapFolding/reference/total_countPlus1vsPlusN.py,sha256=yJZAVLVdoXqHag2_N6_6CT-Q6HXBgRro-eny93-Rlpw,9307
- mapFolding/reference/jobsCompleted/__init__.py,sha256=TU93ZGUW1xEkT6d9mQFn_rp5DvRy0ZslEB2Q6MF5ZDc,2596
- mapFolding/reference/jobsCompleted/[2x19]/p2x19.py,sha256=_tvYtfzMWVo2VtUbIAieoscb4N8FFflgTdW4-ljBUuA,19626
- mapFolding/reference/jobsCompleted/p2x19/p2x19.py,sha256=eZEw4Me4ocTt6VXoK2-Sbd5SowZtxRIbN9dZmc7OCVg,6395
- mapFolding/someAssemblyRequired/__init__.py,sha256=xA5a-nZjXIwcqEOig5PEZSxde4_m3JJ5Pb0CN4aiRjw,2488
- mapFolding/someAssemblyRequired/getLLVMforNoReason.py,sha256=CDbesDJSQE-P8uznXIAttRw9f413UpUt-RowK38hqbY,2735
- mapFolding/someAssemblyRequired/ingredientsNumba.py,sha256=g6Z7t35NpoDskzm0OLwTQhHw5CYiYktVYxI2NhCQHww,8435
- mapFolding/someAssemblyRequired/synthesizeNumbaFlow.py,sha256=scEozf2dy4pvrRMtQSycZf3RkNJtYLGKQVDhN6_H8_4,11812
- mapFolding/someAssemblyRequired/synthesizeNumbaJobVESTIGIAL.py,sha256=RBSrtr7US2P7mkY-EA-b2WIOxjs2b0WJaCln1ERxOcI,22314
- mapFolding/someAssemblyRequired/transformDataStructures.py,sha256=ssgnGpO4Z8tfdep-lzeodua8ZxA0qvjIwDGv6wbP_ew,8605
- mapFolding/someAssemblyRequired/transformationTools.py,sha256=_m4i6TylMBCkh-f-LPAtUIzHhZm4nhyuQOceK3yNwIo,41067
- mapFolding/syntheticModules/__init__.py,sha256=evVFqhCGa-WZKDiLcnQWjs-Bj34eRnfSLqz_d7dFYZY,83
- mapFolding/syntheticModules/numbaCount_doTheNeedful.py,sha256=52RuwJVH2fROvWU2dT8wYcQvLgRuvkNZPq01kujCC_U,15725
- mapfolding-0.8.3.dist-info/licenses/LICENSE,sha256=NxH5Y8BdC-gNU-WSMwim3uMbID2iNDXJz7fHtuTdXhk,19346
- tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- tests/conftest.py,sha256=hTpUTW7MtYGP5aeQnnoZMKgTEGCKdLJ8Fnmnv9d4NJw,11115
- tests/test_computations.py,sha256=RHHByyuC8w-qbaag4Iqo_QNYm_7A-9BslbstMOdbZbU,3329
- tests/test_filesystem.py,sha256=Kou0gj5T72oISao6umYfU6L_W5Hi7QS9_IxTv2hU0Pw,3147
- tests/test_oeis.py,sha256=uxvwmgbnylSDdsVJfuAT0LuYLbIVFwSgdLxHm-xUGBM,5043
- tests/test_other.py,sha256=AzsCXiX8x5WJ7i0SocWQY6lT30IJg1lKoybx03X2eqU,4281
- tests/test_tasks.py,sha256=hkZygihT8bCEO2zc-2VcxReQrZJBwgLNbYx0YP4lTDg,2853
- mapfolding-0.8.3.dist-info/METADATA,sha256=LPHFjXnPJA3N4TmHLA7EAEL20sRrjMatn-kxmL1_118,9286
- mapfolding-0.8.3.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
- mapfolding-0.8.3.dist-info/entry_points.txt,sha256=F3OUeZR1XDTpoH7k3wXuRb3KF_kXTTeYhu5AGK1SiOQ,146
- mapfolding-0.8.3.dist-info/top_level.txt,sha256=1gP2vFaqPwHujGwb3UjtMlLEGN-943VSYFR7V4gDqW8,17
- mapfolding-0.8.3.dist-info/RECORD,,
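
Note: each row in the RECORD file removed above has the form path,sha256=<digest>,<size>, where the digest is the URL-safe base64 encoding of the file's SHA-256 hash with trailing padding stripped, per the wheel packaging specification. A sketch of how such a row could be computed; the path used here is only illustrative.

import base64
import hashlib
from pathlib import Path

def recordRow(pathFilename: Path) -> str:
    """Build one RECORD-style row: path,sha256=<urlsafe base64 digest without '='>,<size in bytes>."""
    payload = pathFilename.read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(payload).digest()).rstrip(b"=").decode("ascii")
    return f"{pathFilename.as_posix()},sha256={digest},{len(payload)}"

# Illustrative usage.
print(recordRow(Path("mapFolding/__init__.py")))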