mapFolding 0.3.11__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapFolding/__init__.py +44 -32
- mapFolding/basecamp.py +50 -50
- mapFolding/beDRY.py +336 -336
- mapFolding/oeis.py +262 -262
- mapFolding/reference/flattened.py +294 -293
- mapFolding/reference/hunterNumba.py +126 -126
- mapFolding/reference/irvineJavaPort.py +99 -99
- mapFolding/reference/jax.py +153 -153
- mapFolding/reference/lunnan.py +148 -148
- mapFolding/reference/lunnanNumpy.py +115 -115
- mapFolding/reference/lunnanWhile.py +114 -114
- mapFolding/reference/rotatedEntryPoint.py +183 -183
- mapFolding/reference/total_countPlus1vsPlusN.py +203 -203
- mapFolding/someAssemblyRequired/__init__.py +2 -1
- mapFolding/someAssemblyRequired/getLLVMforNoReason.py +12 -12
- mapFolding/someAssemblyRequired/makeJob.py +48 -48
- mapFolding/someAssemblyRequired/synthesizeModuleJAX.py +17 -17
- mapFolding/someAssemblyRequired/synthesizeNumba.py +345 -803
- mapFolding/someAssemblyRequired/synthesizeNumbaGeneralized.py +371 -0
- mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +150 -0
- mapFolding/someAssemblyRequired/synthesizeNumbaModules.py +75 -0
- mapFolding/syntheticModules/__init__.py +0 -0
- mapFolding/syntheticModules/numba_countInitialize.py +2 -2
- mapFolding/syntheticModules/numba_countParallel.py +3 -3
- mapFolding/syntheticModules/numba_countSequential.py +28 -28
- mapFolding/syntheticModules/numba_doTheNeedful.py +6 -6
- mapFolding/theDao.py +168 -169
- mapFolding/theSSOT.py +190 -162
- mapFolding/theSSOTnumba.py +91 -75
- mapFolding-0.4.0.dist-info/METADATA +122 -0
- mapFolding-0.4.0.dist-info/RECORD +41 -0
- tests/conftest.py +238 -128
- tests/test_oeis.py +80 -80
- tests/test_other.py +137 -224
- tests/test_tasks.py +21 -21
- tests/test_types.py +2 -2
- mapFolding-0.3.11.dist-info/METADATA +0 -155
- mapFolding-0.3.11.dist-info/RECORD +0 -39
- tests/conftest_tmpRegistry.py +0 -62
- tests/conftest_uniformTests.py +0 -53
- {mapFolding-0.3.11.dist-info → mapFolding-0.4.0.dist-info}/LICENSE +0 -0
- {mapFolding-0.3.11.dist-info → mapFolding-0.4.0.dist-info}/WHEEL +0 -0
- {mapFolding-0.3.11.dist-info → mapFolding-0.4.0.dist-info}/entry_points.txt +0 -0
- {mapFolding-0.3.11.dist-info → mapFolding-0.4.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,371 @@
|
|
|
1
|
+
from mapFolding import (
|
|
2
|
+
computationState,
|
|
3
|
+
EnumIndices,
|
|
4
|
+
formatModuleNameDEFAULT,
|
|
5
|
+
FREAKOUT,
|
|
6
|
+
getAlgorithmSource,
|
|
7
|
+
getFilenameFoldsTotal,
|
|
8
|
+
getPathFilenameFoldsTotal,
|
|
9
|
+
getPathJobRootDEFAULT,
|
|
10
|
+
getPathPackage,
|
|
11
|
+
getPathSyntheticModules,
|
|
12
|
+
hackSSOTdatatype,
|
|
13
|
+
indexMy,
|
|
14
|
+
indexTrack,
|
|
15
|
+
moduleOfSyntheticModules,
|
|
16
|
+
myPackageNameIs,
|
|
17
|
+
ParametersNumba,
|
|
18
|
+
parametersNumbaDEFAULT,
|
|
19
|
+
parametersNumbaFailEarly,
|
|
20
|
+
parametersNumbaMinimum,
|
|
21
|
+
parametersNumbaSuperJit,
|
|
22
|
+
parametersNumbaSuperJitParallel,
|
|
23
|
+
setDatatypeElephino,
|
|
24
|
+
setDatatypeFoldsTotal,
|
|
25
|
+
setDatatypeLeavesTotal,
|
|
26
|
+
setDatatypeModule,
|
|
27
|
+
Z0Z_getDatatypeModuleScalar,
|
|
28
|
+
Z0Z_getDecoratorCallable,
|
|
29
|
+
Z0Z_identifierCountFolds,
|
|
30
|
+
Z0Z_setDatatypeModuleScalar,
|
|
31
|
+
Z0Z_setDecoratorCallable,
|
|
32
|
+
)
|
|
33
|
+
from mapFolding.someAssemblyRequired.makeJob import makeStateJob
|
|
34
|
+
from numpy import integer
|
|
35
|
+
from numpy.typing import NDArray
|
|
36
|
+
from types import ModuleType
|
|
37
|
+
from typing import Any, Callable, cast, Dict, List, Optional, Sequence, Set, Tuple, Type, Union
|
|
38
|
+
import ast
|
|
39
|
+
import autoflake
|
|
40
|
+
import collections
|
|
41
|
+
import copy
|
|
42
|
+
import importlib.util
|
|
43
|
+
import inspect
|
|
44
|
+
import more_itertools
|
|
45
|
+
import numba
|
|
46
|
+
import numpy
|
|
47
|
+
import os
|
|
48
|
+
import pathlib
|
|
49
|
+
import python_minifier
|
|
50
|
+
|
|
51
|
+
# Record pairing a synthesized callable with its on-disk path and its AST.
youOughtaKnow = collections.namedtuple(
    'youOughtaKnow',
    'callableSynthesized pathFilenameForMe astForCompetentProgrammers',
)
|
|
52
|
+
|
|
53
|
+
# TODO move to Z0Z_tools
def makeStrRLEcompacted(arrayTarget: NDArray[integer[Any]]) -> str:
    """Converts a NumPy array into a compressed string representation using run-length encoding (RLE).

    This function takes a NumPy array and converts it into an optimized string representation by:
    1. Compressing consecutive sequences of numbers into range objects
    2. Minimizing repeated zeros using array multiplication syntax

    Parameters:
        arrayTarget (numpy.ndarray): The input NumPy array to be converted

    Returns:
        str: A string containing Python code that recreates the input array in compressed form.
    """

    def compressRangesNDArrayNoFlatten(arraySlice: NDArray[integer[Any]]) -> List[List[Any] | Any | NDArray[integer[Any]]] | List[Any] | Any | NDArray[integer[Any]]:
        # Recurse over leading axes so only 1-D slices get run-length analysis;
        # the nesting of the returned lists mirrors the array's shape.
        if isinstance(arraySlice, numpy.ndarray) and arraySlice.ndim > 1:
            return [compressRangesNDArrayNoFlatten(arraySlice[index]) for index in range(arraySlice.shape[0])]
        elif isinstance(arraySlice, numpy.ndarray) and arraySlice.ndim == 1:
            listWithRanges = []
            # consecutive_groups yields maximal runs of consecutive integers.
            for group in more_itertools.consecutive_groups(arraySlice.tolist()):
                ImaSerious = list(group)
                ImaRange = [range(ImaSerious[0], ImaSerious[-1] + 1)]
                # Pick whichever rendering (literal list vs `*range(...)`) is
                # shorter once minified. `spaces` counts the separator
                # characters the final minified output will need; presumably
                # a proxy for the extra commas — TODO confirm the accounting.
                spaces = True #NOTE
                lengthAsList = spaces*(len(ImaSerious)-1) + len(python_minifier.minify(str(ImaSerious))) # brackets are proxies for commas
                lengthAsRange = spaces*1 + len(str('*')) + len(python_minifier.minify(str(ImaRange))) # brackets are proxies for commas
                if lengthAsRange < lengthAsList:
                    listWithRanges += ImaRange
                else:
                    listWithRanges += ImaSerious
            return listWithRanges
        # Non-ndarray input falls through unchanged.
        return arraySlice

    arrayAsNestedLists = compressRangesNDArrayNoFlatten(arrayTarget)

    arrayAsStr = python_minifier.minify(str(arrayAsNestedLists))

    # Largest possible count of repeated trailing values within one row.
    commaIntMaximum = arrayTarget.shape[-1] - 1

    # Only X == 0 is compacted today; the loop form makes it easy to extend
    # to other repeated values later.
    for X in range(1):
        # Whole row of X -> `[X]*rowLength`.
        arrayAsStr = arrayAsStr.replace(f'[{X}' + f',{X}'*commaIntMaximum + ']', f'[{X}]*'+str(commaIntMaximum+1))
        # Trailing runs of X (longest first so shorter runs don't shadow) -> `]+[X]*count`.
        for countInt in range(commaIntMaximum, 2, -1):
            arrayAsStr = arrayAsStr.replace(f',{X}'*countInt + ']', f']+[{X}]*'+str(countInt))

    # The `str(range(...))` renderings become unpacking expressions, e.g.
    # `range(3, 7)` -> `*range(3, 7)`, valid inside a list literal.
    arrayAsStr = arrayAsStr.replace('range', '*range')

    return arrayAsStr
|
|
100
|
+
|
|
101
|
+
# Generic
class ifThis:
    """Generic AST node predicate builder.

    Every static method returns a closure `(ast.AST) -> bool` so the result
    can be handed to visitors or combined with `anyOf`.
    """

    @staticmethod
    def nameIs(allegedly: str) -> Callable[[ast.AST], bool]:
        """Predicate: node is an `ast.Name` whose identifier equals `allegedly`."""
        def predicate(node: ast.AST) -> bool:
            return isinstance(node, ast.Name) and node.id == allegedly
        return predicate

    @staticmethod
    def isCallWithAttribute(moduleName: str, callableName: str) -> Callable[[ast.AST], bool]:
        """Predicate: node is a call of the form `moduleName.callableName(...)`."""
        def predicate(node: ast.AST) -> bool:
            if not isinstance(node, ast.Call):
                return False
            attributeNode = node.func
            if not isinstance(attributeNode, ast.Attribute) or attributeNode.attr != callableName:
                return False
            return isinstance(attributeNode.value, ast.Name) and attributeNode.value.id == moduleName
        return predicate

    @staticmethod
    def isCallWithName(callableName: str) -> Callable[[ast.AST], bool]:
        """Predicate: node is a bare call of the form `callableName(...)`."""
        def predicate(node: ast.AST) -> bool:
            return isinstance(node, ast.Call) and isinstance(node.func, ast.Name) and node.func.id == callableName
        return predicate

    @staticmethod
    def anyOf(*predicates: Callable[[ast.AST], bool]) -> Callable[[ast.AST], bool]:
        """Predicate: at least one of `predicates` matches the node."""
        def predicate(node: ast.AST) -> bool:
            return any(checkThis(node) for checkThis in predicates)
        return predicate
|
|
123
|
+
|
|
124
|
+
class Then:
    """Generic actions."""
    @staticmethod
    def copy_astCallKeywords(astCall: ast.Call) -> Dict[str, Any]:
        """Extract keyword parameters from a decorator AST node.

        Only keywords whose value is a literal `ast.Constant` — and that are
        real named keywords, not `**kwargs` expansions (`keywordItem.arg` is
        None for those) — are copied.
        """
        dictionaryKeywords: Dict[str, Any] = {}
        for keywordItem in astCall.keywords:
            if isinstance(keywordItem.value, ast.Constant) and keywordItem.arg is not None:
                dictionaryKeywords[keywordItem.arg] = keywordItem.value.value
        return dictionaryKeywords

    @staticmethod
    def make_astCall(name: str, args: Optional[Sequence[ast.expr]]=None, list_astKeywords: Optional[Sequence[ast.keyword]]=None, dictionaryKeywords: Optional[Dict[str, Any]]=None) -> ast.Call:
        """Build an `ast.Call` to `name`.

        Parameters:
            name: identifier to call.
            args: positional argument expressions.
            list_astKeywords: pre-built `ast.keyword` nodes, appended after
                the dictionary-derived keywords.
            dictionaryKeywords: name -> constant value pairs, emitted as
                `ast.keyword(arg=name, value=ast.Constant(value))`.
        """
        list_dictionaryKeywords = [ast.keyword(arg=keyName, value=ast.Constant(value=keyValue)) for keyName, keyValue in dictionaryKeywords.items()] if dictionaryKeywords else []
        # BUG FIX: the conditional must wrap only `list(list_astKeywords)`.
        # The previous `a + list(b) if b else []` parsed as `(a + list(b)) if b
        # else []`, silently dropping `dictionaryKeywords` whenever
        # `list_astKeywords` was None.
        return ast.Call(
            func=ast.Name(id=name, ctx=ast.Load()),
            args=list(args) if args else [],
            keywords=list_dictionaryKeywords + (list(list_astKeywords) if list_astKeywords else []),
        )
|
|
143
|
+
|
|
144
|
+
class NodeReplacer(ast.NodeTransformer):
|
|
145
|
+
"""
|
|
146
|
+
A node transformer that replaces or removes AST nodes based on a condition.
|
|
147
|
+
This transformer traverses an AST and for each node checks a predicate. If the predicate
|
|
148
|
+
returns True, the transformer uses the replacement builder to obtain a new node. Returning
|
|
149
|
+
None from the replacement builder indicates that the node should be removed.
|
|
150
|
+
|
|
151
|
+
Attributes:
|
|
152
|
+
findMe (Callable[[ast.AST], bool]): A function that determines whether a node should be replaced.
|
|
153
|
+
nodeReplacementBuilder (Callable[[ast.AST], Optional[ast.AST]]): A function that returns a new node
|
|
154
|
+
or None to remove the node.
|
|
155
|
+
|
|
156
|
+
Methods:
|
|
157
|
+
visit(node: ast.AST) -> Optional[ast.AST]:
|
|
158
|
+
Visits each node in the AST, replacing or removing it based on the predicate.
|
|
159
|
+
"""
|
|
160
|
+
def __init__(self, findMe: Callable[[ast.AST], bool], nodeReplacementBuilder: Callable[[ast.AST], Optional[ast.AST]]) -> None:
|
|
161
|
+
self.findMe = findMe
|
|
162
|
+
self.nodeReplacementBuilder = nodeReplacementBuilder
|
|
163
|
+
|
|
164
|
+
def visit(self, node: ast.AST) -> ast.AST | None | Any:
|
|
165
|
+
if self.findMe(node):
|
|
166
|
+
return self.nodeReplacementBuilder(node)
|
|
167
|
+
return super().visit(node)
|
|
168
|
+
|
|
169
|
+
# Confusing: suspiciously specific but still reusable
def thisIsNumbaDotJit(Ima: ast.AST) -> bool:
    """True when `Ima` is a qualified decorator call `<module>.<decorator>(...)`.

    Module and decorator names come from the package-level SSOT getters —
    e.g. `numba.jit(...)` once `Z0Z_setDatatypeModuleScalar('numba')` and
    `Z0Z_setDecoratorCallable('jit')` have been called."""
    return ifThis.isCallWithAttribute(Z0Z_getDatatypeModuleScalar(), Z0Z_getDecoratorCallable())(Ima)

def thisIsJit(Ima: ast.AST) -> bool:
    """True when `Ima` is a bare call to the configured decorator name, e.g. `jit(...)`."""
    return ifThis.isCallWithName(Z0Z_getDecoratorCallable())(Ima)

def thisIsAnyNumbaJitDecorator(Ima: ast.AST) -> bool:
    """True when `Ima` matches either the qualified or the bare decorator call form."""
    return thisIsNumbaDotJit(Ima) or thisIsJit(Ima)
|
|
178
|
+
|
|
179
|
+
# Domain-based
|
|
180
|
+
class UniversalImportTracker:
    """Accumulate `import` / `from ... import ...` bindings and re-emit them as AST nodes.

    Duplicate registrations collapse automatically because the backing
    containers are sets. Output order within each group is unspecified
    (set/dict iteration order), matching the original behavior.
    """

    def __init__(self) -> None:
        # module name -> set of names imported from that module
        self.dictionaryImportFrom: Dict[str, Set] = collections.defaultdict(set)
        # plain `import <name>` module names
        self.setImport = set()

    def addAst(self, astImport_: Union[ast.Import, ast.ImportFrom]) -> None:
        """Record every name bound by an import statement node.

        Relative imports (`from . import x`, module is None) are ignored,
        as in the original implementation."""
        if isinstance(astImport_, ast.ImportFrom):
            if astImport_.module is None:
                return
            for alias in astImport_.names:
                self.dictionaryImportFrom[astImport_.module].add(alias.name)
        elif isinstance(astImport_, ast.Import):
            for alias in astImport_.names:
                self.setImport.add(alias.name)

    def addImportFromStr(self, module: str, name: str) -> None:
        """Record `from module import name`."""
        self.dictionaryImportFrom[module].add(name)

    def addImportStr(self, name: str) -> None:
        """Record `import name`."""
        self.setImport.add(name)

    def makeListAst(self) -> List[Union[ast.ImportFrom, ast.Import]]:
        """Emit one AST node per recorded name: all `from` imports, then plain imports."""
        listAstImportFrom = []
        for module, names in self.dictionaryImportFrom.items():
            for name in names:
                listAstImportFrom.append(ast.ImportFrom(module=module, names=[ast.alias(name=name, asname=None)], level=0))
        listAstImport = []
        for name in self.setImport:
            listAstImport.append(ast.Import(names=[ast.alias(name=name, asname=None)]))
        return listAstImportFrom + listAstImport
|
|
203
|
+
|
|
204
|
+
# Intricate and specialized
class RecursiveInliner(ast.NodeTransformer):
    """Recursively inline calls to known functions directly into the AST.

    Given `dictionaryFunctions` (function name -> `ast.FunctionDef`), every
    call to one of those functions is replaced by (the relevant part of) that
    function's body. Each function is inlined at most once per transformer
    instance — names are recorded in `callablesCompleted` — which also guards
    against infinite recursion on cyclic call graphs. The transformation
    mutates the stored definitions in place as it inlines within them.

    Attributes:
        dictionaryFunctions (Dict[str, ast.FunctionDef]):
            A mapping of function name to its AST definition, used as a source for inlining.
        callablesCompleted (Set[str]):
            A set to track function names that have already been inlined to prevent multiple expansions.

    Methods:
        inlineFunctionBody(callableTargetName: str) -> Optional[ast.FunctionDef]:
            Retrieves the AST definition for a given function name from dictionaryFunctions
            and recursively inlines any function calls within it. Returns the function definition
            that was inlined or None if the function was already processed.
        visit_Call(callNode: ast.Call) -> ast.AST:
            Inspects calls within the AST. If a function call matches one in dictionaryFunctions,
            it is replaced by the inlined body. If the last statement in the inlined body is a return
            or an expression, that value or expression is substituted; otherwise, a constant is returned.
        visit_Expr(node: ast.Expr) -> Union[ast.AST, List[ast.AST]]:
            Handles expression nodes in the AST. If the expression is a function call from
            dictionaryFunctions, its statements are expanded in place, effectively inlining
            the called function's statements into the surrounding context.
    """
    def __init__(self, dictionaryFunctions: Dict[str, ast.FunctionDef]):
        self.dictionaryFunctions = dictionaryFunctions
        self.callablesCompleted: Set[str] = set()

    def inlineFunctionBody(self, callableTargetName: str) -> Optional[ast.FunctionDef]:
        # Returns None if this function was already inlined once.
        if (callableTargetName in self.callablesCompleted):
            return None

        self.callablesCompleted.add(callableTargetName)
        inlineDefinition = self.dictionaryFunctions[callableTargetName]
        # Visit every node of the definition so calls nested inside it are
        # themselves inlined before this definition is spliced into a caller.
        for astNode in ast.walk(inlineDefinition):
            self.visit(astNode)
        return inlineDefinition

    def visit_Call(self, node: ast.Call) -> Any | ast.Constant | ast.Call | ast.AST:
        # Visit arguments/children first, then decide whether the call itself
        # refers to an inlinable function.
        callNodeVisited = self.generic_visit(node)
        if (isinstance(callNodeVisited, ast.Call) and isinstance(callNodeVisited.func, ast.Name) and callNodeVisited.func.id in self.dictionaryFunctions):
            inlineDefinition = self.inlineFunctionBody(callNodeVisited.func.id)
            if (inlineDefinition and inlineDefinition.body):
                statementTerminating = inlineDefinition.body[-1]
                # A call used as a value is replaced by the value of the
                # inlined body's final `return`/expression statement.
                if (isinstance(statementTerminating, ast.Return) and statementTerminating.value is not None):
                    return self.visit(statementTerminating.value)
                elif (isinstance(statementTerminating, ast.Expr) and statementTerminating.value is not None):
                    return self.visit(statementTerminating.value)
                # Body ends with no usable value: the call becomes `None`.
                return ast.Constant(value=None)
        return callNodeVisited

    def visit_Expr(self, node: ast.Expr) -> Union[ast.AST, List[ast.AST]]:
        # A call used as a statement is expanded into the callee's statements,
        # spliced in place of the expression statement.
        if (isinstance(node.value, ast.Call)):
            if (isinstance(node.value.func, ast.Name) and node.value.func.id in self.dictionaryFunctions):
                inlineDefinition = self.inlineFunctionBody(node.value.func.id)
                if (inlineDefinition):
                    return [self.visit(stmt) for stmt in inlineDefinition.body]
        return self.generic_visit(node)
|
|
265
|
+
|
|
266
|
+
class UnpackArrays(ast.NodeTransformer):
    """
    A class that transforms array accesses using enum indices into local variables.

    This AST transformer identifies array accesses using enum indices and replaces them
    with local variables, adding initialization statements at the start of functions.

    Parameters:
        enumIndexClass (Type[EnumIndices]): The enum class used for array indexing
        arrayName (str): The name of the array being accessed

    Attributes:
        enumIndexClass (Type[EnumIndices]): Stored enum class for index lookups
        arrayName (str): Name of the array being transformed
        substitutions (dict): Tracks variable substitutions and their original nodes;
            maps member name -> ('scalar' | 'array', original subscript node)

    The transformer handles two main cases:
    1. Scalar array access - array[EnumIndices.MEMBER]
    2. Array slice access - array[EnumIndices.MEMBER, other_indices...]
    For each identified access pattern, it:
    1. Creates a local variable named after the enum member
    2. Adds initialization code at function start
    3. Replaces original array access with the local variable
    """

    def __init__(self, enumIndexClass: Type[EnumIndices], arrayName: str) -> None:
        self.enumIndexClass = enumIndexClass
        self.arrayName = arrayName
        self.substitutions: Dict[str, Any] = {}

    def extract_member_name(self, node: ast.AST) -> Optional[str]:
        """Recursively extract enum member name from any node in the AST."""
        # Matches the shape `<enumIndexClass>.<member>.value` (possibly with
        # deeper attribute nesting) and returns `<member>`, else None.
        if isinstance(node, ast.Attribute) and node.attr == 'value':
            innerAttribute = node.value
            while isinstance(innerAttribute, ast.Attribute):
                if (isinstance(innerAttribute.value, ast.Name) and innerAttribute.value.id == self.enumIndexClass.__name__):
                    return innerAttribute.attr
                innerAttribute = innerAttribute.value
        return None

    def transform_slice_element(self, node: ast.AST) -> ast.AST:
        """Transform any enum references within a slice element."""
        if isinstance(node, ast.Subscript):
            if isinstance(node.slice, ast.Attribute):
                member_name = self.extract_member_name(node.slice)
                if member_name:
                    return ast.Name(id=member_name, ctx=node.ctx)
        elif isinstance(node, ast.Tuple):
            # Handle tuple slices by transforming each element
            return ast.Tuple(elts=cast(List[ast.expr], [self.transform_slice_element(elt) for elt in node.elts]), ctx=node.ctx)
        elif isinstance(node, ast.Attribute):
            member_name = self.extract_member_name(node)
            if member_name:
                return ast.Name(id=member_name, ctx=ast.Load())
        return node

    def visit_Subscript(self, node: ast.Subscript) -> ast.AST:
        # Recursively visit any nested subscripts in value or slice
        node.value = self.visit(node.value)
        node.slice = self.visit(node.slice)
        # If node.value is not our arrayName, just return node
        if not (isinstance(node.value, ast.Name) and node.value.id == self.arrayName):
            return node

        # Handle scalar array access: array[Enum.MEMBER.value] -> MEMBER
        if isinstance(node.slice, ast.Attribute):
            memberName = self.extract_member_name(node.slice)
            if memberName:
                self.substitutions[memberName] = ('scalar', node)
                return ast.Name(id=memberName, ctx=ast.Load())

        # Handle array slice access: array[Enum.MEMBER.value, rest...]
        # -> MEMBER[rest...] (or bare MEMBER when there is no remainder)
        if isinstance(node.slice, ast.Tuple) and node.slice.elts:
            firstElement = node.slice.elts[0]
            memberName = self.extract_member_name(firstElement)
            sliceRemainder = [self.visit(elem) for elem in node.slice.elts[1:]]
            if memberName:
                self.substitutions[memberName] = ('array', node)
                if len(sliceRemainder) == 0:
                    return ast.Name(id=memberName, ctx=ast.Load())
                return ast.Subscript(value=ast.Name(id=memberName, ctx=ast.Load()), slice=ast.Tuple(elts=sliceRemainder, ctx=ast.Load()) if len(sliceRemainder) > 1 else sliceRemainder[0], ctx=ast.Load())

        # If single-element tuple, unwrap
        if isinstance(node.slice, ast.Tuple) and len(node.slice.elts) == 1:
            node.slice = node.slice.elts[0]

        return node

    def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.FunctionDef:
        # Transform the body first so `substitutions` is fully populated.
        node = cast(ast.FunctionDef, self.generic_visit(node))

        initializations = []
        for name, (kind, original_node) in self.substitutions.items():
            if kind == 'scalar':
                # MEMBER = <original subscript node, reused verbatim>
                initializations.append(ast.Assign(targets=[ast.Name(id=name, ctx=ast.Store())], value=original_node))
            else: # array
                # MEMBER = array[Enum.MEMBER.value] — rebuilt fresh, with any
                # trailing indices dropped so MEMBER holds the whole sub-array.
                initializations.append(
                    ast.Assign(
                        targets=[ast.Name(id=name, ctx=ast.Store())],
                        value=ast.Subscript(value=ast.Name(id=self.arrayName, ctx=ast.Load()),
                            slice=ast.Attribute(value=ast.Attribute(
                                value=ast.Name(id=self.enumIndexClass.__name__, ctx=ast.Load()),
                                attr=name, ctx=ast.Load()), attr='value', ctx=ast.Load()), ctx=ast.Load())))

        node.body = initializations + node.body
        return node
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""Synthesize one file to compute `foldsTotal` of `mapShape`."""
|
|
2
|
+
from mapFolding.someAssemblyRequired.synthesizeNumba import *
|
|
3
|
+
|
|
4
|
+
def doUnrollCountGaps(FunctionDefTarget: ast.FunctionDef, stateJob: computationState, allImports: UniversalImportTracker) -> Tuple[ast.FunctionDef, UniversalImportTracker]:
    """Unroll the per-dimension loop over `connectionGraph`.

    Replaces the `indexDimension` while-loop with `dimensionsTotal` copies and
    rewrites `connectionGraph[<d>, ...]` subscripts to per-dimension arrays
    `connectionGraph_<d>[...]`. The initial results were very bad.

    Parameters:
        FunctionDefTarget: function AST being specialized.
        stateJob: job state mapping; `stateJob['my'][indexMy.dimensionsTotal]`
            supplies the unroll count.
        allImports: import tracker updated alongside the function AST.

    Returns:
        The transformed function AST and the (possibly updated) import tracker.
    """
    # Replace the dimension while-loop with unrolled copies, then drop the
    # now-unused loop variable and the original connectionGraph assignment.
    FunctionDefTarget = findAndReplaceWhileLoopIn_body(FunctionDefTarget, 'indexDimension', stateJob['my'][indexMy.dimensionsTotal])
    FunctionDefTarget = removeAssignTargetFrom_body(FunctionDefTarget, 'indexDimension')
    FunctionDefTarget = removeAssignTargetFrom_body(FunctionDefTarget, 'connectionGraph')
    FunctionDefTarget, allImports = insertArrayIn_body(FunctionDefTarget, 'connectionGraph', stateJob['connectionGraph'], allImports, stateJob['my'][indexMy.dimensionsTotal])
    for index in range(stateJob['my'][indexMy.dimensionsTotal]):
        # A fresh transformer class per iteration: each closure captures the
        # current `index`, so only subscripts whose leading constant equals
        # this dimension are rewritten on this pass.
        class ReplaceConnectionGraph(ast.NodeTransformer):
            def visit_Subscript(self, node: ast.Subscript) -> ast.AST:
                node = cast(ast.Subscript, self.generic_visit(node))
                if (isinstance(node.value, ast.Name) and node.value.id == "connectionGraph" and
                    isinstance(node.slice, ast.Tuple) and len(node.slice.elts) >= 1):
                    firstElement = node.slice.elts[0]
                    if isinstance(firstElement, ast.Constant) and firstElement.value == index:
                        # connectionGraph[index, rest...] -> connectionGraph_<index>[rest...]
                        newName = ast.Name(id=f"connectionGraph_{index}", ctx=ast.Load())
                        remainingIndices = node.slice.elts[1:]
                        if len(remainingIndices) == 1:
                            newSlice = remainingIndices[0]
                        else:
                            newSlice = ast.Tuple(elts=remainingIndices, ctx=ast.Load())
                        return ast.copy_location(ast.Subscript(value=newName, slice=newSlice, ctx=node.ctx), node)
                return node
        transformer = ReplaceConnectionGraph()
        FunctionDefTarget = transformer.visit(FunctionDefTarget)
    return FunctionDefTarget, allImports
|
|
29
|
+
|
|
30
|
+
def writeJobNumba(mapShape: Sequence[int]
|
|
31
|
+
, callableTarget: str
|
|
32
|
+
, algorithmSource: ModuleType
|
|
33
|
+
, parametersNumba: Optional[ParametersNumba]=None
|
|
34
|
+
, pathFilenameWriteJob: Optional[Union[str, os.PathLike[str]]] = None
|
|
35
|
+
, unrollCountGaps: Optional[bool] = False
|
|
36
|
+
, **keywordArguments: Optional[Any]
|
|
37
|
+
) -> pathlib.Path:
|
|
38
|
+
""" Parameters: **keywordArguments: most especially for `computationDivisions` if you want to make a parallel job. Also `CPUlimit`. """
|
|
39
|
+
|
|
40
|
+
""" Notes:
|
|
41
|
+
Hypothetically, everything can now be configured with parameters and functions. And changing how the job is written is relatively easy.
|
|
42
|
+
|
|
43
|
+
Overview
|
|
44
|
+
- the code starts life in theDao.py, which has many optimizations; `makeNumbaOptimizedFlow` increase optimization especially by using numba; `writeJobNumba` increases optimization especially by limiting its capabilities to just one set of parameters
|
|
45
|
+
- the synthesized module must run well as a standalone interpreted-Python script
|
|
46
|
+
- the next major optimization step will (probably) be to use the module synthesized by `writeJobNumba` to compile a standalone executable
|
|
47
|
+
- Nevertheless, at each major optimization step, the code is constantly being improved and optimized, so everything must be well organized and able to handle upstream and downstream changes
|
|
48
|
+
|
|
49
|
+
Minutia
|
|
50
|
+
- perf_counter is for testing. When I run a real job, I delete those lines
|
|
51
|
+
- avoid `with` statement
|
|
52
|
+
|
|
53
|
+
Necessary
|
|
54
|
+
- Move the function's parameters to the function body,
|
|
55
|
+
- initialize identifiers with their state types and values,
|
|
56
|
+
|
|
57
|
+
Optimizations
|
|
58
|
+
- replace static-valued identifiers with their values
|
|
59
|
+
- narrowly focused imports
|
|
60
|
+
"""
|
|
61
|
+
|
|
62
|
+
# NOTE get the raw ingredients: data and the algorithm
|
|
63
|
+
stateJob = makeStateJob(mapShape, writeJob=False, **keywordArguments)
|
|
64
|
+
pythonSource = inspect.getsource(algorithmSource)
|
|
65
|
+
astModule = ast.parse(pythonSource)
|
|
66
|
+
FunctionDefTarget = next((node for node in astModule.body if isinstance(node, ast.FunctionDef) and node.name == callableTarget), None)
|
|
67
|
+
if not FunctionDefTarget: raise ValueError(f"I received `{callableTarget=}` and {algorithmSource.__name__=}, but I could not find that function in that source.")
|
|
68
|
+
|
|
69
|
+
# NOTE `allImports` is a complementary container to `FunctionDefTarget`; the `FunctionDefTarget` cannot track its own imports very well.
|
|
70
|
+
allImports = UniversalImportTracker()
|
|
71
|
+
for statement in astModule.body:
|
|
72
|
+
if isinstance(statement, (ast.Import, ast.ImportFrom)):
|
|
73
|
+
allImports.addAst(statement)
|
|
74
|
+
|
|
75
|
+
# NOTE remove the parameters from the function signature
|
|
76
|
+
for pirateScowl in FunctionDefTarget.args.args.copy():
|
|
77
|
+
match pirateScowl.arg:
|
|
78
|
+
case 'my':
|
|
79
|
+
FunctionDefTarget, allImports = findAndReplaceArraySubscriptIn_body(FunctionDefTarget, pirateScowl.arg, stateJob[pirateScowl.arg], ['taskIndex', 'dimensionsTotal'], allImports)
|
|
80
|
+
case 'track':
|
|
81
|
+
FunctionDefTarget, allImports = findAndReplaceArrayIn_body(FunctionDefTarget, pirateScowl.arg, stateJob[pirateScowl.arg], allImports)
|
|
82
|
+
case 'connectionGraph':
|
|
83
|
+
FunctionDefTarget, allImports = insertArrayIn_body(FunctionDefTarget, pirateScowl.arg, stateJob[pirateScowl.arg], allImports)
|
|
84
|
+
case 'gapsWhere':
|
|
85
|
+
FunctionDefTarget, allImports = insertArrayIn_body(FunctionDefTarget, pirateScowl.arg, stateJob[pirateScowl.arg], allImports)
|
|
86
|
+
case 'foldGroups':
|
|
87
|
+
FunctionDefTarget = removeAssignTargetFrom_body(FunctionDefTarget, pirateScowl.arg)
|
|
88
|
+
FunctionDefTarget.args.args.remove(pirateScowl)
|
|
89
|
+
|
|
90
|
+
# NOTE replace identifiers with static values with their values
|
|
91
|
+
FunctionDefTarget, allImports = findAndReplaceAnnAssignIn_body(FunctionDefTarget, allImports)
|
|
92
|
+
FunctionDefTarget = findAstNameReplaceWithConstantIn_body(FunctionDefTarget, 'dimensionsTotal', int(stateJob['my'][indexMy.dimensionsTotal]))
|
|
93
|
+
FunctionDefTarget = findThingyReplaceWithConstantIn_body(FunctionDefTarget, 'foldGroups[-1]', int(stateJob['foldGroups'][-1]))
|
|
94
|
+
|
|
95
|
+
# NOTE an attempt at optimization
|
|
96
|
+
if unrollCountGaps:
|
|
97
|
+
FunctionDefTarget, allImports = doUnrollCountGaps(FunctionDefTarget, stateJob, allImports)
|
|
98
|
+
|
|
99
|
+
# NOTE starting the count and printing the total
|
|
100
|
+
pathFilenameFoldsTotal = getPathFilenameFoldsTotal(stateJob['mapShape'])
|
|
101
|
+
astLauncher = makeLauncherBasicJobNumba(FunctionDefTarget.name, pathFilenameFoldsTotal)
|
|
102
|
+
FunctionDefTarget, allImports = insertReturnStatementIn_body(FunctionDefTarget, stateJob['foldGroups'], allImports)
|
|
103
|
+
|
|
104
|
+
# NOTE add the perfect decorator
|
|
105
|
+
datatype = hackSSOTdatatype(Z0Z_identifierCountFolds)
|
|
106
|
+
FunctionDefTarget.returns = ast.Name(id=datatype, ctx=ast.Load())
|
|
107
|
+
FunctionDefTarget, allImports = decorateCallableWithNumba(FunctionDefTarget, allImports, parametersNumba)
|
|
108
|
+
if thisIsNumbaDotJit(FunctionDefTarget.decorator_list[0]):
|
|
109
|
+
astCall = cast(ast.Call, FunctionDefTarget.decorator_list[0])
|
|
110
|
+
astCall.func = ast.Name(id=Z0Z_getDecoratorCallable(), ctx=ast.Load())
|
|
111
|
+
FunctionDefTarget.decorator_list[0] = astCall
|
|
112
|
+
|
|
113
|
+
# NOTE add imports, make str, remove unused imports
|
|
114
|
+
astImports = allImports.makeListAst()
|
|
115
|
+
astModule = ast.Module(body=cast(List[ast.stmt], astImports + [FunctionDefTarget] + [astLauncher]), type_ignores=[])
|
|
116
|
+
ast.fix_missing_locations(astModule)
|
|
117
|
+
pythonSource = ast.unparse(astModule)
|
|
118
|
+
pythonSource = autoflake.fix_code(pythonSource, ['mapFolding', 'numba', 'numpy'])
|
|
119
|
+
|
|
120
|
+
# NOTE put on disk
|
|
121
|
+
if pathFilenameWriteJob is None:
|
|
122
|
+
filename = getFilenameFoldsTotal(stateJob['mapShape'])
|
|
123
|
+
pathRoot = getPathJobRootDEFAULT()
|
|
124
|
+
pathFilenameWriteJob = pathlib.Path(pathRoot, pathlib.Path(filename).stem, pathlib.Path(filename).with_suffix('.py'))
|
|
125
|
+
else:
|
|
126
|
+
pathFilenameWriteJob = pathlib.Path(pathFilenameWriteJob)
|
|
127
|
+
pathFilenameWriteJob.parent.mkdir(parents=True, exist_ok=True)
|
|
128
|
+
|
|
129
|
+
pathFilenameWriteJob.write_text(pythonSource)
|
|
130
|
+
|
|
131
|
+
return pathFilenameWriteJob
|
|
132
|
+
|
|
133
|
+
if __name__ == '__main__':
    # Example invocation: synthesize a job module for a 5x5 map from the
    # numba-specialized sequential counter.
    mapShape = [5,5]
    callableTarget = 'countSequential'

    from mapFolding.syntheticModules import numba_countSequential
    algorithmSource: ModuleType = numba_countSequential

    parametersNumba = parametersNumbaDEFAULT

    # None -> writeJobNumba derives the output path from the map shape.
    pathFilenameWriteJob = None

    # Pin the SSOT datatypes before synthesis; sourGrapes=True presumably
    # makes a conflicting later setting an error — TODO confirm in theSSOT.
    setDatatypeFoldsTotal('int64', sourGrapes=True)
    setDatatypeElephino('uint8', sourGrapes=True)
    setDatatypeLeavesTotal('uint8', sourGrapes=True)
    # Emit `numba.jit`-style decorators in the synthesized module.
    Z0Z_setDatatypeModuleScalar('numba')
    Z0Z_setDecoratorCallable('jit')

    writeJobNumba(mapShape, callableTarget, algorithmSource, parametersNumba, pathFilenameWriteJob)
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
"""I suspect this function will be relatively stable for now.
|
|
2
|
+
Managing settings and options, however, ... I've 'invented'
|
|
3
|
+
everything I am doing. I would rather benefit from humanity's
|
|
4
|
+
collective wisdom."""
|
|
5
|
+
from mapFolding.someAssemblyRequired.synthesizeNumba import *
|
|
6
|
+
|
|
7
|
+
def makeFlowNumbaOptimized(listCallablesInline: List[str], callableDispatcher: Optional[str] = None, algorithmSource: Optional[ModuleType] = None, relativePathWrite: Optional[pathlib.Path] = None, formatFilenameWrite: Optional[str] = None) -> None:
|
|
8
|
+
if relativePathWrite and relativePathWrite.is_absolute():
|
|
9
|
+
raise ValueError("The path to write the module must be relative to the root of the package.")
|
|
10
|
+
if not algorithmSource:
|
|
11
|
+
algorithmSource = getAlgorithmSource()
|
|
12
|
+
|
|
13
|
+
listStuffYouOughtaKnow: List[youOughtaKnow] = []
|
|
14
|
+
|
|
15
|
+
def doThisStuff(callableTarget: str, parametersNumba: Optional[ParametersNumba], inlineCallables: bool, unpackArrays: bool, allImports: Optional[UniversalImportTracker], relativePathWrite: Optional[pathlib.Path], formatFilenameWrite: Optional[str]) -> youOughtaKnow:
|
|
16
|
+
pythonSource = inspect.getsource(algorithmSource)
|
|
17
|
+
pythonSource = makeAstModuleForOneCallable(pythonSource, callableTarget, parametersNumba, inlineCallables, unpackArrays, allImports)
|
|
18
|
+
if not pythonSource: raise FREAKOUT
|
|
19
|
+
pythonSource = autoflake.fix_code(pythonSource, ['mapFolding', 'numba', 'numpy'])
|
|
20
|
+
|
|
21
|
+
if not relativePathWrite:
|
|
22
|
+
pathWrite = getPathSyntheticModules()
|
|
23
|
+
else:
|
|
24
|
+
pathWrite = getPathPackage() / relativePathWrite
|
|
25
|
+
if not formatFilenameWrite:
|
|
26
|
+
formatFilenameWrite = formatModuleNameDEFAULT + '.py'
|
|
27
|
+
pathFilename = pathWrite / formatFilenameWrite.format(callableTarget=callableTarget)
|
|
28
|
+
|
|
29
|
+
pathFilename.write_text(pythonSource)
|
|
30
|
+
|
|
31
|
+
howIsThisStillAThing = getPathPackage().parent
|
|
32
|
+
dumbassPythonNamespace = pathFilename.relative_to(howIsThisStillAThing).with_suffix('').parts
|
|
33
|
+
ImaModule = '.'.join(dumbassPythonNamespace)
|
|
34
|
+
astImportFrom = ast.ImportFrom(module=ImaModule, names=[ast.alias(name=callableTarget, asname=None)], level=0)
|
|
35
|
+
|
|
36
|
+
return youOughtaKnow(callableSynthesized=callableTarget, pathFilenameForMe=pathFilename, astForCompetentProgrammers=astImportFrom)
|
|
37
|
+
|
|
38
|
+
for callableTarget in listCallablesInline:
|
|
39
|
+
parametersNumba = None
|
|
40
|
+
inlineCallables = True
|
|
41
|
+
unpackArrays = False
|
|
42
|
+
allImports = None
|
|
43
|
+
match callableTarget:
|
|
44
|
+
case 'countParallel':
|
|
45
|
+
parametersNumba = parametersNumbaSuperJitParallel
|
|
46
|
+
case 'countSequential':
|
|
47
|
+
parametersNumba = parametersNumbaSuperJit
|
|
48
|
+
unpackArrays = True
|
|
49
|
+
case 'countInitialize':
|
|
50
|
+
parametersNumba = parametersNumbaDEFAULT
|
|
51
|
+
listStuffYouOughtaKnow.append(doThisStuff(callableTarget, parametersNumba, inlineCallables, unpackArrays, allImports, relativePathWrite, formatFilenameWrite))
|
|
52
|
+
|
|
53
|
+
if callableDispatcher:
|
|
54
|
+
callableTarget = callableDispatcher
|
|
55
|
+
parametersNumba = None
|
|
56
|
+
inlineCallables = False
|
|
57
|
+
unpackArrays = False
|
|
58
|
+
allImports = UniversalImportTracker()
|
|
59
|
+
for stuff in listStuffYouOughtaKnow:
|
|
60
|
+
statement = stuff.astForCompetentProgrammers
|
|
61
|
+
if isinstance(statement, (ast.Import, ast.ImportFrom)):
|
|
62
|
+
allImports.addAst(statement)
|
|
63
|
+
|
|
64
|
+
listStuffYouOughtaKnow.append(doThisStuff(callableTarget, parametersNumba, inlineCallables, unpackArrays, allImports, relativePathWrite, formatFilenameWrite))
|
|
65
|
+
|
|
66
|
+
if __name__ == '__main__':
|
|
67
|
+
setDatatypeModule('numpy', sourGrapes=True)
|
|
68
|
+
setDatatypeFoldsTotal('int64', sourGrapes=True)
|
|
69
|
+
setDatatypeElephino('uint8', sourGrapes=True)
|
|
70
|
+
setDatatypeLeavesTotal('uint8', sourGrapes=True)
|
|
71
|
+
Z0Z_setDatatypeModuleScalar('numba')
|
|
72
|
+
Z0Z_setDecoratorCallable('jit')
|
|
73
|
+
listCallablesInline: List[str] = ['countInitialize', 'countParallel', 'countSequential']
|
|
74
|
+
callableDispatcher = 'doTheNeedful'
|
|
75
|
+
makeFlowNumbaOptimized(listCallablesInline, callableDispatcher)
|
|
File without changes
|
|
@@ -2,9 +2,9 @@ from mapFolding import indexMy
|
|
|
2
2
|
from mapFolding import indexTrack
|
|
3
3
|
from numba import uint8
|
|
4
4
|
from numba import jit
|
|
5
|
-
from numpy import ndarray
|
|
6
|
-
from numpy import dtype
|
|
7
5
|
from numpy import integer
|
|
6
|
+
from numpy import dtype
|
|
7
|
+
from numpy import ndarray
|
|
8
8
|
from typing import Any
|
|
9
9
|
from typing import Tuple
|
|
10
10
|
|
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
from mapFolding import indexMy
|
|
2
2
|
from mapFolding import indexTrack
|
|
3
3
|
from numba import uint8
|
|
4
|
-
from numba import prange
|
|
5
4
|
from numba import jit
|
|
6
5
|
from numba import int64
|
|
7
|
-
from
|
|
8
|
-
from numpy import dtype
|
|
6
|
+
from numba import prange
|
|
9
7
|
from numpy import integer
|
|
8
|
+
from numpy import dtype
|
|
9
|
+
from numpy import ndarray
|
|
10
10
|
from typing import Any
|
|
11
11
|
from typing import Tuple
|
|
12
12
|
|