mapFolding 0.3.11__tar.gz → 0.3.12__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mapfolding-0.3.11 → mapfolding-0.3.12}/PKG-INFO +1 -1
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/__init__.py +10 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/someAssemblyRequired/synthesizeNumba.py +156 -324
- mapfolding-0.3.12/mapFolding/someAssemblyRequired/synthesizeNumbaHardcoding.py +188 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/syntheticModules/numba_countInitialize.py +1 -1
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/syntheticModules/numba_countParallel.py +2 -2
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/syntheticModules/numba_countSequential.py +27 -27
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/theDao.py +26 -27
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/theSSOT.py +24 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/theSSOTnumba.py +1 -1
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding.egg-info/PKG-INFO +1 -1
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding.egg-info/SOURCES.txt +1 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/pyproject.toml +1 -1
- {mapfolding-0.3.11 → mapfolding-0.3.12}/LICENSE +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/README.md +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/basecamp.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/beDRY.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/oeis.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/reference/flattened.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/reference/hunterNumba.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/reference/irvineJavaPort.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/reference/jax.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/reference/lunnan.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/reference/lunnanNumpy.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/reference/lunnanWhile.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/reference/rotatedEntryPoint.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/reference/total_countPlus1vsPlusN.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/someAssemblyRequired/__init__.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/someAssemblyRequired/getLLVMforNoReason.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/someAssemblyRequired/makeJob.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/someAssemblyRequired/synthesizeModuleJAX.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/syntheticModules/numba_doTheNeedful.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding.egg-info/dependency_links.txt +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding.egg-info/entry_points.txt +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding.egg-info/requires.txt +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding.egg-info/top_level.txt +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/setup.cfg +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/tests/__init__.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/tests/conftest.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/tests/conftest_tmpRegistry.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/tests/conftest_uniformTests.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/tests/test_oeis.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/tests/test_other.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/tests/test_tasks.py +0 -0
- {mapfolding-0.3.11 → mapfolding-0.3.12}/tests/test_types.py +0 -0
{mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/__init__.py
RENAMED
@@ -26,6 +26,11 @@ from mapFolding.theSSOT import (
     getPathJobRootDEFAULT,
     getPathSyntheticModules,
     moduleOfSyntheticModules,
+    Z0Z_getDatatypeModuleScalar,
+    Z0Z_getDecoratorCallable,
+    Z0Z_setDatatypeModuleScalar,
+    Z0Z_setDecoratorCallable,
+    Z0Z_identifierCountFolds,
 )

 # Parameters for the prima donna
@@ -38,6 +43,11 @@ from mapFolding.theSSOT import (
     parametersNumbaSuperJitParallel,
 )

+# Coping
+from mapFolding.theSSOT import (
+    FREAKOUT,
+)
+
 from mapFolding.beDRY import (
     getFilenameFoldsTotal,
     getPathFilenameFoldsTotal,
{mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/someAssemblyRequired/synthesizeNumba.py
RENAMED
@@ -1,6 +1,9 @@
+"""I think this module is free of hardcoded values.
+TODO: consolidate the logic in this module."""
 from mapFolding import (
     computationState,
     EnumIndices,
+    FREAKOUT,
     getAlgorithmSource,
     getFilenameFoldsTotal,
     getPathFilenameFoldsTotal,
@@ -20,15 +23,19 @@ from mapFolding import (
     setDatatypeFoldsTotal,
     setDatatypeLeavesTotal,
     setDatatypeModule,
+    Z0Z_getDatatypeModuleScalar,
+    Z0Z_getDecoratorCallable,
+    Z0Z_identifierCountFolds,
+    Z0Z_setDatatypeModuleScalar,
+    Z0Z_setDecoratorCallable,
 )
-from collections import namedtuple
 from mapFolding.someAssemblyRequired import makeStateJob
 from types import ModuleType
-from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, Type, Union
+from typing import Any, Callable, cast, Dict, List, Optional, Sequence, Set, Tuple, Type, Union
 import ast
+import autoflake
 import collections
 import inspect
-import autoflake
 import more_itertools
 import numba
 import numpy
@@ -36,7 +43,7 @@ import os
 import pathlib
 import python_minifier

-youOughtaKnow = namedtuple('youOughtaKnow', ['callableSynthesized', 'pathFilenameForMe', 'astForCompetentProgrammers'])
+youOughtaKnow = collections.namedtuple('youOughtaKnow', ['callableSynthesized', 'pathFilenameForMe', 'astForCompetentProgrammers'])

 class UniversalImportTracker:
     def __init__(self):
@@ -60,9 +67,10 @@ class UniversalImportTracker:
         listAstImportFrom = [ast.ImportFrom(module=module, names=[ast.alias(name=name, asname=None)], level=0) for module, names in self.dictionaryImportFrom.items() for name in names]
         listAstImport = [ast.Import(names=[ast.alias(name=name, asname=None)]) for name in self.setImport]
         return listAstImportFrom + listAstImport
+
 class NodeReplacer(ast.NodeTransformer):
-    """
-    def __init__(self, findMe, nodeReplacementBuilder):
+    """Generic node replacement using configurable predicate and builder."""
+    def __init__(self, findMe: Callable[[ast.AST], bool], nodeReplacementBuilder: Callable[[ast.AST], ast.AST]):
         self.findMe = findMe
         self.nodeReplacementBuilder = nodeReplacementBuilder

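The hunk above keeps NodeReplacer as a generic predicate/builder transformer. As a hedged illustration of that pattern only (the class and example below are a standalone sketch, not the package's implementation), such a transformer can swap a named identifier for a constant:

import ast

class PredicateReplacer(ast.NodeTransformer):
    # Minimal stand-in for the predicate/builder pattern used by NodeReplacer.
    def __init__(self, findMe, nodeReplacementBuilder):
        self.findMe = findMe  # Callable[[ast.AST], bool]
        self.nodeReplacementBuilder = nodeReplacementBuilder  # Callable[[ast.AST], ast.AST]

    def visit(self, node):
        if self.findMe(node):
            return self.nodeReplacementBuilder(node)
        return super().visit(node)

# Replace every `dimensionsTotal` Name with the constant 2.
tree = ast.parse("total = dimensionsTotal * dimensionsTotal")
replacer = PredicateReplacer(
    lambda node: isinstance(node, ast.Name) and node.id == 'dimensionsTotal',
    lambda node: ast.copy_location(ast.Constant(value=2), node),
)
tree = ast.fix_missing_locations(replacer.visit(tree))
print(ast.unparse(tree))  # total = 2 * 2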
@@ -71,26 +79,6 @@ class NodeReplacer(ast.NodeTransformer):
                 return self.nodeReplacementBuilder(node)
         return super().visit(node)

-class ArgumentProcessor:
-    """Unified argument processing using transformation rules"""
-    def __init__(self, rules: List[Tuple[Callable[[ast.arg], bool], Callable]]):
-        self.rules = rules  # (predicate, transformation)
-
-    def process(self, FunctionDef: ast.FunctionDef) -> ast.FunctionDef:
-        for arg in FunctionDef.args.args.copy():
-            for predicate, transform in self.rules:
-                if predicate(arg):
-                    FunctionDef = transform(FunctionDef, arg)
-        return FunctionDef
-
-def Z0Z_UnhandledDecorators(astCallable: ast.FunctionDef) -> ast.FunctionDef:
-    # TODO: more explicit handling of decorators. I'm able to ignore this because I know `algorithmSource` doesn't have any decorators.
-    for decoratorItem in astCallable.decorator_list.copy():
-        import warnings
-        astCallable.decorator_list.remove(decoratorItem)
-        warnings.warn(f"Removed decorator {ast.unparse(decoratorItem)} from {astCallable.name}")
-    return astCallable
-
 class RecursiveInliner(ast.NodeTransformer):
     """
     Class RecursiveInliner:
@@ -152,7 +140,7 @@ class RecursiveInliner(ast.NodeTransformer):
             return [self.visit(stmt) for stmt in inlineDefinition.body]
         return self.generic_visit(node)

-class UnpackArrayAccesses(ast.NodeTransformer):
+class UnpackArrays(ast.NodeTransformer):
     """
     A class that transforms array accesses using enum indices into local variables.

@@ -259,8 +247,42 @@ class UnpackArrayAccesses(ast.NodeTransformer):
         node.body = initializations + node.body
         return node

-def …
-
+def Z0Z_UnhandledDecorators(astCallable: ast.FunctionDef) -> ast.FunctionDef:
+    # TODO: more explicit handling of decorators. I'm able to ignore this because I know `algorithmSource` doesn't have any decorators.
+    for decoratorItem in astCallable.decorator_list.copy():
+        import warnings
+        astCallable.decorator_list.remove(decoratorItem)
+        warnings.warn(f"Removed decorator {ast.unparse(decoratorItem)} from {astCallable.name}")
+    return astCallable
+def isThisNodeNumbaJitCall(node: ast.AST) -> bool:
+    return (isinstance(node, ast.Call) and isinstance(node.func, ast.Attribute) and node.func.attr == Z0Z_getDecoratorCallable())
+def isThisNodeJitCall(node: ast.AST) -> bool:
+    return (isinstance(node, ast.Call) and isinstance(node.func, ast.Name) and node.func.id == Z0Z_getDecoratorCallable())
+def isThisNodeNumbaJitDecorator(node: ast.AST) -> bool:
+    return isThisNodeNumbaJitCall(node) or isThisNodeJitCall(node)
+def Z0Z_generalizeThis(FunctionDefTarget: ast.FunctionDef, parametersNumba: Optional[ParametersNumba]=None) -> Tuple[ast.FunctionDef, ParametersNumba | None]:
+    def recycleParametersNumba(decorator: ast.Call) -> Dict[str, Any]:
+        parametersNumbaExtracted: Dict[str, Any] = {}
+        for keywordItem in decorator.keywords:
+            if isinstance(keywordItem.value, ast.Constant) and keywordItem.arg is not None:
+                parametersNumbaExtracted[keywordItem.arg] = keywordItem.value.value
+        return parametersNumbaExtracted
+
+    for decorator in FunctionDefTarget.decorator_list.copy():
+        if isThisNodeNumbaJitDecorator(decorator):
+            decorator = cast(ast.Call, decorator)
+            if parametersNumba is None:
+                parametersNumbaSherpa = recycleParametersNumba(decorator)
+                if (HunterIsSureThereAreBetterWaysToDoThis := True):
+                    if parametersNumbaSherpa:
+                        parametersNumba = cast(ParametersNumba, parametersNumbaSherpa)
+            FunctionDefTarget.decorator_list.remove(decorator)
+
+    return FunctionDefTarget, parametersNumba
+
+def decorateCallableWithNumba(FunctionDefTarget: ast.FunctionDef, allImports: UniversalImportTracker, parametersNumba: Optional[ParametersNumba]=None) -> Tuple[ast.FunctionDef, UniversalImportTracker]:
+    datatypeModuleDecorator = Z0Z_getDatatypeModuleScalar()
+    def make_arg4parameter(signatureElement: ast.arg):
         if isinstance(signatureElement.annotation, ast.Subscript) and isinstance(signatureElement.annotation.slice, ast.Tuple):
             annotationShape = signatureElement.annotation.slice.elts[0]
             if isinstance(annotationShape, ast.Subscript) and isinstance(annotationShape.slice, ast.Tuple):
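The helpers added above (isThisNodeNumbaJitDecorator, Z0Z_generalizeThis) detect a jit decorator and recycle its constant keyword arguments. A self-contained sketch of the same detection-and-recycling idea, using only the standard library and hypothetical names:

import ast

def recycleDecoratorKeywords(functionSource: str, decoratorName: str = 'jit') -> dict:
    """Collect constant keyword arguments from a matching decorator, e.g. @jit(cache=True)."""
    functionDef = ast.parse(functionSource).body[0]
    assert isinstance(functionDef, ast.FunctionDef)
    recycled: dict = {}
    for decorator in functionDef.decorator_list:
        isCall = isinstance(decorator, ast.Call)
        isMatch = isCall and (
            (isinstance(decorator.func, ast.Name) and decorator.func.id == decoratorName)
            or (isinstance(decorator.func, ast.Attribute) and decorator.func.attr == decoratorName))
        if isMatch:
            for keywordItem in decorator.keywords:
                if isinstance(keywordItem.value, ast.Constant) and keywordItem.arg is not None:
                    recycled[keywordItem.arg] = keywordItem.value.value
    return recycled

source = "@numba.jit(cache=True, nopython=True)\ndef countSequential(): pass"
print(recycleDecoratorKeywords(source))  # {'cache': True, 'nopython': True}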
@@ -284,45 +306,55 @@ def decorateCallableWithNumba(FunctionDefTarget: ast.FunctionDef, parametersNumb

         return ast.Subscript(value=datatypeNumba, slice=shapeAST, ctx=ast.Load())

-
+    list_argsDecorator: Sequence[ast.expr] = []

-
+    list_arg4signature_or_function: Sequence[ast.expr] = []
     for parameter in FunctionDefTarget.args.args:
-        signatureElement = …
-        if …
-        … [removed lines truncated in the diff rendering]
-        if ImaReturn is not None and isinstance(ImaReturn.value, ast.Name):
-            my_idIf_I_wereA_astCall_func_astName_idParameter = ImaReturn.value.id
-            ast_argsSignature = ast.Call(
-                func=ast.Name(id=my_idIf_I_wereA_astCall_func_astName_idParameter, ctx=ast.Load()),
-                args=[astTupleSignatureParameters],
-                keywords=[]
-            )
-        else:
-            ast_argsSignature = astTupleSignatureParameters
-
+        signatureElement = make_arg4parameter(parameter)
+        if signatureElement:
+            list_arg4signature_or_function.append(signatureElement)
+
+    if FunctionDefTarget.returns and isinstance(FunctionDefTarget.returns, ast.Name):
+        theReturn: ast.Name = FunctionDefTarget.returns
+        list_argsDecorator = [cast(ast.expr, ast.Call(func=ast.Name(id=theReturn.id, ctx=ast.Load())
+                            , args=list_arg4signature_or_function if list_arg4signature_or_function else [] , keywords=[] ) )]
+    elif list_arg4signature_or_function:
+        list_argsDecorator = [cast(ast.expr, ast.Tuple(elts=list_arg4signature_or_function, ctx=ast.Load()))]
+
+    FunctionDefTarget, parametersNumba = Z0Z_generalizeThis(FunctionDefTarget, parametersNumba)
+    FunctionDefTarget = Z0Z_UnhandledDecorators(FunctionDefTarget)
     if parametersNumba is None:
         parametersNumba = parametersNumbaDEFAULT
+    listDecoratorKeywords = [ast.keyword(arg=parameterName, value=ast.Constant(value=parameterValue)) for parameterName, parameterValue in parametersNumba.items()]

-
-
-
+    decoratorModule = Z0Z_getDatatypeModuleScalar()
+    decoratorCallable = Z0Z_getDecoratorCallable()
+    allImports.addImportFromStr(decoratorModule, decoratorCallable)
+    astDecorator = ast.Call(
+        func=ast.Name(id=decoratorCallable, ctx=ast.Load())
+        , args=list_argsDecorator if list_argsDecorator else []
+        , keywords=listDecoratorKeywords)

-    FunctionDefTarget.decorator_list = [ …
-    return FunctionDefTarget
+    FunctionDefTarget.decorator_list = [astDecorator]
+    return FunctionDefTarget, allImports
+
+def makeDecoratorJobNumba(FunctionDefTarget: ast.FunctionDef, allImports: UniversalImportTracker, parametersNumba: Optional[ParametersNumba]=None) -> Tuple[ast.FunctionDef, UniversalImportTracker]:
+    decoratorCallable = Z0Z_getDecoratorCallable()
+    def convertToPlainJit(node: ast.Call) -> ast.Call:
+        node.func = ast.Name(id=decoratorCallable, ctx=ast.Load())
+        return node
+
+    FunctionDefTarget, parametersNumba = Z0Z_generalizeThis(FunctionDefTarget, parametersNumba)
+
+    FunctionDefTarget, allImports = decorateCallableWithNumba(FunctionDefTarget, allImports, parametersNumba)
+    if isThisNodeNumbaJitCall(FunctionDefTarget.decorator_list[0]):
+        FunctionDefTarget.decorator_list[0] = convertToPlainJit(cast(ast.Call, FunctionDefTarget.decorator_list[0]))
+
+    return FunctionDefTarget, allImports

-def inlineOneCallable(pythonSource: str, callableTarget: str) -> str …
+def inlineOneCallable(pythonSource: str, callableTarget: str, parametersNumba: Optional[ParametersNumba]=None, unpackArrays: Optional[bool]=False) -> str:
     astModule: ast.Module = ast.parse(pythonSource, type_comments=True)
+    allImports = UniversalImportTracker()

     for statement in astModule.body:
         if isinstance(statement, (ast.Import, ast.ImportFrom)):
@@ -332,35 +364,27 @@ def inlineOneCallable(pythonSource: str, callableTarget: str) -> str | None:
     callableInlinerWorkhorse = RecursiveInliner(dictionaryFunctionDef)
     FunctionDefTarget = callableInlinerWorkhorse.inlineFunctionBody(callableTarget)

-    if FunctionDefTarget:
-        … [removed lines truncated in the diff rendering]
-            case 'countSequential':
-                parametersNumba = parametersNumbaSuperJit
-            case 'countInitialize':
-                parametersNumba = parametersNumbaDEFAULT
-
-        FunctionDefTarget = decorateCallableWithNumba(FunctionDefTarget, parametersNumba)
-
-        if callableTarget == 'countSequential':
-            unpackerMy = UnpackArrayAccesses(indexMy, 'my')
-            FunctionDefTarget = cast(ast.FunctionDef, unpackerMy.visit(FunctionDefTarget))
-            ast.fix_missing_locations(FunctionDefTarget)
+    if not FunctionDefTarget:
+        raise FREAKOUT
+
+    ast.fix_missing_locations(FunctionDefTarget)
+
+    FunctionDefTarget, allImports = decorateCallableWithNumba(FunctionDefTarget, allImports, parametersNumba)

-
-
+    if unpackArrays:
+        for tupleUnpack in [(indexMy, 'my'), (indexTrack, 'track')]:
+            unpacker = UnpackArrays(*tupleUnpack)
+            FunctionDefTarget = cast(ast.FunctionDef, unpacker.visit(FunctionDefTarget))
             ast.fix_missing_locations(FunctionDefTarget)

-
-
-
-
+    moduleAST = ast.Module(body=cast(List[ast.stmt], allImports.makeListAst() + [FunctionDefTarget]), type_ignores=[])
+    ast.fix_missing_locations(moduleAST)
+    moduleSource = ast.unparse(moduleAST)
+    return moduleSource

 def makeDispatcherNumba(pythonSource: str, callableTarget: str, listStuffYouOughtaKnow: List[youOughtaKnow]) -> str:
     astSource = ast.parse(pythonSource)
+    allImports = UniversalImportTracker()

     for statement in astSource.body:
         if isinstance(statement, (ast.Import, ast.ImportFrom)):
@@ -376,98 +400,13 @@ def makeDispatcherNumba(pythonSource: str, callableTarget: str, listStuffYouOugh
     if not FunctionDefTarget:
         raise ValueError(f"Could not find function {callableTarget} in source code")

-
-    FunctionDefTarget = Z0Z_UnhandledDecorators(FunctionDefTarget)
-
-    FunctionDefTarget = decorateCallableWithNumba(FunctionDefTarget, parametersNumbaFailEarly)
+    FunctionDefTarget, allImports = decorateCallableWithNumba(FunctionDefTarget, allImports, parametersNumbaFailEarly)

-    astModule = ast.Module(
-        + [FunctionDefTarget]), type_ignores=[])
+    astModule = ast.Module(body=cast(List[ast.stmt], allImports.makeListAst() + [FunctionDefTarget]), type_ignores=[])

     ast.fix_missing_locations(astModule)
     return ast.unparse(astModule)

-def makeNumbaOptimizedFlow(listCallablesInline: List[str], callableDispatcher: Optional[str] = None, algorithmSource: Optional[ModuleType] = None) -> None:
-    if not algorithmSource:
-        algorithmSource = getAlgorithmSource()
-
-    formatModuleNameDEFAULT = "numba_{callableTarget}"
-
-    # When I am a more competent programmer, I will make getPathFilenameWrite dependent on makeAstImport or vice versa,
-    # so the name of the physical file doesn't get out of whack with the name of the logical module.
-    def getPathFilenameWrite(callableTarget: str
-                        , pathWrite: Optional[pathlib.Path] = None
-                        , formatFilenameWrite: Optional[str] = None
-                        ) -> pathlib.Path:
-        if not pathWrite:
-            pathWrite = getPathSyntheticModules()
-        if not formatFilenameWrite:
-            formatFilenameWrite = formatModuleNameDEFAULT + '.py'
-
-        pathFilename = pathWrite / formatFilenameWrite.format(callableTarget=callableTarget)
-        return pathFilename
-
-    def makeAstImport(callableTarget: str
-                        , packageName: Optional[str] = None
-                        , subPackageName: Optional[str] = None
-                        , moduleName: Optional[str] = None
-                        , astNodeLogicalPathThingy: Optional[ast.AST] = None
-                        ) -> ast.ImportFrom:
-        """Creates import AST node for synthetic modules."""
-        if astNodeLogicalPathThingy is None:
-            if packageName is None:
-                packageName = myPackageNameIs
-            if subPackageName is None:
-                subPackageName = moduleOfSyntheticModules
-            if moduleName is None:
-                moduleName = formatModuleNameDEFAULT.format(callableTarget=callableTarget)
-            module=f'{packageName}.{subPackageName}.{moduleName}'
-        else:
-            module = str(astNodeLogicalPathThingy)
-        return ast.ImportFrom(
-            module=module,
-            names=[ast.alias(name=callableTarget, asname=None)],
-            level=0
-        )
-
-    listStuffYouOughtaKnow: List[youOughtaKnow] = []
-
-    global allImports
-    for callableTarget in listCallablesInline:
-        allImports = UniversalImportTracker()
-        pythonSource = inspect.getsource(algorithmSource)
-        pythonSource = inlineOneCallable(pythonSource, callableTarget)
-        if not pythonSource:
-            raise Exception("Pylance, OMG! The sky is falling!")
-
-        pathFilename = getPathFilenameWrite(callableTarget)
-
-        listStuffYouOughtaKnow.append(youOughtaKnow(
-            callableSynthesized=callableTarget,
-            pathFilenameForMe=pathFilename,
-            astForCompetentProgrammers=makeAstImport(callableTarget)
-        ))
-        pythonSource = autoflake.fix_code(pythonSource, ['mapFolding', 'numba', 'numpy'])
-        pathFilename.write_text(pythonSource)
-
-    # Generate dispatcher if requested
-    if callableDispatcher:
-        allImports = UniversalImportTracker()
-        pythonSource = inspect.getsource(algorithmSource)
-        pythonSource = makeDispatcherNumba(pythonSource, callableDispatcher, listStuffYouOughtaKnow)
-        if not pythonSource:
-            raise Exception("Pylance, OMG! The sky is falling!")
-
-        pathFilename = getPathFilenameWrite(callableDispatcher)
-
-        listStuffYouOughtaKnow.append(youOughtaKnow(
-            callableSynthesized=callableDispatcher,
-            pathFilenameForMe=pathFilename,
-            astForCompetentProgrammers=makeAstImport(callableDispatcher)
-        ))
-        pythonSource = autoflake.fix_code(pythonSource, ['mapFolding', 'numba', 'numpy'])
-        pathFilename.write_text(pythonSource)
-
 def makeStrRLEcompacted(arrayTarget: numpy.ndarray, identifierName: Optional[str]=None) -> str:
     """Converts a NumPy array into a compressed string representation using run-length encoding (RLE).

@@ -514,7 +453,7 @@ def makeStrRLEcompacted(arrayTarget: numpy.ndarray, identifierName: Optional[str
         return f"{identifierName} = array({stringMinimized}, dtype={arrayTarget.dtype})"
     return stringMinimized

-def moveArrayTo_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg, argData: numpy.ndarray) -> ast.FunctionDef:
+def moveArrayTo_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg, argData: numpy.ndarray, allImports: UniversalImportTracker) -> Tuple[ast.FunctionDef, UniversalImportTracker]:
     arrayType = type(argData)
     moduleConstructor = arrayType.__module__
     constructorName = arrayType.__name__
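For context on makeStrRLEcompacted, whose return line appears in the hunk above: run-length encoding an array into source text can be sketched roughly like this (a hypothetical helper for illustration, not the package's function, which emits its own `identifierName = array(..., dtype=...)` format):

import numpy
from itertools import groupby

def rleLiteral(arrayTarget: numpy.ndarray, identifierName: str) -> str:
    """Emit `identifierName = array(..., dtype=...)` with runs written as [value]*count."""
    pieces = []
    for value, run in groupby(arrayTarget.ravel().tolist()):
        length = sum(1 for _ in run)
        pieces.append(f"[{value}]*{length}" if length > 1 else f"[{value}]")
    body = " + ".join(pieces) if pieces else "[]"
    return f"{identifierName} = array({body}, dtype='{arrayTarget.dtype}')"

print(rleLiteral(numpy.array([0, 0, 0, 7, 7, 1], dtype=numpy.uint8), 'gapsWhere'))
# gapsWhere = array([0]*3 + [7]*2 + [1], dtype='uint8')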
@@ -539,9 +478,9 @@ def moveArrayTo_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg, argDat
     FunctionDefTarget.body.insert(0, assignment)
     FunctionDefTarget.args.args.remove(astArg)

-    return FunctionDefTarget
+    return FunctionDefTarget, allImports

-def evaluateArrayIn_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg, argData: numpy.ndarray) -> ast.FunctionDef:
+def evaluateArrayIn_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg, argData: numpy.ndarray, allImports: UniversalImportTracker) -> Tuple[ast.FunctionDef, UniversalImportTracker]:
     arrayType = type(argData)
     moduleConstructor = arrayType.__module__
     constructorName = arrayType.__name__
@@ -574,10 +513,10 @@ def evaluateArrayIn_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg, ar
             FunctionDefTarget.body.remove(stmt)

     FunctionDefTarget.args.args.remove(astArg)
-    return FunctionDefTarget
+    return FunctionDefTarget, allImports

-def evaluate_argIn_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg, argData: numpy.ndarray, Z0Z_listChaff: List[str]) -> ast.FunctionDef:
-    moduleConstructor = …
+def evaluate_argIn_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg, argData: numpy.ndarray, Z0Z_listChaff: List[str], allImports: UniversalImportTracker) -> Tuple[ast.FunctionDef, UniversalImportTracker]:
+    moduleConstructor = Z0Z_getDatatypeModuleScalar()
     for stmt in FunctionDefTarget.body.copy():
         if isinstance(stmt, ast.Assign):
             if isinstance(stmt.targets[0], ast.Name) and isinstance(stmt.value, ast.Subscript):
@@ -595,10 +534,10 @@ def evaluate_argIn_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg, arg
             FunctionDefTarget.body.insert(0, assignment)
             FunctionDefTarget.body.remove(stmt)
     FunctionDefTarget.args.args.remove(astArg)
-    return FunctionDefTarget
+    return FunctionDefTarget, allImports

-def evaluateAnnAssignIn_body(FunctionDefTarget: ast.FunctionDef) -> ast.FunctionDef:
-    moduleConstructor = …
+def evaluateAnnAssignIn_body(FunctionDefTarget: ast.FunctionDef, allImports: UniversalImportTracker) -> Tuple[ast.FunctionDef, UniversalImportTracker]:
+    moduleConstructor = Z0Z_getDatatypeModuleScalar()
     for stmt in FunctionDefTarget.body.copy():
         if isinstance(stmt, ast.AnnAssign):
             if isinstance(stmt.target, ast.Name) and isinstance(stmt.value, ast.Constant):
@@ -609,7 +548,7 @@ def evaluateAnnAssignIn_body(FunctionDefTarget: ast.FunctionDef) -> ast.Function
                 assignment = ast.Assign(targets=[astAssignee], value=astCall)
                 FunctionDefTarget.body.insert(0, assignment)
                 FunctionDefTarget.body.remove(stmt)
-    return FunctionDefTarget
+    return FunctionDefTarget, allImports

 def removeIdentifierFrom_body(FunctionDefTarget: ast.FunctionDef, astArg: ast.arg) -> ast.FunctionDef:
     for stmt in FunctionDefTarget.body.copy():
@@ -648,158 +587,51 @@ def astNameToAstConstant(FunctionDefTarget: ast.FunctionDef, name: str, value: i

     return cast(ast.FunctionDef, NodeReplacer(findName, replaceWithConstant).visit(FunctionDefTarget))

-def …
-    # Use NodeReplacer to handle decorator cleanup
-    def isNumbaJitDecorator(node: ast.AST) -> bool:
-        return ((isinstance(node, ast.Call) and isinstance(node.func, ast.Attribute)
-            and getattr(node.func.value, "id", None) == "numba" and node.func.attr == "jit")
-            or
-            (isinstance(node, ast.Call) and isinstance(node.func, ast.Name) and node.func.id == "jit"))
-
-    def extractNumbaParams(node: ast.Call) -> None:
-        nonlocal parametersNumba
-        if parametersNumba is None:
-            parametersNumbaExtracted: Dict[str, Any] = {}
-            for keywordItem in node.keywords:
-                if isinstance(keywordItem.value, ast.Constant) and keywordItem.arg is not None:
-                    parametersNumbaExtracted[keywordItem.arg] = keywordItem.value.value
-            if parametersNumbaExtracted:
-                parametersNumba = ParametersNumba(parametersNumbaExtracted) # type: ignore
-        return None
-
-    # Remove existing numba decorators
-    decoratorCleaner = NodeReplacer(isNumbaJitDecorator, extractNumbaParams)
-    FunctionDefTarget = cast(ast.FunctionDef, decoratorCleaner.visit(FunctionDefTarget))
-
-    FunctionDefTarget = Z0Z_UnhandledDecorators(FunctionDefTarget)
-    allImports.addImportFromStr('numba', 'jit')
-    FunctionDefTarget = decorateCallableWithNumba(FunctionDefTarget, parametersNumba)
-
-    # Convert @numba.jit to @jit
-    def isNumbaJitCall(node: ast.AST) -> bool:
-        return (isinstance(node, ast.Call) and isinstance(node.func, ast.Attribute) and node.func.attr == "jit")
-
-    def convertToPlainJit(node: ast.Call) -> ast.Call:
-        node.func = ast.Name(id="jit", ctx=ast.Load())
-        return node
-
-    jitSimplifier = NodeReplacer(isNumbaJitCall, convertToPlainJit)
-    return cast(ast.FunctionDef, jitSimplifier.visit(FunctionDefTarget))
-
-def makeLauncher(callableTarget: str) -> ast.Module:
+def makeLauncherJobNumba(callableTarget: str, pathFilenameFoldsTotal: pathlib.Path) -> ast.Module:
     linesLaunch = f"""
 if __name__ == '__main__':
     import time
     timeStart = time.perf_counter()
-    {callableTarget}()
-    print(time.perf_counter() - timeStart)
+    foldsTotal = {callableTarget}()
+    print(foldsTotal, time.perf_counter() - timeStart)
+    writeStream = open('{pathFilenameFoldsTotal.as_posix()}', 'w')
+    writeStream.write(str(foldsTotal))
+    writeStream.close()
 """
-
-    return astLaunch
-
-def make_writeFoldsTotal(stateJob: computationState, pathFilenameFoldsTotal: pathlib.Path) -> ast.Module:
-    allImports.addImportFromStr('numba', 'objmode')
-    linesWriteFoldsTotal = f"""
-    groupsOfFolds *= {str(stateJob['foldGroups'][-1])}
-    print(groupsOfFolds)
-    with objmode():
-        open('{pathFilenameFoldsTotal.as_posix()}', 'w').write(str(groupsOfFolds))
-    return groupsOfFolds
-"""
-    return ast.parse(linesWriteFoldsTotal)
-
-def writeJobNumba(listDimensions: Sequence[int], callableTarget: str, algorithmSource: ModuleType, parametersNumba: Optional[ParametersNumba]=None, pathFilenameWriteJob: Optional[Union[str, os.PathLike[str]]] = None, **keywordArguments: Optional[Any]) -> pathlib.Path:
-    """
-    Parameters:
-        **keywordArguments: most especially for `computationDivisions` if you want to make a parallel job. Also `CPUlimit`.
-    """
-    stateJob = makeStateJob(listDimensions, writeJob=False, **keywordArguments)
-    pythonSource = inspect.getsource(algorithmSource)
-    astModule = ast.parse(pythonSource)
-
-    for statement in astModule.body:
-        if isinstance(statement, (ast.Import, ast.ImportFrom)):
-            allImports.addAst(statement)
-
-    FunctionDefTarget = next((node for node in astModule.body if isinstance(node, ast.FunctionDef) and node.name == callableTarget), None)
-
-    if not FunctionDefTarget:
-        raise ValueError(f"Could not find function {callableTarget} in source code.")
-
-    # Define argument processing rules
-    argumentRules = [
-        (lambda arg: arg.arg in ['connectionGraph', 'gapsWhere'],
-         lambda node, arg: moveArrayTo_body(node, arg, stateJob[arg.arg])),
-
-        (lambda arg: arg.arg in ['track'],
-         lambda node, arg: evaluateArrayIn_body(node, arg, stateJob[arg.arg])),
-
-        (lambda arg: arg.arg in ['my'],
-         lambda node, arg: evaluate_argIn_body(node, arg, stateJob[arg.arg], ['taskIndex', 'dimensionsTotal'])),
-
-        (lambda arg: arg.arg in ['foldGroups'],
-         lambda node, arg: removeIdentifierFrom_body(node, arg))
-    ]
-
-    # Process arguments using ArgumentProcessor
-    argumentProcessor = ArgumentProcessor(argumentRules)
-    FunctionDefTarget = argumentProcessor.process(FunctionDefTarget)
+    return ast.parse(linesLaunch)

-
-
-
+def addReturnJobNumba(FunctionDefTarget: ast.FunctionDef, stateJob: computationState, allImports: UniversalImportTracker) -> Tuple[ast.FunctionDef, UniversalImportTracker]:
+    """Add multiplication and return statement to function, properly constructing AST nodes."""
+    # Create AST for multiplication operation
+    multiplicand = Z0Z_identifierCountFolds
+    datatype = hackSSOTdatatype(multiplicand)
+    multiplyOperation = ast.BinOp(
+        left=ast.Name(id=multiplicand, ctx=ast.Load()),
+        op=ast.Mult(), right=ast.Constant(value=int(stateJob['foldGroups'][-1])))

-
+    returnStatement = ast.Return(value=multiplyOperation)

-
-
-    FunctionDefTarget. …
+    datatypeModuleScalar = Z0Z_getDatatypeModuleScalar()
+    allImports.addImportFromStr(datatypeModuleScalar, datatype)
+    FunctionDefTarget.returns = ast.Name(id=datatype, ctx=ast.Load())

-
+    FunctionDefTarget.body.append(returnStatement)

-
+    return FunctionDefTarget, allImports

-    … [removed lines truncated in the diff rendering]
-    else:
-        pathFilenameWriteJob = pathlib.Path(pathFilenameWriteJob)
-    pathFilenameWriteJob.parent.mkdir(parents=True, exist_ok=True)
-
-    pathFilenameWriteJob.write_text(pythonSource)
-    return pathFilenameWriteJob
-
-if __name__ == '__main__':
-    setDatatypeModule('numpy', sourGrapes=True)
-    setDatatypeFoldsTotal('int64', sourGrapes=True)
-    setDatatypeElephino('uint8', sourGrapes=True)
-    setDatatypeLeavesTotal('uint8', sourGrapes=True)
-    listCallablesInline: List[str] = ['countInitialize', 'countParallel', 'countSequential']
-    datatypeModuleScalar = 'numba'
-    datatypeModuleDecorator = 'numba'
-    callableDispatcher = 'doTheNeedful'
-    makeNumbaOptimizedFlow(listCallablesInline, callableDispatcher)
-
-    listDimensions = [5,5]
-    setDatatypeFoldsTotal('int64', sourGrapes=True)
-    setDatatypeElephino('uint8', sourGrapes=True)
-    setDatatypeLeavesTotal('uint8', sourGrapes=True)
-    from mapFolding.syntheticModules import numba_countSequential
-    algorithmSource: ModuleType = numba_countSequential
-    datatypeModuleScalar = 'numba'
-    datatypeModuleDecorator = 'numba'
-    allImports = UniversalImportTracker()
-    pathFilenameModule = writeJobNumba(listDimensions, 'countSequential', algorithmSource, parametersNumbaDEFAULT)
+def unrollWhileLoop(FunctionDefTarget: ast.FunctionDef, iteratorName: str, iterationsTotal: int, connectionGraph: numpy.ndarray[Tuple[int, int, int], numpy.dtype[numpy.integer[Any]]]) -> ast.FunctionDef:
+    """
+    Unroll the countGaps loop: in theDao, it is a while loop, of course.
+    However, it could be written as `for indexDimension in range(dimensionsTotal):`.
+    It is useful to note that it could also be written as `for indexDimension in range(connectionGraph.shape[0]):`.
+    We will unroll the loop into a series of stateJob['my'][indexMy.dimensionsTotal]-many code blocks that are similar but not identical.
+    In each code block, we know the value of the identifier `indexDimension`, so we replace the identifier with its value.
+    Furthermore, we will split connectionGraph into arrays along the first axis.
+    `connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]`
+    `connectionGraph0[leaf1ndex, leafBelow[leafConnectee]]`
+    `connectionGraph1[leaf1ndex, leafBelow[leafConnectee]]`
+    `connectionGraphN[leaf1ndex, leafBelow[leafConnectee]]`
+
+    After unrolling, we can remove three `indexDimension` statements: 1) the first initialization, which is really a memory allocation, 2) the loop initialization, and 3) the loop increment.
+    """
+    return FunctionDefTarget
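The unrollWhileLoop docstring above describes substituting a known `indexDimension` value into each unrolled copy of the loop body. A minimal sketch of that substitution step (a hypothetical helper for illustration; the package's function is still a stub that returns FunctionDefTarget unchanged):

import ast
import copy

def unrollBodyByIndex(loopBody: list, iteratorName: str, iterationsTotal: int) -> list:
    """Emit iterationsTotal copies of loopBody with `iteratorName` replaced by 0, 1, ..."""
    class SubstituteIndex(ast.NodeTransformer):
        def __init__(self, value: int) -> None:
            self.value = value
        def visit_Name(self, node: ast.Name):
            if node.id == iteratorName:
                return ast.copy_location(ast.Constant(value=self.value), node)
            return node

    unrolled = []
    for indexDimension in range(iterationsTotal):
        for statement in copy.deepcopy(loopBody):
            unrolled.append(ast.fix_missing_locations(SubstituteIndex(indexDimension).visit(statement)))
    return unrolled

body = ast.parse("leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]").body
print("\n".join(ast.unparse(stmt) for stmt in unrollBodyByIndex(body, 'indexDimension', 2)))
# leafConnectee = connectionGraph[0, leaf1ndex, leaf1ndex]
# leafConnectee = connectionGraph[1, leaf1ndex, leaf1ndex]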
mapfolding-0.3.12/mapFolding/someAssemblyRequired/synthesizeNumbaHardcoding.py
@@ -0,0 +1,188 @@
+from synthesizeNumba import *
+
+def makeNumbaOptimizedFlow(listCallablesInline: List[str], callableDispatcher: Optional[str] = None, algorithmSource: Optional[ModuleType] = None) -> None:
+    if not algorithmSource:
+        algorithmSource = getAlgorithmSource()
+
+    formatModuleNameDEFAULT = "numba_{callableTarget}"
+
+    # When I am a more competent programmer, I will make getPathFilenameWrite dependent on makeAstImport or vice versa,
+    # so the name of the physical file doesn't get out of whack with the name of the logical module.
+    def getPathFilenameWrite(callableTarget: str
+                        , pathWrite: Optional[pathlib.Path] = None
+                        , formatFilenameWrite: Optional[str] = None
+                        ) -> pathlib.Path:
+        if not pathWrite:
+            pathWrite = getPathSyntheticModules()
+        if not formatFilenameWrite:
+            formatFilenameWrite = formatModuleNameDEFAULT + '.py'
+
+        pathFilename = pathWrite / formatFilenameWrite.format(callableTarget=callableTarget)
+        return pathFilename
+
+    def makeAstImport(callableTarget: str
+                        , packageName: Optional[str] = None
+                        , subPackageName: Optional[str] = None
+                        , moduleName: Optional[str] = None
+                        , astNodeLogicalPathThingy: Optional[ast.AST] = None
+                        ) -> ast.ImportFrom:
+        """Creates import AST node for synthetic modules."""
+        if astNodeLogicalPathThingy is None:
+            if packageName is None:
+                packageName = myPackageNameIs
+            if subPackageName is None:
+                subPackageName = moduleOfSyntheticModules
+            if moduleName is None:
+                moduleName = formatModuleNameDEFAULT.format(callableTarget=callableTarget)
+            module=f'{packageName}.{subPackageName}.{moduleName}'
+        else:
+            module = str(astNodeLogicalPathThingy)
+        return ast.ImportFrom(
+            module=module,
+            names=[ast.alias(name=callableTarget, asname=None)],
+            level=0
+        )
+
+    listStuffYouOughtaKnow: List[youOughtaKnow] = []
+
+    for callableTarget in listCallablesInline:
+        pythonSource = inspect.getsource(algorithmSource)
+        parametersNumba = None
+        unpackArrays = False
+        match callableTarget:
+            case 'countParallel':
+                parametersNumba = parametersNumbaSuperJitParallel
+            case 'countSequential':
+                parametersNumba = parametersNumbaSuperJit
+                unpackArrays = True
+            case 'countInitialize':
+                parametersNumba = parametersNumbaDEFAULT
+        pythonSource = inlineOneCallable(pythonSource, callableTarget, parametersNumba, unpackArrays)
+        if not pythonSource:
+            raise Exception("Pylance, OMG! The sky is falling!")
+
+        pathFilename = getPathFilenameWrite(callableTarget)
+
+        listStuffYouOughtaKnow.append(youOughtaKnow(
+            callableSynthesized=callableTarget,
+            pathFilenameForMe=pathFilename,
+            astForCompetentProgrammers=makeAstImport(callableTarget)
+        ))
+        pythonSource = autoflake.fix_code(pythonSource, ['mapFolding', 'numba', 'numpy'])
+        pathFilename.write_text(pythonSource)
+
+    # Generate dispatcher if requested
+    if callableDispatcher:
+        pythonSource = inspect.getsource(algorithmSource)
+        pythonSource = makeDispatcherNumba(pythonSource, callableDispatcher, listStuffYouOughtaKnow)
+        if not pythonSource:
+            raise FREAKOUT
+
+        pathFilename = getPathFilenameWrite(callableDispatcher)
+
+        listStuffYouOughtaKnow.append(youOughtaKnow(
+            callableSynthesized=callableDispatcher,
+            pathFilenameForMe=pathFilename,
+            astForCompetentProgrammers=makeAstImport(callableDispatcher)
+        ))
+        pythonSource = autoflake.fix_code(pythonSource, ['mapFolding', 'numba', 'numpy'])
+        pathFilename.write_text(pythonSource)
+
+def writeJobNumba(listDimensions: Sequence[int], callableTarget: str, algorithmSource: ModuleType, parametersNumba: Optional[ParametersNumba]=None, pathFilenameWriteJob: Optional[Union[str, os.PathLike[str]]] = None, **keywordArguments: Optional[Any]) -> pathlib.Path:
+    """ Parameters: **keywordArguments: most especially for `computationDivisions` if you want to make a parallel job. Also `CPUlimit`. """
+    """Notes about the existing logic:
+    - the synthesized module must run well as a standalone interpreted Python script
+    - `writeJobNumba` synthesizes a parameter-specific module by starting with code synthesized by `makeNumbaOptimizedFlow`, which improves the optimization
+    - similarly, `writeJobNumba` should be a solid foundation for more optimizations, most especially compiling to a standalone executable, but the details of the next optimization step are unknown
+    - the minimum runtime (on my computer) to compute a value unknown to mathematicians is 26 hours, therefore, we ant to ensure the value is seen by the user, but we must have ultra-light overhead.
+    - perf_counter is for testing. When I run a real job, I delete those lines
+    - avoid `with` statement
+    """
+    stateJob = makeStateJob(listDimensions, writeJob=False, **keywordArguments)
+    pythonSource = inspect.getsource(algorithmSource)
+    astModule = ast.parse(pythonSource)
+
+    allImports = UniversalImportTracker()
+
+    for statement in astModule.body:
+        if isinstance(statement, (ast.Import, ast.ImportFrom)):
+            allImports.addAst(statement)
+
+    FunctionDefTarget = next((node for node in astModule.body if isinstance(node, ast.FunctionDef) and node.name == callableTarget), None)
+    if not FunctionDefTarget: raise ValueError(f"I received `{callableTarget=}` and {algorithmSource.__name__=}, but I could not find that function in that source.")
+
+    for pirateScowl in FunctionDefTarget.args.args.copy():
+        match pirateScowl.arg:
+            case 'my':
+                FunctionDefTarget, allImports = evaluate_argIn_body(FunctionDefTarget, pirateScowl, stateJob[pirateScowl.arg], ['taskIndex', 'dimensionsTotal'], allImports)
+            case 'track':
+                FunctionDefTarget, allImports = evaluateArrayIn_body(FunctionDefTarget, pirateScowl, stateJob[pirateScowl.arg], allImports)
+            # TODO remove this after implementing `unrollWhileLoop`
+            case 'connectionGraph':
+                FunctionDefTarget, allImports = moveArrayTo_body(FunctionDefTarget, pirateScowl, stateJob[pirateScowl.arg], allImports)
+            case 'gapsWhere':
+                FunctionDefTarget, allImports = moveArrayTo_body(FunctionDefTarget, pirateScowl, stateJob[pirateScowl.arg], allImports)
+            case 'foldGroups':
+                FunctionDefTarget = removeIdentifierFrom_body(FunctionDefTarget, pirateScowl)
+
+    # Move function parameters to the function body,
+    # initialize identifiers with their state types and values,
+    # and replace static-valued identifiers with their values.
+    FunctionDefTarget, allImports = evaluateAnnAssignIn_body(FunctionDefTarget, allImports)
+    FunctionDefTarget = astNameToAstConstant(FunctionDefTarget, 'dimensionsTotal', int(stateJob['my'][indexMy.dimensionsTotal]))
+    FunctionDefTarget = astObjectToAstConstant(FunctionDefTarget, 'foldGroups[-1]', int(stateJob['foldGroups'][-1]))
+
+    FunctionDefTarget = unrollWhileLoop(FunctionDefTarget, 'indexDimension', stateJob['my'][indexMy.dimensionsTotal], stateJob['connectionGraph'])
+
+    FunctionDefTarget, allImports = addReturnJobNumba(FunctionDefTarget, stateJob, allImports)
+    FunctionDefTarget, allImports = makeDecoratorJobNumba(FunctionDefTarget, allImports, parametersNumba)
+
+    pathFilenameFoldsTotal = getPathFilenameFoldsTotal(stateJob['mapShape'])
+    # TODO consider: 1) launcher is a function, 2) if __name__ calls the launcher function, and 3) the launcher is "jitted", even just a light jit, then 4) `FunctionDefTarget` could be superJit.
+    astLauncher = makeLauncherJobNumba(FunctionDefTarget.name, pathFilenameFoldsTotal)
+
+    astImports = allImports.makeListAst()
+
+    astModule = ast.Module(body=cast(List[ast.stmt], astImports + [FunctionDefTarget] + [astLauncher]), type_ignores=[])
+    ast.fix_missing_locations(astModule)
+
+    pythonSource = ast.unparse(astModule)
+    pythonSource = autoflake.fix_code(pythonSource, ['mapFolding', 'numba', 'numpy'])
+
+    if pathFilenameWriteJob is None:
+        filename = getFilenameFoldsTotal(stateJob['mapShape'])
+        pathRoot = getPathJobRootDEFAULT()
+        pathFilenameWriteJob = pathlib.Path(pathRoot, pathlib.Path(filename).stem, pathlib.Path(filename).with_suffix('.py'))
+    else:
+        pathFilenameWriteJob = pathlib.Path(pathFilenameWriteJob)
+    pathFilenameWriteJob.parent.mkdir(parents=True, exist_ok=True)
+
+    pathFilenameWriteJob.write_text(pythonSource)
+    return pathFilenameWriteJob
+
+def mainBig():
+    setDatatypeModule('numpy', sourGrapes=True)
+    setDatatypeFoldsTotal('int64', sourGrapes=True)
+    setDatatypeElephino('uint8', sourGrapes=True)
+    setDatatypeLeavesTotal('uint8', sourGrapes=True)
+    listCallablesInline: List[str] = ['countInitialize', 'countParallel', 'countSequential']
+    Z0Z_setDatatypeModuleScalar('numba')
+    Z0Z_setDecoratorCallable('jit')
+    callableDispatcher = 'doTheNeedful'
+    makeNumbaOptimizedFlow(listCallablesInline, callableDispatcher)
+
+def mainSmall():
+    listDimensions = [6,6]
+    setDatatypeFoldsTotal('int64', sourGrapes=True)
+    setDatatypeElephino('uint8', sourGrapes=True)
+    setDatatypeLeavesTotal('uint8', sourGrapes=True)
+    from mapFolding.syntheticModules import numba_countSequential
+    algorithmSource: ModuleType = numba_countSequential
+    Z0Z_setDatatypeModuleScalar('numba')
+    Z0Z_setDecoratorCallable('jit')
+    writeJobNumba(listDimensions, 'countSequential', algorithmSource, parametersNumbaDEFAULT)
+
+if __name__ == '__main__':
+    mainBig()
+
+    mainSmall()
{mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/syntheticModules/numba_countParallel.py
RENAMED
@@ -1,9 +1,9 @@
-from mapFolding import indexMy
 from mapFolding import indexTrack
+from mapFolding import indexMy
 from numba import uint8
-from numba import prange
 from numba import jit
 from numba import int64
+from numba import prange
 from numpy import ndarray
 from numpy import dtype
 from numpy import integer
{mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/syntheticModules/numba_countSequential.py
RENAMED
@@ -1,5 +1,5 @@
-from mapFolding import indexMy
 from mapFolding import indexTrack
+from mapFolding import indexMy
 from numba import uint8
 from numba import jit
 from numba import int64
@@ -25,33 +25,33 @@ def countSequential(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer
     gap1ndex = my[indexMy.gap1ndex.value]
     taskIndex = my[indexMy.taskIndex.value]
     groupsOfFolds: int = 0
-    doFindGaps = True
     while leaf1ndex:
-        if …
-        … [removed lines truncated in the diff rendering]
+        if leaf1ndex <= 1 or leafBelow[0] == 1:
+            if leaf1ndex > foldGroups[-1]:
+                groupsOfFolds += 1
+            else:
+                dimensionsUnconstrained = dimensionsTotal
+                gap1ndexCeiling = gapRangeStart[leaf1ndex - 1]
+                indexDimension = 0
+                while indexDimension < dimensionsTotal:
+                    if connectionGraph[indexDimension, leaf1ndex, leaf1ndex] == leaf1ndex:
+                        dimensionsUnconstrained -= 1
+                    else:
+                        leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]
+                        while leafConnectee != leaf1ndex:
+                            gapsWhere[gap1ndexCeiling] = leafConnectee
+                            if countDimensionsGapped[leafConnectee] == 0:
+                                gap1ndexCeiling += 1
+                            countDimensionsGapped[leafConnectee] += 1
+                            leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
+                    indexDimension += 1
+                indexMiniGap = gap1ndex
+                while indexMiniGap < gap1ndexCeiling:
+                    gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
+                    if countDimensionsGapped[gapsWhere[indexMiniGap]] == dimensionsUnconstrained:
+                        gap1ndex += 1
+                    countDimensionsGapped[gapsWhere[indexMiniGap]] = 0
+                    indexMiniGap += 1
         while leaf1ndex and gap1ndex == gapRangeStart[leaf1ndex - 1]:
             leaf1ndex -= 1
             leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
{mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/theDao.py
RENAMED
@@ -36,9 +36,6 @@ def countGaps(gapsWhere: ndarray[Tuple[int], dtype[integer[Any]]], my: ndarray[T
     gap1ndexCeilingIncrement(my=my)
     track[indexTrack.countDimensionsGapped.value, my[indexMy.leafConnectee.value]] += 1

-def dimension1ndexIncrement(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
-    my[indexMy.indexDimension.value] += 1
-
 def dimensionsUnconstrainedCondition(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
     return connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], my[indexMy.leaf1ndex.value]] == my[indexMy.leaf1ndex.value]

@@ -56,6 +53,9 @@ def findGapsInitializeVariables(my: ndarray[Tuple[int], dtype[integer[Any]]], tr
     my[indexMy.gap1ndexCeiling.value] = track[indexTrack.gapRangeStart.value, my[indexMy.leaf1ndex.value] - 1]
     my[indexMy.indexDimension.value] = 0

+def indexDimensionIncrement(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
+    my[indexMy.indexDimension.value] += 1
+
 def indexMiniGapIncrement(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
     my[indexMy.indexMiniGap.value] += 1

@@ -81,7 +81,7 @@ def leafConnecteeUpdate(connectionGraph: ndarray[Tuple[int, int, int], dtype[int
 def loopingLeavesConnectedToActiveLeaf(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
     return my[indexMy.leafConnectee.value] != my[indexMy.leaf1ndex.value]

-def …
+def loopUpToDimensionsTotal(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
     return my[indexMy.indexDimension.value] < my[indexMy.dimensionsTotal.value]

 def loopingToActiveGapCeiling(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
@@ -111,7 +111,7 @@ def countInitialize(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer
     while activeLeafGreaterThan0Condition(my=my):
         if activeLeafIsTheFirstLeafCondition(my=my) or leafBelowSentinelIs1Condition(track=track):
             findGapsInitializeVariables(my=my, track=track)
-            while …
+            while loopUpToDimensionsTotal(my=my):
                 if dimensionsUnconstrainedCondition(connectionGraph=connectionGraph, my=my):
                     dimensionsUnconstrainedDecrement(my=my)
                 else:
@@ -119,7 +119,7 @@ def countInitialize(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer
                     while loopingLeavesConnectedToActiveLeaf(my=my):
                         countGaps(gapsWhere=gapsWhere, my=my, track=track)
                         leafConnecteeUpdate(connectionGraph=connectionGraph, my=my, track=track)
-                …
+                indexDimensionIncrement(my=my)
             if allDimensionsAreUnconstrained(my=my):
                 insertUnconstrainedLeaf(gapsWhere=gapsWhere, my=my)
             indexMiniGapInitialization(my=my)
@@ -159,7 +159,7 @@ def countParallel(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[A
                 groupsOfFolds += 1
             else:
                 findGapsInitializeVariables(my=my, track=track)
-                while …
+                while loopUpToDimensionsTotal(my=my):
                     if dimensionsUnconstrainedCondition(connectionGraph=connectionGraph, my=my):
                         dimensionsUnconstrainedDecrement(my=my)
                     else:
@@ -168,7 +168,7 @@ def countParallel(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[A
                             if thereAreComputationDivisionsYouMightSkip(my=my):
                                 countGaps(gapsWhere=gapsWhere, my=my, track=track)
                             leafConnecteeUpdate(connectionGraph=connectionGraph, my=my, track=track)
-                    …
+                    indexDimensionIncrement(my=my)
                 indexMiniGapInitialization(my=my)
                 while loopingToActiveGapCeiling(my=my):
                     filterCommonGaps(gapsWhere=gapsWhere, my=my, track=track)
@@ -187,27 +187,26 @@ def countSequential( connectionGraph: ndarray[Tuple[int, int, int], dtype[intege
                 ) -> None:

     groupsOfFolds: int = 0
-    doFindGaps = True # Frankly, I can't figure out if `doFindGaps` is or is not faster. Furthermore, I have a strong feeling there is an even better way.

     while activeLeafGreaterThan0Condition(my=my):
-        if …
-        … [removed lines truncated in the diff rendering]
+        if activeLeafIsTheFirstLeafCondition(my=my) or leafBelowSentinelIs1Condition(track=track):
+            if activeLeafGreaterThanLeavesTotalCondition(foldGroups=foldGroups, my=my):
+                groupsOfFolds += 1
+            else:
+                findGapsInitializeVariables(my=my, track=track)
+                while loopUpToDimensionsTotal(my=my):
+                    if dimensionsUnconstrainedCondition(connectionGraph=connectionGraph, my=my):
+                        dimensionsUnconstrainedDecrement(my=my)
+                    else:
+                        leafConnecteeInitialization(connectionGraph=connectionGraph, my=my)
+                        while loopingLeavesConnectedToActiveLeaf(my=my):
+                            countGaps(gapsWhere=gapsWhere, my=my, track=track)
+                            leafConnecteeUpdate(connectionGraph=connectionGraph, my=my, track=track)
+                    indexDimensionIncrement(my=my)
+                indexMiniGapInitialization(my=my)
+                while loopingToActiveGapCeiling(my=my):
+                    filterCommonGaps(gapsWhere=gapsWhere, my=my, track=track)
+                    indexMiniGapIncrement(my=my)
         while backtrackCondition(my=my, track=track):
             backtrack(my=my, track=track)
         if placeLeafCondition(my=my):
{mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/theSSOT.py
RENAMED
@@ -249,3 +249,27 @@ def hackSSOTdatatype(identifier: str) -> str:
     elif RubeGoldBerg == 'datatypeLeavesTotal':
         return _get_datatype('leavesTotal')
     raise Exception("Dude, you forgot to set a value in `hackSSOTdatatype`.")
+
+_datatypeModuleScalar = 'numba'
+_decoratorCallable = 'jit'
+def Z0Z_getDatatypeModuleScalar() -> str:
+    return _datatypeModuleScalar
+
+def Z0Z_setDatatypeModuleScalar(moduleName: str) -> str:
+    global _datatypeModuleScalar
+    _datatypeModuleScalar = moduleName
+    return _datatypeModuleScalar
+
+def Z0Z_getDecoratorCallable() -> str:
+    return _decoratorCallable
+
+def Z0Z_setDecoratorCallable(decoratorName: str) -> str:
+    global _decoratorCallable
+    _decoratorCallable = decoratorName
+    return _decoratorCallable
+
+class FREAKOUT(Exception):
+    pass
+
+# This identifier is declared in theDao.py. Two sources of truth is better than 900.
+Z0Z_identifierCountFolds = 'groupsOfFolds'
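The Z0Z getters and setters added to theSSOT.py above form a small mutable registry that the code synthesizer reads when it builds decorators; mainBig() and mainSmall() in synthesizeNumbaHardcoding.py flip it to numba's jit before generating modules. A minimal usage sketch, assuming mapFolding 0.3.12 is installed:

from mapFolding import Z0Z_setDatatypeModuleScalar, Z0Z_setDecoratorCallable
from mapFolding import Z0Z_getDatatypeModuleScalar, Z0Z_getDecoratorCallable

# Point the code synthesizer at numba's `jit` decorator before it writes modules.
Z0Z_setDatatypeModuleScalar('numba')
Z0Z_setDecoratorCallable('jit')

# The synthesizer reads the same registry when it builds `@jit(...)` decorators.
print(Z0Z_getDatatypeModuleScalar(), Z0Z_getDecoratorCallable())  # numba jit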
{mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding/theSSOTnumba.py
RENAMED
@@ -34,7 +34,7 @@ Old notes that are not entirely accurate.
 | `no_cpython_wrapper` | Disable Python C-API wrapper generation | Size | Smallest | Exclusionary |

 """
-
+# NOTE Deepseek removed forceinline=True, inline='always'
 # TODO try to implement all possible parameters, but use `NotRequired` for the more esoteric ones
 class ParametersNumba(TypedDict):
     _dbg_extend_lifetimes: NotRequired[bool]
{mapfolding-0.3.11 → mapfolding-0.3.12}/mapFolding.egg-info/SOURCES.txt
RENAMED
@@ -28,6 +28,7 @@ mapFolding/someAssemblyRequired/getLLVMforNoReason.py
 mapFolding/someAssemblyRequired/makeJob.py
 mapFolding/someAssemblyRequired/synthesizeModuleJAX.py
 mapFolding/someAssemblyRequired/synthesizeNumba.py
+mapFolding/someAssemblyRequired/synthesizeNumbaHardcoding.py
 mapFolding/syntheticModules/numba_countInitialize.py
 mapFolding/syntheticModules/numba_countParallel.py
 mapFolding/syntheticModules/numba_countSequential.py
{mapfolding-0.3.11 → mapfolding-0.3.12}/pyproject.toml
RENAMED
@@ -47,7 +47,7 @@ readme = { file = "README.md", content-type = "text/markdown" }
 requires-python = ">=3.10"
 scripts = { getOEISids = "mapFolding.oeis:getOEISids", clearOEIScache = "mapFolding.oeis:clearOEIScache", OEIS_for_n = "mapFolding.oeis:OEIS_for_n" }
 urls = { Donate = "https://www.patreon.com/integrated", Homepage = "https://github.com/hunterhogan/mapFolding", Repository = "https://github.com/hunterhogan/mapFolding.git" }
-version = "0.3.11"
+version = "0.3.12"

 [tool.coverage]
 report = { exclude_lines = [