mapFolding 0.7.1__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapFolding/beDRY.py +77 -81
- mapFolding/noHomeYet.py +2 -2
- mapFolding/oeis.py +2 -2
- mapFolding/someAssemblyRequired/Z0Z_workbench.py +347 -30
- mapFolding/someAssemblyRequired/__init__.py +4 -3
- mapFolding/someAssemblyRequired/getLLVMforNoReason.py +0 -1
- mapFolding/someAssemblyRequired/ingredientsNumba.py +87 -2
- mapFolding/someAssemblyRequired/synthesizeDataConverters.py +34 -52
- mapFolding/someAssemblyRequired/{synthesizeNumbaJob.py → synthesizeNumbaJobVESTIGIAL.py} +18 -21
- mapFolding/someAssemblyRequired/transformationTools.py +546 -208
- mapFolding/syntheticModules/numbaCount_doTheNeedful.py +197 -12
- mapFolding/theDao.py +23 -16
- mapFolding/theSSOT.py +28 -43
- {mapfolding-0.7.1.dist-info → mapfolding-0.8.0.dist-info}/METADATA +6 -7
- mapfolding-0.8.0.dist-info/RECORD +41 -0
- {mapfolding-0.7.1.dist-info → mapfolding-0.8.0.dist-info}/WHEEL +1 -1
- tests/conftest.py +2 -3
- tests/test_filesystem.py +0 -2
- tests/test_other.py +2 -3
- tests/test_tasks.py +0 -4
- mapFolding/someAssemblyRequired/synthesizeCountingFunctions.py +0 -7
- mapFolding/someAssemblyRequired/synthesizeNumba.py +0 -91
- mapFolding/someAssemblyRequired/synthesizeNumbaModules.py +0 -91
- mapFolding/someAssemblyRequired/whatWillBe.py +0 -357
- mapFolding/syntheticModules/__init__.py +0 -0
- mapFolding/syntheticModules/dataNamespaceFlattened.py +0 -30
- mapFolding/syntheticModules/multiprocessingCount_doTheNeedful.py +0 -216
- mapFolding/syntheticModules/numbaCount.py +0 -90
- mapFolding/syntheticModules/numbaCountSequential.py +0 -111
- mapFolding/syntheticModules/numba_doTheNeedful.py +0 -12
- mapfolding-0.7.1.dist-info/RECORD +0 -51
- /mapFolding/syntheticModules/{numbaCountExample.py → numbaCountHistoricalExample.py} +0 -0
- /mapFolding/syntheticModules/{numba_doTheNeedfulExample.py → numba_doTheNeedfulHistoricalExample.py} +0 -0
- {mapfolding-0.7.1.dist-info → mapfolding-0.8.0.dist-info}/LICENSE +0 -0
- {mapfolding-0.7.1.dist-info → mapfolding-0.8.0.dist-info}/entry_points.txt +0 -0
- {mapfolding-0.7.1.dist-info → mapfolding-0.8.0.dist-info}/top_level.txt +0 -0
@@ -1,91 +0,0 @@
-from collections.abc import Sequence
-from mapFolding.someAssemblyRequired.whatWillBe import LedgerOfImports, ParametersNumba, Z0Z_getDatatypeModuleScalar, parametersNumbaDEFAULT
-from mapFolding.someAssemblyRequired.whatWillBe import Z0Z_getDecoratorCallable
-from mapFolding.someAssemblyRequired import Make, ifThis
-from typing import Any, cast
-import ast
-
-def thisIsNumbaDotJit(Ima: ast.AST) -> bool:
-    return ifThis.isCallNamespace_Identifier(Z0Z_getDatatypeModuleScalar(), Z0Z_getDecoratorCallable())(Ima)
-
-def thisIsJit(Ima: ast.AST) -> bool:
-    return ifThis.isCall_Identifier(Z0Z_getDecoratorCallable())(Ima)
-
-def thisIsAnyNumbaJitDecorator(Ima: ast.AST) -> bool:
-    return thisIsNumbaDotJit(Ima) or thisIsJit(Ima)
-
-def decorateCallableWithNumba(FunctionDefTarget: ast.FunctionDef, allImports: LedgerOfImports, parametersNumba: ParametersNumba | None = None) -> tuple[ast.FunctionDef, LedgerOfImports]:
-    def Z0Z_UnhandledDecorators(astCallable: ast.FunctionDef) -> ast.FunctionDef:
-        # TODO: more explicit handling of decorators. I'm able to ignore this because I know `algorithmSource` doesn't have any decorators.
-        for decoratorItem in astCallable.decorator_list.copy():
-            import warnings
-            astCallable.decorator_list.remove(decoratorItem)
-            warnings.warn(f"Removed decorator {ast.unparse(decoratorItem)} from {astCallable.name}")
-        return astCallable
-
-    def make_arg4parameter(signatureElement: ast.arg) -> ast.Subscript | ast.Name | None:
-        if isinstance(signatureElement.annotation, ast.Subscript) and isinstance(signatureElement.annotation.slice, ast.Tuple):
-            annotationShape: ast.expr = signatureElement.annotation.slice.elts[0]
-            if isinstance(annotationShape, ast.Subscript) and isinstance(annotationShape.slice, ast.Tuple):
-                shapeAsListSlices: list[ast.Slice] = [ast.Slice() for _axis in range(len(annotationShape.slice.elts))]
-                shapeAsListSlices[-1] = ast.Slice(step=ast.Constant(value=1))
-                shapeAST: ast.Slice | ast.Tuple = ast.Tuple(elts=list(shapeAsListSlices), ctx=ast.Load())
-            else:
-                shapeAST = ast.Slice(step=ast.Constant(value=1))
-
-            annotationDtype: ast.expr = signatureElement.annotation.slice.elts[1]
-            if (isinstance(annotationDtype, ast.Subscript) and isinstance(annotationDtype.slice, ast.Attribute)):
-                datatypeAST = annotationDtype.slice.attr
-            else:
-                datatypeAST = None
-
-            ndarrayName = signatureElement.arg
-            Z0Z_hacky_dtype: str = ndarrayName
-            datatype_attr = datatypeAST or Z0Z_hacky_dtype
-            allImports.addImportFromStr(datatypeModuleDecorator, datatype_attr)
-            datatypeNumba = ast.Name(id=datatype_attr, ctx=ast.Load())
-
-            return ast.Subscript(value=datatypeNumba, slice=shapeAST, ctx=ast.Load())
-
-        elif isinstance(signatureElement.annotation, ast.Name):
-            return signatureElement.annotation
-        return None
-
-    datatypeModuleDecorator: str = Z0Z_getDatatypeModuleScalar()
-    list_argsDecorator: Sequence[ast.expr] = []
-
-    list_arg4signature_or_function: list[ast.expr] = []
-    for parameter in FunctionDefTarget.args.args:
-        signatureElement: ast.Subscript | ast.Name | None = make_arg4parameter(parameter)
-        if signatureElement:
-            list_arg4signature_or_function.append(signatureElement)
-
-    if FunctionDefTarget.returns and isinstance(FunctionDefTarget.returns, ast.Name):
-        theReturn: ast.Name = FunctionDefTarget.returns
-        list_argsDecorator = [cast(ast.expr, ast.Call(func=ast.Name(id=theReturn.id, ctx=ast.Load())
-            , args=list_arg4signature_or_function if list_arg4signature_or_function else [], keywords=[] ) )]
-    elif list_arg4signature_or_function:
-        list_argsDecorator = [cast(ast.expr, ast.Tuple(elts=list_arg4signature_or_function, ctx=ast.Load()))]
-
-    for decorator in FunctionDefTarget.decorator_list.copy():
-        if thisIsAnyNumbaJitDecorator(decorator):
-            decorator = cast(ast.Call, decorator)
-            if parametersNumba is None:
-                parametersNumbaSherpa: dict[str, Any] = Make.copy_astCallKeywords(decorator)
-                if (_HunterIsSureThereAreBetterWaysToDoThis := True):
-                    if parametersNumbaSherpa:
-                        parametersNumba = cast(ParametersNumba, parametersNumbaSherpa)
-            FunctionDefTarget.decorator_list.remove(decorator)
-
-    FunctionDefTarget = Z0Z_UnhandledDecorators(FunctionDefTarget)
-    if parametersNumba is None:
-        parametersNumba = parametersNumbaDEFAULT
-    listDecoratorKeywords: list[ast.keyword] = [ast.keyword(arg=parameterName, value=ast.Constant(value=parameterValue)) for parameterName, parameterValue in parametersNumba.items()]
-
-    decoratorModule: str = Z0Z_getDatatypeModuleScalar()
-    decoratorCallable: str = Z0Z_getDecoratorCallable()
-    allImports.addImportFromStr(decoratorModule, decoratorCallable)
-    astDecorator: ast.Call = Make.astCall(Make.astName(decoratorCallable), list_argsDecorator, listDecoratorKeywords)
-
-    FunctionDefTarget.decorator_list = [astDecorator]
-    return FunctionDefTarget, allImports
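The hunk above appears to correspond to the first of the two 91-line deletions in the list above (someAssemblyRequired/synthesizeNumba.py). Its `decorateCallableWithNumba` replaced a function's decorator list with a single `numba.jit` decorator whose positional argument is an explicit signature built from the ndarray annotations. As a rough, hand-written illustration only (not output of the removed code; the function name and options below are invented), an explicitly signed `numba.jit` decoration looks like this:

```python
# Hand-written sketch of the kind of decorator the removed helper emitted:
# an explicit signature (return type called with argument types) plus keyword
# options such as cache/fastmath. Requires numba and numpy to be installed.
import numba
import numpy

@numba.jit(numba.int64(numba.int64[::1]), cache=True, fastmath=True)
def sumLeaves(leafAbove):
    # trivial stand-in body; the real targets are the synthesized counting functions
    total = 0
    for value in leafAbove:
        total += value
    return total

print(sumLeaves(numpy.arange(8, dtype=numpy.int64)))  # 28
```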
@@ -1,91 +0,0 @@
-# pyright: basic
-from os import PathLike
-from typing import TYPE_CHECKING
-if TYPE_CHECKING:
-    from mapFolding.someAssemblyRequired.whatWillBe import ParametersSynthesizeNumbaCallable, listNumbaCallableDispatchees
-    from mapFolding.theSSOT import theModuleOfSyntheticModules
-    from mapFolding.theSSOT import getSourceAlgorithm
-    import types
-
-def makeFlowNumbaOptimized(listCallablesInline: list[ParametersSynthesizeNumbaCallable] = listNumbaCallableDispatchees, callableDispatcher: bool = True, algorithmSource: types.ModuleType = getSourceAlgorithm(), relativePathWrite: str | PathLike[str] = theModuleOfSyntheticModules, filenameModuleWrite: str = 'filenameModuleSyntheticWrite', formatFilenameWrite: str = 'formatStrFilenameForCallableSynthetic'):
-    from mapFolding.someAssemblyRequired.whatWillBe import ParametersSynthesizeNumbaCallable, listNumbaCallableDispatchees
-    from mapFolding.someAssemblyRequired.whatWillBe import LedgerOfImports, Z0Z_autoflake_additional_imports
-    from mapFolding.theSSOT import FREAKOUT
-    from mapFolding.theSSOT import thePathPackage, getDatatypePackage
-    from mapFolding.someAssemblyRequired.whatWillBe import FunctionInliner, YouOughtaKnow, ast_Identifier
-    from pathlib import Path
-    from typing import cast
-    import ast
-    import autoflake
-    import inspect
-    import warnings
-    if relativePathWrite and Path(relativePathWrite).is_absolute():
-        raise ValueError("The path to write the module must be relative to the root of the package.")
-
-    listStuffYouOughtaKnow: list[YouOughtaKnow] = []
-
-    listFunctionDefs: list[ast.FunctionDef] = []
-    allImportsModule = LedgerOfImports()
-    for tupleParameters in listCallablesInline:
-        pythonSource: str = inspect.getsource(algorithmSource)
-        astModule: ast.Module = ast.parse(pythonSource)
-        if allImports is None:
-            allImports = LedgerOfImports(astModule)
-        else:
-            allImports.walkThis(astModule)
-
-        if inlineCallables:
-            dictionaryFunctionDef: dict[ast_Identifier, ast.FunctionDef] = {statement.name: statement for statement in astModule.body if isinstance(statement, ast.FunctionDef)}
-            callableInlinerWorkhorse = FunctionInliner(dictionaryFunctionDef)
-            FunctionDefTarget = callableInlinerWorkhorse.inlineFunctionBody(callableTarget)
-        else:
-            FunctionDefTarget = next((statement for statement in astModule.body if isinstance(statement, ast.FunctionDef) and statement.name == callableTarget), None)
-        if not FunctionDefTarget:
-            raise ValueError(f"Could not find function {callableTarget} in source code")
-
-        ast.fix_missing_locations(FunctionDefTarget)
-        listFunctionDefs.append(FunctionDefTarget)
-        allImportsModule.update(allImports)
-
-    listAstImports: list[ast.ImportFrom | ast.Import] = allImportsModule.makeListAst()
-    additional_imports: list[str] = Z0Z_autoflake_additional_imports
-    additional_imports.append(getDatatypePackage())
-
-    astModule = ast.Module(body=cast(list[ast.stmt], listAstImports + listFunctionDefs), type_ignores=[])
-    ast.fix_missing_locations(astModule)
-    pythonSource: str = ast.unparse(astModule)
-    if not pythonSource: raise FREAKOUT
-    pythonSource = autoflake.fix_code(pythonSource, additional_imports)
-
-    pathWrite: Path = thePathPackage / relativePathWrite
-
-    if not filenameWrite:
-        if len(listCallableSynthesized) == 1:
-            callableTarget: str = listCallableSynthesized[0].callableTarget
-        else:
-            callableTarget = filenameWriteCallableTargetDEFAULT
-        # NOTE WARNING I think I broken this format string. See theSSOT.py
-        filenameWrite = formatFilenameWrite.format(callableTarget=callableTarget)
-    else:
-        if not filenameWrite.endswith('.py'):
-            warnings.warn(f"Filename {filenameWrite=} does not end with '.py'.")
-
-    pathFilename: Path = pathWrite / filenameWrite
-
-    pathFilename.write_text(pythonSource)
-
-    howIsThisStillAThing: Path = thePathPackage.parent
-    dumbassPythonNamespace: tuple[str, ...] = pathFilename.relative_to(howIsThisStillAThing).with_suffix('').parts
-    ImaModule: str = '.'.join(dumbassPythonNamespace)
-
-    for item in listCallableSynthesized:
-        callableTarget: str = item.callableTarget
-        astImportFrom = ast.ImportFrom(module=ImaModule, names=[ast.alias(name=callableTarget, asname=None)], level=0)
-        stuff = YouOughtaKnow(callableSynthesized=callableTarget, pathFilenameForMe=pathFilename, astForCompetentProgrammers=astImportFrom)
-        listStuffYouOughtaKnow.append(stuff)
-    listStuffYouOughtaKnow.extend(listStuff)
-
-    if callableDispatcher:
-        pass
-
-    return listStuffYouOughtaKnow
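The second 91-line deletion (apparently someAssemblyRequired/synthesizeNumbaModules.py) removed `makeFlowNumbaOptimized`, which parsed the algorithm source with `inspect` and `ast`, selected or inlined target FunctionDefs, reassembled a module, and ran autoflake over the unparsed source before writing it to disk. A minimal, self-contained sketch of that extract-and-rewrite pattern (not the removed function itself; the stdlib module `textwrap` stands in for the package's algorithm module, and the function choice is arbitrary):

```python
# Self-contained sketch of the extract-and-rewrite pattern used by the removed
# makeFlowNumbaOptimized: parse a module's source, keep its imports plus one
# FunctionDef, rebuild a module, and clean the result with autoflake.
import ast
import inspect
import textwrap as algorithmSource  # stand-in for getSourceAlgorithm()

import autoflake  # third-party; the removed code depended on it too

astModule = ast.parse(inspect.getsource(algorithmSource))
imports = [stmt for stmt in astModule.body if isinstance(stmt, (ast.Import, ast.ImportFrom))]
target = next(stmt for stmt in astModule.body if isinstance(stmt, ast.FunctionDef))

newModule = ast.Module(body=[*imports, target], type_ignores=[])
ast.fix_missing_locations(newModule)
pythonSource = autoflake.fix_code(ast.unparse(newModule), additional_imports=['numba'])
print(pythonSource)
```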
@@ -1,357 +0,0 @@
-"""
-- Settings for synthesizing the modules used by the package (i.e., the flow for numba)
-- Settings for synthesizing modules that could be used by the package (e.g., the flow for JAX)
-- Therefore, an abstracted system for creating settings for the package
-- And with only a little more effort, an abstracted system for creating settings to synthesize arbitrary subsets of modules for arbitrary packages
-"""
-from mapFolding.someAssemblyRequired.transformationTools import (
-    ast_Identifier,
-    executeActionUnlessDescendantMatches,
-    extractClassDef,
-    extractFunctionDef,
-    ifThis,
-    Make,
-    NodeCollector,
-    NodeReplacer,
-    strDotStrCuzPyStoopid,
-    Then,
-)
-from mapFolding.filesystem import writeStringToHere
-from mapFolding.theSSOT import (
-    FREAKOUT,
-    getDatatypePackage,
-    getSourceAlgorithm,
-    theDataclassIdentifier,
-    theDataclassInstance,
-    theDispatcherCallable,
-    theFileExtension,
-    theFormatStrModuleForCallableSynthetic,
-    theFormatStrModuleSynthetic,
-    theLogicalPathModuleDataclass,
-    theLogicalPathModuleDispatcherSynthetic,
-    theModuleDispatcherSynthetic,
-    theModuleOfSyntheticModules,
-    thePackageName,
-    thePathPackage,
-    theSourceInitializeCallable,
-    theSourceParallelCallable,
-    theSourceSequentialCallable,
-)
-from autoflake import fix_code as autoflake_fix_code
-from collections import defaultdict
-from collections.abc import Sequence
-from inspect import getsource as inspect_getsource
-from mapFolding.someAssemblyRequired.ingredientsNumba import parametersNumbaDEFAULT, parametersNumbaSuperJit, parametersNumbaSuperJitParallel, ParametersNumba
-from pathlib import Path, PurePosixPath
-from types import ModuleType
-from typing import NamedTuple
-from Z0Z_tools import updateExtendPolishDictionaryLists
-import ast
-import dataclasses
-
-@dataclasses.dataclass
-class RecipeSynthesizeFlow:
-    """Settings for synthesizing flow."""
-    # TODO consider `IngredientsFlow` or similar
-    # ========================================
-    # Source
-    sourceAlgorithm: ModuleType = getSourceAlgorithm()
-    sourcePython: str = inspect_getsource(sourceAlgorithm)
-    source_astModule: ast.Module = ast.parse(sourcePython)
-    # https://github.com/hunterhogan/mapFolding/issues/4
-    sourceDispatcherCallable: str = theDispatcherCallable
-    sourceSequentialCallable: str = theSourceSequentialCallable
-    sourceDataclassIdentifier: str = theDataclassIdentifier
-    # I still hate the OOP paradigm. But I like this dataclass stuff.
-
-    # ========================================
-    # Filesystem
-    pathPackage: PurePosixPath = PurePosixPath(thePathPackage)
-    fileExtension: str = theFileExtension
-
-    # ========================================
-    # Logical identifiers
-    # meta
-    formatStrModuleSynthetic: str = theFormatStrModuleSynthetic
-    formatStrModuleForCallableSynthetic: str = theFormatStrModuleForCallableSynthetic
-
-    # Package
-    packageName: ast_Identifier = thePackageName
-
-    # Module
-    # https://github.com/hunterhogan/mapFolding/issues/4
-    Z0Z_flowLogicalPathRoot: str = theModuleOfSyntheticModules
-    moduleDispatcher: str = theModuleDispatcherSynthetic
-    logicalPathModuleDataclass: str = theLogicalPathModuleDataclass
-    # https://github.com/hunterhogan/mapFolding/issues/4
-    # `theLogicalPathModuleDispatcherSynthetic` is a problem. It is defined in theSSOT, but it can also be calculated.
-    logicalPathModuleDispatcher: str = theLogicalPathModuleDispatcherSynthetic
-    dataConverterModule: str = 'dataNamespaceFlattened'
-
-    # Function
-    sequentialCallable: str = sourceSequentialCallable
-    dataclassIdentifier: str = sourceDataclassIdentifier
-    dataConverterCallable: str = 'unpackDataclassPackUp'
-    dispatcherCallable: str = sourceDispatcherCallable
-
-    # Variable
-    dataclassInstance: str = theDataclassInstance
-
-class LedgerOfImports:
-    def __init__(self, startWith: ast.AST | None = None) -> None:
-        self.dictionaryImportFrom: dict[str, list[tuple[str, str | None]]] = defaultdict(list)
-        self.listImport: list[str] = []
-
-        if startWith:
-            self.walkThis(startWith)
-
-    def addAst(self, astImport_: ast.Import | ast.ImportFrom) -> None:
-        if not isinstance(astImport_, (ast.Import, ast.ImportFrom)): # pyright: ignore[reportUnnecessaryIsInstance]
-            raise ValueError(f"Expected ast.Import or ast.ImportFrom, got {type(astImport_)}")
-        if isinstance(astImport_, ast.Import):
-            for alias in astImport_.names:
-                self.listImport.append(alias.name)
-        else:
-            if astImport_.module is not None:
-                for alias in astImport_.names:
-                    self.dictionaryImportFrom[astImport_.module].append((alias.name, alias.asname))
-
-    def addImportStr(self, module: str) -> None:
-        self.listImport.append(module)
-
-    def addImportFromStr(self, module: str, name: str, asname: str | None = None) -> None:
-        self.dictionaryImportFrom[module].append((name, asname))
-
-    def exportListModuleNames(self) -> list[str]:
-        listModuleNames: list[str] = list(self.dictionaryImportFrom.keys())
-        listModuleNames.extend(self.listImport)
-        return sorted(set(listModuleNames))
-
-    def makeListAst(self) -> list[ast.ImportFrom | ast.Import]:
-        listAstImportFrom: list[ast.ImportFrom] = []
-
-        for module, listOfNameTuples in sorted(self.dictionaryImportFrom.items()):
-            listOfNameTuples = sorted(list(set(listOfNameTuples)), key=lambda nameTuple: nameTuple[0])
-            listAlias: list[ast.alias] = []
-            for name, asname in listOfNameTuples:
-                listAlias.append(Make.astAlias(name, asname))
-            listAstImportFrom.append(Make.astImportFrom(module, listAlias))
-
-        listAstImport: list[ast.Import] = [Make.astImport(name) for name in sorted(set(self.listImport))]
-        return listAstImportFrom + listAstImport
-
-    def update(self, *fromLedger: 'LedgerOfImports') -> None:
-        """
-        Update this ledger with imports from one or more other ledgers.
-
-        Parameters:
-            *fromTracker: One or more other `LedgerOfImports` objects from which to merge.
-        """
-        self.dictionaryImportFrom = updateExtendPolishDictionaryLists(self.dictionaryImportFrom, *(ledger.dictionaryImportFrom for ledger in fromLedger), destroyDuplicates=True, reorderLists=True)
-
-        for ledger in fromLedger:
-            self.listImport.extend(ledger.listImport)
-
-    def walkThis(self, walkThis: ast.AST) -> None:
-        for smurf in ast.walk(walkThis):
-            if isinstance(smurf, (ast.Import, ast.ImportFrom)):
-                self.addAst(smurf)
-
-@dataclasses.dataclass
-class Z0Z_IngredientsDataStructure:
-    """Everything necessary to create a data structure should be here."""
-    dataclassDef: ast.ClassDef
-    imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
-
-@dataclasses.dataclass
-class IngredientsFunction:
-    """Everything necessary to integrate a function into a module should be here."""
-    FunctionDef: ast.FunctionDef # hint `Make.astFunctionDef`
-    imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
-
-@dataclasses.dataclass
-class IngredientsModule:
-    """Everything necessary to create one _logical_ `ast.Module` should be here.
-    Extrinsic qualities should be handled externally, such as with `RecipeModule`."""
-    # If an `ast.Module` had a logical name that would be reasonable, but Python is firmly opposed
-    # to a reasonable namespace, therefore, Hunter, you were silly to add a `name` field to this
-    # dataclass for building an `ast.Module`.
-    # name: ast_Identifier
-    # Hey, genius, note that this is dataclasses.InitVar
-    ingredientsFunction: dataclasses.InitVar[Sequence[IngredientsFunction] | IngredientsFunction | None] = None
-
-    # `body` attribute of `ast.Module`
-    imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
-    prologue: list[ast.stmt] = dataclasses.field(default_factory=list)
-    functions: list[ast.FunctionDef | ast.stmt] = dataclasses.field(default_factory=list)
-    epilogue: list[ast.stmt] = dataclasses.field(default_factory=list)
-    launcher: list[ast.stmt] = dataclasses.field(default_factory=list)
-
-    # parameter for `ast.Module` constructor
-    type_ignores: list[ast.TypeIgnore] = dataclasses.field(default_factory=list)
-
-    def __post_init__(self, ingredientsFunction: Sequence[IngredientsFunction] | IngredientsFunction | None = None) -> None:
-        if ingredientsFunction is not None:
-            if isinstance(ingredientsFunction, IngredientsFunction):
-                self.addIngredientsFunction(ingredientsFunction)
-            else:
-                self.addIngredientsFunction(*ingredientsFunction)
-
-    def addIngredientsFunction(self, *ingredientsFunction: IngredientsFunction) -> None:
-        """Add one or more `IngredientsFunction`. """
-        listLedgers: list[LedgerOfImports] = []
-        for definition in ingredientsFunction:
-            self.functions.append(definition.FunctionDef)
-            listLedgers.append(definition.imports)
-        self.imports.update(*listLedgers)
-
-    def _makeModuleBody(self) -> list[ast.stmt]:
-        body: list[ast.stmt] = []
-        body.extend(self.imports.makeListAst())
-        body.extend(self.prologue)
-        body.extend(self.functions)
-        body.extend(self.epilogue)
-        body.extend(self.launcher)
-        # TODO `launcher`, if it exists, must start with `if __name__ == '__main__':` and be indented
-        return body
-
-    def export(self) -> ast.Module:
-        """Create a new `ast.Module` from the ingredients."""
-        return Make.astModule(self._makeModuleBody(), self.type_ignores)
-
-@dataclasses.dataclass
-class RecipeCountingFunction:
-    """Settings for synthesizing counting functions."""
-    ingredients: IngredientsFunction
-
-@dataclasses.dataclass
-class RecipeDispatchFunction:
-    # A "dispatcher" must receive a dataclass instance and return a dataclass instance.
-    # computationStateComplete: ComputationState = dispatcher(computationStateInitialized)
-    # The most critical values in the returned dataclass are foldGroups[0:-1] and leavesTotal
-    # self.foldsTotal = DatatypeFoldsTotal(self.foldGroups[0:-1].sum() * self.leavesTotal)
-    # the function name is required by IngredientsFunction
-    ingredients: IngredientsFunction
-    logicalPathModuleDataclass: str = theLogicalPathModuleDataclass
-    dataclassIdentifier: str = theDataclassIdentifier
-    dataclassInstance: str = theDataclassInstance
-    Z0Z_unpackDataclass: bool = True
-    countDispatcher: bool = True
-    # is this the countDispatcher or what is the information for calling the countDispatcher: import or no? callable identifier? parameters? return type?
-    # countDispatcher lives in `theLogicalPathModuleDispatcherSynthetic`
-    # countDispatcher is named `theDispatcherCallable`
-    # post init
-    # addImportFromStr(self, module: str, name: str, asname: str | None = None)
-
-@dataclasses.dataclass
-class RecipeModule:
-    """How to get one or more logical `ast.Module` on disk as one physical module."""
-    # Physical namespace
-    filenameStem: str
-    fileExtension: str = theFileExtension
-    pathPackage: PurePosixPath = PurePosixPath(thePathPackage)
-
-    # Physical and logical namespace
-    packageName: ast_Identifier | None= thePackageName
-    logicalPathINFIX: ast_Identifier | strDotStrCuzPyStoopid | None = None # module names other than the module itself and the package name
-
-    def _getLogicalPathParent(self) -> str | None:
-        listModules: list[ast_Identifier] = []
-        if self.packageName:
-            listModules.append(self.packageName)
-        if self.logicalPathINFIX:
-            listModules.append(self.logicalPathINFIX)
-        if listModules:
-            return '.'.join(listModules)
-
-    def _getLogicalPathAbsolute(self) -> str:
-        listModules: list[ast_Identifier] = []
-        logicalPathParent: str | None = self._getLogicalPathParent()
-        if logicalPathParent:
-            listModules.append(logicalPathParent)
-        listModules.append(self.filenameStem)
-        return '.'.join(listModules)
-
-    @property
-    def pathFilename(self):
-        """ `PurePosixPath` ensures os-independent formatting of the `dataclass.field` value,
-        but you must convert to `Path` to perform filesystem operations."""
-        pathRoot: PurePosixPath = self.pathPackage
-        filename: str = self.filenameStem + self.fileExtension
-        if self.logicalPathINFIX:
-            whyIsThisStillAThing: list[str] = self.logicalPathINFIX.split('.')
-            pathRoot = pathRoot.joinpath(*whyIsThisStillAThing)
-        return pathRoot.joinpath(filename)
-
-    ingredients: IngredientsModule = IngredientsModule()
-
-    @property
-    def absoluteImport(self) -> ast.Import:
-        return Make.astImport(self._getLogicalPathAbsolute())
-
-    @property
-    def absoluteImportFrom(self) -> ast.ImportFrom:
-        """ `from . import theModule` """
-        logicalPathParent: str = self._getLogicalPathParent() or '.'
-        return Make.astImportFrom(logicalPathParent, [Make.astAlias(self.filenameStem)])
-
-    def writeModule(self) -> None:
-        astModule = self.ingredients.export()
-        ast.fix_missing_locations(astModule)
-        pythonSource: str = ast.unparse(astModule)
-        if not pythonSource: raise FREAKOUT
-        autoflake_additional_imports: list[str] = self.ingredients.imports.exportListModuleNames()
-        if self.packageName:
-            autoflake_additional_imports.append(self.packageName)
-        pythonSource = autoflake_fix_code(pythonSource, autoflake_additional_imports, expand_star_imports=False, remove_all_unused_imports=False, remove_duplicate_keys = False, remove_unused_variables = False,)
-        writeStringToHere(pythonSource, self.pathFilename)
-
-numbaFlow: RecipeSynthesizeFlow = RecipeSynthesizeFlow()
-
-# https://github.com/hunterhogan/mapFolding/issues/3
-sourceSequentialFunctionDef = extractFunctionDef(numbaFlow.sourceSequentialCallable, numbaFlow.source_astModule)
-if sourceSequentialFunctionDef is None: raise FREAKOUT
-
-numbaCountSequential = RecipeCountingFunction(IngredientsFunction(
-    FunctionDef=sourceSequentialFunctionDef,
-    imports=LedgerOfImports(numbaFlow.source_astModule)
-))
-
-numbaDispatcher = RecipeModule(filenameStem=numbaFlow.moduleDispatcher, fileExtension=numbaFlow.fileExtension, pathPackage=numbaFlow.pathPackage,
-    packageName=numbaFlow.packageName, logicalPathINFIX=numbaFlow.Z0Z_flowLogicalPathRoot)
-
-class ParametersSynthesizeNumbaCallable(NamedTuple):
-    callableTarget: str
-    parametersNumba: ParametersNumba | None = None
-    inlineCallables: bool = False
-
-listNumbaCallableDispatchees: list[ParametersSynthesizeNumbaCallable] = [
-    ParametersSynthesizeNumbaCallable('countParallel', parametersNumbaSuperJitParallel, True),
-    ParametersSynthesizeNumbaCallable('countSequential', parametersNumbaSuperJit, True),
-    ParametersSynthesizeNumbaCallable('countInitialize', parametersNumbaDEFAULT, True),
-]
-
-_datatypeModuleScalar = ''
-_decoratorCallable = ''
-
-# if numba
-_datatypeModuleScalar = 'numba'
-_decoratorCallable = 'jit'
-Z0Z_autoflake_additional_imports: list[str] = []
-Z0Z_autoflake_additional_imports.append('numba')
-
-def Z0Z_getDatatypeModuleScalar() -> str:
-    return _datatypeModuleScalar
-
-def Z0Z_setDatatypeModuleScalar(moduleName: str) -> str:
-    global _datatypeModuleScalar
-    _datatypeModuleScalar = moduleName
-    return _datatypeModuleScalar
-
-def Z0Z_getDecoratorCallable() -> str:
-    return _decoratorCallable
-
-def Z0Z_setDecoratorCallable(decoratorName: str) -> str:
-    global _decoratorCallable
-    _decoratorCallable = decoratorName
-    return _decoratorCallable
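The 357-line deletion above appears to be someAssemblyRequired/whatWillBe.py; per the file list, its `LedgerOfImports`, `Ingredients*`, and `Recipe*` machinery was folded into transformationTools.py (+546 -208). The core idea of `LedgerOfImports` is to harvest Import/ImportFrom nodes from an AST and re-emit them deduplicated and sorted. A rough standalone sketch of that idea, using only the standard library (the class itself and helpers such as `Make` are not reproduced here):

```python
# Standalone sketch of the LedgerOfImports idea: walk an AST, record imports,
# then regenerate them deduplicated and sorted.
import ast
from collections import defaultdict

source = "from pathlib import Path\nimport ast\nfrom pathlib import Path\n"
dictionaryImportFrom: dict[str, set[tuple[str, str | None]]] = defaultdict(set)
listImport: set[str] = set()

for node in ast.walk(ast.parse(source)):
    if isinstance(node, ast.Import):
        listImport.update(alias.name for alias in node.names)
    elif isinstance(node, ast.ImportFrom) and node.module is not None:
        dictionaryImportFrom[node.module].update((alias.name, alias.asname) for alias in node.names)

statements: list[ast.stmt] = [
    ast.ImportFrom(module=module, names=[ast.alias(name=name, asname=asname) for name, asname in sorted(names, key=lambda pair: pair[0])], level=0)
    for module, names in sorted(dictionaryImportFrom.items())
]
statements.extend(ast.Import(names=[ast.alias(name=name, asname=None)]) for name in sorted(listImport))
rebuilt = ast.Module(body=statements, type_ignores=[])
print(ast.unparse(ast.fix_missing_locations(rebuilt)))
# from pathlib import Path
# import ast
```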
File without changes
@@ -1,30 +0,0 @@
-from mapFolding.someAssemblyRequired.synthesizeDataConverters import makeStateJob
-from mapFolding.syntheticModules.numbaCount_doTheNeedful import doTheNeedful
-from mapFolding.theSSOT import Array1DElephino, Array1DFoldsTotal, Array1DLeavesTotal, Array3D, ComputationState, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal
-
-def flattenData(state: ComputationState) -> ComputationState:
-    state = makeStateJob(state.mapShape, writeJob=False)
-    mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
-    leavesTotal: DatatypeLeavesTotal = state.leavesTotal
-    taskDivisions: DatatypeLeavesTotal = state.taskDivisions
-    connectionGraph: Array3D = state.connectionGraph
-    dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
-    countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
-    dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
-    gapRangeStart: Array1DElephino = state.gapRangeStart
-    gapsWhere: Array1DLeavesTotal = state.gapsWhere
-    leafAbove: Array1DLeavesTotal = state.leafAbove
-    leafBelow: Array1DLeavesTotal = state.leafBelow
-    foldGroups: Array1DFoldsTotal = state.foldGroups
-    foldsTotal: DatatypeFoldsTotal = state.foldsTotal
-    gap1ndex: DatatypeLeavesTotal = state.gap1ndex
-    gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
-    groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
-    indexDimension: DatatypeLeavesTotal = state.indexDimension
-    indexLeaf: DatatypeLeavesTotal = state.indexLeaf
-    indexMiniGap: DatatypeElephino = state.indexMiniGap
-    leaf1ndex: DatatypeElephino = state.leaf1ndex
-    leafConnectee: DatatypeElephino = state.leafConnectee
-    taskIndex: DatatypeLeavesTotal = state.taskIndex
-    mapShape, leavesTotal, taskDivisions, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex = doTheNeedful(mapShape, leavesTotal, taskDivisions, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
-    return ComputationState(mapShape=mapShape, leavesTotal=leavesTotal, taskDivisions=taskDivisions, countDimensionsGapped=countDimensionsGapped, dimensionsUnconstrained=dimensionsUnconstrained, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow, foldGroups=foldGroups, foldsTotal=foldsTotal, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, groupsOfFolds=groupsOfFolds, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, taskIndex=taskIndex)
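The 30-line deletion is apparently syntheticModules/dataNamespaceFlattened.py. Its `flattenData` unpacked every field of the `ComputationState` dataclass into locals, passed them positionally to the synthesized `doTheNeedful`, and repacked the results, because the numba-compiled kernel operates on scalars and arrays rather than on the dataclass. A toy, self-contained sketch of that unpack-call-repack pattern (`ToyState` and `toyKernel` are invented stand-ins for `ComputationState` and `doTheNeedful`):

```python
# Toy sketch of the unpack-call-repack pattern from the removed flattenData.
import dataclasses

@dataclasses.dataclass
class ToyState:
    leavesTotal: int
    groupsOfFolds: int

def toyKernel(leavesTotal: int, groupsOfFolds: int) -> tuple[int, int]:
    # a jit-friendly kernel sees only plain values, never the dataclass
    return leavesTotal, groupsOfFolds + leavesTotal

def flattenToy(state: ToyState) -> ToyState:
    leavesTotal = state.leavesTotal          # unpack fields into locals
    groupsOfFolds = state.groupsOfFolds
    leavesTotal, groupsOfFolds = toyKernel(leavesTotal, groupsOfFolds)
    return ToyState(leavesTotal=leavesTotal, groupsOfFolds=groupsOfFolds)  # repack

print(flattenToy(ToyState(leavesTotal=8, groupsOfFolds=0)))
```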
|