mapFolding 0.5.1__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. mapFolding/__init__.py +6 -101
  2. mapFolding/basecamp.py +12 -10
  3. mapFolding/beDRY.py +96 -316
  4. mapFolding/filesystem.py +87 -0
  5. mapFolding/noHomeYet.py +20 -0
  6. mapFolding/oeis.py +39 -36
  7. mapFolding/reference/flattened.py +377 -0
  8. mapFolding/reference/hunterNumba.py +132 -0
  9. mapFolding/reference/irvineJavaPort.py +120 -0
  10. mapFolding/reference/jax.py +208 -0
  11. mapFolding/reference/lunnan.py +153 -0
  12. mapFolding/reference/lunnanNumpy.py +123 -0
  13. mapFolding/reference/lunnanWhile.py +121 -0
  14. mapFolding/reference/rotatedEntryPoint.py +240 -0
  15. mapFolding/reference/total_countPlus1vsPlusN.py +211 -0
  16. mapFolding/someAssemblyRequired/Z0Z_workbench.py +34 -0
  17. mapFolding/someAssemblyRequired/__init__.py +16 -0
  18. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +21 -0
  19. mapFolding/someAssemblyRequired/ingredientsNumba.py +100 -0
  20. mapFolding/someAssemblyRequired/synthesizeCountingFunctions.py +7 -0
  21. mapFolding/someAssemblyRequired/synthesizeDataConverters.py +135 -0
  22. mapFolding/someAssemblyRequired/synthesizeNumba.py +91 -0
  23. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +417 -0
  24. mapFolding/someAssemblyRequired/synthesizeNumbaModules.py +91 -0
  25. mapFolding/someAssemblyRequired/transformationTools.py +425 -0
  26. mapFolding/someAssemblyRequired/whatWillBe.py +311 -0
  27. mapFolding/syntheticModules/__init__.py +0 -0
  28. mapFolding/syntheticModules/dataNamespaceFlattened.py +30 -0
  29. mapFolding/syntheticModules/numbaCount.py +90 -0
  30. mapFolding/syntheticModules/numbaCountExample.py +158 -0
  31. mapFolding/syntheticModules/numbaCountSequential.py +110 -0
  32. mapFolding/syntheticModules/numbaCount_doTheNeedful.py +13 -0
  33. mapFolding/syntheticModules/numba_doTheNeedful.py +12 -0
  34. mapFolding/syntheticModules/numba_doTheNeedfulExample.py +13 -0
  35. mapFolding/theDao.py +203 -227
  36. mapFolding/theSSOT.py +254 -123
  37. {mapFolding-0.5.1.dist-info → mapfolding-0.7.0.dist-info}/METADATA +10 -8
  38. mapfolding-0.7.0.dist-info/RECORD +50 -0
  39. {mapFolding-0.5.1.dist-info → mapfolding-0.7.0.dist-info}/WHEEL +1 -1
  40. {mapFolding-0.5.1.dist-info → mapfolding-0.7.0.dist-info}/top_level.txt +1 -0
  41. tests/__init__.py +0 -0
  42. tests/conftest.py +278 -0
  43. tests/test_computations.py +49 -0
  44. tests/test_filesystem.py +52 -0
  45. tests/test_oeis.py +128 -0
  46. tests/test_other.py +84 -0
  47. tests/test_tasks.py +50 -0
  48. mapFolding/theSSOTdatatypes.py +0 -156
  49. mapFolding-0.5.1.dist-info/RECORD +0 -14
  50. {mapFolding-0.5.1.dist-info → mapfolding-0.7.0.dist-info}/LICENSE +0 -0
  51. {mapFolding-0.5.1.dist-info → mapfolding-0.7.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,34 @@
1
from mapFolding.someAssemblyRequired.synthesizeDataConverters import makeStateJob
from mapFolding.someAssemblyRequired.synthesizeDataConverters import makeDataclassConverter
from mapFolding.someAssemblyRequired.whatWillBe import IngredientsFunction, IngredientsModule, numbaFlow
from mapFolding.someAssemblyRequired.synthesizeCountingFunctions import Z0Z_makeCountingFunction
import ast

if __name__ == '__main__':
    # Build the data-converter function: it unpacks the dataclass instance,
    # calls the dispatcher, and rebuilds the dataclass (see makeDataclassConverter).
    ingredientsFunctionDataConverter = makeDataclassConverter(
        dataclassIdentifierAsStr=numbaFlow.dataclassIdentifierAsStr
        , logicalPathModuleDataclass=numbaFlow.logicalPathModuleDataclass
        , dataclassInstanceAsStr=numbaFlow.dataclassInstanceAsStr

        , dispatcherCallableAsStr=numbaFlow.dispatcherCallableAsStr
        , logicalPathModuleDispatcher=numbaFlow.logicalPathModuleDispatcher
        , dataConverterCallableAsStr=numbaFlow.dataConverterCallableAsStr
        )

    # initialize with theDao: prepend a statement that rebuilds `state` via
    # makeStateJob(writeJob=False) so the converter starts from a fresh,
    # fully-initialized computation state.
    dataInitializationHack = "state=makeStateJob(state.mapShape,writeJob=False)"
    ingredientsFunctionDataConverter.FunctionDef.body.insert(0, ast.parse(dataInitializationHack).body[0])
    ingredientsFunctionDataConverter.imports.addImportFromStr('mapFolding.someAssemblyRequired', 'makeStateJob')

    # NOTE(review): Z0Z_makeCountingFunction is currently a stub (body is `pass`),
    # so `ingredientsSequential` is None and unused below.
    ingredientsSequential = Z0Z_makeCountingFunction(numbaFlow.sequentialCallableAsStr
        , numbaFlow.sourceAlgorithm
        , inline=True
        , dataclass=False)

    # Package the converter function into a module under the synthetic-modules package.
    ingredientsModuleDataConverter = IngredientsModule(
        name=numbaFlow.dataConverterModule,
        ingredientsFunction=ingredientsFunctionDataConverter,
        logicalPathINFIX=numbaFlow.moduleOfSyntheticModules,
        )

    # Write the synthesized module to disk.
    ingredientsModuleDataConverter.writeModule()
@@ -0,0 +1,16 @@
1
# Re-export the public API of `whatWillBe` at the package level.
# The `name as name` form marks each binding as an intentional re-export,
# which type checkers (per PEP 484) and linters treat as part of the public API.
from mapFolding.someAssemblyRequired.whatWillBe import (
    ast_Identifier as ast_Identifier,
    extractClassDef as extractClassDef,
    extractFunctionDef as extractFunctionDef,
    executeActionUnlessDescendantMatches as executeActionUnlessDescendantMatches,
    ifThis as ifThis,
    IngredientsFunction as IngredientsFunction,
    IngredientsModule as IngredientsModule,
    LedgerOfImports as LedgerOfImports,
    listNumbaCallableDispatchees as listNumbaCallableDispatchees,
    Make as Make,
    NodeCollector as NodeCollector,
    NodeReplacer as NodeReplacer,
    strDotStrCuzPyStoopid as strDotStrCuzPyStoopid,
    Then as Then,
)
@@ -0,0 +1,21 @@
1
import importlib
from importlib.machinery import ModuleSpec
from types import ModuleType
import importlib.util
import llvmlite.binding
import pathlib

def writeModuleLLVM(pathFilename: pathlib.Path, identifierCallable: str) -> pathlib.Path:
    """Import a generated module from disk and write a callable's LLVM IR next to it.

    Parameters:
        pathFilename: Path to the generated Python module file.
        identifierCallable: Name of the callable inside the module whose LLVM IR
            to extract. It must expose `inspect_llvm()`, i.e. be a numba
            Dispatcher — assumed, not checked here.

    Returns:
        The path of the written `.ll` file (same stem as `pathFilename`).

    Raises:
        ImportError: If a module spec or loader cannot be created for the file.
    """
    specTarget: ModuleSpec | None = importlib.util.spec_from_file_location("generatedModule", pathFilename)
    if specTarget is None or specTarget.loader is None:
        raise ImportError(f"Could not create module spec or loader for {pathFilename}")
    moduleTarget: ModuleType = importlib.util.module_from_spec(specTarget)
    # Executing the module runs its top level, which compiles any eagerly-jitted callables.
    specTarget.loader.exec_module(moduleTarget)

    # Get LLVM IR and write to file.
    # `inspect_llvm()` returns a mapping keyed by argument-type signature; `[()]`
    # selects the empty-signature entry — presumably a zero-argument compiled
    # function. TODO confirm against the generated module's signature.
    linesLLVM = moduleTarget.__dict__[identifierCallable].inspect_llvm()[()]
    moduleLLVM: llvmlite.binding.ModuleRef = llvmlite.binding.module.parse_assembly(linesLLVM)
    pathFilenameLLVM: pathlib.Path = pathFilename.with_suffix(".ll")
    pathFilenameLLVM.write_text(str(moduleLLVM))
    return pathFilenameLLVM
@@ -0,0 +1,100 @@
1
from collections.abc import Callable
from numba.core.compiler import CompilerBase as numbaCompilerBase
from typing import Any, TYPE_CHECKING, Final

# `typing.NotRequired` exists from Python 3.11; fall back to typing_extensions on older versions.
try:
    from typing import NotRequired
except Exception:
    from typing_extensions import NotRequired

# At runtime a plain dict is sufficient; the TypedDict machinery only matters
# to static type checkers, so it is imported under TYPE_CHECKING only.
if TYPE_CHECKING:
    from typing import TypedDict
else:
    TypedDict = dict

class ParametersNumba(TypedDict):
    """Keyword arguments accepted by the `numba.jit` decorator family.

    Keys marked `NotRequired` may be omitted from a given parameter set;
    the remaining keys are present in every preset below.
    """
    _dbg_extend_lifetimes: NotRequired[bool]
    _dbg_optnone: NotRequired[bool]
    _nrt: NotRequired[bool]
    boundscheck: NotRequired[bool]
    cache: bool
    debug: NotRequired[bool]
    error_model: str
    fastmath: bool
    forceinline: bool
    forceobj: NotRequired[bool]
    inline: str
    locals: NotRequired[dict[str, Any]]
    looplift: bool
    no_cfunc_wrapper: bool
    no_cpython_wrapper: bool
    no_rewrites: NotRequired[bool]
    nogil: NotRequired[bool]
    nopython: bool
    parallel: bool
    pipeline_class: NotRequired[type[numbaCompilerBase]]
    signature_or_function: NotRequired[Any | Callable[..., Any] | str | tuple[Any, ...]]
    target: NotRequired[str]

parametersNumbaFailEarly: Final[ParametersNumba] = {
    '_nrt': True,
    'boundscheck': True,
    'cache': True,
    'error_model': 'python',
    'fastmath': False,
    'forceinline': True,
    'inline': 'always',
    'looplift': False,
    'no_cfunc_wrapper': False,
    'no_cpython_wrapper': False,
    'nopython': True,
    'parallel': False, }
"""For a production function: speed is irrelevant, error discovery is paramount, must be compatible with anything downstream."""

parametersNumbaDEFAULT: Final[ParametersNumba] = {
    '_nrt': True,
    'boundscheck': False,
    'cache': True,
    'error_model': 'numpy',
    'fastmath': True,
    'forceinline': True,
    'inline': 'always',
    'looplift': False,
    'no_cfunc_wrapper': False,
    'no_cpython_wrapper': False,
    'nopython': True,
    'parallel': False, }
"""Middle of the road: fast, lean, but will talk to non-jitted functions."""

parametersNumbaParallelDEFAULT: Final[ParametersNumba] = {
    **parametersNumbaDEFAULT,
    '_nrt': True,  # already True in the base preset; restated for emphasis
    'parallel': True, }
"""Same as DEFAULT, plus `parallel=True` for numba's automatic parallelization."""

parametersNumbaSuperJit: Final[ParametersNumba] = {
    **parametersNumbaDEFAULT,
    'no_cfunc_wrapper': True,
    'no_cpython_wrapper': True, }
"""Speed, no helmet, no talking to non-jitted functions."""

parametersNumbaSuperJitParallel: Final[ParametersNumba] = {
    **parametersNumbaSuperJit,
    '_nrt': True,  # already True via the base presets; restated for emphasis
    'parallel': True, }
"""Speed, no helmet, concurrency, no talking to non-jitted functions."""

parametersNumbaMinimum: Final[ParametersNumba] = {
    '_nrt': True,
    'boundscheck': True,
    'cache': True,
    'error_model': 'numpy',
    'fastmath': True,
    'forceinline': False,
    'inline': 'always',
    'looplift': False,
    'no_cfunc_wrapper': False,
    'no_cpython_wrapper': False,
    # Object mode: `nopython=False` with `forceobj=True` — the most permissive
    # (and slowest) compilation mode numba offers.
    'nopython': False,
    'forceobj': True,
    'parallel': False, }
@@ -0,0 +1,7 @@
1
from mapFolding.someAssemblyRequired import IngredientsFunction, Make
from typing import cast
from types import ModuleType
import ast

def Z0Z_makeCountingFunction(callableTarget: str, sourceAlgorithm: ModuleType, inline: bool, dataclass: bool):
    """Synthesize a counting function from `sourceAlgorithm`. NOT YET IMPLEMENTED.

    Parameters:
        callableTarget: Identifier of the callable to synthesize.
        sourceAlgorithm: Module containing the source algorithm to transform.
        inline: presumably controls inlining of the result — TODO confirm when implemented.
        dataclass: presumably controls dataclass-based state handling — TODO confirm when implemented.

    NOTE(review): stub — the body is `pass`, so callers (e.g. Z0Z_workbench)
    currently receive None.
    """
    pass
@@ -0,0 +1,135 @@
1
+ from collections.abc import Sequence
2
+ from importlib import import_module
3
+ from inspect import getsource as inspect_getsource
4
+ from pathlib import Path
5
+ from types import ModuleType
6
+ from typing import Any, cast, overload, Literal
7
+ import ast
8
+ import pickle
9
+ from mapFolding.beDRY import ComputationState, outfitCountFolds, validateListDimensions
10
+ from mapFolding.filesystem import getPathFilenameFoldsTotal
11
+ from mapFolding.someAssemblyRequired import (
12
+ ast_Identifier,
13
+ executeActionUnlessDescendantMatches,
14
+ extractClassDef,
15
+ ifThis,
16
+ IngredientsFunction,
17
+ LedgerOfImports,
18
+ Make,
19
+ NodeCollector,
20
+ strDotStrCuzPyStoopid,
21
+ Then,
22
+ )
23
+ from mapFolding.theSSOT import getSourceAlgorithm
24
+
25
def shatter_dataclassesDOTdataclass(logicalPathModule: strDotStrCuzPyStoopid, dataclass_Identifier: ast_Identifier, instance_Identifier: ast_Identifier
        ) -> tuple[ast.Name, LedgerOfImports, list[ast.AnnAssign], list[ast.Name], list[ast.keyword], ast.Tuple]:
    """
    Dismantle a dataclass definition into AST fragments for code synthesis.

    Parses the source of the module that defines the dataclass, locates its
    `ClassDef`, and walks the annotated fields to produce, in order:
      1. an `ast.Name` for the dataclass identifier,
      2. a `LedgerOfImports` holding imports needed by the field annotations
         plus the dataclass itself,
      3. per-field `AnnAssign` nodes of the form `<field>: <T> = <instance>.<field>`,
      4. per-field `ast.Name` nodes (the "fragment" identifiers),
      5. `ast.keyword` nodes for re-constructing the dataclass (fields whose
         declaration matches `init=False` are excluded),
      6. an `ast.Tuple` in Store context, for assigning results back to the fragments.

    Parameters:
        logicalPathModule: gimme string cuz python is stoopid
        dataclass_Identifier: The identifier of the dataclass to be dismantled.
        instance_Identifier: In the synthesized module/function/scope, the identifier that will be used for the instance.

    Raises:
        ValueError: If `dataclass_Identifier` is not found in the module.
    """
    module: ast.Module = ast.parse(inspect_getsource(import_module(logicalPathModule)))

    dataclass = extractClassDef(dataclass_Identifier, module)

    if not isinstance(dataclass, ast.ClassDef):
        raise ValueError(f"I could not find {dataclass_Identifier=} in {logicalPathModule=}.")

    list_astAnnAssign: list[ast.AnnAssign] = []
    listKeywordForDataclassInitialization: list[ast.keyword] = []
    list_astNameDataclassFragments: list[ast.Name] = []
    ledgerDataclassAndFragments = LedgerOfImports()

    # Collect imports required by field annotations (fires only for AnnAssign
    # nodes whose annotation is a bare Name).
    addToLedgerPredicate = ifThis.isAnnAssignAndAnnotationIsName
    addToLedgerAction = Then.Z0Z_ledger(logicalPathModule, ledgerDataclassAndFragments)
    addToLedger = NodeCollector(addToLedgerPredicate, [addToLedgerAction])

    # Fields declared with `init=False` must not appear as constructor keywords.
    exclusionPredicate = ifThis.is_keyword_IdentifierEqualsConstantValue('init', False)
    appendKeywordAction = Then.Z0Z_appendKeywordMirroredTo(listKeywordForDataclassInitialization)
    filteredAppendKeywordAction = executeActionUnlessDescendantMatches(exclusionPredicate, appendKeywordAction)

    # One traversal of the class body; all four actions fire, in order, for each
    # annotated field whose target is a Name. The order matters: lists 3-5 of
    # the return value stay index-aligned per field.
    collector = NodeCollector(
        ifThis.isAnnAssignAndTargetIsName,
        [Then.Z0Z_appendAnnAssignOfNameDOTnameTo(instance_Identifier, list_astAnnAssign)
        , Then.append_targetTo(list_astNameDataclassFragments)
        , lambda node: addToLedger.visit(node)
        , filteredAppendKeywordAction
        ]
    )

    collector.visit(dataclass)

    # The synthesized code will reference the dataclass by name, so import it too.
    ledgerDataclassAndFragments.addImportFromStr(logicalPathModule, dataclass_Identifier)

    astNameDataclass = Make.astName(dataclass_Identifier)
    astTupleForAssignTargetsToFragments: ast.Tuple = Make.astTuple(list_astNameDataclassFragments, ast.Store())
    return astNameDataclass, ledgerDataclassAndFragments, list_astAnnAssign, list_astNameDataclassFragments, listKeywordForDataclassInitialization, astTupleForAssignTargetsToFragments
69
+
70
def makeDataclassConverter(dataclassIdentifierAsStr: str,
            logicalPathModuleDataclass: str,
            dataclassInstanceAsStr: str,
            dispatcherCallableAsStr: str,
            logicalPathModuleDispatcher: str,
            dataConverterCallableAsStr: str,
            ) -> IngredientsFunction:
    """Synthesize a converter function that bridges a dataclass and a dispatcher.

    The generated function (named `dataConverterCallableAsStr`) takes one
    parameter — the dataclass instance — and:
      1. unpacks every field into a local variable,
      2. calls `dispatcherCallableAsStr` with the unpacked fields, assigning the
         results back onto the same fragment names,
      3. returns a freshly constructed dataclass built from those fragments.

    Parameters:
        dataclassIdentifierAsStr: Name of the dataclass to convert.
        logicalPathModuleDataclass: Module path where the dataclass is defined.
        dataclassInstanceAsStr: Identifier used for the instance parameter.
        dispatcherCallableAsStr: Name of the dispatcher callable to invoke.
        logicalPathModuleDispatcher: Module path where the dispatcher is defined.
        dataConverterCallableAsStr: Name given to the synthesized function.

    Returns:
        An `IngredientsFunction` holding the synthesized `FunctionDef` and the
        imports it requires.
    """

    astNameDataclass, ledgerDataclassAndFragments, list_astAnnAssign, list_astNameDataclassFragments, list_astKeywordDataclassFragments, astTupleForAssignTargetsToFragments = shatter_dataclassesDOTdataclass(logicalPathModuleDataclass, dataclassIdentifierAsStr, dataclassInstanceAsStr)

    # Step 1: function body starts as the per-field unpacking assignments.
    ingredientsFunction = IngredientsFunction(
        FunctionDef = Make.astFunctionDef(name=dataConverterCallableAsStr
            , argumentsSpecification=Make.astArgumentsSpecification(args=[Make.astArg(dataclassInstanceAsStr, astNameDataclass)])
            , body = cast(list[ast.stmt], list_astAnnAssign)
            , returns = astNameDataclass
            )
        , imports = ledgerDataclassAndFragments
        )

    # Step 2: `(frag1, frag2, ...) = dispatcher(frag1, frag2, ...)`.
    callToDispatcher = Make.astAssign(listTargets=[astTupleForAssignTargetsToFragments]
        , value=Make.astCall(Make.astName(dispatcherCallableAsStr), args=list_astNameDataclassFragments))
    ingredientsFunction.FunctionDef.body.append(callToDispatcher)
    ingredientsFunction.imports.addImportFromStr(logicalPathModuleDispatcher, dispatcherCallableAsStr)

    # Step 3: `return Dataclass(field=frag, ...)` (init=False fields excluded upstream).
    ingredientsFunction.FunctionDef.body.append(Make.astReturn(Make.astCall(astNameDataclass, list_astKeywords=list_astKeywordDataclassFragments)))

    return ingredientsFunction
97
+
98
@overload
def makeStateJob(listDimensions: Sequence[int], *, writeJob: Literal[True], **keywordArguments: Any) -> Path: ...
@overload
def makeStateJob(listDimensions: Sequence[int], *, writeJob: Literal[False], **keywordArguments: Any) -> ComputationState: ...
def makeStateJob(listDimensions: Sequence[int], *, writeJob: bool = True, **keywordArguments: Any) -> ComputationState | Path:
    """
    Creates a computation state job for map folding calculations and optionally saves it to disk.

    This function initializes a computation state for map folding calculations based on the given dimensions,
    sets up the initial counting configuration, and can optionally save the state to a pickle file.

    Parameters:
        listDimensions: List of integers representing the dimensions of the map to be folded.
        writeJob (True): Whether to save the state to disk.
        **keywordArguments: Additional keyword arguments to pass to the computation state initialization.

    Returns:
        stateUniversal|pathFilenameJob: The computation state for the map folding calculations, or
        the path to the saved state file if writeJob is True.
    """
    mapShape = validateListDimensions(listDimensions)
    stateUniversal = outfitCountFolds(mapShape, **keywordArguments)

    moduleSource: ModuleType = getSourceAlgorithm()
    # TODO `countInitialize` is hardcoded
    stateUniversal = moduleSource.countInitialize(stateUniversal)

    if not writeJob:
        return stateUniversal

    # Derive the job directory from the folds-total filename by dropping its
    # extension. `with_suffix('')` is equivalent to the previous manual string
    # slicing when a suffix exists, and — unlike `str(p)[0:-len('')]`, which
    # collapses to an empty string — it is also correct for a suffix-less name.
    pathFilenameChopChop = getPathFilenameFoldsTotal(stateUniversal.mapShape, None)
    pathJob = pathFilenameChopChop.with_suffix('')
    pathJob.mkdir(parents=True, exist_ok=True)
    pathFilenameJob = pathJob / 'stateJob.pkl'

    # Persist the initialized state so a worker can resume from it later.
    pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
    return pathFilenameJob
@@ -0,0 +1,91 @@
1
+ from collections.abc import Sequence
2
+ from mapFolding.someAssemblyRequired.whatWillBe import LedgerOfImports, ParametersNumba, Z0Z_getDatatypeModuleScalar, parametersNumbaDEFAULT
3
+ from mapFolding.someAssemblyRequired.whatWillBe import Z0Z_getDecoratorCallable
4
+ from mapFolding.someAssemblyRequired import Make, ifThis
5
+ from typing import Any, cast
6
+ import ast
7
+
8
def thisIsNumbaDotJit(Ima: ast.AST) -> bool:
    """True when `Ima` is a namespaced decorator call, e.g. `numba.jit(...)`."""
    matchesNamespacedCall = ifThis.isCallNamespace_Identifier(Z0Z_getDatatypeModuleScalar(), Z0Z_getDecoratorCallable())
    return matchesNamespacedCall(Ima)

def thisIsJit(Ima: ast.AST) -> bool:
    """True when `Ima` is a bare decorator call, e.g. `jit(...)`."""
    matchesBareCall = ifThis.isCall_Identifier(Z0Z_getDecoratorCallable())
    return matchesBareCall(Ima)

def thisIsAnyNumbaJitDecorator(Ima: ast.AST) -> bool:
    """True when `Ima` matches either the namespaced or the bare decorator form."""
    return any(predicate(Ima) for predicate in (thisIsNumbaDotJit, thisIsJit))
16
+
17
def decorateCallableWithNumba(FunctionDefTarget: ast.FunctionDef, allImports: LedgerOfImports, parametersNumba: ParametersNumba | None = None) -> tuple[ast.FunctionDef, LedgerOfImports]:
    """Replace a function's decorator list with a single synthesized numba `jit` decorator.

    Mutates `FunctionDefTarget` in place (decorator list is replaced) and records
    any newly needed imports in `allImports`.

    Parameters:
        FunctionDefTarget: The function definition to decorate.
        allImports: Import ledger updated with the decorator and dtype imports.
        parametersNumba: Explicit jit keyword parameters; when None, parameters
            are harvested from an existing jit decorator if present, otherwise
            `parametersNumbaDEFAULT` is used.

    Returns:
        The mutated `FunctionDefTarget` and the updated `allImports`.
    """
    def Z0Z_UnhandledDecorators(astCallable: ast.FunctionDef) -> ast.FunctionDef:
        # Strip every remaining decorator, warning about each one removed.
        # TODO: more explicit handling of decorators. I'm able to ignore this because I know `algorithmSource` doesn't have any decorators.
        for decoratorItem in astCallable.decorator_list.copy():
            import warnings
            astCallable.decorator_list.remove(decoratorItem)
            warnings.warn(f"Removed decorator {ast.unparse(decoratorItem)} from {astCallable.name}")
        return astCallable

    def make_arg4parameter(signatureElement: ast.arg) -> ast.Subscript | ast.Name | None:
        # Translate one parameter annotation into a numba signature element.
        # Assumes ndarray-style annotations of shape `T[tuple[...], dtype[...]]`
        # (subscript whose slice is a 2-tuple) — TODO confirm against the
        # annotations actually produced upstream.
        if isinstance(signatureElement.annotation, ast.Subscript) and isinstance(signatureElement.annotation.slice, ast.Tuple):
            annotationShape: ast.expr = signatureElement.annotation.slice.elts[0]
            if isinstance(annotationShape, ast.Subscript) and isinstance(annotationShape.slice, ast.Tuple):
                # Multi-dimensional: one slice per axis; the last axis is marked
                # contiguous via `::1`.
                shapeAsListSlices: list[ast.Slice] = [ast.Slice() for _axis in range(len(annotationShape.slice.elts))]
                shapeAsListSlices[-1] = ast.Slice(step=ast.Constant(value=1))
                shapeAST: ast.Slice | ast.Tuple = ast.Tuple(elts=list(shapeAsListSlices), ctx=ast.Load())
            else:
                # One-dimensional contiguous array: `::1`.
                shapeAST = ast.Slice(step=ast.Constant(value=1))

            annotationDtype: ast.expr = signatureElement.annotation.slice.elts[1]
            if (isinstance(annotationDtype, ast.Subscript) and isinstance(annotationDtype.slice, ast.Attribute)):
                datatypeAST = annotationDtype.slice.attr
            else:
                datatypeAST = None

            # HACK: when the dtype cannot be read from the annotation, fall back
            # to using the parameter's own name as the dtype identifier.
            ndarrayName = signatureElement.arg
            Z0Z_hacky_dtype: str = ndarrayName
            datatype_attr = datatypeAST or Z0Z_hacky_dtype
            allImports.addImportFromStr(datatypeModuleDecorator, datatype_attr)
            datatypeNumba = ast.Name(id=datatype_attr, ctx=ast.Load())

            # e.g. `float64[:, ::1]`
            return ast.Subscript(value=datatypeNumba, slice=shapeAST, ctx=ast.Load())

        elif isinstance(signatureElement.annotation, ast.Name):
            # Scalar annotation: pass the Name through unchanged.
            return signatureElement.annotation
        # Unrecognized annotation shapes are silently skipped.
        return None

    datatypeModuleDecorator: str = Z0Z_getDatatypeModuleScalar()
    list_argsDecorator: Sequence[ast.expr] = []

    # Build the positional signature from the parameter annotations.
    list_arg4signature_or_function: list[ast.expr] = []
    for parameter in FunctionDefTarget.args.args:
        signatureElement: ast.Subscript | ast.Name | None = make_arg4parameter(parameter)
        if signatureElement:
            list_arg4signature_or_function.append(signatureElement)

    # With a Name return annotation, emit `ReturnType(arg1, arg2, ...)`;
    # otherwise emit a bare tuple of argument types (if any).
    if FunctionDefTarget.returns and isinstance(FunctionDefTarget.returns, ast.Name):
        theReturn: ast.Name = FunctionDefTarget.returns
        list_argsDecorator = [cast(ast.expr, ast.Call(func=ast.Name(id=theReturn.id, ctx=ast.Load())
            , args=list_arg4signature_or_function if list_arg4signature_or_function else [], keywords=[] ) )]
    elif list_arg4signature_or_function:
        list_argsDecorator = [cast(ast.expr, ast.Tuple(elts=list_arg4signature_or_function, ctx=ast.Load()))]

    # Remove any existing jit decorator; when no explicit parameters were given,
    # harvest the keyword arguments it carried so they survive the rewrite.
    for decorator in FunctionDefTarget.decorator_list.copy():
        if thisIsAnyNumbaJitDecorator(decorator):
            decorator = cast(ast.Call, decorator)
            if parametersNumba is None:
                parametersNumbaSherpa: dict[str, Any] = Make.copy_astCallKeywords(decorator)
                if (_HunterIsSureThereAreBetterWaysToDoThis := True):
                    if parametersNumbaSherpa:
                        parametersNumba = cast(ParametersNumba, parametersNumbaSherpa)
            FunctionDefTarget.decorator_list.remove(decorator)

    FunctionDefTarget = Z0Z_UnhandledDecorators(FunctionDefTarget)
    if parametersNumba is None:
        parametersNumba = parametersNumbaDEFAULT
    # Each parameter becomes a `name=Constant` keyword on the synthesized decorator.
    listDecoratorKeywords: list[ast.keyword] = [ast.keyword(arg=parameterName, value=ast.Constant(value=parameterValue)) for parameterName, parameterValue in parametersNumba.items()]

    decoratorModule: str = Z0Z_getDatatypeModuleScalar()
    decoratorCallable: str = Z0Z_getDecoratorCallable()
    allImports.addImportFromStr(decoratorModule, decoratorCallable)
    astDecorator: ast.Call = Make.astCall(Make.astName(decoratorCallable), list_argsDecorator, listDecoratorKeywords)

    # The synthesized jit decorator is now the only decorator on the function.
    FunctionDefTarget.decorator_list = [astDecorator]
    return FunctionDefTarget, allImports