mapFolding 0.3.2__tar.gz → 0.3.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. {mapfolding-0.3.2 → mapfolding-0.3.4}/PKG-INFO +4 -2
  2. {mapfolding-0.3.2 → mapfolding-0.3.4}/README.md +1 -1
  3. mapfolding-0.3.4/mapFolding/citations/updateCitation.py +67 -0
  4. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/mapFolding.egg-info/PKG-INFO +4 -2
  5. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/mapFolding.egg-info/SOURCES.txt +8 -4
  6. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/mapFolding.egg-info/requires.txt +2 -0
  7. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/mapFolding.egg-info/top_level.txt +1 -0
  8. mapfolding-0.3.4/mapFolding/someAssemblyRequired/__init__.py +2 -0
  9. mapfolding-0.3.4/mapFolding/someAssemblyRequired/generalizeSourceCode.py +122 -0
  10. mapfolding-0.3.4/mapFolding/someAssemblyRequired/makeJob.py +21 -0
  11. mapfolding-0.3.2/mapFolding/someAssemblyRequired/synthesizeJob.py → mapfolding-0.3.4/mapFolding/someAssemblyRequired/synthesizeModuleJob.py +35 -35
  12. mapfolding-0.3.4/mapFolding/someAssemblyRequired/synthesizeModules.py +170 -0
  13. mapfolding-0.3.4/mapFolding/syntheticModules/countInitialize.py +47 -0
  14. mapfolding-0.3.4/mapFolding/syntheticModules/countParallel.py +54 -0
  15. {mapfolding-0.3.2/mapFolding/someAssemblyRequired → mapfolding-0.3.4/mapFolding/syntheticModules}/countSequential.py +6 -4
  16. {mapfolding-0.3.2 → mapfolding-0.3.4}/pyproject.toml +7 -8
  17. {mapfolding-0.3.2 → mapfolding-0.3.4}/tests/test_oeis.py +29 -23
  18. {mapfolding-0.3.2 → mapfolding-0.3.4}/tests/test_other.py +24 -20
  19. mapfolding-0.3.4/tests/test_tasks.py +44 -0
  20. mapfolding-0.3.2/mapFolding/someAssemblyRequired/countInitialize.py +0 -45
  21. mapfolding-0.3.2/mapFolding/someAssemblyRequired/countParallel.py +0 -52
  22. mapfolding-0.3.2/mapFolding/someAssemblyRequired/synthesizeModules.py +0 -187
  23. mapfolding-0.3.2/tests/test_tasks.py +0 -30
  24. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/benchmarks/benchmarking.py +0 -0
  25. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/mapFolding.egg-info/dependency_links.txt +0 -0
  26. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/mapFolding.egg-info/entry_points.txt +0 -0
  27. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/reference/flattened.py +0 -0
  28. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/reference/hunterNumba.py +0 -0
  29. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/reference/irvineJavaPort.py +0 -0
  30. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/reference/jax.py +0 -0
  31. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/reference/lunnan.py +0 -0
  32. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/reference/lunnanNumpy.py +0 -0
  33. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/reference/lunnanWhile.py +0 -0
  34. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/reference/rotatedEntryPoint.py +0 -0
  35. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/reference/total_countPlus1vsPlusN.py +0 -0
  36. {mapfolding-0.3.2 → mapfolding-0.3.4}/mapFolding/someAssemblyRequired/getLLVMforNoReason.py +0 -0
  37. {mapfolding-0.3.2/mapFolding/someAssemblyRequired → mapfolding-0.3.4/mapFolding/syntheticModules}/__init__.py +0 -0
  38. {mapfolding-0.3.2 → mapfolding-0.3.4}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: mapFolding
3
- Version: 0.3.2
3
+ Version: 0.3.4
4
4
  Summary: Count distinct ways to fold a map (or a strip of stamps)
5
5
  Author-email: Hunter Hogan <HunterHogan@pm.me>
6
6
  License: CC-BY-NC-4.0
@@ -29,12 +29,14 @@ Requires-Dist: jupyter; extra == "benchmark"
29
29
  Requires-Dist: pandas; extra == "benchmark"
30
30
  Requires-Dist: tqdm; extra == "benchmark"
31
31
  Provides-Extra: testing
32
+ Requires-Dist: cffconvert; extra == "testing"
32
33
  Requires-Dist: more_itertools; extra == "testing"
33
34
  Requires-Dist: pytest; extra == "testing"
34
35
  Requires-Dist: pytest-cov; extra == "testing"
35
36
  Requires-Dist: pytest-env; extra == "testing"
36
37
  Requires-Dist: pytest-xdist; extra == "testing"
37
38
  Requires-Dist: python_minifier; extra == "testing"
39
+ Requires-Dist: tomli; extra == "testing"
38
40
 
39
41
  # Algorithm(s) for counting distinct ways to fold a map (or a strip of stamps)
40
42
 
@@ -52,7 +54,7 @@ The directory [mapFolding/reference](https://github.com/hunterhogan/mapFolding/b
52
54
  - [hunterNumba.py](https://github.com/hunterhogan/mapFolding/blob/main/mapFolding/reference), a one-size-fits-all, self-contained, reasonably fast, contemporary algorithm that is nevertheless infected by _noobaceae ignorancium_, and
53
55
  - miscellaneous notes.
54
56
 
55
- [![pip install mapFolding](https://img.shields.io/badge/pip%20install-mapFolding-gray.svg?colorB=3b434b)](https://pypi.org/project/mapFolding/) [![Python Tests](https://github.com/hunterhogan/mapFolding/actions/workflows/unittests.yml/badge.svg)](https://github.com/hunterhogan/mapFolding/actions/workflows/unittests.yml) [![Static Badge](https://img.shields.io/badge/stinkin'%20badges-don't%20need-b98e5e)](https://youtu.be/g6f_miE91mk&t=4) ![PyPI - Downloads](https://img.shields.io/pypi/dd/mapFolding) ![Static Badge](https://img.shields.io/badge/issues-I%20have%20them-brightgreen) ![GitHub repo size](https://img.shields.io/github/repo-size/hunterhogan/mapFolding)
57
+ [![pip install mapFolding](https://img.shields.io/badge/pip%20install-mapFolding-gray.svg?colorB=3b434b)](https://pypi.org/project/mapFolding/) [![Python Tests](https://github.com/hunterhogan/mapFolding/actions/workflows/pythonTests.yml/badge.svg)](https://github.com/hunterhogan/mapFolding/actions/workflows/pythonTests.yml) [![Static Badge](https://img.shields.io/badge/stinkin'%20badges-don't%20need-b98e5e)](https://youtu.be/g6f_miE91mk&t=4) ![PyPI - Downloads](https://img.shields.io/pypi/dd/mapFolding) ![Static Badge](https://img.shields.io/badge/issues-I%20have%20them-brightgreen) ![GitHub repo size](https://img.shields.io/github/repo-size/hunterhogan/mapFolding)
56
58
 
57
59
  ## Simple, easy usage based on OEIS IDs
58
60
 
@@ -14,7 +14,7 @@ The directory [mapFolding/reference](https://github.com/hunterhogan/mapFolding/b
14
14
  - [hunterNumba.py](https://github.com/hunterhogan/mapFolding/blob/main/mapFolding/reference), a one-size-fits-all, self-contained, reasonably fast, contemporary algorithm that is nevertheless infected by _noobaceae ignorancium_, and
15
15
  - miscellaneous notes.
16
16
 
17
- [![pip install mapFolding](https://img.shields.io/badge/pip%20install-mapFolding-gray.svg?colorB=3b434b)](https://pypi.org/project/mapFolding/) [![Python Tests](https://github.com/hunterhogan/mapFolding/actions/workflows/unittests.yml/badge.svg)](https://github.com/hunterhogan/mapFolding/actions/workflows/unittests.yml) [![Static Badge](https://img.shields.io/badge/stinkin'%20badges-don't%20need-b98e5e)](https://youtu.be/g6f_miE91mk&t=4) ![PyPI - Downloads](https://img.shields.io/pypi/dd/mapFolding) ![Static Badge](https://img.shields.io/badge/issues-I%20have%20them-brightgreen) ![GitHub repo size](https://img.shields.io/github/repo-size/hunterhogan/mapFolding)
17
+ [![pip install mapFolding](https://img.shields.io/badge/pip%20install-mapFolding-gray.svg?colorB=3b434b)](https://pypi.org/project/mapFolding/) [![Python Tests](https://github.com/hunterhogan/mapFolding/actions/workflows/pythonTests.yml/badge.svg)](https://github.com/hunterhogan/mapFolding/actions/workflows/pythonTests.yml) [![Static Badge](https://img.shields.io/badge/stinkin'%20badges-don't%20need-b98e5e)](https://youtu.be/g6f_miE91mk&t=4) ![PyPI - Downloads](https://img.shields.io/pypi/dd/mapFolding) ![Static Badge](https://img.shields.io/badge/issues-I%20have%20them-brightgreen) ![GitHub repo size](https://img.shields.io/github/repo-size/hunterhogan/mapFolding)
18
18
 
19
19
  ## Simple, easy usage based on OEIS IDs
20
20
 
@@ -0,0 +1,67 @@
1
+ from cffconvert.cli.create_citation import create_citation
2
+ from typing import Any, Dict
3
+ import cffconvert
4
+ import pathlib
5
+ import tomli
6
+ import inspect
7
+ import json
8
+
9
+ """
10
+ Tentative plan:
11
+ - Commit and push to GitHub
12
+ - GitHub Action gathers information from the sources of truth
13
+ - If the citation needs to be updated, write to both
14
+ - pathFilenameCitationSSOT
15
+ - pathFilenameCitationDOTcffRepo
16
+ - Commit and push to GitHub
17
+ - this complicates things
18
+ - I want the updated citation to be in the `commit` field of itself
19
+ """
20
+
21
+ """cffconvert.Citation fields and the source of truth
22
+ abstract: pathFilenameCitationSSOT
23
+ authors: pathFilenamePackageSSOT
24
+ cff-version: pathFilenameCitationSSOT
25
+ commit: workflows['Make GitHub Release']
26
+ contact: pathFilenamePackageSSOT
27
+ date-released: workflows['Make GitHub Release']
28
+ doi: pathFilenameCitationSSOT
29
+ identifiers: workflows['Make GitHub Release']
30
+ keywords: pathFilenamePackageSSOT
31
+ license: pathFilenamePackageSSOT
32
+ license-url: pathFilenamePackageSSOT
33
+ message: pathFilenameCitationSSOT
34
+ preferred-citation: pathFilenameCitationSSOT
35
+ references: to be determined
36
+ repository: pathFilenamePackageSSOT
37
+ repository-artifact: (https://pypi.org/pypi/{package_name}/json').json()['releases']
38
+ repository-code: workflows['Make GitHub Release']
39
+ title: pathFilenamePackageSSOT
40
+ type: pathFilenameCitationSSOT
41
+ url: pathFilenamePackageSSOT
42
+ version: pathFilenamePackageSSOT
43
+ """
44
+ # Prefer reliable, dynamic values over hardcoded ones
45
+ packageName: str = 'mapFolding'
46
+ pathRepoRoot = pathlib.Path(__file__).parent.parent.parent
47
+ pathFilenamePackageSSOT = pathRepoRoot / 'pyproject.toml'
48
+
49
+ filenameGitHubAction = 'updateCitation.yml'
50
+ pathFilenameGitHubAction = pathRepoRoot / '.github' / 'workflows' / filenameGitHubAction
51
+
52
+ filenameCitationDOTcff = 'CITATION.cff'
53
+ pathCitations = pathRepoRoot / packageName / 'citations'
54
+ pathFilenameCitationSSOT = pathCitations / filenameCitationDOTcff
55
+ pathFilenameCitationDOTcffRepo = pathRepoRoot / filenameCitationDOTcff
56
+
57
+ tomlPackageData: Dict[str, Any] = tomli.loads(pathFilenamePackageSSOT.read_text())['project']
58
+
59
+ citationObject: cffconvert.Citation = create_citation(infile=pathFilenameCitationSSOT, url=None)
60
+
61
+ path_cffconvert = pathlib.Path(inspect.getfile(cffconvert)).parent
62
+ pathFilenameSchema = path_cffconvert / "schemas/1.2.0/schema.json"
63
+ scheme: Dict[str, Any] = json.loads(pathFilenameSchema.read_text())
64
+ schemaSpecifications: Dict[str, Any] = scheme['properties']
65
+
66
+ for property, subProperties in schemaSpecifications.items():
67
+ print(property, subProperties.get('items', None))
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: mapFolding
3
- Version: 0.3.2
3
+ Version: 0.3.4
4
4
  Summary: Count distinct ways to fold a map (or a strip of stamps)
5
5
  Author-email: Hunter Hogan <HunterHogan@pm.me>
6
6
  License: CC-BY-NC-4.0
@@ -29,12 +29,14 @@ Requires-Dist: jupyter; extra == "benchmark"
29
29
  Requires-Dist: pandas; extra == "benchmark"
30
30
  Requires-Dist: tqdm; extra == "benchmark"
31
31
  Provides-Extra: testing
32
+ Requires-Dist: cffconvert; extra == "testing"
32
33
  Requires-Dist: more_itertools; extra == "testing"
33
34
  Requires-Dist: pytest; extra == "testing"
34
35
  Requires-Dist: pytest-cov; extra == "testing"
35
36
  Requires-Dist: pytest-env; extra == "testing"
36
37
  Requires-Dist: pytest-xdist; extra == "testing"
37
38
  Requires-Dist: python_minifier; extra == "testing"
39
+ Requires-Dist: tomli; extra == "testing"
38
40
 
39
41
  # Algorithm(s) for counting distinct ways to fold a map (or a strip of stamps)
40
42
 
@@ -52,7 +54,7 @@ The directory [mapFolding/reference](https://github.com/hunterhogan/mapFolding/b
52
54
  - [hunterNumba.py](https://github.com/hunterhogan/mapFolding/blob/main/mapFolding/reference), a one-size-fits-all, self-contained, reasonably fast, contemporary algorithm that is nevertheless infected by _noobaceae ignorancium_, and
53
55
  - miscellaneous notes.
54
56
 
55
- [![pip install mapFolding](https://img.shields.io/badge/pip%20install-mapFolding-gray.svg?colorB=3b434b)](https://pypi.org/project/mapFolding/) [![Python Tests](https://github.com/hunterhogan/mapFolding/actions/workflows/unittests.yml/badge.svg)](https://github.com/hunterhogan/mapFolding/actions/workflows/unittests.yml) [![Static Badge](https://img.shields.io/badge/stinkin'%20badges-don't%20need-b98e5e)](https://youtu.be/g6f_miE91mk&t=4) ![PyPI - Downloads](https://img.shields.io/pypi/dd/mapFolding) ![Static Badge](https://img.shields.io/badge/issues-I%20have%20them-brightgreen) ![GitHub repo size](https://img.shields.io/github/repo-size/hunterhogan/mapFolding)
57
+ [![pip install mapFolding](https://img.shields.io/badge/pip%20install-mapFolding-gray.svg?colorB=3b434b)](https://pypi.org/project/mapFolding/) [![Python Tests](https://github.com/hunterhogan/mapFolding/actions/workflows/pythonTests.yml/badge.svg)](https://github.com/hunterhogan/mapFolding/actions/workflows/pythonTests.yml) [![Static Badge](https://img.shields.io/badge/stinkin'%20badges-don't%20need-b98e5e)](https://youtu.be/g6f_miE91mk&t=4) ![PyPI - Downloads](https://img.shields.io/pypi/dd/mapFolding) ![Static Badge](https://img.shields.io/badge/issues-I%20have%20them-brightgreen) ![GitHub repo size](https://img.shields.io/github/repo-size/hunterhogan/mapFolding)
56
58
 
57
59
  ## Simple, easy usage based on OEIS IDs
58
60
 
@@ -1,6 +1,7 @@
1
1
  README.md
2
2
  pyproject.toml
3
3
  mapFolding/benchmarks/benchmarking.py
4
+ mapFolding/citations/updateCitation.py
4
5
  mapFolding/mapFolding.egg-info/PKG-INFO
5
6
  mapFolding/mapFolding.egg-info/SOURCES.txt
6
7
  mapFolding/mapFolding.egg-info/dependency_links.txt
@@ -17,12 +18,15 @@ mapFolding/reference/lunnanWhile.py
17
18
  mapFolding/reference/rotatedEntryPoint.py
18
19
  mapFolding/reference/total_countPlus1vsPlusN.py
19
20
  mapFolding/someAssemblyRequired/__init__.py
20
- mapFolding/someAssemblyRequired/countInitialize.py
21
- mapFolding/someAssemblyRequired/countParallel.py
22
- mapFolding/someAssemblyRequired/countSequential.py
21
+ mapFolding/someAssemblyRequired/generalizeSourceCode.py
23
22
  mapFolding/someAssemblyRequired/getLLVMforNoReason.py
24
- mapFolding/someAssemblyRequired/synthesizeJob.py
23
+ mapFolding/someAssemblyRequired/makeJob.py
24
+ mapFolding/someAssemblyRequired/synthesizeModuleJob.py
25
25
  mapFolding/someAssemblyRequired/synthesizeModules.py
26
+ mapFolding/syntheticModules/__init__.py
27
+ mapFolding/syntheticModules/countInitialize.py
28
+ mapFolding/syntheticModules/countParallel.py
29
+ mapFolding/syntheticModules/countSequential.py
26
30
  tests/test_oeis.py
27
31
  tests/test_other.py
28
32
  tests/test_tasks.py
@@ -9,9 +9,11 @@ pandas
9
9
  tqdm
10
10
 
11
11
  [testing]
12
+ cffconvert
12
13
  more_itertools
13
14
  pytest
14
15
  pytest-cov
15
16
  pytest-env
16
17
  pytest-xdist
17
18
  python_minifier
19
+ tomli
@@ -2,3 +2,4 @@ benchmarks
2
2
  citations
3
3
  reference
4
4
  someAssemblyRequired
5
+ syntheticModules
@@ -0,0 +1,2 @@
1
+ from .makeJob import makeStateJob
2
+ # from .generalizeSourceCode import makeInlineFunction
@@ -0,0 +1,122 @@
1
+ from mapFolding import datatypeLargeDEFAULT, datatypeMediumDEFAULT, datatypeSmallDEFAULT
2
+ from typing import Dict, Optional, List, Set, Union
3
+ import ast
4
+
5
+ class RecursiveInlinerWithEnum(ast.NodeTransformer):
6
+ """Process AST nodes to inline functions and substitute enum values.
7
+ Also handles function decorators during inlining."""
8
+
9
+ def __init__(self, dictionaryFunctions: Dict[str, ast.FunctionDef], dictionaryEnumValues: Dict[str, int]) -> None:
10
+ self.dictionaryFunctions = dictionaryFunctions
11
+ self.dictionaryEnumValues = dictionaryEnumValues
12
+ self.processed = set()
13
+
14
+ def inlineFunctionBody(self, functionName: str) -> Optional[ast.FunctionDef]:
15
+ if functionName in self.processed:
16
+ return None
17
+
18
+ self.processed.add(functionName)
19
+ inlineDefinition = self.dictionaryFunctions[functionName]
20
+ # Recursively process the function body
21
+ for node in ast.walk(inlineDefinition):
22
+ self.visit(node)
23
+ return inlineDefinition
24
+
25
+ def visit_Attribute(self, node: ast.Attribute) -> ast.AST:
26
+ # Substitute enum identifiers (e.g., indexMy.leaf1ndex.value)
27
+ if isinstance(node.value, ast.Attribute) and isinstance(node.value.value, ast.Name):
28
+ enumPath = f"{node.value.value.id}.{node.value.attr}.{node.attr}"
29
+ if enumPath in self.dictionaryEnumValues:
30
+ return ast.Constant(value=self.dictionaryEnumValues[enumPath])
31
+ return self.generic_visit(node)
32
+
33
+ def visit_Call(self, node: ast.Call) -> ast.AST:
34
+ callNode = self.generic_visit(node)
35
+ if isinstance(callNode, ast.Call) and isinstance(callNode.func, ast.Name) and callNode.func.id in self.dictionaryFunctions:
36
+ inlineDefinition = self.inlineFunctionBody(callNode.func.id)
37
+ if (inlineDefinition and inlineDefinition.body):
38
+ lastStmt = inlineDefinition.body[-1]
39
+ if isinstance(lastStmt, ast.Return) and lastStmt.value is not None:
40
+ return self.visit(lastStmt.value)
41
+ elif isinstance(lastStmt, ast.Expr) and lastStmt.value is not None:
42
+ return self.visit(lastStmt.value)
43
+ return ast.Constant(value=None)
44
+ return callNode
45
+
46
+ def visit_Expr(self, node: ast.Expr) -> Union[ast.AST, List[ast.AST]]:
47
+ if isinstance(node.value, ast.Call):
48
+ if isinstance(node.value.func, ast.Name) and node.value.func.id in self.dictionaryFunctions:
49
+ inlineDefinition = self.inlineFunctionBody(node.value.func.id)
50
+ if inlineDefinition:
51
+ return [self.visit(stmt) for stmt in inlineDefinition.body]
52
+ return self.generic_visit(node)
53
+
54
+ def findRequiredImports(node: ast.AST) -> Set[str]:
55
+ """Find all modules that need to be imported based on AST analysis.
56
+ NOTE: due to hardcoding, this is a glorified regex. No, wait, this is less versatile than regex."""
57
+ requiredImports = set()
58
+
59
+ class ImportFinder(ast.NodeVisitor):
60
+ def visit_Name(self, node: ast.Name) -> None:
61
+ if node.id in {'numba'}:
62
+ requiredImports.add(node.id)
63
+ self.generic_visit(node)
64
+
65
+ def visitDecorator(self, node: ast.AST) -> None:
66
+ if isinstance(node, ast.Call) and isinstance(node.func, ast.Name):
67
+ if node.func.id == 'jit':
68
+ requiredImports.add('numba')
69
+ self.generic_visit(node)
70
+
71
+ ImportFinder().visit(node)
72
+ return requiredImports
73
+
74
+ def generateImports(requiredImports: Set[str]) -> str:
75
+ """Generate import statements based on required modules."""
76
+ importStatements = {'import numba', 'from mapFolding import indexMy, indexTrack'}
77
+
78
+ importMapping = {
79
+ 'numba': 'import numba',
80
+ }
81
+
82
+ for moduleName in sorted(requiredImports):
83
+ if moduleName in importMapping:
84
+ importStatements.add(importMapping[moduleName])
85
+
86
+ return '\n'.join(importStatements)
87
+
88
+ def makeInlineFunction(sourceCode: str, targetFunctionName: str, dictionaryEnumValues: Dict[str, int], skipEnum: bool=False, **keywordArguments: Optional[str]):
89
+ datatypeLarge = keywordArguments.get('datatypeLarge', datatypeLargeDEFAULT)
90
+ datatypeMedium = keywordArguments.get('datatypeMedium', datatypeMediumDEFAULT)
91
+ datatypeSmall = keywordArguments.get('datatypeSmall', datatypeSmallDEFAULT)
92
+ if skipEnum:
93
+ dictionaryEnumValues = {}
94
+ dictionaryParsed = ast.parse(sourceCode)
95
+ dictionaryFunctions = {
96
+ element.name: element
97
+ for element in dictionaryParsed.body
98
+ if isinstance(element, ast.FunctionDef)
99
+ }
100
+ nodeTarget = dictionaryFunctions[targetFunctionName]
101
+ nodeInliner = RecursiveInlinerWithEnum(dictionaryFunctions, dictionaryEnumValues)
102
+ nodeInlined = nodeInliner.visit(nodeTarget)
103
+ ast.fix_missing_locations(nodeInlined)
104
+ callableInlinedDecorators = [decorator for decorator in nodeInlined.decorator_list]
105
+
106
+ requiredImports = findRequiredImports(nodeInlined)
107
+ importStatements = generateImports(requiredImports)
108
+ importsRequired = importStatements
109
+ dictionaryDecoratorsNumba={
110
+ 'countInitialize':
111
+ f'@numba.jit((numba.{datatypeSmall}[:,:,::1], numba.{datatypeMedium}[::1], numba.{datatypeSmall}[::1], numba.{datatypeMedium}[:,::1]), parallel=False, boundscheck=False, cache=True, error_model="numpy", fastmath=True, looplift=False, nogil=True, nopython=True)\n',
112
+ 'countParallel':
113
+ f'@numba.jit((numba.{datatypeSmall}[:,:,::1], numba.{datatypeLarge}[::1], numba.{datatypeMedium}[::1], numba.{datatypeSmall}[::1], numba.{datatypeMedium}[:,::1]), parallel=True, boundscheck=False, cache=True, error_model="numpy", fastmath=True, looplift=False, nogil=True, nopython=True)\n',
114
+ 'countSequential':
115
+ f'@numba.jit((numba.{datatypeSmall}[:,:,::1], numba.{datatypeLarge}[::1], numba.{datatypeMedium}[::1], numba.{datatypeSmall}[::1], numba.{datatypeMedium}[:,::1]), parallel=False, boundscheck=False, cache=True, error_model="numpy", fastmath=True, looplift=False, nogil=True, nopython=True)\n',
116
+ }
117
+
118
+ lineNumbaDecorator = dictionaryDecoratorsNumba[targetFunctionName]
119
+
120
+ # inlinedCode = ast.unparse(ast.Module(body=[nodeInlined], type_ignores=[]))
121
+ callableInlined = lineNumbaDecorator + ast.unparse(nodeInlined)
122
+ return (callableInlined, callableInlinedDecorators, importsRequired)
@@ -0,0 +1,21 @@
1
+ from mapFolding import getPathFilenameFoldsTotal
2
+ from mapFolding import outfitCountFolds
3
+ from typing import Any, Optional, Sequence, Type
4
+ import pathlib
5
+ import pickle
6
+
7
+ def makeStateJob(listDimensions: Sequence[int], **keywordArguments: Optional[Type[Any]]) -> pathlib.Path:
8
+
9
+ stateUniversal = outfitCountFolds(listDimensions, computationDivisions=None, CPUlimit=None, **keywordArguments)
10
+
11
+ from syntheticModules import countInitialize
12
+ countInitialize(stateUniversal['connectionGraph'], stateUniversal['gapsWhere'], stateUniversal['my'], stateUniversal['track'])
13
+
14
+ pathFilenameChopChop = getPathFilenameFoldsTotal(stateUniversal['mapShape'])
15
+ suffix = pathFilenameChopChop.suffix
16
+ pathJob = pathlib.Path(str(pathFilenameChopChop)[0:-len(suffix)])
17
+ pathJob.mkdir(parents=True, exist_ok=True)
18
+ pathFilenameJob = pathJob / 'stateJob.pkl'
19
+
20
+ pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
21
+ return pathFilenameJob
@@ -1,10 +1,12 @@
1
- from mapFolding import getPathFilenameFoldsTotal, dtypeNumpyDefaults, thisSeemsVeryComplicated
2
- from mapFolding import make_dtype, datatypeLarge, dtypeLarge, datatypeMedium, dtypeMedium, datatypeSmall, dtypeSmall
3
- from mapFolding import outfitCountFolds, computationState, indexMy, indexTrack
4
- from someAssemblyRequired import countInitialize, countSequential
5
- from typing import Any, Optional, Sequence, Type
1
+ from mapFolding import getPathFilenameFoldsTotal
2
+ from mapFolding import make_dtype, datatypeLargeDEFAULT, datatypeMediumDEFAULT, datatypeSmallDEFAULT, datatypeModuleDEFAULT
3
+ from mapFolding import computationState
4
+ from someAssemblyRequired import makeStateJob
5
+ from typing import Optional
6
6
  import more_itertools
7
7
  import inspect
8
+ import importlib
9
+ import importlib.util
8
10
  import numpy
9
11
  import pathlib
10
12
  import pickle
@@ -12,20 +14,7 @@ import python_minifier
12
14
 
13
15
  identifierCallableLaunch = "goGoGadgetAbsurdity"
14
16
 
15
- def makeStateJob(listDimensions: Sequence[int], **keywordArguments: Optional[Type[Any]]):
16
- stateUniversal = outfitCountFolds(listDimensions, computationDivisions=None, CPUlimit=None, **keywordArguments)
17
- countInitialize(stateUniversal['connectionGraph'], stateUniversal['gapsWhere'], stateUniversal['my'], stateUniversal['track'])
18
-
19
- pathFilenameChopChop = getPathFilenameFoldsTotal(stateUniversal['mapShape'])
20
- suffix = pathFilenameChopChop.suffix
21
- pathJob = pathlib.Path(str(pathFilenameChopChop)[0:-len(suffix)])
22
- pathJob.mkdir(parents=True, exist_ok=True)
23
- pathFilenameJob = pathJob / 'stateJob.pkl'
24
-
25
- pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
26
- return pathFilenameJob
27
-
28
- def convertNDArrayToStr(arrayTarget: numpy.ndarray, identifierName: str) -> str:
17
+ def makeStrRLEcompacted(arrayTarget: numpy.ndarray, identifierName: str) -> str:
29
18
  def process_nested_array(arraySlice):
30
19
  if isinstance(arraySlice, numpy.ndarray) and arraySlice.ndim > 1:
31
20
  return [process_nested_array(arraySlice[index]) for index in range(arraySlice.shape[0])]
@@ -54,22 +43,25 @@ def convertNDArrayToStr(arrayTarget: numpy.ndarray, identifierName: str) -> str:
54
43
  return f"{identifierName} = numpy.array({stringMinimized}, dtype=numpy.{arrayTarget.dtype})"
55
44
 
56
45
  def writeModuleWithNumba(listDimensions, **keywordArguments: Optional[str]) -> pathlib.Path:
57
- datatypeLargeAsStr = keywordArguments.get('datatypeLarge', thisSeemsVeryComplicated.datatypeLarge)
58
- datatypeMediumAsStr = keywordArguments.get('datatypeMedium', thisSeemsVeryComplicated.datatypeMedium)
59
- datatypeSmallAsStr = keywordArguments.get('datatypeSmall', thisSeemsVeryComplicated.datatypeSmall)
46
+ datatypeLarge = keywordArguments.get('datatypeLarge', datatypeLargeDEFAULT)
47
+ datatypeMedium = keywordArguments.get('datatypeMedium', datatypeMediumDEFAULT)
48
+ datatypeSmall = keywordArguments.get('datatypeSmall', datatypeSmallDEFAULT)
49
+ datatypeModule = keywordArguments.get('datatypeModule', datatypeModuleDEFAULT)
60
50
 
61
- numpy_dtypeLarge = make_dtype(datatypeLargeAsStr) # type: ignore
62
- numpy_dtypeMedium = make_dtype(datatypeMediumAsStr) # type: ignore
63
- numpy_dtypeSmall = make_dtype(datatypeSmallAsStr) # type: ignore
51
+ dtypeLarge = make_dtype(datatypeLarge, datatypeModule) # type: ignore
52
+ dtypeMedium = make_dtype(datatypeMedium, datatypeModule) # type: ignore
53
+ dtypeSmall = make_dtype(datatypeSmall, datatypeModule) # type: ignore
64
54
 
65
- pathFilenameJob = makeStateJob(listDimensions, dtypeLarge = numpy_dtypeLarge, dtypeMedium = numpy_dtypeMedium, dtypeSmall = numpy_dtypeSmall)
55
+ pathFilenameJob = makeStateJob(listDimensions, dtypeLarge = dtypeLarge, dtypeMedium = dtypeMedium, dtypeSmall = dtypeSmall)
66
56
  stateJob: computationState = pickle.loads(pathFilenameJob.read_bytes())
67
57
  pathFilenameFoldsTotal = getPathFilenameFoldsTotal(stateJob['mapShape'], pathFilenameJob.parent)
68
58
 
69
- codeSource = inspect.getsource(countSequential)
59
+ from syntheticModules import countSequential
60
+ algorithmSource = countSequential
61
+ codeSource = inspect.getsource(algorithmSource)
70
62
 
71
63
  # forceinline=True might actually be useful
72
- parametersNumba = f"numba.types.{datatypeLargeAsStr}(), \
64
+ parametersNumba = f"numba.types.{datatypeLarge}(), \
73
65
  cache=True, \
74
66
  nopython=True, \
75
67
  fastmath=True, \
@@ -96,14 +88,14 @@ no_cpython_wrapper=False, \
96
88
  ImaIndent = ' '
97
89
  linesDataDynamic = """"""
98
90
  linesDataDynamic = "\n".join([linesDataDynamic
99
- , ImaIndent + f"foldsTotal = numba.types.{datatypeLargeAsStr}(0)"
100
- , ImaIndent + convertNDArrayToStr(stateJob['foldGroups'], 'foldGroups')
101
- , ImaIndent + convertNDArrayToStr(stateJob['gapsWhere'], 'gapsWhere')
91
+ , ImaIndent + f"foldsTotal = numba.types.{datatypeLarge}(0)"
92
+ , ImaIndent + makeStrRLEcompacted(stateJob['foldGroups'], 'foldGroups')
93
+ , ImaIndent + makeStrRLEcompacted(stateJob['gapsWhere'], 'gapsWhere')
102
94
  ])
103
95
 
104
96
  linesDataStatic = """"""
105
97
  linesDataStatic = "\n".join([linesDataStatic
106
- , ImaIndent + convertNDArrayToStr(stateJob['connectionGraph'], 'connectionGraph')
98
+ , ImaIndent + makeStrRLEcompacted(stateJob['connectionGraph'], 'connectionGraph')
107
99
  ])
108
100
 
109
101
  my = stateJob['my']
@@ -127,7 +119,7 @@ no_cpython_wrapper=False, \
127
119
  elif 'track[indexTrack.' in lineSource:
128
120
  # leafAbove = track[indexTrack.leafAbove.value]
129
121
  identifier, statement = lineSource.split('=')
130
- lineSource = ImaIndent + convertNDArrayToStr(eval(statement.strip()), identifier.strip())
122
+ lineSource = ImaIndent + makeStrRLEcompacted(eval(statement.strip()), identifier.strip())
131
123
 
132
124
  linesAlgorithm = "\n".join([linesAlgorithm
133
125
  , lineSource
@@ -163,8 +155,16 @@ if __name__ == '__main__':
163
155
  return pathFilenameDestination
164
156
 
165
157
  if __name__ == '__main__':
166
- listDimensions = [3,15]
158
+ listDimensions = [6,6]
167
159
  datatypeLarge = 'int64'
168
160
  datatypeMedium = 'uint8'
169
161
  datatypeSmall = datatypeMedium
170
- writeModuleWithNumba(listDimensions, datatypeLarge=datatypeLarge, datatypeMedium=datatypeMedium, datatypeSmall=datatypeSmall)
162
+ pathFilenameModule = writeModuleWithNumba(listDimensions, datatypeLarge=datatypeLarge, datatypeMedium=datatypeMedium, datatypeSmall=datatypeSmall)
163
+ # Induce numba.jit compilation
164
+ moduleSpec = importlib.util.spec_from_file_location(pathFilenameModule.stem, pathFilenameModule)
165
+ if moduleSpec is None:
166
+ raise ImportError(f"Could not load module specification from {pathFilenameModule}")
167
+ module = importlib.util.module_from_spec(moduleSpec)
168
+ if moduleSpec.loader is None:
169
+ raise ImportError(f"Could not load module from {moduleSpec}")
170
+ moduleSpec.loader.exec_module(module)
@@ -0,0 +1,170 @@
1
+ from mapFolding import indexMy, indexTrack, getAlgorithmSource, ParametersNumba, parametersNumbaDEFAULT, hackSSOTdtype
2
+ from mapFolding import datatypeLargeDEFAULT, datatypeMediumDEFAULT, datatypeSmallDEFAULT
3
+ import pathlib
4
+ import inspect
5
+ import numpy
6
+ import numba
7
+ from typing import Dict, Optional, List, Set, Union, Sequence
8
+ import ast
9
+
10
# Reference implementation loaded once at import time; every synthesized
# module below is derived from this module's source text.
algorithmSource = getAlgorithmSource()
12
class RecursiveInliner(ast.NodeTransformer):
    """Inline calls to known functions directly into the caller's AST.

    Given a mapping of function name -> ast.FunctionDef, replaces calls to
    those functions with the callee's (recursively inlined) body. Each
    function is inlined at most once per transformer instance, which also
    guards against infinite recursion on cyclic call graphs.
    """
    def __init__(self, dictionaryFunctions: Dict[str, ast.FunctionDef]):
        self.dictionaryFunctions = dictionaryFunctions
        # Names already inlined by this instance; a repeat request returns
        # None so the call site is left untouched.
        self.processed = set()

    def inlineFunctionBody(self, functionName: str) -> Optional[ast.FunctionDef]:
        """Return the recursively inlined definition of `functionName`,
        or None if this instance already inlined it.

        Raises KeyError if `functionName` is not in `dictionaryFunctions`.
        """
        if functionName in self.processed:
            return None

        self.processed.add(functionName)
        inlineDefinition = self.dictionaryFunctions[functionName]
        # Recursively process the function body
        # NOTE(review): `ast.walk` + `self.visit` discards transformer return
        # values for the walked nodes themselves; replacement appears to rely
        # on generic_visit rewriting child lists in place — confirm intent.
        for node in ast.walk(inlineDefinition):
            self.visit(node)
        return inlineDefinition

    def visit_Call(self, node: ast.Call) -> ast.AST:
        # A call used as an expression is replaced by the value of the last
        # statement of the callee's body (its trailing Return/Expr), or by a
        # literal None when the callee has no usable trailing value.
        callNode = self.generic_visit(node)
        if isinstance(callNode, ast.Call) and isinstance(callNode.func, ast.Name) and callNode.func.id in self.dictionaryFunctions:
            inlineDefinition = self.inlineFunctionBody(callNode.func.id)
            if (inlineDefinition and inlineDefinition.body):
                lastStmt = inlineDefinition.body[-1]
                if isinstance(lastStmt, ast.Return) and lastStmt.value is not None:
                    return self.visit(lastStmt.value)
                elif isinstance(lastStmt, ast.Expr) and lastStmt.value is not None:
                    return self.visit(lastStmt.value)
                return ast.Constant(value=None)
        return callNode

    def visit_Expr(self, node: ast.Expr) -> Union[ast.AST, List[ast.AST]]:
        # A bare call statement is replaced by the callee's statement list;
        # NodeTransformer splices a returned list into the enclosing body.
        if isinstance(node.value, ast.Call):
            if isinstance(node.value.func, ast.Name) and node.value.func.id in self.dictionaryFunctions:
                inlineDefinition = self.inlineFunctionBody(node.value.func.id)
                if inlineDefinition:
                    return [self.visit(stmt) for stmt in inlineDefinition.body]
        return self.generic_visit(node)
48
+
49
def decorateCallableWithNumba(astCallable: ast.FunctionDef, parallel: bool=False, **keywordArguments: Optional[str]):
    """Replace `astCallable`'s decorators with a `numba.jit` call carrying an
    explicit type signature derived from the parameters' ndarray annotations.

    Parameters:
        astCallable: function definition to decorate (mutated in place).
        parallel: when True, `parallel=True` is merged into the numba options.
        keywordArguments: optional dtype overrides keyed by role name
            ('datatypeLarge', 'datatypeMedium', 'datatypeSmall').

    Returns:
        The same ast.FunctionDef, with `decorator_list` replaced.
    """
    # Explicit role -> default dtype mapping. Replaces the previous
    # `eval(Z0Z_hackyStr + 'DEFAULT')`, which evaluated a constructed name:
    # same values, no eval().
    dtypeDefaultByRole = {
        'datatypeLarge': datatypeLargeDEFAULT,
        'datatypeMedium': datatypeMediumDEFAULT,
        'datatypeSmall': datatypeSmallDEFAULT,
    }

    def makeNumbaParameterSignatureElement(signatureElement: ast.arg):
        # Only parameters annotated as `ndarray[<shape>, <dtype>]` (a
        # Subscript whose slice is a Tuple) produce a signature element;
        # anything else falls through and returns None.
        if isinstance(signatureElement.annotation, ast.Subscript) and isinstance(signatureElement.annotation.slice, ast.Tuple):

            annotationShape = signatureElement.annotation.slice.elts[0]
            if isinstance(annotationShape, ast.Subscript) and isinstance(annotationShape.slice, ast.Tuple):
                # One slice per axis; the last axis is marked C-contiguous
                # (step=1) per numba's array-signature convention.
                shapeAsListSlices = [ast.Slice() for _ in range(len(annotationShape.slice.elts))]
                shapeAsListSlices[-1] = ast.Slice(step=ast.Constant(value=1))
                shapeAST = ast.Tuple(elts=shapeAsListSlices, ctx=ast.Load())
            else:
                shapeAST = ast.Slice(step=ast.Constant(value=1))

            annotationDtype = signatureElement.annotation.slice.elts[1]
            if isinstance(annotationDtype, ast.Subscript) and isinstance(annotationDtype.slice, ast.Attribute):
                datatypeAST = annotationDtype.slice.attr
            else:
                datatypeAST = None

            ndarrayName = signatureElement.arg
            # hackSSOTdtype maps array name -> e.g. 'dtypeLarge'; convert to
            # the role name 'datatypeLarge' ('d' + 'ata' + 'typeLarge').
            Z0Z_hackyStr = hackSSOTdtype[ndarrayName]
            Z0Z_hackyStr = Z0Z_hackyStr[0] + 'ata' + Z0Z_hackyStr[1:]
            # Priority: explicit keyword override > dtype from annotation >
            # module-level default for this role.
            datatype_attr = keywordArguments.get(Z0Z_hackyStr, None) or datatypeAST or dtypeDefaultByRole[Z0Z_hackyStr]

            datatypeNumba = ast.Attribute(value=ast.Name(id='numba', ctx=ast.Load()), attr=datatype_attr, ctx=ast.Load())

            return ast.Subscript(value=datatypeNumba, slice=shapeAST, ctx=ast.Load())

    listNumbaParameterSignature: List[ast.Subscript] = []
    for parameter in astCallable.args.args:
        signatureElement = makeNumbaParameterSignatureElement(parameter)
        if signatureElement:
            listNumbaParameterSignature.append(signatureElement)

    astArgsNumbaSignature = ast.Tuple(elts=listNumbaParameterSignature, ctx=ast.Load())

    parametersNumba = parametersNumbaDEFAULT if not parallel else ParametersNumba({**parametersNumbaDEFAULT, 'parallel': True})
    listKeywordsNumbaSignature = [ast.keyword(arg=parameterName, value=ast.Constant(value=parameterValue)) for parameterName, parameterValue in parametersNumba.items()]

    astDecoratorNumba = ast.Call(func=ast.Attribute(value=ast.Name(id='numba', ctx=ast.Load()), attr='jit', ctx=ast.Load()), args=[astArgsNumbaSignature], keywords=listKeywordsNumbaSignature)

    astCallable.decorator_list = [astDecoratorNumba]
    return astCallable
93
+
94
def getDictionaryEnumValues() -> Dict[str, int]:
    """Map dotted enum-access strings to their integer values.

    Builds entries like `'indexMy.leaf1ndex.value' -> 3` for both index
    enums, suitable for textual substitution of enum lookups with literals
    in generated code.

    Returns:
        Dict keyed by `'<EnumName>.<memberName>.value'`.
    """
    dictionaryEnumValues: Dict[str, int] = {}
    for enumIndex in [indexMy, indexTrack]:
        # `__members__` is the public, ordered name -> member mapping;
        # equivalent to the private `_member_map_` used previously.
        for memberName, member in enumIndex.__members__.items():
            dictionaryEnumValues[f"{enumIndex.__name__}.{memberName}.value"] = member.value
    return dictionaryEnumValues
100
+
101
def unpackArrays(codeInlined: str) -> str:
    """Rewrite array-cell accesses in `codeInlined` as plain local names.

    Scalar cells of `my` (e.g. `my[indexMy.leaf1ndex.value]`) become scalar
    locals; rows of `track` (e.g. `track[indexTrack.leafAbove.value, i]`)
    become array locals indexed directly. The locals are initialized right
    after the sentinel line `    doFindGaps = True`, which is expected to be
    present in `codeInlined`; if absent, the initializations are dropped.
    """
    scalarAliases = {
        'my[indexMy.dimensionsTotal.value]': 'dimensionsTotal',
        'my[indexMy.dimensionsUnconstrained.value]': 'dimensionsUnconstrained',
        'my[indexMy.gap1ndex.value]': 'gap1ndex',
        'my[indexMy.gap1ndexCeiling.value]': 'gap1ndexCeiling',
        'my[indexMy.indexDimension.value]': 'indexDimension',
        # 'my[indexMy.indexLeaf.value]': 'indexLeaf',
        'my[indexMy.indexMiniGap.value]': 'indexMiniGap',
        'my[indexMy.leaf1ndex.value]': 'leaf1ndex',
        'my[indexMy.leafConnectee.value]': 'leafConnectee',
        # 'my[indexMy.taskDivisions.value]': 'taskDivisions',
        'my[indexMy.taskIndex.value]': 'taskIndex',
        # 'foldGroups[-1]': 'leavesTotal',
    }

    arrayAliases = {
        "track[indexTrack.leafAbove.value, ": 'leafAbove[',
        "track[indexTrack.leafBelow.value, ": 'leafBelow[',
        'track[indexTrack.countDimensionsGapped.value, ': 'countDimensionsGapped[',
        'track[indexTrack.gapRangeStart.value, ': 'gapRangeStart[',
    }

    indentation = "    "
    initializationLines: List[str] = []

    # Scalars: alias the cell once, then substitute every access.
    for sourceExpression, aliasName in scalarAliases.items():
        initializationLines.append(f"{indentation}{aliasName} = {sourceExpression}\n")
        codeInlined = codeInlined.replace(sourceExpression, aliasName)

    # Arrays: strip the trailing '[' / ', ' to form `alias = track[row]`.
    for sourceExpression, aliasName in arrayAliases.items():
        initializationLines.append(f"{indentation}{aliasName[0:-1]} = {sourceExpression[0:-2]}]\n")
        codeInlined = codeInlined.replace(sourceExpression, aliasName)

    # Splice the initializations in immediately after the sentinel line.
    sentinelLine = "    doFindGaps = True\n"
    return codeInlined.replace(sentinelLine, sentinelLine + "".join(initializationLines))
141
+
142
def inlineMapFoldingNumba(**keywordArguments: Optional[str]) -> List[pathlib.Path]:
    """Synthesize one numba-jitted module per counting callable.

    For each of countInitialize/countParallel/countSequential: parse the
    reference algorithm's source, recursively inline its helper calls into
    the target callable, decorate the result with a `numba.jit` signature,
    and write the module into the `syntheticModules` directory next to the
    algorithm source. The sequential variant additionally gets the
    array-unpacking rewrite from `unpackArrays`.

    Keyword arguments (datatypeLarge/datatypeMedium/datatypeSmall) override
    the dtypes used in the generated signatures.

    Returns:
        Paths of the modules written. (Previously the function accumulated
        this list but discarded it and returned None; returning it is
        backward compatible.)
    """
    codeSource = inspect.getsource(algorithmSource)
    pathFilenameAlgorithm = pathlib.Path(inspect.getfile(algorithmSource))

    listPathFilenamesDestination: List[pathlib.Path] = []
    listCallables = ['countInitialize', 'countParallel', 'countSequential']
    for callableTarget in listCallables:
        # Re-parse for every target: RecursiveInliner mutates the tree in place.
        codeParsed: ast.Module = ast.parse(codeSource, type_comments=True)
        # Keep imports as an ordered list (was a set) so the generated file's
        # import block is deterministic across runs.
        codeSourceImportStatements = [statement for statement in codeParsed.body if isinstance(statement, (ast.Import, ast.ImportFrom))]
        dictionaryFunctions = {statement.name: statement for statement in codeParsed.body if isinstance(statement, ast.FunctionDef)}
        callableInlinerWorkhorse = RecursiveInliner(dictionaryFunctions)
        parallel = callableTarget == 'countParallel'
        callableInlined = callableInlinerWorkhorse.inlineFunctionBody(callableTarget)
        if callableInlined:
            ast.fix_missing_locations(callableInlined)
            callableDecorated = decorateCallableWithNumba(callableInlined, parallel, **keywordArguments)

            importsRequired = "\n".join([ast.unparse(importStatement) for importStatement in codeSourceImportStatements])
            codeCallable = ast.unparse(callableDecorated)
            # Only the sequential variant is rewritten to use unpacked locals.
            codeUnpacked = unpackArrays(codeCallable) if callableTarget == 'countSequential' else codeCallable

            pathFilenameDestination = pathFilenameAlgorithm.parent / "syntheticModules" / pathFilenameAlgorithm.with_stem(callableTarget).name
            # Ensure the output directory exists before writing.
            pathFilenameDestination.parent.mkdir(parents=True, exist_ok=True)
            pathFilenameDestination.write_text(importsRequired + "\n" + codeUnpacked)
            listPathFilenamesDestination.append(pathFilenameDestination)
    return listPathFilenamesDestination
168
+
169
if __name__ == '__main__':
    # Regenerate all synthetic numba modules with the default dtypes.
    inlineMapFoldingNumba()