mapFolding-0.3.3-py3-none-any.whl → mapFolding-0.3.4-py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -0,0 +1,67 @@
+ from cffconvert.cli.create_citation import create_citation
+ from typing import Any, Dict
+ import cffconvert
+ import pathlib
+ import tomli
+ import inspect
+ import json
+
+ """
+ Tentative plan:
+ - Commit and push to GitHub
+ - GitHub Action gathers information from the sources of truth
+ - If the citation needs to be updated, write to both
+ - pathFilenameCitationSSOT
+ - pathFilenameCitationDOTcffRepo
+ - Commit and push to GitHub
+ - this complicates things
+ - I want the updated citation to be in the `commit` field of itself
+ """
+
+ """cffconvert.Citation fields and the source of truth
+ abstract: pathFilenameCitationSSOT
+ authors: pathFilenamePackageSSOT
+ cff-version: pathFilenameCitationSSOT
+ commit: workflows['Make GitHub Release']
+ contact: pathFilenamePackageSSOT
+ date-released: workflows['Make GitHub Release']
+ doi: pathFilenameCitationSSOT
+ identifiers: workflows['Make GitHub Release']
+ keywords: pathFilenamePackageSSOT
+ license: pathFilenamePackageSSOT
+ license-url: pathFilenamePackageSSOT
+ message: pathFilenameCitationSSOT
+ preferred-citation: pathFilenameCitationSSOT
+ references: to be determined
+ repository: pathFilenamePackageSSOT
+ repository-artifact: (https://pypi.org/pypi/{package_name}/json').json()['releases']
+ repository-code: workflows['Make GitHub Release']
+ title: pathFilenamePackageSSOT
+ type: pathFilenameCitationSSOT
+ url: pathFilenamePackageSSOT
+ version: pathFilenamePackageSSOT
+ """
+ # Prefer reliable, dynamic values over hardcoded ones
+ packageName: str = 'mapFolding'
+ pathRepoRoot = pathlib.Path(__file__).parent.parent.parent
+ pathFilenamePackageSSOT = pathRepoRoot / 'pyproject.toml'
+
+ filenameGitHubAction = 'updateCitation.yml'
+ pathFilenameGitHubAction = pathRepoRoot / '.github' / 'workflows' / filenameGitHubAction
+
+ filenameCitationDOTcff = 'CITATION.cff'
+ pathCitations = pathRepoRoot / packageName / 'citations'
+ pathFilenameCitationSSOT = pathCitations / filenameCitationDOTcff
+ pathFilenameCitationDOTcffRepo = pathRepoRoot / filenameCitationDOTcff
+
+ tomlPackageData: Dict[str, Any] = tomli.loads(pathFilenamePackageSSOT.read_text())['project']
+
+ citationObject: cffconvert.Citation = create_citation(infile=pathFilenameCitationSSOT, url=None)
+
+ path_cffconvert = pathlib.Path(inspect.getfile(cffconvert)).parent
+ pathFilenameSchema = path_cffconvert / "schemas/1.2.0/schema.json"
+ scheme: Dict[str, Any] = json.loads(pathFilenameSchema.read_text())
+ schemaSpecifications: Dict[str, Any] = scheme['properties']
+
+ for property, subProperties in schemaSpecifications.items():
+ print(property, subProperties.get('items', None))
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: mapFolding
- Version: 0.3.3
+ Version: 0.3.4
  Summary: Count distinct ways to fold a map (or a strip of stamps)
  Author-email: Hunter Hogan <HunterHogan@pm.me>
  License: CC-BY-NC-4.0
@@ -29,12 +29,14 @@ Requires-Dist: jupyter; extra == "benchmark"
  Requires-Dist: pandas; extra == "benchmark"
  Requires-Dist: tqdm; extra == "benchmark"
  Provides-Extra: testing
+ Requires-Dist: cffconvert; extra == "testing"
  Requires-Dist: more_itertools; extra == "testing"
  Requires-Dist: pytest; extra == "testing"
  Requires-Dist: pytest-cov; extra == "testing"
  Requires-Dist: pytest-env; extra == "testing"
  Requires-Dist: pytest-xdist; extra == "testing"
  Requires-Dist: python_minifier; extra == "testing"
+ Requires-Dist: tomli; extra == "testing"

  # Algorithm(s) for counting distinct ways to fold a map (or a strip of stamps)

@@ -0,0 +1,26 @@
+ benchmarks/benchmarking.py,sha256=HD_0NSvuabblg94ftDre6LFnXShTe8MYj3hIodW-zV0,3076
+ citations/updateCitation.py,sha256=3AUPo9_4SfH8AwQBMRl7KygAXoMRjQSqFl3ERWxtrtk,2541
+ reference/flattened.py,sha256=6blZ2Y9G8mu1F3gV8SKndPE398t2VVFlsgKlyeJ765A,16538
+ reference/hunterNumba.py,sha256=HWndRgsajOf76rbb2LDNEZ6itsdYbyV-k3wgOFjeR6c,7104
+ reference/irvineJavaPort.py,sha256=Sj-63Z-OsGuDoEBXuxyjRrNmmyl0d7Yz_XuY7I47Oyg,4250
+ reference/jax.py,sha256=rojyK80lOATtbzxjGOHWHZngQa47CXCLJHZwIdN2MwI,14955
+ reference/lunnan.py,sha256=XEcql_gxvCCghb6Or3qwmPbn4IZUbZTaSmw_fUjRxZE,5037
+ reference/lunnanNumpy.py,sha256=HqDgSwTOZA-G0oophOEfc4zs25Mv4yw2aoF1v8miOLk,4653
+ reference/lunnanWhile.py,sha256=7NY2IKO5XBgol0aWWF_Fi-7oTL9pvu_z6lB0TF1uVHk,4063
+ reference/rotatedEntryPoint.py,sha256=z0QyDQtnMvXNj5ntWzzJUQUMFm1-xHGLVhtYzwmczUI,11530
+ reference/total_countPlus1vsPlusN.py,sha256=usenM8Yn_G1dqlPl7NKKkcnbohBZVZBXTQRm2S3_EDA,8106
+ someAssemblyRequired/__init__.py,sha256=7iODZE6dM4h52spgivUvAuVsvYdSx-_YcSTz1gX82Vw,89
+ someAssemblyRequired/generalizeSourceCode.py,sha256=qyJD0ZdG0t-SYTItL_JjaIXm3-joWt3e-2nMSAH4Dbg,6392
+ someAssemblyRequired/getLLVMforNoReason.py,sha256=FtJzw2pZS3A4NimWdZsegXaU-vKeCw8m67kcfb5wvGM,894
+ someAssemblyRequired/makeJob.py,sha256=iaLjr-FhFloTF6wSuwOpurgpqJulZht9CxNo9MDidbg,949
+ someAssemblyRequired/synthesizeModuleJob.py,sha256=xLak-ZZ1zQ92jBobhJqbnA1Fua9ofiRvLdK1fmD8s_s,7271
+ someAssemblyRequired/synthesizeModules.py,sha256=JGOx69DGCcCntRtw7aOXXcmERCHqVyhFo1oiKh3P8Mg,8842
+ syntheticModules/__init__.py,sha256=nDtS5UFMKN-F5pTp0qKA0J0I-XR3n3OFxV2bosieBu8,131
+ syntheticModules/countInitialize.py,sha256=QqKfQxCmUJuJutNxOZ0VfqYEHnuk7XSkCYx7RKz3kn4,4239
+ syntheticModules/countParallel.py,sha256=77JzO3TsccjSUJRExZ0Nxdqowd_Sm0_2bRziVx5XMI4,5355
+ syntheticModules/countSequential.py,sha256=QixgcN9R5zcrmJjxSO4oOCYViWogA35HbDNlni9hw8o,3655
+ mapFolding-0.3.4.dist-info/METADATA,sha256=v8MJLZBzqS2hBp4trsRjjLzn8RAsddUPg16IiI9J1cg,7617
+ mapFolding-0.3.4.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ mapFolding-0.3.4.dist-info/entry_points.txt,sha256=F3OUeZR1XDTpoH7k3wXuRb3KF_kXTTeYhu5AGK1SiOQ,146
+ mapFolding-0.3.4.dist-info/top_level.txt,sha256=yVG9dNZywoaddcsUdEDg7o0XOBzJd_4Z-sDaXGHpiMY,69
+ mapFolding-0.3.4.dist-info/RECORD,,
@@ -1,4 +1,5 @@
  benchmarks
+ citations
  reference
  someAssemblyRequired
  syntheticModules
@@ -1,2 +1,2 @@
  from .makeJob import makeStateJob
- from .generalizeSourceCode import makeInlineFunction
+ # from .generalizeSourceCode import makeInlineFunction
@@ -1,16 +1,17 @@
  from mapFolding import datatypeLargeDEFAULT, datatypeMediumDEFAULT, datatypeSmallDEFAULT
+ from typing import Dict, Optional, List, Set, Union
  import ast

  class RecursiveInlinerWithEnum(ast.NodeTransformer):
  """Process AST nodes to inline functions and substitute enum values.
  Also handles function decorators during inlining."""

- def __init__(self, dictionaryFunctions, dictionaryEnumValues):
+ def __init__(self, dictionaryFunctions: Dict[str, ast.FunctionDef], dictionaryEnumValues: Dict[str, int]) -> None:
  self.dictionaryFunctions = dictionaryFunctions
  self.dictionaryEnumValues = dictionaryEnumValues
  self.processed = set()

- def inlineFunctionBody(self, functionName):
+ def inlineFunctionBody(self, functionName: str) -> Optional[ast.FunctionDef]:
  if functionName in self.processed:
  return None

@@ -21,7 +22,7 @@ class RecursiveInlinerWithEnum(ast.NodeTransformer):
  self.visit(node)
  return inlineDefinition

- def visit_Attribute(self, node):
+ def visit_Attribute(self, node: ast.Attribute) -> ast.AST:
  # Substitute enum identifiers (e.g., indexMy.leaf1ndex.value)
  if isinstance(node.value, ast.Attribute) and isinstance(node.value.value, ast.Name):
  enumPath = f"{node.value.value.id}.{node.value.attr}.{node.attr}"
@@ -29,7 +30,7 @@ class RecursiveInlinerWithEnum(ast.NodeTransformer):
  return ast.Constant(value=self.dictionaryEnumValues[enumPath])
  return self.generic_visit(node)

- def visit_Call(self, node):
+ def visit_Call(self, node: ast.Call) -> ast.AST:
  callNode = self.generic_visit(node)
  if isinstance(callNode, ast.Call) and isinstance(callNode.func, ast.Name) and callNode.func.id in self.dictionaryFunctions:
  inlineDefinition = self.inlineFunctionBody(callNode.func.id)
@@ -39,10 +40,10 @@ class RecursiveInlinerWithEnum(ast.NodeTransformer):
  return self.visit(lastStmt.value)
  elif isinstance(lastStmt, ast.Expr) and lastStmt.value is not None:
  return self.visit(lastStmt.value)
- return None
+ return ast.Constant(value=None)
  return callNode

- def visit_Expr(self, node):
+ def visit_Expr(self, node: ast.Expr) -> Union[ast.AST, List[ast.AST]]:
  if isinstance(node.value, ast.Call):
  if isinstance(node.value.func, ast.Name) and node.value.func.id in self.dictionaryFunctions:
  inlineDefinition = self.inlineFunctionBody(node.value.func.id)
@@ -50,18 +51,18 @@ class RecursiveInlinerWithEnum(ast.NodeTransformer):
  return [self.visit(stmt) for stmt in inlineDefinition.body]
  return self.generic_visit(node)

- def findRequiredImports(node):
+ def findRequiredImports(node: ast.AST) -> Set[str]:
  """Find all modules that need to be imported based on AST analysis.
  NOTE: due to hardcoding, this is a glorified regex. No, wait, this is less versatile than regex."""
  requiredImports = set()

  class ImportFinder(ast.NodeVisitor):
- def visit_Name(self, node):
+ def visit_Name(self, node: ast.Name) -> None:
  if node.id in {'numba'}:
  requiredImports.add(node.id)
  self.generic_visit(node)

- def visitDecorator(self, node):
+ def visitDecorator(self, node: ast.AST) -> None:
  if isinstance(node, ast.Call) and isinstance(node.func, ast.Name):
  if node.func.id == 'jit':
  requiredImports.add('numba')
@@ -70,7 +71,7 @@ def findRequiredImports(node):
  ImportFinder().visit(node)
  return requiredImports

- def generateImports(requiredImports):
+ def generateImports(requiredImports: Set[str]) -> str:
  """Generate import statements based on required modules."""
  importStatements = {'import numba', 'from mapFolding import indexMy, indexTrack'}

@@ -84,7 +85,7 @@ def generateImports(requiredImports):

  return '\n'.join(importStatements)

- def makeInlineFunction(sourceCode, targetFunctionName, dictionaryEnumValues, skipEnum=False, **keywordArguments):
+ def makeInlineFunction(sourceCode: str, targetFunctionName: str, dictionaryEnumValues: Dict[str, int], skipEnum: bool=False, **keywordArguments: Optional[str]):
  datatypeLarge = keywordArguments.get('datatypeLarge', datatypeLargeDEFAULT)
  datatypeMedium = keywordArguments.get('datatypeMedium', datatypeMediumDEFAULT)
  datatypeSmall = keywordArguments.get('datatypeSmall', datatypeSmallDEFAULT)
@@ -5,8 +5,10 @@ import pathlib
  import pickle

  def makeStateJob(listDimensions: Sequence[int], **keywordArguments: Optional[Type[Any]]) -> pathlib.Path:
- from syntheticModules import countInitialize
+
  stateUniversal = outfitCountFolds(listDimensions, computationDivisions=None, CPUlimit=None, **keywordArguments)
+
+ from syntheticModules import countInitialize
  countInitialize(stateUniversal['connectionGraph'], stateUniversal['gapsWhere'], stateUniversal['my'], stateUniversal['track'])

  pathFilenameChopChop = getPathFilenameFoldsTotal(stateUniversal['mapShape'])
@@ -1,19 +1,104 @@
- from mapFolding import indexMy, indexTrack, getAlgorithmSource
+ from mapFolding import indexMy, indexTrack, getAlgorithmSource, ParametersNumba, parametersNumbaDEFAULT, hackSSOTdtype
  from mapFolding import datatypeLargeDEFAULT, datatypeMediumDEFAULT, datatypeSmallDEFAULT
- from someAssemblyRequired import makeInlineFunction
  import pathlib
  import inspect
+ import numpy
+ import numba
+ from typing import Dict, Optional, List, Set, Union, Sequence
+ import ast

  algorithmSource = getAlgorithmSource()

- def getDictionaryEnumValues():
+ class RecursiveInliner(ast.NodeTransformer):
+ def __init__(self, dictionaryFunctions: Dict[str, ast.FunctionDef]):
+ self.dictionaryFunctions = dictionaryFunctions
+ self.processed = set()
+
+ def inlineFunctionBody(self, functionName: str) -> Optional[ast.FunctionDef]:
+ if functionName in self.processed:
+ return None
+
+ self.processed.add(functionName)
+ inlineDefinition = self.dictionaryFunctions[functionName]
+ # Recursively process the function body
+ for node in ast.walk(inlineDefinition):
+ self.visit(node)
+ return inlineDefinition
+
+ def visit_Call(self, node: ast.Call) -> ast.AST:
+ callNode = self.generic_visit(node)
+ if isinstance(callNode, ast.Call) and isinstance(callNode.func, ast.Name) and callNode.func.id in self.dictionaryFunctions:
+ inlineDefinition = self.inlineFunctionBody(callNode.func.id)
+ if (inlineDefinition and inlineDefinition.body):
+ lastStmt = inlineDefinition.body[-1]
+ if isinstance(lastStmt, ast.Return) and lastStmt.value is not None:
+ return self.visit(lastStmt.value)
+ elif isinstance(lastStmt, ast.Expr) and lastStmt.value is not None:
+ return self.visit(lastStmt.value)
+ return ast.Constant(value=None)
+ return callNode
+
+ def visit_Expr(self, node: ast.Expr) -> Union[ast.AST, List[ast.AST]]:
+ if isinstance(node.value, ast.Call):
+ if isinstance(node.value.func, ast.Name) and node.value.func.id in self.dictionaryFunctions:
+ inlineDefinition = self.inlineFunctionBody(node.value.func.id)
+ if inlineDefinition:
+ return [self.visit(stmt) for stmt in inlineDefinition.body]
+ return self.generic_visit(node)
+
+ def decorateCallableWithNumba(astCallable: ast.FunctionDef, parallel: bool=False, **keywordArguments: Optional[str]):
+ def makeNumbaParameterSignatureElement(signatureElement: ast.arg):
+ if isinstance(signatureElement.annotation, ast.Subscript) and isinstance(signatureElement.annotation.slice, ast.Tuple):
+
+ annotationShape = signatureElement.annotation.slice.elts[0]
+ if isinstance(annotationShape, ast.Subscript) and isinstance(annotationShape.slice, ast.Tuple):
+ shapeAsListSlices = [ast.Slice() for axis in range(len(annotationShape.slice.elts))]
+ shapeAsListSlices[-1] = ast.Slice(step=ast.Constant(value=1))
+ shapeAST = ast.Tuple(elts=shapeAsListSlices, ctx=ast.Load())
+ else:
+ shapeAST = ast.Slice(step=ast.Constant(value=1))
+
+ annotationDtype = signatureElement.annotation.slice.elts[1]
+ if isinstance(annotationDtype, ast.Subscript) and isinstance(annotationDtype.slice, ast.Attribute):
+ datatypeAST = annotationDtype.slice.attr
+ else:
+ datatypeAST = None
+
+ ndarrayName = signatureElement.arg
+ Z0Z_hackyStr = hackSSOTdtype[ndarrayName]
+ Z0Z_hackyStr = Z0Z_hackyStr[0] + 'ata' + Z0Z_hackyStr[1:]
+ datatype_attr = keywordArguments.get(Z0Z_hackyStr, None) or datatypeAST or eval(Z0Z_hackyStr+'DEFAULT')
+
+ datatypeNumba = ast.Attribute(value=ast.Name(id='numba', ctx=ast.Load()), attr=datatype_attr, ctx=ast.Load())
+
+ return ast.Subscript(value=datatypeNumba, slice=shapeAST, ctx=ast.Load())
+
+ # callableSourceDecorators = [decorator for decorator in callableInlined.decorator_list]
+
+ listNumbaParameterSignature: List[ast.Subscript] = []
+ for parameter in astCallable.args.args:
+ signatureElement = makeNumbaParameterSignatureElement(parameter)
+ if signatureElement:
+ listNumbaParameterSignature.append(signatureElement)
+
+ astArgsNumbaSignature = ast.Tuple(elts=listNumbaParameterSignature, ctx=ast.Load())
+
+ parametersNumba = parametersNumbaDEFAULT if not parallel else ParametersNumba({**parametersNumbaDEFAULT, 'parallel': True})
+ listKeywordsNumbaSignature = [ast.keyword(arg=parameterName, value=ast.Constant(value=parameterValue)) for parameterName, parameterValue in parametersNumba.items()]
+
+ astDecoratorNumba = ast.Call(func=ast.Attribute(value=ast.Name(id='numba', ctx=ast.Load()), attr='jit', ctx=ast.Load()), args=[astArgsNumbaSignature], keywords=listKeywordsNumbaSignature)
+
+ astCallable.decorator_list = [astDecoratorNumba]
+ return astCallable
+
+ def getDictionaryEnumValues() -> Dict[str, int]:
  dictionaryEnumValues = {}
  for enumIndex in [indexMy, indexTrack]:
  for memberName, memberValue in enumIndex._member_map_.items():
  dictionaryEnumValues[f"{enumIndex.__name__}.{memberName}.value"] = memberValue.value
  return dictionaryEnumValues

- def unpackArrays(codeInlined: str, callableTarget: str) -> str:
+ def unpackArrays(codeInlined: str) -> str:
  dictionaryReplaceScalars = {
  'my[indexMy.dimensionsTotal.value]': 'dimensionsTotal',
  'my[indexMy.dimensionsUnconstrained.value]': 'dimensionsUnconstrained',
@@ -54,25 +139,32 @@ def unpackArrays(codeInlined: str, callableTarget: str) -> str:

  return codeInlined

- def inlineMapFoldingNumba(**keywordArguments):
- datatypeLarge = keywordArguments.get('datatypeLarge', datatypeLargeDEFAULT)
- datatypeMedium = keywordArguments.get('datatypeMedium', datatypeMediumDEFAULT)
- datatypeSmall = keywordArguments.get('datatypeSmall', datatypeSmallDEFAULT)
+ def inlineMapFoldingNumba(**keywordArguments: Optional[str]):
  dictionaryEnumValues = getDictionaryEnumValues()
  codeSource = inspect.getsource(algorithmSource)
  pathFilenameAlgorithm = pathlib.Path(inspect.getfile(algorithmSource))

- listCallables = [ 'countInitialize', 'countParallel', 'countSequential', ]
-
  listPathFilenamesDestination: list[pathlib.Path] = []
+ listCallables = [ 'countInitialize', 'countParallel', 'countSequential', ]
  for callableTarget in listCallables:
- skipEnum = (callableTarget == 'countInitialize')
- skipEnum = (callableTarget == 'countSequential')
- pathFilenameDestination = pathFilenameAlgorithm.parent / "syntheticModules" / pathFilenameAlgorithm.with_stem(callableTarget).name
- codeInlined, callableInlinedDecorators, importsRequired = makeInlineFunction(codeSource, callableTarget, dictionaryEnumValues, skipEnum, datatypeLarge=datatypeLarge, datatypeMedium=datatypeMedium, datatypeSmall=datatypeSmall)
- codeUnpacked = unpackArrays(codeInlined, callableTarget)
- pathFilenameDestination.write_text(importsRequired + "\n" + codeUnpacked)
- listPathFilenamesDestination.append(pathFilenameDestination)
+ codeParsed: ast.Module = ast.parse(codeSource, type_comments=True)
+ codeSourceImportStatements = {statement for statement in codeParsed.body if isinstance(statement, (ast.Import, ast.ImportFrom))}
+ dictionaryFunctions = {statement.name: statement for statement in codeParsed.body if isinstance(statement, ast.FunctionDef)}
+ callableInlinerWorkhorse = RecursiveInliner(dictionaryFunctions)
+ parallel = callableTarget == 'countParallel'
+ callableInlined = callableInlinerWorkhorse.inlineFunctionBody(callableTarget)
+ if callableInlined:
+ ast.fix_missing_locations(callableInlined)
+ callableDecorated = decorateCallableWithNumba(callableInlined, parallel, **keywordArguments)
+
+ importsRequired = "\n".join([ast.unparse(importStatement) for importStatement in codeSourceImportStatements])
+ callableInlined = ast.unparse(callableDecorated)
+ codeUnpacked = unpackArrays(callableInlined) if callableTarget == 'countSequential' else callableInlined
+ # inlinedCode = ast.unparse(ast.Module(body=[nodeInlined], type_ignores=[]))
+
+ pathFilenameDestination = pathFilenameAlgorithm.parent / "syntheticModules" / pathFilenameAlgorithm.with_stem(callableTarget).name
+ pathFilenameDestination.write_text(importsRequired + "\n" + codeUnpacked)
+ listPathFilenamesDestination.append(pathFilenameDestination)

  if __name__ == '__main__':
  inlineMapFoldingNumba()
@@ -1,44 +1,47 @@
- from mapFolding import indexMy, indexTrack
+ from numpy import integer
  import numba
- @numba.jit((numba.uint8[:,:,::1], numba.uint8[::1], numba.uint8[::1], numba.uint8[:,::1]), parallel=False, boundscheck=False, cache=True, error_model="numpy", fastmath=True, looplift=False, nogil=True, nopython=True)
- def countInitialize(connectionGraph, gapsWhere, my, track):
- while my[7] > 0:
- if my[7] <= 1 or track[1, 0] == 1:
- my[1] = my[0]
- my[3] = track[3, my[7] - 1]
- my[4] = 0
- while my[4] < my[0]:
- if connectionGraph[my[4], my[7], my[7]] == my[7]:
- my[1] -= 1
+ import numpy
+ from mapFolding import indexMy, indexTrack
+ from typing import Any, Tuple
+ @numba.jit((numba.uint8[:, :, ::1], numba.uint8[::1], numba.uint8[::1], numba.uint8[:, ::1]), _nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=False, inline='never', looplift=False, no_cfunc_wrapper=True, no_cpython_wrapper=True, nopython=True, parallel=False)
+ def countInitialize(connectionGraph: numpy.ndarray[Tuple[int, int, int], numpy.dtype[integer[Any]]], gapsWhere: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], my: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], track: numpy.ndarray[Tuple[int, int], numpy.dtype[integer[Any]]]):
+ while my[indexMy.leaf1ndex.value] > 0:
+ if my[indexMy.leaf1ndex.value] <= 1 or track[indexTrack.leafBelow.value, 0] == 1:
+ my[indexMy.dimensionsUnconstrained.value] = my[indexMy.dimensionsTotal.value]
+ my[indexMy.gap1ndexCeiling.value] = track[indexTrack.gapRangeStart.value, my[indexMy.leaf1ndex.value] - 1]
+ my[indexMy.indexDimension.value] = 0
+ while my[indexMy.indexDimension.value] < my[indexMy.dimensionsTotal.value]:
+ if connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], my[indexMy.leaf1ndex.value]] == my[indexMy.leaf1ndex.value]:
+ my[indexMy.dimensionsUnconstrained.value] -= 1
  else:
- my[8] = connectionGraph[my[4], my[7], my[7]]
- while my[8] != my[7]:
- gapsWhere[my[3]] = my[8]
- if track[2, my[8]] == 0:
- my[3] += 1
- track[2, my[8]] += 1
- my[8] = connectionGraph[my[4], my[7], track[1, my[8]]]
- my[4] += 1
- if not my[1]:
- my[5] = 0
- while my[5] < my[7]:
- gapsWhere[my[3]] = my[5]
- my[3] += 1
- my[5] += 1
- my[6] = my[2]
- while my[6] < my[3]:
- gapsWhere[my[2]] = gapsWhere[my[6]]
- if track[2, gapsWhere[my[6]]] == my[1]:
- my[2] += 1
- track[2, gapsWhere[my[6]]] = 0
- my[6] += 1
- if my[7] > 0:
- my[2] -= 1
- track[0, my[7]] = gapsWhere[my[2]]
- track[1, my[7]] = track[1, track[0, my[7]]]
- track[1, track[0, my[7]]] = my[7]
- track[0, track[1, my[7]]] = my[7]
- track[3, my[7]] = my[2]
- my[7] += 1
- if my[2] > 0:
+ my[indexMy.leafConnectee.value] = connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], my[indexMy.leaf1ndex.value]]
+ while my[indexMy.leafConnectee.value] != my[indexMy.leaf1ndex.value]:
+ gapsWhere[my[indexMy.gap1ndexCeiling.value]] = my[indexMy.leafConnectee.value]
+ if track[indexTrack.countDimensionsGapped.value, my[indexMy.leafConnectee.value]] == 0:
+ my[indexMy.gap1ndexCeiling.value] += 1
+ track[indexTrack.countDimensionsGapped.value, my[indexMy.leafConnectee.value]] += 1
+ my[indexMy.leafConnectee.value] = connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], track[indexTrack.leafBelow.value, my[indexMy.leafConnectee.value]]]
+ my[indexMy.indexDimension.value] += 1
+ if not my[indexMy.dimensionsUnconstrained.value]:
+ my[indexMy.indexLeaf.value] = 0
+ while my[indexMy.indexLeaf.value] < my[indexMy.leaf1ndex.value]:
+ gapsWhere[my[indexMy.gap1ndexCeiling.value]] = my[indexMy.indexLeaf.value]
+ my[indexMy.gap1ndexCeiling.value] += 1
+ my[indexMy.indexLeaf.value] += 1
+ my[indexMy.indexMiniGap.value] = my[indexMy.gap1ndex.value]
+ while my[indexMy.indexMiniGap.value] < my[indexMy.gap1ndexCeiling.value]:
+ gapsWhere[my[indexMy.gap1ndex.value]] = gapsWhere[my[indexMy.indexMiniGap.value]]
+ if track[indexTrack.countDimensionsGapped.value, gapsWhere[my[indexMy.indexMiniGap.value]]] == my[indexMy.dimensionsUnconstrained.value]:
+ my[indexMy.gap1ndex.value] += 1
+ track[indexTrack.countDimensionsGapped.value, gapsWhere[my[indexMy.indexMiniGap.value]]] = 0
+ my[indexMy.indexMiniGap.value] += 1
+ if my[indexMy.leaf1ndex.value] > 0:
+ my[indexMy.gap1ndex.value] -= 1
+ track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]] = gapsWhere[my[indexMy.gap1ndex.value]]
+ track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]] = track[indexTrack.leafBelow.value, track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]]
+ track[indexTrack.leafBelow.value, track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]] = my[indexMy.leaf1ndex.value]
+ track[indexTrack.leafAbove.value, track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]]] = my[indexMy.leaf1ndex.value]
+ track[indexTrack.gapRangeStart.value, my[indexMy.leaf1ndex.value]] = my[indexMy.gap1ndex.value]
+ my[indexMy.leaf1ndex.value] += 1
+ if my[indexMy.gap1ndex.value] > 0:
  return
@@ -1,51 +1,54 @@
- from mapFolding import indexMy, indexTrack
+ from numpy import integer
+ from typing import Any, Tuple
  import numba
- @numba.jit((numba.uint8[:,:,::1], numba.int64[::1], numba.uint8[::1], numba.uint8[::1], numba.uint8[:,::1]), parallel=True, boundscheck=False, cache=True, error_model="numpy", fastmath=True, looplift=False, nogil=True, nopython=True)
- def countParallel(connectionGraph, foldGroups, gapsWherePARALLEL, myPARALLEL, trackPARALLEL):
- for indexSherpa in numba.prange(myPARALLEL[9]):
+ from mapFolding import indexMy, indexTrack
+ import numpy
+ @numba.jit((numba.uint8[:, :, ::1], numba.int64[::1], numba.uint8[::1], numba.uint8[::1], numba.uint8[:, ::1]), _nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=False, inline='never', looplift=False, no_cfunc_wrapper=True, no_cpython_wrapper=True, nopython=True, parallel=True)
+ def countParallel(connectionGraph: numpy.ndarray[Tuple[int, int, int], numpy.dtype[integer[Any]]], foldGroups: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], gapsWherePARALLEL: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], myPARALLEL: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], trackPARALLEL: numpy.ndarray[Tuple[int, int], numpy.dtype[integer[Any]]]):
+ for indexSherpa in numba.prange(myPARALLEL[indexMy.taskDivisions.value]):
  gapsWhere = gapsWherePARALLEL.copy()
  my = myPARALLEL.copy()
- my[10] = indexSherpa
+ my[indexMy.taskIndex.value] = indexSherpa
  track = trackPARALLEL.copy()
  groupsOfFolds: int = 0
- while my[7] > 0:
- if my[7] <= 1 or track[1, 0] == 1:
- if my[7] > foldGroups[-1]:
+ while my[indexMy.leaf1ndex.value] > 0:
+ if my[indexMy.leaf1ndex.value] <= 1 or track[indexTrack.leafBelow.value, 0] == 1:
+ if my[indexMy.leaf1ndex.value] > foldGroups[-1]:
  groupsOfFolds = groupsOfFolds + 1
  else:
- my[1] = my[0]
- my[3] = track[3, my[7] - 1]
- my[4] = 0
- while my[4] < my[0]:
- if connectionGraph[my[4], my[7], my[7]] == my[7]:
- my[1] -= 1
+ my[indexMy.dimensionsUnconstrained.value] = my[indexMy.dimensionsTotal.value]
+ my[indexMy.gap1ndexCeiling.value] = track[indexTrack.gapRangeStart.value, my[indexMy.leaf1ndex.value] - 1]
+ my[indexMy.indexDimension.value] = 0
+ while my[indexMy.indexDimension.value] < my[indexMy.dimensionsTotal.value]:
+ if connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], my[indexMy.leaf1ndex.value]] == my[indexMy.leaf1ndex.value]:
+ my[indexMy.dimensionsUnconstrained.value] -= 1
  else:
- my[8] = connectionGraph[my[4], my[7], my[7]]
- while my[8] != my[7]:
- if my[7] != my[9] or my[8] % my[9] == my[10]:
- gapsWhere[my[3]] = my[8]
- if track[2, my[8]] == 0:
- my[3] += 1
- track[2, my[8]] += 1
- my[8] = connectionGraph[my[4], my[7], track[1, my[8]]]
- my[4] += 1
- my[6] = my[2]
- while my[6] < my[3]:
- gapsWhere[my[2]] = gapsWhere[my[6]]
- if track[2, gapsWhere[my[6]]] == my[1]:
- my[2] += 1
- track[2, gapsWhere[my[6]]] = 0
- my[6] += 1
- while my[7] > 0 and my[2] == track[3, my[7] - 1]:
- my[7] -= 1
- track[1, track[0, my[7]]] = track[1, my[7]]
- track[0, track[1, my[7]]] = track[0, my[7]]
- if my[7] > 0:
- my[2] -= 1
- track[0, my[7]] = gapsWhere[my[2]]
- track[1, my[7]] = track[1, track[0, my[7]]]
- track[1, track[0, my[7]]] = my[7]
- track[0, track[1, my[7]]] = my[7]
- track[3, my[7]] = my[2]
- my[7] += 1
- foldGroups[my[10]] = groupsOfFolds
+ my[indexMy.leafConnectee.value] = connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], my[indexMy.leaf1ndex.value]]
+ while my[indexMy.leafConnectee.value] != my[indexMy.leaf1ndex.value]:
+ if my[indexMy.leaf1ndex.value] != my[indexMy.taskDivisions.value] or my[indexMy.leafConnectee.value] % my[indexMy.taskDivisions.value] == my[indexMy.taskIndex.value]:
+ gapsWhere[my[indexMy.gap1ndexCeiling.value]] = my[indexMy.leafConnectee.value]
+ if track[indexTrack.countDimensionsGapped.value, my[indexMy.leafConnectee.value]] == 0:
+ my[indexMy.gap1ndexCeiling.value] += 1
+ track[indexTrack.countDimensionsGapped.value, my[indexMy.leafConnectee.value]] += 1
+ my[indexMy.leafConnectee.value] = connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], track[indexTrack.leafBelow.value, my[indexMy.leafConnectee.value]]]
+ my[indexMy.indexDimension.value] += 1
+ my[indexMy.indexMiniGap.value] = my[indexMy.gap1ndex.value]
+ while my[indexMy.indexMiniGap.value] < my[indexMy.gap1ndexCeiling.value]:
+ gapsWhere[my[indexMy.gap1ndex.value]] = gapsWhere[my[indexMy.indexMiniGap.value]]
+ if track[indexTrack.countDimensionsGapped.value, gapsWhere[my[indexMy.indexMiniGap.value]]] == my[indexMy.dimensionsUnconstrained.value]:
+ my[indexMy.gap1ndex.value] += 1
+ track[indexTrack.countDimensionsGapped.value, gapsWhere[my[indexMy.indexMiniGap.value]]] = 0
+ my[indexMy.indexMiniGap.value] += 1
+ while my[indexMy.leaf1ndex.value] > 0 and my[indexMy.gap1ndex.value] == track[indexTrack.gapRangeStart.value, my[indexMy.leaf1ndex.value] - 1]:
+ my[indexMy.leaf1ndex.value] -= 1
+ track[indexTrack.leafBelow.value, track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]] = track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]]
+ track[indexTrack.leafAbove.value, track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]]] = track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]
+ if my[indexMy.leaf1ndex.value] > 0:
+ my[indexMy.gap1ndex.value] -= 1
+ track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]] = gapsWhere[my[indexMy.gap1ndex.value]]
+ track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]] = track[indexTrack.leafBelow.value, track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]]
+ track[indexTrack.leafBelow.value, track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]] = my[indexMy.leaf1ndex.value]
+ track[indexTrack.leafAbove.value, track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]]] = my[indexMy.leaf1ndex.value]
+ track[indexTrack.gapRangeStart.value, my[indexMy.leaf1ndex.value]] = my[indexMy.gap1ndex.value]
+ my[indexMy.leaf1ndex.value] += 1
+ foldGroups[my[indexMy.taskIndex.value]] = groupsOfFolds
@@ -1,7 +1,10 @@
- from mapFolding import indexMy, indexTrack
+ from numpy import integer
+ from typing import Any, Tuple
  import numba
- @numba.jit((numba.uint8[:,:,::1], numba.int64[::1], numba.uint8[::1], numba.uint8[::1], numba.uint8[:,::1]), parallel=False, boundscheck=False, cache=True, error_model="numpy", fastmath=True, looplift=False, nogil=True, nopython=True)
- def countSequential(connectionGraph, foldGroups, gapsWhere, my, track):
+ from mapFolding import indexMy, indexTrack
+ import numpy
+ @numba.jit((numba.uint8[:, :, ::1], numba.int64[::1], numba.uint8[::1], numba.uint8[::1], numba.uint8[:, ::1]), _nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=False, inline='never', looplift=False, no_cfunc_wrapper=True, no_cpython_wrapper=True, nopython=True, parallel=False)
+ def countSequential(connectionGraph: numpy.ndarray[Tuple[int, int, int], numpy.dtype[integer[Any]]], foldGroups: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], gapsWhere: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], my: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], track: numpy.ndarray[Tuple[int, int], numpy.dtype[integer[Any]]]):
  doFindGaps = True
  dimensionsTotal = my[indexMy.dimensionsTotal.value]
  dimensionsUnconstrained = my[indexMy.dimensionsUnconstrained.value]
@@ -1,25 +0,0 @@
- benchmarks/benchmarking.py,sha256=HD_0NSvuabblg94ftDre6LFnXShTe8MYj3hIodW-zV0,3076
- reference/flattened.py,sha256=6blZ2Y9G8mu1F3gV8SKndPE398t2VVFlsgKlyeJ765A,16538
- reference/hunterNumba.py,sha256=HWndRgsajOf76rbb2LDNEZ6itsdYbyV-k3wgOFjeR6c,7104
- reference/irvineJavaPort.py,sha256=Sj-63Z-OsGuDoEBXuxyjRrNmmyl0d7Yz_XuY7I47Oyg,4250
- reference/jax.py,sha256=rojyK80lOATtbzxjGOHWHZngQa47CXCLJHZwIdN2MwI,14955
- reference/lunnan.py,sha256=XEcql_gxvCCghb6Or3qwmPbn4IZUbZTaSmw_fUjRxZE,5037
- reference/lunnanNumpy.py,sha256=HqDgSwTOZA-G0oophOEfc4zs25Mv4yw2aoF1v8miOLk,4653
- reference/lunnanWhile.py,sha256=7NY2IKO5XBgol0aWWF_Fi-7oTL9pvu_z6lB0TF1uVHk,4063
- reference/rotatedEntryPoint.py,sha256=z0QyDQtnMvXNj5ntWzzJUQUMFm1-xHGLVhtYzwmczUI,11530
- reference/total_countPlus1vsPlusN.py,sha256=usenM8Yn_G1dqlPl7NKKkcnbohBZVZBXTQRm2S3_EDA,8106
- someAssemblyRequired/__init__.py,sha256=iZpBslk8OnCmaUoqAivva7Hl7GJYrjwRV_owcBbgfcM,87
- someAssemblyRequired/generalizeSourceCode.py,sha256=6LsUe-5uqGXcrtXWrP70BrUgnjkJKrQo8y1KyFZdb-k,6024
- someAssemblyRequired/getLLVMforNoReason.py,sha256=FtJzw2pZS3A4NimWdZsegXaU-vKeCw8m67kcfb5wvGM,894
- someAssemblyRequired/makeJob.py,sha256=W85W7vWsNsu9mBsgU3Cx-FPYIdLLnyzR4GwdcYsWZv4,947
- someAssemblyRequired/synthesizeJob.py,sha256=xLak-ZZ1zQ92jBobhJqbnA1Fua9ofiRvLdK1fmD8s_s,7271
- someAssemblyRequired/synthesizeModules.py,sha256=yR9oFsZe3sbgDe2XoS9MbaIqNz-hOq6qOFzHMhPn4rc,3737
- syntheticModules/__init__.py,sha256=nDtS5UFMKN-F5pTp0qKA0J0I-XR3n3OFxV2bosieBu8,131
- syntheticModules/countInitialize.py,sha256=rRn1gtR1PWxpQ8Mw-_QRZT7ujRP_1H04QizJE9RlZ7o,1839
- syntheticModules/countParallel.py,sha256=aZQvSEeWeJ-47eyMyIisq1baAAl-H6W0RYIlVPv_D_U,2559
- syntheticModules/countSequential.py,sha256=Uf1Zd-r3wsmVoHOWyNOQQiFO6k6xmFG6Oo3dqwAqBQo,3216
- mapFolding-0.3.3.dist-info/METADATA,sha256=SfukFXyZtDIdmQ4Gzgl8ePyov_factyZ4Te7LSh0UYc,7530
- mapFolding-0.3.3.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- mapFolding-0.3.3.dist-info/entry_points.txt,sha256=F3OUeZR1XDTpoH7k3wXuRb3KF_kXTTeYhu5AGK1SiOQ,146
- mapFolding-0.3.3.dist-info/top_level.txt,sha256=tZHrMCdFq5ghJY_MAv_GhcpmQecelcIcoxgzLnF1-V4,59
- mapFolding-0.3.3.dist-info/RECORD,,