mapFolding 0.12.0__py3-none-any.whl → 0.12.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. mapFolding/__init__.py +42 -18
  2. mapFolding/_theSSOT.py +137 -0
  3. mapFolding/basecamp.py +28 -18
  4. mapFolding/beDRY.py +21 -19
  5. mapFolding/dataBaskets.py +170 -18
  6. mapFolding/datatypes.py +109 -1
  7. mapFolding/filesystemToolkit.py +38 -33
  8. mapFolding/oeis.py +209 -93
  9. mapFolding/someAssemblyRequired/RecipeJob.py +120 -9
  10. mapFolding/someAssemblyRequired/__init__.py +35 -38
  11. mapFolding/someAssemblyRequired/_toolIfThis.py +80 -18
  12. mapFolding/someAssemblyRequired/_toolkitContainers.py +123 -45
  13. mapFolding/someAssemblyRequired/infoBooth.py +37 -2
  14. mapFolding/someAssemblyRequired/makeAllModules.py +712 -0
  15. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +111 -48
  16. mapFolding/someAssemblyRequired/toolkitNumba.py +171 -19
  17. mapFolding/someAssemblyRequired/transformationTools.py +93 -49
  18. mapfolding-0.12.2.dist-info/METADATA +167 -0
  19. mapfolding-0.12.2.dist-info/RECORD +53 -0
  20. {mapfolding-0.12.0.dist-info → mapfolding-0.12.2.dist-info}/WHEEL +1 -1
  21. tests/__init__.py +28 -44
  22. tests/conftest.py +66 -61
  23. tests/test_computations.py +39 -82
  24. tests/test_filesystem.py +25 -1
  25. tests/test_oeis.py +30 -1
  26. tests/test_other.py +27 -0
  27. tests/test_tasks.py +31 -1
  28. mapFolding/someAssemblyRequired/Z0Z_makeAllModules.py +0 -433
  29. mapFolding/theSSOT.py +0 -34
  30. mapfolding-0.12.0.dist-info/METADATA +0 -184
  31. mapfolding-0.12.0.dist-info/RECORD +0 -53
  32. {mapfolding-0.12.0.dist-info → mapfolding-0.12.2.dist-info}/entry_points.txt +0 -0
  33. {mapfolding-0.12.0.dist-info → mapfolding-0.12.2.dist-info}/licenses/LICENSE +0 -0
  34. {mapfolding-0.12.0.dist-info → mapfolding-0.12.2.dist-info}/top_level.txt +0 -0
@@ -1,28 +1,39 @@
1
1
  """
2
- AST Container Classes for Python Code Generation and Transformation
3
-
4
- This module provides specialized container classes that organize AST nodes, imports, and program structure for code
5
- generation and transformation. These classes form the organizational backbone of the code generation system, enabling:
6
-
7
- 1. Tracking and managing imports with LedgerOfImports.
8
- 2. Packaging function definitions with their dependencies via IngredientsFunction.
9
- 3. Structuring complete modules with IngredientsModule.
10
- 4. Configuring code synthesis with RecipeSynthesizeFlow.
11
- 5. Organizing decomposed dataclass representations with ShatteredDataclass.
12
-
13
- Together, these container classes implement a component-based architecture for programmatic generation of
14
- high-performance code. They maintain a clean separation between structure and content, allowing transformations to be
15
- applied systematically while preserving relationships between code elements.
16
-
17
- The containers work in conjunction with transformation tools that manipulate the contained AST nodes to implement
18
- specific optimizations and transformations.
2
+ Map folding AST transformation system: Dataclass decomposition containers and reconstruction logic.
3
+
4
+ This module provides the structural foundation for the map folding AST transformation system by
5
+ implementing container classes that decompose dataclass definitions into their constituent AST
6
+ components. Building upon the pattern recognition capabilities established in the foundational layer,
7
+ these containers enable the systematic transformation of dataclass-based map folding algorithms
8
+ into Numba-compatible implementations.
9
+
10
+ The decomposition process addresses a fundamental challenge in high-performance computing: Numba's
11
+ just-in-time compiler cannot directly process dataclass instances but excels at optimizing
12
+ operations on primitive values and tuples. The containers bridge this gap by extracting individual
13
+ fields, type annotations, initialization patterns, and reconstruction logic as separate AST nodes
14
+ that can be manipulated and recombined for different compilation contexts.
15
+
16
+ Key decomposition capabilities include field extraction from dataclass definitions into function
17
+ parameters, type annotation preservation for static analysis, constructor pattern generation for
18
+ different field types, instance reconstruction logic for result packaging, and import dependency
19
+ tracking for generated code modules. These components form the building blocks for subsequent
20
+ transformation stages that generate specialized modules with embedded constants, eliminated dead
21
+ code paths, and optimized execution strategies.
22
+
23
+ The containers support the complete transformation system from high-level dataclass algorithms
24
+ to low-level optimized functions while maintaining semantic equivalence and type safety throughout
25
+ the compilation process.
19
26
  """
20
27
 
21
- from astToolkit import ClassIsAndAttribute, DOT, LedgerOfImports, Make, NodeTourist, str_nameDOTname, Then
28
+ from astToolkit import (
29
+ ClassIsAndAttribute, DOT, hasDOTtarget_NameOrAttributeOrSubscript, identifierDotAttribute,
30
+ LedgerOfImports, Make, NodeTourist, Then,
31
+ )
22
32
  from collections.abc import Callable
23
33
  from copy import deepcopy
24
- from mapFolding.someAssemblyRequired import IfThis, raiseIfNoneGitHubIssueNumber3
34
+ from mapFolding.someAssemblyRequired import IfThis
25
35
  from typing import Any, cast
36
+ from Z0Z_tools import raiseIfNone
26
37
  import ast
27
38
  import dataclasses
28
39
 
@@ -32,6 +43,22 @@ dummyTuple = Make.Tuple([Make.Name("dummyElement")])
32
43
 
33
44
  @dataclasses.dataclass
34
45
  class ShatteredDataclass:
46
+ """
47
+ Container for decomposed dataclass components organized as AST nodes for code generation.
48
+
49
+ This class holds the decomposed representation of a dataclass, breaking it down into individual
50
+ AST components that can be manipulated and recombined for different code generation contexts.
51
+ It is particularly essential for transforming dataclass-based algorithms into Numba-compatible
52
+ functions where dataclass instances cannot be directly used.
53
+
54
+ The decomposition enables individual field access, type annotation extraction, and parameter
55
+ specification generation while maintaining the structural relationships needed to reconstruct
56
+ equivalent functionality using primitive values and tuples.
57
+
58
+ All AST components are organized to support both function parameter specification (unpacking
59
+ dataclass fields into individual parameters) and result reconstruction (packing individual
60
+ values back into dataclass instances).
61
+ """
35
62
  countingVariableAnnotation: ast.expr
36
63
  """Type annotation for the counting variable extracted from the dataclass."""
37
64
 
@@ -39,39 +66,40 @@ class ShatteredDataclass:
39
66
  """AST name node representing the counting variable identifier."""
40
67
 
41
68
  field2AnnAssign: dict[str, ast.AnnAssign | ast.Assign] = dataclasses.field(default_factory=lambda: dict[str, ast.AnnAssign | ast.Assign]())
42
- """Maps field names to their corresponding AST call expressions."""
69
+ """Maps field names to their corresponding AST assignment expressions for initialization."""
43
70
 
44
71
  Z0Z_field2AnnAssign: dict[str, tuple[ast.AnnAssign | ast.Assign, str]] = dataclasses.field(default_factory=lambda: dict[str, tuple[ast.AnnAssign | ast.Assign, str]]())
72
+ """Temporary mapping for field assignments with constructor type information."""
45
73
 
46
74
  fragments4AssignmentOrParameters: ast.Tuple = dummyTuple
47
- """AST tuple used as target for assignment to capture returned fragments."""
75
+ """AST tuple used as target for assignment to capture returned field values."""
48
76
 
49
77
  imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
50
- """Import records for the dataclass and its constituent parts."""
78
+ """Import records for the dataclass and its constituent field types."""
51
79
 
52
80
  list_argAnnotated4ArgumentsSpecification: list[ast.arg] = dataclasses.field(default_factory=lambda: list[ast.arg]())
53
- """Function argument nodes with annotations for parameter specification."""
81
+ """Function argument nodes with type annotations for parameter specification."""
54
82
 
55
83
  list_keyword_field__field4init: list[ast.keyword] = dataclasses.field(default_factory=lambda: list[ast.keyword]())
56
- """Keyword arguments for dataclass initialization with field=field format."""
84
+ """Keyword arguments for dataclass initialization using field=field format."""
57
85
 
58
86
  listAnnotations: list[ast.expr] = dataclasses.field(default_factory=lambda: list[ast.expr]())
59
- """Type annotations for each dataclass field."""
87
+ """Type annotations for each dataclass field in declaration order."""
60
88
 
61
89
  listName4Parameters: list[ast.Name] = dataclasses.field(default_factory=lambda: list[ast.Name]())
62
90
  """Name nodes for each dataclass field used as function parameters."""
63
91
 
64
92
  listUnpack: list[ast.AnnAssign] = dataclasses.field(default_factory=lambda: list[ast.AnnAssign]())
65
- """Annotated assignment statements to extract fields from dataclass."""
93
+ """Annotated assignment statements to extract individual fields from dataclass instances."""
66
94
 
67
95
  map_stateDOTfield2Name: dict[ast.AST, ast.Name] = dataclasses.field(default_factory=lambda: dict[ast.AST, ast.Name]())
68
- """Maps AST expressions to Name nodes for find-replace operations."""
96
+ """Maps dataclass attribute access expressions to field name nodes for find-replace operations."""
69
97
 
70
98
  repack: ast.Assign = dummyAssign
71
- """AST assignment statement that reconstructs the original dataclass instance."""
99
+ """AST assignment statement that reconstructs the original dataclass instance from individual fields."""
72
100
 
73
101
  signatureReturnAnnotation: ast.Subscript = dummySubscript
74
- """tuple-based return type annotation for function definitions."""
102
+ """Tuple-based return type annotation for functions returning decomposed field values."""
75
103
 
76
104
  @dataclasses.dataclass
77
105
  class DeReConstructField2ast:
@@ -82,7 +110,6 @@ class DeReConstructField2ast:
82
110
  representations needed for code generation. It handles the conversion of field
83
111
  attributes, type annotations, and metadata into AST constructs that can be used
84
112
  to reconstruct the field in generated code.
85
-
86
113
  The class is particularly important for decomposing dataclass fields (like those in
87
114
  ComputationState) to enable their use in specialized contexts like Numba-optimized
88
115
  functions, where the full dataclass cannot be directly used but its contents need
@@ -91,33 +118,88 @@ class DeReConstructField2ast:
91
118
  Each field is processed according to its type and metadata to create appropriate
92
119
  variable declarations, type annotations, and initialization code as AST nodes.
93
120
  """
94
- dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[str_nameDOTname]
121
+
122
+ dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[identifierDotAttribute]
123
+ """Logical path to the module containing the source dataclass definition."""
124
+
95
125
  dataclassClassDef: dataclasses.InitVar[ast.ClassDef]
96
- dataclassesDOTdataclassInstance_Identifier: dataclasses.InitVar[str]
126
+ """AST class definition node for the source dataclass."""
127
+
128
+ dataclassesDOTdataclassInstanceIdentifier: dataclasses.InitVar[str]
129
+ """Variable identifier for the dataclass instance in generated code."""
130
+
97
131
  field: dataclasses.InitVar[dataclasses.Field[Any]]
132
+ """Dataclass field object to be transformed into AST components."""
98
133
 
99
134
  ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
135
+ """Import tracking for types and modules required by this field."""
100
136
 
101
137
  name: str = dataclasses.field(init=False)
138
+ """Field name extracted from the dataclass field definition."""
139
+
102
140
  typeBuffalo: type[Any] | str | Any = dataclasses.field(init=False)
141
+ """Type annotation of the field as specified in the dataclass."""
142
+
103
143
  default: Any | None = dataclasses.field(init=False)
144
+ """Default value for the field, or None if no default is specified."""
145
+
104
146
  default_factory: Callable[..., Any] | None = dataclasses.field(init=False)
147
+ """Default factory function for the field, or None if not specified."""
148
+
105
149
  repr: bool = dataclasses.field(init=False)
150
+ """Whether the field should be included in the string representation."""
151
+
106
152
  hash: bool | None = dataclasses.field(init=False)
153
+ """Whether the field should be included in hash computation."""
154
+
107
155
  init: bool = dataclasses.field(init=False)
156
+ """Whether the field should be included in the generated __init__ method."""
157
+
108
158
  compare: bool = dataclasses.field(init=False)
159
+ """Whether the field should be included in comparison operations."""
160
+
109
161
  metadata: dict[Any, Any] = dataclasses.field(init=False)
162
+ """Field metadata dictionary containing additional configuration information."""
163
+
110
164
  kw_only: bool = dataclasses.field(init=False)
165
+ """Whether the field must be specified as a keyword-only argument."""
111
166
 
112
167
  astName: ast.Name = dataclasses.field(init=False)
168
+ """AST name node representing the field identifier."""
169
+
113
170
  ast_keyword_field__field: ast.keyword = dataclasses.field(init=False)
171
+ """AST keyword argument for dataclass initialization using field=field pattern."""
172
+
114
173
  ast_nameDOTname: ast.Attribute = dataclasses.field(init=False)
174
+ """AST attribute access expression for accessing the field from an instance."""
175
+
115
176
  astAnnotation: ast.expr = dataclasses.field(init=False)
177
+ """AST expression representing the field's type annotation."""
178
+
116
179
  ast_argAnnotated: ast.arg = dataclasses.field(init=False)
180
+ """AST function argument with type annotation for parameter specification."""
181
+
117
182
  astAnnAssignConstructor: ast.AnnAssign|ast.Assign = dataclasses.field(init=False)
118
- Z0Z_hack: tuple[ast.AnnAssign|ast.Assign, str] = dataclasses.field(init=False)
183
+ """AST assignment statement for field initialization with appropriate constructor."""
119
184
 
120
- def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstance_Identifier: str, field: dataclasses.Field[Any]) -> None:
185
+ Z0Z_hack: tuple[ast.AnnAssign|ast.Assign, str] = dataclasses.field(init=False)
186
+ """Temporary tuple containing assignment statement and constructor type information."""
187
+
188
+ def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: identifierDotAttribute, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstanceIdentifier: str, field: dataclasses.Field[Any]) -> None:
189
+ """
190
+ Initialize AST components based on the provided dataclass field.
191
+
192
+ This method extracts field attributes and constructs corresponding AST nodes
193
+ for various code generation contexts. It handles special cases for array types,
194
+ scalar types, and complex type annotations, creating appropriate constructor
195
+ calls and import requirements.
196
+
197
+ Parameters:
198
+ dataclassesDOTdataclassLogicalPathModule: Module path containing the dataclass
199
+ dataclassClassDef: AST class definition for type annotation extraction
200
+ dataclassesDOTdataclassInstanceIdentifier: Instance variable name for attribute access
201
+ field: Dataclass field to transform
202
+ """
121
203
  self.compare = field.compare
122
204
  self.default = field.default if field.default is not dataclasses.MISSING else None
123
205
  self.default_factory = field.default_factory if field.default_factory is not dataclasses.MISSING else None
@@ -131,21 +213,17 @@ class DeReConstructField2ast:
131
213
 
132
214
  self.astName = Make.Name(self.name)
133
215
  self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
134
- self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)
135
-
136
- sherpa = NodeTourist( # pyright: ignore[reportUnknownVariableType]
137
- findThis=ClassIsAndAttribute.targetIs(ast.AnnAssign, IfThis.isName_Identifier(self.name))
138
- , doThat=Then.extractIt(DOT.annotation) # pyright: ignore[reportArgumentType]
139
- ).captureLastMatch(dataclassClassDef)
216
+ self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstanceIdentifier), self.name)
140
217
 
141
- if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
142
- else: self.astAnnotation = sherpa
218
+ findThis=ClassIsAndAttribute.targetIs(ast.AnnAssign, IfThis.isNameIdentifier(self.name))
219
+ doThat=cast(Callable[[hasDOTtarget_NameOrAttributeOrSubscript], ast.expr], Then.extractIt(DOT.annotation))
220
+ self.astAnnotation = raiseIfNone(NodeTourist(findThis, doThat).captureLastMatch(dataclassClassDef))
143
221
 
144
- self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation) # pyright: ignore[reportUnknownArgumentType, reportUnknownMemberType]
222
+ self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)
145
223
 
146
224
  dtype = self.metadata.get('dtype', None)
147
225
  if dtype:
148
- moduleWithLogicalPath: str_nameDOTname = 'numpy'
226
+ moduleWithLogicalPath: identifierDotAttribute = 'numpy'
149
227
  annotationType = 'ndarray'
150
228
  self.ledger.addImportFrom_asStr(moduleWithLogicalPath, annotationType)
151
229
  self.ledger.addImportFrom_asStr(moduleWithLogicalPath, 'dtype')
@@ -171,4 +249,4 @@ class DeReConstructField2ast:
171
249
  self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, takeTheTuple)
172
250
  self.Z0Z_hack = (self.astAnnAssignConstructor, elementConstructor)
173
251
  if isinstance(self.astAnnotation, ast.Name):
174
- self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id) # pyright: ignore [reportUnknownArgumentType, reportUnknownMemberType, reportIJustCalledATypeGuardMethod_WTF]
252
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id)
@@ -1,12 +1,36 @@
1
+ """
2
+ Configuration constants and computational complexity estimates for map folding operations.
3
+
4
+ Provides default identifiers for code generation, module organization, and computational
5
+ resource planning. The module serves as a central registry for configuration values
6
+ used throughout the map folding system, particularly for synthetic module generation
7
+ and optimization decision-making.
8
+
9
+ The complexity estimates enable informed choices about computational strategies based
10
+ on empirical measurements and theoretical analysis of map folding algorithms for
11
+ specific dimensional configurations.
12
+ """
13
+
1
14
  algorithmSourceModuleDEFAULT: str = 'daoOfMapFolding'
15
+ """Default identifier for the algorithm source module containing the base implementation."""
16
+
2
17
  dataclassInstanceIdentifierDEFAULT: str = 'state'
18
+ """Default variable name for dataclass instances in generated code."""
19
+
3
20
  dataPackingModuleIdentifierDEFAULT: str = 'dataPacking'
21
+ """Default identifier for modules containing data packing and unpacking functions."""
22
+
4
23
  logicalPathInfixDEFAULT: str = 'syntheticModules'
24
+ """Default path component for organizing synthetic generated modules."""
25
+
5
26
  sourceCallableDispatcherDEFAULT: str = 'doTheNeedful'
27
+ """Default identifier for dispatcher functions that route computational tasks."""
28
+
6
29
  sourceCallableIdentifierDEFAULT: str = 'count'
7
- theCountingIdentifierDEFAULT: str = 'groupsOfFolds'
30
+ """Default identifier for the core counting function in algorithms."""
8
31
 
9
- class raiseIfNoneGitHubIssueNumber3(Exception): pass
32
+ theCountingIdentifierDEFAULT: str = 'groupsOfFolds'
33
+ """Default identifier for the primary counting variable in map folding computations."""
10
34
 
11
35
  dictionaryEstimates: dict[tuple[int, ...], int] = {
12
36
  (2,2,2,2,2,2,2,2): 798148657152000,
@@ -15,3 +39,14 @@ dictionaryEstimates: dict[tuple[int, ...], int] = {
15
39
  (3,3,3,3): 85109616000000000000000000000000,
16
40
  (8,8): 791274195985524900,
17
41
  }
42
+ """
43
+ Registry of computational complexity estimates for specific map dimension configurations.
44
+
45
+ Maps dimensional tuples to estimated fold counts based on empirical measurements and
46
+ theoretical analysis. These estimates guide optimization decisions and resource planning
47
+ for computational tasks with known dimensional parameters.
48
+
49
+ The estimates represent the expected number of computational operations or fold
50
+ configurations for the given map dimensions, helping determine appropriate optimization
51
+ strategies and computational resource allocation.
52
+ """