mapFolding 0.12.1__py3-none-any.whl → 0.12.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. mapFolding/__init__.py +46 -20
  2. mapFolding/_theSSOT.py +81 -0
  3. mapFolding/_theTypes.py +148 -0
  4. mapFolding/basecamp.py +62 -47
  5. mapFolding/beDRY.py +100 -73
  6. mapFolding/dataBaskets.py +226 -31
  7. mapFolding/filesystemToolkit.py +161 -107
  8. mapFolding/oeis.py +388 -174
  9. mapFolding/reference/flattened.py +1 -1
  10. mapFolding/someAssemblyRequired/RecipeJob.py +146 -20
  11. mapFolding/someAssemblyRequired/__init__.py +60 -38
  12. mapFolding/someAssemblyRequired/_toolIfThis.py +125 -35
  13. mapFolding/someAssemblyRequired/_toolkitContainers.py +125 -44
  14. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +35 -26
  15. mapFolding/someAssemblyRequired/infoBooth.py +37 -2
  16. mapFolding/someAssemblyRequired/makeAllModules.py +785 -0
  17. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +161 -74
  18. mapFolding/someAssemblyRequired/toolkitNumba.py +218 -36
  19. mapFolding/someAssemblyRequired/transformationTools.py +125 -58
  20. mapfolding-0.12.3.dist-info/METADATA +163 -0
  21. mapfolding-0.12.3.dist-info/RECORD +53 -0
  22. {mapfolding-0.12.1.dist-info → mapfolding-0.12.3.dist-info}/WHEEL +1 -1
  23. tests/__init__.py +28 -44
  24. tests/conftest.py +66 -61
  25. tests/test_computations.py +64 -89
  26. tests/test_filesystem.py +25 -1
  27. tests/test_oeis.py +37 -7
  28. tests/test_other.py +29 -2
  29. tests/test_tasks.py +30 -2
  30. mapFolding/datatypes.py +0 -18
  31. mapFolding/someAssemblyRequired/Z0Z_makeAllModules.py +0 -433
  32. mapFolding/theSSOT.py +0 -34
  33. mapfolding-0.12.1.dist-info/METADATA +0 -184
  34. mapfolding-0.12.1.dist-info/RECORD +0 -53
  35. {mapfolding-0.12.1.dist-info → mapfolding-0.12.3.dist-info}/entry_points.txt +0 -0
  36. {mapfolding-0.12.1.dist-info → mapfolding-0.12.3.dist-info}/licenses/LICENSE +0 -0
  37. {mapfolding-0.12.1.dist-info → mapfolding-0.12.3.dist-info}/top_level.txt +0 -0
@@ -1,27 +1,35 @@
1
1
  """
2
- AST Container Classes for Python Code Generation and Transformation
3
-
4
- This module provides specialized container classes that organize AST nodes, imports, and program structure for code
5
- generation and transformation. These classes form the organizational backbone of the code generation system, enabling:
6
-
7
- 1. Tracking and managing imports with LedgerOfImports.
8
- 2. Packaging function definitions with their dependencies via IngredientsFunction.
9
- 3. Structuring complete modules with IngredientsModule.
10
- 4. Configuring code synthesis with RecipeSynthesizeFlow.
11
- 5. Organizing decomposed dataclass representations with ShatteredDataclass.
12
-
13
- Together, these container classes implement a component-based architecture for programmatic generation of
14
- high-performance code. They maintain a clean separation between structure and content, allowing transformations to be
15
- applied systematically while preserving relationships between code elements.
16
-
17
- The containers work in conjunction with transformation tools that manipulate the contained AST nodes to implement
18
- specific optimizations and transformations.
2
+ Map folding AST transformation system: Dataclass decomposition containers and reconstruction logic.
3
+
4
+ This module provides the structural foundation for the map folding AST transformation system by
5
+ implementing container classes that decompose dataclass definitions into their constituent AST
6
+ components. Building upon the pattern recognition capabilities established in the foundational layer,
7
+ these containers enable the systematic transformation of dataclass-based map folding algorithms
8
+ into Numba-compatible implementations.
9
+
10
+ The decomposition process addresses a fundamental challenge in high-performance computing: Numba's
11
+ just-in-time compiler cannot directly process dataclass instances but excels at optimizing
12
+ operations on primitive values and tuples. The containers bridge this gap by extracting individual
13
+ fields, type annotations, initialization patterns, and reconstruction logic as separate AST nodes
14
+ that can be manipulated and recombined for different compilation contexts.
15
+
16
+ Key decomposition capabilities include field extraction from dataclass definitions into function
17
+ parameters, type annotation preservation for static analysis, constructor pattern generation for
18
+ different field types, instance reconstruction logic for result packaging, and import dependency
19
+ tracking for generated code modules. These components form the building blocks for subsequent
20
+ transformation stages that generate specialized modules with embedded constants, eliminated dead
21
+ code paths, and optimized execution strategies.
22
+
23
+ The containers support the complete transformation system from high-level dataclass algorithms
24
+ to low-level optimized functions while maintaining semantic equivalence and type safety throughout
25
+ the compilation process.
19
26
  """
20
27
 
21
- from astToolkit import ClassIsAndAttribute, DOT, LedgerOfImports, Make, NodeTourist, str_nameDOTname, Then
28
+ from astToolkit import Be, DOT, identifierDotAttribute, LedgerOfImports, Make, NodeTourist, Then
22
29
  from collections.abc import Callable
23
30
  from copy import deepcopy
24
- from mapFolding.someAssemblyRequired import IfThis, raiseIfNoneGitHubIssueNumber3
31
+ from hunterMakesPy import raiseIfNone
32
+ from mapFolding.someAssemblyRequired import IfThis
25
33
  from typing import Any, cast
26
34
  import ast
27
35
  import dataclasses
@@ -32,6 +40,22 @@ dummyTuple = Make.Tuple([Make.Name("dummyElement")])
32
40
 
33
41
  @dataclasses.dataclass
34
42
  class ShatteredDataclass:
43
+ """Container for decomposed dataclass components organized as AST nodes for code generation.
44
+
45
+ This class holds the decomposed representation of a dataclass, breaking it down into individual
46
+ AST components that can be manipulated and recombined for different code generation contexts.
47
+ It is particularly essential for transforming dataclass-based algorithms into Numba-compatible
48
+ functions where dataclass instances cannot be directly used.
49
+
50
+ The decomposition enables individual field access, type annotation extraction, and parameter
51
+ specification generation while maintaining the structural relationships needed to reconstruct
52
+ equivalent functionality using primitive values and tuples.
53
+
54
+ All AST components are organized to support both function parameter specification (unpacking
55
+ dataclass fields into individual parameters) and result reconstruction (packing individual
56
+ values back into dataclass instances).
57
+ """
58
+
35
59
  countingVariableAnnotation: ast.expr
36
60
  """Type annotation for the counting variable extracted from the dataclass."""
37
61
 
@@ -39,39 +63,40 @@ class ShatteredDataclass:
39
63
  """AST name node representing the counting variable identifier."""
40
64
 
41
65
  field2AnnAssign: dict[str, ast.AnnAssign | ast.Assign] = dataclasses.field(default_factory=lambda: dict[str, ast.AnnAssign | ast.Assign]())
42
- """Maps field names to their corresponding AST call expressions."""
66
+ """Maps field names to their corresponding AST assignment expressions for initialization."""
43
67
 
44
68
  Z0Z_field2AnnAssign: dict[str, tuple[ast.AnnAssign | ast.Assign, str]] = dataclasses.field(default_factory=lambda: dict[str, tuple[ast.AnnAssign | ast.Assign, str]]())
69
+ """Temporary mapping for field assignments with constructor type information."""
45
70
 
46
71
  fragments4AssignmentOrParameters: ast.Tuple = dummyTuple
47
- """AST tuple used as target for assignment to capture returned fragments."""
72
+ """AST tuple used as target for assignment to capture returned field values."""
48
73
 
49
74
  imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
50
- """Import records for the dataclass and its constituent parts."""
75
+ """Import records for the dataclass and its constituent field types."""
51
76
 
52
77
  list_argAnnotated4ArgumentsSpecification: list[ast.arg] = dataclasses.field(default_factory=lambda: list[ast.arg]())
53
- """Function argument nodes with annotations for parameter specification."""
78
+ """Function argument nodes with type annotations for parameter specification."""
54
79
 
55
80
  list_keyword_field__field4init: list[ast.keyword] = dataclasses.field(default_factory=lambda: list[ast.keyword]())
56
- """Keyword arguments for dataclass initialization with field=field format."""
81
+ """Keyword arguments for dataclass initialization using field=field format."""
57
82
 
58
83
  listAnnotations: list[ast.expr] = dataclasses.field(default_factory=lambda: list[ast.expr]())
59
- """Type annotations for each dataclass field."""
84
+ """Type annotations for each dataclass field in declaration order."""
60
85
 
61
86
  listName4Parameters: list[ast.Name] = dataclasses.field(default_factory=lambda: list[ast.Name]())
62
87
  """Name nodes for each dataclass field used as function parameters."""
63
88
 
64
89
  listUnpack: list[ast.AnnAssign] = dataclasses.field(default_factory=lambda: list[ast.AnnAssign]())
65
- """Annotated assignment statements to extract fields from dataclass."""
90
+ """Annotated assignment statements to extract individual fields from dataclass instances."""
66
91
 
67
92
  map_stateDOTfield2Name: dict[ast.AST, ast.Name] = dataclasses.field(default_factory=lambda: dict[ast.AST, ast.Name]())
68
- """Maps AST expressions to Name nodes for find-replace operations."""
93
+ """Maps dataclass attribute access expressions to field name nodes for find-replace operations."""
69
94
 
70
95
  repack: ast.Assign = dummyAssign
71
- """AST assignment statement that reconstructs the original dataclass instance."""
96
+ """AST assignment statement that reconstructs the original dataclass instance from individual fields."""
72
97
 
73
98
  signatureReturnAnnotation: ast.Subscript = dummySubscript
74
- """tuple-based return type annotation for function definitions."""
99
+ """Tuple-based return type annotation for functions returning decomposed field values."""
75
100
 
76
101
  @dataclasses.dataclass
77
102
  class DeReConstructField2ast:
@@ -82,7 +107,6 @@ class DeReConstructField2ast:
82
107
  representations needed for code generation. It handles the conversion of field
83
108
  attributes, type annotations, and metadata into AST constructs that can be used
84
109
  to reconstruct the field in generated code.
85
-
86
110
  The class is particularly important for decomposing dataclass fields (like those in
87
111
  ComputationState) to enable their use in specialized contexts like Numba-optimized
88
112
  functions, where the full dataclass cannot be directly used but its contents need
@@ -91,33 +115,93 @@ class DeReConstructField2ast:
91
115
  Each field is processed according to its type and metadata to create appropriate
92
116
  variable declarations, type annotations, and initialization code as AST nodes.
93
117
  """
94
- dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[str_nameDOTname]
118
+
119
+ dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[identifierDotAttribute]
120
+ """Logical path to the module containing the source dataclass definition."""
121
+
95
122
  dataclassClassDef: dataclasses.InitVar[ast.ClassDef]
123
+ """AST class definition node for the source dataclass."""
124
+
96
125
  dataclassesDOTdataclassInstanceIdentifier: dataclasses.InitVar[str]
126
+ """Variable identifier for the dataclass instance in generated code."""
127
+
97
128
  field: dataclasses.InitVar[dataclasses.Field[Any]]
129
+ """Dataclass field object to be transformed into AST components."""
98
130
 
99
131
  ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
132
+ """Import tracking for types and modules required by this field."""
100
133
 
101
134
  name: str = dataclasses.field(init=False)
135
+ """Field name extracted from the dataclass field definition."""
136
+
102
137
  typeBuffalo: type[Any] | str | Any = dataclasses.field(init=False)
138
+ """Type annotation of the field as specified in the dataclass."""
139
+
103
140
  default: Any | None = dataclasses.field(init=False)
141
+ """Default value for the field, or None if no default is specified."""
142
+
104
143
  default_factory: Callable[..., Any] | None = dataclasses.field(init=False)
144
+ """Default factory function for the field, or None if not specified."""
145
+
105
146
  repr: bool = dataclasses.field(init=False)
147
+ """Whether the field should be included in the string representation."""
148
+
106
149
  hash: bool | None = dataclasses.field(init=False)
150
+ """Whether the field should be included in hash computation."""
151
+
107
152
  init: bool = dataclasses.field(init=False)
153
+ """Whether the field should be included in the generated __init__ method."""
154
+
108
155
  compare: bool = dataclasses.field(init=False)
156
+ """Whether the field should be included in comparison operations."""
157
+
109
158
  metadata: dict[Any, Any] = dataclasses.field(init=False)
159
+ """Field metadata dictionary containing additional configuration information."""
160
+
110
161
  kw_only: bool = dataclasses.field(init=False)
162
+ """Whether the field must be specified as a keyword-only argument."""
111
163
 
112
164
  astName: ast.Name = dataclasses.field(init=False)
165
+ """AST name node representing the field identifier."""
166
+
113
167
  ast_keyword_field__field: ast.keyword = dataclasses.field(init=False)
168
+ """AST keyword argument for dataclass initialization using field=field pattern."""
169
+
114
170
  ast_nameDOTname: ast.Attribute = dataclasses.field(init=False)
171
+ """AST attribute access expression for accessing the field from an instance."""
172
+
115
173
  astAnnotation: ast.expr = dataclasses.field(init=False)
174
+ """AST expression representing the field's type annotation."""
175
+
116
176
  ast_argAnnotated: ast.arg = dataclasses.field(init=False)
177
+ """AST function argument with type annotation for parameter specification."""
178
+
117
179
  astAnnAssignConstructor: ast.AnnAssign|ast.Assign = dataclasses.field(init=False)
118
- Z0Z_hack: tuple[ast.AnnAssign|ast.Assign, str] = dataclasses.field(init=False)
180
+ """AST assignment statement for field initialization with appropriate constructor."""
119
181
 
120
- def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstanceIdentifier: str, field: dataclasses.Field[Any]) -> None:
182
+ Z0Z_hack: tuple[ast.AnnAssign|ast.Assign, str] = dataclasses.field(init=False)
183
+ """Temporary tuple containing assignment statement and constructor type information."""
184
+
185
+ def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: identifierDotAttribute, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstanceIdentifier: str, field: dataclasses.Field[Any]) -> None:
186
+ """
187
+ Initialize AST components based on the provided dataclass field.
188
+
189
+ This method extracts field attributes and constructs corresponding AST nodes
190
+ for various code generation contexts. It handles special cases for array types,
191
+ scalar types, and complex type annotations, creating appropriate constructor
192
+ calls and import requirements.
193
+
194
+ Parameters
195
+ ----------
196
+ dataclassesDOTdataclassLogicalPathModule : identifierDotAttribute
197
+ Module path containing the dataclass
198
+ dataclassClassDef : ast.ClassDef
199
+ AST class definition for type annotation extraction
200
+ dataclassesDOTdataclassInstanceIdentifier : str
201
+ Instance variable name for attribute access
202
+ field : dataclasses.Field[Any]
203
+ Dataclass field to transform
204
+ """
121
205
  self.compare = field.compare
122
206
  self.default = field.default if field.default is not dataclasses.MISSING else None
123
207
  self.default_factory = field.default_factory if field.default_factory is not dataclasses.MISSING else None
@@ -133,24 +217,21 @@ class DeReConstructField2ast:
133
217
  self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
134
218
  self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstanceIdentifier), self.name)
135
219
 
136
- sherpa = NodeTourist( # pyright: ignore[reportUnknownVariableType]
137
- findThis=ClassIsAndAttribute.targetIs(ast.AnnAssign, IfThis.isNameIdentifier(self.name))
138
- , doThat=Then.extractIt(DOT.annotation) # pyright: ignore[reportArgumentType]
139
- ).captureLastMatch(dataclassClassDef)
140
-
141
- if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
142
- else: self.astAnnotation = sherpa
220
+ self.astAnnotation = raiseIfNone(NodeTourist[ast.AnnAssign, ast.Name | None](
221
+ findThis = Be.AnnAssign.targetIs(IfThis.isNameIdentifier(self.name))
222
+ , doThat = Then.extractIt(cast("Callable[[ast.AnnAssign], ast.Name | None]", DOT.annotation))
223
+ ).captureLastMatch(dataclassClassDef))
143
224
 
144
- self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation) # pyright: ignore[reportUnknownArgumentType, reportUnknownMemberType]
225
+ self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)
145
226
 
146
227
  dtype = self.metadata.get('dtype', None)
147
228
  if dtype:
148
- moduleWithLogicalPath: str_nameDOTname = 'numpy'
229
+ moduleWithLogicalPath: identifierDotAttribute = 'numpy'
149
230
  annotationType = 'ndarray'
150
231
  self.ledger.addImportFrom_asStr(moduleWithLogicalPath, annotationType)
151
232
  self.ledger.addImportFrom_asStr(moduleWithLogicalPath, 'dtype')
152
233
  axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Name('uint8'))
153
- dtype_asnameName: ast.Name = cast(ast.Name, self.astAnnotation)
234
+ dtype_asnameName: ast.Name = self.astAnnotation
154
235
  if dtype_asnameName.id == 'Array3D':
155
236
  axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Tuple([Make.Name('uint8'), Make.Name('uint8'), Make.Name('uint8')]))
156
237
  ast_expr = Make.Subscript(Make.Name(annotationType), Make.Tuple([axesSubscript, Make.Subscript(Make.Name('dtype'), dtype_asnameName)]))
@@ -171,4 +252,4 @@ class DeReConstructField2ast:
171
252
  self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, takeTheTuple)
172
253
  self.Z0Z_hack = (self.astAnnAssignConstructor, elementConstructor)
173
254
  if isinstance(self.astAnnotation, ast.Name):
174
- self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id) # pyright: ignore [reportUnknownArgumentType, reportUnknownMemberType, reportIJustCalledATypeGuardMethod_WTF]
255
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id)
@@ -24,34 +24,43 @@ While originally part of a tighter integration with the code generation assembly
24
24
  this module now operates as a standalone utility that can be applied to any module
25
25
  containing Numba-compiled functions.
26
26
  """
27
- from importlib.machinery import ModuleSpec
28
27
  from pathlib import Path
29
- from types import ModuleType
28
+ from typing import TYPE_CHECKING
30
29
  import importlib.util
31
30
  import llvmlite.binding
32
31
 
32
+ if TYPE_CHECKING:
33
+ from importlib.machinery import ModuleSpec
34
+ from types import ModuleType
35
+
33
36
  def writeModuleLLVM(pathFilename: Path, identifierCallable: str) -> Path:
34
- """Import the generated module directly and get its LLVM IR.
35
-
36
- Parameters
37
- pathFilename: Path to the Python module file containing the Numba-compiled function
38
- identifierCallable: Name of the function within the module to extract LLVM IR from
39
-
40
- Returns
41
- Path to the generated .ll file containing the extracted LLVM IR
42
-
43
- For an example of the output, see reference/jobsCompleted/[2x19]/[2x19].ll,
44
- which contains the IR for the historically significant 2x19 map calculation.
45
- """
46
- specTarget: ModuleSpec | None = importlib.util.spec_from_file_location("generatedModule", pathFilename)
47
- if specTarget is None or specTarget.loader is None:
48
- raise ImportError(f"Could not create module spec or loader for {pathFilename}")
49
- moduleTarget: ModuleType = importlib.util.module_from_spec(specTarget)
50
- specTarget.loader.exec_module(moduleTarget)
51
-
52
- # Get LLVM IR and write to file
53
- linesLLVM = moduleTarget.__dict__[identifierCallable].inspect_llvm()[()]
54
- moduleLLVM: llvmlite.binding.ModuleRef = llvmlite.binding.module.parse_assembly(linesLLVM)
55
- pathFilenameLLVM: Path = pathFilename.with_suffix(".ll")
56
- pathFilenameLLVM.write_text(str(moduleLLVM))
57
- return pathFilenameLLVM
37
+ """Import the generated module directly and get its LLVM IR.
38
+
39
+ Parameters
40
+ ----------
41
+ pathFilename : Path
42
+ Path to the Python module file containing the Numba-compiled function
43
+ identifierCallable : str
44
+ Name of the function within the module to extract LLVM IR from
45
+
46
+ Returns
47
+ -------
48
+ pathFilenameLLVM : Path
49
+ Path to the generated .ll file containing the extracted LLVM IR
50
+
51
+ For an example of the output, see reference/jobsCompleted/[2x19]/[2x19].ll,
52
+ which contains the IR for the historically significant 2x19 map calculation.
53
+ """
54
+ specTarget: ModuleSpec | None = importlib.util.spec_from_file_location("generatedModule", pathFilename)
55
+ if specTarget is None or specTarget.loader is None:
56
+ message = f"Could not create module spec or loader for {pathFilename}"
57
+ raise ImportError(message)
58
+ moduleTarget: ModuleType = importlib.util.module_from_spec(specTarget)
59
+ specTarget.loader.exec_module(moduleTarget)
60
+
61
+ # Get LLVM IR and write to file
62
+ linesLLVM = moduleTarget.__dict__[identifierCallable].inspect_llvm()[()]
63
+ moduleLLVM: llvmlite.binding.ModuleRef = llvmlite.binding.module.parse_assembly(linesLLVM)
64
+ pathFilenameLLVM: Path = pathFilename.with_suffix(".ll")
65
+ pathFilenameLLVM.write_text(str(moduleLLVM))
66
+ return pathFilenameLLVM
@@ -1,12 +1,36 @@
1
+ """
2
+ Configuration constants and computational complexity estimates for map folding operations.
3
+
4
+ Provides default identifiers for code generation, module organization, and computational
5
+ resource planning. The module serves as a central registry for configuration values
6
+ used throughout the map folding system, particularly for synthetic module generation
7
+ and optimization decision-making.
8
+
9
+ The complexity estimates enable informed choices about computational strategies based
10
+ on empirical measurements and theoretical analysis of map folding algorithms for
11
+ specific dimensional configurations.
12
+ """
13
+
1
14
  algorithmSourceModuleDEFAULT: str = 'daoOfMapFolding'
15
+ """Default identifier for the algorithm source module containing the base implementation."""
16
+
2
17
  dataclassInstanceIdentifierDEFAULT: str = 'state'
18
+ """Default variable name for dataclass instances in generated code."""
19
+
3
20
  dataPackingModuleIdentifierDEFAULT: str = 'dataPacking'
21
+ """Default identifier for modules containing data packing and unpacking functions."""
22
+
4
23
  logicalPathInfixDEFAULT: str = 'syntheticModules'
24
+ """Default path component for organizing synthetic generated modules."""
25
+
5
26
  sourceCallableDispatcherDEFAULT: str = 'doTheNeedful'
27
+ """Default identifier for dispatcher functions that route computational tasks."""
28
+
6
29
  sourceCallableIdentifierDEFAULT: str = 'count'
7
- theCountingIdentifierDEFAULT: str = 'groupsOfFolds'
30
+ """Default identifier for the core counting function in algorithms."""
8
31
 
9
- class raiseIfNoneGitHubIssueNumber3(Exception): pass
32
+ theCountingIdentifierDEFAULT: str = 'groupsOfFolds'
33
+ """Default identifier for the primary counting variable in map folding computations."""
10
34
 
11
35
  dictionaryEstimates: dict[tuple[int, ...], int] = {
12
36
  (2,2,2,2,2,2,2,2): 798148657152000,
@@ -15,3 +39,14 @@ dictionaryEstimates: dict[tuple[int, ...], int] = {
15
39
  (3,3,3,3): 85109616000000000000000000000000,
16
40
  (8,8): 791274195985524900,
17
41
  }
42
+ """
43
+ Registry of computational complexity estimates for specific map dimension configurations.
44
+
45
+ Maps dimensional tuples to estimated fold counts based on empirical measurements and
46
+ theoretical analysis. These estimates guide optimization decisions and resource planning
47
+ for computational tasks with known dimensional parameters.
48
+
49
+ The estimates represent the expected number of computational operations or fold
50
+ configurations for the given map dimensions, helping determine appropriate optimization
51
+ strategies and computational resource allocation.
52
+ """