mapFolding 0.12.3__py3-none-any.whl → 0.13.1__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
mapFolding/basecamp.py CHANGED
@@ -31,11 +31,11 @@ from os import PathLike
31
31
  from pathlib import PurePath
32
32
  import contextlib
33
33
 
34
- def countFolds(listDimensions: Sequence[int] | None = None # noqa: C901
34
+ def countFolds(listDimensions: Sequence[int] | None = None # noqa: C901, PLR0912, PLR0915
35
35
  , pathLikeWriteFoldsTotal: PathLike[str] | PurePath | None = None
36
36
  , computationDivisions: int | str | None = None
37
- , CPUlimit: int | float | bool | None = None
38
- # , * I need to improve `standardizedEqualToCallableReturn` so it will work with keyword arguments
37
+ # , * # TODO improve `standardizedEqualToCallableReturn` so it will work with keyword arguments
38
+ , CPUlimit: int | float | bool | None = None # noqa: FBT001
39
39
  , mapShape: tuple[int, ...] | None = None
40
40
  , oeisID: str | None = None
41
41
  , oeis_n: int | None = None
@@ -142,7 +142,7 @@ def countFolds(listDimensions: Sequence[int] | None = None # noqa: C901
142
142
  mapFoldingState = doTheNeedful(mapFoldingState)
143
143
  foldsTotal = mapFoldingState.foldsTotal
144
144
 
145
- elif flow == 'theorem2' and any(dimension > 2 for dimension in mapShape): # noqa: PLR2004
145
+ elif flow == 'theorem2' and any(dimension > 2 for dimension in mapShape):
146
146
  from mapFolding.dataBaskets import MapFoldingState # noqa: PLC0415
147
147
  mapFoldingState: MapFoldingState = MapFoldingState(mapShape)
148
148
 
@@ -154,7 +154,7 @@ def countFolds(listDimensions: Sequence[int] | None = None # noqa: C901
154
154
 
155
155
  foldsTotal = mapFoldingState.foldsTotal
156
156
 
157
- elif flow == 'theorem2Trimmed' and any(dimension > 2 for dimension in mapShape): # noqa: PLR2004
157
+ elif flow == 'theorem2Trimmed' and any(dimension > 2 for dimension in mapShape):
158
158
  from mapFolding.dataBaskets import MapFoldingState # noqa: PLC0415
159
159
  mapFoldingState: MapFoldingState = MapFoldingState(mapShape)
160
160
 
@@ -166,7 +166,7 @@ def countFolds(listDimensions: Sequence[int] | None = None # noqa: C901
166
166
 
167
167
  foldsTotal = mapFoldingState.foldsTotal
168
168
 
169
- elif (flow == 'theorem2Numba' or taskDivisions == 0) and any(dimension > 2 for dimension in mapShape): # noqa: PLR2004
169
+ elif (flow == 'theorem2Numba' or taskDivisions == 0) and any(dimension > 2 for dimension in mapShape):
170
170
  from mapFolding.dataBaskets import MapFoldingState # noqa: PLC0415
171
171
  mapFoldingState: MapFoldingState = MapFoldingState(mapShape)
172
172
 
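Taken together, these basecamp.py hunks show `countFolds` taking its configuration as optional keyword arguments (`pathLikeWriteFoldsTotal`, `computationDivisions`, `CPUlimit`, `mapShape`, `oeisID`, `oeis_n`) and dispatching on `flow` and `mapShape`. A minimal usage sketch, assuming `countFolds` returns the computed fold total when no output path is given:

    from mapFolding.basecamp import countFolds

    # Count the foldings of a 2 x 4 map; every other parameter keeps its None
    # default, so the package picks the flow, task divisions, and CPU limit.
    foldsTotal = countFolds(mapShape=(2, 4))
    print(foldsTotal)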
mapFolding/dataBaskets.py CHANGED
@@ -29,122 +29,109 @@ import dataclasses
29
29
  class MapFoldingState:
30
30
  """Core computational state for map folding algorithms.
31
31
 
32
- (AI generated docstring)
33
-
34
- This class encapsulates all data needed to perform map folding computations,
35
- from the basic map dimensions through the complex internal arrays needed
36
- for efficient algorithmic processing. It serves as both a data container
37
- and a computational interface, providing properties and methods that
38
- abstract the underlying complexity.
39
-
40
- The class handles automatic initialization of all computational arrays
41
- based on the map dimensions, ensuring consistent sizing and type usage
42
- throughout the computation. It also manages the relationship between
43
- different data domains (leaves, elephino, folds) defined in the type system.
44
-
45
- Key Design Features include automatic array sizing based on map dimensions,
46
- type-safe access to computational data, lazy initialization of expensive arrays,
47
- integration with NumPy for performance, and metadata preservation for code generation.
32
+ This class encapsulates all data needed to perform map folding computations and metadata useful for code transformations.
48
33
 
49
34
  Attributes
50
35
  ----------
51
36
  mapShape : tuple[DatatypeLeavesTotal, ...]
52
37
  Dimensions of the map being analyzed for folding patterns.
53
38
  groupsOfFolds : DatatypeFoldsTotal = DatatypeFoldsTotal(0)
54
- Current count of distinct folding pattern groups discovered.
39
+ Current count of distinct folding pattern groups: each group has `leavesTotal`-many foldings.
55
40
  gap1ndex : DatatypeElephino = DatatypeElephino(0)
56
- Current position in gap enumeration algorithms.
41
+ The current 1-indexed position of the 'gap' during computation: 1-indexed as opposed to 0-indexed.
57
42
  gap1ndexCeiling : DatatypeElephino = DatatypeElephino(0)
58
- Upper bound for gap enumeration operations.
43
+ The upper bound of `gap1ndex`.
59
44
  indexDimension : DatatypeLeavesTotal = DatatypeLeavesTotal(0)
60
- Current dimension being processed in multi-dimensional algorithms.
45
+ The current 0-indexed position of the dimension during computation.
61
46
  indexLeaf : DatatypeLeavesTotal = DatatypeLeavesTotal(0)
62
- Current leaf being processed in sequential algorithms.
47
+ The current 0-indexed position of a leaf in a loop during computation: not to be confused with `leaf1ndex`.
63
48
  indexMiniGap : DatatypeElephino = DatatypeElephino(0)
64
- Current position within a gap subdivision.
49
+ The current 0-indexed position of a 'gap' in a loop during computation.
65
50
  leaf1ndex : DatatypeLeavesTotal = DatatypeLeavesTotal(1)
66
- One-based leaf index for algorithmic compatibility.
51
+ The current 1-indexed position of the leaf during computation: 1-indexed as opposed to 0-indexed.
67
52
  leafConnectee : DatatypeLeavesTotal = DatatypeLeavesTotal(0)
68
53
  Target leaf for connection operations.
69
54
  dimensionsUnconstrained : DatatypeLeavesTotal = None
70
55
  Count of dimensions not subject to folding constraints.
71
56
  countDimensionsGapped : Array1DLeavesTotal = None
72
- Array tracking gap counts across dimensions.
57
+ Array tracking computed number of dimensions with gaps.
73
58
  gapRangeStart : Array1DElephino = None
74
- Array of starting positions for gap ranges.
59
+ Array tracking computed starting positions of gap ranges.
75
60
  gapsWhere : Array1DLeavesTotal = None
76
61
  Array indicating locations of gaps in the folding pattern.
77
62
  leafAbove : Array1DLeavesTotal = None
78
- Array mapping each leaf to the leaf above it in the folding.
63
+ Array tracking the leaves above the current leaf, `leaf1ndex`, during computation.
79
64
  leafBelow : Array1DLeavesTotal = None
80
- Array mapping each leaf to the leaf below it in the folding.
65
+ Array tracking the leaves below the current leaf, `leaf1ndex`, during computation.
81
66
  connectionGraph : Array3D
82
- Three-dimensional representation of leaf connectivity.
67
+ Unchanging array representing connections between all leaves.
83
68
  dimensionsTotal : DatatypeLeavesTotal
84
- Total number of dimensions in the map.
69
+ Unchanging total number of dimensions in the map.
85
70
  leavesTotal : DatatypeLeavesTotal
86
- Total number of individual leaves in the map.
71
+ Unchanging total number of leaves in the map.
87
72
 
88
73
  """
89
74
 
90
75
  mapShape: tuple[DatatypeLeavesTotal, ...] = dataclasses.field(init=True, metadata={'elementConstructor': 'DatatypeLeavesTotal'})
76
+ """Dimensions of the map being analyzed for folding patterns."""
91
77
 
92
78
  groupsOfFolds: DatatypeFoldsTotal = dataclasses.field(default=DatatypeFoldsTotal(0), metadata={'theCountingIdentifier': True})
79
+ """Current count of distinct folding pattern groups: each group has `leavesTotal`-many foldings."""
93
80
 
94
81
  gap1ndex: DatatypeElephino = DatatypeElephino(0) # noqa: RUF009
82
+ """The current 1-indexed position of the 'gap' during computation: 1-indexed as opposed to 0-indexed."""
95
83
  gap1ndexCeiling: DatatypeElephino = DatatypeElephino(0) # noqa: RUF009
84
+ """The upper bound of `gap1ndex`."""
96
85
  indexDimension: DatatypeLeavesTotal = DatatypeLeavesTotal(0) # noqa: RUF009
86
+ """The current 0-indexed position of the dimension during computation."""
97
87
  indexLeaf: DatatypeLeavesTotal = DatatypeLeavesTotal(0) # noqa: RUF009
88
+ """The current 0-indexed position of a leaf in a loop during computation: not to be confused with `leaf1ndex`."""
98
89
  indexMiniGap: DatatypeElephino = DatatypeElephino(0) # noqa: RUF009
90
+ """The current 0-indexed position of a 'gap' in a loop during computation."""
99
91
  leaf1ndex: DatatypeLeavesTotal = DatatypeLeavesTotal(1) # noqa: RUF009
92
+ """The current 1-indexed position of the leaf during computation: 1-indexed as opposed to 0-indexed."""
100
93
  leafConnectee: DatatypeLeavesTotal = DatatypeLeavesTotal(0) # noqa: RUF009
94
+ """Target leaf for connection operations."""
101
95
 
102
96
  dimensionsUnconstrained: DatatypeLeavesTotal = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType, reportAttributeAccessIssue, reportUnknownMemberType]
97
+ """Count of dimensions not subject to folding constraints."""
103
98
 
104
99
  countDimensionsGapped: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # pyright: ignore[reportAssignmentType, reportAttributeAccessIssue, reportUnknownMemberType]
100
+ """Array tracking computed number of dimensions with gaps."""
105
101
  gapRangeStart: Array1DElephino = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DElephino.__args__[1].__args__[0]}) # pyright: ignore[reportAssignmentType, reportAttributeAccessIssue, reportUnknownMemberType]
102
+ """Array tracking computed starting positions of gap ranges."""
106
103
  gapsWhere: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # pyright: ignore[reportAssignmentType, reportAttributeAccessIssue, reportUnknownMemberType]
104
+ """Array indicating locations of gaps in the folding pattern."""
107
105
  leafAbove: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # pyright: ignore[reportAssignmentType, reportAttributeAccessIssue, reportUnknownMemberType]
106
+ """Array tracking the leaves above to the current leaf, `leaf1ndex`, during computation."""
108
107
  leafBelow: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # pyright: ignore[reportAssignmentType, reportAttributeAccessIssue, reportUnknownMemberType]
108
+ """Array tracking the leaves below to the current leaf, `leaf1ndex`, during computation."""
109
109
 
110
110
  connectionGraph: Array3D = dataclasses.field(init=False, metadata={'dtype': Array3D.__args__[1].__args__[0]}) # pyright: ignore[reportUnknownMemberType, reportAttributeAccessIssue]
111
+ """Unchanging array representing connections between all leaves."""
111
112
  dimensionsTotal: DatatypeLeavesTotal = dataclasses.field(init=False)
113
+ """Unchanging total number of dimensions in the map."""
112
114
  leavesTotal: DatatypeLeavesTotal = dataclasses.field(init=False)
115
+ """Unchanging total number of leaves in the map."""
113
116
  @property
114
117
  def foldsTotal(self) -> DatatypeFoldsTotal:
115
- """Calculate the total number of possible folding patterns for this map.
116
-
117
- (AI generated docstring)
118
+ """The total number of possible folding patterns for this map.
118
119
 
119
120
  Returns
120
121
  -------
121
- totalFoldingPatterns : DatatypeFoldsTotal
122
+ totalFoldings : DatatypeFoldsTotal
122
123
  The complete count of distinct folding patterns achievable with the current map configuration.
123
124
 
124
- Notes
125
- -----
126
- This represents the fundamental result of map folding analysis - the total
127
- number of unique ways a map can be folded given its dimensional constraints.
128
-
129
125
  """
130
126
  return DatatypeFoldsTotal(self.leavesTotal) * self.groupsOfFolds
131
127
 
132
128
  def __post_init__(self) -> None:
133
- """Initialize all computational arrays and derived values after dataclass construction.
134
-
135
- (AI generated docstring)
136
-
137
- This method performs the expensive operations needed to prepare the state
138
- for computation, including array allocation, dimension calculation, and
139
- connection graph generation. It runs automatically after the dataclass
140
- constructor completes.
129
+ """Ensure all fields have a value.
141
130
 
142
131
  Notes
143
132
  -----
144
- Arrays that are not explicitly provided (None) are automatically
145
- allocated with appropriate sizes based on the map dimensions.
146
- The connection graph is always regenerated to ensure consistency
147
- with the provided map shape.
133
+ Arrays that are not explicitly provided (None) are automatically allocated with appropriate sizes based on the map
134
+ dimensions. `dimensionsTotal`, `leavesTotal`, and `connectionGraph` cannot be set: they are calculated.
148
135
 
149
136
  """
150
137
  self.dimensionsTotal = DatatypeLeavesTotal(len(self.mapShape))
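The rewritten field docstrings above spell out how the derived values relate: `__post_init__` fills `dimensionsTotal`, `leavesTotal`, and `connectionGraph`, and the `foldsTotal` property multiplies `leavesTotal` by `groupsOfFolds`. A short sketch of that relationship, using only what these hunks show:

    from mapFolding.dataBaskets import MapFoldingState

    state = MapFoldingState((2, 4))   # __post_init__ allocates arrays and derived fields
    state.dimensionsTotal             # DatatypeLeavesTotal(len(mapShape)), here 2
    state.leavesTotal                 # unchanging total number of leaves in the map
    state.foldsTotal                  # leavesTotal * groupsOfFolds, so 0 before counting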
mapFolding/oeis.py CHANGED
@@ -8,7 +8,7 @@ folding ecosystem into the broader mathematical community through comprehensive
8
8
  integration with the Online Encyclopedia of Integer Sequences (OEIS). This bridge
9
9
  enables validation of computational results against established mathematical
10
10
  knowledge while supporting the discovery of new sequence values through the
11
- sophisticated computational pipeline.
11
+ sophisticated computational assembly line.
12
12
 
13
13
  The integration provides multiple pathways for mathematical verification: direct
14
14
  computation of OEIS sequences using the complete algorithmic implementation,
mapFolding/someAssemblyRequired/RecipeJob.py CHANGED
@@ -1,31 +1,4 @@
1
- """
2
- Map folding AST transformation system: Configuration management and transformation orchestration.
3
-
4
- This module provides the configuration orchestration layer of the map folding AST transformation
5
- system, implementing comprehensive recipes that coordinate the entire transformation process from
6
- abstract mathematical algorithms to optimized computational modules. The `RecipeJobTheorem2Numba`
7
- dataclass serves as the central configuration blueprint that bridges pattern recognition, dataclass
8
- decomposition, function optimization, and Numba compilation into a unified transformation process.
9
-
10
- The recipe system addresses the complexity of managing transformation parameters across multiple
11
- stages while maintaining consistency between source algorithm metadata and target optimization
12
- requirements. The orchestration layer coordinates the systematic extraction of mathematical
13
- functions from source modules, embedding of concrete parameter values, elimination of dead code
14
- paths, and generation of standalone Python modules optimized for specific map dimensions through
15
- the complete transformation process.
16
-
17
- Configuration management separates source analysis capabilities from target generation parameters,
18
- enabling systematic exploration of computational spaces through automated generation of optimized
19
- solvers. Source analysis encompasses parsing and analysis of abstract syntax trees from generic
20
- algorithm modules, extraction of specific mathematical functions for specialization, and
21
- identification of dataclass structures for parameter embedding. Target generation coordinates
22
- creation of standalone Python modules with optimized implementations, integration of Numba
23
- optimization directives, and preservation of mathematical correctness throughout optimization.
24
-
25
- The recipe system enables the broader map folding research framework by providing systematic
26
- control over the transformation process while ensuring that generated modules achieve maximum
27
- performance through compile-time specialization and runtime optimization strategies.
28
- """
1
+ """Configuration by dataclass."""
29
2
 
30
3
  from ast import Module
31
4
  from astToolkit import identifierDotAttribute, parseLogicalPath2astModule
@@ -36,24 +9,21 @@ from mapFolding import (
36
9
  from mapFolding.someAssemblyRequired import dataclassInstanceIdentifierDEFAULT, ShatteredDataclass
37
10
  from mapFolding.someAssemblyRequired.transformationTools import shatter_dataclassesDOTdataclass
38
11
  from pathlib import Path, PurePosixPath
39
- from typing import TypeAlias
40
12
  import dataclasses
41
13
 
42
14
  @dataclasses.dataclass
43
- class RecipeJobTheorem2Numba:
44
- """Configuration recipe for generating Numba-optimized map folding computation jobs.
15
+ class RecipeJobTheorem2:
16
+ """Configuration recipe for generating map folding computation jobs.
45
17
 
46
18
  This dataclass serves as the central configuration hub for the code transformation
47
- pipeline that converts generic map folding algorithms into highly optimized,
19
+ assembly line that converts generic map folding algorithms into highly optimized,
48
20
  specialized computation modules. The recipe encapsulates all parameters required
49
21
  for source code analysis, target file generation, datatype mapping, and compilation
50
22
  optimization settings.
51
23
 
52
24
  The transformation process operates by extracting functions from source modules,
53
25
  embedding concrete parameter values, eliminating dead code paths, and generating
54
- standalone Python modules optimized for specific map dimensions. These generated
55
- modules achieve maximum performance through Numba just-in-time compilation and
56
- embedded compile-time constants.
26
+ standalone Python modules optimized for specific map dimensions.
57
27
 
58
28
  The recipe maintains both source configuration (where to find the generic algorithm)
59
29
  and target configuration (where to write the optimized module), along with the
@@ -61,84 +31,118 @@ class RecipeJobTheorem2Numba:
61
31
 
62
32
  Attributes
63
33
  ----------
64
- state: The map folding computation state containing dimensions and initial values.
65
- foldsTotalEstimated: Estimated total number of folds for progress tracking (0).
66
- shatteredDataclass: Deconstructed dataclass metadata for code transformation.
67
- source_astModule: Parsed AST of the source module containing the generic algorithm.
68
- sourceCountCallable: Name of the counting function to extract ('count').
69
- sourceLogicalPathModuleDataclass: Logical path to the dataclass module.
70
- sourceDataclassIdentifier: Name of the source dataclass ('MapFoldingState').
71
- sourceDataclassInstance: Instance identifier for the dataclass.
72
- sourcePathPackage: Path to the source package.
73
- sourcePackageIdentifier: Name of the source package.
74
- pathPackage: Override path for the target package (None).
75
- pathModule: Override path for the target module directory.
76
- fileExtension: File extension for generated modules.
77
- pathFilenameFoldsTotal: Path for writing fold count results.
78
- packageIdentifier: Target package identifier (None).
79
- logicalPathRoot: Logical path root corresponding to filesystem directory.
80
- moduleIdentifier: Target module identifier.
81
- countCallable: Name of the counting function in generated module.
82
- dataclassIdentifier: Target dataclass identifier.
83
- dataclassInstance: Target dataclass instance identifier.
84
- logicalPathModuleDataclass: Logical path to target dataclass module.
85
- DatatypeFoldsTotal: Type alias for fold count datatype.
86
- DatatypeElephino: Type alias for intermediate computation datatype.
87
- DatatypeLeavesTotal: Type alias for leaf count datatype.
34
+ state : MapFoldingState
35
+ The map folding computation state containing dimensions and initial values.
36
+ foldsTotalEstimated : int = 0
37
+ Estimated total number of folds for progress tracking.
38
+ shatteredDataclass : ShatteredDataclass = None
39
+ Deconstructed dataclass metadata for code transformation.
40
+ source_astModule : Module
41
+ Parsed AST of the source module containing the generic algorithm.
42
+ sourceCountCallable : str = 'count'
43
+ Name of the counting function to extract.
44
+ sourceLogicalPathModuleDataclass : identifierDotAttribute
45
+ Logical path to the dataclass module.
46
+ sourceDataclassIdentifier : str = 'MapFoldingState'
47
+ Name of the source dataclass.
48
+ sourceDataclassInstance : str
49
+ Instance identifier for the dataclass.
50
+ sourcePathPackage : PurePosixPath | None
51
+ Path to the source package.
52
+ sourcePackageIdentifier : str | None
53
+ Name of the source package.
54
+ pathPackage : PurePosixPath | None = None
55
+ Override path for the target package.
56
+ pathModule : PurePosixPath | None
57
+ Override path for the target module directory.
58
+ fileExtension : str
59
+ File extension for generated modules.
60
+ pathFilenameFoldsTotal : PurePosixPath = None
61
+ Path for writing fold count results.
62
+ packageIdentifier : str | None = None
63
+ Target package identifier.
64
+ logicalPathRoot : identifierDotAttribute | None = None
65
+ Logical path root; probably corresponds to a physical filesystem directory.
66
+ moduleIdentifier : str = None
67
+ Target module identifier.
68
+ countCallable : str
69
+ Name of the counting function in generated module.
70
+ dataclassIdentifier : str | None
71
+ Target dataclass identifier.
72
+ dataclassInstance : str | None
73
+ Target dataclass instance identifier.
74
+ logicalPathModuleDataclass : identifierDotAttribute | None
75
+ Logical path to target dataclass module.
76
+ DatatypeFoldsTotal : TypeAlias
77
+ Type alias for fold count datatype.
78
+ DatatypeElephino : TypeAlias
79
+ Type alias for intermediate computation datatype.
80
+ DatatypeLeavesTotal : TypeAlias
81
+ Type alias for leaf count datatype.
88
82
  """
89
83
 
90
84
  state: MapFoldingState
91
- # TODO create function to calculate `foldsTotalEstimated`
85
+ """The map folding computation state containing dimensions and initial values."""
92
86
  foldsTotalEstimated: int = 0
87
+ """Estimated total number of folds for progress tracking."""
93
88
  shatteredDataclass: ShatteredDataclass = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
89
+ """Deconstructed dataclass metadata for code transformation."""
94
90
 
95
91
  # Source -----------------------------------------
96
92
  source_astModule: Module = parseLogicalPath2astModule('mapFolding.syntheticModules.theorem2Numba') # noqa: RUF009
93
+ """Parsed AST of the source module containing the generic algorithm."""
97
94
  sourceCountCallable: str = 'count'
95
+ """Name of the counting function to extract."""
98
96
 
99
97
  sourceLogicalPathModuleDataclass: identifierDotAttribute = 'mapFolding.dataBaskets'
98
+ """Logical path to the dataclass module."""
100
99
  sourceDataclassIdentifier: str = 'MapFoldingState'
100
+ """Name of the source dataclass."""
101
101
  sourceDataclassInstance: str = dataclassInstanceIdentifierDEFAULT
102
+ """Instance identifier for the dataclass."""
102
103
 
103
104
  sourcePathPackage: PurePosixPath | None = PurePosixPath(packageSettings.pathPackage) # noqa: RUF009
105
+ """Path to the source package."""
104
106
  sourcePackageIdentifier: str | None = packageSettings.identifierPackage
107
+ """Name of the source package."""
105
108
 
106
109
  # Filesystem, names of physical objects ------------------------------------------
107
110
  pathPackage: PurePosixPath | None = None
111
+ """Override path for the target package."""
108
112
  pathModule: PurePosixPath | None = PurePosixPath(getPathRootJobDEFAULT()) # noqa: RUF009
109
- """ `pathModule` will override `pathPackage` and `logicalPathRoot`."""
113
+ """Override path for the target module directory."""
110
114
  fileExtension: str = packageSettings.fileExtension
115
+ """File extension for generated modules."""
111
116
  pathFilenameFoldsTotal: PurePosixPath = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
117
+ """Path for writing fold count results."""
112
118
 
113
119
  # Logical identifiers, as opposed to physical identifiers ------------------------
114
120
  packageIdentifier: str | None = None
121
+ """Target package identifier."""
115
122
  logicalPathRoot: identifierDotAttribute | None = None
116
- """ `logicalPathRoot` likely corresponds to a physical filesystem directory."""
123
+ """Logical path root; probably corresponds to physical filesystem directory."""
117
124
  moduleIdentifier: str = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
125
+ """Target module identifier."""
118
126
  countCallable: str = sourceCountCallable
127
+ """Name of the counting function in generated module."""
119
128
  dataclassIdentifier: str | None = sourceDataclassIdentifier
129
+ """Target dataclass identifier."""
120
130
  dataclassInstance: str | None = sourceDataclassInstance
131
+ """Target dataclass instance identifier."""
121
132
  logicalPathModuleDataclass: identifierDotAttribute | None = sourceLogicalPathModuleDataclass
133
+ """Logical path to target dataclass module."""
122
134
 
123
135
  # Datatypes ------------------------------------------
124
- DatatypeFoldsTotal: TypeAlias = TheDatatypeFoldsTotal
125
- DatatypeElephino: TypeAlias = TheDatatypeElephino
126
- DatatypeLeavesTotal: TypeAlias = TheDatatypeLeavesTotal
127
-
128
- def _makePathFilename(self,
129
- pathRoot: PurePosixPath | None = None,
130
- logicalPathINFIX: identifierDotAttribute | None = None,
131
- filenameStem: str | None = None,
132
- fileExtension: str | None = None,
133
- ) -> PurePosixPath:
136
+ type DatatypeFoldsTotal = TheDatatypeFoldsTotal
137
+ """Type alias for datatype linked to the magnitude of `foldsTotal`."""
138
+ type DatatypeElephino = TheDatatypeElephino
139
+ """Type alias for intermediate computation datatype."""
140
+ type DatatypeLeavesTotal = TheDatatypeLeavesTotal
141
+ """Type alias for datatype linked to the magnitude of `leavesTotal`."""
142
+
143
+ def _makePathFilename(self, pathRoot: PurePosixPath | None = None, logicalPathINFIX: identifierDotAttribute | None = None, filenameStem: str | None = None, fileExtension: str | None = None) -> PurePosixPath:
134
144
  """Construct a complete file path from component parts.
135
145
 
136
- (AI generated docstring)
137
-
138
- This helper method builds filesystem paths by combining a root directory,
139
- optional subdirectory structure, filename stem, and file extension. It provides
140
- sensible defaults for missing components based on the recipe configuration.
141
-
142
146
  Parameters
143
147
  ----------
144
148
  pathRoot : PurePosixPath | None = None
@@ -172,8 +176,6 @@ class RecipeJobTheorem2Numba:
172
176
  def pathFilenameModule(self) -> PurePosixPath:
173
177
  """Generate the complete path and filename for the output module.
174
178
 
175
- (AI generated docstring)
176
-
177
179
  This property computes the target location where the generated computation
178
180
  module will be written. It respects the `pathModule` override if specified,
179
181
  otherwise constructs the path using the default package structure.
@@ -192,8 +194,6 @@ class RecipeJobTheorem2Numba:
192
194
  def __post_init__(self) -> None:
193
195
  """Initialize computed fields and validate configuration after dataclass creation.
194
196
 
195
- (AI generated docstring)
196
-
197
197
  This method performs post-initialization setup including deriving module
198
198
  identifier from map shape if not explicitly provided, setting default paths
199
199
  for fold total output files, and creating shattered dataclass metadata for
@@ -217,10 +217,10 @@ class DeReConstructField2ast:
217
217
  self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
218
218
  self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstanceIdentifier), self.name)
219
219
 
220
- self.astAnnotation = raiseIfNone(NodeTourist[ast.AnnAssign, ast.Name | None](
220
+ self.astAnnotation = cast('ast.Name', raiseIfNone(NodeTourist(
221
221
  findThis = Be.AnnAssign.targetIs(IfThis.isNameIdentifier(self.name))
222
- , doThat = Then.extractIt(cast("Callable[[ast.AnnAssign], ast.Name | None]", DOT.annotation))
223
- ).captureLastMatch(dataclassClassDef))
222
+ , doThat = Then.extractIt(DOT.annotation)
223
+ ).captureLastMatch(dataclassClassDef)))
224
224
 
225
225
  self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)
226
226
 
@@ -299,7 +299,7 @@ def makeDaoOfMapFolding(astModule: ast.Module, moduleIdentifier: str, callableId
299
299
  ingredientsFunctionDispatcher.imports.update(shatteredDataclass.imports)
300
300
  targetCallableIdentifier = ingredientsFunction.astFunctionDef.name
301
301
  ingredientsFunctionDispatcher = unpackDataclassCallFunctionRepackDataclass(ingredientsFunctionDispatcher, targetCallableIdentifier, shatteredDataclass)
302
- astTuple: ast.Tuple = cast('ast.Tuple', raiseIfNone(NodeTourist[ast.Return, ast.expr | None](Be.Return.valueIs(Be.Tuple)
302
+ astTuple: ast.Tuple = cast('ast.Tuple', raiseIfNone(NodeTourist(Be.Return.valueIs(Be.Tuple)
303
303
  , doThat=Then.extractIt(DOT.value)).captureLastMatch(ingredientsFunction.astFunctionDef)))
304
304
  astTuple.ctx = ast.Store()
305
305
 
@@ -329,7 +329,7 @@ def makeDaoOfMapFoldingParallel(astModule: ast.Module, moduleIdentifier: str, ca
329
329
 
330
330
  The generated module contains multiple functions including core counting function with parallel-aware task filtering,
331
331
  dataclass unpacking/repacking function for process communication, and main dispatcher function that manages the parallel
332
- execution pipeline.
332
+ execution assembly line.
333
333
 
334
334
  Parameters
335
335
  ----------
@@ -591,15 +591,12 @@ def trimTheorem2(astModule: ast.Module, moduleIdentifier: str, callableIdentifie
591
591
 
592
592
  (AI generated docstring)
593
593
 
594
- Creates a trimmed version of the Theorem 2 implementation by eliminating
595
- conditional logic that is not needed under specific constraint assumptions.
596
- This transformation removes checks for unconstrained dimensions, simplifying
597
- the algorithm for cases where dimensional constraints are guaranteed to be
598
- satisfied by external conditions.
594
+ Creates a trimmed version of the Theorem 2 implementation by eliminating conditional logic that is not needed under specific
595
+ constraint assumptions. This transformation removes checks for unconstrained dimensions, simplifying the algorithm for cases
596
+ where dimensional constraints are guaranteed to be satisfied by external conditions.
599
597
 
600
- The trimming operation is particularly valuable for generating lean implementations
601
- where the calling context ensures that certain conditions will always be met,
602
- allowing the removal of defensive programming constructs that add computational
598
+ The trimming operation is particularly valuable for generating lean implementations where the calling context ensures that
599
+ certain conditions will always be met, allowing the removal of defensive programming constructs that add computational
603
600
  overhead without providing benefits in the constrained environment.
604
601
 
605
602
  Parameters
@@ -746,7 +743,7 @@ def makeUnRePackDataclass(astImportFrom: ast.ImportFrom) -> None:
746
743
  targetCallableIdentifier = astImportFrom.names[0].name
747
744
  ingredientsFunction = raiseIfNone(unpackDataclassCallFunctionRepackDataclass(ingredientsFunction, targetCallableIdentifier, shatteredDataclass))
748
745
  targetFunctionDef: ast.FunctionDef = raiseIfNone(extractFunctionDef(parseLogicalPath2astModule(raiseIfNone(astImportFrom.module)), targetCallableIdentifier))
749
- astTuple: ast.Tuple = cast('ast.Tuple', raiseIfNone(NodeTourist[ast.Return, ast.expr | None](Be.Return.valueIs(Be.Tuple)
746
+ astTuple: ast.Tuple = cast('ast.Tuple', raiseIfNone(NodeTourist(Be.Return.valueIs(Be.Tuple)
750
747
  , doThat=Then.extractIt(DOT.value)).captureLastMatch(targetFunctionDef)))
751
748
  astTuple.ctx = ast.Store()
752
749
 
@@ -35,7 +35,7 @@ from astToolkit.transformationTools import write_astModule
35
35
  from hunterMakesPy import autoDecodingRLE, raiseIfNone
36
36
  from mapFolding import getPathFilenameFoldsTotal, MapFoldingState, packageSettings
37
37
  from mapFolding.someAssemblyRequired import IfThis
38
- from mapFolding.someAssemblyRequired.RecipeJob import RecipeJobTheorem2Numba
38
+ from mapFolding.someAssemblyRequired.RecipeJob import RecipeJobTheorem2
39
39
  from mapFolding.someAssemblyRequired.toolkitNumba import decorateCallableWithNumba, parametersNumbaLight, SpicesJobNumba
40
40
  from mapFolding.syntheticModules.initializeCount import initializeGroupsOfFolds
41
41
  from pathlib import PurePosixPath
@@ -46,26 +46,9 @@ import ast
46
46
  if TYPE_CHECKING:
47
47
  from collections.abc import Callable
48
48
 
49
- # Configuration lists for code optimization and dead code elimination
50
- listIdentifiersNotUsedAllHARDCODED: list[str] = ['concurrencyLimit', 'foldsTotal', 'mapShape']
51
- """Identifiers that are universally unused across all optimization contexts."""
52
-
53
- listIdentifiersNotUsedParallelSequentialHARDCODED: list[str] = ['indexLeaf']
54
- """Identifiers unused in both parallel and sequential execution modes."""
55
-
56
- listIdentifiersNotUsedSequentialHARDCODED: list[str] = ['foldGroups', 'taskDivisions', 'taskIndex']
57
- """Identifiers unused specifically in sequential execution mode."""
58
-
59
- listIdentifiersReplacedHARDCODED: list[str] = ['groupsOfFolds']
60
- """Identifiers that get replaced with optimized equivalents during transformation."""
61
-
62
49
  listIdentifiersStaticValuesHARDCODED: list[str] = ['dimensionsTotal', 'leavesTotal']
63
- """Identifiers with compile-time constant values that can be embedded directly."""
64
50
 
65
- listIdentifiersNotUsedHARDCODED: list[str] = listIdentifiersStaticValuesHARDCODED + listIdentifiersReplacedHARDCODED + listIdentifiersNotUsedAllHARDCODED + listIdentifiersNotUsedParallelSequentialHARDCODED + listIdentifiersNotUsedSequentialHARDCODED
66
- """Complete list of all identifiers that can be eliminated during optimization."""
67
-
68
- def addLauncherNumbaProgress(ingredientsModule: IngredientsModule, ingredientsFunction: IngredientsFunction, job: RecipeJobTheorem2Numba, spices: SpicesJobNumba) -> tuple[IngredientsModule, IngredientsFunction]:
51
+ def addLauncherNumbaProgress(ingredientsModule: IngredientsModule, ingredientsFunction: IngredientsFunction, job: RecipeJobTheorem2, spices: SpicesJobNumba) -> tuple[IngredientsModule, IngredientsFunction]:
69
52
  """Add progress tracking capabilities to a Numba-optimized function.
70
53
 
71
54
  (AI generated docstring)
@@ -130,7 +113,7 @@ if __name__ == '__main__':
130
113
 
131
114
  return ingredientsModule, ingredientsFunction
132
115
 
133
- def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: IngredientsFunction, job: RecipeJobTheorem2Numba) -> IngredientsFunction:
116
+ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: IngredientsFunction, job: RecipeJobTheorem2) -> IngredientsFunction:
134
117
  """Convert function parameters into initialized variables with concrete values.
135
118
 
136
119
  (AI generated docstring)
@@ -201,12 +184,12 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre
201
184
  ast.fix_missing_locations(ingredientsFunction.astFunctionDef)
202
185
  return ingredientsFunction
203
186
 
204
- def makeJobNumba(job: RecipeJobTheorem2Numba, spices: SpicesJobNumba) -> None:
187
+ def makeJobNumba(job: RecipeJobTheorem2, spices: SpicesJobNumba) -> None:
205
188
  """Generate an optimized Numba-compiled computation module for map folding calculations.
206
189
 
207
190
  (AI generated docstring)
208
191
 
209
- This function orchestrates the complete code transformation pipeline to convert
192
+ This function orchestrates the complete code transformation assembly line to convert
210
193
  a generic map folding algorithm into a highly optimized, specialized computation
211
194
  module. The transformation process includes:
212
195
 
@@ -234,10 +217,6 @@ def makeJobNumba(job: RecipeJobTheorem2Numba, spices: SpicesJobNumba) -> None:
234
217
  astFunctionDef: ast.FunctionDef = raiseIfNone(extractFunctionDef(job.source_astModule, job.countCallable))
235
218
  ingredientsCount: IngredientsFunction = IngredientsFunction(astFunctionDef, LedgerOfImports())
236
219
 
237
- # Remove `foldGroups` and any other unused statements, so you can dynamically determine which variables are not used
238
- # NodeChanger[ast.Name, None](Be.Assign.targetsIs(lambda list_expr: any(IfThis.isSubscriptIdentifier('foldGroups')(node) for node in list_expr)) , Then.removeIt).visit(ingredientsCount.astFunctionDef) # noqa: ERA001
239
-
240
- # replace identifiers with static values with their values, so you can dynamically determine which variables are not used
241
220
  listIdentifiersStaticValues: list[str] = listIdentifiersStaticValuesHARDCODED
242
221
  for identifier in listIdentifiersStaticValues:
243
222
  findThis: Callable[[ast.AST], TypeIs[ast.Name] | bool] = IfThis.isNameIdentifier(identifier)
@@ -308,7 +287,7 @@ if __name__ == '__main__':
308
287
  )
309
288
  ingredientsModule.appendPrologue(statement=statement)
310
289
 
311
- ingredientsCount.imports.removeImportFromModule('mapFolding.theSSOT')
290
+ ingredientsCount.imports.removeImportFromModule('mapFolding.dataBaskets')
312
291
 
313
292
  listNumPyTypeConfigs = [
314
293
  DatatypeConfig(fml='Array1DLeavesTotal', Z0Z_module='numpy', Z0Z_type_name='uint8', Z0Z_asname='Array1DLeavesTotal'),
@@ -347,15 +326,11 @@ if __name__ == '__main__':
347
326
  """
348
327
 
349
328
  if __name__ == '__main__':
350
- mapShape = (2,4)
351
- state = MapFoldingState(mapShape)
352
- state = initializeGroupsOfFolds(state)
353
- # foldsTotalEstimated = getFoldsTotalKnown(state.mapShape) // state.leavesTotal # noqa: ERA001
354
- # foldsTotalEstimated = dictionaryEstimates[state.mapShape] // state.leavesTotal # noqa: ERA001
355
- foldsTotalEstimated = 0
329
+ state = initializeGroupsOfFolds(MapFoldingState((2,4)))
356
330
  pathModule = PurePosixPath(packageSettings.pathPackage, 'jobs')
357
331
  pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(state.mapShape, pathModule))
358
- aJob = RecipeJobTheorem2Numba(state, foldsTotalEstimated, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal)
332
+ aJob = RecipeJobTheorem2(state, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal)
359
333
  spices = SpicesJobNumba(useNumbaProgressBar=False, parametersNumba=parametersNumbaLight)
360
- # spices = SpicesJobNumba() # noqa: ERA001
361
334
  makeJobNumba(aJob, spices)
335
+
336
+ # TODO Improve this module with lessons learned in `makeJobTheorem2codon`.
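For reference, the renamed recipe is driven exactly as the `__main__` block in the final hunk shows; a consolidated sketch of that call sequence, with the imports gathered from the earlier hunks of this module (the module that defines `makeJobNumba` itself is not named in this diff):

    from mapFolding import getPathFilenameFoldsTotal, MapFoldingState, packageSettings
    from mapFolding.someAssemblyRequired.RecipeJob import RecipeJobTheorem2  # formerly RecipeJobTheorem2Numba
    from mapFolding.someAssemblyRequired.toolkitNumba import parametersNumbaLight, SpicesJobNumba
    from mapFolding.syntheticModules.initializeCount import initializeGroupsOfFolds
    from pathlib import PurePosixPath

    state = initializeGroupsOfFolds(MapFoldingState((2, 4)))
    pathModule = PurePosixPath(packageSettings.pathPackage, 'jobs')
    pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(state.mapShape, pathModule))
    # foldsTotalEstimated is no longer passed at the call site; RecipeJobTheorem2 defaults it to 0.
    aJob = RecipeJobTheorem2(state, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal)
    spices = SpicesJobNumba(useNumbaProgressBar=False, parametersNumba=parametersNumbaLight)
    # makeJobNumba(aJob, spices) then writes the specialized module, as in the hunk above.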