mapFolding 0.12.2__py3-none-any.whl → 0.12.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -380,7 +380,7 @@ def parseDimensions(dimensions: Sequence[int], parameterName: str = 'unnamed par
380
380
  def setCPUlimit(CPUlimit: bool | float | int | None) -> int:
381
381
  # if not (CPUlimit is None or isinstance(CPUlimit, (bool, int, float))):
382
382
  # CPUlimit = oopsieKwargsie(CPUlimit)
383
- # concurrencyLimit = defineConcurrencyLimit(CPUlimit)
383
+ # concurrencyLimit = defineConcurrencyLimit(limit=CPUlimit)
384
384
  # numba.set_num_threads(concurrencyLimit)
385
385
  concurrencyLimitHARDCODED = 1
386
386
  concurrencyLimit = concurrencyLimitHARDCODED
@@ -31,9 +31,8 @@ from ast import Module
31
31
  from astToolkit import identifierDotAttribute, parseLogicalPath2astModule
32
32
  from mapFolding import (
33
33
  DatatypeElephino as TheDatatypeElephino, DatatypeFoldsTotal as TheDatatypeFoldsTotal,
34
- DatatypeLeavesTotal as TheDatatypeLeavesTotal, getPathFilenameFoldsTotal, getPathRootJobDEFAULT,
35
- MapFoldingState, packageSettings,
36
- )
34
+ DatatypeLeavesTotal as TheDatatypeLeavesTotal, getPathFilenameFoldsTotal, getPathRootJobDEFAULT, MapFoldingState,
35
+ packageSettings)
37
36
  from mapFolding.someAssemblyRequired import dataclassInstanceIdentifierDEFAULT, ShatteredDataclass
38
37
  from mapFolding.someAssemblyRequired.transformationTools import shatter_dataclassesDOTdataclass
39
38
  from pathlib import Path, PurePosixPath
@@ -60,59 +59,58 @@ class RecipeJobTheorem2Numba:
60
59
  and target configuration (where to write the optimized module), along with the
61
60
  computational state that provides concrete values for the transformation process.
62
61
 
63
- Attributes:
64
- state: The map folding computation state containing dimensions and initial values.
65
- foldsTotalEstimated: Estimated total number of folds for progress tracking (0).
66
- shatteredDataclass: Deconstructed dataclass metadata for code transformation.
67
- source_astModule: Parsed AST of the source module containing the generic algorithm.
68
- sourceCountCallable: Name of the counting function to extract ('count').
69
- sourceLogicalPathModuleDataclass: Logical path to the dataclass module.
70
- sourceDataclassIdentifier: Name of the source dataclass ('MapFoldingState').
71
- sourceDataclassInstance: Instance identifier for the dataclass.
72
- sourcePathPackage: Path to the source package.
73
- sourcePackageIdentifier: Name of the source package.
74
- pathPackage: Override path for the target package (None).
75
- pathModule: Override path for the target module directory.
76
- fileExtension: File extension for generated modules.
77
- pathFilenameFoldsTotal: Path for writing fold count results.
78
- packageIdentifier: Target package identifier (None).
79
- logicalPathRoot: Logical path root corresponding to filesystem directory.
80
- moduleIdentifier: Target module identifier.
81
- countCallable: Name of the counting function in generated module.
82
- dataclassIdentifier: Target dataclass identifier.
83
- dataclassInstance: Target dataclass instance identifier.
84
- logicalPathModuleDataclass: Logical path to target dataclass module.
85
- DatatypeFoldsTotal: Type alias for fold count datatype.
86
- DatatypeElephino: Type alias for intermediate computation datatype.
87
- DatatypeLeavesTotal: Type alias for leaf count datatype.
62
+ Attributes
63
+ ----------
64
+ state: The map folding computation state containing dimensions and initial values.
65
+ foldsTotalEstimated: Estimated total number of folds for progress tracking (0).
66
+ shatteredDataclass: Deconstructed dataclass metadata for code transformation.
67
+ source_astModule: Parsed AST of the source module containing the generic algorithm.
68
+ sourceCountCallable: Name of the counting function to extract ('count').
69
+ sourceLogicalPathModuleDataclass: Logical path to the dataclass module.
70
+ sourceDataclassIdentifier: Name of the source dataclass ('MapFoldingState').
71
+ sourceDataclassInstance: Instance identifier for the dataclass.
72
+ sourcePathPackage: Path to the source package.
73
+ sourcePackageIdentifier: Name of the source package.
74
+ pathPackage: Override path for the target package (None).
75
+ pathModule: Override path for the target module directory.
76
+ fileExtension: File extension for generated modules.
77
+ pathFilenameFoldsTotal: Path for writing fold count results.
78
+ packageIdentifier: Target package identifier (None).
79
+ logicalPathRoot: Logical path root corresponding to filesystem directory.
80
+ moduleIdentifier: Target module identifier.
81
+ countCallable: Name of the counting function in generated module.
82
+ dataclassIdentifier: Target dataclass identifier.
83
+ dataclassInstance: Target dataclass instance identifier.
84
+ logicalPathModuleDataclass: Logical path to target dataclass module.
85
+ DatatypeFoldsTotal: Type alias for fold count datatype.
86
+ DatatypeElephino: Type alias for intermediate computation datatype.
87
+ DatatypeLeavesTotal: Type alias for leaf count datatype.
88
88
  """
89
+
89
90
  state: MapFoldingState
90
91
  # TODO create function to calculate `foldsTotalEstimated`
91
92
  foldsTotalEstimated: int = 0
92
93
  shatteredDataclass: ShatteredDataclass = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
93
94
 
94
- # ========================================
95
- # Source
96
- source_astModule: Module = parseLogicalPath2astModule('mapFolding.syntheticModules.theorem2Numba')
95
+ # Source -----------------------------------------
96
+ source_astModule: Module = parseLogicalPath2astModule('mapFolding.syntheticModules.theorem2Numba') # noqa: RUF009
97
97
  sourceCountCallable: str = 'count'
98
98
 
99
99
  sourceLogicalPathModuleDataclass: identifierDotAttribute = 'mapFolding.dataBaskets'
100
100
  sourceDataclassIdentifier: str = 'MapFoldingState'
101
101
  sourceDataclassInstance: str = dataclassInstanceIdentifierDEFAULT
102
102
 
103
- sourcePathPackage: PurePosixPath | None = PurePosixPath(packageSettings.pathPackage)
104
- sourcePackageIdentifier: str | None = packageSettings.packageName
103
+ sourcePathPackage: PurePosixPath | None = PurePosixPath(packageSettings.pathPackage) # noqa: RUF009
104
+ sourcePackageIdentifier: str | None = packageSettings.identifierPackage
105
105
 
106
- # ========================================
107
- # Filesystem (names of physical objects)
106
+ # Filesystem, names of physical objects ------------------------------------------
108
107
  pathPackage: PurePosixPath | None = None
109
- pathModule: PurePosixPath | None = PurePosixPath(getPathRootJobDEFAULT())
108
+ pathModule: PurePosixPath | None = PurePosixPath(getPathRootJobDEFAULT()) # noqa: RUF009
110
109
  """ `pathModule` will override `pathPackage` and `logicalPathRoot`."""
111
110
  fileExtension: str = packageSettings.fileExtension
112
111
  pathFilenameFoldsTotal: PurePosixPath = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
113
112
 
114
- # ========================================
115
- # Logical identifiers (as opposed to physical identifiers)
113
+ # Logical identifiers, as opposed to physical identifiers ------------------------
116
114
  packageIdentifier: str | None = None
117
115
  logicalPathRoot: identifierDotAttribute | None = None
118
116
  """ `logicalPathRoot` likely corresponds to a physical filesystem directory."""
@@ -122,8 +120,7 @@ class RecipeJobTheorem2Numba:
122
120
  dataclassInstance: str | None = sourceDataclassInstance
123
121
  logicalPathModuleDataclass: identifierDotAttribute | None = sourceLogicalPathModuleDataclass
124
122
 
125
- # ========================================
126
- # Datatypes
123
+ # Datatypes ------------------------------------------
127
124
  DatatypeFoldsTotal: TypeAlias = TheDatatypeFoldsTotal
128
125
  DatatypeElephino: TypeAlias = TheDatatypeElephino
129
126
  DatatypeLeavesTotal: TypeAlias = TheDatatypeLeavesTotal
@@ -136,18 +133,28 @@ class RecipeJobTheorem2Numba:
136
133
  ) -> PurePosixPath:
137
134
  """Construct a complete file path from component parts.
138
135
 
136
+ (AI generated docstring)
137
+
139
138
  This helper method builds filesystem paths by combining a root directory,
140
139
  optional subdirectory structure, filename stem, and file extension. It provides
141
140
  sensible defaults for missing components based on the recipe configuration.
142
141
 
143
- Parameters:
144
- pathRoot: Base directory path. Defaults to package path or current directory.
145
- logicalPathINFIX: Dot-separated path segments to insert between root and filename.
146
- filenameStem: Base filename without extension. Defaults to module identifier.
147
- fileExtension: File extension including dot. Defaults to configured extension.
142
+ Parameters
143
+ ----------
144
+ pathRoot : PurePosixPath | None = None
145
+ Base directory path. Defaults to package path or current directory.
146
+ logicalPathINFIX : identifierDotAttribute | None = None
147
+ Dot-separated path segments to insert between root and filename.
148
+ filenameStem : str | None = None
149
+ Base filename without extension. Defaults to module identifier.
150
+ fileExtension : str | None = None
151
+ File extension including dot. Defaults to configured extension.
152
+
153
+ Returns
154
+ -------
155
+ pathFilename : PurePosixPath
156
+ Complete file path as a `PurePosixPath` object.
148
157
 
149
- Returns:
150
- Complete file path as a PurePosixPath object.
151
158
  """
152
159
  if pathRoot is None:
153
160
  pathRoot = self.pathPackage or PurePosixPath(Path.cwd())
@@ -165,12 +172,17 @@ class RecipeJobTheorem2Numba:
165
172
  def pathFilenameModule(self) -> PurePosixPath:
166
173
  """Generate the complete path and filename for the output module.
167
174
 
175
+ (AI generated docstring)
176
+
168
177
  This property computes the target location where the generated computation
169
- module will be written. It respects the pathModule override if specified,
178
+ module will be written. It respects the `pathModule` override if specified,
170
179
  otherwise constructs the path using the default package structure.
171
180
 
172
- Returns:
181
+ Returns
182
+ -------
183
+ pathFilename : PurePosixPath
173
184
  Complete path to the target module file.
185
+
174
186
  """
175
187
  if self.pathModule is None:
176
188
  return self._makePathFilename()
@@ -180,13 +192,16 @@ class RecipeJobTheorem2Numba:
180
192
  def __post_init__(self) -> None:
181
193
  """Initialize computed fields and validate configuration after dataclass creation.
182
194
 
183
- This method performs post-initialization setup including:
184
- 1. Deriving module identifier from map shape if not explicitly provided
185
- 2. Setting default paths for fold total output files
186
- 3. Creating shattered dataclass metadata for code transformations
195
+ (AI generated docstring)
196
+
197
+ This method performs post-initialization setup including deriving module
198
+ identifier from map shape if not explicitly provided, setting default paths
199
+ for fold total output files, and creating shattered dataclass metadata for
200
+ code transformations.
187
201
 
188
202
  The initialization ensures all computed fields are properly set based on
189
203
  the provided configuration and sensible defaults.
204
+
190
205
  """
191
206
  pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(self.state.mapShape))
192
207
 
@@ -1,26 +1,40 @@
1
1
  """
2
2
  Map folding AST transformation system: Comprehensive framework for converting dataclass-based algorithms to optimized implementations.
3
3
 
4
- This subpackage implements a sophisticated Abstract Syntax Tree (AST) transformation system specifically designed to convert high-level dataclass-based map folding algorithms into highly optimized, Numba-compatible implementations. The transformation system addresses a fundamental challenge in high-performance scientific computing: bridging the gap between maintainable, object-oriented algorithm implementations and the performance requirements of computationally intensive mathematical research.
4
+ This subpackage implements a sophisticated Abstract Syntax Tree (AST) transformation system specifically designed to convert
5
+ high-level dataclass-based map folding algorithms into highly optimized, Numba-compatible implementations. The transformation
6
+ system addresses a fundamental challenge in high-performance scientific computing: bridging the gap between maintainable,
7
+ object-oriented algorithm implementations and the performance requirements of computationally intensive mathematical research.
5
8
 
6
- The map folding problem domain involves complex combinatorial calculations that can require hours or days to complete for specific dimensional configurations. While dataclass-based implementations provide clean, maintainable interfaces for managing complex mathematical state, these objects cannot be directly processed by Numba's just-in-time compiler, which excels at optimizing operations on primitive values and tuples. This subpackage resolves this architectural tension through systematic AST manipulation that preserves algorithmic correctness while enabling dramatic performance improvements.
9
+ The map folding problem domain involves complex combinatorial calculations that can require hours or days to complete for specific
10
+ dimensional configurations. While dataclass-based implementations provide clean, maintainable interfaces for managing complex
11
+ mathematical state, these objects cannot be directly processed by Numba's just-in-time compiler, which excels at optimizing
12
+ operations on primitive values and tuples. This subpackage resolves this architectural tension through systematic AST manipulation
13
+ that preserves algorithmic correctness while enabling dramatic performance improvements.
7
14
 
8
15
  ## System Architecture
9
16
 
10
- The transformation system operates through a carefully orchestrated sequence of specialized modules, each contributing essential capabilities to the complete transformation process:
17
+ The transformation system operates through a carefully orchestrated sequence of specialized modules, each contributing essential
18
+ capabilities to the complete transformation process:
11
19
 
12
20
  ### Foundation Layer: Pattern Recognition and Structural Analysis
13
- - `_toolIfThis`: Extended predicate functions for identifying specific code patterns in AST nodes, particularly conditional expressions and control flow structures essential to map folding computations
14
- - `_toolkitContainers`: Dataclass decomposition containers that extract individual fields, type annotations, and reconstruction logic from dataclass definitions into manipulatable AST components
21
+ - `_toolIfThis`: Extended predicate functions for identifying specific code patterns in AST nodes, particularly conditional
22
+ expressions and control flow structures essential to map folding computations
23
+ - `_toolkitContainers`: Dataclass decomposition containers that extract individual fields, type annotations, and reconstruction
24
+ logic from dataclass definitions into manipulatable AST components
15
25
 
16
26
  ### Operational Core: Transformation Implementation
17
- - `transformationTools`: Core functions executing dataclass decomposition, function signature transformation, and calling convention adaptation that convert dataclass-accepting functions into primitive-parameter equivalents
18
- - `toolkitNumba`: Numba integration tools providing just-in-time compilation optimization with configurable performance parameters and strategic compiler directive application
27
+ - `transformationTools`: Core functions executing dataclass decomposition, function signature transformation, and calling
28
+ convention adaptation that convert dataclass-accepting functions into primitive-parameter equivalents
29
+ - `toolkitNumba`: Numba integration tools providing just-in-time compilation optimization with configurable performance parameters
30
+ and strategic compiler directive application
19
31
 
20
32
  ### Configuration and Orchestration
21
33
  - `infoBooth`: Configuration constants, computational complexity estimates, and default identifiers for systematic module generation and optimization decision-making
22
- - `RecipeJob`: Configuration management dataclasses that coordinate transformation parameters across multiple stages while maintaining consistency between source algorithms and target optimizations
23
- - `makeAllModules`: Comprehensive transformation orchestration tools that execute complete transformation processes for diverse computational strategies and performance characteristics
34
+ - `RecipeJob`: Configuration management dataclasses that coordinate transformation parameters across multiple stages while
35
+ maintaining consistency between source algorithms and target optimizations
36
+ - `makeAllModules`: Comprehensive transformation orchestration tools that execute complete transformation processes for diverse
37
+ computational strategies and performance characteristics
24
38
  - `makeJobTheorem2Numba`: Specialized job generation implementing the complete transformation sequence to produce standalone, highly optimized computation modules
25
39
 
26
40
  ### Utility Extensions
@@ -30,21 +44,32 @@ The transformation system operates through a carefully orchestrated sequence of
30
44
 
31
45
  The complete transformation follows a systematic three-stage pattern:
32
46
 
33
- 1. **Analysis and Decomposition**: Pattern recognition identifies dataclass structures and dependencies, followed by decomposition into constituent AST components including field definitions, type annotations, and initialization patterns.
47
+ 1. **Analysis and Decomposition**: Pattern recognition identifies dataclass structures and dependencies, followed by decomposition
48
+ into constituent AST components including field definitions, type annotations, and initialization patterns.
34
49
 
35
- 2. **Function Optimization**: Core transformations convert functions accepting dataclass parameters into functions accepting individual primitive parameters, with systematic updates to signatures, return types, and calling conventions.
50
+ 2. **Function Optimization**: Core transformations convert functions accepting dataclass parameters into functions accepting
51
+ individual primitive parameters, with systematic updates to signatures, return types, and calling conventions.
36
52
 
37
- 3. **Compilation Integration**: Numba decorators with carefully configured optimization parameters are applied to transformed functions, enabling aggressive just-in-time compilation with performance characteristics suitable for large-scale computational research.
53
+ 3. **Compilation Integration**: Numba decorators with carefully configured optimization parameters are applied to transformed
54
+ functions, enabling aggressive just-in-time compilation with performance characteristics suitable for large-scale computational
55
+ research.
38
56
 
39
57
  ## Generated Module Characteristics
40
58
 
41
- The transformation system produces standalone Python modules with embedded constants replacing parameterized values, eliminated dead code paths, optimized data structures, Numba compilation directives, progress feedback for long-running calculations, and consistent naming conventions with systematic filesystem organization. These modules maintain mathematical correctness while providing significant performance improvements essential to map folding research computational demands.
59
+ The transformation system produces standalone Python modules with embedded constants replacing parameterized values, eliminated
60
+ dead code paths, optimized data structures, Numba compilation directives, progress feedback for long-running calculations, and
61
+ consistent naming conventions with systematic filesystem organization. These modules maintain mathematical correctness while
62
+ providing significant performance improvements essential to map folding research computational demands.
42
63
 
43
64
  ## Usage Guidance
44
65
 
45
- Begin exploration with `infoBooth` for understanding configuration options and complexity estimates. Proceed to `transformationTools` for core transformation capabilities, then examine `RecipeJob` for orchestration patterns. Advanced users developing custom transformations should study `_toolIfThis` and `_toolkitContainers` for foundational pattern recognition and structural manipulation capabilities.
66
+ Begin exploration with `infoBooth` for understanding configuration options and complexity estimates. Proceed to
67
+ `transformationTools` for core transformation capabilities, then examine `RecipeJob` for orchestration patterns. Advanced users
68
+ developing custom transformations should study `_toolIfThis` and `_toolkitContainers` for foundational pattern recognition and
69
+ structural manipulation capabilities.
46
70
 
47
- The transformation system represents the culmination of systematic AST manipulation research, enabling previously intractable calculations through the strategic application of compiler optimization techniques to abstract mathematical algorithms.
71
+ The transformation system represents the culmination of systematic AST manipulation research, enabling previously intractable
72
+ calculations through the strategic application of compiler optimization techniques to abstract mathematical algorithms.
48
73
  """
49
74
 
50
75
  from mapFolding.someAssemblyRequired.infoBooth import (
@@ -24,97 +24,125 @@ Classes:
24
24
  algorithm transformations.
25
25
  """
26
26
 
27
- from astToolkit import Be, DOT, IfThis as astToolkit_IfThis
27
+ from astToolkit import Be, IfThis as astToolkit_IfThis
28
28
  from collections.abc import Callable
29
- from typing import TypeGuard
29
+ from typing_extensions import TypeIs
30
30
  import ast
31
31
 
32
32
  class IfThis(astToolkit_IfThis):
33
- """
34
- Provide predicate functions for matching and filtering AST nodes based on various criteria.
33
+ """Provide predicate functions for matching and filtering AST nodes based on various criteria.
34
+
35
+ (AI generated docstring)
35
36
 
36
- The IfThis class contains static methods that generate predicate functions used to test whether AST nodes match
37
- specific criteria. These predicates can be used with NodeChanger and NodeTourist to identify and process specific
37
+ The `IfThis` `class` contains static methods that generate predicate functions used to test whether AST nodes match
38
+ specific criteria. These predicates can be used with `NodeChanger` and `NodeTourist` to identify and process specific
38
39
  patterns in the AST.
39
40
 
40
- The class provides predicates for matching various node types, attributes, identifiers, and structural patterns,
41
+ The `class` provides predicates for matching various node types, attributes, identifiers, and structural patterns,
41
42
  enabling precise targeting of AST elements for analysis or transformation.
43
+
42
44
  """
43
45
 
44
46
  @staticmethod
45
- def isAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.Compare] | bool]:
47
+ def isAttributeNamespaceIdentifierLessThanOrEqual0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeIs[ast.Compare]]:
48
+ """Generate a predicate that matches comparison expressions testing if a namespaced attribute is less than or equal to 0.
49
+
50
+ (AI generated docstring)
51
+
52
+ This function creates a predicate that identifies AST nodes representing comparisons
53
+ of the form `namespace.identifier <= 0`. It's used to identify conditional
54
+ expressions that test non-positive values of counting variables or similar constructs.
55
+
56
+ Parameters
57
+ ----------
58
+ namespace : str
59
+ The namespace or object name containing the attribute.
60
+ identifier : str
61
+ The attribute name to test.
62
+
63
+ Returns
64
+ -------
65
+ predicate : Callable[[ast.AST], TypeIs[ast.Compare]]
66
+ A predicate function that returns `True` for `Compare` nodes matching the pattern.
67
+
46
68
  """
47
- Generate a predicate that matches comparison expressions testing if a namespaced attribute is greater than 0.
69
+ return lambda node: (Be.Compare.leftIs(IfThis.isAttributeNamespaceIdentifier(namespace, identifier))(node)
70
+ and Be.Compare.opsIs(lambda at: Be.LtE(at[0]))(node)
71
+ )
72
+
73
+ @staticmethod
74
+ def isAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeIs[ast.Compare] | bool]:
75
+ """Generate a predicate that matches comparison expressions testing if a namespaced attribute is greater than 0.
76
+
77
+ (AI generated docstring)
48
78
 
49
79
  This function creates a predicate that identifies AST nodes representing comparisons
50
80
  of the form `namespace.identifier > 0`. It's commonly used to identify conditional
51
81
  expressions that test positive values of counting variables or similar constructs.
52
82
 
53
- Parameters:
54
- namespace: The namespace or object name containing the attribute
55
- identifier: The attribute name to test
83
+ Parameters
84
+ ----------
85
+ namespace : str
86
+ The namespace or object name containing the attribute.
87
+ identifier : str
88
+ The attribute name to test.
89
+
90
+ Returns
91
+ -------
92
+ predicate : Callable[[ast.AST], TypeIs[ast.Compare]]
93
+ A predicate function that returns `True` for `Compare` nodes matching the pattern.
56
94
 
57
- Returns:
58
- A predicate function that returns True for Compare nodes matching the pattern
59
95
  """
60
- return lambda node: (Be.Compare(node)
61
- and IfThis.isAttributeNamespaceIdentifier(namespace, identifier)(DOT.left(node))
96
+ return lambda node: (Be.Compare.leftIs(IfThis.isAttributeNamespaceIdentifier(namespace, identifier))(node)
62
97
  and Be.Gt(node.ops[0])
63
98
  and IfThis.isConstant_value(0)(node.comparators[0]))
64
99
 
65
100
  @staticmethod
66
- def isIfAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.If] | bool]:
67
- """
68
- Generate a predicate that matches If statements testing if a namespaced attribute is greater than 0.
101
+ def isIfAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeIs[ast.If]]:
102
+ """Generate a predicate that matches If statements testing if a namespaced attribute is greater than 0.
103
+
104
+ (AI generated docstring)
69
105
 
70
106
  This function creates a predicate that identifies AST nodes representing conditional
71
107
  statements of the form `if namespace.identifier > 0:`. It's used to find control
72
108
  flow structures that depend on positive values of specific attributes.
73
109
 
74
- Parameters:
75
- namespace: The namespace or object name containing the attribute
76
- identifier: The attribute name to test
110
+ Parameters
111
+ ----------
112
+ namespace : str
113
+ The namespace or object name containing the attribute.
114
+ identifier : str
115
+ The attribute name to test.
116
+
117
+ Returns
118
+ -------
119
+ predicate : Callable[[ast.AST], TypeIs[ast.If]]
120
+ A predicate function that returns `True` for `If` nodes with the specified test condition.
77
121
 
78
- Returns:
79
- A predicate function that returns True for If nodes with the specified test condition
80
122
  """
81
- return lambda node: (Be.If(node)
82
- and IfThis.isAttributeNamespaceIdentifierGreaterThan0(namespace, identifier)(DOT.test(node)))
123
+ return Be.If.testIs(IfThis.isAttributeNamespaceIdentifierGreaterThan0(namespace, identifier))
83
124
 
84
125
  @staticmethod
85
- def isWhileAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.While] | bool]:
86
- """
87
- Generate a predicate that matches While loops testing if a namespaced attribute is greater than 0.
126
+ def isWhileAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeIs[ast.While]]:
127
+ """Generate a predicate that matches While loops testing if a namespaced attribute is greater than 0.
128
+
129
+ (AI generated docstring)
88
130
 
89
131
  This function creates a predicate that identifies AST nodes representing loop
90
132
  statements of the form `while namespace.identifier > 0:`. It's used to find
91
133
  iteration constructs that continue while specific attributes remain positive.
92
134
 
93
- Parameters:
94
- namespace: The namespace or object name containing the attribute
95
- identifier: The attribute name to test
96
-
97
- Returns:
98
- A predicate function that returns True for While nodes with the specified test condition
99
- """
100
- return lambda node: (Be.While(node)
101
- and IfThis.isAttributeNamespaceIdentifierGreaterThan0(namespace, identifier)(DOT.test(node)))
102
- @staticmethod
103
- def isAttributeNamespaceIdentifierLessThanOrEqual0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.Compare] | bool]:
104
- """
105
- Generate a predicate that matches comparison expressions testing if a namespaced attribute is less than or equal to 0.
106
-
107
- This function creates a predicate that identifies AST nodes representing comparisons
108
- of the form `namespace.identifier <= 0`. It's used to identify conditional
109
- expressions that test non-positive values of counting variables or similar constructs.
135
+ Parameters
136
+ ----------
137
+ namespace : str
138
+ The namespace or object name containing the attribute.
139
+ identifier : str
140
+ The attribute name to test.
110
141
 
111
- Parameters:
112
- namespace: The namespace or object name containing the attribute
113
- identifier: The attribute name to test
142
+ Returns
143
+ -------
144
+ predicate : Callable[[ast.AST], TypeIs[ast.While]]
145
+ A predicate function that returns `True` for `While` nodes with the specified test condition.
114
146
 
115
- Returns:
116
- A predicate function that returns True for Compare nodes matching the pattern
117
147
  """
118
- return lambda node: (Be.Compare(node)
119
- and IfThis.isAttributeNamespaceIdentifier(namespace, identifier)(DOT.left(node))
120
- and Be.LtE(node.ops[0]))
148
+ return Be.While.testIs(IfThis.isAttributeNamespaceIdentifierGreaterThan0(namespace, identifier))
@@ -25,15 +25,12 @@ to low-level optimized functions while maintaining semantic equivalence and type
25
25
  the compilation process.
26
26
  """
27
27
 
28
- from astToolkit import (
29
- ClassIsAndAttribute, DOT, hasDOTtarget_NameOrAttributeOrSubscript, identifierDotAttribute,
30
- LedgerOfImports, Make, NodeTourist, Then,
31
- )
28
+ from astToolkit import Be, DOT, identifierDotAttribute, LedgerOfImports, Make, NodeTourist, Then
32
29
  from collections.abc import Callable
33
30
  from copy import deepcopy
31
+ from hunterMakesPy import raiseIfNone
34
32
  from mapFolding.someAssemblyRequired import IfThis
35
33
  from typing import Any, cast
36
- from Z0Z_tools import raiseIfNone
37
34
  import ast
38
35
  import dataclasses
39
36
 
@@ -43,8 +40,7 @@ dummyTuple = Make.Tuple([Make.Name("dummyElement")])
43
40
 
44
41
  @dataclasses.dataclass
45
42
  class ShatteredDataclass:
46
- """
47
- Container for decomposed dataclass components organized as AST nodes for code generation.
43
+ """Container for decomposed dataclass components organized as AST nodes for code generation.
48
44
 
49
45
  This class holds the decomposed representation of a dataclass, breaking it down into individual
50
46
  AST components that can be manipulated and recombined for different code generation contexts.
@@ -59,6 +55,7 @@ class ShatteredDataclass:
59
55
  dataclass fields into individual parameters) and result reconstruction (packing individual
60
56
  values back into dataclass instances).
61
57
  """
58
+
62
59
  countingVariableAnnotation: ast.expr
63
60
  """Type annotation for the counting variable extracted from the dataclass."""
64
61
 
@@ -194,11 +191,16 @@ class DeReConstructField2ast:
194
191
  scalar types, and complex type annotations, creating appropriate constructor
195
192
  calls and import requirements.
196
193
 
197
- Parameters:
198
- dataclassesDOTdataclassLogicalPathModule: Module path containing the dataclass
199
- dataclassClassDef: AST class definition for type annotation extraction
200
- dataclassesDOTdataclassInstanceIdentifier: Instance variable name for attribute access
201
- field: Dataclass field to transform
194
+ Parameters
195
+ ----------
196
+ dataclassesDOTdataclassLogicalPathModule : identifierDotAttribute
197
+ Module path containing the dataclass
198
+ dataclassClassDef : ast.ClassDef
199
+ AST class definition for type annotation extraction
200
+ dataclassesDOTdataclassInstanceIdentifier : str
201
+ Instance variable name for attribute access
202
+ field : dataclasses.Field[Any]
203
+ Dataclass field to transform
202
204
  """
203
205
  self.compare = field.compare
204
206
  self.default = field.default if field.default is not dataclasses.MISSING else None
@@ -215,9 +217,10 @@ class DeReConstructField2ast:
215
217
  self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
216
218
  self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstanceIdentifier), self.name)
217
219
 
218
- findThis=ClassIsAndAttribute.targetIs(ast.AnnAssign, IfThis.isNameIdentifier(self.name))
219
- doThat=cast(Callable[[hasDOTtarget_NameOrAttributeOrSubscript], ast.expr], Then.extractIt(DOT.annotation))
220
- self.astAnnotation = raiseIfNone(NodeTourist(findThis, doThat).captureLastMatch(dataclassClassDef))
220
+ self.astAnnotation = raiseIfNone(NodeTourist[ast.AnnAssign, ast.Name | None](
221
+ findThis = Be.AnnAssign.targetIs(IfThis.isNameIdentifier(self.name))
222
+ , doThat = Then.extractIt(cast("Callable[[ast.AnnAssign], ast.Name | None]", DOT.annotation))
223
+ ).captureLastMatch(dataclassClassDef))
221
224
 
222
225
  self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)
223
226
 
@@ -228,7 +231,7 @@ class DeReConstructField2ast:
228
231
  self.ledger.addImportFrom_asStr(moduleWithLogicalPath, annotationType)
229
232
  self.ledger.addImportFrom_asStr(moduleWithLogicalPath, 'dtype')
230
233
  axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Name('uint8'))
231
- dtype_asnameName: ast.Name = cast(ast.Name, self.astAnnotation)
234
+ dtype_asnameName: ast.Name = self.astAnnotation
232
235
  if dtype_asnameName.id == 'Array3D':
233
236
  axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Tuple([Make.Name('uint8'), Make.Name('uint8'), Make.Name('uint8')]))
234
237
  ast_expr = Make.Subscript(Make.Name(annotationType), Make.Tuple([axesSubscript, Make.Subscript(Make.Name('dtype'), dtype_asnameName)]))
@@ -24,34 +24,43 @@ While originally part of a tighter integration with the code generation assembly
24
24
  this module now operates as a standalone utility that can be applied to any module
25
25
  containing Numba-compiled functions.
26
26
  """
27
- from importlib.machinery import ModuleSpec
28
27
  from pathlib import Path
29
- from types import ModuleType
28
+ from typing import TYPE_CHECKING
30
29
  import importlib.util
31
30
  import llvmlite.binding
32
31
 
32
+ if TYPE_CHECKING:
33
+ from importlib.machinery import ModuleSpec
34
+ from types import ModuleType
35
+
33
36
def writeModuleLLVM(pathFilename: Path, identifierCallable: str) -> Path:
	"""Import the generated module directly and get its LLVM IR.

	Parameters
	----------
	pathFilename : Path
		Path to the Python module file containing the Numba-compiled function
	identifierCallable : str
		Name of the function within the module to extract LLVM IR from

	Returns
	-------
	pathFilenameLLVM : Path
		Path to the generated .ll file containing the extracted LLVM IR

	For an example of the output, see reference/jobsCompleted/[2x19]/[2x19].ll,
	which contains the IR for the historically significant 2x19 map calculation.
	"""
	moduleSpec = importlib.util.spec_from_file_location("generatedModule", pathFilename)
	if moduleSpec is None or moduleSpec.loader is None:
		message = f"Could not create module spec or loader for {pathFilename}"
		raise ImportError(message)
	moduleImported = importlib.util.module_from_spec(moduleSpec)
	# Executing the module triggers Numba compilation of the target callable.
	moduleSpec.loader.exec_module(moduleImported)

	# Numba keys compiled IR by argument signature; `()` selects the sole compilation.
	sourceLLVM = vars(moduleImported)[identifierCallable].inspect_llvm()[()]
	moduleRefLLVM = llvmlite.binding.module.parse_assembly(sourceLLVM)
	pathFilenameLLVM = pathFilename.with_suffix(".ll")
	pathFilenameLLVM.write_text(str(moduleRefLLVM))
	return pathFilenameLLVM