mapFolding 0.11.1__py3-none-any.whl → 0.11.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. mapFolding/__init__.py +7 -60
  2. mapFolding/basecamp.py +15 -13
  3. mapFolding/beDRY.py +4 -36
  4. mapFolding/dataBaskets.py +24 -2
  5. mapFolding/datatypes.py +0 -3
  6. mapFolding/{toolboxFilesystem.py → filesystemToolkit.py} +3 -3
  7. mapFolding/oeis.py +3 -5
  8. mapFolding/someAssemblyRequired/RecipeJob.py +8 -116
  9. mapFolding/someAssemblyRequired/Z0Z_makeAllModules.py +492 -0
  10. mapFolding/someAssemblyRequired/__init__.py +5 -31
  11. mapFolding/someAssemblyRequired/_toolIfThis.py +5 -6
  12. mapFolding/someAssemblyRequired/{_toolboxContainers.py → _toolkitContainers.py} +6 -127
  13. mapFolding/someAssemblyRequired/infoBooth.py +70 -0
  14. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +13 -12
  15. mapFolding/someAssemblyRequired/{toolboxNumba.py → toolkitNumba.py} +2 -44
  16. mapFolding/someAssemblyRequired/transformationTools.py +16 -174
  17. mapFolding/syntheticModules/countParallel.py +98 -0
  18. mapFolding/syntheticModules/dataPacking.py +1 -1
  19. mapFolding/theSSOT.py +12 -246
  20. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/METADATA +16 -11
  21. mapfolding-0.11.3.dist-info/RECORD +53 -0
  22. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/WHEEL +1 -1
  23. tests/conftest.py +2 -79
  24. tests/test_computations.py +12 -19
  25. tests/test_filesystem.py +1 -2
  26. tests/test_other.py +1 -1
  27. tests/test_tasks.py +3 -4
  28. mapFolding/someAssemblyRequired/Z0Z_makeSomeModules.py +0 -325
  29. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +0 -314
  30. mapFolding/syntheticModules/numbaCount.py +0 -201
  31. mapFolding/theDao.py +0 -243
  32. mapfolding-0.11.1.dist-info/RECORD +0 -54
  33. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/entry_points.txt +0 -0
  34. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/licenses/LICENSE +0 -0
  35. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/top_level.txt +0 -0
mapFolding/__init__.py CHANGED
@@ -1,49 +1,3 @@
- """
- Map folding enumeration and counting algorithms with advanced optimization capabilities.
-
- This package implements algorithms to count and enumerate the distinct ways a rectangular map can be folded, based on
- the mathematical problem described in Lunnon's 1971 paper. It provides multiple layers of functionality, from high-level
- user interfaces to sophisticated algorithmic optimizations and code transformation tools.
-
- Core modules:
- - basecamp: Public API with simplified interfaces for end users
- - theDao: Core computational algorithm using a functional state-transformation approach
- - beDRY: Core utility functions implementing consistent data handling, validation, and resource management across the
- package's computational assembly-line
- - theSSOT: Single Source of Truth for configuration, types, and state management
- - toolboxFilesystem: Cross-platform file management services for storing and retrieving computation results with robust
- error handling and fallback mechanisms
- - oeis: Interface to the Online Encyclopedia of Integer Sequences for known results
-
- Extended functionality:
- - someAssemblyRequired: Code transformation framework that optimizes the core algorithm through AST manipulation,
- dataclass transformation, and compilation techniques
- - The system converts readable code into high-performance implementations through a systematic analysis and
- transformation assembly line
- - Provides tools to "shatter" complex dataclasses into primitive components, enabling compatibility with Numba and
- other optimization frameworks
- - Creates specialized implementations tailored for specific input parameters
-
- Testing and extension:
- - tests: Comprehensive test suite designed for both verification and extension
- - Provides fixtures and utilities that simplify testing of custom implementations
- - Enables users to validate their own recipes and job configurations with minimal code
- - Offers standardized testing patterns that maintain consistency across the codebase
- - See tests/__init__.py for detailed documentation on extending the test suite
-
- Special directories:
- - .cache/: Stores cached data from external sources like OEIS to improve performance
- - syntheticModules/: Contains dynamically generated, optimized implementations of the core algorithm created by the code
- transformation framework
- - reference/: Historical implementations and educational resources for algorithm exploration
- - reference/jobsCompleted/: Contains successful computations for previously unknown values, including first-ever
- calculations for 2x19 and 2x20 maps (OEIS A001415)
-
- This package balances algorithm readability and understandability with high-performance computation capabilities,
- allowing users to compute map folding totals for larger dimensions than previously feasible while also providing a
- foundation for exploring advanced code transformation techniques.
- """
-
  from typing import Any, TypeAlias
  import sys

@@ -76,34 +30,27 @@ from mapFolding.datatypes import (
  NumPyLeavesTotal as NumPyLeavesTotal,
  )

- from mapFolding.theSSOT import (
- ComputationState as ComputationState,
- raiseIfNoneGitHubIssueNumber3 as raiseIfNoneGitHubIssueNumber3,
- The as The,
- )
-
- from mapFolding.theDao import (
- countInitialize as countInitialize,
- doTheNeedful as doTheNeedful,
- )
+ from mapFolding.theSSOT import PackageSettings as PackageSettings, packageSettings as packageSettings

  from mapFolding.beDRY import (
+ getConnectionGraph as getConnectionGraph,
  getLeavesTotal as getLeavesTotal,
  getTaskDivisions as getTaskDivisions,
- outfitCountFolds as outfitCountFolds,
+ makeDataContainer as makeDataContainer,
  setProcessorLimit as setProcessorLimit,
  validateListDimensions as validateListDimensions,
  )

- from mapFolding.toolboxFilesystem import (
+ from mapFolding.dataBaskets import MapFoldingState as MapFoldingState
+
+ from mapFolding.filesystemToolkit import (
+ getFilenameFoldsTotal as getFilenameFoldsTotal,
  getPathFilenameFoldsTotal as getPathFilenameFoldsTotal,
  getPathRootJobDEFAULT as getPathRootJobDEFAULT,
  saveFoldsTotal as saveFoldsTotal,
  saveFoldsTotalFAILearly as saveFoldsTotalFAILearly,
  )

- from Z0Z_tools import writeStringToHere as writeStringToHere
-
  from mapFolding.basecamp import countFolds as countFolds

  from mapFolding.oeis import (
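For orientation, the re-exports above define the smaller 0.11.3 public surface: `The`, `ComputationState`, and the `theDao` entry points are no longer importable from the package root, while `packageSettings`, `MapFoldingState`, and the `beDRY` helpers now are. A minimal sketch of the new surface (names taken from the import block above; behavior is inferred, not verified here):

# Sketch only: a 2x4 map is small enough to run quickly.
from mapFolding import countFolds, packageSettings, MapFoldingState

print(packageSettings.packageName)   # PackageSettings instance replaces the removed `The` object
state = MapFoldingState((2, 4))      # data basket consumed by the synthetic modules
foldsTotal = countFolds([2, 4])      # high-level entry point keeps its name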
mapFolding/basecamp.py CHANGED
@@ -11,13 +11,11 @@ appropriate algorithm implementation, and optional persistence of results.

  from collections.abc import Sequence
  from mapFolding import (
- ComputationState,
  getPathFilenameFoldsTotal,
- outfitCountFolds,
+ packageSettings,
  saveFoldsTotal,
  saveFoldsTotalFAILearly,
  setProcessorLimit,
- The,
  validateListDimensions,
  )
  from os import PathLike
@@ -104,8 +102,7 @@ def countFolds(listDimensions: Sequence[int] | None = None
  # task division instructions ===============================================

  if computationDivisions:
- # NOTE `The.concurrencyPackage`
- concurrencyLimit: int = setProcessorLimit(CPUlimit, The.concurrencyPackage)
+ concurrencyLimit: int = setProcessorLimit(CPUlimit, packageSettings.concurrencyPackage)
  from mapFolding.beDRY import getLeavesTotal, getTaskDivisions
  leavesTotal: int = getLeavesTotal(mapShape)
  taskDivisions = getTaskDivisions(computationDivisions, concurrencyLimit, leavesTotal)
@@ -163,20 +160,25 @@ def countFolds(listDimensions: Sequence[int] | None = None
  from mapFolding.syntheticModules.initializeCount import initializeGroupsOfFolds
  mapFoldingState = initializeGroupsOfFolds(mapFoldingState)

- from mapFolding.syntheticModules.dataPacking import doTheNeedful
- mapFoldingState = doTheNeedful(mapFoldingState)
+ from mapFolding.syntheticModules.dataPacking import sequential
+ mapFoldingState = sequential(mapFoldingState)

  foldsTotal = mapFoldingState.foldsTotal

- # NOTE treat this as a default?
- # flow based on `The` and `ComputationState` ====================================
+ elif taskDivisions > 1:
+ from mapFolding.dataBaskets import ParallelMapFoldingState
+ parallelMapFoldingState: ParallelMapFoldingState = ParallelMapFoldingState(mapShape, taskDivisions=taskDivisions)
+
+ from mapFolding.syntheticModules.countParallel import doTheNeedful
+ foldsTotal, listStatesParallel = doTheNeedful(parallelMapFoldingState, concurrencyLimit)

  else:
- computationStateInitialized: ComputationState = outfitCountFolds(mapShape, computationDivisions, concurrencyLimit)
- computationStateComplete: ComputationState = The.dispatcher(computationStateInitialized)
+ from mapFolding.dataBaskets import MapFoldingState
+ mapFoldingState: MapFoldingState = MapFoldingState(mapShape)

- computationStateComplete.getFoldsTotal()
- foldsTotal = computationStateComplete.foldsTotal
+ from mapFolding.syntheticModules.daoOfMapFolding import doTheNeedful
+ mapFoldingState = doTheNeedful(mapFoldingState)
+ foldsTotal = mapFoldingState.foldsTotal

  # Follow memorialization instructions ===========================================

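The branches above replace the old `outfitCountFolds` + `The.dispatcher` flow with per-branch state objects. A condensed sketch of the new dispatch (the helper name `dispatchSketch` is illustrative; argument validation, the `dataPacking.sequential` path, and memorialization are omitted):

from mapFolding.dataBaskets import MapFoldingState, ParallelMapFoldingState

def dispatchSketch(mapShape: tuple[int, ...], taskDivisions: int, concurrencyLimit: int):
    if taskDivisions > 1:
        # parallel path: the state carries taskDivisions and countParallel fans the work out
        from mapFolding.syntheticModules.countParallel import doTheNeedful
        state = ParallelMapFoldingState(mapShape, taskDivisions=taskDivisions)
        foldsTotal, listStatesParallel = doTheNeedful(state, concurrencyLimit)
    else:
        # sequential default: daoOfMapFolding consumes and returns a MapFoldingState
        from mapFolding.syntheticModules.daoOfMapFolding import doTheNeedful
        state = doTheNeedful(MapFoldingState(mapShape))
        foldsTotal = state.foldsTotal
    return foldsTotal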
mapFolding/beDRY.py CHANGED
@@ -18,12 +18,13 @@ These utilities form a stable internal API that other modules depend on, particu
  theDao (core algorithm), and the synthetic module generators that produce optimized implementations.
  """
  from collections.abc import Sequence
- from mapFolding import ComputationState, NumPyIntegerType
+ from mapFolding import Array1DElephino, Array1DFoldsTotal, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, NumPyIntegerType
  from numpy import dtype as numpy_dtype, int64 as numpy_int64, ndarray
  from sys import maxsize as sysMaxsize
  from typing import Any
  from Z0Z_tools import defineConcurrencyLimit, intInnit, oopsieKwargsie
  import numpy
+ import dataclasses

  def getLeavesTotal(mapShape: tuple[int, ...]) -> int:
  """
@@ -59,7 +60,6 @@ def getTaskDivisions(computationDivisions: int | str | None, concurrencyLimit: i
  """
  Determines whether to divide the computation into tasks and how many divisions.

-
  Parameters
  ----------
  computationDivisions: None
@@ -208,39 +208,6 @@ def makeDataContainer(shape: int | tuple[int, ...], datatype: type[NumPyIntegerT
  """
  return numpy.zeros(shape, dtype=datatype)

- def outfitCountFolds(mapShape: tuple[int, ...], computationDivisions: int | str | None = None, concurrencyLimit: int = 1) -> ComputationState:
- """
- Initialize a `ComputationState` with validated parameters for map folding calculation.
-
- This function serves as the central initialization point for creating a properly configured `ComputationState`
- object, ensuring consistent calculation of the fundamental parameters (`leavesTotal` and `taskDivisions`) across the
- entire package.
-
- Parameters
- ----------
- mapShape
- A tuple of integers representing the dimensions of the map.
- computationDivisions: None
- Controls how to divide the computation into parallel tasks. I know it is annoying, but please see
- `countFolds` for details, so that you and I both know you have the most accurate information.
- concurrencyLimit: 1
- Maximum number of concurrent processes to use during computation.
-
- Returns
- -------
- computationStateInitialized
- A fully initialized `ComputationState` object that's ready for computation.
-
- Notes
- -----
- This function maintains the Single Source of Truth principle for `leavesTotal` and `taskDivisions` calculation,
- ensuring these values are derived consistently throughout the package.
- """
- leavesTotal = getLeavesTotal(mapShape)
- taskDivisions = getTaskDivisions(computationDivisions, concurrencyLimit, leavesTotal)
- computationStateInitialized = ComputationState(mapShape, leavesTotal, taskDivisions, concurrencyLimit)
- return computationStateInitialized
-
  def setProcessorLimit(CPUlimit: Any | None, concurrencyPackage: str | None = None) -> int:
  """
  Whether and how to limit the CPU usage.
@@ -281,7 +248,8 @@ def setProcessorLimit(CPUlimit: Any | None, concurrencyPackage: str | None = Non

  match concurrencyPackage:
  case 'multiprocessing' | None:
- # When to use multiprocessing.set_start_method https://github.com/hunterhogan/mapFolding/issues/6
+ # When to use multiprocessing.set_start_method
+ # https://github.com/hunterhogan/mapFolding/issues/6
  concurrencyLimit: int = defineConcurrencyLimit(CPUlimit)
  case 'numba':
  from numba import get_num_threads, set_num_threads
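With `outfitCountFolds` removed, there is no longer a single helper that bundles `leavesTotal`, `taskDivisions`, and the concurrency limit; callers compose the remaining `beDRY` helpers themselves, as `countFolds` now does. A hedged sketch of that composition (the division count `2` is arbitrary, and the exact return semantics of `getTaskDivisions` are not shown in this diff):

from mapFolding import getLeavesTotal, getTaskDivisions, setProcessorLimit, packageSettings

mapShape = (2, 5)
concurrencyLimit = setProcessorLimit(None, packageSettings.concurrencyPackage)  # None lets the package pick a limit
leavesTotal = getLeavesTotal(mapShape)                                          # 2 * 5 = 10 leaves
taskDivisions = getTaskDivisions(2, concurrencyLimit, leavesTotal)              # request two task divisions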
mapFolding/dataBaskets.py CHANGED
@@ -1,5 +1,14 @@
- from mapFolding.beDRY import getConnectionGraph, getLeavesTotal, makeDataContainer
- from mapFolding.datatypes import Array3D, Array1DElephino, Array1DLeavesTotal, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal
+ from mapFolding import (
+ Array1DElephino,
+ Array1DLeavesTotal,
+ Array3D,
+ DatatypeElephino,
+ DatatypeFoldsTotal,
+ DatatypeLeavesTotal,
+ getConnectionGraph,
+ getLeavesTotal,
+ makeDataContainer,
+ )
  import dataclasses

  @dataclasses.dataclass
@@ -48,6 +57,19 @@ class MapFoldingState:
  if self.leafAbove is None: self.leafAbove = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafAbove'].metadata['dtype']) # pyright: ignore[reportUnnecessaryComparison]
  if self.leafBelow is None: self.leafBelow = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafBelow'].metadata['dtype']) # pyright: ignore[reportUnnecessaryComparison]

+ @dataclasses.dataclass
+ class ParallelMapFoldingState(MapFoldingState):
+ taskDivisions: DatatypeLeavesTotal = DatatypeLeavesTotal(0)
+ """Number of tasks into which to divide the computation. If the value is greater than `leavesTotal`, the computation will be wrong. Default is `leavesTotal`."""
+
+ taskIndex: DatatypeLeavesTotal = DatatypeLeavesTotal(0)
+ """Index of the current task when using task divisions."""
+
+ def __post_init__(self) -> None:
+ super().__post_init__()
+ if self.taskDivisions == 0:
+ self.taskDivisions = DatatypeLeavesTotal(int(self.leavesTotal))
+
  @dataclasses.dataclass
  class LeafSequenceState(MapFoldingState):
  leafSequence: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # pyright: ignore[reportAssignmentType, reportAttributeAccessIssue, reportUnknownMemberType]
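The `__post_init__` above means a `ParallelMapFoldingState` constructed without an explicit `taskDivisions` falls back to one division per leaf. A small usage sketch (assumes `leavesTotal` is derived from `mapShape` as in `MapFoldingState`; a 2x3 map has 6 leaves):

from mapFolding.dataBaskets import ParallelMapFoldingState

state = ParallelMapFoldingState((2, 3))
print(int(state.taskDivisions), int(state.leavesTotal))   # expected: 6 6

stateTwoTasks = ParallelMapFoldingState((2, 3), taskDivisions=2)
print(int(stateTwoTasks.taskDivisions))                   # expected: 2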
mapFolding/datatypes.py CHANGED
@@ -1,9 +1,6 @@
  from numpy import dtype, uint8 as numpy_uint8, uint16 as numpy_uint16, uint64 as numpy_uint64, integer, ndarray
  from typing import Any, TypeAlias, TypeVar

- # =============================================================================
- # Flexible Data Structure System Needs Enhanced Paradigm https://github.com/hunterhogan/mapFolding/issues/9
-
  NumPyIntegerType = TypeVar('NumPyIntegerType', bound=integer[Any], covariant=True)

  DatatypeLeavesTotal: TypeAlias = int
mapFolding/{toolboxFilesystem.py → filesystemToolkit.py} RENAMED
@@ -21,7 +21,7 @@ The functions here adhere to a consistent approach to path handling:
  - Progressive fallback strategies for saving critical computation results.
  - Preemptive filesystem validation to detect issues before computation begins.
  """
- from mapFolding import The
+ from mapFolding import packageSettings
  from os import PathLike
  from pathlib import Path, PurePath
  from sys import modules as sysModules
@@ -101,9 +101,9 @@ def getPathRootJobDEFAULT() -> Path:
  - For Google Colab, uses a specific path in Google Drive.
  - Creates the directory if it doesn't exist.
  """
- pathJobDEFAULT = Path(platformdirs.user_data_dir(appname=The.packageName, appauthor=False, ensure_exists=True))
+ pathJobDEFAULT = Path(platformdirs.user_data_dir(appname=packageSettings.packageName, appauthor=False, ensure_exists=True))
  if 'google.colab' in sysModules:
- pathJobDEFAULT = Path("/content/drive/MyDrive") / The.packageName
+ pathJobDEFAULT = Path("/content/drive/MyDrive") / packageSettings.packageName
  pathJobDEFAULT.mkdir(parents=True, exist_ok=True)
  return pathJobDEFAULT

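Aside from the rename to `filesystemToolkit`, the functional change above is that the default job directory is keyed by `packageSettings.packageName` rather than the removed `The` object. A usage sketch (output paths are illustrative and platform-dependent):

from mapFolding import getPathRootJobDEFAULT, packageSettings

print(packageSettings.packageName)   # the appname passed to platformdirs above
print(getPathRootJobDEFAULT())       # per-user data directory; a Google Drive path when running on Colab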
mapFolding/oeis.py CHANGED
@@ -20,9 +20,10 @@ mathematical definition in OEIS and the computational implementation in the pack
  from collections.abc import Callable
  from datetime import datetime, timedelta
  from functools import cache
- from mapFolding import countFolds, The, TypedDict, writeStringToHere
+ from mapFolding import countFolds, packageSettings, TypedDict
  from pathlib import Path
  from typing import Any, Final
+ from Z0Z_tools import writeStringToHere
  import argparse
  import random
  import sys
@@ -33,7 +34,7 @@ import warnings

  cacheDays = 30

- pathCache: Path = The.pathPackage / ".cache"
+ pathCache: Path = packageSettings.pathPackage / ".cache"

  class SettingsOEIS(TypedDict):
  description: str
@@ -143,9 +144,6 @@ def _parseBFileOEIS(OEISbFile: str, oeisID: str) -> dict[int, int]:
  invalid.
  """
  bFileLines: list[str] = OEISbFile.strip().splitlines()
- # if not bFileLines.pop(0).startswith(f"# {oeisID}"):
- # warnings.warn(f"Content does not match sequence {oeisID}")
- # return {-1: -1}

  OEISsequence: dict[int, int] = {}
  for line in bFileLines:
mapFolding/someAssemblyRequired/RecipeJob.py CHANGED
@@ -1,107 +1,12 @@
- from mapFolding.someAssemblyRequired import ShatteredDataclass, ast_Identifier, parseLogicalPath2astModule, parsePathFilename2astModule, str_nameDOTname
- from mapFolding.someAssemblyRequired.toolboxNumba import theNumbaFlow
+ from astToolkit import ast_Identifier, parseLogicalPath2astModule, str_nameDOTname
+ from mapFolding import getPathFilenameFoldsTotal, getPathRootJobDEFAULT, MapFoldingState
+ from mapFolding import DatatypeElephino as TheDatatypeElephino, DatatypeFoldsTotal as TheDatatypeFoldsTotal, DatatypeLeavesTotal as TheDatatypeLeavesTotal
+ from mapFolding.someAssemblyRequired import ShatteredDataclass, packageInformation
  from mapFolding.someAssemblyRequired.transformationTools import shatter_dataclassesDOTdataclass
- from mapFolding.theSSOT import ComputationState, DatatypeElephino as TheDatatypeElephino, DatatypeFoldsTotal as TheDatatypeFoldsTotal, DatatypeLeavesTotal as TheDatatypeLeavesTotal
- from mapFolding.toolboxFilesystem import getPathFilenameFoldsTotal, getPathRootJobDEFAULT
- from mapFolding.dataBaskets import MapFoldingState
  from pathlib import Path, PurePosixPath
  from typing import TypeAlias
  import dataclasses

- @dataclasses.dataclass
- class RecipeJob:
- state: ComputationState
- # TODO create function to calculate `foldsTotalEstimated`
- foldsTotalEstimated: int = 0
- shatteredDataclass: ShatteredDataclass = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
-
- # ========================================
- # Source
- source_astModule = parsePathFilename2astModule(theNumbaFlow.pathFilenameSequential)
- sourceCountCallable: ast_Identifier = theNumbaFlow.callableSequential
-
- sourceLogicalPathModuleDataclass: str_nameDOTname = theNumbaFlow.logicalPathModuleDataclass
- sourceDataclassIdentifier: ast_Identifier = theNumbaFlow.dataclassIdentifier
- sourceDataclassInstance: ast_Identifier = theNumbaFlow.dataclassInstance
-
- sourcePathPackage: PurePosixPath | None = theNumbaFlow.pathPackage
- sourcePackageIdentifier: ast_Identifier | None = theNumbaFlow.packageIdentifier
-
- # ========================================
- # Filesystem (names of physical objects)
- pathPackage: PurePosixPath | None = None
- pathModule: PurePosixPath | None = PurePosixPath(getPathRootJobDEFAULT())
- """ `pathModule` will override `pathPackage` and `logicalPathRoot`."""
- fileExtension: str = theNumbaFlow.fileExtension
- pathFilenameFoldsTotal: PurePosixPath = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
-
- # ========================================
- # Logical identifiers (as opposed to physical identifiers)
- packageIdentifier: ast_Identifier | None = None
- logicalPathRoot: str_nameDOTname | None = None
- """ `logicalPathRoot` likely corresponds to a physical filesystem directory."""
- moduleIdentifier: ast_Identifier = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
- countCallable: ast_Identifier = sourceCountCallable
- dataclassIdentifier: ast_Identifier | None = sourceDataclassIdentifier
- dataclassInstance: ast_Identifier | None = sourceDataclassInstance
- logicalPathModuleDataclass: str_nameDOTname | None = sourceLogicalPathModuleDataclass
-
- # ========================================
- # Datatypes
- DatatypeFoldsTotal: TypeAlias = TheDatatypeFoldsTotal
- DatatypeElephino: TypeAlias = TheDatatypeElephino
- DatatypeLeavesTotal: TypeAlias = TheDatatypeLeavesTotal
-
- def _makePathFilename(self,
- pathRoot: PurePosixPath | None = None,
- logicalPathINFIX: str_nameDOTname | None = None,
- filenameStem: str | None = None,
- fileExtension: str | None = None,
- ) -> PurePosixPath:
- if pathRoot is None:
- pathRoot = self.pathPackage or PurePosixPath(Path.cwd())
- if logicalPathINFIX:
- whyIsThisStillAThing: list[str] = logicalPathINFIX.split('.')
- pathRoot = pathRoot.joinpath(*whyIsThisStillAThing)
- if filenameStem is None:
- filenameStem = self.moduleIdentifier
- if fileExtension is None:
- fileExtension = self.fileExtension
- filename: str = filenameStem + fileExtension
- return pathRoot.joinpath(filename)
-
- @property
- def pathFilenameModule(self) -> PurePosixPath:
- if self.pathModule is None:
- return self._makePathFilename()
- else:
- return self._makePathFilename(pathRoot=self.pathModule, logicalPathINFIX=None)
-
- def __post_init__(self):
- pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(self.state.mapShape))
-
- if self.moduleIdentifier is None: # pyright: ignore[reportUnnecessaryComparison]
- self.moduleIdentifier = pathFilenameFoldsTotal.stem
-
- if self.pathFilenameFoldsTotal is None: # pyright: ignore[reportUnnecessaryComparison]
- self.pathFilenameFoldsTotal = pathFilenameFoldsTotal
-
- if self.shatteredDataclass is None and self.logicalPathModuleDataclass and self.dataclassIdentifier and self.dataclassInstance: # pyright: ignore[reportUnnecessaryComparison]
- self.shatteredDataclass = shatter_dataclassesDOTdataclass(self.logicalPathModuleDataclass, self.dataclassIdentifier, self.dataclassInstance)
-
- # ========================================
- # Fields you probably don't need =================================
- # Dispatcher =================================
- sourceDispatcherCallable: ast_Identifier = theNumbaFlow.callableDispatcher
- dispatcherCallable: ast_Identifier = sourceDispatcherCallable
- # Parallel counting =================================
- sourceDataclassInstanceTaskDistribution: ast_Identifier = theNumbaFlow.dataclassInstanceTaskDistribution
- sourceConcurrencyManagerNamespace: ast_Identifier = theNumbaFlow.concurrencyManagerNamespace
- sourceConcurrencyManagerIdentifier: ast_Identifier = theNumbaFlow.concurrencyManagerIdentifier
- dataclassInstanceTaskDistribution: ast_Identifier = sourceDataclassInstanceTaskDistribution
- concurrencyManagerNamespace: ast_Identifier = sourceConcurrencyManagerNamespace
- concurrencyManagerIdentifier: ast_Identifier = sourceConcurrencyManagerIdentifier
-
  @dataclasses.dataclass
  class RecipeJobTheorem2Numba:
  state: MapFoldingState
@@ -116,17 +21,17 @@ class RecipeJobTheorem2Numba:

  sourceLogicalPathModuleDataclass: str_nameDOTname = 'mapFolding.dataBaskets'
  sourceDataclassIdentifier: ast_Identifier = 'MapFoldingState'
- sourceDataclassInstance: ast_Identifier = theNumbaFlow.dataclassInstance
+ sourceDataclassInstance: ast_Identifier = packageInformation.dataclassInstance

- sourcePathPackage: PurePosixPath | None = theNumbaFlow.pathPackage
- sourcePackageIdentifier: ast_Identifier | None = theNumbaFlow.packageIdentifier
+ sourcePathPackage: PurePosixPath | None = PurePosixPath(packageInformation.pathPackage)
+ sourcePackageIdentifier: ast_Identifier | None = packageInformation.packageName

  # ========================================
  # Filesystem (names of physical objects)
  pathPackage: PurePosixPath | None = None
  pathModule: PurePosixPath | None = PurePosixPath(getPathRootJobDEFAULT())
  """ `pathModule` will override `pathPackage` and `logicalPathRoot`."""
- fileExtension: str = theNumbaFlow.fileExtension
+ fileExtension: str = packageInformation.fileExtension
  pathFilenameFoldsTotal: PurePosixPath = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]

  # ========================================
@@ -182,16 +87,3 @@ class RecipeJobTheorem2Numba:

  if self.shatteredDataclass is None and self.logicalPathModuleDataclass and self.dataclassIdentifier and self.dataclassInstance: # pyright: ignore[reportUnnecessaryComparison]
  self.shatteredDataclass = shatter_dataclassesDOTdataclass(self.logicalPathModuleDataclass, self.dataclassIdentifier, self.dataclassInstance)
-
- # ========================================
- # Fields you probably don't need =================================
- # Dispatcher =================================
- sourceDispatcherCallable: ast_Identifier = theNumbaFlow.callableDispatcher
- dispatcherCallable: ast_Identifier = sourceDispatcherCallable
- # Parallel counting =================================
- sourceDataclassInstanceTaskDistribution: ast_Identifier = theNumbaFlow.dataclassInstanceTaskDistribution
- sourceConcurrencyManagerNamespace: ast_Identifier = theNumbaFlow.concurrencyManagerNamespace
- sourceConcurrencyManagerIdentifier: ast_Identifier = theNumbaFlow.concurrencyManagerIdentifier
- dataclassInstanceTaskDistribution: ast_Identifier = sourceDataclassInstanceTaskDistribution
- concurrencyManagerNamespace: ast_Identifier = sourceConcurrencyManagerNamespace
- concurrencyManagerIdentifier: ast_Identifier = sourceConcurrencyManagerIdentifier