mapFolding 0.8.3-py3-none-any.whl → 0.8.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapFolding/__init__.py +6 -3
- mapFolding/basecamp.py +13 -7
- mapFolding/beDRY.py +241 -68
- mapFolding/oeis.py +4 -4
- mapFolding/reference/hunterNumba.py +1 -1
- mapFolding/someAssemblyRequired/__init__.py +40 -20
- mapFolding/someAssemblyRequired/_theTypes.py +53 -0
- mapFolding/someAssemblyRequired/_tool_Make.py +99 -0
- mapFolding/someAssemblyRequired/_tool_Then.py +72 -0
- mapFolding/someAssemblyRequired/_toolboxAntecedents.py +358 -0
- mapFolding/someAssemblyRequired/_toolboxContainers.py +334 -0
- mapFolding/someAssemblyRequired/_toolboxPython.py +62 -0
- mapFolding/someAssemblyRequired/getLLVMforNoReason.py +2 -2
- mapFolding/someAssemblyRequired/newInliner.py +22 -0
- mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +158 -0
- mapFolding/someAssemblyRequired/toolboxNumba.py +358 -0
- mapFolding/someAssemblyRequired/transformationTools.py +289 -698
- mapFolding/syntheticModules/numbaCount_doTheNeedful.py +36 -33
- mapFolding/theDao.py +13 -11
- mapFolding/theSSOT.py +83 -128
- mapFolding/toolboxFilesystem.py +219 -0
- {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/METADATA +4 -2
- mapfolding-0.8.5.dist-info/RECORD +48 -0
- {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/WHEEL +1 -1
- tests/conftest.py +56 -52
- tests/test_computations.py +42 -32
- tests/test_filesystem.py +4 -4
- tests/test_other.py +2 -2
- tests/test_tasks.py +2 -2
- mapFolding/filesystem.py +0 -129
- mapFolding/someAssemblyRequired/ingredientsNumba.py +0 -206
- mapFolding/someAssemblyRequired/synthesizeNumbaFlow.py +0 -211
- mapFolding/someAssemblyRequired/synthesizeNumbaJobVESTIGIAL.py +0 -413
- mapFolding/someAssemblyRequired/transformDataStructures.py +0 -168
- mapfolding-0.8.3.dist-info/RECORD +0 -43
- {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/entry_points.txt +0 -0
- {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/licenses/LICENSE +0 -0
- {mapfolding-0.8.3.dist-info → mapfolding-0.8.5.dist-info}/top_level.txt +0 -0
mapFolding/syntheticModules/numbaCount_doTheNeedful.py
CHANGED

@@ -2,6 +2,7 @@ from concurrent.futures import Future as ConcurrentFuture, ProcessPoolExecutor
 from copy import deepcopy
 from mapFolding.theSSOT import Array1DElephino, Array1DFoldsTotal, Array1DLeavesTotal, Array3D, ComputationState, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal
 from numba import jit
+from numpy import array, int16, int64

 def countInitialize(state: ComputationState) -> ComputationState:
     while state.leaf1ndex > 0:
@@ -22,11 +23,11 @@ def countInitialize(state: ComputationState) -> ComputationState:
                     state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leafBelow[state.leafConnectee]]
             state.indexDimension += 1
         if not state.dimensionsUnconstrained:
-            indexLeaf = 0
-            while indexLeaf < state.leaf1ndex:
-                state.gapsWhere[state.gap1ndexCeiling] = indexLeaf
+            state.indexLeaf = 0
+            while state.indexLeaf < state.leaf1ndex:
+                state.gapsWhere[state.gap1ndexCeiling] = state.indexLeaf
                 state.gap1ndexCeiling += 1
-                indexLeaf += 1
+                state.indexLeaf += 1
         state.indexMiniGap = state.gap1ndex
         while state.indexMiniGap < state.gap1ndexCeiling:
             state.gapsWhere[state.gap1ndex] = state.gapsWhere[state.indexMiniGap]
@@ -139,36 +140,38 @@ def doTheNeedful(state: ComputationState) -> ComputationState:
     state = countInitialize(state)
     if state.taskDivisions > 0:
         dictionaryConcurrency: dict[int, ConcurrentFuture[ComputationState]] = {}
[27 lines removed here; their content is not captured in this rendering]
+        stateParallel = deepcopy(state)
+        with ProcessPoolExecutor(stateParallel.concurrencyLimit) as concurrencyManager:
+            for indexSherpa in range(stateParallel.taskDivisions):
+                state = deepcopy(stateParallel)
+                state.taskIndex = indexSherpa
+                mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
+                leavesTotal: DatatypeLeavesTotal = state.leavesTotal
+                taskDivisions: DatatypeLeavesTotal = state.taskDivisions
+                concurrencyLimit: DatatypeElephino = state.concurrencyLimit
+                connectionGraph: Array3D = state.connectionGraph
+                dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
+                countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
+                dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
+                gapRangeStart: Array1DElephino = state.gapRangeStart
+                gapsWhere: Array1DLeavesTotal = state.gapsWhere
+                leafAbove: Array1DLeavesTotal = state.leafAbove
+                leafBelow: Array1DLeavesTotal = state.leafBelow
+                foldGroups: Array1DFoldsTotal = state.foldGroups
+                foldsTotal: DatatypeFoldsTotal = state.foldsTotal
+                gap1ndex: DatatypeLeavesTotal = state.gap1ndex
+                gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
+                groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
+                indexDimension: DatatypeLeavesTotal = state.indexDimension
+                indexLeaf: DatatypeLeavesTotal = state.indexLeaf
+                indexMiniGap: DatatypeElephino = state.indexMiniGap
+                leaf1ndex: DatatypeElephino = state.leaf1ndex
+                leafConnectee: DatatypeElephino = state.leafConnectee
+                taskIndex: DatatypeLeavesTotal = state.taskIndex
                 dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(countParallel, mapShape, leavesTotal, taskDivisions, concurrencyLimit, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
-        for indexSherpa in range(
-
+            for indexSherpa in range(stateParallel.taskDivisions):
+                stateParallel.foldGroups[indexSherpa] = dictionaryConcurrency[indexSherpa].result()
+            state = stateParallel
     else:
         mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
         leavesTotal: DatatypeLeavesTotal = state.leavesTotal
mapFolding/theDao.py
CHANGED

@@ -101,11 +101,11 @@ def initializeVariablesToFindGaps(state: ComputationState) -> ComputationState:
     return state

 def insertUnconstrainedLeaf(state: ComputationState) -> ComputationState:
-    indexLeaf = 0
-    while indexLeaf < state.leaf1ndex:
-        state.gapsWhere[state.gap1ndexCeiling] = indexLeaf
+    state.indexLeaf = 0
+    while state.indexLeaf < state.leaf1ndex:
+        state.gapsWhere[state.gap1ndexCeiling] = state.indexLeaf
         state.gap1ndexCeiling += 1
-        indexLeaf += 1
+        state.indexLeaf += 1
     return state

 def leafBelowSentinelIs1(state: ComputationState) -> bool:
@@ -227,13 +227,15 @@ def doTheNeedful(state: ComputationState) -> ComputationState:
     state = countInitialize(state)
     if state.taskDivisions > 0:
         dictionaryConcurrency: dict[int, ConcurrentFuture[ComputationState]] = {}
[7 lines removed here; their content is not captured in this rendering]
+        stateParallel = deepcopy(state)
+        with ProcessPoolExecutor(stateParallel.concurrencyLimit) as concurrencyManager:
+            for indexSherpa in range(stateParallel.taskDivisions):
+                state = deepcopy(stateParallel)
+                state.taskIndex = indexSherpa
+                dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(countParallel, state)
+            for indexSherpa in range(stateParallel.taskDivisions):
+                stateParallel.foldGroups[indexSherpa] = dictionaryConcurrency[indexSherpa].result().foldGroups[indexSherpa]
+            state = stateParallel
     else:
         state = countSequential(state)

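In `theDao.py` the pure-Python flow keeps the dataclass intact: every task receives a deep copy of `stateParallel`, the worker returns its own copy, and only the slot that task owns is merged back into `stateParallel.foldGroups`. A runnable sketch of that fan-out/fan-in pattern, with a simplified stand-in for `ComputationState` and a hypothetical `countParallel`:

```python
from concurrent.futures import Future, ProcessPoolExecutor
from copy import deepcopy
from dataclasses import dataclass, field


# Hypothetical, drastically simplified stand-in for ComputationState.
@dataclass
class State:
    taskDivisions: int
    concurrencyLimit: int
    taskIndex: int = 0
    foldGroups: list = field(default_factory=list)


def countParallel(state: State) -> State:
    # Hypothetical worker: fills only the slot this task owns.
    state.foldGroups[state.taskIndex] = state.taskIndex + 1
    return state


if __name__ == '__main__':
    stateParallel = State(taskDivisions=4, concurrencyLimit=2, foldGroups=[0, 0, 0, 0])
    dictionaryConcurrency: dict[int, Future] = {}
    with ProcessPoolExecutor(stateParallel.concurrencyLimit) as concurrencyManager:
        for indexSherpa in range(stateParallel.taskDivisions):
            state = deepcopy(stateParallel)
            state.taskIndex = indexSherpa
            dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(countParallel, state)
        for indexSherpa in range(stateParallel.taskDivisions):
            # Each worker returns its own copy; keep only the slot it owned.
            stateParallel.foldGroups[indexSherpa] = dictionaryConcurrency[indexSherpa].result().foldGroups[indexSherpa]
    print(stateParallel.foldGroups)  # [1, 2, 3, 4]
```
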
mapFolding/theSSOT.py
CHANGED

@@ -19,21 +19,13 @@ to avoid namespace collisions when transforming algorithms.
 from collections.abc import Callable
 from importlib import import_module as importlib_import_module
 from inspect import getfile as inspect_getfile
-from numpy import dtype, int64 as numpy_int64, int16 as numpy_int16, ndarray
+from numpy import dtype, int64 as numpy_int64, int16 as numpy_int16, integer, ndarray
 from pathlib import Path
-from sys import modules as sysModules
 from tomli import load as tomli_load
 from types import ModuleType
-from typing import TypeAlias
+from typing import Any, TypeAlias, TypeVar
 import dataclasses

-# Figure out dynamic flow control to synthesized modules https://github.com/hunterhogan/mapFolding/issues/4
-# I _think_, in theSSOT, I have abstracted the flow settings to only these couple of lines:
-packageFlowSynthetic = 'numba'
-# Z0Z_packageFlow = 'algorithm'
-Z0Z_packageFlow = packageFlowSynthetic
-Z0Z_concurrencyPackage = 'multiprocessing'
-
 # =============================================================================
 # The Wrong Way: Evaluate When Packaging

@@ -50,84 +42,111 @@ def getPathPackageINSTALLING() -> Path:
         pathPackage = pathPackage.parent
     return pathPackage

+# =============================================================================
+# The Wrong Way: HARDCODED
+# Figure out dynamic flow control to synthesized modules https://github.com/hunterhogan/mapFolding/issues/4
+
+# from mapFolding.someAssemblyRequired.synthesizeNumbaFlow.theNumbaFlow
+logicalPathModuleDispatcherHARDCODED: str = 'mapFolding.syntheticModules.numbaCount_doTheNeedful'
+callableDispatcherHARDCODED: str = 'doTheNeedful'
+concurrencyPackageHARDCODED = 'multiprocessing'
+
+# =============================================================================
 # The following is an improvement, but it is not the full solution.
 # I hope that the standardized markers, `metadata={'evaluateWhen': 'packaging'}` will help to automate
 # whatever needs to happen so that the following is well implemented.
-@dataclasses.dataclass(frozen=True)
+# @dataclasses.dataclass(frozen=True)
+@dataclasses.dataclass
 class PackageSettings:

+    logicalPathModuleDispatcher: str | None = None
+    callableDispatcher: str | None = None
+    concurrencyPackage: str | None = None
     dataclassIdentifier: str = dataclasses.field(default='ComputationState', metadata={'evaluateWhen': 'packaging'})
     dataclassInstance: str = dataclasses.field(default='state', metadata={'evaluateWhen': 'packaging'})
     dataclassInstanceTaskDistributionSuffix: str = dataclasses.field(default='Parallel', metadata={'evaluateWhen': 'packaging'})
     dataclassModule: str = dataclasses.field(default='theSSOT', metadata={'evaluateWhen': 'packaging'})
     datatypePackage: str = dataclasses.field(default='numpy', metadata={'evaluateWhen': 'packaging'})
-    dispatcherCallable: str = dataclasses.field(default='doTheNeedful', metadata={'evaluateWhen': 'packaging'})
     fileExtension: str = dataclasses.field(default='.py', metadata={'evaluateWhen': 'installing'})
-    moduleOfSyntheticModules: str = dataclasses.field(default='syntheticModules', metadata={'evaluateWhen': 'packaging'})
     packageName: str = dataclasses.field(default = packageNamePACKAGING, metadata={'evaluateWhen': 'packaging'})
     pathPackage: Path = dataclasses.field(default_factory=getPathPackageINSTALLING, init=False, metadata={'evaluateWhen': 'installing'})
     sourceAlgorithm: str = dataclasses.field(default='theDao', metadata={'evaluateWhen': 'packaging'})
+    sourceCallableDispatcher: str = dataclasses.field(default='doTheNeedful', metadata={'evaluateWhen': 'packaging'})
+    sourceCallableInitialize: str = dataclasses.field(default='countInitialize', metadata={'evaluateWhen': 'packaging'})
+    sourceCallableParallel: str = dataclasses.field(default='countParallel', metadata={'evaluateWhen': 'packaging'})
+    sourceCallableSequential: str = dataclasses.field(default='countSequential', metadata={'evaluateWhen': 'packaging'})
     sourceConcurrencyManagerIdentifier: str = dataclasses.field(default='submit', metadata={'evaluateWhen': 'packaging'})
     sourceConcurrencyManagerNamespace: str = dataclasses.field(default='concurrencyManager', metadata={'evaluateWhen': 'packaging'})
[22 lines removed here; their content is not captured in this rendering]
-The = PackageSettings()
+    sourceConcurrencyPackage: str = dataclasses.field(default='multiprocessing', metadata={'evaluateWhen': 'packaging'})
+
+    dataclassInstanceTaskDistribution: str = dataclasses.field(init=False, metadata={'evaluateWhen': 'packaging'})
+    """ During parallel computation, this identifier helps to create deep copies of the dataclass instance. """
+    logicalPathModuleDataclass: str = dataclasses.field(init=False)
+    """ The package.module.name logical path to the dataclass. """
+    logicalPathModuleSourceAlgorithm: str = dataclasses.field(init=False)
+    """ The package.module.name logical path to the source algorithm. """
+
+    @property # This is not a field, and that annoys me.
+    def dispatcher(self) -> Callable[['ComputationState'], 'ComputationState']:
+        """ _The_ callable that connects `countFolds` to the logic that does the work."""
+        logicalPath: str = self.logicalPathModuleDispatcher or self.logicalPathModuleSourceAlgorithm
+        identifier: str = self.callableDispatcher or self.sourceCallableDispatcher
+        moduleImported: ModuleType = importlib_import_module(logicalPath)
+        return getattr(moduleImported, identifier)
+
+    def __post_init__(self) -> None:
+        self.dataclassInstanceTaskDistribution = self.dataclassInstance + self.dataclassInstanceTaskDistributionSuffix
+
+        self.logicalPathModuleDataclass = '.'.join([self.packageName, self.dataclassModule])
+        self.logicalPathModuleSourceAlgorithm = '.'.join([self.packageName, self.sourceAlgorithm])

+The = PackageSettings(logicalPathModuleDispatcher=logicalPathModuleDispatcherHARDCODED, callableDispatcher=callableDispatcherHARDCODED, concurrencyPackage=concurrencyPackageHARDCODED)
+
+# To remove this function, I need to learn how to change "conftest.py" to patch this.
+def getPackageDispatcher() -> Callable[['ComputationState'], 'ComputationState']:
+    """Get the dispatcher callable for the package.
+
+    This function retrieves the dispatcher callable for the package based on the
+    logical path module and callable dispatcher defined in the PackageSettings.
+    """
+    return The.dispatcher
 # =============================================================================
 # Flexible Data Structure System Needs Enhanced Paradigm https://github.com/hunterhogan/mapFolding/issues/9
+# Efficient translation of Python scalar types to Numba types https://github.com/hunterhogan/mapFolding/issues/8
+
+numpyIntegerType = TypeVar('numpyIntegerType', bound=integer[Any], covariant=True)

 DatatypeLeavesTotal: TypeAlias = int
-# this would be uint8, but mapShape (2,2,2,2, 2,2,2,2) has 256 leaves, so generic containers must accommodate at least 256 leaves
-numpyLeavesTotal: TypeAlias = numpy_int16
+NumPyLeavesTotal: TypeAlias = numpy_int16 # this would be uint8, but mapShape (2,2,2,2, 2,2,2,2) has 256 leaves, so generic containers must accommodate at least 256 leaves

 DatatypeElephino: TypeAlias = int
-
+NumPyElephino: TypeAlias = numpy_int16

 DatatypeFoldsTotal: TypeAlias = int
-
+NumPyFoldsTotal: TypeAlias = numpy_int64

-Array3D: TypeAlias = ndarray[tuple[int, int, int], dtype[
-Array1DLeavesTotal: TypeAlias = ndarray[tuple[int], dtype[
-Array1DElephino: TypeAlias = ndarray[tuple[int], dtype[
-Array1DFoldsTotal: TypeAlias = ndarray[tuple[int], dtype[
+Array3D: TypeAlias = ndarray[tuple[int, int, int], dtype[NumPyLeavesTotal]]
+Array1DLeavesTotal: TypeAlias = ndarray[tuple[int], dtype[NumPyLeavesTotal]]
+Array1DElephino: TypeAlias = ndarray[tuple[int], dtype[NumPyElephino]]
+Array1DFoldsTotal: TypeAlias = ndarray[tuple[int], dtype[NumPyFoldsTotal]]

 @dataclasses.dataclass
 class ComputationState:
-    mapShape: tuple[DatatypeLeavesTotal, ...]
+    mapShape: tuple[DatatypeLeavesTotal, ...] = dataclasses.field(init=True, metadata={'elementConstructor': 'DatatypeLeavesTotal'}) # NOTE Python is anti-DRY, again, `DatatypeLeavesTotal` needs to match the type
     leavesTotal: DatatypeLeavesTotal
     taskDivisions: DatatypeLeavesTotal
     concurrencyLimit: DatatypeElephino

-    connectionGraph: Array3D = dataclasses.field(init=False)
+    connectionGraph: Array3D = dataclasses.field(init=False, metadata={'dtype': Array3D.__args__[1].__args__[0]}) # pyright: ignore[reportUnknownMemberType, reportAttributeAccessIssue]
     dimensionsTotal: DatatypeLeavesTotal = dataclasses.field(init=False)

-    countDimensionsGapped: Array1DLeavesTotal = dataclasses.field(default=None, init=True) # type: ignore[arg-type, reportAssignmentType]
+    countDimensionsGapped: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # type: ignore[arg-type, reportAssignmentType]
     dimensionsUnconstrained: DatatypeLeavesTotal = dataclasses.field(default=None, init=True) # type: ignore[assignment, reportAssignmentType]
-    gapRangeStart: Array1DElephino = dataclasses.field(default=None, init=True) # type: ignore[arg-type, reportAssignmentType]
-    gapsWhere: Array1DLeavesTotal = dataclasses.field(default=None, init=True) # type: ignore[arg-type, reportAssignmentType]
-    leafAbove: Array1DLeavesTotal = dataclasses.field(default=None, init=True) # type: ignore[arg-type, reportAssignmentType]
-    leafBelow: Array1DLeavesTotal = dataclasses.field(default=None, init=True) # type: ignore[arg-type, reportAssignmentType]
-    foldGroups: Array1DFoldsTotal = dataclasses.field(default=None, init=True) # type: ignore[arg-type, reportAssignmentType]
+    gapRangeStart: Array1DElephino = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DElephino.__args__[1].__args__[0]}) # type: ignore[arg-type, reportAssignmentType]
+    gapsWhere: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # type: ignore[arg-type, reportAssignmentType]
+    leafAbove: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # type: ignore[arg-type, reportAssignmentType]
+    leafBelow: Array1DLeavesTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DLeavesTotal.__args__[1].__args__[0]}) # type: ignore[arg-type, reportAssignmentType]
+    foldGroups: Array1DFoldsTotal = dataclasses.field(default=None, init=True, metadata={'dtype': Array1DFoldsTotal.__args__[1].__args__[0]}) # type: ignore[arg-type, reportAssignmentType]

     foldsTotal: DatatypeFoldsTotal = DatatypeFoldsTotal(0)
     gap1ndex: DatatypeLeavesTotal = DatatypeLeavesTotal(0)
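The new `PackageSettings.dispatcher` property is a lazy lookup: the hardcoded overrides win when set, otherwise the source-algorithm defaults apply, and the module is only imported when the property is read (the retained `getPackageDispatcher()` now just returns `The.dispatcher`). A minimal sketch of that fallback-and-lazy-import pattern; the settings class and the `math.prod` target here are illustrative stand-ins, not the package's real values:

```python
from __future__ import annotations

from dataclasses import dataclass
from importlib import import_module
from typing import Callable


@dataclass
class Settings:
    # Optional overrides; None means "fall back to the source-algorithm default".
    logicalPathModuleDispatcher: str | None = None
    callableDispatcher: str | None = None
    # Defaults, standing in for logicalPathModuleSourceAlgorithm / sourceCallableDispatcher.
    logicalPathModuleSourceAlgorithm: str = 'math'
    sourceCallableDispatcher: str = 'prod'

    @property
    def dispatcher(self) -> Callable:
        # Resolve override-or-default, and import the module only when the property is read.
        logicalPath = self.logicalPathModuleDispatcher or self.logicalPathModuleSourceAlgorithm
        identifier = self.callableDispatcher or self.sourceCallableDispatcher
        return getattr(import_module(logicalPath), identifier)


print(Settings().dispatcher([2, 3, 4]))                        # default: math.prod -> 24
print(Settings(callableDispatcher='fsum').dispatcher([2, 3]))  # override: math.fsum -> 5.0
```
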
@@ -139,95 +158,31 @@ class ComputationState:
     leaf1ndex: DatatypeElephino = DatatypeElephino(1)
     leafConnectee: DatatypeElephino = DatatypeElephino(0)
     taskIndex: DatatypeLeavesTotal = DatatypeLeavesTotal(0)
-    # Efficient translation of Python scalar types to Numba types https://github.com/hunterhogan/mapFolding/issues/8

     def __post_init__(self) -> None:
-        from mapFolding.beDRY import
+        from mapFolding.beDRY import getConnectionGraph, makeDataContainer
         self.dimensionsTotal = DatatypeLeavesTotal(len(self.mapShape))
-
+        leavesTotalAsInt = int(self.leavesTotal)
+        self.connectionGraph = getConnectionGraph(self.mapShape, leavesTotalAsInt, self.__dataclass_fields__['connectionGraph'].metadata['dtype'])

-        if self.dimensionsUnconstrained is None: #
+        if self.dimensionsUnconstrained is None: # type: ignore
             self.dimensionsUnconstrained = DatatypeLeavesTotal(int(self.dimensionsTotal))

-        if self.foldGroups is None:
-            self.foldGroups = makeDataContainer(max(2, int(self.taskDivisions) + 1),
+        if self.foldGroups is None: # type: ignore
+            self.foldGroups = makeDataContainer(max(2, int(self.taskDivisions) + 1), self.__dataclass_fields__['foldGroups'].metadata['dtype'])
         self.foldGroups[-1] = self.leavesTotal

-
+        if self.gapsWhere is None: self.gapsWhere = makeDataContainer(leavesTotalAsInt * leavesTotalAsInt + 1, self.__dataclass_fields__['gapsWhere'].metadata['dtype']) # type: ignore

-        if self.countDimensionsGapped is None:
-
-        if self.
-
-        if self.gapsWhere is None:
-            self.gapsWhere = makeDataContainer(leavesTotalAsInt * leavesTotalAsInt + 1, numpyLeavesTotal)
-        if self.leafAbove is None:
-            self.leafAbove = makeDataContainer(leavesTotalAsInt + 1, numpyLeavesTotal)
-        if self.leafBelow is None:
-            self.leafBelow = makeDataContainer(leavesTotalAsInt + 1, numpyLeavesTotal)
+        if self.countDimensionsGapped is None: self.countDimensionsGapped = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['countDimensionsGapped'].metadata['dtype']) # type: ignore
+        if self.gapRangeStart is None: self.gapRangeStart = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['gapRangeStart'].metadata['dtype']) # type: ignore
+        if self.leafAbove is None: self.leafAbove = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafAbove'].metadata['dtype']) # type: ignore
+        if self.leafBelow is None: self.leafBelow = makeDataContainer(leavesTotalAsInt + 1, self.__dataclass_fields__['leafBelow'].metadata['dtype']) # type: ignore

     def getFoldsTotal(self) -> None:
         self.foldsTotal = DatatypeFoldsTotal(self.foldGroups[0:-1].sum() * self.leavesTotal)

-# =============================================================================
-
-# TODO learn how to see this from the user's perspective
-def getPathJobRootDEFAULT() -> Path:
-    if 'google.colab' in sysModules:
-        pathJobDEFAULT: Path = Path("/content/drive/MyDrive") / "jobs"
-    else:
-        pathJobDEFAULT = The.pathPackage / "jobs"
-    return pathJobDEFAULT
-
 # =============================================================================
 # The coping way.

 class raiseIfNoneGitHubIssueNumber3(Exception): pass
-
-# =============================================================================
-# THIS IS A STUPID SYSTEM BUT I CAN'T FIGURE OUT AN IMPROVEMENT
-# NOTE This section for _default_ values probably has value
-# https://github.com/hunterhogan/mapFolding/issues/4
-theFormatStrModuleSynthetic = "{packageFlow}Count"
-theFormatStrModuleForCallableSynthetic = theFormatStrModuleSynthetic + "_{callableTarget}"
-
-theLogicalPathModuleDispatcher: str = The.logicalPathModuleSourceAlgorithm
-
-theModuleDispatcherSynthetic: str = theFormatStrModuleForCallableSynthetic.format(packageFlow=packageFlowSynthetic, callableTarget=The.dispatcherCallable)
-theLogicalPathModuleDispatcherSynthetic: str = '.'.join([The.packageName, The.moduleOfSyntheticModules, theModuleDispatcherSynthetic])
-
-if Z0Z_packageFlow == packageFlowSynthetic: # pyright: ignore [reportUnnecessaryComparison]
-    # NOTE this as a default value _might_ have value
-    theLogicalPathModuleDispatcher = theLogicalPathModuleDispatcherSynthetic
-
-# dynamically set the return type https://github.com/hunterhogan/mapFolding/issues/5
-def getPackageDispatcher() -> Callable[[ComputationState], ComputationState]:
-    # NOTE but this part, if the package flow is synthetic, probably needs to be delegated
-    # to the authority for creating _that_ synthetic flow.
-
-    moduleImported: ModuleType = importlib_import_module(theLogicalPathModuleDispatcher)
-    dispatcherCallable = getattr(moduleImported, The.dispatcherCallable)
-    return dispatcherCallable
-
-"""Technical concepts I am likely using and likely want to use more effectively:
-- Configuration Registry
-- Write-Once, Read-Many (WORM) / Immutable Initialization
-- Lazy Initialization
-- Separate configuration from business logic
-
-----
-theSSOT and yourSSOT
-
-----
-delay realization/instantiation until a concrete value is desired
-moment of truth: when the value is needed, not when the value is defined
-
-----
-2025 March 11
-Note to self: fundamental concept in Python:
-Identifiers: scope and resolution, LEGB (Local, Enclosing, Global, Builtin)
-- Local: Inside the function
-- Enclosing: Inside enclosing functions
-- Global: At the uppermost level
-- Builtin: Python's built-in names
-"""

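The `ComputationState` fields now carry their NumPy dtype in `dataclasses.field(..., metadata={'dtype': ...})`, and `__post_init__` reads it back via `self.__dataclass_fields__[name].metadata['dtype']` when it allocates the default arrays. A small self-contained sketch of that metadata-driven allocation; `makeDataContainer` is assumed here to behave like `numpy.zeros`, while the real helper lives in `mapFolding.beDRY`:

```python
import dataclasses

import numpy


def makeDataContainer(length: int, datatype) -> numpy.ndarray:
    # Assumed stand-in for mapFolding.beDRY.makeDataContainer.
    return numpy.zeros(length, dtype=datatype)


@dataclasses.dataclass
class MiniState:
    leavesTotal: int
    # The dtype travels with the field definition instead of being hardcoded in __post_init__.
    gapsWhere: numpy.ndarray = dataclasses.field(default=None, metadata={'dtype': numpy.int16})  # type: ignore[assignment]

    def __post_init__(self) -> None:
        if self.gapsWhere is None:
            datatype = self.__dataclass_fields__['gapsWhere'].metadata['dtype']
            self.gapsWhere = makeDataContainer(self.leavesTotal * self.leavesTotal + 1, datatype)


print(MiniState(leavesTotal=4).gapsWhere.dtype)  # int16
```
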
mapFolding/toolboxFilesystem.py
ADDED

@@ -0,0 +1,219 @@
+"""
+Filesystem utilities for managing map folding computation results.
+
+This module provides functions for standardized handling of files related to the mapFolding
+package, with a focus on saving, retrieving, and naming computation results. It implements
+consistent naming conventions and path resolution strategies to ensure that:
+
+1. Computation results are stored in a predictable location.
+2. Filenames follow a consistent pattern based on map dimensions.
+3. Results can be reliably retrieved for future reference.
+4. The system handles file operations safely with appropriate error handling.
+
+The module serves as the standardized interface between the computational components
+of the package and the filesystem, abstracting away the details of file operations
+and path management. It provides robust fallback mechanisms to preserve computation
+results even in the face of filesystem errors, which is critical for long-running
+computations that may take days to complete.
+
+The functions here adhere to a consistent approach to path handling:
+- Cross-platform compatibility through the use of `pathlib`.
+- Default locations determined intelligently based on the runtime environment.
+- Progressive fallback strategies for saving critical computation results.
+- Preemptive filesystem validation to detect issues before computation begins.
+"""
+from mapFolding.theSSOT import The
+from os import PathLike
+from pathlib import Path, PurePath
+from sys import modules as sysModules
+import os
+import platformdirs
+
+def getFilenameFoldsTotal(mapShape: tuple[int, ...]) -> str:
+    """
+    Create a standardized filename for a computed `foldsTotal` value.
+
+    This function generates a consistent, filesystem-safe filename based on map dimensions.
+    Standardizing filenames ensures that results can be reliably stored and retrieved,
+    avoiding potential filesystem incompatibilities or Python naming restrictions.
+
+    Parameters:
+        mapShape: A sequence of integers representing the dimensions of the map.
+
+    Returns:
+        filenameFoldsTotal: A filename string in format 'pMxN.foldsTotal' where M,N are sorted dimensions.
+
+    Notes:
+        The filename format ensures:
+        - No spaces in the filename
+        - Safe filesystem characters
+        - Unique extension (.foldsTotal)
+        - Python-safe strings (no starting with numbers, no reserved words)
+        - The 'p' prefix comes from Lunnan's original code.
+    """
+    return 'p' + 'x'.join(str(dimension) for dimension in sorted(mapShape)) + '.foldsTotal'
+
+def getPathFilenameFoldsTotal(mapShape: tuple[int, ...], pathLikeWriteFoldsTotal: PathLike[str] | PurePath | None = None) -> Path:
+    """
+    Get a standardized path and filename for the computed `foldsTotal` value.
+
+    This function resolves paths for storing computation results, handling different
+    input types including directories, absolute paths, or relative paths. It ensures
+    that all parent directories exist in the resulting path.
+
+    Parameters:
+        mapShape: List of dimensions for the map folding problem.
+        pathLikeWriteFoldsTotal (getPathJobRootDEFAULT): Path, filename, or relative path and filename.
+            If None, uses default path. If a directory, appends standardized filename.
+
+    Returns:
+        pathFilenameFoldsTotal: Absolute path and filename for storing the `foldsTotal` value.
+
+    Notes:
+        The function creates any necessary directories in the path if they don't exist.
+    """
+
+    if pathLikeWriteFoldsTotal is None:
+        pathFilenameFoldsTotal = getPathRootJobDEFAULT() / getFilenameFoldsTotal(mapShape)
+    else:
+        pathLikeSherpa = Path(pathLikeWriteFoldsTotal)
+        if pathLikeSherpa.is_dir():
+            pathFilenameFoldsTotal = pathLikeSherpa / getFilenameFoldsTotal(mapShape)
+        elif pathLikeSherpa.is_file() and pathLikeSherpa.is_absolute():
+            pathFilenameFoldsTotal = pathLikeSherpa
+        else:
+            pathFilenameFoldsTotal = getPathRootJobDEFAULT() / pathLikeSherpa
+
+    pathFilenameFoldsTotal.parent.mkdir(parents=True, exist_ok=True)
+    return pathFilenameFoldsTotal
+
+def getPathRootJobDEFAULT() -> Path:
+    """
+    Get the default root directory for map folding computation jobs.
+
+    This function determines the appropriate default directory for storing computation
+    results based on the current runtime environment. It uses platform-specific
+    directories for normal environments and adapts to special environments like
+    Google Colab.
+
+    Returns:
+        pathJobDEFAULT: Path to the default directory for storing computation results
+
+    Notes:
+        - For standard environments, uses `platformdirs` to find appropriate user data directory.
+        - For Google Colab, uses a specific path in Google Drive.
+        - Creates the directory if it doesn't exist.
+    """
+    pathJobDEFAULT = Path(platformdirs.user_data_dir(appname=The.packageName, appauthor=False, ensure_exists=True))
+    if 'google.colab' in sysModules:
+        pathJobDEFAULT = Path("/content/drive/MyDrive") / The.packageName
+    pathJobDEFAULT.mkdir(parents=True, exist_ok=True)
+    return pathJobDEFAULT
+
+def _saveFoldsTotal(pathFilename: PathLike[str] | PurePath, foldsTotal: int) -> None:
+    """
+    Internal helper function to save a `foldsTotal` value to a file.
+
+    This is a low-level function used by the public `saveFoldsTotal` function.
+    It handles the basic file operation without extensive error handling.
+
+    Parameters:
+        pathFilename: Path where the `foldsTotal` value should be saved
+        foldsTotal: The integer value to save
+    """
+    pathFilenameFoldsTotal = Path(pathFilename)
+    pathFilenameFoldsTotal.parent.mkdir(parents=True, exist_ok=True)
+    pathFilenameFoldsTotal.write_text(str(foldsTotal))
+
+def saveFoldsTotal(pathFilename: PathLike[str] | PurePath, foldsTotal: int) -> None:
+    """
+    Save `foldsTotal` value to disk with multiple fallback mechanisms.
+
+    This function attempts to save the computed `foldsTotal` value to the specified
+    location, with backup strategies in case the primary save attempt fails.
+    The robustness is critical since these computations may take days to complete.
+
+    Parameters:
+        pathFilename: Target save location for the `foldsTotal` value
+        foldsTotal: The computed value to save
+
+    Notes:
+        If the primary save fails, the function will attempt alternative save methods:
+        1. Print the value prominently to `stdout`.
+        2. Create a fallback file in the current working directory.
+        3. As a last resort, simply print the value.
+    """
+    try:
+        _saveFoldsTotal(pathFilename, foldsTotal)
+    except Exception as ERRORmessage:
+        try:
+            print(f"\nfoldsTotal foldsTotal foldsTotal foldsTotal foldsTotal\n\n{foldsTotal = }\n\nfoldsTotal foldsTotal foldsTotal foldsTotal foldsTotal\n")
+            print(ERRORmessage)
+            print(f"\nfoldsTotal foldsTotal foldsTotal foldsTotal foldsTotal\n\n{foldsTotal = }\n\nfoldsTotal foldsTotal foldsTotal foldsTotal foldsTotal\n")
+            randomnessPlanB = (int(str(foldsTotal).strip()[-1]) + 1) * ['YO_']
+            filenameInfixUnique = ''.join(randomnessPlanB)
+            pathFilenamePlanB = os.path.join(os.getcwd(), 'foldsTotal' + filenameInfixUnique + '.txt')
+            writeStreamFallback = open(pathFilenamePlanB, 'w')
+            writeStreamFallback.write(str(foldsTotal))
+            writeStreamFallback.close()
+            print(str(pathFilenamePlanB))
+        except Exception:
+            print(foldsTotal)
+    return None
+
+def saveFoldsTotalFAILearly(pathFilename: PathLike[str] | PurePath) -> None:
+    """
+    Preemptively test file write capabilities before beginning computation.
+
+    This function performs validation checks on the target file location before
+    a potentially long-running computation begins. It tests several critical
+    aspects of filesystem functionality to ensure results can be saved:
+
+    1. Checks if the file already exists to prevent accidental overwrites.
+    2. Verifies that parent directories exist.
+    3. Tests if the system can write a test value to the file.
+    4. Confirms that the written value can be read back correctly.
+
+    Parameters:
+        pathFilename: The path and filename where computation results will be saved.
+
+    Raises:
+        FileExistsError: If the target file already exists.
+        FileNotFoundError: If parent directories don't exist or if write tests fail.
+
+    Notes:
+        This function helps prevent a situation where a computation runs for
+        hours or days only to discover at the end that results cannot be saved.
+    """
+    if Path(pathFilename).exists():
+        raise FileExistsError(f"`{pathFilename = }` exists: a battle of overwriting might cause tears.")
+    if not Path(pathFilename).parent.exists():
+        raise FileNotFoundError(f"I received `{pathFilename = }` 0.000139 seconds ago from a function that promised it created the parent directory, but the parent directory does not exist. Fix that now, so your computation doesn't get deleted later. And be compassionate to others.")
+    foldsTotal = 149302889205120
+    _saveFoldsTotal(pathFilename, foldsTotal)
+    if not Path(pathFilename).exists():
+        raise FileNotFoundError(f"I just wrote a test file to `{pathFilename = }`, but it does not exist. Fix that now, so your computation doesn't get deleted later. And continually improve your empathy skills.")
+    foldsTotalRead = int(Path(pathFilename).read_text())
+    if foldsTotalRead != foldsTotal:
+        raise FileNotFoundError(f"I wrote a test file to `{pathFilename = }` with contents of `{str(foldsTotal) = }`, but I read `{foldsTotalRead = }` from the file. Python says the values are not equal. Fix that now, so your computation doesn't get corrupted later. And be pro-social.")
+
+def writeStringToHere(this: str, pathFilename: PathLike[str] | PurePath) -> None:
+    """
+    Write a string to a file, creating parent directories if needed.
+
+    This utility function provides a consistent interface for writing string content
+    to files across the package. It handles path creation and ensures proper
+    string conversion.
+
+    Parameters:
+        this: The string content to write to the file.
+        pathFilename: The target file path where the string should be written.
+
+    Notes:
+        This function creates all parent directories in the path if they don't exist,
+        making it safe to use with newly created directory structures.
+    """
+    pathFilename = Path(pathFilename)
+    pathFilename.parent.mkdir(parents=True, exist_ok=True)
+    pathFilename.write_text(str(this))
+    return None