mapFolding 0.8.2-py3-none-any.whl → 0.8.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. mapFolding/__init__.py +6 -2
  2. mapFolding/basecamp.py +11 -5
  3. mapFolding/filesystem.py +134 -109
  4. mapFolding/oeis.py +1 -1
  5. mapFolding/reference/__init__.py +7 -0
  6. mapFolding/reference/jobsCompleted/[2x19]/p2x19.py +197 -0
  7. mapFolding/reference/jobsCompleted/__init__.py +50 -0
  8. mapFolding/reference/jobsCompleted/p2x19/p2x19.py +29 -0
  9. mapFolding/someAssemblyRequired/__init__.py +37 -18
  10. mapFolding/someAssemblyRequired/_theTypes.py +35 -0
  11. mapFolding/someAssemblyRequired/_tool_Make.py +92 -0
  12. mapFolding/someAssemblyRequired/_tool_Then.py +65 -0
  13. mapFolding/someAssemblyRequired/_toolboxAntecedents.py +326 -0
  14. mapFolding/someAssemblyRequired/_toolboxContainers.py +306 -0
  15. mapFolding/someAssemblyRequired/_toolboxPython.py +76 -0
  16. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +20 -1
  17. mapFolding/someAssemblyRequired/ingredientsNumba.py +17 -24
  18. mapFolding/someAssemblyRequired/synthesizeNumbaFlow.py +112 -149
  19. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +247 -0
  20. mapFolding/someAssemblyRequired/transformDataStructures.py +167 -100
  21. mapFolding/someAssemblyRequired/transformationTools.py +63 -678
  22. mapFolding/syntheticModules/__init__.py +1 -0
  23. mapFolding/syntheticModules/numbaCount_doTheNeedful.py +36 -33
  24. mapFolding/theDao.py +13 -11
  25. mapFolding/theSSOT.py +69 -119
  26. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/METADATA +4 -2
  27. mapfolding-0.8.4.dist-info/RECORD +49 -0
  28. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/WHEEL +1 -1
  29. tests/conftest.py +34 -29
  30. tests/test_computations.py +40 -31
  31. tests/test_filesystem.py +3 -3
  32. tests/test_other.py +4 -3
  33. mapFolding/someAssemblyRequired/synthesizeNumbaJobVESTIGIAL.py +0 -413
  34. mapfolding-0.8.2.dist-info/RECORD +0 -39
  35. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/entry_points.txt +0 -0
  36. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/licenses/LICENSE +0 -0
  37. {mapfolding-0.8.2.dist-info → mapfolding-0.8.4.dist-info}/top_level.txt +0 -0
@@ -20,149 +20,216 @@ While developed for transforming map folding computation state objects, the util
  designed to be applicable to various data structure transformation scenarios.
  """
 
- from collections.abc import Sequence
- from importlib import import_module as importlib_import_module
- from inspect import getsource as inspect_getsource
- from mapFolding.beDRY import outfitCountFolds, validateListDimensions
+ from collections.abc import Callable
+ from copy import deepcopy
+ from mapFolding.beDRY import outfitCountFolds
  from mapFolding.filesystem import getPathFilenameFoldsTotal
  from mapFolding.someAssemblyRequired import (
  ast_Identifier,
+ be,
  extractClassDef,
  ifThis,
- LedgerOfImports,
+ ImaAnnotationType,
+ importLogicalPath2Callable,
  Make,
- NodeCollector,
- strDotStrCuzPyStoopid,
+ NodeTourist,
+ parseLogicalPath2astModule,
+ str_nameDOTname,
  Then,
- Z0Z_executeActionUnlessDescendantMatches,
+ 又,
  )
- from mapFolding.theSSOT import ComputationState, The
- from pathlib import Path
- from types import ModuleType
+ from mapFolding.someAssemblyRequired._toolboxContainers import LedgerOfImports
+ from mapFolding.theSSOT import ComputationState, raiseIfNoneGitHubIssueNumber3, The
+ from os import PathLike
+ from pathlib import Path, PurePath
  from typing import Any, Literal, overload
  import ast
  import dataclasses
  import pickle
 
- # Would `LibCST` be better than `ast` in some cases? https://github.com/hunterhogan/mapFolding/issues/7
-
- countingIdentifierHARDCODED = 'groupsOfFolds'
+ # Create dummy AST elements for use as defaults
+ dummyAssign = Make.Assign([Make.Name("dummyTarget")], Make.Constant(None))
+ dummySubscript = Make.Subscript(Make.Name("dummy"), Make.Name("slice"))
+ dummyTuple = Make.Tuple([Make.Name("dummyElement")])
 
  @dataclasses.dataclass
  class ShatteredDataclass:
- astAssignDataclassRepack: ast.Assign
- astSubscriptPrimitiveTupleAnnotations4FunctionDef_returns: ast.Subscript
- astTuple4AssignTargetsToFragments: ast.Tuple
- countingVariableAnnotation: ast.expr
+ countingVariableAnnotation: ImaAnnotationType
+ """Type annotation for the counting variable extracted from the dataclass."""
  countingVariableName: ast.Name
- ledgerDataclassANDFragments: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
- list_ast_argAnnotated4ArgumentsSpecification: list[ast.arg] = dataclasses.field(default_factory=list)
- list_keyword4DataclassInitialization: list[ast.keyword] = dataclasses.field(default_factory=list)
- listAnnAssign4DataclassUnpack: list[ast.AnnAssign] = dataclasses.field(default_factory=list)
- listAnnotations: list[ast.expr] = dataclasses.field(default_factory=list)
- listNameDataclassFragments4Parameters: list[ast.Name] = dataclasses.field(default_factory=list)
-
- def shatter_dataclassesDOTdataclass(logicalPathModule: strDotStrCuzPyStoopid, dataclass_Identifier: ast_Identifier, instance_Identifier: ast_Identifier) -> ShatteredDataclass:
+ """AST name node representing the counting variable identifier."""
+ field2AnnAssign: dict[ast_Identifier, ast.AnnAssign] = dataclasses.field(default_factory=dict)
+ """Maps field names to their corresponding AST call expressions."""
+ Z0Z_field2AnnAssign: dict[ast_Identifier, tuple[ast.AnnAssign, str]] = dataclasses.field(default_factory=dict)
+ fragments4AssignmentOrParameters: ast.Tuple = dummyTuple
+ """AST tuple used as target for assignment to capture returned fragments."""
+ ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
+ """Import records for the dataclass and its constituent parts."""
+ list_argAnnotated4ArgumentsSpecification: list[ast.arg] = dataclasses.field(default_factory=list)
+ """Function argument nodes with annotations for parameter specification."""
+ list_keyword_field__field4init: list[ast.keyword] = dataclasses.field(default_factory=list)
+ """Keyword arguments for dataclass initialization with field=field format."""
+ listAnnotations: list[ImaAnnotationType] = dataclasses.field(default_factory=list)
+ """Type annotations for each dataclass field."""
+ listName4Parameters: list[ast.Name] = dataclasses.field(default_factory=list)
+ """Name nodes for each dataclass field used as function parameters."""
+ listUnpack: list[ast.AnnAssign] = dataclasses.field(default_factory=list)
+ """Annotated assignment statements to extract fields from dataclass."""
+ map_stateDOTfield2Name: dict[ast.expr, ast.Name] = dataclasses.field(default_factory=dict)
+ """Maps AST expressions to Name nodes for find-replace operations."""
+ repack: ast.Assign = dummyAssign
+ """AST assignment statement that reconstructs the original dataclass instance."""
+ signatureReturnAnnotation: ast.Subscript = dummySubscript
+ """tuple-based return type annotation for function definitions."""
+
+ @dataclasses.dataclass
+ class DeReConstructField2ast:
+ dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[str_nameDOTname]
+ dataclassClassDef: dataclasses.InitVar[ast.ClassDef]
+ dataclassesDOTdataclassInstance_Identifier: dataclasses.InitVar[ast_Identifier]
+ field: dataclasses.InitVar[dataclasses.Field[Any]]
+
+ ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
+
+ name: ast_Identifier = dataclasses.field(init=False)
+ typeBuffalo: type[Any] | str | Any = dataclasses.field(init=False)
+ default: Any | None = dataclasses.field(init=False)
+ default_factory: Callable[..., Any] | None = dataclasses.field(init=False)
+ repr: bool = dataclasses.field(init=False)
+ hash: bool | None = dataclasses.field(init=False)
+ init: bool = dataclasses.field(init=False)
+ compare: bool = dataclasses.field(init=False)
+ metadata: dict[Any, Any] = dataclasses.field(init=False)
+ kw_only: bool = dataclasses.field(init=False)
+
+ astName: ast.Name = dataclasses.field(init=False)
+ ast_keyword_field__field: ast.keyword = dataclasses.field(init=False)
+ ast_nameDOTname: ast.Attribute = dataclasses.field(init=False)
+ astAnnotation: ImaAnnotationType = dataclasses.field(init=False)
+ ast_argAnnotated: ast.arg = dataclasses.field(init=False)
+ astAnnAssignConstructor: ast.AnnAssign = dataclasses.field(init=False)
+ Z0Z_hack: tuple[ast.AnnAssign, str] = dataclasses.field(init=False)
+
+ def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstance_Identifier: ast_Identifier, field: dataclasses.Field[Any]) -> None:
+ self.compare = field.compare
+ self.default = field.default if field.default is not dataclasses.MISSING else None
+ self.default_factory = field.default_factory if field.default_factory is not dataclasses.MISSING else None
+ self.hash = field.hash
+ self.init = field.init
+ self.kw_only = field.kw_only if field.kw_only is not dataclasses.MISSING else False
+ self.metadata = dict(field.metadata)
+ self.name = field.name
+ self.repr = field.repr
+ self.typeBuffalo = field.type
+
+ self.astName = Make.Name(self.name)
+ self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
+ self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)
+
+ sherpa = NodeTourist(ifThis.isAnnAssign_targetIs(ifThis.isName_Identifier(self.name)), 又.annotation(Then.getIt)).captureLastMatch(dataclassClassDef)
+ if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
+ else: self.astAnnotation = sherpa
+
+ self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)
+
+ dtype = self.metadata.get('dtype', None)
+ if dtype:
+ constructor = 'array'
+ self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(Make.Name(constructor), list_astKeywords=[Make.keyword('dtype', Make.Name(dtype.__name__))]))
+ self.ledger.addImportFrom_asStr('numpy', constructor)
+ self.ledger.addImportFrom_asStr('numpy', dtype.__name__)
+ self.Z0Z_hack = (self.astAnnAssignConstructor, 'array')
+ elif be.Name(self.astAnnotation):
+ self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(self.astAnnotation, [Make.Constant(-1)]))
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id)
+ self.Z0Z_hack = (self.astAnnAssignConstructor, 'scalar')
+ elif be.Subscript(self.astAnnotation):
+ elementConstructor: ast_Identifier = self.metadata['elementConstructor']
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, elementConstructor)
+ takeTheTuple: ast.Tuple = deepcopy(self.astAnnotation.slice)
+ self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, takeTheTuple)
+ self.Z0Z_hack = (self.astAnnAssignConstructor, elementConstructor)
+ if be.Name(self.astAnnotation):
+ self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id) # pyright: ignore [reportUnknownArgumentType, reportUnknownMemberType, reportIJustCalledATypeGuardMethod_WTF]
+
+ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclass_Identifier: ast_Identifier, instance_Identifier: ast_Identifier) -> ShatteredDataclass:
  """
67
157
  Parameters:
68
158
  logicalPathModule: gimme string cuz python is stoopid
69
159
  dataclass_Identifier: The identifier of the dataclass to be dismantled.
70
160
  instance_Identifier: In the synthesized module/function/scope, the identifier that will be used for the instance.
71
161
  """
72
- # TODO learn whether dataclasses.make_dataclass would be useful to transform the target dataclass into the `ShatteredDataclass`
73
-
74
- module: ast.Module = ast.parse(inspect_getsource(importlib_import_module(logicalPathModule)))
75
- astName_dataclassesDOTdataclass = Make.astName(dataclass_Identifier)
76
-
77
- dataclass = extractClassDef(dataclass_Identifier, module)
78
- if not isinstance(dataclass, ast.ClassDef):
79
- raise ValueError(f"I could not find {dataclass_Identifier=} in {logicalPathModule=}.")
80
-
81
- ledgerDataclassANDFragments = LedgerOfImports()
82
- list_ast_argAnnotated4ArgumentsSpecification: list[ast.arg] = []
83
- list_keyword4DataclassInitialization: list[ast.keyword] = []
84
- listAnnAssign4DataclassUnpack: list[ast.AnnAssign] = []
85
- listAnnotations: list[ast.expr] = []
86
- listNameDataclassFragments4Parameters: list[ast.Name] = []
87
-
88
- # TODO get the value from `groupsOfFolds: DatatypeFoldsTotal = dataclasses.field(default=DatatypeFoldsTotal(0), metadata={'theCountingIdentifier': True})`
89
- countingVariable = countingIdentifierHARDCODED
90
-
91
- addToLedgerPredicate = ifThis.isAnnAssignAndAnnotationIsName
92
- addToLedgerAction = Then.Z0Z_ledger(logicalPathModule, ledgerDataclassANDFragments)
93
- addToLedger = NodeCollector(addToLedgerPredicate, [addToLedgerAction])
94
-
95
- exclusionPredicate = ifThis.is_keyword_IdentifierEqualsConstantValue('init', False)
96
- appendKeywordAction = Then.Z0Z_appendKeywordMirroredTo(list_keyword4DataclassInitialization)
97
- filteredAppendKeywordAction = Z0Z_executeActionUnlessDescendantMatches(exclusionPredicate, appendKeywordAction) # type: ignore
98
-
99
- NodeCollector(
100
- ifThis.isAnnAssignAndTargetIsName,
101
- [Then.Z0Z_appendAnnAssignOf_nameDOTnameTo(instance_Identifier, listAnnAssign4DataclassUnpack)
102
- , Then.append_targetTo(listNameDataclassFragments4Parameters) # type: ignore
103
- , lambda node: addToLedger.visit(node)
104
- , filteredAppendKeywordAction
105
- , lambda node: list_ast_argAnnotated4ArgumentsSpecification.append(Make.ast_arg(node.target.id, node.annotation)) # type: ignore
106
- , lambda node: listAnnotations.append(node.annotation) # type: ignore
107
- ]
108
- ).visit(dataclass)
162
+ Official_fieldOrder: list[ast_Identifier] = []
163
+ dictionaryDeReConstruction: dict[ast_Identifier, DeReConstructField2ast] = {}
164
+
165
+ dataclassClassDef = extractClassDef(parseLogicalPath2astModule(logicalPathModule), dataclass_Identifier)
166
+ if not isinstance(dataclassClassDef, ast.ClassDef): raise ValueError(f"I could not find {dataclass_Identifier=} in {logicalPathModule=}.")
167
+
168
+ countingVariable = None
169
+ for aField in dataclasses.fields(importLogicalPath2Callable(logicalPathModule, dataclass_Identifier)): # pyright: ignore [reportArgumentType]
170
+ Official_fieldOrder.append(aField.name)
171
+ dictionaryDeReConstruction[aField.name] = DeReConstructField2ast(logicalPathModule, dataclassClassDef, instance_Identifier, aField)
172
+ if aField.metadata.get('theCountingIdentifier', False):
173
+ countingVariable = dictionaryDeReConstruction[aField.name].name
174
+
175
+ if countingVariable is None:
176
+ raise ValueError(f"I could not find the counting variable in {dataclass_Identifier=} in {logicalPathModule=}.")
109
177
 
110
178
  shatteredDataclass = ShatteredDataclass(
- astAssignDataclassRepack = Make.astAssign(listTargets=[Make.astName(instance_Identifier)], value=Make.astCall(astName_dataclassesDOTdataclass, list_astKeywords=list_keyword4DataclassInitialization))
- , astSubscriptPrimitiveTupleAnnotations4FunctionDef_returns = Make.astSubscript(Make.astName('tuple'), Make.astTuple(listAnnotations))
- , astTuple4AssignTargetsToFragments = Make.astTuple(listNameDataclassFragments4Parameters, ast.Store())
- , countingVariableAnnotation = next(ast_arg.annotation for ast_arg in list_ast_argAnnotated4ArgumentsSpecification if ast_arg.arg == countingVariable) or Make.astName('Any')
- , countingVariableName = Make.astName(countingVariable)
- , ledgerDataclassANDFragments = ledgerDataclassANDFragments
- , list_ast_argAnnotated4ArgumentsSpecification = list_ast_argAnnotated4ArgumentsSpecification
- , list_keyword4DataclassInitialization = list_keyword4DataclassInitialization
- , listAnnAssign4DataclassUnpack = listAnnAssign4DataclassUnpack
- , listAnnotations = listAnnotations
- , listNameDataclassFragments4Parameters = listNameDataclassFragments4Parameters
- )
-
- shatteredDataclass.ledgerDataclassANDFragments.addImportFromStr(logicalPathModule, dataclass_Identifier)
- return shatteredDataclass
+ countingVariableAnnotation=dictionaryDeReConstruction[countingVariable].astAnnotation,
+ countingVariableName=dictionaryDeReConstruction[countingVariable].astName,
+ field2AnnAssign={dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].astAnnAssignConstructor for field in Official_fieldOrder},
+ Z0Z_field2AnnAssign={dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].Z0Z_hack for field in Official_fieldOrder},
+ list_argAnnotated4ArgumentsSpecification=[dictionaryDeReConstruction[field].ast_argAnnotated for field in Official_fieldOrder],
+ list_keyword_field__field4init=[dictionaryDeReConstruction[field].ast_keyword_field__field for field in Official_fieldOrder if dictionaryDeReConstruction[field].init],
+ listAnnotations=[dictionaryDeReConstruction[field].astAnnotation for field in Official_fieldOrder],
+ listName4Parameters=[dictionaryDeReConstruction[field].astName for field in Official_fieldOrder],
+ listUnpack=[Make.AnnAssign(dictionaryDeReConstruction[field].astName, dictionaryDeReConstruction[field].astAnnotation, dictionaryDeReConstruction[field].ast_nameDOTname) for field in Official_fieldOrder],
+ map_stateDOTfield2Name={dictionaryDeReConstruction[field].ast_nameDOTname: dictionaryDeReConstruction[field].astName for field in Official_fieldOrder},
+ )
+ shatteredDataclass.fragments4AssignmentOrParameters = Make.Tuple(shatteredDataclass.listName4Parameters, ast.Store())
+ shatteredDataclass.repack = Make.Assign(listTargets=[Make.Name(instance_Identifier)], value=Make.Call(Make.Name(dataclass_Identifier), list_astKeywords=shatteredDataclass.list_keyword_field__field4init))
+ shatteredDataclass.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclass.listAnnotations))
+
+ shatteredDataclass.ledger.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
+ shatteredDataclass.ledger.addImportFrom_asStr(logicalPathModule, dataclass_Identifier)
 
- def getSourceAlgorithmVESTIGIAL() -> ModuleType:
- moduleImported: ModuleType = importlib_import_module(The.logicalPathModuleSourceAlgorithm)
- return moduleImported
+ return shatteredDataclass
 
  @overload
- def makeStateJobOUTDATED(listDimensions: Sequence[int], *, writeJob: Literal[True], **keywordArguments: Any) -> Path: ...
+ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: Literal[True], *, pathFilename: PathLike[str] | PurePath | None = None, **keywordArguments: Any) -> Path: ...
  @overload
- def makeStateJobOUTDATED(listDimensions: Sequence[int], *, writeJob: Literal[False], **keywordArguments: Any) -> ComputationState: ...
- def makeStateJobOUTDATED(listDimensions: Sequence[int], *, writeJob: bool = True, **keywordArguments: Any) -> ComputationState | Path:
+ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: Literal[False] = False, **keywordArguments: Any) -> ComputationState: ...
+ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: bool = False, *, pathFilename: PathLike[str] | PurePath | None = None, **keywordArguments: Any) -> ComputationState | Path:
  """
- Creates a computation state job for map folding calculations and optionally saves it to disk.
+ Initializes a computation state and optionally saves it to disk.
 
- This function initializes a computation state for map folding calculations based on the given dimensions,
- sets up the initial counting configuration, and can optionally save the state to a pickle file.
+ This function initializes a computation state using the source algorithm.
 
- Parameters:
- listDimensions: List of integers representing the dimensions of the map to be folded.
- writeJob (True): Whether to save the state to disk.
- **keywordArguments: Additional keyword arguments to pass to the computation state initialization.
+ Hint: If you want an uninitialized state, call `outfitCountFolds` directly.
 
+ Parameters:
+ mapShape: List of integers representing the dimensions of the map to be folded.
+ writeJob (False): Whether to save the state to disk.
+ pathFilename (getPathFilenameFoldsTotal.pkl): The path and filename to save the state. If None, uses a default path.
+ **keywordArguments: computationDivisions:int|str|None=None,concurrencyLimit:int=1.
  Returns:
  stateUniversal|pathFilenameJob: The computation state for the map folding calculations, or
  the path to the saved state file if writeJob is True.
  """
- mapShape = validateListDimensions(listDimensions)
  stateUniversal: ComputationState = outfitCountFolds(mapShape, **keywordArguments)
 
- moduleSource: ModuleType = getSourceAlgorithmVESTIGIAL()
- # TODO `countInitialize` is hardcoded
- stateUniversal = moduleSource.countInitialize(stateUniversal)
+ initializeState = importLogicalPath2Callable(The.logicalPathModuleSourceAlgorithm, The.sourceCallableInitialize)
+ stateUniversal = initializeState(stateUniversal)
 
  if not writeJob:
  return stateUniversal
 
- pathFilenameChopChop = getPathFilenameFoldsTotal(stateUniversal.mapShape, None)
- suffix = pathFilenameChopChop.suffix
- pathJob = Path(str(pathFilenameChopChop)[0:-len(suffix)])
- pathJob.mkdir(parents=True, exist_ok=True)
- pathFilenameJob = pathJob / 'stateJob.pkl'
+ if pathFilename:
+ pathFilenameJob = Path(pathFilename)
+ pathFilenameJob.parent.mkdir(parents=True, exist_ok=True)
+ else:
+ pathFilenameJob = getPathFilenameFoldsTotal(stateUniversal.mapShape).with_suffix('.pkl')
 
  pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
  return pathFilenameJob
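
Note on the renamed entry point: the hunk above replaces makeStateJobOUTDATED with makeInitializedComputationState, whose overloads now return either a ComputationState or a Path depending on writeJob. A minimal usage sketch follows; it assumes the function is imported from mapFolding.someAssemblyRequired.transformDataStructures (the hunk's line counts match that file's +167 -100 entry in the list above), the (2, 19) shape simply echoes the p2x19 reference job, and the pathFilename value is a hypothetical example.

  from mapFolding.someAssemblyRequired.transformDataStructures import makeInitializedComputationState

  # writeJob defaults to False: returns an initialized ComputationState in memory.
  state = makeInitializedComputationState((2, 19))

  # writeJob=True: pickles the initialized state and returns the Path to the file.
  # The pathFilename below is illustrative only; omit it to use the default location
  # derived from getPathFilenameFoldsTotal(...).with_suffix('.pkl').
  pathFilenameJob = makeInitializedComputationState((2, 19), writeJob=True, pathFilename='jobs/p2x19/stateJob.pkl')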