mapFolding-0.11.0-py3-none-any.whl → mapFolding-0.11.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapFolding/__init__.py +18 -75
- mapFolding/basecamp.py +13 -10
- mapFolding/beDRY.py +113 -2
- mapFolding/dataBaskets.py +24 -2
- mapFolding/{toolboxFilesystem.py → filesystemToolkit.py} +3 -3
- mapFolding/infoBooth.py +96 -0
- mapFolding/oeis.py +3 -2
- mapFolding/someAssemblyRequired/RecipeJob.py +3 -4
- mapFolding/someAssemblyRequired/Z0Z_makeSomeModules.py +187 -22
- mapFolding/someAssemblyRequired/__init__.py +10 -5
- mapFolding/someAssemblyRequired/_toolIfThis.py +10 -4
- mapFolding/someAssemblyRequired/{_toolboxContainers.py → _toolkitContainers.py} +15 -17
- mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +9 -8
- mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +5 -3
- mapFolding/someAssemblyRequired/{toolboxNumba.py → toolkitNumba.py} +2 -2
- mapFolding/someAssemblyRequired/transformationTools.py +17 -50
- mapFolding/syntheticModules/countParallel.py +98 -0
- mapFolding/syntheticModules/dataPacking.py +1 -1
- mapFolding/syntheticModules/numbaCount.py +189 -188
- mapFolding/theDao.py +1 -1
- mapFolding/theSSOT.py +4 -243
- {mapfolding-0.11.0.dist-info → mapfolding-0.11.2.dist-info}/METADATA +16 -8
- mapfolding-0.11.2.dist-info/RECORD +56 -0
- {mapfolding-0.11.0.dist-info → mapfolding-0.11.2.dist-info}/WHEEL +1 -1
- tests/conftest.py +7 -9
- tests/test_computations.py +1 -1
- tests/test_filesystem.py +1 -2
- tests/test_other.py +1 -1
- tests/test_tasks.py +1 -3
- mapfolding-0.11.0.dist-info/RECORD +0 -54
- {mapfolding-0.11.0.dist-info → mapfolding-0.11.2.dist-info}/entry_points.txt +0 -0
- {mapfolding-0.11.0.dist-info → mapfolding-0.11.2.dist-info}/licenses/LICENSE +0 -0
- {mapfolding-0.11.0.dist-info → mapfolding-0.11.2.dist-info}/top_level.txt +0 -0
@@ -18,10 +18,9 @@ readable, maintainable implementations to highly optimized versions while preser
 logical structure and correctness.
 """

-from
-from
-from
-from mapFolding.beDRY import outfitCountFolds
+from collections.abc import Callable
+from astToolkit import ClassIsAndAttribute
+from mapFolding import outfitCountFolds, ComputationState, The, getPathFilenameFoldsTotal
 from mapFolding.someAssemblyRequired import (
 ast_Identifier,
 astModuleToIngredientsFunction,
@@ -33,19 +32,19 @@ from mapFolding.someAssemblyRequired import (
 importLogicalPath2Callable,
 IngredientsFunction,
 IngredientsModule,
+inlineFunctionDef,
 LedgerOfImports,
 Make,
 NodeChanger,
 NodeTourist,
 parseLogicalPath2astModule,
 RecipeSynthesizeFlow,
+removeUnusedParameters,
 ShatteredDataclass,
 str_nameDOTname,
 Then,
-
+unparseFindReplace,
 )
-from mapFolding.theSSOT import ComputationState, The
-from mapFolding.toolboxFilesystem import getPathFilenameFoldsTotal
 from os import PathLike
 from pathlib import Path, PurePath
 from typing import Any, Literal, overload
@@ -116,8 +115,8 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
 instance_Identifier: The variable name to use for the dataclass instance in generated code.

 Returns:
-A ShatteredDataclass containing AST representations of all dataclass components,
-
+shatteredDataclass: A ShatteredDataclass containing AST representations of all dataclass components,
+with imports, field definitions, annotations, and repackaging code.

 Raises:
 ValueError: If the dataclass cannot be found in the specified module or if no counting variable is identified in the dataclass.
@@ -136,7 +135,9 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
 countingVariable = dictionaryDeReConstruction[aField.name].name

 if countingVariable is None:
-
+import warnings
+warnings.warn(message=f"I could not find the counting variable in `{dataclass_Identifier = }` in `{logicalPathModule = }`.", category=UserWarning)
+raise Exception

 shatteredDataclass = ShatteredDataclass(
 countingVariableAnnotation=dictionaryDeReConstruction[countingVariable].astAnnotation,
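For orientation, a hypothetical call of this function follows. The module path and the first two keyword names are inferred from the docstring and the warning message in the hunks above, and the argument values are illustrative assumptions, not taken from the released package.

# Hypothetical usage sketch. The import path (transformationTools) and the
# parameter names logicalPathModule / dataclass_Identifier / instance_Identifier
# are inferred from this diff, not verified against the released API; the
# argument values are assumptions chosen for illustration.
from mapFolding.someAssemblyRequired.transformationTools import shatter_dataclassesDOTdataclass

shatteredDataclass = shatter_dataclassesDOTdataclass(
    logicalPathModule='mapFolding.dataBaskets',   # assumed: module holding the state dataclasses
    dataclass_Identifier='MapFoldingState',       # assumed: dataclass to shatter
    instance_Identifier='state',                  # assumed: instance name used in generated code
)
print(shatteredDataclass.countingVariableName)    # attribute name appears elsewhere in this diff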
@@ -230,7 +231,7 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
 if recipeFlow.removeDataclassParallel:
 ingredientsParallel.astFunctionDef.args = Make.arguments(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)

-ingredientsParallel.astFunctionDef =
+ingredientsParallel.astFunctionDef = unparseFindReplace(ingredientsParallel.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)

 ingredientsParallel = removeUnusedParameters(ingredientsParallel)

@@ -257,7 +258,7 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
 changeReturnParallelCallable = NodeChanger(Be.Return, Then.replaceWith(Make.Return(shatteredDataclass.countingVariableName)))
 ingredientsParallel.astFunctionDef.returns = shatteredDataclass.countingVariableAnnotation

-unpack4parallelCallable = NodeChanger(
+unpack4parallelCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))

 unpack4parallelCallable.visit(ingredientsDispatcher.astFunctionDef)
 replaceCall2concurrencyManager.visit(ingredientsDispatcher.astFunctionDef)
@@ -274,14 +275,14 @@ def removeDataclassFromFunction(ingredientsTarget: IngredientsFunction, shattere
 ingredientsTarget.astFunctionDef.returns = shatteredDataclass.signatureReturnAnnotation
 changeReturnCallable = NodeChanger(Be.Return, Then.replaceWith(Make.Return(shatteredDataclass.fragments4AssignmentOrParameters)))
 changeReturnCallable.visit(ingredientsTarget.astFunctionDef)
-ingredientsTarget.astFunctionDef =
+ingredientsTarget.astFunctionDef = unparseFindReplace(ingredientsTarget.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
 return ingredientsTarget

 def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFunction, targetCallableIdentifier: ast_Identifier, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
 astCallTargetCallable = Make.Call(Make.Name(targetCallableIdentifier), shatteredDataclass.listName4Parameters)
-replaceAssignTargetCallable = NodeChanger(
-unpack4targetCallable = NodeChanger(
-repack4targetCallable = NodeChanger(
+replaceAssignTargetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign([shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
+unpack4targetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
+repack4targetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisBelow([shatteredDataclass.repack]))
 replaceAssignTargetCallable.visit(ingredientsCaller.astFunctionDef)
 unpack4targetCallable.visit(ingredientsCaller.astFunctionDef)
 repack4targetCallable.visit(ingredientsCaller.astFunctionDef)
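The three NodeChanger passes above rewrite a caller so that the dataclass is unpacked into locals just above the call (listUnpack), the target is invoked with flat parameters and its result tuple assigned back (fragments4AssignmentOrParameters), and the fields are repacked into a fresh dataclass instance just below the call (repack). A minimal, self-contained sketch of the resulting shape, using a toy dataclass and a dummy target; every name here is a stand-in, not the generated output:

from dataclasses import dataclass

@dataclass
class TinyState:                      # stand-in for the real computation-state dataclass
    groupsOfFolds: int
    leaf1ndex: int

def countSomething(groupsOfFolds: int, leaf1ndex: int) -> tuple[int, int]:
    return groupsOfFolds + 1, leaf1ndex   # dummy target callable

state = TinyState(groupsOfFolds=0, leaf1ndex=1)
# inserted above the call (shatteredDataclass.listUnpack): one assignment per field
groupsOfFolds = state.groupsOfFolds
leaf1ndex = state.leaf1ndex
# the rewritten assignment: call the target with flat parameters, reassign the tuple
groupsOfFolds, leaf1ndex = countSomething(groupsOfFolds, leaf1ndex)
# inserted below the call (shatteredDataclass.repack): rebuild the dataclass instance
state = TinyState(groupsOfFolds=groupsOfFolds, leaf1ndex=leaf1ndex)

The generated countParallel.py later in this diff (unRepackParallelMapFoldingState) shows the same unpack, call, repack shape at full scale.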
@@ -294,37 +295,3 @@ dictionaryEstimates: dict[tuple[int, ...], int] = {
 (3,3,3,3): 85109616000000000000000000000000,
 (8,8): 791274195985524900,
 }
-
-# END of marginal classes and functions ======================================================
-def Z0Z_lameFindReplace(astTree: 个, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]) -> 个:
-"""
-Recursively replace AST nodes based on a mapping of find-replace pairs.
-
-This function applies brute-force node replacement throughout an AST tree
-by comparing textual representations of nodes. While not the most efficient
-approach, it provides a reliable way to replace complex nested structures
-when more precise targeting methods are difficult to implement.
-
-The function continues replacing nodes until no more changes are detected
-in the AST's textual representation, ensuring complete replacement throughout
-the tree structure.
-
-Parameters:
-astTree: The AST structure to modify.
-mappingFindReplaceNodes: A mapping from source nodes to replacement nodes.
-
-Returns:
-The modified AST structure with all matching nodes replaced.
-"""
-keepGoing = True
-newTree = deepcopy(astTree)
-
-while keepGoing:
-for nodeFind, nodeReplace in mappingFindReplaceNodes.items():
-NodeChanger(IfThis.unparseIs(nodeFind), Then.replaceWith(nodeReplace)).visit(newTree)
-
-if ast.unparse(newTree) == ast.unparse(astTree):
-keepGoing = False
-else:
-astTree = deepcopy(newTree)
-return newTree
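The removed Z0Z_lameFindReplace describes a brute-force, fixed-point find/replace over the unparsed text of AST nodes; its role is taken over by unparseFindReplace, now imported from mapFolding.someAssemblyRequired. A self-contained sketch of that technique using only the standard ast module; the function name and the example mapping below are illustrative, and this is not the package's unparseFindReplace implementation:

import ast
from collections.abc import Mapping
from copy import deepcopy

def bruteForceFindReplace(astTree: ast.Module, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]) -> ast.Module:
    """Replace every node whose unparsed text matches a 'find' node; repeat until stable."""
    mappingText = {ast.unparse(nodeFind): nodeReplace for nodeFind, nodeReplace in mappingFindReplaceNodes.items()}

    class Replacer(ast.NodeTransformer):
        def generic_visit(self, node: ast.AST) -> ast.AST:
            node = super().generic_visit(node)           # rewrite children first
            replacement = mappingText.get(ast.unparse(node))
            return deepcopy(replacement) if replacement is not None else node

    newTree = deepcopy(astTree)
    while True:                                          # fixed point: stop when the text stops changing
        textBefore = ast.unparse(newTree)
        newTree = ast.fix_missing_locations(Replacer().visit(newTree))
        if ast.unparse(newTree) == textBefore:
            return newTree

# Illustrative use: rewrite `state.groupsOfFolds` accesses to a bare local name.
tree = ast.parse("state.groupsOfFolds = state.groupsOfFolds + 1")
find = ast.parse("state.groupsOfFolds", mode="eval").body
print(ast.unparse(bruteForceFindReplace(tree, {find: ast.Name(id="groupsOfFolds")})))
# prints: groupsOfFolds = groupsOfFolds + 1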
@@ -0,0 +1,98 @@
+from concurrent.futures import Future as ConcurrentFuture, ProcessPoolExecutor
+from copy import deepcopy
+from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, ParallelMapFoldingState
+from multiprocessing import set_start_method as multiprocessing_set_start_method
+from numba import jit
+if __name__ == '__main__':
+multiprocessing_set_start_method('spawn')
+
+@jit(cache=True, error_model='numpy', fastmath=True, forceinline=True)
+def count(groupsOfFolds: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, taskIndex: DatatypeLeavesTotal) -> tuple[DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, Array1DLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array3D, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal]:
+while leaf1ndex > 0:
+if leaf1ndex <= 1 or leafBelow[0] == 1:
+if leaf1ndex > leavesTotal:
+groupsOfFolds += 1
+else:
+dimensionsUnconstrained = dimensionsTotal
+gap1ndexCeiling = gapRangeStart[leaf1ndex - 1]
+indexDimension = 0
+while indexDimension < dimensionsTotal:
+leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]
+if leafConnectee == leaf1ndex:
+dimensionsUnconstrained -= 1
+else:
+while leafConnectee != leaf1ndex:
+if leaf1ndex != taskDivisions or leafConnectee % taskDivisions == taskIndex:
+gapsWhere[gap1ndexCeiling] = leafConnectee
+if countDimensionsGapped[leafConnectee] == 0:
+gap1ndexCeiling += 1
+countDimensionsGapped[leafConnectee] += 1
+leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
+indexDimension += 1
+if not dimensionsUnconstrained:
+indexLeaf = 0
+while indexLeaf < leaf1ndex:
+gapsWhere[gap1ndexCeiling] = indexLeaf
+gap1ndexCeiling += 1
+indexLeaf += 1
+indexMiniGap = gap1ndex
+while indexMiniGap < gap1ndexCeiling:
+gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
+if countDimensionsGapped[gapsWhere[indexMiniGap]] == dimensionsUnconstrained:
+gap1ndex += 1
+countDimensionsGapped[gapsWhere[indexMiniGap]] = 0
+indexMiniGap += 1
+while leaf1ndex > 0 and gap1ndex == gapRangeStart[leaf1ndex - 1]:
+leaf1ndex -= 1
+leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
+leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
+if leaf1ndex > 0:
+gap1ndex -= 1
+leafAbove[leaf1ndex] = gapsWhere[gap1ndex]
+leafBelow[leaf1ndex] = leafBelow[leafAbove[leaf1ndex]]
+leafBelow[leafAbove[leaf1ndex]] = leaf1ndex
+leafAbove[leafBelow[leaf1ndex]] = leaf1ndex
+gapRangeStart[leaf1ndex] = gap1ndex
+leaf1ndex += 1
+return (groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal, taskDivisions, taskIndex)
+
+def unRepackParallelMapFoldingState(state: ParallelMapFoldingState) -> ParallelMapFoldingState:
+mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
+groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
+gap1ndex: DatatypeElephino = state.gap1ndex
+gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
+indexDimension: DatatypeLeavesTotal = state.indexDimension
+indexLeaf: DatatypeLeavesTotal = state.indexLeaf
+indexMiniGap: DatatypeElephino = state.indexMiniGap
+leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
+leafConnectee: DatatypeLeavesTotal = state.leafConnectee
+dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
+countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
+gapRangeStart: Array1DElephino = state.gapRangeStart
+gapsWhere: Array1DLeavesTotal = state.gapsWhere
+leafAbove: Array1DLeavesTotal = state.leafAbove
+leafBelow: Array1DLeavesTotal = state.leafBelow
+connectionGraph: Array3D = state.connectionGraph
+dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
+leavesTotal: DatatypeLeavesTotal = state.leavesTotal
+taskDivisions: DatatypeLeavesTotal = state.taskDivisions
+taskIndex: DatatypeLeavesTotal = state.taskIndex
+groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal, taskDivisions, taskIndex = count(groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal, taskDivisions, taskIndex)
+state = ParallelMapFoldingState(mapShape=mapShape, groupsOfFolds=groupsOfFolds, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, dimensionsUnconstrained=dimensionsUnconstrained, countDimensionsGapped=countDimensionsGapped, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow, taskDivisions=taskDivisions, taskIndex=taskIndex)
+return state
+
+def doTheNeedful(state: ParallelMapFoldingState, concurrencyLimit: int) -> tuple[int, list[ParallelMapFoldingState]]:
+stateParallel = deepcopy(state)
+listStatesParallel: list[ParallelMapFoldingState] = [stateParallel] * stateParallel.taskDivisions
+groupsOfFoldsTotal: int = 0
+dictionaryConcurrency: dict[int, ConcurrentFuture[ParallelMapFoldingState]] = {}
+with ProcessPoolExecutor(concurrencyLimit) as concurrencyManager:
+for indexSherpa in range(stateParallel.taskDivisions):
+state = deepcopy(stateParallel)
+state.taskIndex = indexSherpa
+dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(unRepackParallelMapFoldingState, state)
+for indexSherpa in range(stateParallel.taskDivisions):
+listStatesParallel[indexSherpa] = dictionaryConcurrency[indexSherpa].result()
+groupsOfFoldsTotal += listStatesParallel[indexSherpa].groupsOfFolds
+foldsTotal: int = groupsOfFoldsTotal * stateParallel.leavesTotal
+return (foldsTotal, listStatesParallel)
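doTheNeedful above fans the same starting state out to taskDivisions worker processes, one per taskIndex, sums the per-task groupsOfFolds, and multiplies by leavesTotal to obtain foldsTotal; inside count, the check `leaf1ndex != taskDivisions or leafConnectee % taskDivisions == taskIndex` is what keeps the slices disjoint. A minimal, self-contained sketch of that fan-out and aggregate pattern, with a dummy worker in place of the numba-compiled count; the names and values below are stand-ins, not the package's API:

from concurrent.futures import ProcessPoolExecutor

def countOneSlice(taskIndex: int, taskDivisions: int) -> int:
    # Dummy worker: each slice pretends to find one group of folds.
    return 1

if __name__ == '__main__':
    taskDivisions = 4        # assumed value; in the package this comes from the state
    leavesTotal = 8          # assumed value
    groupsOfFoldsTotal = 0
    with ProcessPoolExecutor(max_workers=taskDivisions) as concurrencyManager:
        futures = [concurrencyManager.submit(countOneSlice, taskIndex, taskDivisions)
                   for taskIndex in range(taskDivisions)]
        for future in futures:
            groupsOfFoldsTotal += future.result()   # sum the per-task partial counts
    foldsTotal = groupsOfFoldsTotal * leavesTotal   # same final aggregation as doTheNeedful
    print(foldsTotal)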
@@ -1,7 +1,7 @@
 from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, MapFoldingState
 from mapFolding.syntheticModules.theorem2Numba import count

-def
+def sequential(state: MapFoldingState) -> MapFoldingState:
 mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
 groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
 gap1ndex: DatatypeElephino = state.gap1ndex