mapFolding 0.16.2__py3-none-any.whl → 0.17.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- easyRun/A000682.py +2 -2
- easyRun/NOTcountingFolds.py +16 -8
- easyRun/countFolds.py +9 -2
- easyRun/generateAllModules.py +14 -0
- easyRun/meanders.py +4 -4
- mapFolding/__init__.py +1 -0
- mapFolding/_theSSOT.py +3 -2
- mapFolding/_theTypes.py +3 -0
- mapFolding/algorithms/A000136constraintPropagation.py +95 -0
- mapFolding/algorithms/A000136elimination.py +163 -0
- mapFolding/algorithms/A000136eliminationParallel.py +77 -0
- mapFolding/algorithms/A086345.py +75 -0
- mapFolding/algorithms/matrixMeanders.py +59 -18
- mapFolding/algorithms/matrixMeandersNumPyndas.py +841 -0
- mapFolding/algorithms/oeisIDbyFormula.py +2 -2
- mapFolding/algorithms/symmetricFolds.py +35 -0
- mapFolding/basecamp.py +100 -153
- mapFolding/dataBaskets.py +142 -65
- mapFolding/filesystemToolkit.py +4 -32
- mapFolding/oeis.py +5 -12
- mapFolding/reference/A086345Wu.py +25 -0
- mapFolding/reference/irvineJavaPort.py +3 -3
- mapFolding/reference/matrixMeandersAnalysis/signatures.py +3 -0
- mapFolding/reference/meandersDumpingGround/matrixMeandersNumPyV1finalForm.py +1 -1
- mapFolding/someAssemblyRequired/A007822/A007822rawMaterials.py +10 -45
- mapFolding/someAssemblyRequired/A007822/_asynchronousAnnex.py +51 -0
- mapFolding/someAssemblyRequired/A007822/makeA007822AsynchronousModules.py +39 -196
- mapFolding/someAssemblyRequired/A007822/makeA007822Modules.py +57 -43
- mapFolding/someAssemblyRequired/RecipeJob.py +84 -34
- mapFolding/someAssemblyRequired/__init__.py +4 -8
- mapFolding/someAssemblyRequired/_toolkitContainers.py +38 -7
- mapFolding/someAssemblyRequired/infoBooth.py +41 -23
- mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +140 -164
- mapFolding/someAssemblyRequired/makeJobTheorem2codon.py +63 -96
- mapFolding/someAssemblyRequired/makingModules_count.py +26 -30
- mapFolding/someAssemblyRequired/makingModules_doTheNeedful.py +10 -72
- mapFolding/someAssemblyRequired/{mapFolding → mapFoldingModules}/makeMapFoldingModules.py +30 -35
- mapFolding/someAssemblyRequired/meanders/makeMeandersModules.py +13 -11
- mapFolding/someAssemblyRequired/toolkitMakeModules.py +5 -31
- mapFolding/someAssemblyRequired/toolkitNumba.py +3 -2
- mapFolding/someAssemblyRequired/transformationTools.py +12 -15
- mapFolding/syntheticModules/A007822/algorithm.py +45 -50
- mapFolding/syntheticModules/A007822/asynchronous.py +92 -36
- mapFolding/syntheticModules/A007822/initializeState.py +19 -23
- mapFolding/syntheticModules/A007822/theorem2.py +20 -24
- mapFolding/syntheticModules/A007822/theorem2Numba.py +23 -25
- mapFolding/syntheticModules/A007822/theorem2Trimmed.py +19 -23
- mapFolding/syntheticModules/countParallelNumba.py +1 -2
- mapFolding/syntheticModules/daoOfMapFoldingNumba.py +5 -4
- mapFolding/syntheticModules/initializeState.py +1 -1
- mapFolding/syntheticModules/meanders/bigInt.py +59 -22
- mapFolding/syntheticModules/theorem2.py +1 -1
- mapFolding/syntheticModules/theorem2Numba.py +30 -9
- mapFolding/syntheticModules/theorem2Trimmed.py +2 -2
- mapFolding/tests/test_computations.py +29 -3
- {mapfolding-0.16.2.dist-info → mapfolding-0.17.0.dist-info}/METADATA +11 -8
- mapfolding-0.17.0.dist-info/RECORD +107 -0
- mapFolding/_dataPacking.py +0 -68
- mapFolding/algorithms/matrixMeandersBeDry.py +0 -182
- mapFolding/algorithms/matrixMeandersNumPy.py +0 -333
- mapFolding/algorithms/matrixMeandersPandas.py +0 -334
- mapFolding/reference/meandersDumpingGround/A005316intOptimized.py +0 -122
- mapFolding/reference/meandersDumpingGround/A005316optimized128bit.py +0 -79
- mapFolding/reference/meandersDumpingGround/matrixMeandersBaseline.py +0 -65
- mapFolding/reference/meandersDumpingGround/matrixMeandersBaselineAnnex.py +0 -84
- mapFolding/reference/meandersDumpingGround/matrixMeandersSimpleQueue.py +0 -90
- mapFolding/syntheticModules/A007822/algorithmNumba.py +0 -94
- mapFolding/syntheticModules/A007822/asynchronousAnnex.py +0 -66
- mapFolding/syntheticModules/A007822/asynchronousAnnexNumba.py +0 -70
- mapFolding/syntheticModules/A007822/asynchronousNumba.py +0 -79
- mapFolding/syntheticModules/A007822/asynchronousTheorem2.py +0 -65
- mapFolding/syntheticModules/A007822/asynchronousTrimmed.py +0 -56
- mapFolding/syntheticModules/dataPacking.py +0 -26
- mapFolding/syntheticModules/dataPackingA007822.py +0 -92
- mapfolding-0.16.2.dist-info/RECORD +0 -115
- /mapFolding/someAssemblyRequired/{mapFolding → mapFoldingModules}/__init__.py +0 -0
- {mapfolding-0.16.2.dist-info → mapfolding-0.17.0.dist-info}/WHEEL +0 -0
- {mapfolding-0.16.2.dist-info → mapfolding-0.17.0.dist-info}/entry_points.txt +0 -0
- {mapfolding-0.16.2.dist-info → mapfolding-0.17.0.dist-info}/licenses/LICENSE +0 -0
- {mapfolding-0.16.2.dist-info → mapfolding-0.17.0.dist-info}/top_level.txt +0 -0

mapFolding/someAssemblyRequired/_toolkitContainers.py

@@ -25,12 +25,13 @@ to low-level optimized functions while maintaining semantic equivalence and type
 the compilation process.
 """
 
-from astToolkit import Be, DOT, identifierDotAttribute,
+from astToolkit import Be, DOT, identifierDotAttribute, Make, NodeTourist, Then
+from astToolkit.containers import LedgerOfImports
 from collections.abc import Callable
 from copy import deepcopy
 from hunterMakesPy import raiseIfNone
 from mapFolding.someAssemblyRequired import IfThis
-from typing import Any, cast
+from typing import Any, cast, NamedTuple
 import ast
 import dataclasses
 
@@ -39,7 +40,7 @@ dummySubscript = Make.Subscript(Make.Name("dummy"), Make.Name("slice"))
 dummyTuple = Make.Tuple([Make.Name("dummyElement")])
 
 @dataclasses.dataclass
-class ShatteredDataclass:
+class ShatteredDataclass: # slots?
     """Container for decomposed dataclass components organized as AST nodes for code generation.
 
     This class holds the decomposed representation of a dataclass, breaking it down into individual
@@ -80,6 +81,9 @@ class ShatteredDataclass:
     list_keyword_field__field4init: list[ast.keyword] = dataclasses.field(default_factory=list[ast.keyword])
     """Keyword arguments for dataclass initialization using field=field format."""
 
+    listIdentifiersStaticScalars: list[str] = dataclasses.field(default_factory=list[str])
+    """Identifiers of unchanging scalar fields with `init=False`; mutually exclusive with `list_keyword_field__field4init`."""
+
     listAnnotations: list[ast.expr] = dataclasses.field(default_factory=list[ast.expr])
     """Type annotations for each dataclass field in declaration order."""
 
@@ -99,7 +103,7 @@ class ShatteredDataclass:
     """Tuple-based return type annotation for functions returning decomposed field values."""
 
 @dataclasses.dataclass
-class DeReConstructField2ast:
+class DeReConstructField2ast: # slots?
     """
     Transform a dataclass field into AST node representations for code generation.
 
@@ -217,7 +221,7 @@ class DeReConstructField2ast:
         self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
         self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstanceIdentifier), self.name)
 
-        self.astAnnotation = cast(
+        self.astAnnotation = cast(ast.Name, raiseIfNone(NodeTourist(
             findThis = Be.AnnAssign.targetIs(IfThis.isNameIdentifier(self.name))
             , doThat = Then.extractIt(DOT.annotation)
             ).captureLastMatch(dataclassClassDef)))
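
The rewritten `self.astAnnotation` assignment tours the dataclass `ClassDef` for the `AnnAssign` whose target matches the field and keeps its annotation. A standard-library sketch of the same idea, with hypothetical class and field names; astToolkit's NodeTourist and captureLastMatch are only mimicked here, not reproduced:

    import ast

    source = "class MapFoldingState:\n    leavesTotal: DatatypeLeavesTotal\n    mapShape: tuple[DatatypeLeavesTotal, ...]\n"
    classDef = ast.parse(source).body[0]
    assert isinstance(classDef, ast.ClassDef)

    annotation = None
    for node in ast.walk(classDef):
        # Keep the last AnnAssign whose target is the wanted identifier, like captureLastMatch.
        if (isinstance(node, ast.AnnAssign)
                and isinstance(node.target, ast.Name)
                and node.target.id == "leavesTotal"):
            annotation = node.annotation

    assert annotation is not None
    print(ast.unparse(annotation))  # DatatypeLeavesTotal
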
@@ -234,6 +238,8 @@ class DeReConstructField2ast:
         dtype_asnameName: ast.Name = self.astAnnotation
         if dtype_asnameName.id == 'Array3DLeavesTotal':
             axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Tuple([Make.Name('uint8'), Make.Name('uint8'), Make.Name('uint8')]))
+        if dtype_asnameName.id == 'Array2DLeavesTotal':
+            axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Tuple([Make.Name('uint8'), Make.Name('uint8')]))
         ast_expr = Make.Subscript(Make.Name(annotationType), Make.Tuple([axesSubscript, Make.Subscript(Make.Name('dtype'), dtype_asnameName)]))
         constructor = 'array'
         self.ledger.addImportFrom_asStr(moduleWithLogicalPath, constructor)
@@ -246,10 +252,35 @@ class DeReConstructField2ast:
             self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(self.astAnnotation, [Make.Constant(-1)]))
             self.Z0Z_hack = (self.astAnnAssignConstructor, 'scalar')
         elif isinstance(self.astAnnotation, ast.Subscript):
-            elementConstructor: str = self.metadata
-
+            elementConstructor: str = self.metadata.get('elementConstructor', 'generic')
+            if elementConstructor != 'generic':
+                self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, elementConstructor)
             takeTheTuple = deepcopy(self.astAnnotation.slice)
             self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, takeTheTuple)
             self.Z0Z_hack = (self.astAnnAssignConstructor, elementConstructor)
         if isinstance(self.astAnnotation, ast.Name):
             self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id)
+
+class DatatypeConfiguration(NamedTuple):
+    """Configuration for mapping framework datatypes to compiled datatypes.
+
+    This configuration class defines how abstract datatypes used in the map folding framework should be replaced with compiled
+    datatypes during code generation. Each configuration specifies the source module, target type name, and optional import alias
+    for the transformation.
+
+    Attributes
+    ----------
+    datatypeIdentifier : str
+        Framework datatype identifier to be replaced.
+    typeModule : identifierDotAttribute
+        Module containing the target datatype (e.g., 'codon', 'numpy').
+    typeIdentifier : str
+        Concrete type name in the target module.
+    type_asname : str | None = None
+        Optional import alias for the type.
+    """
+
+    datatypeIdentifier: str
+    typeModule: identifierDotAttribute
+    typeIdentifier: str
+    type_asname: str | None = None
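
Each DatatypeConfiguration row describes one "framework type to compiled type" substitution. A minimal sketch of how such a row maps onto the import statement it stands for; `emitImport` is a hypothetical helper for illustration only, since the package routes these rows through `customizeDatatypeViaImport` (shown in a later hunk):

    from typing import NamedTuple

    class DatatypeConfiguration(NamedTuple):
        datatypeIdentifier: str
        typeModule: str
        typeIdentifier: str
        type_asname: str | None = None

    def emitImport(configuration: DatatypeConfiguration) -> str:
        # Render the `from <module> import <type> as <alias>` line the row describes.
        alias = f" as {configuration.type_asname}" if configuration.type_asname else ""
        return f"from {configuration.typeModule} import {configuration.typeIdentifier}{alias}"

    print(emitImport(DatatypeConfiguration('DatatypeLeavesTotal', 'numba', 'uint8', 'DatatypeLeavesTotal')))
    # from numba import uint8 as DatatypeLeavesTotal
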
mapFolding/someAssemblyRequired/infoBooth.py

@@ -11,39 +11,57 @@ on empirical measurements and theoretical analysis of map folding algorithms for
 specific dimensional configurations.
 """
 
+from copy import deepcopy
 from hunterMakesPy import identifierDotAttribute
-from typing import Final
+from typing import Final, TypedDict
 
 dictionaryEstimatesMapFolding: Final[dict[tuple[int, ...], int]] = {
     (2,2,2,2,2,2,2,2): 798148657152000,
     (2,21): 776374224866624,
     (3,15): 824761667826225,
     (3,3,3,3): 85109616000000000000000000000000,
-    (8,8): 791274195985524900,
+    (8,8): 791274195985524900, # A test estimated 300,000 hours to compute.
 }
 """Estimates of multidimensional map folding `foldsTotal`."""
 
-
-"""Default
+class Default(TypedDict):
+    """Default identifiers."""
 
-
-
+    function: dict[str, str]
+    logicalPath: dict[str, identifierDotAttribute]
+    module: dict[str, str]
+    variable: dict[str, str]
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+default = Default(
+    function = {
+        'counting': 'count',
+        'dispatcher': 'doTheNeedful',
+        'initializeState': 'transitionOnGroupsOfFolds',
+    },
+    logicalPath = {
+        'algorithm': 'algorithms',
+        'synthetic': 'syntheticModules',
+    },
+    module = {
+        'algorithm': 'daoOfMapFolding',
+        'dataBasket': 'dataBaskets',
+        'initializeState': 'initializeState',
+    },
+    variable = {
+        'counting': 'groupsOfFolds',
+        'stateDataclass': 'MapFoldingState',
+        'stateInstance': 'state',
+    },
+)
 
+defaultA007822: Default = deepcopy(default)
+defaultA007822['function']['_processCompletedFutures'] = '_processCompletedFutures'
+defaultA007822['function']['filterAsymmetricFolds'] = 'filterAsymmetricFolds'
+defaultA007822['function']['getSymmetricFoldsTotal'] = 'getSymmetricFoldsTotal'
+defaultA007822['function']['initializeConcurrencyManager'] = 'initializeConcurrencyManager'
+defaultA007822['logicalPath']['assembly'] = 'someAssemblyRequired.A007822'
+defaultA007822['logicalPath']['synthetic'] += '.A007822'
+defaultA007822['module']['algorithm'] = 'algorithm'
+defaultA007822['module']['asynchronous'] = 'asynchronous'
+defaultA007822['variable']['counting'] = 'symmetricFolds'
+defaultA007822['variable']['stateDataclass'] = 'SymmetricFoldsState'
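
`defaultA007822` is built by deep-copying `default` before overriding entries. A stdlib illustration (toy dictionary, not the package's data) of why the copy must be deep: the values are nested dicts, and a shallow copy would let the A007822 overrides leak back into the shared defaults:

    from copy import copy, deepcopy

    base = {'variable': {'counting': 'groupsOfFolds'}}

    shallow = copy(base)
    shallow['variable']['counting'] = 'symmetricFolds'
    print(base['variable']['counting'])   # 'symmetricFolds': the base was mutated through the shared inner dict

    base = {'variable': {'counting': 'groupsOfFolds'}}
    deep = deepcopy(base)
    deep['variable']['counting'] = 'symmetricFolds'
    print(base['variable']['counting'])   # 'groupsOfFolds': the base is untouched
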
mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py

@@ -1,70 +1,81 @@
 """
 Map folding AST transformation system: Specialized job generation and optimization implementation.
 
-
-
-
-
-
-optimized for specific map dimensions and calculation contexts.
-
-The transformation implementation addresses the computational demands of map folding research where
-calculations can require hours or days to complete. The specialization process converts abstract
-algorithms with flexible parameters into concrete, statically-optimized code that leverages
-just-in-time compilation for maximum performance. Each generated module targets specific map
-shapes and calculation modes, enabling aggressive compiler optimizations based on known constraints
-and embedded constants.
-
-The optimization process executes systematic transformations including static value embedding to
-replace parameterized values with compile-time constants, dead code elimination to remove unused
-variables and code paths, parameter internalization to convert function parameters into embedded
-variables, import optimization to replace generic imports with specific implementations, Numba
-decoration with appropriate compilation directives, progress integration for long-running calculations,
-and launcher generation for standalone execution entry points.
-
-The resulting modules represent the culmination of the entire AST transformation system, producing
-self-contained Python scripts that execute independently with dramatically improved performance
-compared to original generic algorithms while maintaining mathematical correctness and providing
-essential progress feedback capabilities for large-scale computational research.
+Each generated module targets a specific map shape and calculation mode.
+
+The optimization process executes systematic transformations including static value embedding, dead code elimination, parameter
+internalization to convert function parameters into embedded variables, Numba decoration with appropriate compilation directives,
+progress integration for long-running calculations, and launcher generation for standalone execution entry points.
 """
 
-from astToolkit import
-
-
-from
-
-from mapFolding import
-from mapFolding.
-from mapFolding.someAssemblyRequired import
-from mapFolding.someAssemblyRequired.RecipeJob import RecipeJobTheorem2
+from astToolkit import Be, Make, NodeChanger, NodeTourist, parseLogicalPath2astModule, Then
+from astToolkit.containers import astModuleToIngredientsFunction, IngredientsFunction, IngredientsModule
+from hunterMakesPy import autoDecodingRLE, identifierDotAttribute
+from mapFolding import (
+    DatatypeLeavesTotal, dictionaryOEIS, getFoldsTotalKnown, getPathFilenameFoldsTotal, packageSettings)
+from mapFolding.dataBaskets import MapFoldingState, SymmetricFoldsState
+from mapFolding.someAssemblyRequired import DatatypeConfiguration, defaultA007822, dictionaryEstimatesMapFolding, IfThis
+from mapFolding.someAssemblyRequired.RecipeJob import customizeDatatypeViaImport, RecipeJobTheorem2
 from mapFolding.someAssemblyRequired.toolkitNumba import decorateCallableWithNumba, parametersNumbaLight, SpicesJobNumba
-from mapFolding.
+from mapFolding.someAssemblyRequired.transformationTools import shatter_dataclassesDOTdataclass
 from pathlib import PurePosixPath
-from typing import cast
-from typing_extensions import TypeIs
+from typing import cast
 import ast
 
-
-
-
-
-
-
-
+# TODO More convergence with `makeJobTheorem2codon`
+
+# TODO Dynamically calculate the bitwidth of each datatype. NOTE I've delayed dynamic calculation because I don't know how to
+# calculate what 'elephino' needs. But perhaps I can dynamically calculate 'leavesTotal' and 'foldsTotal' and hardcode 'elephino.'
+# That would probably be an improvement.
+listDatatypeConfigurations: list[DatatypeConfiguration] = [
+    DatatypeConfiguration(datatypeIdentifier='DatatypeLeavesTotal', typeModule='numba', typeIdentifier='uint8', type_asname='DatatypeLeavesTotal'),
+    DatatypeConfiguration(datatypeIdentifier='DatatypeElephino', typeModule='numba', typeIdentifier='uint16', type_asname='DatatypeElephino'),
+    DatatypeConfiguration(datatypeIdentifier='DatatypeFoldsTotal', typeModule='numba', typeIdentifier='uint64', type_asname='DatatypeFoldsTotal'),
+    DatatypeConfiguration(datatypeIdentifier='Array1DLeavesTotal', typeModule='numpy', typeIdentifier='uint8', type_asname='Array1DLeavesTotal'),
+    DatatypeConfiguration(datatypeIdentifier='Array1DElephino', typeModule='numpy', typeIdentifier='uint16', type_asname='Array1DElephino'),
+    DatatypeConfiguration(datatypeIdentifier='Array3DLeavesTotal', typeModule='numpy', typeIdentifier='uint8', type_asname='Array3DLeavesTotal'),
+]
+
+def addLauncher(ingredientsModule: IngredientsModule, ingredientsCount: IngredientsFunction, job: RecipeJobTheorem2) -> tuple[IngredientsModule, IngredientsFunction]:
+    """Add a standalone launcher section to a computation module."""
+    linesLaunch: str = f"""
+if __name__ == '__main__':
+    import time
+    timeStart = time.perf_counter()
+    foldsTotal = int({job.identifierCallable}() * {job.state.leavesTotal})
+    print(time.perf_counter() - timeStart)
+    print('\\nmap {job.state.mapShape} =', foldsTotal)
+    writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
+    writeStream.write(str(foldsTotal))
+    writeStream.close()
+"""
+    ingredientsModule.appendLauncher(ast.parse(linesLaunch))
+    NodeChanger(Be.Return, Then.replaceWith(Make.Return(job.shatteredDataclass.countingVariableName))).visit(ingredientsCount.astFunctionDef)
+    ingredientsCount.astFunctionDef.returns = job.shatteredDataclass.countingVariableAnnotation
 
-
+    return ingredientsModule, ingredientsCount
 
-
-
+def addLauncherA007822(ingredientsModule: IngredientsModule, ingredientsCount: IngredientsFunction, job: RecipeJobTheorem2) -> tuple[IngredientsModule, IngredientsFunction]:
+    """Add a standalone launcher section to a computation module."""
+    linesLaunch: str = f"""
+if __name__ == '__main__':
+    import time
+    timeStart = time.perf_counter()
+    foldsTotal = int({job.identifierCallable}())
+    print(time.perf_counter() - timeStart)
+    print('\\nmap {job.state.mapShape} =', foldsTotal)
+    writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
+    writeStream.write(str(foldsTotal))
+    writeStream.close()
+"""
+    ingredientsModule.appendLauncher(ast.parse(linesLaunch))
+    NodeChanger(Be.Return, Then.replaceWith(Make.Return(job.shatteredDataclass.countingVariableName))).visit(ingredientsCount.astFunctionDef)
+    ingredientsCount.astFunctionDef.returns = job.shatteredDataclass.countingVariableAnnotation
 
-
-2. Replaces counting increments with progress bar updates
-3. Creates a launcher section that displays and updates progress
-4. Configures file output to save results upon completion
+    return ingredientsModule, ingredientsCount
 
-
-
-estimated completion times.
+def addLauncherNumbaProgress(ingredientsModule: IngredientsModule, ingredientsFunction: IngredientsFunction, job: RecipeJobTheorem2, spices: SpicesJobNumba) -> tuple[IngredientsModule, IngredientsFunction]:
+    """Add a tqdm progress bar to a Numba-optimized function.
 
     Parameters
     ----------
@@ -81,12 +92,13 @@ def addLauncherNumbaProgress(ingredientsModule: IngredientsModule, ingredientsFu
     -------
     moduleAndFunction : tuple[IngredientsModule, IngredientsFunction]
         Modified module and function with integrated progress tracking capabilities.
-
     """
+    # TODO When using the progress bar, `count` does not return `groupsOfFolds`, so `count` does not `*= 2`. So, I have to manually
+    # change the update value. This should be dynamic.
     linesLaunch: str = f"""
 if __name__ == '__main__':
-    with ProgressBar(total={job.foldsTotalEstimated}, update_interval=2) as statusUpdate:
-        {job.
+    with ProgressBar(total={job.foldsTotalEstimated//job.state.leavesTotal}, update_interval=2) as statusUpdate:
+        {job.identifierCallable}(statusUpdate)
     foldsTotal = statusUpdate.n * {job.state.leavesTotal}
     print('\\nmap {job.state.mapShape} =', foldsTotal)
     writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
@@ -98,17 +110,16 @@ if __name__ == '__main__':
     ingredientsModule.imports.addImportFrom_asStr('numba_progress', numba_progressPythonClass)
     ingredientsModule.imports.addImportFrom_asStr('numba_progress', numba_progressNumbaType)
 
-    ast_argNumbaProgress =
+    ast_argNumbaProgress = Make.arg(spices.numbaProgressBarIdentifier, annotation=Make.Name(numba_progressPythonClass))
     ingredientsFunction.astFunctionDef.args.args.append(ast_argNumbaProgress)
 
-
-
-
-
+    NodeChanger(
+        findThis = Be.AugAssign.targetIs(IfThis.isNameIdentifier(job.shatteredDataclass.countingVariableName.id))
+        , doThat = Then.replaceWith(Make.Expr(Make.Call(Make.Attribute(Make.Name(spices.numbaProgressBarIdentifier),'update'),[Make.Constant(2)])))
+        ).visit(ingredientsFunction.astFunctionDef)
 
-
-
-    ingredientsFunction.astFunctionDef.returns = Make.Constant(value=None)
+    NodeChanger(Be.Return, Then.removeIt).visit(ingredientsFunction.astFunctionDef)
+    ingredientsFunction.astFunctionDef.returns = Make.Constant(None)
 
     ingredientsModule.appendLauncher(ast.parse(linesLaunch))
 
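
The new NodeChanger pair rewrites the counting increment into a progress-bar update and strips the `return`, so the launcher reads the total from `statusUpdate.n` instead. A stdlib-only sketch of the increment rewrite; the transformer below is illustrative, not the package's NodeChanger, and the identifiers follow the diff:

    import ast

    class IncrementToProgressUpdate(ast.NodeTransformer):
        def visit_AugAssign(self, node: ast.AugAssign) -> ast.AST:
            # `groupsOfFolds += 1` becomes `statusUpdate.update(2)`.
            if isinstance(node.target, ast.Name) and node.target.id == "groupsOfFolds":
                call = ast.Call(
                    func=ast.Attribute(ast.Name("statusUpdate", ast.Load()), "update", ast.Load()),
                    args=[ast.Constant(2)], keywords=[])
                return ast.copy_location(ast.Expr(call), node)
            return node

    tree = ast.parse("groupsOfFolds += 1")
    tree = ast.fix_missing_locations(IncrementToProgressUpdate().visit(tree))
    print(ast.unparse(tree))  # statusUpdate.update(2)
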
@@ -143,7 +154,6 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre
     -------
     modifiedFunction : IngredientsFunction
         The modified function with parameters converted to initialized variables.
-
     """
     ingredientsFunction.imports.update(job.shatteredDataclass.imports)
 
@@ -162,25 +172,23 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre
         ImaAnnAssign, elementConstructor = job.shatteredDataclass.Z0Z_field2AnnAssign[ast_arg.arg]
         match elementConstructor:
             case 'scalar':
-                cast(
+                cast(ast.Constant, cast(ast.Call, ImaAnnAssign.value).args[0]).value = int(eval(f"job.state.{ast_arg.arg}")) # noqa: S307
             case 'array':
                 dataAsStrRLE: str = autoDecodingRLE(eval(f"job.state.{ast_arg.arg}"), assumeAddSpaces=True) # noqa: S307
-                dataAs_astExpr: ast.expr = cast(
-                cast(
+                dataAs_astExpr: ast.expr = cast(ast.Expr, ast.parse(dataAsStrRLE).body[0]).value
+                cast(ast.Call, ImaAnnAssign.value).args = [dataAs_astExpr]
             case _:
                 list_exprDOTannotation: list[ast.expr] = []
                 list_exprDOTvalue: list[ast.expr] = []
                 for dimension in job.state.mapShape:
                     list_exprDOTannotation.append(Make.Name(elementConstructor))
                     list_exprDOTvalue.append(Make.Call(Make.Name(elementConstructor), [Make.Constant(dimension)]))
-                cast(
-                cast(
+                cast(ast.Tuple, cast(ast.Subscript, cast(ast.AnnAssign, ImaAnnAssign).annotation).slice).elts = list_exprDOTannotation
+                cast(ast.Tuple, ImaAnnAssign.value).elts = list_exprDOTvalue
 
         ingredientsFunction.astFunctionDef.body.insert(0, ImaAnnAssign)
 
-
-        remove_arg: NodeChanger[ast.arg, None] = NodeChanger(findThis, Then.removeIt)
-        remove_arg.visit(ingredientsFunction.astFunctionDef)
+        NodeChanger(IfThis.is_argIdentifier(ast_arg.arg), Then.removeIt).visit(ingredientsFunction.astFunctionDef)
 
     ast.fix_missing_locations(ingredientsFunction.astFunctionDef)
     return ingredientsFunction
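
`move_arg2FunctionDefDOTbodyAndAssignInitialValues` is the parameter-internalization step: each argument of the counting function is deleted from the signature and re-created as an initialized variable at the top of the body. A stdlib-only sketch of that step with a hypothetical function and value (the real code builds the initializers from `job.state` and the shattered dataclass):

    import ast

    module = ast.parse("def count(leavesTotal):\n    return leavesTotal * 2\n")
    functionDef = module.body[0]
    assert isinstance(functionDef, ast.FunctionDef)

    # Drop the parameter and prepend `leavesTotal = 42` to the body.
    functionDef.args.args = [arg for arg in functionDef.args.args if arg.arg != "leavesTotal"]
    assignment = ast.Assign(targets=[ast.Name("leavesTotal", ctx=ast.Store())], value=ast.Constant(42))
    functionDef.body.insert(0, assignment)

    ast.fix_missing_locations(module)
    print(ast.unparse(module))
    # def count():
    #     leavesTotal = 42
    #     return leavesTotal * 2
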
@@ -215,95 +223,35 @@ def makeJobNumba(job: RecipeJobTheorem2, spices: SpicesJobNumba) -> None:
         Optimization settings including Numba parameters and progress options.
 
     """
-
-    ingredientsCount: IngredientsFunction =
+    # ingredientsCount: IngredientsFunction = IngredientsFunction(raiseIfNone(extractFunctionDef(job.source_astModule, job.identifierCallableSource))) # noqa: ERA001
+    ingredientsCount: IngredientsFunction = astModuleToIngredientsFunction(job.source_astModule, job.identifierCallableSource)
 
-
-
-
-
-    NodeChanger(findThis, doThat).visit(ingredientsCount.astFunctionDef)
+    for identifier in job.shatteredDataclass.listIdentifiersStaticScalars:
+        NodeChanger(IfThis.isNameIdentifier(identifier)
+            , Then.replaceWith(Make.Constant(int(eval(f"job.state.{identifier}")))) # noqa: S307
+            ).visit(ingredientsCount.astFunctionDef)
 
     ingredientsModule = IngredientsModule()
-
+    # TODO Refactor the subtly complicated interactions of these launchers with `move_arg2FunctionDefDOTbodyAndAssignInitialValues`
+    # Consider `astToolkit.transformationTools.removeUnusedParameters`.
+    # Generalize some parts of the launchers, especially writing to disk. Writing to disk is NOT robust enough. It doesn't even try to make a directory.
     if spices.useNumbaProgressBar:
         ingredientsModule, ingredientsCount = addLauncherNumbaProgress(ingredientsModule, ingredientsCount, job, spices)
         spices.parametersNumba['nogil'] = True
     else:
-
-
-    import time
-    timeStart = time.perf_counter()
-    foldsTotal = int({job.countCallable}() * {job.state.leavesTotal})
-    print(time.perf_counter() - timeStart)
-    print('\\nmap {job.state.mapShape} =', foldsTotal)
-    writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
-    writeStream.write(str(foldsTotal))
-    writeStream.close()
-"""
-        # from mapFolding.oeis import getFoldsTotalKnown # noqa: ERA001
-        # print(foldsTotal == getFoldsTotalKnown({job.state.mapShape})) # noqa: ERA001
-        ingredientsModule.appendLauncher(ast.parse(linesLaunch))
-        changeReturnParallelCallable = NodeChanger(Be.Return, Then.replaceWith(Make.Return(job.shatteredDataclass.countingVariableName)))
-        changeReturnParallelCallable.visit(ingredientsCount.astFunctionDef)
-        ingredientsCount.astFunctionDef.returns = job.shatteredDataclass.countingVariableAnnotation
+        ingredientsModule, ingredientsCount = addLauncher(ingredientsModule, ingredientsCount, job) # noqa: ERA001
+        # ingredientsModule, ingredientsCount = addLauncherA007822(ingredientsModule, ingredientsCount, job)
 
     ingredientsCount = move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsCount, job)
-    class DatatypeConfig(NamedTuple):
-        """Configuration for mapping framework datatypes to Numba-compatible types.
-
-        This configuration class defines how abstract datatypes used in the map folding
-        framework should be replaced with concrete Numba-compatible types during code
-        generation. Each configuration specifies the source module, target type name,
-        and optional import alias for the transformation.
-
-        Attributes
-        ----------
-        fml : str
-            Framework datatype identifier to be replaced.
-        Z0Z_module : identifierDotAttribute
-            Module containing the target datatype (e.g., 'numba', 'numpy').
-        Z0Z_type_name : str
-            Concrete type name in the target module.
-        Z0Z_asname : str | None = None
-            Optional import alias for the type.
-        """
-
-        fml: str
-        Z0Z_module: identifierDotAttribute
-        Z0Z_type_name: str
-        Z0Z_asname: str | None = None
-
-    listDatatypeConfigs: list[DatatypeConfig] = [
-        DatatypeConfig(fml='DatatypeLeavesTotal', Z0Z_module='numba', Z0Z_type_name='uint8'),
-        DatatypeConfig(fml='DatatypeElephino', Z0Z_module='numba', Z0Z_type_name='uint16'),
-        DatatypeConfig(fml='DatatypeFoldsTotal', Z0Z_module='numba', Z0Z_type_name='uint64'),
-    ]
-
-    for datatypeConfig in listDatatypeConfigs:
-        ingredientsModule.imports.addImportFrom_asStr(datatypeConfig.Z0Z_module, datatypeConfig.Z0Z_type_name)
-        statement = Make.Assign(
-            [Make.Name(datatypeConfig.fml, ast.Store())],
-            Make.Name(datatypeConfig.Z0Z_type_name)
-        )
-        ingredientsModule.appendPrologue(statement=statement)
-
-    ingredientsCount.imports.removeImportFromModule('mapFolding.dataBaskets')
 
-
-        DatatypeConfig(fml='Array1DLeavesTotal', Z0Z_module='numpy', Z0Z_type_name='uint8', Z0Z_asname='Array1DLeavesTotal'),
-        DatatypeConfig(fml='Array1DElephino', Z0Z_module='numpy', Z0Z_type_name='uint16', Z0Z_asname='Array1DElephino'),
-        DatatypeConfig(fml='Array3DLeavesTotal', Z0Z_module='numpy', Z0Z_type_name='uint8', Z0Z_asname='Array3DLeavesTotal'),
-    ]
+    ingredientsCount, ingredientsModule = customizeDatatypeViaImport(ingredientsCount, ingredientsModule, listDatatypeConfigurations)
 
-
-    ingredientsCount.imports.removeImportFrom(typeConfig.Z0Z_module, None, typeConfig.fml)
-    ingredientsCount.imports.addImportFrom_asStr(typeConfig.Z0Z_module, typeConfig.Z0Z_type_name, typeConfig.Z0Z_asname)
+    ingredientsCount.imports.removeImportFromModule('mapFolding.dataBaskets')
 
     ingredientsCount.astFunctionDef.decorator_list = [] # TODO low-priority, handle this more elegantly
     ingredientsCount = decorateCallableWithNumba(ingredientsCount, spices.parametersNumba)
     ingredientsModule.appendIngredientsFunction(ingredientsCount)
-    write_astModule(
+    ingredientsModule.write_astModule(job.pathFilenameModule, identifierPackage=job.packageIdentifier or '')
 
 """
 Overview
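
The `listIdentifiersStaticScalars` loop is the static-value-embedding pass: every read of an unchanging scalar field is replaced by its literal value before Numba compiles the function. A stdlib-only sketch of the idea with a hypothetical identifier and value; the package performs this with NodeChanger, IfThis, and Make rather than a NodeTransformer:

    import ast

    class EmbedConstant(ast.NodeTransformer):
        def __init__(self, identifier: str, value: int) -> None:
            self.identifier = identifier
            self.value = value

        def visit_Name(self, node: ast.Name) -> ast.AST:
            # Replace reads of the identifier with its literal value; leave assignment targets alone.
            if node.id == self.identifier and isinstance(node.ctx, ast.Load):
                return ast.copy_location(ast.Constant(self.value), node)
            return node

    tree = ast.parse("total = leavesTotal * 2")
    tree = ast.fix_missing_locations(EmbedConstant("leavesTotal", 42).visit(tree))
    print(ast.unparse(tree))  # total = 42 * 2
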
@@ -312,26 +260,54 @@ if __name__ == '__main__':
 - `makeJobNumba` increases optimization especially by limiting its capabilities to just one set of parameters
 - the synthesized module must run well as a standalone interpreted-Python script
 - the next major optimization step will (probably) be to use the module synthesized by `makeJobNumba` to compile a standalone executable
-- Nevertheless, at each major optimization step, the code is constantly being improved and optimized, so everything must be
-
-Necessary
-- Move the function's parameters to the function body,
-- initialize identifiers with their state types and values,
-
-Optimizations
-- replace static-valued identifiers with their values
-- narrowly focused imports
-
-Minutia
-- do not use `with` statement inside numba jitted code, except to use numba's obj mode
+- Nevertheless, at each major optimization step, the code is constantly being improved and optimized, so everything must be
+    well organized (read: semantic) and able to handle a range of arbitrary upstream and not disrupt downstream transformations
 """
 
-
-
+def fromMapShape(mapShape: tuple[DatatypeLeavesTotal, ...]) -> None:
+    """Generate and write an optimized Numba-compiled map folding module for a specific map shape."""
+    from mapFolding.syntheticModules.initializeState import transitionOnGroupsOfFolds # noqa: PLC0415
+    state: MapFoldingState = transitionOnGroupsOfFolds(MapFoldingState(mapShape))
+    foldsTotalEstimated: int = getFoldsTotalKnown(state.mapShape) or dictionaryEstimatesMapFolding.get(state.mapShape, 0)
     pathModule = PurePosixPath(packageSettings.pathPackage, 'jobs')
     pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(state.mapShape, pathModule))
-    aJob = RecipeJobTheorem2(state, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal)
+    aJob = RecipeJobTheorem2(state, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal, foldsTotalEstimated=foldsTotalEstimated)
     spices = SpicesJobNumba(useNumbaProgressBar=True, parametersNumba=parametersNumbaLight)
     makeJobNumba(aJob, spices)
 
-
+def A007822(n: int) -> None:
+    """Generate and write an optimized Numba-compiled map folding module for a specific map shape."""
+    from mapFolding.syntheticModules.A007822.initializeState import transitionOnGroupsOfFolds # noqa: PLC0415
+    state = transitionOnGroupsOfFolds(SymmetricFoldsState((1, 2 * n)))
+    foldsTotalEstimated: int = dictionaryOEIS['A007822']['valuesKnown'].get(n, 0)
+    shatteredDataclass = shatter_dataclassesDOTdataclass(f"{packageSettings.identifierPackage}.{defaultA007822['module']['dataBasket']}"
+        , defaultA007822['variable']['stateDataclass'], defaultA007822['variable']['stateInstance'])
+    source_astModule: ast.Module = parseLogicalPath2astModule(f'{packageSettings.identifierPackage}.{defaultA007822['logicalPath']['synthetic']}.theorem2Numba')
+    identifierCallableSource: str = defaultA007822['function']['counting']
+    sourceLogicalPathModuleDataclass: identifierDotAttribute = f'{packageSettings.identifierPackage}.dataBaskets'
+    sourceDataclassIdentifier: str = defaultA007822['variable']['stateDataclass']
+    sourceDataclassInstance: str = defaultA007822['variable']['stateInstance']
+    sourcePathPackage: PurePosixPath | None = PurePosixPath(packageSettings.pathPackage)
+    sourcePackageIdentifier: str | None = packageSettings.identifierPackage
+    pathPackage: PurePosixPath | None = None
+    pathModule = PurePosixPath(packageSettings.pathPackage, 'jobs')
+    fileExtension: str = packageSettings.fileExtension
+    pathFilenameFoldsTotal = pathModule / ('A007822_' + str(n))
+    packageIdentifier: str | None = None
+    logicalPathRoot: identifierDotAttribute | None = None
+    moduleIdentifier: str = pathFilenameFoldsTotal.stem
+    identifierCallable: str = identifierCallableSource
+    identifierDataclass: str | None = sourceDataclassIdentifier
+    identifierDataclassInstance: str | None = sourceDataclassInstance
+    logicalPathModuleDataclass: identifierDotAttribute | None = sourceLogicalPathModuleDataclass
+    aJob = RecipeJobTheorem2(state, foldsTotalEstimated, shatteredDataclass, source_astModule, identifierCallableSource, sourceLogicalPathModuleDataclass
+        , sourceDataclassIdentifier, sourceDataclassInstance, sourcePathPackage, sourcePackageIdentifier, pathPackage, pathModule, fileExtension
+        , pathFilenameFoldsTotal, packageIdentifier, logicalPathRoot, moduleIdentifier, identifierCallable, identifierDataclass, identifierDataclassInstance
+        , logicalPathModuleDataclass)
+    spices = SpicesJobNumba(useNumbaProgressBar=False, parametersNumba=parametersNumbaLight)
+    makeJobNumba(aJob, spices)
+
+if __name__ == '__main__':
+    mapShape: tuple[DatatypeLeavesTotal, ...] = (2,21) # noqa: ERA001
+    fromMapShape(mapShape) # noqa: ERA001
+    # A007822(8)