mapFolding 0.11.1-py3-none-any.whl → 0.11.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. mapFolding/__init__.py +7 -60
  2. mapFolding/basecamp.py +15 -13
  3. mapFolding/beDRY.py +4 -36
  4. mapFolding/dataBaskets.py +24 -2
  5. mapFolding/datatypes.py +0 -3
  6. mapFolding/{toolboxFilesystem.py → filesystemToolkit.py} +3 -3
  7. mapFolding/oeis.py +3 -5
  8. mapFolding/someAssemblyRequired/RecipeJob.py +8 -116
  9. mapFolding/someAssemblyRequired/Z0Z_makeAllModules.py +492 -0
  10. mapFolding/someAssemblyRequired/__init__.py +5 -31
  11. mapFolding/someAssemblyRequired/_toolIfThis.py +5 -6
  12. mapFolding/someAssemblyRequired/{_toolboxContainers.py → _toolkitContainers.py} +6 -127
  13. mapFolding/someAssemblyRequired/infoBooth.py +70 -0
  14. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +13 -12
  15. mapFolding/someAssemblyRequired/{toolboxNumba.py → toolkitNumba.py} +2 -44
  16. mapFolding/someAssemblyRequired/transformationTools.py +16 -174
  17. mapFolding/syntheticModules/countParallel.py +98 -0
  18. mapFolding/syntheticModules/dataPacking.py +1 -1
  19. mapFolding/theSSOT.py +12 -246
  20. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/METADATA +16 -11
  21. mapfolding-0.11.3.dist-info/RECORD +53 -0
  22. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/WHEEL +1 -1
  23. tests/conftest.py +2 -79
  24. tests/test_computations.py +12 -19
  25. tests/test_filesystem.py +1 -2
  26. tests/test_other.py +1 -1
  27. tests/test_tasks.py +3 -4
  28. mapFolding/someAssemblyRequired/Z0Z_makeSomeModules.py +0 -325
  29. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +0 -314
  30. mapFolding/syntheticModules/numbaCount.py +0 -201
  31. mapFolding/theDao.py +0 -243
  32. mapfolding-0.11.1.dist-info/RECORD +0 -54
  33. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/entry_points.txt +0 -0
  34. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/licenses/LICENSE +0 -0
  35. {mapfolding-0.11.1.dist-info → mapfolding-0.11.3.dist-info}/top_level.txt +0 -0
mapFolding/someAssemblyRequired/synthesizeNumbaJob.py
@@ -1,314 +0,0 @@
- """
- Job-specific Numba Code Generation for Map Folding Calculations
-
- This module specializes in generating highly-optimized, single-purpose Numba modules
- for specific map folding calculation jobs. Unlike the general-purpose transformation
- in toolboxNumba.py, this module creates standalone Python modules optimized for a
- single map shape with statically-encoded parameters.
-
- The code generation assembly line focuses on:
-
- 1. Converting function parameters to initialized variables with concrete values.
- 2. Replacing dynamic computations with statically-known values.
- 3. Eliminating unused code paths and variables.
- 4. Adding progress tracking for long-running calculations.
- 5. Applying appropriate Numba optimizations for the specific calculation.
-
- This creates extremely fast, specialized implementations that can be run directly
- as Python scripts or further compiled into standalone executables.
- """
-
- from mapFolding import getPathFilenameFoldsTotal, raiseIfNoneGitHubIssueNumber3, The
- from mapFolding.someAssemblyRequired import (
-     ast_Identifier,
-     Be,
-     extractFunctionDef,
-     IfThis,
-     IngredientsFunction,
-     IngredientsModule,
-     LedgerOfImports,
-     Make,
-     NodeChanger,
-     NodeTourist,
-     str_nameDOTname,
-     Then,
-     write_astModule,
- )
- from mapFolding.someAssemblyRequired.RecipeJob import RecipeJob
- from mapFolding.someAssemblyRequired.toolboxNumba import parametersNumbaLight, SpicesJobNumba, decorateCallableWithNumba
- from mapFolding.someAssemblyRequired.transformationTools import dictionaryEstimates, makeInitializedComputationState
- from pathlib import PurePosixPath
- from typing import cast, NamedTuple
- from Z0Z_tools import autoDecodingRLE
- import ast
- """Synthesize one file to compute `foldsTotal` of `mapShape`."""
-
- list_IdentifiersNotUsedAllHARDCODED = ['concurrencyLimit', 'foldsTotal', 'mapShape',]
- list_IdentifiersNotUsedParallelSequentialHARDCODED = ['indexLeaf']
- list_IdentifiersNotUsedSequentialHARDCODED = ['foldGroups', 'taskDivisions', 'taskIndex',]
-
- list_IdentifiersReplacedHARDCODED = ['groupsOfFolds',]
-
- list_IdentifiersStaticValuesHARDCODED = ['dimensionsTotal', 'leavesTotal',]
-
- list_IdentifiersNotUsedHARDCODED = list_IdentifiersStaticValuesHARDCODED + list_IdentifiersReplacedHARDCODED + list_IdentifiersNotUsedAllHARDCODED + list_IdentifiersNotUsedParallelSequentialHARDCODED + list_IdentifiersNotUsedSequentialHARDCODED
-
- def addLauncherNumbaProgress(ingredientsModule: IngredientsModule, ingredientsFunction: IngredientsFunction, job: RecipeJob, spices: SpicesJobNumba) -> tuple[IngredientsModule, IngredientsFunction]:
-     """
-     Add progress tracking capabilities to a Numba-optimized function.
-
-     This function modifies both the module and the function to integrate Numba-compatible
-     progress tracking for long-running calculations. It performs several key transformations:
-
-     1. Adds a progress bar parameter to the function signature
-     2. Replaces counting increments with progress bar updates
-     3. Creates a launcher section that displays and updates progress
-     4. Configures file output to save results upon completion
-
-     The progress tracking is particularly important for map folding calculations
-     which can take hours or days to complete, providing visual feedback and
-     estimated completion times.
-
-     Parameters:
-         ingredientsModule: The module where the function is defined.
-         ingredientsFunction: The function to modify with progress tracking.
-         job: Configuration specifying shape details and output paths.
-         spices: Configuration specifying progress bar details.
-
-     Returns:
-         A tuple containing the modified module and function with progress tracking.
-     """
-     linesLaunch: str = f"""
- if __name__ == '__main__':
-     with ProgressBar(total={job.foldsTotalEstimated}, update_interval=2) as statusUpdate:
-         {job.countCallable}(statusUpdate)
-     foldsTotal = statusUpdate.n * {job.state.leavesTotal}
-     print('\\nmap {job.state.mapShape} =', foldsTotal)
-     writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
-     writeStream.write(str(foldsTotal))
-     writeStream.close()
- """
-     numba_progressPythonClass: ast_Identifier = 'ProgressBar'
-     numba_progressNumbaType: ast_Identifier = 'ProgressBarType'
-     ingredientsModule.imports.addImportFrom_asStr('numba_progress', numba_progressPythonClass)
-     ingredientsModule.imports.addImportFrom_asStr('numba_progress', numba_progressNumbaType)
-
-     ast_argNumbaProgress = ast.arg(arg=spices.numbaProgressBarIdentifier, annotation=ast.Name(id=numba_progressPythonClass, ctx=ast.Load()))
-     ingredientsFunction.astFunctionDef.args.args.append(ast_argNumbaProgress)
-
-     findThis = IfThis.isAugAssignAndTargetIs(IfThis.isName_Identifier(job.shatteredDataclass.countingVariableName.id))
-     doThat = Then.replaceWith(Make.Expr(Make.Call(Make.Attribute(Make.Name(spices.numbaProgressBarIdentifier),'update'),[Make.Constant(1)])))
-     countWithProgressBar = NodeChanger(findThis, doThat)
-     countWithProgressBar.visit(ingredientsFunction.astFunctionDef)
-
-     removeReturnStatement = NodeChanger(Be.Return, Then.removeIt)
-     removeReturnStatement.visit(ingredientsFunction.astFunctionDef)
-     ingredientsFunction.astFunctionDef.returns = Make.Constant(value=None)
-
-     ingredientsModule.appendLauncher(ast.parse(linesLaunch))
-
-     return ingredientsModule, ingredientsFunction
-
- def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: IngredientsFunction, job: RecipeJob) -> IngredientsFunction:
-     """
-     Convert function parameters into initialized variables with concrete values.
-
-     This function implements a critical transformation that converts function parameters
-     into statically initialized variables in the function body. This enables several
-     optimizations:
-
-     1. Eliminating parameter passing overhead.
-     2. Embedding concrete values directly in the code.
-     3. Allowing Numba to optimize based on known value characteristics.
-     4. Simplifying function signatures for specialized use cases.
-
-     The function handles different data types (scalars, arrays, custom types) appropriately,
-     replacing abstract parameter references with concrete values from the computation state.
-     It also removes unused parameters and variables to eliminate dead code.
-
-     Parameters:
-         ingredientsFunction: The function to transform.
-         job: Recipe containing concrete values for parameters and field metadata.
-
-     Returns:
-         The modified function with parameters converted to initialized variables.
-     """
-     ingredientsFunction.imports.update(job.shatteredDataclass.imports)
-
-     list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs
-     list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
-     listName: list[ast.Name] = []
-     NodeTourist(Be.Name, Then.appendTo(listName)).visit(ingredientsFunction.astFunctionDef)
-     list_Identifiers: list[ast_Identifier] = [astName.id for astName in listName]
-     list_IdentifiersNotUsed: list[ast_Identifier] = list(set(list_arg_arg) - set(list_Identifiers))
-
-     for ast_arg in list_argCuzMyBrainRefusesToThink:
-         if ast_arg.arg in job.shatteredDataclass.field2AnnAssign:
-             if ast_arg.arg in list_IdentifiersNotUsed:
-                 pass
-             else:
-                 ImaAnnAssign, elementConstructor = job.shatteredDataclass.Z0Z_field2AnnAssign[ast_arg.arg]
-                 match elementConstructor:
-                     case 'scalar':
-                         cast(ast.Constant, cast(ast.Call, ImaAnnAssign.value).args[0]).value = int(job.state.__dict__[ast_arg.arg])
-                     case 'array':
-                         dataAsStrRLE: str = autoDecodingRLE(job.state.__dict__[ast_arg.arg], True)
-                         dataAs_astExpr: ast.expr = cast(ast.Expr, ast.parse(dataAsStrRLE).body[0]).value
-                         cast(ast.Call, ImaAnnAssign.value).args = [dataAs_astExpr]
-                     case _:
-                         list_exprDOTannotation: list[ast.expr] = []
-                         list_exprDOTvalue: list[ast.expr] = []
-                         for dimension in job.state.mapShape:
-                             list_exprDOTannotation.append(Make.Name(elementConstructor))
-                             list_exprDOTvalue.append(Make.Call(Make.Name(elementConstructor), [Make.Constant(dimension)]))
-                         cast(ast.Tuple, cast(ast.Subscript, cast(ast.AnnAssign, ImaAnnAssign).annotation).slice).elts = list_exprDOTannotation
-                         cast(ast.Tuple, ImaAnnAssign.value).elts = list_exprDOTvalue
-
-                 ingredientsFunction.astFunctionDef.body.insert(0, ImaAnnAssign)
-
-         findThis = IfThis.is_arg_Identifier(ast_arg.arg)
-         remove_arg = NodeChanger(findThis, Then.removeIt)
-         remove_arg.visit(ingredientsFunction.astFunctionDef)
-
-     ast.fix_missing_locations(ingredientsFunction.astFunctionDef)
-     return ingredientsFunction
-
- def makeJobNumba(job: RecipeJob, spices: SpicesJobNumba) -> None:
-     """
-     Generate a highly-optimized, single-purpose Numba module for a specific map shape.
-
-     This function implements the complete transformation assembly line for creating a
-     standalone, specialized implementation for calculating map folding solutions for
-     a specific shape. The process includes:
-
-     1. Extracting the counting function from the source module
-     2. Removing unused code paths based on static analysis
-     3. Replacing dynamic variables with concrete values
-     4. Converting parameters to initialized variables
-     5. Adding progress tracking if requested
-     6. Applying Numba optimizations and type specifications
-     7. Writing the final module to the filesystem
-
-     The resulting Python module is both human-readable and extraordinarily efficient,
-     with all shape-specific optimizations statically encoded. This creates specialized
-     implementations that can be orders of magnitude faster than general-purpose code.
-
-     Parameters:
-         job: Configuration specifying the target shape, paths, and computation state.
-         spices: Configuration specifying Numba and progress tracking options.
-     """
-     astFunctionDef = extractFunctionDef(job.source_astModule, job.countCallable)
-     if not astFunctionDef: raise raiseIfNoneGitHubIssueNumber3
-     ingredientsCount: IngredientsFunction = IngredientsFunction(astFunctionDef, LedgerOfImports())
-
-     # Remove `foldGroups` and any other unused statements, so you can dynamically determine which variables are not used
-     findThis = IfThis.isAssignAndTargets0Is(IfThis.isSubscript_Identifier('foldGroups'))
-     doThat = Then.removeIt
-     remove_foldGroups = NodeChanger(findThis, doThat)
-     remove_foldGroups.visit(ingredientsCount.astFunctionDef)
-
-     # replace identifiers with static values with their values, so you can dynamically determine which variables are not used
-     list_IdentifiersStaticValues = list_IdentifiersStaticValuesHARDCODED
-     for identifier in list_IdentifiersStaticValues:
-         findThis = IfThis.isName_Identifier(identifier)
-         doThat = Then.replaceWith(Make.Constant(int(job.state.__dict__[identifier])))
-         NodeChanger(findThis, doThat).visit(ingredientsCount.astFunctionDef)
-
-     ingredientsModule = IngredientsModule()
-     # This launcher eliminates the use of one identifier, so run it now and you can dynamically determine which variables are not used
-     if spices.useNumbaProgressBar:
-         ingredientsModule, ingredientsCount = addLauncherNumbaProgress(ingredientsModule, ingredientsCount, job, spices)
-         spices.parametersNumba['nogil'] = True
-     else:
-         linesLaunch: str = f"""
- if __name__ == '__main__':
-     import time
-     timeStart = time.perf_counter()
-     foldsTotal = {job.countCallable}() * {job.state.leavesTotal}
-     print(time.perf_counter() - timeStart)
-     print('\\nmap {job.state.mapShape} =', foldsTotal)
-     writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
-     writeStream.write(str(foldsTotal))
-     writeStream.close()
- """
-         # from mapFolding.oeis import getFoldsTotalKnown
-         # print(foldsTotal == getFoldsTotalKnown({job.state.mapShape}))
-         ingredientsModule.appendLauncher(ast.parse(linesLaunch))
-     changeReturnParallelCallable = NodeChanger(Be.Return, Then.replaceWith(Make.Return(job.shatteredDataclass.countingVariableName)))
-     changeReturnParallelCallable.visit(ingredientsCount.astFunctionDef)
-     ingredientsCount.astFunctionDef.returns = job.shatteredDataclass.countingVariableAnnotation
-
-     ingredientsCount = move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsCount, job)
-
-     class DatatypeConfig(NamedTuple):
-         Z0Z_module: str_nameDOTname
-         fml: ast_Identifier
-         Z0Z_type_name: ast_Identifier
-         Z0Z_asname: ast_Identifier | None = None
-
-     listDatatypeConfigs = [
-         DatatypeConfig(fml='DatatypeLeavesTotal', Z0Z_module='numba', Z0Z_type_name='uint8'),
-         DatatypeConfig(fml='DatatypeElephino', Z0Z_module='numba', Z0Z_type_name='uint16'),
-         DatatypeConfig(fml='DatatypeFoldsTotal', Z0Z_module='numba', Z0Z_type_name='uint64'),
-     ]
-
-     for datatypeConfig in listDatatypeConfigs:
-         ingredientsModule.imports.addImportFrom_asStr(datatypeConfig.Z0Z_module, datatypeConfig.Z0Z_type_name)
-         statement = Make.Assign(
-             [Make.Name(datatypeConfig.fml, ast.Store())],
-             Make.Name(datatypeConfig.Z0Z_type_name)
-         )
-         ingredientsModule.appendPrologue(statement=statement)
-
-     ingredientsCount.imports.removeImportFromModule('mapFolding.theSSOT')
-
-     listNumPyTypeConfigs = [
-         DatatypeConfig(fml='Array1DLeavesTotal', Z0Z_module='numpy', Z0Z_type_name='uint8', Z0Z_asname='Array1DLeavesTotal'),
-         DatatypeConfig(fml='Array1DElephino', Z0Z_module='numpy', Z0Z_type_name='uint16', Z0Z_asname='Array1DElephino'),
-         DatatypeConfig(fml='Array3D', Z0Z_module='numpy', Z0Z_type_name='uint8', Z0Z_asname='Array3D'),
-     ]
-
-     for typeConfig in listNumPyTypeConfigs:
-         ingredientsCount.imports.removeImportFrom(typeConfig.Z0Z_module, None, typeConfig.fml)
-         ingredientsCount.imports.addImportFrom_asStr(typeConfig.Z0Z_module, typeConfig.Z0Z_type_name, typeConfig.Z0Z_asname)
-
-     ingredientsCount.astFunctionDef.decorator_list = [] # TODO low-priority, handle this more elegantly
-     # TODO when I add the function signature in numba style back to the decorator, the logic needs to handle `ProgressBarType:`
-     ingredientsCount = decorateCallableWithNumba(ingredientsCount, spices.parametersNumba)
-
-     ingredientsModule.appendIngredientsFunction(ingredientsCount)
-     write_astModule(ingredientsModule, job.pathFilenameModule, job.packageIdentifier)
-
- """
- Overview
- - the code starts life in theDao.py, which has many optimizations;
- - `makeNumbaOptimizedFlow` increase optimization especially by using numba;
- - `makeJobNumba` increases optimization especially by limiting its capabilities to just one set of parameters
- - the synthesized module must run well as a standalone interpreted-Python script
- - the next major optimization step will (probably) be to use the module synthesized by `makeJobNumba` to compile a standalone executable
- - Nevertheless, at each major optimization step, the code is constantly being improved and optimized, so everything must be well organized (read: semantic) and able to handle a range of arbitrary upstream and not disrupt downstream transformations
-
- Necessary
- - Move the function's parameters to the function body,
- - initialize identifiers with their state types and values,
-
- Optimizations
- - replace static-valued identifiers with their values
- - narrowly focused imports
-
- Minutia
- - do not use `with` statement inside numba jitted code, except to use numba's obj mode
- """
-
- if __name__ == '__main__':
-     mapShape = (1,46)
-     state = makeInitializedComputationState(mapShape)
-     # foldsTotalEstimated = getFoldsTotalKnown(state.mapShape) // state.leavesTotal
-     # foldsTotalEstimated = dictionaryEstimates[state.mapShape] // state.leavesTotal
-     foldsTotalEstimated = 0
-     pathModule = PurePosixPath(The.pathPackage, 'jobs')
-     pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(state.mapShape, pathModule))
-     aJob = RecipeJob(state, foldsTotalEstimated, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal)
-     spices = SpicesJobNumba(useNumbaProgressBar=False, parametersNumba=parametersNumbaLight)
-     # spices = SpicesJobNumba()
-     makeJobNumba(aJob, spices)
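For context, the core transformation the removed module performed, replacing identifiers whose values are statically known with literal constants before the function is handed to Numba, can be illustrated with the standard-library `ast` module alone. This is a minimal sketch, not the package's `NodeChanger`/`Make`/`IngredientsFunction` toolkit; the function, identifier, and value (`countSomething`, `leavesTotal`, 46) are hypothetical stand-ins.

```python
import ast

source = """
def countSomething(leavesTotal):
    total = 0
    for leaf in range(leavesTotal):
        total += leavesTotal - leaf
    return total
"""

class InlineStaticValue(ast.NodeTransformer):
    """Replace every read of one identifier with a literal constant."""

    def __init__(self, identifier: str, value: int) -> None:
        self.identifier = identifier
        self.value = value

    def visit_Name(self, node: ast.Name) -> ast.AST:
        # Only rewrite reads (Load context); assignments keep their targets.
        if isinstance(node.ctx, ast.Load) and node.id == self.identifier:
            return ast.copy_location(ast.Constant(value=self.value), node)
        return node

tree = ast.parse(source)
tree = InlineStaticValue('leavesTotal', 46).visit(tree)
ast.fix_missing_locations(tree)
print(ast.unparse(tree))  # every read of leavesTotal is now the literal 46
```

The deleted `makeJobNumba` additionally removed the now-redundant parameter from the signature and inserted initializers for array-valued state, which the sketch above does not attempt.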
mapFolding/syntheticModules/numbaCount.py
@@ -1,201 +0,0 @@
- from concurrent.futures import Future as ConcurrentFuture, ProcessPoolExecutor
- from copy import deepcopy
- from mapFolding.theSSOT import Array1DElephino, Array1DFoldsTotal, Array1DLeavesTotal, Array3D, ComputationState, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal
- from numba import jit
-
- def countInitialize(state: ComputationState) -> ComputationState:
-     while state.gap1ndex == 0:
-         if state.leaf1ndex <= 1 or state.leafBelow[0] == 1:
-             state.dimensionsUnconstrained = state.dimensionsTotal
-             state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
-             state.indexDimension = 0
-             while state.indexDimension < state.dimensionsTotal:
-                 state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leaf1ndex]
-                 if state.leafConnectee == state.leaf1ndex:
-                     state.dimensionsUnconstrained -= 1
-                 else:
-                     while state.leafConnectee != state.leaf1ndex:
-                         state.gapsWhere[state.gap1ndexCeiling] = state.leafConnectee
-                         if state.countDimensionsGapped[state.leafConnectee] == 0:
-                             state.gap1ndexCeiling += 1
-                         state.countDimensionsGapped[state.leafConnectee] += 1
-                         state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leafBelow[state.leafConnectee]]
-                 state.indexDimension += 1
-             if not state.dimensionsUnconstrained:
-                 state.indexLeaf = 0
-                 while state.indexLeaf < state.leaf1ndex:
-                     state.gapsWhere[state.gap1ndexCeiling] = state.indexLeaf
-                     state.gap1ndexCeiling += 1
-                     state.indexLeaf += 1
-             state.indexMiniGap = state.gap1ndex
-             while state.indexMiniGap < state.gap1ndexCeiling:
-                 state.gapsWhere[state.gap1ndex] = state.gapsWhere[state.indexMiniGap]
-                 if state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] == state.dimensionsUnconstrained:
-                     state.gap1ndex += 1
-                 state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] = 0
-                 state.indexMiniGap += 1
-         if state.leaf1ndex > 0:
-             state.gap1ndex -= 1
-             state.leafAbove[state.leaf1ndex] = state.gapsWhere[state.gap1ndex]
-             state.leafBelow[state.leaf1ndex] = state.leafBelow[state.leafAbove[state.leaf1ndex]]
-             state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leaf1ndex
-             state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leaf1ndex
-             state.gapRangeStart[state.leaf1ndex] = state.gap1ndex
-             state.leaf1ndex += 1
-     return state
-
- @jit(_nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=False, no_cpython_wrapper=False, nopython=True, parallel=False)
- def countParallel(leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, foldGroups: Array1DFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, groupsOfFolds: DatatypeFoldsTotal, indexDimension: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, taskIndex: DatatypeLeavesTotal) -> DatatypeFoldsTotal:
-     while leaf1ndex > 0:
-         if leaf1ndex <= 1 or leafBelow[0] == 1:
-             if leaf1ndex > leavesTotal:
-                 groupsOfFolds += 1
-             else:
-                 dimensionsUnconstrained = dimensionsTotal
-                 gap1ndexCeiling = gapRangeStart[leaf1ndex - 1]
-                 indexDimension = 0
-                 while indexDimension < dimensionsTotal:
-                     if connectionGraph[indexDimension, leaf1ndex, leaf1ndex] == leaf1ndex:
-                         dimensionsUnconstrained -= 1
-                     else:
-                         leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]
-                         while leafConnectee != leaf1ndex:
-                             if leaf1ndex != taskDivisions or leafConnectee % taskDivisions == taskIndex:
-                                 gapsWhere[gap1ndexCeiling] = leafConnectee
-                                 if countDimensionsGapped[leafConnectee] == 0:
-                                     gap1ndexCeiling += 1
-                                 countDimensionsGapped[leafConnectee] += 1
-                             leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
-                     indexDimension += 1
-                 indexMiniGap = gap1ndex
-                 while indexMiniGap < gap1ndexCeiling:
-                     gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
-                     if countDimensionsGapped[gapsWhere[indexMiniGap]] == dimensionsUnconstrained:
-                         gap1ndex += 1
-                     countDimensionsGapped[gapsWhere[indexMiniGap]] = 0
-                     indexMiniGap += 1
-         while leaf1ndex > 0 and gap1ndex == gapRangeStart[leaf1ndex - 1]:
-             leaf1ndex -= 1
-             leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
-             leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
-         if leaf1ndex > 0:
-             gap1ndex -= 1
-             leafAbove[leaf1ndex] = gapsWhere[gap1ndex]
-             leafBelow[leaf1ndex] = leafBelow[leafAbove[leaf1ndex]]
-             leafBelow[leafAbove[leaf1ndex]] = leaf1ndex
-             leafAbove[leafBelow[leaf1ndex]] = leaf1ndex
-             gapRangeStart[leaf1ndex] = gap1ndex
-             leaf1ndex += 1
-     foldGroups[taskIndex] = groupsOfFolds
-     return groupsOfFolds
-
- @jit(_nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=False, no_cpython_wrapper=False, nopython=True, parallel=False)
- def countSequential(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, concurrencyLimit: DatatypeElephino, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, foldGroups: Array1DFoldsTotal, foldsTotal: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, groupsOfFolds: DatatypeFoldsTotal, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, taskIndex: DatatypeLeavesTotal) -> tuple[tuple[DatatypeLeavesTotal, ...], DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, Array3D, DatatypeLeavesTotal, Array1DLeavesTotal, DatatypeLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array1DFoldsTotal, DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal]:
-     while leaf1ndex > 0:
-         if leaf1ndex <= 1 or leafBelow[0] == 1:
-             if leaf1ndex > leavesTotal:
-                 groupsOfFolds += 1
-             else:
-                 dimensionsUnconstrained = dimensionsTotal
-                 gap1ndexCeiling = gapRangeStart[leaf1ndex - 1]
-                 indexDimension = 0
-                 while indexDimension < dimensionsTotal:
-                     leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]
-                     if leafConnectee == leaf1ndex:
-                         dimensionsUnconstrained -= 1
-                     else:
-                         while leafConnectee != leaf1ndex:
-                             gapsWhere[gap1ndexCeiling] = leafConnectee
-                             if countDimensionsGapped[leafConnectee] == 0:
-                                 gap1ndexCeiling += 1
-                             countDimensionsGapped[leafConnectee] += 1
-                             leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
-                     indexDimension += 1
-                 indexMiniGap = gap1ndex
-                 while indexMiniGap < gap1ndexCeiling:
-                     gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
-                     if countDimensionsGapped[gapsWhere[indexMiniGap]] == dimensionsUnconstrained:
-                         gap1ndex += 1
-                     countDimensionsGapped[gapsWhere[indexMiniGap]] = 0
-                     indexMiniGap += 1
-         while leaf1ndex > 0 and gap1ndex == gapRangeStart[leaf1ndex - 1]:
-             leaf1ndex -= 1
-             leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
-             leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
-         if leaf1ndex == 3 and groupsOfFolds:
-             groupsOfFolds *= 2
-             break
-         if leaf1ndex > 0:
-             gap1ndex -= 1
-             leafAbove[leaf1ndex] = gapsWhere[gap1ndex]
-             leafBelow[leaf1ndex] = leafBelow[leafAbove[leaf1ndex]]
-             leafBelow[leafAbove[leaf1ndex]] = leaf1ndex
-             leafAbove[leafBelow[leaf1ndex]] = leaf1ndex
-             gapRangeStart[leaf1ndex] = gap1ndex
-             leaf1ndex += 1
-     foldGroups[taskIndex] = groupsOfFolds
-     return (mapShape, leavesTotal, taskDivisions, concurrencyLimit, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
-
- def doTheNeedful(state: ComputationState) -> ComputationState:
-     state = countInitialize(state)
-     if state.taskDivisions > 0:
-         dictionaryConcurrency: dict[int, ConcurrentFuture[ComputationState]] = {}
-         stateParallel = deepcopy(state)
-         with ProcessPoolExecutor(stateParallel.concurrencyLimit) as concurrencyManager:
-             for indexSherpa in range(stateParallel.taskDivisions):
-                 state = deepcopy(stateParallel)
-                 state.taskIndex = indexSherpa
-                 mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
-                 leavesTotal: DatatypeLeavesTotal = state.leavesTotal
-                 taskDivisions: DatatypeLeavesTotal = state.taskDivisions
-                 concurrencyLimit: DatatypeElephino = state.concurrencyLimit
-                 connectionGraph: Array3D = state.connectionGraph
-                 dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
-                 countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
-                 dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
-                 gapRangeStart: Array1DElephino = state.gapRangeStart
-                 gapsWhere: Array1DLeavesTotal = state.gapsWhere
-                 leafAbove: Array1DLeavesTotal = state.leafAbove
-                 leafBelow: Array1DLeavesTotal = state.leafBelow
-                 foldGroups: Array1DFoldsTotal = state.foldGroups
-                 foldsTotal: DatatypeFoldsTotal = state.foldsTotal
-                 gap1ndex: DatatypeElephino = state.gap1ndex
-                 gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
-                 groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
-                 indexDimension: DatatypeLeavesTotal = state.indexDimension
-                 indexLeaf: DatatypeLeavesTotal = state.indexLeaf
-                 indexMiniGap: DatatypeElephino = state.indexMiniGap
-                 leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
-                 leafConnectee: DatatypeLeavesTotal = state.leafConnectee
-                 taskIndex: DatatypeLeavesTotal = state.taskIndex
-                 dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(countParallel, leavesTotal, taskDivisions, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
-             for indexSherpa in range(stateParallel.taskDivisions):
-                 stateParallel.foldGroups[indexSherpa] = dictionaryConcurrency[indexSherpa].result()
-         state = stateParallel
-     else:
-         mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
-         leavesTotal: DatatypeLeavesTotal = state.leavesTotal
-         taskDivisions: DatatypeLeavesTotal = state.taskDivisions
-         concurrencyLimit: DatatypeElephino = state.concurrencyLimit
-         connectionGraph: Array3D = state.connectionGraph
-         dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
-         countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
-         dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
-         gapRangeStart: Array1DElephino = state.gapRangeStart
-         gapsWhere: Array1DLeavesTotal = state.gapsWhere
-         leafAbove: Array1DLeavesTotal = state.leafAbove
-         leafBelow: Array1DLeavesTotal = state.leafBelow
-         foldGroups: Array1DFoldsTotal = state.foldGroups
-         foldsTotal: DatatypeFoldsTotal = state.foldsTotal
-         gap1ndex: DatatypeElephino = state.gap1ndex
-         gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
-         groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
-         indexDimension: DatatypeLeavesTotal = state.indexDimension
-         indexLeaf: DatatypeLeavesTotal = state.indexLeaf
-         indexMiniGap: DatatypeElephino = state.indexMiniGap
-         leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
-         leafConnectee: DatatypeLeavesTotal = state.leafConnectee
-         taskIndex: DatatypeLeavesTotal = state.taskIndex
-         mapShape, leavesTotal, taskDivisions, concurrencyLimit, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex = countSequential(mapShape, leavesTotal, taskDivisions, concurrencyLimit, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
-         state = ComputationState(mapShape=mapShape, leavesTotal=leavesTotal, taskDivisions=taskDivisions, concurrencyLimit=concurrencyLimit, countDimensionsGapped=countDimensionsGapped, dimensionsUnconstrained=dimensionsUnconstrained, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow, foldGroups=foldGroups, foldsTotal=foldsTotal, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, groupsOfFolds=groupsOfFolds, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, taskIndex=taskIndex)
-     return state
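The dispatch pattern in the deleted `doTheNeedful` (one future per `taskIndex`, partial counts combined at the end) can be illustrated with `concurrent.futures` alone. This is a minimal sketch under stated assumptions: `countOnePartition` is a trivial stand-in for the real `countParallel`, and the names and the 1000-item workload are hypothetical.

```python
from concurrent.futures import Future, ProcessPoolExecutor

def countOnePartition(taskIndex: int, taskDivisions: int) -> int:
    """Stand-in for countParallel: count only the items assigned to this task index."""
    return sum(1 for item in range(1000) if item % taskDivisions == taskIndex)

def dispatchPartitions(taskDivisions: int, concurrencyLimit: int) -> int:
    """Submit one future per task index, then combine the partial counts."""
    dictionaryConcurrency: dict[int, Future[int]] = {}
    with ProcessPoolExecutor(concurrencyLimit) as concurrencyManager:
        for taskIndex in range(taskDivisions):
            dictionaryConcurrency[taskIndex] = concurrencyManager.submit(countOnePartition, taskIndex, taskDivisions)
        # Gather each partition's result, analogous to filling foldGroups[taskIndex].
        return sum(dictionaryConcurrency[taskIndex].result() for taskIndex in range(taskDivisions))

if __name__ == '__main__':
    print(dispatchPartitions(taskDivisions=4, concurrencyLimit=4))  # prints 1000: each item is counted exactly once
```

In 0.11.3 this responsibility moves to the new mapFolding/syntheticModules/countParallel.py listed above, which is why the standalone numbaCount.py module was removed.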